bugged
stringlengths 6
599k
| fixed
stringlengths 10
599k
| __index_level_0__
int64 0
1.13M
|
|---|---|---|
public PlayPenComponent getComponent(int i) { return children.get(i); }
|
public PlayPenComponent getComponent(int i) { if (i < children.size()){ return children.get(i); } else { return relations.get(i-children.size()); } }
| 1,112,121
|
public int getComponentCount() { return children.size(); }
|
public int getComponentCount() { return children.size()+relations.size(); }
| 1,112,123
|
public void remove(int j) { PlayPenComponent c = children.get(j); Rectangle r = c.getBounds(); c.removePlayPenComponentListener(playPenComponentEventPassthrough); c.removeSelectionListener(getOwner()); children.remove(j); getOwner().repaint(r); }
|
public void remove(int j) { PlayPenComponent c; if (j < children.size()) { c= children.get(j); } else { c = relations.get(j-children.size()); } Rectangle r = c.getBounds(); c.removePlayPenComponentListener(playPenComponentEventPassthrough); c.removeSelectionListener(getOwner()); children.remove(j); getOwner().repaint(r); }
| 1,112,124
|
public void remove(int j) { PlayPenComponent c = children.get(j); Rectangle r = c.getBounds(); c.removePlayPenComponentListener(playPenComponentEventPassthrough); c.removeSelectionListener(getOwner()); children.remove(j); getOwner().repaint(r); }
|
public void remove(int j) { PlayPenComponent c = children.get(j); Rectangle r = c.getBounds(); c.removePlayPenComponentListener(playPenComponentEventPassthrough); c.removeSelectionListener(getOwner()); if (j < children.size()) { children.remove(j); } else { relations.remove(j-children.size()); } getOwner().repaint(r); }
| 1,112,125
|
public CheckDataPanel(HaploView hv){ this(hv.theData); this.hv = hv; setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); JPanel missingPanel = new JPanel(); JLabel countsLabel; countsLabel = new JLabel("Using " + theData.numSingletons + " singletons and " + theData.numTrios + " trios from " + theData.numPeds + " families."); if (theData.numTrios + theData.numSingletons == 0){ countsLabel.setForeground(Color.red); } countsLabel.setAlignmentX(Component.CENTER_ALIGNMENT); missingPanel.add(countsLabel); JButton missingButton = new JButton("Show Excluded Individuals"); JButton individualButton = new JButton("Individual Summary"); individualButton.setEnabled(true); JButton mendelButton = new JButton("Mendel Errors"); mendelButton.setEnabled(true); if (hv.theData.getPedFile().getAxedPeople().size() == 0){ missingButton.setEnabled(false); } missingButton.addActionListener(this); missingPanel.add(missingButton); individualButton.addActionListener(this); missingPanel.add(individualButton); mendelButton.addActionListener(this); missingPanel.add(mendelButton); missingPanel.setBorder(BorderFactory.createLineBorder(Color.black)); JPanel extraPanel = new JPanel(); extraPanel.add(missingPanel); CheckDataTableSorter sorter = new CheckDataTableSorter(tableModel); table = new JTable(sorter); sorter.setTableHeader(table.getTableHeader()); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Long"), renderer); table.setDefaultRenderer(Class.forName("java.lang.String"),renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(30); table.getColumnModel().getColumn(0).setMinWidth(30); if (theData.infoKnown){ table.getColumnModel().getColumn(1).setMinWidth(100); table.getColumnModel().getColumn(2).setMinWidth(60); } JScrollPane tableScroller = new 
JScrollPane(table); //changed 600 to 800 and tableScroller.getPreferredSize().height to Integer.MAX_VALUE. tableScroller.setMaximumSize(new Dimension(800, Integer.MAX_VALUE)); add(extraPanel); add(tableScroller); if (theData.dupsToBeFlagged){ JOptionPane.showMessageDialog(hv, "Two or more SNPs have identical position. They have been flagged in yellow\n"+ "and the less completely genotyped duplicate has been deselected.", "Duplicate SNPs", JOptionPane.INFORMATION_MESSAGE); } if (theData.dupNames){ JOptionPane.showMessageDialog(hv, "Two or more SNPs have identical names. They have been renamed with\n"+ ".X extensions where X is an integer unique to each duplicate.", "Duplicate SNPs", JOptionPane.INFORMATION_MESSAGE); } }
|
public CheckDataPanel(HaploView hv){ this(hv.theData); this.hv = hv; setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); JPanel missingPanel = new JPanel(); JLabel countsLabel; countsLabel = new JLabel("Using " + theData.numSingletons + " singletons and " + theData.numTrios + " trios from " + theData.numPeds + " families."); if (theData.numTrios + theData.numSingletons == 0){ countsLabel.setForeground(Color.red); } countsLabel.setAlignmentX(Component.CENTER_ALIGNMENT); missingPanel.add(countsLabel); JButton missingButton = new JButton("Show Excluded Individuals"); JButton individualButton = new JButton("Individual Summary"); individualButton.setEnabled(true); JButton mendelButton = new JButton("Mendel Errors"); mendelButton.setEnabled(true); if (hv.theData.getPedFile().getAxedPeople().size() == 0){ missingButton.setEnabled(false); } missingButton.addActionListener(this); missingPanel.add(missingButton); individualButton.addActionListener(this); missingPanel.add(individualButton); mendelButton.addActionListener(this); missingPanel.add(mendelButton); missingPanel.setBorder(BorderFactory.createLineBorder(Color.black)); JPanel extraPanel = new JPanel(); extraPanel.add(missingPanel); sorter = new CheckDataTableSorter(tableModel); table = new JTable(sorter); sorter.setTableHeader(table.getTableHeader()); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Long"), renderer); table.setDefaultRenderer(Class.forName("java.lang.String"),renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(30); table.getColumnModel().getColumn(0).setMinWidth(30); if (theData.infoKnown){ table.getColumnModel().getColumn(1).setMinWidth(100); table.getColumnModel().getColumn(2).setMinWidth(60); } JScrollPane tableScroller = new JScrollPane(table); 
//changed 600 to 800 and tableScroller.getPreferredSize().height to Integer.MAX_VALUE. tableScroller.setMaximumSize(new Dimension(800, Integer.MAX_VALUE)); add(extraPanel); add(tableScroller); if (theData.dupsToBeFlagged){ JOptionPane.showMessageDialog(hv, "Two or more SNPs have identical position. They have been flagged in yellow\n"+ "and the less completely genotyped duplicate has been deselected.", "Duplicate SNPs", JOptionPane.INFORMATION_MESSAGE); } if (theData.dupNames){ JOptionPane.showMessageDialog(hv, "Two or more SNPs have identical names. They have been renamed with\n"+ ".X extensions where X is an integer unique to each duplicate.", "Duplicate SNPs", JOptionPane.INFORMATION_MESSAGE); } }
| 1,112,128
|
public void saveDprimeToText(File dumpDprimeFile) throws IOException{ FileWriter saveDprimeWriter = new FileWriter(dumpDprimeFile); if (infoKnown){ saveDprimeWriter.write("L1\tL2\tD'\tLOD\tr^2\tCIlow\tCIhi\tDist\n"); long dist; for (int i = 0; i < filteredDPrimeTable.length; i++){ for (int j = 0; j < filteredDPrimeTable[i].length; j++){ //many "slots" in table aren't filled in because it is a 1/2 matrix if (i < j){ if(filteredDPrimeTable[i][j] != null) { dist = (Chromosome.getFilteredMarker(j)).getPosition() - (Chromosome.getFilteredMarker(i)).getPosition(); saveDprimeWriter.write(Chromosome.getFilteredMarker(i).getName() + "\t" + Chromosome.getFilteredMarker(j).getName() + "\t" + filteredDPrimeTable[i][j].toString() + "\t" + dist + "\n"); } } } } }else{ saveDprimeWriter.write("L1\tL2\tD'\tLOD\tr^2\tCIlow\tCIhi\n"); for (int i = 0; i < filteredDPrimeTable.length; i++){ for (int j = 0; j < filteredDPrimeTable[i].length; j++){ //many "slots" in table aren't filled in because it is a 1/2 matrix if (i < j){ if(filteredDPrimeTable[i][j] != null) { saveDprimeWriter.write((i+1) + "\t" + (j+1) + "\t" + filteredDPrimeTable[i][j] + "\n"); } } } } } saveDprimeWriter.close(); }
|
public void saveDprimeToText(File dumpDprimeFile) throws IOException{ FileWriter saveDprimeWriter = new FileWriter(dumpDprimeFile); if (infoKnown){ saveDprimeWriter.write("L1\tL2\tD'\tLOD\tr^2\tCIlow\tCIhi\tDist\n"); long dist; for (int i = 0; i < filteredDPrimeTable.length; i++){ for (int j = 0; j < filteredDPrimeTable[i].length; j++){ //many "slots" in table aren't filled in because it is a 1/2 matrix if (i < j){ if(filteredDPrimeTable[i][j] != null) { dist = (Chromosome.getFilteredMarker(j)).getPosition() - (Chromosome.getFilteredMarker(i)).getPosition(); saveDprimeWriter.write(Chromosome.getFilteredMarker(i).getName() + "\t" + Chromosome.getFilteredMarker(j).getName() + "\t" + filteredDPrimeTable[i][j].toString() + "\t" + dist + "\n"); } } } } }else{ saveDprimeWriter.write("L1\tL2\tD'\tLOD\tr^2\tCIlow\tCIhi\n"); for (int i = 0; i < filteredDPrimeTable.length; i++){ for (int j = 0; j < filteredDPrimeTable[i].length; j++){ //many "slots" in table aren't filled in because it is a 1/2 matrix if (i < j){ if(filteredDPrimeTable[i][j] != null) { saveDprimeWriter.write((Chromosome.realIndex[i]+1) + "\t" + (Chromosome.realIndex[j]+1) + "\t" + filteredDPrimeTable[i][j] + "\n"); } } } } } saveDprimeWriter.close(); }
| 1,112,130
|
public void saveHapsToText(Haplotype[][] finishedHaplos, double[] multidprime, File saveHapsFile) throws IOException{ if (finishedHaplos == null) return; NumberFormat nf = NumberFormat.getInstance(Locale.US); nf.setMinimumFractionDigits(3); nf.setMaximumFractionDigits(3); //open file for saving haps text FileWriter saveHapsWriter = new FileWriter(saveHapsFile); //go through each block and print haplos for (int i = 0; i < finishedHaplos.length; i++){ //write block header saveHapsWriter.write("BLOCK " + (i+1) + ". MARKERS:"); int[] markerNums = finishedHaplos[i][0].getMarkers(); boolean[] tags = finishedHaplos[i][0].getTags(); for (int j = 0; j < markerNums.length; j++){ saveHapsWriter.write(" " + (markerNums[j]+1)); if (tags[j]) saveHapsWriter.write("!"); } saveHapsWriter.write("\n"); //write haps and crossover percentages for (int j = 0; j < finishedHaplos[i].length; j++){ int[] theGeno = finishedHaplos[i][j].getGeno(); StringBuffer theHap = new StringBuffer(theGeno.length); for (int k = 0; k < theGeno.length; k++){ theHap.append(theGeno[k]); } saveHapsWriter.write(theHap.toString() + " (" + nf.format(finishedHaplos[i][j].getPercentage()) + ")"); if (i < finishedHaplos.length-1){ saveHapsWriter.write("\t|"); for (int crossCount = 0; crossCount < finishedHaplos[i+1].length; crossCount++){ if (crossCount != 0) saveHapsWriter.write("\t"); saveHapsWriter.write(nf.format(finishedHaplos[i][j].getCrossover(crossCount))); } saveHapsWriter.write("|"); } saveHapsWriter.write("\n"); } if (i < finishedHaplos.length - 1){ saveHapsWriter.write("Multiallelic Dprime: " + multidprime[i] + "\n"); }else{ saveHapsWriter.write("\n"); } } saveHapsWriter.close(); }
|
public void saveHapsToText(Haplotype[][] finishedHaplos, double[] multidprime, File saveHapsFile) throws IOException{ if (finishedHaplos == null) return; NumberFormat nf = NumberFormat.getInstance(Locale.US); nf.setMinimumFractionDigits(3); nf.setMaximumFractionDigits(3); //open file for saving haps text FileWriter saveHapsWriter = new FileWriter(saveHapsFile); //go through each block and print haplos for (int i = 0; i < finishedHaplos.length; i++){ //write block header saveHapsWriter.write("BLOCK " + (i+1) + ". MARKERS:"); int[] markerNums = finishedHaplos[i][0].getMarkers(); boolean[] tags = finishedHaplos[i][0].getTags(); for (int j = 0; j < markerNums.length; j++){ saveHapsWriter.write(" " + (Chromosome.realIndex[markerNums[j]]+1)); if (tags[j]) saveHapsWriter.write("!"); } saveHapsWriter.write("\n"); //write haps and crossover percentages for (int j = 0; j < finishedHaplos[i].length; j++){ int[] theGeno = finishedHaplos[i][j].getGeno(); StringBuffer theHap = new StringBuffer(theGeno.length); for (int k = 0; k < theGeno.length; k++){ theHap.append(theGeno[k]); } saveHapsWriter.write(theHap.toString() + " (" + nf.format(finishedHaplos[i][j].getPercentage()) + ")"); if (i < finishedHaplos.length-1){ saveHapsWriter.write("\t|"); for (int crossCount = 0; crossCount < finishedHaplos[i+1].length; crossCount++){ if (crossCount != 0) saveHapsWriter.write("\t"); saveHapsWriter.write(nf.format(finishedHaplos[i][j].getCrossover(crossCount))); } saveHapsWriter.write("|"); } saveHapsWriter.write("\n"); } if (i < finishedHaplos.length - 1){ saveHapsWriter.write("Multiallelic Dprime: " + multidprime[i] + "\n"); }else{ saveHapsWriter.write("\n"); } } saveHapsWriter.close(); }
| 1,112,131
|
private void addUser(RemoteManagerClient remoteManagerClient, String username, String internalPassword) { remoteManagerClient.executeCommand("adduser " + username + " " + internalPassword); List answers = remoteManagerClient.readAnswer(); log.info("user created: " + username); }
|
private void addUser(RemoteManagerClient remoteManagerClient, String username, String internalPassword) { remoteManagerClient.executeCommand("adduser " + username + " " + internalPassword); remoteManagerClient.readAnswer(); log.info("user created: " + username); }
| 1,112,133
|
private void setupForwardedMailInterceptor() throws StartupException { SMTPMailSink smtpMailSink = new SMTPMailSink(); smtpMailSink.setSmtpListenerPort(m_postageConfiguration.getTestserverPortSMTPForwarding()); smtpMailSink.setResults(m_results); try { smtpMailSink.initialize(); } catch (Exception e) { throw new StartupException("failed to setup"); } m_smtpMailSink = smtpMailSink; log.info("forwarded mail interceptor is set up."); }
|
private void setupForwardedMailInterceptor() throws StartupException { SMTPMailSink smtpMailSink = new SMTPMailSink(); smtpMailSink.setSmtpListenerPort(m_postageConfiguration.getTestserverPortSMTPForwarding()); smtpMailSink.setResults(m_results); try { smtpMailSink.initialize(); } catch (Exception e) { throw new StartupException("failed to setup",e); } m_smtpMailSink = smtpMailSink; log.info("forwarded mail interceptor is set up."); }
| 1,112,134
|
private void setupInternalUserAccounts() throws StartupException { try { String host = m_postageConfiguration.getTestserverHost(); int remoteManagerPort = m_postageConfiguration.getTestserverRemoteManagerPort(); String remoteManagerUsername = m_postageConfiguration.getTestserverRemoteManagerUsername(); String remoteManagerPassword = m_postageConfiguration.getTestserverRemoteManagerPassword(); int internalUserCount = m_postageConfiguration.getInternalUsers().getCount(); String internalUsernamePrefix = m_postageConfiguration.getInternalUsers().getNamePrefix(); String internalPassword = m_postageConfiguration.getInternalUsers().getPassword(); Set existingUsers = getExistingUsers(host, remoteManagerPort, remoteManagerUsername, remoteManagerPassword); RemoteManagerClient remoteManagerClient = new RemoteManagerClient(host, remoteManagerPort, remoteManagerUsername, remoteManagerPassword); boolean loginSuccess = remoteManagerClient.login(); ArrayList internalUsers = new ArrayList(); for (int i = 1; i <= internalUserCount; i++) { String username = internalUsernamePrefix + i; if (existingUsers.contains(username)) { log.info("user already exists: " + username); if (!m_postageConfiguration.isInternalReuseExisting()) { remoteManagerClient.executeCommand("deluser " + username); List answers = remoteManagerClient.readAnswer(); addUser(remoteManagerClient, username, internalPassword); answers = remoteManagerClient.readAnswer(); log.info("user deleted and re-created: " + username); } remoteManagerClient.executeCommand("setpassword " + username + " " + internalPassword); List answers = remoteManagerClient.readAnswer(); } else { addUser(remoteManagerClient, username, internalPassword); } internalUsers.add(username); } m_postageConfiguration.getInternalUsers().setExistingUsers(internalUsers); remoteManagerClient.disconnect(); } catch (Exception e) { throw new StartupException("error setting up internal user accounts", e); } }
|
private void setupInternalUserAccounts() throws StartupException { try { String host = m_postageConfiguration.getTestserverHost(); int remoteManagerPort = m_postageConfiguration.getTestserverRemoteManagerPort(); String remoteManagerUsername = m_postageConfiguration.getTestserverRemoteManagerUsername(); String remoteManagerPassword = m_postageConfiguration.getTestserverRemoteManagerPassword(); int internalUserCount = m_postageConfiguration.getInternalUsers().getCount(); String internalUsernamePrefix = m_postageConfiguration.getInternalUsers().getNamePrefix(); String internalPassword = m_postageConfiguration.getInternalUsers().getPassword(); Set existingUsers = getExistingUsers(host, remoteManagerPort, remoteManagerUsername, remoteManagerPassword); RemoteManagerClient remoteManagerClient = new RemoteManagerClient(host, remoteManagerPort, remoteManagerUsername, remoteManagerPassword); remoteManagerClient.login(); ArrayList internalUsers = new ArrayList(); for (int i = 1; i <= internalUserCount; i++) { String username = internalUsernamePrefix + i; if (existingUsers.contains(username)) { log.info("user already exists: " + username); if (!m_postageConfiguration.isInternalReuseExisting()) { remoteManagerClient.executeCommand("deluser " + username); List answers = remoteManagerClient.readAnswer(); addUser(remoteManagerClient, username, internalPassword); answers = remoteManagerClient.readAnswer(); log.info("user deleted and re-created: " + username); } remoteManagerClient.executeCommand("setpassword " + username + " " + internalPassword); List answers = remoteManagerClient.readAnswer(); } else { addUser(remoteManagerClient, username, internalPassword); } internalUsers.add(username); } m_postageConfiguration.getInternalUsers().setExistingUsers(internalUsers); remoteManagerClient.disconnect(); } catch (Exception e) { throw new StartupException("error setting up internal user accounts", e); } }
| 1,112,135
|
private void setupInternalUserAccounts() throws StartupException { try { String host = m_postageConfiguration.getTestserverHost(); int remoteManagerPort = m_postageConfiguration.getTestserverRemoteManagerPort(); String remoteManagerUsername = m_postageConfiguration.getTestserverRemoteManagerUsername(); String remoteManagerPassword = m_postageConfiguration.getTestserverRemoteManagerPassword(); int internalUserCount = m_postageConfiguration.getInternalUsers().getCount(); String internalUsernamePrefix = m_postageConfiguration.getInternalUsers().getNamePrefix(); String internalPassword = m_postageConfiguration.getInternalUsers().getPassword(); Set existingUsers = getExistingUsers(host, remoteManagerPort, remoteManagerUsername, remoteManagerPassword); RemoteManagerClient remoteManagerClient = new RemoteManagerClient(host, remoteManagerPort, remoteManagerUsername, remoteManagerPassword); boolean loginSuccess = remoteManagerClient.login(); ArrayList internalUsers = new ArrayList(); for (int i = 1; i <= internalUserCount; i++) { String username = internalUsernamePrefix + i; if (existingUsers.contains(username)) { log.info("user already exists: " + username); if (!m_postageConfiguration.isInternalReuseExisting()) { remoteManagerClient.executeCommand("deluser " + username); List answers = remoteManagerClient.readAnswer(); addUser(remoteManagerClient, username, internalPassword); answers = remoteManagerClient.readAnswer(); log.info("user deleted and re-created: " + username); } remoteManagerClient.executeCommand("setpassword " + username + " " + internalPassword); List answers = remoteManagerClient.readAnswer(); } else { addUser(remoteManagerClient, username, internalPassword); } internalUsers.add(username); } m_postageConfiguration.getInternalUsers().setExistingUsers(internalUsers); remoteManagerClient.disconnect(); } catch (Exception e) { throw new StartupException("error setting up internal user accounts", e); } }
|
private void setupInternalUserAccounts() throws StartupException { try { String host = m_postageConfiguration.getTestserverHost(); int remoteManagerPort = m_postageConfiguration.getTestserverRemoteManagerPort(); String remoteManagerUsername = m_postageConfiguration.getTestserverRemoteManagerUsername(); String remoteManagerPassword = m_postageConfiguration.getTestserverRemoteManagerPassword(); int internalUserCount = m_postageConfiguration.getInternalUsers().getCount(); String internalUsernamePrefix = m_postageConfiguration.getInternalUsers().getNamePrefix(); String internalPassword = m_postageConfiguration.getInternalUsers().getPassword(); Set existingUsers = getExistingUsers(host, remoteManagerPort, remoteManagerUsername, remoteManagerPassword); RemoteManagerClient remoteManagerClient = new RemoteManagerClient(host, remoteManagerPort, remoteManagerUsername, remoteManagerPassword); boolean loginSuccess = remoteManagerClient.login(); ArrayList internalUsers = new ArrayList(); for (int i = 1; i <= internalUserCount; i++) { String username = internalUsernamePrefix + i; if (existingUsers.contains(username)) { log.info("user already exists: " + username); if (!m_postageConfiguration.isInternalReuseExisting()) { remoteManagerClient.executeCommand("deluser " + username); remoteManagerClient.readAnswer(); addUser(remoteManagerClient, username, internalPassword); answers = remoteManagerClient.readAnswer(); log.info("user deleted and re-created: " + username); } remoteManagerClient.executeCommand("setpassword " + username + " " + internalPassword); remoteManagerClient.readAnswer(); } else { addUser(remoteManagerClient, username, internalPassword); } internalUsers.add(username); } m_postageConfiguration.getInternalUsers().setExistingUsers(internalUsers); remoteManagerClient.disconnect(); } catch (Exception e) { throw new StartupException("error setting up internal user accounts", e); } }
| 1,112,136
|
private void setupInternalUserAccounts() throws StartupException { try { String host = m_postageConfiguration.getTestserverHost(); int remoteManagerPort = m_postageConfiguration.getTestserverRemoteManagerPort(); String remoteManagerUsername = m_postageConfiguration.getTestserverRemoteManagerUsername(); String remoteManagerPassword = m_postageConfiguration.getTestserverRemoteManagerPassword(); int internalUserCount = m_postageConfiguration.getInternalUsers().getCount(); String internalUsernamePrefix = m_postageConfiguration.getInternalUsers().getNamePrefix(); String internalPassword = m_postageConfiguration.getInternalUsers().getPassword(); Set existingUsers = getExistingUsers(host, remoteManagerPort, remoteManagerUsername, remoteManagerPassword); RemoteManagerClient remoteManagerClient = new RemoteManagerClient(host, remoteManagerPort, remoteManagerUsername, remoteManagerPassword); boolean loginSuccess = remoteManagerClient.login(); ArrayList internalUsers = new ArrayList(); for (int i = 1; i <= internalUserCount; i++) { String username = internalUsernamePrefix + i; if (existingUsers.contains(username)) { log.info("user already exists: " + username); if (!m_postageConfiguration.isInternalReuseExisting()) { remoteManagerClient.executeCommand("deluser " + username); List answers = remoteManagerClient.readAnswer(); addUser(remoteManagerClient, username, internalPassword); answers = remoteManagerClient.readAnswer(); log.info("user deleted and re-created: " + username); } remoteManagerClient.executeCommand("setpassword " + username + " " + internalPassword); List answers = remoteManagerClient.readAnswer(); } else { addUser(remoteManagerClient, username, internalPassword); } internalUsers.add(username); } m_postageConfiguration.getInternalUsers().setExistingUsers(internalUsers); remoteManagerClient.disconnect(); } catch (Exception e) { throw new StartupException("error setting up internal user accounts", e); } }
|
private void setupInternalUserAccounts() throws StartupException { try { String host = m_postageConfiguration.getTestserverHost(); int remoteManagerPort = m_postageConfiguration.getTestserverRemoteManagerPort(); String remoteManagerUsername = m_postageConfiguration.getTestserverRemoteManagerUsername(); String remoteManagerPassword = m_postageConfiguration.getTestserverRemoteManagerPassword(); int internalUserCount = m_postageConfiguration.getInternalUsers().getCount(); String internalUsernamePrefix = m_postageConfiguration.getInternalUsers().getNamePrefix(); String internalPassword = m_postageConfiguration.getInternalUsers().getPassword(); Set existingUsers = getExistingUsers(host, remoteManagerPort, remoteManagerUsername, remoteManagerPassword); RemoteManagerClient remoteManagerClient = new RemoteManagerClient(host, remoteManagerPort, remoteManagerUsername, remoteManagerPassword); boolean loginSuccess = remoteManagerClient.login(); ArrayList internalUsers = new ArrayList(); for (int i = 1; i <= internalUserCount; i++) { String username = internalUsernamePrefix + i; if (existingUsers.contains(username)) { log.info("user already exists: " + username); if (!m_postageConfiguration.isInternalReuseExisting()) { remoteManagerClient.executeCommand("deluser " + username); List remoteManagerClient.readAnswer(); addUser(remoteManagerClient, username, internalPassword); remoteManagerClient.readAnswer(); log.info("user deleted and re-created: " + username); } remoteManagerClient.executeCommand("setpassword " + username + " " + internalPassword); List remoteManagerClient.readAnswer(); } else { addUser(remoteManagerClient, username, internalPassword); } internalUsers.add(username); } m_postageConfiguration.getInternalUsers().setExistingUsers(internalUsers); remoteManagerClient.disconnect(); } catch (Exception e) { throw new StartupException("error setting up internal user accounts", e); } }
| 1,112,137
|
public void delete() { getODMGImplementation(); getODMGDatabase(); Transaction tx = odmg.newTransaction(); tx.begin(); // First make sure that this object is deleted from its parent's subfolders list (if it has a parent) setParentFolder( null ); try { db.deletePersistent( this ); tx.commit(); } catch ( Exception e ) { tx.abort(); log.warn( "Error deleteing photo folder: " + e.getMessage() ); } }
|
public void delete() { getODMGImplementation(); getODMGDatabase(); Transaction tx = odmg.newTransaction(); tx.begin(); // First make sure that this object is deleted from its parent's subfolders list (if it has a parent) setParentFolder( null ); try { db.deletePersistent( this ); tx.commit(); } catch ( Exception e ) { tx.abort(); log.warn( "Error deleteing photo folder: " + e.getMessage() ); } }
| 1,112,139
|
public void delete() { getODMGImplementation(); getODMGDatabase(); Transaction tx = odmg.newTransaction(); tx.begin(); // First make sure that this object is deleted from its parent's subfolders list (if it has a parent) setParentFolder( null ); try { db.deletePersistent( this ); tx.commit(); } catch ( Exception e ) { tx.abort(); log.warn( "Error deleteing photo folder: " + e.getMessage() ); } }
|
public void delete() { getODMGImplementation(); getODMGDatabase(); Transaction tx = odmg.newTransaction(); tx.begin(); // First make sure that this object is deleted from its parent's subfolders list (if it has a parent) setParentFolder( null ); try { db.deletePersistent( this ); tx.commit(); } catch ( Exception e ) { tx.abort(); log.warn( "Error deleteing photo folder: " + e.getMessage() ); } }
| 1,112,140
|
public void delete() { getODMGImplementation(); getODMGDatabase(); Transaction tx = odmg.newTransaction(); tx.begin(); // First make sure that this object is deleted from its parent's subfolders list (if it has a parent) setParentFolder( null ); try { db.deletePersistent( this ); tx.commit(); } catch ( Exception e ) { tx.abort(); log.warn( "Error deleteing photo folder: " + e.getMessage() ); } }
|
public void delete() { getODMGImplementation(); getODMGDatabase(); Transaction tx = odmg.newTransaction(); tx.begin(); // First make sure that this object is deleted from its parent's subfolders list (if it has a parent) setParentFolder( null ); try { db.deletePersistent( this ); tx.commit(); } catch ( Exception e ) { tx.abort(); log.warn( "Error deleteing photo folder: " + e.getMessage() ); } }
| 1,112,141
|
public static PhotoFolder getRoot() { getODMGImplementation(); getODMGDatabase(); DList folders = null; Transaction tx = odmg.newTransaction(); tx.begin(); try { OQLQuery query = odmg.newOQLQuery(); query.create( "select folders from " + PhotoFolder.class.getName() + " where folderId = 1" ); folders = (DList) query.execute(); } catch ( Exception e ) { tx.abort(); return null; } PhotoFolder rootFolder = (PhotoFolder) folders.get( 0 ); return rootFolder; }
|
public static PhotoFolder getRoot() { getODMGImplementation(); getODMGDatabase(); DList folders = null; Transaction tx = odmg.newTransaction(); tx.begin(); try { OQLQuery query = odmg.newOQLQuery(); query.create( "select folders from " + PhotoFolder.class.getName() + " where folderId = 1" ); folders = (DList) query.execute(); } catch ( Exception e ) { tx.abort(); return null; } PhotoFolder rootFolder = (PhotoFolder) folders.get( 0 ); return rootFolder; }
| 1,112,142
|
public static void main( String[] args ) { org.apache.log4j.BasicConfigurator.configure(); log.setLevel( org.apache.log4j.Level.DEBUG ); Implementation odmg = getODMGImplementation(); Database db = getODMGDatabase(); Transaction tx = odmg.newTransaction(); tx.begin(); DList folders = null; try { OQLQuery query = odmg.newOQLQuery(); query.create( "select folders from " + PhotoFolder.class.getName() + " where folderId = 0" ); folders = (DList) query.execute(); tx.commit(); } catch ( Exception e ) { tx.abort(); log.error( e.getMessage() ); } Iterator iter = folders.iterator(); boolean found = false; log.debug( "Starting to go thourh..." ); while ( iter.hasNext() ) { PhotoFolder folder = (PhotoFolder) iter.next(); log.debug( "Folder " + folder.getName() ); if ( folder.getFolderId() == 0 ) { found = true; log.info( "Found!!!" ); } } }
|
public static void main( String[] args ) { org.apache.log4j.BasicConfigurator.configure(); log.setLevel( org.apache.log4j.Level.DEBUG ); Implementation odmg = getODMGImplementation(); Database db = getODMGDatabase(); Transaction tx = odmg.newTransaction(); tx.begin(); DList folders = null; try { OQLQuery query = odmg.newOQLQuery(); query.create( "select folders from " + PhotoFolder.class.getName() + " where folderId = 1" ); folders = (DList) query.execute(); tx.commit(); } catch ( Exception e ) { tx.abort(); log.error( e.getMessage() ); } Iterator iter = folders.iterator(); boolean found = false; log.debug( "Starting to go thourh..." ); while ( iter.hasNext() ) { PhotoFolder folder = (PhotoFolder) iter.next(); log.debug( "Folder " + folder.getName() ); if ( folder.getFolderId() == 0 ) { found = true; log.info( "Found!!!" ); } } }
| 1,112,144
|
public void stateChanged(ChangeEvent e) { int tabNum = tabs.getSelectedIndex(); if (tabNum == VIEW_D_NUM || tabNum == VIEW_HAP_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(true); }else if (tabNum == VIEW_TDT_NUM || tabNum == VIEW_CHECK_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(false); }else{ exportMenuItems[0].setEnabled(false); exportMenuItems[1].setEnabled(false); } //if we've adjusted the haps display thresh we need to change the haps ass panel if (tabNum == VIEW_TDT_NUM){ JTabbedPane metaAssoc= (JTabbedPane)tabs.getComponentAt(tabNum); //this is the haps ass tab inside the assoc super-tab HaploAssocPanel htp = (HaploAssocPanel) metaAssoc.getComponent(1); if (htp.initialHaplotypeDisplayThreshold != Options.getHaplotypeDisplayThreshold()){ metaAssoc.remove(1); metaAssoc.add("Haplotypes", new HaploAssocPanel(theData.getHaplotypes())); } } if (tabNum == VIEW_D_NUM){ keyMenu.setEnabled(true); }else{ keyMenu.setEnabled(false); } viewMenuItems[tabs.getSelectedIndex()].setSelected(true); if (checkPanel != null && checkPanel.changed){ //first store up the current blocks Vector currentBlocks = new Vector(); for (int blocks = 0; blocks < theData.blocks.size(); blocks++){ int thisBlock[] = (int[]) theData.blocks.elementAt(blocks); int thisBlockReal[] = new int[thisBlock.length]; for (int marker = 0; marker < thisBlock.length; marker++){ thisBlockReal[marker] = Chromosome.realIndex[thisBlock[marker]]; } currentBlocks.add(thisBlockReal); } window.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); JTable table = checkPanel.getTable(); boolean[] markerResults = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResults[i] = ((Boolean)table.getValueAt(i,CheckDataPanel.STATUS_COL)).booleanValue(); } Chromosome.doFilter(markerResults); //after editing the filtered marker list, needs to be prodded into //resizing correctly dPrimeDisplay.computePreferredSize(); 
dPrimeDisplay.colorDPrime(currentScheme); hapDisplay.theData = theData; if (currentBlockDef != BLOX_CUSTOM){ changeBlocks(currentBlockDef); }else{ //adjust the blocks Vector theBlocks = new Vector(); for (int x = 0; x < currentBlocks.size(); x++){ Vector goodies = new Vector(); int currentBlock[] = (int[])currentBlocks.elementAt(x); for (int marker = 0; marker < currentBlock.length; marker++){ for (int y = 0; y < Chromosome.realIndex.length; y++){ //we only keep markers from the input that are "good" from checkdata //we also realign the input file to the current "good" subset since input is //indexed of all possible markers in the dataset if (Chromosome.realIndex[y] == currentBlock[marker]){ goodies.add(new Integer(y)); } } } int thisBlock[] = new int[goodies.size()]; for (int marker = 0; marker < thisBlock.length; marker++){ thisBlock[marker] = ((Integer)goodies.elementAt(marker)).intValue(); } if (thisBlock.length > 1){ theBlocks.add(thisBlock); } } theData.guessBlocks(BLOX_CUSTOM, theBlocks); } if (tdtPanel != null){ tdtPanel.refreshTable(); } setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); checkPanel.changed=false; } if (hapDisplay != null && theData.blocksChanged){ setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); try{ hapDisplay.getHaps(); }catch(HaploViewException hv){ JOptionPane.showMessageDialog(window, hv.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } hapScroller.setViewportView(hapDisplay); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); theData.blocksChanged = false; } }
|
public void stateChanged(ChangeEvent e) { int tabNum = tabs.getSelectedIndex(); if (tabNum == VIEW_D_NUM || tabNum == VIEW_HAP_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(true); }else if (tabNum == VIEW_TDT_NUM || tabNum == VIEW_CHECK_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(false); }else{ exportMenuItems[0].setEnabled(false); exportMenuItems[1].setEnabled(false); } //if we've adjusted the haps display thresh we need to change the haps ass panel if (tabNum == VIEW_TDT_NUM){ JTabbedPane metaAssoc= (JTabbedPane)tabs.getComponentAt(tabNum); //this is the haps ass tab inside the assoc super-tab HaploAssocPanel htp = (HaploAssocPanel) metaAssoc.getComponent(1); if (htp.initialHaplotypeDisplayThreshold != Options.getHaplotypeDisplayThreshold()){ metaAssoc.remove(1); metaAssoc.add("Haplotypes", new HaploAssocPanel(theData.getHaplotypes())); } } if (tabNum == VIEW_D_NUM){ keyMenu.setEnabled(true); }else{ keyMenu.setEnabled(false); } viewMenuItems[tabs.getSelectedIndex()].setSelected(true); if (checkPanel != null && checkPanel.changed){ //first store up the current blocks Vector currentBlocks = new Vector(); for (int blocks = 0; blocks < theData.blocks.size(); blocks++){ int thisBlock[] = (int[]) theData.blocks.elementAt(blocks); int thisBlockReal[] = new int[thisBlock.length]; for (int marker = 0; marker < thisBlock.length; marker++){ thisBlockReal[marker] = Chromosome.realIndex[thisBlock[marker]]; } currentBlocks.add(thisBlockReal); } window.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); JTable table = checkPanel.getTable(); boolean[] markerResults = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResults[i] = ((Boolean)table.getValueAt(i,CheckDataPanel.STATUS_COL)).booleanValue(); } Chromosome.doFilter(markerResults); //after editing the filtered marker list, needs to be prodded into //resizing correctly dPrimeDisplay.computePreferredSize(); 
dPrimeDisplay.colorDPrime(currentScheme); hapDisplay.theData = theData; if (currentBlockDef != BLOX_CUSTOM){ changeBlocks(currentBlockDef); }else{ //adjust the blocks Vector theBlocks = new Vector(); for (int x = 0; x < currentBlocks.size(); x++){ Vector goodies = new Vector(); int currentBlock[] = (int[])currentBlocks.elementAt(x); for (int marker = 0; marker < currentBlock.length; marker++){ for (int y = 0; y < Chromosome.realIndex.length; y++){ //we only keep markers from the input that are "good" from checkdata //we also realign the input file to the current "good" subset since input is //indexed of all possible markers in the dataset if (Chromosome.realIndex[y] == currentBlock[marker]){ goodies.add(new Integer(y)); } } } int thisBlock[] = new int[goodies.size()]; for (int marker = 0; marker < thisBlock.length; marker++){ thisBlock[marker] = ((Integer)goodies.elementAt(marker)).intValue(); } if (thisBlock.length > 1){ theBlocks.add(thisBlock); } } theData.guessBlocks(BLOX_CUSTOM, theBlocks); } if (tdtPanel != null){ tdtPanel.refreshTable(); } setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); checkPanel.changed=false; } if (hapDisplay != null && theData.blocksChanged){ setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); try{ hapDisplay.getHaps(); }catch(HaploViewException hv){ JOptionPane.showMessageDialog(window, hv.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } hapScroller.setViewportView(hapDisplay); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); theData.blocksChanged = false; } }
| 1,112,147
|
void browse(int browseType){ String name; String markerInfoName = ""; HaploView h = (HaploView) this.getParent(); h.fc.setSelectedFile(null); int returned = h.fc.showOpenDialog(this); if (returned != JFileChooser.APPROVE_OPTION) return; File file = h.fc.getSelectedFile(); if (browseType == GENO){ name = file.getName(); genoFileField.setText(file.getParent()+File.separator+name); if(infoFileField.getText().equals("")){ //baseName should be everything but the final ".XXX" extension StringTokenizer st = new StringTokenizer(name,"."); String baseName = st.nextToken(); for (int i = 0; i < st.countTokens()-1; i++){ baseName = baseName.concat(".").concat(st.nextToken()); } //check for info file for original file sample.haps //either sample.haps.info or sample.info File maybeMarkers1 = new File(file.getParent(), name + MARKER_DATA_EXT); File maybeMarkers2 = new File(file.getParent(), baseName + MARKER_DATA_EXT); if (maybeMarkers1.exists()){ markerInfoName = maybeMarkers1.getName(); }else if (maybeMarkers2.exists()){ markerInfoName = maybeMarkers2.getName(); }else{ return; } infoFileField.setText(file.getParent()+File.separator+markerInfoName); } }else if (browseType==INFO){ markerInfoName = file.getName(); infoFileField.setText(file.getParent()+File.separator+markerInfoName); } }
|
void browse(int browseType){ String name; String markerInfoName = ""; HaploView h = (HaploView) this.getParent(); h.fc.setSelectedFile(new File("")); int returned = h.fc.showOpenDialog(this); if (returned != JFileChooser.APPROVE_OPTION) return; File file = h.fc.getSelectedFile(); if (browseType == GENO){ name = file.getName(); genoFileField.setText(file.getParent()+File.separator+name); if(infoFileField.getText().equals("")){ //baseName should be everything but the final ".XXX" extension StringTokenizer st = new StringTokenizer(name,"."); String baseName = st.nextToken(); for (int i = 0; i < st.countTokens()-1; i++){ baseName = baseName.concat(".").concat(st.nextToken()); } //check for info file for original file sample.haps //either sample.haps.info or sample.info File maybeMarkers1 = new File(file.getParent(), name + MARKER_DATA_EXT); File maybeMarkers2 = new File(file.getParent(), baseName + MARKER_DATA_EXT); if (maybeMarkers1.exists()){ markerInfoName = maybeMarkers1.getName(); }else if (maybeMarkers2.exists()){ markerInfoName = maybeMarkers2.getName(); }else{ return; } infoFileField.setText(file.getParent()+File.separator+markerInfoName); } }else if (browseType==INFO){ markerInfoName = file.getName(); infoFileField.setText(file.getParent()+File.separator+markerInfoName); } }
| 1,112,151
|
public Expression createExpression( ExpressionFactory factory, String tagName, String attributeName, String attributeValue) throws Exception { // #### may need to include some namespace URI information in the XPath instance? if (attributeName.equals("select")) { if ( log.isDebugEnabled() ) { log.debug( "Parsing XPath expression: " + attributeValue ); } try { XPath xpath = new Dom4jXPath(attributeValue); return new XPathExpression(xpath); } catch (JaxenException e) { throw new JellyException( "Could not parse XPath expression: \"" + attributeValue + "\" reason: " + e, e ); } } if (attributeName.equals("match")) { if ( log.isDebugEnabled() ) { log.debug( "Parsing XPath pattern: " + attributeValue ); } try { Pattern pattern = DocumentHelper.createPattern( attributeValue ); return new XPathPatternExpression(pattern); } catch (Exception e) { throw new JellyException( "Could not parse XPath expression: \"" + attributeValue + "\" reason: " + e, e ); } } // will use the default expression instead return super.createExpression(factory, tagName, attributeName, attributeValue); }
|
public Expression createExpression( ExpressionFactory factory, TagScript tagScript, String attributeName, String attributeValue) throws Exception { // #### may need to include some namespace URI information in the XPath instance? if (attributeName.equals("select")) { if ( log.isDebugEnabled() ) { log.debug( "Parsing XPath expression: " + attributeValue ); } try { XPath xpath = new Dom4jXPath(attributeValue); return new XPathExpression(xpath); } catch (JaxenException e) { throw new JellyException( "Could not parse XPath expression: \"" + attributeValue + "\" reason: " + e, e ); } } if (attributeName.equals("match")) { if ( log.isDebugEnabled() ) { log.debug( "Parsing XPath pattern: " + attributeValue ); } try { Pattern pattern = DocumentHelper.createPattern( attributeValue ); return new XPathPatternExpression(pattern); } catch (Exception e) { throw new JellyException( "Could not parse XPath expression: \"" + attributeValue + "\" reason: " + e, e ); } } // will use the default expression instead return super.createExpression(factory, tagName, attributeName, attributeValue); }
| 1,112,152
|
public Expression createExpression( ExpressionFactory factory, String tagName, String attributeName, String attributeValue) throws Exception { // #### may need to include some namespace URI information in the XPath instance? if (attributeName.equals("select")) { if ( log.isDebugEnabled() ) { log.debug( "Parsing XPath expression: " + attributeValue ); } try { XPath xpath = new Dom4jXPath(attributeValue); return new XPathExpression(xpath); } catch (JaxenException e) { throw new JellyException( "Could not parse XPath expression: \"" + attributeValue + "\" reason: " + e, e ); } } if (attributeName.equals("match")) { if ( log.isDebugEnabled() ) { log.debug( "Parsing XPath pattern: " + attributeValue ); } try { Pattern pattern = DocumentHelper.createPattern( attributeValue ); return new XPathPatternExpression(pattern); } catch (Exception e) { throw new JellyException( "Could not parse XPath expression: \"" + attributeValue + "\" reason: " + e, e ); } } // will use the default expression instead return super.createExpression(factory, tagName, attributeName, attributeValue); }
|
public Expression createExpression( ExpressionFactory factory, String tagName, String attributeName, String attributeValue) throws Exception { // #### may need to include some namespace URI information in the XPath instance? if (attributeName.equals("select")) { if ( log.isDebugEnabled() ) { log.debug( "Parsing XPath expression: " + attributeValue ); } try { XPath xpath = new Dom4jXPath(attributeValue); return new XPathExpression(xpath, tagScript); } catch (JaxenException e) { throw new JellyException( "Could not parse XPath expression: \"" + attributeValue + "\" reason: " + e, e ); } } if (attributeName.equals("match")) { if ( log.isDebugEnabled() ) { log.debug( "Parsing XPath pattern: " + attributeValue ); } try { Pattern pattern = DocumentHelper.createPattern( attributeValue ); return new XPathPatternExpression(pattern); } catch (Exception e) { throw new JellyException( "Could not parse XPath expression: \"" + attributeValue + "\" reason: " + e, e ); } } // will use the default expression instead return super.createExpression(factory, tagName, attributeName, attributeValue); }
| 1,112,153
|
public Expression createExpression( ExpressionFactory factory, String tagName, String attributeName, String attributeValue) throws Exception { // #### may need to include some namespace URI information in the XPath instance? if (attributeName.equals("select")) { if ( log.isDebugEnabled() ) { log.debug( "Parsing XPath expression: " + attributeValue ); } try { XPath xpath = new Dom4jXPath(attributeValue); return new XPathExpression(xpath); } catch (JaxenException e) { throw new JellyException( "Could not parse XPath expression: \"" + attributeValue + "\" reason: " + e, e ); } } if (attributeName.equals("match")) { if ( log.isDebugEnabled() ) { log.debug( "Parsing XPath pattern: " + attributeValue ); } try { Pattern pattern = DocumentHelper.createPattern( attributeValue ); return new XPathPatternExpression(pattern); } catch (Exception e) { throw new JellyException( "Could not parse XPath expression: \"" + attributeValue + "\" reason: " + e, e ); } } // will use the default expression instead return super.createExpression(factory, tagName, attributeName, attributeValue); }
|
public Expression createExpression( ExpressionFactory factory, String tagName, String attributeName, String attributeValue) throws Exception { // #### may need to include some namespace URI information in the XPath instance? if (attributeName.equals("select")) { if ( log.isDebugEnabled() ) { log.debug( "Parsing XPath expression: " + attributeValue ); } try { XPath xpath = new Dom4jXPath(attributeValue); return new XPathExpression(xpath); } catch (JaxenException e) { throw new JellyException( "Could not parse XPath expression: \"" + attributeValue + "\" reason: " + e, e ); } } if (attributeName.equals("match")) { if ( log.isDebugEnabled() ) { log.debug( "Parsing XPath pattern: " + attributeValue ); } try { Pattern pattern = DocumentHelper.createPattern( attributeValue ); return new XPathPatternExpression(pattern); } catch (Exception e) { throw new JellyException( "Could not parse XPath expression: \"" + attributeValue + "\" reason: " + e, e ); } } // will use the default expression instead return super.createExpression(factory, tagScript, attributeName, attributeValue); }
| 1,112,154
|
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == READ_GENOTYPES){ ReadDataDialog readDialog = new ReadDataDialog("Open new data", this); readDialog.pack(); readDialog.setVisible(true); }else if (command == "Continue"){ JTable table = checkPanel.getTable(); checkWindow.dispose(); boolean[] markerResultArray = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResultArray[i] = ((Boolean)table.getValueAt(i,7)).booleanValue(); } try{ new TextMethods().linkageToHaps(markerResultArray,checkPanel.getPedFile(),"test.haps"); }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } //processInput(new File(hapInputFileName+".haps")); } else if (command == READ_MARKERS){ fc.setSelectedFile(null); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { readMarkers(fc.getSelectedFile()); } }else if (command == "Clear All Blocks"){ //theBlocks.clearBlocks(); }else if (command == DEFINE_BLOCKS){ defineBlocks(); }else if (command == "Tutorial"){ showHelp(); } else if (command == QUIT){ quit(); } else { for (int i = 0; i < viewItems.length; i++) { if (command == viewItems[i]) tabs.setSelectedIndex(i); } } }
|
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == READ_GENOTYPES){ ReadDataDialog readDialog = new ReadDataDialog("Open new data", this); readDialog.pack(); readDialog.setVisible(true); }else if (command == "Continue"){ JTable table = checkPanel.getTable(); checkWindow.dispose(); boolean[] markerResultArray = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResultArray[i] = ((Boolean)table.getValueAt(i,7)).booleanValue(); } try{ new TextMethods().linkageToHaps(markerResultArray,checkPanel.getPedFile(),"test.haps"); }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } //processInput(new File(hapInputFileName+".haps")); } else if (command == READ_MARKERS){ fc.setSelectedFile(null); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { readMarkers(fc.getSelectedFile()); } }else if (command == "Clear All Blocks"){ //theBlocks.clearBlocks(); }else if (command == DEFINE_BLOCKS){ defineBlocks(); }else if (command == "Tutorial"){ showHelp(); } else if (command == QUIT){ quit(); } else { for (int i = 0; i < viewItems.length; i++) { if (command == viewItems[i]) tabs.setSelectedIndex(i); } } }
| 1,112,155
|
private static void hapsTextOnly(String hapsFile,int outputType){ try { HaploData theData; File OutputFile; File inputFile = new File(hapsFile); if(!inputFile.exists()){ System.out.println("haps input file " + hapsFile + " does not exist"); } switch(outputType){ case 1: OutputFile = new File(hapsFile + ".4GAMblocks"); break; case 2: OutputFile = new File(hapsFile + ".MJDblocks"); break; default: OutputFile = new File(hapsFile + ".SFSblocks"); break; } theData = new HaploData(new File(hapsFile)); String name = hapsFile; String baseName = hapsFile.substring(0,name.length()-5); File maybeInfo = new File(baseName + ".info"); if (maybeInfo.exists()){ theData.prepareMarkerInput(maybeInfo); } //theData.doMonitoredComputation(); Haplotype[][] haplos; if(outputType == 1 || outputType == 2){ theData.guessBlocks(outputType); } haplos = theData.generateHaplotypes(theData.blocks, 1); new TextMethods().saveHapsToText(orderHaps(haplos, theData), theData.getMultiDprime(), OutputFile); } catch(IOException e){} }
|
private static void hapsTextOnly(String hapsFile,int outputType, int maxDistance){ try { HaploData theData; File OutputFile; File inputFile = new File(hapsFile); if(!inputFile.exists()){ System.out.println("haps input file " + hapsFile + " does not exist"); } switch(outputType){ case 1: OutputFile = new File(hapsFile + ".4GAMblocks"); break; case 2: OutputFile = new File(hapsFile + ".MJDblocks"); break; default: OutputFile = new File(hapsFile + ".SFSblocks"); break; } theData = new HaploData(new File(hapsFile)); String name = hapsFile; String baseName = hapsFile.substring(0,name.length()-5); File maybeInfo = new File(baseName + ".info"); if (maybeInfo.exists()){ theData.prepareMarkerInput(maybeInfo); } //theData.doMonitoredComputation(); Haplotype[][] haplos; if(outputType == 1 || outputType == 2){ theData.guessBlocks(outputType); } haplos = theData.generateHaplotypes(theData.blocks, 1); new TextMethods().saveHapsToText(orderHaps(haplos, theData), theData.getMultiDprime(), OutputFile); } catch(IOException e){} }
| 1,112,156
|
private static void hapsTextOnly(String hapsFile,int outputType){ try { HaploData theData; File OutputFile; File inputFile = new File(hapsFile); if(!inputFile.exists()){ System.out.println("haps input file " + hapsFile + " does not exist"); } switch(outputType){ case 1: OutputFile = new File(hapsFile + ".4GAMblocks"); break; case 2: OutputFile = new File(hapsFile + ".MJDblocks"); break; default: OutputFile = new File(hapsFile + ".SFSblocks"); break; } theData = new HaploData(new File(hapsFile)); String name = hapsFile; String baseName = hapsFile.substring(0,name.length()-5); File maybeInfo = new File(baseName + ".info"); if (maybeInfo.exists()){ theData.prepareMarkerInput(maybeInfo); } //theData.doMonitoredComputation(); Haplotype[][] haplos; if(outputType == 1 || outputType == 2){ theData.guessBlocks(outputType); } haplos = theData.generateHaplotypes(theData.blocks, 1); new TextMethods().saveHapsToText(orderHaps(haplos, theData), theData.getMultiDprime(), OutputFile); } catch(IOException e){} }
|
private static void hapsTextOnly(String hapsFile,int outputType){ try { HaploData theData; File OutputFile; File inputFile = new File(hapsFile); if(!inputFile.exists()){ System.out.println("haps input file " + hapsFile + " does not exist"); } switch(outputType){ case 1: OutputFile = new File(hapsFile + ".4GAMblocks"); break; case 2: OutputFile = new File(hapsFile + ".MJDblocks"); break; default: OutputFile = new File(hapsFile + ".SFSblocks"); break; } theData = new HaploData(new File(hapsFile)); String name = hapsFile; String baseName = hapsFile.substring(0,name.length()-5); File maybeInfo = new File(baseName + ".info"); if (maybeInfo.exists()){ theData.prepareMarkerInput(maybeInfo); } //theData.doMonitoredComputation(); Haplotype[][] haplos; if(outputType == 1 || outputType == 2){ theData.guessBlocks(outputType); } haplos = theData.generateHaplotypes(theData.blocks, 1); new TextMethods().saveHapsToText(orderHaps(haplos, theData), theData.getMultiDprime(), OutputFile); } catch(IOException e){} }
| 1,112,158
|
public static void main(String[] args) {//throws IOException{ try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (Exception e) { } //setup view object HaploView window = new HaploView(); window.setTitle("HaploView beta"); window.setSize(800,600); //center the window on the screen Dimension screen = Toolkit.getDefaultToolkit().getScreenSize(); window.setLocation((screen.width - window.getWidth()) / 2, (screen.height - window.getHeight()) / 2); window.setVisible(true); ReadDataDialog readDialog = new ReadDataDialog("Welcome to HaploView", window); readDialog.pack(); readDialog.setVisible(true); }
|
public static void main(String[] args) {//throws IOException{ try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (Exception e) { } //setup view object HaploView window = new HaploView(); window.setTitle("HaploView beta"); window.setSize(800,600); //center the window on the screen Dimension screen = Toolkit.getDefaultToolkit().getScreenSize(); window.setLocation((screen.width - window.getWidth()) / 2, (screen.height - window.getHeight()) / 2); window.setVisible(true); ReadDataDialog readDialog = new ReadDataDialog("Welcome to HaploView", window); readDialog.pack(); readDialog.setVisible(true); }
| 1,112,159
|
public static void main(String[] args) {//throws IOException{ try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (Exception e) { } //setup view object HaploView window = new HaploView(); window.setTitle("HaploView beta"); window.setSize(800,600); //center the window on the screen Dimension screen = Toolkit.getDefaultToolkit().getScreenSize(); window.setLocation((screen.width - window.getWidth()) / 2, (screen.height - window.getHeight()) / 2); window.setVisible(true); ReadDataDialog readDialog = new ReadDataDialog("Welcome to HaploView", window); readDialog.pack(); readDialog.setVisible(true); }
|
public static void main(String[] args) {//throws IOException{ try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (Exception e) { } //setup view object HaploView window = new HaploView(); window.setTitle("HaploView beta"); window.setSize(800,600); //center the window on the screen Dimension screen = Toolkit.getDefaultToolkit().getScreenSize(); window.setLocation((screen.width - window.getWidth()) / 2, (screen.height - window.getHeight()) / 2); window.setVisible(true); ReadDataDialog readDialog = new ReadDataDialog("Welcome to HaploView", window); readDialog.pack(); readDialog.setVisible(true); }
| 1,112,160
|
public static void main(String[] args) {//throws IOException{ try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (Exception e) { } //setup view object HaploView window = new HaploView(); window.setTitle("HaploView beta"); window.setSize(800,600); //center the window on the screen Dimension screen = Toolkit.getDefaultToolkit().getScreenSize(); window.setLocation((screen.width - window.getWidth()) / 2, (screen.height - window.getHeight()) / 2); window.setVisible(true); ReadDataDialog readDialog = new ReadDataDialog("Welcome to HaploView", window); readDialog.pack(); readDialog.setVisible(true); }
|
public static void main(String[] args) {//throws IOException{ try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (Exception e) { } //setup view object HaploView window = new HaploView(); window.setTitle("HaploView beta"); window.setSize(800,600); //center the window on the screen Dimension screen = Toolkit.getDefaultToolkit().getScreenSize(); window.setLocation((screen.width - window.getWidth()) / 2, (screen.height - window.getHeight()) / 2); window.setVisible(true); ReadDataDialog readDialog = new ReadDataDialog("Welcome to HaploView", window); readDialog.pack(); readDialog.setVisible(true); }
| 1,112,161
|
public synchronized String addTicket(String personalId) { HttpSession session = IWContext.getInstance().getSession(); String sessionId = session.getId(); if (!this.sessionIdSession.containsKey(sessionId)) { this.sessionIdSession.put(sessionId, session); } return getTicket(personalId, sessionId); }
|
public synchronized String addTicket(String personalId) { HttpSession session = IWContext.getInstance().getSession(); String sessionId = session.getId(); if (!this.sessionIdSession.containsKey(sessionId)) { this.sessionIdSession.put(sessionId, session); } return getTicket(personalId, sessionId); }
| 1,112,162
|
public synchronized String addTicket(String personalId) { HttpSession session = IWContext.getInstance().getSession(); String sessionId = session.getId(); if (!this.sessionIdSession.containsKey(sessionId)) { this.sessionIdSession.put(sessionId, session); } return getTicket(personalId, sessionId); }
|
public synchronized String addTicket(String personalId) { HttpSession session = IWContext.getInstance().getSession(); String sessionId = session.getId(); if (!this.sessionIdSession.containsKey(sessionId)) { this.sessionIdSession.put(sessionId, session); } return getTicket(sessionId); }
| 1,112,163
|
private String getTicket(String personalId, String sessionId) { int length = personalId.length(); StringBuffer token = new StringBuffer(); token.append(length); token.append(SEPARATOR); token.append(personalId); token.append(sessionId); return token.toString(); }
|
private String getTicket(String personalId, String sessionId) { int length = personalId.length(); StringBuffer token = new StringBuffer(); token.append(length); token.append(SEPARATOR); token.append(personalId); token.append(sessionId); return token.toString(); }
| 1,112,164
|
public boolean isValid(String ticket) { return (validate(ticket) != null); }
|
public boolean isValid(String ticket) { return (validate(ticket) != null); }
| 1,112,165
|
public String validate(String ticket) { int index = ticket.indexOf(SEPARATOR); if (index < 1) { return null; } String personalId = null; String sessionId = null; try { int lengthOfPersonalId = Integer.parseInt(ticket.substring(0, index)); if (lengthOfPersonalId < 1) { return null; } int startIndexSessionId = index + 1 + lengthOfPersonalId; int ticketLength = ticket.length(); if (startIndexSessionId >= ticketLength) { return null; } personalId = ticket.substring(index+1, startIndexSessionId); sessionId = ticket.substring(startIndexSessionId, ticketLength); } catch (NumberFormatException e) { return null; } return (this.sessionIdSession.containsKey(sessionId)) ? personalId : null; // // getting a session object from this request// MessageContext context = MessageContext.getCurrentContext();// AxisHttpSession myAxisSession = (AxisHttpSession) context.getSession();// HttpSession mySession = myAxisSession.getRep();// ServletContext myServletContext = mySession.getServletContext();// // getting the application context// IWMainApplication mainApplication = IWMainApplication.getIWMainApplication(myServletContext);// IWApplicationContext iwac = mainApplication.getIWApplicationContext(); // first try to get the user login name// String userLogin = getUserLogin(personalId, iwac);// if (userLogin == null) {// return false;// } // the first test should be sufficient but we perform also the test with the session // vice versa the test with the session should be sufficient //return isLoggedOnUsingLoggedOnMap(userLogin, mySession, iwac) && isLoggedOnUsingSession(sessionId, iwac); }
|
public String validate(String ticket) { int index = ticket.indexOf(SEPARATOR); if (index < 1) { return null; } String personalId = null; String sessionId = null; try { int lengthOfPersonalId = Integer.parseInt(ticket.substring(0, index)); if (lengthOfPersonalId < 1) { return null; } int startIndexSessionId = index + 1 + lengthOfPersonalId; int ticketLength = ticket.length(); if (startIndexSessionId >= ticketLength) { return null; } personalId = ticket.substring(index+1, startIndexSessionId); sessionId = ticket.substring(startIndexSessionId, ticketLength); } catch (NumberFormatException e) { return null; } return (this.sessionIdSession.containsKey(sessionId)) ? personalId : null; // // getting a session object from this request// MessageContext context = MessageContext.getCurrentContext();// AxisHttpSession myAxisSession = (AxisHttpSession) context.getSession();// HttpSession mySession = myAxisSession.getRep();// ServletContext myServletContext = mySession.getServletContext();// // getting the application context// IWMainApplication mainApplication = IWMainApplication.getIWMainApplication(myServletContext);// IWApplicationContext iwac = mainApplication.getIWApplicationContext(); // first try to get the user login name// String userLogin = getUserLogin(personalId, iwac);// if (userLogin == null) {// return false;// } // the first test should be sufficient but we perform also the test with the session // vice versa the test with the session should be sufficient //return isLoggedOnUsingLoggedOnMap(userLogin, mySession, iwac) && isLoggedOnUsingSession(sessionId, iwac); }
| 1,112,166
|
/**
 * Reflectively invokes a service method named "&lt;class&gt;&lt;delimiter&gt;&lt;method&gt;".
 * Element 0 of {@code parameters} describes the declared parameter types; the
 * remaining elements are marshalled argument strings.
 *
 * @param serviceMethod fully qualified class name plus method name
 * @param parameters    type descriptor followed by marshalled arguments
 * @return the marshalled form of the invocation result
 * @throws Exception any failure, after logging it at SEVERE
 */
public Object execute(String serviceMethod, Vector parameters) throws Exception { try { int index = serviceMethod.indexOf(ServiceProxy.CLASS_METHOD_DELIMITER); final String className = serviceMethod.substring(0, index); final String methodName = serviceMethod.substring(index + 1); Class serviceClass = Class.forName(className); Class[] parameterTypes = getParameterTypes((List)parameters.get(0)); Method method = serviceClass.getMethod(methodName, parameterTypes); Object serviceObject = ServiceFactory.getService(serviceClass); Object result = method.invoke(serviceObject, toObjectArray(parameters, parameterTypes)); return Marshaller.marshal(result); } catch (Exception e) { logger.log(Level.SEVERE, "Error while invoking: " + serviceMethod, e); throw e; } }
|
/**
 * Reflectively invokes a service method named "&lt;class&gt;&lt;delimiter&gt;&lt;method&gt;".
 * Element 0 of {@code parameters} describes the declared parameter types; the
 * remaining elements are marshalled argument strings.
 *
 * @param serviceMethod fully qualified class name plus method name
 * @param parameters    type descriptor followed by marshalled arguments
 * @return the marshalled form of the invocation result
 * @throws Exception any failure, after logging it at SEVERE
 */
public Object execute(String serviceMethod, Vector parameters) throws Exception {
    try {
        int delim = serviceMethod.indexOf(ServiceProxy.CLASS_METHOD_DELIMITER);
        Class serviceClass = Class.forName(serviceMethod.substring(0, delim));
        String methodName = serviceMethod.substring(delim + 1);
        // Element 0 carries the declared parameter types of the call.
        Class[] argTypes = getParameterTypes((List) parameters.get(0));
        Method target = serviceClass.getMethod(methodName, argTypes);
        Object service = ServiceFactory.getService(serviceClass);
        Object result = target.invoke(service, toObjectArray(parameters, argTypes));
        return Marshaller.marshal(result);
    } catch (Exception e) {
        logger.log(Level.SEVERE, "Error while invoking: " + serviceMethod, e);
        throw e;
    }
}
| 1,112,167
|
/**
 * Unmarshals the string-encoded arguments (elements 1..n of {@code parameters})
 * into instances of the declared parameter types. The first argument must be a
 * ServiceContext; when it carries a user, that user is replaced by the full
 * record loaded through UserManager.
 */
private static Object[] toObjectArray(Vector parameters, Class[] parameterTypes){ Object[] args = new Object[parameterTypes.length]; for(int i=0; i<parameterTypes.length; i++){ if(i == 0){ /* first argument must always be ServiceContext */ assert parameterTypes[i].equals(ServiceContext.class): "First argument to the service method must be " + "ServiceContext "; ServiceContextImpl serviceContext = (ServiceContextImpl)Unmarshaller.unmarshal( ServiceContextImpl.class, (String)parameters.get(i+1)); if(serviceContext.getUser() != null){ UserManager userManager = UserManager.getInstance(); User completeUser = userManager.getUser(serviceContext.getUser().getUsername()); serviceContext.setUser(completeUser); } args[i] = serviceContext; }else{ args[i] = Unmarshaller.unmarshal(parameterTypes[i], (String)parameters.get(i+1)); } } return args; }
|
/**
 * Unmarshals the string-encoded arguments (elements 1..n of {@code parameters})
 * into instances of the declared parameter types. The first argument must be a
 * ServiceContext; when it carries a user, that user is replaced by the full
 * record loaded through UserManager.
 */
private static Object[] toObjectArray(Vector parameters, Class[] parameterTypes){
    Object[] values = new Object[parameterTypes.length];
    for (int idx = 0; idx < parameterTypes.length; idx++) {
        // parameters is offset by one: element 0 is the type descriptor.
        String payload = (String) parameters.get(idx + 1);
        if (idx == 0) {
            /* first argument must always be ServiceContext */
            assert parameterTypes[idx].equals(ServiceContext.class):
                    "First argument to the service method must be " + "ServiceContext ";
            ServiceContextImpl ctx = (ServiceContextImpl) Unmarshaller.unmarshal(
                    ServiceContextImpl.class, payload);
            if (ctx.getUser() != null) {
                // Swap the partially-populated user for the full stored record.
                User completeUser = UserManager.getInstance()
                        .getUser(ctx.getUser().getUsername());
                ctx.setUser(completeUser);
            }
            values[idx] = ctx;
        } else {
            values[idx] = Unmarshaller.unmarshal(parameterTypes[idx], payload);
        }
    }
    return values;
}
| 1,112,168
|
/**
 * Runs the Jelly script in {@code fileName} and returns its trimmed text
 * output. The context root is the current working directory, so scripts may
 * reference project resources by absolute URI.
 */
protected String evaluteScriptAsText(String fileName) throws Exception { JellyContext context = new JellyContext(); // allow scripts to refer to any resource inside this project // using an absolute URI like /src/test/org/apache/foo.xml context.setRootURL(new File(".").toURL()); // capture the output StringWriter buffer = new StringWriter(); XMLOutput output = XMLOutput.createXMLOutput(buffer); context.runScript( new File(fileName), output ); String text = buffer.toString().trim(); if (log.isDebugEnabled()) { log.debug("Evaluated script as..."); log.debug(text); } return text; }
|
protected String evaluteScriptAsText(String fileName) throws Exception { JellyContext context = new JellyContext(); // allow scripts to refer to any resource inside this project // using an absolute URI like /src/test/org/apache/foo.xml context.setRootURL(new File(".").toURL()); // cature the output StringWriter buffer = new StringWriter(); XMLOutput output = XMLOutput.createXMLOutput(buffer); context.runScript( new File(fileName), output ); String text = buffer.toString().trim(); if (log.isDebugEnabled()) { log.debug("Evaluated script as..."); log.debug(text); } return text; }
| 1,112,170
|
/** Evaluates the doctype sample script and checks its serialized output. */
public void testDoctype() throws Exception {
    String text = evaluteScriptAsText(testBaseDir + "/testDoctype.jelly");
    // Assertion message aligned with the sibling tests in this file that use
    // "Should produce the correct output".
    assertEquals("Should produce the correct output",
            "<!DOCTYPE foo PUBLIC \"publicID\" \"foo.dtd\">\n<foo></foo>", text);
}
|
/** Evaluates the doctype sample script and checks its serialized output. */
public void testDoctype() throws Exception {
    final String output = evaluteScriptAsText(testBaseDir + "/testDoctype.jelly");
    assertEquals("Should produce the correct output",
            "<!DOCTYPE foo PUBLIC \"publicID\" \"foo.dtd\">\n<foo></foo>", output);
}
| 1,112,171
|
/** Parses and compiles example.jelly, runs it, and verifies its output. */
public void testParse() throws Exception {
    InputStream in = new FileInputStream(testBaseDir + "/example.jelly");
    XMLParser parser = new XMLParser();
    Script script = parser.parse(in);
    script = script.compile();
    log.debug("Found: " + script);
    assertTrue("Parsed a Script", script instanceof Script);
    StringWriter buffer = new StringWriter();
    script.run(parser.getContext(), XMLOutput.createXMLOutput(buffer));
    String text = buffer.toString().trim();
    if (log.isDebugEnabled()) {
        log.debug("Evaluated script as...");
        log.debug(text);
    }
    // Assertion message aligned with the sibling tests in this file that use
    // "Should produce the correct output".
    assertEquals("Should produce the correct output", "It works!", text);
}
|
/** Parses and compiles example.jelly, runs it, and verifies its output. */
public void testParse() throws Exception {
    InputStream input = new FileInputStream(testBaseDir + "/example.jelly");
    XMLParser parser = new XMLParser();
    Script script = parser.parse(input);
    script = script.compile();
    log.debug("Found: " + script);
    assertTrue("Parsed a Script", script instanceof Script);
    // Run the compiled script and collect its textual output.
    StringWriter writer = new StringWriter();
    script.run(parser.getContext(), XMLOutput.createXMLOutput(writer));
    String result = writer.toString().trim();
    if (log.isDebugEnabled()) {
        log.debug("Evaluated script as...");
        log.debug(result);
    }
    assertEquals("Should produce the correct output", "It works!", result);
}
| 1,112,172
|
/** Evaluates the transform example script and checks its output. */
public void testTransform() throws Exception {
    String text = evaluteScriptAsText(testBaseDir + "/transformExample.jelly");
    // Assertion message aligned with the sibling tests in this file.
    assertEquals("Should produce the correct output", "It works!", text);
}
|
/** Evaluates the transform example script and checks its output. */
public void testTransform() throws Exception {
    final String output = evaluteScriptAsText(testBaseDir + "/transformExample.jelly");
    assertEquals("Should produce the correct output", "It works!", output);
}
| 1,112,173
|
/** Evaluates the inline transform example script and checks its output. */
public void testTransformAllInLine() throws Exception {
    String text = evaluteScriptAsText(testBaseDir + "/transformExampleAllInLine.jelly");
    // Assertion message aligned with the sibling tests in this file.
    assertEquals("Should produce the correct output", "It works!", text);
}
|
/** Evaluates the inline transform example script and checks its output. */
public void testTransformAllInLine() throws Exception {
    final String output = evaluteScriptAsText(testBaseDir + "/transformExampleAllInLine.jelly");
    assertEquals("Should produce the correct output", "It works!", output);
}
| 1,112,174
|
/** Evaluates the parameterized transform example and checks its output. */
public void testTransformParams() throws Exception {
    String text = evaluteScriptAsText(testBaseDir + "/transformParamExample.jelly");
    // Assertion message aligned with the sibling tests in this file.
    assertEquals("Should produce the correct output", "It works!", text);
}
|
/** Evaluates the parameterized transform example and checks its output. */
public void testTransformParams() throws Exception {
    final String output = evaluteScriptAsText(testBaseDir + "/transformParamExample.jelly");
    assertEquals("Should produce the correct output", "It works!", output);
}
| 1,112,175
|
/** Evaluates the inline parameterized transform example and checks its output. */
public void testTransformParamsInLine() throws Exception {
    String text = evaluteScriptAsText(testBaseDir + "/transformParamExample2.jelly");
    // Assertion message aligned with the sibling tests in this file.
    assertEquals("Should produce the correct output", "It works!", text);
}
|
/** Evaluates the inline parameterized transform example and checks its output. */
public void testTransformParamsInLine() throws Exception {
    final String output = evaluteScriptAsText(testBaseDir + "/transformParamExample2.jelly");
    assertEquals("Should produce the correct output", "It works!", output);
}
| 1,112,176
|
/** Evaluates the SAX-output transform example and checks its output. */
public void testTransformSAXOutput() throws Exception {
    String text = evaluteScriptAsText(testBaseDir + "/transformExampleSAXOutput.jelly");
    // Assertion message aligned with the sibling tests in this file.
    assertEquals("Should produce the correct output", "It works!", text);
}
|
/** Evaluates the SAX-output transform example and checks its output. */
public void testTransformSAXOutput() throws Exception {
    final String output = evaluteScriptAsText(testBaseDir + "/transformExampleSAXOutput.jelly");
    assertEquals("Should produce the correct output", "It works!", output);
}
| 1,112,177
|
/** Evaluates the nested SAX-output transform example and checks its output. */
public void testTransformSAXOutputNestedTransforms() throws Exception {
    String text = evaluteScriptAsText(testBaseDir + "/transformExampleSAXOutputNestedTransforms.jelly");
    // Assertion message aligned with the sibling tests in this file.
    assertEquals("Should produce the correct output", "It works!", text);
}
|
/** Evaluates the nested SAX-output transform example and checks its output. */
public void testTransformSAXOutputNestedTransforms() throws Exception {
    final String output = evaluteScriptAsText(testBaseDir + "/transformExampleSAXOutputNestedTransforms.jelly");
    assertEquals("Should produce the correct output", "It works!", output);
}
| 1,112,178
|
/** Evaluates the schematron transform example and checks its report counts. */
public void testTransformSchematron() throws Exception {
    String text = evaluteScriptAsText(testBaseDir + "/schematron/transformSchematronExample.jelly");
    // Assertion message aligned with the sibling tests in this file.
    assertEquals("Should produce the correct output", "Report count=1:assert count=2", text);
}
|
/** Evaluates the schematron transform example and checks its report counts. */
public void testTransformSchematron() throws Exception {
    final String output = evaluteScriptAsText(testBaseDir + "/schematron/transformSchematronExample.jelly");
    assertEquals("Should produce the correct output", "Report count=1:assert count=2", output);
}
| 1,112,179
|
/** Evaluates the xml-variable transform example and checks its output. */
public void testTransformXmlVar() throws Exception {
    String text = evaluteScriptAsText(testBaseDir + "/transformExampleXmlVar.jelly");
    // Assertion message aligned with the sibling tests in this file.
    assertEquals("Should produce the correct output", "It works!", text);
}
|
/** Evaluates the xml-variable transform example and checks its output. */
public void testTransformXmlVar() throws Exception {
    final String output = evaluteScriptAsText(testBaseDir + "/transformExampleXmlVar.jelly");
    assertEquals("Should produce the correct output", "It works!", output);
}
| 1,112,180
|
public void testSourceDropDownsWithOnlyCatalog() { ArchitectDataSource ds = new ArchitectDataSource(); ds.setDisplayName("Schemaless Database"); ds.setDriverClass("regress.ca.sqlpower.architect.MockJDBCDriver"); ds.setUser("fake"); ds.setPass("fake"); //this creates a mock jdbc database with only catalogs ds.setUrl("jdbc:mock:" + "dbmd.catalogTerm=Catalog" + "&catalogs=cat1,cat2,cat3" + "&tables.cat1=tab1" + "&tables.cat2=tab2" + "&tables.cat3=tab3"); sourcePhysicalRadio.setSelected(true); sourceDatabaseDropdown.addItem(ds); sourceDatabaseDropdown.setSelectedItem(ds); flushAWT(); assertFalse(sourceSchemaDropdown.isEnabled()); assertTrue(sourceCatalogDropdown.isEnabled()); }
|
/**
 * Builds a mock catalog-only JDBC data source, registers it with the pl.ini
 * data-source collection, and verifies that selecting it enables the catalog
 * dropdown while the schema dropdown stays disabled.
 */
public void testSourceDropDownsWithOnlyCatalog() { ArchitectDataSource ds = new ArchitectDataSource(); ds.setDisplayName("Schemaless Database"); ds.setDriverClass("regress.ca.sqlpower.architect.MockJDBCDriver"); ds.setUser("fake"); ds.setPass("fake"); //this creates a mock jdbc database with only catalogs ds.setUrl("jdbc:mock:" + "dbmd.catalogTerm=Catalog" + "&catalogs=cat1,cat2,cat3" + "&tables.cat1=tab1" + "&tables.cat2=tab2" + "&tables.cat3=tab3"); sourcePhysicalRadio.setSelected(true); ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni().addDataSource(ds); sourceDatabaseDropdown.setSelectedItem(ds); flushAWT(); assertFalse(sourceSchemaDropdown.isEnabled()); assertTrue(sourceCatalogDropdown.isEnabled()); }
| 1,112,181
|
public void testSourceDropDownsWithSchemaAndCatalog() { ArchitectDataSource ds = new ArchitectDataSource(); ds.setDisplayName("Schemaless Database"); ds.setDriverClass("regress.ca.sqlpower.architect.MockJDBCDriver"); ds.setUser("fake"); ds.setPass("fake"); //this creates a mock jdbc database with catalogs and schemas ds.setUrl("jdbc:mock:dbmd.catalogTerm=Catalog&dbmd.schemaTerm=Schema&catalogs=cow_catalog&schemas.cow_catalog=moo_schema,quack_schema&tables.cow_catalog.moo_schema=braaaap,pffft&tables.cow_catalog.quack_schema=duck,goose"); sourcePhysicalRadio.setSelected(true); sourceDatabaseDropdown.addItem(ds); sourceDatabaseDropdown.setSelectedItem(ds); flushAWT(); assertTrue(sourceCatalogDropdown.isEnabled()); assertTrue(sourceSchemaDropdown.isEnabled()); }
|
/**
 * Builds a mock JDBC data source exposing both catalogs and schemas, registers
 * it with the pl.ini data-source collection, and verifies that selecting it
 * enables both the catalog and schema dropdowns.
 */
public void testSourceDropDownsWithSchemaAndCatalog() { ArchitectDataSource ds = new ArchitectDataSource(); ds.setDisplayName("Schemaless Database"); ds.setDriverClass("regress.ca.sqlpower.architect.MockJDBCDriver"); ds.setUser("fake"); ds.setPass("fake"); //this creates a mock jdbc database with catalogs and schemas ds.setUrl("jdbc:mock:dbmd.catalogTerm=Catalog&dbmd.schemaTerm=Schema&catalogs=cow_catalog&schemas.cow_catalog=moo_schema,quack_schema&tables.cow_catalog.moo_schema=braaaap,pffft&tables.cow_catalog.quack_schema=duck,goose"); sourcePhysicalRadio.setSelected(true); ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni().addDataSource(ds); sourceDatabaseDropdown.setSelectedItem(ds); flushAWT(); assertTrue(sourceCatalogDropdown.isEnabled()); assertTrue(sourceSchemaDropdown.isEnabled()); }
| 1,112,182
|
/**
 * Creates a child Context over {@code newVariables}, publishing this context's
 * variable map under the key "parentScope" and sharing this context's tag
 * libraries with the child.
 */
public Context newContext(Map newVariables) { // XXXX: should allow this new context to // XXXX: inherit parent contexts? // XXXX: Or at least publish the parent scope // XXXX: as a Map in this new variable scope? newVariables.put( "parentScope", variables ); Context answer = new Context( newVariables ); answer.taglibs = this.taglibs; return answer; }
|
public Context newContext(Map newVariables) { // XXXX: should allow this new context to // XXXX: inherit parent contexts? // XXXX: Or at least publish the parent scope // XXXX: as a Map in this new variable scope? newVariables.put( "parentScope", variables ); Context answer = new Context( newVariables ); answer.taglibs = this.taglibs; return answer; }
| 1,112,184
|
/**
 * Handles tab-selection changes: toggles the export and key menus for the
 * newly selected tab, re-filters the marker set when the check panel was
 * edited (rebuilding Chromosome.realIndex and the filtered D' table), and
 * regenerates haplotypes when the block definitions changed.
 *
 * NOTE(review): this copy reads the marker rating flag from table column 7,
 * while a sibling copy of this method in this file reads column 8 -- confirm
 * which column actually holds the Boolean rating.
 */
public void stateChanged(ChangeEvent e) { int tabNum = tabs.getSelectedIndex(); if (tabNum == VIEW_D_NUM || tabNum == VIEW_HAP_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(true); }else if (tabNum == VIEW_TDT_NUM || tabNum == VIEW_CHECK_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(false); }else{ exportMenuItems[0].setEnabled(false); exportMenuItems[1].setEnabled(false); } if (tabNum == VIEW_D_NUM){ keyMenu.setEnabled(true); }else{ keyMenu.setEnabled(false); } viewMenuItems[tabs.getSelectedIndex()].setSelected(true); if (checkPanel != null && checkPanel.changed){ window.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); JTable table = checkPanel.getTable(); boolean[] markerResults = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResults[i] = ((Boolean)table.getValueAt(i,7)).booleanValue(); } int count = 0; for (int i = 0; i < Chromosome.getSize(); i++){ if (markerResults[i]){ count++; } } Chromosome.realIndex = new int[count]; int k = 0; for (int i =0; i < Chromosome.getSize(); i++){ if (markerResults[i]){ Chromosome.realIndex[k] = i; k++; } } theData.filteredDPrimeTable = theData.getFilteredTable(); //after editing the filtered marker list, needs to be prodded into //resizing correctly Dimension size = dPrimeDisplay.getSize(); Dimension pref = dPrimeDisplay.getPreferredSize(); Rectangle visRect = dPrimeDisplay.getVisibleRect(); if (size.width != pref.width && pref.width > visRect.width){ ((JViewport)dPrimeDisplay.getParent()).setViewSize(pref); } hapDisplay.theData = theData; changeBlocks(currentBlockDef, dPrimeDisplay.currentScheme); if (tdtPanel != null){ tdtPanel.refreshTable(); } setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); checkPanel.changed=false; } if (hapDisplay != null && theData.blocksChanged){ setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); try{ hapDisplay.getHaps(); }catch(HaploViewException hv){ 
JOptionPane.showMessageDialog(window, hv.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } hapScroller.setViewportView(hapDisplay); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); theData.blocksChanged = false; } }
|
/**
 * Handles tab-selection changes: toggles the export and key menus for the
 * newly selected tab, re-filters the marker set when the check panel was
 * edited (rebuilding Chromosome.realIndex and the filtered D' table), and
 * regenerates haplotypes when the block definitions changed. The marker
 * rating flag is read from table column 8.
 */
public void stateChanged(ChangeEvent e) { int tabNum = tabs.getSelectedIndex(); if (tabNum == VIEW_D_NUM || tabNum == VIEW_HAP_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(true); }else if (tabNum == VIEW_TDT_NUM || tabNum == VIEW_CHECK_NUM){ exportMenuItems[0].setEnabled(true); exportMenuItems[1].setEnabled(false); }else{ exportMenuItems[0].setEnabled(false); exportMenuItems[1].setEnabled(false); } if (tabNum == VIEW_D_NUM){ keyMenu.setEnabled(true); }else{ keyMenu.setEnabled(false); } viewMenuItems[tabs.getSelectedIndex()].setSelected(true); if (checkPanel != null && checkPanel.changed){ window.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); JTable table = checkPanel.getTable(); boolean[] markerResults = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResults[i] = ((Boolean)table.getValueAt(i,8)).booleanValue(); } int count = 0; for (int i = 0; i < Chromosome.getSize(); i++){ if (markerResults[i]){ count++; } } Chromosome.realIndex = new int[count]; int k = 0; for (int i =0; i < Chromosome.getSize(); i++){ if (markerResults[i]){ Chromosome.realIndex[k] = i; k++; } } theData.filteredDPrimeTable = theData.getFilteredTable(); //after editing the filtered marker list, needs to be prodded into //resizing correctly Dimension size = dPrimeDisplay.getSize(); Dimension pref = dPrimeDisplay.getPreferredSize(); Rectangle visRect = dPrimeDisplay.getVisibleRect(); if (size.width != pref.width && pref.width > visRect.width){ ((JViewport)dPrimeDisplay.getParent()).setViewSize(pref); } hapDisplay.theData = theData; changeBlocks(currentBlockDef, dPrimeDisplay.currentScheme); if (tdtPanel != null){ tdtPanel.refreshTable(); } setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); checkPanel.changed=false; } if (hapDisplay != null && theData.blocksChanged){ setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); try{ hapDisplay.getHaps(); }catch(HaploViewException hv){ 
JOptionPane.showMessageDialog(window, hv.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } hapScroller.setViewportView(hapDisplay); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); theData.blocksChanged = false; } }
| 1,112,185
|
/**
 * Dispatches menu/button actions: opening genotype or marker files, tweaking
 * or clearing block definitions, zooming, recoloring, exporting PNG/text,
 * selecting all markers, re-scoring markers with the typed-in cutoffs, and
 * showing help, quitting, or switching tabs.
 *
 * NOTE(review): this copy reads the cutoff fields hwcut/genocut/mendcut
 * directly, while sibling copies in this file read some of them through a
 * {@code cdc} object -- confirm which object owns these text fields.
 */
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == READ_GENOTYPES){ ReadDataDialog readDialog = new ReadDataDialog("Open new data", this); readDialog.pack(); readDialog.setVisible(true); } else if (command == READ_MARKERS){ //JFileChooser fc = new JFileChooser(System.getProperty("user.dir")); fc.setSelectedFile(new File("")); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { readMarkers(fc.getSelectedFile(),null); } }else if (command == CUST_BLOCKS){ TweakBlockDefsDialog tweakDialog = new TweakBlockDefsDialog("Customize Blocks", this); tweakDialog.pack(); tweakDialog.setVisible(true); }else if (command == CLEAR_BLOCKS){ colorMenuItems[0].setSelected(true); for (int i = 1; i< colorMenuItems.length; i++){ colorMenuItems[i].setEnabled(false); } changeBlocks(3,1); //blockdef clauses }else if (command.startsWith("block")){ int method = Integer.valueOf(command.substring(5)).intValue(); changeBlocks(method,1); for (int i = 1; i < colorMenuItems.length; i++){ if (method+1 == i){ colorMenuItems[i].setEnabled(true); }else{ colorMenuItems[i].setEnabled(false); } } colorMenuItems[0].setSelected(true); //zooming clauses }else if (command.startsWith("zoom")){ dPrimeDisplay.zoom(Integer.valueOf(command.substring(4)).intValue()); //coloring clauses }else if (command.startsWith("color")){ dPrimeDisplay.refresh(Integer.valueOf(command.substring(5)).intValue()+1); changeKey(Integer.valueOf(command.substring(5)).intValue()+1); //exporting clauses }else if (command == EXPORT_PNG){ fc.setSelectedFile(new File("")); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), PNG_MODE, fc.getSelectedFile()); } }else if (command == EXPORT_TEXT){ fc.setSelectedFile(new File("")); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), TXT_MODE, fc.getSelectedFile()); } }else if (command == "Select All"){ checkPanel.selectAll(); }else if 
(command == "Rescore Markers"){ String cut = hwcut.getText(); if (cut.equals("")){ cut = "0"; } CheckData.hwCut = Double.parseDouble(cut); cut = genocut.getText(); if (cut.equals("")){ cut="0"; } CheckData.failedGenoCut = Integer.parseInt(cut); cut = mendcut.getText(); if (cut.equals("")){ cut="0"; } CheckData.numMendErrCut = Integer.parseInt(cut); checkPanel.redoRatings(); }else if (command == "Tutorial"){ showHelp(); } else if (command == QUIT){ quit(); } else { for (int i = 0; i < viewItems.length; i++) { if (command == viewItems[i]) tabs.setSelectedIndex(i); } } }
|
/**
 * Dispatches menu/button actions: opening genotype or marker files, tweaking
 * or clearing block definitions, zooming, recoloring, exporting PNG/text,
 * selecting all markers, re-scoring markers with the typed-in cutoffs, and
 * showing help, quitting, or switching tabs.
 *
 * NOTE(review): the HW cutoff is read through {@code cdc.hwcut} here, but
 * genocut/mendcut are still read as direct fields -- confirm whether all
 * three cutoff fields should come from {@code cdc}.
 */
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == READ_GENOTYPES){ ReadDataDialog readDialog = new ReadDataDialog("Open new data", this); readDialog.pack(); readDialog.setVisible(true); } else if (command == READ_MARKERS){ //JFileChooser fc = new JFileChooser(System.getProperty("user.dir")); fc.setSelectedFile(new File("")); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { readMarkers(fc.getSelectedFile(),null); } }else if (command == CUST_BLOCKS){ TweakBlockDefsDialog tweakDialog = new TweakBlockDefsDialog("Customize Blocks", this); tweakDialog.pack(); tweakDialog.setVisible(true); }else if (command == CLEAR_BLOCKS){ colorMenuItems[0].setSelected(true); for (int i = 1; i< colorMenuItems.length; i++){ colorMenuItems[i].setEnabled(false); } changeBlocks(3,1); //blockdef clauses }else if (command.startsWith("block")){ int method = Integer.valueOf(command.substring(5)).intValue(); changeBlocks(method,1); for (int i = 1; i < colorMenuItems.length; i++){ if (method+1 == i){ colorMenuItems[i].setEnabled(true); }else{ colorMenuItems[i].setEnabled(false); } } colorMenuItems[0].setSelected(true); //zooming clauses }else if (command.startsWith("zoom")){ dPrimeDisplay.zoom(Integer.valueOf(command.substring(4)).intValue()); //coloring clauses }else if (command.startsWith("color")){ dPrimeDisplay.refresh(Integer.valueOf(command.substring(5)).intValue()+1); changeKey(Integer.valueOf(command.substring(5)).intValue()+1); //exporting clauses }else if (command == EXPORT_PNG){ fc.setSelectedFile(new File("")); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), PNG_MODE, fc.getSelectedFile()); } }else if (command == EXPORT_TEXT){ fc.setSelectedFile(new File("")); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), TXT_MODE, fc.getSelectedFile()); } }else if (command == "Select All"){ checkPanel.selectAll(); }else if 
(command == "Rescore Markers"){ String cut = cdc.hwcut.getText(); if (cut.equals("")){ cut = "0"; } CheckData.hwCut = Double.parseDouble(cut); cut = genocut.getText(); if (cut.equals("")){ cut="0"; } CheckData.failedGenoCut = Integer.parseInt(cut); cut = mendcut.getText(); if (cut.equals("")){ cut="0"; } CheckData.numMendErrCut = Integer.parseInt(cut); checkPanel.redoRatings(); }else if (command == "Tutorial"){ showHelp(); } else if (command == QUIT){ quit(); } else { for (int i = 0; i < viewItems.length; i++) { if (command == viewItems[i]) tabs.setSelectedIndex(i); } } }
| 1,112,186
|
/**
 * Dispatches menu/button actions: opening genotype or marker files, tweaking
 * or clearing block definitions, zooming, recoloring, exporting PNG/text,
 * selecting all markers, re-scoring markers with the typed-in cutoffs, and
 * showing help, quitting, or switching tabs.
 *
 * NOTE(review): this copy reads the cutoff fields hwcut/genocut/mendcut
 * directly, while sibling copies in this file read some of them through a
 * {@code cdc} object -- confirm which object owns these text fields.
 */
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == READ_GENOTYPES){ ReadDataDialog readDialog = new ReadDataDialog("Open new data", this); readDialog.pack(); readDialog.setVisible(true); } else if (command == READ_MARKERS){ //JFileChooser fc = new JFileChooser(System.getProperty("user.dir")); fc.setSelectedFile(new File("")); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { readMarkers(fc.getSelectedFile(),null); } }else if (command == CUST_BLOCKS){ TweakBlockDefsDialog tweakDialog = new TweakBlockDefsDialog("Customize Blocks", this); tweakDialog.pack(); tweakDialog.setVisible(true); }else if (command == CLEAR_BLOCKS){ colorMenuItems[0].setSelected(true); for (int i = 1; i< colorMenuItems.length; i++){ colorMenuItems[i].setEnabled(false); } changeBlocks(3,1); //blockdef clauses }else if (command.startsWith("block")){ int method = Integer.valueOf(command.substring(5)).intValue(); changeBlocks(method,1); for (int i = 1; i < colorMenuItems.length; i++){ if (method+1 == i){ colorMenuItems[i].setEnabled(true); }else{ colorMenuItems[i].setEnabled(false); } } colorMenuItems[0].setSelected(true); //zooming clauses }else if (command.startsWith("zoom")){ dPrimeDisplay.zoom(Integer.valueOf(command.substring(4)).intValue()); //coloring clauses }else if (command.startsWith("color")){ dPrimeDisplay.refresh(Integer.valueOf(command.substring(5)).intValue()+1); changeKey(Integer.valueOf(command.substring(5)).intValue()+1); //exporting clauses }else if (command == EXPORT_PNG){ fc.setSelectedFile(new File("")); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), PNG_MODE, fc.getSelectedFile()); } }else if (command == EXPORT_TEXT){ fc.setSelectedFile(new File("")); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), TXT_MODE, fc.getSelectedFile()); } }else if (command == "Select All"){ checkPanel.selectAll(); }else if 
(command == "Rescore Markers"){ String cut = hwcut.getText(); if (cut.equals("")){ cut = "0"; } CheckData.hwCut = Double.parseDouble(cut); cut = genocut.getText(); if (cut.equals("")){ cut="0"; } CheckData.failedGenoCut = Integer.parseInt(cut); cut = mendcut.getText(); if (cut.equals("")){ cut="0"; } CheckData.numMendErrCut = Integer.parseInt(cut); checkPanel.redoRatings(); }else if (command == "Tutorial"){ showHelp(); } else if (command == QUIT){ quit(); } else { for (int i = 0; i < viewItems.length; i++) { if (command == viewItems[i]) tabs.setSelectedIndex(i); } } }
|
/**
 * Dispatches menu/button actions: opening genotype or marker files, tweaking
 * or clearing block definitions, zooming, recoloring, exporting PNG/text,
 * selecting all markers, re-scoring markers with the typed-in cutoffs, and
 * showing help, quitting, or switching tabs.
 *
 * NOTE(review): the geno cutoff is read through {@code cdc.genocut} here, but
 * hwcut/mendcut are still read as direct fields -- confirm whether all three
 * cutoff fields should come from {@code cdc}.
 */
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == READ_GENOTYPES){ ReadDataDialog readDialog = new ReadDataDialog("Open new data", this); readDialog.pack(); readDialog.setVisible(true); } else if (command == READ_MARKERS){ //JFileChooser fc = new JFileChooser(System.getProperty("user.dir")); fc.setSelectedFile(new File("")); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { readMarkers(fc.getSelectedFile(),null); } }else if (command == CUST_BLOCKS){ TweakBlockDefsDialog tweakDialog = new TweakBlockDefsDialog("Customize Blocks", this); tweakDialog.pack(); tweakDialog.setVisible(true); }else if (command == CLEAR_BLOCKS){ colorMenuItems[0].setSelected(true); for (int i = 1; i< colorMenuItems.length; i++){ colorMenuItems[i].setEnabled(false); } changeBlocks(3,1); //blockdef clauses }else if (command.startsWith("block")){ int method = Integer.valueOf(command.substring(5)).intValue(); changeBlocks(method,1); for (int i = 1; i < colorMenuItems.length; i++){ if (method+1 == i){ colorMenuItems[i].setEnabled(true); }else{ colorMenuItems[i].setEnabled(false); } } colorMenuItems[0].setSelected(true); //zooming clauses }else if (command.startsWith("zoom")){ dPrimeDisplay.zoom(Integer.valueOf(command.substring(4)).intValue()); //coloring clauses }else if (command.startsWith("color")){ dPrimeDisplay.refresh(Integer.valueOf(command.substring(5)).intValue()+1); changeKey(Integer.valueOf(command.substring(5)).intValue()+1); //exporting clauses }else if (command == EXPORT_PNG){ fc.setSelectedFile(new File("")); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), PNG_MODE, fc.getSelectedFile()); } }else if (command == EXPORT_TEXT){ fc.setSelectedFile(new File("")); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), TXT_MODE, fc.getSelectedFile()); } }else if (command == "Select All"){ checkPanel.selectAll(); }else if 
(command == "Rescore Markers"){ String cut = hwcut.getText(); if (cut.equals("")){ cut = "0"; } CheckData.hwCut = Double.parseDouble(cut); cut = cdc.genocut.getText(); if (cut.equals("")){ cut="0"; } CheckData.failedGenoCut = Integer.parseInt(cut); cut = mendcut.getText(); if (cut.equals("")){ cut="0"; } CheckData.numMendErrCut = Integer.parseInt(cut); checkPanel.redoRatings(); }else if (command == "Tutorial"){ showHelp(); } else if (command == QUIT){ quit(); } else { for (int i = 0; i < viewItems.length; i++) { if (command == viewItems[i]) tabs.setSelectedIndex(i); } } }
| 1,112,187
|
/**
 * Dispatches menu/button actions: opening genotype or marker files, tweaking
 * or clearing block definitions, zooming, recoloring, exporting PNG/text,
 * selecting all markers, re-scoring markers with the typed-in cutoffs, and
 * showing help, quitting, or switching tabs.
 *
 * NOTE(review): this copy reads the cutoff fields hwcut/genocut/mendcut
 * directly, while sibling copies in this file read some of them through a
 * {@code cdc} object -- confirm which object owns these text fields.
 */
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == READ_GENOTYPES){ ReadDataDialog readDialog = new ReadDataDialog("Open new data", this); readDialog.pack(); readDialog.setVisible(true); } else if (command == READ_MARKERS){ //JFileChooser fc = new JFileChooser(System.getProperty("user.dir")); fc.setSelectedFile(new File("")); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { readMarkers(fc.getSelectedFile(),null); } }else if (command == CUST_BLOCKS){ TweakBlockDefsDialog tweakDialog = new TweakBlockDefsDialog("Customize Blocks", this); tweakDialog.pack(); tweakDialog.setVisible(true); }else if (command == CLEAR_BLOCKS){ colorMenuItems[0].setSelected(true); for (int i = 1; i< colorMenuItems.length; i++){ colorMenuItems[i].setEnabled(false); } changeBlocks(3,1); //blockdef clauses }else if (command.startsWith("block")){ int method = Integer.valueOf(command.substring(5)).intValue(); changeBlocks(method,1); for (int i = 1; i < colorMenuItems.length; i++){ if (method+1 == i){ colorMenuItems[i].setEnabled(true); }else{ colorMenuItems[i].setEnabled(false); } } colorMenuItems[0].setSelected(true); //zooming clauses }else if (command.startsWith("zoom")){ dPrimeDisplay.zoom(Integer.valueOf(command.substring(4)).intValue()); //coloring clauses }else if (command.startsWith("color")){ dPrimeDisplay.refresh(Integer.valueOf(command.substring(5)).intValue()+1); changeKey(Integer.valueOf(command.substring(5)).intValue()+1); //exporting clauses }else if (command == EXPORT_PNG){ fc.setSelectedFile(new File("")); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), PNG_MODE, fc.getSelectedFile()); } }else if (command == EXPORT_TEXT){ fc.setSelectedFile(new File("")); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), TXT_MODE, fc.getSelectedFile()); } }else if (command == "Select All"){ checkPanel.selectAll(); }else if 
(command == "Rescore Markers"){ String cut = hwcut.getText(); if (cut.equals("")){ cut = "0"; } CheckData.hwCut = Double.parseDouble(cut); cut = genocut.getText(); if (cut.equals("")){ cut="0"; } CheckData.failedGenoCut = Integer.parseInt(cut); cut = mendcut.getText(); if (cut.equals("")){ cut="0"; } CheckData.numMendErrCut = Integer.parseInt(cut); checkPanel.redoRatings(); }else if (command == "Tutorial"){ showHelp(); } else if (command == QUIT){ quit(); } else { for (int i = 0; i < viewItems.length; i++) { if (command == viewItems[i]) tabs.setSelectedIndex(i); } } }
|
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == READ_GENOTYPES){ ReadDataDialog readDialog = new ReadDataDialog("Open new data", this); readDialog.pack(); readDialog.setVisible(true); } else if (command == READ_MARKERS){ //JFileChooser fc = new JFileChooser(System.getProperty("user.dir")); fc.setSelectedFile(new File("")); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { readMarkers(fc.getSelectedFile(),null); } }else if (command == CUST_BLOCKS){ TweakBlockDefsDialog tweakDialog = new TweakBlockDefsDialog("Customize Blocks", this); tweakDialog.pack(); tweakDialog.setVisible(true); }else if (command == CLEAR_BLOCKS){ colorMenuItems[0].setSelected(true); for (int i = 1; i< colorMenuItems.length; i++){ colorMenuItems[i].setEnabled(false); } changeBlocks(3,1); //blockdef clauses }else if (command.startsWith("block")){ int method = Integer.valueOf(command.substring(5)).intValue(); changeBlocks(method,1); for (int i = 1; i < colorMenuItems.length; i++){ if (method+1 == i){ colorMenuItems[i].setEnabled(true); }else{ colorMenuItems[i].setEnabled(false); } } colorMenuItems[0].setSelected(true); //zooming clauses }else if (command.startsWith("zoom")){ dPrimeDisplay.zoom(Integer.valueOf(command.substring(4)).intValue()); //coloring clauses }else if (command.startsWith("color")){ dPrimeDisplay.refresh(Integer.valueOf(command.substring(5)).intValue()+1); changeKey(Integer.valueOf(command.substring(5)).intValue()+1); //exporting clauses }else if (command == EXPORT_PNG){ fc.setSelectedFile(new File("")); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), PNG_MODE, fc.getSelectedFile()); } }else if (command == EXPORT_TEXT){ fc.setSelectedFile(new File("")); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), TXT_MODE, fc.getSelectedFile()); } }else if (command == "Select All"){ checkPanel.selectAll(); }else if 
(command == "Rescore Markers"){ String cut = hwcut.getText(); if (cut.equals("")){ cut = "0"; } CheckData.hwCut = Double.parseDouble(cut); cut = genocut.getText(); if (cut.equals("")){ cut="0"; } CheckData.failedGenoCut = Integer.parseInt(cut); cut = cdc.mendcut.getText(); if (cut.equals("")){ cut="0"; } CheckData.numMendErrCut = Integer.parseInt(cut); checkPanel.redoRatings(); }else if (command == "Tutorial"){ showHelp(); } else if (command == QUIT){ quit(); } else { for (int i = 0; i < viewItems.length; i++) { if (command == viewItems[i]) tabs.setSelectedIndex(i); } } }
| 1,112,188
|
public static void main(String[] args) { boolean nogui = false; //HaploView window; for(int i = 0;i<args.length;i++) { if(args[i].equals("-n") || args[i].equals("-h")) { nogui = true; } } if(nogui) { HaploText textOnly = new HaploText(args); } else { try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (Exception e) { } //System.setProperty("swing.disableFileChooserSpeedFix", "true"); window = new HaploView(); window.argHandler(args); //setup view object window.setTitle("HaploView beta"); window.setSize(800,600); //center the window on the screen Dimension screen = Toolkit.getDefaultToolkit().getScreenSize(); window.setLocation((screen.width - window.getWidth()) / 2, (screen.height - window.getHeight()) / 2); window.setVisible(true); ReadDataDialog readDialog = new ReadDataDialog("Welcome to HaploView", window); readDialog.pack(); readDialog.setVisible(true); } }
|
public static void main(String[] args) { boolean nogui = false; //HaploView window; for(int i = 0;i<args.length;i++) { if(args[i].equals("-n") || args[i].equals("-h")) { nogui = true; } } if(nogui) { HaploText textOnly = new HaploText(args); } else { try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (Exception e) { } //System.setProperty("swing.disableFileChooserSpeedFix", "true"); window = new HaploView(); window.argHandler(args); //setup view object window.setTitle(TITLE_STRING); window.setSize(800,600); //center the window on the screen Dimension screen = Toolkit.getDefaultToolkit().getScreenSize(); window.setLocation((screen.width - window.getWidth()) / 2, (screen.height - window.getHeight()) / 2); window.setVisible(true); ReadDataDialog readDialog = new ReadDataDialog("Welcome to HaploView", window); readDialog.pack(); readDialog.setVisible(true); } }
| 1,112,190
|
void processData(final String[][] hminfo) { if (inputOptions[2].equals("")){ inputOptions[2] = "0"; } maxCompDist = Long.parseLong(inputOptions[2])*1000; this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; theData.infoKnown = false; if (!(inputOptions[1].equals(""))){ readMarkers(new File(inputOptions[1]), null); } if (hminfo != null){ readMarkers(null,hminfo); } theData.generateDPrimeTable(maxCompDist); theData.guessBlocks(0); colorMenuItems[0].setSelected(true); colorMenuItems[1].setEnabled(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab = tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(theData); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); 
tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); JLabel countsLabel = new JLabel("Using " + theData.numSingletons + " singletons and " + theData.numTrios + " trios from " + theData.numPeds + " families."); if (theData.numTrios + theData.numSingletons == 0){ countsLabel.setForeground(Color.red); } countsLabel.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(countsLabel); metaCheckPanel.add(checkPanel); JPanel failPanel = new JPanel(); failPanel.setLayout(new BoxLayout(failPanel,BoxLayout.Y_AXIS)); JPanel holdPanel = new JPanel(); holdPanel.add(new JLabel("HW p-value cutoff: ")); hwcut = new NumberTextField(String.valueOf(CheckData.hwCut),6,true); holdPanel.add(hwcut); failPanel.add(holdPanel); holdPanel = new JPanel(); holdPanel.add(new JLabel("Min genotype %: ")); genocut = new NumberTextField(String.valueOf(CheckData.failedGenoCut),2, false); holdPanel.add(genocut); failPanel.add(holdPanel); holdPanel = new JPanel(); holdPanel.add(new JLabel("Max # mendel errors: ")); mendcut = new NumberTextField(String.valueOf(CheckData.numMendErrCut),2,false); holdPanel.add(mendcut); failPanel.add(holdPanel); JButton rescore = new JButton("Rescore Markers"); rescore.addActionListener(window); failPanel.add(rescore); JButton selAll = new JButton("Select All"); selAll.addActionListener(window); JPanel ctrlPanel = new JPanel(); 
ctrlPanel.add(failPanel); ctrlPanel.add(selAll); checkPanel.add(ctrlPanel); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(assocTest > 0) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; //viewItems[VIEW_TDT_NUM] = VIEW_TDT; tdtPanel = new TDTPanel(theData.chromosomes, assocTest); tabs.addTab(viewItems[VIEW_TDT_NUM], tdtPanel); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; return null; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); for (int i = 0; i < blockMenuItems.length; i++){ blockMenuItems[i].setEnabled(true); } clearBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }
|
void processData(final String[][] hminfo) { if (inputOptions[2].equals("")){ inputOptions[2] = "0"; } maxCompDist = Long.parseLong(inputOptions[2])*1000; this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; theData.infoKnown = false; if (!(inputOptions[1].equals(""))){ readMarkers(new File(inputOptions[1]), null); } if (hminfo != null){ readMarkers(null,hminfo); } theData.generateDPrimeTable(maxCompDist); theData.guessBlocks(0); colorMenuItems[0].setSelected(true); colorMenuItems[1].setEnabled(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab = tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(theData); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); 
tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); JLabel countsLabel = new JLabel("Using " + theData.numSingletons + " singletons and " + theData.numTrios + " trios from " + theData.numPeds + " families."); if (theData.numTrios + theData.numSingletons == 0){ countsLabel.setForeground(Color.red); } countsLabel.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(countsLabel); metaCheckPanel.add(checkPanel); JPanel failPanel = new JPanel(); failPanel.setLayout(new BoxLayout(failPanel,BoxLayout.Y_AXIS)); JPanel holdPanel = new JPanel(); holdPanel.add(new JLabel("HW p-value cutoff: ")); hwcut = new NumberTextField(String.valueOf(CheckData.hwCut),6,true); holdPanel.add(hwcut); failPanel.add(holdPanel); holdPanel = new JPanel(); holdPanel.add(new JLabel("Min genotype %: ")); genocut = new NumberTextField(String.valueOf(CheckData.failedGenoCut),2, false); holdPanel.add(genocut); failPanel.add(holdPanel); holdPanel = new JPanel(); holdPanel.add(new JLabel("Max # mendel errors: ")); mendcut = new NumberTextField(String.valueOf(CheckData.numMendErrCut),2,false); holdPanel.add(mendcut); failPanel.add(holdPanel); JButton rescore = new JButton("Rescore Markers"); rescore.addActionListener(window); failPanel.add(rescore); JButton selAll = new JButton("Select All"); selAll.addActionListener(window); JPanel ctrlPanel = new JPanel(); 
ctrlPanel.add(failPanel); ctrlPanel.add(selAll); checkPanel.add(ctrlPanel); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(assocTest > 0) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; //viewItems[VIEW_TDT_NUM] = VIEW_TDT; tdtPanel = new TDTPanel(theData.chromosomes, assocTest); tabs.addTab(viewItems[VIEW_TDT_NUM], tdtPanel); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; return null; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); for (int i = 0; i < blockMenuItems.length; i++){ blockMenuItems[i].setEnabled(true); } clearBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }
| 1,112,191
|
public Object construct(){ dPrimeDisplay=null; theData.infoKnown = false; if (!(inputOptions[1].equals(""))){ readMarkers(new File(inputOptions[1]), null); } if (hminfo != null){ readMarkers(null,hminfo); } theData.generateDPrimeTable(maxCompDist); theData.guessBlocks(0); colorMenuItems[0].setSelected(true); colorMenuItems[1].setEnabled(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab = tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(theData); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); 
tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); JLabel countsLabel = new JLabel("Using " + theData.numSingletons + " singletons and " + theData.numTrios + " trios from " + theData.numPeds + " families."); if (theData.numTrios + theData.numSingletons == 0){ countsLabel.setForeground(Color.red); } countsLabel.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(countsLabel); metaCheckPanel.add(checkPanel); JPanel failPanel = new JPanel(); failPanel.setLayout(new BoxLayout(failPanel,BoxLayout.Y_AXIS)); JPanel holdPanel = new JPanel(); holdPanel.add(new JLabel("HW p-value cutoff: ")); hwcut = new NumberTextField(String.valueOf(CheckData.hwCut),6,true); holdPanel.add(hwcut); failPanel.add(holdPanel); holdPanel = new JPanel(); holdPanel.add(new JLabel("Min genotype %: ")); genocut = new NumberTextField(String.valueOf(CheckData.failedGenoCut),2, false); holdPanel.add(genocut); failPanel.add(holdPanel); holdPanel = new JPanel(); holdPanel.add(new JLabel("Max # mendel errors: ")); mendcut = new NumberTextField(String.valueOf(CheckData.numMendErrCut),2,false); holdPanel.add(mendcut); failPanel.add(holdPanel); JButton rescore = new JButton("Rescore Markers"); rescore.addActionListener(window); failPanel.add(rescore); JButton selAll = new JButton("Select All"); selAll.addActionListener(window); JPanel ctrlPanel = new JPanel(); ctrlPanel.add(failPanel); ctrlPanel.add(selAll); checkPanel.add(ctrlPanel); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(assocTest > 0) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; 
//viewItems[VIEW_TDT_NUM] = VIEW_TDT; tdtPanel = new TDTPanel(theData.chromosomes, assocTest); tabs.addTab(viewItems[VIEW_TDT_NUM], tdtPanel); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; return null; }
|
public Object construct(){ dPrimeDisplay=null; theData.infoKnown = false; if (!(inputOptions[1].equals(""))){ readMarkers(new File(inputOptions[1]), null); } if (hminfo != null){ readMarkers(null,hminfo); } theData.generateDPrimeTable(maxCompDist); theData.guessBlocks(0); colorMenuItems[0].setSelected(true); colorMenuItems[1].setEnabled(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab = tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(theData); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); 
tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); JLabel countsLabel = new JLabel("Using " + theData.numSingletons + " singletons and " + theData.numTrios + " trios from " + theData.numPeds + " families."); if (theData.numTrios + theData.numSingletons == 0){ countsLabel.setForeground(Color.red); } countsLabel.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(countsLabel); metaCheckPanel.add(checkPanel); JPanel failPanel = new JPanel(); failPanel.setLayout(new BoxLayout(failPanel,BoxLayout.Y_AXIS)); JPanel holdPanel = new JPanel(); holdPanel.add(new JLabel("HW p-value cutoff: ")); hwcut = new NumberTextField(String.valueOf(CheckData.hwCut),6,true); holdPanel.add(hwcut); failPanel.add(holdPanel); holdPanel = new JPanel(); holdPanel.add(new JLabel("Min genotype %: ")); genocut = new NumberTextField(String.valueOf(CheckData.failedGenoCut),2, false); holdPanel.add(genocut); failPanel.add(holdPanel); holdPanel = new JPanel(); holdPanel.add(new JLabel("Max # mendel errors: ")); mendcut = new NumberTextField(String.valueOf(CheckData.numMendErrCut),2,false); holdPanel.add(mendcut); failPanel.add(holdPanel); JButton rescore = new JButton("Rescore Markers"); rescore.addActionListener(window); failPanel.add(rescore); JButton selAll = new JButton("Select All"); selAll.addActionListener(window); JPanel ctrlPanel = new JPanel(); ctrlPanel.add(failPanel); ctrlPanel.add(selAll); checkPanel.add(ctrlPanel); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(assocTest > 0) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; 
//viewItems[VIEW_TDT_NUM] = VIEW_TDT; tdtPanel = new TDTPanel(theData.chromosomes, assocTest); tabs.addTab(viewItems[VIEW_TDT_NUM], tdtPanel); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; return null; }
| 1,112,192
|
void readPedGenotypes(String[] f, int type){ //input is a 3 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //inputOptions[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) //type is either 3 or 4 for ped and hapmap files respectively inputOptions = f; File pedFile = new File(inputOptions[0]); try { if (pedFile.length() < 1){ throw new HaploViewException("Pedfile is empty or nonexistent: " + pedFile.getName()); } checkPanel = new CheckDataPanel(pedFile, type); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); theData = new HaploData(assocTest); JTable table = checkPanel.getTable(); boolean[] markerResultArray = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResultArray[i] = ((Boolean)table.getValueAt(i,7)).booleanValue(); } theData.linkageToChrom(markerResultArray,checkPanel.getPedFile(),checkPanel.getPedFile().getHMInfo()); processData(checkPanel.getPedFile().getHMInfo()); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
|
void readPedGenotypes(String[] f, int type){ //input is a 3 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //inputOptions[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) //type is either 3 or 4 for ped and hapmap files respectively inputOptions = f; File inFile = new File(inputOptions[0]); try { if (pedFile.length() < 1){ throw new HaploViewException("Pedfile is empty or nonexistent: " + pedFile.getName()); } checkPanel = new CheckDataPanel(pedFile, type); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); theData = new HaploData(assocTest); JTable table = checkPanel.getTable(); boolean[] markerResultArray = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResultArray[i] = ((Boolean)table.getValueAt(i,7)).booleanValue(); } theData.linkageToChrom(markerResultArray,checkPanel.getPedFile(),checkPanel.getPedFile().getHMInfo()); processData(checkPanel.getPedFile().getHMInfo()); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
| 1,112,193
|
void readPedGenotypes(String[] f, int type){ //input is a 3 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //inputOptions[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) //type is either 3 or 4 for ped and hapmap files respectively inputOptions = f; File pedFile = new File(inputOptions[0]); try { if (pedFile.length() < 1){ throw new HaploViewException("Pedfile is empty or nonexistent: " + pedFile.getName()); } checkPanel = new CheckDataPanel(pedFile, type); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); theData = new HaploData(assocTest); JTable table = checkPanel.getTable(); boolean[] markerResultArray = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResultArray[i] = ((Boolean)table.getValueAt(i,7)).booleanValue(); } theData.linkageToChrom(markerResultArray,checkPanel.getPedFile(),checkPanel.getPedFile().getHMInfo()); processData(checkPanel.getPedFile().getHMInfo()); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
|
void readPedGenotypes(String[] f, int type){ //input is a 3 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //inputOptions[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) //type is either 3 or 4 for ped and hapmap files respectively inputOptions = f; File pedFile = new File(inputOptions[0]); try { if (pedFile.length() < 1){ throw new HaploViewException("Pedfile is empty or nonexistent: " + pedFile.getName()); } checkPanel = new CheckDataPanel(pedFile, type); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); theData = new HaploData(assocTest); JTable table = checkPanel.getTable(); boolean[] markerResultArray = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResultArray[i] = ((Boolean)table.getValueAt(i,7)).booleanValue(); } theData.linkageToChrom(markerResultArray,checkPanel.getPedFile(),checkPanel.getPedFile().getHMInfo()); processData(checkPanel.getPedFile().getHMInfo()); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
| 1,112,194
|
void readPedGenotypes(String[] f, int type){ //input is a 3 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //inputOptions[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) //type is either 3 or 4 for ped and hapmap files respectively inputOptions = f; File pedFile = new File(inputOptions[0]); try { if (pedFile.length() < 1){ throw new HaploViewException("Pedfile is empty or nonexistent: " + pedFile.getName()); } checkPanel = new CheckDataPanel(pedFile, type); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); theData = new HaploData(assocTest); JTable table = checkPanel.getTable(); boolean[] markerResultArray = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResultArray[i] = ((Boolean)table.getValueAt(i,7)).booleanValue(); } theData.linkageToChrom(markerResultArray,checkPanel.getPedFile(),checkPanel.getPedFile().getHMInfo()); processData(checkPanel.getPedFile().getHMInfo()); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
|
void readPedGenotypes(String[] f, int type){ //input is a 3 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //inputOptions[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) //type is either 3 or 4 for ped and hapmap files respectively inputOptions = f; File pedFile = new File(inputOptions[0]); try { if (pedFile.length() < 1){ throw new HaploViewException("Pedfile is empty or nonexistent: " + pedFile.getName()); } checkPanel = new CheckDataPanel(theData.getPedFile()); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); theData = new HaploData(assocTest); JTable table = checkPanel.getTable(); boolean[] markerResultArray = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResultArray[i] = ((Boolean)table.getValueAt(i,7)).booleanValue(); } theData.linkageToChrom(markerResultArray,checkPanel.getPedFile(),checkPanel.getPedFile().getHMInfo()); processData(checkPanel.getPedFile().getHMInfo()); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
| 1,112,195
|
void readPedGenotypes(String[] f, int type){ //input is a 3 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //inputOptions[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) //type is either 3 or 4 for ped and hapmap files respectively inputOptions = f; File pedFile = new File(inputOptions[0]); try { if (pedFile.length() < 1){ throw new HaploViewException("Pedfile is empty or nonexistent: " + pedFile.getName()); } checkPanel = new CheckDataPanel(pedFile, type); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); theData = new HaploData(assocTest); JTable table = checkPanel.getTable(); boolean[] markerResultArray = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResultArray[i] = ((Boolean)table.getValueAt(i,7)).booleanValue(); } theData.linkageToChrom(markerResultArray,checkPanel.getPedFile(),checkPanel.getPedFile().getHMInfo()); processData(checkPanel.getPedFile().getHMInfo()); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
|
void readPedGenotypes(String[] f, int type){ //input is a 3 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //inputOptions[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) //type is either 3 or 4 for ped and hapmap files respectively inputOptions = f; File pedFile = new File(inputOptions[0]); try { if (pedFile.length() < 1){ throw new HaploViewException("Pedfile is empty or nonexistent: " + pedFile.getName()); } checkPanel = new CheckDataPanel(pedFile, type); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); theData = new HaploData(assocTest); JTable table = checkPanel.getTable(); boolean[] markerResultArray = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResultArray[i] = ((Boolean)table.getValueAt(i,7)).booleanValue(); } theData.linkageToChrom(markerResultArray,checkPanel.getPedFile(),checkPanel.getPedFile().getHMInfo()); processData(checkPanel.getPedFile().getHMInfo()); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
| 1,112,196
|
public static void main( String args[] ) { JFrame frame = new JFrame( "PhotoInfoEditorTest" ); PhotoView view = new PhotoView(); frame.getContentPane().add( view, BorderLayout.CENTER ); frame.addWindowListener(new WindowAdapter() { public void windowClosing(WindowEvent e) { System.exit(0); } } ); File f = new File("c:\\java\\photovault\\testfiles\\test1.jpg" ); try { BufferedImage bi = ImageIO.read(f); view.setImage( bi ); view.setScale( 0.3f ); System.out.println( "Succesfully loaded \""+ f.getPath() + "\"" ); } catch (IOException e ) { System.out.println( "Error loading image \""+ f.getPath() + "\"" ); } frame.pack(); frame.setVisible( true ); }
|
public static void main( String args[] ) { JFrame frame = new JFrame( "PhotoInfoEditorTest" ); PhotoView view = new PhotoView(); frame.getContentPane().add( view, BorderLayout.CENTER ); frame.addWindowListener(new WindowAdapter() { public void windowClosing(WindowEvent e) { System.exit(0); } } ); File f = new File("c:\\java\\photovault\\testfiles\\test1.jpg" ); try { BufferedImage bi = ImageIO.read(f); view.setImage( bi ); view.setScale( 0.3f ); log.debug( "Succesfully loaded \""+ f.getPath() + "\"" ); } catch (IOException e ) { System.out.println( "Error loading image \""+ f.getPath() + "\"" ); } frame.pack(); frame.setVisible( true ); }
| 1,112,197
|
public static void main( String args[] ) { JFrame frame = new JFrame( "PhotoInfoEditorTest" ); PhotoView view = new PhotoView(); frame.getContentPane().add( view, BorderLayout.CENTER ); frame.addWindowListener(new WindowAdapter() { public void windowClosing(WindowEvent e) { System.exit(0); } } ); File f = new File("c:\\java\\photovault\\testfiles\\test1.jpg" ); try { BufferedImage bi = ImageIO.read(f); view.setImage( bi ); view.setScale( 0.3f ); System.out.println( "Succesfully loaded \""+ f.getPath() + "\"" ); } catch (IOException e ) { System.out.println( "Error loading image \""+ f.getPath() + "\"" ); } frame.pack(); frame.setVisible( true ); }
|
public static void main( String args[] ) { JFrame frame = new JFrame( "PhotoInfoEditorTest" ); PhotoView view = new PhotoView(); frame.getContentPane().add( view, BorderLayout.CENTER ); frame.addWindowListener(new WindowAdapter() { public void windowClosing(WindowEvent e) { System.exit(0); } } ); File f = new File("c:\\java\\photovault\\testfiles\\test1.jpg" ); try { BufferedImage bi = ImageIO.read(f); view.setImage( bi ); view.setScale( 0.3f ); System.out.println( "Succesfully loaded \""+ f.getPath() + "\"" ); } catch (IOException e ) { log.debug( "Error loading image \""+ f.getPath() + "\"" ); } frame.pack(); frame.setVisible( true ); }
| 1,112,198
|
public Tag createTag() throws Exception;
|
public Tag createTag(String name, Attributes attributes) throws Exception;
| 1,112,199
|
public Vector readBlocks(File infile) throws HaploViewException, IOException{ if (!infile.exists()){ throw new HaploViewException("File " + infile.getName() + " doesn't exist!"); } Vector cust = new Vector(); BufferedReader in = new BufferedReader(new FileReader(infile)); String currentLine; int lineCount = 0; int highestYet = -1; while ((currentLine = in.readLine()) != null){ lineCount ++; StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() == 1){ //complain if we have only one col throw new HaploViewException("File error on line " + lineCount + " in " + infile.getName()); }else if (st.countTokens() == 0){ //skip blank lines continue; } try{ int[] thisBlock = new int[st.countTokens()]; int x = 0; while (st.hasMoreTokens()){ //we're being nice to users and letting them input blocks with 1-offset thisBlock[x] = new Integer(st.nextToken()).intValue()-1; if (thisBlock[x] > Chromosome.getSize() || thisBlock[x] < 0){ throw new HaploViewException("Error, marker in block out of bounds: " + thisBlock[x] + "\non line " + lineCount); } if (thisBlock[x] <= highestYet){ throw new HaploViewException("Error, markers/blocks out of order or overlap:\n" + "on line " + lineCount); } highestYet = thisBlock[x]; x++; } cust.add(thisBlock); }catch (NumberFormatException nfe) { throw new HaploViewException("Format error on line " + lineCount + " in " + infile.getName()); } } return cust; }
|
public Vector readBlocks(File infile) throws HaploViewException, IOException{ if (!infile.exists()){ throw new HaploViewException("File " + infile.getName() + " doesn't exist!"); } Vector cust = new Vector(); BufferedReader in = new BufferedReader(new FileReader(infile)); String currentLine; int lineCount = 0; int highestYet = -1; while ((currentLine = in.readLine()) != null){ lineCount ++; StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() == 1){ //complain if we have only one col throw new HaploViewException("File error on line " + lineCount + " in " + infile.getName()); }else if (st.countTokens() == 0){ //skip blank lines continue; } try{ int[] thisBlock = new int[st.countTokens()]; int x = 0; while (st.hasMoreTokens()){ //we're being nice to users and letting them input blocks with 1-offset thisBlock[x] = new Integer(st.nextToken()).intValue()-1; if (thisBlock[x] > Chromosome.getSize() || thisBlock[x] < 0){ throw new HaploViewException("Error, marker in block out of bounds: " + thisBlock[x] + "\non line " + lineCount); } if (thisBlock[x] <= highestYet){ throw new HaploViewException("Error, markers/blocks out of order or overlap:\n" + "on line " + lineCount); } highestYet = thisBlock[x]; x++; } cust.add(thisBlock); }catch (NumberFormatException nfe) { throw new HaploViewException("Format error on line " + lineCount + " in " + infile.getName()); } } return cust; }
| 1,112,200
|
public Vector readBlocks(File infile) throws HaploViewException, IOException{ if (!infile.exists()){ throw new HaploViewException("File " + infile.getName() + " doesn't exist!"); } Vector cust = new Vector(); BufferedReader in = new BufferedReader(new FileReader(infile)); String currentLine; int lineCount = 0; int highestYet = -1; while ((currentLine = in.readLine()) != null){ lineCount ++; StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() == 1){ //complain if we have only one col throw new HaploViewException("File error on line " + lineCount + " in " + infile.getName()); }else if (st.countTokens() == 0){ //skip blank lines continue; } try{ int[] thisBlock = new int[st.countTokens()]; int x = 0; while (st.hasMoreTokens()){ //we're being nice to users and letting them input blocks with 1-offset thisBlock[x] = new Integer(st.nextToken()).intValue()-1; if (thisBlock[x] > Chromosome.getSize() || thisBlock[x] < 0){ throw new HaploViewException("Error, marker in block out of bounds: " + thisBlock[x] + "\non line " + lineCount); } if (thisBlock[x] <= highestYet){ throw new HaploViewException("Error, markers/blocks out of order or overlap:\n" + "on line " + lineCount); } highestYet = thisBlock[x]; x++; } cust.add(thisBlock); }catch (NumberFormatException nfe) { throw new HaploViewException("Format error on line " + lineCount + " in " + infile.getName()); } } return cust; }
|
public Vector readBlocks(File infile) throws HaploViewException, IOException{ if (!infile.exists()){ throw new HaploViewException("File " + infile.getName() + " doesn't exist!"); } Vector cust = new Vector(); BufferedReader in = new BufferedReader(new FileReader(infile)); String currentLine; int lineCount = 0; int highestYet = -1; while ((currentLine = in.readLine()) != null){ lineCount ++; StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() == 1){ //complain if we have only one col throw new HaploViewException("File error on line " + lineCount + " in " + infile.getName()); }else if (st.countTokens() == 0){ //skip blank lines continue; } try{ int[] thisBlock = new int[st.countTokens()]; int x = 0; while (st.hasMoreTokens()){ //we're being nice to users and letting them input blocks with 1-offset Integer nextInLine = new Integer(st.nextToken()); for (int y = 0; y < Chromosome.realIndex.length; y++){ if (Chromosome.realIndex[y] == nextInLine.intValue() - 1){ goodies.add(new Integer(y)); } } } int thisBlock[] = new int[goodies.size()]; for (int x = 0; x < goodies.size(); x++){ thisBlock[x] = ((Integer)goodies.elementAt(x)).intValue(); if (thisBlock[x] > Chromosome.getSize() || thisBlock[x] < 0){ throw new HaploViewException("Error, marker in block out of bounds: " + thisBlock[x] + "\non line " + lineCount); } if (thisBlock[x] <= highestYet){ throw new HaploViewException("Error, markers/blocks out of order or overlap:\n" + "on line " + lineCount); } highestYet = thisBlock[x]; x++; } cust.add(thisBlock); }catch (NumberFormatException nfe) { throw new HaploViewException("Format error on line " + lineCount + " in " + infile.getName()); } } return cust; }
| 1,112,201
|
public Vector readBlocks(File infile) throws HaploViewException, IOException{ if (!infile.exists()){ throw new HaploViewException("File " + infile.getName() + " doesn't exist!"); } Vector cust = new Vector(); BufferedReader in = new BufferedReader(new FileReader(infile)); String currentLine; int lineCount = 0; int highestYet = -1; while ((currentLine = in.readLine()) != null){ lineCount ++; StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() == 1){ //complain if we have only one col throw new HaploViewException("File error on line " + lineCount + " in " + infile.getName()); }else if (st.countTokens() == 0){ //skip blank lines continue; } try{ int[] thisBlock = new int[st.countTokens()]; int x = 0; while (st.hasMoreTokens()){ //we're being nice to users and letting them input blocks with 1-offset thisBlock[x] = new Integer(st.nextToken()).intValue()-1; if (thisBlock[x] > Chromosome.getSize() || thisBlock[x] < 0){ throw new HaploViewException("Error, marker in block out of bounds: " + thisBlock[x] + "\non line " + lineCount); } if (thisBlock[x] <= highestYet){ throw new HaploViewException("Error, markers/blocks out of order or overlap:\n" + "on line " + lineCount); } highestYet = thisBlock[x]; x++; } cust.add(thisBlock); }catch (NumberFormatException nfe) { throw new HaploViewException("Format error on line " + lineCount + " in " + infile.getName()); } } return cust; }
|
public Vector readBlocks(File infile) throws HaploViewException, IOException{ if (!infile.exists()){ throw new HaploViewException("File " + infile.getName() + " doesn't exist!"); } Vector cust = new Vector(); BufferedReader in = new BufferedReader(new FileReader(infile)); String currentLine; int lineCount = 0; int highestYet = -1; while ((currentLine = in.readLine()) != null){ lineCount ++; StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() == 1){ //complain if we have only one col throw new HaploViewException("File error on line " + lineCount + " in " + infile.getName()); }else if (st.countTokens() == 0){ //skip blank lines continue; } try{ int[] thisBlock = new int[st.countTokens()]; int x = 0; while (st.hasMoreTokens()){ //we're being nice to users and letting them input blocks with 1-offset thisBlock[x] = new Integer(st.nextToken()).intValue()-1; if (thisBlock[x] > Chromosome.getSize() || thisBlock[x] < 0){ throw new HaploViewException("Error, marker in block out of bounds: " + thisBlock[x] + "\non line " + lineCount); } if (thisBlock[x] <= highestYet){ throw new HaploViewException("Error, markers/blocks out of order or overlap:\n" + "on line " + lineCount); } highestYet = thisBlock[x]; } cust.add(thisBlock); }catch (NumberFormatException nfe) { throw new HaploViewException("Format error on line " + lineCount + " in " + infile.getName()); } } return cust; }
| 1,112,202
|
public void testReadFromDB() throws Exception { Connection con = db.getConnection(); Statement stmt = null; String lastSQL = null; try { stmt = con.createStatement(); try { stmt.executeUpdate("DROP TABLE relationship_test_child"); } catch (SQLException ex) { System.out.println("Ignoring SQL Exception; assume object to be dropped didn't exist."); System.out.println(ex.getMessage()); } try { stmt.executeUpdate("DROP TABLE relationship_test_parent"); } catch (SQLException ex) { System.out.println("Ignoring SQL Exception; assume object to be dropped didn't exist."); System.out.println(ex.getMessage()); } lastSQL = "CREATE TABLE relationship_test_parent (\n" + " pkcol_1 integer not null,\n" + " pkcol_2 integer not null,\n" + " attribute_1 integer not null)"; stmt.executeUpdate(lastSQL); lastSQL = "CREATE TABLE relationship_test_child (\n" + " parent_pkcol_1 integer not null,\n" + " parent_pkcol_2 integer not null,\n" + " child_attribute_1 integer not null)"; stmt.executeUpdate(lastSQL); lastSQL = "ALTER TABLE relationship_test_parent\n" + " ADD CONSTRAINT relationship_test_pk\n" + " PRIMARY KEY (pkcol_1 , pkcol_2)"; stmt.executeUpdate(lastSQL); lastSQL = "ALTER TABLE relationship_test_child\n" + " ADD CONSTRAINT relationship_test_fk\n" + " FOREIGN KEY (parent_pkcol_1, parent_pkcol_2)\n" + " REFERENCES relationship_test_parent (pkcol_1 , pkcol_2)"; stmt.executeUpdate(lastSQL); } catch (SQLException ex) { System.out.println("SQL Statement Failed:\n"+lastSQL+"\nStack trace of SQLException follows:"); ex.printStackTrace(); fail("SQL statement failed. 
See system console for details."); } finally { if (stmt != null) stmt.close(); } SQLTable parent = db.getTableByName("relationship_test_parent"); SQLTable child = db.getTableByName("relationship_test_child"); if (parent == null) { parent = db.getTableByName("relationship_test_parent".toUpperCase()); } SQLRelationship rel = (SQLRelationship) parent.getExportedKeys().get(0); assertEquals("relationship_test_fk", rel.getName().toLowerCase()); assertSame(parent, rel.getPkTable()); assertSame(child, rel.getFkTable()); assertEquals((SQLRelationship.ZERO | SQLRelationship.ONE | SQLRelationship.MANY), rel.getFkCardinality()); assertEquals(SQLRelationship.ONE, rel.getPkCardinality()); }
|
public void testReadFromDB() throws Exception { Connection con = db.getConnection(); Statement stmt = null; String lastSQL = null; try { stmt = con.createStatement(); try { stmt.executeUpdate("DROP TABLE relationship_test_child"); } catch (SQLException ex) { System.out.println("Ignoring SQL Exception; assume relationship_test_child didn't exist."); System.out.println(ex.getMessage()); } try { stmt.executeUpdate("DROP TABLE relationship_test_parent"); } catch (SQLException ex) { System.out.println("Ignoring SQL Exception; assume relationship_test_child didn't exist."); System.out.println(ex.getMessage()); } lastSQL = "CREATE TABLE relationship_test_parent (\n" + " pkcol_1 integer not null,\n" + " pkcol_2 integer not null,\n" + " attribute_1 integer not null)"; stmt.executeUpdate(lastSQL); lastSQL = "CREATE TABLE relationship_test_child (\n" + " parent_pkcol_1 integer not null,\n" + " parent_pkcol_2 integer not null,\n" + " child_attribute_1 integer not null)"; stmt.executeUpdate(lastSQL); lastSQL = "ALTER TABLE relationship_test_parent\n" + " ADD CONSTRAINT relationship_test_pk\n" + " PRIMARY KEY (pkcol_1 , pkcol_2)"; stmt.executeUpdate(lastSQL); lastSQL = "ALTER TABLE relationship_test_child\n" + " ADD CONSTRAINT relationship_test_fk\n" + " FOREIGN KEY (parent_pkcol_1, parent_pkcol_2)\n" + " REFERENCES relationship_test_parent (pkcol_1 , pkcol_2)"; stmt.executeUpdate(lastSQL); } catch (SQLException ex) { System.out.println("SQL Statement Failed:\n"+lastSQL+"\nStack trace of SQLException follows:"); ex.printStackTrace(); fail("SQL statement failed. 
See system console for details."); } finally { if (stmt != null) stmt.close(); } SQLTable parent = db.getTableByName("relationship_test_parent"); SQLTable child = db.getTableByName("relationship_test_child"); if (parent == null) { parent = db.getTableByName("relationship_test_parent".toUpperCase()); } SQLRelationship rel = (SQLRelationship) parent.getExportedKeys().get(0); assertEquals("relationship_test_fk", rel.getName().toLowerCase()); assertSame(parent, rel.getPkTable()); assertSame(child, rel.getFkTable()); assertEquals((SQLRelationship.ZERO | SQLRelationship.ONE | SQLRelationship.MANY), rel.getFkCardinality()); assertEquals(SQLRelationship.ONE, rel.getPkCardinality()); }
| 1,112,203
|
public void testReadFromDB() throws Exception { Connection con = db.getConnection(); Statement stmt = null; String lastSQL = null; try { stmt = con.createStatement(); try { stmt.executeUpdate("DROP TABLE relationship_test_child"); } catch (SQLException ex) { System.out.println("Ignoring SQL Exception; assume object to be dropped didn't exist."); System.out.println(ex.getMessage()); } try { stmt.executeUpdate("DROP TABLE relationship_test_parent"); } catch (SQLException ex) { System.out.println("Ignoring SQL Exception; assume object to be dropped didn't exist."); System.out.println(ex.getMessage()); } lastSQL = "CREATE TABLE relationship_test_parent (\n" + " pkcol_1 integer not null,\n" + " pkcol_2 integer not null,\n" + " attribute_1 integer not null)"; stmt.executeUpdate(lastSQL); lastSQL = "CREATE TABLE relationship_test_child (\n" + " parent_pkcol_1 integer not null,\n" + " parent_pkcol_2 integer not null,\n" + " child_attribute_1 integer not null)"; stmt.executeUpdate(lastSQL); lastSQL = "ALTER TABLE relationship_test_parent\n" + " ADD CONSTRAINT relationship_test_pk\n" + " PRIMARY KEY (pkcol_1 , pkcol_2)"; stmt.executeUpdate(lastSQL); lastSQL = "ALTER TABLE relationship_test_child\n" + " ADD CONSTRAINT relationship_test_fk\n" + " FOREIGN KEY (parent_pkcol_1, parent_pkcol_2)\n" + " REFERENCES relationship_test_parent (pkcol_1 , pkcol_2)"; stmt.executeUpdate(lastSQL); } catch (SQLException ex) { System.out.println("SQL Statement Failed:\n"+lastSQL+"\nStack trace of SQLException follows:"); ex.printStackTrace(); fail("SQL statement failed. 
See system console for details."); } finally { if (stmt != null) stmt.close(); } SQLTable parent = db.getTableByName("relationship_test_parent"); SQLTable child = db.getTableByName("relationship_test_child"); if (parent == null) { parent = db.getTableByName("relationship_test_parent".toUpperCase()); } SQLRelationship rel = (SQLRelationship) parent.getExportedKeys().get(0); assertEquals("relationship_test_fk", rel.getName().toLowerCase()); assertSame(parent, rel.getPkTable()); assertSame(child, rel.getFkTable()); assertEquals((SQLRelationship.ZERO | SQLRelationship.ONE | SQLRelationship.MANY), rel.getFkCardinality()); assertEquals(SQLRelationship.ONE, rel.getPkCardinality()); }
|
public void testReadFromDB() throws Exception { Connection con = db.getConnection(); Statement stmt = null; String lastSQL = null; try { stmt = con.createStatement(); try { stmt.executeUpdate("DROP TABLE relationship_test_child"); } catch (SQLException ex) { System.out.println("Ignoring SQL Exception; assume relationship_test_parent didn't exist."); System.out.println(ex.getMessage()); } try { stmt.executeUpdate("DROP TABLE relationship_test_parent"); } catch (SQLException ex) { System.out.println("Ignoring SQL Exception; assume relationship_test_parent didn't exist."); System.out.println(ex.getMessage()); } lastSQL = "CREATE TABLE relationship_test_parent (\n" + " pkcol_1 integer not null,\n" + " pkcol_2 integer not null,\n" + " attribute_1 integer not null)"; stmt.executeUpdate(lastSQL); lastSQL = "CREATE TABLE relationship_test_child (\n" + " parent_pkcol_1 integer not null,\n" + " parent_pkcol_2 integer not null,\n" + " child_attribute_1 integer not null)"; stmt.executeUpdate(lastSQL); lastSQL = "ALTER TABLE relationship_test_parent\n" + " ADD CONSTRAINT relationship_test_pk\n" + " PRIMARY KEY (pkcol_1 , pkcol_2)"; stmt.executeUpdate(lastSQL); lastSQL = "ALTER TABLE relationship_test_child\n" + " ADD CONSTRAINT relationship_test_fk\n" + " FOREIGN KEY (parent_pkcol_1, parent_pkcol_2)\n" + " REFERENCES relationship_test_parent (pkcol_1 , pkcol_2)"; stmt.executeUpdate(lastSQL); } catch (SQLException ex) { System.out.println("SQL Statement Failed:\n"+lastSQL+"\nStack trace of SQLException follows:"); ex.printStackTrace(); fail("SQL statement failed. 
See system console for details."); } finally { if (stmt != null) stmt.close(); } SQLTable parent = db.getTableByName("relationship_test_parent"); SQLTable child = db.getTableByName("relationship_test_child"); if (parent == null) { parent = db.getTableByName("relationship_test_parent".toUpperCase()); } SQLRelationship rel = (SQLRelationship) parent.getExportedKeys().get(0); assertEquals("relationship_test_fk", rel.getName().toLowerCase()); assertSame(parent, rel.getPkTable()); assertSame(child, rel.getFkTable()); assertEquals((SQLRelationship.ZERO | SQLRelationship.ONE | SQLRelationship.MANY), rel.getFkCardinality()); assertEquals(SQLRelationship.ONE, rel.getPkCardinality()); }
| 1,112,204
|
public Expression createExpression(final String text) throws Exception { final Expression jexlExpression = new JexlExpression( org.apache.commons.jexl.ExpressionFactory.createExpression(text) ); if ( isSupportAntVariables() && isValidAntVariableName(text) ) { ExpressionSupport expr = new ExpressionSupport() { public Object evaluate(JellyContext context) { Object answer = jexlExpression.evaluate(context); if ( answer == null ) { answer = context.getScopedVariable(text); } return answer; } }; return expr; } return jexlExpression; }
|
public Expression createExpression(final String text) throws Exception { final Expression jexlExpression = new JexlExpression( org.apache.commons.jexl.ExpressionFactory.createExpression(text) ); if ( isSupportAntVariables() && isValidAntVariableName(text) ) { ExpressionSupport expr = new ExpressionSupport() { public Object evaluate(JellyContext context) { Object answer = jexlExpression.evaluate(context); if ( answer == null ) { answer = context.getScopedVariable(text); } return answer; } }; return expr; } return jexlExpression; }
| 1,112,205
|
public Expression createExpression(final String text) throws Exception { final Expression jexlExpression = new JexlExpression( org.apache.commons.jexl.ExpressionFactory.createExpression(text) ); if ( isSupportAntVariables() && isValidAntVariableName(text) ) { ExpressionSupport expr = new ExpressionSupport() { public Object evaluate(JellyContext context) { Object answer = jexlExpression.evaluate(context); if ( answer == null ) { answer = context.getScopedVariable(text); } return answer; } }; return expr; } return jexlExpression; }
|
public Expression createExpression(final String text) throws Exception { final Expression jexlExpression = new JexlExpression( org.apache.commons.jexl.ExpressionFactory.createExpression(text) ); if ( isSupportAntVariables() && isValidAntVariableName(text) ) { ExpressionSupport expr = new ExpressionSupport() { public Object evaluate(JellyContext context) { Object answer = jexlExpression.evaluate(context); if ( answer == null ) { answer = context.getScopedVariable(text); } return answer; } }; return expr; } return jexlExpression; }
| 1,112,206
|
public Object evaluate(JellyContext context) { Object answer = jexlExpression.evaluate(context); if ( answer == null ) { answer = context.getScopedVariable(text); } return answer; }
|
public Object evaluate(JellyContext context) { Object answer = jexlExpression.evaluate(context); if ( answer == null ) { answer = context.getScopedVariable(text); } return answer; }
| 1,112,207
|
public Object evaluate(JellyContext context) { Object answer = jexlExpression.evaluate(context); if ( answer == null ) { answer = context.getScopedVariable(text); } return answer; }
|
public Object evaluate(JellyContext context) { Object answer = jexlExpression.evaluate(context); if ( answer == null ) { answer = context.getScopedVariable(text); } return answer; }
| 1,112,208
|
public boolean evaluateAsBoolean(JellyContext context) { Object value = evaluate(context); if ( value instanceof Boolean ) { Boolean b = (Boolean) value; return b.booleanValue(); } else if ( value instanceof String ) { // return Boolean.getBoolean( (String) value ); String str = (String) value; if ( str.equals( "on" ) || str.equals( "yes" ) || str.equals( "1" ) || str.equals( "true" ) ) { return true; } else { return false; } } return false; }
|
public boolean evaluateAsBoolean(JellyContext context) { Object value = evaluateAsValue(context); if ( value instanceof Boolean ) { Boolean b = (Boolean) value; return b.booleanValue(); } else if ( value instanceof String ) { // return Boolean.getBoolean( (String) value ); String str = (String) value; if ( str.equals( "on" ) || str.equals( "yes" ) || str.equals( "1" ) || str.equals( "true" ) ) { return true; } else { return false; } } return false; }
| 1,112,209
|
public Iterator evaluateAsIterator(JellyContext context) { Object value = evaluate(context); if ( value == null ) { return EMPTY_ITERATOR; } else if ( value instanceof Iterator ) { return (Iterator) value; } else if ( value instanceof List ) { List list = (List) value; return list.iterator(); } else if ( value instanceof Map ) { Map map = (Map) value; return map.entrySet().iterator(); } else if ( value.getClass().isArray() ) { return new ArrayIterator( value ); } else if ( value instanceof Enumeration ) { return new EnumerationIterator((Enumeration ) value); } else if ( value instanceof Collection ) { Collection collection = (Collection) value; return collection.iterator(); } else { // XXX: should we return single iterator? return new SingletonIterator( value ); } }
|
public Iterator evaluateAsIterator(JellyContext context) { Object value = evaluateAsValue(context); if ( value == null ) { return EMPTY_ITERATOR; } else if ( value instanceof Iterator ) { return (Iterator) value; } else if ( value instanceof List ) { List list = (List) value; return list.iterator(); } else if ( value instanceof Map ) { Map map = (Map) value; return map.entrySet().iterator(); } else if ( value.getClass().isArray() ) { return new ArrayIterator( value ); } else if ( value instanceof Enumeration ) { return new EnumerationIterator((Enumeration ) value); } else if ( value instanceof Collection ) { Collection collection = (Collection) value; return collection.iterator(); } else { // XXX: should we return single iterator? return new SingletonIterator( value ); } }
| 1,112,210
|
public String evaluateAsString(JellyContext context) { Object value = evaluate(context); if ( value != null ) { return value.toString(); } return null; }
|
public String evaluateAsString(JellyContext context) { Object value = evaluateAsValue(context); if ( value != null ) { return value.toString(); } return null; }
| 1,112,211
|
public HaplotypeDisplayController(HaplotypeDisplay parent) { this.parent = parent; setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); JPanel hapPercentPanel = new JPanel(); hapPercentPanel.add(new JLabel("Examine haplotypes above ")); hapPercentPanel.add(minDisplayField = new NumberTextField(String.valueOf(parent.displayThresh), 3)); minDisplayField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setDisplayThresh(Integer.parseInt(minDisplayField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); hapPercentPanel.add(new JLabel("%")); add(hapPercentPanel); JPanel thinPanel = new JPanel(); thinPanel.add(new JLabel("Connect with thin lines if > ")); thinPanel.add(minThinField = new NumberTextField(String.valueOf(parent.thinThresh), 3)); minThinField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThinThresh(Integer.parseInt(minThinField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thinPanel.add(new JLabel("%")); add(thinPanel); JPanel thickPanel = new JPanel(); thickPanel.add(new JLabel("Connect with thick lines if > ")); thickPanel.add(minThickField = new NumberTextField(String.valueOf(parent.thickThresh), 3)); minThickField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThickThresh(Integer.parseInt(minThickField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thickPanel.add(new JLabel("%")); add(thickPanel); fieldSize = minDisplayField.getPreferredSize(); }
|
public HaplotypeDisplayController(HaplotypeDisplay parent) { this.parent = parent; setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); JPanel hapPercentPanel = new JPanel(); hapPercentPanel.add(new JLabel("Examine haplotypes above ")); hapPercentPanel.add(minDisplayField = new NumberTextField(String.valueOf(parent.displayThresh), 3)); minDisplayField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setDisplayThresh(Integer.parseInt(minDisplayField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); hapPercentPanel.add(new JLabel("%")); add(hapPercentPanel); JPanel thinPanel = new JPanel(); thinPanel.add(new JLabel("Connect with thin lines if > ")); thinPanel.add(minThinField = new NumberTextField(String.valueOf(parent.thinThresh), 3)); minThinField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThinThresh(Integer.parseInt(minThinField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thinPanel.add(new JLabel("%")); add(thinPanel); JPanel thickPanel = new JPanel(); thickPanel.add(new JLabel("Connect with thick lines if > ")); thickPanel.add(minThickField = new NumberTextField(String.valueOf(parent.thickThresh), 3)); minThickField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThickThresh(Integer.parseInt(minThickField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thickPanel.add(new JLabel("%")); add(thickPanel); fieldSize = minDisplayField.getPreferredSize(); }
| 1,112,215
|
public HaplotypeDisplayController(HaplotypeDisplay parent) { this.parent = parent; setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); JPanel hapPercentPanel = new JPanel(); hapPercentPanel.add(new JLabel("Examine haplotypes above ")); hapPercentPanel.add(minDisplayField = new NumberTextField(String.valueOf(parent.displayThresh), 3)); minDisplayField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setDisplayThresh(Integer.parseInt(minDisplayField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); hapPercentPanel.add(new JLabel("%")); add(hapPercentPanel); JPanel thinPanel = new JPanel(); thinPanel.add(new JLabel("Connect with thin lines if > ")); thinPanel.add(minThinField = new NumberTextField(String.valueOf(parent.thinThresh), 3)); minThinField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThinThresh(Integer.parseInt(minThinField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thinPanel.add(new JLabel("%")); add(thinPanel); JPanel thickPanel = new JPanel(); thickPanel.add(new JLabel("Connect with thick lines if > ")); thickPanel.add(minThickField = new NumberTextField(String.valueOf(parent.thickThresh), 3)); minThickField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThickThresh(Integer.parseInt(minThickField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thickPanel.add(new JLabel("%")); add(thickPanel); fieldSize = minDisplayField.getPreferredSize(); }
|
public HaplotypeDisplayController(HaplotypeDisplay parent) { this.parent = parent; setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); JPanel hapPercentPanel = new JPanel(); hapPercentPanel.add(new JLabel("Examine haplotypes above ")); hapPercentPanel.add(minDisplayField = new NumberTextField(String.valueOf(parent.displayThresh), 3)); minDisplayField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setDisplayThresh(Integer.parseInt(minDisplayField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); hapPercentPanel.add(new JLabel("%")); add(hapPercentPanel); JPanel thinPanel = new JPanel(); thinPanel.add(new JLabel("Connect with thin lines if > ")); thinPanel.add(minThinField = new NumberTextField(String.valueOf(parent.thinThresh), 3)); minThinField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThinThresh(Integer.parseInt(minThinField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thinPanel.add(new JLabel("%")); add(thinPanel); JPanel thickPanel = new JPanel(); thickPanel.add(new JLabel("Connect with thick lines if > ")); thickPanel.add(minThickField = new NumberTextField(String.valueOf(parent.thickThresh), 3)); minThickField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThickThresh(Integer.parseInt(minThickField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thickPanel.add(new JLabel("%")); add(thickPanel); fieldSize = minDisplayField.getPreferredSize(); }
| 1,112,216
|
public HaplotypeDisplayController(HaplotypeDisplay parent) { this.parent = parent; setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); JPanel hapPercentPanel = new JPanel(); hapPercentPanel.add(new JLabel("Examine haplotypes above ")); hapPercentPanel.add(minDisplayField = new NumberTextField(String.valueOf(parent.displayThresh), 3)); minDisplayField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setDisplayThresh(Integer.parseInt(minDisplayField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); hapPercentPanel.add(new JLabel("%")); add(hapPercentPanel); JPanel thinPanel = new JPanel(); thinPanel.add(new JLabel("Connect with thin lines if > ")); thinPanel.add(minThinField = new NumberTextField(String.valueOf(parent.thinThresh), 3)); minThinField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThinThresh(Integer.parseInt(minThinField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thinPanel.add(new JLabel("%")); add(thinPanel); JPanel thickPanel = new JPanel(); thickPanel.add(new JLabel("Connect with thick lines if > ")); thickPanel.add(minThickField = new NumberTextField(String.valueOf(parent.thickThresh), 3)); minThickField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThickThresh(Integer.parseInt(minThickField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thickPanel.add(new JLabel("%")); add(thickPanel); fieldSize = minDisplayField.getPreferredSize(); }
|
public HaplotypeDisplayController(HaplotypeDisplay parent) { this.parent = parent; setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); JPanel hapPercentPanel = new JPanel(); hapPercentPanel.add(new JLabel("Examine haplotypes above ")); hapPercentPanel.add(minDisplayField = new NumberTextField(String.valueOf(parent.displayThresh), 3)); minDisplayField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setDisplayThresh(Integer.parseInt(minDisplayField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); hapPercentPanel.add(new JLabel("%")); add(hapPercentPanel); JPanel thinPanel = new JPanel(); thinPanel.add(new JLabel("Connect with thin lines if > ")); thinPanel.add(minThinField = new NumberTextField(String.valueOf(parent.thinThresh), 3)); minThinField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThinThresh(Integer.parseInt(minThinField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thinPanel.add(new JLabel("%")); add(thinPanel); JPanel thickPanel = new JPanel(); thickPanel.add(new JLabel("Connect with thick lines if > ")); thickPanel.add(minThickField = new NumberTextField(String.valueOf(parent.thickThresh), 3)); minThickField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThickThresh(Integer.parseInt(minThickField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thickPanel.add(new JLabel("%")); add(thickPanel); fieldSize = minDisplayField.getPreferredSize(); }
| 1,112,217
|
public HaplotypeDisplayController(HaplotypeDisplay parent) { this.parent = parent; setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); JPanel hapPercentPanel = new JPanel(); hapPercentPanel.add(new JLabel("Examine haplotypes above ")); hapPercentPanel.add(minDisplayField = new NumberTextField(String.valueOf(parent.displayThresh), 3)); minDisplayField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setDisplayThresh(Integer.parseInt(minDisplayField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); hapPercentPanel.add(new JLabel("%")); add(hapPercentPanel); JPanel thinPanel = new JPanel(); thinPanel.add(new JLabel("Connect with thin lines if > ")); thinPanel.add(minThinField = new NumberTextField(String.valueOf(parent.thinThresh), 3)); minThinField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThinThresh(Integer.parseInt(minThinField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thinPanel.add(new JLabel("%")); add(thinPanel); JPanel thickPanel = new JPanel(); thickPanel.add(new JLabel("Connect with thick lines if > ")); thickPanel.add(minThickField = new NumberTextField(String.valueOf(parent.thickThresh), 3)); minThickField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThickThresh(Integer.parseInt(minThickField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thickPanel.add(new JLabel("%")); add(thickPanel); fieldSize = minDisplayField.getPreferredSize(); }
|
public HaplotypeDisplayController(HaplotypeDisplay parent) { this.parent = parent; setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); JPanel hapPercentPanel = new JPanel(); hapPercentPanel.add(new JLabel("Examine haplotypes above ")); hapPercentPanel.add(minDisplayField = new NumberTextField(String.valueOf(parent.displayThresh), 3)); minDisplayField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setDisplayThresh(Integer.parseInt(minDisplayField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); hapPercentPanel.add(new JLabel("%")); add(hapPercentPanel); JPanel thinPanel = new JPanel(); thinPanel.add(new JLabel("Connect with thin lines if > ")); thinPanel.add(minThinField = new NumberTextField(String.valueOf(parent.thinThresh), 3)); minThinField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThinThresh(Integer.parseInt(minThinField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thinPanel.add(new JLabel("%")); add(thinPanel); JPanel thickPanel = new JPanel(); thickPanel.add(new JLabel("Connect with thick lines if > ")); thickPanel.add(minThickField = new NumberTextField(String.valueOf(parent.thickThresh), 3)); minThickField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThickThresh(Integer.parseInt(minThickField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thickPanel.add(new JLabel("%")); add(thickPanel); fieldSize = minDisplayField.getPreferredSize(); }
| 1,112,218
|
public void actionPerformed(ActionEvent evt) { SQLTable t = new SQLTable(); try { t.initFolders(true); } catch (ArchitectException e) { logger.error("Couldn't add folder to table \""+t.getName()+"\"", e); JOptionPane.showMessageDialog(null, "Failed to add folder to table:\n"+e.getMessage()); } t.setName("New_Table"); TablePane tp = new TablePane(t, pp); pp.addFloating(tp); pp.setMouseMode(PlayPen.mouseModeType.CREATING_TABLE); }
|
public void actionPerformed(ActionEvent evt) { SQLTable t = new SQLTable(); try { t.initFolders(true); } catch (ArchitectException e) { logger.error("Couldn't add folder to table \""+t.getName()+"\"", e); JOptionPane.showMessageDialog(null, "Failed to add folder to table:\n"+e.getMessage()); } t.setName("New_Table"); TablePane tp = new TablePane(t, pp); pp.addFloating(tp); pp.setMouseMode(PlayPen.MouseModeType.CREATING_TABLE); }
| 1,112,221
|
public String draw(DashboardContext context) { ApplicationConfig appConfig = context.getWebContext().getApplicationConfig(); assert appConfig != null: "No application context present"; try{ StringBuffer graphComponent = new StringBuffer().append( "<applet code=\"org/jmanage/webui/applets/GraphApplet.class\"").append( " width=\"{0}\" height=\"{1}\"").append( " archive=\"/applets/applets.jar,/applets/jfreechart-0.9.20.jar,").append( "/applets/jcommon-0.9.5.jar\" >").append( "<param name=\"graphTitle\" value=\""+getName()+"\"></param>").append( "<param name=\"pollingInterval\" value=\""+getPollingIntervalInSeconds()+"\"></param>").append( "<param name=\"remoteURL\" value=\"http://localhost:9090/app/fetchAttributeValues.do;jsessionid={2}\"></param>").append( "<param name=\"displayNames\" value=\"").append(getAttributeDisplayNamesForGraph()).append("\"></param>").append( "<param name=\"attributes\" value=\"").append(getAttributesForGraph(appConfig.getName())).append("\"></param>").append( "<param value=\"\" name=\"yAxisLabel\"></param>").append("</applet>"); return graphComponent.toString(); }catch(Exception e){ return "Failure rendering component"; } }
|
public String draw(DashboardContext context) { ApplicationConfig appConfig = context.getWebContext().getApplicationConfig(); assert appConfig != null: "No application context present"; try{ StringBuffer graphComponent = new StringBuffer().append( "<applet code=\"org/jmanage/webui/applets/GraphApplet.class\"").append( " width=\"{0}\" height=\"{1}\"").append( " archive=\"/applets/applets.jar,/applets/jfreechart-0.9.20.jar,").append( "/applets/jcommon-0.9.5.jar\" >").append( "<param name=\"graphTitle\" value=\""+getName()+"\"></param>").append( "<param name=\"pollingInterval\" value=\""+getPollingIntervalInSeconds()+"\"></param>").append( "<param name=\"remoteURL\" value=\""+context.getServerPath()+"/app/fetchAttributeValues.do;jsessionid={2}\"></param>").append(//localhost:9090/app/fetchAttributeValues.do;jsessionid={2}\"></param>").append( "<param name=\"displayNames\" value=\"").append(getAttributeDisplayNamesForGraph()).append("\"></param>").append( "<param name=\"attributes\" value=\"").append(getAttributesForGraph(appConfig.getName())).append("\"></param>").append( "<param value=\"\" name=\"yAxisLabel\"></param>").append("</applet>"); return graphComponent.toString(); }catch(Exception e){ return "Failure rendering component"; } }
| 1,112,222
|
EM(Vector chromosomes, int numTrios){ //we need to add extra copies of haploid chromosomes so we add a second copy this.chromosomes = new Vector(); for(int i=0;i<chromosomes.size();i++) { this.chromosomes.add(chromosomes.elementAt(i)); if(((Chromosome)this.chromosomes.lastElement()).isHaploid()){ this.chromosomes.add(chromosomes.elementAt(i)); } } this.numTrios = numTrios; }
|
EM(Vector chromosomes, int numTrios, Vector extraInds){ //we need to add extra copies of haploid chromosomes so we add a second copy this.chromosomes = new Vector(); for(int i=0;i<chromosomes.size();i++) { this.chromosomes.add(chromosomes.elementAt(i)); if(((Chromosome)this.chromosomes.lastElement()).isHaploid()){ this.chromosomes.add(chromosomes.elementAt(i)); } } this.numTrios = numTrios; }
| 1,112,223
|
EM(Vector chromosomes, int numTrios){ //we need to add extra copies of haploid chromosomes so we add a second copy this.chromosomes = new Vector(); for(int i=0;i<chromosomes.size();i++) { this.chromosomes.add(chromosomes.elementAt(i)); if(((Chromosome)this.chromosomes.lastElement()).isHaploid()){ this.chromosomes.add(chromosomes.elementAt(i)); } } this.numTrios = numTrios; }
|
EM(Vector chromosomes, int numTrios){ //we need to add extra copies of haploid chromosomes so we add a second copy this.chromosomes = new Vector(); for(int i=0;i<chromosomes.size();i++) { this.chromosomes.add(chromosomes.elementAt(i)); if(((Chromosome)this.chromosomes.lastElement()).isHaploid()){ this.chromosomes.add(chromosomes.elementAt(i)); } } this.numTrios = numTrios + extraTrioCount; }
| 1,112,224
|
public void doEM(int[] theBlock) throws HaploViewException{ //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } byte[] thisHap; Vector inputHaploSingletons = new Vector(); Vector inputHaploTrios = new Vector(); Vector affSingletons = new Vector(); Vector affTrios = new Vector(); Vector affKids = new Vector(); Vector haploidTrios = new Vector(); Vector haploidSingletons = new Vector(); //whichVector[i] stores a value which indicates which vector chromosome i's genotype should go in //1 indicates inputHaploSingletons (singletons), 2 indicates inputHaploTrios, //3 indicates a person from a broken trio who is treated as a singleton //0 indicates none (too much missing data) int[] whichVector = new int[chromosomes.size()]; for(int i=0;i<numTrios*4; i+=4) { Chromosome parentAFirst = (Chromosome) chromosomes.elementAt(i); Chromosome parentASecond = (Chromosome) chromosomes.elementAt(i+1); Chromosome parentBFirst = (Chromosome) chromosomes.elementAt(i+2); Chromosome parentBSecond = (Chromosome) chromosomes.elementAt(i+3); boolean tooManyMissingInASegmentA = false; boolean tooManyMissingInASegmentB = false; int totalMissingA = 0; int totalMissingB = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missingA = 0; int missingB = 0; for (int j = 0; j < block_size[n]; j++){ byte AFirstGeno = parentAFirst.getGenotype(theBlock[segmentShift+j]); byte ASecondGeno = parentASecond.getGenotype(theBlock[segmentShift+j]); byte BFirstGeno = parentBFirst.getGenotype(theBlock[segmentShift+j]); byte 
BSecondGeno = parentBSecond.getGenotype(theBlock[segmentShift+j]); if(AFirstGeno == 0 || ASecondGeno == 0) missingA++; if(BFirstGeno == 0 || BSecondGeno == 0) missingB++; } segmentShift += block_size[n]; if (missingA >= MISSINGLIMIT){ tooManyMissingInASegmentA = true; } if (missingB >= MISSINGLIMIT){ tooManyMissingInASegmentB = true; } totalMissingA += missingA; totalMissingB += missingB; } if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3 && !tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //both parents are good so all 4 chroms are added as a trio whichVector[i] = 2; whichVector[i+1] = 2; whichVector[i+2] = 2; whichVector[i+3] = 2; } else if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3) { //first person good, so he's added as a singleton, other parent is dropped whichVector[i] = 3; whichVector[i+1] =3; whichVector[i+2] =0; whichVector[i+3]=0; } else if(!tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //second person good, so he's added as a singleton, other parent is dropped whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =3; whichVector[i+3]=3; } else { //both people have too much missing data so neither is used whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =0; whichVector[i+3]=0; } } for (int i = numTrios*4; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); boolean tooManyMissingInASegment = false; int totalMissing = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missing = 0; for (int j = 0; j < block_size[n]; j++){ byte theGeno = thisChrom.getGenotype(theBlock[segmentShift+j]); byte nextGeno = nextChrom.getGenotype(theBlock[segmentShift+j]); if(theGeno == 0 || nextGeno == 0) missing++; } segmentShift += block_size[n]; if (missing >= MISSINGLIMIT){ tooManyMissingInASegment = true; } totalMissing += missing; } //we want to use 
chromosomes without too many missing genotypes in a given //subsegment (first term) or without too many missing genotypes in the //whole block (second term) if (!tooManyMissingInASegment && totalMissing <= 1+theBlock.length/3){ whichVector[i-1] = 1; whichVector[i] = 1; } } //we only want to add an affected status every other chromosome, so we flip this boolean each time boolean addAff = true; for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); if(whichVector[i] > 0) { thisHap = new byte[theBlock.length]; for (int j = 0; j < theBlock.length; j++){ byte a1 = Chromosome.getMarker(theBlock[j]).getMajor(); byte a2 = Chromosome.getMarker(theBlock[j]).getMinor(); byte theGeno = thisChrom.getGenotype(theBlock[j]); if (theGeno >= 5){ thisHap[j] = 'h'; } else { if (theGeno == 0){ thisHap[j] = '0'; }else if (theGeno == a1){ thisHap[j] = '1'; }else if (theGeno == a2){ thisHap[j] = '2'; }else{ throw new HaploViewException("Marker with > 2 alleles: " + Chromosome.getMarker(theBlock[j]).getDisplayName()); } } } if(whichVector[i] == 1) { inputHaploSingletons.add(thisHap); if(addAff) { affSingletons.add(new Integer(thisChrom.getAffected())); haploidSingletons.add(new Boolean(thisChrom.isHaploid())); } } else if(whichVector[i] ==2) { inputHaploTrios.add(thisHap); if(addAff) { affTrios.add(new Integer(thisChrom.getAffected())); affKids.add(thisChrom.getKidAffected()); haploidTrios.add(new Boolean(thisChrom.isHaploid())); } }else if (whichVector[i] == 3){ inputHaploSingletons.add(thisHap); if(addAff) { affSingletons.add(new Integer(0)); haploidSingletons.add(new Boolean(thisChrom.isHaploid())); } } if(addAff) { addAff = false; } else { addAff =true; } } } numFilteredTrios = inputHaploTrios.size() / 4; inputHaploTrios.addAll(inputHaploSingletons); affTrios.addAll(affSingletons); byte[][] input_haplos = (byte[][])inputHaploTrios.toArray(new byte[0][0]); haploidTrios.addAll(haploidSingletons); haploid = new 
boolean[haploidTrios.size()]; for(int i=0;i<haploidTrios.size();i++){ haploid[i] = ((Boolean)haploidTrios.elementAt(i)).booleanValue(); } full_em_breakup(input_haplos, block_size, affTrios, affKids); }
|
public void doEM(int[] theBlock) throws HaploViewException{ //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } byte[] thisHap; Vector inputHaploSingletons = new Vector(); Vector inputHaploTrios = new Vector(); Vector affSingletons = new Vector(); Vector affTrios = new Vector(); Vector affKids = new Vector(); Vector haploidTrios = new Vector(); Vector haploidSingletons = new Vector(); //whichVector[i] stores a value which indicates which vector chromosome i's genotype should go in //1 indicates inputHaploSingletons (singletons), 2 indicates inputHaploTrios, //3 indicates a person from a broken trio who is treated as a singleton //0 indicates none (too much missing data) int[] whichVector = new int[chromosomes.size()]; for(int i=0;i<numTrios*4; i+=4) { Chromosome parentAFirst = (Chromosome) chromosomes.elementAt(i); Chromosome parentASecond = (Chromosome) chromosomes.elementAt(i+1); Chromosome parentBFirst = (Chromosome) chromosomes.elementAt(i+2); Chromosome parentBSecond = (Chromosome) chromosomes.elementAt(i+3); boolean tooManyMissingInASegmentA = false; boolean tooManyMissingInASegmentB = false; int totalMissingA = 0; int totalMissingB = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missingA = 0; int missingB = 0; for (int j = 0; j < block_size[n]; j++){ byte AFirstGeno = parentAFirst.getGenotype(theBlock[segmentShift+j]); byte ASecondGeno = parentASecond.getGenotype(theBlock[segmentShift+j]); byte BFirstGeno = parentBFirst.getGenotype(theBlock[segmentShift+j]); byte 
BSecondGeno = parentBSecond.getGenotype(theBlock[segmentShift+j]); if(AFirstGeno == 0 || ASecondGeno == 0) missingA++; if(BFirstGeno == 0 || BSecondGeno == 0) missingB++; } segmentShift += block_size[n]; if (missingA >= MISSINGLIMIT){ tooManyMissingInASegmentA = true; } if (missingB >= MISSINGLIMIT){ tooManyMissingInASegmentB = true; } totalMissingA += missingA; totalMissingB += missingB; } if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3 && !tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //both parents are good so all 4 chroms are added as a trio whichVector[i] = 2; whichVector[i+1] = 2; whichVector[i+2] = 2; whichVector[i+3] = 2; } else if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3) { //first person good, so he's added as a singleton, other parent is dropped whichVector[i] = 3; whichVector[i+1] =3; whichVector[i+2] =0; whichVector[i+3]=0; } else if(!tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //second person good, so he's added as a singleton, other parent is dropped whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =3; whichVector[i+3]=3; } else { //both people have too much missing data so neither is used whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =0; whichVector[i+3]=0; } } for (int i = numTrios*4; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); boolean tooManyMissingInASegment = false; int totalMissing = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missing = 0; for (int j = 0; j < block_size[n]; j++){ byte theGeno = thisChrom.getGenotype(theBlock[segmentShift+j]); byte nextGeno = nextChrom.getGenotype(theBlock[segmentShift+j]); if(theGeno == 0 || nextGeno == 0) missing++; } segmentShift += block_size[n]; if (missing >= MISSINGLIMIT){ tooManyMissingInASegment = true; } totalMissing += missing; } //we want to use 
chromosomes without too many missing genotypes in a given //subsegment (first term) or without too many missing genotypes in the //whole block (second term) if (!tooManyMissingInASegment && totalMissing <= 1+theBlock.length/3){ whichVector[i-1] = 1; whichVector[i] = 1; } } //we only want to add an affected status every other chromosome, so we flip this boolean each time boolean addAff = true; for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); if(whichVector[i] > 0) { thisHap = new byte[theBlock.length]; for (int j = 0; j < theBlock.length; j++){ byte a1 = Chromosome.getMarker(theBlock[j]).getMajor(); byte a2 = Chromosome.getMarker(theBlock[j]).getMinor(); byte theGeno = thisChrom.getGenotype(theBlock[j]); if (theGeno >= 5){ thisHap[j] = 'h'; } else { if (theGeno == 0){ thisHap[j] = '0'; }else if (theGeno == a1){ thisHap[j] = '1'; }else if (theGeno == a2){ thisHap[j] = '2'; }else{ throw new HaploViewException("Marker with > 2 alleles: " + Chromosome.getMarker(theBlock[j]).getDisplayName()); } } } if(whichVector[i] == 1) { inputHaploSingletons.add(thisHap); if(addAff) { affSingletons.add(new Integer(thisChrom.getAffected())); haploidSingletons.add(new Boolean(thisChrom.isHaploid())); } } else if(whichVector[i] ==2) { inputHaploTrios.add(thisHap); if(addAff) { affTrios.add(new Integer(thisChrom.getAffected())); affKids.add(thisChrom.getKidAffected()); haploidTrios.add(new Boolean(thisChrom.isHaploid())); } }else if (whichVector[i] == 3){ inputHaploSingletons.add(thisHap); if(addAff) { affSingletons.add(new Integer(0)); haploidSingletons.add(new Boolean(thisChrom.isHaploid())); } } if(addAff) { addAff = false; } else { addAff =true; } } } numFilteredTrios = inputHaploTrios.size() / 4; inputHaploTrios.addAll(inputHaploSingletons); affTrios.addAll(affSingletons); byte[][] input_haplos = (byte[][])inputHaploTrios.toArray(new byte[0][0]); haploidTrios.addAll(haploidSingletons); haploid = new 
boolean[haploidTrios.size()]; for(int i=0;i<haploidTrios.size();i++){ haploid[i] = ((Boolean)haploidTrios.elementAt(i)).booleanValue(); } full_em_breakup(input_haplos, block_size, affTrios, affKids); }
| 1,112,226
|
public void doEM(int[] theBlock) throws HaploViewException{ //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } byte[] thisHap; Vector inputHaploSingletons = new Vector(); Vector inputHaploTrios = new Vector(); Vector affSingletons = new Vector(); Vector affTrios = new Vector(); Vector affKids = new Vector(); Vector haploidTrios = new Vector(); Vector haploidSingletons = new Vector(); //whichVector[i] stores a value which indicates which vector chromosome i's genotype should go in //1 indicates inputHaploSingletons (singletons), 2 indicates inputHaploTrios, //3 indicates a person from a broken trio who is treated as a singleton //0 indicates none (too much missing data) int[] whichVector = new int[chromosomes.size()]; for(int i=0;i<numTrios*4; i+=4) { Chromosome parentAFirst = (Chromosome) chromosomes.elementAt(i); Chromosome parentASecond = (Chromosome) chromosomes.elementAt(i+1); Chromosome parentBFirst = (Chromosome) chromosomes.elementAt(i+2); Chromosome parentBSecond = (Chromosome) chromosomes.elementAt(i+3); boolean tooManyMissingInASegmentA = false; boolean tooManyMissingInASegmentB = false; int totalMissingA = 0; int totalMissingB = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missingA = 0; int missingB = 0; for (int j = 0; j < block_size[n]; j++){ byte AFirstGeno = parentAFirst.getGenotype(theBlock[segmentShift+j]); byte ASecondGeno = parentASecond.getGenotype(theBlock[segmentShift+j]); byte BFirstGeno = parentBFirst.getGenotype(theBlock[segmentShift+j]); byte 
BSecondGeno = parentBSecond.getGenotype(theBlock[segmentShift+j]); if(AFirstGeno == 0 || ASecondGeno == 0) missingA++; if(BFirstGeno == 0 || BSecondGeno == 0) missingB++; } segmentShift += block_size[n]; if (missingA >= MISSINGLIMIT){ tooManyMissingInASegmentA = true; } if (missingB >= MISSINGLIMIT){ tooManyMissingInASegmentB = true; } totalMissingA += missingA; totalMissingB += missingB; } if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3 && !tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //both parents are good so all 4 chroms are added as a trio whichVector[i] = 2; whichVector[i+1] = 2; whichVector[i+2] = 2; whichVector[i+3] = 2; } else if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3) { //first person good, so he's added as a singleton, other parent is dropped whichVector[i] = 3; whichVector[i+1] =3; whichVector[i+2] =0; whichVector[i+3]=0; } else if(!tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //second person good, so he's added as a singleton, other parent is dropped whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =3; whichVector[i+3]=3; } else { //both people have too much missing data so neither is used whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =0; whichVector[i+3]=0; } } for (int i = numTrios*4; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); boolean tooManyMissingInASegment = false; int totalMissing = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missing = 0; for (int j = 0; j < block_size[n]; j++){ byte theGeno = thisChrom.getGenotype(theBlock[segmentShift+j]); byte nextGeno = nextChrom.getGenotype(theBlock[segmentShift+j]); if(theGeno == 0 || nextGeno == 0) missing++; } segmentShift += block_size[n]; if (missing >= MISSINGLIMIT){ tooManyMissingInASegment = true; } totalMissing += missing; } //we want to use 
chromosomes without too many missing genotypes in a given //subsegment (first term) or without too many missing genotypes in the //whole block (second term) if (!tooManyMissingInASegment && totalMissing <= 1+theBlock.length/3){ whichVector[i-1] = 1; whichVector[i] = 1; } } //we only want to add an affected status every other chromosome, so we flip this boolean each time boolean addAff = true; for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); if(whichVector[i] > 0) { thisHap = new byte[theBlock.length]; for (int j = 0; j < theBlock.length; j++){ byte a1 = Chromosome.getMarker(theBlock[j]).getMajor(); byte a2 = Chromosome.getMarker(theBlock[j]).getMinor(); byte theGeno = thisChrom.getGenotype(theBlock[j]); if (theGeno >= 5){ thisHap[j] = 'h'; } else { if (theGeno == 0){ thisHap[j] = '0'; }else if (theGeno == a1){ thisHap[j] = '1'; }else if (theGeno == a2){ thisHap[j] = '2'; }else{ throw new HaploViewException("Marker with > 2 alleles: " + Chromosome.getMarker(theBlock[j]).getDisplayName()); } } } if(whichVector[i] == 1) { inputHaploSingletons.add(thisHap); if(addAff) { affSingletons.add(new Integer(thisChrom.getAffected())); haploidSingletons.add(new Boolean(thisChrom.isHaploid())); } } else if(whichVector[i] ==2) { inputHaploTrios.add(thisHap); if(addAff) { affTrios.add(new Integer(thisChrom.getAffected())); affKids.add(thisChrom.getKidAffected()); haploidTrios.add(new Boolean(thisChrom.isHaploid())); } }else if (whichVector[i] == 3){ inputHaploSingletons.add(thisHap); if(addAff) { affSingletons.add(new Integer(0)); haploidSingletons.add(new Boolean(thisChrom.isHaploid())); } } if(addAff) { addAff = false; } else { addAff =true; } } } numFilteredTrios = inputHaploTrios.size() / 4; inputHaploTrios.addAll(inputHaploSingletons); affTrios.addAll(affSingletons); byte[][] input_haplos = (byte[][])inputHaploTrios.toArray(new byte[0][0]); haploidTrios.addAll(haploidSingletons); haploid = new 
boolean[haploidTrios.size()]; for(int i=0;i<haploidTrios.size();i++){ haploid[i] = ((Boolean)haploidTrios.elementAt(i)).booleanValue(); } full_em_breakup(input_haplos, block_size, affTrios, affKids); }
|
public void doEM(int[] theBlock) throws HaploViewException{ //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } byte[] thisHap; Vector inputHaploSingletons = new Vector(); Vector inputHaploTrios = new Vector(); Vector affSingletons = new Vector(); Vector affTrios = new Vector(); Vector affKids = new Vector(); Vector haploidTrios = new Vector(); Vector haploidSingletons = new Vector(); //whichVector[i] stores a value which indicates which vector chromosome i's genotype should go in //1 indicates inputHaploSingletons (singletons), 2 indicates inputHaploTrios, //3 indicates a person from a broken trio who is treated as a singleton //0 indicates none (too much missing data) int[] whichVector = new int[chromosomes.size()]; for(int i=0;i<numTrios*4; i+=4) { Chromosome parentAFirst = (Chromosome) chromosomes.elementAt(i); Chromosome parentASecond = (Chromosome) chromosomes.elementAt(i+1); Chromosome parentBFirst = (Chromosome) chromosomes.elementAt(i+2); Chromosome parentBSecond = (Chromosome) chromosomes.elementAt(i+3); boolean tooManyMissingInASegmentA = false; boolean tooManyMissingInASegmentB = false; int totalMissingA = 0; int totalMissingB = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missingA = 0; int missingB = 0; for (int j = 0; j < block_size[n]; j++){ byte AFirstGeno = parentAFirst.getGenotype(theBlock[segmentShift+j]); byte ASecondGeno = parentASecond.getGenotype(theBlock[segmentShift+j]); byte BFirstGeno = parentBFirst.getGenotype(theBlock[segmentShift+j]); byte 
BSecondGeno = parentBSecond.getGenotype(theBlock[segmentShift+j]); if(AFirstGeno == 0 || ASecondGeno == 0) missingA++; if(BFirstGeno == 0 || BSecondGeno == 0) missingB++; } segmentShift += block_size[n]; if (missingA >= MISSINGLIMIT){ tooManyMissingInASegmentA = true; } if (missingB >= MISSINGLIMIT){ tooManyMissingInASegmentB = true; } totalMissingA += missingA; totalMissingB += missingB; } if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3 && !tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //both parents are good so all 4 chroms are added as a trio whichVector[i] = 2; whichVector[i+1] = 2; whichVector[i+2] = 2; whichVector[i+3] = 2; } else if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3) { //first person good, so he's added as a singleton, other parent is dropped whichVector[i] = 3; whichVector[i+1] =3; whichVector[i+2] =0; whichVector[i+3]=0; } else if(!tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3 && i/4 >= extraTrioCount){ //second person good, so he's added as a singleton, other parent is dropped whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =3; whichVector[i+3]=3; } else { //both people have too much missing data so neither is used whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =0; whichVector[i+3]=0; } } for (int i = numTrios*4; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); boolean tooManyMissingInASegment = false; int totalMissing = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missing = 0; for (int j = 0; j < block_size[n]; j++){ byte theGeno = thisChrom.getGenotype(theBlock[segmentShift+j]); byte nextGeno = nextChrom.getGenotype(theBlock[segmentShift+j]); if(theGeno == 0 || nextGeno == 0) missing++; } segmentShift += block_size[n]; if (missing >= MISSINGLIMIT){ tooManyMissingInASegment = true; } totalMissing += 
missing; } //we want to use chromosomes without too many missing genotypes in a given //subsegment (first term) or without too many missing genotypes in the //whole block (second term) if (!tooManyMissingInASegment && totalMissing <= 1+theBlock.length/3){ whichVector[i-1] = 1; whichVector[i] = 1; } } //we only want to add an affected status every other chromosome, so we flip this boolean each time boolean addAff = true; for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); if(whichVector[i] > 0) { thisHap = new byte[theBlock.length]; for (int j = 0; j < theBlock.length; j++){ byte a1 = Chromosome.getMarker(theBlock[j]).getMajor(); byte a2 = Chromosome.getMarker(theBlock[j]).getMinor(); byte theGeno = thisChrom.getGenotype(theBlock[j]); if (theGeno >= 5){ thisHap[j] = 'h'; } else { if (theGeno == 0){ thisHap[j] = '0'; }else if (theGeno == a1){ thisHap[j] = '1'; }else if (theGeno == a2){ thisHap[j] = '2'; }else{ throw new HaploViewException("Marker with > 2 alleles: " + Chromosome.getMarker(theBlock[j]).getDisplayName()); } } } if(whichVector[i] == 1) { inputHaploSingletons.add(thisHap); if(addAff) { affSingletons.add(new Integer(thisChrom.getAffected())); haploidSingletons.add(new Boolean(thisChrom.isHaploid())); } } else if(whichVector[i] ==2) { inputHaploTrios.add(thisHap); if(addAff) { affTrios.add(new Integer(thisChrom.getAffected())); affKids.add(thisChrom.getKidAffected()); haploidTrios.add(new Boolean(thisChrom.isHaploid())); } }else if (whichVector[i] == 3){ inputHaploSingletons.add(thisHap); if(addAff) { affSingletons.add(new Integer(0)); haploidSingletons.add(new Boolean(thisChrom.isHaploid())); } } if(addAff) { addAff = false; } else { addAff =true; } } } numFilteredTrios = inputHaploTrios.size() / 4; inputHaploTrios.addAll(inputHaploSingletons); affTrios.addAll(affSingletons); byte[][] input_haplos = (byte[][])inputHaploTrios.toArray(new byte[0][0]); haploidTrios.addAll(haploidSingletons); haploid = 
new boolean[haploidTrios.size()]; for(int i=0;i<haploidTrios.size();i++){ haploid[i] = ((Boolean)haploidTrios.elementAt(i)).booleanValue(); } full_em_breakup(input_haplos, block_size, affTrios, affKids); }
| 1,112,227
|
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus, Vector kidAffStatus) throws HaploViewException{ int num_poss, iter; double total = 0; int block, start_locus, end_locus, biggest_block_size; int num_indivs = 0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> "+MAXLOCI+" non-redundant)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new 
Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); if (!haploid[i]){ probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; }else{ total+=1.0; } } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); if(haploid[i]){ if (tempRec.h1 == tempRec.h2){ //for haploids we only consider reconstructions where both chroms are equal, //since those are the only truly possible ones (i.e. heterozygous reconstructions //are mistakes) tempRec.p = (float)(probMap.get(new Long(tempRec.h1))); }else{ tempRec.p = 0; } }else { tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); } total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); if (!haploid[i]){ probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); }else{ total += tempRec.p; } } } probMap.normalize(total); iter++; } int m=0; for(long j=0;j<num_poss; j++){ hint[(int)j]=-1; if (probMap.get(new Long(j)) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=(int)j; hprob[block][m]=probMap.get(new Long(j)); hint[(int)j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ double poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= 
num_hlist[block]; } /* LIGATE and finish this mess :) */ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total = poss_full * PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); if (!haploid[i]){ fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; }else{ total+=1.0; } } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { if(haploid[i]){ if (superdata[i].superposs[k].h1 == superdata[i].superposs[k].h2){ //only consider reconstructions of haploid chromosomes where h1 == h2 //since heterozygous reconstructions aren't possible for haploids superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))); }else{ superdata[i].superposs[k].p = 0; } }else{ superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); } total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); 
if(!haploid[i]){ fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); }else{ total += superdata[i].superposs[k].p; } } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)kidAffStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; realKidAffectedStatus = kidAffStatus; doAssociationTests(affStatus, null,null, kidAffStatus); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); ArrayList keys = new ArrayList(fullProbMap.theMap.keySet()); Collections.sort(keys); Iterator kitr = keys.iterator(); while(kitr.hasNext()) { Object key = kitr.next(); long keyLong = ((Long)key).longValue(); if(fullProbMap.get(key) > .001) { haplos_present.addElement(decode_haplo_str(keyLong,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(key))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if 
(superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
|
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus, Vector kidAffStatus) throws HaploViewException{ int num_poss, iter; double total = 0; int block, start_locus, end_locus, biggest_block_size; int num_indivs = 0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> "+MAXLOCI+" non-redundant)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1 && i >= updatedExtraTrioCount*2) { tempRec = 
(Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); if (!haploid[i]){ probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; }else{ total+=1.0; } } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); if(haploid[i]){ if (tempRec.h1 == tempRec.h2){ //for haploids we only consider reconstructions where both chroms are equal, //since those are the only truly possible ones (i.e. heterozygous reconstructions //are mistakes) tempRec.p = (float)(probMap.get(new Long(tempRec.h1))); }else{ tempRec.p = 0; } }else { tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); } total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); if (!haploid[i]){ probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); }else{ total += tempRec.p; } } } probMap.normalize(total); iter++; } int m=0; for(long j=0;j<num_poss; j++){ hint[(int)j]=-1; if (probMap.get(new Long(j)) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=(int)j; hprob[block][m]=probMap.get(new Long(j)); hint[(int)j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ double poss_full=1; 
for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) */ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total = poss_full * PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); if (!haploid[i]){ fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; }else{ total+=1.0; } } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { if(haploid[i]){ if (superdata[i].superposs[k].h1 == superdata[i].superposs[k].h2){ //only consider reconstructions of haploid chromosomes where h1 == h2 //since heterozygous reconstructions aren't possible for haploids superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))); }else{ superdata[i].superposs[k].p = 0; } }else{ superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); } total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new 
Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); if(!haploid[i]){ fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); }else{ total += superdata[i].superposs[k].p; } } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)kidAffStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; realKidAffectedStatus = kidAffStatus; doAssociationTests(affStatus, null,null, kidAffStatus); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); ArrayList keys = new ArrayList(fullProbMap.theMap.keySet()); Collections.sort(keys); Iterator kitr = keys.iterator(); while(kitr.hasNext()) { Object key = kitr.next(); long keyLong = ((Long)key).longValue(); if(fullProbMap.get(key) > .001) { haplos_present.addElement(decode_haplo_str(keyLong,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(key))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; 
i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
| 1,112,228
|
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus, Vector kidAffStatus) throws HaploViewException{ int num_poss, iter; double total = 0; int block, start_locus, end_locus, biggest_block_size; int num_indivs = 0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> "+MAXLOCI+" non-redundant)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new 
Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); if (!haploid[i]){ probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; }else{ total+=1.0; } } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); if(haploid[i]){ if (tempRec.h1 == tempRec.h2){ //for haploids we only consider reconstructions where both chroms are equal, //since those are the only truly possible ones (i.e. heterozygous reconstructions //are mistakes) tempRec.p = (float)(probMap.get(new Long(tempRec.h1))); }else{ tempRec.p = 0; } }else { tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); } total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); if (!haploid[i]){ probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); }else{ total += tempRec.p; } } } probMap.normalize(total); iter++; } int m=0; for(long j=0;j<num_poss; j++){ hint[(int)j]=-1; if (probMap.get(new Long(j)) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=(int)j; hprob[block][m]=probMap.get(new Long(j)); hint[(int)j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ double poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= 
num_hlist[block]; } /* LIGATE and finish this mess :) */ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total = poss_full * PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); if (!haploid[i]){ fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; }else{ total+=1.0; } } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { if(haploid[i]){ if (superdata[i].superposs[k].h1 == superdata[i].superposs[k].h2){ //only consider reconstructions of haploid chromosomes where h1 == h2 //since heterozygous reconstructions aren't possible for haploids superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))); }else{ superdata[i].superposs[k].p = 0; } }else{ superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); } total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); 
if(!haploid[i]){ fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); }else{ total += superdata[i].superposs[k].p; } } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)kidAffStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; realKidAffectedStatus = kidAffStatus; doAssociationTests(affStatus, null,null, kidAffStatus); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); ArrayList keys = new ArrayList(fullProbMap.theMap.keySet()); Collections.sort(keys); Iterator kitr = keys.iterator(); while(kitr.hasNext()) { Object key = kitr.next(); long keyLong = ((Long)key).longValue(); if(fullProbMap.get(key) > .001) { haplos_present.addElement(decode_haplo_str(keyLong,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(key))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if 
(superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
|
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus, Vector kidAffStatus) throws HaploViewException{ int num_poss, iter; double total = 0; int block, start_locus, end_locus, biggest_block_size; int num_indivs = 0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> "+MAXLOCI+" non-redundant)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new 
Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); if (!haploid[i]){ probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; }else{ total+=1.0; } } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); if(haploid[i]){ if (tempRec.h1 == tempRec.h2){ //for haploids we only consider reconstructions where both chroms are equal, //since those are the only truly possible ones (i.e. heterozygous reconstructions //are mistakes) tempRec.p = (float)(probMap.get(new Long(tempRec.h1))); }else{ tempRec.p = 0; } }else { tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); } total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); if (!haploid[i]){ probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); }else{ total += tempRec.p; } } } probMap.normalize(total); iter++; } int m=0; for(long j=0;j<num_poss; j++){ hint[(int)j]=-1; if (probMap.get(new Long(j)) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=(int)j; hprob[block][m]=probMap.get(new Long(j)); hint[(int)j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ double poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= 
num_hlist[block]; } /* LIGATE and finish this mess :) */ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total = poss_full * PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); if (!haploid[i]){ fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; }else{ total+=1.0; } } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { if(haploid[i]){ if (superdata[i].superposs[k].h1 == superdata[i].superposs[k].h2){ //only consider reconstructions of haploid chromosomes where h1 == h2 //since heterozygous reconstructions aren't possible for haploids superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))); }else{ superdata[i].superposs[k].p = 0; } }else{ superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); } total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); 
if(!haploid[i]){ fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); }else{ total += superdata[i].superposs[k].p; } } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)kidAffStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; realKidAffectedStatus = kidAffStatus; doAssociationTests(affStatus, null,null, kidAffStatus); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); ArrayList keys = new ArrayList(fullProbMap.theMap.keySet()); Collections.sort(keys); Iterator kitr = keys.iterator(); while(kitr.hasNext()) { Object key = kitr.next(); long keyLong = ((Long)key).longValue(); if(fullProbMap.get(key) > .001) { haplos_present.addElement(decode_haplo_str(keyLong,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(key))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if 
(superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
| 1,112,229
|
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus, Vector kidAffStatus) throws HaploViewException{ int num_poss, iter; double total = 0; int block, start_locus, end_locus, biggest_block_size; int num_indivs = 0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> "+MAXLOCI+" non-redundant)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new 
Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); if (!haploid[i]){ probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; }else{ total+=1.0; } } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); if(haploid[i]){ if (tempRec.h1 == tempRec.h2){ //for haploids we only consider reconstructions where both chroms are equal, //since those are the only truly possible ones (i.e. heterozygous reconstructions //are mistakes) tempRec.p = (float)(probMap.get(new Long(tempRec.h1))); }else{ tempRec.p = 0; } }else { tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); } total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); if (!haploid[i]){ probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); }else{ total += tempRec.p; } } } probMap.normalize(total); iter++; } int m=0; for(long j=0;j<num_poss; j++){ hint[(int)j]=-1; if (probMap.get(new Long(j)) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=(int)j; hprob[block][m]=probMap.get(new Long(j)); hint[(int)j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ double poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= 
num_hlist[block]; } /* LIGATE and finish this mess :) */ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total = poss_full * PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); if (!haploid[i]){ fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; }else{ total+=1.0; } } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { if(haploid[i]){ if (superdata[i].superposs[k].h1 == superdata[i].superposs[k].h2){ //only consider reconstructions of haploid chromosomes where h1 == h2 //since heterozygous reconstructions aren't possible for haploids superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))); }else{ superdata[i].superposs[k].p = 0; } }else{ superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); } total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); 
if(!haploid[i]){ fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); }else{ total += superdata[i].superposs[k].p; } } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)kidAffStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; realKidAffectedStatus = kidAffStatus; doAssociationTests(affStatus, null,null, kidAffStatus); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); ArrayList keys = new ArrayList(fullProbMap.theMap.keySet()); Collections.sort(keys); Iterator kitr = keys.iterator(); while(kitr.hasNext()) { Object key = kitr.next(); long keyLong = ((Long)key).longValue(); if(fullProbMap.get(key) > .001) { haplos_present.addElement(decode_haplo_str(keyLong,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(key))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if 
(superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
|
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus, Vector kidAffStatus) throws HaploViewException{ int num_poss, iter; double total = 0; int block, start_locus, end_locus, biggest_block_size; int num_indivs = 0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> "+MAXLOCI+" non-redundant)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new 
Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); if (!haploid[i]){ probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; }else{ total+=1.0; } } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); if(haploid[i]){ if (tempRec.h1 == tempRec.h2){ //for haploids we only consider reconstructions where both chroms are equal, //since those are the only truly possible ones (i.e. heterozygous reconstructions //are mistakes) tempRec.p = (float)(probMap.get(new Long(tempRec.h1))); }else{ tempRec.p = 0; } }else { tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); } total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); if (!haploid[i]){ probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); }else{ total += tempRec.p; } } } probMap.normalize(total); iter++; } int m=0; for(long j=0;j<num_poss; j++){ hint[(int)j]=-1; if (probMap.get(new Long(j)) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=(int)j; hprob[block][m]=probMap.get(new Long(j)); hint[(int)j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ double poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= 
num_hlist[block]; } /* LIGATE and finish this mess :) */ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total = poss_full * PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1 && i >= updatedExtraTrioCount*2) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); if (!haploid[i]){ fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; }else{ total+=1.0; } } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { if(haploid[i]){ if (superdata[i].superposs[k].h1 == superdata[i].superposs[k].h2){ //only consider reconstructions of haploid chromosomes where h1 == h2 //since heterozygous reconstructions aren't possible for haploids superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))); }else{ superdata[i].superposs[k].p = 0; } }else{ superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); } total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) 
+ superdata[i].superposs[k].p); if(!haploid[i]){ fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); }else{ total += superdata[i].superposs[k].p; } } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)kidAffStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; realKidAffectedStatus = kidAffStatus; doAssociationTests(affStatus, null,null, kidAffStatus); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); ArrayList keys = new ArrayList(fullProbMap.theMap.keySet()); Collections.sort(keys); Iterator kitr = keys.iterator(); while(kitr.hasNext()) { Object key = kitr.next(); long keyLong = ((Long)key).longValue(); if(fullProbMap.get(key) > .001) { haplos_present.addElement(decode_haplo_str(keyLong,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(key))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; 
k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
| 1,112,230
|
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus, Vector kidAffStatus) throws HaploViewException{ int num_poss, iter; double total = 0; int block, start_locus, end_locus, biggest_block_size; int num_indivs = 0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> "+MAXLOCI+" non-redundant)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new 
Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); if (!haploid[i]){ probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; }else{ total+=1.0; } } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); if(haploid[i]){ if (tempRec.h1 == tempRec.h2){ //for haploids we only consider reconstructions where both chroms are equal, //since those are the only truly possible ones (i.e. heterozygous reconstructions //are mistakes) tempRec.p = (float)(probMap.get(new Long(tempRec.h1))); }else{ tempRec.p = 0; } }else { tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); } total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); if (!haploid[i]){ probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); }else{ total += tempRec.p; } } } probMap.normalize(total); iter++; } int m=0; for(long j=0;j<num_poss; j++){ hint[(int)j]=-1; if (probMap.get(new Long(j)) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=(int)j; hprob[block][m]=probMap.get(new Long(j)); hint[(int)j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ double poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= 
num_hlist[block]; } /* LIGATE and finish this mess :) */ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total = poss_full * PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); if (!haploid[i]){ fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; }else{ total+=1.0; } } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { if(haploid[i]){ if (superdata[i].superposs[k].h1 == superdata[i].superposs[k].h2){ //only consider reconstructions of haploid chromosomes where h1 == h2 //since heterozygous reconstructions aren't possible for haploids superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))); }else{ superdata[i].superposs[k].p = 0; } }else{ superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); } total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); 
if(!haploid[i]){ fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); }else{ total += superdata[i].superposs[k].p; } } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)kidAffStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; realKidAffectedStatus = kidAffStatus; doAssociationTests(affStatus, null,null, kidAffStatus); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); ArrayList keys = new ArrayList(fullProbMap.theMap.keySet()); Collections.sort(keys); Iterator kitr = keys.iterator(); while(kitr.hasNext()) { Object key = kitr.next(); long keyLong = ((Long)key).longValue(); if(fullProbMap.get(key) > .001) { haplos_present.addElement(decode_haplo_str(keyLong,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(key))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if 
(superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
|
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus, Vector kidAffStatus) throws HaploViewException{ int num_poss, iter; double total = 0; int block, start_locus, end_locus, biggest_block_size; int num_indivs = 0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> "+MAXLOCI+" non-redundant)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new 
Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); if (!haploid[i]){ probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; }else{ total+=1.0; } } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); if(haploid[i]){ if (tempRec.h1 == tempRec.h2){ //for haploids we only consider reconstructions where both chroms are equal, //since those are the only truly possible ones (i.e. heterozygous reconstructions //are mistakes) tempRec.p = (float)(probMap.get(new Long(tempRec.h1))); }else{ tempRec.p = 0; } }else { tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); } total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); if (!haploid[i]){ probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); }else{ total += tempRec.p; } } } probMap.normalize(total); iter++; } int m=0; for(long j=0;j<num_poss; j++){ hint[(int)j]=-1; if (probMap.get(new Long(j)) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=(int)j; hprob[block][m]=probMap.get(new Long(j)); hint[(int)j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ double poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= 
num_hlist[block]; } /* LIGATE and finish this mess :) */ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total = poss_full * PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); if (!haploid[i]){ fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; }else{ total+=1.0; } } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { if(haploid[i]){ if (superdata[i].superposs[k].h1 == superdata[i].superposs[k].h2){ //only consider reconstructions of haploid chromosomes where h1 == h2 //since heterozygous reconstructions aren't possible for haploids superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))); }else{ superdata[i].superposs[k].p = 0; } }else{ superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); } total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); 
if(!haploid[i]){ fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); }else{ total += superdata[i].superposs[k].p; } } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)kidAffStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; realKidAffectedStatus = kidAffStatus; doAssociationTests(affStatus, null,null, kidAffStatus); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); ArrayList keys = new ArrayList(fullProbMap.theMap.keySet()); Collections.sort(keys); Iterator kitr = keys.iterator(); while(kitr.hasNext()) { Object key = kitr.next(); long keyLong = ((Long)key).longValue(); if(fullProbMap.get(key) > .001) { haplos_present.addElement(decode_haplo_str(keyLong,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(key))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if 
(superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
| 1,112,231
|
public void saveHapsToText(Haplotype[][] finishedHaplos, File saveHapsFile) throws IOException{ NumberFormat nf = NumberFormat.getInstance(); nf.setMinimumFractionDigits(3); nf.setMaximumFractionDigits(3); //open file for saving haps text FileWriter saveHapsWriter = new FileWriter(saveHapsFile); int[][]lookupPos = new int[finishedHaplos.length][]; for (int p = 0; p < lookupPos.length; p++){ lookupPos[p] = new int[finishedHaplos[p].length]; for (int q = 0; q < lookupPos[p].length; q++){ lookupPos[p][finishedHaplos[p][q].getListOrder()] = q; //System.out.println(p + " " + q + " " + finishedHaplos[p][q].getListOrder()); } } //go through each block and print haplos for (int i = 0; i < finishedHaplos.length; i++){ //write block header saveHapsWriter.write("BLOCK " + (i+1) + ". MARKERS:"); int[] markerNums = finishedHaplos[i][0].getMarkers(); for (int j = 0; j < finishedHaplos[i].length; j++){ saveHapsWriter.write(" " + (markerNums[j]+1)); } saveHapsWriter.write("\n"); //write haps and crossover percentages for (int j = 0; j < finishedHaplos[i].length; j++){ int curHapNum = lookupPos[i][j]; String theHap = new String(); int[] theGeno = finishedHaplos[i][curHapNum].getGeno(); for (int k = 0; k < theGeno.length; k++){ theHap += theGeno[k]; } saveHapsWriter.write(theHap + " (" + nf.format(finishedHaplos[i][curHapNum].getPercentage()) + ")"); if (i < finishedHaplos.length-1){ saveHapsWriter.write("\t|"); for (int crossCount = 0; crossCount < finishedHaplos[i+1].length; crossCount++){ if (crossCount != 0) saveHapsWriter.write("\t"); saveHapsWriter.write(nf.format(finishedHaplos[i][curHapNum].getCrossover(crossCount))); } saveHapsWriter.write("|"); } saveHapsWriter.write("\n"); } saveHapsWriter.write("\n"); } saveHapsWriter.close(); }
|
public void saveHapsToText(Haplotype[][] finishedHaplos, File saveHapsFile) throws IOException{ NumberFormat nf = NumberFormat.getInstance(); nf.setMinimumFractionDigits(3); nf.setMaximumFractionDigits(3); //open file for saving haps text FileWriter saveHapsWriter = new FileWriter(saveHapsFile); int[][]lookupPos = new int[finishedHaplos.length][]; for (int p = 0; p < lookupPos.length; p++){ lookupPos[p] = new int[finishedHaplos[p].length]; for (int q = 0; q < lookupPos[p].length; q++){ lookupPos[p][finishedHaplos[p][q].getListOrder()] = q; //System.out.println(p + " " + q + " " + finishedHaplos[p][q].getListOrder()); } } //go through each block and print haplos for (int i = 0; i < finishedHaplos.length; i++){ //write block header saveHapsWriter.write("BLOCK " + (i+1) + ". MARKERS:"); int[] markerNums = finishedHaplos[i][0].getMarkers(); for (int j = 0; j < markerNums.length; j++){ saveHapsWriter.write(" " + (markerNums[j]+1)); } saveHapsWriter.write("\n"); //write haps and crossover percentages for (int j = 0; j < markerNums.length; j++){ int curHapNum = lookupPos[i][j]; String theHap = new String(); int[] theGeno = finishedHaplos[i][curHapNum].getGeno(); for (int k = 0; k < theGeno.length; k++){ theHap += theGeno[k]; } saveHapsWriter.write(theHap + " (" + nf.format(finishedHaplos[i][curHapNum].getPercentage()) + ")"); if (i < finishedHaplos.length-1){ saveHapsWriter.write("\t|"); for (int crossCount = 0; crossCount < finishedHaplos[i+1].length; crossCount++){ if (crossCount != 0) saveHapsWriter.write("\t"); saveHapsWriter.write(nf.format(finishedHaplos[i][curHapNum].getCrossover(crossCount))); } saveHapsWriter.write("|"); } saveHapsWriter.write("\n"); } saveHapsWriter.write("\n"); } saveHapsWriter.close(); }
| 1,112,232
|
public Object evaluate(JellyContext context) { try { JexlContext jexlContext = new JellyJexlContext( context ); if (log.isDebugEnabled()) { log.debug("Evaluating EL: " + expression); } Object value = expression.evaluate(jexlContext); if (log.isDebugEnabled()) { log.debug("value of expression: " + value); } return value; } catch (Exception e) { log.warn("Caught exception evaluating: " + expression + ". Reason: " + e, e); return null; } }
|
public Object evaluate(JellyContext context) { try { JexlContext jexlContext = new JellyJexlContext( context ); if (log.isDebugEnabled()) { log.debug("Evaluating EL: " + expression.getExpression()); } Object value = expression.evaluate(jexlContext); if (log.isDebugEnabled()) { log.debug("value of expression: " + value); } return value; } catch (Exception e) { log.warn("Caught exception evaluating: " + expression + ". Reason: " + e, e); return null; } }
| 1,112,238
|
public PairwiseLinkage computeDPrime(int pos1, int pos2){ compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } //check for non-polymorphic markers if (Chromosome.getUnfilteredMarker(pos1).getMAF() == 0 || Chromosome.getUnfilteredMarker(pos2).getMAF() == 0){ return null; } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[Chromosome.getUnfilteredMarker(pos1).getMajor()]=1; marker1num[Chromosome.getUnfilteredMarker(pos1).getMinor()]=2; marker2num[0]=0; marker2num[Chromosome.getUnfilteredMarker(pos2).getMajor()]=1; marker2num[Chromosome.getUnfilteredMarker(pos2).getMinor()]=2; byte a1,a2,b1,b2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables a1 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos1]; a2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; b1 = ((Chromosome) chromosomes.elementAt(++i)).genotypes[pos1]; b2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 >= 5 && a2 >= 5) || (a1 >= 5 && !(a2 == b2)) || (a2 >= 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 >= 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 >= 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = 
twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[101]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + 
(double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10; if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + (double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*Math.log(pA1*pA2) + known[AB]*Math.log(pA1*pB2) + known[BA]*Math.log(pB1*pA2) + known[BB]*Math.log(pB1*pB2) + (double)unknownDH*Math.log(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ //done in this slightly asinine way because of a compiler bug in the alpha version of java //which causes it to try to parallelize the swapping operations and mis-schedules them pA2 = pA2 + pB2; pB2 = pA2 - pB2; pA2 = pA2 - pB2; //pA2=pB2;pB2=temp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = 
(D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = (known[AA]*Math.log(tmpAA) + known[AB]*Math.log(tmpAB) + known[BA]*Math.log(tmpBA) + known[BB]*Math.log(tmpBB) + (double)unknownDH*Math.log(tmpAA*tmpBB + tmpAB*tmpBA))/LN10; } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
|
public PairwiseLinkage computeDPrime(int pos1, int pos2){ compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } //check for non-polymorphic markers if (Chromosome.getUnfilteredMarker(pos1).getMAF() == 0 || Chromosome.getUnfilteredMarker(pos2).getMAF() == 0){ return null; } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[Chromosome.getUnfilteredMarker(pos1).getMajor()]=1; marker1num[Chromosome.getUnfilteredMarker(pos1).getMinor()]=2; marker2num[0]=0; marker2num[Chromosome.getUnfilteredMarker(pos2).getMajor()]=1; marker2num[Chromosome.getUnfilteredMarker(pos2).getMinor()]=2; byte a1,a2,b1,b2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables a1 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos1]; a2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; b1 = ((Chromosome) chromosomes.elementAt(++i)).genotypes[pos1]; b2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 >= 5 && a2 >= 5) || (a1 >= 5 && !(a2 == b2)) || (a2 >= 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 >= 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 >= 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = 
twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[101]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]))/LN10 + 
((double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10; if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + (double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*Math.log(pA1*pA2) + known[AB]*Math.log(pA1*pB2) + known[BA]*Math.log(pB1*pA2) + known[BB]*Math.log(pB1*pB2) + (double)unknownDH*Math.log(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ //done in this slightly asinine way because of a compiler bug in the alpha version of java //which causes it to try to parallelize the swapping operations and mis-schedules them pA2 = pA2 + pB2; pB2 = pA2 - pB2; pA2 = pA2 - pB2; //pA2=pB2;pB2=temp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = 
(D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = (known[AA]*Math.log(tmpAA) + known[AB]*Math.log(tmpAB) + known[BA]*Math.log(tmpBA) + known[BB]*Math.log(tmpBB) + (double)unknownDH*Math.log(tmpAA*tmpBB + tmpAB*tmpBA))/LN10; } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
| 1,112,239
|
public PairwiseLinkage computeDPrime(int pos1, int pos2){ compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } //check for non-polymorphic markers if (Chromosome.getUnfilteredMarker(pos1).getMAF() == 0 || Chromosome.getUnfilteredMarker(pos2).getMAF() == 0){ return null; } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[Chromosome.getUnfilteredMarker(pos1).getMajor()]=1; marker1num[Chromosome.getUnfilteredMarker(pos1).getMinor()]=2; marker2num[0]=0; marker2num[Chromosome.getUnfilteredMarker(pos2).getMajor()]=1; marker2num[Chromosome.getUnfilteredMarker(pos2).getMinor()]=2; byte a1,a2,b1,b2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables a1 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos1]; a2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; b1 = ((Chromosome) chromosomes.elementAt(++i)).genotypes[pos1]; b2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 >= 5 && a2 >= 5) || (a1 >= 5 && !(a2 == b2)) || (a2 >= 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 >= 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 >= 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = 
twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[101]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + 
(double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10; if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + (double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*Math.log(pA1*pA2) + known[AB]*Math.log(pA1*pB2) + known[BA]*Math.log(pB1*pA2) + known[BB]*Math.log(pB1*pB2) + (double)unknownDH*Math.log(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ //done in this slightly asinine way because of a compiler bug in the alpha version of java //which causes it to try to parallelize the swapping operations and mis-schedules them pA2 = pA2 + pB2; pB2 = pA2 - pB2; pA2 = pA2 - pB2; //pA2=pB2;pB2=temp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = 
(D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = (known[AA]*Math.log(tmpAA) + known[AB]*Math.log(tmpAB) + known[BA]*Math.log(tmpBA) + known[BB]*Math.log(tmpBB) + (double)unknownDH*Math.log(tmpAA*tmpBB + tmpAB*tmpBA))/LN10; } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
|
public PairwiseLinkage computeDPrime(int pos1, int pos2){ compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } //check for non-polymorphic markers if (Chromosome.getUnfilteredMarker(pos1).getMAF() == 0 || Chromosome.getUnfilteredMarker(pos2).getMAF() == 0){ return null; } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[Chromosome.getUnfilteredMarker(pos1).getMajor()]=1; marker1num[Chromosome.getUnfilteredMarker(pos1).getMinor()]=2; marker2num[0]=0; marker2num[Chromosome.getUnfilteredMarker(pos2).getMajor()]=1; marker2num[Chromosome.getUnfilteredMarker(pos2).getMinor()]=2; byte a1,a2,b1,b2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables a1 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos1]; a2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; b1 = ((Chromosome) chromosomes.elementAt(++i)).genotypes[pos1]; b2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 >= 5 && a2 >= 5) || (a1 >= 5 && !(a2 == b2)) || (a2 >= 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 >= 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 >= 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = 
twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[101]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + 
(double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10; if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + (double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*Math.log(pA1*pA2) + known[AB]*Math.log(pA1*pB2) + known[BA]*Math.log(pB1*pA2) + known[BB]*Math.log(pB1*pB2) + (double)unknownDH*Math.log(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ //done in this slightly asinine way because of a compiler bug in the alpha version of java //which causes it to try to parallelize the swapping operations and mis-schedules them pA2 = pA2 + pB2; pB2 = pA2 - pB2; pA2 = pA2 - pB2; //pA2=pB2;pB2=temp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = 
(D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = (known[AA]*Math.log(tmpAA) + known[AB]*Math.log(tmpAB) + known[BA]*Math.log(tmpBA) + known[BB]*Math.log(tmpBB) + (double)unknownDH*Math.log(tmpAA*tmpBB + tmpAB*tmpBA))/LN10; } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
| 1,112,240
|
public Object evaluate(Context context) { return value; }
|
public Object evaluate(JellyContext context) { return value; }
| 1,112,241
|
public static Volume getVolume( String volName ) { Volume vol = null; if ( volumes != null ) { vol = (Volume) volumes.get( volName ); } return vol; }
|
public static Volume getVolume( String volName ) { Volume vol = null; if ( volumes != null ) { vol = (Volume) volumes.get( volName ); } return vol; }
| 1,112,242
|
public Tag getTag() throws Exception { Tag tag = (Tag) tagHolder.get(); if ( tag == null ) { tag = createTag(); if ( tag != null ) { configureTag(tag); tagHolder.set(tag); } } return tag; }
|
public Tag getTag() throws Exception { Tag tag = (Tag) tagHolder.get(); if ( tag == null ) { tag = createTag(); if ( tag != null ) { tagHolder.set(tag); } } return tag; }
| 1,112,244
|
public TableCellRenderer getCellRenderer(int row, int column) { ProfileColumn pc = ProfileColumn.values()[column]; switch(pc) { case DATABASE: case SCHEMA: case CATALOG: case TABLE: case COLUMN: return new SQLObjectRendererFactory(); case RUNDATE: return new DateRendererFactory(); case PERCENT_UNIQUE: case PERCENT_NULL: return new PercentRendererFactory(); case AVERAGE_LENGTH: return new DecimalRendererFactory(); case MIN_VALUE: case MAX_VALUE: case AVERAGE_VALUE: return new ValueRendererFactory(); default: return super.getCellRenderer(row, column); } }
|
public TableCellRenderer getCellRenderer(int row, int column) { int modelColumn = convertColumnIndexToModel(column); ProfileColumn pc = ProfileColumn.values()[modelColumn]; switch(pc) { case DATABASE: case SCHEMA: case CATALOG: case TABLE: case COLUMN: return new SQLObjectRendererFactory(); case RUNDATE: return new DateRendererFactory(); case PERCENT_UNIQUE: case PERCENT_NULL: return new PercentRendererFactory(); case AVERAGE_LENGTH: return new DecimalRendererFactory(); case MIN_VALUE: case MAX_VALUE: case AVERAGE_VALUE: return new ValueRendererFactory(); default: return super.getCellRenderer(row, column); } }
| 1,112,246
|
void browse(int browseType){ String name; String markerInfoName = ""; HaploView h = (HaploView) this.getParent(); h.fc.setSelectedFile(new File("")); int returned = h.fc.showOpenDialog(this); if (returned != JFileChooser.APPROVE_OPTION) return; File file = h.fc.getSelectedFile(); if (browseType == GENO){ name = file.getName(); genoFileField.setText(file.getParent()+File.separator+name); if(infoFileField.getText().equals("")){ //baseName should be everything but the final ".XXX" extension StringTokenizer st = new StringTokenizer(name,"."); String baseName = st.nextToken(); int numPieces = st.countTokens()-1; for (int i = 0; i < numPieces; i++){ baseName = baseName.concat(".").concat(st.nextToken()); } System.out.println(baseName); //check for info file for original file sample.haps //either sample.haps.info or sample.info File maybeMarkers1 = new File(file.getParent(), name + MARKER_DATA_EXT); File maybeMarkers2 = new File(file.getParent(), baseName + MARKER_DATA_EXT); if (maybeMarkers1.exists()){ markerInfoName = maybeMarkers1.getName(); }else if (maybeMarkers2.exists()){ markerInfoName = maybeMarkers2.getName(); }else{ return; } infoFileField.setText(file.getParent()+File.separator+markerInfoName); } }else if (browseType==INFO){ markerInfoName = file.getName(); infoFileField.setText(file.getParent()+File.separator+markerInfoName); } }
|
void browse(int browseType){ String name; String markerInfoName = ""; HaploView h = (HaploView) this.getParent(); h.fc.setSelectedFile(new File("")); int returned = h.fc.showOpenDialog(this); if (returned != JFileChooser.APPROVE_OPTION) return; File file = h.fc.getSelectedFile(); if (browseType == GENO){ name = file.getName(); genoFileField.setText(file.getParent()+File.separator+name); if(infoFileField.getText().equals("")){ //baseName should be everything but the final ".XXX" extension StringTokenizer st = new StringTokenizer(name,"."); String baseName = st.nextToken(); int numPieces = st.countTokens()-1; for (int i = 0; i < numPieces; i++){ baseName = baseName.concat(".").concat(st.nextToken()); } //check for info file for original file sample.haps //either sample.haps.info or sample.info File maybeMarkers1 = new File(file.getParent(), name + MARKER_DATA_EXT); File maybeMarkers2 = new File(file.getParent(), baseName + MARKER_DATA_EXT); if (maybeMarkers1.exists()){ markerInfoName = maybeMarkers1.getName(); }else if (maybeMarkers2.exists()){ markerInfoName = maybeMarkers2.getName(); }else{ return; } infoFileField.setText(file.getParent()+File.separator+markerInfoName); } }else if (browseType==INFO){ markerInfoName = file.getName(); infoFileField.setText(file.getParent()+File.separator+markerInfoName); } }
| 1,112,247
|
public void run(Context context, XMLOutput output) throws Exception { // #### note this mechanism does not work properly for arbitrarily // #### nested dynamic tags. A better way is required. Tag tag = findAncestorWithClass(this, DynamicTag.class); if ( tag == null ) { // throw new JellyException( "Cannot invoke body, no dynamic tag is defined in this block" ); log.warn( "Cannot invoke body, no dynamic tag is defined in this block" ); } else { tag.getBody().run(context, output); } }
|
public void run(Context context, XMLOutput output) throws Exception { // #### note this mechanism does not work properly for arbitrarily // #### nested dynamic tags. A better way is required. Tag tag = findAncestorWithClass(this, DynamicTag.class); if ( tag == null ) { // throw new JellyException( "Cannot invoke body, no dynamic tag is defined in this block" ); log.warn( "Cannot invoke body, no dynamic tag is defined in this block" ); } else { tag.getBody().run(context, output); } }
| 1,112,248
|
public void run(Context context, XMLOutput output) throws Exception { // #### note this mechanism does not work properly for arbitrarily // #### nested dynamic tags. A better way is required. Tag tag = findAncestorWithClass(this, DynamicTag.class); if ( tag == null ) { // throw new JellyException( "Cannot invoke body, no dynamic tag is defined in this block" ); log.warn( "Cannot invoke body, no dynamic tag is defined in this block" ); } else { tag.getBody().run(context, output); } }
|
public void run(Context context, XMLOutput output) throws Exception { // #### note this mechanism does not work properly for arbitrarily // #### nested dynamic tags. A better way is required. Tag tag = findAncestorWithClass(this, DynamicTag.class); if ( tag == null ) { // throw new JellyException( "Cannot invoke body, no dynamic tag is defined in this block" ); log.warn( "Cannot invoke body, no dynamic tag is defined in this block" ); } else { Tag tag = findAncestorWithClass(this, DynamicTag.class); if ( tag == null ) { throw new JellyException( "Cannot invoke body, no dynamic tag is defined in this block" ); } else { tag.getBody().run(context, output); } } }
| 1,112,249
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.