bugged
stringlengths
6
599k
fixed
stringlengths
10
599k
__index_level_0__
int64
0
1.13M
public void parsePhasedData(String[] info) throws IOException, PedFileException{ if (info[4].equals("")){ Chromosome.setDataChrom("none"); }else{ Chromosome.setDataChrom("chr" + info[4]); } Chromosome.setDataBuild("ncbi_b35"); Vector sampleData = new Vector(); Vector legendData = new Vector(); Vector legendMarkers = new Vector(); Vector legendPositions = new Vector(); Individual ind = null; byte[] byteDataT = new byte[0]; byte[] byteDataU = new byte[0]; this.allIndividuals = new Vector(); File phasedFile = new File(info[0]); File sampleFile = new File(info[1]); File legendFile = new File(info[2]); if (phasedFile.length() < 1){ throw new PedFileException("Genotypes file is empty or non-existent: " + phasedFile.getName()); }else if (sampleFile.length() < 1){ throw new PedFileException("Sample file is empty or non-existent: " + sampleFile.getName()); }else if (legendFile.length() < 1){ throw new PedFileException("Legend file is empty or non-existent: " + legendFile.getName()); } //read in the individual ids data. 
try{ BufferedReader sampleBuffReader; if (Options.getGzip()){ FileInputStream sampleFis = new FileInputStream(sampleFile); GZIPInputStream sampleInputStream = new GZIPInputStream(sampleFis); sampleBuffReader = new BufferedReader(new InputStreamReader(sampleInputStream)); }else{ FileReader sampleReader = new FileReader(sampleFile); sampleBuffReader = new BufferedReader(sampleReader); } String sampleLine; while((sampleLine = sampleBuffReader.readLine())!=null){ StringTokenizer sampleTokenizer = new StringTokenizer(sampleLine); sampleData.add(sampleTokenizer.nextToken()); } }catch(NoSuchElementException nse){ throw new PedFileException("File format error: " + sampleFile.getName()); } //read in the legend data try{ BufferedReader legendBuffReader; if (Options.getGzip()){ FileInputStream legendFis = new FileInputStream(legendFile); GZIPInputStream legendInputStream = new GZIPInputStream(legendFis); legendBuffReader = new BufferedReader(new InputStreamReader(legendInputStream)); }else{ FileReader legendReader = new FileReader(legendFile); legendBuffReader = new BufferedReader(legendReader); } String legendLine; String zero, one; while((legendLine = legendBuffReader.readLine())!=null){ StringTokenizer legendSt = new StringTokenizer(legendLine); String markerid = legendSt.nextToken(); if (markerid.equals("marker")){ //skip header continue; } legendMarkers.add(markerid); legendPositions.add(legendSt.nextToken()); byte[] legendBytes = new byte[2]; zero = legendSt.nextToken(); one = legendSt.nextToken(); if (zero.equalsIgnoreCase("A")){ legendBytes[0] = 1; }else if (zero.equalsIgnoreCase("C")){ legendBytes[0] = 2; }else if (zero.equalsIgnoreCase("G")){ legendBytes[0] = 3; }else if (zero.equalsIgnoreCase("T")){ legendBytes[0] = 4; }else{ throw new PedFileException("Invalid allele: " + zero); } if (one.equalsIgnoreCase("A")){ legendBytes[1] = 1; }else if (one.equalsIgnoreCase("C")){ legendBytes[1] = 2; }else if (one.equalsIgnoreCase("G")){ legendBytes[1] = 3; }else if 
(one.equalsIgnoreCase("T")){ legendBytes[1] = 4; }else{ throw new PedFileException("Invalid allele: " + one); } legendData.add(legendBytes); } hminfo = new String[legendPositions.size()][2]; for (int i = 0; i < legendPositions.size(); i++){ //marker name. hminfo[i][0] = (String)legendMarkers.get(i); //marker position. hminfo[i][1] = (String)legendPositions.get(i); } }catch(NoSuchElementException nse){ throw new PedFileException("File format error: " + legendFile.getName()); } //read in the phased data. try{ BufferedReader phasedBuffReader; if (Options.getGzip()){ FileInputStream phasedFis = new FileInputStream(phasedFile); GZIPInputStream phasedInputStream = new GZIPInputStream(phasedFis); phasedBuffReader = new BufferedReader(new InputStreamReader(phasedInputStream)); }else{ FileReader phasedReader = new FileReader(phasedFile); phasedBuffReader = new BufferedReader(phasedReader); } String phasedLine; int columns = 0; String token; boolean even = false; int iterator = 0; while((phasedLine = phasedBuffReader.readLine()) != null){ StringTokenizer phasedSt = new StringTokenizer(phasedLine); columns = phasedSt.countTokens(); if(even){ iterator++; }else{ //Only set up a new individual every 2 lines. ind = new Individual(columns, true); try{ ind.setIndividualID((String)sampleData.get(iterator)); }catch (ArrayIndexOutOfBoundsException e){ throw new PedFileException("File error: Sample file is missing individual IDs"); } if (columns != legendData.size()){ throw new PedFileException("File error: invalid number of markers on Individual " + ind.getIndividualID()); } String details = (String)hapMapTranslate.get(ind.getIndividualID()); //exception in case of wierd compression combos in input files if (details == null){ throw new PedFileException("File format error: " + sampleFile.getName()); } StringTokenizer dt = new StringTokenizer(details, "\n\t\" \""); ind.setFamilyID(dt.nextToken().trim()); //skip individualID since we already have it. 
dt.nextToken(); ind.setDadID(dt.nextToken()); ind.setMomID(dt.nextToken()); try { ind.setGender(Integer.parseInt(dt.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(dt.nextToken().trim())); }catch(NumberFormatException nfe) { throw new PedFileException("File error: invalid gender or affected status for indiv " + ind.getIndividualID()); } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } int index = 0; if (!even){ byteDataT = new byte[columns]; }else{ byteDataU = new byte[columns]; } while(phasedSt.hasMoreTokens()){ token = phasedSt.nextToken(); if (!even){ if (token.equalsIgnoreCase("0")){ byteDataT[index] = ((byte[])legendData.get(index))[0]; }else if (token.equalsIgnoreCase("1")){ byteDataT[index] = ((byte[])legendData.get(index))[1]; }else { throw new PedFileException("File format error: " + phasedFile.getName()); } }else{ if (token.equalsIgnoreCase("0")){ byteDataU[index] = ((byte[])legendData.get(index))[0]; }else if (token.equalsIgnoreCase("1")){ byteDataU[index] = ((byte[])legendData.get(index))[1]; }else { throw new PedFileException("File format error: " + phasedFile.getName()); } } index++; } if (even){ for(int i=0; i < columns; i++){ ind.addMarker(byteDataT[i], byteDataU[i]); } } even = !even; } }catch(NoSuchElementException nse){ throw new PedFileException("File format error: " + phasedFile.getName()); } }
public void parsePhasedData(String[] info) throws IOException, PedFileException{ if (info[4].equals("")){ Chromosome.setDataChrom("none"); }else{ Chromosome.setDataChrom("chr" + info[4]); } Chromosome.setDataBuild("ncbi_b35"); Vector sampleData = new Vector(); Vector legendData = new Vector(); Vector legendMarkers = new Vector(); Vector legendPositions = new Vector(); Individual ind = null; byte[] byteDataT = new byte[0]; byte[] byteDataU = new byte[0]; this.allIndividuals = new Vector(); File phasedFile = new File(info[0]); File sampleFile = new File(info[1]); File legendFile = new File(info[2]); if (phasedFile.length() < 1){ throw new PedFileException("Genotypes file is empty or non-existent: " + phasedFile.getName()); }else if (sampleFile.length() < 1){ throw new PedFileException("Sample file is empty or non-existent: " + sampleFile.getName()); }else if (legendFile.length() < 1){ throw new PedFileException("Legend file is empty or non-existent: " + legendFile.getName()); } //read in the individual ids data. 
try{ BufferedReader sampleBuffReader; if (Options.getGzip()){ FileInputStream sampleFis = new FileInputStream(sampleFile); GZIPInputStream sampleInputStream = new GZIPInputStream(sampleFis); sampleBuffReader = new BufferedReader(new InputStreamReader(sampleInputStream)); }else{ FileReader sampleReader = new FileReader(sampleFile); sampleBuffReader = new BufferedReader(sampleReader); } String sampleLine; while((sampleLine = sampleBuffReader.readLine())!=null){ StringTokenizer sampleTokenizer = new StringTokenizer(sampleLine); sampleData.add(sampleTokenizer.nextToken()); } }catch(NoSuchElementException nse){ throw new PedFileException("File format error: " + sampleFile.getName()); } //read in the legend data try{ BufferedReader legendBuffReader; if (Options.getGzip()){ FileInputStream legendFis = new FileInputStream(legendFile); GZIPInputStream legendInputStream = new GZIPInputStream(legendFis); legendBuffReader = new BufferedReader(new InputStreamReader(legendInputStream)); }else{ FileReader legendReader = new FileReader(legendFile); legendBuffReader = new BufferedReader(legendReader); } String legendLine; String zero, one; while((legendLine = legendBuffReader.readLine())!=null){ StringTokenizer legendSt = new StringTokenizer(legendLine); String markerid = legendSt.nextToken(); if (markerid.equals("marker")){ //skip header continue; } legendMarkers.add(markerid); legendPositions.add(legendSt.nextToken()); byte[] legendBytes = new byte[2]; zero = legendSt.nextToken(); one = legendSt.nextToken(); if (zero.equalsIgnoreCase("A")){ legendBytes[0] = 1; }else if (zero.equalsIgnoreCase("C")){ legendBytes[0] = 2; }else if (zero.equalsIgnoreCase("G")){ legendBytes[0] = 3; }else if (zero.equalsIgnoreCase("T")){ legendBytes[0] = 4; }else{ throw new PedFileException("Invalid allele: " + zero); } if (one.equalsIgnoreCase("A")){ legendBytes[1] = 1; }else if (one.equalsIgnoreCase("C")){ legendBytes[1] = 2; }else if (one.equalsIgnoreCase("G")){ legendBytes[1] = 3; }else if 
(one.equalsIgnoreCase("T")){ legendBytes[1] = 4; }else{ throw new PedFileException("Invalid allele: " + one); } legendData.add(legendBytes); } hminfo = new String[legendPositions.size()][2]; for (int i = 0; i < legendPositions.size(); i++){ //marker name. hminfo[i][0] = (String)legendMarkers.get(i); //marker position. hminfo[i][1] = (String)legendPositions.get(i); } }catch(NoSuchElementException nse){ throw new PedFileException("File format error: " + legendFile.getName()); } //read in the phased data. try{ BufferedReader phasedBuffReader; if (Options.getGzip()){ FileInputStream phasedFis = new FileInputStream(phasedFile); GZIPInputStream phasedInputStream = new GZIPInputStream(phasedFis); phasedBuffReader = new BufferedReader(new InputStreamReader(phasedInputStream)); }else{ FileReader phasedReader = new FileReader(phasedFile); phasedBuffReader = new BufferedReader(phasedReader); } String phasedLine; int columns = 0; String token; boolean even = false; int iterator = 0; while((phasedLine = phasedBuffReader.readLine()) != null){ StringTokenizer phasedSt = new StringTokenizer(phasedLine); columns = phasedSt.countTokens(); if(even){ iterator++; }else{ //Only set up a new individual every 2 lines. ind = new Individual(columns, true); try{ ind.setIndividualID((String)sampleData.get(iterator)); }catch (ArrayIndexOutOfBoundsException e){ throw new PedFileException("File error: Sample file is missing individual IDs"); } if (columns != legendData.size()){ throw new PedFileException("File error: invalid number of markers on Individual " + ind.getIndividualID()); } String details = (String)hapMapTranslate.get(ind.getIndividualID()); //exception in case of wierd compression combos in input files if (details == null){ throw new PedFileException("File format error: " + sampleFile.getName()); } StringTokenizer dt = new StringTokenizer(details, "\n\t\" \""); ind.setFamilyID(dt.nextToken().trim()); //skip individualID since we already have it. 
dt.nextToken(); ind.setDadID(dt.nextToken()); ind.setMomID(dt.nextToken()); try { ind.setGender(Integer.parseInt(dt.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(dt.nextToken().trim())); }catch(NumberFormatException nfe) { throw new PedFileException("File error: invalid gender or affected status for indiv " + ind.getIndividualID()); } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } int index = 0; if (!even){ byteDataT = new byte[columns]; }else{ byteDataU = new byte[columns]; } while(phasedSt.hasMoreTokens()){ token = phasedSt.nextToken(); if (!even){ if (token.equalsIgnoreCase("0")){ byteDataT[index] = ((byte[])legendData.get(index))[0]; }else if (token.equalsIgnoreCase("1")){ byteDataT[index] = ((byte[])legendData.get(index))[1]; }else { throw new PedFileException("File format error: " + phasedFile.getName()); } }else{ if (token.equalsIgnoreCase("0")){ byteDataU[index] = ((byte[])legendData.get(index))[0]; }else if (token.equalsIgnoreCase("1")){ byteDataU[index] = ((byte[])legendData.get(index))[1]; }else { throw new PedFileException("File format error: " + phasedFile.getName()); } } index++; } if (even){ for(int i=0; i < columns; i++){ ind.addMarker(byteDataT[i], byteDataU[i]); } } even = !even; } }catch(NoSuchElementException nse){ throw new PedFileException("File format error: " + phasedFile.getName()); } }
1,112,499
void readPedGenotypes(String[] f, int type){ //input is a 3 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //inputOptions[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) //type is either 3 or 4 for ped and hapmap files respectively inputOptions = f; File inFile = new File(inputOptions[0]); try { if (inFile.length() < 1){ throw new HaploViewException("Pedfile is empty or nonexistent: " + inFile.getName()); } theData = new HaploData(assocTest); theData.linkageToChrom(inFile, type); processData(theData.getPedFile().getHMInfo()); checkPanel = new CheckDataPanel(theData.getPedFile()); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
void readPedGenotypes(String[] f, int type){ //input is a 3 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //inputOptions[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) //type is either 3 or 4 for ped and hapmap files respectively inputOptions = f; File inFile = new File(inputOptions[0]); try { if (inFile.length() < 1){ throw new HaploViewException("Pedfile is empty or nonexistent: " + inFile.getName()); } theData = new HaploData(assocTest); theData.linkageToChrom(inFile, type); checkPanel = new CheckDataPanel(theData.getPedFile()); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
1,112,501
public void paintComponent(Graphics g){ DPrimeTable dPrimeTable = theData.dpTable; if (Chromosome.getSize() < 2){ //if there zero or only one valid marker return; } Vector blocks = theData.blocks; Rectangle visRect = getVisibleRect(); //deal with zooming if (chartSize.getWidth() > (3*visRect.width)){ showWM = true; }else{ showWM = false; } boolean printValues = true; if (zoomLevel != 0 || Options.getPrintWhat() == LD_NONE){ printValues = false; } printWhat = Options.getPrintWhat(); Graphics2D g2 = (Graphics2D) g; Dimension size = getSize(); Dimension pref = getPreferredSize(); g2.setColor(BG_GREY); //if it's a big dataset, resize properly, if it's small make sure to fill whole background if (size.height < pref.height){ g2.fillRect(0,0,pref.width,pref.height); setSize(pref); }else{ g2.fillRect(0,0,size.width, size.height); } g2.setColor(Color.black); //okay so this dumb if block is to prevent the ugly repainting //bug when loading markers after the data are already being displayed, //results in a little off-centering for small datasets, but not too bad. if (!forExport){ if (!theData.infoKnown){ g2.translate((size.width - pref.width) / 2, (size.height - pref.height) / 2); } else { g2.translate((size.width - pref.width) / 2, 0); } } FontMetrics boxFontMetrics = g2.getFontMetrics(boxFont); int diamondX[] = new int[4]; int diamondY[] = new int[4]; Polygon diamond; double lineSpan = alignedPositions[alignedPositions.length-1] - alignedPositions[0]; long minpos = Chromosome.getMarker(0).getPosition(); long maxpos = Chromosome.getMarker(Chromosome.getSize()-1).getPosition(); double spanpos = maxpos - minpos; //See http://www.hapmap.org/cgi-perl/gbrowse/gbrowse_img //for more info on GBrowse img. 
int imgHeight = 0; if (Options.isGBrowseShown() && Chromosome.getDataChrom() != null && !Chromosome.getDataChrom().equalsIgnoreCase("none")){ g2.drawImage(gBrowseImage,H_BORDER-GBROWSE_MARGIN,V_BORDER,this); imgHeight = gBrowseImage.getHeight(this) + TRACK_GAP; // get height so we can shift everything down } left = H_BORDER; top = V_BORDER + imgHeight; // push the haplotype display down to make room for gbrowse image. if (forExport){ left -= exportStart * boxSize; } FontMetrics metrics; int ascent; g2.setFont(boldMarkerNameFont); metrics = g2.getFontMetrics(); ascent = metrics.getAscent(); //the following values are the bounds on the boxes we want to //display given that the current window is 'visRect' lowX = getBoundaryMarker(visRect.x-clickXShift-(visRect.y +visRect.height-clickYShift)) - 1; highX = getBoundaryMarker(visRect.x + visRect.width); lowY = getBoundaryMarker((visRect.x-clickXShift)+(visRect.y-clickYShift)) - 1; highY = getBoundaryMarker((visRect.x-clickXShift+visRect.width) + (visRect.y-clickYShift+visRect.height)); if (lowX < 0) { lowX = 0; } if (highX > Chromosome.getSize()-1){ highX = Chromosome.getSize()-1; } if (lowY < lowX+1){ lowY = lowX+1; } if (highY > Chromosome.getSize()){ highY = Chromosome.getSize(); } if (forExport){ lowX = exportStart; lowY = exportStart; highX = exportStop; highY = exportStop+1; } if (theData.trackExists){ //draw the analysis track above where the marker positions will be marked JFreeChart jfc = ChartFactory.createXYLineChart(null,null,null, theData.analysisTracks, PlotOrientation.VERTICAL,false,false,false); //customise the analysis track XYPlot xyp = (XYPlot)jfc.getPlot(); //no x axis, since it takes up too much space. xyp.getDomainAxis().setAxisLineVisible(false); xyp.getDomainAxis().setTickLabelsVisible(false); xyp.getDomainAxis().setTickMarksVisible(false); //x range must align with markers xyp.getDomainAxis().setRange(minpos,maxpos); //size of the axis and graph inset double axisWidth = xyp.getRangeAxis(). 
reserveSpace(g2,xyp,new Rectangle(0,TRACK_HEIGHT),RectangleEdge.LEFT,null).getLeft(); RectangleInsets insets = xyp.getInsets(); jfc.setBackgroundPaint(BG_GREY); BufferedImage bi = jfc.createBufferedImage( (int)(lineSpan + axisWidth + insets.getLeft() + insets.getRight()),TRACK_HEIGHT); //hide the axis in the margin so everything lines up. g2.drawImage(bi,(int)(left - axisWidth - insets.getLeft()),top,this); top += TRACK_HEIGHT + TRACK_GAP; } if (theData.infoKnown) { Color green = new Color(0, 127, 0); g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); //// draw the marker locations g2.setStroke(thinnerStroke); g2.setColor(Color.white); g2.fill(new Rectangle2D.Double(left+1, top+1, lineSpan-1, TICK_HEIGHT-1)); g2.setColor(Color.black); g2.draw(new Rectangle2D.Double(left, top, lineSpan, TICK_HEIGHT)); for (int i = 0; i < Chromosome.getSize(); i++){ double pos = (Chromosome.getMarker(i).getPosition() - minpos) / spanpos; double xx = left + lineSpan*pos; // if we're zoomed, use the line color to indicate whether there is extra data available // (since the marker names are not displayed when zoomed) if (Chromosome.getMarker(i).getExtra() != null && zoomLevel != 0) g2.setColor(green); //draw tick g2.setStroke(thickerStroke); g2.draw(new Line2D.Double(xx, top, xx, top + TICK_HEIGHT)); if (Chromosome.getMarker(i).getExtra() != null && zoomLevel != 0) g2.setStroke(thickerStroke); else g2.setStroke(thinnerStroke); //draw connecting line g2.draw(new Line2D.Double(xx, top + TICK_HEIGHT, left + alignedPositions[i], top+TICK_BOTTOM)); if (Chromosome.getMarker(i).getExtra() != null && zoomLevel != 0) g2.setColor(Color.black); } top += TICK_BOTTOM + TICK_HEIGHT; //// draw the marker names if (printMarkerNames){ widestMarkerName = metrics.stringWidth(Chromosome.getMarker(0).getDisplayName()); for (int x = 1; x < Chromosome.getSize(); x++) { int thiswide = metrics.stringWidth(Chromosome.getMarker(x).getDisplayName()); if (thiswide > 
widestMarkerName) widestMarkerName = thiswide; } g2.translate(left, top + widestMarkerName); g2.rotate(-Math.PI / 2.0); boolean foundSNP = false; for (int x = 0; x < Chromosome.getSize(); x++) { if (theData.isInBlock[x]){ g2.setFont(boldMarkerNameFont); }else{ g2.setFont(markerNameFont); } if (Chromosome.getMarker(x).getExtra() != null) g2.setColor(green); if (theHV != null){ if (Chromosome.getMarker(x).getDisplayName().equals(theHV.getChosenMarker())){ g2.setColor(Color.blue); foundSNP = true; } } g2.drawString(Chromosome.getMarker(x).getDisplayName(),(float)TEXT_GAP, (float)alignedPositions[x] + ascent/3); if (Chromosome.getMarker(x).getExtra() != null) g2.setColor(Color.black); if (foundSNP){ g2.setColor(Color.BLACK); foundSNP = false; } } g2.rotate(Math.PI / 2.0); g2.translate(-left, -(top + widestMarkerName)); // move everybody down top += widestMarkerName + TEXT_GAP; } g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); } top += blockDispHeight; //// draw the marker numbers if (printMarkerNames){ g2.setFont(markerNumFont); metrics = g2.getFontMetrics(); ascent = metrics.getAscent(); for (int x = 0; x < Chromosome.getSize(); x++) { String mark = String.valueOf(Chromosome.realIndex[x] + 1); g2.drawString(mark, (float)(left + alignedPositions[x] - metrics.stringWidth(mark)/2), (float)(top + ascent)); } top += boxRadius/2; // give a little space between numbers and boxes } //clickxshift and clickyshift are used later to translate from x,y coords //to the pair of markers comparison at those coords if (!(theData.infoKnown)){ clickXShift = left + (size.width-pref.width)/2; clickYShift = top + (size.height - pref.height)/2; } else { clickXShift = left + (size.width-pref.width)/2; clickYShift = top; } // draw table column by column for (int x = lowX; x < highX; x++) { //always draw the fewest possible boxes if (lowY < x+1){ lowY = x+1; } for (int y = lowY; y < highY; y++) { if (dPrimeTable.getLDStats(x,y) == null){ continue; } 
double d = dPrimeTable.getLDStats(x,y).getDPrime(); double r = dPrimeTable.getLDStats(x,y).getRSquared(); //double l = dPrimeTable.getLDStats(x,y).getLOD(); Color boxColor = dPrimeTable.getLDStats(x,y).getColor(); // draw markers above int xx = left + (int)((alignedPositions[x] + alignedPositions[y])/2); int yy = top + (int)((alignedPositions[y] - alignedPositions[x]) / 2); diamondX[0] = xx; diamondY[0] = yy - boxRadius; diamondX[1] = xx + boxRadius; diamondY[1] = yy; diamondX[2] = xx; diamondY[2] = yy + boxRadius; diamondX[3] = xx - boxRadius; diamondY[3] = yy; diamond = new Polygon(diamondX, diamondY, 4); g2.setColor(boxColor); g2.fillPolygon(diamond); if(printValues){ g2.setFont(boxFont); ascent = boxFontMetrics.getAscent(); int val; if (printWhat == D_PRIME){ val = (int) (d * 100); }else if (printWhat == R_SQ){ val = (int) (r * 100); }else{ val = 100; } g2.setColor((val < 50) ? Color.gray : Color.black); if (boxColor.getGreen() < 100 && boxColor.getBlue() < 100 && boxColor.getRed() < 100){ g2.setColor(Color.white); } if (val != 100) { String valu = String.valueOf(val); int widf = boxFontMetrics.stringWidth(valu); g.drawString(valu, xx - widf/2, yy + ascent/2); } } } } //highlight blocks g2.setFont(markerNameFont); ascent = g2.getFontMetrics().getAscent(); //g.setColor(new Color(153,255,153)); g2.setColor(Color.black); //g.setColor(new Color(51,153,51)); for (int i = 0; i < blocks.size(); i++){ int[] theBlock = (int[])blocks.elementAt(i); int first = theBlock[0]; int last = theBlock[theBlock.length-1]; //big vee around whole thing g2.setStroke(fatStroke); g2.draw(new Line2D.Double(left + alignedPositions[first] - boxRadius, top, left + (alignedPositions[first] + alignedPositions[last])/2, top + (alignedPositions[last] - alignedPositions[first])/2 + boxRadius)); g2.draw(new Line2D.Double(left + (alignedPositions[first] + alignedPositions[last])/2, top + (alignedPositions[last] - alignedPositions[first])/2 + boxRadius, left + alignedPositions[last] + boxRadius, 
top)); for (int j = first; j < last; j++){ g2.setStroke(fatStroke); if (theData.isInBlock[j]){ g2.draw(new Line2D.Double(left+alignedPositions[j]-boxSize/2, top-blockDispHeight, left+alignedPositions[j+1]-boxSize/2, top-blockDispHeight)); }else{ g2.draw(new Line2D.Double(left + alignedPositions[j] + boxSize/2, top-blockDispHeight, left+alignedPositions[j+1]-boxSize/2, top-blockDispHeight)); g2.setStroke(dashedFatStroke); g2.draw(new Line2D.Double(left+alignedPositions[j] - boxSize/2, top-blockDispHeight, left+alignedPositions[j] + boxSize/2, top-blockDispHeight)); } } //cap off the end of the block g2.setStroke(fatStroke); g2.draw(new Line2D.Double(left+alignedPositions[last]-boxSize/2, top-blockDispHeight, left+alignedPositions[last]+boxSize/2, top-blockDispHeight)); //lines to connect to block display g2.setStroke(fatStroke); g2.draw(new Line2D.Double(left + alignedPositions[first]-boxSize/2, top-1, left+alignedPositions[first]-boxSize/2, top-blockDispHeight)); g2.draw(new Line2D.Double(left+alignedPositions[last]+boxSize/2, top-1, left+alignedPositions[last]+boxSize/2, top-blockDispHeight)); if (printMarkerNames){ String labelString = new String ("Block " + (i+1)); if (theData.infoKnown){ long blockSize = Chromosome.getMarker(last).getPosition() - Chromosome.getMarker(first).getPosition(); labelString += " (" + blockSize/1000 + " kb)"; } g2.drawString(labelString, (float)(left+alignedPositions[first]-boxSize/2+TEXT_GAP), (float)(top-boxSize/3)); } } g2.setStroke(thickerStroke); if (showWM && !forExport){ //dataset is big enough to require worldmap if (wmMaxWidth == 0){ wmMaxWidth = visRect.width/3; } double scalefactor; scalefactor = (double)(chartSize.width)/wmMaxWidth; double prefBoxSize = boxSize/(scalefactor*((double)wmMaxWidth/(double)(wmMaxWidth))); //stick WM_BD in the middle of the blank space at the top of the worldmap final int WM_BD_GAP = (int)(infoHeight/(scalefactor*2)); final int WM_BD_HEIGHT = 2; CompoundBorder wmBorder = new 
CompoundBorder(BorderFactory.createRaisedBevelBorder(), BorderFactory.createLoweredBevelBorder()); if (noImage){ //first time through draw a worldmap if dataset is big: worldmap = new BufferedImage((int)(chartSize.width/scalefactor)+wmBorder.getBorderInsets(this).left*2, (int)(chartSize.height/scalefactor)+wmBorder.getBorderInsets(this).top*2, BufferedImage.TYPE_3BYTE_BGR); Graphics gw = worldmap.getGraphics(); Graphics2D gw2 = (Graphics2D)(gw); gw2.setColor(BG_GREY); gw2.fillRect(1,1,worldmap.getWidth()-1,worldmap.getHeight()-1); //make a pretty border gw2.setColor(Color.black); wmBorder.paintBorder(this,gw2,0,0,worldmap.getWidth(),worldmap.getHeight()); wmInteriorRect = wmBorder.getInteriorRectangle(this,0,0,worldmap.getWidth(), worldmap.getHeight()); float[] smallDiamondX = new float[4]; float[] smallDiamondY = new float[4]; GeneralPath gp; for (int x = 0; x < Chromosome.getSize()-1; x++){ for (int y = x+1; y < Chromosome.getSize(); y++){ if (dPrimeTable.getLDStats(x,y) == null){ continue; } double xx = ((alignedPositions[y] + alignedPositions[x])/(scalefactor*2)) + wmBorder.getBorderInsets(this).left; double yy = ((alignedPositions[y] - alignedPositions[x] + infoHeight*2)/(scalefactor*2)) + wmBorder.getBorderInsets(this).top; smallDiamondX[0] = (float)xx; smallDiamondY[0] = (float)(yy - prefBoxSize/2); smallDiamondX[1] = (float)(xx + prefBoxSize/2); smallDiamondY[1] = (float)yy; smallDiamondX[2] = (float)xx; smallDiamondY[2] = (float)(yy + prefBoxSize/2); smallDiamondX[3] = (float)(xx - prefBoxSize/2); smallDiamondY[3] = (float)yy; gp = new GeneralPath(GeneralPath.WIND_EVEN_ODD, smallDiamondX.length); gp.moveTo(smallDiamondX[0],smallDiamondY[0]); for (int i = 1; i < smallDiamondX.length; i++){ gp.lineTo(smallDiamondX[i], smallDiamondY[i]); } gp.closePath(); gw2.setColor(dPrimeTable.getLDStats(x,y).getColor()); gw2.fill(gp); } } noImage = false; } //draw block display in worldmap Graphics gw = worldmap.getGraphics(); Graphics2D gw2 = (Graphics2D)(gw); 
gw2.setColor(BG_GREY); gw2.fillRect(wmBorder.getBorderInsets(this).left, wmBorder.getBorderInsets(this).top+WM_BD_GAP, wmInteriorRect.width, WM_BD_HEIGHT); gw2.setColor(Color.black); boolean even = true; for (int i = 0; i < blocks.size(); i++){ int first = ((int[])blocks.elementAt(i))[0]; int last = ((int[])blocks.elementAt(i))[((int[])blocks.elementAt(i)).length-1]; int voffset; if (even){ voffset = 0; }else{ voffset = WM_BD_HEIGHT/2; } gw2.fillRect(wmBorder.getBorderInsets(this).left - (int)prefBoxSize/2 + (int)(alignedPositions[first]/scalefactor), wmBorder.getBorderInsets(this).top+voffset+WM_BD_GAP, (int)(prefBoxSize + (alignedPositions[last] - alignedPositions[first])/scalefactor), WM_BD_HEIGHT/2); even = !even; } wmResizeCorner = new Rectangle(visRect.x + worldmap.getWidth() - (worldmap.getWidth()-wmInteriorRect.width)/2, visRect.y + visRect.height - worldmap.getHeight(), (worldmap.getWidth()-wmInteriorRect.width)/2, (worldmap.getHeight() -wmInteriorRect.height)/2); g2.drawImage(worldmap,visRect.x, visRect.y + visRect.height - worldmap.getHeight(), this); wmInteriorRect.x = visRect.x + (worldmap.getWidth() - wmInteriorRect.width)/2; wmInteriorRect.y = visRect.y+visRect.height-worldmap.getHeight() + (worldmap.getHeight() - wmInteriorRect.height)/2; //draw the outline of the viewport g2.setColor(Color.black); double hRatio = wmInteriorRect.getWidth()/pref.getWidth(); double vRatio = wmInteriorRect.getHeight()/pref.getHeight(); int hBump = worldmap.getWidth()-wmInteriorRect.width; int vBump = worldmap.getHeight()-wmInteriorRect.height; //bump a few pixels to avoid drawing on the border g2.drawRect((int)(visRect.x*hRatio)+hBump/2+visRect.x, (int)(visRect.y*vRatio)+vBump/2+(visRect.y + visRect.height - worldmap.getHeight()), (int)(visRect.width*hRatio), (int)(visRect.height*vRatio)); } //see if the user has right-clicked to popup some marker info if(popupDrawRect != null){ //dumb bug where little datasets popup the box in the wrong place int smallDatasetSlopH = 
0; int smallDatasetSlopV = 0; if (pref.getHeight() < visRect.height){ smallDatasetSlopV = (int)(visRect.height - pref.getHeight())/2; } if (pref.getWidth() < visRect.width){ smallDatasetSlopH = (int)(visRect.width - pref.getWidth())/2; } g2.setColor(Color.white); g2.fillRect(popupDrawRect.x+1-smallDatasetSlopH, popupDrawRect.y+1-smallDatasetSlopV, popupDrawRect.width-1, popupDrawRect.height-1); g2.setColor(Color.black); g2.drawRect(popupDrawRect.x-smallDatasetSlopH, popupDrawRect.y-smallDatasetSlopV, popupDrawRect.width, popupDrawRect.height); g.setFont(popupFont); for (int x = 0; x < displayStrings.size(); x++){ g.drawString((String)displayStrings.elementAt(x),popupDrawRect.x + popupLeftMargin-smallDatasetSlopH, popupDrawRect.y+((x+1)*metrics.getHeight())-smallDatasetSlopV); } } // draw the cached last right-click selection // The purpose of testing for empty string is just to avoid an 2-unit empty white box if (lastSelection != null){ if ((zoomLevel == 0) && (!lastSelection.equals("")) && (!forExport)) { g2.setFont(boxFont); // a bit extra on all side int last_descent = g2.getFontMetrics().getDescent(); int last_box_x = (visRect.x + LAST_SELECTION_LEFT) - 2; int last_box_y = (visRect.y - g2.getFontMetrics().getHeight() + LAST_SELECTION_TOP + last_descent) - 1 ; int last_box_width = g2.getFontMetrics().stringWidth(lastSelection) + 4; int last_box_height = g2.getFontMetrics().getHeight() + 2; g2.setColor(Color.white); g2.fillRect(last_box_x, last_box_y, last_box_width, last_box_height); g2.setColor(Color.black); g2.drawRect(last_box_x, last_box_y, last_box_width, last_box_height); g2.drawString(lastSelection, LAST_SELECTION_LEFT + visRect.x, LAST_SELECTION_TOP + visRect.y); } } //see if we're drawing a worldmap resize rect if (resizeWMRect != null){ g2.setColor(Color.black); g2.drawRect(resizeWMRect.x, resizeWMRect.y, resizeWMRect.width, resizeWMRect.height); } //see if we're drawing a block selector rect if (blockRect != null){ g2.setColor(Color.black); 
g2.setStroke(dashedThinStroke); g2.drawRect(blockRect.x, blockRect.y, blockRect.width, blockRect.height); } }
/**
 * Paints the pairwise-LD plot: an optional GBrowse image and analysis track,
 * marker tick/position lines, marker names and numbers, the diamond matrix of
 * pairwise LD values colored per {@code PairwiseLinkage.getColor()}, haplotype
 * block outlines, a shrunken "worldmap" overview for large datasets, and any
 * active popup / selection / resize / block-selection overlays.
 *
 * Side effects: mutates layout fields ({@code left}, {@code top},
 * {@code lowX/highX/lowY/highY}, {@code clickXShift/clickYShift},
 * {@code widestMarkerName}, {@code printWhat}, {@code showWM}), may call
 * {@code setSize}, and lazily builds and caches the {@code worldmap} image
 * (guarded by {@code noImage}).
 *
 * @param g the graphics context supplied by Swing (cast to Graphics2D)
 */
public void paintComponent(Graphics g){
    DPrimeTable dPrimeTable = theData.dpTable;
    if (Chromosome.getSize() < 2){
        //if there zero or only one valid marker
        return;
    }
    Vector blocks = theData.blocks;
    Rectangle visRect = getVisibleRect();

    //deal with zooming
    // NOTE(review): worldmap is shown whenever the chart is >3x wider than the viewport
    if (chartSize.getWidth() > (3*visRect.width)){
        showWM = true;
    }else{
        showWM = false;
    }

    boolean printValues = true;
    if (zoomLevel != 0 || Options.getPrintWhat() == LD_NONE){
        printValues = false;
    }
    printWhat = Options.getPrintWhat();

    Graphics2D g2 = (Graphics2D) g;
    Dimension size = getSize();
    Dimension pref = getPreferredSize();
    g2.setColor(BG_GREY);

    //if it's a big dataset, resize properly, if it's small make sure to fill whole background
    if (size.height < pref.height){
        g2.fillRect(0,0,pref.width,pref.height);
        setSize(pref);
    }else{
        g2.fillRect(0,0,size.width, size.height);
    }
    g2.setColor(Color.black);

    //okay so this dumb if block is to prevent the ugly repainting
    //bug when loading markers after the data are already being displayed,
    //results in a little off-centering for small datasets, but not too bad.
    if (!forExport){
        if (!theData.infoKnown){
            g2.translate((size.width - pref.width) / 2,
                    (size.height - pref.height) / 2);
        } else {
            g2.translate((size.width - pref.width) / 2, 0);
        }
    }

    FontMetrics boxFontMetrics = g2.getFontMetrics(boxFont);

    // reusable diamond vertices for the LD boxes
    int diamondX[] = new int[4];
    int diamondY[] = new int[4];
    Polygon diamond;

    // pixel span of the marker axis and the physical bp span it represents
    double lineSpan = alignedPositions[alignedPositions.length-1] - alignedPositions[0];
    long minpos = Chromosome.getMarker(0).getPosition();
    long maxpos = Chromosome.getMarker(Chromosome.getSize()-1).getPosition();
    double spanpos = maxpos - minpos;

    //See http://www.hapmap.org/cgi-perl/gbrowse/gbrowse_img
    //for more info on GBrowse img.
    int imgHeight = 0;
    if (Options.isGBrowseShown() && Chromosome.getDataChrom() != null && !Chromosome.getDataChrom().equalsIgnoreCase("none")){
        g2.drawImage(gBrowseImage,H_BORDER-GBROWSE_MARGIN,V_BORDER,this);
        imgHeight = gBrowseImage.getHeight(this) + TRACK_GAP; // get height so we can shift everything down
    }
    left = H_BORDER;
    top = V_BORDER + imgHeight; // push the haplotype display down to make room for gbrowse image.

    if (forExport){
        // shift left so the export range starts at the origin
        left -= exportStart * boxSize;
    }

    FontMetrics metrics;
    int ascent;

    g2.setFont(boldMarkerNameFont);
    metrics = g2.getFontMetrics();
    ascent = metrics.getAscent();

    //the following values are the bounds on the boxes we want to
    //display given that the current window is 'visRect'
    lowX = getBoundaryMarker(visRect.x-clickXShift-(visRect.y +visRect.height-clickYShift)) - 1;
    highX = getBoundaryMarker(visRect.x + visRect.width);
    lowY = getBoundaryMarker((visRect.x-clickXShift)+(visRect.y-clickYShift)) - 1;
    highY = getBoundaryMarker((visRect.x-clickXShift+visRect.width) + (visRect.y-clickYShift+visRect.height));
    if (lowX < 0) {
        lowX = 0;
    }
    if (highX > Chromosome.getSize()-1){
        highX = Chromosome.getSize()-1;
    }
    if (lowY < lowX+1){
        lowY = lowX+1;
    }
    if (highY > Chromosome.getSize()){
        highY = Chromosome.getSize();
    }
    if (forExport){
        // exports ignore the viewport and draw exactly the requested range
        lowX = exportStart;
        lowY = exportStart;
        highX = exportStop;
        highY = exportStop+1;
    }

    if (theData.trackExists){
        //draw the analysis track above where the marker positions will be marked
        JFreeChart jfc = ChartFactory.createXYLineChart(null,null,null,
                theData.analysisTracks,
                PlotOrientation.VERTICAL,false,false,false);

        //customise the analysis track
        XYPlot xyp = (XYPlot)jfc.getPlot();

        //no x axis, since it takes up too much space.
        xyp.getDomainAxis().setAxisLineVisible(false);
        xyp.getDomainAxis().setTickLabelsVisible(false);
        xyp.getDomainAxis().setTickMarksVisible(false);

        //x range must align with markers
        xyp.getDomainAxis().setRange(minpos,maxpos);

        //size of the axis and graph inset
        double axisWidth = xyp.getRangeAxis().reserveSpace(g2,xyp,new Rectangle(0,TRACK_HEIGHT),RectangleEdge.LEFT,null).getLeft();
        RectangleInsets insets = xyp.getInsets();

        jfc.setBackgroundPaint(BG_GREY);
        BufferedImage bi = jfc.createBufferedImage(
                (int)(lineSpan + axisWidth + insets.getLeft() + insets.getRight()),TRACK_HEIGHT);

        //hide the axis in the margin so everything lines up.
        g2.drawImage(bi,(int)(left - axisWidth - insets.getLeft()),top,this);
        top += TRACK_HEIGHT + TRACK_GAP;
    }

    if (theData.infoKnown) {
        Color green = new Color(0, 127, 0);
        g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                RenderingHints.VALUE_ANTIALIAS_ON);

        //// draw the marker locations
        g2.setStroke(thinnerStroke);
        g2.setColor(Color.white);
        g2.fill(new Rectangle2D.Double(left+1, top+1, lineSpan-1, TICK_HEIGHT-1));
        g2.setColor(Color.black);
        g2.draw(new Rectangle2D.Double(left, top, lineSpan, TICK_HEIGHT));
        for (int i = 0; i < Chromosome.getSize(); i++){
            // physical position mapped proportionally onto the tick strip
            double pos = (Chromosome.getMarker(i).getPosition() - minpos) / spanpos;
            double xx = left + lineSpan*pos;

            // if we're zoomed, use the line color to indicate whether there is extra data available
            // (since the marker names are not displayed when zoomed)
            if (Chromosome.getMarker(i).getExtra() != null && zoomLevel != 0)
                g2.setColor(green);
            //draw tick
            g2.setStroke(thickerStroke);
            g2.draw(new Line2D.Double(xx, top, xx, top + TICK_HEIGHT));

            if (Chromosome.getMarker(i).getExtra() != null && zoomLevel != 0)
                g2.setStroke(thickerStroke);
            else
                g2.setStroke(thinnerStroke);
            //draw connecting line from physical position to the marker's aligned column
            g2.draw(new Line2D.Double(xx, top + TICK_HEIGHT,
                    left + alignedPositions[i], top+TICK_BOTTOM));

            if (Chromosome.getMarker(i).getExtra() != null && zoomLevel != 0)
                g2.setColor(Color.black);
        }
        top += TICK_BOTTOM + TICK_HEIGHT;

        //// draw the marker names
        if (printMarkerNames){
            widestMarkerName = metrics.stringWidth(Chromosome.getMarker(0).getDisplayName());
            for (int x = 1; x < Chromosome.getSize(); x++) {
                int thiswide = metrics.stringWidth(Chromosome.getMarker(x).getDisplayName());
                if (thiswide > widestMarkerName) widestMarkerName = thiswide;
            }

            // rotate 90 degrees so names run vertically above their columns
            g2.translate(left, top + widestMarkerName);
            g2.rotate(-Math.PI / 2.0);
            boolean foundSNP = false;
            for (int x = 0; x < Chromosome.getSize(); x++) {
                if (theData.isInBlock[x]){
                    g2.setFont(boldMarkerNameFont);
                }else{
                    g2.setFont(markerNameFont);
                }
                if (Chromosome.getMarker(x).getExtra() != null)
                    g2.setColor(green);
                if (theHV != null){
                    // highlight the marker chosen in the HaploView frame, if any
                    if (Chromosome.getMarker(x).getDisplayName().equals(theHV.getChosenMarker())){
                        g2.setColor(Color.blue);
                        foundSNP = true;
                    }
                }
                g2.drawString(Chromosome.getMarker(x).getDisplayName(),(float)TEXT_GAP,
                        (float)alignedPositions[x] + ascent/3);
                if (Chromosome.getMarker(x).getExtra() != null)
                    g2.setColor(Color.black);
                if (foundSNP){
                    g2.setColor(Color.BLACK);
                    foundSNP = false;
                }
            }
            // undo the rotation/translation
            g2.rotate(Math.PI / 2.0);
            g2.translate(-left, -(top + widestMarkerName));

            // move everybody down
            top += widestMarkerName + TEXT_GAP;
        }
        g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                RenderingHints.VALUE_ANTIALIAS_OFF);
    }

    top += blockDispHeight;

    //// draw the marker numbers
    if (printMarkerNames){
        g2.setFont(markerNumFont);
        metrics = g2.getFontMetrics();
        ascent = metrics.getAscent();

        for (int x = 0; x < Chromosome.getSize(); x++) {
            String mark = String.valueOf(Chromosome.realIndex[x] + 1);
            g2.drawString(mark,
                    (float)(left + alignedPositions[x] - metrics.stringWidth(mark)/2),
                    (float)(top + ascent));
        }
        top += boxRadius/2; // give a little space between numbers and boxes
    }

    //clickxshift and clickyshift are used later to translate from x,y coords
    //to the pair of markers comparison at those coords
    if (!(theData.infoKnown)){
        clickXShift = left + (size.width-pref.width)/2;
        clickYShift = top + (size.height - pref.height)/2;
    } else {
        clickXShift = left + (size.width-pref.width)/2;
        clickYShift = top;
    }

    // draw table column by column
    for (int x = lowX; x < highX; x++) {

        //always draw the fewest possible boxes
        if (lowY < x+1){
            lowY = x+1;
        }

        for (int y = lowY; y < highY; y++) {
            if (dPrimeTable.getLDStats(x,y) == null){
                continue;
            }
            double d = dPrimeTable.getLDStats(x,y).getDPrime();
            double r = dPrimeTable.getLDStats(x,y).getRSquared();
            //double l = dPrimeTable.getLDStats(x,y).getLOD();
            Color boxColor = dPrimeTable.getLDStats(x,y).getColor();

            // draw markers above
            // diamond center: midpoint of the two columns, offset down by half the distance
            int xx = left + (int)((alignedPositions[x] + alignedPositions[y])/2);
            int yy = top + (int)((alignedPositions[y] - alignedPositions[x]) / 2);

            diamondX[0] = xx; diamondY[0] = yy - boxRadius;
            diamondX[1] = xx + boxRadius; diamondY[1] = yy;
            diamondX[2] = xx; diamondY[2] = yy + boxRadius;
            diamondX[3] = xx - boxRadius; diamondY[3] = yy;

            diamond = new Polygon(diamondX, diamondY, 4);
            g2.setColor(boxColor);
            g2.fillPolygon(diamond);

            if(printValues){
                g2.setFont(boxFont);
                ascent = boxFontMetrics.getAscent();
                int val;
                if (printWhat == D_PRIME){
                    val = (int) (d * 100);
                }else if (printWhat == R_SQ){
                    val = (int) (r * 100);
                }else{
                    val = 100;
                }
                g2.setColor((val < 50) ? Color.gray : Color.black);
                // use white text on very dark boxes for contrast
                if (boxColor.getGreen() < 100 && boxColor.getBlue() < 100 && boxColor.getRed() < 100){
                    g2.setColor(Color.white);
                }
                // values of exactly 100 are left unlabeled
                if (val != 100) {
                    String valu = String.valueOf(val);
                    int widf = boxFontMetrics.stringWidth(valu);
                    g.drawString(valu, xx - widf/2, yy + ascent/2);
                }
            }
        }
    }

    //highlight blocks
    g2.setFont(markerNameFont);
    ascent = g2.getFontMetrics().getAscent();
    //g.setColor(new Color(153,255,153));
    g2.setColor(Color.black);
    //g.setColor(new Color(51,153,51));
    for (int i = 0; i < blocks.size(); i++){
        int[] theBlock = (int[])blocks.elementAt(i);
        int first = theBlock[0];
        int last = theBlock[theBlock.length-1];

        //big vee around whole thing
        g2.setStroke(fatStroke);
        g2.draw(new Line2D.Double(left + alignedPositions[first] - boxRadius,
                top,
                left + (alignedPositions[first] + alignedPositions[last])/2,
                top + (alignedPositions[last] - alignedPositions[first])/2 + boxRadius));
        g2.draw(new Line2D.Double(left + (alignedPositions[first] + alignedPositions[last])/2,
                top + (alignedPositions[last] - alignedPositions[first])/2 + boxRadius,
                left + alignedPositions[last] + boxRadius,
                top));

        for (int j = first; j < last; j++){
            g2.setStroke(fatStroke);
            if (theData.isInBlock[j]){
                // solid bar across markers inside the block
                g2.draw(new Line2D.Double(left+alignedPositions[j]-boxSize/2,
                        top-blockDispHeight,
                        left+alignedPositions[j+1]-boxSize/2,
                        top-blockDispHeight));
            }else{
                // gap marker: solid to the next column plus a dashed segment over this one
                g2.draw(new Line2D.Double(left + alignedPositions[j] + boxSize/2,
                        top-blockDispHeight,
                        left+alignedPositions[j+1]-boxSize/2,
                        top-blockDispHeight));
                g2.setStroke(dashedFatStroke);
                g2.draw(new Line2D.Double(left+alignedPositions[j] - boxSize/2,
                        top-blockDispHeight,
                        left+alignedPositions[j] + boxSize/2,
                        top-blockDispHeight));
            }
        }
        //cap off the end of the block
        g2.setStroke(fatStroke);
        g2.draw(new Line2D.Double(left+alignedPositions[last]-boxSize/2,
                top-blockDispHeight,
                left+alignedPositions[last]+boxSize/2,
                top-blockDispHeight));

        //lines to connect to block display
        g2.setStroke(fatStroke);
        g2.draw(new Line2D.Double(left + alignedPositions[first]-boxSize/2,
                top-1,
                left+alignedPositions[first]-boxSize/2,
                top-blockDispHeight));
        g2.draw(new Line2D.Double(left+alignedPositions[last]+boxSize/2,
                top-1,
                left+alignedPositions[last]+boxSize/2,
                top-blockDispHeight));
        if (printMarkerNames){
            String labelString = new String ("Block " + (i+1));
            if (theData.infoKnown){
                long blockSize = Chromosome.getMarker(last).getPosition() -
                        Chromosome.getMarker(first).getPosition();
                labelString += " (" + blockSize/1000 + " kb)";
            }
            g2.drawString(labelString,
                    (float)(left+alignedPositions[first]-boxSize/2+TEXT_GAP),
                    (float)(top-boxSize/3));
        }
    }
    g2.setStroke(thickerStroke);

    if (showWM && !forExport){
        //dataset is big enough to require worldmap
        if (wmMaxWidth == 0){
            wmMaxWidth = visRect.width/3;
        }
        double scalefactor;
        scalefactor = (double)(chartSize.width)/wmMaxWidth;
        double prefBoxSize = boxSize/(scalefactor*((double)wmMaxWidth/(double)(wmMaxWidth)));

        //stick WM_BD in the middle of the blank space at the top of the worldmap
        final int WM_BD_GAP = (int)(infoHeight/(scalefactor*2));
        final int WM_BD_HEIGHT = 2;
        CompoundBorder wmBorder = new CompoundBorder(BorderFactory.createRaisedBevelBorder(),
                BorderFactory.createLoweredBevelBorder());

        if (noImage){
            //first time through draw a worldmap if dataset is big:
            worldmap = new BufferedImage((int)(chartSize.width/scalefactor)+wmBorder.getBorderInsets(this).left*2,
                    (int)(chartSize.height/scalefactor)+wmBorder.getBorderInsets(this).top*2,
                    BufferedImage.TYPE_3BYTE_BGR);

            Graphics gw = worldmap.getGraphics();
            Graphics2D gw2 = (Graphics2D)(gw);
            gw2.setColor(BG_GREY);
            gw2.fillRect(1,1,worldmap.getWidth()-1,worldmap.getHeight()-1);
            //make a pretty border
            gw2.setColor(Color.black);
            wmBorder.paintBorder(this,gw2,0,0,worldmap.getWidth(),worldmap.getHeight());
            wmInteriorRect = wmBorder.getInteriorRectangle(this,0,0,worldmap.getWidth(),
                    worldmap.getHeight());

            float[] smallDiamondX = new float[4];
            float[] smallDiamondY = new float[4];
            GeneralPath gp;
            // render every pairwise LD diamond at worldmap scale
            for (int x = 0; x < Chromosome.getSize()-1; x++){
                for (int y = x+1; y < Chromosome.getSize(); y++){
                    if (dPrimeTable.getLDStats(x,y) == null){
                        continue;
                    }
                    double xx = ((alignedPositions[y] + alignedPositions[x])/(scalefactor*2)) +
                            wmBorder.getBorderInsets(this).left;
                    double yy = ((alignedPositions[y] - alignedPositions[x] + infoHeight*2)/(scalefactor*2)) +
                            wmBorder.getBorderInsets(this).top;

                    smallDiamondX[0] = (float)xx; smallDiamondY[0] = (float)(yy - prefBoxSize/2);
                    smallDiamondX[1] = (float)(xx + prefBoxSize/2); smallDiamondY[1] = (float)yy;
                    smallDiamondX[2] = (float)xx; smallDiamondY[2] = (float)(yy + prefBoxSize/2);
                    smallDiamondX[3] = (float)(xx - prefBoxSize/2); smallDiamondY[3] = (float)yy;

                    gp = new GeneralPath(GeneralPath.WIND_EVEN_ODD, smallDiamondX.length);
                    gp.moveTo(smallDiamondX[0],smallDiamondY[0]);
                    for (int i = 1; i < smallDiamondX.length; i++){
                        gp.lineTo(smallDiamondX[i], smallDiamondY[i]);
                    }
                    gp.closePath();

                    gw2.setColor(dPrimeTable.getLDStats(x,y).getColor());
                    gw2.fill(gp);
                }
            }
            noImage = false;
        }

        //draw block display in worldmap
        Graphics gw = worldmap.getGraphics();
        Graphics2D gw2 = (Graphics2D)(gw);
        gw2.setColor(BG_GREY);
        gw2.fillRect(wmBorder.getBorderInsets(this).left,
                wmBorder.getBorderInsets(this).top+WM_BD_GAP,
                wmInteriorRect.width,
                WM_BD_HEIGHT);
        gw2.setColor(Color.black);
        // alternate vertical offset so adjacent blocks remain distinguishable
        boolean even = true;
        for (int i = 0; i < blocks.size(); i++){
            int first = ((int[])blocks.elementAt(i))[0];
            int last = ((int[])blocks.elementAt(i))[((int[])blocks.elementAt(i)).length-1];
            int voffset;
            if (even){
                voffset = 0;
            }else{
                voffset = WM_BD_HEIGHT/2;
            }
            gw2.fillRect(wmBorder.getBorderInsets(this).left - (int)prefBoxSize/2 + (int)(alignedPositions[first]/scalefactor),
                    wmBorder.getBorderInsets(this).top+voffset+WM_BD_GAP,
                    (int)(prefBoxSize + (alignedPositions[last] - alignedPositions[first])/scalefactor),
                    WM_BD_HEIGHT/2);
            even = !even;
        }
        // hit-target for resizing the worldmap (bottom-left corner of the viewport)
        wmResizeCorner = new Rectangle(visRect.x + worldmap.getWidth() - (worldmap.getWidth()-wmInteriorRect.width)/2,
                visRect.y + visRect.height - worldmap.getHeight(),
                (worldmap.getWidth()-wmInteriorRect.width)/2,
                (worldmap.getHeight() -wmInteriorRect.height)/2);

        g2.drawImage(worldmap,visRect.x,
                visRect.y + visRect.height - worldmap.getHeight(),
                this);
        wmInteriorRect.x = visRect.x + (worldmap.getWidth() - wmInteriorRect.width)/2;
        wmInteriorRect.y = visRect.y+visRect.height-worldmap.getHeight() +
                (worldmap.getHeight() - wmInteriorRect.height)/2;

        //draw the outline of the viewport
        g2.setColor(Color.black);
        double hRatio = wmInteriorRect.getWidth()/pref.getWidth();
        double vRatio = wmInteriorRect.getHeight()/pref.getHeight();
        int hBump = worldmap.getWidth()-wmInteriorRect.width;
        int vBump = worldmap.getHeight()-wmInteriorRect.height;
        //bump a few pixels to avoid drawing on the border
        g2.drawRect((int)(visRect.x*hRatio)+hBump/2+visRect.x,
                (int)(visRect.y*vRatio)+vBump/2+(visRect.y + visRect.height - worldmap.getHeight()),
                (int)(visRect.width*hRatio),
                (int)(visRect.height*vRatio));
    }

    //see if the user has right-clicked to popup some marker info
    if(popupDrawRect != null){
        //dumb bug where little datasets popup the box in the wrong place
        int smallDatasetSlopH = 0;
        int smallDatasetSlopV = 0;
        if (pref.getHeight() < visRect.height){
            smallDatasetSlopV = (int)(visRect.height - pref.getHeight())/2;
        }
        if (pref.getWidth() < visRect.width){
            smallDatasetSlopH = (int)(visRect.width - pref.getWidth())/2;
        }
        g2.setColor(Color.white);
        g2.fillRect(popupDrawRect.x+1-smallDatasetSlopH,
                popupDrawRect.y+1-smallDatasetSlopV,
                popupDrawRect.width-1,
                popupDrawRect.height-1);
        g2.setColor(Color.black);
        g2.drawRect(popupDrawRect.x-smallDatasetSlopH,
                popupDrawRect.y-smallDatasetSlopV,
                popupDrawRect.width,
                popupDrawRect.height);

        g.setFont(popupFont);
        for (int x = 0; x < displayStrings.size(); x++){
            g.drawString((String)displayStrings.elementAt(x),popupDrawRect.x + popupLeftMargin-smallDatasetSlopH,
                    popupDrawRect.y+((x+1)*metrics.getHeight())-smallDatasetSlopV);
        }
    }

    // draw the cached last right-click selection
    // The purpose of testing for empty string is just to avoid an 2-unit empty white box
    if (lastSelection != null){
        if ((zoomLevel == 0) && (!lastSelection.equals("")) && (!forExport)) {
            g2.setFont(boxFont);
            // a bit extra on all side
            int last_descent = g2.getFontMetrics().getDescent();
            int last_box_x = (visRect.x + LAST_SELECTION_LEFT) - 2;
            int last_box_y = (visRect.y - g2.getFontMetrics().getHeight() + LAST_SELECTION_TOP + last_descent) - 1 ;
            int last_box_width = g2.getFontMetrics().stringWidth(lastSelection) + 4;
            int last_box_height = g2.getFontMetrics().getHeight() + 2;
            g2.setColor(Color.white);
            g2.fillRect(last_box_x, last_box_y, last_box_width, last_box_height);
            g2.setColor(Color.black);
            g2.drawRect(last_box_x, last_box_y, last_box_width, last_box_height);
            g2.drawString(lastSelection, LAST_SELECTION_LEFT + visRect.x, LAST_SELECTION_TOP + visRect.y);
        }
    }

    //see if we're drawing a worldmap resize rect
    if (resizeWMRect != null){
        g2.setColor(Color.black);
        g2.drawRect(resizeWMRect.x,
                resizeWMRect.y,
                resizeWMRect.width,
                resizeWMRect.height);
    }

    //see if we're drawing a block selector rect
    if (blockRect != null){
        g2.setColor(Color.black);
        g2.setStroke(dashedThinStroke);
        g2.drawRect(blockRect.x,
                blockRect.y,
                blockRect.width,
                blockRect.height);
    }
}
1,112,503
private JUnitOJBManager() { System.setProperty( "photovault.configfile", "conf/junittest_config.xml" ); log.error( "Initializing OB for JUnit tests" ); createDatabase(); PhotovaultSettings settings = PhotovaultSettings.getSettings(); settings.setConfiguration( "pv_junit" ); PVDatabase db = settings.getDatabase( "pv_junit" ); if ( db == null ) { log.error( "Could not find dbname for configuration " ); return; } if ( ODMG.initODMG( "", "", db ) ) { log.debug( "Connection succesful!!!" ); } else { log.error( "Error logging into Photovault" ); } if ( db.getSchemaVersion() < PVDatabase.CURRENT_SCHEMA_VERSION ) { SchemaUpdateAction updater = new SchemaUpdateAction( db ); updater.upgradeDatabase(); } }
private JUnitOJBManager() { System.setProperty( "photovault.configfile", "conf/junittest_config.xml" ); log.error( "Initializing OB for JUnit tests" ); createDatabase(); PhotovaultSettings settings = PhotovaultSettings.getSettings(); settings.setConfiguration( "pv_junit" ); PVDatabase db = settings.getDatabase( "pv_junit" ); if ( db == null ) { log.error( "Could not find dbname for configuration " ); return; } if ( ODMG.initODMG( "", "", db ) ) { log.debug( "Connection succesful!!!" ); } else { log.error( "Error logging into Photovault" ); } if ( db.getSchemaVersion() < PVDatabase.CURRENT_SCHEMA_VERSION ) { SchemaUpdateAction updater = new SchemaUpdateAction( db ); updater.upgradeDatabase(); } }
1,112,505
/**
 * Installs the play pen's target database, ensuring it has a placeholder
 * data source, attaching this object as a hierarchy listener, and resetting
 * the cached table-name set.
 *
 * @param newdb the target database; must not be null
 * @throws NullPointerException if {@code newdb} is null
 */
private final void setDatabase(SQLDatabase newdb) {
    if (newdb == null) {
        throw new NullPointerException("db must be non-null");
    }
    this.db = newdb;
    db.setIgnoreReset(true);

    // Give the target database a named placeholder data source when it has none.
    if (db.getDataSource() == null) {
        ArchitectDataSource placeholder = new ArchitectDataSource();
        placeholder.setName("Target Database");
        placeholder.setDisplayName("Target Database");
        db.setDataSource(placeholder);
    }

    try {
        ArchitectUtils.listenToHierarchy(this, db);
    } catch (ArchitectException ex) {
        // Non-fatal: we just lose change notifications from the hierarchy.
        logger.error("Couldn't listen to database", ex);
    }
    tableNames = new HashSet();
}
/**
 * Replaces the current target database. Ensures a default data source is
 * present, registers this object as a listener on the database hierarchy,
 * and clears the table-name cache.
 *
 * @param newdb the new target database (non-null)
 * @throws NullPointerException if {@code newdb} is null
 */
private final void setDatabase(SQLDatabase newdb) {
    if (newdb == null) {
        throw new NullPointerException("db must be non-null");
    }

    this.db = newdb;
    db.setIgnoreReset(true);

    boolean missingDataSource = (db.getDataSource() == null);
    if (missingDataSource) {
        // Supply a default, display-named data source for the target database.
        ArchitectDataSource targetSource = new ArchitectDataSource();
        targetSource.setName("Target Database");
        targetSource.setDisplayName("Target Database");
        db.setDataSource(targetSource);
    }

    try {
        ArchitectUtils.listenToHierarchy(this, db);
    } catch (ArchitectException ex) {
        // Listening failed; log and continue rather than aborting the swap.
        logger.error("Couldn't listen to database", ex);
    }

    tableNames = new HashSet();
}
1,112,509
/**
 * Computes pairwise LD statistics (D', LOD, r-squared, and a D' confidence
 * interval) for the two markers at {@code pos1} and {@code pos2} via an EM
 * estimation of the four two-marker haplotype frequencies.
 *
 * Side effects: increments {@code compsDone}/{@code realCompsDone} and writes
 * into the shared field arrays {@code known}, {@code probHaps},
 * {@code numHaps} plus {@code unknownDH}, {@code total_chroms} and
 * {@code const_prob} — so this method is not safe for concurrent calls
 * (presumably it is only invoked from a single computation thread; verify
 * against callers).
 *
 * @param pos1 index of the first marker (must be upstream of pos2)
 * @param pos2 index of the second marker
 * @return a PairwiseLinkage carrying D', LOD, r^2, CI low/high and the four
 *         haplotype frequencies; or null when the markers are farther apart
 *         than maxdist or either marker is monomorphic
 */
public PairwiseLinkage computeDPrime(int pos1, int pos2){
    long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition();
    if (maxdist > 0){
        // skip pairs outside the user-selected maximum comparison distance
        if ((sep > maxdist || sep < negMaxdist)){
            return null;
        }
    }
    compsDone++;
    int doublehet = 0;
    // counts of unambiguous two-marker haplotypes, indexed [allele1][allele2] in {1,2}
    int[][] twoMarkerHaplos = new int[3][3];
    for (int i = 0; i < twoMarkerHaplos.length; i++){
        for (int j = 0; j < twoMarkerHaplos[i].length; j++){
            twoMarkerHaplos[i][j] = 0;
        }
    }
    doublehet = 0;
    //get the alleles for the markers
    // genotype codes: 0 = missing, 5 = heterozygote, 1-4 = concrete alleles
    int m1a1 = 0;
    int m1a2 = 0;
    int m2a1 = 0;
    int m2a2 = 0;
    int m1H = 0;
    int m2H = 0;
    for (int i = 0; i < chromosomes.size(); i++){
        byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1);
        byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        if (m1a1 > 0){
            if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1;
        } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1;
        if (m2a1 > 0){
            if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2;
        } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2;
        if (a1 == 5) m1H++;
        if (a2 == 5) m2H++;
    }

    //check for non-polymorphic markers
    if (m1a2==0){
        if (m1H==0){
            //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier
            return null;
        } else {
            // only hets observed: infer the second allele as "the other one"
            if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; }
        }
    }
    if (m2a2==0){
        if (m2H==0){
            return null;
        } else {
            if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; }
        }
    }

    // map raw allele codes (1-4) onto canonical indices 1/2 per marker
    int[] marker1num = new int[5];
    int[] marker2num = new int[5];
    marker1num[0]=0;
    marker1num[m1a1]=1;
    marker1num[m1a2]=2;
    marker2num[0]=0;
    marker2num[m2a1]=1;
    marker2num[m2a2]=2;

    //iterate through all chromosomes in dataset
    // NOTE: the loop body consumes chromosomes two at a time (the ++i below),
    // pairing each individual's two chromosomes.
    for (int i = 0; i < chromosomes.size(); i++){
        //System.out.println(i + " " + pos1 + " " + pos2);
        //assign alleles for each of a pair of chromosomes at a marker to four variables
        byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1);
        byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1);
        byte b2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){
            //skip missing data
        } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++;
        //find doublehets and resolved haplotypes
        else if (a1 == 5){
            // het at marker 1 only: both phase resolutions contribute at marker 2
            twoMarkerHaplos[1][marker2num[a2]]++;
            twoMarkerHaplos[2][marker2num[a2]]++;
        } else if (a2 == 5){
            twoMarkerHaplos[marker1num[a1]][1]++;
            twoMarkerHaplos[marker1num[a1]][2]++;
        } else {
            twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++;
            twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++;
        }
    }

    //another monomorphic marker check
    int r1, r2, c1, c2;
    r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2];
    r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2];
    c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1];
    c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2];
    if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){
        return new PairwiseLinkage(1,0,0,0,0,new double[0]);
    }

    //compute D Prime for this pair of markers.
    //return is a tab delimited string of d', lod, r^2, CI(low), CI(high)
    this.realCompsDone++;
    int i,count;
    //int j,k,itmp;
    int low_i = 0;
    int high_i = 0;
    double loglike, oldloglike;// meand, mean2d, sd;
    double tmp;//g,h,m,tmp,r;
    double num, denom1, denom2, denom, dprime;//, real_dprime;
    double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq;
    double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB;
    double total_prob, sum_prob;
    // likelihood surface over D' in steps of 0.01 (indices 0..100)
    double lsurface[] = new double[105];

    /* store arguments in externals and compute allele frequencies */
    known[AA]=twoMarkerHaplos[1][1];
    known[AB]=twoMarkerHaplos[1][2];
    known[BA]=twoMarkerHaplos[2][1];
    known[BB]=twoMarkerHaplos[2][2];
    unknownDH=doublehet;
    total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH));
    pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms;
    pB1 = 1.0-pA1;
    pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms;
    pB2 = 1.0-pA2;
    const_prob = 0.1;

    /* set initial conditions */
    if (const_prob < 0.00) {
        probHaps[AA]=pA1*pA2;
        probHaps[AB]=pA1*pB2;
        probHaps[BA]=pB1*pA2;
        probHaps[BB]=pB1*pB2;
    } else {
        probHaps[AA]=const_prob;
        probHaps[AB]=const_prob;
        probHaps[BA]=const_prob;
        probHaps[BB]=const_prob;;

        /* so that the first count step will produce an initial
        estimate without inferences (this should be closer and
        therefore speedier than assuming they are all at equal frequency) */
        count_haps(0);
        estimate_p();
    }
    /* now we have an initial reasonable guess at p we can
    start the EM - let the fun begin */
    const_prob=0.0;
    count=1;
    loglike=-999999999.0;

    do {
        oldloglike=loglike;
        count_haps(count);
        loglike = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]);
        if (Math.abs(loglike-oldloglike) < TOLERANCE) break;
        estimate_p();
        count++;
    } while(count < 1000);
    /* in reality I've never seen it need more than 10 or so iterations
    to converge so this is really here just to keep it from running off into eternity */

    loglike1 = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]);
    loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2);

    num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA];

    if (num < 0) {
        /* flip matrix so we get the positive D' */
        /* flip AA with AB and BA with BB */
        tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp;
        tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp;
        /* flip frequency of second allele */
        tmp=pA2; pA2=pB2; pB2=tmp;
        /* flip counts in the same fashion as p's */
        tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp;
        tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp;
        /* num has now undergone a sign change */
        num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA];
        /* flip known array for likelihood computation */
        tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp;
        tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp;
    }

    // D' denominator: the smaller of the two normalizing products
    denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]);
    denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]);
    if (denom1 < denom2) {
        denom = denom1;
    } else {
        denom = denom2;
    }
    dprime = num/denom;

    /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */
    rsq = num*num/(pA1*pB1*pA2*pB2);

    //real_dprime=dprime;

    // build the log-likelihood surface over D' = 0.00 .. 1.00
    for (i=0; i<=100; i++) {
        dpr = (double)i*0.01;
        tmpAA = dpr*denom + pA1*pA2;
        tmpAB = pA1-tmpAA;
        tmpBA = pA2-tmpAA;
        tmpBB = pB1-tmpBA;
        if (i==100) {
            /* one value will be 0 */
            if (tmpAA < 1e-10) tmpAA=1e-10;
            if (tmpAB < 1e-10) tmpAB=1e-10;
            if (tmpBA < 1e-10) tmpBA=1e-10;
            if (tmpBB < 1e-10) tmpBB=1e-10;
        }
        lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA);
    }

    /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' -
    assumes a flat prior dist. of D' - someday we may be able to make
    this even more clever by adjusting given the distribution of observed
    D' values for any given distance after some large scale studies are complete */

    total_prob=sum_prob=0.0;

    // convert the surface from log-likelihood to (unnormalized) posterior probability
    for (i=0; i<=100; i++) {
        lsurface[i] -= loglike1;
        lsurface[i] = Math.pow(10.0,lsurface[i]);
        total_prob += lsurface[i];
    }

    // lower CI bound: first i where cumulative mass crosses 5%
    for (i=0; i<=100; i++) {
        sum_prob += lsurface[i];
        if (sum_prob > 0.05*total_prob &&
                sum_prob-lsurface[i] < 0.05*total_prob) {
            low_i = i-1;
            break;
        }
    }

    // upper CI bound: scan from the top until 5% of the mass is above
    sum_prob=0.0;
    for (i=100; i>=0; i--) {
        sum_prob += lsurface[i];
        if (sum_prob > 0.05*total_prob &&
                sum_prob-lsurface[i] < 0.05*total_prob) {
            high_i = i+1;
            break;
        }
    }
    if (high_i > 100){
        high_i = 100;
    }

    double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]};
    return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray);
}
/**
 * Computes pairwise linkage-disequilibrium statistics for the markers at
 * indices {@code pos1} and {@code pos2}: D', LOD score, r^2, and the D'
 * confidence bounds used by the Gabriel et al. (2002) block definition.
 * Double-heterozygote phase ambiguity is resolved with an EM estimate of
 * the four two-marker haplotype frequencies.
 *
 * @param pos1 index of the first marker (column in the genotype data)
 * @param pos2 index of the second marker
 * @return a PairwiseLinkage holding (D', LOD, r^2, CI low, CI high, haplotype
 *         frequencies); {@code null} if the markers are farther apart than
 *         {@code maxdist} or either marker is monomorphic with no het calls
 *
 * NOTE(review): this method reads and mutates shared fields (known, probHaps,
 * numHaps, unknownDH, total_chroms, const_prob) — presumably not thread-safe;
 * confirm callers never invoke it concurrently.
 */
public PairwiseLinkage computeDPrime(int pos1, int pos2){
    // Skip pairs separated by more than the configured maximum distance.
    long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition();
    if (maxdist > 0){
        if ((sep > maxdist || sep < negMaxdist)){
            return null;
        }
    }
    compsDone++;
    int doublehet = 0;
    // 3x3 table of resolved two-marker haplotype counts; row/col 0 is unused
    // (allele codes are remapped to 1/2 below).
    int[][] twoMarkerHaplos = new int[3][3];
    for (int i = 0; i < twoMarkerHaplos.length; i++){
        for (int j = 0; j < twoMarkerHaplos[i].length; j++){
            twoMarkerHaplos[i][j] = 0;
        }
    }
    doublehet = 0;
    //get the alleles for the markers
    // m1a1/m1a2 = first and second observed allele at marker 1 (same for m2*);
    // m1H/m2H count "5" calls, which this code treats as heterozygous/ambiguous.
    // Code 0 is treated as missing data throughout.
    int m1a1 = 0;
    int m1a2 = 0;
    int m2a1 = 0;
    int m2a2 = 0;
    int m1H = 0;
    int m2H = 0;
    for (int i = 0; i < chromosomes.size(); i++){
        byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1);
        byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        if (m1a1 > 0){
            if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1;
        } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1;
        if (m2a1 > 0){
            if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2;
        } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2;
        if (a1 == 5) m1H++;
        if (a2 == 5) m2H++;
    }
    //check for non-polymorphic markers
    // If only one allele was ever observed and there are no het calls the
    // marker is monomorphic and LD is undefined; with het calls present the
    // second allele is inferred as "the other of {1,2}".
    if (m1a2==0){
        if (m1H==0){
            //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier
            return null;
        } else {
            if (m1a1 == 1){
                m1a2=2;
            } else {
                m1a2 = 1;
            }
        }
    }
    if (m2a2==0){
        if (m2H==0){
            return null;
        } else {
            if (m2a1 == 1){
                m2a2=2;
            } else {
                m2a2 = 1;
            }
        }
    }
    // Remap raw allele codes (1..4) to canonical 1/2 for table indexing.
    int[] marker1num = new int[5];
    int[] marker2num = new int[5];
    marker1num[0]=0;
    marker1num[m1a1]=1;
    marker1num[m1a2]=2;
    marker2num[0]=0;
    marker2num[m2a1]=1;
    marker2num[m2a2]=2;
    //iterate through all chromosomes in dataset
    // NOTE(review): the ++i inside the loop consumes chromosomes in pairs
    // (the two chromosomes of one individual) — assumes chromosomes.size()
    // is even; confirm the loader guarantees that.
    for (int i = 0; i < chromosomes.size(); i++){
        //System.out.println(i + " " + pos1 + " " + pos2);
        //assign alleles for each of a pair of chromosomes at a marker to four variables
        byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1);
        byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1);
        byte b2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){
            //skip missing data
        } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes
        else if (a1 == 5){
            // Het at marker 1 only: phase known at marker 2, split across both rows.
            twoMarkerHaplos[1][marker2num[a2]]++;
            twoMarkerHaplos[2][marker2num[a2]]++;
        } else if (a2 == 5){
            // Het at marker 2 only: split across both columns.
            twoMarkerHaplos[marker1num[a1]][1]++;
            twoMarkerHaplos[marker1num[a1]][2]++;
        } else {
            // Fully resolved pair: count both haplotypes directly.
            twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++;
            twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++;
        }
    }
    //another monomorphic marker check
    // If an entire row or column of the table is empty and there are no
    // double-hets, one marker carries no information: return the degenerate
    // result D'=1, LOD=0, r^2=0 rather than running EM.
    int r1, r2, c1, c2;
    r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2];
    r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2];
    c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1];
    c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2];
    if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){
        return new PairwiseLinkage(1,0,0,0,0,new double[0]);
    }
    //compute D Prime for this pair of markers.
    //return is a tab delimited string of d', lod, r^2, CI(low), CI(high)
    this.realCompsDone++;
    int i,count;
    //int j,k,itmp;
    int low_i = 0;
    int high_i = 0;
    double loglike, oldloglike;// meand, mean2d, sd;
    double tmp;//g,h,m,tmp,r;
    double num, denom1, denom2, denom, dprime;//, real_dprime;
    double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq;
    double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB;
    double total_prob, sum_prob;
    // Log-likelihood surface over D' in [0,1] sampled at 101 points (0.01 steps).
    double lsurface[] = new double[105];
    /* store arguments in externals and compute allele frequencies */
    // AA/AB/BA/BB presumably are constant indices 0..3 into the shared
    // haplotype arrays — TODO confirm against their declarations.
    known[AA]=twoMarkerHaplos[1][1];
    known[AB]=twoMarkerHaplos[1][2];
    known[BA]=twoMarkerHaplos[2][1];
    known[BB]=twoMarkerHaplos[2][2];
    unknownDH=doublehet;
    total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH));
    pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms;
    pB1 = 1.0-pA1;
    pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms;
    pB2 = 1.0-pA2;
    const_prob = 0.1;
    /* set initial conditions */
    // const_prob is set to 0.1 just above, so the first branch (linkage
    // equilibrium start) is dead here; the else branch always runs.
    if (const_prob < 0.00) {
1,112,510
/**
 * Computes pairwise linkage-disequilibrium statistics for the markers at
 * indices {@code pos1} and {@code pos2}: D', LOD score, r^2, and the D'
 * confidence bounds used by the Gabriel et al. (2002) block definition.
 * Double-heterozygote phase ambiguity is resolved with an EM estimate of
 * the four two-marker haplotype frequencies.
 *
 * @param pos1 index of the first marker (column in the genotype data)
 * @param pos2 index of the second marker
 * @return a PairwiseLinkage holding (D', LOD, r^2, CI low, CI high, haplotype
 *         frequencies); {@code null} if the markers are farther apart than
 *         {@code maxdist} or either marker is monomorphic with no het calls
 *
 * NOTE(review): this method reads and mutates shared fields (known, probHaps,
 * numHaps, unknownDH, total_chroms, const_prob) — presumably not thread-safe;
 * confirm callers never invoke it concurrently.
 */
public PairwiseLinkage computeDPrime(int pos1, int pos2){
    // Skip pairs separated by more than the configured maximum distance.
    long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition();
    if (maxdist > 0){
        if ((sep > maxdist || sep < negMaxdist)){
            return null;
        }
    }
    compsDone++;
    int doublehet = 0;
    // 3x3 table of resolved two-marker haplotype counts; row/col 0 is unused
    // (allele codes are remapped to 1/2 below).
    int[][] twoMarkerHaplos = new int[3][3];
    for (int i = 0; i < twoMarkerHaplos.length; i++){
        for (int j = 0; j < twoMarkerHaplos[i].length; j++){
            twoMarkerHaplos[i][j] = 0;
        }
    }
    doublehet = 0;
    //get the alleles for the markers
    // m1a1/m1a2 = first and second observed allele at marker 1 (same for m2*);
    // m1H/m2H count "5" calls, which this code treats as heterozygous/ambiguous.
    // Code 0 is treated as missing data throughout.
    int m1a1 = 0;
    int m1a2 = 0;
    int m2a1 = 0;
    int m2a2 = 0;
    int m1H = 0;
    int m2H = 0;
    for (int i = 0; i < chromosomes.size(); i++){
        byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1);
        byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        if (m1a1 > 0){
            if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1;
        } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1;
        if (m2a1 > 0){
            if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2;
        } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2;
        if (a1 == 5) m1H++;
        if (a2 == 5) m2H++;
    }
    //check for non-polymorphic markers
    // If only one allele was ever observed and there are no het calls the
    // marker is monomorphic and LD is undefined; with het calls present the
    // second allele is inferred as "the other of {1,2}".
    if (m1a2==0){
        if (m1H==0){
            //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier
            return null;
        } else {
            if (m1a1 == 1){
                m1a2=2;
            } else {
                m1a2 = 1;
            }
        }
    }
    if (m2a2==0){
        if (m2H==0){
            return null;
        } else {
            if (m2a1 == 1){
                m2a2=2;
            } else {
                m2a2 = 1;
            }
        }
    }
    // Remap raw allele codes (1..4) to canonical 1/2 for table indexing.
    int[] marker1num = new int[5];
    int[] marker2num = new int[5];
    marker1num[0]=0;
    marker1num[m1a1]=1;
    marker1num[m1a2]=2;
    marker2num[0]=0;
    marker2num[m2a1]=1;
    marker2num[m2a2]=2;
    //iterate through all chromosomes in dataset
    // NOTE(review): the ++i inside the loop consumes chromosomes in pairs
    // (the two chromosomes of one individual) — assumes chromosomes.size()
    // is even; confirm the loader guarantees that.
    for (int i = 0; i < chromosomes.size(); i++){
        //System.out.println(i + " " + pos1 + " " + pos2);
        //assign alleles for each of a pair of chromosomes at a marker to four variables
        byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1);
        byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1);
        byte b2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){
            //skip missing data
        } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes
        else if (a1 == 5){
            // Het at marker 1 only: phase known at marker 2, split across both rows.
            twoMarkerHaplos[1][marker2num[a2]]++;
            twoMarkerHaplos[2][marker2num[a2]]++;
        } else if (a2 == 5){
            // Het at marker 2 only: split across both columns.
            twoMarkerHaplos[marker1num[a1]][1]++;
            twoMarkerHaplos[marker1num[a1]][2]++;
        } else {
            // Fully resolved pair: count both haplotypes directly.
            twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++;
            twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++;
        }
    }
    //another monomorphic marker check
    // If an entire row or column of the table is empty and there are no
    // double-hets, one marker carries no information: return the degenerate
    // result D'=1, LOD=0, r^2=0 rather than running EM.
    int r1, r2, c1, c2;
    r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2];
    r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2];
    c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1];
    c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2];
    if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){
        return new PairwiseLinkage(1,0,0,0,0,new double[0]);
    }
    //compute D Prime for this pair of markers.
    //return is a tab delimited string of d', lod, r^2, CI(low), CI(high)
    this.realCompsDone++;
    int i,count;
    //int j,k,itmp;
    int low_i = 0;
    int high_i = 0;
    double loglike, oldloglike;// meand, mean2d, sd;
    double tmp;//g,h,m,tmp,r;
    double num, denom1, denom2, denom, dprime;//, real_dprime;
    double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq;
    double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB;
    double total_prob, sum_prob;
    // Log-likelihood surface over D' in [0,1] sampled at 101 points (0.01 steps).
    double lsurface[] = new double[105];
    /* store arguments in externals and compute allele frequencies */
    // AA/AB/BA/BB presumably are constant indices 0..3 into the shared
    // haplotype arrays — TODO confirm against their declarations.
    known[AA]=twoMarkerHaplos[1][1];
    known[AB]=twoMarkerHaplos[1][2];
    known[BA]=twoMarkerHaplos[2][1];
    known[BB]=twoMarkerHaplos[2][2];
    unknownDH=doublehet;
    total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH));
    pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms;
    pB1 = 1.0-pA1;
    pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms;
    pB2 = 1.0-pA2;
    const_prob = 0.1;
    /* set initial conditions */
    // const_prob is set to 0.1 just above, so the first branch (linkage
    // equilibrium start) is dead here; the else branch always runs.
    if (const_prob < 0.00) {
/**
 * Computes pairwise linkage-disequilibrium statistics for the markers at
 * indices {@code pos1} and {@code pos2}: D', LOD score, r^2, and the D'
 * confidence bounds used by the Gabriel et al. (2002) block definition.
 * Double-heterozygote phase ambiguity is resolved with an EM estimate of
 * the four two-marker haplotype frequencies.
 *
 * @param pos1 index of the first marker (column in the genotype data)
 * @param pos2 index of the second marker
 * @return a PairwiseLinkage holding (D', LOD, r^2, CI low, CI high, haplotype
 *         frequencies); {@code null} if the markers are farther apart than
 *         {@code maxdist} or either marker is monomorphic with no het calls
 *
 * NOTE(review): this method reads and mutates shared fields (known, probHaps,
 * numHaps, unknownDH, total_chroms, const_prob) — presumably not thread-safe;
 * confirm callers never invoke it concurrently.
 */
public PairwiseLinkage computeDPrime(int pos1, int pos2){
    // Skip pairs separated by more than the configured maximum distance.
    long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition();
    if (maxdist > 0){
        if ((sep > maxdist || sep < negMaxdist)){
            return null;
        }
    }
    compsDone++;
    int doublehet = 0;
    // 3x3 table of resolved two-marker haplotype counts; row/col 0 is unused
    // (allele codes are remapped to 1/2 below).
    int[][] twoMarkerHaplos = new int[3][3];
    for (int i = 0; i < twoMarkerHaplos.length; i++){
        for (int j = 0; j < twoMarkerHaplos[i].length; j++){
            twoMarkerHaplos[i][j] = 0;
        }
    }
    doublehet = 0;
    //get the alleles for the markers
    // m1a1/m1a2 = first and second observed allele at marker 1 (same for m2*);
    // m1H/m2H count "5" calls, which this code treats as heterozygous/ambiguous.
    // Code 0 is treated as missing data throughout.
    int m1a1 = 0;
    int m1a2 = 0;
    int m2a1 = 0;
    int m2a2 = 0;
    int m1H = 0;
    int m2H = 0;
    for (int i = 0; i < chromosomes.size(); i++){
        byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1);
        byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        if (m1a1 > 0){
            if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1;
        } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1;
        if (m2a1 > 0){
            if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2;
        } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2;
        if (a1 == 5) m1H++;
        if (a2 == 5) m2H++;
    }
    //check for non-polymorphic markers
    // If only one allele was ever observed and there are no het calls the
    // marker is monomorphic and LD is undefined; with het calls present the
    // second allele is inferred as "the other of {1,2}".
    if (m1a2==0){
        if (m1H==0){
            //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier
            return null;
        } else {
            if (m1a1 == 1){
                m1a2=2;
            } else {
                m1a2 = 1;
            }
        }
    }
    if (m2a2==0){
        if (m2H==0){
            return null;
        } else {
            if (m2a1 == 1){
                m2a2=2;
            } else {
                m2a2 = 1;
            }
        }
    }
    // Remap raw allele codes (1..4) to canonical 1/2 for table indexing.
    int[] marker1num = new int[5];
    int[] marker2num = new int[5];
    marker1num[0]=0;
    marker1num[m1a1]=1;
    marker1num[m1a2]=2;
    marker2num[0]=0;
    marker2num[m2a1]=1;
    marker2num[m2a2]=2;
    //iterate through all chromosomes in dataset
    // NOTE(review): the ++i inside the loop consumes chromosomes in pairs
    // (the two chromosomes of one individual) — assumes chromosomes.size()
    // is even; confirm the loader guarantees that.
    for (int i = 0; i < chromosomes.size(); i++){
        //System.out.println(i + " " + pos1 + " " + pos2);
        //assign alleles for each of a pair of chromosomes at a marker to four variables
        byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1);
        byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1);
        byte b2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){
            //skip missing data
        } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes
        else if (a1 == 5){
            // Het at marker 1 only: phase known at marker 2, split across both rows.
            twoMarkerHaplos[1][marker2num[a2]]++;
            twoMarkerHaplos[2][marker2num[a2]]++;
        } else if (a2 == 5){
            // Het at marker 2 only: split across both columns.
            twoMarkerHaplos[marker1num[a1]][1]++;
            twoMarkerHaplos[marker1num[a1]][2]++;
        } else {
            // Fully resolved pair: count both haplotypes directly.
            twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++;
            twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++;
        }
    }
    //another monomorphic marker check
    // If an entire row or column of the table is empty and there are no
    // double-hets, one marker carries no information: return the degenerate
    // result D'=1, LOD=0, r^2=0 rather than running EM.
    int r1, r2, c1, c2;
    r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2];
    r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2];
    c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1];
    c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2];
    if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){
        return new PairwiseLinkage(1,0,0,0,0,new double[0]);
    }
    //compute D Prime for this pair of markers.
    //return is a tab delimited string of d', lod, r^2, CI(low), CI(high)
    this.realCompsDone++;
    int i,count;
    //int j,k,itmp;
    int low_i = 0;
    int high_i = 0;
    double loglike, oldloglike;// meand, mean2d, sd;
    double tmp;//g,h,m,tmp,r;
    double num, denom1, denom2, denom, dprime;//, real_dprime;
    double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq;
    double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB;
    double total_prob, sum_prob;
    // Log-likelihood surface over D' in [0,1] sampled at 101 points (0.01 steps).
    double lsurface[] = new double[105];
    /* store arguments in externals and compute allele frequencies */
    // AA/AB/BA/BB presumably are constant indices 0..3 into the shared
    // haplotype arrays — TODO confirm against their declarations.
    known[AA]=twoMarkerHaplos[1][1];
    known[AB]=twoMarkerHaplos[1][2];
    known[BA]=twoMarkerHaplos[2][1];
    known[BB]=twoMarkerHaplos[2][2];
    unknownDH=doublehet;
    total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH));
    pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms;
    pB1 = 1.0-pA1;
    pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms;
    pB2 = 1.0-pA2;
    const_prob = 0.1;
    /* set initial conditions */
    // const_prob is set to 0.1 just above, so the first branch (linkage
    // equilibrium start) is dead here; the else branch always runs.
    if (const_prob < 0.00) {
1,112,511
/**
 * Computes pairwise linkage-disequilibrium statistics for the markers at
 * indices {@code pos1} and {@code pos2}: D', LOD score, r^2, and the D'
 * confidence bounds used by the Gabriel et al. (2002) block definition.
 * Double-heterozygote phase ambiguity is resolved with an EM estimate of
 * the four two-marker haplotype frequencies.
 *
 * @param pos1 index of the first marker (column in the genotype data)
 * @param pos2 index of the second marker
 * @return a PairwiseLinkage holding (D', LOD, r^2, CI low, CI high, haplotype
 *         frequencies); {@code null} if the markers are farther apart than
 *         {@code maxdist} or either marker is monomorphic with no het calls
 *
 * NOTE(review): this method reads and mutates shared fields (known, probHaps,
 * numHaps, unknownDH, total_chroms, const_prob) — presumably not thread-safe;
 * confirm callers never invoke it concurrently.
 */
public PairwiseLinkage computeDPrime(int pos1, int pos2){
    // Skip pairs separated by more than the configured maximum distance.
    long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition();
    if (maxdist > 0){
        if ((sep > maxdist || sep < negMaxdist)){
            return null;
        }
    }
    compsDone++;
    int doublehet = 0;
    // 3x3 table of resolved two-marker haplotype counts; row/col 0 is unused
    // (allele codes are remapped to 1/2 below).
    int[][] twoMarkerHaplos = new int[3][3];
    for (int i = 0; i < twoMarkerHaplos.length; i++){
        for (int j = 0; j < twoMarkerHaplos[i].length; j++){
            twoMarkerHaplos[i][j] = 0;
        }
    }
    doublehet = 0;
    //get the alleles for the markers
    // m1a1/m1a2 = first and second observed allele at marker 1 (same for m2*);
    // m1H/m2H count "5" calls, which this code treats as heterozygous/ambiguous.
    // Code 0 is treated as missing data throughout.
    int m1a1 = 0;
    int m1a2 = 0;
    int m2a1 = 0;
    int m2a2 = 0;
    int m1H = 0;
    int m2H = 0;
    for (int i = 0; i < chromosomes.size(); i++){
        byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1);
        byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        if (m1a1 > 0){
            if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1;
        } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1;
        if (m2a1 > 0){
            if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2;
        } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2;
        if (a1 == 5) m1H++;
        if (a2 == 5) m2H++;
    }
    //check for non-polymorphic markers
    // If only one allele was ever observed and there are no het calls the
    // marker is monomorphic and LD is undefined; with het calls present the
    // second allele is inferred as "the other of {1,2}".
    if (m1a2==0){
        if (m1H==0){
            //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier
            return null;
        } else {
            if (m1a1 == 1){
                m1a2=2;
            } else {
                m1a2 = 1;
            }
        }
    }
    if (m2a2==0){
        if (m2H==0){
            return null;
        } else {
            if (m2a1 == 1){
                m2a2=2;
            } else {
                m2a2 = 1;
            }
        }
    }
    // Remap raw allele codes (1..4) to canonical 1/2 for table indexing.
    int[] marker1num = new int[5];
    int[] marker2num = new int[5];
    marker1num[0]=0;
    marker1num[m1a1]=1;
    marker1num[m1a2]=2;
    marker2num[0]=0;
    marker2num[m2a1]=1;
    marker2num[m2a2]=2;
    //iterate through all chromosomes in dataset
    // NOTE(review): the ++i inside the loop consumes chromosomes in pairs
    // (the two chromosomes of one individual) — assumes chromosomes.size()
    // is even; confirm the loader guarantees that.
    for (int i = 0; i < chromosomes.size(); i++){
        //System.out.println(i + " " + pos1 + " " + pos2);
        //assign alleles for each of a pair of chromosomes at a marker to four variables
        byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1);
        byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1);
        byte b2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){
            //skip missing data
        } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes
        else if (a1 == 5){
            // Het at marker 1 only: phase known at marker 2, split across both rows.
            twoMarkerHaplos[1][marker2num[a2]]++;
            twoMarkerHaplos[2][marker2num[a2]]++;
        } else if (a2 == 5){
            // Het at marker 2 only: split across both columns.
            twoMarkerHaplos[marker1num[a1]][1]++;
            twoMarkerHaplos[marker1num[a1]][2]++;
        } else {
            // Fully resolved pair: count both haplotypes directly.
            twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++;
            twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++;
        }
    }
    //another monomorphic marker check
    // If an entire row or column of the table is empty and there are no
    // double-hets, one marker carries no information: return the degenerate
    // result D'=1, LOD=0, r^2=0 rather than running EM.
    int r1, r2, c1, c2;
    r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2];
    r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2];
    c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1];
    c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2];
    if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){
        return new PairwiseLinkage(1,0,0,0,0,new double[0]);
    }
    //compute D Prime for this pair of markers.
    //return is a tab delimited string of d', lod, r^2, CI(low), CI(high)
    this.realCompsDone++;
    int i,count;
    //int j,k,itmp;
    int low_i = 0;
    int high_i = 0;
    double loglike, oldloglike;// meand, mean2d, sd;
    double tmp;//g,h,m,tmp,r;
    double num, denom1, denom2, denom, dprime;//, real_dprime;
    double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq;
    double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB;
    double total_prob, sum_prob;
    // Log-likelihood surface over D' in [0,1] sampled at 101 points (0.01 steps).
    double lsurface[] = new double[105];
    /* store arguments in externals and compute allele frequencies */
    // AA/AB/BA/BB presumably are constant indices 0..3 into the shared
    // haplotype arrays — TODO confirm against their declarations.
    known[AA]=twoMarkerHaplos[1][1];
    known[AB]=twoMarkerHaplos[1][2];
    known[BA]=twoMarkerHaplos[2][1];
    known[BB]=twoMarkerHaplos[2][2];
    unknownDH=doublehet;
    total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH));
    pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms;
    pB1 = 1.0-pA1;
    pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms;
    pB2 = 1.0-pA2;
    const_prob = 0.1;
    /* set initial conditions */
    // const_prob is set to 0.1 just above, so the first branch (linkage
    // equilibrium start) is dead here; the else branch always runs.
    if (const_prob < 0.00) {
public PairwiseLinkage computeDPrime(int pos1, int pos2){ long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ return null; } } compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier return null; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ return null; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1); byte b2 = 
((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { 
probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num 
= probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. 
of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
1,112,512
public PairwiseLinkage computeDPrime(int pos1, int pos2){ long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ return null; } } compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier return null; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ return null; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1); byte b2 = 
((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { 
probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num 
= probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. 
of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
public PairwiseLinkage computeDPrime(int pos1, int pos2){ long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ return null; } } compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier return null; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ return null; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1); byte b2 = 
((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { 
probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num 
= probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. 
of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
1,112,513
public PairwiseLinkage computeDPrime(int pos1, int pos2){ long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ return null; } } compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier return null; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ return null; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1); byte b2 = 
((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { 
probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num 
= probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. 
of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
public PairwiseLinkage computeDPrime(int pos1, int pos2){ long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ return null; } } compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier return null; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ return null; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1); byte b2 = 
((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 >= 5 && a2 >= 5) || (a1 >= 5 && !(a2 == b2)) || (a2 >= 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { 
probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num 
= probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. 
of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
1,112,514
/**
 * Computes pairwise linkage-disequilibrium statistics between the markers at
 * {@code pos1} and {@code pos2}: D', LOD (loglike1-loglike0), r^2 and the
 * 0.05/0.95 posterior bounds on D' per Gabriel et al. (2002).
 *
 * NOTE(review): mutates shared EM state ({@code known}, {@code probHaps},
 * {@code numHaps}, {@code unknownDH}, {@code total_chroms}, {@code const_prob})
 * and the {@code compsDone}/{@code realCompsDone} counters; statement order here
 * is load-bearing.
 *
 * @param pos1 index of the first marker
 * @param pos2 index of the second marker
 * @return the pairwise statistics, or {@code null} when the pair exceeds
 *         {@code maxdist} or either marker is monomorphic
 */
public PairwiseLinkage computeDPrime(int pos1, int pos2){
    // Skip pairs separated by more than maxdist (maxdist <= 0 disables the filter).
    long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition();
    if (maxdist > 0){
        if ((sep > maxdist || sep < negMaxdist)){
            return null;
        }
    }
    compsDone++;
    int doublehet = 0;
    // 3x3 two-marker haplotype count table; index 0 unused, 1/2 are recoded alleles.
    int[][] twoMarkerHaplos = new int[3][3];
    for (int i = 0; i < twoMarkerHaplos.length; i++){
        for (int j = 0; j < twoMarkerHaplos[i].length; j++){
            twoMarkerHaplos[i][j] = 0;
        }
    }
    doublehet = 0;
    //get the alleles for the markers
    int m1a1 = 0;
    int m1a2 = 0;
    int m2a1 = 0;
    int m2a2 = 0;
    int m1H = 0;  // number of code-5 (het) calls at marker 1
    int m2H = 0;  // number of code-5 (het) calls at marker 2
    for (int i = 0; i < chromosomes.size(); i++){
        byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1);
        byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        // Record first two distinct alleles, skipping missing (0) and het (5) codes.
        if (m1a1 > 0){
            if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1;
        } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1;
        if (m2a1 > 0){
            if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2;
        } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2;
        if (a1 == 5) m1H++;
        if (a2 == 5) m2H++;
    }
    //check for non-polymorphic markers
    if (m1a2==0){
        if (m1H==0){
            //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier
            return null;
        } else {
            // Second allele only seen inside hets: infer it as "the other one" of 1/2.
            if (m1a1 == 1){
                m1a2=2;
            } else {
                m1a2 = 1;
            }
        }
    }
    if (m2a2==0){
        if (m2H==0){
            return null;
        } else {
            if (m2a1 == 1){
                m2a2=2;
            } else {
                m2a2 = 1;
            }
        }
    }
    // Recode raw allele values into table indices 1/2.
    int[] marker1num = new int[5];
    int[] marker2num = new int[5];
    marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2;
    marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2;
    //iterate through all chromosomes in dataset
    for (int i = 0; i < chromosomes.size(); i++){
        //System.out.println(i + " " + pos1 + " " + pos2);
        //assign alleles for each of a pair of chromosomes at a marker to four variables
        // ++i below consumes the partner chromosome, so the loop walks chromosome pairs.
        byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1);
        byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1);
        byte b2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){
            //skip missing data
        } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes
        else if (a1 == 5){
            // Het at marker 1 only: both possible haplotypes carry the known marker-2 allele.
            twoMarkerHaplos[1][marker2num[a2]]++;
            twoMarkerHaplos[2][marker2num[a2]]++;
        } else if (a2 == 5){
            twoMarkerHaplos[marker1num[a1]][1]++;
            twoMarkerHaplos[marker1num[a1]][2]++;
        } else {
            twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++;
            twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++;
        }
    }
    //another monomorphic marker check
    int r1, r2, c1, c2;
    r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2];
    r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2];
    c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1];
    c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2];
    if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){
        return new PairwiseLinkage(1,0,0,0,0,new double[0]);
    }
    //compute D Prime for this pair of markers.
    //return is a tab delimited string of d', lod, r^2, CI(low), CI(high)
    this.realCompsDone++;
    int i,count;
    //int j,k,itmp;
    int low_i = 0;
    int high_i = 0;
    double loglike, oldloglike;// meand, mean2d, sd;
    double tmp;//g,h,m,tmp,r;
    double num, denom1, denom2, denom, dprime;//, real_dprime;
    double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq;
    double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB;
    double total_prob, sum_prob;
    double lsurface[] = new double[105];
    /* store arguments in externals and compute allele frequencies */
    known[AA]=twoMarkerHaplos[1][1];
    known[AB]=twoMarkerHaplos[1][2];
    known[BA]=twoMarkerHaplos[2][1];
    known[BB]=twoMarkerHaplos[2][2];
    unknownDH=doublehet;
    total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH));
    pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms;
    pB1 = 1.0-pA1;
    pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms;
    pB2 = 1.0-pA2;
    const_prob = 0.1;
    /* set initial conditions */
    if (const_prob < 0.00) {
        probHaps[AA]=pA1*pA2;
        probHaps[AB]=pA1*pB2;
        probHaps[BA]=pB1*pA2;
        probHaps[BB]=pB1*pB2;
    } else {
        probHaps[AA]=const_prob;
        probHaps[AB]=const_prob;
        probHaps[BA]=const_prob;
        probHaps[BB]=const_prob;;
        /* so that the first count step will produce an initial estimate without
           inferences (this should be closer and therefore speedier than assuming
           they are all at equal frequency) */
        count_haps(0);
        estimate_p();
    }
    /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */
    const_prob=0.0;
    count=1;
    loglike=-999999999.0;
    do {
        oldloglike=loglike;
        count_haps(count);
        loglike = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]);
        if (Math.abs(loglike-oldloglike) < TOLERANCE) break;
        estimate_p();
        count++;
    } while(count < 1000);
    /* in reality I've never seen it need more than 10 or so iterations to converge
       so this is really here just to keep it from running off into eternity */
    loglike1 = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]);
    loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2);
    num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA];
    if (num < 0) {
        /* flip matrix so we get the positive D' */
        /* flip AA with AB and BA with BB */
        tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp;
        tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp;
        /* flip frequency of second allele */
        tmp=pA2; pA2=pB2; pB2=tmp;
        /* flip counts in the same fashion as p's */
        tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp;
        tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp;
        /* num has now undergone a sign change */
        num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA];
        /* flip known array for likelihood computation */
        tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp;
        tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp;
    }
    denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]);
    denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]);
    if (denom1 < denom2) {
        denom = denom1;
    } else {
        denom = denom2;
    }
    dprime = num/denom;
    /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */
    rsq = num*num/(pA1*pB1*pA2*pB2);
    //real_dprime=dprime;
    // Likelihood surface over D' in [0,1] at steps of 0.01.
    for (i=0; i<=100; i++) {
        dpr = (double)i*0.01;
        tmpAA = dpr*denom + pA1*pA2;
        tmpAB = pA1-tmpAA;
        tmpBA = pA2-tmpAA;
        tmpBB = pB1-tmpBA;
        if (i==100) {
            /* one value will be 0 */
            if (tmpAA < 1e-10) tmpAA=1e-10;
            if (tmpAB < 1e-10) tmpAB=1e-10;
            if (tmpBA < 1e-10) tmpBA=1e-10;
            if (tmpBB < 1e-10) tmpBB=1e-10;
        }
        lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA);
    }
    /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior
       dist of D' - assumes a flat prior dist. of D' - someday we may be able to make
       this even more clever by adjusting given the distribution of observed D' values
       for any given distance after some large scale studies are complete */
    total_prob=sum_prob=0.0;
    for (i=0; i<=100; i++) {
        lsurface[i] -= loglike1;
        lsurface[i] = Math.pow(10.0,lsurface[i]);
        total_prob += lsurface[i];
    }
    // Lower 5% posterior bound of D'.
    for (i=0; i<=100; i++) {
        sum_prob += lsurface[i];
        if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) {
            low_i = i-1;
            break;
        }
    }
    // Upper 5% posterior bound, scanning from the top.
    sum_prob=0.0;
    for (i=100; i>=0; i--) {
        sum_prob += lsurface[i];
        if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) {
            high_i = i+1;
            break;
        }
    }
    if (high_i > 100){
        high_i = 100;
    }
    double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]};
    return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray);
}
public PairwiseLinkage computeDPrime(int pos1, int pos2){ long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ return null; } } compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier return null; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ return null; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1); byte b2 = 
((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 >= 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { 
probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num 
= probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. 
of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
1,112,515
/**
 * Computes pairwise linkage-disequilibrium statistics between the markers at
 * {@code pos1} and {@code pos2}: D', LOD (loglike1-loglike0), r^2 and the
 * 0.05/0.95 posterior bounds on D' per Gabriel et al. (2002).
 *
 * NOTE(review): mutates shared EM state ({@code known}, {@code probHaps},
 * {@code numHaps}, {@code unknownDH}, {@code total_chroms}, {@code const_prob})
 * and the {@code compsDone}/{@code realCompsDone} counters; statement order here
 * is load-bearing.
 *
 * @param pos1 index of the first marker
 * @param pos2 index of the second marker
 * @return the pairwise statistics, or {@code null} when the pair exceeds
 *         {@code maxdist} or either marker is monomorphic
 */
public PairwiseLinkage computeDPrime(int pos1, int pos2){
    // Skip pairs separated by more than maxdist (maxdist <= 0 disables the filter).
    long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition();
    if (maxdist > 0){
        if ((sep > maxdist || sep < negMaxdist)){
            return null;
        }
    }
    compsDone++;
    int doublehet = 0;
    // 3x3 two-marker haplotype count table; index 0 unused, 1/2 are recoded alleles.
    int[][] twoMarkerHaplos = new int[3][3];
    for (int i = 0; i < twoMarkerHaplos.length; i++){
        for (int j = 0; j < twoMarkerHaplos[i].length; j++){
            twoMarkerHaplos[i][j] = 0;
        }
    }
    doublehet = 0;
    //get the alleles for the markers
    int m1a1 = 0;
    int m1a2 = 0;
    int m2a1 = 0;
    int m2a2 = 0;
    int m1H = 0;  // number of code-5 (het) calls at marker 1
    int m2H = 0;  // number of code-5 (het) calls at marker 2
    for (int i = 0; i < chromosomes.size(); i++){
        byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1);
        byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        // Record first two distinct alleles, skipping missing (0) and het (5) codes.
        if (m1a1 > 0){
            if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1;
        } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1;
        if (m2a1 > 0){
            if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2;
        } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2;
        if (a1 == 5) m1H++;
        if (a2 == 5) m2H++;
    }
    //check for non-polymorphic markers
    if (m1a2==0){
        if (m1H==0){
            //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier
            return null;
        } else {
            // Second allele only seen inside hets: infer it as "the other one" of 1/2.
            if (m1a1 == 1){
                m1a2=2;
            } else {
                m1a2 = 1;
            }
        }
    }
    if (m2a2==0){
        if (m2H==0){
            return null;
        } else {
            if (m2a1 == 1){
                m2a2=2;
            } else {
                m2a2 = 1;
            }
        }
    }
    // Recode raw allele values into table indices 1/2.
    int[] marker1num = new int[5];
    int[] marker2num = new int[5];
    marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2;
    marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2;
    //iterate through all chromosomes in dataset
    for (int i = 0; i < chromosomes.size(); i++){
        //System.out.println(i + " " + pos1 + " " + pos2);
        //assign alleles for each of a pair of chromosomes at a marker to four variables
        // ++i below consumes the partner chromosome, so the loop walks chromosome pairs.
        byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1);
        byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1);
        byte b2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){
            //skip missing data
        } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes
        else if (a1 == 5){
            // Het at marker 1 only: both possible haplotypes carry the known marker-2 allele.
            twoMarkerHaplos[1][marker2num[a2]]++;
            twoMarkerHaplos[2][marker2num[a2]]++;
        } else if (a2 == 5){
            twoMarkerHaplos[marker1num[a1]][1]++;
            twoMarkerHaplos[marker1num[a1]][2]++;
        } else {
            twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++;
            twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++;
        }
    }
    //another monomorphic marker check
    int r1, r2, c1, c2;
    r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2];
    r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2];
    c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1];
    c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2];
    if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){
        return new PairwiseLinkage(1,0,0,0,0,new double[0]);
    }
    //compute D Prime for this pair of markers.
    //return is a tab delimited string of d', lod, r^2, CI(low), CI(high)
    this.realCompsDone++;
    int i,count;
    //int j,k,itmp;
    int low_i = 0;
    int high_i = 0;
    double loglike, oldloglike;// meand, mean2d, sd;
    double tmp;//g,h,m,tmp,r;
    double num, denom1, denom2, denom, dprime;//, real_dprime;
    double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq;
    double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB;
    double total_prob, sum_prob;
    double lsurface[] = new double[105];
    /* store arguments in externals and compute allele frequencies */
    known[AA]=twoMarkerHaplos[1][1];
    known[AB]=twoMarkerHaplos[1][2];
    known[BA]=twoMarkerHaplos[2][1];
    known[BB]=twoMarkerHaplos[2][2];
    unknownDH=doublehet;
    total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH));
    pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms;
    pB1 = 1.0-pA1;
    pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms;
    pB2 = 1.0-pA2;
    const_prob = 0.1;
    /* set initial conditions */
    if (const_prob < 0.00) {
        probHaps[AA]=pA1*pA2;
        probHaps[AB]=pA1*pB2;
        probHaps[BA]=pB1*pA2;
        probHaps[BB]=pB1*pB2;
    } else {
        probHaps[AA]=const_prob;
        probHaps[AB]=const_prob;
        probHaps[BA]=const_prob;
        probHaps[BB]=const_prob;;
        /* so that the first count step will produce an initial estimate without
           inferences (this should be closer and therefore speedier than assuming
           they are all at equal frequency) */
        count_haps(0);
        estimate_p();
    }
    /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */
    const_prob=0.0;
    count=1;
    loglike=-999999999.0;
    do {
        oldloglike=loglike;
        count_haps(count);
        loglike = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]);
        if (Math.abs(loglike-oldloglike) < TOLERANCE) break;
        estimate_p();
        count++;
    } while(count < 1000);
    /* in reality I've never seen it need more than 10 or so iterations to converge
       so this is really here just to keep it from running off into eternity */
    loglike1 = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]);
    loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2);
    num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA];
    if (num < 0) {
        /* flip matrix so we get the positive D' */
        /* flip AA with AB and BA with BB */
        tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp;
        tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp;
        /* flip frequency of second allele */
        tmp=pA2; pA2=pB2; pB2=tmp;
        /* flip counts in the same fashion as p's */
        tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp;
        tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp;
        /* num has now undergone a sign change */
        num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA];
        /* flip known array for likelihood computation */
        tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp;
        tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp;
    }
    denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]);
    denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]);
    if (denom1 < denom2) {
        denom = denom1;
    } else {
        denom = denom2;
    }
    dprime = num/denom;
    /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */
    rsq = num*num/(pA1*pB1*pA2*pB2);
    //real_dprime=dprime;
    // Likelihood surface over D' in [0,1] at steps of 0.01.
    for (i=0; i<=100; i++) {
        dpr = (double)i*0.01;
        tmpAA = dpr*denom + pA1*pA2;
        tmpAB = pA1-tmpAA;
        tmpBA = pA2-tmpAA;
        tmpBB = pB1-tmpBA;
        if (i==100) {
            /* one value will be 0 */
            if (tmpAA < 1e-10) tmpAA=1e-10;
            if (tmpAB < 1e-10) tmpAB=1e-10;
            if (tmpBA < 1e-10) tmpBA=1e-10;
            if (tmpBB < 1e-10) tmpBB=1e-10;
        }
        lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA);
    }
    /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior
       dist of D' - assumes a flat prior dist. of D' - someday we may be able to make
       this even more clever by adjusting given the distribution of observed D' values
       for any given distance after some large scale studies are complete */
    total_prob=sum_prob=0.0;
    for (i=0; i<=100; i++) {
        lsurface[i] -= loglike1;
        lsurface[i] = Math.pow(10.0,lsurface[i]);
        total_prob += lsurface[i];
    }
    // Lower 5% posterior bound of D'.
    for (i=0; i<=100; i++) {
        sum_prob += lsurface[i];
        if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) {
            low_i = i-1;
            break;
        }
    }
    // Upper 5% posterior bound, scanning from the top.
    sum_prob=0.0;
    for (i=100; i>=0; i--) {
        sum_prob += lsurface[i];
        if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) {
            high_i = i+1;
            break;
        }
    }
    if (high_i > 100){
        high_i = 100;
    }
    double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]};
    return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray);
}
/**
 * Computes pairwise linkage-disequilibrium statistics (D', LOD, r^2 and the
 * Gabriel et al. 2002 style D' confidence bounds) for the markers at pos1 and
 * pos2, using an EM fit of the four two-marker haplotype frequencies.
 *
 * Side effects: increments compsDone/realCompsDone and overwrites the shared
 * EM working state (known, numHaps, probHaps, unknownDH, total_chroms,
 * const_prob) consumed by count_haps()/estimate_p() — this method is not
 * safe to call concurrently.
 *
 * @param pos1 index of the first marker
 * @param pos2 index of the second marker
 * @return the pairwise statistics, or null when the markers are farther apart
 *         than maxdist or either marker is monomorphic
 */
public PairwiseLinkage computeDPrime(int pos1, int pos2){
    // skip marker pairs separated by more than the user-configured maximum distance
    long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition();
    if (maxdist > 0){
        if ((sep > maxdist || sep < negMaxdist)){
            return null;
        }
    }
    compsDone++;
    int doublehet = 0;
    // 3x3 table of resolved two-marker haplotype counts; rows/cols 1 and 2 are
    // the compact allele codes for marker1/marker2, index 0 is unused
    int[][] twoMarkerHaplos = new int[3][3];
    for (int i = 0; i < twoMarkerHaplos.length; i++){
        for (int j = 0; j < twoMarkerHaplos[i].length; j++){
            twoMarkerHaplos[i][j] = 0;
        }
    }
    doublehet = 0;
    //get the alleles for the markers
    // m1a1/m1a2 = the two raw allele codes seen at marker 1 (likewise m2*);
    // m1H/m2H count unresolved heterozygotes (genotype code 5); 0 = missing
    int m1a1 = 0;
    int m1a2 = 0;
    int m2a1 = 0;
    int m2a2 = 0;
    int m1H = 0;
    int m2H = 0;
    for (int i = 0; i < chromosomes.size(); i++){
        byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1);
        byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        if (m1a1 > 0){
            if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1;
        } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1;
        if (m2a1 > 0){
            if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2;
        } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2;
        if (a1 == 5) m1H++;
        if (a2 == 5) m2H++;
    }
    //check for non-polymorphic markers
    if (m1a2==0){
        if (m1H==0){
            //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier
            return null;
        } else {
            // only hets observed for the second allele: infer it as "the other one"
            if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; }
        }
    }
    if (m2a2==0){
        if (m2H==0){
            return null;
        } else {
            if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; }
        }
    }
    // lookup tables mapping raw allele codes (1-4) to compact codes 1/2
    int[] marker1num = new int[5];
    int[] marker2num = new int[5];
    marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2;
    marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2;
    //iterate through all chromosomes in dataset
    // chromosomes are stored in pairs (i, i+1) per individual; the ++i below
    // consumes the partner chromosome in the same loop pass
    for (int i = 0; i < chromosomes.size(); i++){
        //System.out.println(i + " " + pos1 + " " + pos2);
        //assign alleles for each of a pair of chromosomes at a marker to four variables
        byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1);
        byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1);
        byte b2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2);
        if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){
            //skip missing data
        } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++;
        //find doublehets and resolved haplotypes
        else if (a1 == 5){
            // het at marker 1 only: phase ambiguous there, so count both rows
            twoMarkerHaplos[1][marker2num[a2]]++;
            twoMarkerHaplos[2][marker2num[a2]]++;
        } else if (a2 >= 5){
            // NOTE(review): asymmetric with the `a1 == 5` test above; genotype
            // codes appear to range 0-5 so `>= 5` behaves like `== 5` — confirm
            twoMarkerHaplos[marker1num[a1]][1]++;
            twoMarkerHaplos[marker1num[a1]][2]++;
        } else {
            twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++;
            twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++;
        }
    }
    //another monomorphic marker check
    // r1/r2 = marker-1 allele row totals, c1/c2 = marker-2 allele column totals
    int r1, r2, c1, c2;
    r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2];
    r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2];
    c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1];
    c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2];
    if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){
        // effectively monomorphic table with no double hets: degenerate result
        return new PairwiseLinkage(1,0,0,0,0,new double[0]);
    }
    //compute D Prime for this pair of markers; the statistics (d', lod, r^2,
    //CI low, CI high) are packaged in the PairwiseLinkage returned below
    this.realCompsDone++;
    int i,count;
    //int j,k,itmp;
    int low_i = 0;
    int high_i = 0;
    double loglike, oldloglike;// meand, mean2d, sd;
    double tmp;//g,h,m,tmp,r;
    double num, denom1, denom2, denom, dprime;//, real_dprime;
    double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq;
    double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB;
    double total_prob, sum_prob;
    // likelihood surface over D' in steps of 0.01 (indices 0..100 used)
    double lsurface[] = new double[105];
    /* store arguments in externals and compute allele frequencies */
    known[AA]=twoMarkerHaplos[1][1];
    known[AB]=twoMarkerHaplos[1][2];
    known[BA]=twoMarkerHaplos[2][1];
    known[BB]=twoMarkerHaplos[2][2];
    unknownDH=doublehet;
    // each double het contributes two chromosomes of unresolved phase
    total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH));
    pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms;
    pB1 = 1.0-pA1;
    pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms;
    pB2 = 1.0-pA2;
    const_prob = 0.1;
    /* set initial conditions */
    if (const_prob < 0.00) {
        // dead branch given const_prob = 0.1 just above: linkage-equilibrium start
        probHaps[AA]=pA1*pA2;
        probHaps[AB]=pA1*pB2;
        probHaps[BA]=pB1*pA2;
        probHaps[BB]=pB1*pB2;
    } else {
        probHaps[AA]=const_prob;
        probHaps[AB]=const_prob;
        probHaps[BA]=const_prob;
        probHaps[BB]=const_prob;; // NOTE(review): stray second ';' is an empty statement (harmless)
        /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */
        count_haps(0);
        estimate_p();
    }
    /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */
    const_prob=0.0;
    count=1;
    loglike=-999999999.0;
    do {
        oldloglike=loglike;
        count_haps(count);
        loglike = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]);
        // stop when the log-likelihood has converged
        if (Math.abs(loglike-oldloglike) < TOLERANCE) break;
        estimate_p();
        count++;
    } while(count < 1000);
    /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */
    // loglike1 = likelihood at the EM estimate; loglike0 = under linkage equilibrium
    loglike1 = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]);
    loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2);
    num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA];
    if (num < 0) {
        /* flip matrix so we get the positive D' */
        /* flip AA with AB and BA with BB */
        tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp;
        tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp;
        /* flip frequency of second allele */
        tmp=pA2; pA2=pB2; pB2=tmp;
        /* flip counts in the same fashion as p's */
        tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp;
        tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp;
        /* num has now undergone a sign change */
        num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA];
        /* flip known array for likelihood computation */
        tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp;
        tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp;
    }
    // D' normalizer: the smaller of the two marginal products bounds |D|
    denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]);
    denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]);
    if (denom1 < denom2) {
        denom = denom1;
    } else {
        denom = denom2;
    }
    dprime = num/denom;
    /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */
    rsq = num*num/(pA1*pB1*pA2*pB2);
    //real_dprime=dprime;
    // evaluate the likelihood at each candidate D' from 0.00 to 1.00
    for (i=0; i<=100; i++) {
        dpr = (double)i*0.01;
        tmpAA = dpr*denom + pA1*pA2;
        tmpAB = pA1-tmpAA;
        tmpBA = pA2-tmpAA;
        tmpBB = pB1-tmpBA;
        if (i==100) {
            /* one value will be 0 */
            // clamp to avoid log10(0) at the D'=1 boundary
            if (tmpAA < 1e-10) tmpAA=1e-10;
            if (tmpAB < 1e-10) tmpAB=1e-10;
            if (tmpBA < 1e-10) tmpBA=1e-10;
            if (tmpBB < 1e-10) tmpBB=1e-10;
        }
        lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA);
    }
    /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */
    total_prob=sum_prob=0.0;
    // normalize the surface relative to the maximum-likelihood value and
    // convert back from log10 to probability mass
    for (i=0; i<=100; i++) {
        lsurface[i] -= loglike1;
        lsurface[i] = Math.pow(10.0,lsurface[i]);
        total_prob += lsurface[i];
    }
    // lower 5% bound: first index where the cumulative mass crosses 0.05
    for (i=0; i<=100; i++) {
        sum_prob += lsurface[i];
        if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) {
            low_i = i-1;
            break;
        }
    }
    sum_prob=0.0;
    // upper 5% bound: scan from the top of the surface downward
    for (i=100; i>=0; i--) {
        sum_prob += lsurface[i];
        if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) {
            high_i = i+1;
            break;
        }
    }
    if (high_i > 100){
        high_i = 100;
    }
    // NOTE(review): frequency array order is AA, AB, BB, BA (BB before BA)
    double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]};
    return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray);
}
1,112,516
/**
 * Runs EM haplotype-frequency estimation on each marker block and returns,
 * per block, the haplotypes whose estimated frequency (in percent) exceeds
 * hapthresh.
 *
 * @param blocks    Vector of int[] arrays, each holding the filtered-marker
 *                  indices making up one block
 * @param hapthresh frequency cutoff in percent; haplotypes at or below it are
 *                  dropped from the result
 * @return one Haplotype[] per input block, in block order
 * @throws HaploViewException propagated from the EM engine
 */
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{
    //TODO: output indiv hap estimates
    Haplotype[][] results = new Haplotype[blocks.size()][];
    //String raw = new String();
    //String currentLine;
    // progress-reporting fields read elsewhere in the class
    this.totalBlocks = blocks.size();
    this.blocksDone = 0;
    for (int k = 0; k < blocks.size(); k++){
        this.blocksDone++;
        int[] theBlock = (int[])blocks.elementAt(k);
        // per-marker bookkeeping:
        //   hetcount[j]           = unresolved heterozygotes (code 5) at marker j
        //   loc[j][allele]        = observed count of raw allele code 1-4
        //   convert[j][allele]    = raw allele -> compact code 1/2 (0 if absent)
        //   unconvert[j][compact] = compact code -> raw allele (8 = unused slot)
        int[] hetcount = new int[theBlock.length];
        int[][] loc = new int[theBlock.length][5];
        int[][] convert = new int[theBlock.length][5];
        int[][] unconvert = new int[theBlock.length][5];
        //int totalHaps = 0;
        //parse genotypes for unresolved heterozygotes
        for (int i = 0; i < chromosomes.size(); i++){
            Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i);
            for (int j = 0; j < theBlock.length; j++){
                byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                if (theGeno == 5){
                    hetcount[j]++;
                } else {
                    loc[j][theGeno]++;
                }
            }
            //totalHaps ++;
        }
        // build the compact allele coding per marker; het counts are split
        // evenly between the observed alleles
        for (int j = 0; j < theBlock.length; j++){
            int a = 1;
            for (int m = 1; m <= 4; m++){
                if (loc[j][m] > 0){
                    convert[j][m]=a;
                    unconvert[j][a]=m;
                    loc[j][m]+=(hetcount[j]/2);
                    a++;
                } else {
                    convert[j][m] = 0;
                    unconvert[j][a] = 8;
                }
            }
            // monomorphic marker: flag the second allele slot as unused (8)
            if (unconvert[j][2] == 0) unconvert[j][2] = 8;
        }
        StringBuffer hapstr = new StringBuffer(theBlock.length);
        Vector inputHaploVector = new Vector();
        // chromosomes are stored in pairs (i, i+1) per individual; the ++i
        // below consumes the partner chromosome in the same pass
        for (int i = 0; i < chromosomes.size(); i++){
            Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i);
            Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i);
            int missing=0;
            //int dhet=0;
            for (int j = 0; j < theBlock.length; j++){
                byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]);
                if(theGeno == 0 || nextGeno == 0) missing++;
            }
            // drop individuals missing more than half the block or more than
            // missingLimit markers; otherwise emit both chromosome strings,
            // using 'h' to mark an unresolved het for the EM input
            if (!(missing > theBlock.length/2 || missing > missingLimit)){
                for (int j = 0; j < theBlock.length; j++){
                    byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                    if (theGeno == 5){
                        hapstr.append("h");
                    } else {
                        hapstr.append(convert[j][theGeno]);
                    }
                }
                inputHaploVector.add(hapstr.toString());
                hapstr = new StringBuffer(theBlock.length);
                for (int j = 0; j < theBlock.length; j++){
                    byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]);
                    if (nextGeno == 5){
                        hapstr.append("h");
                    }else{
                        hapstr.append(convert[j][nextGeno]);
                    }
                }
                inputHaploVector.add(hapstr.toString());
                hapstr = new StringBuffer(theBlock.length);
            }
        }
        String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]);
        //break up large blocks if needed
        int[] block_size;
        if (theBlock.length < 9){
            block_size = new int[1];
            block_size[0] = theBlock.length;
        } else {
            //some base-8 arithmetic
            // split into chunks of 8; a nonzero remainder is merged with the
            // last full chunk and re-split into two roughly equal pieces
            int ones = theBlock.length%8;
            int eights = (theBlock.length - ones)/8;
            if (ones == 0){
                block_size = new int[eights];
                for (int i = 0; i < eights; i++){
                    block_size[i]=8;
                }
            } else {
                block_size = new int[eights+1];
                for (int i = 0; i < eights-1; i++){
                    block_size[i]=8;
                }
                block_size[eights-1] = (8+ones)/2;
                block_size[eights] = 8+ones-block_size[eights-1];
            }
        }
        String EMreturn = new String("");
        int[] num_haplos_present = new int[1];
        Vector haplos_present = new Vector();
        Vector haplo_freq = new Vector();
        char[][] input_haplos2 = new char[input_haplos.length][];
        for (int j = 0; j < input_haplos.length; j++){
            input_haplos2[j] = input_haplos[j].toCharArray();
        }
        //kirby patch
        EM theEM = new EM();
        theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0);
        // flatten EM output into "hap\tfreq\t..." pairs, then re-tokenize below
        for (int j = 0; j < haplos_present.size(); j++){
            EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t";
        }
        StringTokenizer st = new StringTokenizer(EMreturn);
        int p = 0;
        Haplotype[] tempArray = new Haplotype[st.countTokens()/2];
        while(st.hasMoreTokens()){
            String aString = st.nextToken();
            // translate each compact-coded haplotype back to raw allele codes
            int[] genos = new int[aString.length()];
            for (int j = 0; j < aString.length(); j++){
                genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))];
            }
            double tempPerc = Double.parseDouble(st.nextToken());
            // keep only haplotypes whose frequency (as a percent) beats the threshold
            if (tempPerc*100 > hapthresh){
                tempArray[p] = new Haplotype(genos, tempPerc, theBlock);
                p++;
            }
        }
        //make the results array only large enough to hold haps
        //which pass threshold above
        results[k] = new Haplotype[p];
        for (int z = 0; z < p; z++){
            results[k][z] = tempArray[z];
        }
    }
    return results;
}
/**
 * Runs EM haplotype-frequency estimation on each marker block and returns,
 * per block, the haplotypes whose estimated frequency (in percent) exceeds
 * hapthresh.
 *
 * @param blocks    Vector of int[] arrays, each holding the filtered-marker
 *                  indices making up one block
 * @param hapthresh frequency cutoff in percent; haplotypes at or below it are
 *                  dropped from the result
 * @return one Haplotype[] per input block, in block order
 * @throws HaploViewException propagated from the EM engine
 */
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{
    //TODO: output indiv hap estimates
    Haplotype[][] results = new Haplotype[blocks.size()][];
    //String raw = new String();
    //String currentLine;
    // progress-reporting fields read elsewhere in the class
    this.totalBlocks = blocks.size();
    this.blocksDone = 0;
    for (int k = 0; k < blocks.size(); k++){
        this.blocksDone++;
        int[] theBlock = (int[])blocks.elementAt(k);
        // per-marker bookkeeping:
        //   hetcount[j]           = unresolved heterozygotes (code 5) at marker j
        //   loc[j][allele]        = observed count of raw allele code 1-4
        //   convert[j][allele]    = raw allele -> compact code 1/2 (0 if absent)
        //   unconvert[j][compact] = compact code -> raw allele (8 = unused slot)
        int[] hetcount = new int[theBlock.length];
        int[][] loc = new int[theBlock.length][5];
        int[][] convert = new int[theBlock.length][5];
        int[][] unconvert = new int[theBlock.length][5];
        //int totalHaps = 0;
        //parse genotypes for unresolved heterozygotes
        for (int i = 0; i < chromosomes.size(); i++){
            Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i);
            for (int j = 0; j < theBlock.length; j++){
                byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                if (theGeno == 5){
                    hetcount[j]++;
                } else {
                    loc[j][theGeno]++;
                }
            }
            //totalHaps ++;
        }
        // build the compact allele coding per marker; het counts are split
        // evenly between the observed alleles
        for (int j = 0; j < theBlock.length; j++){
            int a = 1;
            for (int m = 1; m <= 4; m++){
                if (loc[j][m] > 0){
                    convert[j][m]=a;
                    unconvert[j][a]=m;
                    loc[j][m]+=(hetcount[j]/2);
                    a++;
                } else {
                    convert[j][m] = 0;
                    unconvert[j][a] = 8;
                }
            }
            // monomorphic marker: flag the second allele slot as unused (8)
            if (unconvert[j][2] == 0) unconvert[j][2] = 8;
        }
        StringBuffer hapstr = new StringBuffer(theBlock.length);
        Vector inputHaploVector = new Vector();
        // chromosomes are stored in pairs (i, i+1) per individual; the ++i
        // below consumes the partner chromosome in the same pass
        for (int i = 0; i < chromosomes.size(); i++){
            Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i);
            Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i);
            int missing=0;
            //int dhet=0;
            for (int j = 0; j < theBlock.length; j++){
                byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]);
                if(theGeno == 0 || nextGeno == 0) missing++;
            }
            // drop individuals missing more than half the block or more than
            // missingLimit markers; otherwise emit both chromosome strings,
            // using 'h' to mark an unresolved het for the EM input
            if (!(missing > theBlock.length/2 || missing > missingLimit)){
                for (int j = 0; j < theBlock.length; j++){
                    byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                    if (theGeno == 5){
                        hapstr.append("h");
                    } else {
                        hapstr.append(convert[j][theGeno]);
                    }
                }
                inputHaploVector.add(hapstr.toString());
                hapstr = new StringBuffer(theBlock.length);
                for (int j = 0; j < theBlock.length; j++){
                    byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]);
                    if (nextGeno == 5){
                        hapstr.append("h");
                    }else{
                        hapstr.append(convert[j][nextGeno]);
                    }
                }
                inputHaploVector.add(hapstr.toString());
                hapstr = new StringBuffer(theBlock.length);
            }
        }
        String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]);
        //break up large blocks if needed
        int[] block_size;
        if (theBlock.length < 9){
            block_size = new int[1];
            block_size[0] = theBlock.length;
        } else {
            //some base-8 arithmetic
            // split into chunks of 8; a nonzero remainder is merged with the
            // last full chunk and re-split into two roughly equal pieces
            int ones = theBlock.length%8;
            int eights = (theBlock.length - ones)/8;
            if (ones == 0){
                block_size = new int[eights];
                for (int i = 0; i < eights; i++){
                    block_size[i]=8;
                }
            } else {
                block_size = new int[eights+1];
                for (int i = 0; i < eights-1; i++){
                    block_size[i]=8;
                }
                block_size[eights-1] = (8+ones)/2;
                block_size[eights] = 8+ones-block_size[eights-1];
            }
        }
        String EMreturn = new String("");
        int[] num_haplos_present = new int[1];
        Vector haplos_present = new Vector();
        Vector haplo_freq = new Vector();
        char[][] input_haplos2 = new char[input_haplos.length][];
        for (int j = 0; j < input_haplos.length; j++){
            input_haplos2[j] = input_haplos[j].toCharArray();
        }
        //kirby patch
        EM theEM = new EM();
        theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0);
        // flatten EM output into "hap\tfreq\t..." pairs, then re-tokenize below
        for (int j = 0; j < haplos_present.size(); j++){
            EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t";
        }
        StringTokenizer st = new StringTokenizer(EMreturn);
        int p = 0;
        Haplotype[] tempArray = new Haplotype[st.countTokens()/2];
        while(st.hasMoreTokens()){
            String aString = st.nextToken();
            // translate each compact-coded haplotype back to raw allele codes
            int[] genos = new int[aString.length()];
            for (int j = 0; j < aString.length(); j++){
                genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))];
            }
            double tempPerc = Double.parseDouble(st.nextToken());
            // keep only haplotypes whose frequency (as a percent) beats the threshold
            if (tempPerc*100 > hapthresh){
                tempArray[p] = new Haplotype(genos, tempPerc, theBlock);
                p++;
            }
        }
        //make the results array only large enough to hold haps
        //which pass threshold above
        results[k] = new Haplotype[p];
        for (int z = 0; z < p; z++){
            results[k][z] = tempArray[z];
        }
    }
    return results;
}
1,112,517
/**
 * Runs EM haplotype-frequency estimation on each marker block and returns,
 * per block, the haplotypes whose estimated frequency (in percent) exceeds
 * hapthresh.
 *
 * @param blocks    Vector of int[] arrays, each holding the filtered-marker
 *                  indices making up one block
 * @param hapthresh frequency cutoff in percent; haplotypes at or below it are
 *                  dropped from the result
 * @return one Haplotype[] per input block, in block order
 * @throws HaploViewException propagated from the EM engine
 */
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{
    //TODO: output indiv hap estimates
    Haplotype[][] results = new Haplotype[blocks.size()][];
    //String raw = new String();
    //String currentLine;
    // progress-reporting fields read elsewhere in the class
    this.totalBlocks = blocks.size();
    this.blocksDone = 0;
    for (int k = 0; k < blocks.size(); k++){
        this.blocksDone++;
        int[] theBlock = (int[])blocks.elementAt(k);
        // per-marker bookkeeping:
        //   hetcount[j]           = unresolved heterozygotes (code 5) at marker j
        //   loc[j][allele]        = observed count of raw allele code 1-4
        //   convert[j][allele]    = raw allele -> compact code 1/2 (0 if absent)
        //   unconvert[j][compact] = compact code -> raw allele (8 = unused slot)
        int[] hetcount = new int[theBlock.length];
        int[][] loc = new int[theBlock.length][5];
        int[][] convert = new int[theBlock.length][5];
        int[][] unconvert = new int[theBlock.length][5];
        //int totalHaps = 0;
        //parse genotypes for unresolved heterozygotes
        for (int i = 0; i < chromosomes.size(); i++){
            Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i);
            for (int j = 0; j < theBlock.length; j++){
                byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                if (theGeno == 5){
                    hetcount[j]++;
                } else {
                    loc[j][theGeno]++;
                }
            }
            //totalHaps ++;
        }
        // build the compact allele coding per marker; het counts are split
        // evenly between the observed alleles
        for (int j = 0; j < theBlock.length; j++){
            int a = 1;
            for (int m = 1; m <= 4; m++){
                if (loc[j][m] > 0){
                    convert[j][m]=a;
                    unconvert[j][a]=m;
                    loc[j][m]+=(hetcount[j]/2);
                    a++;
                } else {
                    convert[j][m] = 0;
                    unconvert[j][a] = 8;
                }
            }
            // monomorphic marker: flag the second allele slot as unused (8)
            if (unconvert[j][2] == 0) unconvert[j][2] = 8;
        }
        StringBuffer hapstr = new StringBuffer(theBlock.length);
        Vector inputHaploVector = new Vector();
        // chromosomes are stored in pairs (i, i+1) per individual; the ++i
        // below consumes the partner chromosome in the same pass
        for (int i = 0; i < chromosomes.size(); i++){
            Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i);
            Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i);
            int missing=0;
            //int dhet=0;
            for (int j = 0; j < theBlock.length; j++){
                byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]);
                if(theGeno == 0 || nextGeno == 0) missing++;
            }
            // drop individuals missing more than half the block or more than
            // missingLimit markers; otherwise emit both chromosome strings,
            // using 'h' to mark an unresolved het for the EM input
            if (!(missing > theBlock.length/2 || missing > missingLimit)){
                for (int j = 0; j < theBlock.length; j++){
                    byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                    if (theGeno == 5){
                        hapstr.append("h");
                    } else {
                        hapstr.append(convert[j][theGeno]);
                    }
                }
                inputHaploVector.add(hapstr.toString());
                hapstr = new StringBuffer(theBlock.length);
                for (int j = 0; j < theBlock.length; j++){
                    byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]);
                    if (nextGeno == 5){
                        hapstr.append("h");
                    }else{
                        hapstr.append(convert[j][nextGeno]);
                    }
                }
                inputHaploVector.add(hapstr.toString());
                hapstr = new StringBuffer(theBlock.length);
            }
        }
        String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]);
        //break up large blocks if needed
        int[] block_size;
        if (theBlock.length < 9){
            block_size = new int[1];
            block_size[0] = theBlock.length;
        } else {
            //some base-8 arithmetic
            // split into chunks of 8; a nonzero remainder is merged with the
            // last full chunk and re-split into two roughly equal pieces
            int ones = theBlock.length%8;
            int eights = (theBlock.length - ones)/8;
            if (ones == 0){
                block_size = new int[eights];
                for (int i = 0; i < eights; i++){
                    block_size[i]=8;
                }
            } else {
                block_size = new int[eights+1];
                for (int i = 0; i < eights-1; i++){
                    block_size[i]=8;
                }
                block_size[eights-1] = (8+ones)/2;
                block_size[eights] = 8+ones-block_size[eights-1];
            }
        }
        String EMreturn = new String("");
        int[] num_haplos_present = new int[1];
        Vector haplos_present = new Vector();
        Vector haplo_freq = new Vector();
        char[][] input_haplos2 = new char[input_haplos.length][];
        for (int j = 0; j < input_haplos.length; j++){
            input_haplos2[j] = input_haplos[j].toCharArray();
        }
        //kirby patch
        EM theEM = new EM();
        theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0);
        // flatten EM output into "hap\tfreq\t..." pairs, then re-tokenize below
        for (int j = 0; j < haplos_present.size(); j++){
            EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t";
        }
        StringTokenizer st = new StringTokenizer(EMreturn);
        int p = 0;
        Haplotype[] tempArray = new Haplotype[st.countTokens()/2];
        while(st.hasMoreTokens()){
            String aString = st.nextToken();
            // translate each compact-coded haplotype back to raw allele codes
            int[] genos = new int[aString.length()];
            for (int j = 0; j < aString.length(); j++){
                genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))];
            }
            double tempPerc = Double.parseDouble(st.nextToken());
            // keep only haplotypes whose frequency (as a percent) beats the threshold
            if (tempPerc*100 > hapthresh){
                tempArray[p] = new Haplotype(genos, tempPerc, theBlock);
                p++;
            }
        }
        //make the results array only large enough to hold haps
        //which pass threshold above
        results[k] = new Haplotype[p];
        for (int z = 0; z < p; z++){
            results[k][z] = tempArray[z];
        }
    }
    return results;
}
/**
 * Runs EM haplotype-frequency estimation on each marker block and returns,
 * per block, the haplotypes whose estimated frequency (in percent) exceeds
 * hapthresh.
 *
 * @param blocks    Vector of int[] arrays, each holding the filtered-marker
 *                  indices making up one block
 * @param hapthresh frequency cutoff in percent; haplotypes at or below it are
 *                  dropped from the result
 * @return one Haplotype[] per input block, in block order
 * @throws HaploViewException propagated from the EM engine
 */
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{
    //TODO: output indiv hap estimates
    Haplotype[][] results = new Haplotype[blocks.size()][];
    //String raw = new String();
    //String currentLine;
    // progress-reporting fields read elsewhere in the class
    this.totalBlocks = blocks.size();
    this.blocksDone = 0;
    for (int k = 0; k < blocks.size(); k++){
        this.blocksDone++;
        int[] theBlock = (int[])blocks.elementAt(k);
        // per-marker bookkeeping:
        //   hetcount[j]           = unresolved heterozygotes (code 5) at marker j
        //   loc[j][allele]        = observed count of raw allele code 1-4
        //   convert[j][allele]    = raw allele -> compact code 1/2 (0 if absent)
        //   unconvert[j][compact] = compact code -> raw allele (8 = unused slot)
        int[] hetcount = new int[theBlock.length];
        int[][] loc = new int[theBlock.length][5];
        int[][] convert = new int[theBlock.length][5];
        int[][] unconvert = new int[theBlock.length][5];
        //int totalHaps = 0;
        //parse genotypes for unresolved heterozygotes
        for (int i = 0; i < chromosomes.size(); i++){
            Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i);
            for (int j = 0; j < theBlock.length; j++){
                byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                if (theGeno == 5){
                    hetcount[j]++;
                } else {
                    loc[j][theGeno]++;
                }
            }
            //totalHaps ++;
        }
        // build the compact allele coding per marker; het counts are split
        // evenly between the observed alleles
        for (int j = 0; j < theBlock.length; j++){
            int a = 1;
            for (int m = 1; m <= 4; m++){
                if (loc[j][m] > 0){
                    convert[j][m]=a;
                    unconvert[j][a]=m;
                    loc[j][m]+=(hetcount[j]/2);
                    a++;
                } else {
                    convert[j][m] = 0;
                    unconvert[j][a] = 8;
                }
            }
            // monomorphic marker: flag the second allele slot as unused (8)
            if (unconvert[j][2] == 0) unconvert[j][2] = 8;
        }
        StringBuffer hapstr = new StringBuffer(theBlock.length);
        Vector inputHaploVector = new Vector();
        // chromosomes are stored in pairs (i, i+1) per individual; the ++i
        // below consumes the partner chromosome in the same pass
        for (int i = 0; i < chromosomes.size(); i++){
            Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i);
            Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i);
            int missing=0;
            //int dhet=0;
            for (int j = 0; j < theBlock.length; j++){
                byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]);
                if(theGeno == 0 || nextGeno == 0) missing++;
            }
            // drop individuals missing more than half the block or more than
            // missingLimit markers; otherwise emit both chromosome strings,
            // using 'h' to mark an unresolved het for the EM input
            if (!(missing > theBlock.length/2 || missing > missingLimit)){
                for (int j = 0; j < theBlock.length; j++){
                    byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                    if (theGeno == 5){
                        hapstr.append("h");
                    } else {
                        hapstr.append(convert[j][theGeno]);
                    }
                }
                inputHaploVector.add(hapstr.toString());
                hapstr = new StringBuffer(theBlock.length);
                for (int j = 0; j < theBlock.length; j++){
                    byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]);
                    if (nextGeno == 5){
                        hapstr.append("h");
                    }else{
                        hapstr.append(convert[j][nextGeno]);
                    }
                }
                inputHaploVector.add(hapstr.toString());
                hapstr = new StringBuffer(theBlock.length);
            }
        }
        String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]);
        //break up large blocks if needed
        int[] block_size;
        if (theBlock.length < 9){
            block_size = new int[1];
            block_size[0] = theBlock.length;
        } else {
            //some base-8 arithmetic
            // split into chunks of 8; a nonzero remainder is merged with the
            // last full chunk and re-split into two roughly equal pieces
            int ones = theBlock.length%8;
            int eights = (theBlock.length - ones)/8;
            if (ones == 0){
                block_size = new int[eights];
                for (int i = 0; i < eights; i++){
                    block_size[i]=8;
                }
            } else {
                block_size = new int[eights+1];
                for (int i = 0; i < eights-1; i++){
                    block_size[i]=8;
                }
                block_size[eights-1] = (8+ones)/2;
                block_size[eights] = 8+ones-block_size[eights-1];
            }
        }
        String EMreturn = new String("");
        int[] num_haplos_present = new int[1];
        Vector haplos_present = new Vector();
        Vector haplo_freq = new Vector();
        char[][] input_haplos2 = new char[input_haplos.length][];
        for (int j = 0; j < input_haplos.length; j++){
            input_haplos2[j] = input_haplos[j].toCharArray();
        }
        //kirby patch
        EM theEM = new EM();
        theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0);
        // flatten EM output into "hap\tfreq\t..." pairs, then re-tokenize below
        for (int j = 0; j < haplos_present.size(); j++){
            EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t";
        }
        StringTokenizer st = new StringTokenizer(EMreturn);
        int p = 0;
        Haplotype[] tempArray = new Haplotype[st.countTokens()/2];
        while(st.hasMoreTokens()){
            String aString = st.nextToken();
            // translate each compact-coded haplotype back to raw allele codes
            int[] genos = new int[aString.length()];
            for (int j = 0; j < aString.length(); j++){
                genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))];
            }
            double tempPerc = Double.parseDouble(st.nextToken());
            // keep only haplotypes whose frequency (as a percent) beats the threshold
            if (tempPerc*100 > hapthresh){
                tempArray[p] = new Haplotype(genos, tempPerc, theBlock);
                p++;
            }
        }
        //make the results array only large enough to hold haps
        //which pass threshold above
        results[k] = new Haplotype[p];
        for (int z = 0; z < p; z++){
            results[k][z] = tempArray[z];
        }
    }
    return results;
}
1,112,518
/**
 * Runs EM haplotype-frequency estimation on each marker block and returns,
 * per block, the haplotypes whose estimated frequency (in percent) exceeds
 * hapthresh.
 *
 * @param blocks    Vector of int[] arrays, each holding the filtered-marker
 *                  indices making up one block
 * @param hapthresh frequency cutoff in percent; haplotypes at or below it are
 *                  dropped from the result
 * @return one Haplotype[] per input block, in block order
 * @throws HaploViewException propagated from the EM engine
 */
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{
    //TODO: output indiv hap estimates
    Haplotype[][] results = new Haplotype[blocks.size()][];
    //String raw = new String();
    //String currentLine;
    // progress-reporting fields read elsewhere in the class
    this.totalBlocks = blocks.size();
    this.blocksDone = 0;
    for (int k = 0; k < blocks.size(); k++){
        this.blocksDone++;
        int[] theBlock = (int[])blocks.elementAt(k);
        // per-marker bookkeeping:
        //   hetcount[j]           = unresolved heterozygotes (code 5) at marker j
        //   loc[j][allele]        = observed count of raw allele code 1-4
        //   convert[j][allele]    = raw allele -> compact code 1/2 (0 if absent)
        //   unconvert[j][compact] = compact code -> raw allele (8 = unused slot)
        int[] hetcount = new int[theBlock.length];
        int[][] loc = new int[theBlock.length][5];
        int[][] convert = new int[theBlock.length][5];
        int[][] unconvert = new int[theBlock.length][5];
        //int totalHaps = 0;
        //parse genotypes for unresolved heterozygotes
        for (int i = 0; i < chromosomes.size(); i++){
            Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i);
            for (int j = 0; j < theBlock.length; j++){
                byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                if (theGeno == 5){
                    hetcount[j]++;
                } else {
                    loc[j][theGeno]++;
                }
            }
            //totalHaps ++;
        }
        // build the compact allele coding per marker; het counts are split
        // evenly between the observed alleles
        for (int j = 0; j < theBlock.length; j++){
            int a = 1;
            for (int m = 1; m <= 4; m++){
                if (loc[j][m] > 0){
                    convert[j][m]=a;
                    unconvert[j][a]=m;
                    loc[j][m]+=(hetcount[j]/2);
                    a++;
                } else {
                    convert[j][m] = 0;
                    unconvert[j][a] = 8;
                }
            }
            // monomorphic marker: flag the second allele slot as unused (8)
            if (unconvert[j][2] == 0) unconvert[j][2] = 8;
        }
        StringBuffer hapstr = new StringBuffer(theBlock.length);
        Vector inputHaploVector = new Vector();
        // chromosomes are stored in pairs (i, i+1) per individual; the ++i
        // below consumes the partner chromosome in the same pass
        for (int i = 0; i < chromosomes.size(); i++){
            Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i);
            Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i);
            int missing=0;
            //int dhet=0;
            for (int j = 0; j < theBlock.length; j++){
                byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]);
                if(theGeno == 0 || nextGeno == 0) missing++;
            }
            // drop individuals missing more than half the block or more than
            // missingLimit markers; otherwise emit both chromosome strings,
            // using 'h' to mark an unresolved het for the EM input
            if (!(missing > theBlock.length/2 || missing > missingLimit)){
                for (int j = 0; j < theBlock.length; j++){
                    byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                    if (theGeno == 5){
                        hapstr.append("h");
                    } else {
                        hapstr.append(convert[j][theGeno]);
                    }
                }
                inputHaploVector.add(hapstr.toString());
                hapstr = new StringBuffer(theBlock.length);
                for (int j = 0; j < theBlock.length; j++){
                    byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]);
                    if (nextGeno == 5){
                        hapstr.append("h");
                    }else{
                        hapstr.append(convert[j][nextGeno]);
                    }
                }
                inputHaploVector.add(hapstr.toString());
                hapstr = new StringBuffer(theBlock.length);
            }
        }
        String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]);
        //break up large blocks if needed
        int[] block_size;
        if (theBlock.length < 9){
            block_size = new int[1];
            block_size[0] = theBlock.length;
        } else {
            //some base-8 arithmetic
            // split into chunks of 8; a nonzero remainder is merged with the
            // last full chunk and re-split into two roughly equal pieces
            int ones = theBlock.length%8;
            int eights = (theBlock.length - ones)/8;
            if (ones == 0){
                block_size = new int[eights];
                for (int i = 0; i < eights; i++){
                    block_size[i]=8;
                }
            } else {
                block_size = new int[eights+1];
                for (int i = 0; i < eights-1; i++){
                    block_size[i]=8;
                }
                block_size[eights-1] = (8+ones)/2;
                block_size[eights] = 8+ones-block_size[eights-1];
            }
        }
        String EMreturn = new String("");
        int[] num_haplos_present = new int[1];
        Vector haplos_present = new Vector();
        Vector haplo_freq = new Vector();
        char[][] input_haplos2 = new char[input_haplos.length][];
        for (int j = 0; j < input_haplos.length; j++){
            input_haplos2[j] = input_haplos[j].toCharArray();
        }
        //kirby patch
        EM theEM = new EM();
        theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0);
        // flatten EM output into "hap\tfreq\t..." pairs, then re-tokenize below
        for (int j = 0; j < haplos_present.size(); j++){
            EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t";
        }
        StringTokenizer st = new StringTokenizer(EMreturn);
        int p = 0;
        Haplotype[] tempArray = new Haplotype[st.countTokens()/2];
        while(st.hasMoreTokens()){
            String aString = st.nextToken();
            // translate each compact-coded haplotype back to raw allele codes
            int[] genos = new int[aString.length()];
            for (int j = 0; j < aString.length(); j++){
                genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))];
            }
            double tempPerc = Double.parseDouble(st.nextToken());
            // keep only haplotypes whose frequency (as a percent) beats the threshold
            if (tempPerc*100 > hapthresh){
                tempArray[p] = new Haplotype(genos, tempPerc, theBlock);
                p++;
            }
        }
        //make the results array only large enough to hold haps
        //which pass threshold above
        results[k] = new Haplotype[p];
        for (int z = 0; z < p; z++){
            results[k][z] = tempArray[z];
        }
    }
    return results;
}
/**
 * Runs EM haplotype-frequency estimation on each marker block and returns,
 * per block, the haplotypes whose estimated frequency (in percent) exceeds
 * hapthresh.
 *
 * @param blocks    Vector of int[] arrays, each holding the filtered-marker
 *                  indices making up one block
 * @param hapthresh frequency cutoff in percent; haplotypes at or below it are
 *                  dropped from the result
 * @return one Haplotype[] per input block, in block order
 * @throws HaploViewException propagated from the EM engine
 */
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{
    //TODO: output indiv hap estimates
    Haplotype[][] results = new Haplotype[blocks.size()][];
    //String raw = new String();
    //String currentLine;
    // progress-reporting fields read elsewhere in the class
    this.totalBlocks = blocks.size();
    this.blocksDone = 0;
    for (int k = 0; k < blocks.size(); k++){
        this.blocksDone++;
        int[] theBlock = (int[])blocks.elementAt(k);
        // per-marker bookkeeping:
        //   hetcount[j]           = unresolved heterozygotes (code 5) at marker j
        //   loc[j][allele]        = observed count of raw allele code 1-4
        //   convert[j][allele]    = raw allele -> compact code 1/2 (0 if absent)
        //   unconvert[j][compact] = compact code -> raw allele (8 = unused slot)
        int[] hetcount = new int[theBlock.length];
        int[][] loc = new int[theBlock.length][5];
        int[][] convert = new int[theBlock.length][5];
        int[][] unconvert = new int[theBlock.length][5];
        //int totalHaps = 0;
        //parse genotypes for unresolved heterozygotes
        for (int i = 0; i < chromosomes.size(); i++){
            Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i);
            for (int j = 0; j < theBlock.length; j++){
                byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                if (theGeno == 5){
                    hetcount[j]++;
                } else {
                    loc[j][theGeno]++;
                }
            }
            //totalHaps ++;
        }
        // build the compact allele coding per marker; het counts are split
        // evenly between the observed alleles
        for (int j = 0; j < theBlock.length; j++){
            int a = 1;
            for (int m = 1; m <= 4; m++){
                if (loc[j][m] > 0){
                    convert[j][m]=a;
                    unconvert[j][a]=m;
                    loc[j][m]+=(hetcount[j]/2);
                    a++;
                } else {
                    convert[j][m] = 0;
                    unconvert[j][a] = 8;
                }
            }
            // monomorphic marker: flag the second allele slot as unused (8)
            if (unconvert[j][2] == 0) unconvert[j][2] = 8;
        }
        StringBuffer hapstr = new StringBuffer(theBlock.length);
        Vector inputHaploVector = new Vector();
        // chromosomes are stored in pairs (i, i+1) per individual; the ++i
        // below consumes the partner chromosome in the same pass
        for (int i = 0; i < chromosomes.size(); i++){
            Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i);
            Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i);
            int missing=0;
            //int dhet=0;
            for (int j = 0; j < theBlock.length; j++){
                byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]);
                if(theGeno == 0 || nextGeno == 0) missing++;
            }
            // drop individuals missing more than half the block or more than
            // missingLimit markers; otherwise emit both chromosome strings,
            // using 'h' to mark an unresolved het for the EM input
            if (!(missing > theBlock.length/2 || missing > missingLimit)){
                for (int j = 0; j < theBlock.length; j++){
                    byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]);
                    if (theGeno == 5){
                        hapstr.append("h");
                    } else {
                        hapstr.append(convert[j][theGeno]);
                    }
                }
                inputHaploVector.add(hapstr.toString());
                hapstr = new StringBuffer(theBlock.length);
                for (int j = 0; j < theBlock.length; j++){
                    byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]);
                    if (nextGeno == 5){
                        hapstr.append("h");
                    }else{
                        hapstr.append(convert[j][nextGeno]);
                    }
                }
                inputHaploVector.add(hapstr.toString());
                hapstr = new StringBuffer(theBlock.length);
            }
        }
        String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]);
        //break up large blocks if needed
        int[] block_size;
        if (theBlock.length < 9){
            block_size = new int[1];
            block_size[0] = theBlock.length;
        } else {
            //some base-8 arithmetic
            // split into chunks of 8; a nonzero remainder is merged with the
            // last full chunk and re-split into two roughly equal pieces
            int ones = theBlock.length%8;
            int eights = (theBlock.length - ones)/8;
            if (ones == 0){
                block_size = new int[eights];
                for (int i = 0; i < eights; i++){
                    block_size[i]=8;
                }
            } else {
                block_size = new int[eights+1];
                for (int i = 0; i < eights-1; i++){
                    block_size[i]=8;
                }
                block_size[eights-1] = (8+ones)/2;
                block_size[eights] = 8+ones-block_size[eights-1];
            }
        }
        String EMreturn = new String("");
        int[] num_haplos_present = new int[1];
        Vector haplos_present = new Vector();
        Vector haplo_freq = new Vector();
        char[][] input_haplos2 = new char[input_haplos.length][];
        for (int j = 0; j < input_haplos.length; j++){
            input_haplos2[j] = input_haplos[j].toCharArray();
        }
        //kirby patch
        EM theEM = new EM();
        theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0);
        // flatten EM output into "hap\tfreq\t..." pairs, then re-tokenize below
        for (int j = 0; j < haplos_present.size(); j++){
            EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t";
        }
        StringTokenizer st = new StringTokenizer(EMreturn);
        int p = 0;
        Haplotype[] tempArray = new Haplotype[st.countTokens()/2];
        while(st.hasMoreTokens()){
            String aString = st.nextToken();
            // translate each compact-coded haplotype back to raw allele codes
            int[] genos = new int[aString.length()];
            for (int j = 0; j < aString.length(); j++){
                genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))];
            }
            double tempPerc = Double.parseDouble(st.nextToken());
            // keep only haplotypes whose frequency (as a percent) beats the threshold
            if (tempPerc*100 > hapthresh){
                tempArray[p] = new Haplotype(genos, tempPerc, theBlock);
                p++;
            }
        }
        //make the results array only large enough to hold haps
        //which pass threshold above
        results[k] = new Haplotype[p];
        for (int z = 0; z < p; z++){
            results[k][z] = tempArray[z];
        }
    }
    return results;
}
1,112,519
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! 
(missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new 
int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! 
(missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { if (theGeno == a1){ thisHap[j] = '1'; }else if (theGeno == a2){ thisHap[j] = '2'; }else{ thisHap[j] = '0'; } } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; 
while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
1,112,520
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! 
(missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new 
int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! 
(missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new 
int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
1,112,521
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! 
(missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new 
int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! 
(missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new 
int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
1,112,522
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! 
(missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new 
int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! 
(missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new 
int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
1,112,523
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! 
(missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new 
int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! 
(missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } byte[][] input_haplos = (byte[][])inputHaploVector.toArray(new byte[0][0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new 
int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
1,112,524
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! 
(missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new 
int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! 
(missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new 
int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
1,112,525
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! 
(missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new 
int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! 
(missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new 
int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
1,112,526
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! 
(missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new 
int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! 
(missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new 
int[aString.length()]; for (int j = 0; j < aString.length(); j++){ byte returnBit = Byte.parseByte(aString.substring(j,j+1)); if (returnBit == 1){ genos[j] = Chromosome.getFilteredMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getFilteredMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getFilteredMarker(theBlock[j]).getMinor(); } } } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
1,112,527
void prepareHapsInput(File infile) throws IOException, HaploViewException{ //this method is called to suck in data from a file (its only argument) //of genotypes and sets up the Chromosome objects. String currentLine; Vector chroms = new Vector(); byte[] genos = new byte[0]; String ped, indiv; if(infile.length() < 1){ throw new HaploViewException("Genotype file is empty or does not exist: " + infile.getName()); } //read the file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; int numTokens = 0; boolean even = true; while ((currentLine = in.readLine()) != null){ lineCount++; //each line is expected to be of the format: //ped indiv geno geno geno geno... if (currentLine.length() == 0){ //skip blank lines continue; } even = !even; StringTokenizer st = new StringTokenizer(currentLine); //first two tokens are expected to be ped, indiv ped = st.nextToken(); indiv = st.nextToken(); //all other tokens are loaded into a vector (they should all be genotypes) genos = new byte[st.countTokens()]; int q = 0; if (numTokens == 0){ numTokens = st.countTokens(); } if (numTokens != st.countTokens()){ throw new HaploViewException("Genotype file error:\nLine " + lineCount + " appears to have an incorrect number of entries"); } while (st.hasMoreTokens()){ String thisGenotype = (String)st.nextElement(); if (thisGenotype.equals("h")) { genos[q] = 5; }else{ try{ genos[q] = Byte.parseByte(thisGenotype); }catch (NumberFormatException nfe){ throw new HaploViewException("Genotype file input error:\ngenotype value \"" + thisGenotype + "\" on line " + lineCount + " not allowed."); } } if (genos[q] < 0 || genos[q] > 5){ throw new HaploViewException("Genotype file input error:\ngenotype value \"" + genos[q] + "\" on line " + lineCount + " not allowed."); } q++; } //a Chromosome is created and added to a vector of chromosomes. //this is what is evetually returned. 
chroms.add(new Chromosome(ped, indiv, genos, false, infile.getName())); } if (!even){ //we're missing a line here throw new HaploViewException("Genotype file appears to have an odd number of lines.\n"+ "Each individual is required to have two chromosomes"); } chromosomes = chroms; //initialize realIndex Chromosome.realIndex = new int[genos.length]; for (int i = 0; i < genos.length; i++){ Chromosome.realIndex[i] = i; } }
void prepareHapsInput(File infile) throws IOException, HaploViewException{ //this method is called to suck in data from a file (its only argument) //of genotypes and sets up the Chromosome objects. String currentLine; Vector chroms = new Vector(); byte[] genos = new byte[0]; String ped, indiv; if(infile.length() < 1){ throw new HaploViewException("Genotype file is empty or does not exist: " + infile.getName()); } //read the file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; int numTokens = 0; boolean even = true; while ((currentLine = in.readLine()) != null){ lineCount++; //each line is expected to be of the format: //ped indiv geno geno geno geno... if (currentLine.length() == 0){ //skip blank lines continue; } even = !even; StringTokenizer st = new StringTokenizer(currentLine); //first two tokens are expected to be ped, indiv ped = st.nextToken(); indiv = st.nextToken(); //all other tokens are loaded into a vector (they should all be genotypes) genos = new byte[st.countTokens()]; int q = 0; if (numTokens == 0){ numTokens = st.countTokens(); } if (numTokens != st.countTokens()){ throw new HaploViewException("Genotype file error:\nLine " + lineCount + " appears to have an incorrect number of entries"); } while (st.hasMoreTokens()){ String thisGenotype = (String)st.nextElement(); if (thisGenotype.equals("h")) { genos[q] = 9; }else{ try{ genos[q] = Byte.parseByte(thisGenotype); }catch (NumberFormatException nfe){ throw new HaploViewException("Genotype file input error:\ngenotype value \"" + thisGenotype + "\" on line " + lineCount + " not allowed."); } } if (genos[q] < 0 || genos[q] > 5){ throw new HaploViewException("Genotype file input error:\ngenotype value \"" + genos[q] + "\" on line " + lineCount + " not allowed."); } q++; } //a Chromosome is created and added to a vector of chromosomes. //this is what is evetually returned. 
chroms.add(new Chromosome(ped, indiv, genos, false, infile.getName())); } if (!even){ //we're missing a line here throw new HaploViewException("Genotype file appears to have an odd number of lines.\n"+ "Each individual is required to have two chromosomes"); } chromosomes = chroms; //initialize realIndex Chromosome.realIndex = new int[genos.length]; for (int i = 0; i < genos.length; i++){ Chromosome.realIndex[i] = i; } }
1,112,528
void prepareHapsInput(File infile) throws IOException, HaploViewException{ //this method is called to suck in data from a file (its only argument) //of genotypes and sets up the Chromosome objects. String currentLine; Vector chroms = new Vector(); byte[] genos = new byte[0]; String ped, indiv; if(infile.length() < 1){ throw new HaploViewException("Genotype file is empty or does not exist: " + infile.getName()); } //read the file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; int numTokens = 0; boolean even = true; while ((currentLine = in.readLine()) != null){ lineCount++; //each line is expected to be of the format: //ped indiv geno geno geno geno... if (currentLine.length() == 0){ //skip blank lines continue; } even = !even; StringTokenizer st = new StringTokenizer(currentLine); //first two tokens are expected to be ped, indiv ped = st.nextToken(); indiv = st.nextToken(); //all other tokens are loaded into a vector (they should all be genotypes) genos = new byte[st.countTokens()]; int q = 0; if (numTokens == 0){ numTokens = st.countTokens(); } if (numTokens != st.countTokens()){ throw new HaploViewException("Genotype file error:\nLine " + lineCount + " appears to have an incorrect number of entries"); } while (st.hasMoreTokens()){ String thisGenotype = (String)st.nextElement(); if (thisGenotype.equals("h")) { genos[q] = 5; }else{ try{ genos[q] = Byte.parseByte(thisGenotype); }catch (NumberFormatException nfe){ throw new HaploViewException("Genotype file input error:\ngenotype value \"" + thisGenotype + "\" on line " + lineCount + " not allowed."); } } if (genos[q] < 0 || genos[q] > 5){ throw new HaploViewException("Genotype file input error:\ngenotype value \"" + genos[q] + "\" on line " + lineCount + " not allowed."); } q++; } //a Chromosome is created and added to a vector of chromosomes. //this is what is evetually returned. 
chroms.add(new Chromosome(ped, indiv, genos, false, infile.getName())); } if (!even){ //we're missing a line here throw new HaploViewException("Genotype file appears to have an odd number of lines.\n"+ "Each individual is required to have two chromosomes"); } chromosomes = chroms; //initialize realIndex Chromosome.realIndex = new int[genos.length]; for (int i = 0; i < genos.length; i++){ Chromosome.realIndex[i] = i; } }
void prepareHapsInput(File infile) throws IOException, HaploViewException{ //this method is called to suck in data from a file (its only argument) //of genotypes and sets up the Chromosome objects. String currentLine; Vector chroms = new Vector(); byte[] genos = new byte[0]; String ped, indiv; if(infile.length() < 1){ throw new HaploViewException("Genotype file is empty or does not exist: " + infile.getName()); } //read the file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; int numTokens = 0; boolean even = true; while ((currentLine = in.readLine()) != null){ lineCount++; //each line is expected to be of the format: //ped indiv geno geno geno geno... if (currentLine.length() == 0){ //skip blank lines continue; } even = !even; StringTokenizer st = new StringTokenizer(currentLine); //first two tokens are expected to be ped, indiv ped = st.nextToken(); indiv = st.nextToken(); //all other tokens are loaded into a vector (they should all be genotypes) genos = new byte[st.countTokens()]; int q = 0; if (numTokens == 0){ numTokens = st.countTokens(); } if (numTokens != st.countTokens()){ throw new HaploViewException("Genotype file error:\nLine " + lineCount + " appears to have an incorrect number of entries"); } while (st.hasMoreTokens()){ String thisGenotype = (String)st.nextElement(); if (thisGenotype.equals("h")) { genos[q] = 5; }else{ try{ genos[q] = Byte.parseByte(thisGenotype); }catch (NumberFormatException nfe){ throw new HaploViewException("Genotype file input error:\ngenotype value \"" + thisGenotype + "\" on line " + lineCount + " not allowed."); } } if (genos[q] < 0 || genos[q] > 9){ throw new HaploViewException("Genotype file input error:\ngenotype value \"" + genos[q] + "\" on line " + lineCount + " not allowed."); } q++; } //a Chromosome is created and added to a vector of chromosomes. //this is what is evetually returned. 
chroms.add(new Chromosome(ped, indiv, genos, false, infile.getName())); } if (!even){ //we're missing a line here throw new HaploViewException("Genotype file appears to have an odd number of lines.\n"+ "Each individual is required to have two chromosomes"); } chromosomes = chroms; //initialize realIndex Chromosome.realIndex = new int[genos.length]; for (int i = 0; i < genos.length; i++){ Chromosome.realIndex[i] = i; } }
1,112,529
void prepareMarkerInput(File infile, long md, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists Vector names = new Vector(); Vector positions = new Vector(); maxdist = md; negMaxdist = -1 * maxdist; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } if (loc < prevloc){ throw new HaploViewException("Info file out of order:\n"+ name); } prevloc = loc; names.add(name); positions.add(l); } if (lineCount > Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file.")); } if (lineCount < Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); } infoKnown = true; } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; if (infoKnown){ markerInfo.add(new SNP((String)names.elementAt(i), Long.parseLong((String)positions.elementAt(i)), Math.rint(maf*100.0)/100.0)); }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
void prepareMarkerInput(File infile, long md, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists Vector names = new Vector(); Vector positions = new Vector(); maxdist = md; negMaxdist = -1 * maxdist; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } if (loc < prevloc){ throw new HaploViewException("Info file out of order:\n"+ name); } prevloc = loc; names.add(name); positions.add(l); } if (lineCount > Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file.")); } if (lineCount < Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); } infoKnown = true; } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; if (infoKnown){ markerInfo.add(new SNP((String)names.elementAt(i), Long.parseLong((String)positions.elementAt(i)), Math.rint(maf*100.0)/100.0)); }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
1,112,530
void prepareMarkerInput(File infile, long md, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists Vector names = new Vector(); Vector positions = new Vector(); maxdist = md; negMaxdist = -1 * maxdist; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } if (loc < prevloc){ throw new HaploViewException("Info file out of order:\n"+ name); } prevloc = loc; names.add(name); positions.add(l); } if (lineCount > Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file.")); } if (lineCount < Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); } infoKnown = true; } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; if (infoKnown){ markerInfo.add(new SNP((String)names.elementAt(i), Long.parseLong((String)positions.elementAt(i)), Math.rint(maf*100.0)/100.0)); }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
void prepareMarkerInput(File infile, long md, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists Vector names = new Vector(); Vector positions = new Vector(); maxdist = md; negMaxdist = -1 * maxdist; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } if (loc < prevloc){ throw new HaploViewException("Info file out of order:\n"+ name); } prevloc = loc; names.add(name); positions.add(l); } if (lineCount > Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file.")); } if (lineCount < Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); } infoKnown = true; } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; if (infoKnown){ markerInfo.add(new SNP((String)names.elementAt(i), Long.parseLong((String)positions.elementAt(i)), Math.rint(maf*100.0)/100.0)); }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
1,112,531
void prepareMarkerInput(File infile, long md, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists Vector names = new Vector(); Vector positions = new Vector(); maxdist = md; negMaxdist = -1 * maxdist; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } if (loc < prevloc){ throw new HaploViewException("Info file out of order:\n"+ name); } prevloc = loc; names.add(name); positions.add(l); } if (lineCount > Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file.")); } if (lineCount < Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); } infoKnown = true; } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; if (infoKnown){ markerInfo.add(new SNP((String)names.elementAt(i), Long.parseLong((String)positions.elementAt(i)), Math.rint(maf*100.0)/100.0)); }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
void prepareMarkerInput(File infile, long md, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists Vector names = new Vector(); Vector positions = new Vector(); maxdist = md; negMaxdist = -1 * maxdist; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } if (loc < prevloc){ throw new HaploViewException("Info file out of order:\n"+ name); } prevloc = loc; names.add(name); positions.add(l); } if (lineCount > Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file.")); } if (lineCount < Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); } infoKnown = true; } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; if (infoKnown){ markerInfo.add(new SNP((String)names.elementAt(i), Long.parseLong((String)positions.elementAt(i)), Math.rint(maf*100.0)/100.0, a1, a2)); }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0, a1, a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
1,112,532
void prepareMarkerInput(File infile, long md, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists Vector names = new Vector(); Vector positions = new Vector(); maxdist = md; negMaxdist = -1 * maxdist; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } if (loc < prevloc){ throw new HaploViewException("Info file out of order:\n"+ name); } prevloc = loc; names.add(name); positions.add(l); } if (lineCount > Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file.")); } if (lineCount < Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); } infoKnown = true; } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; if (infoKnown){ markerInfo.add(new SNP((String)names.elementAt(i), Long.parseLong((String)positions.elementAt(i)), Math.rint(maf*100.0)/100.0)); }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
void prepareMarkerInput(File infile, long md, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists Vector names = new Vector(); Vector positions = new Vector(); maxdist = md; negMaxdist = -1 * maxdist; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } if (loc < prevloc){ throw new HaploViewException("Info file out of order:\n"+ name); } prevloc = loc; names.add(name); positions.add(l); } if (lineCount > Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file.")); } if (lineCount < Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); } infoKnown = true; } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; if (infoKnown){ markerInfo.add(new SNP((String)names.elementAt(i), Long.parseLong((String)positions.elementAt(i)), Math.rint(maf*100.0)/100.0)); }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
1,112,533
protected void createUI() { tabPane = new JTabbedPane(); queryPane = new QueryPane(); treePane = new PhotoFolderTree(); tabPane.addTab( "Query", queryPane ); tabPane.addTab( "Folders", treePane ); viewPane = new TableCollectionView(); viewPane.setCollection( queryPane.getResultCollection() ); // Set listeners to both query and folder tree panes /* If an actionEvent comes from queryPane & the viewed folder is no the query resouts, swich to it (the result folder will be nodified of changes to quert parameters directly */ queryPane.addActionListener( new ActionListener() { public void actionPerformed( ActionEvent e ) { if ( viewPane.getCollection() != queryPane.getResultCollection() ) { viewPane.setCollection( queryPane.getResultCollection() ); } } } ); /* If the selected folder is changed in treePane, switch to that immediately */ treePane.addPhotoFolderTreeListener( new PhotoFolderTreeListener() { public void photoFolderTreeSelectionChanged( PhotoFolderTreeEvent e ) { viewPane.setCollection( e.getSelected() ); } } ); // Create the split pane to display both of these components JSplitPane split = new JSplitPane( JSplitPane.HORIZONTAL_SPLIT, tabPane, viewPane ); Container cp = getContentPane(); cp.setLayout( new BorderLayout() ); cp.add( split, BorderLayout.CENTER ); // Create the menu bar & menus JMenuBar menuBar = new JMenuBar(); setJMenuBar( menuBar ); JMenu fileMenu = new JMenu( "File" ); fileMenu.setMnemonic(KeyEvent.VK_F); menuBar.add( fileMenu ); JMenuItem importItem = new JMenuItem( "Import image...", KeyEvent.VK_I ); importItem.addActionListener( new ActionListener() { public void actionPerformed( ActionEvent e ) { importFile(); } }); fileMenu.add( importItem ); JMenuItem exitItem = new JMenuItem( "Exit", KeyEvent.VK_X ); exitItem.addActionListener( new ActionListener() { public void actionPerformed( ActionEvent e ) { System.exit( 0 ); } }); fileMenu.add( exitItem ); pack(); }
protected void createUI() { tabPane = new JTabbedPane(); queryPane = new QueryPane(); treePane = new PhotoFolderTree(); tabPane.addTab( "Query", queryPane ); tabPane.addTab( "Folders", treePane ); viewPane = new TableCollectionView(); viewPane.setCollection( queryPane.getResultCollection() ); // Set listeners to both query and folder tree panes /* If an actionEvent comes from queryPane & the viewed folder is no the query resouts, swich to it (the result folder will be nodified of changes to quert parameters directly */ queryPane.addActionListener( new ActionListener() { public void actionPerformed( ActionEvent e ) { if ( viewPane.getCollection() != queryPane.getResultCollection() ) { viewPane.setCollection( queryPane.getResultCollection() ); } } } ); /* If the selected folder is changed in treePane, switch to that immediately */ treePane.addPhotoFolderTreeListener( new PhotoFolderTreeListener() { public void photoFolderTreeSelectionChanged( PhotoFolderTreeEvent e ) { PhotoFolder f = e.getSelected(); if ( f != null ) { viewPane.setCollection( f ); } } } ); // Create the split pane to display both of these components JSplitPane split = new JSplitPane( JSplitPane.HORIZONTAL_SPLIT, tabPane, viewPane ); Container cp = getContentPane(); cp.setLayout( new BorderLayout() ); cp.add( split, BorderLayout.CENTER ); // Create the menu bar & menus JMenuBar menuBar = new JMenuBar(); setJMenuBar( menuBar ); JMenu fileMenu = new JMenu( "File" ); fileMenu.setMnemonic(KeyEvent.VK_F); menuBar.add( fileMenu ); JMenuItem importItem = new JMenuItem( "Import image...", KeyEvent.VK_I ); importItem.addActionListener( new ActionListener() { public void actionPerformed( ActionEvent e ) { importFile(); } }); fileMenu.add( importItem ); JMenuItem exitItem = new JMenuItem( "Exit", KeyEvent.VK_X ); exitItem.addActionListener( new ActionListener() { public void actionPerformed( ActionEvent e ) { System.exit( 0 ); } }); fileMenu.add( exitItem ); pack(); }
1,112,534
public void photoFolderTreeSelectionChanged( PhotoFolderTreeEvent e ) { viewPane.setCollection( e.getSelected() ); }
public void photoFolderTreeSelectionChanged( PhotoFolderTreeEvent e ) { PhotoFolder f = e.getSelected(); if ( f != null ) { viewPane.setCollection( f ); } }
1,112,535
public Script compile() throws Exception { if (tag instanceof CompilableTag) { ((CompilableTag) tag).compile(); } List typeList = new ArrayList(); List methodList = new ArrayList(); List expressionList = new ArrayList(); BeanInfo info = Introspector.getBeanInfo(tag.getClass()); PropertyDescriptor[] descriptors = info.getPropertyDescriptors(); Set attributeSet = new HashSet(); if (descriptors != null) { for (int i = 0, size = descriptors.length; i < size; i++) { PropertyDescriptor descriptor = descriptors[i]; String name = descriptor.getName(); Expression expression = (Expression) attributes.get(name); if (expression != null) { attributeSet.add( name ); Method writeMethod = descriptor.getWriteMethod(); if (writeMethod != null) { Class type = descriptor.getPropertyType(); expressionList.add(expression); methodList.add(writeMethod); typeList.add(type); if (log.isDebugEnabled()) { log.debug( "Adding tag property name: " + name + " type: " + type.getName() + " expression: " + expression); } } } } } // System.err.println( "BeanTagScript::compile() " + this ); // now create the arrays to avoid object allocation & casting when // running the script int size = expressionList.size(); expressions = new Expression[size]; methods = new Method[size]; types = new Class[size]; expressionList.toArray(expressions); methodList.toArray(methods); typeList.toArray(types); // compile body tag.setBody(tag.getBody().compile()); // now lets check for any attributes that are not used for ( Iterator iter = attributes.keySet().iterator(); iter.hasNext(); ) { String name = (String) iter.next(); if ( ! attributeSet.contains( name ) ) { throw new JellyException( "This tag does not understand the attribute '" + name + "'" ); } } return this; }
public Script compile() throws Exception { if (tag instanceof CompilableTag) { ((CompilableTag) tag).compile(); } List typeList = new ArrayList(); List methodList = new ArrayList(); List expressionList = new ArrayList(); BeanInfo info = Introspector.getBeanInfo(tag.getClass()); PropertyDescriptor[] descriptors = info.getPropertyDescriptors(); Set attributeSet = new HashSet(); if (descriptors != null) { for (int i = 0, size = descriptors.length; i < size; i++) { PropertyDescriptor descriptor = descriptors[i]; String name = descriptor.getName(); Expression expression = (Expression) attributes.get(name); if (expression != null) { attributeSet.add( name ); Method writeMethod = descriptor.getWriteMethod(); if (writeMethod != null) { Class type = descriptor.getPropertyType(); expressionList.add(expression); methodList.add(writeMethod); typeList.add(type); if (log.isDebugEnabled()) { log.debug( "Adding tag property name: " + name + " type: " + type.getName() + " expression: " + expression); } } } } } // System.err.println( "BeanTagScript::compile() " + this ); // now create the arrays to avoid object allocation & casting when // running the script int size = expressionList.size(); expressions = new Expression[size]; methods = new Method[size]; types = new Class[size]; expressionList.toArray(expressions); methodList.toArray(methods); typeList.toArray(types); // compile body tag.setBody(tag.getBody().compile()); // now lets check for any attributes that are not used for ( Iterator iter = attributes.keySet().iterator(); iter.hasNext(); ) { String name = (String) iter.next(); if ( ! attributeSet.contains( name ) ) { throw new JellyException( "This tag does not understand the attribute '" + name + "'", getColumnNumber(), getLineNumber() ); } } return this; }
1,112,536
public void run(JellyContext context, XMLOutput output) throws Exception { tag.setContext(context); // initialize all the properties of the tag before its used // if there is a problem abort this tag for (int i = 0, size = expressions.length; i < size; i++) { Expression expression = expressions[i]; Method method = methods[i]; Class type = types[i]; // some types are Expression objects so let the tag // evaluate them Object value = null; if (type.isAssignableFrom(Expression.class) && !type.isAssignableFrom(Object.class)) { value = expression; } else { value = expression.evaluate(context); } // convert value to correct type if (value != null) { value = convertType(value, type); } Object[] arguments = { value }; try { method.invoke(tag, arguments); } catch (Exception e) { String valueTypeName = (value != null ) ? value.getClass().getName() : "null"; log.warn( "Cannot call method: " + method.getName() + " as I cannot convert: " + value + " of type: " + valueTypeName + " into type: " + type.getName() ); throw new JellyException( "Cannot call method: " + method.getName() + " on tag of type: " + tag.getClass().getName() + " with value: " + value + " of type: " + valueTypeName + ". Exception: " + e, e ); } } runTag(output); }
public void run(JellyContext context, XMLOutput output) throws Exception { tag.setContext(context); // initialize all the properties of the tag before its used // if there is a problem abort this tag for (int i = 0, size = expressions.length; i < size; i++) { Expression expression = expressions[i]; Method method = methods[i]; Class type = types[i]; // some types are Expression objects so let the tag // evaluate them Object value = null; if (type.isAssignableFrom(Expression.class) && !type.isAssignableFrom(Object.class)) { value = expression; } else { value = expression.evaluate(context); } // convert value to correct type if (value != null) { value = convertType(value, type); } Object[] arguments = { value }; try { method.invoke(tag, arguments); } catch (Exception e) { String valueTypeName = (value != null ) ? value.getClass().getName() : "null"; log.warn( "Cannot call method: " + method.getName() + " as I cannot convert: " + value + " of type: " + valueTypeName + " into type: " + type.getName() ); throw new JellyException( "Cannot call method: " + method.getName() + " on tag of type: " + tag.getClass().getName() + " with value: " + value + " of type: " + valueTypeName + ". Exception: " + e, getColumnNumber(), getLineNumber() ); } } runTag(output); }
1,112,537
public void run(JellyContext context, XMLOutput output) throws Exception { tag.setContext(context); // initialize all the properties of the tag before its used // if there is a problem abort this tag for (int i = 0, size = expressions.length; i < size; i++) { Expression expression = expressions[i]; Method method = methods[i]; Class type = types[i]; // some types are Expression objects so let the tag // evaluate them Object value = null; if (type.isAssignableFrom(Expression.class) && !type.isAssignableFrom(Object.class)) { value = expression; } else { value = expression.evaluate(context); } // convert value to correct type if (value != null) { value = convertType(value, type); } Object[] arguments = { value }; try { method.invoke(tag, arguments); } catch (Exception e) { String valueTypeName = (value != null ) ? value.getClass().getName() : "null"; log.warn( "Cannot call method: " + method.getName() + " as I cannot convert: " + value + " of type: " + valueTypeName + " into type: " + type.getName() ); throw new JellyException( "Cannot call method: " + method.getName() + " on tag of type: " + tag.getClass().getName() + " with value: " + value + " of type: " + valueTypeName + ". Exception: " + e, e ); } } runTag(output); }
public void run(JellyContext context, XMLOutput output) throws Exception { tag.setContext(context); // initialize all the properties of the tag before its used // if there is a problem abort this tag for (int i = 0, size = expressions.length; i < size; i++) { Expression expression = expressions[i]; Method method = methods[i]; Class type = types[i]; // some types are Expression objects so let the tag // evaluate them Object value = null; if (type.isAssignableFrom(Expression.class) && !type.isAssignableFrom(Object.class)) { value = expression; } else { value = expression.evaluate(context); } // convert value to correct type if (value != null) { value = convertType(value, type); } Object[] arguments = { value }; try { method.invoke(tag, arguments); } catch (Exception e) { String valueTypeName = (value != null ) ? value.getClass().getName() : "null"; log.warn( "Cannot call method: " + method.getName() + " as I cannot convert: " + value + " of type: " + valueTypeName + " into type: " + type.getName() ); throw new JellyException( "Cannot call method: " + method.getName() + " on tag of type: " + tag.getClass().getName() + " with value: " + value + " of type: " + valueTypeName + ". Exception: " + e, e ); } } try { tag.doTag(output); } catch (JellyException e) { handleException(e); } catch (Exception e) { handleException(e); } }
1,112,538
public void exit() { if (promptForUnsavedModifications()) { try { closeProject(getProject()); saveSettings(); } catch (ArchitectException e) { logger.error("Couldn't save settings: "+e); } System.exit(0); } }
public void exit() { if (getProject().isSaveInProgress()) { JOptionPane.showMessageDialog(this, "Project is saving, cannot exit the Power Architect. Please wait for the save to finish, and then try again.", "Warning", JOptionPane.WARNING_MESSAGE); return; } if (promptForUnsavedModifications()) { try { closeProject(getProject()); saveSettings(); } catch (ArchitectException e) { logger.error("Couldn't save settings: "+e); } System.exit(0); } }
1,112,540
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == READ_GENOTYPES){ ReadDataDialog readDialog = new ReadDataDialog("Open new data", this); readDialog.pack(); readDialog.setVisible(true); } else if (command == READ_MARKERS){ //JFileChooser fc = new JFileChooser(System.getProperty("user.dir")); fc.setSelectedFile(null); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { readMarkers(fc.getSelectedFile()); } }else if (command == CUST_BLOCKS){ TweakBlockDefsDialog tweakDialog = new TweakBlockDefsDialog("Customize Blocks", this); tweakDialog.pack(); tweakDialog.setVisible(true); }else if (command == CLEAR_BLOCKS){ colorMenuItems[0].setSelected(true); for (int i = 1; i< colorMenuItems.length; i++){ colorMenuItems[i].setEnabled(false); } changeBlocks(3,1); //blockdef clauses }else if (command.startsWith("block")){ int method = Integer.valueOf(command.substring(5)).intValue(); changeBlocks(method,1); for (int i = 1; i < colorMenuItems.length; i++){ if (method+1 == i){ colorMenuItems[i].setEnabled(true); }else{ colorMenuItems[i].setEnabled(false); } } colorMenuItems[0].setSelected(true); //zooming clauses }else if (command.startsWith("zoom")){ dPrimeDisplay.zoom(Integer.valueOf(command.substring(4)).intValue()); //coloring clauses }else if (command.startsWith("color")){ dPrimeDisplay.refresh(Integer.valueOf(command.substring(5)).intValue()+1); changeKey(Integer.valueOf(command.substring(5)).intValue()+1); //exporting clauses }else if (command == EXPORT_PNG){ fc.setSelectedFile(null); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), PNG_MODE, fc.getSelectedFile()); } }else if (command == EXPORT_TEXT){ fc.setSelectedFile(null); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), TXT_MODE, fc.getSelectedFile()); } }else if (command == "Select All"){ checkPanel.selectAll(); }else if (command == "Rescore 
Markers"){ String cut = hwcut.getText(); if (cut.equals("")){ cut = "0"; } CheckData.hwCut = Double.parseDouble(cut); cut = genocut.getText(); if (cut.equals("")){ cut="0"; } CheckData.failedGenoCut = Integer.parseInt(cut); cut = mendcut.getText(); if (cut.equals("")){ cut="0"; } CheckData.numMendErrCut = Integer.parseInt(cut); checkPanel.redoRatings(); }else if (command == "Tutorial"){ showHelp(); } else if (command == QUIT){ quit(); } else { for (int i = 0; i < viewItems.length; i++) { if (command == viewItems[i]) tabs.setSelectedIndex(i); } } }
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == READ_GENOTYPES){ ReadDataDialog readDialog = new ReadDataDialog("Open new data", this); readDialog.pack(); readDialog.setVisible(true); } else if (command == READ_MARKERS){ //JFileChooser fc = new JFileChooser(System.getProperty("user.dir")); fc.setSelectedFile(new File("")); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { readMarkers(fc.getSelectedFile()); } }else if (command == CUST_BLOCKS){ TweakBlockDefsDialog tweakDialog = new TweakBlockDefsDialog("Customize Blocks", this); tweakDialog.pack(); tweakDialog.setVisible(true); }else if (command == CLEAR_BLOCKS){ colorMenuItems[0].setSelected(true); for (int i = 1; i< colorMenuItems.length; i++){ colorMenuItems[i].setEnabled(false); } changeBlocks(3,1); //blockdef clauses }else if (command.startsWith("block")){ int method = Integer.valueOf(command.substring(5)).intValue(); changeBlocks(method,1); for (int i = 1; i < colorMenuItems.length; i++){ if (method+1 == i){ colorMenuItems[i].setEnabled(true); }else{ colorMenuItems[i].setEnabled(false); } } colorMenuItems[0].setSelected(true); //zooming clauses }else if (command.startsWith("zoom")){ dPrimeDisplay.zoom(Integer.valueOf(command.substring(4)).intValue()); //coloring clauses }else if (command.startsWith("color")){ dPrimeDisplay.refresh(Integer.valueOf(command.substring(5)).intValue()+1); changeKey(Integer.valueOf(command.substring(5)).intValue()+1); //exporting clauses }else if (command == EXPORT_PNG){ fc.setSelectedFile(new File("")); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), PNG_MODE, fc.getSelectedFile()); } }else if (command == EXPORT_TEXT){ fc.setSelectedFile(new File("")); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), TXT_MODE, fc.getSelectedFile()); } }else if (command == "Select All"){ checkPanel.selectAll(); }else if 
(command == "Rescore Markers"){ String cut = hwcut.getText(); if (cut.equals("")){ cut = "0"; } CheckData.hwCut = Double.parseDouble(cut); cut = genocut.getText(); if (cut.equals("")){ cut="0"; } CheckData.failedGenoCut = Integer.parseInt(cut); cut = mendcut.getText(); if (cut.equals("")){ cut="0"; } CheckData.numMendErrCut = Integer.parseInt(cut); checkPanel.redoRatings(); }else if (command == "Tutorial"){ showHelp(); } else if (command == QUIT){ quit(); } else { for (int i = 0; i < viewItems.length; i++) { if (command == viewItems[i]) tabs.setSelectedIndex(i); } } }
1,112,543
public void setSession(Session s) { this.session = session; }
public void setSession(Session s) { this.session = s; }
1,112,544
public void doTag(XMLOutput output) throws Exception { Class throwableClass = getThrowableClass(); try { invokeBody(output); } catch (Throwable t) { if (t instanceof JellyException) { // unwrap Jelly exceptions which wrap other exceptions JellyException je = (JellyException) t; if (je.getCause() != null) { t = je.getCause(); } } if (var != null) { context.setVariable(var, t); } if (throwableClass != null && !throwableClass.isAssignableFrom(t.getClass())) { fail("Unexpected exception: " + t); } else { return; } } fail("No exception was thrown."); }
public void doTag(XMLOutput output) throws Exception { Class throwableClass = getThrowableClass(); try { invokeBody(output); } catch (Throwable t) { if (t instanceof JellyException) { // unwrap Jelly exceptions which wrap other exceptions JellyException je = (JellyException) t; if (je.getCause() != null) { t = je.getCause(); } } if (var != null) { context.setVariable(var, t); } if (throwableClass != null && !throwableClass.isAssignableFrom(t.getClass())) { fail("Unexpected exception: " + t); } else { return; } } fail("No exception was thrown."); }
1,112,548
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException, ArchitectException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; try { ddlg = (DDLGenerator) DDLUtils.createDDLGenerator( col1.getParentTable().getParentDatabase().getDataSource()); } catch (InstantiationException e1) { throw new ArchitectException("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { throw new ArchitectException("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); for (SQLColumn col : columns ) { synchronized (monitorableMutex) { if (userCancel) return; } ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = null; long profileStartTime = System.currentTimeMillis(); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + ")."); pfd = discoverProfileFunctionDescriptor(col,ddlg,conn); } try { colResult = execProfileFunction(pfd,col,ddlg,conn); } catch ( Exception ex ) { colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(profileStartTime); colResult.setError(true); colResult.setException(ex); colResult.setCreateEndTime(System.currentTimeMillis()); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { putResult(colResult); } synchronized (monitorableMutex) { progress++; if (userCancel) break; } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up statement", ex); } } }
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException, ArchitectException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; try { ddlg = (DDLGenerator) DDLUtils.createDDLGenerator( col1.getParentTable().getParentDatabase().getDataSource()); } catch (InstantiationException e1) { throw new ArchitectException("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { throw new ArchitectException("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); for (SQLColumn col : columns ) { synchronized (monitorableMutex) { if (userCancel) { remove(col.getParentTable()); return; } } ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = null; long profileStartTime = System.currentTimeMillis(); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + ")."); pfd = discoverProfileFunctionDescriptor(col,ddlg,conn); } try { colResult = execProfileFunction(pfd,col,ddlg,conn); } catch ( Exception ex ) { colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(profileStartTime); colResult.setError(true); colResult.setException(ex); colResult.setCreateEndTime(System.currentTimeMillis()); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { putResult(colResult); } synchronized (monitorableMutex) { progress++; if (userCancel) break; } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up statement", ex); } } }
1,112,550
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException, ArchitectException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; try { ddlg = (DDLGenerator) DDLUtils.createDDLGenerator( col1.getParentTable().getParentDatabase().getDataSource()); } catch (InstantiationException e1) { throw new ArchitectException("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { throw new ArchitectException("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); for (SQLColumn col : columns ) { synchronized (monitorableMutex) { if (userCancel) return; } ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = null; long profileStartTime = System.currentTimeMillis(); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + ")."); pfd = discoverProfileFunctionDescriptor(col,ddlg,conn); } try { colResult = execProfileFunction(pfd,col,ddlg,conn); } catch ( Exception ex ) { colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(profileStartTime); colResult.setError(true); colResult.setException(ex); colResult.setCreateEndTime(System.currentTimeMillis()); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { putResult(colResult); } synchronized (monitorableMutex) { progress++; if (userCancel) break; } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up statement", ex); } } }
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException, ArchitectException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; try { ddlg = (DDLGenerator) DDLUtils.createDDLGenerator( col1.getParentTable().getParentDatabase().getDataSource()); } catch (InstantiationException e1) { throw new ArchitectException("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { throw new ArchitectException("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); for (SQLColumn col : columns ) { synchronized (monitorableMutex) { if (userCancel) return; } ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = null; long profileStartTime = System.currentTimeMillis(); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + ")."); pfd = discoverProfileFunctionDescriptor(col,ddlg,conn); } try { colResult = execProfileFunction(pfd,col,ddlg,conn); } catch ( Exception ex ) { colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(profileStartTime); colResult.setError(true); colResult.setException(ex); colResult.setCreateEndTime(System.currentTimeMillis()); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { putResult(colResult); } synchronized (monitorableMutex) { progress++; if (userCancel) { remove(col.getParentTable()); break; } } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up statement", ex); } } }
1,112,551
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); final int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
1,112,552
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT "); sql.append(databaseIdentifierQuoteString); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), 
databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); 
sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
1,112,553
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append(databaseIdentifierQuoteString); sql.append(") AS DISTINCTCOUNT_"+i); tryCount++; } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), 
databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); 
sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
1,112,554
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN("); sql.append(databaseIdentifierQuoteString); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), 
databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); 
sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
1,112,555
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append(databaseIdentifierQuoteString); sql.append(") AS MINVALUE_"+i); tryCount++; } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), 
databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); 
sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
1,112,556
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX("); sql.append(databaseIdentifierQuoteString); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), 
databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); 
sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
1,112,557
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append(databaseIdentifierQuoteString); sql.append(") AS MAXVALUE_"+i); tryCount++; } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), 
databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); 
sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
1,112,558
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName(databaseIdentifierQuoteString+ col.getName()+ databaseIdentifierQuoteString)); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), 
databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); 
sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
1,112,559
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName(databaseIdentifierQuoteString+ col.getName()+databaseIdentifierQuoteString)); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName(databaseIdentifierQuoteString+ col.getName()+databaseIdentifierQuoteString)); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName(databaseIdentifierQuoteString+ col.getName()+databaseIdentifierQuoteString)); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); 
sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY 
").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
1,112,560
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhenNull( databaseIdentifierQuoteString+ col.getName()+ databaseIdentifierQuoteString, "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), 
databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); 
sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
1,112,561
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
1,112,562
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
1,112,563
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
1,112,564
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, 
databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY 
COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
1,112,565
public void remove(SQLObject sqo){ results.remove(sqo); }
public void remove(SQLObject sqo) throws ArchitectException{ results.remove(sqo); }
1,112,567
public void setCancelled(boolean cancelled) { synchronized (monitorableMutex) { userCancel = true; } }
public void setCancelled(boolean cancelled) { synchronized (monitorableMutex) { userCancel = cancelled; } }
1,112,569
protected Document parse(Object source) throws Exception { // #### we should allow parsing to output XML events to // the output if no var is specified if (source instanceof String) { String uri = (String) source; InputStream in = context.getResourceAsStream(uri); return getSAXReader().read(in, uri); } else if (source instanceof Reader) { return getSAXReader().read((Reader) source); } else if (source instanceof InputStream) { return getSAXReader().read((InputStream) source); } else if (source instanceof URL) { return getSAXReader().read((URL) source); } else { throw new IllegalArgumentException( "Invalid source argument. Must be a String, Reader, InputStream or URL." + " Was type; " + source.getClass().getName() + " with value: " + source); } }
protected Document parse(Object source) throws Exception { // #### we should allow parsing to output XML events to // the output if no var is specified if (source instanceof String) { String uri = (String) source; InputStream in = context.getResourceAsStream(uri); return getSAXReader().read(in, uri); } else if (source instanceof Reader) { return getSAXReader().read((Reader) source); } else if (source instanceof InputStream) { return getSAXReader().read((InputStream) source); } else if (source instanceof URL) { return getSAXReader().read((URL) source); } else { throw new IllegalArgumentException( "Invalid source argument. Must be a String, Reader, InputStream or URL." + " Was type; " + source.getClass().getName() + " with value: " + source); } }
1,112,570
protected Document parse(Object source) throws Exception { // #### we should allow parsing to output XML events to // the output if no var is specified if (source instanceof String) { String uri = (String) source; InputStream in = context.getResourceAsStream(uri); return getSAXReader().read(in, uri); } else if (source instanceof Reader) { return getSAXReader().read((Reader) source); } else if (source instanceof InputStream) { return getSAXReader().read((InputStream) source); } else if (source instanceof URL) { return getSAXReader().read((URL) source); } else { throw new IllegalArgumentException( "Invalid source argument. Must be a String, Reader, InputStream or URL." + " Was type; " + source.getClass().getName() + " with value: " + source); } }
protected Document parse(Object source) throws Exception { // #### we should allow parsing to output XML events to // the output if no var is specified if (source instanceof String) { String uri = (String) source; InputStream in = context.getResourceAsStream(uri); return getSAXReader().read(in, uri); } else if (source instanceof Reader) { return getSAXReader().read((Reader) source); } else if (source instanceof InputStream) { return getSAXReader().read((InputStream) source); } else if (source instanceof URL) { return getSAXReader().read((URL) source); } else { throw new IllegalArgumentException( "Invalid source argument. Must be a String, Reader, InputStream or URL." + " Was type; " + source.getClass().getName() + " with value: " + source); } }
1,112,571
/**
 * Paints the haplotype display: for each block a header row of marker
 * numbers (tag SNPs drawn in red), one row per haplotype showing its
 * alleles (a genotype value of 8 is rendered as "x") followed by its
 * frequency, crossover lines connecting haplotypes of adjacent blocks,
 * and the multilocus D' value for each inter-block gap boxed at the
 * bottom of the panel.
 *
 * @param gr                 graphics context to paint on (cast to Graphics2D)
 * @param myuseThickness     if true, crossover lines are drawn thin/thick based
 *                           on mycrossThinThresh/mycrossThickThresh; otherwise
 *                           they are colored by how far the crossover percentage
 *                           exceeds mycolorThresh
 * @param mycolorThresh      percentage threshold for drawing colored crossover lines
 * @param mycrossThinThresh  percentage threshold above which a (thin) line is drawn
 * @param mycrossThickThresh percentage threshold above which the line is drawn thick
 * @param gapDPrime          multilocus D' value for each gap between adjacent blocks
 * @param hapsInBlocks       haplotypes indexed as [block][haplotype]
 */
public void haploDraw(Graphics gr, boolean myuseThickness, int mycolorThresh, int mycrossThinThresh, int mycrossThickThresh, double[] gapDPrime, Haplotype[][] hapsInBlocks){
    Graphics2D g = (Graphics2D) gr;
    final BasicStroke stroke = new BasicStroke(1.0f);
    final BasicStroke wideStroke = new BasicStroke(2.0f);
    final int verticalOffset = 43;
    final Font nonMonoFont = new Font("Lucida Bright", Font.PLAIN, 12);
    final Font regFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 12);
    final Font smallFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 7);
    final Font boldFont = new Font("Lucida Bright", Font.BOLD, 12);
    FontMetrics regfm = g.getFontMetrics(regFont);
    FontMetrics nonMonofm = g.getFontMetrics(nonMonoFont);
    FontMetrics boldfm = g.getFontMetrics(boldFont);
    String theHap = new String();
    int x = 10;
    int y = verticalOffset;
    int totalWidth = 0;
    NumberFormat nf = NumberFormat.getInstance();
    NumberFormat nfMulti = NumberFormat.getInstance();
    nf.setMinimumFractionDigits(3);
    nf.setMaximumFractionDigits(3);
    nfMulti.setMinimumFractionDigits(2);
    nfMulti.setMaximumFractionDigits(2);
    // lookupPos[block][displayRow] -> index into hapsInBlocks[block],
    // inverting each haplotype's list order so rows are drawn in display order.
    int[][]lookupPos = new int[hapsInBlocks.length][];
    for (int p = 0; p < lookupPos.length; p++){
        lookupPos[p] = new int[hapsInBlocks[p].length];
        for (int q = 0; q < lookupPos[p].length; q++){
            lookupPos[p][hapsInBlocks[p][q].getListOrder()] = q;
            //System.out.println(p + " " + q + " " + hapsInBlocks[p][q].getListOrder());
        }
    }
    Dimension theDimension = haploGetPreferredSize(hapsInBlocks, gr);
    int windowX = (int)theDimension.getWidth();
    int windowY = (int)theDimension.getHeight();
    // Clear the whole drawing area before painting.
    g.setColor(Color.white);
    g.fillRect(0,0,windowX,windowY);
    g.setColor(Color.black);
    for (int i = 0; i < hapsInBlocks.length; i++){
        int[] markerNums = hapsInBlocks[i][0].getMarkers();
        boolean[] tags = hapsInBlocks[i][0].getTags();
        int headerX = x;
        for (int z = 0; z < markerNums.length; z++){
            //put tag snps in red
            if (tags[z]) { g.setColor(Color.red); }
            //write labels with more than one digit vertically
            if (markerNums[z]+1 < 10){
                g.setFont(regFont);
                g.drawString(String.valueOf(markerNums[z]+1), headerX, 18);
                headerX += (regfm.stringWidth(String.valueOf(markerNums[z]+1)));
            }else {
                int ones = (markerNums[z]+1)%10;
                // NOTE(review): the %100 keeps only the last two digits, so marker
                // numbers >= 100 lose their hundreds digit here — confirm intended.
                int tens = (((markerNums[z]+1)-ones)%100)/10;
                g.setFont(regFont);
                g.drawString(String.valueOf(ones), headerX, 18);
                g.setFont(smallFont);
                g.drawString(String.valueOf(tens), headerX-2, 20-regfm.getAscent());
                headerX += (regfm.stringWidth(String.valueOf(ones)));
            }
            g.setColor(Color.black);
        }
        for (int j = 0; j < hapsInBlocks[i].length; j++){
            int curHapNum = lookupPos[i][j];
            theHap = new String();
            String thePercentage = new String();
            int[] theGeno = hapsInBlocks[i][curHapNum].getGeno();
            for (int k = 0; k < theGeno.length; k++){
                //if we don't know what one of the alleles for a marker is, use "x"
                if (theGeno[k] == 8){
                    theHap += "x";
                }else{
                    theHap += theGeno[k];
                }
            }
            //draw the haplotype in mono font
            g.setFont(regFont);
            g.drawString(theHap, x, y);
            //draw the percentage value in non mono font
            thePercentage = " (" + nf.format(hapsInBlocks[i][curHapNum].getPercentage()) + ")";
            g.setFont(nonMonoFont);
            g.drawString(thePercentage, x+regfm.stringWidth(theHap), y);
            totalWidth = regfm.stringWidth(theHap) + nonMonofm.stringWidth(thePercentage);
            if (i < hapsInBlocks.length - 1){
                //draw crossovers
                for (int crossCount = 0; crossCount < hapsInBlocks[i+1].length; crossCount++){
                    double crossVal = hapsInBlocks[i][curHapNum].getCrossover(crossCount);
                    if (myuseThickness){
                        //draw thin and thick lines
                        if (crossVal*100 > mycrossThinThresh){
                            if (crossVal*100 > mycrossThickThresh){
                                g.setStroke(wideStroke);
                            }else{
                                g.setStroke(stroke);
                            }
                            //this arcane formula draws lines neatly from one hap to another
                            g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder()))));
                        }
                    }else{
                        //draw colored lines
                        if(crossVal*100 > mycolorThresh){
                            g.setStroke(stroke);
                            double overThresh = crossVal*100 - mycolorThresh;
                            float lineRed, lineGreen, lineBlue;
                            // NOTE(review): (50-mycolorThresh)/2 is integer division — confirm intended.
                            if(overThresh < (50-mycolorThresh)/2){
                                //cold colors
                                lineRed=0.0f;
                                lineBlue=new Double(0.9-((overThresh/((50-mycolorThresh)/2))*0.9)).floatValue();
                                lineGreen=0.9f-lineBlue;
                            }else{
                                //hot colors
                                lineBlue=0.0f;
                                lineRed=new Double(((overThresh-(50-mycolorThresh)/2)/((50-mycolorThresh)/2))*0.9).floatValue();
                                lineGreen=0.9f-lineRed;
                            }
                            Color lineColor = new Color(lineRed, lineGreen, lineBlue);
                            g.setColor(lineColor);
                            g.setStroke(new BasicStroke(1.5f));
                            g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder()))));
                            g.setColor(Color.black);
                            g.setStroke(stroke);
                        }
                    }
                }
            }
            y += (regfm.getHeight()+5);
        }
        //add the multilocus d prime if appropriate
        if (i < hapsInBlocks.length - 1){
            int multiX = x +totalWidth+3;
            g.setStroke(wideStroke);
            g.setFont(boldFont);
            g.drawRect(multiX, windowY-boldfm.getAscent()-4, boldfm.stringWidth("8.88")+3, boldfm.getAscent()+3);
            g.drawString(String.valueOf(nfMulti.format(gapDPrime[i])), multiX+2, windowY - 3);
            g.setStroke(stroke);
        }
        x += (totalWidth + 40);
        y = verticalOffset;
    }
}
/**
 * Paints the haplotype display: for each block a header row of marker
 * numbers (tag SNPs drawn in red), one row per haplotype showing its
 * alleles (a genotype value of 8 is rendered as "x") followed by its
 * frequency, crossover lines connecting haplotypes of adjacent blocks,
 * and the multilocus D' value for each inter-block gap boxed at the
 * bottom of the panel.
 *
 * @param gr                 graphics context to paint on (cast to Graphics2D)
 * @param myuseThickness     if true, crossover lines are drawn thin/thick based
 *                           on mycrossThinThresh/mycrossThickThresh; otherwise
 *                           they are colored by how far the crossover percentage
 *                           exceeds mycolorThresh
 * @param mycolorThresh      percentage threshold for drawing colored crossover lines
 * @param mycrossThinThresh  percentage threshold above which a (thin) line is drawn
 * @param mycrossThickThresh percentage threshold above which the line is drawn thick
 * @param gapDPrime          multilocus D' value for each gap between adjacent blocks
 * @param hapsInBlocks       haplotypes indexed as [block][haplotype]
 */
public void haploDraw(Graphics gr, boolean myuseThickness, int mycolorThresh, int mycrossThinThresh, int mycrossThickThresh, double[] gapDPrime, Haplotype[][] hapsInBlocks){
    Graphics2D g = (Graphics2D) gr;
    final BasicStroke stroke = new BasicStroke(1.0f);
    final BasicStroke wideStroke = new BasicStroke(2.0f);
    final int verticalOffset = 43;
    final Font nonMonoFont = new Font("Lucida Bright", Font.PLAIN, 12);
    final Font regFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 12);
    final Font smallFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 7);
    final Font boldFont = new Font("Lucida Bright", Font.BOLD, 12);
    FontMetrics regfm = g.getFontMetrics(regFont);
    FontMetrics nonMonofm = g.getFontMetrics(nonMonoFont);
    FontMetrics boldfm = g.getFontMetrics(boldFont);
    String theHap = new String();
    int x = 10;
    int y = verticalOffset;
    int totalWidth = 0;
    NumberFormat nf = NumberFormat.getInstance();
    NumberFormat nfMulti = NumberFormat.getInstance();
    nf.setMinimumFractionDigits(3);
    nf.setMaximumFractionDigits(3);
    nfMulti.setMinimumFractionDigits(2);
    nfMulti.setMaximumFractionDigits(2);
    // lookupPos[block][displayRow] -> index into hapsInBlocks[block],
    // inverting each haplotype's list order so rows are drawn in display order.
    int[][]lookupPos = new int[hapsInBlocks.length][];
    for (int p = 0; p < lookupPos.length; p++){
        lookupPos[p] = new int[hapsInBlocks[p].length];
        for (int q = 0; q < lookupPos[p].length; q++){
            lookupPos[p][hapsInBlocks[p][q].getListOrder()] = q;
            //System.out.println(p + " " + q + " " + hapsInBlocks[p][q].getListOrder());
        }
    }
    Dimension theDimension = haploGetPreferredSize(hapsInBlocks, gr);
    int windowX = (int)theDimension.getWidth();
    int windowY = (int)theDimension.getHeight();
    // Clear the whole drawing area before painting.
    g.setColor(Color.white);
    g.fillRect(0,0,windowX,windowY);
    g.setColor(Color.black);
    for (int i = 0; i < hapsInBlocks.length; i++){
        int[] markerNums = hapsInBlocks[i][0].getMarkers();
        boolean[] tags = hapsInBlocks[i][0].getTags();
        int headerX = x;
        for (int z = 0; z < markerNums.length; z++){
            //put tag snps in red
            if (tags[z]) { g.setColor(Color.red); }
            //write labels with more than one digit vertically
            if (markerNums[z]+1 < 10){
                g.setFont(regFont);
                g.drawString(String.valueOf(markerNums[z]+1), headerX, 18);
                headerX += (regfm.stringWidth(String.valueOf(markerNums[z]+1)));
            }else {
                int ones = (markerNums[z]+1)%10;
                // NOTE(review): the %100 keeps only the last two digits, so marker
                // numbers >= 100 lose their hundreds digit here — confirm intended.
                int tens = (((markerNums[z]+1)-ones)%100)/10;
                g.setFont(regFont);
                g.drawString(String.valueOf(ones), headerX, 18);
                g.setFont(smallFont);
                g.drawString(String.valueOf(tens), headerX-2, 20-regfm.getAscent());
                headerX += (regfm.stringWidth(String.valueOf(ones)));
            }
            g.setColor(Color.black);
        }
        for (int j = 0; j < hapsInBlocks[i].length; j++){
            int curHapNum = lookupPos[i][j];
            theHap = new String();
            String thePercentage = new String();
            int[] theGeno = hapsInBlocks[i][curHapNum].getGeno();
            for (int k = 0; k < theGeno.length; k++){
                //if we don't know what one of the alleles for a marker is, use "x"
                if (theGeno[k] == 8){
                    theHap += "x";
                }else{
                    theHap += theGeno[k];
                }
            }
            //draw the haplotype in mono font
            g.setFont(regFont);
            g.drawString(theHap, x, y);
            //draw the percentage value in non mono font
            thePercentage = " (" + nf.format(hapsInBlocks[i][curHapNum].getPercentage()) + ")";
            g.setFont(nonMonoFont);
            g.drawString(thePercentage, x+regfm.stringWidth(theHap), y);
            totalWidth = regfm.stringWidth(theHap) + nonMonofm.stringWidth(thePercentage);
            if (i < hapsInBlocks.length - 1){
                //draw crossovers
                for (int crossCount = 0; crossCount < hapsInBlocks[i+1].length; crossCount++){
                    double crossVal = hapsInBlocks[i][curHapNum].getCrossover(crossCount);
                    if (myuseThickness){
                        //draw thin and thick lines
                        if (crossVal*100 > mycrossThinThresh){
                            if (crossVal*100 > mycrossThickThresh){
                                g.setStroke(wideStroke);
                            }else{
                                g.setStroke(stroke);
                            }
                            //this arcane formula draws lines neatly from one hap to another
                            g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder()))));
                        }
                    }else{
                        //draw colored lines
                        if(crossVal*100 > mycolorThresh){
                            g.setStroke(stroke);
                            double overThresh = crossVal*100 - mycolorThresh;
                            float lineRed, lineGreen, lineBlue;
                            // NOTE(review): (50-mycolorThresh)/2 is integer division — confirm intended.
                            if(overThresh < (50-mycolorThresh)/2){
                                //cold colors
                                lineRed=0.0f;
                                lineBlue=new Double(0.9-((overThresh/((50-mycolorThresh)/2))*0.9)).floatValue();
                                lineGreen=0.9f-lineBlue;
                            }else{
                                //hot colors
                                lineBlue=0.0f;
                                lineRed=new Double(((overThresh-(50-mycolorThresh)/2)/((50-mycolorThresh)/2))*0.9).floatValue();
                                lineGreen=0.9f-lineRed;
                            }
                            Color lineColor = new Color(lineRed, lineGreen, lineBlue);
                            g.setColor(lineColor);
                            g.setStroke(new BasicStroke(1.5f));
                            g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder()))));
                            g.setColor(Color.black);
                            g.setStroke(stroke);
                        }
                    }
                }
            }
            y += (regfm.getHeight()+5);
        }
        //add the multilocus d prime if appropriate
        if (i < hapsInBlocks.length - 1){
            int multiX = x +totalWidth+3;
            g.setStroke(wideStroke);
            g.setFont(boldFont);
            g.drawRect(multiX, windowY-boldfm.getAscent()-4, boldfm.stringWidth("8.88")+3, boldfm.getAscent()+3);
            g.drawString(String.valueOf(nfMulti.format(gapDPrime[i])), multiX+2, windowY - 3);
            g.setStroke(stroke);
        }
        x += (totalWidth + 40);
        y = verticalOffset;
    }
}
1,112,572
/**
 * Paints the haplotype display: for each block a header row of marker
 * numbers (tag SNPs drawn in red), one row per haplotype showing its
 * alleles (a genotype value of 8 is rendered as "x") followed by its
 * frequency, crossover lines connecting haplotypes of adjacent blocks,
 * and the multilocus D' value for each inter-block gap boxed at the
 * bottom of the panel.
 *
 * @param gr                 graphics context to paint on (cast to Graphics2D)
 * @param myuseThickness     if true, crossover lines are drawn thin/thick based
 *                           on mycrossThinThresh/mycrossThickThresh; otherwise
 *                           they are colored by how far the crossover percentage
 *                           exceeds mycolorThresh
 * @param mycolorThresh      percentage threshold for drawing colored crossover lines
 * @param mycrossThinThresh  percentage threshold above which a (thin) line is drawn
 * @param mycrossThickThresh percentage threshold above which the line is drawn thick
 * @param gapDPrime          multilocus D' value for each gap between adjacent blocks
 * @param hapsInBlocks       haplotypes indexed as [block][haplotype]
 */
public void haploDraw(Graphics gr, boolean myuseThickness, int mycolorThresh, int mycrossThinThresh, int mycrossThickThresh, double[] gapDPrime, Haplotype[][] hapsInBlocks){
    Graphics2D g = (Graphics2D) gr;
    final BasicStroke stroke = new BasicStroke(1.0f);
    final BasicStroke wideStroke = new BasicStroke(2.0f);
    final int verticalOffset = 43;
    final Font nonMonoFont = new Font("Lucida Bright", Font.PLAIN, 12);
    final Font regFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 12);
    final Font smallFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 7);
    final Font boldFont = new Font("Lucida Bright", Font.BOLD, 12);
    FontMetrics regfm = g.getFontMetrics(regFont);
    FontMetrics nonMonofm = g.getFontMetrics(nonMonoFont);
    FontMetrics boldfm = g.getFontMetrics(boldFont);
    String theHap = new String();
    int x = 10;
    int y = verticalOffset;
    int totalWidth = 0;
    NumberFormat nf = NumberFormat.getInstance();
    NumberFormat nfMulti = NumberFormat.getInstance();
    nf.setMinimumFractionDigits(3);
    nf.setMaximumFractionDigits(3);
    nfMulti.setMinimumFractionDigits(2);
    nfMulti.setMaximumFractionDigits(2);
    // lookupPos[block][displayRow] -> index into hapsInBlocks[block],
    // inverting each haplotype's list order so rows are drawn in display order.
    int[][]lookupPos = new int[hapsInBlocks.length][];
    for (int p = 0; p < lookupPos.length; p++){
        lookupPos[p] = new int[hapsInBlocks[p].length];
        for (int q = 0; q < lookupPos[p].length; q++){
            lookupPos[p][hapsInBlocks[p][q].getListOrder()] = q;
            //System.out.println(p + " " + q + " " + hapsInBlocks[p][q].getListOrder());
        }
    }
    Dimension theDimension = haploGetPreferredSize(hapsInBlocks, gr);
    int windowX = (int)theDimension.getWidth();
    int windowY = (int)theDimension.getHeight();
    // Clear the whole drawing area before painting.
    g.setColor(Color.white);
    g.fillRect(0,0,windowX,windowY);
    g.setColor(Color.black);
    for (int i = 0; i < hapsInBlocks.length; i++){
        int[] markerNums = hapsInBlocks[i][0].getMarkers();
        boolean[] tags = hapsInBlocks[i][0].getTags();
        int headerX = x;
        for (int z = 0; z < markerNums.length; z++){
            //put tag snps in red
            if (tags[z]) { g.setColor(Color.red); }
            //write labels with more than one digit vertically
            if (markerNums[z]+1 < 10){
                g.setFont(regFont);
                g.drawString(String.valueOf(markerNums[z]+1), headerX, 18);
                headerX += (regfm.stringWidth(String.valueOf(markerNums[z]+1)));
            }else {
                int ones = (markerNums[z]+1)%10;
                // NOTE(review): the %100 keeps only the last two digits, so marker
                // numbers >= 100 lose their hundreds digit here — confirm intended.
                int tens = (((markerNums[z]+1)-ones)%100)/10;
                g.setFont(regFont);
                g.drawString(String.valueOf(ones), headerX, 18);
                g.setFont(smallFont);
                g.drawString(String.valueOf(tens), headerX-2, 20-regfm.getAscent());
                headerX += (regfm.stringWidth(String.valueOf(ones)));
            }
            g.setColor(Color.black);
        }
        for (int j = 0; j < hapsInBlocks[i].length; j++){
            int curHapNum = lookupPos[i][j];
            theHap = new String();
            String thePercentage = new String();
            int[] theGeno = hapsInBlocks[i][curHapNum].getGeno();
            for (int k = 0; k < theGeno.length; k++){
                //if we don't know what one of the alleles for a marker is, use "x"
                if (theGeno[k] == 8){
                    theHap += "x";
                }else{
                    theHap += theGeno[k];
                }
            }
            //draw the haplotype in mono font
            g.setFont(regFont);
            g.drawString(theHap, x, y);
            //draw the percentage value in non mono font
            thePercentage = " (" + nf.format(hapsInBlocks[i][curHapNum].getPercentage()) + ")";
            g.setFont(nonMonoFont);
            g.drawString(thePercentage, x+regfm.stringWidth(theHap), y);
            totalWidth = regfm.stringWidth(theHap) + nonMonofm.stringWidth(thePercentage);
            if (i < hapsInBlocks.length - 1){
                //draw crossovers
                for (int crossCount = 0; crossCount < hapsInBlocks[i+1].length; crossCount++){
                    double crossVal = hapsInBlocks[i][curHapNum].getCrossover(crossCount);
                    if (myuseThickness){
                        //draw thin and thick lines
                        if (crossVal*100 > mycrossThinThresh){
                            if (crossVal*100 > mycrossThickThresh){
                                g.setStroke(wideStroke);
                            }else{
                                g.setStroke(stroke);
                            }
                            //this arcane formula draws lines neatly from one hap to another
                            g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder()))));
                        }
                    }else{
                        //draw colored lines
                        if(crossVal*100 > mycolorThresh){
                            g.setStroke(stroke);
                            double overThresh = crossVal*100 - mycolorThresh;
                            float lineRed, lineGreen, lineBlue;
                            // NOTE(review): (50-mycolorThresh)/2 is integer division — confirm intended.
                            if(overThresh < (50-mycolorThresh)/2){
                                //cold colors
                                lineRed=0.0f;
                                lineBlue=new Double(0.9-((overThresh/((50-mycolorThresh)/2))*0.9)).floatValue();
                                lineGreen=0.9f-lineBlue;
                            }else{
                                //hot colors
                                lineBlue=0.0f;
                                lineRed=new Double(((overThresh-(50-mycolorThresh)/2)/((50-mycolorThresh)/2))*0.9).floatValue();
                                lineGreen=0.9f-lineRed;
                            }
                            Color lineColor = new Color(lineRed, lineGreen, lineBlue);
                            g.setColor(lineColor);
                            g.setStroke(new BasicStroke(1.5f));
                            g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder()))));
                            g.setColor(Color.black);
                            g.setStroke(stroke);
                        }
                    }
                }
            }
            y += (regfm.getHeight()+5);
        }
        //add the multilocus d prime if appropriate
        if (i < hapsInBlocks.length - 1){
            int multiX = x +totalWidth+3;
            g.setStroke(wideStroke);
            g.setFont(boldFont);
            g.drawRect(multiX, windowY-boldfm.getAscent()-4, boldfm.stringWidth("8.88")+3, boldfm.getAscent()+3);
            g.drawString(String.valueOf(nfMulti.format(gapDPrime[i])), multiX+2, windowY - 3);
            g.setStroke(stroke);
        }
        x += (totalWidth + 40);
        y = verticalOffset;
    }
}
/**
 * Paints the haplotype display: for each block a header row of marker
 * numbers (tag SNPs drawn in red), one row per haplotype showing its
 * alleles (a genotype value of 8 is rendered as "x") followed by its
 * frequency, crossover lines connecting haplotypes of adjacent blocks,
 * and the multilocus D' value for each inter-block gap boxed at the
 * bottom of the panel.
 *
 * @param gr                 graphics context to paint on (cast to Graphics2D)
 * @param myuseThickness     if true, crossover lines are drawn thin/thick based
 *                           on mycrossThinThresh/mycrossThickThresh; otherwise
 *                           they are colored by how far the crossover percentage
 *                           exceeds mycolorThresh
 * @param mycolorThresh      percentage threshold for drawing colored crossover lines
 * @param mycrossThinThresh  percentage threshold above which a (thin) line is drawn
 * @param mycrossThickThresh percentage threshold above which the line is drawn thick
 * @param gapDPrime          multilocus D' value for each gap between adjacent blocks
 * @param hapsInBlocks       haplotypes indexed as [block][haplotype]
 */
public void haploDraw(Graphics gr, boolean myuseThickness, int mycolorThresh, int mycrossThinThresh, int mycrossThickThresh, double[] gapDPrime, Haplotype[][] hapsInBlocks){
    Graphics2D g = (Graphics2D) gr;
    final BasicStroke stroke = new BasicStroke(1.0f);
    final BasicStroke wideStroke = new BasicStroke(2.0f);
    final int verticalOffset = 43;
    final Font nonMonoFont = new Font("Lucida Bright", Font.PLAIN, 12);
    final Font regFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 12);
    final Font smallFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 7);
    final Font boldFont = new Font("Lucida Bright", Font.BOLD, 12);
    FontMetrics regfm = g.getFontMetrics(regFont);
    FontMetrics nonMonofm = g.getFontMetrics(nonMonoFont);
    FontMetrics boldfm = g.getFontMetrics(boldFont);
    String theHap = new String();
    int x = 10;
    int y = verticalOffset;
    int totalWidth = 0;
    NumberFormat nf = NumberFormat.getInstance();
    NumberFormat nfMulti = NumberFormat.getInstance();
    nf.setMinimumFractionDigits(3);
    nf.setMaximumFractionDigits(3);
    nfMulti.setMinimumFractionDigits(2);
    nfMulti.setMaximumFractionDigits(2);
    // lookupPos[block][displayRow] -> index into hapsInBlocks[block],
    // inverting each haplotype's list order so rows are drawn in display order.
    int[][]lookupPos = new int[hapsInBlocks.length][];
    for (int p = 0; p < lookupPos.length; p++){
        lookupPos[p] = new int[hapsInBlocks[p].length];
        for (int q = 0; q < lookupPos[p].length; q++){
            lookupPos[p][hapsInBlocks[p][q].getListOrder()] = q;
            //System.out.println(p + " " + q + " " + hapsInBlocks[p][q].getListOrder());
        }
    }
    Dimension theDimension = haploGetPreferredSize(hapsInBlocks, gr);
    int windowX = (int)theDimension.getWidth();
    int windowY = (int)theDimension.getHeight();
    // Clear the whole drawing area before painting.
    g.setColor(Color.white);
    g.fillRect(0,0,windowX,windowY);
    g.setColor(Color.black);
    for (int i = 0; i < hapsInBlocks.length; i++){
        int[] markerNums = hapsInBlocks[i][0].getMarkers();
        boolean[] tags = hapsInBlocks[i][0].getTags();
        int headerX = x;
        for (int z = 0; z < markerNums.length; z++){
            //put tag snps in red
            if (tags[z]) { g.setColor(Color.red); }
            //write labels with more than one digit vertically
            if (markerNums[z]+1 < 10){
                g.setFont(regFont);
                g.drawString(String.valueOf(markerNums[z]+1), headerX, 18);
                headerX += (regfm.stringWidth(String.valueOf(markerNums[z]+1)));
            }else {
                int ones = (markerNums[z]+1)%10;
                // NOTE(review): the %100 keeps only the last two digits, so marker
                // numbers >= 100 lose their hundreds digit here — confirm intended.
                int tens = (((markerNums[z]+1)-ones)%100)/10;
                g.setFont(regFont);
                g.drawString(String.valueOf(ones), headerX, 18);
                g.setFont(smallFont);
                g.drawString(String.valueOf(tens), headerX-2, 20-regfm.getAscent());
                headerX += (regfm.stringWidth(String.valueOf(ones)));
            }
            g.setColor(Color.black);
        }
        for (int j = 0; j < hapsInBlocks[i].length; j++){
            int curHapNum = lookupPos[i][j];
            theHap = new String();
            String thePercentage = new String();
            int[] theGeno = hapsInBlocks[i][curHapNum].getGeno();
            for (int k = 0; k < theGeno.length; k++){
                //if we don't know what one of the alleles for a marker is, use "x"
                if (theGeno[k] == 8){
                    theHap += "x";
                }else{
                    theHap += theGeno[k];
                }
            }
            //draw the haplotype in mono font
            g.setFont(regFont);
            g.drawString(theHap, x, y);
            //draw the percentage value in non mono font
            thePercentage = " (" + nf.format(hapsInBlocks[i][curHapNum].getPercentage()) + ")";
            g.setFont(nonMonoFont);
            g.drawString(thePercentage, x+regfm.stringWidth(theHap), y);
            totalWidth = regfm.stringWidth(theHap) + nonMonofm.stringWidth(thePercentage);
            if (i < hapsInBlocks.length - 1){
                //draw crossovers
                for (int crossCount = 0; crossCount < hapsInBlocks[i+1].length; crossCount++){
                    double crossVal = hapsInBlocks[i][curHapNum].getCrossover(crossCount);
                    if (myuseThickness){
                        //draw thin and thick lines
                        if (crossVal*100 > mycrossThinThresh){
                            if (crossVal*100 > mycrossThickThresh){
                                g.setStroke(wideStroke);
                            }else{
                                g.setStroke(stroke);
                            }
                            //this arcane formula draws lines neatly from one hap to another
                            g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder()))));
                        }
                    }else{
                        //draw colored lines
                        if(crossVal*100 > mycolorThresh){
                            g.setStroke(stroke);
                            double overThresh = crossVal*100 - mycolorThresh;
                            float lineRed, lineGreen, lineBlue;
                            // NOTE(review): (50-mycolorThresh)/2 is integer division — confirm intended.
                            if(overThresh < (50-mycolorThresh)/2){
                                //cold colors
                                lineRed=0.0f;
                                lineBlue=new Double(0.9-((overThresh/((50-mycolorThresh)/2))*0.9)).floatValue();
                                lineGreen=0.9f-lineBlue;
                            }else{
                                //hot colors
                                lineBlue=0.0f;
                                lineRed=new Double(((overThresh-(50-mycolorThresh)/2)/((50-mycolorThresh)/2))*0.9).floatValue();
                                lineGreen=0.9f-lineRed;
                            }
                            Color lineColor = new Color(lineRed, lineGreen, lineBlue);
                            g.setColor(lineColor);
                            g.setStroke(new BasicStroke(1.5f));
                            g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder()))));
                            g.setColor(Color.black);
                            g.setStroke(stroke);
                        }
                    }
                }
            }
            y += (regfm.getHeight()+5);
        }
        //add the multilocus d prime if appropriate
        if (i < hapsInBlocks.length - 1){
            int multiX = x +totalWidth+3;
            g.setStroke(wideStroke);
            g.setFont(boldFont);
            g.drawRect(multiX, windowY-boldfm.getAscent()-4, boldfm.stringWidth("8.88")+3, boldfm.getAscent()+3);
            g.drawString(String.valueOf(nfMulti.format(gapDPrime[i])), multiX+2, windowY - 3);
            g.setStroke(stroke);
        }
        x += (totalWidth + 40);
        y = verticalOffset;
    }
}
1,112,573
/**
 * Paints the haplotype display: for each block a header row of marker
 * numbers (tag SNPs drawn in red), one row per haplotype showing its
 * alleles (a genotype value of 8 is rendered as "x") followed by its
 * frequency, crossover lines connecting haplotypes of adjacent blocks,
 * and the multilocus D' value for each inter-block gap boxed at the
 * bottom of the panel.
 *
 * @param gr                 graphics context to paint on (cast to Graphics2D)
 * @param myuseThickness     if true, crossover lines are drawn thin/thick based
 *                           on mycrossThinThresh/mycrossThickThresh; otherwise
 *                           they are colored by how far the crossover percentage
 *                           exceeds mycolorThresh
 * @param mycolorThresh      percentage threshold for drawing colored crossover lines
 * @param mycrossThinThresh  percentage threshold above which a (thin) line is drawn
 * @param mycrossThickThresh percentage threshold above which the line is drawn thick
 * @param gapDPrime          multilocus D' value for each gap between adjacent blocks
 * @param hapsInBlocks       haplotypes indexed as [block][haplotype]
 */
public void haploDraw(Graphics gr, boolean myuseThickness, int mycolorThresh, int mycrossThinThresh, int mycrossThickThresh, double[] gapDPrime, Haplotype[][] hapsInBlocks){
    Graphics2D g = (Graphics2D) gr;
    final BasicStroke stroke = new BasicStroke(1.0f);
    final BasicStroke wideStroke = new BasicStroke(2.0f);
    final int verticalOffset = 43;
    final Font nonMonoFont = new Font("Lucida Bright", Font.PLAIN, 12);
    final Font regFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 12);
    final Font smallFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 7);
    final Font boldFont = new Font("Lucida Bright", Font.BOLD, 12);
    FontMetrics regfm = g.getFontMetrics(regFont);
    FontMetrics nonMonofm = g.getFontMetrics(nonMonoFont);
    FontMetrics boldfm = g.getFontMetrics(boldFont);
    String theHap = new String();
    int x = 10;
    int y = verticalOffset;
    int totalWidth = 0;
    NumberFormat nf = NumberFormat.getInstance();
    NumberFormat nfMulti = NumberFormat.getInstance();
    nf.setMinimumFractionDigits(3);
    nf.setMaximumFractionDigits(3);
    nfMulti.setMinimumFractionDigits(2);
    nfMulti.setMaximumFractionDigits(2);
    // lookupPos[block][displayRow] -> index into hapsInBlocks[block],
    // inverting each haplotype's list order so rows are drawn in display order.
    int[][]lookupPos = new int[hapsInBlocks.length][];
    for (int p = 0; p < lookupPos.length; p++){
        lookupPos[p] = new int[hapsInBlocks[p].length];
        for (int q = 0; q < lookupPos[p].length; q++){
            lookupPos[p][hapsInBlocks[p][q].getListOrder()] = q;
            //System.out.println(p + " " + q + " " + hapsInBlocks[p][q].getListOrder());
        }
    }
    Dimension theDimension = haploGetPreferredSize(hapsInBlocks, gr);
    int windowX = (int)theDimension.getWidth();
    int windowY = (int)theDimension.getHeight();
    // Clear the whole drawing area before painting.
    g.setColor(Color.white);
    g.fillRect(0,0,windowX,windowY);
    g.setColor(Color.black);
    for (int i = 0; i < hapsInBlocks.length; i++){
        int[] markerNums = hapsInBlocks[i][0].getMarkers();
        boolean[] tags = hapsInBlocks[i][0].getTags();
        int headerX = x;
        for (int z = 0; z < markerNums.length; z++){
            //put tag snps in red
            if (tags[z]) { g.setColor(Color.red); }
            //write labels with more than one digit vertically
            if (markerNums[z]+1 < 10){
                g.setFont(regFont);
                g.drawString(String.valueOf(markerNums[z]+1), headerX, 18);
                headerX += (regfm.stringWidth(String.valueOf(markerNums[z]+1)));
            }else {
                int ones = (markerNums[z]+1)%10;
                // NOTE(review): the %100 keeps only the last two digits, so marker
                // numbers >= 100 lose their hundreds digit here — confirm intended.
                int tens = (((markerNums[z]+1)-ones)%100)/10;
                g.setFont(regFont);
                g.drawString(String.valueOf(ones), headerX, 18);
                g.setFont(smallFont);
                g.drawString(String.valueOf(tens), headerX-2, 20-regfm.getAscent());
                headerX += (regfm.stringWidth(String.valueOf(ones)));
            }
            g.setColor(Color.black);
        }
        for (int j = 0; j < hapsInBlocks[i].length; j++){
            int curHapNum = lookupPos[i][j];
            theHap = new String();
            String thePercentage = new String();
            int[] theGeno = hapsInBlocks[i][curHapNum].getGeno();
            for (int k = 0; k < theGeno.length; k++){
                //if we don't know what one of the alleles for a marker is, use "x"
                if (theGeno[k] == 8){
                    theHap += "x";
                }else{
                    theHap += theGeno[k];
                }
            }
            //draw the haplotype in mono font
            g.setFont(regFont);
            g.drawString(theHap, x, y);
            //draw the percentage value in non mono font
            thePercentage = " (" + nf.format(hapsInBlocks[i][curHapNum].getPercentage()) + ")";
            g.setFont(nonMonoFont);
            g.drawString(thePercentage, x+regfm.stringWidth(theHap), y);
            totalWidth = regfm.stringWidth(theHap) + nonMonofm.stringWidth(thePercentage);
            if (i < hapsInBlocks.length - 1){
                //draw crossovers
                for (int crossCount = 0; crossCount < hapsInBlocks[i+1].length; crossCount++){
                    double crossVal = hapsInBlocks[i][curHapNum].getCrossover(crossCount);
                    if (myuseThickness){
                        //draw thin and thick lines
                        if (crossVal*100 > mycrossThinThresh){
                            if (crossVal*100 > mycrossThickThresh){
                                g.setStroke(wideStroke);
                            }else{
                                g.setStroke(stroke);
                            }
                            //this arcane formula draws lines neatly from one hap to another
                            g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder()))));
                        }
                    }else{
                        //draw colored lines
                        if(crossVal*100 > mycolorThresh){
                            g.setStroke(stroke);
                            double overThresh = crossVal*100 - mycolorThresh;
                            float lineRed, lineGreen, lineBlue;
                            // NOTE(review): (50-mycolorThresh)/2 is integer division — confirm intended.
                            if(overThresh < (50-mycolorThresh)/2){
                                //cold colors
                                lineRed=0.0f;
                                lineBlue=new Double(0.9-((overThresh/((50-mycolorThresh)/2))*0.9)).floatValue();
                                lineGreen=0.9f-lineBlue;
                            }else{
                                //hot colors
                                lineBlue=0.0f;
                                lineRed=new Double(((overThresh-(50-mycolorThresh)/2)/((50-mycolorThresh)/2))*0.9).floatValue();
                                lineGreen=0.9f-lineRed;
                            }
                            Color lineColor = new Color(lineRed, lineGreen, lineBlue);
                            g.setColor(lineColor);
                            g.setStroke(new BasicStroke(1.5f));
                            g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder()))));
                            g.setColor(Color.black);
                            g.setStroke(stroke);
                        }
                    }
                }
            }
            y += (regfm.getHeight()+5);
        }
        //add the multilocus d prime if appropriate
        if (i < hapsInBlocks.length - 1){
            int multiX = x +totalWidth+3;
            g.setStroke(wideStroke);
            g.setFont(boldFont);
            g.drawRect(multiX, windowY-boldfm.getAscent()-4, boldfm.stringWidth("8.88")+3, boldfm.getAscent()+3);
            g.drawString(String.valueOf(nfMulti.format(gapDPrime[i])), multiX+2, windowY - 3);
            g.setStroke(stroke);
        }
        x += (totalWidth + 40);
        y = verticalOffset;
    }
}
public void haploDraw(Graphics gr, boolean myuseThickness, int mycolorThresh, int mycrossThinThresh, int mycrossThickThresh, double[] gapDPrime, Haplotype[][] hapsInBlocks){ Graphics2D g = (Graphics2D) gr; final BasicStroke stroke = new BasicStroke(1.0f); final BasicStroke wideStroke = new BasicStroke(2.0f); final int verticalOffset = 43; final Font nonMonoFont = new Font("Lucida Bright", Font.PLAIN, 12); final Font regFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 12); final Font smallFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 7); final Font boldFont = new Font("Lucida Bright", Font.BOLD, 12); FontMetrics regfm = g.getFontMetrics(regFont); FontMetrics nonMonofm = g.getFontMetrics(nonMonoFont); FontMetrics boldfm = g.getFontMetrics(boldFont); String theHap = new String(); int x = 10; int y = verticalOffset; int totalWidth = 0; NumberFormat nf = NumberFormat.getInstance(); NumberFormat nfMulti = NumberFormat.getInstance(); nf.setMinimumFractionDigits(3); nf.setMaximumFractionDigits(3); nfMulti.setMinimumFractionDigits(2); nfMulti.setMaximumFractionDigits(2); int[][]lookupPos = new int[hapsInBlocks.length][]; for (int p = 0; p < lookupPos.length; p++){ lookupPos[p] = new int[hapsInBlocks[p].length]; for (int q = 0; q < lookupPos[p].length; q++){ lookupPos[p][hapsInBlocks[p][q].getListOrder()] = q; //System.out.println(p + " " + q + " " + hapsInBlocks[p][q].getListOrder()); } } Dimension theDimension = haploGetPreferredSize(hapsInBlocks, gr); int windowX = (int)theDimension.getWidth(); int windowY = (int)theDimension.getHeight(); g.setColor(Color.white); g.fillRect(0,0,windowX,windowY); g.setColor(Color.black); for (int i = 0; i < hapsInBlocks.length; i++){ int[] markerNums = hapsInBlocks[i][0].getMarkers(); boolean[] tags = hapsInBlocks[i][0].getTags(); int headerX = x; for (int z = 0; z < markerNums.length; z++){ //put tag snps in red if (tags[z]) { g.setColor(Color.red); } //write labels with more than one digit vertically if (markerNums[z]+1 < 
10){ g.setFont(regFont); g.drawString(String.valueOf(markerNums[z]+1), headerX, 18); headerX += (regfm.stringWidth(String.valueOf(markerNums[z]+1))); }else { int ones = (markerNums[z]+1)%10; int tens = (((markerNums[z]+1)-ones)%100)/10; g.setFont(regFont); g.drawString(String.valueOf(ones), headerX, 18); g.setFont(smallFont); g.drawString(String.valueOf(tens), headerX-2, 20-regfm.getAscent()); headerX += (regfm.stringWidth(String.valueOf(ones))); } g.setColor(Color.black); } for (int j = 0; j < hapsInBlocks[i].length; j++){ int curHapNum = lookupPos[i][j]; theHap = new String(); String thePercentage = new String(); int[] theGeno = hapsInBlocks[i][curHapNum].getGeno(); for (int k = 0; k < theGeno.length; k++){ //if we don't know what one of the alleles for a marker is, use "x" if (theGeno[k] == 8){ theHap += "x"; }else{ theHap += theGeno[k]; } } //draw the haplotype in mono font g.setFont(regFont); g.drawString(theHap, x, y); //draw the percentage value in non mono font thePercentage = " (" + nf.format(hapsInBlocks[i][curHapNum].getPercentage()) + ")"; g.setFont(nonMonoFont); g.drawString(thePercentage, x+regfm.stringWidth(theHap), y); totalWidth = regfm.stringWidth(theHap) + nonMonofm.stringWidth(thePercentage); if (i < hapsInBlocks.length - 1){ //draw crossovers for (int crossCount = 0; crossCount < hapsInBlocks[i+1].length; crossCount++){ double crossVal = hapsInBlocks[i][curHapNum].getCrossover(crossCount); if (myuseThickness){ //draw thin and thick lines if (crossVal*100 > mycrossThinThresh){ if (crossVal*100 > mycrossThickThresh){ g.setStroke(wideStroke); }else{ g.setStroke(stroke); } //this arcane formula draws lines neatly from one hap to another g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder())))); } }else{ //draw colored lines if(crossVal*100 > mycolorThresh){ g.setStroke(stroke); double overThresh = crossVal*100 - 
mycolorThresh; float lineRed, lineGreen, lineBlue; if(overThresh < (50-mycolorThresh)/2){ //cold colors lineRed=0.0f; lineBlue=new Double(0.9-((overThresh/((50-mycolorThresh)/2))*0.9)).floatValue(); lineGreen=0.9f-lineBlue; }else{ //hot colors lineBlue=0.0f; lineRed=new Double(((overThresh-(50-mycolorThresh)/2)/((50-mycolorThresh)/2))*0.9).floatValue(); lineGreen=0.9f-lineRed; } g.setColor(lineColor); g.setStroke(new BasicStroke(1.5f)); g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder())))); g.setColor(Color.black); g.setStroke(stroke); } } } } y += (regfm.getHeight()+5); } //add the multilocus d prime if appropriate if (i < hapsInBlocks.length - 1){ int multiX = x +totalWidth+3; g.setStroke(wideStroke); g.setFont(boldFont); g.drawRect(multiX, windowY-boldfm.getAscent()-4, boldfm.stringWidth("8.88")+3, boldfm.getAscent()+3); g.drawString(String.valueOf(nfMulti.format(gapDPrime[i])), multiX+2, windowY - 3); g.setStroke(stroke); } x += (totalWidth + 40); y = verticalOffset; } }
1,112,574
public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception { AccessController.canAccess(context.getUser(), ACL_EDIT_USERS); User user = buildUser(actionForm); UserManager.getInstance().updateUser(user); UserActivityLogger.getInstance().logActivity( context.getUser().getUsername(), "Edited user "+user.getName()+"/"+user.getPassword()); return mapping.findForward(Forwards.SUCCESS); }
public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception { AccessController.canAccess(context.getUser(), ACL_EDIT_USERS); User user = buildUser(actionForm); UserManager.getInstance().updateUser(user); UserActivityLogger.getInstance().logActivity( context.getUser().getUsername(), "Edited user "+user.getName()); return mapping.findForward(Forwards.SUCCESS); }
1,112,575
public void remove(SQLObject sqo) throws ArchitectException{ ProfileResult old = results.remove(sqo); if ( sqo instanceof SQLTable ) { for ( SQLColumn col: ((SQLTable)sqo).getColumns()) { results.remove(col); } } else if ( sqo instanceof SQLColumn ) { SQLTable table = ((SQLColumn)sqo).getParentTable(); boolean allColumnDeleted = true; for ( SQLColumn col: table.getColumns()) { if ( getResult(col) != null ) { allColumnDeleted = false; break; } } if ( allColumnDeleted ){ results.remove(table); } } fireProfileRemovedEvent(new ProfileChangeEvent(this, null)); }
public void remove(SQLObject sqo) throws ArchitectException{ results.remove(sqo); if ( sqo instanceof SQLTable ) { for ( SQLColumn col: ((SQLTable)sqo).getColumns()) { results.remove(col); } } else if ( sqo instanceof SQLColumn ) { SQLTable table = ((SQLColumn)sqo).getParentTable(); boolean allColumnDeleted = true; for ( SQLColumn col: table.getColumns()) { if ( getResult(col) != null ) { allColumnDeleted = false; break; } } if ( allColumnDeleted ){ results.remove(table); } } fireProfileRemovedEvent(new ProfileChangeEvent(this, null)); }
1,112,576
public DataFlavor bestImportFlavor(JComponent c, DataFlavor[] flavors) { logger.debug("PlayPenTransferHandler: can I import "+Arrays.asList(flavors)); for (int i = 0; i < flavors.length; i++) { String cls = flavors[i].getDefaultRepresentationClassAsString(); logger.debug("representation class = "+cls); logger.debug("mime type = "+flavors[i].getMimeType()); logger.debug("type = "+flavors[i].getPrimaryType()); logger.debug("subtype = "+flavors[i].getSubType()); logger.debug("class = "+flavors[i].getParameter("class")); logger.debug("isSerializedObject = "+flavors[i].isFlavorSerializedObjectType()); logger.debug("isInputStream = "+flavors[i].isRepresentationClassInputStream()); logger.debug("isRemoteObject = "+flavors[i].isFlavorRemoteObjectType()); logger.debug("isLocalObject = "+flavors[i].getMimeType().equals(DataFlavor.javaJVMLocalObjectMimeType)); if (flavors[i].equals(SQLObjectTransferable.flavor) || flavors[i].equals(SQLObjectListTransferable.flavor)) { logger.debug("YES"); return flavors[i]; } } logger.debug("NO!"); return null; }
public DataFlavor bestImportFlavor(JComponent c, DataFlavor[] flavors) { logger.debug("PlayPenTransferHandler: can I import "+Arrays.asList(flavors)); for (int i = 0; i < flavors.length; i++) { String cls = flavors[i].getDefaultRepresentationClassAsString(); logger.debug("representation class = "+cls); logger.debug("mime type = "+flavors[i].getMimeType()); logger.debug("type = "+flavors[i].getPrimaryType()); logger.debug("subtype = "+flavors[i].getSubType()); logger.debug("class = "+flavors[i].getParameter("class")); logger.debug("isSerializedObject = "+flavors[i].isFlavorSerializedObjectType()); logger.debug("isInputStream = "+flavors[i].isRepresentationClassInputStream()); logger.debug("isRemoteObject = "+flavors[i].isFlavorRemoteObjectType()); logger.debug("isLocalObject = "+flavors[i].getMimeType().equals(DataFlavor.javaJVMLocalObjectMimeType)); if (flavors[i].equals(SQLObjectTransferable.flavor) || flavors[i].equals(SQLObjectListTransferable.flavor)) { logger.debug("YES"); best = flavors[i]; } else { logger.debug("NO!"); } } logger.debug("NO!"); return null; }
1,112,579
public DataFlavor bestImportFlavor(JComponent c, DataFlavor[] flavors) { logger.debug("PlayPenTransferHandler: can I import "+Arrays.asList(flavors)); for (int i = 0; i < flavors.length; i++) { String cls = flavors[i].getDefaultRepresentationClassAsString(); logger.debug("representation class = "+cls); logger.debug("mime type = "+flavors[i].getMimeType()); logger.debug("type = "+flavors[i].getPrimaryType()); logger.debug("subtype = "+flavors[i].getSubType()); logger.debug("class = "+flavors[i].getParameter("class")); logger.debug("isSerializedObject = "+flavors[i].isFlavorSerializedObjectType()); logger.debug("isInputStream = "+flavors[i].isRepresentationClassInputStream()); logger.debug("isRemoteObject = "+flavors[i].isFlavorRemoteObjectType()); logger.debug("isLocalObject = "+flavors[i].getMimeType().equals(DataFlavor.javaJVMLocalObjectMimeType)); if (flavors[i].equals(SQLObjectTransferable.flavor) || flavors[i].equals(SQLObjectListTransferable.flavor)) { logger.debug("YES"); return flavors[i]; } } logger.debug("NO!"); return null; }
public DataFlavor bestImportFlavor(JComponent c, DataFlavor[] flavors) { logger.debug("PlayPenTransferHandler: can I import "+Arrays.asList(flavors)); for (int i = 0; i < flavors.length; i++) { String cls = flavors[i].getDefaultRepresentationClassAsString(); logger.debug("representation class = "+cls); logger.debug("mime type = "+flavors[i].getMimeType()); logger.debug("type = "+flavors[i].getPrimaryType()); logger.debug("subtype = "+flavors[i].getSubType()); logger.debug("class = "+flavors[i].getParameter("class")); logger.debug("isSerializedObject = "+flavors[i].isFlavorSerializedObjectType()); logger.debug("isInputStream = "+flavors[i].isRepresentationClassInputStream()); logger.debug("isRemoteObject = "+flavors[i].isFlavorRemoteObjectType()); logger.debug("isLocalObject = "+flavors[i].getMimeType().equals(DataFlavor.javaJVMLocalObjectMimeType)); if (flavors[i].equals(SQLObjectTransferable.flavor) || flavors[i].equals(SQLObjectListTransferable.flavor)) { logger.debug("YES"); return flavors[i]; } } logger.debug("NO!"); return null; }
1,112,580
public void drop(DropTargetDropEvent dtde) { Transferable t = dtde.getTransferable(); PlayPen c = (PlayPen) dtde.getDropTargetContext().getComponent(); DataFlavor importFlavor = bestImportFlavor(c, t.getTransferDataFlavors()); if (importFlavor == null) { dtde.rejectDrop(); } else { try { Object someData = t.getTransferData(importFlavor); logger.debug("MyJTreeTransferHandler.importData: got object of type "+someData.getClass().getName()); if (someData instanceof SQLTable) { dtde.acceptDrop(DnDConstants.ACTION_COPY); c.addTable((SQLTable) someData, dtde.getLocation()); dtde.dropComplete(true); return; } else if (someData instanceof SQLSchema) { dtde.acceptDrop(DnDConstants.ACTION_COPY); SQLSchema sourceSchema = (SQLSchema) someData; c.addSchema(sourceSchema, dtde.getLocation()); dtde.dropComplete(true); return; } else if (someData instanceof SQLCatalog) { dtde.acceptDrop(DnDConstants.ACTION_COPY); SQLCatalog sourceCatalog = (SQLCatalog) someData; Iterator cit = sourceCatalog.getChildren().iterator(); if (sourceCatalog.isSchemaContainer()) { while (cit.hasNext()) { SQLSchema sourceSchema = (SQLSchema) cit.next(); c.addSchema(sourceSchema, dtde.getLocation()); } } else { while (cit.hasNext()) { SQLTable sourceTable = (SQLTable) cit.next(); c.addTable(sourceTable, dtde.getLocation()); } } dtde.dropComplete(true); return; } else if (someData instanceof SQLColumn) { dtde.acceptDrop(DnDConstants.ACTION_COPY); SQLColumn column = (SQLColumn) someData; JLabel colName = new JLabel(column.getColumnName()); colName.setSize(colName.getPreferredSize()); c.add(colName, dtde.getLocation()); logger.debug("Added "+column.getColumnName()+" to playpen (temporary, only for testing)"); colName.revalidate(); dtde.dropComplete(true); return; } else if (someData instanceof SQLObject[]) { // needs work (should use addSchema()) dtde.acceptDrop(DnDConstants.ACTION_COPY); SQLObject[] objects = (SQLObject[]) someData; for (int i = 0; i < objects.length; i++) { if (objects[i] instanceof SQLTable) 
{ c.addTable((SQLTable) objects[i], dtde.getLocation()); } else if (objects[i] instanceof SQLSchema) { c.addSchema((SQLSchema) objects[i], dtde.getLocation()); } else { logger.warn("Unsupported object in multi-item drop: " +objects[i]); } } dtde.dropComplete(true); return; } else { dtde.rejectDrop(); } } catch (UnsupportedFlavorException ufe) { ufe.printStackTrace(); dtde.rejectDrop(); } catch (IOException ioe) { ioe.printStackTrace(); dtde.rejectDrop(); } catch (InvalidDnDOperationException ex) { ex.printStackTrace(); dtde.rejectDrop(); } catch (ArchitectException ex) { ex.printStackTrace(); dtde.rejectDrop(); } } }
public void drop(DropTargetDropEvent dtde) { Transferable t = dtde.getTransferable(); PlayPen c = (PlayPen) dtde.getDropTargetContext().getComponent(); DataFlavor importFlavor = bestImportFlavor(c, t.getTransferDataFlavors()); if (importFlavor == null) { dtde.rejectDrop(); } else { try { Object someData = t.getTransferData(importFlavor); logger.debug("MyJTreeTransferHandler.importData: got object of type "+someData.getClass().getName()+" @"+someData.hashCode()); if (someData instanceof SQLTable) { dtde.acceptDrop(DnDConstants.ACTION_COPY); c.addTable((SQLTable) someData, dtde.getLocation()); dtde.dropComplete(true); return; } else if (someData instanceof SQLSchema) { dtde.acceptDrop(DnDConstants.ACTION_COPY); SQLSchema sourceSchema = (SQLSchema) someData; c.addSchema(sourceSchema, dtde.getLocation()); dtde.dropComplete(true); return; } else if (someData instanceof SQLCatalog) { dtde.acceptDrop(DnDConstants.ACTION_COPY); SQLCatalog sourceCatalog = (SQLCatalog) someData; Iterator cit = sourceCatalog.getChildren().iterator(); if (sourceCatalog.isSchemaContainer()) { while (cit.hasNext()) { SQLSchema sourceSchema = (SQLSchema) cit.next(); c.addSchema(sourceSchema, dtde.getLocation()); } } else { while (cit.hasNext()) { SQLTable sourceTable = (SQLTable) cit.next(); c.addTable(sourceTable, dtde.getLocation()); } } dtde.dropComplete(true); return; } else if (someData instanceof SQLColumn) { dtde.acceptDrop(DnDConstants.ACTION_COPY); SQLColumn column = (SQLColumn) someData; JLabel colName = new JLabel(column.getColumnName()); colName.setSize(colName.getPreferredSize()); c.add(colName, dtde.getLocation()); logger.debug("Added "+column.getColumnName()+" to playpen (temporary, only for testing)"); colName.revalidate(); dtde.dropComplete(true); return; } else if (someData instanceof SQLObject[]) { // needs work (should use addSchema()) dtde.acceptDrop(DnDConstants.ACTION_COPY); SQLObject[] objects = (SQLObject[]) someData; for (int i = 0; i < objects.length; i++) { if 
(objects[i] instanceof SQLTable) { c.addTable((SQLTable) objects[i], dtde.getLocation()); } else if (objects[i] instanceof SQLSchema) { c.addSchema((SQLSchema) objects[i], dtde.getLocation()); } else { logger.warn("Unsupported object in multi-item drop: " +objects[i]); } } dtde.dropComplete(true); return; } else { dtde.rejectDrop(); } } catch (UnsupportedFlavorException ufe) { ufe.printStackTrace(); dtde.rejectDrop(); } catch (IOException ioe) { ioe.printStackTrace(); dtde.rejectDrop(); } catch (InvalidDnDOperationException ex) { ex.printStackTrace(); dtde.rejectDrop(); } catch (ArchitectException ex) { ex.printStackTrace(); dtde.rejectDrop(); } } }
1,112,581
public PlayPen(SQLDatabase db) { super(); if (db == null) throw new NullPointerException("db must be non-null"); this.db = db; relationships = new LinkedList(); try { ArchitectUtils.listenToHierarchy(this, db); } catch (ArchitectException ex) { logger.error("Couldn't listen to database", ex); } setLayout(new PlayPenLayout(this)); setName("Play Pen"); setMinimumSize(new Dimension(200,200)); setBackground(java.awt.Color.white); setOpaque(false); // XXX: it really is opaque, but we can't have super.paintComponent() painting over top of our relationship lines dt = new DropTarget(this, new PlayPenDropListener()); tableNames = new HashMap(); addContainerListener(this); setupTablePanePopup(); }
public PlayPen(SQLDatabase db) { super(); if (db == null) throw new NullPointerException("db must be non-null"); this.db = db; relationships = new LinkedList(); try { ArchitectUtils.listenToHierarchy(this, db); } catch (ArchitectException ex) { logger.error("Couldn't listen to database", ex); } setLayout(new PlayPenLayout(this)); setName("Play Pen"); setMinimumSize(new Dimension(200,200)); setBackground(java.awt.Color.white); setOpaque(false); // XXX: it really is opaque, but we can't have super.paintComponent() painting over top of our relationship lines dt = new DropTarget(this, new PlayPenDropListener()); tableNames = new HashMap(); addContainerListener(this); setupTablePanePopup(); }
1,112,582
public PlayPen(SQLDatabase db) { super(); if (db == null) throw new NullPointerException("db must be non-null"); this.db = db; relationships = new LinkedList(); try { ArchitectUtils.listenToHierarchy(this, db); } catch (ArchitectException ex) { logger.error("Couldn't listen to database", ex); } setLayout(new PlayPenLayout(this)); setName("Play Pen"); setMinimumSize(new Dimension(200,200)); setBackground(java.awt.Color.white); setOpaque(false); // XXX: it really is opaque, but we can't have super.paintComponent() painting over top of our relationship lines dt = new DropTarget(this, new PlayPenDropListener()); tableNames = new HashMap(); addContainerListener(this); setupTablePanePopup(); }
public PlayPen(SQLDatabase db) { super(); if (db == null) throw new NullPointerException("db must be non-null"); this.db = db; relationships = new LinkedList(); try { ArchitectUtils.listenToHierarchy(this, db); } catch (ArchitectException ex) { logger.error("Couldn't listen to database", ex); } setLayout(new PlayPenLayout(this)); setName("Play Pen"); setMinimumSize(new Dimension(200,200)); setBackground(java.awt.Color.white); setOpaque(false); // XXX: it really is opaque, but we can't have super.paintComponent() painting over top of our relationship lines dt = new DropTarget(this, new PlayPenDropListener()); addContainerListener(this); setupTablePanePopup(); }
1,112,583
public void sendWelcomeLetters(MailTopic topic,String email)throws RemoteException,FinderException{ Collection letters = MailFinder.getInstance().getEmailLetters(((Integer)topic.getPrimaryKey()).intValue(),EmailLetter.TYPE_SUBSCRIPTION); if(letters!=null &&!letters.isEmpty()){ this.sendLetter((EmailLetter)letters.iterator().next(),topic); } }
public void sendWelcomeLetters(MailTopic topic,String email)throws RemoteException,FinderException{ Collection letters = MailFinder.getInstance().getEmailLetters(((Integer)topic.getPrimaryKey()).intValue(),EmailLetter.TYPE_SUBSCRIPTION); if(letters!=null &&!letters.isEmpty()){ Email eEmail =MailFinder.getInstance().lookupEmail(email); this.sendLetter((EmailLetter)letters.iterator().next(),topic,eEmail); } }
1,112,584
protected RGB parseRGB(String value) { StringTokenizer enum = new StringTokenizer(value, ","); int red = 0; int green = 0; int blue = 0; if (enum.hasMoreTokens()) { red = parseNumber(enum.nextToken()); }
protected RGB parseRGB(String value) { StringTokenizer items = new StringTokenizer(value, ","); int red = 0; int green = 0; int blue = 0; if (enum.hasMoreTokens()) { red = parseNumber(enum.nextToken()); }
1,112,586
protected RGB parseRGB(String value) { StringTokenizer enum = new StringTokenizer(value, ","); int red = 0; int green = 0; int blue = 0; if (enum.hasMoreTokens()) { red = parseNumber(enum.nextToken()); }
protected RGB parseRGB(String value) { StringTokenizer enum = new StringTokenizer(value, ","); int red = 0; int green = 0; int blue = 0; if (enum.hasMoreTokens()) { red = parseNumber(enum.nextToken()); }
1,112,587
public void parseHapMap(File inFile) throws PedFileException, IOException { int colNum = -1; Vector lines = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } lines.add(line); } int numLines = lines.size(); if (numLines < 2){ throw new PedFileException("Hapmap data format error: empty file"); } Individual ind; this.allIndividuals = new Vector(); //enumerate indivs StringTokenizer st = new StringTokenizer((String)lines.get(0), "\n\t\" \""); int numMetaColumns = 0; boolean doneMeta = false; while(!doneMeta && st.hasMoreTokens()){ String thisfield = st.nextToken(); numMetaColumns++; //first indiv ID will be a string beginning with "NA" if (thisfield.startsWith("NA")){ doneMeta = true; } } numMetaColumns--; st = new StringTokenizer((String)lines.get(0), "\n\t\" \""); for (int i = 0; i < numMetaColumns; i++){ st.nextToken(); } Vector namesIncludingDups = new Vector(); StringTokenizer dt; while (st.hasMoreTokens()){ ind = new Individual(numLines); String name = st.nextToken(); namesIncludingDups.add(name); if (name.endsWith("dup")){ //skip dups (i.e. 
don't add 'em to ind array) continue; } String details = (String)hapMapTranslate.get(name); if (details == null){ throw new PedFileException("Hapmap data format error: " + name); } dt = new StringTokenizer(details, "\n\t\" \""); ind.setFamilyID(dt.nextToken().trim()); ind.setIndividualID(dt.nextToken().trim()); ind.setDadID(dt.nextToken().trim()); ind.setMomID(dt.nextToken().trim()); try { ind.setGender(Integer.parseInt(dt.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(dt.nextToken().trim())); }catch(NumberFormatException nfe) { throw new PedFileException("File error: invalid gender or affected status for indiv " + name); } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } //start at k=1 to skip header which we just processed above. hminfo = new String[numLines-1][]; for(int k=1;k<numLines;k++){ StringTokenizer tokenizer = new StringTokenizer((String)lines.get(k)); //reading the first line if(colNum < 0){ //only check column number count for the first line colNum = tokenizer.countTokens(); } if(colNum != tokenizer.countTokens()) { //this line has a different number of columns //should send some sort of error message //TODO: add something which stores number of markers for all lines and checks that they're consistent throw new PedFileException("Line number mismatch in input file. 
line " + (k+1)); } if(tokenizer.hasMoreTokens()){ hminfo[k-1] = new String[2]; for (int skip = 0; skip < numMetaColumns; skip++){ //meta-data crap String s = tokenizer.nextToken().trim(); //get marker name, chrom and pos if (skip == 0){ hminfo[k-1][0] = s; } if (skip == 2){ String dc = Chromosome.getDataChrom(); if (dc != null){ if (!dc.equalsIgnoreCase(s)){ throw new PedFileException("Hapmap file format error on line " + (k+1) + ":\n The file appears to contain multiple chromosomes:" + "\n" + dc + ", " + s); } }else{ Chromosome.setDataChrom(s); } } if (skip == 3){ hminfo[k-1][1] = s; } } int index = 0; int indexIncludingDups = -1; while(tokenizer.hasMoreTokens()){ String alleles = tokenizer.nextToken(); indexIncludingDups++; //we've skipped the dups in the ind array, so we skip their genotypes if (((String)namesIncludingDups.elementAt(indexIncludingDups)).endsWith("dup")){ continue; } ind = (Individual)allIndividuals.elementAt(index); int allele1=0, allele2=0; if (alleles.substring(0,1).equals("A")){ allele1 = 1; }else if (alleles.substring(0,1).equals("C")){ allele1 = 2; }else if (alleles.substring(0,1).equals("G")){ allele1 = 3; }else if (alleles.substring(0,1).equals("T")){ allele1 = 4; } if (alleles.substring(1,2).equals("A")){ allele2 = 1; }else if (alleles.substring(1,2).equals("C")){ allele2 = 2; }else if (alleles.substring(1,2).equals("G")){ allele2 = 3; }else if (alleles.substring(1,2).equals("T")){ allele2 = 4; } byte[] markers = new byte[2]; markers[0] = (byte)allele1; markers[1]= (byte)allele2; ind.addMarker(markers); index++; } } } }
public void parseHapMap(File inFile) throws PedFileException, IOException { int colNum = -1; Vector lines = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } lines.add(line); } int numLines = lines.size(); if (numLines < 2){ throw new PedFileException("Hapmap data format error: empty file"); } Individual ind; this.allIndividuals = new Vector(); //enumerate indivs StringTokenizer st = new StringTokenizer((String)lines.get(0), "\n\t\" \""); int numMetaColumns = 0; boolean doneMeta = false; while(!doneMeta && st.hasMoreTokens()){ String thisfield = st.nextToken(); numMetaColumns++; //first indiv ID will be a string beginning with "NA" if (thisfield.startsWith("NA")){ doneMeta = true; } } numMetaColumns--; st = new StringTokenizer((String)lines.get(0), "\n\t\" \""); for (int i = 0; i < numMetaColumns; i++){ st.nextToken(); } Vector namesIncludingDups = new Vector(); StringTokenizer dt; while (st.hasMoreTokens()){ ind = new Individual(numLines-1); String name = st.nextToken(); namesIncludingDups.add(name); if (name.endsWith("dup")){ //skip dups (i.e. 
don't add 'em to ind array) continue; } String details = (String)hapMapTranslate.get(name); if (details == null){ throw new PedFileException("Hapmap data format error: " + name); } dt = new StringTokenizer(details, "\n\t\" \""); ind.setFamilyID(dt.nextToken().trim()); ind.setIndividualID(dt.nextToken().trim()); ind.setDadID(dt.nextToken().trim()); ind.setMomID(dt.nextToken().trim()); try { ind.setGender(Integer.parseInt(dt.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(dt.nextToken().trim())); }catch(NumberFormatException nfe) { throw new PedFileException("File error: invalid gender or affected status for indiv " + name); } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } //start at k=1 to skip header which we just processed above. hminfo = new String[numLines-1][]; for(int k=1;k<numLines;k++){ StringTokenizer tokenizer = new StringTokenizer((String)lines.get(k)); //reading the first line if(colNum < 0){ //only check column number count for the first line colNum = tokenizer.countTokens(); } if(colNum != tokenizer.countTokens()) { //this line has a different number of columns //should send some sort of error message //TODO: add something which stores number of markers for all lines and checks that they're consistent throw new PedFileException("Line number mismatch in input file. 
line " + (k+1)); } if(tokenizer.hasMoreTokens()){ hminfo[k-1] = new String[2]; for (int skip = 0; skip < numMetaColumns; skip++){ //meta-data crap String s = tokenizer.nextToken().trim(); //get marker name, chrom and pos if (skip == 0){ hminfo[k-1][0] = s; } if (skip == 2){ String dc = Chromosome.getDataChrom(); if (dc != null){ if (!dc.equalsIgnoreCase(s)){ throw new PedFileException("Hapmap file format error on line " + (k+1) + ":\n The file appears to contain multiple chromosomes:" + "\n" + dc + ", " + s); } }else{ Chromosome.setDataChrom(s); } } if (skip == 3){ hminfo[k-1][1] = s; } } int index = 0; int indexIncludingDups = -1; while(tokenizer.hasMoreTokens()){ String alleles = tokenizer.nextToken(); indexIncludingDups++; //we've skipped the dups in the ind array, so we skip their genotypes if (((String)namesIncludingDups.elementAt(indexIncludingDups)).endsWith("dup")){ continue; } ind = (Individual)allIndividuals.elementAt(index); int allele1=0, allele2=0; if (alleles.substring(0,1).equals("A")){ allele1 = 1; }else if (alleles.substring(0,1).equals("C")){ allele1 = 2; }else if (alleles.substring(0,1).equals("G")){ allele1 = 3; }else if (alleles.substring(0,1).equals("T")){ allele1 = 4; } if (alleles.substring(1,2).equals("A")){ allele2 = 1; }else if (alleles.substring(1,2).equals("C")){ allele2 = 2; }else if (alleles.substring(1,2).equals("G")){ allele2 = 3; }else if (alleles.substring(1,2).equals("T")){ allele2 = 4; } byte[] markers = new byte[2]; markers[0] = (byte)allele1; markers[1]= (byte)allele2; ind.addMarker(markers); index++; } } } }
1,112,589
public void parseHapMap(File inFile) throws PedFileException, IOException { int colNum = -1; Vector lines = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } lines.add(line); } int numLines = lines.size(); if (numLines < 2){ throw new PedFileException("Hapmap data format error: empty file"); } Individual ind; this.allIndividuals = new Vector(); //enumerate indivs StringTokenizer st = new StringTokenizer((String)lines.get(0), "\n\t\" \""); int numMetaColumns = 0; boolean doneMeta = false; while(!doneMeta && st.hasMoreTokens()){ String thisfield = st.nextToken(); numMetaColumns++; //first indiv ID will be a string beginning with "NA" if (thisfield.startsWith("NA")){ doneMeta = true; } } numMetaColumns--; st = new StringTokenizer((String)lines.get(0), "\n\t\" \""); for (int i = 0; i < numMetaColumns; i++){ st.nextToken(); } Vector namesIncludingDups = new Vector(); StringTokenizer dt; while (st.hasMoreTokens()){ ind = new Individual(numLines); String name = st.nextToken(); namesIncludingDups.add(name); if (name.endsWith("dup")){ //skip dups (i.e. 
don't add 'em to ind array) continue; } String details = (String)hapMapTranslate.get(name); if (details == null){ throw new PedFileException("Hapmap data format error: " + name); } dt = new StringTokenizer(details, "\n\t\" \""); ind.setFamilyID(dt.nextToken().trim()); ind.setIndividualID(dt.nextToken().trim()); ind.setDadID(dt.nextToken().trim()); ind.setMomID(dt.nextToken().trim()); try { ind.setGender(Integer.parseInt(dt.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(dt.nextToken().trim())); }catch(NumberFormatException nfe) { throw new PedFileException("File error: invalid gender or affected status for indiv " + name); } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } //start at k=1 to skip header which we just processed above. hminfo = new String[numLines-1][]; for(int k=1;k<numLines;k++){ StringTokenizer tokenizer = new StringTokenizer((String)lines.get(k)); //reading the first line if(colNum < 0){ //only check column number count for the first line colNum = tokenizer.countTokens(); } if(colNum != tokenizer.countTokens()) { //this line has a different number of columns //should send some sort of error message //TODO: add something which stores number of markers for all lines and checks that they're consistent throw new PedFileException("Line number mismatch in input file. 
line " + (k+1)); } if(tokenizer.hasMoreTokens()){ hminfo[k-1] = new String[2]; for (int skip = 0; skip < numMetaColumns; skip++){ //meta-data crap String s = tokenizer.nextToken().trim(); //get marker name, chrom and pos if (skip == 0){ hminfo[k-1][0] = s; } if (skip == 2){ String dc = Chromosome.getDataChrom(); if (dc != null){ if (!dc.equalsIgnoreCase(s)){ throw new PedFileException("Hapmap file format error on line " + (k+1) + ":\n The file appears to contain multiple chromosomes:" + "\n" + dc + ", " + s); } }else{ Chromosome.setDataChrom(s); } } if (skip == 3){ hminfo[k-1][1] = s; } } int index = 0; int indexIncludingDups = -1; while(tokenizer.hasMoreTokens()){ String alleles = tokenizer.nextToken(); indexIncludingDups++; //we've skipped the dups in the ind array, so we skip their genotypes if (((String)namesIncludingDups.elementAt(indexIncludingDups)).endsWith("dup")){ continue; } ind = (Individual)allIndividuals.elementAt(index); int allele1=0, allele2=0; if (alleles.substring(0,1).equals("A")){ allele1 = 1; }else if (alleles.substring(0,1).equals("C")){ allele1 = 2; }else if (alleles.substring(0,1).equals("G")){ allele1 = 3; }else if (alleles.substring(0,1).equals("T")){ allele1 = 4; } if (alleles.substring(1,2).equals("A")){ allele2 = 1; }else if (alleles.substring(1,2).equals("C")){ allele2 = 2; }else if (alleles.substring(1,2).equals("G")){ allele2 = 3; }else if (alleles.substring(1,2).equals("T")){ allele2 = 4; } byte[] markers = new byte[2]; markers[0] = (byte)allele1; markers[1]= (byte)allele2; ind.addMarker(markers); index++; } } } }
public void parseHapMap(File inFile) throws PedFileException, IOException { int colNum = -1; Vector lines = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } lines.add(line); } int numLines = lines.size(); if (numLines < 2){ throw new PedFileException("Hapmap data format error: empty file"); } Individual ind; this.allIndividuals = new Vector(); //enumerate indivs StringTokenizer st = new StringTokenizer((String)lines.get(0), "\n\t\" \""); int numMetaColumns = 0; boolean doneMeta = false; while(!doneMeta && st.hasMoreTokens()){ String thisfield = st.nextToken(); numMetaColumns++; //first indiv ID will be a string beginning with "NA" if (thisfield.startsWith("NA")){ doneMeta = true; } } numMetaColumns--; st = new StringTokenizer((String)lines.get(0), "\n\t\" \""); for (int i = 0; i < numMetaColumns; i++){ st.nextToken(); } Vector namesIncludingDups = new Vector(); StringTokenizer dt; while (st.hasMoreTokens()){ ind = new Individual(numLines); String name = st.nextToken(); namesIncludingDups.add(name); if (name.endsWith("dup")){ //skip dups (i.e. 
don't add 'em to ind array) continue; } String details = (String)hapMapTranslate.get(name); if (details == null){ throw new PedFileException("Hapmap data format error: " + name); } dt = new StringTokenizer(details, "\n\t\" \""); ind.setFamilyID(dt.nextToken().trim()); ind.setIndividualID(dt.nextToken().trim()); ind.setDadID(dt.nextToken().trim()); ind.setMomID(dt.nextToken().trim()); try { ind.setGender(Integer.parseInt(dt.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(dt.nextToken().trim())); }catch(NumberFormatException nfe) { throw new PedFileException("File error: invalid gender or affected status for indiv " + name); } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } //start at k=1 to skip header which we just processed above. hminfo = new String[numLines-1][]; for(int k=1;k<numLines;k++){ StringTokenizer tokenizer = new StringTokenizer((String)lines.get(k)); //reading the first line if(colNum < 0){ //only check column number count for the first line colNum = tokenizer.countTokens(); } if(colNum != tokenizer.countTokens()) { //this line has a different number of columns //should send some sort of error message //TODO: add something which stores number of markers for all lines and checks that they're consistent throw new PedFileException("Line number mismatch in input file. 
line " + (k+1)); } if(tokenizer.hasMoreTokens()){ hminfo[k-1] = new String[2]; for (int skip = 0; skip < numMetaColumns; skip++){ //meta-data crap String s = tokenizer.nextToken().trim(); //get marker name, chrom and pos if (skip == 0){ hminfo[k-1][0] = s; } if (skip == 2){ String dc = Chromosome.getDataChrom(); if (dc != null){ if (!dc.equalsIgnoreCase(s)){ throw new PedFileException("Hapmap file format error on line " + (k+1) + ":\n The file appears to contain multiple chromosomes:" + "\n" + dc + ", " + s); } }else{ Chromosome.setDataChrom(s); } } if (skip == 3){ hminfo[k-1][1] = s; } } int index = 0; int indexIncludingDups = -1; while(tokenizer.hasMoreTokens()){ String alleles = tokenizer.nextToken(); indexIncludingDups++; //we've skipped the dups in the ind array, so we skip their genotypes if (((String)namesIncludingDups.elementAt(indexIncludingDups)).endsWith("dup")){ continue; } ind = (Individual)allIndividuals.elementAt(index); int allele1=0, allele2=0; if (alleles.substring(0,1).equals("A")){ allele1 = 1; }else if (alleles.substring(0,1).equals("C")){ allele1 = 2; }else if (alleles.substring(0,1).equals("G")){ allele1 = 3; }else if (alleles.substring(0,1).equals("T")){ allele1 = 4; } if (alleles.substring(1,2).equals("A")){ allele2 = 1; }else if (alleles.substring(1,2).equals("C")){ allele2 = 2; }else if (alleles.substring(1,2).equals("G")){ allele2 = 3; }else if (alleles.substring(1,2).equals("T")){ allele2 = 4; } byte[] markers = new byte[2]; markers[0] = (byte)allele1; markers[1]= (byte)allele2; ind.addMarker(markers); index++; } } } }
1,112,590
public void parseLinkage(File inputFile) throws PedFileException, IOException { int colNum = -1; boolean withOptionalColumn = false; int numLines = 0; Individual ind; this.allIndividuals = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inputFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } StringTokenizer tokenizer = new StringTokenizer(line, "\n\t\" \""); int numTokens = tokenizer.countTokens(); //reading the first line if(colNum < 1){ //only check column number count for the first nonblank line colNum = numTokens; if(colNum%2==1) { withOptionalColumn = true; } } if(colNum != numTokens) { //this line has a different number of columns //should send some sort of error message throw new PedFileException("Column number mismatch in pedfile. line " + (numLines+1)); } ind = new Individual(numTokens); if(numTokens < 6) { throw new PedFileException("Incorrect number of fields on line " + (numLines+1)); } if(tokenizer.hasMoreTokens()){ ind.setFamilyID(tokenizer.nextToken().trim()); ind.setIndividualID(tokenizer.nextToken().trim()); ind.setDadID(tokenizer.nextToken().trim()); ind.setMomID(tokenizer.nextToken().trim()); try { ind.setGender(Integer.parseInt(tokenizer.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(tokenizer.nextToken().trim())); if(withOptionalColumn) { ind.setLiability(Integer.parseInt(tokenizer.nextToken().trim())); } }catch(NumberFormatException nfe) { throw new PedFileException("Pedfile error: invalid gender or affected status on line " + (numLines+1)); } while(tokenizer.hasMoreTokens()){ try { byte[] markers = new byte[2]; markers[0] = Byte.parseByte((tokenizer.nextToken().trim())); markers[1]= Byte.parseByte((tokenizer.nextToken().trim())); if(markers[0] <0 || markers[0] > 4 || markers[1] <0 || markers[1] >4) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + 
(numLines+1) + ".\n all genotypes must be 0-4."); } ind.addMarker(markers); }catch(NumberFormatException nfe) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + (numLines+1) ); } } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } if (fam.getMembers().containsKey(ind.getIndividualID())){ throw new PedFileException("Individual "+ind.getIndividualID()+" in family "+ ind.getFamilyID()+" appears more than once."); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } numLines++; } //now we check if anyone has a reference to a parent who isnt in the file, and if so, we remove the reference for(int i=0;i<allIndividuals.size();i++) { Individual currentInd = (Individual) allIndividuals.get(i); Hashtable curFam = ((Family)(families.get(currentInd.getFamilyID())) ).getMembers(); if( !currentInd.getDadID().equals("0") && ! (curFam.containsKey(currentInd.getDadID()))) { currentInd.setDadID("0"); bogusParents = true; } if(!currentInd.getMomID().equals("0") && ! (curFam.containsKey(currentInd.getMomID()))) { currentInd.setMomID("0"); bogusParents = true; } } if (numLines == 0){ throw new PedFileException("Data format error: empty file"); } }
public void parseLinkage(File inputFile) throws PedFileException, IOException { int colNum = -1; boolean withOptionalColumn = false; int numLines = 0; Individual ind; this.allIndividuals = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inputFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } StringTokenizer tokenizer = new StringTokenizer(line, "\n\t\" \""); int numTokens = tokenizer.countTokens(); //reading the first line if(colNum < 1){ //only check column number count for the first nonblank line colNum = numTokens; if(colNum%2==1) { withOptionalColumn = true; } } if(colNum != numTokens) { //this line has a different number of columns //should send some sort of error message throw new PedFileException("Column number mismatch in pedfile. line " + (numLines+1)); } ind = new Individual(numMarkers); if(numTokens < 6) { throw new PedFileException("Incorrect number of fields on line " + (numLines+1)); } if(tokenizer.hasMoreTokens()){ ind.setFamilyID(tokenizer.nextToken().trim()); ind.setIndividualID(tokenizer.nextToken().trim()); ind.setDadID(tokenizer.nextToken().trim()); ind.setMomID(tokenizer.nextToken().trim()); try { ind.setGender(Integer.parseInt(tokenizer.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(tokenizer.nextToken().trim())); if(withOptionalColumn) { ind.setLiability(Integer.parseInt(tokenizer.nextToken().trim())); } }catch(NumberFormatException nfe) { throw new PedFileException("Pedfile error: invalid gender or affected status on line " + (numLines+1)); } while(tokenizer.hasMoreTokens()){ try { byte[] markers = new byte[2]; markers[0] = Byte.parseByte((tokenizer.nextToken().trim())); markers[1]= Byte.parseByte((tokenizer.nextToken().trim())); if(markers[0] <0 || markers[0] > 4 || markers[1] <0 || markers[1] >4) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + 
(numLines+1) + ".\n all genotypes must be 0-4."); } ind.addMarker(markers); }catch(NumberFormatException nfe) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + (numLines+1) ); } } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } if (fam.getMembers().containsKey(ind.getIndividualID())){ throw new PedFileException("Individual "+ind.getIndividualID()+" in family "+ ind.getFamilyID()+" appears more than once."); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } numLines++; } //now we check if anyone has a reference to a parent who isnt in the file, and if so, we remove the reference for(int i=0;i<allIndividuals.size();i++) { Individual currentInd = (Individual) allIndividuals.get(i); Hashtable curFam = ((Family)(families.get(currentInd.getFamilyID())) ).getMembers(); if( !currentInd.getDadID().equals("0") && ! (curFam.containsKey(currentInd.getDadID()))) { currentInd.setDadID("0"); bogusParents = true; } if(!currentInd.getMomID().equals("0") && ! (curFam.containsKey(currentInd.getMomID()))) { currentInd.setMomID("0"); bogusParents = true; } } if (numLines == 0){ throw new PedFileException("Data format error: empty file"); } }
1,112,591
public void parseLinkage(File inputFile) throws PedFileException, IOException { int colNum = -1; boolean withOptionalColumn = false; int numLines = 0; Individual ind; this.allIndividuals = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inputFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } StringTokenizer tokenizer = new StringTokenizer(line, "\n\t\" \""); int numTokens = tokenizer.countTokens(); //reading the first line if(colNum < 1){ //only check column number count for the first nonblank line colNum = numTokens; if(colNum%2==1) { withOptionalColumn = true; } } if(colNum != numTokens) { //this line has a different number of columns //should send some sort of error message throw new PedFileException("Column number mismatch in pedfile. line " + (numLines+1)); } ind = new Individual(numTokens); if(numTokens < 6) { throw new PedFileException("Incorrect number of fields on line " + (numLines+1)); } if(tokenizer.hasMoreTokens()){ ind.setFamilyID(tokenizer.nextToken().trim()); ind.setIndividualID(tokenizer.nextToken().trim()); ind.setDadID(tokenizer.nextToken().trim()); ind.setMomID(tokenizer.nextToken().trim()); try { ind.setGender(Integer.parseInt(tokenizer.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(tokenizer.nextToken().trim())); if(withOptionalColumn) { ind.setLiability(Integer.parseInt(tokenizer.nextToken().trim())); } }catch(NumberFormatException nfe) { throw new PedFileException("Pedfile error: invalid gender or affected status on line " + (numLines+1)); } while(tokenizer.hasMoreTokens()){ try { byte[] markers = new byte[2]; markers[0] = Byte.parseByte((tokenizer.nextToken().trim())); markers[1]= Byte.parseByte((tokenizer.nextToken().trim())); if(markers[0] <0 || markers[0] > 4 || markers[1] <0 || markers[1] >4) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + 
(numLines+1) + ".\n all genotypes must be 0-4."); } ind.addMarker(markers); }catch(NumberFormatException nfe) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + (numLines+1) ); } } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } if (fam.getMembers().containsKey(ind.getIndividualID())){ throw new PedFileException("Individual "+ind.getIndividualID()+" in family "+ ind.getFamilyID()+" appears more than once."); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } numLines++; } //now we check if anyone has a reference to a parent who isnt in the file, and if so, we remove the reference for(int i=0;i<allIndividuals.size();i++) { Individual currentInd = (Individual) allIndividuals.get(i); Hashtable curFam = ((Family)(families.get(currentInd.getFamilyID())) ).getMembers(); if( !currentInd.getDadID().equals("0") && ! (curFam.containsKey(currentInd.getDadID()))) { currentInd.setDadID("0"); bogusParents = true; } if(!currentInd.getMomID().equals("0") && ! (curFam.containsKey(currentInd.getMomID()))) { currentInd.setMomID("0"); bogusParents = true; } } if (numLines == 0){ throw new PedFileException("Data format error: empty file"); } }
public void parseLinkage(File inputFile) throws PedFileException, IOException { int colNum = -1; boolean withOptionalColumn = false; int numLines = 0; Individual ind; this.allIndividuals = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inputFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } StringTokenizer tokenizer = new StringTokenizer(line, "\n\t\" \""); int numTokens = tokenizer.countTokens(); //reading the first line if(colNum < 1){ //only check column number count for the first nonblank line colNum = numTokens; if(colNum%2==1) { withOptionalColumn = true; } } if(colNum != numTokens) { //this line has a different number of columns //should send some sort of error message throw new PedFileException("Column number mismatch in pedfile. line " + (numLines+1)); } ind = new Individual(numTokens); if(numTokens < 6) { throw new PedFileException("Incorrect number of fields on line " + (numLines+1)); } if(tokenizer.hasMoreTokens()){ ind.setFamilyID(tokenizer.nextToken().trim()); ind.setIndividualID(tokenizer.nextToken().trim()); ind.setDadID(tokenizer.nextToken().trim()); ind.setMomID(tokenizer.nextToken().trim()); try { ind.setGender(Integer.parseInt(tokenizer.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(tokenizer.nextToken().trim())); if(withOptionalColumn) { ind.setLiability(Integer.parseInt(tokenizer.nextToken().trim())); } }catch(NumberFormatException nfe) { throw new PedFileException("Pedfile error: invalid gender or affected status on line " + (numLines+1)); } while(tokenizer.hasMoreTokens()){ try { byte[] markers = new byte[2]; markers[0] = Byte.parseByte((tokenizer.nextToken().trim())); markers[1]= Byte.parseByte((tokenizer.nextToken().trim())); if(markers[0] <0 || markers[0] > 4 || markers[1] <0 || markers[1] >4) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + 
(numLines+1) + ".\n all genotypes must be 0-4."); } ind.addMarker(markers); }catch(NumberFormatException nfe) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + (numLines+1) ); } } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } if (fam.getMembers().containsKey(ind.getIndividualID())){ throw new PedFileException("Individual "+ind.getIndividualID()+" in family "+ ind.getFamilyID()+" appears more than once."); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } numLines++; } //now we check if anyone has a reference to a parent who isnt in the file, and if so, we remove the reference for(int i=0;i<allIndividuals.size();i++) { Individual currentInd = (Individual) allIndividuals.get(i); Hashtable curFam = ((Family)(families.get(currentInd.getFamilyID())) ).getMembers(); if( !currentInd.getDadID().equals("0") && ! (curFam.containsKey(currentInd.getDadID()))) { currentInd.setDadID("0"); bogusParents = true; } if(!currentInd.getMomID().equals("0") && ! (curFam.containsKey(currentInd.getMomID()))) { currentInd.setMomID("0"); bogusParents = true; } } if (numLines == 0){ throw new PedFileException("Data format error: empty file"); } }
1,112,592
public void parseLinkage(File inputFile) throws PedFileException, IOException { int colNum = -1; boolean withOptionalColumn = false; int numLines = 0; Individual ind; this.allIndividuals = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inputFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } StringTokenizer tokenizer = new StringTokenizer(line, "\n\t\" \""); int numTokens = tokenizer.countTokens(); //reading the first line if(colNum < 1){ //only check column number count for the first nonblank line colNum = numTokens; if(colNum%2==1) { withOptionalColumn = true; } } if(colNum != numTokens) { //this line has a different number of columns //should send some sort of error message throw new PedFileException("Column number mismatch in pedfile. line " + (numLines+1)); } ind = new Individual(numTokens); if(numTokens < 6) { throw new PedFileException("Incorrect number of fields on line " + (numLines+1)); } if(tokenizer.hasMoreTokens()){ ind.setFamilyID(tokenizer.nextToken().trim()); ind.setIndividualID(tokenizer.nextToken().trim()); ind.setDadID(tokenizer.nextToken().trim()); ind.setMomID(tokenizer.nextToken().trim()); try { ind.setGender(Integer.parseInt(tokenizer.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(tokenizer.nextToken().trim())); if(withOptionalColumn) { ind.setLiability(Integer.parseInt(tokenizer.nextToken().trim())); } }catch(NumberFormatException nfe) { throw new PedFileException("Pedfile error: invalid gender or affected status on line " + (numLines+1)); } while(tokenizer.hasMoreTokens()){ try { byte[] markers = new byte[2]; markers[0] = Byte.parseByte((tokenizer.nextToken().trim())); markers[1]= Byte.parseByte((tokenizer.nextToken().trim())); if(markers[0] <0 || markers[0] > 4 || markers[1] <0 || markers[1] >4) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + 
(numLines+1) + ".\n all genotypes must be 0-4."); } ind.addMarker(markers); }catch(NumberFormatException nfe) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + (numLines+1) ); } } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } if (fam.getMembers().containsKey(ind.getIndividualID())){ throw new PedFileException("Individual "+ind.getIndividualID()+" in family "+ ind.getFamilyID()+" appears more than once."); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } numLines++; } //now we check if anyone has a reference to a parent who isnt in the file, and if so, we remove the reference for(int i=0;i<allIndividuals.size();i++) { Individual currentInd = (Individual) allIndividuals.get(i); Hashtable curFam = ((Family)(families.get(currentInd.getFamilyID())) ).getMembers(); if( !currentInd.getDadID().equals("0") && ! (curFam.containsKey(currentInd.getDadID()))) { currentInd.setDadID("0"); bogusParents = true; } if(!currentInd.getMomID().equals("0") && ! (curFam.containsKey(currentInd.getMomID()))) { currentInd.setMomID("0"); bogusParents = true; } } if (numLines == 0){ throw new PedFileException("Data format error: empty file"); } }
public void parseLinkage(File inputFile) throws PedFileException, IOException { int colNum = -1; boolean withOptionalColumn = false; int numLines = 0; Individual ind; this.allIndividuals = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inputFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } StringTokenizer tokenizer = new StringTokenizer(line, "\n\t\" \""); int numTokens = tokenizer.countTokens(); //reading the first line if(colNum < 1){ //only check column number count for the first nonblank line colNum = numTokens; if(colNum%2==1) { withOptionalColumn = true; } } if(colNum != numTokens) { //this line has a different number of columns //should send some sort of error message throw new PedFileException("Column number mismatch in pedfile. line " + (numLines+1)); } ind = new Individual(numTokens); if(numTokens < 6) { throw new PedFileException("Incorrect number of fields on line " + (numLines+1)); } if(tokenizer.hasMoreTokens()){ ind.setFamilyID(tokenizer.nextToken().trim()); ind.setIndividualID(tokenizer.nextToken().trim()); ind.setDadID(tokenizer.nextToken().trim()); ind.setMomID(tokenizer.nextToken().trim()); try { ind.setGender(Integer.parseInt(tokenizer.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(tokenizer.nextToken().trim())); if(withOptionalColumn) { ind.setLiability(Integer.parseInt(tokenizer.nextToken().trim())); } }catch(NumberFormatException nfe) { throw new PedFileException("Pedfile error: invalid gender or affected status on line " + (numLines+1)); } while(tokenizer.hasMoreTokens()){ try { byte[] markers = new byte[2]; markers[0] = Byte.parseByte((tokenizer.nextToken().trim())); markers[1]= Byte.parseByte((tokenizer.nextToken().trim())); if(markers[0] <0 || markers[0] > 4 || markers[1] <0 || markers[1] >4) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + 
(numLines+1) + ".\n all genotypes must be 0-4."); } ind.addMarker(markers); }catch(NumberFormatException nfe) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + (numLines+1) ); } } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } if (fam.getMembers().containsKey(ind.getIndividualID())){ throw new PedFileException("Individual "+ind.getIndividualID()+" in family "+ ind.getFamilyID()+" appears more than once."); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } numLines++; } //now we check if anyone has a reference to a parent who isnt in the file, and if so, we remove the reference for(int i=0;i<allIndividuals.size();i++) { Individual currentInd = (Individual) allIndividuals.get(i); Hashtable curFam = ((Family)(families.get(currentInd.getFamilyID())) ).getMembers(); if( !currentInd.getDadID().equals("0") && ! (curFam.containsKey(currentInd.getDadID()))) { currentInd.setDadID("0"); bogusParents = true; } if(!currentInd.getMomID().equals("0") && ! (curFam.containsKey(currentInd.getMomID()))) { currentInd.setMomID("0"); bogusParents = true; } } if (numLines == 0){ throw new PedFileException("Data format error: empty file"); } }
1,112,593
public void parseLinkage(File inputFile) throws PedFileException, IOException { int colNum = -1; boolean withOptionalColumn = false; int numLines = 0; Individual ind; this.allIndividuals = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inputFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } StringTokenizer tokenizer = new StringTokenizer(line, "\n\t\" \""); int numTokens = tokenizer.countTokens(); //reading the first line if(colNum < 1){ //only check column number count for the first nonblank line colNum = numTokens; if(colNum%2==1) { withOptionalColumn = true; } } if(colNum != numTokens) { //this line has a different number of columns //should send some sort of error message throw new PedFileException("Column number mismatch in pedfile. line " + (numLines+1)); } ind = new Individual(numTokens); if(numTokens < 6) { throw new PedFileException("Incorrect number of fields on line " + (numLines+1)); } if(tokenizer.hasMoreTokens()){ ind.setFamilyID(tokenizer.nextToken().trim()); ind.setIndividualID(tokenizer.nextToken().trim()); ind.setDadID(tokenizer.nextToken().trim()); ind.setMomID(tokenizer.nextToken().trim()); try { ind.setGender(Integer.parseInt(tokenizer.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(tokenizer.nextToken().trim())); if(withOptionalColumn) { ind.setLiability(Integer.parseInt(tokenizer.nextToken().trim())); } }catch(NumberFormatException nfe) { throw new PedFileException("Pedfile error: invalid gender or affected status on line " + (numLines+1)); } while(tokenizer.hasMoreTokens()){ try { byte[] markers = new byte[2]; markers[0] = Byte.parseByte((tokenizer.nextToken().trim())); markers[1]= Byte.parseByte((tokenizer.nextToken().trim())); if(markers[0] <0 || markers[0] > 4 || markers[1] <0 || markers[1] >4) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + 
(numLines+1) + ".\n all genotypes must be 0-4."); } ind.addMarker(markers); }catch(NumberFormatException nfe) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + (numLines+1) ); } } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } if (fam.getMembers().containsKey(ind.getIndividualID())){ throw new PedFileException("Individual "+ind.getIndividualID()+" in family "+ ind.getFamilyID()+" appears more than once."); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } numLines++; } //now we check if anyone has a reference to a parent who isnt in the file, and if so, we remove the reference for(int i=0;i<allIndividuals.size();i++) { Individual currentInd = (Individual) allIndividuals.get(i); Hashtable curFam = ((Family)(families.get(currentInd.getFamilyID())) ).getMembers(); if( !currentInd.getDadID().equals("0") && ! (curFam.containsKey(currentInd.getDadID()))) { currentInd.setDadID("0"); bogusParents = true; } if(!currentInd.getMomID().equals("0") && ! (curFam.containsKey(currentInd.getMomID()))) { currentInd.setMomID("0"); bogusParents = true; } } if (numLines == 0){ throw new PedFileException("Data format error: empty file"); } }
public void parseLinkage(File inputFile) throws PedFileException, IOException { int colNum = -1; boolean withOptionalColumn = false; int numLines = 0; Individual ind; this.allIndividuals = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inputFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } StringTokenizer tokenizer = new StringTokenizer(line, "\n\t\" \""); int numTokens = tokenizer.countTokens(); //reading the first line if(colNum < 1){ //only check column number count for the first nonblank line colNum = numTokens; if(colNum%2==1) { withOptionalColumn = true; } } if(colNum != numTokens) { //this line has a different number of columns //should send some sort of error message throw new PedFileException("Column number mismatch in pedfile. line " + (numLines+1)); } ind = new Individual(numTokens); if(numTokens < 6) { throw new PedFileException("Incorrect number of fields on line " + (numLines+1)); } if(tokenizer.hasMoreTokens()){ ind.setFamilyID(tokenizer.nextToken().trim()); ind.setIndividualID(tokenizer.nextToken().trim()); ind.setDadID(tokenizer.nextToken().trim()); ind.setMomID(tokenizer.nextToken().trim()); try { ind.setGender(Integer.parseInt(tokenizer.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(tokenizer.nextToken().trim())); if(withOptionalColumn) { ind.setLiability(Integer.parseInt(tokenizer.nextToken().trim())); } }catch(NumberFormatException nfe) { throw new PedFileException("Pedfile error: invalid gender or affected status on line " + (numLines+1)); } while(tokenizer.hasMoreTokens()){ try { byte[] markers = new byte[2]; markers[0] = Byte.parseByte((tokenizer.nextToken().trim())); markers[1]= Byte.parseByte((tokenizer.nextToken().trim())); if(markers[0] <0 || markers[0] > 4 || markers[1] <0 || markers[1] >4) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + 
(numLines+1) + ".\n all genotypes must be 0-4."); } ind.addMarker(genotype1,genotype2); }catch(NumberFormatException nfe) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + (numLines+1) ); } } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } if (fam.getMembers().containsKey(ind.getIndividualID())){ throw new PedFileException("Individual "+ind.getIndividualID()+" in family "+ ind.getFamilyID()+" appears more than once."); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } numLines++; } //now we check if anyone has a reference to a parent who isnt in the file, and if so, we remove the reference for(int i=0;i<allIndividuals.size();i++) { Individual currentInd = (Individual) allIndividuals.get(i); Hashtable curFam = ((Family)(families.get(currentInd.getFamilyID())) ).getMembers(); if( !currentInd.getDadID().equals("0") && ! (curFam.containsKey(currentInd.getDadID()))) { currentInd.setDadID("0"); bogusParents = true; } if(!currentInd.getMomID().equals("0") && ! (curFam.containsKey(currentInd.getMomID()))) { currentInd.setMomID("0"); bogusParents = true; } } if (numLines == 0){ throw new PedFileException("Data format error: empty file"); } }
1,112,594
/**
 * Creates the tag and acquires the JAXP transformer factory.
 * The tf field requires a SAXTransformerFactory (needed for SAX-based
 * transformer handlers), so the platform factory is cast accordingly —
 * the original assigned the uncast TransformerFactory, which does not
 * satisfy the field's type.
 */
public TransformTag() {
    super();
    this.tf = (SAXTransformerFactory) TransformerFactory.newInstance();
}
public TransformTag() { super(); this.tf = (SAXTransformerFactory) TransformerFactory.newInstance(); }
1,112,595
protected URIResolver createURIResolver() { return new URIResolver() { public Source resolve(String href, String base) throws TransformerException { if (log.isDebugEnabled() ) { log.info( "base: " + base + " href: " + href ); } // pass if we don't have a systemId if (href == null) return null; // @todo // #### this is a pretty simplistic implementation. // #### we should really handle this better such that if // #### base is specified as an absolute URL // #### we trim the end off it and append href return new StreamSource(context.getResourceAsStream(href)); } }; }
protected URIResolver createURIResolver() { return new URIResolver() { public Source resolve(String href, String base) throws TransformerException { if (log.isDebugEnabled() ) { log.debug( "base: " + base + " href: " + href ); } // pass if we don't have a systemId if (href == null) return null; // @todo // #### this is a pretty simplistic implementation. // #### we should really handle this better such that if // #### base is specified as an absolute URL // #### we trim the end off it and append href return new StreamSource(context.getResourceAsStream(href)); } }; }
1,112,596
protected URIResolver createURIResolver() { return new URIResolver() { public Source resolve(String href, String base) throws TransformerException { if (log.isDebugEnabled() ) { log.info( "base: " + base + " href: " + href ); } // pass if we don't have a systemId if (href == null) return null; // @todo // #### this is a pretty simplistic implementation. // #### we should really handle this better such that if // #### base is specified as an absolute URL // #### we trim the end off it and append href return new StreamSource(context.getResourceAsStream(href)); } }; }
protected URIResolver createURIResolver() { return new URIResolver() { public Source resolve(String href, String base) throws TransformerException { if (log.isDebugEnabled() ) { log.info( "base: " + base + " href: " + href ); } // pass if we don't have a systemId if (null == href) return null; // @todo // #### this is a pretty simplistic implementation. // #### we should really handle this better such that if // #### base is specified as an absolute URL // #### we trim the end off it and append href return new StreamSource(context.getResourceAsStream(href)); } }; }
1,112,597
public Source resolve(String href, String base) throws TransformerException { if (log.isDebugEnabled() ) { log.info( "base: " + base + " href: " + href ); } // pass if we don't have a systemId if (href == null) return null; // @todo // #### this is a pretty simplistic implementation. // #### we should really handle this better such that if // #### base is specified as an absolute URL // #### we trim the end off it and append href return new StreamSource(context.getResourceAsStream(href)); }
public Source resolve(String href, String base) throws TransformerException { if (log.isDebugEnabled() ) { log.debug( "base: " + base + " href: " + href ); } // pass if we don't have a systemId if (href == null) return null; // @todo // #### this is a pretty simplistic implementation. // #### we should really handle this better such that if // #### base is specified as an absolute URL // #### we trim the end off it and append href return new StreamSource(context.getResourceAsStream(href)); }
1,112,598
public Source resolve(String href, String base) throws TransformerException { if (log.isDebugEnabled() ) { log.info( "base: " + base + " href: " + href ); } // pass if we don't have a systemId if (href == null) return null; // @todo // #### this is a pretty simplistic implementation. // #### we should really handle this better such that if // #### base is specified as an absolute URL // #### we trim the end off it and append href return new StreamSource(context.getResourceAsStream(href)); }
public Source resolve(String href, String base) throws TransformerException { if (log.isDebugEnabled() ) { log.info( "base: " + base + " href: " + href ); } // pass if we don't have a systemId if (null == href) return null; // @todo // #### this is a pretty simplistic implementation. // #### we should really handle this better such that if // #### base is specified as an absolute URL // #### we trim the end off it and append href return new StreamSource(context.getResourceAsStream(href)); }
1,112,599
public void doTag(XMLOutput output) throws Exception { Document xmlDocument = this.getXmlDocument(output); Document xslDocument = this.parse(this.xsl); tf.setURIResolver(createURIResolver()); Transformer transformer = tf.newTransformer(new DocumentSource(xslDocument)); DocumentSource xmlDocSource = new DocumentSource(xmlDocument); String var = getVar(); if (var == null) { // pass the result of the transform out as SAX events Result result = createSAXResult(output); transformer.transform(xmlDocSource, result); } else { DocumentResult result = new DocumentResult(); transformer.transform(xmlDocSource, result); // output the result as a variable Document transformedDoc = result.getDocument(); context.setVariable(var, transformedDoc); } }
public void doTag(XMLOutput output) throws Exception { Document xmlDocument = this.getXmlDocument(output); Document xslDocument = this.parse(this.xsl); tf.setURIResolver(createURIResolver()); Transformer transformer = tf.newTransformer(new DocumentSource(xslDocument)); DocumentSource xmlDocSource = new DocumentSource(xmlDocument); String var = getVar(); if (var == null) { // pass the result of the transform out as SAX events Result result = createSAXResult(output); transformer.transform(xmlDocSource, result); } else { DocumentResult result = new DocumentResult(); transformer.transform(xmlDocSource, result); // output the result as a variable Document transformedDoc = result.getDocument(); context.setVariable(var, transformedDoc); } }
1,112,600
public void doTag(XMLOutput output) throws Exception { Document xmlDocument = this.getXmlDocument(output); Document xslDocument = this.parse(this.xsl); tf.setURIResolver(createURIResolver()); Transformer transformer = tf.newTransformer(new DocumentSource(xslDocument)); DocumentSource xmlDocSource = new DocumentSource(xmlDocument); String var = getVar(); if (var == null) { // pass the result of the transform out as SAX events Result result = createSAXResult(output); transformer.transform(xmlDocSource, result); } else { DocumentResult result = new DocumentResult(); transformer.transform(xmlDocSource, result); // output the result as a variable Document transformedDoc = result.getDocument(); context.setVariable(var, transformedDoc); } }
public void doTag(XMLOutput output) throws Exception { Document xmlDocument = this.getXmlDocument(output); Document xslDocument = this.parse(this.xsl); tf.setURIResolver(createURIResolver()); Transformer transformer = tf.newTransformer(new DocumentSource(xslDocument)); DocumentSource xmlDocSource = new DocumentSource(xmlDocument); String var = getVar(); if (var == null) { // pass the result of the transform out as SAX events Result result = createSAXResult(output); transformer.transform(xmlDocSource, result); } else { DocumentResult result = new DocumentResult(); transformer.transform(xmlDocSource, result); // output the result as a variable Document transformedDoc = result.getDocument(); context.setVariable(var, transformedDoc); } }
1,112,601
public void doTag(XMLOutput output) throws Exception { Document xmlDocument = this.getXmlDocument(output); Document xslDocument = this.parse(this.xsl); tf.setURIResolver(createURIResolver()); Transformer transformer = tf.newTransformer(new DocumentSource(xslDocument)); DocumentSource xmlDocSource = new DocumentSource(xmlDocument); String var = getVar(); if (var == null) { // pass the result of the transform out as SAX events Result result = createSAXResult(output); transformer.transform(xmlDocSource, result); } else { DocumentResult result = new DocumentResult(); transformer.transform(xmlDocSource, result); // output the result as a variable Document transformedDoc = result.getDocument(); context.setVariable(var, transformedDoc); } }
public void doTag(XMLOutput output) throws Exception { Document xmlDocument = this.getXmlDocument(output); Document xslDocument = this.parse(this.xsl); tf.setURIResolver(createURIResolver()); Transformer transformer = tf.newTransformer(new DocumentSource(xslDocument)); this.tf.setURIResolver(createURIResolver()); this.transformerHandler = this.tf.newTransformerHandler(this.getObjAsSAXSource(this.getXslt())); String var = getVar(); if (var == null) { // pass the result of the transform out as SAX events Result result = createSAXResult(output); transformer.transform(xmlDocSource, result); } else { DocumentResult result = new DocumentResult(); transformer.transform(xmlDocSource, result); // output the result as a variable Document transformedDoc = result.getDocument(); context.setVariable(var, transformedDoc); } }
1,112,602
public void doTag(XMLOutput output) throws Exception { Document xmlDocument = this.getXmlDocument(output); Document xslDocument = this.parse(this.xsl); tf.setURIResolver(createURIResolver()); Transformer transformer = tf.newTransformer(new DocumentSource(xslDocument)); DocumentSource xmlDocSource = new DocumentSource(xmlDocument); String var = getVar(); if (var == null) { // pass the result of the transform out as SAX events Result result = createSAXResult(output); transformer.transform(xmlDocSource, result); } else { DocumentResult result = new DocumentResult(); transformer.transform(xmlDocSource, result); // output the result as a variable Document transformedDoc = result.getDocument(); context.setVariable(var, transformedDoc); } }
public void doTag(XMLOutput output) throws Exception { Document xmlDocument = this.getXmlDocument(output); Document xslDocument = this.parse(this.xsl); tf.setURIResolver(createURIResolver()); Transformer transformer = tf.newTransformer(new DocumentSource(xslDocument)); DocumentSource xmlDocSource = new DocumentSource(xmlDocument); String var = getVar(); if (var == null) { // pass the result of the transform out as SAX events Result result = createSAXResult(output); transformer.transform(xmlDocSource, result); } else { DocumentResult result = new DocumentResult(); transformer.transform(xmlDocSource, result); // output the result as a variable Document transformedDoc = result.getDocument(); context.setVariable(var, transformedDoc); } }
1,112,603
public void doTag(XMLOutput output) throws Exception { Document xmlDocument = this.getXmlDocument(output); Document xslDocument = this.parse(this.xsl); tf.setURIResolver(createURIResolver()); Transformer transformer = tf.newTransformer(new DocumentSource(xslDocument)); DocumentSource xmlDocSource = new DocumentSource(xmlDocument); String var = getVar(); if (var == null) { // pass the result of the transform out as SAX events Result result = createSAXResult(output); transformer.transform(xmlDocSource, result); } else { DocumentResult result = new DocumentResult(); transformer.transform(xmlDocSource, result); // output the result as a variable Document transformedDoc = result.getDocument(); context.setVariable(var, transformedDoc); } }
public void doTag(XMLOutput output) throws Exception { Document xmlDocument = this.getXmlDocument(output); Document xslDocument = this.parse(this.xsl); tf.setURIResolver(createURIResolver()); Transformer transformer = tf.newTransformer(new DocumentSource(xslDocument)); DocumentSource xmlDocSource = new DocumentSource(xmlDocument); String var = getVar(); if (var == null) { // pass the result of the transform out as SAX events Result result = createSAXResult(output); transformer.transform(xmlDocSource, result); } else { DocumentResult result = new DocumentResult(); transformer.transform(xmlDocSource, result); // output the result as a variable Document transformedDoc = result.getDocument(); context.setVariable(var, transformedDoc); } }
1,112,604
public void doTag(XMLOutput output) throws Exception { Document xmlDocument = this.getXmlDocument(output); Document xslDocument = this.parse(this.xsl); tf.setURIResolver(createURIResolver()); Transformer transformer = tf.newTransformer(new DocumentSource(xslDocument)); DocumentSource xmlDocSource = new DocumentSource(xmlDocument); String var = getVar(); if (var == null) { // pass the result of the transform out as SAX events Result result = createSAXResult(output); transformer.transform(xmlDocSource, result); } else { DocumentResult result = new DocumentResult(); transformer.transform(xmlDocSource, result); // output the result as a variable Document transformedDoc = result.getDocument(); context.setVariable(var, transformedDoc); } }
public void doTag(XMLOutput output) throws Exception { Document xmlDocument = this.getXmlDocument(output); Document xslDocument = this.parse(this.xsl); tf.setURIResolver(createURIResolver()); Transformer transformer = tf.newTransformer(new DocumentSource(xslDocument)); DocumentSource xmlDocSource = new DocumentSource(xmlDocument); String var = getVar(); if (var == null) { // pass the result of the transform out as SAX events Result result = createSAXResult(output); this.transformerHandler.setResult(result); xmlReader.parse(this.getXMLInputSource()); } else { DocumentResult result = new DocumentResult(); this.transformerHandler.setResult(result); xmlReader.parse(this.getXMLInputSource()); // output the result as a variable Document transformedDoc = result.getDocument(); context.setVariable(var, transformedDoc); } }
1,112,605
public void doTag(XMLOutput output) throws Exception { Document xmlDocument = this.getXmlDocument(output); Document xslDocument = this.parse(this.xsl); tf.setURIResolver(createURIResolver()); Transformer transformer = tf.newTransformer(new DocumentSource(xslDocument)); DocumentSource xmlDocSource = new DocumentSource(xmlDocument); String var = getVar(); if (var == null) { // pass the result of the transform out as SAX events Result result = createSAXResult(output); transformer.transform(xmlDocSource, result); } else { DocumentResult result = new DocumentResult(); transformer.transform(xmlDocSource, result); // output the result as a variable Document transformedDoc = result.getDocument(); context.setVariable(var, transformedDoc); } }
public void doTag(XMLOutput output) throws Exception { Document xmlDocument = this.getXmlDocument(output); Document xslDocument = this.parse(this.xsl); tf.setURIResolver(createURIResolver()); Transformer transformer = tf.newTransformer(new DocumentSource(xslDocument)); DocumentSource xmlDocSource = new DocumentSource(xmlDocument); String var = getVar(); if (var == null) { // pass the result of the transform out as SAX events Result result = createSAXResult(output); transformer.transform(xmlDocSource, result); } else { DocumentResult result = new DocumentResult(); transformer.transform(xmlDocSource, result); // output the result as a variable Document transformedDoc = result.getDocument(); this.context.setVariable(varName, transformedDoc); } }
1,112,606
void saveDprimeToText(){ try{ String saveDprimeName = infileName + ".LDout"; new TextMethods().saveDprimeToText(theData.dPrimeTable, saveDprimeName); }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
void saveDprimeToText(){ try{ String saveDprimeName = infileName + ".LDout"; new TextMethods().saveDprimeToText(theData.dPrimeTable, saveDprimeName); }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
1,112,608
void saveHapsToText(){ //first we see what files are in this directory File currentDir = new File(System.getProperty("user.dir")); int highCount = 0; String[] filez = currentDir.list(); for (int i = 0; i < filez.length; i++){ if (filez[i].startsWith("haptxt")){ if (filez[i].endsWith(infileName)){ //get counter number for this file int thisInt = Integer.parseInt(filez[i].substring(6,8)); if (thisInt > highCount){ highCount = thisInt; } } } } //put together various pieces to create filename for output String saveName = "haptxt"; if (highCount < 9) saveName += "0"; saveName += (highCount+1); saveName += "."; saveName += infileName; try{ new TextMethods().saveHapsToText(finishedHaplos, saveName); }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
void saveHapsToText(){ //first we see what files are in this directory File currentDir = new File(System.getProperty("user.dir")); int highCount = 0; String[] filez = currentDir.list(); for (int i = 0; i < filez.length; i++){ if (filez[i].startsWith("haptxt")){ if (filez[i].endsWith(infileName)){ //get counter number for this file int thisInt = Integer.parseInt(filez[i].substring(6,8)); if (thisInt > highCount){ highCount = thisInt; } } } } //put together various pieces to create filename for output String saveName = "haptxt"; if (highCount < 9) saveName += "0"; saveName += (highCount+1); saveName += "."; saveName += infileName; try{ new TextMethods().saveHapsToText(finishedHaplos, saveName); }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
1,112,610
void saveHapsToText(){ //first we see what files are in this directory File currentDir = new File(System.getProperty("user.dir")); int highCount = 0; String[] filez = currentDir.list(); for (int i = 0; i < filez.length; i++){ if (filez[i].startsWith("haptxt")){ if (filez[i].endsWith(infileName)){ //get counter number for this file int thisInt = Integer.parseInt(filez[i].substring(6,8)); if (thisInt > highCount){ highCount = thisInt; } } } } //put together various pieces to create filename for output String saveName = "haptxt"; if (highCount < 9) saveName += "0"; saveName += (highCount+1); saveName += "."; saveName += infileName; try{ new TextMethods().saveHapsToText(finishedHaplos, saveName); }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
void saveHapsToText(){ //first we see what files are in this directory File currentDir = new File(System.getProperty("user.dir")); int highCount = 0; String[] filez = currentDir.list(); for (int i = 0; i < filez.length; i++){ if (filez[i].startsWith("haptxt")){ if (filez[i].endsWith(infileName)){ //get counter number for this file int thisInt = Integer.parseInt(filez[i].substring(6,8)); if (thisInt > highCount){ highCount = thisInt; } } } } //put together various pieces to create filename for output String saveName = "haptxt"; if (highCount < 9) saveName += "0"; saveName += (highCount+1); saveName += "."; saveName += infileName; try{ new TextMethods().saveHapsToText(finishedHaplos, saveName); }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
1,112,611
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command==RAW_DATA){ load(PED); }else if (command == PHASED_DATA){ load(HAPS); }else if (command == HAPMAP_DATA){ load(HMP); }else if (command == BROWSE_GENO){ browse(GENO); }else if (command == BROWSE_INFO){ browse(INFO); }else if (command == "OK"){ HaploView caller = (HaploView)this.getParent(); if (doTDT.isSelected()){ if (trioButton.isSelected()){ caller.assocTest = 1; } else { caller.assocTest = 2; } }else{ caller.assocTest = 0; } String[] returnStrings = {genoFileField.getText(), infoFileField.getText(), maxComparisonDistField.getText()}; caller.readGenotypes(returnStrings, fileType); if (caller.hapDisplay != null){ caller.hapDisplay.setVisible(false); } if (caller.dPrimeDisplay != null){ caller.dPrimeDisplay.setVisible(false); } this.dispose(); }else if (command == "Cancel"){ this.dispose(); }else if (command == "tdt"){ if(this.doTDT.isSelected()){ trioButton.setEnabled(true); ccButton.setEnabled(true); }else{ trioButton.setEnabled(false); ccButton.setEnabled(false); } } }
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command==RAW_DATA){ load(PED); }else if (command == PHASED_DATA){ load(HAPS); }else if (command == HAPMAP_DATA){ load(HMP); }else if (command == BROWSE_GENO){ browse(GENO); }else if (command == BROWSE_INFO){ browse(INFO); }else if (command == "OK"){ HaploView caller = (HaploView)this.getParent(); if (doTDT.isSelected()){ if (trioButton.isSelected()){ caller.assocTest = 1; } else { caller.assocTest = 2; } }else{ caller.assocTest = 0; } String[] returnStrings = {genoFileField.getText(), infoFileField.getText(), maxComparisonDistField.getText()}; caller.readGenotypes(returnStrings, fileType); if (caller.hapDisplay != null){ caller.hapDisplay.setVisible(false); } if (caller.dPrimeDisplay != null){ caller.dPrimeDisplay.setVisible(false); } this.dispose(); }else if (command == "Cancel"){ this.dispose(); }else if (command == "tdt"){ if(this.doTDT.isSelected()){ trioButton.setEnabled(true); ccButton.setEnabled(true); }else{ trioButton.setEnabled(false); ccButton.setEnabled(false); } } }
1,112,612
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command==RAW_DATA){ load(PED); }else if (command == PHASED_DATA){ load(HAPS); }else if (command == HAPMAP_DATA){ load(HMP); }else if (command == BROWSE_GENO){ browse(GENO); }else if (command == BROWSE_INFO){ browse(INFO); }else if (command == "OK"){ HaploView caller = (HaploView)this.getParent(); if (doTDT.isSelected()){ if (trioButton.isSelected()){ caller.assocTest = 1; } else { caller.assocTest = 2; } }else{ caller.assocTest = 0; } String[] returnStrings = {genoFileField.getText(), infoFileField.getText(), maxComparisonDistField.getText()}; caller.readGenotypes(returnStrings, fileType); if (caller.hapDisplay != null){ caller.hapDisplay.setVisible(false); } if (caller.dPrimeDisplay != null){ caller.dPrimeDisplay.setVisible(false); } this.dispose(); }else if (command == "Cancel"){ this.dispose(); }else if (command == "tdt"){ if(this.doTDT.isSelected()){ trioButton.setEnabled(true); ccButton.setEnabled(true); }else{ trioButton.setEnabled(false); ccButton.setEnabled(false); } } }
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command==RAW_DATA){ load(PED); }else if (command == PHASED_DATA){ load(HAPS); }else if (command == HAPMAP_DATA){ load(HMP); }else if (command == BROWSE_GENO){ browse(GENO); }else if (command == BROWSE_INFO){ browse(INFO); }else if (command == "OK"){ HaploView caller = (HaploView)this.getParent(); if (doTDT.isSelected()){ if (trioButton.isSelected()){ caller.assocTest = 1; } else { caller.assocTest = 2; } }else{ caller.assocTest = 0; } String[] returnStrings = {genoFileField.getText(), infoFileField.getText(), maxComparisonDistField.getText()}; caller.readGenotypes(returnStrings, fileType); if (caller.hapDisplay != null){ caller.hapDisplay.setVisible(false); } if (caller.dPrimeDisplay != null){ caller.dPrimeDisplay.setVisible(false); } this.dispose(); }else if (command == "Cancel"){ this.dispose(); }else if (command == "tdt"){ if(this.doTDT.isSelected()){ trioButton.setEnabled(true); ccButton.setEnabled(true); }else{ trioButton.setEnabled(false); ccButton.setEnabled(false); } } }
1,112,613
public Script getBody() { if ( isTrim() && ! hasTrimmed ) { trimBody(); } return body; }
public Script getBody() { if ( isTrim() && ! hasTrimmed ) { trimBody(); } return body; }
1,112,614
protected String getBodyText() throws Exception { // XXX: could maybe optimise this later on by having a pool of buffers StringWriter writer = new StringWriter(); getBody().run(context, XMLOutput.createXMLOutput(writer)); return writer.toString(); }
protected String getBodyText() throws Exception { // XXX: could maybe optimise this later on by having a pool of buffers StringWriter writer = new StringWriter(); invokeBody(XMLOutput.createXMLOutput(writer)); return writer.toString(); }
1,112,615
protected void invokeBody(XMLOutput output) throws Exception { getBody().run(context, output); }
protected void invokeBody(XMLOutput output) throws Exception { if ( isTrim() && ! hasTrimmed ) { trimBody(); } invokeBody(output); }
1,112,616
private MarkerResult checkMarker(int loc)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; int missing=0, founderHetCount=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0, haploid = 0; Hashtable founderGenoCount = new Hashtable(); Hashtable kidgeno = new Hashtable(); int[] founderHomCount = new int[5]; Vector mendels = new Vector(); int[] count = new int[5]; for(int i=0;i<5;i++) { founderHomCount[i] =0; count[i]=0; } //loop through each family, check data for marker loc Enumeration famList = pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); allele1 = currentInd.getAllele(loc,0); allele2 = currentInd.getAllele(loc,1); //if haploid, check for male hets if(Chromosome.getDataChrom().equals("chrx") && currentInd.getGender()==1){ if(allele1 != allele2) { currentInd.zeroOutMarker(loc); pedFile.setHaploidHets(true); } } //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //do mendel check int momAllele1 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,0); int momAllele2 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,1); int dadAllele1 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,0); int dadAllele2 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,1); if(Chromosome.getDataChrom().equals("chrx")){ if(currentInd.getGender() == 1) { if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0)){ //this is an x chrom for a male, so the only thing we need to check is if //allele1 matches either momallele1 or momallele2 if(allele1 != 
momAllele1 && allele1 != momAllele2) { mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } }else { //if gender is anything except 1 we assume female if(momAllele1 == momAllele2) { //mom hom and dad matches mom if(dadAllele1 == momAllele1) { //kid must be hom same allele if(allele1 != momAllele1 || allele2 != momAllele2){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } }else { //kid must be het if(allele1 == allele2 ){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } }else{ //mom het,so only need to check that at least one allele matches dad if(allele1 != dadAllele1 && allele2 != dadAllele2){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } } }else{ //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom if(momAllele1 == momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != momAllele1) { 
mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } } //end mendel check } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getZeroed(loc)){ allele1 = 0; allele2 = 0; }else{ allele1 = currentInd.getAllele(loc,0); allele2 = currentInd.getAllele(loc,1); } String familyID = currentInd.getFamilyID(); //no allele data missing 
if(allele1 > 0 && allele2 >0){ //indiv has no parents -- i.e. is a founder if(!currentFamily.hasAncestor(currentInd.getIndividualID())){ //set founderGenoCount if(founderGenoCount.containsKey(familyID)){ int value = ((Integer)founderGenoCount.get(familyID)).intValue() +1; founderGenoCount.put(familyID, new Integer(value)); }else{ founderGenoCount.put(familyID, new Integer(1)); } if (allele1 != 9){ //value of 9 means an 'h' allele for haps files... count[allele1]++; } if (!Chromosome.getDataChrom().equals("chrx") || currentInd.getGender() != 1) { if(allele1 != allele2 || allele1 == 9 || allele2 == 9) { founderHetCount++; }else{ founderHomCount[allele1]++; } if(allele2 != 9){ count[allele2]++; } } }else{ if(kidgeno.containsKey(familyID)){ int value = ((Integer)kidgeno.get(familyID)).intValue() +1; kidgeno.put(familyID, new Integer(value)); } else{ kidgeno.put(familyID, new Integer(1)); } } if (!Chromosome.getDataChrom().equals("chrx") || currentInd.getGender() != 1) { if(allele1 == allele2) { hom++; }else { het++; } }else{ haploid++; } } //missing data else missing++; } currentFamily.setMendErrs(mendErrNum); } double obsHET = getObsHET(het, hom); double freqStuff[] = null; try{ freqStuff = getFreqStuff(count); }catch (PedFileException pfe){ throw new PedFileException("More than two alleles at marker " + (loc+1)); } double preHET = freqStuff[0]; double maf = freqStuff[1]; String minorAllele, majorAllele; if (freqStuff[2] == 1){ minorAllele = "A"; }else if (freqStuff[2] == 2){ minorAllele = "C"; }else if (freqStuff[2] == 3){ minorAllele = "G"; }else{ minorAllele = "T"; } if (freqStuff[3] == 1){ majorAllele = "A"; }else if (freqStuff[3] == 2){ majorAllele = "C"; }else if (freqStuff[3] == 3){ majorAllele = "G"; }else{ majorAllele = "T"; } //HW p value double pvalue = getPValue(founderHomCount, founderHetCount); //geno percent double genopct = getGenoPercent(het, hom, haploid, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = 
getNumOfFamTrio(pedFile.getFamList(), founderGenoCount, kidgeno); //rating int rating = this.getRating(genopct, pvalue, mendErrNum,maf); if (mendErrNum > 0 && !pedFile.getMendelsExist()){ pedFile.setMendelsExist(true); } result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setMinorAllele(minorAllele); result.setMajorAllele(majorAllele); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); result.setMendelErrors(mendels); return result; }
private MarkerResult checkMarker(int loc)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; int missing=0, founderHetCount=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0, haploid = 0; Hashtable founderGenoCount = new Hashtable(); Hashtable kidgeno = new Hashtable(); int[] founderHomCount = new int[5]; Vector mendels = new Vector(); int[] count = new int[5]; for(int i=0;i<5;i++) { founderHomCount[i] =0; count[i]=0; } //loop through each family, check data for marker loc Enumeration famList = pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); allele1 = currentInd.getAllele(loc,0); allele2 = currentInd.getAllele(loc,1); //if haploid, check for male hets if(Chromosome.getDataChrom().equalsIgnoreCase("chrx") && currentInd.getGender()==1){ if(allele1 != allele2) { currentInd.zeroOutMarker(loc); pedFile.setHaploidHets(true); } } //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //do mendel check int momAllele1 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,0); int momAllele2 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,1); int dadAllele1 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,0); int dadAllele2 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,1); if(Chromosome.getDataChrom().equals("chrx")){ if(currentInd.getGender() == 1) { if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0)){ //this is an x chrom for a male, so the only thing we need to check is if //allele1 matches either momallele1 or momallele2 if(allele1 
!= momAllele1 && allele1 != momAllele2) { mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } }else { //if gender is anything except 1 we assume female if(momAllele1 == momAllele2) { //mom hom and dad matches mom if(dadAllele1 == momAllele1) { //kid must be hom same allele if(allele1 != momAllele1 || allele2 != momAllele2){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } }else { //kid must be het if(allele1 == allele2 ){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } }else{ //mom het,so only need to check that at least one allele matches dad if(allele1 != dadAllele1 && allele2 != dadAllele2){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } } }else{ //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom if(momAllele1 == momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != 
momAllele1) { mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } } //end mendel check } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getZeroed(loc)){ allele1 = 0; allele2 = 0; }else{ allele1 = currentInd.getAllele(loc,0); allele2 = currentInd.getAllele(loc,1); } String familyID = currentInd.getFamilyID(); //no allele data 
missing if(allele1 > 0 && allele2 >0){ //indiv has no parents -- i.e. is a founder if(!currentFamily.hasAncestor(currentInd.getIndividualID())){ //set founderGenoCount if(founderGenoCount.containsKey(familyID)){ int value = ((Integer)founderGenoCount.get(familyID)).intValue() +1; founderGenoCount.put(familyID, new Integer(value)); }else{ founderGenoCount.put(familyID, new Integer(1)); } if (allele1 != 9){ //value of 9 means an 'h' allele for haps files... count[allele1]++; } if (!Chromosome.getDataChrom().equals("chrx") || currentInd.getGender() != 1) { if(allele1 != allele2 || allele1 == 9 || allele2 == 9) { founderHetCount++; }else{ founderHomCount[allele1]++; } if(allele2 != 9){ count[allele2]++; } } }else{ if(kidgeno.containsKey(familyID)){ int value = ((Integer)kidgeno.get(familyID)).intValue() +1; kidgeno.put(familyID, new Integer(value)); } else{ kidgeno.put(familyID, new Integer(1)); } } if (!Chromosome.getDataChrom().equals("chrx") || currentInd.getGender() != 1) { if(allele1 == allele2) { hom++; }else { het++; } }else{ haploid++; } } //missing data else missing++; } currentFamily.setMendErrs(mendErrNum); } double obsHET = getObsHET(het, hom); double freqStuff[] = null; try{ freqStuff = getFreqStuff(count); }catch (PedFileException pfe){ throw new PedFileException("More than two alleles at marker " + (loc+1)); } double preHET = freqStuff[0]; double maf = freqStuff[1]; String minorAllele, majorAllele; if (freqStuff[2] == 1){ minorAllele = "A"; }else if (freqStuff[2] == 2){ minorAllele = "C"; }else if (freqStuff[2] == 3){ minorAllele = "G"; }else{ minorAllele = "T"; } if (freqStuff[3] == 1){ majorAllele = "A"; }else if (freqStuff[3] == 2){ majorAllele = "C"; }else if (freqStuff[3] == 3){ majorAllele = "G"; }else{ majorAllele = "T"; } //HW p value double pvalue = getPValue(founderHomCount, founderHetCount); //geno percent double genopct = getGenoPercent(het, hom, haploid, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = 
getNumOfFamTrio(pedFile.getFamList(), founderGenoCount, kidgeno); //rating int rating = this.getRating(genopct, pvalue, mendErrNum,maf); if (mendErrNum > 0 && !pedFile.getMendelsExist()){ pedFile.setMendelsExist(true); } result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setMinorAllele(minorAllele); result.setMajorAllele(majorAllele); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); result.setMendelErrors(mendels); return result; }
1,112,617
private MarkerResult checkMarker(int loc)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; int missing=0, founderHetCount=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0, haploid = 0; Hashtable founderGenoCount = new Hashtable(); Hashtable kidgeno = new Hashtable(); int[] founderHomCount = new int[5]; Vector mendels = new Vector(); int[] count = new int[5]; for(int i=0;i<5;i++) { founderHomCount[i] =0; count[i]=0; } //loop through each family, check data for marker loc Enumeration famList = pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); allele1 = currentInd.getAllele(loc,0); allele2 = currentInd.getAllele(loc,1); //if haploid, check for male hets if(Chromosome.getDataChrom().equals("chrx") && currentInd.getGender()==1){ if(allele1 != allele2) { currentInd.zeroOutMarker(loc); pedFile.setHaploidHets(true); } } //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //do mendel check int momAllele1 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,0); int momAllele2 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,1); int dadAllele1 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,0); int dadAllele2 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,1); if(Chromosome.getDataChrom().equals("chrx")){ if(currentInd.getGender() == 1) { if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0)){ //this is an x chrom for a male, so the only thing we need to check is if //allele1 matches either momallele1 or momallele2 if(allele1 != 
momAllele1 && allele1 != momAllele2) { mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } }else { //if gender is anything except 1 we assume female if(momAllele1 == momAllele2) { //mom hom and dad matches mom if(dadAllele1 == momAllele1) { //kid must be hom same allele if(allele1 != momAllele1 || allele2 != momAllele2){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } }else { //kid must be het if(allele1 == allele2 ){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } }else{ //mom het,so only need to check that at least one allele matches dad if(allele1 != dadAllele1 && allele2 != dadAllele2){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } } }else{ //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom if(momAllele1 == momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != momAllele1) { 
mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } } //end mendel check } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getZeroed(loc)){ allele1 = 0; allele2 = 0; }else{ allele1 = currentInd.getAllele(loc,0); allele2 = currentInd.getAllele(loc,1); } String familyID = currentInd.getFamilyID(); //no allele data missing 
if(allele1 > 0 && allele2 >0){ //indiv has no parents -- i.e. is a founder if(!currentFamily.hasAncestor(currentInd.getIndividualID())){ //set founderGenoCount if(founderGenoCount.containsKey(familyID)){ int value = ((Integer)founderGenoCount.get(familyID)).intValue() +1; founderGenoCount.put(familyID, new Integer(value)); }else{ founderGenoCount.put(familyID, new Integer(1)); } if (allele1 != 9){ //value of 9 means an 'h' allele for haps files... count[allele1]++; } if (!Chromosome.getDataChrom().equals("chrx") || currentInd.getGender() != 1) { if(allele1 != allele2 || allele1 == 9 || allele2 == 9) { founderHetCount++; }else{ founderHomCount[allele1]++; } if(allele2 != 9){ count[allele2]++; } } }else{ if(kidgeno.containsKey(familyID)){ int value = ((Integer)kidgeno.get(familyID)).intValue() +1; kidgeno.put(familyID, new Integer(value)); } else{ kidgeno.put(familyID, new Integer(1)); } } if (!Chromosome.getDataChrom().equals("chrx") || currentInd.getGender() != 1) { if(allele1 == allele2) { hom++; }else { het++; } }else{ haploid++; } } //missing data else missing++; } currentFamily.setMendErrs(mendErrNum); } double obsHET = getObsHET(het, hom); double freqStuff[] = null; try{ freqStuff = getFreqStuff(count); }catch (PedFileException pfe){ throw new PedFileException("More than two alleles at marker " + (loc+1)); } double preHET = freqStuff[0]; double maf = freqStuff[1]; String minorAllele, majorAllele; if (freqStuff[2] == 1){ minorAllele = "A"; }else if (freqStuff[2] == 2){ minorAllele = "C"; }else if (freqStuff[2] == 3){ minorAllele = "G"; }else{ minorAllele = "T"; } if (freqStuff[3] == 1){ majorAllele = "A"; }else if (freqStuff[3] == 2){ majorAllele = "C"; }else if (freqStuff[3] == 3){ majorAllele = "G"; }else{ majorAllele = "T"; } //HW p value double pvalue = getPValue(founderHomCount, founderHetCount); //geno percent double genopct = getGenoPercent(het, hom, haploid, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = 
getNumOfFamTrio(pedFile.getFamList(), founderGenoCount, kidgeno); //rating int rating = this.getRating(genopct, pvalue, mendErrNum,maf); if (mendErrNum > 0 && !pedFile.getMendelsExist()){ pedFile.setMendelsExist(true); } result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setMinorAllele(minorAllele); result.setMajorAllele(majorAllele); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); result.setMendelErrors(mendels); return result; }
private MarkerResult checkMarker(int loc)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; int missing=0, founderHetCount=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0, haploid = 0; Hashtable founderGenoCount = new Hashtable(); Hashtable kidgeno = new Hashtable(); int[] founderHomCount = new int[5]; Vector mendels = new Vector(); int[] count = new int[5]; for(int i=0;i<5;i++) { founderHomCount[i] =0; count[i]=0; } //loop through each family, check data for marker loc Enumeration famList = pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); allele1 = currentInd.getAllele(loc,0); allele2 = currentInd.getAllele(loc,1); //if haploid, check for male hets if(Chromosome.getDataChrom().equals("chrx") && currentInd.getGender()==1){ if(allele1 != allele2) { currentInd.zeroOutMarker(loc); pedFile.setHaploidHets(true); } } //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //do mendel check int momAllele1 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,0); int momAllele2 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,1); int dadAllele1 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,0); int dadAllele2 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,1); if(Chromosome.getDataChrom().equals("chrx")){ if(currentInd.getGender() == 1) { if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0)){ //this is an x chrom for a male, so the only thing we need to check is if //allele1 matches either momallele1 or momallele2 if(allele1 != 
momAllele1 && allele1 != momAllele2) { mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } }else { //if gender is anything except 1 we assume female if(momAllele1 == momAllele2) { //mom hom and dad matches mom if(dadAllele1 == momAllele1) { //kid must be hom same allele if(allele1 != momAllele1 || allele2 != momAllele2){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } }else { //kid must be het if(allele1 == allele2 ){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } }else{ //mom het,so only need to check that at least one allele matches dad if(allele1 != dadAllele1 && allele2 != dadAllele2){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } } }else{ //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom if(momAllele1 == momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != momAllele1) { 
mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } } //end mendel check } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getZeroed(loc)){ allele1 = 0; allele2 = 0; }else{ allele1 = currentInd.getAllele(loc,0); allele2 = currentInd.getAllele(loc,1); } String familyID = currentInd.getFamilyID(); //no allele data missing 
if(allele1 > 0 && allele2 >0){ //indiv has no parents -- i.e. is a founder if(!currentFamily.hasAncestor(currentInd.getIndividualID())){ //set founderGenoCount if(founderGenoCount.containsKey(familyID)){ int value = ((Integer)founderGenoCount.get(familyID)).intValue() +1; founderGenoCount.put(familyID, new Integer(value)); }else{ founderGenoCount.put(familyID, new Integer(1)); } if (allele1 != 9){ //value of 9 means an 'h' allele for haps files... count[allele1]++; } if (!Chromosome.getDataChrom().equals("chrx") || currentInd.getGender() != 1) { if(allele1 != allele2 || allele1 == 9 || allele2 == 9) { founderHetCount++; }else{ founderHomCount[allele1]++; } if(allele2 != 9){ count[allele2]++; } } }else{ if(kidgeno.containsKey(familyID)){ int value = ((Integer)kidgeno.get(familyID)).intValue() +1; kidgeno.put(familyID, new Integer(value)); } else{ kidgeno.put(familyID, new Integer(1)); } } if (!Chromosome.getDataChrom().equals("chrx") || currentInd.getGender() != 1) { if(allele1 == allele2) { hom++; }else { het++; } }else{ haploid++; } } //missing data else missing++; } currentFamily.setMendErrs(mendErrNum); } double obsHET = getObsHET(het, hom); double freqStuff[] = null; try{ freqStuff = getFreqStuff(count); }catch (PedFileException pfe){ throw new PedFileException("More than two alleles at marker " + (loc+1)); } double preHET = freqStuff[0]; double maf = freqStuff[1]; String minorAllele, majorAllele; if (freqStuff[2] == 1){ minorAllele = "A"; }else if (freqStuff[2] == 2){ minorAllele = "C"; }else if (freqStuff[2] == 3){ minorAllele = "G"; }else{ minorAllele = "T"; } if (freqStuff[3] == 1){ majorAllele = "A"; }else if (freqStuff[3] == 2){ majorAllele = "C"; }else if (freqStuff[3] == 3){ majorAllele = "G"; }else{ majorAllele = "T"; } //HW p value double pvalue = getPValue(founderHomCount, founderHetCount); //geno percent double genopct = getGenoPercent(het, hom, haploid, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = 
getNumOfFamTrio(pedFile.getFamList(), founderGenoCount, kidgeno); //rating int rating = this.getRating(genopct, pvalue, mendErrNum,maf); if (mendErrNum > 0 && !pedFile.getMendelsExist()){ pedFile.setMendelsExist(true); } result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setMinorAllele(minorAllele); result.setMajorAllele(majorAllele); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); result.setMendelErrors(mendels); return result; }
1,112,618
private MarkerResult checkMarker(int loc)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; int missing=0, founderHetCount=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0, haploid = 0; Hashtable founderGenoCount = new Hashtable(); Hashtable kidgeno = new Hashtable(); int[] founderHomCount = new int[5]; Vector mendels = new Vector(); int[] count = new int[5]; for(int i=0;i<5;i++) { founderHomCount[i] =0; count[i]=0; } //loop through each family, check data for marker loc Enumeration famList = pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); allele1 = currentInd.getAllele(loc,0); allele2 = currentInd.getAllele(loc,1); //if haploid, check for male hets if(Chromosome.getDataChrom().equals("chrx") && currentInd.getGender()==1){ if(allele1 != allele2) { currentInd.zeroOutMarker(loc); pedFile.setHaploidHets(true); } } //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //do mendel check int momAllele1 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,0); int momAllele2 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,1); int dadAllele1 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,0); int dadAllele2 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,1); if(Chromosome.getDataChrom().equals("chrx")){ if(currentInd.getGender() == 1) { if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0)){ //this is an x chrom for a male, so the only thing we need to check is if //allele1 matches either momallele1 or momallele2 if(allele1 != 
momAllele1 && allele1 != momAllele2) { mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } }else { //if gender is anything except 1 we assume female if(momAllele1 == momAllele2) { //mom hom and dad matches mom if(dadAllele1 == momAllele1) { //kid must be hom same allele if(allele1 != momAllele1 || allele2 != momAllele2){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } }else { //kid must be het if(allele1 == allele2 ){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } }else{ //mom het,so only need to check that at least one allele matches dad if(allele1 != dadAllele1 && allele2 != dadAllele2){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } } }else{ //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom if(momAllele1 == momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != momAllele1) { 
mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } } //end mendel check } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getZeroed(loc)){ allele1 = 0; allele2 = 0; }else{ allele1 = currentInd.getAllele(loc,0); allele2 = currentInd.getAllele(loc,1); } String familyID = currentInd.getFamilyID(); //no allele data missing 
if(allele1 > 0 && allele2 >0){ //indiv has no parents -- i.e. is a founder if(!currentFamily.hasAncestor(currentInd.getIndividualID())){ //set founderGenoCount if(founderGenoCount.containsKey(familyID)){ int value = ((Integer)founderGenoCount.get(familyID)).intValue() +1; founderGenoCount.put(familyID, new Integer(value)); }else{ founderGenoCount.put(familyID, new Integer(1)); } if (allele1 != 9){ //value of 9 means an 'h' allele for haps files... count[allele1]++; } if (!Chromosome.getDataChrom().equals("chrx") || currentInd.getGender() != 1) { if(allele1 != allele2 || allele1 == 9 || allele2 == 9) { founderHetCount++; }else{ founderHomCount[allele1]++; } if(allele2 != 9){ count[allele2]++; } } }else{ if(kidgeno.containsKey(familyID)){ int value = ((Integer)kidgeno.get(familyID)).intValue() +1; kidgeno.put(familyID, new Integer(value)); } else{ kidgeno.put(familyID, new Integer(1)); } } if (!Chromosome.getDataChrom().equals("chrx") || currentInd.getGender() != 1) { if(allele1 == allele2) { hom++; }else { het++; } }else{ haploid++; } } //missing data else missing++; } currentFamily.setMendErrs(mendErrNum); } double obsHET = getObsHET(het, hom); double freqStuff[] = null; try{ freqStuff = getFreqStuff(count); }catch (PedFileException pfe){ throw new PedFileException("More than two alleles at marker " + (loc+1)); } double preHET = freqStuff[0]; double maf = freqStuff[1]; String minorAllele, majorAllele; if (freqStuff[2] == 1){ minorAllele = "A"; }else if (freqStuff[2] == 2){ minorAllele = "C"; }else if (freqStuff[2] == 3){ minorAllele = "G"; }else{ minorAllele = "T"; } if (freqStuff[3] == 1){ majorAllele = "A"; }else if (freqStuff[3] == 2){ majorAllele = "C"; }else if (freqStuff[3] == 3){ majorAllele = "G"; }else{ majorAllele = "T"; } //HW p value double pvalue = getPValue(founderHomCount, founderHetCount); //geno percent double genopct = getGenoPercent(het, hom, haploid, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = 
getNumOfFamTrio(pedFile.getFamList(), founderGenoCount, kidgeno); //rating int rating = this.getRating(genopct, pvalue, mendErrNum,maf); if (mendErrNum > 0 && !pedFile.getMendelsExist()){ pedFile.setMendelsExist(true); } result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setMinorAllele(minorAllele); result.setMajorAllele(majorAllele); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); result.setMendelErrors(mendels); return result; }
/**
 * Computes per-marker quality statistics for the marker at index {@code loc}
 * across every family in {@code pedFile}.
 *
 * Two passes are made over each family's members:
 * <ol>
 *   <li>Mendelian-inheritance check: for each fully-genotyped child whose
 *       parents are both present, inconsistent transmissions are recorded as
 *       {@link MendelError}s and the child's and parents' genotypes at this
 *       marker are zeroed out ({@code zeroOutMarker}). On chrX, haploid
 *       heterozygous males are also zeroed here.</li>
 *   <li>Counting pass (runs on the possibly-zeroed data): tallies founder vs.
 *       non-founder genotypes, allele counts, het/hom/haploid/missing totals
 *       used for observed/predicted heterozygosity, MAF, HW p-value, genotyping
 *       percentage, fully-genotyped-trio count, and the overall rating.</li>
 * </ol>
 *
 * NOTE(review): this method mutates shared state as it runs — individuals'
 * genotypes (via zeroOutMarker), each Family's mendel-error count, and
 * pedFile's mendelsExist/haploidHets flags — so the two passes are
 * order-dependent and must not be reordered.
 *
 * @param loc zero-based marker index to check
 * @return a populated {@link MarkerResult} for this marker
 * @throws PedFileException if more than two alleles are observed at the marker
 *         (rethrown from {@code getFreqStuff} with the 1-based marker number)
 */
private MarkerResult checkMarker(int loc)throws PedFileException{
    MarkerResult result = new MarkerResult();
    Individual currentInd;
    int missing=0, founderHetCount=0, mendErrNum=0;
    int allele1=0, allele2=0, hom=0, het=0, haploid = 0;
    // per-family counts of fully-genotyped founders / children, keyed by family ID
    Hashtable founderGenoCount = new Hashtable();
    Hashtable kidgeno = new Hashtable();
    // indices 1-4 correspond to allele codes (A=1, C=2, G=3, T=4); index 0 unused
    int[] founderHomCount = new int[5];
    Vector mendels = new Vector();
    int[] count = new int[5];
    for(int i=0;i<5;i++) {
        founderHomCount[i] =0;
        count[i]=0;
    }

    //loop through each family, check data for marker loc
    Enumeration famList = pedFile.getFamList();
    while(famList.hasMoreElements()){
        Family currentFamily = pedFile.getFamily((String)famList.nextElement());
        Enumeration indList = currentFamily.getMemberList();

        // === Pass 1: Mendelian-error detection ===
        //loop through each individual in the current Family
        while(indList.hasMoreElements()){
            currentInd = currentFamily.getMember((String)indList.nextElement());
            allele1 = currentInd.getAllele(loc,0);
            allele2 = currentInd.getAllele(loc,1);

            //if haploid, check for male hets
            // (a het male on chrX is impossible data -> zero it and flag the file)
            if(Chromosome.getDataChrom().equals("chrx") && currentInd.getGender()==1){
                if(allele1 != allele2) {
                    currentInd.zeroOutMarker(loc);
                    pedFile.setHaploidHets(true);
                }
            }

            //no allele data missing
            if(allele1 > 0 && allele2 >0){
                //make sure entry has parents
                if (currentFamily.containsMember(currentInd.getMomID()) &&
                        currentFamily.containsMember(currentInd.getDadID())){
                    //do mendel check
                    int momAllele1 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,0);
                    int momAllele2 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,1);
                    int dadAllele1 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,0);
                    int dadAllele2 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,1);

                    if(Chromosome.getDataChrom().equals("chrx")){
                        if(currentInd.getGender() == 1) {
                            // skip check when a needed parental allele is missing
                            if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0)){
                                //this is an x chrom for a male, so the only thing we need to check is if
                                //allele1 matches either momallele1 or momallele2
                                if(allele1 != momAllele1 && allele1 != momAllele2) {
                                    mendErrNum ++;
                                    MendelError mend = new MendelError(currentInd.getFamilyID(),
                                            currentInd.getIndividualID());
                                    mendels.add(mend);
                                    // zero out the whole trio at this marker
                                    currentInd.zeroOutMarker(loc);
                                    currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc);
                                    currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc);
                                }
                            }
                        }else {
                            //if gender is anything except 1 we assume female
                            if(momAllele1 == momAllele2) {
                                //mom hom and dad matches mom
                                if(dadAllele1 == momAllele1) {
                                    //kid must be hom same allele
                                    if(allele1 != momAllele1 || allele2 != momAllele2){
                                        mendErrNum ++;
                                        MendelError mend = new MendelError(currentInd.getFamilyID(),
                                                currentInd.getIndividualID());
                                        mendels.add(mend);
                                        currentInd.zeroOutMarker(loc);
                                        currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc);
                                        currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc);
                                    }
                                }else {
                                    //kid must be het
                                    if(allele1 == allele2 ){
                                        mendErrNum ++;
                                        MendelError mend = new MendelError(currentInd.getFamilyID(),
                                                currentInd.getIndividualID());
                                        mendels.add(mend);
                                        currentInd.zeroOutMarker(loc);
                                        currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc);
                                        currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc);
                                    }
                                }
                            }else{
                                //mom het,so only need to check that at least one allele matches dad
                                // (dad is haploid on X -- presumably dadAllele1 == dadAllele2 here)
                                if(allele1 != dadAllele1 && allele2 != dadAllele2){
                                    mendErrNum ++;
                                    MendelError mend = new MendelError(currentInd.getFamilyID(),
                                            currentInd.getIndividualID());
                                    mendels.add(mend);
                                    currentInd.zeroOutMarker(loc);
                                    currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc);
                                    currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc);
                                }
                            }
                        }
                    }else{
                        // === autosomal mendel check ===
                        //don't check if parents are missing any data
                        if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){
                            //mom hom
                            if(momAllele1 == momAllele2){
                                //both parents hom
                                if (dadAllele1 == dadAllele2){
                                    //both parents hom same allele
                                    if (momAllele1 == dadAllele1){
                                        //kid must be hom same allele
                                        if (allele1 != momAllele1 || allele2 != momAllele1) {
                                            mendErrNum ++;
                                            MendelError mend = new MendelError(currentInd.getFamilyID(),
                                                    currentInd.getIndividualID());
                                            mendels.add(mend);
                                            currentInd.zeroOutMarker(loc);
                                            currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc);
                                            currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc);
                                        }
                                    //parents hom diff allele
                                    }else{
                                        //kid must be het
                                        if (allele1 == allele2) {
                                            mendErrNum++;
                                            MendelError mend = new MendelError(currentInd.getFamilyID(),
                                                    currentInd.getIndividualID());
                                            mendels.add(mend);
                                            currentInd.zeroOutMarker(loc);
                                            currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc);
                                            currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc);
                                        }
                                    }
                                //mom hom dad het
                                }else{
                                    //kid can't be hom for non-momallele
                                    if (allele1 != momAllele1 && allele2 != momAllele1){
                                        mendErrNum++;
                                        MendelError mend = new MendelError(currentInd.getFamilyID(),
                                                currentInd.getIndividualID());
                                        mendels.add(mend);
                                        currentInd.zeroOutMarker(loc);
                                        currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc);
                                        currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc);
                                    }
                                }
                            //mom het
                            }else{
                                //dad hom
                                if (dadAllele1 == dadAllele2){
                                    //kid can't be hom for non-dadallele
                                    if(allele1 != dadAllele1 && allele2 != dadAllele1){
                                        mendErrNum++;
                                        MendelError mend = new MendelError(currentInd.getFamilyID(),
                                                currentInd.getIndividualID());
                                        mendels.add(mend);
                                        currentInd.zeroOutMarker(loc);
                                        currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc);
                                        currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc);
                                    }
                                }
                                //both parents het no mend err poss
                            }
                        }
                    }
                }
                //end mendel check
            }
        }

        // === Pass 2: genotype counting (must run after pass 1 so that
        // mendel-error genotypes are already zeroed) ===
        indList = currentFamily.getMemberList();
        //loop through each individual in the current Family
        while(indList.hasMoreElements()){
            currentInd = currentFamily.getMember((String)indList.nextElement());
            if (currentInd.getZeroed(loc)){
                allele1 = 0;
                allele2 = 0;
            }else{
                allele1 = currentInd.getAllele(loc,0);
                allele2 = currentInd.getAllele(loc,1);
            }
            String familyID = currentInd.getFamilyID();

            //no allele data missing
            if(allele1 > 0 && allele2 >0){
                //indiv has no parents -- i.e. is a founder
                if(!currentFamily.hasAncestor(currentInd.getIndividualID())){
                    //set founderGenoCount
                    if(founderGenoCount.containsKey(familyID)){
                        int value = ((Integer)founderGenoCount.get(familyID)).intValue() +1;
                        founderGenoCount.put(familyID, new Integer(value));
                    }else{
                        founderGenoCount.put(familyID, new Integer(1));
                    }
                    if (allele1 != 9){
                        //value of 9 means an 'h' allele for haps files...
                        count[allele1]++;
                    }
                    // second allele / het-hom tallies only for diploid data
                    // (i.e. not a male on chrX)
                    if (!Chromosome.getDataChrom().equals("chrx") || currentInd.getGender() != 1) {
                        if(allele1 != allele2 || allele1 == 9 || allele2 == 9) {
                            founderHetCount++;
                        }else{
                            founderHomCount[allele1]++;
                        }
                        if(allele2 != 9){
                            count[allele2]++;
                        }
                    }
                }else{
                    // non-founder: count toward the family's genotyped-kid total
                    if(kidgeno.containsKey(familyID)){
                        int value = ((Integer)kidgeno.get(familyID)).intValue() +1;
                        kidgeno.put(familyID, new Integer(value));
                    }
                    else{
                        kidgeno.put(familyID, new Integer(1));
                    }
                }
                // overall het/hom/haploid totals (founders and kids alike)
                if (!Chromosome.getDataChrom().equals("chrx") || currentInd.getGender() != 1) {
                    if(allele1 == allele2) {
                        hom++;
                    }else {
                        het++;
                    }
                }else{
                    haploid++;
                }
            }
            //missing data
            else missing++;
        }
        currentFamily.setMendErrs(mendErrNum);
    }

    // === Derived statistics ===
    double obsHET = getObsHET(het, hom);
    double freqStuff[] = null;
    try{
        freqStuff = getFreqStuff(count);
    }catch (PedFileException pfe){
        // re-throw with a user-facing, 1-based marker number
        throw new PedFileException("More than two alleles at marker " + (loc+1));
    }
    double preHET = freqStuff[0];
    double maf = freqStuff[1];

    // freqStuff[2]/[3] carry the minor/major allele codes (A=1, C=2, G=3, T=4)
    String minorAllele, majorAllele;
    if (freqStuff[2] == 1){
        minorAllele = "A";
    }else if (freqStuff[2] == 2){
        minorAllele = "C";
    }else if (freqStuff[2] == 3){
        minorAllele = "G";
    }else{
        minorAllele = "T";
    }
    if (freqStuff[3] == 1){
        majorAllele = "A";
    }else if (freqStuff[3] == 2){
        majorAllele = "C";
    }else if (freqStuff[3] == 3){
        majorAllele = "G";
    }else{
        majorAllele = "T";
    }

    //HW p value
    double pvalue = getPValue(founderHomCount, founderHetCount);

    //geno percent
    double genopct = getGenoPercent(het, hom, haploid, missing);

    // num of families with a fully genotyped trio
    //int famTrio =0;
    int famTrio = getNumOfFamTrio(pedFile.getFamList(), founderGenoCount, kidgeno);

    //rating
    int rating = this.getRating(genopct, pvalue, mendErrNum,maf);

    // flag the ped file the first time any marker shows mendel errors
    if (mendErrNum > 0 && !pedFile.getMendelsExist()){
        pedFile.setMendelsExist(true);
    }

    result.setObsHet(obsHET);
    result.setPredHet(preHET);
    result.setMAF(maf);
    result.setMinorAllele(minorAllele);
    result.setMajorAllele(majorAllele);
    result.setHWpvalue(pvalue);
    result.setGenoPercent(genopct);
    result.setFamTrioNum(famTrio);
    result.setMendErrNum(mendErrNum);
    result.setRating(rating);
    result.setMendelErrors(mendels);
    return result;
}
1,112,619