bugged
stringlengths
6
599k
fixed
stringlengths
10
599k
__index_level_0__
int64
0
1.13M
public void linkageToChrom(boolean[] markerResults, PedFile pedFile){ Vector indList = pedFile.getOrder(); int numMarkers = 0; Vector usedParents = new Vector(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); for(int x=0; x < indList.size(); x++){ String[] indAndFamID = (String[])indList.elementAt(x); currentFamily = pedFile.getFamily(indAndFamID[0]); currentInd = currentFamily.getMember(indAndFamID[1]); if(currentInd.getIsTyped()){ //singleton if(currentFamily.getNumMembers() == 1){ numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ //if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == thisMarker[1]){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = 5; chrom2[i] = 5; } //} } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2)); } else{ //skip if indiv is parent in trio or unaffected if (!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0") || currentInd.getAffectedStatus() != 2)){ //trio if (!(usedParents.contains( currentInd.getFamilyID() + " " + currentInd.getMomID()) || usedParents.contains(currentInd.getFamilyID() + " " + currentInd.getDadID()))){ //add 4 phased haps provided that we haven't used this trio already numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ //if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; thisMarker = 
(currentFamily.getMember(currentInd.getDadID())).getMarker(i); byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; } else { dadTb[i] = 5; dadUb[i] = 5; } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else { momTb[i] = 5; momUb[i] = 5; } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 5; momUb[i] = 5; } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = 5; 
dadUb[i] = 5; momTb[i] = 5; momUb[i] = 5; } //} } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb)); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getDadID()); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getMomID()); } } } } } double numChroms = chrom.size(); numBadGenotypes = new double[numMarkers]; percentBadGenotypes = new double[numMarkers]; //set up the indexing to take into account skipped markers. Need //to loop through twice because first time we just count number of //unskipped markers int count = 0; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ count++; } } Chromosome.realIndex = new int[count]; int k = 0; for (int i =0; i < numMarkers; i++){ if (markerResults[i]){ Chromosome.realIndex[k] = i; k++; } } //fake the marker info for now Vector markerInfo = new Vector(); for (int i = 0; i < numMarkers; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chrom.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chrom.elementAt(j)).unfilteredElementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } if (thisAllele == 0) { numBadGenotypes[i] ++; } } double maf = numa1 / (numa2+numa1) ; if (maf > 0.5) { maf = 1.0-maf; } markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chrom; Chromosome.markers = markerInfo.toArray(); //return chrom; }
public void linkageToChrom(boolean[] markerResults, PedFile pedFile){ Vector indList = pedFile.getOrder(); int numMarkers = 0; Vector usedParents = new Vector(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); for(int x=0; x < indList.size(); x++){ String[] indAndFamID = (String[])indList.elementAt(x); currentFamily = pedFile.getFamily(indAndFamID[0]); currentInd = currentFamily.getMember(indAndFamID[1]); if(currentInd.getIsTyped()){ //singleton if(currentFamily.getNumMembers() == 1){ numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ //if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == thisMarker[1]){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = 5; chrom2[i] = 5; } //} } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2)); } else{ //skip if indiv is parent in trio or unaffected if (!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0") || currentInd.getAffectedStatus() != 2)){ //trio if (!(usedParents.contains( currentInd.getFamilyID() + " " + currentInd.getMomID()) || usedParents.contains(currentInd.getFamilyID() + " " + currentInd.getDadID()))){ //add 4 phased haps provided that we haven't used this trio already numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ //if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; thisMarker = 
(currentFamily.getMember(currentInd.getDadID())).getMarker(i); byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; } else { dadTb[i] = 5; dadUb[i] = 5; } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else { momTb[i] = 5; momUb[i] = 5; } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 5; momUb[i] = 5; } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = 5; 
dadUb[i] = 5; momTb[i] = 5; momUb[i] = 5; } //} } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb)); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getDadID()); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getMomID()); } } } } } double numChroms = chrom.size(); numBadGenotypes = new double[numMarkers]; percentBadGenotypes = new double[numMarkers]; //set up the indexing to take into account skipped markers. Need //to loop through twice because first time we just count number of //unskipped markers int count = 0; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ count++; } } Chromosome.realIndex = new int[count]; int k = 0; for (int i =0; i < numMarkers; i++){ if (markerResults[i]){ Chromosome.realIndex[k] = i; k++; } } //fake the marker info for now Vector markerInfo = new Vector(); for (int i = 0; i < numMarkers; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chrom.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chrom.elementAt(j)).unfilteredElementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } if (thisAllele == 0) { numBadGenotypes[i] ++; } } double maf = numa1 / (numa2+numa1) ; if (maf > 0.5) { maf = 1.0-maf; } markerInfo.add(new SNP(String.valueOf(i), (i*3000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chrom; Chromosome.markers = markerInfo.toArray(); //return chrom; }
1,110,676
void prepareHapsInput(File infile) throws IOException{ //this method is called to suck in data from a file (its only argument) //of genotypes and sets up the Chromosome objects. String currentLine; Vector chroms = new Vector(); byte[] genos = new byte[0]; String ped, indiv; //read the file: BufferedReader in = new BufferedReader(new FileReader(infile)); boolean firstTime = true; while ((currentLine = in.readLine()) != null){ //each line is expected to be of the format: //ped indiv geno geno geno geno... StringTokenizer st = new StringTokenizer(currentLine); //first two tokens are expected to be ped, indiv ped = st.nextToken(); indiv = st.nextToken(); //all other tokens are loaded into a vector (they should all be genotypes) //the first time through, count number of genotypes for marker quality statistics if (firstTime){ numBadGenotypes = new double[st.countTokens()]; percentBadGenotypes = new double[st.countTokens()]; } genos = new byte[st.countTokens()]; int q = 0; while (st.hasMoreTokens()){ String thisGenotype = (String)st.nextElement(); if (thisGenotype.equals("h")) { genos[q] = 5; }else{ genos[q] = Byte.parseByte(thisGenotype); } if (genos[q] == 0) numBadGenotypes[q] ++; q++; } //a Chromosome is created and added to a vector of chromosomes. //this is what is evetually returned. 
chroms.add(new Chromosome(ped, indiv, genos, infile.getName())); firstTime = false; } //generate marker information in case none is subsequently available //also convert sums of bad genotypes to percentages for each marker double numChroms = chroms.size(); Vector markerInfo = new Vector(); Chromosome.realIndex = new int[genos.length]; for (int i = 0; i < genos.length; i++){ Chromosome.realIndex[i] = i; //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chroms.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chroms.elementAt(j)).unfilteredElementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chroms; Chromosome.markers = markerInfo.toArray(); //return chroms; }
void prepareHapsInput(File infile) throws IOException{ //this method is called to suck in data from a file (its only argument) //of genotypes and sets up the Chromosome objects. String currentLine; Vector chroms = new Vector(); byte[] genos = new byte[0]; String ped, indiv; //read the file: BufferedReader in = new BufferedReader(new FileReader(infile)); boolean firstTime = true; while ((currentLine = in.readLine()) != null){ //each line is expected to be of the format: //ped indiv geno geno geno geno... StringTokenizer st = new StringTokenizer(currentLine); //first two tokens are expected to be ped, indiv ped = st.nextToken(); indiv = st.nextToken(); //all other tokens are loaded into a vector (they should all be genotypes) //the first time through, count number of genotypes for marker quality statistics if (firstTime){ numBadGenotypes = new double[st.countTokens()]; percentBadGenotypes = new double[st.countTokens()]; } genos = new byte[st.countTokens()]; int q = 0; while (st.hasMoreTokens()){ String thisGenotype = (String)st.nextElement(); if (thisGenotype.equals("h")) { genos[q] = 5; }else{ genos[q] = Byte.parseByte(thisGenotype); } if (genos[q] == 0) numBadGenotypes[q] ++; q++; } //a Chromosome is created and added to a vector of chromosomes. //this is what is evetually returned. 
chroms.add(new Chromosome(ped, indiv, genos, infile.getName())); firstTime = false; } //generate marker information in case none is subsequently available //also convert sums of bad genotypes to percentages for each marker double numChroms = chroms.size(); Vector markerInfo = new Vector(); Chromosome.realIndex = new int[genos.length]; for (int i = 0; i < genos.length; i++){ Chromosome.realIndex[i] = i; //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chroms.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chroms.elementAt(j)).unfilteredElementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; markerInfo.add(new SNP(String.valueOf(i), (i*3000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chroms; Chromosome.markers = markerInfo.toArray(); //return chroms; }
1,110,677
int prepareMarkerInput(File infile) throws IOException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position String currentLine; Vector markers = new Vector(); //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; while ((currentLine = in.readLine()) != null){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).unfilteredElementAt(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } //System.out.println(numa1 + " " + numa2); double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; StringTokenizer st = new StringTokenizer(currentLine); markers.add(new SNP(st.nextToken(), Long.parseLong(st.nextToken()), infile.getName(), maf)); snpcount ++; } if (Chromosome.markers.length == markers.size()){ Chromosome.markers = markers.toArray(); markersLoaded = true; return 1; }else{ return -1; } }
int prepareMarkerInput(File infile, long maxdist) throws IOException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position String currentLine; Vector markers = new Vector(); //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; while ((currentLine = in.readLine()) != null){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).unfilteredElementAt(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } //System.out.println(numa1 + " " + numa2); double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; StringTokenizer st = new StringTokenizer(currentLine); markers.add(new SNP(st.nextToken(), Long.parseLong(st.nextToken()), infile.getName(), maf)); snpcount ++; } if (Chromosome.markers.length == markers.size()){ Chromosome.markers = markers.toArray(); markersLoaded = true; return 1; }else{ return -1; } }
1,110,678
public CreateRelationshipAction(PlayPen pp) { super("Create Relationship"); this.pp = pp; pp.addSelectionListener(this); }
public CreateRelationshipAction() { super("Create Relationship"); this.pp = pp; pp.addSelectionListener(this); }
1,110,680
public CreateRelationshipAction(PlayPen pp) { super("Create Relationship"); this.pp = pp; pp.addSelectionListener(this); }
public CreateRelationshipAction(PlayPen pp) { super("Create Relationship"); this.pp = pp; pp.addSelectionListener(this); }
1,110,681
EM(){ //an old-school speedup courtesy of mjdaly two_n[0]=1; for (int i=1; i<31; i++){ two_n[i]=2*two_n[i-1]; } }
EM(Vector chromosomes, int numTrios){ //an old-school speedup courtesy of mjdaly two_n[0]=1; for (int i=1; i<31; i++){ two_n[i]=2*two_n[i-1]; } }
1,110,683
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; boolean trioPhasing = true; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (trioPhasing) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat 
when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) { /* what we really need to do is go through and pare back to using a smaller number (e.g., > .002, .005) //printf("too many possibilities: %d\n",poss_full); return(-5); }*/ double[] superprob = new double[poss_full]; 
create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); 
System.out.println(tempHap); } */ double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(trioPhasing) { int best1=0,best2=0,h1,h2; double tempnorm=0,product,bestProduct=0; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { best1=0; best2=0; bestProduct=-999.999; tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (product > bestProduct) { best1=n; best2=m; bestProduct=product; } if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } EMReturn results; Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int 
j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } if (trioPhasing){ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU); }else{ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs); } return results; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
private void full_em_breakup( byte[][] input_haplos, int[] block_size, int numTrios, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; boolean trioPhasing = true; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (trioPhasing) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close 
to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) { /* what we really need to do is go through and pare back to using a smaller number (e.g., > .002, .005) //printf("too many possibilities: %d\n",poss_full); return(-5); }*/ double[] superprob = new double[poss_full]; create_super_haplos(num_indivs,num_blocks,num_hlist); 
/* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); System.out.println(tempHap); } */ double[] tempT,totalT,tempU,totalU; 
Vector obsT = new Vector(); Vector obsU = new Vector(); if(trioPhasing) { int best1=0,best2=0,h1,h2; double tempnorm=0,product,bestProduct=0; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { best1=0; best2=0; bestProduct=-999.999; tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (product > bestProduct) { best1=n; best2=m; bestProduct=product; } if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } EMReturn results; Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = 
((Double)haplo_freq.elementAt(j)).doubleValue(); } if (trioPhasing){ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU); }else{ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs); } return results; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
1,110,685
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; boolean trioPhasing = true; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (trioPhasing) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat 
when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) { /* what we really need to do is go through and pare back to using a smaller number (e.g., > .002, .005) //printf("too many possibilities: %d\n",poss_full); return(-5); }*/ double[] superprob = new double[poss_full]; 
create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); 
System.out.println(tempHap); } */ double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(trioPhasing) { int best1=0,best2=0,h1,h2; double tempnorm=0,product,bestProduct=0; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { best1=0; best2=0; bestProduct=-999.999; tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (product > bestProduct) { best1=n; best2=m; bestProduct=product; } if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } EMReturn results; Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int 
j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } if (trioPhasing){ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU); }else{ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs); } return results; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (trioPhasing) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close 
to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) { /* what we really need to do is go through and pare back to using a smaller number (e.g., > .002, .005) //printf("too many possibilities: %d\n",poss_full); return(-5); }*/ double[] superprob = new double[poss_full]; create_super_haplos(num_indivs,num_blocks,num_hlist); 
/* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); System.out.println(tempHap); } */ double[] tempT,totalT,tempU,totalU; 
Vector obsT = new Vector(); Vector obsU = new Vector(); if(trioPhasing) { int best1=0,best2=0,h1,h2; double tempnorm=0,product,bestProduct=0; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { best1=0; best2=0; bestProduct=-999.999; tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (product > bestProduct) { best1=n; best2=m; bestProduct=product; } if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } EMReturn results; Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = 
((Double)haplo_freq.elementAt(j)).doubleValue(); } if (trioPhasing){ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU); }else{ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs); } return results; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
1,110,686
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; boolean trioPhasing = true; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (trioPhasing) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat 
when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) { /* what we really need to do is go through and pare back to using a smaller number (e.g., > .002, .005) //printf("too many possibilities: %d\n",poss_full); return(-5); }*/ double[] superprob = new double[poss_full]; 
create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); 
System.out.println(tempHap); } */ double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(trioPhasing) { int best1=0,best2=0,h1,h2; double tempnorm=0,product,bestProduct=0; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { best1=0; best2=0; bestProduct=-999.999; tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (product > bestProduct) { best1=n; best2=m; bestProduct=product; } if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } EMReturn results; Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int 
j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } if (trioPhasing){ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU); }else{ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs); } return results; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; boolean trioPhasing = true; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (Options.getAssocTest() == 2) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype 
- i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) { /* what we really need to do is go through and pare back to using a smaller number (e.g., > .002, .005) //printf("too many possibilities: %d\n",poss_full); return(-5); }*/ double[] superprob = new double[poss_full]; 
create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); 
System.out.println(tempHap); } */ double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(trioPhasing) { int best1=0,best2=0,h1,h2; double tempnorm=0,product,bestProduct=0; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { best1=0; best2=0; bestProduct=-999.999; tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (product > bestProduct) { best1=n; best2=m; bestProduct=product; } if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } EMReturn results; Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int 
j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } if (trioPhasing){ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU); }else{ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs); } return results; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
1,110,687
/**
 * Partition-ligation EM haplotype phasing.
 *
 * <p>The marker window is split into the sub-blocks listed in {@code block_size}. Within each
 * sub-block a pseudocount-seeded EM (fixed 20 iterations) estimates block-haplotype
 * frequencies; haplotypes with p &gt; .001 survive. The per-block survivors are then combined
 * into "super" haplotypes spanning the whole window and a second 20-iteration EM is run over
 * those combinations. Haplotypes whose final frequency exceeds .001 are decoded and returned.
 *
 * @param input_haplos       input haplotype rows, two per individual ({@code data} and
 *                           {@code superdata} are sized num_haplos/2); [num_haplos][num_loci]
 * @param max_missing        not referenced in this method body
 * @param block_size         number of loci in each ligation sub-block
 * @param dump_phased_haplos not referenced (see the commented-out dump code at the end)
 * @param numTrios           number of parent pairs; drives the transmitted/untransmitted
 *                           tally — assumes trio parents occupy the leading slots of
 *                           {@code superdata} in adjacent pairs (TODO confirm with caller)
 * @return an EMReturn with the retained haplotypes and frequencies, plus
 *         transmitted/untransmitted observations when trio phasing is enabled
 * @throws HaploViewException if a single block is wider than MAXLOCI loci
 */
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{
    int num_poss, iter;//, maxk, numk;
    double total;//, maxprob;
    int block, start_locus, end_locus, biggest_block_size;
    int poss_full;//, best, h1, h2;
    int num_indivs=0;
    boolean trioPhasing = true;
    int num_blocks = block_size.length;
    int num_haplos = input_haplos.length;
    int num_loci = input_haplos[0].length;
    Recovery tempRec;
    if (num_loci > MAXLOCI){
        throw new HaploViewException("Too many loci in a single block (> 100)");
    }
    //figure out the size of the biggest block
    biggest_block_size=block_size[0];
    for (int i=1; i<num_blocks; i++) {
        if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i];
    }
    num_poss = two_n[biggest_block_size];
    data = new OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) data[i]= new OBS();
    superdata = new SUPER_OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks);
    double[][] hprob = new double[num_blocks][num_poss];
    int[][] hlist = new int[num_blocks][num_poss];
    int[] num_hlist = new int[num_blocks];
    int[] hint = new int[num_poss];
    prob = new double[num_poss];
    /* for trio option */
    // NOTE(review): the double-het status stored here is presumably what kid_consistent()
    // consults in the trio pass below — keep this guard in sync with that pass (both key
    // off trioPhasing).
    if (trioPhasing) {
        ambighet = new int[(num_haplos/4)][num_loci];
        store_dhet_status(num_haplos,num_loci,input_haplos);
    }
    end_locus=-1;
    //System.out.println("made it to 110");
    //now we loop through the blocks
    for (block=0; block<num_blocks; block++) {
        start_locus=end_locus+1;
        end_locus=start_locus+block_size[block]-1;
        num_poss=two_n[block_size[block]];
        //read_observations initializes the values in data[] (array of OBS)
        num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus);
        // start prob array with probabilities from full observations
        for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; }
        total=(double)num_poss;
        total *= PSEUDOCOUNT;
        /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype
           - i.e., flat when nothing is known, close to phase known if a great deal is known */
        for (int i=0; i<num_indivs; i++) {
            if (data[i].nposs==1) {
                tempRec = (Recovery)data[i].poss.elementAt(0);
                prob[tempRec.h1]+=1.0;
                prob[tempRec.h2]+=1.0;
                total+=2.0;
            }
        }
        // normalize
        for (int j=0; j<num_poss; j++) { prob[j] /= total; }
        // EM LOOP: assign ambiguous data based on p, then re-estimate p
        iter=0;
        while (iter<20) {
            // compute probabilities of each possible observation
            for (int i=0; i<num_indivs; i++) {
                total=0.0;
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]);
                    total+=tempRec.p;
                }
                // normalize
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p /= total;
                }
            }
            // re-estimate prob
            for (int j=0; j<num_poss; j++) { prob[j]=1e-10; }
            total=num_poss*1e-10;
            for (int i=0; i<num_indivs; i++) {
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    prob[tempRec.h1]+=tempRec.p;
                    prob[tempRec.h2]+=tempRec.p;
                    total+=(2.0*(tempRec.p));
                }
            }
            // normalize
            for (int j=0; j<num_poss; j++) { prob[j] /= total; }
            iter++;
        }
        // printf("FINAL PROBABILITIES:\n");
        // keep only block haplotypes above the .001 cutoff; hint[] maps a raw haplotype
        // code to its survivor-list index (-1 if pruned)
        int m=0;
        for (int j=0; j<num_poss; j++) {
            hint[j]=-1;
            if (prob[j] > .001) {
                // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]);
                hlist[block][m]=j;
                hprob[block][m]=prob[j];
                hint[j]=m;
                m++;
            }
        }
        num_hlist[block]=m;
        // store current block results in super obs structure
        store_block_haplos(hlist, hprob, hint, block, num_indivs);
    } /* for each block */
    poss_full=1;
    for (block=0; block<num_blocks; block++) {
        poss_full *= num_hlist[block];
    }
    //TODO:System.out.println(poss_full);
    /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) { /* what we really need to do is go through and pare back to using a smaller number (e.g., > .002, .005) //printf("too many possibilities: %d\n",poss_full); return(-5); }*/
    double[] superprob = new double[poss_full];
    create_super_haplos(num_indivs,num_blocks,num_hlist);
    /* run standard EM on supercombos */
    /* start prob array with probabilities from full observations */
    for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; }
    total=(double)poss_full;
    total *= PSEUDOCOUNT;
    //System.out.println("made it to 232");
    /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype
       - i.e., flat when nothing is known, close to phase known if a great deal is known */
    for (int i=0; i<num_indivs; i++) {
        if (superdata[i].nsuper==1) {
            superprob[superdata[i].superposs[0].h1]+=1.0;
            superprob[superdata[i].superposs[0].h2]+=1.0;
            total+=2.0;
        }
    }
    /* normalize */
    for (int j=0; j<poss_full; j++) { superprob[j] /= total; }
    /* EM LOOP: assign ambiguous data based on p, then re-estimate p */
    iter=0;
    while (iter<20) {
        /* compute probabilities of each possible observation */
        for (int i=0; i<num_indivs; i++) {
            total=0.0;
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p = (float)
                        (superprob[superdata[i].superposs[k].h1]*
                                superprob[superdata[i].superposs[k].h2]);
                total+=superdata[i].superposs[k].p;
            }
            /* normalize */
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p /= total;
            }
        }
        /* re-estimate prob */
        for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; }
        total=poss_full*1e-10;
        for (int i=0; i<num_indivs; i++) {
            for (int k=0; k<superdata[i].nsuper; k++) {
                superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p;
                superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p;
                total+=(2.0*superdata[i].superposs[k].p);
            }
        }
        /* normalize */
        for (int j=0; j<poss_full; j++) { superprob[j] /= total; }
        iter++;
    }
    //System.out.println("made it to 290");
    /* we're done - the indices of superprob now have to be decoded
       to reveal the actual haplotypes they represent */
    /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); System.out.println(tempHap); } */
    double[] tempT,totalT,tempU,totalU;
    Vector obsT = new Vector();
    Vector obsU = new Vector();
    if(trioPhasing) {
        // Per-trio tally of the apparently transmitted (h1) vs untransmitted (h2)
        // haplotypes over all kid-consistent parent-pair phasings, normalized within
        // each trio — TODO confirm T/U semantics against EMReturn's consumers.
        int best1=0,best2=0,h1,h2;
        double tempnorm=0,product,bestProduct=0;
        tempT = new double[poss_full];
        totalT = new double[poss_full];
        tempU = new double[poss_full];
        totalU = new double[poss_full];
        for (int i=0; i<numTrios*2; i+=2) {
            best1=0;
            best2=0;
            bestProduct=-999.999;
            tempnorm=0.00;
            for (int n=0; n<superdata[i].nsuper; n++) {
                for (int m=0; m<superdata[i+1].nsuper; m++) {
                    if (kid_consistent(superdata[i].superposs[n].h1,
                            superdata[i+1].superposs[m].h1,num_blocks,
                            block_size,hlist,num_hlist,i/2,num_loci)) {
                        product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p;
                        if (product > bestProduct) {
                            best1=n;
                            best2=m;
                            bestProduct=product;
                        }
                        // only heterozygous phasings contribute to the T/U counts
                        if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) {
                            tempT[superdata[i].superposs[n].h1]+=product;
                            tempU[superdata[i].superposs[n].h2]+=product;
                        }
                        if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) {
                            tempT[superdata[i+1].superposs[m].h1]+=product;
                            tempU[superdata[i+1].superposs[m].h2]+=product;
                        }
                        /* normalize by all possibilities, even double hom */
                        tempnorm+=product;
                    }
                }
            }
            if (tempnorm > 0.00) {
                for (int j=0; j<poss_full; j++) {
                    if (tempT[j] > 0.0000 || tempU[j] > 0.0000) {
                        totalT[j] += (tempT[j]/tempnorm);
                        totalU[j] += (tempU[j]/tempnorm);
                        tempT[j]=tempU[j]=0.0000;
                    }
                }
                tempnorm=0.00;
            }
        }
        // emit T/U counts in the same order as the retained haplotypes below
        for (int j = 0; j <poss_full; j++){
            if (superprob[j] > .001) {
                obsT.add(new Double(totalT[j]));
                obsU.add(new Double(totalU[j]));
            }
        }
    }
    EMReturn results;
    Vector haplos_present = new Vector();
    Vector haplo_freq= new Vector();
    for (int j=0; j<poss_full; j++) {
        if (superprob[j] > .001) {
            haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            haplo_freq.addElement(new Double(superprob[j]));
        }
    }
    double[] freqs = new double[haplo_freq.size()];
    for(int j=0;j<haplo_freq.size();j++) {
        freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue();
    }
    if (trioPhasing){
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU);
    }else{
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs);
    }
    return results;
    /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */
    //return 0;
}
/**
 * Partition-ligation EM haplotype phasing.
 *
 * <p>The marker window is split into the sub-blocks listed in {@code block_size}. Within each
 * sub-block a pseudocount-seeded EM (fixed 20 iterations) estimates block-haplotype
 * frequencies; haplotypes with p &gt; .001 survive. The per-block survivors are then combined
 * into "super" haplotypes spanning the whole window and a second 20-iteration EM is run over
 * those combinations. Haplotypes whose final frequency exceeds .001 are decoded and returned.
 *
 * @param input_haplos       input haplotype rows, two per individual ({@code data} and
 *                           {@code superdata} are sized num_haplos/2); [num_haplos][num_loci]
 * @param max_missing        not referenced in this method body
 * @param block_size         number of loci in each ligation sub-block
 * @param dump_phased_haplos not referenced (see the commented-out dump code at the end)
 * @param numTrios           number of parent pairs; drives the transmitted/untransmitted
 *                           tally — assumes trio parents occupy the leading slots of
 *                           {@code superdata} in adjacent pairs (TODO confirm with caller)
 * @return an EMReturn with the retained haplotypes and frequencies, plus
 *         transmitted/untransmitted observations when trio phasing is enabled
 * @throws HaploViewException if a single block is wider than MAXLOCI loci
 */
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{
    int num_poss, iter;//, maxk, numk;
    double total;//, maxprob;
    int block, start_locus, end_locus, biggest_block_size;
    int poss_full;//, best, h1, h2;
    int num_indivs=0;
    boolean trioPhasing = true;
    int num_blocks = block_size.length;
    int num_haplos = input_haplos.length;
    int num_loci = input_haplos[0].length;
    Recovery tempRec;
    if (num_loci > MAXLOCI){
        throw new HaploViewException("Too many loci in a single block (> 100)");
    }
    //figure out the size of the biggest block
    biggest_block_size=block_size[0];
    for (int i=1; i<num_blocks; i++) {
        if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i];
    }
    num_poss = two_n[biggest_block_size];
    data = new OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) data[i]= new OBS();
    superdata = new SUPER_OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks);
    double[][] hprob = new double[num_blocks][num_poss];
    int[][] hlist = new int[num_blocks][num_poss];
    int[] num_hlist = new int[num_blocks];
    int[] hint = new int[num_poss];
    prob = new double[num_poss];
    /* for trio option */
    // NOTE(review): the double-het status stored here is presumably what kid_consistent()
    // consults in the trio pass below — keep this guard in sync with that pass (both key
    // off trioPhasing).
    if (trioPhasing) {
        ambighet = new int[(num_haplos/4)][num_loci];
        store_dhet_status(num_haplos,num_loci,input_haplos);
    }
    end_locus=-1;
    //System.out.println("made it to 110");
    //now we loop through the blocks
    for (block=0; block<num_blocks; block++) {
        start_locus=end_locus+1;
        end_locus=start_locus+block_size[block]-1;
        num_poss=two_n[block_size[block]];
        //read_observations initializes the values in data[] (array of OBS)
        num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus);
        // start prob array with probabilities from full observations
        for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; }
        total=(double)num_poss;
        total *= PSEUDOCOUNT;
        /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype
           - i.e., flat when nothing is known, close to phase known if a great deal is known */
        for (int i=0; i<num_indivs; i++) {
            if (data[i].nposs==1) {
                tempRec = (Recovery)data[i].poss.elementAt(0);
                prob[tempRec.h1]+=1.0;
                prob[tempRec.h2]+=1.0;
                total+=2.0;
            }
        }
        // normalize
        for (int j=0; j<num_poss; j++) { prob[j] /= total; }
        // EM LOOP: assign ambiguous data based on p, then re-estimate p
        iter=0;
        while (iter<20) {
            // compute probabilities of each possible observation
            for (int i=0; i<num_indivs; i++) {
                total=0.0;
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]);
                    total+=tempRec.p;
                }
                // normalize
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p /= total;
                }
            }
            // re-estimate prob
            for (int j=0; j<num_poss; j++) { prob[j]=1e-10; }
            total=num_poss*1e-10;
            for (int i=0; i<num_indivs; i++) {
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    prob[tempRec.h1]+=tempRec.p;
                    prob[tempRec.h2]+=tempRec.p;
                    total+=(2.0*(tempRec.p));
                }
            }
            // normalize
            for (int j=0; j<num_poss; j++) { prob[j] /= total; }
            iter++;
        }
        // printf("FINAL PROBABILITIES:\n");
        // keep only block haplotypes above the .001 cutoff; hint[] maps a raw haplotype
        // code to its survivor-list index (-1 if pruned)
        int m=0;
        for (int j=0; j<num_poss; j++) {
            hint[j]=-1;
            if (prob[j] > .001) {
                // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]);
                hlist[block][m]=j;
                hprob[block][m]=prob[j];
                hint[j]=m;
                m++;
            }
        }
        num_hlist[block]=m;
        // store current block results in super obs structure
        store_block_haplos(hlist, hprob, hint, block, num_indivs);
    } /* for each block */
    poss_full=1;
    for (block=0; block<num_blocks; block++) {
        poss_full *= num_hlist[block];
    }
    //TODO:System.out.println(poss_full);
    /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) { /* what we really need to do is go through and pare back to using a smaller number (e.g., > .002, .005) //printf("too many possibilities: %d\n",poss_full); return(-5); }*/
    double[] superprob = new double[poss_full];
    create_super_haplos(num_indivs,num_blocks,num_hlist);
    /* run standard EM on supercombos */
    /* start prob array with probabilities from full observations */
    for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; }
    total=(double)poss_full;
    total *= PSEUDOCOUNT;
    //System.out.println("made it to 232");
    /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype
       - i.e., flat when nothing is known, close to phase known if a great deal is known */
    for (int i=0; i<num_indivs; i++) {
        if (superdata[i].nsuper==1) {
            superprob[superdata[i].superposs[0].h1]+=1.0;
            superprob[superdata[i].superposs[0].h2]+=1.0;
            total+=2.0;
        }
    }
    /* normalize */
    for (int j=0; j<poss_full; j++) { superprob[j] /= total; }
    /* EM LOOP: assign ambiguous data based on p, then re-estimate p */
    iter=0;
    while (iter<20) {
        /* compute probabilities of each possible observation */
        for (int i=0; i<num_indivs; i++) {
            total=0.0;
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p = (float)
                        (superprob[superdata[i].superposs[k].h1]*
                                superprob[superdata[i].superposs[k].h2]);
                total+=superdata[i].superposs[k].p;
            }
            /* normalize */
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p /= total;
            }
        }
        /* re-estimate prob */
        for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; }
        total=poss_full*1e-10;
        for (int i=0; i<num_indivs; i++) {
            for (int k=0; k<superdata[i].nsuper; k++) {
                superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p;
                superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p;
                total+=(2.0*superdata[i].superposs[k].p);
            }
        }
        /* normalize */
        for (int j=0; j<poss_full; j++) { superprob[j] /= total; }
        iter++;
    }
    //System.out.println("made it to 290");
    /* we're done - the indices of superprob now have to be decoded
       to reveal the actual haplotypes they represent */
    /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); System.out.println(tempHap); } */
    double[] tempT,totalT,tempU,totalU;
    Vector obsT = new Vector();
    Vector obsU = new Vector();
    if(trioPhasing) {
        // Per-trio tally of the apparently transmitted (h1) vs untransmitted (h2)
        // haplotypes over all kid-consistent parent-pair phasings, normalized within
        // each trio — TODO confirm T/U semantics against EMReturn's consumers.
        int best1=0,best2=0,h1,h2;
        double tempnorm=0,product,bestProduct=0;
        tempT = new double[poss_full];
        totalT = new double[poss_full];
        tempU = new double[poss_full];
        totalU = new double[poss_full];
        for (int i=0; i<numTrios*2; i+=2) {
            best1=0;
            best2=0;
            bestProduct=-999.999;
            tempnorm=0.00;
            for (int n=0; n<superdata[i].nsuper; n++) {
                for (int m=0; m<superdata[i+1].nsuper; m++) {
                    if (kid_consistent(superdata[i].superposs[n].h1,
                            superdata[i+1].superposs[m].h1,num_blocks,
                            block_size,hlist,num_hlist,i/2,num_loci)) {
                        product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p;
                        if (product > bestProduct) {
                            best1=n;
                            best2=m;
                            bestProduct=product;
                        }
                        // only heterozygous phasings contribute to the T/U counts
                        if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) {
                            tempT[superdata[i].superposs[n].h1]+=product;
                            tempU[superdata[i].superposs[n].h2]+=product;
                        }
                        if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) {
                            tempT[superdata[i+1].superposs[m].h1]+=product;
                            tempU[superdata[i+1].superposs[m].h2]+=product;
                        }
                        /* normalize by all possibilities, even double hom */
                        tempnorm+=product;
                    }
                }
            }
            if (tempnorm > 0.00) {
                for (int j=0; j<poss_full; j++) {
                    if (tempT[j] > 0.0000 || tempU[j] > 0.0000) {
                        totalT[j] += (tempT[j]/tempnorm);
                        totalU[j] += (tempU[j]/tempnorm);
                        tempT[j]=tempU[j]=0.0000;
                    }
                }
                tempnorm=0.00;
            }
        }
        // emit T/U counts in the same order as the retained haplotypes below
        for (int j = 0; j <poss_full; j++){
            if (superprob[j] > .001) {
                obsT.add(new Double(totalT[j]));
                obsU.add(new Double(totalU[j]));
            }
        }
    }
    EMReturn results;
    Vector haplos_present = new Vector();
    Vector haplo_freq= new Vector();
    for (int j=0; j<poss_full; j++) {
        if (superprob[j] > .001) {
            haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            haplo_freq.addElement(new Double(superprob[j]));
        }
    }
    double[] freqs = new double[haplo_freq.size()];
    for(int j=0;j<haplo_freq.size();j++) {
        freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue();
    }
    if (trioPhasing){
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU);
    }else{
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs);
    }
    return results;
    /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */
    //return 0;
}
1,110,688
/**
 * Partition-ligation EM haplotype phasing.
 *
 * <p>The marker window is split into the sub-blocks listed in {@code block_size}. Within each
 * sub-block a pseudocount-seeded EM (fixed 20 iterations) estimates block-haplotype
 * frequencies; haplotypes with p &gt; .001 survive. The per-block survivors are then combined
 * into "super" haplotypes spanning the whole window and a second 20-iteration EM is run over
 * those combinations. Haplotypes whose final frequency exceeds .001 are decoded and returned.
 *
 * @param input_haplos       input haplotype rows, two per individual ({@code data} and
 *                           {@code superdata} are sized num_haplos/2); [num_haplos][num_loci]
 * @param max_missing        not referenced in this method body
 * @param block_size         number of loci in each ligation sub-block
 * @param dump_phased_haplos not referenced (see the commented-out dump code at the end)
 * @param numTrios           number of parent pairs; drives the transmitted/untransmitted
 *                           tally — assumes trio parents occupy the leading slots of
 *                           {@code superdata} in adjacent pairs (TODO confirm with caller)
 * @return an EMReturn with the retained haplotypes and frequencies, plus
 *         transmitted/untransmitted observations when trio phasing is enabled
 * @throws HaploViewException if a single block is wider than MAXLOCI loci
 */
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{
    int num_poss, iter;//, maxk, numk;
    double total;//, maxprob;
    int block, start_locus, end_locus, biggest_block_size;
    int poss_full;//, best, h1, h2;
    int num_indivs=0;
    boolean trioPhasing = true;
    int num_blocks = block_size.length;
    int num_haplos = input_haplos.length;
    int num_loci = input_haplos[0].length;
    Recovery tempRec;
    if (num_loci > MAXLOCI){
        throw new HaploViewException("Too many loci in a single block (> 100)");
    }
    //figure out the size of the biggest block
    biggest_block_size=block_size[0];
    for (int i=1; i<num_blocks; i++) {
        if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i];
    }
    num_poss = two_n[biggest_block_size];
    data = new OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) data[i]= new OBS();
    superdata = new SUPER_OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks);
    double[][] hprob = new double[num_blocks][num_poss];
    int[][] hlist = new int[num_blocks][num_poss];
    int[] num_hlist = new int[num_blocks];
    int[] hint = new int[num_poss];
    prob = new double[num_poss];
    /* for trio option */
    // NOTE(review): the double-het status stored here is presumably what kid_consistent()
    // consults in the trio pass below — keep this guard in sync with that pass (both key
    // off trioPhasing).
    if (trioPhasing) {
        ambighet = new int[(num_haplos/4)][num_loci];
        store_dhet_status(num_haplos,num_loci,input_haplos);
    }
    end_locus=-1;
    //System.out.println("made it to 110");
    //now we loop through the blocks
    for (block=0; block<num_blocks; block++) {
        start_locus=end_locus+1;
        end_locus=start_locus+block_size[block]-1;
        num_poss=two_n[block_size[block]];
        //read_observations initializes the values in data[] (array of OBS)
        num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus);
        // start prob array with probabilities from full observations
        for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; }
        total=(double)num_poss;
        total *= PSEUDOCOUNT;
        /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype
           - i.e., flat when nothing is known, close to phase known if a great deal is known */
        for (int i=0; i<num_indivs; i++) {
            if (data[i].nposs==1) {
                tempRec = (Recovery)data[i].poss.elementAt(0);
                prob[tempRec.h1]+=1.0;
                prob[tempRec.h2]+=1.0;
                total+=2.0;
            }
        }
        // normalize
        for (int j=0; j<num_poss; j++) { prob[j] /= total; }
        // EM LOOP: assign ambiguous data based on p, then re-estimate p
        iter=0;
        while (iter<20) {
            // compute probabilities of each possible observation
            for (int i=0; i<num_indivs; i++) {
                total=0.0;
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]);
                    total+=tempRec.p;
                }
                // normalize
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p /= total;
                }
            }
            // re-estimate prob
            for (int j=0; j<num_poss; j++) { prob[j]=1e-10; }
            total=num_poss*1e-10;
            for (int i=0; i<num_indivs; i++) {
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    prob[tempRec.h1]+=tempRec.p;
                    prob[tempRec.h2]+=tempRec.p;
                    total+=(2.0*(tempRec.p));
                }
            }
            // normalize
            for (int j=0; j<num_poss; j++) { prob[j] /= total; }
            iter++;
        }
        // printf("FINAL PROBABILITIES:\n");
        // keep only block haplotypes above the .001 cutoff; hint[] maps a raw haplotype
        // code to its survivor-list index (-1 if pruned)
        int m=0;
        for (int j=0; j<num_poss; j++) {
            hint[j]=-1;
            if (prob[j] > .001) {
                // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]);
                hlist[block][m]=j;
                hprob[block][m]=prob[j];
                hint[j]=m;
                m++;
            }
        }
        num_hlist[block]=m;
        // store current block results in super obs structure
        store_block_haplos(hlist, hprob, hint, block, num_indivs);
    } /* for each block */
    poss_full=1;
    for (block=0; block<num_blocks; block++) {
        poss_full *= num_hlist[block];
    }
    //TODO:System.out.println(poss_full);
    /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) { /* what we really need to do is go through and pare back to using a smaller number (e.g., > .002, .005) //printf("too many possibilities: %d\n",poss_full); return(-5); }*/
    double[] superprob = new double[poss_full];
    create_super_haplos(num_indivs,num_blocks,num_hlist);
    /* run standard EM on supercombos */
    /* start prob array with probabilities from full observations */
    for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; }
    total=(double)poss_full;
    total *= PSEUDOCOUNT;
    //System.out.println("made it to 232");
    /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype
       - i.e., flat when nothing is known, close to phase known if a great deal is known */
    for (int i=0; i<num_indivs; i++) {
        if (superdata[i].nsuper==1) {
            superprob[superdata[i].superposs[0].h1]+=1.0;
            superprob[superdata[i].superposs[0].h2]+=1.0;
            total+=2.0;
        }
    }
    /* normalize */
    for (int j=0; j<poss_full; j++) { superprob[j] /= total; }
    /* EM LOOP: assign ambiguous data based on p, then re-estimate p */
    iter=0;
    while (iter<20) {
        /* compute probabilities of each possible observation */
        for (int i=0; i<num_indivs; i++) {
            total=0.0;
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p = (float)
                        (superprob[superdata[i].superposs[k].h1]*
                                superprob[superdata[i].superposs[k].h2]);
                total+=superdata[i].superposs[k].p;
            }
            /* normalize */
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p /= total;
            }
        }
        /* re-estimate prob */
        for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; }
        total=poss_full*1e-10;
        for (int i=0; i<num_indivs; i++) {
            for (int k=0; k<superdata[i].nsuper; k++) {
                superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p;
                superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p;
                total+=(2.0*superdata[i].superposs[k].p);
            }
        }
        /* normalize */
        for (int j=0; j<poss_full; j++) { superprob[j] /= total; }
        iter++;
    }
    //System.out.println("made it to 290");
    /* we're done - the indices of superprob now have to be decoded
       to reveal the actual haplotypes they represent */
    /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); System.out.println(tempHap); } */
    double[] tempT,totalT,tempU,totalU;
    Vector obsT = new Vector();
    Vector obsU = new Vector();
    if(trioPhasing) {
        // Per-trio tally of the apparently transmitted (h1) vs untransmitted (h2)
        // haplotypes over all kid-consistent parent-pair phasings, normalized within
        // each trio — TODO confirm T/U semantics against EMReturn's consumers.
        int best1=0,best2=0,h1,h2;
        double tempnorm=0,product,bestProduct=0;
        tempT = new double[poss_full];
        totalT = new double[poss_full];
        tempU = new double[poss_full];
        totalU = new double[poss_full];
        for (int i=0; i<numTrios*2; i+=2) {
            best1=0;
            best2=0;
            bestProduct=-999.999;
            tempnorm=0.00;
            for (int n=0; n<superdata[i].nsuper; n++) {
                for (int m=0; m<superdata[i+1].nsuper; m++) {
                    if (kid_consistent(superdata[i].superposs[n].h1,
                            superdata[i+1].superposs[m].h1,num_blocks,
                            block_size,hlist,num_hlist,i/2,num_loci)) {
                        product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p;
                        if (product > bestProduct) {
                            best1=n;
                            best2=m;
                            bestProduct=product;
                        }
                        // only heterozygous phasings contribute to the T/U counts
                        if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) {
                            tempT[superdata[i].superposs[n].h1]+=product;
                            tempU[superdata[i].superposs[n].h2]+=product;
                        }
                        if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) {
                            tempT[superdata[i+1].superposs[m].h1]+=product;
                            tempU[superdata[i+1].superposs[m].h2]+=product;
                        }
                        /* normalize by all possibilities, even double hom */
                        tempnorm+=product;
                    }
                }
            }
            if (tempnorm > 0.00) {
                for (int j=0; j<poss_full; j++) {
                    if (tempT[j] > 0.0000 || tempU[j] > 0.0000) {
                        totalT[j] += (tempT[j]/tempnorm);
                        totalU[j] += (tempU[j]/tempnorm);
                        tempT[j]=tempU[j]=0.0000;
                    }
                }
                tempnorm=0.00;
            }
        }
        // emit T/U counts in the same order as the retained haplotypes below
        for (int j = 0; j <poss_full; j++){
            if (superprob[j] > .001) {
                obsT.add(new Double(totalT[j]));
                obsU.add(new Double(totalU[j]));
            }
        }
    }
    EMReturn results;
    Vector haplos_present = new Vector();
    Vector haplo_freq= new Vector();
    for (int j=0; j<poss_full; j++) {
        if (superprob[j] > .001) {
            haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            haplo_freq.addElement(new Double(superprob[j]));
        }
    }
    double[] freqs = new double[haplo_freq.size()];
    for(int j=0;j<haplo_freq.size();j++) {
        freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue();
    }
    if (trioPhasing){
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU);
    }else{
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs);
    }
    return results;
    /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */
    //return 0;
}
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; boolean trioPhasing = true; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (trioPhasing) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat 
when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) { /* what we really need to do is go through and pare back to using a smaller number (e.g., > .002, .005) //printf("too many possibilities: %d\n",poss_full); return(-5); }*/ double[] superprob = new double[poss_full]; 
create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); 
System.out.println(tempHap); } */ double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(Options.getAssocTest() == 2) { int best1=0,best2=0,h1,h2; double tempnorm=0,product,bestProduct=0; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { best1=0; best2=0; bestProduct=-999.999; tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (product > bestProduct) { best1=n; best2=m; bestProduct=product; } if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } EMReturn results; Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new 
double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } if (trioPhasing){ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU); }else{ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs); } return results; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
1,110,689
/**
 * Block-wise ("partition/ligation") EM haplotype phasing of the input genotype data.
 * <p>
 * Phase 1: for each contiguous block of markers a 20-iteration EM loop estimates
 * within-block haplotype frequencies; haplotypes with estimated frequency &gt; .001
 * survive. Phase 2: the survivors are combined across blocks into "super" haplotypes
 * and a second 20-iteration EM loop estimates their frequencies. When trio phasing is
 * on, probability-weighted T/U observation counts are accumulated per trio (the T/U
 * names suggest transmitted/untransmitted, TDT-style — TODO confirm against callers).
 *
 * @param input_haplos       one row per chromosome (two consecutive rows per
 *                           individual), one column per marker
 * @param max_missing        unused in this method
 * @param block_size         number of markers in each block; blocks are contiguous
 *                           and partition the loci in order
 * @param dump_phased_haplos unused in this method (see the commented-out dump code
 *                           at the end)
 * @param numTrios           number of trios; each trio occupies two consecutive
 *                           slots of superdata — TODO confirm pair ordering
 * @return an EMReturn holding the haplotypes with frequency &gt; .001, their
 *         frequencies, and (with trio phasing) the parallel obsT/obsU vectors
 * @throws HaploViewException if there are more than MAXLOCI loci
 */
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{
    int num_poss, iter;//, maxk, numk;
    double total;//, maxprob;
    int block, start_locus, end_locus, biggest_block_size;
    int poss_full;//, best, h1, h2;
    int num_indivs=0;
    // trio-aware phasing is currently always on
    boolean trioPhasing = true;
    int num_blocks = block_size.length;
    int num_haplos = input_haplos.length;   // chromosomes; individuals = num_haplos/2
    int num_loci = input_haplos[0].length;
    Recovery tempRec;

    if (num_loci > MAXLOCI){
        throw new HaploViewException("Too many loci in a single block (> 100)");
    }

    // figure out the size of the biggest block; scratch arrays below are sized to
    // this worst case (2^size possible haplotypes) and reused for every block
    biggest_block_size=block_size[0];
    for (int i=1; i<num_blocks; i++) {
        if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i];
    }
    num_poss = two_n[biggest_block_size];

    data = new OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) data[i]= new OBS();
    superdata = new SUPER_OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks);

    double[][] hprob = new double[num_blocks][num_poss];   // per-block frequencies of the kept haplotypes
    int[][] hlist = new int[num_blocks][num_poss];         // per-block codes of the kept haplotypes
    int[] num_hlist = new int[num_blocks];                 // number kept in each block
    int[] hint = new int[num_poss];                        // haplotype code -> index in hlist, or -1 if dropped
    prob = new double[num_poss];

    /* for trio option */
    if (trioPhasing) {
        ambighet = new int[(num_haplos/4)][num_loci];
        store_dhet_status(num_haplos,num_loci,input_haplos);
    }

    end_locus=-1;

    // now we loop through the blocks
    for (block=0; block<num_blocks; block++) {
        start_locus=end_locus+1;
        end_locus=start_locus+block_size[block]-1;
        num_poss=two_n[block_size[block]];

        // read_observations initializes the values in data[] (array of OBS)
        num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus);

        // start prob array with probabilities from full observations
        for (int j=0; j<num_poss; j++) {
            prob[j]=PSEUDOCOUNT;
        }
        total=(double)num_poss;
        total *= PSEUDOCOUNT;

        /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
           haplotype - i.e., flat when nothing is known, close to phase known if a
           great deal is known */
        for (int i=0; i<num_indivs; i++) {
            if (data[i].nposs==1) {
                tempRec = (Recovery)data[i].poss.elementAt(0);
                prob[tempRec.h1]+=1.0;
                prob[tempRec.h2]+=1.0;
                total+=2.0;
            }
        }
        // normalize
        for (int j=0; j<num_poss; j++) {
            prob[j] /= total;
        }

        // EM LOOP: assign ambiguous data based on p, then re-estimate p
        iter=0;
        while (iter<20) {
            // E step: compute probabilities of each possible observation
            for (int i=0; i<num_indivs; i++) {
                total=0.0;
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]);
                    total+=tempRec.p;
                }
                // normalize
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p /= total;
                }
            }

            // M step: re-estimate prob (the 1e-10 floor keeps frequencies nonzero)
            for (int j=0; j<num_poss; j++) {
                prob[j]=1e-10;
            }
            total=num_poss*1e-10;
            for (int i=0; i<num_indivs; i++) {
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    prob[tempRec.h1]+=tempRec.p;
                    prob[tempRec.h2]+=tempRec.p;
                    total+=(2.0*(tempRec.p));
                }
            }
            // normalize
            for (int j=0; j<num_poss; j++) {
                prob[j] /= total;
            }
            iter++;
        }

        // keep only haplotypes whose estimated frequency exceeds .001
        int m=0;
        for (int j=0; j<num_poss; j++) {
            hint[j]=-1;
            if (prob[j] > .001) {
                hlist[block][m]=j;
                hprob[block][m]=prob[j];
                hint[j]=m;
                m++;
            }
        }
        num_hlist[block]=m;

        // store current block results in super obs structure
        store_block_haplos(hlist, hprob, hint, block, num_indivs);
    } /* for each block */

    // total number of cross-block combinations of the surviving haplotypes
    poss_full=1;
    for (block=0; block<num_blocks; block++) {
        poss_full *= num_hlist[block];
    }

    /* LIGATE and finish this mess :) */
    /* if (poss_full > 1000000) {
       what we really need to do is go through and pare back to using
       a smaller number (e.g., > .002, .005)
       //printf("too many possibilities: %d\n",poss_full);
       return(-5);
       } */

    double[] superprob = new double[poss_full];

    create_super_haplos(num_indivs,num_blocks,num_hlist);

    /* run standard EM on supercombos */
    /* start prob array with probabilities from full observations */
    for (int j=0; j<poss_full; j++) {
        superprob[j]=PSEUDOCOUNT;
    }
    total=(double)poss_full;
    total *= PSEUDOCOUNT;

    /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
       haplotype - i.e., flat when nothing is known, close to phase known if a
       great deal is known */
    for (int i=0; i<num_indivs; i++) {
        if (superdata[i].nsuper==1) {
            superprob[superdata[i].superposs[0].h1]+=1.0;
            superprob[superdata[i].superposs[0].h2]+=1.0;
            total+=2.0;
        }
    }
    /* normalize */
    for (int j=0; j<poss_full; j++) {
        superprob[j] /= total;
    }

    /* EM LOOP: assign ambiguous data based on p, then re-estimate p */
    iter=0;
    while (iter<20) {
        /* E step: compute probabilities of each possible observation */
        for (int i=0; i<num_indivs; i++) {
            total=0.0;
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p = (float)
                        (superprob[superdata[i].superposs[k].h1]*
                        superprob[superdata[i].superposs[k].h2]);
                total+=superdata[i].superposs[k].p;
            }
            /* normalize */
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p /= total;
            }
        }
        /* M step: re-estimate prob */
        for (int j=0; j<poss_full; j++) {
            superprob[j]=1e-10;
        }
        total=poss_full*1e-10;
        for (int i=0; i<num_indivs; i++) {
            for (int k=0; k<superdata[i].nsuper; k++) {
                superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p;
                superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p;
                total+=(2.0*superdata[i].superposs[k].p);
            }
        }
        /* normalize */
        for (int j=0; j<poss_full; j++) {
            superprob[j] /= total;
        }
        iter++;
    }

    /* we're done - the indices of superprob now have to be decoded to reveal the
       actual haplotypes they represent */

    double[] tempT,totalT,tempU,totalU;
    Vector obsT = new Vector();
    Vector obsU = new Vector();
    if(trioPhasing) {
        // accumulate, per trio, probability-weighted counts on the T vs. U side
        int best1=0,best2=0,h1,h2;   // declared/assigned but never read below
        double tempnorm=0,product,bestProduct=0;
        tempT = new double[poss_full];
        totalT = new double[poss_full];
        tempU = new double[poss_full];
        totalU = new double[poss_full];
        for (int i=0; i<numTrios*2; i+=2) {
            best1=0;
            best2=0;
            bestProduct=-999.999;
            tempnorm=0.00;
            // weigh every phasing combination of this superdata pair that is
            // consistent with the kid's genotypes
            for (int n=0; n<superdata[i].nsuper; n++) {
                for (int m=0; m<superdata[i+1].nsuper; m++) {
                    if (kid_consistent(superdata[i].superposs[n].h1,
                            superdata[i+1].superposs[m].h1,num_blocks,
                            block_size,hlist,num_hlist,i/2,num_loci)) {
                        product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p;
                        if (product > bestProduct) {
                            best1=n;
                            best2=m;
                            bestProduct=product;
                        }
                        // only heterozygous combinations contribute to T/U
                        if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) {
                            tempT[superdata[i].superposs[n].h1]+=product;
                            tempU[superdata[i].superposs[n].h2]+=product;
                        }
                        if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) {
                            tempT[superdata[i+1].superposs[m].h1]+=product;
                            tempU[superdata[i+1].superposs[m].h2]+=product;
                        }
                        /* normalize by all possibilities, even double hom */
                        tempnorm+=product;
                    }
                }
            }
            // fold this trio's normalized contributions into the running totals
            if (tempnorm > 0.00) {
                for (int j=0; j<poss_full; j++) {
                    if (tempT[j] > 0.0000 || tempU[j] > 0.0000) {
                        totalT[j] += (tempT[j]/tempnorm);
                        totalU[j] += (tempU[j]/tempnorm);
                        tempT[j]=tempU[j]=0.0000;
                    }
                }
                tempnorm=0.00;
            }
        }
        // report T/U only for haplotypes passing the frequency cutoff, in the
        // same order as haplos_present below
        for (int j = 0; j <poss_full; j++){
            if (superprob[j] > .001) {
                obsT.add(new Double(totalT[j]));
                obsU.add(new Double(totalU[j]));
            }
        }
    }

    EMReturn results;
    Vector haplos_present = new Vector();
    Vector haplo_freq= new Vector();
    // decode the surviving superprob indices into actual haplotypes
    for (int j=0; j<poss_full; j++) {
        if (superprob[j] > .001) {
            haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            haplo_freq.addElement(new Double(superprob[j]));
        }
    }
    double[] freqs = new double[haplo_freq.size()];
    for(int j=0;j<haplo_freq.size();j++) {
        freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue();
    }
    if (trioPhasing){
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU);
    }else{
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs);
    }
    return results;

    /* commented-out C-era code that would write each individual's most probable
       phasing to "emphased.haps" when dump_phased_haplos is set:
    if (dump_phased_haplos) {
        if ((fpdump=fopen("emphased.haps","w"))!=NULL) {
            for (i=0; i<num_indivs; i++) {
                best=0;
                for (k=0; k<superdata[i].nsuper; k++) {
                    if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; }
                }
                h1 = superdata[i].superposs[best].h1;
                h2 = superdata[i].superposs[best].h2;
                fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist));
                fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist));
            }
            fclose(fpdump);
        }
    }
    */
    //return 0;
}
/**
 * Block-wise ("partition/ligation") EM haplotype phasing of the input genotype data.
 * <p>
 * Phase 1: for each contiguous block of markers a 20-iteration EM loop estimates
 * within-block haplotype frequencies; haplotypes with estimated frequency &gt; .001
 * survive. Phase 2: the survivors are combined across blocks into "super" haplotypes
 * and a second 20-iteration EM loop estimates their frequencies. When trio phasing is
 * on, probability-weighted T/U observation counts are accumulated per trio (the T/U
 * names suggest transmitted/untransmitted, TDT-style — TODO confirm against callers).
 *
 * @param input_haplos       one row per chromosome (two consecutive rows per
 *                           individual), one column per marker
 * @param max_missing        unused in this method
 * @param block_size         number of markers in each block; blocks are contiguous
 *                           and partition the loci in order
 * @param dump_phased_haplos unused in this method (see the commented-out dump code
 *                           at the end)
 * @param numTrios           number of trios; each trio occupies two consecutive
 *                           slots of superdata — TODO confirm pair ordering
 * @return an EMReturn holding the haplotypes with frequency &gt; .001, their
 *         frequencies, and (with trio phasing) the parallel obsT/obsU vectors
 * @throws HaploViewException if there are more than MAXLOCI loci
 */
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{
    int num_poss, iter;//, maxk, numk;
    double total;//, maxprob;
    int block, start_locus, end_locus, biggest_block_size;
    int poss_full;//, best, h1, h2;
    int num_indivs=0;
    // trio-aware phasing is currently always on
    boolean trioPhasing = true;
    int num_blocks = block_size.length;
    int num_haplos = input_haplos.length;   // chromosomes; individuals = num_haplos/2
    int num_loci = input_haplos[0].length;
    Recovery tempRec;

    if (num_loci > MAXLOCI){
        throw new HaploViewException("Too many loci in a single block (> 100)");
    }

    // figure out the size of the biggest block; scratch arrays below are sized to
    // this worst case (2^size possible haplotypes) and reused for every block
    biggest_block_size=block_size[0];
    for (int i=1; i<num_blocks; i++) {
        if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i];
    }
    num_poss = two_n[biggest_block_size];

    data = new OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) data[i]= new OBS();
    superdata = new SUPER_OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks);

    double[][] hprob = new double[num_blocks][num_poss];   // per-block frequencies of the kept haplotypes
    int[][] hlist = new int[num_blocks][num_poss];         // per-block codes of the kept haplotypes
    int[] num_hlist = new int[num_blocks];                 // number kept in each block
    int[] hint = new int[num_poss];                        // haplotype code -> index in hlist, or -1 if dropped
    prob = new double[num_poss];

    /* for trio option */
    if (trioPhasing) {
        ambighet = new int[(num_haplos/4)][num_loci];
        store_dhet_status(num_haplos,num_loci,input_haplos);
    }

    end_locus=-1;

    // now we loop through the blocks
    for (block=0; block<num_blocks; block++) {
        start_locus=end_locus+1;
        end_locus=start_locus+block_size[block]-1;
        num_poss=two_n[block_size[block]];

        // read_observations initializes the values in data[] (array of OBS)
        num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus);

        // start prob array with probabilities from full observations
        for (int j=0; j<num_poss; j++) {
            prob[j]=PSEUDOCOUNT;
        }
        total=(double)num_poss;
        total *= PSEUDOCOUNT;

        /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
           haplotype - i.e., flat when nothing is known, close to phase known if a
           great deal is known */
        for (int i=0; i<num_indivs; i++) {
            if (data[i].nposs==1) {
                tempRec = (Recovery)data[i].poss.elementAt(0);
                prob[tempRec.h1]+=1.0;
                prob[tempRec.h2]+=1.0;
                total+=2.0;
            }
        }
        // normalize
        for (int j=0; j<num_poss; j++) {
            prob[j] /= total;
        }

        // EM LOOP: assign ambiguous data based on p, then re-estimate p
        iter=0;
        while (iter<20) {
            // E step: compute probabilities of each possible observation
            for (int i=0; i<num_indivs; i++) {
                total=0.0;
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]);
                    total+=tempRec.p;
                }
                // normalize
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p /= total;
                }
            }

            // M step: re-estimate prob (the 1e-10 floor keeps frequencies nonzero)
            for (int j=0; j<num_poss; j++) {
                prob[j]=1e-10;
            }
            total=num_poss*1e-10;
            for (int i=0; i<num_indivs; i++) {
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    prob[tempRec.h1]+=tempRec.p;
                    prob[tempRec.h2]+=tempRec.p;
                    total+=(2.0*(tempRec.p));
                }
            }
            // normalize
            for (int j=0; j<num_poss; j++) {
                prob[j] /= total;
            }
            iter++;
        }

        // keep only haplotypes whose estimated frequency exceeds .001
        int m=0;
        for (int j=0; j<num_poss; j++) {
            hint[j]=-1;
            if (prob[j] > .001) {
                hlist[block][m]=j;
                hprob[block][m]=prob[j];
                hint[j]=m;
                m++;
            }
        }
        num_hlist[block]=m;

        // store current block results in super obs structure
        store_block_haplos(hlist, hprob, hint, block, num_indivs);
    } /* for each block */

    // total number of cross-block combinations of the surviving haplotypes
    poss_full=1;
    for (block=0; block<num_blocks; block++) {
        poss_full *= num_hlist[block];
    }

    /* LIGATE and finish this mess :) */
    /* if (poss_full > 1000000) {
       what we really need to do is go through and pare back to using
       a smaller number (e.g., > .002, .005)
       //printf("too many possibilities: %d\n",poss_full);
       return(-5);
       } */

    double[] superprob = new double[poss_full];

    create_super_haplos(num_indivs,num_blocks,num_hlist);

    /* run standard EM on supercombos */
    /* start prob array with probabilities from full observations */
    for (int j=0; j<poss_full; j++) {
        superprob[j]=PSEUDOCOUNT;
    }
    total=(double)poss_full;
    total *= PSEUDOCOUNT;

    /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
       haplotype - i.e., flat when nothing is known, close to phase known if a
       great deal is known */
    for (int i=0; i<num_indivs; i++) {
        if (superdata[i].nsuper==1) {
            superprob[superdata[i].superposs[0].h1]+=1.0;
            superprob[superdata[i].superposs[0].h2]+=1.0;
            total+=2.0;
        }
    }
    /* normalize */
    for (int j=0; j<poss_full; j++) {
        superprob[j] /= total;
    }

    /* EM LOOP: assign ambiguous data based on p, then re-estimate p */
    iter=0;
    while (iter<20) {
        /* E step: compute probabilities of each possible observation */
        for (int i=0; i<num_indivs; i++) {
            total=0.0;
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p = (float)
                        (superprob[superdata[i].superposs[k].h1]*
                        superprob[superdata[i].superposs[k].h2]);
                total+=superdata[i].superposs[k].p;
            }
            /* normalize */
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p /= total;
            }
        }
        /* M step: re-estimate prob */
        for (int j=0; j<poss_full; j++) {
            superprob[j]=1e-10;
        }
        total=poss_full*1e-10;
        for (int i=0; i<num_indivs; i++) {
            for (int k=0; k<superdata[i].nsuper; k++) {
                superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p;
                superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p;
                total+=(2.0*superdata[i].superposs[k].p);
            }
        }
        /* normalize */
        for (int j=0; j<poss_full; j++) {
            superprob[j] /= total;
        }
        iter++;
    }

    /* we're done - the indices of superprob now have to be decoded to reveal the
       actual haplotypes they represent */

    double[] tempT,totalT,tempU,totalU;
    Vector obsT = new Vector();
    Vector obsU = new Vector();
    if(trioPhasing) {
        // accumulate, per trio, probability-weighted counts on the T vs. U side
        int best1=0,best2=0,h1,h2;   // declared/assigned but never read below
        double tempnorm=0,product,bestProduct=0;
        tempT = new double[poss_full];
        totalT = new double[poss_full];
        tempU = new double[poss_full];
        totalU = new double[poss_full];
        for (int i=0; i<numTrios*2; i+=2) {
            best1=0;
            best2=0;
            bestProduct=-999.999;
            tempnorm=0.00;
            // weigh every phasing combination of this superdata pair that is
            // consistent with the kid's genotypes
            for (int n=0; n<superdata[i].nsuper; n++) {
                for (int m=0; m<superdata[i+1].nsuper; m++) {
                    if (kid_consistent(superdata[i].superposs[n].h1,
                            superdata[i+1].superposs[m].h1,num_blocks,
                            block_size,hlist,num_hlist,i/2,num_loci)) {
                        product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p;
                        if (product > bestProduct) {
                            best1=n;
                            best2=m;
                            bestProduct=product;
                        }
                        // only heterozygous combinations contribute to T/U
                        if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) {
                            tempT[superdata[i].superposs[n].h1]+=product;
                            tempU[superdata[i].superposs[n].h2]+=product;
                        }
                        if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) {
                            tempT[superdata[i+1].superposs[m].h1]+=product;
                            tempU[superdata[i+1].superposs[m].h2]+=product;
                        }
                        /* normalize by all possibilities, even double hom */
                        tempnorm+=product;
                    }
                }
            }
            // fold this trio's normalized contributions into the running totals
            if (tempnorm > 0.00) {
                for (int j=0; j<poss_full; j++) {
                    if (tempT[j] > 0.0000 || tempU[j] > 0.0000) {
                        totalT[j] += (tempT[j]/tempnorm);
                        totalU[j] += (tempU[j]/tempnorm);
                        tempT[j]=tempU[j]=0.0000;
                    }
                }
                tempnorm=0.00;
            }
        }
        // report T/U only for haplotypes passing the frequency cutoff, in the
        // same order as haplos_present below
        for (int j = 0; j <poss_full; j++){
            if (superprob[j] > .001) {
                obsT.add(new Double(totalT[j]));
                obsU.add(new Double(totalU[j]));
            }
        }
    }

    EMReturn results;
    Vector haplos_present = new Vector();
    Vector haplo_freq= new Vector();
    // decode the surviving superprob indices into actual haplotypes
    for (int j=0; j<poss_full; j++) {
        if (superprob[j] > .001) {
            haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            haplo_freq.addElement(new Double(superprob[j]));
        }
    }
    double[] freqs = new double[haplo_freq.size()];
    for(int j=0;j<haplo_freq.size();j++) {
        freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue();
    }
    if (trioPhasing){
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU);
    }else{
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs);
    }
    return results;

    /* commented-out C-era code that would write each individual's most probable
       phasing to "emphased.haps" when dump_phased_haplos is set:
    if (dump_phased_haplos) {
        if ((fpdump=fopen("emphased.haps","w"))!=NULL) {
            for (i=0; i<num_indivs; i++) {
                best=0;
                for (k=0; k<superdata[i].nsuper; k++) {
                    if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; }
                }
                h1 = superdata[i].superposs[best].h1;
                h2 = superdata[i].superposs[best].h2;
                fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist));
                fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist));
            }
            fclose(fpdump);
        }
    }
    */
    //return 0;
}
1,110,690
/**
 * Block-wise ("partition/ligation") EM haplotype phasing of the input genotype data.
 * <p>
 * Phase 1: for each contiguous block of markers a 20-iteration EM loop estimates
 * within-block haplotype frequencies; haplotypes with estimated frequency &gt; .001
 * survive. Phase 2: the survivors are combined across blocks into "super" haplotypes
 * and a second 20-iteration EM loop estimates their frequencies. When trio phasing is
 * on, probability-weighted T/U observation counts are accumulated per trio (the T/U
 * names suggest transmitted/untransmitted, TDT-style — TODO confirm against callers).
 *
 * @param input_haplos       one row per chromosome (two consecutive rows per
 *                           individual), one column per marker
 * @param max_missing        unused in this method
 * @param block_size         number of markers in each block; blocks are contiguous
 *                           and partition the loci in order
 * @param dump_phased_haplos unused in this method (see the commented-out dump code
 *                           at the end)
 * @param numTrios           number of trios; each trio occupies two consecutive
 *                           slots of superdata — TODO confirm pair ordering
 * @return an EMReturn holding the haplotypes with frequency &gt; .001, their
 *         frequencies, and (with trio phasing) the parallel obsT/obsU vectors
 * @throws HaploViewException if there are more than MAXLOCI loci
 */
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{
    int num_poss, iter;//, maxk, numk;
    double total;//, maxprob;
    int block, start_locus, end_locus, biggest_block_size;
    int poss_full;//, best, h1, h2;
    int num_indivs=0;
    // trio-aware phasing is currently always on
    boolean trioPhasing = true;
    int num_blocks = block_size.length;
    int num_haplos = input_haplos.length;   // chromosomes; individuals = num_haplos/2
    int num_loci = input_haplos[0].length;
    Recovery tempRec;

    if (num_loci > MAXLOCI){
        throw new HaploViewException("Too many loci in a single block (> 100)");
    }

    // figure out the size of the biggest block; scratch arrays below are sized to
    // this worst case (2^size possible haplotypes) and reused for every block
    biggest_block_size=block_size[0];
    for (int i=1; i<num_blocks; i++) {
        if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i];
    }
    num_poss = two_n[biggest_block_size];

    data = new OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) data[i]= new OBS();
    superdata = new SUPER_OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks);

    double[][] hprob = new double[num_blocks][num_poss];   // per-block frequencies of the kept haplotypes
    int[][] hlist = new int[num_blocks][num_poss];         // per-block codes of the kept haplotypes
    int[] num_hlist = new int[num_blocks];                 // number kept in each block
    int[] hint = new int[num_poss];                        // haplotype code -> index in hlist, or -1 if dropped
    prob = new double[num_poss];

    /* for trio option */
    if (trioPhasing) {
        ambighet = new int[(num_haplos/4)][num_loci];
        store_dhet_status(num_haplos,num_loci,input_haplos);
    }

    end_locus=-1;

    // now we loop through the blocks
    for (block=0; block<num_blocks; block++) {
        start_locus=end_locus+1;
        end_locus=start_locus+block_size[block]-1;
        num_poss=two_n[block_size[block]];

        // read_observations initializes the values in data[] (array of OBS)
        num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus);

        // start prob array with probabilities from full observations
        for (int j=0; j<num_poss; j++) {
            prob[j]=PSEUDOCOUNT;
        }
        total=(double)num_poss;
        total *= PSEUDOCOUNT;

        /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
           haplotype - i.e., flat when nothing is known, close to phase known if a
           great deal is known */
        for (int i=0; i<num_indivs; i++) {
            if (data[i].nposs==1) {
                tempRec = (Recovery)data[i].poss.elementAt(0);
                prob[tempRec.h1]+=1.0;
                prob[tempRec.h2]+=1.0;
                total+=2.0;
            }
        }
        // normalize
        for (int j=0; j<num_poss; j++) {
            prob[j] /= total;
        }

        // EM LOOP: assign ambiguous data based on p, then re-estimate p
        iter=0;
        while (iter<20) {
            // E step: compute probabilities of each possible observation
            for (int i=0; i<num_indivs; i++) {
                total=0.0;
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]);
                    total+=tempRec.p;
                }
                // normalize
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p /= total;
                }
            }

            // M step: re-estimate prob (the 1e-10 floor keeps frequencies nonzero)
            for (int j=0; j<num_poss; j++) {
                prob[j]=1e-10;
            }
            total=num_poss*1e-10;
            for (int i=0; i<num_indivs; i++) {
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    prob[tempRec.h1]+=tempRec.p;
                    prob[tempRec.h2]+=tempRec.p;
                    total+=(2.0*(tempRec.p));
                }
            }
            // normalize
            for (int j=0; j<num_poss; j++) {
                prob[j] /= total;
            }
            iter++;
        }

        // keep only haplotypes whose estimated frequency exceeds .001
        int m=0;
        for (int j=0; j<num_poss; j++) {
            hint[j]=-1;
            if (prob[j] > .001) {
                hlist[block][m]=j;
                hprob[block][m]=prob[j];
                hint[j]=m;
                m++;
            }
        }
        num_hlist[block]=m;

        // store current block results in super obs structure
        store_block_haplos(hlist, hprob, hint, block, num_indivs);
    } /* for each block */

    // total number of cross-block combinations of the surviving haplotypes
    poss_full=1;
    for (block=0; block<num_blocks; block++) {
        poss_full *= num_hlist[block];
    }

    /* LIGATE and finish this mess :) */
    /* if (poss_full > 1000000) {
       what we really need to do is go through and pare back to using
       a smaller number (e.g., > .002, .005)
       //printf("too many possibilities: %d\n",poss_full);
       return(-5);
       } */

    double[] superprob = new double[poss_full];

    create_super_haplos(num_indivs,num_blocks,num_hlist);

    /* run standard EM on supercombos */
    /* start prob array with probabilities from full observations */
    for (int j=0; j<poss_full; j++) {
        superprob[j]=PSEUDOCOUNT;
    }
    total=(double)poss_full;
    total *= PSEUDOCOUNT;

    /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
       haplotype - i.e., flat when nothing is known, close to phase known if a
       great deal is known */
    for (int i=0; i<num_indivs; i++) {
        if (superdata[i].nsuper==1) {
            superprob[superdata[i].superposs[0].h1]+=1.0;
            superprob[superdata[i].superposs[0].h2]+=1.0;
            total+=2.0;
        }
    }
    /* normalize */
    for (int j=0; j<poss_full; j++) {
        superprob[j] /= total;
    }

    /* EM LOOP: assign ambiguous data based on p, then re-estimate p */
    iter=0;
    while (iter<20) {
        /* E step: compute probabilities of each possible observation */
        for (int i=0; i<num_indivs; i++) {
            total=0.0;
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p = (float)
                        (superprob[superdata[i].superposs[k].h1]*
                        superprob[superdata[i].superposs[k].h2]);
                total+=superdata[i].superposs[k].p;
            }
            /* normalize */
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p /= total;
            }
        }
        /* M step: re-estimate prob */
        for (int j=0; j<poss_full; j++) {
            superprob[j]=1e-10;
        }
        total=poss_full*1e-10;
        for (int i=0; i<num_indivs; i++) {
            for (int k=0; k<superdata[i].nsuper; k++) {
                superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p;
                superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p;
                total+=(2.0*superdata[i].superposs[k].p);
            }
        }
        /* normalize */
        for (int j=0; j<poss_full; j++) {
            superprob[j] /= total;
        }
        iter++;
    }

    /* we're done - the indices of superprob now have to be decoded to reveal the
       actual haplotypes they represent */

    double[] tempT,totalT,tempU,totalU;
    Vector obsT = new Vector();
    Vector obsU = new Vector();
    if(trioPhasing) {
        // accumulate, per trio, probability-weighted counts on the T vs. U side
        int best1=0,best2=0,h1,h2;   // declared/assigned but never read below
        double tempnorm=0,product,bestProduct=0;
        tempT = new double[poss_full];
        totalT = new double[poss_full];
        tempU = new double[poss_full];
        totalU = new double[poss_full];
        for (int i=0; i<numTrios*2; i+=2) {
            best1=0;
            best2=0;
            bestProduct=-999.999;
            tempnorm=0.00;
            // weigh every phasing combination of this superdata pair that is
            // consistent with the kid's genotypes
            for (int n=0; n<superdata[i].nsuper; n++) {
                for (int m=0; m<superdata[i+1].nsuper; m++) {
                    if (kid_consistent(superdata[i].superposs[n].h1,
                            superdata[i+1].superposs[m].h1,num_blocks,
                            block_size,hlist,num_hlist,i/2,num_loci)) {
                        product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p;
                        if (product > bestProduct) {
                            best1=n;
                            best2=m;
                            bestProduct=product;
                        }
                        // only heterozygous combinations contribute to T/U
                        if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) {
                            tempT[superdata[i].superposs[n].h1]+=product;
                            tempU[superdata[i].superposs[n].h2]+=product;
                        }
                        if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) {
                            tempT[superdata[i+1].superposs[m].h1]+=product;
                            tempU[superdata[i+1].superposs[m].h2]+=product;
                        }
                        /* normalize by all possibilities, even double hom */
                        tempnorm+=product;
                    }
                }
            }
            // fold this trio's normalized contributions into the running totals
            if (tempnorm > 0.00) {
                for (int j=0; j<poss_full; j++) {
                    if (tempT[j] > 0.0000 || tempU[j] > 0.0000) {
                        totalT[j] += (tempT[j]/tempnorm);
                        totalU[j] += (tempU[j]/tempnorm);
                        tempT[j]=tempU[j]=0.0000;
                    }
                }
                tempnorm=0.00;
            }
        }
        // report T/U only for haplotypes passing the frequency cutoff, in the
        // same order as haplos_present below
        for (int j = 0; j <poss_full; j++){
            if (superprob[j] > .001) {
                obsT.add(new Double(totalT[j]));
                obsU.add(new Double(totalU[j]));
            }
        }
    }

    EMReturn results;
    Vector haplos_present = new Vector();
    Vector haplo_freq= new Vector();
    // decode the surviving superprob indices into actual haplotypes
    for (int j=0; j<poss_full; j++) {
        if (superprob[j] > .001) {
            haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            haplo_freq.addElement(new Double(superprob[j]));
        }
    }
    double[] freqs = new double[haplo_freq.size()];
    for(int j=0;j<haplo_freq.size();j++) {
        freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue();
    }
    if (trioPhasing){
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU);
    }else{
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs);
    }
    return results;

    /* commented-out C-era code that would write each individual's most probable
       phasing to "emphased.haps" when dump_phased_haplos is set:
    if (dump_phased_haplos) {
        if ((fpdump=fopen("emphased.haps","w"))!=NULL) {
            for (i=0; i<num_indivs; i++) {
                best=0;
                for (k=0; k<superdata[i].nsuper; k++) {
                    if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; }
                }
                h1 = superdata[i].superposs[best].h1;
                h2 = superdata[i].superposs[best].h2;
                fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist));
                fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist));
            }
            fclose(fpdump);
        }
    }
    */
    //return 0;
}
/**
 * Block-wise EM haplotype-frequency estimation with ligation across blocks
 * (a partition-ligation EM, ported from C -- the commented-out printf/fopen
 * fragments below are leftovers from that port).
 *
 * Phase 1: for each block of loci, run 20 EM iterations over each
 * individual's possible within-block haplotype pairs (data[]) to estimate
 * within-block haplotype probabilities; haplotypes with prob > .001 are kept
 * in hlist/hprob.  Phase 2: ligate -- enumerate full-length combinations of
 * the surviving block haplotypes (superdata[]) and run the same 20-iteration
 * EM over them.  Full-length haplotypes with superprob > .001 are decoded
 * and returned.  With trio phasing on, transmitted/untransmitted counts are
 * also accumulated from kid-consistent parental phase pairs.
 *
 * @param input_haplos one row per chromosome (two consecutive rows per
 *                     individual), one column per locus
 * @param max_missing  NOTE(review): not referenced anywhere in this method
 *                     body -- presumably vestigial or consumed elsewhere;
 *                     confirm before removing
 * @param block_size   number of loci in each block; blocks partition the
 *                     loci in order
 * @param dump_phased_haplos unused here -- the code that honored it is the
 *                     commented-out C block at the bottom of the method
 * @param numTrios     number of trios; assumes superdata[2*t] and
 *                     superdata[2*t+1] are the two parents of trio t --
 *                     TODO confirm input ordering against the caller
 * @return an EMReturn holding the decoded haplotypes and their frequencies;
 *         when trioPhasing is on, also the obsT/obsU count vectors (parallel
 *         to the returned haplotype list)
 * @throws HaploViewException if num_loci exceeds MAXLOCI
 */
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{
    int num_poss, iter;//, maxk, numk;
    double total;//, maxprob;
    int block, start_locus, end_locus, biggest_block_size;
    int poss_full;//, best, h1, h2;
    int num_indivs=0;
    // trio phasing is unconditionally on in this build; the else branch at the
    // bottom is currently dead
    boolean trioPhasing = true;
    int num_blocks = block_size.length;
    int num_haplos = input_haplos.length;   // chromosomes, i.e. 2 per individual
    int num_loci = input_haplos[0].length;
    Recovery tempRec;

    if (num_loci > MAXLOCI){
        throw new HaploViewException("Too many loci in a single block (> 100)");
    }

    //figure out the size of the biggest block
    biggest_block_size=block_size[0];
    for (int i=1; i<num_blocks; i++) {
        if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i];
    }

    // 2^biggest_block_size possible haplotypes within a block (two_n is a
    // precomputed power-of-two table)
    num_poss = two_n[biggest_block_size];

    data = new OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) data[i]= new OBS();

    superdata = new SUPER_OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks);

    double[][] hprob = new double[num_blocks][num_poss]; // per-block probs of the kept haplotypes
    int[][] hlist = new int[num_blocks][num_poss];       // per-block codes of the kept haplotypes
    int[] num_hlist = new int[num_blocks];               // how many kept per block
    int[] hint = new int[num_poss];                      // haplotype code -> index in hlist, or -1
    prob = new double[num_poss];

    /* for trio option */
    if (trioPhasing) {
        ambighet = new int[(num_haplos/4)][num_loci];
        store_dhet_status(num_haplos,num_loci,input_haplos);
    }

    end_locus=-1;
    //System.out.println("made it to 110");

    //now we loop through the blocks
    for (block=0; block<num_blocks; block++) {
        start_locus=end_locus+1;
        end_locus=start_locus+block_size[block]-1;
        num_poss=two_n[block_size[block]];

        //read_observations initializes the values in data[] (array of OBS)
        num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus);

        // start prob array with probabilities from full observations
        for (int j=0; j<num_poss; j++) {
            prob[j]=PSEUDOCOUNT;
        }
        total=(double)num_poss;
        total *= PSEUDOCOUNT;

        /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
           haplotype - i.e., flat when nothing is known, close to phase known
           if a great deal is known */
        for (int i=0; i<num_indivs; i++) {
            if (data[i].nposs==1) {
                // phase-unambiguous individual: count both haplotypes directly
                tempRec = (Recovery)data[i].poss.elementAt(0);
                prob[tempRec.h1]+=1.0;
                prob[tempRec.h2]+=1.0;
                total+=2.0;
            }
        }

        // normalize
        for (int j=0; j<num_poss; j++) {
            prob[j] /= total;
        }

        // EM LOOP: assign ambiguous data based on p, then re-estimate p
        // NOTE(review): fixed 20 iterations, no convergence test
        iter=0;
        while (iter<20) {
            // E step: compute probabilities of each possible observation
            for (int i=0; i<num_indivs; i++) {
                total=0.0;
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]);
                    total+=tempRec.p;
                }
                // normalize within this individual's possibilities
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p /= total;
                }
            }

            // M step: re-estimate prob (1e-10 floor keeps entries nonzero)
            for (int j=0; j<num_poss; j++) {
                prob[j]=1e-10;
            }
            total=num_poss*1e-10;
            for (int i=0; i<num_indivs; i++) {
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    prob[tempRec.h1]+=tempRec.p;
                    prob[tempRec.h2]+=tempRec.p;
                    total+=(2.0*(tempRec.p));
                }
            }
            // normalize
            for (int j=0; j<num_poss; j++) {
                prob[j] /= total;
            }
            iter++;
        }

        // printf("FINAL PROBABILITIES:\n");
        // keep only haplotypes with estimated frequency > .001
        int m=0;
        for (int j=0; j<num_poss; j++) {
            hint[j]=-1;
            if (prob[j] > .001) {
                // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]);
                hlist[block][m]=j;
                hprob[block][m]=prob[j];
                hint[j]=m;
                m++;
            }
        }
        num_hlist[block]=m;

        // store current block results in super obs structure
        store_block_haplos(hlist, hprob, hint, block, num_indivs);

    } /* for each block */

    // full-length combination count = product of kept haplotypes per block
    // NOTE(review): int product can overflow with many blocks -- the C-era
    // guard below was commented out and never re-enabled
    poss_full=1;
    for (block=0; block<num_blocks; block++) {
        poss_full *= num_hlist[block];
    }

    //TODO:System.out.println(poss_full);

    /* LIGATE and finish this mess :) */
    /* commented-out guard from the C original; "what we really need to do is
       go through and pare back to using a smaller number (e.g., > .002, .005)"
           if (poss_full > 1000000) {
               //printf("too many possibilities: %d\n",poss_full);
               return(-5);
           }
    */

    double[] superprob = new double[poss_full];

    create_super_haplos(num_indivs,num_blocks,num_hlist);

    /* run standard EM on supercombos */

    /* start prob array with probabilities from full observations */
    for (int j=0; j<poss_full; j++) {
        superprob[j]=PSEUDOCOUNT;
    }
    total=(double)poss_full;
    total *= PSEUDOCOUNT;
    //System.out.println("made it to 232");

    /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
       haplotype - i.e., flat when nothing is known, close to phase known if a
       great deal is known */
    for (int i=0; i<num_indivs; i++) {
        if (superdata[i].nsuper==1) {
            superprob[superdata[i].superposs[0].h1]+=1.0;
            superprob[superdata[i].superposs[0].h2]+=1.0;
            total+=2.0;
        }
    }

    /* normalize */
    for (int j=0; j<poss_full; j++) {
        superprob[j] /= total;
    }

    /* EM LOOP: assign ambiguous data based on p, then re-estimate p
       (same fixed-iteration E/M scheme as the per-block loop above) */
    iter=0;
    while (iter<20) {
        /* compute probabilities of each possible observation */
        for (int i=0; i<num_indivs; i++) {
            total=0.0;
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p = (float)
                        (superprob[superdata[i].superposs[k].h1]*
                        superprob[superdata[i].superposs[k].h2]);
                total+=superdata[i].superposs[k].p;
            }
            /* normalize */
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p /= total;
            }
        }

        /* re-estimate prob */
        for (int j=0; j<poss_full; j++) {
            superprob[j]=1e-10;
        }
        total=poss_full*1e-10;
        for (int i=0; i<num_indivs; i++) {
            for (int k=0; k<superdata[i].nsuper; k++) {
                superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p;
                superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p;
                total+=(2.0*superdata[i].superposs[k].p);
            }
        }
        /* normalize */
        for (int j=0; j<poss_full; j++) {
            superprob[j] /= total;
        }
        iter++;
    }
    //System.out.println("made it to 290");

    /* we're done - the indices of superprob now have to be decoded to reveal
       the actual haplotypes they represent */

    /* Enumeration theHaplos = haplos_present.elements();
       String tempHap;
       while(theHaplos.hasMoreElements()) {
           tempHap = (String)theHaplos.nextElement();
           System.out.println(tempHap);
       } */

    double[] tempT,totalT,tempU,totalU;
    Vector obsT = new Vector();   // transmitted counts, parallel to returned haplotype list
    Vector obsU = new Vector();   // untransmitted counts
    if(trioPhasing) {
        // NOTE(review): best1/best2 are computed and h1/h2 declared but none
        // of them are read afterwards -- dead bookkeeping from the port
        int best1=0,best2=0,h1,h2;
        double tempnorm=0,product,bestProduct=0;
        tempT = new double[poss_full];
        totalT = new double[poss_full];
        tempU = new double[poss_full];
        totalU = new double[poss_full];
        // superdata[i] and superdata[i+1] are the two parents of trio i/2
        for (int i=0; i<numTrios*2; i+=2) {
            best1=0; best2=0;
            bestProduct=-999.999;
            tempnorm=0.00;
            for (int n=0; n<superdata[i].nsuper; n++) {
                for (int m=0; m<superdata[i+1].nsuper; m++) {
                    // only parental phase pairs consistent with the kid's
                    // genotypes contribute
                    if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) {
                        product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p;
                        if (product > bestProduct) {
                            best1=n;
                            best2=m;
                            bestProduct=product;
                        }
                        // h1 counts as transmitted, h2 as untransmitted;
                        // homozygous parents (h1==h2) are uninformative
                        if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) {
                            tempT[superdata[i].superposs[n].h1]+=product;
                            tempU[superdata[i].superposs[n].h2]+=product;
                        }
                        if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) {
                            tempT[superdata[i+1].superposs[m].h1]+=product;
                            tempU[superdata[i+1].superposs[m].h2]+=product;
                        }
                        /* normalize by all possibilities, even double hom */
                        tempnorm+=product;
                    }
                }
            }
            // fold this trio's normalized counts into the totals, then reset
            // the per-trio scratch arrays
            if (tempnorm > 0.00) {
                for (int j=0; j<poss_full; j++) {
                    if (tempT[j] > 0.0000 || tempU[j] > 0.0000) {
                        totalT[j] += (tempT[j]/tempnorm);
                        totalU[j] += (tempU[j]/tempnorm);
                        tempT[j]=tempU[j]=0.0000;
                    }
                }
                tempnorm=0.00;
            }
        }
        // emit T/U counts only for the haplotypes that are returned below
        // (same superprob > .001 filter, so the vectors stay parallel)
        for (int j = 0; j <poss_full; j++){
            if (superprob[j] > .001) {
                obsT.add(new Double(totalT[j]));
                obsU.add(new Double(totalU[j]));
            }
        }
    }

    EMReturn results;
    Vector haplos_present = new Vector();
    Vector haplo_freq= new Vector();
    for (int j=0; j<poss_full; j++) {
        if (superprob[j] > .001) {
            haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            haplo_freq.addElement(new Double(superprob[j]));
        }
    }

    // unbox the frequency vector into the primitive array EMReturn expects
    double[] freqs = new double[haplo_freq.size()];
    for(int j=0;j<haplo_freq.size();j++) {
        freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue();
    }

    if (trioPhasing){
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU);
    }else{
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs);
    }
    return results;

    /* dead C code retained from the port (would have honored dump_phased_haplos):
    if (dump_phased_haplos) {
        if ((fpdump=fopen("emphased.haps","w"))!=NULL) {
            for (i=0; i<num_indivs; i++) {
                best=0;
                for (k=0; k<superdata[i].nsuper; k++) {
                    if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) {
                        best=k;
                    }
                }
                h1 = superdata[i].superposs[best].h1;
                h2 = superdata[i].superposs[best].h2;
                fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist));
                fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist));
            }
            fclose(fpdump);
        }
    }
    */
    //return 0;
}
1,110,691
/**
 * Block-wise EM haplotype-frequency estimation with ligation across blocks
 * (a partition-ligation EM, ported from C -- the commented-out printf/fopen
 * fragments below are leftovers from that port).
 *
 * Phase 1: for each block of loci, run 20 EM iterations over each
 * individual's possible within-block haplotype pairs (data[]) to estimate
 * within-block haplotype probabilities; haplotypes with prob > .001 are kept
 * in hlist/hprob.  Phase 2: ligate -- enumerate full-length combinations of
 * the surviving block haplotypes (superdata[]) and run the same 20-iteration
 * EM over them.  Full-length haplotypes with superprob > .001 are decoded
 * and returned.  With trio phasing on, transmitted/untransmitted counts are
 * also accumulated from kid-consistent parental phase pairs.
 *
 * @param input_haplos one row per chromosome (two consecutive rows per
 *                     individual), one column per locus
 * @param max_missing  NOTE(review): not referenced anywhere in this method
 *                     body -- presumably vestigial or consumed elsewhere;
 *                     confirm before removing
 * @param block_size   number of loci in each block; blocks partition the
 *                     loci in order
 * @param dump_phased_haplos unused here -- the code that honored it is the
 *                     commented-out C block at the bottom of the method
 * @param numTrios     number of trios; assumes superdata[2*t] and
 *                     superdata[2*t+1] are the two parents of trio t --
 *                     TODO confirm input ordering against the caller
 * @return an EMReturn holding the decoded haplotypes and their frequencies;
 *         when trioPhasing is on, also the obsT/obsU count vectors (parallel
 *         to the returned haplotype list)
 * @throws HaploViewException if num_loci exceeds MAXLOCI
 */
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{
    int num_poss, iter;//, maxk, numk;
    double total;//, maxprob;
    int block, start_locus, end_locus, biggest_block_size;
    int poss_full;//, best, h1, h2;
    int num_indivs=0;
    // trio phasing is unconditionally on in this build; the else branch at the
    // bottom is currently dead
    boolean trioPhasing = true;
    int num_blocks = block_size.length;
    int num_haplos = input_haplos.length;   // chromosomes, i.e. 2 per individual
    int num_loci = input_haplos[0].length;
    Recovery tempRec;

    if (num_loci > MAXLOCI){
        throw new HaploViewException("Too many loci in a single block (> 100)");
    }

    //figure out the size of the biggest block
    biggest_block_size=block_size[0];
    for (int i=1; i<num_blocks; i++) {
        if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i];
    }

    // 2^biggest_block_size possible haplotypes within a block (two_n is a
    // precomputed power-of-two table)
    num_poss = two_n[biggest_block_size];

    data = new OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) data[i]= new OBS();

    superdata = new SUPER_OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks);

    double[][] hprob = new double[num_blocks][num_poss]; // per-block probs of the kept haplotypes
    int[][] hlist = new int[num_blocks][num_poss];       // per-block codes of the kept haplotypes
    int[] num_hlist = new int[num_blocks];               // how many kept per block
    int[] hint = new int[num_poss];                      // haplotype code -> index in hlist, or -1
    prob = new double[num_poss];

    /* for trio option */
    if (trioPhasing) {
        ambighet = new int[(num_haplos/4)][num_loci];
        store_dhet_status(num_haplos,num_loci,input_haplos);
    }

    end_locus=-1;
    //System.out.println("made it to 110");

    //now we loop through the blocks
    for (block=0; block<num_blocks; block++) {
        start_locus=end_locus+1;
        end_locus=start_locus+block_size[block]-1;
        num_poss=two_n[block_size[block]];

        //read_observations initializes the values in data[] (array of OBS)
        num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus);

        // start prob array with probabilities from full observations
        for (int j=0; j<num_poss; j++) {
            prob[j]=PSEUDOCOUNT;
        }
        total=(double)num_poss;
        total *= PSEUDOCOUNT;

        /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
           haplotype - i.e., flat when nothing is known, close to phase known
           if a great deal is known */
        for (int i=0; i<num_indivs; i++) {
            if (data[i].nposs==1) {
                // phase-unambiguous individual: count both haplotypes directly
                tempRec = (Recovery)data[i].poss.elementAt(0);
                prob[tempRec.h1]+=1.0;
                prob[tempRec.h2]+=1.0;
                total+=2.0;
            }
        }

        // normalize
        for (int j=0; j<num_poss; j++) {
            prob[j] /= total;
        }

        // EM LOOP: assign ambiguous data based on p, then re-estimate p
        // NOTE(review): fixed 20 iterations, no convergence test
        iter=0;
        while (iter<20) {
            // E step: compute probabilities of each possible observation
            for (int i=0; i<num_indivs; i++) {
                total=0.0;
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]);
                    total+=tempRec.p;
                }
                // normalize within this individual's possibilities
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p /= total;
                }
            }

            // M step: re-estimate prob (1e-10 floor keeps entries nonzero)
            for (int j=0; j<num_poss; j++) {
                prob[j]=1e-10;
            }
            total=num_poss*1e-10;
            for (int i=0; i<num_indivs; i++) {
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    prob[tempRec.h1]+=tempRec.p;
                    prob[tempRec.h2]+=tempRec.p;
                    total+=(2.0*(tempRec.p));
                }
            }
            // normalize
            for (int j=0; j<num_poss; j++) {
                prob[j] /= total;
            }
            iter++;
        }

        // printf("FINAL PROBABILITIES:\n");
        // keep only haplotypes with estimated frequency > .001
        int m=0;
        for (int j=0; j<num_poss; j++) {
            hint[j]=-1;
            if (prob[j] > .001) {
                // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]);
                hlist[block][m]=j;
                hprob[block][m]=prob[j];
                hint[j]=m;
                m++;
            }
        }
        num_hlist[block]=m;

        // store current block results in super obs structure
        store_block_haplos(hlist, hprob, hint, block, num_indivs);

    } /* for each block */

    // full-length combination count = product of kept haplotypes per block
    // NOTE(review): int product can overflow with many blocks -- the C-era
    // guard below was commented out and never re-enabled
    poss_full=1;
    for (block=0; block<num_blocks; block++) {
        poss_full *= num_hlist[block];
    }

    //TODO:System.out.println(poss_full);

    /* LIGATE and finish this mess :) */
    /* commented-out guard from the C original; "what we really need to do is
       go through and pare back to using a smaller number (e.g., > .002, .005)"
           if (poss_full > 1000000) {
               //printf("too many possibilities: %d\n",poss_full);
               return(-5);
           }
    */

    double[] superprob = new double[poss_full];

    create_super_haplos(num_indivs,num_blocks,num_hlist);

    /* run standard EM on supercombos */

    /* start prob array with probabilities from full observations */
    for (int j=0; j<poss_full; j++) {
        superprob[j]=PSEUDOCOUNT;
    }
    total=(double)poss_full;
    total *= PSEUDOCOUNT;
    //System.out.println("made it to 232");

    /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
       haplotype - i.e., flat when nothing is known, close to phase known if a
       great deal is known */
    for (int i=0; i<num_indivs; i++) {
        if (superdata[i].nsuper==1) {
            superprob[superdata[i].superposs[0].h1]+=1.0;
            superprob[superdata[i].superposs[0].h2]+=1.0;
            total+=2.0;
        }
    }

    /* normalize */
    for (int j=0; j<poss_full; j++) {
        superprob[j] /= total;
    }

    /* EM LOOP: assign ambiguous data based on p, then re-estimate p
       (same fixed-iteration E/M scheme as the per-block loop above) */
    iter=0;
    while (iter<20) {
        /* compute probabilities of each possible observation */
        for (int i=0; i<num_indivs; i++) {
            total=0.0;
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p = (float)
                        (superprob[superdata[i].superposs[k].h1]*
                        superprob[superdata[i].superposs[k].h2]);
                total+=superdata[i].superposs[k].p;
            }
            /* normalize */
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p /= total;
            }
        }

        /* re-estimate prob */
        for (int j=0; j<poss_full; j++) {
            superprob[j]=1e-10;
        }
        total=poss_full*1e-10;
        for (int i=0; i<num_indivs; i++) {
            for (int k=0; k<superdata[i].nsuper; k++) {
                superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p;
                superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p;
                total+=(2.0*superdata[i].superposs[k].p);
            }
        }
        /* normalize */
        for (int j=0; j<poss_full; j++) {
            superprob[j] /= total;
        }
        iter++;
    }
    //System.out.println("made it to 290");

    /* we're done - the indices of superprob now have to be decoded to reveal
       the actual haplotypes they represent */

    /* Enumeration theHaplos = haplos_present.elements();
       String tempHap;
       while(theHaplos.hasMoreElements()) {
           tempHap = (String)theHaplos.nextElement();
           System.out.println(tempHap);
       } */

    double[] tempT,totalT,tempU,totalU;
    Vector obsT = new Vector();   // transmitted counts, parallel to returned haplotype list
    Vector obsU = new Vector();   // untransmitted counts
    if(trioPhasing) {
        // NOTE(review): best1/best2 are computed and h1/h2 declared but none
        // of them are read afterwards -- dead bookkeeping from the port
        int best1=0,best2=0,h1,h2;
        double tempnorm=0,product,bestProduct=0;
        tempT = new double[poss_full];
        totalT = new double[poss_full];
        tempU = new double[poss_full];
        totalU = new double[poss_full];
        // superdata[i] and superdata[i+1] are the two parents of trio i/2
        for (int i=0; i<numTrios*2; i+=2) {
            best1=0; best2=0;
            bestProduct=-999.999;
            tempnorm=0.00;
            for (int n=0; n<superdata[i].nsuper; n++) {
                for (int m=0; m<superdata[i+1].nsuper; m++) {
                    // only parental phase pairs consistent with the kid's
                    // genotypes contribute
                    if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) {
                        product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p;
                        if (product > bestProduct) {
                            best1=n;
                            best2=m;
                            bestProduct=product;
                        }
                        // h1 counts as transmitted, h2 as untransmitted;
                        // homozygous parents (h1==h2) are uninformative
                        if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) {
                            tempT[superdata[i].superposs[n].h1]+=product;
                            tempU[superdata[i].superposs[n].h2]+=product;
                        }
                        if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) {
                            tempT[superdata[i+1].superposs[m].h1]+=product;
                            tempU[superdata[i+1].superposs[m].h2]+=product;
                        }
                        /* normalize by all possibilities, even double hom */
                        tempnorm+=product;
                    }
                }
            }
            // fold this trio's normalized counts into the totals, then reset
            // the per-trio scratch arrays
            if (tempnorm > 0.00) {
                for (int j=0; j<poss_full; j++) {
                    if (tempT[j] > 0.0000 || tempU[j] > 0.0000) {
                        totalT[j] += (tempT[j]/tempnorm);
                        totalU[j] += (tempU[j]/tempnorm);
                        tempT[j]=tempU[j]=0.0000;
                    }
                }
                tempnorm=0.00;
            }
        }
        // emit T/U counts only for the haplotypes that are returned below
        // (same superprob > .001 filter, so the vectors stay parallel)
        for (int j = 0; j <poss_full; j++){
            if (superprob[j] > .001) {
                obsT.add(new Double(totalT[j]));
                obsU.add(new Double(totalU[j]));
            }
        }
    }

    EMReturn results;
    Vector haplos_present = new Vector();
    Vector haplo_freq= new Vector();
    for (int j=0; j<poss_full; j++) {
        if (superprob[j] > .001) {
            haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            haplo_freq.addElement(new Double(superprob[j]));
        }
    }

    // unbox the frequency vector into the primitive array EMReturn expects
    double[] freqs = new double[haplo_freq.size()];
    for(int j=0;j<haplo_freq.size();j++) {
        freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue();
    }

    if (trioPhasing){
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU);
    }else{
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs);
    }
    return results;

    /* dead C code retained from the port (would have honored dump_phased_haplos):
    if (dump_phased_haplos) {
        if ((fpdump=fopen("emphased.haps","w"))!=NULL) {
            for (i=0; i<num_indivs; i++) {
                best=0;
                for (k=0; k<superdata[i].nsuper; k++) {
                    if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) {
                        best=k;
                    }
                }
                h1 = superdata[i].superposs[best].h1;
                h2 = superdata[i].superposs[best].h2;
                fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist));
                fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist));
            }
            fclose(fpdump);
        }
    }
    */
    //return 0;
}
/**
 * Block-wise EM haplotype-frequency estimation with ligation across blocks
 * (a partition-ligation EM, ported from C -- the commented-out printf/fopen
 * fragments below are leftovers from that port).
 *
 * Phase 1: for each block of loci, run 20 EM iterations over each
 * individual's possible within-block haplotype pairs (data[]) to estimate
 * within-block haplotype probabilities; haplotypes with prob > .001 are kept
 * in hlist/hprob.  Phase 2: ligate -- enumerate full-length combinations of
 * the surviving block haplotypes (superdata[]) and run the same 20-iteration
 * EM over them.  Full-length haplotypes with superprob > .001 are decoded
 * and returned.  With trio phasing on, transmitted/untransmitted counts are
 * also accumulated from kid-consistent parental phase pairs.
 *
 * @param input_haplos one row per chromosome (two consecutive rows per
 *                     individual), one column per locus
 * @param max_missing  NOTE(review): not referenced anywhere in this method
 *                     body -- presumably vestigial or consumed elsewhere;
 *                     confirm before removing
 * @param block_size   number of loci in each block; blocks partition the
 *                     loci in order
 * @param dump_phased_haplos unused here -- the code that honored it is the
 *                     commented-out C block at the bottom of the method
 * @param numTrios     number of trios; assumes superdata[2*t] and
 *                     superdata[2*t+1] are the two parents of trio t --
 *                     TODO confirm input ordering against the caller
 * @return an EMReturn holding the decoded haplotypes and their frequencies;
 *         when trioPhasing is on, also the obsT/obsU count vectors (parallel
 *         to the returned haplotype list)
 * @throws HaploViewException if num_loci exceeds MAXLOCI
 */
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{
    int num_poss, iter;//, maxk, numk;
    double total;//, maxprob;
    int block, start_locus, end_locus, biggest_block_size;
    int poss_full;//, best, h1, h2;
    int num_indivs=0;
    // trio phasing is unconditionally on in this build; the else branch at the
    // bottom is currently dead
    boolean trioPhasing = true;
    int num_blocks = block_size.length;
    int num_haplos = input_haplos.length;   // chromosomes, i.e. 2 per individual
    int num_loci = input_haplos[0].length;
    Recovery tempRec;

    if (num_loci > MAXLOCI){
        throw new HaploViewException("Too many loci in a single block (> 100)");
    }

    //figure out the size of the biggest block
    biggest_block_size=block_size[0];
    for (int i=1; i<num_blocks; i++) {
        if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i];
    }

    // 2^biggest_block_size possible haplotypes within a block (two_n is a
    // precomputed power-of-two table)
    num_poss = two_n[biggest_block_size];

    data = new OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) data[i]= new OBS();

    superdata = new SUPER_OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks);

    double[][] hprob = new double[num_blocks][num_poss]; // per-block probs of the kept haplotypes
    int[][] hlist = new int[num_blocks][num_poss];       // per-block codes of the kept haplotypes
    int[] num_hlist = new int[num_blocks];               // how many kept per block
    int[] hint = new int[num_poss];                      // haplotype code -> index in hlist, or -1
    prob = new double[num_poss];

    /* for trio option */
    if (trioPhasing) {
        ambighet = new int[(num_haplos/4)][num_loci];
        store_dhet_status(num_haplos,num_loci,input_haplos);
    }

    end_locus=-1;
    //System.out.println("made it to 110");

    //now we loop through the blocks
    for (block=0; block<num_blocks; block++) {
        start_locus=end_locus+1;
        end_locus=start_locus+block_size[block]-1;
        num_poss=two_n[block_size[block]];

        //read_observations initializes the values in data[] (array of OBS)
        num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus);

        // start prob array with probabilities from full observations
        for (int j=0; j<num_poss; j++) {
            prob[j]=PSEUDOCOUNT;
        }
        total=(double)num_poss;
        total *= PSEUDOCOUNT;

        /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
           haplotype - i.e., flat when nothing is known, close to phase known
           if a great deal is known */
        for (int i=0; i<num_indivs; i++) {
            if (data[i].nposs==1) {
                // phase-unambiguous individual: count both haplotypes directly
                tempRec = (Recovery)data[i].poss.elementAt(0);
                prob[tempRec.h1]+=1.0;
                prob[tempRec.h2]+=1.0;
                total+=2.0;
            }
        }

        // normalize
        for (int j=0; j<num_poss; j++) {
            prob[j] /= total;
        }

        // EM LOOP: assign ambiguous data based on p, then re-estimate p
        // NOTE(review): fixed 20 iterations, no convergence test
        iter=0;
        while (iter<20) {
            // E step: compute probabilities of each possible observation
            for (int i=0; i<num_indivs; i++) {
                total=0.0;
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]);
                    total+=tempRec.p;
                }
                // normalize within this individual's possibilities
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p /= total;
                }
            }

            // M step: re-estimate prob (1e-10 floor keeps entries nonzero)
            for (int j=0; j<num_poss; j++) {
                prob[j]=1e-10;
            }
            total=num_poss*1e-10;
            for (int i=0; i<num_indivs; i++) {
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    prob[tempRec.h1]+=tempRec.p;
                    prob[tempRec.h2]+=tempRec.p;
                    total+=(2.0*(tempRec.p));
                }
            }
            // normalize
            for (int j=0; j<num_poss; j++) {
                prob[j] /= total;
            }
            iter++;
        }

        // printf("FINAL PROBABILITIES:\n");
        // keep only haplotypes with estimated frequency > .001
        int m=0;
        for (int j=0; j<num_poss; j++) {
            hint[j]=-1;
            if (prob[j] > .001) {
                // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]);
                hlist[block][m]=j;
                hprob[block][m]=prob[j];
                hint[j]=m;
                m++;
            }
        }
        num_hlist[block]=m;

        // store current block results in super obs structure
        store_block_haplos(hlist, hprob, hint, block, num_indivs);

    } /* for each block */

    // full-length combination count = product of kept haplotypes per block
    // NOTE(review): int product can overflow with many blocks -- the C-era
    // guard below was commented out and never re-enabled
    poss_full=1;
    for (block=0; block<num_blocks; block++) {
        poss_full *= num_hlist[block];
    }

    //TODO:System.out.println(poss_full);

    /* LIGATE and finish this mess :) */
    /* commented-out guard from the C original; "what we really need to do is
       go through and pare back to using a smaller number (e.g., > .002, .005)"
           if (poss_full > 1000000) {
               //printf("too many possibilities: %d\n",poss_full);
               return(-5);
           }
    */

    double[] superprob = new double[poss_full];

    create_super_haplos(num_indivs,num_blocks,num_hlist);

    /* run standard EM on supercombos */

    /* start prob array with probabilities from full observations */
    for (int j=0; j<poss_full; j++) {
        superprob[j]=PSEUDOCOUNT;
    }
    total=(double)poss_full;
    total *= PSEUDOCOUNT;
    //System.out.println("made it to 232");

    /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
       haplotype - i.e., flat when nothing is known, close to phase known if a
       great deal is known */
    for (int i=0; i<num_indivs; i++) {
        if (superdata[i].nsuper==1) {
            superprob[superdata[i].superposs[0].h1]+=1.0;
            superprob[superdata[i].superposs[0].h2]+=1.0;
            total+=2.0;
        }
    }

    /* normalize */
    for (int j=0; j<poss_full; j++) {
        superprob[j] /= total;
    }

    /* EM LOOP: assign ambiguous data based on p, then re-estimate p
       (same fixed-iteration E/M scheme as the per-block loop above) */
    iter=0;
    while (iter<20) {
        /* compute probabilities of each possible observation */
        for (int i=0; i<num_indivs; i++) {
            total=0.0;
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p = (float)
                        (superprob[superdata[i].superposs[k].h1]*
                        superprob[superdata[i].superposs[k].h2]);
                total+=superdata[i].superposs[k].p;
            }
            /* normalize */
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p /= total;
            }
        }

        /* re-estimate prob */
        for (int j=0; j<poss_full; j++) {
            superprob[j]=1e-10;
        }
        total=poss_full*1e-10;
        for (int i=0; i<num_indivs; i++) {
            for (int k=0; k<superdata[i].nsuper; k++) {
                superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p;
                superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p;
                total+=(2.0*superdata[i].superposs[k].p);
            }
        }
        /* normalize */
        for (int j=0; j<poss_full; j++) {
            superprob[j] /= total;
        }
        iter++;
    }
    //System.out.println("made it to 290");

    /* we're done - the indices of superprob now have to be decoded to reveal
       the actual haplotypes they represent */

    /* Enumeration theHaplos = haplos_present.elements();
       String tempHap;
       while(theHaplos.hasMoreElements()) {
           tempHap = (String)theHaplos.nextElement();
           System.out.println(tempHap);
       } */

    double[] tempT,totalT,tempU,totalU;
    Vector obsT = new Vector();   // transmitted counts, parallel to returned haplotype list
    Vector obsU = new Vector();   // untransmitted counts
    if(trioPhasing) {
        // NOTE(review): best1/best2 are computed and h1/h2 declared but none
        // of them are read afterwards -- dead bookkeeping from the port
        int best1=0,best2=0,h1,h2;
        double tempnorm=0,product,bestProduct=0;
        tempT = new double[poss_full];
        totalT = new double[poss_full];
        tempU = new double[poss_full];
        totalU = new double[poss_full];
        // superdata[i] and superdata[i+1] are the two parents of trio i/2
        for (int i=0; i<numTrios*2; i+=2) {
            best1=0; best2=0;
            bestProduct=-999.999;
            tempnorm=0.00;
            for (int n=0; n<superdata[i].nsuper; n++) {
                for (int m=0; m<superdata[i+1].nsuper; m++) {
                    // only parental phase pairs consistent with the kid's
                    // genotypes contribute
                    if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) {
                        product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p;
                        if (product > bestProduct) {
                            best1=n;
                            best2=m;
                            bestProduct=product;
                        }
                        // h1 counts as transmitted, h2 as untransmitted;
                        // homozygous parents (h1==h2) are uninformative
                        if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) {
                            tempT[superdata[i].superposs[n].h1]+=product;
                            tempU[superdata[i].superposs[n].h2]+=product;
                        }
                        if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) {
                            tempT[superdata[i+1].superposs[m].h1]+=product;
                            tempU[superdata[i+1].superposs[m].h2]+=product;
                        }
                        /* normalize by all possibilities, even double hom */
                        tempnorm+=product;
                    }
                }
            }
            // fold this trio's normalized counts into the totals, then reset
            // the per-trio scratch arrays
            if (tempnorm > 0.00) {
                for (int j=0; j<poss_full; j++) {
                    if (tempT[j] > 0.0000 || tempU[j] > 0.0000) {
                        totalT[j] += (tempT[j]/tempnorm);
                        totalU[j] += (tempU[j]/tempnorm);
                        tempT[j]=tempU[j]=0.0000;
                    }
                }
                tempnorm=0.00;
            }
        }
        // emit T/U counts only for the haplotypes that are returned below
        // (same superprob > .001 filter, so the vectors stay parallel)
        for (int j = 0; j <poss_full; j++){
            if (superprob[j] > .001) {
                obsT.add(new Double(totalT[j]));
                obsU.add(new Double(totalU[j]));
            }
        }
    }

    EMReturn results;
    Vector haplos_present = new Vector();
    Vector haplo_freq= new Vector();
    for (int j=0; j<poss_full; j++) {
        if (superprob[j] > .001) {
            haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            haplo_freq.addElement(new Double(superprob[j]));
        }
    }

    // unbox the frequency vector into the primitive array EMReturn expects
    double[] freqs = new double[haplo_freq.size()];
    for(int j=0;j<haplo_freq.size();j++) {
        freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue();
    }

    if (trioPhasing){
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU);
    }else{
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs);
    }
    return results;

    /* dead C code retained from the port (would have honored dump_phased_haplos):
    if (dump_phased_haplos) {
        if ((fpdump=fopen("emphased.haps","w"))!=NULL) {
            for (i=0; i<num_indivs; i++) {
                best=0;
                for (k=0; k<superdata[i].nsuper; k++) {
                    if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) {
                        best=k;
                    }
                }
                h1 = superdata[i].superposs[best].h1;
                h2 = superdata[i].superposs[best].h2;
                fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist));
                fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist));
            }
            fclose(fpdump);
        }
    }
    */
    //return 0;
}
1,110,692
/**
 * Block-wise EM haplotype-frequency estimation with ligation across blocks
 * (a partition-ligation EM, ported from C -- the commented-out printf/fopen
 * fragments below are leftovers from that port).
 *
 * Phase 1: for each block of loci, run 20 EM iterations over each
 * individual's possible within-block haplotype pairs (data[]) to estimate
 * within-block haplotype probabilities; haplotypes with prob > .001 are kept
 * in hlist/hprob.  Phase 2: ligate -- enumerate full-length combinations of
 * the surviving block haplotypes (superdata[]) and run the same 20-iteration
 * EM over them.  Full-length haplotypes with superprob > .001 are decoded
 * and returned.  With trio phasing on, transmitted/untransmitted counts are
 * also accumulated from kid-consistent parental phase pairs.
 *
 * @param input_haplos one row per chromosome (two consecutive rows per
 *                     individual), one column per locus
 * @param max_missing  NOTE(review): not referenced anywhere in this method
 *                     body -- presumably vestigial or consumed elsewhere;
 *                     confirm before removing
 * @param block_size   number of loci in each block; blocks partition the
 *                     loci in order
 * @param dump_phased_haplos unused here -- the code that honored it is the
 *                     commented-out C block at the bottom of the method
 * @param numTrios     number of trios; assumes superdata[2*t] and
 *                     superdata[2*t+1] are the two parents of trio t --
 *                     TODO confirm input ordering against the caller
 * @return an EMReturn holding the decoded haplotypes and their frequencies;
 *         when trioPhasing is on, also the obsT/obsU count vectors (parallel
 *         to the returned haplotype list)
 * @throws HaploViewException if num_loci exceeds MAXLOCI
 */
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{
    int num_poss, iter;//, maxk, numk;
    double total;//, maxprob;
    int block, start_locus, end_locus, biggest_block_size;
    int poss_full;//, best, h1, h2;
    int num_indivs=0;
    // trio phasing is unconditionally on in this build; the else branch at the
    // bottom is currently dead
    boolean trioPhasing = true;
    int num_blocks = block_size.length;
    int num_haplos = input_haplos.length;   // chromosomes, i.e. 2 per individual
    int num_loci = input_haplos[0].length;
    Recovery tempRec;

    if (num_loci > MAXLOCI){
        throw new HaploViewException("Too many loci in a single block (> 100)");
    }

    //figure out the size of the biggest block
    biggest_block_size=block_size[0];
    for (int i=1; i<num_blocks; i++) {
        if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i];
    }

    // 2^biggest_block_size possible haplotypes within a block (two_n is a
    // precomputed power-of-two table)
    num_poss = two_n[biggest_block_size];

    data = new OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) data[i]= new OBS();

    superdata = new SUPER_OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks);

    double[][] hprob = new double[num_blocks][num_poss]; // per-block probs of the kept haplotypes
    int[][] hlist = new int[num_blocks][num_poss];       // per-block codes of the kept haplotypes
    int[] num_hlist = new int[num_blocks];               // how many kept per block
    int[] hint = new int[num_poss];                      // haplotype code -> index in hlist, or -1
    prob = new double[num_poss];

    /* for trio option */
    if (trioPhasing) {
        ambighet = new int[(num_haplos/4)][num_loci];
        store_dhet_status(num_haplos,num_loci,input_haplos);
    }

    end_locus=-1;
    //System.out.println("made it to 110");

    //now we loop through the blocks
    for (block=0; block<num_blocks; block++) {
        start_locus=end_locus+1;
        end_locus=start_locus+block_size[block]-1;
        num_poss=two_n[block_size[block]];

        //read_observations initializes the values in data[] (array of OBS)
        num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus);

        // start prob array with probabilities from full observations
        for (int j=0; j<num_poss; j++) {
            prob[j]=PSEUDOCOUNT;
        }
        total=(double)num_poss;
        total *= PSEUDOCOUNT;

        /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
           haplotype - i.e., flat when nothing is known, close to phase known
           if a great deal is known */
        for (int i=0; i<num_indivs; i++) {
            if (data[i].nposs==1) {
                // phase-unambiguous individual: count both haplotypes directly
                tempRec = (Recovery)data[i].poss.elementAt(0);
                prob[tempRec.h1]+=1.0;
                prob[tempRec.h2]+=1.0;
                total+=2.0;
            }
        }

        // normalize
        for (int j=0; j<num_poss; j++) {
            prob[j] /= total;
        }

        // EM LOOP: assign ambiguous data based on p, then re-estimate p
        // NOTE(review): fixed 20 iterations, no convergence test
        iter=0;
        while (iter<20) {
            // E step: compute probabilities of each possible observation
            for (int i=0; i<num_indivs; i++) {
                total=0.0;
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]);
                    total+=tempRec.p;
                }
                // normalize within this individual's possibilities
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p /= total;
                }
            }

            // M step: re-estimate prob (1e-10 floor keeps entries nonzero)
            for (int j=0; j<num_poss; j++) {
                prob[j]=1e-10;
            }
            total=num_poss*1e-10;
            for (int i=0; i<num_indivs; i++) {
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    prob[tempRec.h1]+=tempRec.p;
                    prob[tempRec.h2]+=tempRec.p;
                    total+=(2.0*(tempRec.p));
                }
            }
            // normalize
            for (int j=0; j<num_poss; j++) {
                prob[j] /= total;
            }
            iter++;
        }

        // printf("FINAL PROBABILITIES:\n");
        // keep only haplotypes with estimated frequency > .001
        int m=0;
        for (int j=0; j<num_poss; j++) {
            hint[j]=-1;
            if (prob[j] > .001) {
                // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]);
                hlist[block][m]=j;
                hprob[block][m]=prob[j];
                hint[j]=m;
                m++;
            }
        }
        num_hlist[block]=m;

        // store current block results in super obs structure
        store_block_haplos(hlist, hprob, hint, block, num_indivs);

    } /* for each block */

    // full-length combination count = product of kept haplotypes per block
    // NOTE(review): int product can overflow with many blocks -- the C-era
    // guard below was commented out and never re-enabled
    poss_full=1;
    for (block=0; block<num_blocks; block++) {
        poss_full *= num_hlist[block];
    }

    //TODO:System.out.println(poss_full);

    /* LIGATE and finish this mess :) */
    /* commented-out guard from the C original; "what we really need to do is
       go through and pare back to using a smaller number (e.g., > .002, .005)"
           if (poss_full > 1000000) {
               //printf("too many possibilities: %d\n",poss_full);
               return(-5);
           }
    */

    double[] superprob = new double[poss_full];

    create_super_haplos(num_indivs,num_blocks,num_hlist);

    /* run standard EM on supercombos */

    /* start prob array with probabilities from full observations */
    for (int j=0; j<poss_full; j++) {
        superprob[j]=PSEUDOCOUNT;
    }
    total=(double)poss_full;
    total *= PSEUDOCOUNT;
    //System.out.println("made it to 232");

    /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
       haplotype - i.e., flat when nothing is known, close to phase known if a
       great deal is known */
    for (int i=0; i<num_indivs; i++) {
        if (superdata[i].nsuper==1) {
            superprob[superdata[i].superposs[0].h1]+=1.0;
            superprob[superdata[i].superposs[0].h2]+=1.0;
            total+=2.0;
        }
    }

    /* normalize */
    for (int j=0; j<poss_full; j++) {
        superprob[j] /= total;
    }

    /* EM LOOP: assign ambiguous data based on p, then re-estimate p
       (same fixed-iteration E/M scheme as the per-block loop above) */
    iter=0;
    while (iter<20) {
        /* compute probabilities of each possible observation */
        for (int i=0; i<num_indivs; i++) {
            total=0.0;
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p = (float)
                        (superprob[superdata[i].superposs[k].h1]*
                        superprob[superdata[i].superposs[k].h2]);
                total+=superdata[i].superposs[k].p;
            }
            /* normalize */
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p /= total;
            }
        }

        /* re-estimate prob */
        for (int j=0; j<poss_full; j++) {
            superprob[j]=1e-10;
        }
        total=poss_full*1e-10;
        for (int i=0; i<num_indivs; i++) {
            for (int k=0; k<superdata[i].nsuper; k++) {
                superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p;
                superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p;
                total+=(2.0*superdata[i].superposs[k].p);
            }
        }
        /* normalize */
        for (int j=0; j<poss_full; j++) {
            superprob[j] /= total;
        }
        iter++;
    }
    //System.out.println("made it to 290");

    /* we're done - the indices of superprob now have to be decoded to reveal
       the actual haplotypes they represent */

    /* Enumeration theHaplos = haplos_present.elements();
       String tempHap;
       while(theHaplos.hasMoreElements()) {
           tempHap = (String)theHaplos.nextElement();
           System.out.println(tempHap);
       } */

    double[] tempT,totalT,tempU,totalU;
    Vector obsT = new Vector();   // transmitted counts, parallel to returned haplotype list
    Vector obsU = new Vector();   // untransmitted counts
    if(trioPhasing) {
        // NOTE(review): best1/best2 are computed and h1/h2 declared but none
        // of them are read afterwards -- dead bookkeeping from the port
        int best1=0,best2=0,h1,h2;
        double tempnorm=0,product,bestProduct=0;
        tempT = new double[poss_full];
        totalT = new double[poss_full];
        tempU = new double[poss_full];
        totalU = new double[poss_full];
        // superdata[i] and superdata[i+1] are the two parents of trio i/2
        for (int i=0; i<numTrios*2; i+=2) {
            best1=0; best2=0;
            bestProduct=-999.999;
            tempnorm=0.00;
            for (int n=0; n<superdata[i].nsuper; n++) {
                for (int m=0; m<superdata[i+1].nsuper; m++) {
                    // only parental phase pairs consistent with the kid's
                    // genotypes contribute
                    if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) {
                        product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p;
                        if (product > bestProduct) {
                            best1=n;
                            best2=m;
                            bestProduct=product;
                        }
                        // h1 counts as transmitted, h2 as untransmitted;
                        // homozygous parents (h1==h2) are uninformative
                        if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) {
                            tempT[superdata[i].superposs[n].h1]+=product;
                            tempU[superdata[i].superposs[n].h2]+=product;
                        }
                        if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) {
                            tempT[superdata[i+1].superposs[m].h1]+=product;
                            tempU[superdata[i+1].superposs[m].h2]+=product;
                        }
                        /* normalize by all possibilities, even double hom */
                        tempnorm+=product;
                    }
                }
            }
            // fold this trio's normalized counts into the totals, then reset
            // the per-trio scratch arrays
            if (tempnorm > 0.00) {
                for (int j=0; j<poss_full; j++) {
                    if (tempT[j] > 0.0000 || tempU[j] > 0.0000) {
                        totalT[j] += (tempT[j]/tempnorm);
                        totalU[j] += (tempU[j]/tempnorm);
                        tempT[j]=tempU[j]=0.0000;
                    }
                }
                tempnorm=0.00;
            }
        }
        // emit T/U counts only for the haplotypes that are returned below
        // (same superprob > .001 filter, so the vectors stay parallel)
        for (int j = 0; j <poss_full; j++){
            if (superprob[j] > .001) {
                obsT.add(new Double(totalT[j]));
                obsU.add(new Double(totalU[j]));
            }
        }
    }

    EMReturn results;
    Vector haplos_present = new Vector();
    Vector haplo_freq= new Vector();
    for (int j=0; j<poss_full; j++) {
        if (superprob[j] > .001) {
            haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            haplo_freq.addElement(new Double(superprob[j]));
        }
    }

    // unbox the frequency vector into the primitive array EMReturn expects
    double[] freqs = new double[haplo_freq.size()];
    for(int j=0;j<haplo_freq.size();j++) {
        freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue();
    }

    if (trioPhasing){
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU);
    }else{
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs);
    }
    return results;

    /* dead C code retained from the port (would have honored dump_phased_haplos):
    if (dump_phased_haplos) {
        if ((fpdump=fopen("emphased.haps","w"))!=NULL) {
            for (i=0; i<num_indivs; i++) {
                best=0;
                for (k=0; k<superdata[i].nsuper; k++) {
                    if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) {
                        best=k;
                    }
                }
                h1 = superdata[i].superposs[best].h1;
                h2 = superdata[i].superposs[best].h2;
                fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist));
                fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist));
            }
            fclose(fpdump);
        }
    }
    */
    //return 0;
}
/**
 * Estimates haplotype frequencies with an expectation-maximisation (EM)
 * algorithm using a partition-ligation strategy: a standard 20-iteration EM
 * is first run independently on each block of loci, haplotypes with
 * estimated frequency > .001 survive, and a second EM pass is then run over
 * the cross-block combinations ("super" haplotypes).
 *
 * NOTE(review): this method writes the instance fields data, superdata,
 * prob and ambighet — it is not safe to call concurrently on one instance.
 * NOTE(review): this exact method appears several times in this file;
 * duplicate definitions will not compile in one class — likely a file
 * merge/extraction artifact that should be deduplicated.
 *
 * @param input_haplos       genotype rows, two consecutive rows per
 *                           individual (input_haplos.length/2 individuals),
 *                           one byte per locus
 * @param max_missing        unused in this method body — TODO confirm intent
 * @param dump_phased_haplos unused; the dump code at the bottom is
 *                           commented out (C-era leftover)
 * @param block_size         number of loci in each partition block
 * @param numTrios           rows 2*i and 2*i+1 for i < numTrios are treated
 *                           as the two parents of trio i — assumed layout,
 *                           TODO confirm against caller
 * @return EMReturn holding the haplotypes with final frequency > .001,
 *         their frequencies, and (trio phasing) per-haplotype transmitted /
 *         untransmitted observation totals
 * @throws HaploViewException if num_loci exceeds MAXLOCI
 *         (NOTE(review): guard tests the TOTAL locus count but the message
 *         says "single block" — one of the two looks wrong)
 */
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{
    int num_poss, iter;//, maxk, numk;
    double total;//, maxprob;
    int block, start_locus, end_locus, biggest_block_size;
    int poss_full;//, best, h1, h2;
    int num_indivs=0;
    // trio phasing is unconditionally on in this version
    boolean trioPhasing = true;
    int num_blocks = block_size.length;
    int num_haplos = input_haplos.length;
    int num_loci = input_haplos[0].length;
    Recovery tempRec;

    if (num_loci > MAXLOCI){
        throw new HaploViewException("Too many loci in a single block (> 100)");
    }

    //figure out the size of the biggest block
    biggest_block_size=block_size[0];
    for (int i=1; i<num_blocks; i++) {
        if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i];
    }

    // 2^biggest_block_size possible haplotypes within a block; per-block
    // arrays below are sized for the worst-case block
    num_poss = two_n[biggest_block_size];
    data = new OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) data[i]= new OBS();
    superdata = new SUPER_OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks);

    double[][] hprob = new double[num_blocks][num_poss];   // per-block surviving-haplotype probabilities
    int[][] hlist = new int[num_blocks][num_poss];         // per-block surviving-haplotype codes
    int[] num_hlist = new int[num_blocks];                 // count of survivors per block
    int[] hint = new int[num_poss];                        // haplotype code -> index in hlist, or -1
    prob = new double[num_poss];

    /* for trio option */
    if (trioPhasing) {
        // one ambiguity row per trio (num_haplos/4 trios' worth of rows)
        ambighet = new int[(num_haplos/4)][num_loci];
        store_dhet_status(num_haplos,num_loci,input_haplos);
    }

    end_locus=-1;
    //System.out.println("made it to 110");
    //now we loop through the blocks
    for (block=0; block<num_blocks; block++) {
        start_locus=end_locus+1;
        end_locus=start_locus+block_size[block]-1;
        num_poss=two_n[block_size[block]];
        //read_observations initializes the values in data[] (array of OBS)
        num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus);

        // start prob array with probabilities from full observations
        for (int j=0; j<num_poss; j++) {
            prob[j]=PSEUDOCOUNT;
        }
        total=(double)num_poss;
        total *= PSEUDOCOUNT;

        /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of
           every haplotype - i.e., flat when nothing is known, close to
           phase known if a great deal is known */
        for (int i=0; i<num_indivs; i++) {
            if (data[i].nposs==1) {
                // unambiguous individual: count both of its haplotypes once
                tempRec = (Recovery)data[i].poss.elementAt(0);
                prob[tempRec.h1]+=1.0;
                prob[tempRec.h2]+=1.0;
                total+=2.0;
            }
        }
        // normalize
        for (int j=0; j<num_poss; j++) {
            prob[j] /= total;
        }

        // EM LOOP: assign ambiguous data based on p, then re-estimate p
        // (fixed 20 iterations, no convergence test)
        iter=0;
        while (iter<20) {
            // E-step: compute probabilities of each possible observation
            for (int i=0; i<num_indivs; i++) {
                total=0.0;
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]);
                    total+=tempRec.p;
                }
                // normalize per individual
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p /= total;
                }
            }

            // M-step: re-estimate prob (1e-10 floor avoids zero lock-in)
            for (int j=0; j<num_poss; j++) {
                prob[j]=1e-10;
            }
            total=num_poss*1e-10;
            for (int i=0; i<num_indivs; i++) {
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    prob[tempRec.h1]+=tempRec.p;
                    prob[tempRec.h2]+=tempRec.p;
                    total+=(2.0*(tempRec.p));
                }
            }
            // normalize
            for (int j=0; j<num_poss; j++) {
                prob[j] /= total;
            }
            iter++;
        }

        // printf("FINAL PROBABILITIES:\n");
        // keep only haplotypes whose block-level frequency exceeds .001
        int m=0;
        for (int j=0; j<num_poss; j++) {
            hint[j]=-1;
            if (prob[j] > .001) {
                // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]);
                hlist[block][m]=j;
                hprob[block][m]=prob[j];
                hint[j]=m;
                m++;
            }
        }
        num_hlist[block]=m;
        // store current block results in super obs structure
        store_block_haplos(hlist, hprob, hint, block, num_indivs);
    } /* for each block */

    // number of cross-block combinations of surviving haplotypes
    // NOTE(review): plain int product — could overflow with many blocks;
    // the commented-out guard below was never ported from C
    poss_full=1;
    for (block=0; block<num_blocks; block++) {
        poss_full *= num_hlist[block];
    }

    //TODO:System.out.println(poss_full);
    /* LIGATE and finish this mess :) */
    /* if (poss_full > 1000000) {
       /* what we really need to do is go through and pare back to using
          a smaller number (e.g., > .002, .005)
       //printf("too many possibilities: %d\n",poss_full);
       return(-5);
       }*/

    double[] superprob = new double[poss_full];

    create_super_haplos(num_indivs,num_blocks,num_hlist);

    /* run standard EM on supercombos */
    /* start prob array with probabilities from full observations */
    for (int j=0; j<poss_full; j++) {
        superprob[j]=PSEUDOCOUNT;
    }
    total=(double)poss_full;
    total *= PSEUDOCOUNT;
    //System.out.println("made it to 232");
    /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
       haplotype - i.e., flat when nothing is known, close to phase known
       if a great deal is known */
    for (int i=0; i<num_indivs; i++) {
        if (superdata[i].nsuper==1) {
            superprob[superdata[i].superposs[0].h1]+=1.0;
            superprob[superdata[i].superposs[0].h2]+=1.0;
            total+=2.0;
        }
    }
    /* normalize */
    for (int j=0; j<poss_full; j++) {
        superprob[j] /= total;
    }

    /* EM LOOP: assign ambiguous data based on p, then re-estimate p
       (same fixed-20-iteration scheme as the per-block pass above) */
    iter=0;
    while (iter<20) {
        /* compute probabilities of each possible observation */
        for (int i=0; i<num_indivs; i++) {
            total=0.0;
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p = (float)
                        (superprob[superdata[i].superposs[k].h1]*
                        superprob[superdata[i].superposs[k].h2]);
                total+=superdata[i].superposs[k].p;
            }
            /* normalize */
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p /= total;
            }
        }

        /* re-estimate prob */
        for (int j=0; j<poss_full; j++) {
            superprob[j]=1e-10;
        }
        total=poss_full*1e-10;
        for (int i=0; i<num_indivs; i++) {
            for (int k=0; k<superdata[i].nsuper; k++) {
                superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p;
                superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p;
                total+=(2.0*superdata[i].superposs[k].p);
            }
        }
        /* normalize */
        for (int j=0; j<poss_full; j++) {
            superprob[j] /= total;
        }
        iter++;
    }
    //System.out.println("made it to 290");

    /* we're done - the indices of superprob now have to be decoded to
       reveal the actual haplotypes they represent */

    /* Enumeration theHaplos = haplos_present.elements();
       String tempHap;
       while(theHaplos.hasMoreElements()) {
       tempHap = (String)theHaplos.nextElement();
       System.out.println(tempHap);
       } */

    // trio bookkeeping: accumulate transmitted (T) / untransmitted (U)
    // haplotype mass per trio, normalized per trio
    double[] tempT,totalT,tempU,totalU;
    Vector obsT = new Vector();
    Vector obsU = new Vector();
    if(trioPhasing) {
        // NOTE(review): h1,h2 are declared but never used here, and
        // best1/best2/bestProduct are tracked but never read afterwards —
        // dead bookkeeping, presumably left over from the C original
        int best1=0,best2=0,h1,h2;
        double tempnorm=0,product,bestProduct=0;
        tempT = new double[poss_full];
        totalT = new double[poss_full];
        tempU = new double[poss_full];
        totalU = new double[poss_full];
        // i and i+1 index the two parents of trio i/2
        for (int i=0; i<numTrios*2; i+=2) {
            best1=0;
            best2=0;
            bestProduct=-999.999;
            tempnorm=0.00;
            for (int n=0; n<superdata[i].nsuper; n++) {
                for (int m=0; m<superdata[i+1].nsuper; m++) {
                    // kid_consistent presumably checks that this pair of
                    // transmitted haplotypes is compatible with the child's
                    // genotype — TODO confirm semantics
                    if (kid_consistent(superdata[i].superposs[n].h1,
                            superdata[i+1].superposs[m].h1,num_blocks,
                            block_size,hlist,num_hlist,i/2,num_loci)) {
                        product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p;
                        if (product > bestProduct) {
                            best1=n;
                            best2=m;
                            bestProduct=product;
                        }
                        // only heterozygous parents contribute T/U counts
                        // (h1 is treated as transmitted, h2 untransmitted)
                        if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) {
                            tempT[superdata[i].superposs[n].h1]+=product;
                            tempU[superdata[i].superposs[n].h2]+=product;
                        }
                        if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) {
                            tempT[superdata[i+1].superposs[m].h1]+=product;
                            tempU[superdata[i+1].superposs[m].h2]+=product;
                        }
                        /* normalize by all possibilities, even double hom */
                        tempnorm+=product;
                    }
                }
            }
            // fold this trio's normalized contribution into the running
            // totals, clearing the per-trio scratch entries as we go
            if (tempnorm > 0.00) {
                for (int j=0; j<poss_full; j++) {
                    if (tempT[j] > 0.0000 || tempU[j] > 0.0000) {
                        totalT[j] += (tempT[j]/tempnorm);
                        totalU[j] += (tempU[j]/tempnorm);
                        tempT[j]=tempU[j]=0.0000;
                    }
                }
                tempnorm=0.00;
            }
        }
        // export T/U totals only for haplotypes that survive the frequency
        // cutoff, in the same order as haplos_present below
        for (int j = 0; j <poss_full; j++){
            if (superprob[j] > .001) {
                obsT.add(new Double(totalT[j]));
                obsU.add(new Double(totalU[j]));
            }
        }
    }

    // decode the surviving super-haplotype codes into allele arrays and
    // collect their frequencies
    EMReturn results;
    Vector haplos_present = new Vector();
    Vector haplo_freq= new Vector();
    for (int j=0; j<poss_full; j++) {
        if (superprob[j] > .001) {
            haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            haplo_freq.addElement(new Double(superprob[j]));
        }
    }
    double[] freqs = new double[haplo_freq.size()];
    for(int j=0;j<haplo_freq.size();j++) {
        freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue();
    }
    if (trioPhasing){
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU);
    }else{
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs);
    }
    return results;

    /* if (dump_phased_haplos) {
       if ((fpdump=fopen("emphased.haps","w"))!=NULL) {
       for (i=0; i<num_indivs; i++) {
       best=0;
       for (k=0; k<superdata[i].nsuper; k++) {
       if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) {
       best=k;
       }
       }
       h1 = superdata[i].superposs[best].h1;
       h2 = superdata[i].superposs[best].h2;
       fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist));
       fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist));
       }
       fclose(fpdump);
       }
       } */
    //return 0;
}
// (extraction artifact removed: stray dataset row id "1,110,693" was interleaved between duplicate method copies)
/**
 * Estimates haplotype frequencies with an expectation-maximisation (EM)
 * algorithm using a partition-ligation strategy: a standard 20-iteration EM
 * is first run independently on each block of loci, haplotypes with
 * estimated frequency > .001 survive, and a second EM pass is then run over
 * the cross-block combinations ("super" haplotypes).
 *
 * NOTE(review): this method writes the instance fields data, superdata,
 * prob and ambighet — it is not safe to call concurrently on one instance.
 * NOTE(review): this exact method appears several times in this file;
 * duplicate definitions will not compile in one class — likely a file
 * merge/extraction artifact that should be deduplicated.
 *
 * @param input_haplos       genotype rows, two consecutive rows per
 *                           individual (input_haplos.length/2 individuals),
 *                           one byte per locus
 * @param max_missing        unused in this method body — TODO confirm intent
 * @param dump_phased_haplos unused; the dump code at the bottom is
 *                           commented out (C-era leftover)
 * @param block_size         number of loci in each partition block
 * @param numTrios           rows 2*i and 2*i+1 for i < numTrios are treated
 *                           as the two parents of trio i — assumed layout,
 *                           TODO confirm against caller
 * @return EMReturn holding the haplotypes with final frequency > .001,
 *         their frequencies, and (trio phasing) per-haplotype transmitted /
 *         untransmitted observation totals
 * @throws HaploViewException if num_loci exceeds MAXLOCI
 *         (NOTE(review): guard tests the TOTAL locus count but the message
 *         says "single block" — one of the two looks wrong)
 */
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{
    int num_poss, iter;//, maxk, numk;
    double total;//, maxprob;
    int block, start_locus, end_locus, biggest_block_size;
    int poss_full;//, best, h1, h2;
    int num_indivs=0;
    // trio phasing is unconditionally on in this version
    boolean trioPhasing = true;
    int num_blocks = block_size.length;
    int num_haplos = input_haplos.length;
    int num_loci = input_haplos[0].length;
    Recovery tempRec;

    if (num_loci > MAXLOCI){
        throw new HaploViewException("Too many loci in a single block (> 100)");
    }

    //figure out the size of the biggest block
    biggest_block_size=block_size[0];
    for (int i=1; i<num_blocks; i++) {
        if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i];
    }

    // 2^biggest_block_size possible haplotypes within a block; per-block
    // arrays below are sized for the worst-case block
    num_poss = two_n[biggest_block_size];
    data = new OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) data[i]= new OBS();
    superdata = new SUPER_OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks);

    double[][] hprob = new double[num_blocks][num_poss];   // per-block surviving-haplotype probabilities
    int[][] hlist = new int[num_blocks][num_poss];         // per-block surviving-haplotype codes
    int[] num_hlist = new int[num_blocks];                 // count of survivors per block
    int[] hint = new int[num_poss];                        // haplotype code -> index in hlist, or -1
    prob = new double[num_poss];

    /* for trio option */
    if (trioPhasing) {
        // one ambiguity row per trio (num_haplos/4 trios' worth of rows)
        ambighet = new int[(num_haplos/4)][num_loci];
        store_dhet_status(num_haplos,num_loci,input_haplos);
    }

    end_locus=-1;
    //System.out.println("made it to 110");
    //now we loop through the blocks
    for (block=0; block<num_blocks; block++) {
        start_locus=end_locus+1;
        end_locus=start_locus+block_size[block]-1;
        num_poss=two_n[block_size[block]];
        //read_observations initializes the values in data[] (array of OBS)
        num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus);

        // start prob array with probabilities from full observations
        for (int j=0; j<num_poss; j++) {
            prob[j]=PSEUDOCOUNT;
        }
        total=(double)num_poss;
        total *= PSEUDOCOUNT;

        /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of
           every haplotype - i.e., flat when nothing is known, close to
           phase known if a great deal is known */
        for (int i=0; i<num_indivs; i++) {
            if (data[i].nposs==1) {
                // unambiguous individual: count both of its haplotypes once
                tempRec = (Recovery)data[i].poss.elementAt(0);
                prob[tempRec.h1]+=1.0;
                prob[tempRec.h2]+=1.0;
                total+=2.0;
            }
        }
        // normalize
        for (int j=0; j<num_poss; j++) {
            prob[j] /= total;
        }

        // EM LOOP: assign ambiguous data based on p, then re-estimate p
        // (fixed 20 iterations, no convergence test)
        iter=0;
        while (iter<20) {
            // E-step: compute probabilities of each possible observation
            for (int i=0; i<num_indivs; i++) {
                total=0.0;
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]);
                    total+=tempRec.p;
                }
                // normalize per individual
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p /= total;
                }
            }

            // M-step: re-estimate prob (1e-10 floor avoids zero lock-in)
            for (int j=0; j<num_poss; j++) {
                prob[j]=1e-10;
            }
            total=num_poss*1e-10;
            for (int i=0; i<num_indivs; i++) {
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    prob[tempRec.h1]+=tempRec.p;
                    prob[tempRec.h2]+=tempRec.p;
                    total+=(2.0*(tempRec.p));
                }
            }
            // normalize
            for (int j=0; j<num_poss; j++) {
                prob[j] /= total;
            }
            iter++;
        }

        // printf("FINAL PROBABILITIES:\n");
        // keep only haplotypes whose block-level frequency exceeds .001
        int m=0;
        for (int j=0; j<num_poss; j++) {
            hint[j]=-1;
            if (prob[j] > .001) {
                // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]);
                hlist[block][m]=j;
                hprob[block][m]=prob[j];
                hint[j]=m;
                m++;
            }
        }
        num_hlist[block]=m;
        // store current block results in super obs structure
        store_block_haplos(hlist, hprob, hint, block, num_indivs);
    } /* for each block */

    // number of cross-block combinations of surviving haplotypes
    // NOTE(review): plain int product — could overflow with many blocks;
    // the commented-out guard below was never ported from C
    poss_full=1;
    for (block=0; block<num_blocks; block++) {
        poss_full *= num_hlist[block];
    }

    //TODO:System.out.println(poss_full);
    /* LIGATE and finish this mess :) */
    /* if (poss_full > 1000000) {
       /* what we really need to do is go through and pare back to using
          a smaller number (e.g., > .002, .005)
       //printf("too many possibilities: %d\n",poss_full);
       return(-5);
       }*/

    double[] superprob = new double[poss_full];

    create_super_haplos(num_indivs,num_blocks,num_hlist);

    /* run standard EM on supercombos */
    /* start prob array with probabilities from full observations */
    for (int j=0; j<poss_full; j++) {
        superprob[j]=PSEUDOCOUNT;
    }
    total=(double)poss_full;
    total *= PSEUDOCOUNT;
    //System.out.println("made it to 232");
    /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
       haplotype - i.e., flat when nothing is known, close to phase known
       if a great deal is known */
    for (int i=0; i<num_indivs; i++) {
        if (superdata[i].nsuper==1) {
            superprob[superdata[i].superposs[0].h1]+=1.0;
            superprob[superdata[i].superposs[0].h2]+=1.0;
            total+=2.0;
        }
    }
    /* normalize */
    for (int j=0; j<poss_full; j++) {
        superprob[j] /= total;
    }

    /* EM LOOP: assign ambiguous data based on p, then re-estimate p
       (same fixed-20-iteration scheme as the per-block pass above) */
    iter=0;
    while (iter<20) {
        /* compute probabilities of each possible observation */
        for (int i=0; i<num_indivs; i++) {
            total=0.0;
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p = (float)
                        (superprob[superdata[i].superposs[k].h1]*
                        superprob[superdata[i].superposs[k].h2]);
                total+=superdata[i].superposs[k].p;
            }
            /* normalize */
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p /= total;
            }
        }

        /* re-estimate prob */
        for (int j=0; j<poss_full; j++) {
            superprob[j]=1e-10;
        }
        total=poss_full*1e-10;
        for (int i=0; i<num_indivs; i++) {
            for (int k=0; k<superdata[i].nsuper; k++) {
                superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p;
                superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p;
                total+=(2.0*superdata[i].superposs[k].p);
            }
        }
        /* normalize */
        for (int j=0; j<poss_full; j++) {
            superprob[j] /= total;
        }
        iter++;
    }
    //System.out.println("made it to 290");

    /* we're done - the indices of superprob now have to be decoded to
       reveal the actual haplotypes they represent */

    /* Enumeration theHaplos = haplos_present.elements();
       String tempHap;
       while(theHaplos.hasMoreElements()) {
       tempHap = (String)theHaplos.nextElement();
       System.out.println(tempHap);
       } */

    // trio bookkeeping: accumulate transmitted (T) / untransmitted (U)
    // haplotype mass per trio, normalized per trio
    double[] tempT,totalT,tempU,totalU;
    Vector obsT = new Vector();
    Vector obsU = new Vector();
    if(trioPhasing) {
        // NOTE(review): h1,h2 are declared but never used here, and
        // best1/best2/bestProduct are tracked but never read afterwards —
        // dead bookkeeping, presumably left over from the C original
        int best1=0,best2=0,h1,h2;
        double tempnorm=0,product,bestProduct=0;
        tempT = new double[poss_full];
        totalT = new double[poss_full];
        tempU = new double[poss_full];
        totalU = new double[poss_full];
        // i and i+1 index the two parents of trio i/2
        for (int i=0; i<numTrios*2; i+=2) {
            best1=0;
            best2=0;
            bestProduct=-999.999;
            tempnorm=0.00;
            for (int n=0; n<superdata[i].nsuper; n++) {
                for (int m=0; m<superdata[i+1].nsuper; m++) {
                    // kid_consistent presumably checks that this pair of
                    // transmitted haplotypes is compatible with the child's
                    // genotype — TODO confirm semantics
                    if (kid_consistent(superdata[i].superposs[n].h1,
                            superdata[i+1].superposs[m].h1,num_blocks,
                            block_size,hlist,num_hlist,i/2,num_loci)) {
                        product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p;
                        if (product > bestProduct) {
                            best1=n;
                            best2=m;
                            bestProduct=product;
                        }
                        // only heterozygous parents contribute T/U counts
                        // (h1 is treated as transmitted, h2 untransmitted)
                        if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) {
                            tempT[superdata[i].superposs[n].h1]+=product;
                            tempU[superdata[i].superposs[n].h2]+=product;
                        }
                        if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) {
                            tempT[superdata[i+1].superposs[m].h1]+=product;
                            tempU[superdata[i+1].superposs[m].h2]+=product;
                        }
                        /* normalize by all possibilities, even double hom */
                        tempnorm+=product;
                    }
                }
            }
            // fold this trio's normalized contribution into the running
            // totals, clearing the per-trio scratch entries as we go
            if (tempnorm > 0.00) {
                for (int j=0; j<poss_full; j++) {
                    if (tempT[j] > 0.0000 || tempU[j] > 0.0000) {
                        totalT[j] += (tempT[j]/tempnorm);
                        totalU[j] += (tempU[j]/tempnorm);
                        tempT[j]=tempU[j]=0.0000;
                    }
                }
                tempnorm=0.00;
            }
        }
        // export T/U totals only for haplotypes that survive the frequency
        // cutoff, in the same order as haplos_present below
        for (int j = 0; j <poss_full; j++){
            if (superprob[j] > .001) {
                obsT.add(new Double(totalT[j]));
                obsU.add(new Double(totalU[j]));
            }
        }
    }

    // decode the surviving super-haplotype codes into allele arrays and
    // collect their frequencies
    EMReturn results;
    Vector haplos_present = new Vector();
    Vector haplo_freq= new Vector();
    for (int j=0; j<poss_full; j++) {
        if (superprob[j] > .001) {
            haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            haplo_freq.addElement(new Double(superprob[j]));
        }
    }
    double[] freqs = new double[haplo_freq.size()];
    for(int j=0;j<haplo_freq.size();j++) {
        freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue();
    }
    if (trioPhasing){
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU);
    }else{
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs);
    }
    return results;

    /* if (dump_phased_haplos) {
       if ((fpdump=fopen("emphased.haps","w"))!=NULL) {
       for (i=0; i<num_indivs; i++) {
       best=0;
       for (k=0; k<superdata[i].nsuper; k++) {
       if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) {
       best=k;
       }
       }
       h1 = superdata[i].superposs[best].h1;
       h2 = superdata[i].superposs[best].h2;
       fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist));
       fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist));
       }
       fclose(fpdump);
       }
       } */
    //return 0;
}
/**
 * Estimates haplotype frequencies with an expectation-maximisation (EM)
 * algorithm using a partition-ligation strategy: a standard 20-iteration EM
 * is first run independently on each block of loci, haplotypes with
 * estimated frequency > .001 survive, and a second EM pass is then run over
 * the cross-block combinations ("super" haplotypes).
 *
 * NOTE(review): this method writes the instance fields data, superdata,
 * prob and ambighet — it is not safe to call concurrently on one instance.
 * NOTE(review): this exact method appears several times in this file;
 * duplicate definitions will not compile in one class — likely a file
 * merge/extraction artifact that should be deduplicated.
 *
 * @param input_haplos       genotype rows, two consecutive rows per
 *                           individual (input_haplos.length/2 individuals),
 *                           one byte per locus
 * @param max_missing        unused in this method body — TODO confirm intent
 * @param dump_phased_haplos unused; the dump code at the bottom is
 *                           commented out (C-era leftover)
 * @param block_size         number of loci in each partition block
 * @param numTrios           rows 2*i and 2*i+1 for i < numTrios are treated
 *                           as the two parents of trio i — assumed layout,
 *                           TODO confirm against caller
 * @return EMReturn holding the haplotypes with final frequency > .001,
 *         their frequencies, and (trio phasing) per-haplotype transmitted /
 *         untransmitted observation totals
 * @throws HaploViewException if num_loci exceeds MAXLOCI
 *         (NOTE(review): guard tests the TOTAL locus count but the message
 *         says "single block" — one of the two looks wrong)
 */
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{
    int num_poss, iter;//, maxk, numk;
    double total;//, maxprob;
    int block, start_locus, end_locus, biggest_block_size;
    int poss_full;//, best, h1, h2;
    int num_indivs=0;
    // trio phasing is unconditionally on in this version
    boolean trioPhasing = true;
    int num_blocks = block_size.length;
    int num_haplos = input_haplos.length;
    int num_loci = input_haplos[0].length;
    Recovery tempRec;

    if (num_loci > MAXLOCI){
        throw new HaploViewException("Too many loci in a single block (> 100)");
    }

    //figure out the size of the biggest block
    biggest_block_size=block_size[0];
    for (int i=1; i<num_blocks; i++) {
        if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i];
    }

    // 2^biggest_block_size possible haplotypes within a block; per-block
    // arrays below are sized for the worst-case block
    num_poss = two_n[biggest_block_size];
    data = new OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) data[i]= new OBS();
    superdata = new SUPER_OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks);

    double[][] hprob = new double[num_blocks][num_poss];   // per-block surviving-haplotype probabilities
    int[][] hlist = new int[num_blocks][num_poss];         // per-block surviving-haplotype codes
    int[] num_hlist = new int[num_blocks];                 // count of survivors per block
    int[] hint = new int[num_poss];                        // haplotype code -> index in hlist, or -1
    prob = new double[num_poss];

    /* for trio option */
    if (trioPhasing) {
        // one ambiguity row per trio (num_haplos/4 trios' worth of rows)
        ambighet = new int[(num_haplos/4)][num_loci];
        store_dhet_status(num_haplos,num_loci,input_haplos);
    }

    end_locus=-1;
    //System.out.println("made it to 110");
    //now we loop through the blocks
    for (block=0; block<num_blocks; block++) {
        start_locus=end_locus+1;
        end_locus=start_locus+block_size[block]-1;
        num_poss=two_n[block_size[block]];
        //read_observations initializes the values in data[] (array of OBS)
        num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus);

        // start prob array with probabilities from full observations
        for (int j=0; j<num_poss; j++) {
            prob[j]=PSEUDOCOUNT;
        }
        total=(double)num_poss;
        total *= PSEUDOCOUNT;

        /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of
           every haplotype - i.e., flat when nothing is known, close to
           phase known if a great deal is known */
        for (int i=0; i<num_indivs; i++) {
            if (data[i].nposs==1) {
                // unambiguous individual: count both of its haplotypes once
                tempRec = (Recovery)data[i].poss.elementAt(0);
                prob[tempRec.h1]+=1.0;
                prob[tempRec.h2]+=1.0;
                total+=2.0;
            }
        }
        // normalize
        for (int j=0; j<num_poss; j++) {
            prob[j] /= total;
        }

        // EM LOOP: assign ambiguous data based on p, then re-estimate p
        // (fixed 20 iterations, no convergence test)
        iter=0;
        while (iter<20) {
            // E-step: compute probabilities of each possible observation
            for (int i=0; i<num_indivs; i++) {
                total=0.0;
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]);
                    total+=tempRec.p;
                }
                // normalize per individual
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p /= total;
                }
            }

            // M-step: re-estimate prob (1e-10 floor avoids zero lock-in)
            for (int j=0; j<num_poss; j++) {
                prob[j]=1e-10;
            }
            total=num_poss*1e-10;
            for (int i=0; i<num_indivs; i++) {
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    prob[tempRec.h1]+=tempRec.p;
                    prob[tempRec.h2]+=tempRec.p;
                    total+=(2.0*(tempRec.p));
                }
            }
            // normalize
            for (int j=0; j<num_poss; j++) {
                prob[j] /= total;
            }
            iter++;
        }

        // printf("FINAL PROBABILITIES:\n");
        // keep only haplotypes whose block-level frequency exceeds .001
        int m=0;
        for (int j=0; j<num_poss; j++) {
            hint[j]=-1;
            if (prob[j] > .001) {
                // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]);
                hlist[block][m]=j;
                hprob[block][m]=prob[j];
                hint[j]=m;
                m++;
            }
        }
        num_hlist[block]=m;
        // store current block results in super obs structure
        store_block_haplos(hlist, hprob, hint, block, num_indivs);
    } /* for each block */

    // number of cross-block combinations of surviving haplotypes
    // NOTE(review): plain int product — could overflow with many blocks;
    // the commented-out guard below was never ported from C
    poss_full=1;
    for (block=0; block<num_blocks; block++) {
        poss_full *= num_hlist[block];
    }

    //TODO:System.out.println(poss_full);
    /* LIGATE and finish this mess :) */
    /* if (poss_full > 1000000) {
       /* what we really need to do is go through and pare back to using
          a smaller number (e.g., > .002, .005)
       //printf("too many possibilities: %d\n",poss_full);
       return(-5);
       }*/

    double[] superprob = new double[poss_full];

    create_super_haplos(num_indivs,num_blocks,num_hlist);

    /* run standard EM on supercombos */
    /* start prob array with probabilities from full observations */
    for (int j=0; j<poss_full; j++) {
        superprob[j]=PSEUDOCOUNT;
    }
    total=(double)poss_full;
    total *= PSEUDOCOUNT;
    //System.out.println("made it to 232");
    /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
       haplotype - i.e., flat when nothing is known, close to phase known
       if a great deal is known */
    for (int i=0; i<num_indivs; i++) {
        if (superdata[i].nsuper==1) {
            superprob[superdata[i].superposs[0].h1]+=1.0;
            superprob[superdata[i].superposs[0].h2]+=1.0;
            total+=2.0;
        }
    }
    /* normalize */
    for (int j=0; j<poss_full; j++) {
        superprob[j] /= total;
    }

    /* EM LOOP: assign ambiguous data based on p, then re-estimate p
       (same fixed-20-iteration scheme as the per-block pass above) */
    iter=0;
    while (iter<20) {
        /* compute probabilities of each possible observation */
        for (int i=0; i<num_indivs; i++) {
            total=0.0;
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p = (float)
                        (superprob[superdata[i].superposs[k].h1]*
                        superprob[superdata[i].superposs[k].h2]);
                total+=superdata[i].superposs[k].p;
            }
            /* normalize */
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p /= total;
            }
        }

        /* re-estimate prob */
        for (int j=0; j<poss_full; j++) {
            superprob[j]=1e-10;
        }
        total=poss_full*1e-10;
        for (int i=0; i<num_indivs; i++) {
            for (int k=0; k<superdata[i].nsuper; k++) {
                superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p;
                superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p;
                total+=(2.0*superdata[i].superposs[k].p);
            }
        }
        /* normalize */
        for (int j=0; j<poss_full; j++) {
            superprob[j] /= total;
        }
        iter++;
    }
    //System.out.println("made it to 290");

    /* we're done - the indices of superprob now have to be decoded to
       reveal the actual haplotypes they represent */

    /* Enumeration theHaplos = haplos_present.elements();
       String tempHap;
       while(theHaplos.hasMoreElements()) {
       tempHap = (String)theHaplos.nextElement();
       System.out.println(tempHap);
       } */

    // trio bookkeeping: accumulate transmitted (T) / untransmitted (U)
    // haplotype mass per trio, normalized per trio
    double[] tempT,totalT,tempU,totalU;
    Vector obsT = new Vector();
    Vector obsU = new Vector();
    if(trioPhasing) {
        // NOTE(review): h1,h2 are declared but never used here, and
        // best1/best2/bestProduct are tracked but never read afterwards —
        // dead bookkeeping, presumably left over from the C original
        int best1=0,best2=0,h1,h2;
        double tempnorm=0,product,bestProduct=0;
        tempT = new double[poss_full];
        totalT = new double[poss_full];
        tempU = new double[poss_full];
        totalU = new double[poss_full];
        // i and i+1 index the two parents of trio i/2
        for (int i=0; i<numTrios*2; i+=2) {
            best1=0;
            best2=0;
            bestProduct=-999.999;
            tempnorm=0.00;
            for (int n=0; n<superdata[i].nsuper; n++) {
                for (int m=0; m<superdata[i+1].nsuper; m++) {
                    // kid_consistent presumably checks that this pair of
                    // transmitted haplotypes is compatible with the child's
                    // genotype — TODO confirm semantics
                    if (kid_consistent(superdata[i].superposs[n].h1,
                            superdata[i+1].superposs[m].h1,num_blocks,
                            block_size,hlist,num_hlist,i/2,num_loci)) {
                        product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p;
                        if (product > bestProduct) {
                            best1=n;
                            best2=m;
                            bestProduct=product;
                        }
                        // only heterozygous parents contribute T/U counts
                        // (h1 is treated as transmitted, h2 untransmitted)
                        if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) {
                            tempT[superdata[i].superposs[n].h1]+=product;
                            tempU[superdata[i].superposs[n].h2]+=product;
                        }
                        if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) {
                            tempT[superdata[i+1].superposs[m].h1]+=product;
                            tempU[superdata[i+1].superposs[m].h2]+=product;
                        }
                        /* normalize by all possibilities, even double hom */
                        tempnorm+=product;
                    }
                }
            }
            // fold this trio's normalized contribution into the running
            // totals, clearing the per-trio scratch entries as we go
            if (tempnorm > 0.00) {
                for (int j=0; j<poss_full; j++) {
                    if (tempT[j] > 0.0000 || tempU[j] > 0.0000) {
                        totalT[j] += (tempT[j]/tempnorm);
                        totalU[j] += (tempU[j]/tempnorm);
                        tempT[j]=tempU[j]=0.0000;
                    }
                }
                tempnorm=0.00;
            }
        }
        // export T/U totals only for haplotypes that survive the frequency
        // cutoff, in the same order as haplos_present below
        for (int j = 0; j <poss_full; j++){
            if (superprob[j] > .001) {
                obsT.add(new Double(totalT[j]));
                obsU.add(new Double(totalU[j]));
            }
        }
    }

    // decode the surviving super-haplotype codes into allele arrays and
    // collect their frequencies
    EMReturn results;
    Vector haplos_present = new Vector();
    Vector haplo_freq= new Vector();
    for (int j=0; j<poss_full; j++) {
        if (superprob[j] > .001) {
            haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            haplo_freq.addElement(new Double(superprob[j]));
        }
    }
    double[] freqs = new double[haplo_freq.size()];
    for(int j=0;j<haplo_freq.size();j++) {
        freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue();
    }
    if (trioPhasing){
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU);
    }else{
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs);
    }
    return results;

    /* if (dump_phased_haplos) {
       if ((fpdump=fopen("emphased.haps","w"))!=NULL) {
       for (i=0; i<num_indivs; i++) {
       best=0;
       for (k=0; k<superdata[i].nsuper; k++) {
       if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) {
       best=k;
       }
       }
       h1 = superdata[i].superposs[best].h1;
       h2 = superdata[i].superposs[best].h2;
       fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist));
       fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist));
       }
       fclose(fpdump);
       }
       } */
    //return 0;
}
// (extraction artifact removed: stray dataset row id "1,110,694" was interleaved between duplicate method copies)
/**
 * Estimates haplotype frequencies with an expectation-maximisation (EM)
 * algorithm using a partition-ligation strategy: a standard 20-iteration EM
 * is first run independently on each block of loci, haplotypes with
 * estimated frequency > .001 survive, and a second EM pass is then run over
 * the cross-block combinations ("super" haplotypes).
 *
 * NOTE(review): this method writes the instance fields data, superdata,
 * prob and ambighet — it is not safe to call concurrently on one instance.
 * NOTE(review): this exact method appears several times in this file;
 * duplicate definitions will not compile in one class — likely a file
 * merge/extraction artifact that should be deduplicated.
 *
 * @param input_haplos       genotype rows, two consecutive rows per
 *                           individual (input_haplos.length/2 individuals),
 *                           one byte per locus
 * @param max_missing        unused in this method body — TODO confirm intent
 * @param dump_phased_haplos unused; the dump code at the bottom is
 *                           commented out (C-era leftover)
 * @param block_size         number of loci in each partition block
 * @param numTrios           rows 2*i and 2*i+1 for i < numTrios are treated
 *                           as the two parents of trio i — assumed layout,
 *                           TODO confirm against caller
 * @return EMReturn holding the haplotypes with final frequency > .001,
 *         their frequencies, and (trio phasing) per-haplotype transmitted /
 *         untransmitted observation totals
 * @throws HaploViewException if num_loci exceeds MAXLOCI
 *         (NOTE(review): guard tests the TOTAL locus count but the message
 *         says "single block" — one of the two looks wrong)
 */
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{
    int num_poss, iter;//, maxk, numk;
    double total;//, maxprob;
    int block, start_locus, end_locus, biggest_block_size;
    int poss_full;//, best, h1, h2;
    int num_indivs=0;
    // trio phasing is unconditionally on in this version
    boolean trioPhasing = true;
    int num_blocks = block_size.length;
    int num_haplos = input_haplos.length;
    int num_loci = input_haplos[0].length;
    Recovery tempRec;

    if (num_loci > MAXLOCI){
        throw new HaploViewException("Too many loci in a single block (> 100)");
    }

    //figure out the size of the biggest block
    biggest_block_size=block_size[0];
    for (int i=1; i<num_blocks; i++) {
        if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i];
    }

    // 2^biggest_block_size possible haplotypes within a block; per-block
    // arrays below are sized for the worst-case block
    num_poss = two_n[biggest_block_size];
    data = new OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) data[i]= new OBS();
    superdata = new SUPER_OBS[num_haplos/2];
    for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks);

    double[][] hprob = new double[num_blocks][num_poss];   // per-block surviving-haplotype probabilities
    int[][] hlist = new int[num_blocks][num_poss];         // per-block surviving-haplotype codes
    int[] num_hlist = new int[num_blocks];                 // count of survivors per block
    int[] hint = new int[num_poss];                        // haplotype code -> index in hlist, or -1
    prob = new double[num_poss];

    /* for trio option */
    if (trioPhasing) {
        // one ambiguity row per trio (num_haplos/4 trios' worth of rows)
        ambighet = new int[(num_haplos/4)][num_loci];
        store_dhet_status(num_haplos,num_loci,input_haplos);
    }

    end_locus=-1;
    //System.out.println("made it to 110");
    //now we loop through the blocks
    for (block=0; block<num_blocks; block++) {
        start_locus=end_locus+1;
        end_locus=start_locus+block_size[block]-1;
        num_poss=two_n[block_size[block]];
        //read_observations initializes the values in data[] (array of OBS)
        num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus);

        // start prob array with probabilities from full observations
        for (int j=0; j<num_poss; j++) {
            prob[j]=PSEUDOCOUNT;
        }
        total=(double)num_poss;
        total *= PSEUDOCOUNT;

        /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of
           every haplotype - i.e., flat when nothing is known, close to
           phase known if a great deal is known */
        for (int i=0; i<num_indivs; i++) {
            if (data[i].nposs==1) {
                // unambiguous individual: count both of its haplotypes once
                tempRec = (Recovery)data[i].poss.elementAt(0);
                prob[tempRec.h1]+=1.0;
                prob[tempRec.h2]+=1.0;
                total+=2.0;
            }
        }
        // normalize
        for (int j=0; j<num_poss; j++) {
            prob[j] /= total;
        }

        // EM LOOP: assign ambiguous data based on p, then re-estimate p
        // (fixed 20 iterations, no convergence test)
        iter=0;
        while (iter<20) {
            // E-step: compute probabilities of each possible observation
            for (int i=0; i<num_indivs; i++) {
                total=0.0;
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]);
                    total+=tempRec.p;
                }
                // normalize per individual
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    tempRec.p /= total;
                }
            }

            // M-step: re-estimate prob (1e-10 floor avoids zero lock-in)
            for (int j=0; j<num_poss; j++) {
                prob[j]=1e-10;
            }
            total=num_poss*1e-10;
            for (int i=0; i<num_indivs; i++) {
                for (int k=0; k<data[i].nposs; k++) {
                    tempRec = (Recovery) data[i].poss.elementAt(k);
                    prob[tempRec.h1]+=tempRec.p;
                    prob[tempRec.h2]+=tempRec.p;
                    total+=(2.0*(tempRec.p));
                }
            }
            // normalize
            for (int j=0; j<num_poss; j++) {
                prob[j] /= total;
            }
            iter++;
        }

        // printf("FINAL PROBABILITIES:\n");
        // keep only haplotypes whose block-level frequency exceeds .001
        int m=0;
        for (int j=0; j<num_poss; j++) {
            hint[j]=-1;
            if (prob[j] > .001) {
                // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]);
                hlist[block][m]=j;
                hprob[block][m]=prob[j];
                hint[j]=m;
                m++;
            }
        }
        num_hlist[block]=m;
        // store current block results in super obs structure
        store_block_haplos(hlist, hprob, hint, block, num_indivs);
    } /* for each block */

    // number of cross-block combinations of surviving haplotypes
    // NOTE(review): plain int product — could overflow with many blocks;
    // the commented-out guard below was never ported from C
    poss_full=1;
    for (block=0; block<num_blocks; block++) {
        poss_full *= num_hlist[block];
    }

    //TODO:System.out.println(poss_full);
    /* LIGATE and finish this mess :) */
    /* if (poss_full > 1000000) {
       /* what we really need to do is go through and pare back to using
          a smaller number (e.g., > .002, .005)
       //printf("too many possibilities: %d\n",poss_full);
       return(-5);
       }*/

    double[] superprob = new double[poss_full];

    create_super_haplos(num_indivs,num_blocks,num_hlist);

    /* run standard EM on supercombos */
    /* start prob array with probabilities from full observations */
    for (int j=0; j<poss_full; j++) {
        superprob[j]=PSEUDOCOUNT;
    }
    total=(double)poss_full;
    total *= PSEUDOCOUNT;
    //System.out.println("made it to 232");
    /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every
       haplotype - i.e., flat when nothing is known, close to phase known
       if a great deal is known */
    for (int i=0; i<num_indivs; i++) {
        if (superdata[i].nsuper==1) {
            superprob[superdata[i].superposs[0].h1]+=1.0;
            superprob[superdata[i].superposs[0].h2]+=1.0;
            total+=2.0;
        }
    }
    /* normalize */
    for (int j=0; j<poss_full; j++) {
        superprob[j] /= total;
    }

    /* EM LOOP: assign ambiguous data based on p, then re-estimate p
       (same fixed-20-iteration scheme as the per-block pass above) */
    iter=0;
    while (iter<20) {
        /* compute probabilities of each possible observation */
        for (int i=0; i<num_indivs; i++) {
            total=0.0;
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p = (float)
                        (superprob[superdata[i].superposs[k].h1]*
                        superprob[superdata[i].superposs[k].h2]);
                total+=superdata[i].superposs[k].p;
            }
            /* normalize */
            for (int k=0; k<superdata[i].nsuper; k++) {
                superdata[i].superposs[k].p /= total;
            }
        }

        /* re-estimate prob */
        for (int j=0; j<poss_full; j++) {
            superprob[j]=1e-10;
        }
        total=poss_full*1e-10;
        for (int i=0; i<num_indivs; i++) {
            for (int k=0; k<superdata[i].nsuper; k++) {
                superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p;
                superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p;
                total+=(2.0*superdata[i].superposs[k].p);
            }
        }
        /* normalize */
        for (int j=0; j<poss_full; j++) {
            superprob[j] /= total;
        }
        iter++;
    }
    //System.out.println("made it to 290");

    /* we're done - the indices of superprob now have to be decoded to
       reveal the actual haplotypes they represent */

    /* Enumeration theHaplos = haplos_present.elements();
       String tempHap;
       while(theHaplos.hasMoreElements()) {
       tempHap = (String)theHaplos.nextElement();
       System.out.println(tempHap);
       } */

    // trio bookkeeping: accumulate transmitted (T) / untransmitted (U)
    // haplotype mass per trio, normalized per trio
    double[] tempT,totalT,tempU,totalU;
    Vector obsT = new Vector();
    Vector obsU = new Vector();
    if(trioPhasing) {
        // NOTE(review): h1,h2 are declared but never used here, and
        // best1/best2/bestProduct are tracked but never read afterwards —
        // dead bookkeeping, presumably left over from the C original
        int best1=0,best2=0,h1,h2;
        double tempnorm=0,product,bestProduct=0;
        tempT = new double[poss_full];
        totalT = new double[poss_full];
        tempU = new double[poss_full];
        totalU = new double[poss_full];
        // i and i+1 index the two parents of trio i/2
        for (int i=0; i<numTrios*2; i+=2) {
            best1=0;
            best2=0;
            bestProduct=-999.999;
            tempnorm=0.00;
            for (int n=0; n<superdata[i].nsuper; n++) {
                for (int m=0; m<superdata[i+1].nsuper; m++) {
                    // kid_consistent presumably checks that this pair of
                    // transmitted haplotypes is compatible with the child's
                    // genotype — TODO confirm semantics
                    if (kid_consistent(superdata[i].superposs[n].h1,
                            superdata[i+1].superposs[m].h1,num_blocks,
                            block_size,hlist,num_hlist,i/2,num_loci)) {
                        product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p;
                        if (product > bestProduct) {
                            best1=n;
                            best2=m;
                            bestProduct=product;
                        }
                        // only heterozygous parents contribute T/U counts
                        // (h1 is treated as transmitted, h2 untransmitted)
                        if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) {
                            tempT[superdata[i].superposs[n].h1]+=product;
                            tempU[superdata[i].superposs[n].h2]+=product;
                        }
                        if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) {
                            tempT[superdata[i+1].superposs[m].h1]+=product;
                            tempU[superdata[i+1].superposs[m].h2]+=product;
                        }
                        /* normalize by all possibilities, even double hom */
                        tempnorm+=product;
                    }
                }
            }
            // fold this trio's normalized contribution into the running
            // totals, clearing the per-trio scratch entries as we go
            if (tempnorm > 0.00) {
                for (int j=0; j<poss_full; j++) {
                    if (tempT[j] > 0.0000 || tempU[j] > 0.0000) {
                        totalT[j] += (tempT[j]/tempnorm);
                        totalU[j] += (tempU[j]/tempnorm);
                        tempT[j]=tempU[j]=0.0000;
                    }
                }
                tempnorm=0.00;
            }
        }
        // export T/U totals only for haplotypes that survive the frequency
        // cutoff, in the same order as haplos_present below
        for (int j = 0; j <poss_full; j++){
            if (superprob[j] > .001) {
                obsT.add(new Double(totalT[j]));
                obsU.add(new Double(totalU[j]));
            }
        }
    }

    // decode the surviving super-haplotype codes into allele arrays and
    // collect their frequencies
    EMReturn results;
    Vector haplos_present = new Vector();
    Vector haplo_freq= new Vector();
    for (int j=0; j<poss_full; j++) {
        if (superprob[j] > .001) {
            haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist));
            haplo_freq.addElement(new Double(superprob[j]));
        }
    }
    double[] freqs = new double[haplo_freq.size()];
    for(int j=0;j<haplo_freq.size();j++) {
        freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue();
    }
    if (trioPhasing){
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU);
    }else{
        results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs);
    }
    return results;

    /* if (dump_phased_haplos) {
       if ((fpdump=fopen("emphased.haps","w"))!=NULL) {
       for (i=0; i<num_indivs; i++) {
       best=0;
       for (k=0; k<superdata[i].nsuper; k++) {
       if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) {
       best=k;
       }
       }
       h1 = superdata[i].superposs[best].h1;
       h2 = superdata[i].superposs[best].h2;
       fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist));
       fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist));
       }
       fclose(fpdump);
       }
       } */
    //return 0;
}
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; boolean trioPhasing = true; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (trioPhasing) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat 
when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) { /* what we really need to do is go through and pare back to using a smaller number (e.g., > .002, .005) //printf("too many possibilities: %d\n",poss_full); return(-5); }*/ double[] superprob = new double[poss_full]; 
create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); 
System.out.println(tempHap); } */ double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(trioPhasing) { int best1=0,best2=0,h1,h2; double tempnorm=0,product,bestProduct=0; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { best1=0; best2=0; bestProduct=-999.999; tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (product > bestProduct) { best1=n; best2=m; bestProduct=product; } if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int 
j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } if (trioPhasing){ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU); }else{ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs); } return results; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
1,110,695
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; boolean trioPhasing = true; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (trioPhasing) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat 
when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) { /* what we really need to do is go through and pare back to using a smaller number (e.g., > .002, .005) //printf("too many possibilities: %d\n",poss_full); return(-5); }*/ double[] superprob = new double[poss_full]; 
create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); 
System.out.println(tempHap); } */ double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(trioPhasing) { int best1=0,best2=0,h1,h2; double tempnorm=0,product,bestProduct=0; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { best1=0; best2=0; bestProduct=-999.999; tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (product > bestProduct) { best1=n; best2=m; bestProduct=product; } if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } EMReturn results; Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int 
j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } if (trioPhasing){ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU); }else{ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs); } return results; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; boolean trioPhasing = true; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (trioPhasing) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat 
when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) { /* what we really need to do is go through and pare back to using a smaller number (e.g., > .002, .005) //printf("too many possibilities: %d\n",poss_full); return(-5); }*/ double[] superprob = new double[poss_full]; 
create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); 
System.out.println(tempHap); } */ double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(trioPhasing) { int best1=0,best2=0,h1,h2; double tempnorm=0,product,bestProduct=0; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { best1=0; best2=0; bestProduct=-999.999; tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (product > bestProduct) { best1=n; best2=m; bestProduct=product; } if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } EMReturn results; Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int 
j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } if (trioPhasing){ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU); }else{ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs); } return results; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
1,110,696
public float getFStop() { return FStop; }
public double getFStop() { return FStop; }
1,110,698
public float getFocalLength() { return focalLength; }
public double getFocalLength() { return focalLength; }
1,110,699
public static PhotoInfo retrievePhotoInfo( String strPhotoId ) throws PhotoNotFoundException { initDB(); String sql = "SELECT * from photos where photo_id=\"" + strPhotoId +"\""; PhotoInfo photo = new PhotoInfo(); try { Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery( sql ); if ( !rs.next() ) { throw new PhotoNotFoundException(); } photo.shootingPlace = rs.getString( "shooting_place" ); photo.photographer = rs.getString( "photographer" ); photo.FStop = rs.getFloat( "f_stop" ); photo.focalLength = rs.getFloat( "focal_length" ); photo.shootTime = rs.getDate( "shoot_time" ); rs.close(); stmt.close(); } catch (SQLException e ) { System.err.println( "Error fetching record: " + e.getMessage() ); // TODO: Actually this is not the right exception for this purpose throw new PhotoNotFoundException(); } return photo; }
public static PhotoInfo retrievePhotoInfo( int photoId ) throws PhotoNotFoundException { initDB(); String sql = "SELECT * from photos where photo_id=\"" + strPhotoId +"\""; PhotoInfo photo = new PhotoInfo(); try { Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery( sql ); if ( !rs.next() ) { throw new PhotoNotFoundException(); } photo.shootingPlace = rs.getString( "shooting_place" ); photo.photographer = rs.getString( "photographer" ); photo.FStop = rs.getFloat( "f_stop" ); photo.focalLength = rs.getFloat( "focal_length" ); photo.shootTime = rs.getDate( "shoot_time" ); rs.close(); stmt.close(); } catch (SQLException e ) { System.err.println( "Error fetching record: " + e.getMessage() ); // TODO: Actually this is not the right exception for this purpose throw new PhotoNotFoundException(); } return photo; }
1,110,700
public static PhotoInfo retrievePhotoInfo( String strPhotoId ) throws PhotoNotFoundException { initDB(); String sql = "SELECT * from photos where photo_id=\"" + strPhotoId +"\""; PhotoInfo photo = new PhotoInfo(); try { Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery( sql ); if ( !rs.next() ) { throw new PhotoNotFoundException(); } photo.shootingPlace = rs.getString( "shooting_place" ); photo.photographer = rs.getString( "photographer" ); photo.FStop = rs.getFloat( "f_stop" ); photo.focalLength = rs.getFloat( "focal_length" ); photo.shootTime = rs.getDate( "shoot_time" ); rs.close(); stmt.close(); } catch (SQLException e ) { System.err.println( "Error fetching record: " + e.getMessage() ); // TODO: Actually this is not the right exception for this purpose throw new PhotoNotFoundException(); } return photo; }
public static PhotoInfo retrievePhotoInfo( String strPhotoId ) throws PhotoNotFoundException { initDB(); String sql = "SELECT * from photos where photo_id=\"" + photoId +"\""; PhotoInfo photo = new PhotoInfo(); try { Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery( sql ); if ( !rs.next() ) { throw new PhotoNotFoundException(); } photo.shootingPlace = rs.getString( "shooting_place" ); photo.photographer = rs.getString( "photographer" ); photo.FStop = rs.getFloat( "f_stop" ); photo.focalLength = rs.getFloat( "focal_length" ); photo.shootTime = rs.getDate( "shoot_time" ); rs.close(); stmt.close(); } catch (SQLException e ) { System.err.println( "Error fetching record: " + e.getMessage() ); // TODO: Actually this is not the right exception for this purpose throw new PhotoNotFoundException(); } return photo; }
1,110,701
public static PhotoInfo retrievePhotoInfo( String strPhotoId ) throws PhotoNotFoundException { initDB(); String sql = "SELECT * from photos where photo_id=\"" + strPhotoId +"\""; PhotoInfo photo = new PhotoInfo(); try { Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery( sql ); if ( !rs.next() ) { throw new PhotoNotFoundException(); } photo.shootingPlace = rs.getString( "shooting_place" ); photo.photographer = rs.getString( "photographer" ); photo.FStop = rs.getFloat( "f_stop" ); photo.focalLength = rs.getFloat( "focal_length" ); photo.shootTime = rs.getDate( "shoot_time" ); rs.close(); stmt.close(); } catch (SQLException e ) { System.err.println( "Error fetching record: " + e.getMessage() ); // TODO: Actually this is not the right exception for this purpose throw new PhotoNotFoundException(); } return photo; }
public static PhotoInfo retrievePhotoInfo( String strPhotoId ) throws PhotoNotFoundException { initDB(); String sql = "SELECT * from photos where photo_id=\"" + strPhotoId +"\""; PhotoInfo photo = new PhotoInfo(); try { Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery( sql ); if ( !rs.next() ) { throw new PhotoNotFoundException(); } photo.shootingPlace = rs.getString( "shooting_place" ); photo.photographer = rs.getString( "photographer" ); photo.FStop = rs.getFloat( "f_stop" ); photo.focalLength = rs.getFloat( "focal_length" ); photo.shootTime = rs.getDate( "shoot_time" ); rs.close(); stmt.close(); } catch (SQLException e ) { System.err.println( "Error fetching record: " + e.getMessage() ); // TODO: Actually this is not the right exception for this purpose throw new PhotoNotFoundException(); } return photo; }
1,110,702
public void setFStop(float v) { this.FStop = v; }
public void setFStop(double v) { this.FStop = v; }
1,110,703
public void setFocalLength(float v) { this.focalLength = v; }
public void setFocalLength(double v) { this.focalLength = v; }
1,110,704
public void showPreferencesDialog() { // XXX Can't easily use ArchitectPanelBuilder since this // contains a JTabbedPane which is not an ArchitectPanel. final JDialog d = new JDialog(af, "User Preferences"); JPanel cp = new JPanel(new BorderLayout(12,12)); JTabbedPane tp = new JTabbedPane(); cp.add(tp, BorderLayout.CENTER); cp.setBorder(BorderFactory.createEmptyBorder(12,12,12,12)); final PreferencesPanel prefPanel = new PreferencesPanel(af.getUserSettings()); tp.add("General", prefPanel); final JDBCDriverPanel jdbcPanel = new JDBCDriverPanel(af.getArchitectSession()); tp.add("JDBC Drivers", jdbcPanel); JPanel buttonPanel = new JPanel(new FlowLayout(FlowLayout.RIGHT)); JButton okButton = new JButton(ArchitectPanelBuilder.OK_BUTTON_LABEL); okButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { prefPanel.applyChanges(); jdbcPanel.applyChanges(); d.setVisible(false); } }); buttonPanel.add(okButton); Action cancelAction = new AbstractAction() { public void actionPerformed(ActionEvent evt) { prefPanel.discardChanges(); jdbcPanel.discardChanges(); d.setVisible(false); } }; cancelAction.putValue(Action.NAME, ArchitectPanelBuilder.CANCEL_BUTTON_LABEL); JButton cancelButton = new JButton(cancelAction); buttonPanel.add(cancelButton); ArchitectPanelBuilder.makeJDialogCancellable(d, cancelAction); d.getRootPane().setDefaultButton(okButton); cp.add(buttonPanel, BorderLayout.SOUTH); d.setContentPane(cp); d.pack(); d.setLocationRelativeTo(ArchitectFrame.getMainInstance()); d.setVisible(true); }
public void showPreferencesDialog() { // XXX Can't easily use ArchitectPanelBuilder since this // contains a JTabbedPane which is not an ArchitectPanel. final JDialog d = new JDialog(af, "User Preferences"); JPanel cp = new JPanel(new BorderLayout(12,12)); JTabbedPane tp = new JTabbedPane(); cp.add(tp, BorderLayout.CENTER); cp.setBorder(BorderFactory.createEmptyBorder(12,12,12,12)); final PreferencesPanel prefPanel = new PreferencesPanel(af.getUserSettings()); tp.add("General", prefPanel); final JDBCDriverPanel jdbcPanel = new JDBCDriverPanel(af.getArchitectSession()); tp.add("JDBC Drivers", jdbcPanel); JPanel buttonPanel = new JPanel(new FlowLayout(FlowLayout.RIGHT)); JDefaultButton okButton = new JDefaultButton(ArchitectPanelBuilder.OK_BUTTON_LABEL); okButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { prefPanel.applyChanges(); jdbcPanel.applyChanges(); d.setVisible(false); } }); buttonPanel.add(okButton); Action cancelAction = new AbstractAction() { public void actionPerformed(ActionEvent evt) { prefPanel.discardChanges(); jdbcPanel.discardChanges(); d.setVisible(false); } }; cancelAction.putValue(Action.NAME, ArchitectPanelBuilder.CANCEL_BUTTON_LABEL); JButton cancelButton = new JButton(cancelAction); buttonPanel.add(cancelButton); ArchitectPanelBuilder.makeJDialogCancellable(d, cancelAction); d.getRootPane().setDefaultButton(okButton); cp.add(buttonPanel, BorderLayout.SOUTH); d.setContentPane(cp); d.pack(); d.setLocationRelativeTo(ArchitectFrame.getMainInstance()); d.setVisible(true); }
1,110,706
void prepareMarkerInput(File infile, long md, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); Vector positions = new Vector(); Vector extras = new Vector(); maxdist = md; negMaxdist = -1 * maxdist; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } /*if (loc < prevloc){ throw new HaploViewException("Info file out of order:\n"+ name); } */ prevloc = loc; names.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else { //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; //this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. 
//it also creates a hashtable with the positions as keys and their index in the pos[] array as the value for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = 
((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; a1 = a2; a2 = temp; } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; if (infoKnown){ markerInfo.add(new SNP((String)names.elementAt(i), Long.parseLong((String)positions.elementAt(i)), Math.rint(maf*100.0)/100.0, a1, a2, (String)extras.elementAt(i))); }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
void prepareMarkerInput(File infile, long md, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); Vector positions = new Vector(); Vector extras = new Vector(); maxdist = md; negMaxdist = -1 * maxdist; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } /*if (loc < prevloc){ throw new HaploViewException("Info file out of order:\n"+ name); } */ prevloc = loc; names.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; //this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. 
//it also creates a hashtable with the positions as keys and their index in the pos[] array as the value for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = 
((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else if (infile != null){ numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; a1 = a2; a2 = temp; } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; if (infoKnown){ markerInfo.add(new SNP((String)names.elementAt(i), Long.parseLong((String)positions.elementAt(i)), Math.rint(maf*100.0)/100.0, a1, a2, (String)extras.elementAt(i))); }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
1,110,710
public CheckDataPanel(File file) throws IOException{ //okay, for now we're going to assume the ped file has no header Vector pedFileStrings = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(file)); String line; while((line = reader.readLine())!=null){ pedFileStrings.add(line); } pedfile = new PedFile(); pedfile.parse(pedFileStrings); //Vector result = data.check(); Vector result = pedfile.check(); int numResults = result.size(); Vector tableColumnNames = new Vector(); tableColumnNames.add("Name"); tableColumnNames.add("ObsHET"); tableColumnNames.add("PredHET"); tableColumnNames.add("HWpval"); tableColumnNames.add("%Geno"); tableColumnNames.add("FamTrio"); tableColumnNames.add("MendErr"); tableColumnNames.add("Rating"); Vector tableData = new Vector(); int[] ratingArray = new int[numResults]; for (int i = 0; i < numResults; i++){ Vector tempVect = new Vector(); MarkerResult currentResult = (MarkerResult)result.get(i); tempVect.add(currentResult.getName());// tempVect.add(new Double(currentResult.getObsHet())); tempVect.add(new Double(45)); tempVect.add(new Double(45));// tempVect.add(new Double(currentResult.getPredHet())); tempVect.add(new Double(currentResult.getHWpvalue())); tempVect.add(new Double(currentResult.getGenoPercent())); tempVect.add(new Integer(currentResult.getFamTrioNum())); tempVect.add(new Integer(currentResult.getMendErrNum())); if (currentResult.getRating() > 0){ tempVect.add(new Boolean(true)); }else{ tempVect.add(new Boolean(false)); } //this value is never displayed, just kept for bookkeeping ratingArray[i] = currentResult.getRating(); tableData.add(tempVect.clone()); } final CheckDataTableModel tableModel = new CheckDataTableModel(tableColumnNames, tableData, ratingArray); tableModel.addTableModelListener(this); table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); 
table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(100); JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); }
public CheckDataPanel(File file) throws IOException{ //okay, for now we're going to assume the ped file has no header Vector pedFileStrings = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(file)); String line; while((line = reader.readLine())!=null){ pedFileStrings.add(line); } pedfile = new PedFile(); pedfile.parse(pedFileStrings); //Vector result = data.check(); Vector result = pedfile.check(); int numResults = result.size(); Vector tableColumnNames = new Vector(); tableColumnNames.add("Name"); tableColumnNames.add("ObsHET"); tableColumnNames.add("PredHET"); tableColumnNames.add("HWpval"); tableColumnNames.add("%Geno"); tableColumnNames.add("FamTrio"); tableColumnNames.add("MendErr"); tableColumnNames.add("Rating"); Vector tableData = new Vector(); int[] ratingArray = new int[numResults]; for (int i = 0; i < numResults; i++){ Vector tempVect = new Vector(); MarkerResult currentResult = (MarkerResult)result.get(i); tempVect.add(currentResult.getName());// tempVect.add(new Double(currentResult.getObsHet())); tempVect.add(new Double(45)); tempVect.add(new Double(45));// tempVect.add(new Double(currentResult.getPredHet())); tempVect.add(new Double(currentResult.getHWpvalue())); tempVect.add(new Double(currentResult.getGenoPercent())); tempVect.add(new Integer(currentResult.getFamTrioNum())); tempVect.add(new Integer(currentResult.getMendErrNum())); if (currentResult.getRating() > 0){ tempVect.add(new Boolean(true)); }else{ tempVect.add(new Boolean(false)); } //this value is never displayed, just kept for bookkeeping ratingArray[i] = currentResult.getRating(); tableData.add(tempVect.clone()); } final CheckDataTableModel tableModel = new CheckDataTableModel(tableColumnNames, tableData, ratingArray); tableModel.addTableModelListener(this); table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); 
table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(100); JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); }
1,110,711
public void dbObjectChanged(SQLObjectEvent e) { if (e.getSource() == table) { int[] changedIndices = e.getChangedIndices(); for (int i = 0; i < changedIndices.length; i++) { // XXX: should group contiguous regions into one event! logger.debug("Firing contentsChanged event for index "+i); fireContentsChanged(changedIndices[i], changedIndices[i]); } } else if (e.getSource() instanceof SQLColumn) { // make sure this column was actually in the table try { int index = table.getColumns().indexOf(e.getSource()); if (index >= 0) { fireContentsChanged(index, index); } } catch (ArchitectException ex) { logger.error("Exception in dbObjectChanged",ex); } } else { logger.warn("Unexpected SQLObjectEvent: "+e); } }
public void dbObjectChanged(SQLObjectEvent e) { if (e.getSource() == table.getColumnsFolder()) { int[] changedIndices = e.getChangedIndices(); for (int i = 0; i < changedIndices.length; i++) { // XXX: should group contiguous regions into one event! logger.debug("Firing contentsChanged event for index "+i); fireContentsChanged(changedIndices[i], changedIndices[i]); } } else if (e.getSource() instanceof SQLColumn) { // make sure this column was actually in the table try { int index = table.getColumns().indexOf(e.getSource()); if (index >= 0) { fireContentsChanged(index, index); } } catch (ArchitectException ex) { logger.error("Exception in dbObjectChanged",ex); } } else { logger.warn("Unexpected SQLObjectEvent: "+e); } }
1,110,712
public Object getElementAt(int index) { try { return table.getChildren().get(index); } catch (ArchitectException ex) { throw new RuntimeException("Couldn't get child "+index, ex); } }
public Object getElementAt(int index) { try { return table.getColumnsFolder().getChild(index); } catch (ArchitectException ex) { throw new RuntimeException("Couldn't get child "+index, ex); } }
1,110,713
public int getSize() { try { return table.getChildCount(); } catch (ArchitectException ex) { throw new RuntimeException("Couldn't get child count", ex); } }
public int getSize() { try { return table.getColumnsFolder().getChildCount(); } catch (ArchitectException ex) { throw new RuntimeException("Couldn't get child count", ex); } }
1,110,714
public void dbChildrenRemoved(SQLObjectEvent e) { if (e.getSource() == this.model.getColumnsFolder()) { int ci[] = e.getChangedIndices(); logger.debug("Columns removed. Syncing select/highlight lists. Removed indices="+Arrays.asList(ci)); for (int i = 0; i < ci.length; i++) { columnSelection.remove(ci[i]); columnHighlight.remove(ci[i]); } if (columnSelection.size() > 0) { selectNone(); columnSelection.set(Math.min(ci[0], columnSelection.size()-1), Boolean.TRUE); } } try { ArchitectUtils.unlistenToHierarchy(this, e.getChildren()); if (columnSelection.size() != this.model.getColumns().size()) { logger.error("Repairing out-of-sync selection list: selection="+columnSelection+"; children="+this.model.getColumns()); columnSelection = new ArrayList(); for (int j = 0; j < model.getColumns().size(); j++) { columnSelection.add(Boolean.FALSE); } } if (columnHighlight.size() != this.model.getColumns().size()) { logger.error("Repairing out-of-sync highlight list: highlights="+columnHighlight+"; children="+this.model.getColumns()); columnHighlight = new ArrayList(); for (int j = 0; j < model.getColumns().size(); j++) { columnHighlight.add(null); } } } catch (ArchitectException ex) { logger.error("Couldn't remove children", ex); JOptionPane.showMessageDialog(getPlayPen(), "Couldn't delete column: "+ex.getMessage()); } firePropertyChange("model.children", null, null); revalidate(); }
public void dbChildrenRemoved(SQLObjectEvent e) { if (e.getSource() == this.model.getColumnsFolder()) { int ci[] = e.getChangedIndices(); logger.debug("Columns removed. Syncing select/highlight lists. Removed indices="+Arrays.asList(ci)); for (int i = 0; i < ci.length; i++) { columnSelection.remove(ci[i]); columnHighlight.remove(ci[i]); } if (columnSelection.size() > 0) { selectNone(); columnSelection.set(Math.min(ci[0], columnSelection.size()-1), Boolean.TRUE); } } try { ArchitectUtils.unlistenToHierarchy(this, e.getChildren()); if (columnSelection.size() != this.model.getColumns().size()) { logger.error("Repairing out-of-sync selection list: selection="+columnSelection+"; children="+this.model.getColumns()); columnSelection = new ArrayList(); for (int j = 0; j < model.getColumns().size(); j++) { columnSelection.add(Boolean.FALSE); } } if (columnHighlight.size() != this.model.getColumns().size()) { logger.error("Repairing out-of-sync highlight list: highlights="+columnHighlight+"; children="+this.model.getColumns()); columnHighlight = new ArrayList(); for (int j = 0; j < model.getColumns().size(); j++) { columnHighlight.add(null); } } } catch (ArchitectException ex) { logger.error("Couldn't remove children", ex); JOptionPane.showMessageDialog(getPlayPen(), "Couldn't delete column: "+ex.getMessage()); } firePropertyChange("model.children", null, null); revalidate(); }
1,110,715
public void dbChildrenRemoved(SQLObjectEvent e) { if (e.getSource() == this.model.getColumnsFolder()) { int ci[] = e.getChangedIndices(); logger.debug("Columns removed. Syncing select/highlight lists. Removed indices="+Arrays.asList(ci)); for (int i = 0; i < ci.length; i++) { columnSelection.remove(ci[i]); columnHighlight.remove(ci[i]); } if (columnSelection.size() > 0) { selectNone(); columnSelection.set(Math.min(ci[0], columnSelection.size()-1), Boolean.TRUE); } } try { ArchitectUtils.unlistenToHierarchy(this, e.getChildren()); if (columnSelection.size() != this.model.getColumns().size()) { logger.error("Repairing out-of-sync selection list: selection="+columnSelection+"; children="+this.model.getColumns()); columnSelection = new ArrayList(); for (int j = 0; j < model.getColumns().size(); j++) { columnSelection.add(Boolean.FALSE); } } if (columnHighlight.size() != this.model.getColumns().size()) { logger.error("Repairing out-of-sync highlight list: highlights="+columnHighlight+"; children="+this.model.getColumns()); columnHighlight = new ArrayList(); for (int j = 0; j < model.getColumns().size(); j++) { columnHighlight.add(null); } } } catch (ArchitectException ex) { logger.error("Couldn't remove children", ex); JOptionPane.showMessageDialog(getPlayPen(), "Couldn't delete column: "+ex.getMessage()); } firePropertyChange("model.children", null, null); revalidate(); }
public void dbChildrenRemoved(SQLObjectEvent e) { if (e.getSource() == this.model.getColumnsFolder()) { int ci[] = e.getChangedIndices(); logger.debug("Columns removed. Syncing select/highlight lists. Removed indices="+Arrays.asList(ci)); for (int i = 0; i < ci.length; i++) { columnSelection.remove(ci[i]); columnHighlight.remove(ci[i]); } if (columnSelection.size() > 0) { selectNone(); columnSelection.set(Math.min(ci[0], columnSelection.size()-1), Boolean.TRUE); } } try { ArchitectUtils.unlistenToHierarchy(this, e.getChildren()); if (columnSelection.size() != this.model.getColumns().size()) { logger.error("Repairing out-of-sync selection list: selection="+columnSelection+"; children="+this.model.getColumns()); columnSelection = new ArrayList(); for (int j = 0; j < model.getColumns().size(); j++) { columnSelection.add(Boolean.FALSE); } } if (columnHighlight.size() != this.model.getColumns().size()) { logger.error("Repairing out-of-sync highlight list: highlights="+columnHighlight+"; children="+this.model.getColumns()); columnHighlight = new ArrayList(); for (int j = 0; j < model.getColumns().size(); j++) { columnHighlight.add(null); } } } catch (ArchitectException ex) { logger.error("Couldn't remove children", ex); JOptionPane.showMessageDialog(getPlayPen(), "Couldn't delete column: "+ex.getMessage()); } firePropertyChange("model.children", null, null); revalidate(); }
1,110,716
public void doTag(XMLOutput output) throws Exception { if (getVar() == null) { throw new IllegalArgumentException("The var attribute cannot be null"); } Document document = null; if (html == null) { document = parseBody(output); } else { document = parse(html); } context.setVariable(getVar(), document); }
public void doTag(XMLOutput output) throws Exception { if (getVar() == null) { throw new IllegalArgumentException("The var attribute cannot be null"); } Document document = null; if (html == null) { String text = getText(); if (text != null) { document = parseText(text); } else { document = parseBody(output); } } else { document = parse(html); } context.setVariable(getVar(), document); }
1,110,717
protected Transferable createTransferable(JComponent c) { log.warn( "createTransferable" ); Collection selection = view.getSelection(); sourcePhotos = new PhotoInfo[selection.size()]; Iterator iter = selection.iterator(); int i = 0; while ( iter.hasNext() ) { sourcePhotos[i] = (PhotoInfo) iter.next(); i++; } log.warn( "" + i + " photos selected" );// shouldRemove = true; return new PhotoCollectionTransferable(sourcePhotos); }
protected Transferable createTransferable(JComponent c) { log.warn( "createTransferable" ); Collection selection = view.getSelection(); sourcePhotos = new PhotoInfo[selection.size()]; Iterator iter = selection.iterator(); int i = 0; while ( iter.hasNext() ) { sourcePhotos[i] = (PhotoInfo) iter.next(); i++; } log.warn( "" + i + " photos selected" );// shouldRemove = true; PhotoCollection sourceCollection = view.getCollection(); return new PhotoCollectionTransferable( sourcePhotos ); }
1,110,718
protected void exportDone(JComponent c, Transferable data, int action) { PhotoCollection coll = view.getCollection(); if (/*shouldRemove && */ (action == MOVE) && coll instanceof PhotoFolder ) { PhotoFolder folder = (PhotoFolder) coll; for ( int i = 0; i < sourcePhotos.length; i++ ) { folder.removePhoto( sourcePhotos[i] ); } } }
protected void exportDone(JComponent c, Transferable data, int action) { PhotoCollection coll = view.getCollection(); if (/*shouldRemove && */ (action == MOVE) && coll instanceof PhotoFolder ) { PhotoFolder folder = (PhotoFolder) coll; for ( int i = 0; i < sourcePhotos.length; i++ ) { folder.removePhoto( sourcePhotos[i] ); } } }
1,110,719
void generateDPrimeTable(long maxdist){ //calculating D prime requires the number of each possible 2 marker //haplotype in the dataset dPrimeTable = new PairwiseLinkage[((Chromosome) chromosomes.firstElement()).size()][((Chromosome) chromosomes.firstElement()).size()]; int doublehet; long negMaxdist = -1*maxdist; int[][] twoMarkerHaplos = new int[3][3]; //loop through all marker pairs for (int pos2 = 1; pos2 < dPrimeTable.length; pos2++){ //clear the array for (int pos1 = 0; pos1 < pos2; pos1++){ long sep = ((SNP)markerInfo.elementAt(pos1)).getPosition() - ((SNP)markerInfo.elementAt(pos2)).getPosition(); if (sep > maxdist || sep < negMaxdist){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,-99,0,0,0,nullArray); continue; } for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; 
marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).elementAt(pos1); byte b2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) dPrimeTable[pos1][pos2] = computeDPrime(twoMarkerHaplos[1][1], twoMarkerHaplos[1][2], twoMarkerHaplos[2][1], twoMarkerHaplos[2][2], doublehet, 0.1); } } }
void generateDPrimeTable(long maxdist){ //calculating D prime requires the number of each possible 2 marker //haplotype in the dataset dPrimeTable = new PairwiseLinkage[Chromosome.size()][Chromosome.size()]; int doublehet; long negMaxdist = -1*maxdist; int[][] twoMarkerHaplos = new int[3][3]; //loop through all marker pairs for (int pos2 = 1; pos2 < dPrimeTable.length; pos2++){ //clear the array for (int pos1 = 0; pos1 < pos2; pos1++){ long sep = ((SNP)markerInfo.elementAt(pos1)).getPosition() - ((SNP)markerInfo.elementAt(pos2)).getPosition(); if (sep > maxdist || sep < negMaxdist){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,-99,0,0,0,nullArray); continue; } for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all 
chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).elementAt(pos1); byte b2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) dPrimeTable[pos1][pos2] = computeDPrime(twoMarkerHaplos[1][1], twoMarkerHaplos[1][2], twoMarkerHaplos[2][1], twoMarkerHaplos[2][2], doublehet, 0.1); } } }
1,110,722
void generateDPrimeTable(long maxdist){ //calculating D prime requires the number of each possible 2 marker //haplotype in the dataset dPrimeTable = new PairwiseLinkage[((Chromosome) chromosomes.firstElement()).size()][((Chromosome) chromosomes.firstElement()).size()]; int doublehet; long negMaxdist = -1*maxdist; int[][] twoMarkerHaplos = new int[3][3]; //loop through all marker pairs for (int pos2 = 1; pos2 < dPrimeTable.length; pos2++){ //clear the array for (int pos1 = 0; pos1 < pos2; pos1++){ long sep = ((SNP)markerInfo.elementAt(pos1)).getPosition() - ((SNP)markerInfo.elementAt(pos2)).getPosition(); if (sep > maxdist || sep < negMaxdist){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,-99,0,0,0,nullArray); continue; } for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; 
marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).elementAt(pos1); byte b2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) dPrimeTable[pos1][pos2] = computeDPrime(twoMarkerHaplos[1][1], twoMarkerHaplos[1][2], twoMarkerHaplos[2][1], twoMarkerHaplos[2][2], doublehet, 0.1); } } }
void generateDPrimeTable(long maxdist){ //calculating D prime requires the number of each possible 2 marker //haplotype in the dataset dPrimeTable = new PairwiseLinkage[((Chromosome) chromosomes.firstElement()).size()][((Chromosome) chromosomes.firstElement()).size()]; int doublehet; long negMaxdist = -1*maxdist; int[][] twoMarkerHaplos = new int[3][3]; //loop through all marker pairs for (int pos2 = 1; pos2 < dPrimeTable.length; pos2++){ //clear the array for (int pos1 = 0; pos1 < pos2; pos1++){ long sep = ((SNP)markerInfo.elementAt(pos1)).getPosition() - ((SNP)markerInfo.elementAt(pos2)).getPosition(); if (sep > maxdist || sep < negMaxdist){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,-99,0,0,0,nullArray); continue; } for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; 
marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).elementAt(pos1); byte b2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) dPrimeTable[pos1][pos2] = computeDPrime(twoMarkerHaplos[1][1], twoMarkerHaplos[1][2], twoMarkerHaplos[2][1], twoMarkerHaplos[2][2], doublehet, 0.1); } } }
1,110,723
void guessBlocks(int method){ Vector returnVec = new Vector(); switch(method){ case 0: returnVec = new FindBlocks(dPrimeTable, markerInfo).doSFS(); break; case 1: returnVec = new FindBlocks(dPrimeTable).do4Gamete(); break; case 2: returnVec = new FindBlocks(dPrimeTable).doMJD(); break; } blocks = returnVec; }
void guessBlocks(int method){ Vector returnVec = new Vector(); switch(method){ case 0: returnVec = new FindBlocks(dPrimeTable).doSFS(); break; case 1: returnVec = new FindBlocks(dPrimeTable).do4Gamete(); break; case 2: returnVec = new FindBlocks(dPrimeTable).doMJD(); break; } blocks = returnVec; }
1,110,724
public void linkageToChrom(boolean[] markerResults, PedFile pedFile){ Vector indList = pedFile.getOrder(); int numMarkers = 0; Vector usedParents = new Vector(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); for(int x=0; x < indList.size(); x++){ String[] indAndFamID = (String[])indList.elementAt(x); currentFamily = pedFile.getFamily(indAndFamID[0]); currentInd = currentFamily.getMember(indAndFamID[1]); if(currentInd.getIsTyped()){ //singleton if(currentFamily.getNumMembers() == 1){ numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == thisMarker[1]){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = 5; chrom2[i] = 5; } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2)); } else{ //skip if indiv is parent in trio or unaffected if (!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0") || currentInd.getAffectedStatus() != 2)){ //trio if (!(usedParents.contains( currentInd.getFamilyID() + " " + currentInd.getMomID()) || usedParents.contains(currentInd.getFamilyID() + " " + currentInd.getDadID()))){ //add 4 phased haps provided that we haven't used this trio already numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; thisMarker = 
(currentFamily.getMember(currentInd.getDadID())).getMarker(i); byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; } else { dadTb[i] = 5; dadUb[i] = 5; } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else { momTb[i] = 5; momUb[i] = 5; } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 5; momUb[i] = 5; } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = 5; 
dadUb[i] = 5; momTb[i] = 5; momUb[i] = 5; } } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb)); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getDadID()); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getMomID()); } } } } } double numChroms = chrom.size(); numBadGenotypes = new double[numMarkers]; percentBadGenotypes = new double[numMarkers]; for (int i = 0; i < numMarkers ; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chrom.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chrom.elementAt(j)).elementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } if (thisAllele == 0) { numBadGenotypes[i] ++; } } double maf = numa1 / (numa2+numa1) ; if (maf > 0.5) { maf = 1.0-maf; } markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chrom; //return chrom; }
public void linkageToChrom(boolean[] markerResults, PedFile pedFile){ Vector indList = pedFile.getOrder(); int numMarkers = 0; Vector usedParents = new Vector(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); for(int x=0; x < indList.size(); x++){ String[] indAndFamID = (String[])indList.elementAt(x); currentFamily = pedFile.getFamily(indAndFamID[0]); currentInd = currentFamily.getMember(indAndFamID[1]); if(currentInd.getIsTyped()){ //singleton if(currentFamily.getNumMembers() == 1){ numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == thisMarker[1]){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = 5; chrom2[i] = 5; } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2)); } else{ //skip if indiv is parent in trio or unaffected if (!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0") || currentInd.getAffectedStatus() != 2)){ //trio if (!(usedParents.contains( currentInd.getFamilyID() + " " + currentInd.getMomID()) || usedParents.contains(currentInd.getFamilyID() + " " + currentInd.getDadID()))){ //add 4 phased haps provided that we haven't used this trio already numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getDadID())).getMarker(i); byte dad1 = 
thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; } else { dadTb[i] = 5; dadUb[i] = 5; } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else { momTb[i] = 5; momUb[i] = 5; } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 5; momUb[i] = 5; } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 5; momUb[i] = 5; } } } } chrom.add(new 
Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb)); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getDadID()); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getMomID()); } } } } } double numChroms = chrom.size(); numBadGenotypes = new double[numMarkers]; percentBadGenotypes = new double[numMarkers]; for (int i = 0; i < numMarkers ; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chrom.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chrom.elementAt(j)).elementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } if (thisAllele == 0) { numBadGenotypes[i] ++; } } double maf = numa1 / (numa2+numa1) ; if (maf > 0.5) { maf = 1.0-maf; } markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chrom; //return chrom; }
1,110,725
public void linkageToChrom(boolean[] markerResults, PedFile pedFile){ Vector indList = pedFile.getOrder(); int numMarkers = 0; Vector usedParents = new Vector(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); for(int x=0; x < indList.size(); x++){ String[] indAndFamID = (String[])indList.elementAt(x); currentFamily = pedFile.getFamily(indAndFamID[0]); currentInd = currentFamily.getMember(indAndFamID[1]); if(currentInd.getIsTyped()){ //singleton if(currentFamily.getNumMembers() == 1){ numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == thisMarker[1]){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = 5; chrom2[i] = 5; } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2)); } else{ //skip if indiv is parent in trio or unaffected if (!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0") || currentInd.getAffectedStatus() != 2)){ //trio if (!(usedParents.contains( currentInd.getFamilyID() + " " + currentInd.getMomID()) || usedParents.contains(currentInd.getFamilyID() + " " + currentInd.getDadID()))){ //add 4 phased haps provided that we haven't used this trio already numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; thisMarker = 
(currentFamily.getMember(currentInd.getDadID())).getMarker(i); byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; } else { dadTb[i] = 5; dadUb[i] = 5; } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else { momTb[i] = 5; momUb[i] = 5; } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 5; momUb[i] = 5; } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = 5; 
dadUb[i] = 5; momTb[i] = 5; momUb[i] = 5; } } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb)); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getDadID()); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getMomID()); } } } } } double numChroms = chrom.size(); numBadGenotypes = new double[numMarkers]; percentBadGenotypes = new double[numMarkers]; for (int i = 0; i < numMarkers ; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chrom.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chrom.elementAt(j)).elementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } if (thisAllele == 0) { numBadGenotypes[i] ++; } } double maf = numa1 / (numa2+numa1) ; if (maf > 0.5) { maf = 1.0-maf; } markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chrom; //return chrom; }
public void linkageToChrom(boolean[] markerResults, PedFile pedFile){ Vector indList = pedFile.getOrder(); int numMarkers = 0; Vector usedParents = new Vector(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); for(int x=0; x < indList.size(); x++){ String[] indAndFamID = (String[])indList.elementAt(x); currentFamily = pedFile.getFamily(indAndFamID[0]); currentInd = currentFamily.getMember(indAndFamID[1]); if(currentInd.getIsTyped()){ //singleton if(currentFamily.getNumMembers() == 1){ numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == thisMarker[1]){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; else{ chrom1[i] = 5; chrom2[i] = 5; chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2)); else{ //skip if indiv is parent in trio or unaffected if (!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0") || currentInd.getAffectedStatus() != 2)){ //trio if (!(usedParents.contains( currentInd.getFamilyID() + " " + currentInd.getMomID()) || usedParents.contains(currentInd.getFamilyID() + " " + currentInd.getDadID()))){ //add 4 phased haps provided that we haven't used this trio already numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; thisMarker = 
(currentFamily.getMember(currentInd.getDadID())).getMarker(i); byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; else { dadTb[i] = 5; dadUb[i] = 5; if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; else { momTb[i] = 5; momUb[i] = 5; else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; else { dadTb[i] = dad2; dadUb[i] = dad1; if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; else { momTb[i] = mom2; momUb[i] = mom1; else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 5; momUb[i] = 5; else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 0; momUb[i] = 0; else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; else { dadTb[i] = kid1; else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; else { momTb[i] = kid1; else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; else { momTb[i] = kid1; momUb[i] = kid2; else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; else { dadTb[i] = kid1; dadUb[i] = kid2; else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; else { //everybody het dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 5; momUb[i] = 5; chrom.add(new 
Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb)); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getDadID()); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getMomID()); double numChroms = chrom.size(); numBadGenotypes = new double[numMarkers]; percentBadGenotypes = new double[numMarkers]; for (int i = 0; i < numMarkers ; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chrom.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chrom.elementAt(j)).elementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; else if (a1 == 0){ a1 = thisAllele; numa1++; else if (thisAllele == a1){ numa1++; else{ numa2++; if (thisAllele == 0) { numBadGenotypes[i] ++; double maf = numa1 / (numa2+numa1) ; if (maf > 0.5) { maf = 1.0-maf; markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; chromosomes = chrom; //return chrom;
1,110,726
public void linkageToChrom(boolean[] markerResults, PedFile pedFile){ Vector indList = pedFile.getOrder(); int numMarkers = 0; Vector usedParents = new Vector(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); for(int x=0; x < indList.size(); x++){ String[] indAndFamID = (String[])indList.elementAt(x); currentFamily = pedFile.getFamily(indAndFamID[0]); currentInd = currentFamily.getMember(indAndFamID[1]); if(currentInd.getIsTyped()){ //singleton if(currentFamily.getNumMembers() == 1){ numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == thisMarker[1]){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = 5; chrom2[i] = 5; } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2)); } else{ //skip if indiv is parent in trio or unaffected if (!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0") || currentInd.getAffectedStatus() != 2)){ //trio if (!(usedParents.contains( currentInd.getFamilyID() + " " + currentInd.getMomID()) || usedParents.contains(currentInd.getFamilyID() + " " + currentInd.getDadID()))){ //add 4 phased haps provided that we haven't used this trio already numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; thisMarker = 
(currentFamily.getMember(currentInd.getDadID())).getMarker(i); byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; } else { dadTb[i] = 5; dadUb[i] = 5; } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else { momTb[i] = 5; momUb[i] = 5; } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 5; momUb[i] = 5; } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = 5; 
dadUb[i] = 5; momTb[i] = 5; momUb[i] = 5; } } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb)); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getDadID()); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getMomID()); } } } } } double numChroms = chrom.size(); numBadGenotypes = new double[numMarkers]; percentBadGenotypes = new double[numMarkers]; for (int i = 0; i < numMarkers ; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chrom.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chrom.elementAt(j)).elementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } if (thisAllele == 0) { numBadGenotypes[i] ++; } } double maf = numa1 / (numa2+numa1) ; if (maf > 0.5) { maf = 1.0-maf; } markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chrom; //return chrom; }
public void linkageToChrom(boolean[] markerResults, PedFile pedFile){ Vector indList = pedFile.getOrder(); int numMarkers = 0; Vector usedParents = new Vector(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); for(int x=0; x < indList.size(); x++){ String[] indAndFamID = (String[])indList.elementAt(x); currentFamily = pedFile.getFamily(indAndFamID[0]); currentInd = currentFamily.getMember(indAndFamID[1]); if(currentInd.getIsTyped()){ //singleton if(currentFamily.getNumMembers() == 1){ numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == thisMarker[1]){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = 5; chrom2[i] = 5; } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2)); } else{ //skip if indiv is parent in trio or unaffected if (!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0") || currentInd.getAffectedStatus() != 2)){ //trio if (!(usedParents.contains( currentInd.getFamilyID() + " " + currentInd.getMomID()) || usedParents.contains(currentInd.getFamilyID() + " " + currentInd.getDadID()))){ //add 4 phased haps provided that we haven't used this trio already numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; thisMarker = 
(currentFamily.getMember(currentInd.getDadID())).getMarker(i); byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; } else { dadTb[i] = 5; dadUb[i] = 5; } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else { momTb[i] = 5; momUb[i] = 5; } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 5; momUb[i] = 5; } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = 5; 
dadUb[i] = 5; momTb[i] = 5; momUb[i] = 5; } } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb)); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getDadID()); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getMomID()); } } } } } double numChroms = chrom.size(); numBadGenotypes = new double[numMarkers]; percentBadGenotypes = new double[numMarkers]; int count = 0; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ count++; } } Chromosome.realIndex = new int[count]; int k = 0; for (int i =0; i < numMarkers; i++){ if (markerResults[i]){ Chromosome.realIndex[k] = i; k++; } } Vector markerInfo = new Vector(); for (int i = 0; i < numMarkers; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chrom.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chrom.elementAt(j)).elementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } if (thisAllele == 0) { numBadGenotypes[i] ++; } } double maf = numa1 / (numa2+numa1) ; if (maf > 0.5) { maf = 1.0-maf; } markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chrom; //return chrom; }
1,110,727
public void linkageToChrom(boolean[] markerResults, PedFile pedFile){ Vector indList = pedFile.getOrder(); int numMarkers = 0; Vector usedParents = new Vector(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); for(int x=0; x < indList.size(); x++){ String[] indAndFamID = (String[])indList.elementAt(x); currentFamily = pedFile.getFamily(indAndFamID[0]); currentInd = currentFamily.getMember(indAndFamID[1]); if(currentInd.getIsTyped()){ //singleton if(currentFamily.getNumMembers() == 1){ numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == thisMarker[1]){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = 5; chrom2[i] = 5; } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2)); } else{ //skip if indiv is parent in trio or unaffected if (!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0") || currentInd.getAffectedStatus() != 2)){ //trio if (!(usedParents.contains( currentInd.getFamilyID() + " " + currentInd.getMomID()) || usedParents.contains(currentInd.getFamilyID() + " " + currentInd.getDadID()))){ //add 4 phased haps provided that we haven't used this trio already numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; thisMarker = 
(currentFamily.getMember(currentInd.getDadID())).getMarker(i); byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; } else { dadTb[i] = 5; dadUb[i] = 5; } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else { momTb[i] = 5; momUb[i] = 5; } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 5; momUb[i] = 5; } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = 5; 
dadUb[i] = 5; momTb[i] = 5; momUb[i] = 5; } } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb)); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getDadID()); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getMomID()); } } } } } double numChroms = chrom.size(); numBadGenotypes = new double[numMarkers]; percentBadGenotypes = new double[numMarkers]; for (int i = 0; i < numMarkers ; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chrom.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chrom.elementAt(j)).elementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } if (thisAllele == 0) { numBadGenotypes[i] ++; } } double maf = numa1 / (numa2+numa1) ; if (maf > 0.5) { maf = 1.0-maf; } markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chrom; //return chrom; }
public void linkageToChrom(boolean[] markerResults, PedFile pedFile){ Vector indList = pedFile.getOrder(); int numMarkers = 0; Vector usedParents = new Vector(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); for(int x=0; x < indList.size(); x++){ String[] indAndFamID = (String[])indList.elementAt(x); currentFamily = pedFile.getFamily(indAndFamID[0]); currentInd = currentFamily.getMember(indAndFamID[1]); if(currentInd.getIsTyped()){ //singleton if(currentFamily.getNumMembers() == 1){ numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == thisMarker[1]){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = 5; chrom2[i] = 5; } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2)); } else{ //skip if indiv is parent in trio or unaffected if (!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0") || currentInd.getAffectedStatus() != 2)){ //trio if (!(usedParents.contains( currentInd.getFamilyID() + " " + currentInd.getMomID()) || usedParents.contains(currentInd.getFamilyID() + " " + currentInd.getDadID()))){ //add 4 phased haps provided that we haven't used this trio already numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; thisMarker = 
(currentFamily.getMember(currentInd.getDadID())).getMarker(i); byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; } else { dadTb[i] = 5; dadUb[i] = 5; } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else { momTb[i] = 5; momUb[i] = 5; } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 5; momUb[i] = 5; } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = 5; 
dadUb[i] = 5; momTb[i] = 5; momUb[i] = 5; } } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb)); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getDadID()); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getMomID()); } } } } } double numChroms = chrom.size(); numBadGenotypes = new double[numMarkers]; percentBadGenotypes = new double[numMarkers]; for (int i = 0; i < numMarkers ; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chrom.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chrom.elementAt(j)).unfilteredElementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } if (thisAllele == 0) { numBadGenotypes[i] ++; } } double maf = numa1 / (numa2+numa1) ; if (maf > 0.5) { maf = 1.0-maf; } markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chrom; //return chrom; }
1,110,728
void prepareGenotypeInput(File infile) throws IOException{ //this method is called to suck in data from a file (its only argument) //of genotypes and return a vector of Chromosome objects. String currentLine; Vector chroms = new Vector(); byte[] genos = new byte[0]; String ped, indiv; //read the file: BufferedReader in = new BufferedReader(new FileReader(infile)); boolean firstTime = true; while ((currentLine = in.readLine()) != null){ //each line is expected to be of the format: //ped indiv geno geno geno geno... StringTokenizer st = new StringTokenizer(currentLine); //first two tokens are expected to be ped, indiv ped = st.nextToken(); indiv = st.nextToken(); //all other tokens are loaded into a vector (they should all be genotypes) //the first time through, count number of genotypes for marker quality statistics if (firstTime){ numBadGenotypes = new double[st.countTokens()]; percentBadGenotypes = new double[st.countTokens()]; } genos = new byte[st.countTokens()]; int q = 0; while (st.hasMoreTokens()){ String thisGenotype = (String)st.nextElement(); if (thisGenotype.equals("h")) { genos[q] = 5; }else{ genos[q] = Byte.parseByte(thisGenotype); } if (genos[q] == 0) numBadGenotypes[q] ++; q++; } //a Chromosome is created and added to a vector of chromosomes. //this is what is evetually returned. 
chroms.add(new Chromosome(ped, indiv, genos, infile.getName())); firstTime = false; } //generate marker information in case none is subsequently available //also convert sums of bad genotypes to percentages for each marker double numChroms = chroms.size(); for (int i = 0; i < genos.length; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chroms.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chroms.elementAt(j)).elementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chroms; //return chroms; }
void prepareGenotypeInput(File infile) throws IOException{ //this method is called to suck in data from a file (its only argument) //of genotypes and return a vector of Chromosome objects. String currentLine; Vector chroms = new Vector(); byte[] genos = new byte[0]; String ped, indiv; //read the file: BufferedReader in = new BufferedReader(new FileReader(infile)); boolean firstTime = true; while ((currentLine = in.readLine()) != null){ //each line is expected to be of the format: //ped indiv geno geno geno geno... StringTokenizer st = new StringTokenizer(currentLine); //first two tokens are expected to be ped, indiv ped = st.nextToken(); indiv = st.nextToken(); //all other tokens are loaded into a vector (they should all be genotypes) //the first time through, count number of genotypes for marker quality statistics if (firstTime){ numBadGenotypes = new double[st.countTokens()]; percentBadGenotypes = new double[st.countTokens()]; } genos = new byte[st.countTokens()]; int q = 0; while (st.hasMoreTokens()){ String thisGenotype = (String)st.nextElement(); if (thisGenotype.equals("h")) { genos[q] = 5; }else{ genos[q] = Byte.parseByte(thisGenotype); } if (genos[q] == 0) numBadGenotypes[q] ++; q++; } //a Chromosome is created and added to a vector of chromosomes. //this is what is evetually returned. 
chroms.add(new Chromosome(ped, indiv, genos, infile.getName())); firstTime = false; } //generate marker information in case none is subsequently available //also convert sums of bad genotypes to percentages for each marker double numChroms = chroms.size(); for (int i = 0; i < genos.length; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chroms.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chroms.elementAt(j)).unfilteredElementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chroms; //return chroms; }
1,110,730
int prepareMarkerInput(File infile) throws IOException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position String currentLine; Vector markers = new Vector(); //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; while ((currentLine = in.readLine()) != null){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).elementAt(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } //System.out.println(numa1 + " " + numa2); double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; StringTokenizer st = new StringTokenizer(currentLine); markers.add(new SNP(st.nextToken(), Long.parseLong(st.nextToken()), infile.getName(), maf)); snpcount ++; } if (markerInfo.size() == markers.size()){ markerInfo = markers; return 1; }else{ return -1; } }
int prepareMarkerInput(File infile) throws IOException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position String currentLine; Vector markers = new Vector(); //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; while ((currentLine = in.readLine()) != null){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).unfilteredElementAt(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } //System.out.println(numa1 + " " + numa2); double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; StringTokenizer st = new StringTokenizer(currentLine); markers.add(new SNP(st.nextToken(), Long.parseLong(st.nextToken()), infile.getName(), maf)); snpcount ++; } if (markerInfo.size() == markers.size()){ markerInfo = markers; return 1; }else{ return -1; } }
1,110,732
int prepareMarkerInput(File infile) throws IOException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position String currentLine; Vector markers = new Vector(); //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; while ((currentLine = in.readLine()) != null){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).elementAt(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } //System.out.println(numa1 + " " + numa2); double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; StringTokenizer st = new StringTokenizer(currentLine); markers.add(new SNP(st.nextToken(), Long.parseLong(st.nextToken()), infile.getName(), maf)); snpcount ++; } if (markerInfo.size() == markers.size()){ markerInfo = markers; return 1; }else{ return -1; } }
int prepareMarkerInput(File infile) throws IOException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position String currentLine; Vector markers = new Vector(); //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; while ((currentLine = in.readLine()) != null){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).elementAt(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } //System.out.println(numa1 + " " + numa2); double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; StringTokenizer st = new StringTokenizer(currentLine); markers.add(new SNP(st.nextToken(), Long.parseLong(st.nextToken()), infile.getName(), maf)); snpcount ++; } if (markerInfo.size() == markers.size()){ markerInfo = markers; return 1; }else{ return -1; } }
1,110,733
public Object javaToSql( Object src ) { log.debug( "javaToSql: " + src.getClass().getName() + " " + src ); if ( src instanceof Date ) { return new Timestamp( ((Date)src).getTime() ); } return src; }
public Object javaToSql( Object src ) { log.debug( "javaToSql: " + src ); if ( src instanceof Date ) { return new Timestamp( ((Date)src).getTime() ); } return src; }
1,110,734
public Object sqlToJava( Object src ) { log.debug( "sqlToJava: " + src.getClass().getName() + " " + src ); if ( src instanceof Timestamp ) { return new Date( ((Timestamp)src).getTime() ); } return src; }
public Object sqlToJava( Object src ) { log.debug( "sqlToJava: " + src ); if ( src instanceof Timestamp ) { return new Date( ((Timestamp)src).getTime() ); } return src; }
1,110,735
public void dragDropEnd(DragSourceDropEvent dsde) { System.out.println("DBTree: got dragDropEnd event"); }
public void dragDropEnd(DragSourceDropEvent dsde) { }
1,110,736
public void dragEnter(DragSourceDragEvent dsde) { System.out.println("DBTree: got dragEnter event"); }
public void dragEnter(DragSourceDragEvent dsde) { }
1,110,737
public void dragExit(DragSourceEvent dse) { System.out.println("DBTree: got dragExit event"); }
public void dragExit(DragSourceEvent dse) { }
1,110,738
public void dragOver(DragSourceDragEvent dsde) { System.out.println("DBTree: got dragOver event"); }
public void dragOver(DragSourceDragEvent dsde) { }
1,110,739
public void dropActionChanged(DragSourceDragEvent dsde) { System.out.println("DBTree: got dropActionChanged event"); }
public void dropActionChanged(DragSourceDragEvent dsde) { }
1,110,740
public void run(Context context, XMLOutput output) throws Exception { output.write( text ); }
public void run(JellyContext context, XMLOutput output) throws Exception { output.write( text ); }
1,110,741
void readGenotypes(String[] inputOptions, int type){ //input is a 2 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //type is either 3 or 4 for ped and hapmap files respectively final File inFile = new File(inputOptions[0]); try { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); if (inFile.length() < 1){ throw new HaploViewException("Genotype file is empty or nonexistent: " + inFile.getName()); } if (type == HAPS){ //these are not available for non ped files viewMenuItems[VIEW_CHECK_NUM].setEnabled(false); viewMenuItems[VIEW_TDT_NUM].setEnabled(false); Options.setAssocTest(ASSOC_NONE); } theData = new HaploData(); Vector result = null; if (type == HAPS){ theData.prepareHapsInput(new File(inputOptions[0])); }else{ result = theData.linkageToChrom(inFile, type); } if(type != HAPS && theData.getPedFile().isBogusParents()) { JOptionPane.showMessageDialog(this, "One or more individuals in the file reference non-existent parents.\nThese references have been ignored.", "File Error", JOptionPane.ERROR_MESSAGE); } //deal with marker information theData.infoKnown = false; File markerFile; if (inputOptions[1] == null){ markerFile = null; }else{ markerFile = new File(inputOptions[1]); } checkPanel = null; if (type == HAPS){ readMarkers(markerFile, null); //initialize realIndex Chromosome.doFilter(Chromosome.getUnfilteredSize()); }else{ readMarkers(markerFile, theData.getPedFile().getHMInfo()); checkPanel = new CheckDataPanel(this); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); boolean[] markerResults = new boolean[result.size()]; for (int i = 0; i < result.size(); i++){ if (((MarkerResult)result.get(i)).getRating() > 0 && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2){ markerResults[i] = true; }else{ markerResults[i] = false; } } //set up the indexing to take into account skipped markers. 
Chromosome.doFilter(markerResults); } //let's start the math final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; changeKey(); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab = tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); displayMenu.setEnabled(true); analysisMenu.setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay 
ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); metaCheckPanel.add(checkPanel); cdc = new CheckDataController(window); metaCheckPanel.add(cdc); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(Options.getAssocTest() != ASSOC_NONE) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; //viewItems[VIEW_TDT_NUM] = VIEW_TDT; JTabbedPane metaAssoc = new JTabbedPane(); try{ tdtPanel = new TDTPanel(theData.getPedFile()); } catch(PedFileException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } metaAssoc.add("Single Marker", tdtPanel); HaploAssocPanel htp = new HaploAssocPanel(theData.getHaplotypes()); metaAssoc.add("Haplotypes", htp); tabs.addTab("Association Results", metaAssoc); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); for (int i = 0; i < blockMenuItems.length; i++){ blockMenuItems[i].setEnabled(true); } clearBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); blocksItem.setEnabled(true); exportMenuItems[2].setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch(IOException ioexec) { 
JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } }
void readGenotypes(String[] inputOptions, int type){ //input is a 2 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //type is either 3 or 4 for ped and hapmap files respectively final File inFile = new File(inputOptions[0]); try { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); if (inFile.length() < 1){ throw new HaploViewException("Genotype file is empty or nonexistent: " + inFile.getName()); } if (type == HAPS){ //these are not available for non ped files viewMenuItems[VIEW_CHECK_NUM].setEnabled(false); viewMenuItems[VIEW_TDT_NUM].setEnabled(false); Options.setAssocTest(ASSOC_NONE); } theData = new HaploData(); if (type == HAPS){ theData.prepareHapsInput(new File(inputOptions[0])); }else{ result = theData.linkageToChrom(inFile, type); } if(type != HAPS && theData.getPedFile().isBogusParents()) { JOptionPane.showMessageDialog(this, "One or more individuals in the file reference non-existent parents.\nThese references have been ignored.", "File Error", JOptionPane.ERROR_MESSAGE); } //deal with marker information theData.infoKnown = false; File markerFile; if (inputOptions[1] == null){ markerFile = null; }else{ markerFile = new File(inputOptions[1]); } checkPanel = null; if (type == HAPS){ readMarkers(markerFile, null); //initialize realIndex Chromosome.doFilter(Chromosome.getUnfilteredSize()); }else{ readMarkers(markerFile, theData.getPedFile().getHMInfo()); checkPanel = new CheckDataPanel(this); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); boolean[] markerResults = new boolean[result.size()]; for (int i = 0; i < result.size(); i++){ if (((MarkerResult)result.get(i)).getRating() > 0 && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2){ markerResults[i] = true; }else{ markerResults[i] = false; } } //set up the indexing to take into account skipped markers. 
Chromosome.doFilter(markerResults); } //let's start the math final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; changeKey(); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab = tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); displayMenu.setEnabled(true); analysisMenu.setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay 
ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); metaCheckPanel.add(checkPanel); cdc = new CheckDataController(window); metaCheckPanel.add(cdc); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(Options.getAssocTest() != ASSOC_NONE) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; //viewItems[VIEW_TDT_NUM] = VIEW_TDT; JTabbedPane metaAssoc = new JTabbedPane(); try{ tdtPanel = new TDTPanel(theData.getPedFile()); } catch(PedFileException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } metaAssoc.add("Single Marker", tdtPanel); HaploAssocPanel htp = new HaploAssocPanel(theData.getHaplotypes()); metaAssoc.add("Haplotypes", htp); tabs.addTab("Association Results", metaAssoc); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); for (int i = 0; i < blockMenuItems.length; i++){ blockMenuItems[i].setEnabled(true); } clearBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); blocksItem.setEnabled(true); exportMenuItems[2].setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch(IOException ioexec) { 
JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } }
1,110,742
void readGenotypes(String[] inputOptions, int type){ //input is a 2 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //type is either 3 or 4 for ped and hapmap files respectively final File inFile = new File(inputOptions[0]); try { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); if (inFile.length() < 1){ throw new HaploViewException("Genotype file is empty or nonexistent: " + inFile.getName()); } if (type == HAPS){ //these are not available for non ped files viewMenuItems[VIEW_CHECK_NUM].setEnabled(false); viewMenuItems[VIEW_TDT_NUM].setEnabled(false); Options.setAssocTest(ASSOC_NONE); } theData = new HaploData(); Vector result = null; if (type == HAPS){ theData.prepareHapsInput(new File(inputOptions[0])); }else{ result = theData.linkageToChrom(inFile, type); } if(type != HAPS && theData.getPedFile().isBogusParents()) { JOptionPane.showMessageDialog(this, "One or more individuals in the file reference non-existent parents.\nThese references have been ignored.", "File Error", JOptionPane.ERROR_MESSAGE); } //deal with marker information theData.infoKnown = false; File markerFile; if (inputOptions[1] == null){ markerFile = null; }else{ markerFile = new File(inputOptions[1]); } checkPanel = null; if (type == HAPS){ readMarkers(markerFile, null); //initialize realIndex Chromosome.doFilter(Chromosome.getUnfilteredSize()); }else{ readMarkers(markerFile, theData.getPedFile().getHMInfo()); checkPanel = new CheckDataPanel(this); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); boolean[] markerResults = new boolean[result.size()]; for (int i = 0; i < result.size(); i++){ if (((MarkerResult)result.get(i)).getRating() > 0 && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2){ markerResults[i] = true; }else{ markerResults[i] = false; } } //set up the indexing to take into account skipped markers. 
Chromosome.doFilter(markerResults); } //let's start the math final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; changeKey(); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab = tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); displayMenu.setEnabled(true); analysisMenu.setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay 
ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); metaCheckPanel.add(checkPanel); cdc = new CheckDataController(window); metaCheckPanel.add(cdc); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(Options.getAssocTest() != ASSOC_NONE) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; //viewItems[VIEW_TDT_NUM] = VIEW_TDT; JTabbedPane metaAssoc = new JTabbedPane(); try{ tdtPanel = new TDTPanel(theData.getPedFile()); } catch(PedFileException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } metaAssoc.add("Single Marker", tdtPanel); HaploAssocPanel htp = new HaploAssocPanel(theData.getHaplotypes()); metaAssoc.add("Haplotypes", htp); tabs.addTab("Association Results", metaAssoc); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); for (int i = 0; i < blockMenuItems.length; i++){ blockMenuItems[i].setEnabled(true); } clearBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); blocksItem.setEnabled(true); exportMenuItems[2].setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch(IOException ioexec) { 
JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } }
void readGenotypes(String[] inputOptions, int type){ //input is a 2 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //type is either 3 or 4 for ped and hapmap files respectively final File inFile = new File(inputOptions[0]); try { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); if (inFile.length() < 1){ throw new HaploViewException("Genotype file is empty or nonexistent: " + inFile.getName()); } if (type == HAPS){ //these are not available for non ped files viewMenuItems[VIEW_CHECK_NUM].setEnabled(false); viewMenuItems[VIEW_TDT_NUM].setEnabled(false); Options.setAssocTest(ASSOC_NONE); } theData = new HaploData(); Vector result = null; if (type == HAPS){ theData.prepareHapsInput(new File(inputOptions[0])); }else{ theData.linkageToChrom(inFile, type); } if(type != HAPS && theData.getPedFile().isBogusParents()) { JOptionPane.showMessageDialog(this, "One or more individuals in the file reference non-existent parents.\nThese references have been ignored.", "File Error", JOptionPane.ERROR_MESSAGE); } //deal with marker information theData.infoKnown = false; File markerFile; if (inputOptions[1] == null){ markerFile = null; }else{ markerFile = new File(inputOptions[1]); } checkPanel = null; if (type == HAPS){ readMarkers(markerFile, null); //initialize realIndex Chromosome.doFilter(Chromosome.getUnfilteredSize()); }else{ readMarkers(markerFile, theData.getPedFile().getHMInfo()); checkPanel = new CheckDataPanel(this); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); boolean[] markerResults = new boolean[result.size()]; for (int i = 0; i < result.size(); i++){ if (((MarkerResult)result.get(i)).getRating() > 0 && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2){ markerResults[i] = true; }else{ markerResults[i] = false; } } //set up the indexing to take into account skipped markers. 
Chromosome.doFilter(markerResults); } //let's start the math final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; changeKey(); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab = tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); displayMenu.setEnabled(true); analysisMenu.setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay 
ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); metaCheckPanel.add(checkPanel); cdc = new CheckDataController(window); metaCheckPanel.add(cdc); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(Options.getAssocTest() != ASSOC_NONE) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; //viewItems[VIEW_TDT_NUM] = VIEW_TDT; JTabbedPane metaAssoc = new JTabbedPane(); try{ tdtPanel = new TDTPanel(theData.getPedFile()); } catch(PedFileException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } metaAssoc.add("Single Marker", tdtPanel); HaploAssocPanel htp = new HaploAssocPanel(theData.getHaplotypes()); metaAssoc.add("Haplotypes", htp); tabs.addTab("Association Results", metaAssoc); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); for (int i = 0; i < blockMenuItems.length; i++){ blockMenuItems[i].setEnabled(true); } clearBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); blocksItem.setEnabled(true); exportMenuItems[2].setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch(IOException ioexec) { 
JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } }
1,110,743
public PairwiseLinkage computeDPrime(int pos1, int pos2){ int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } //check for non-polymorphic markers if (Chromosome.getUnfilteredMarker(pos1).getMAF() == 0 || Chromosome.getUnfilteredMarker(pos2).getMAF() == 0){ return null; } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[Chromosome.getUnfilteredMarker(pos1).getMajor()]=1; marker1num[Chromosome.getUnfilteredMarker(pos1).getMinor()]=2; marker2num[0]=0; marker2num[Chromosome.getUnfilteredMarker(pos2).getMajor()]=1; marker2num[Chromosome.getUnfilteredMarker(pos2).getMinor()]=2; byte a1,a2,b1,b2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables a1 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos1]; a2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; b1 = ((Chromosome) chromosomes.elementAt(++i)).genotypes[pos1]; b2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 >= 5 && a2 >= 5) || (a1 >= 5 && !(a2 == b2)) || (a2 >= 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 >= 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 >= 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = 
twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[101]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]))/LN10 + ((double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + 
probHaps[AB]*probHaps[BA]))/LN10; if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + (double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10; loglike0 = (known[AA]*Math.log(pA1*pA2) + known[AB]*Math.log(pA1*pB2) + known[BA]*Math.log(pB1*pA2) + known[BB]*Math.log(pB1*pB2) + (double)unknownDH*Math.log(2*pA1*pA2*pB1*pB2))/LN10; num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ //done in this slightly asinine way because of a compiler bugz0r in the dec-alpha version of java //which causes it to try to parallelize the swapping operations and mis-schedules them pA2 = pA2 + pB2; pB2 = pA2 - pB2; pA2 = pA2 - pB2; //pA2=pB2;pB2=temp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = 
num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = (known[AA]*Math.log(tmpAA) + known[AB]*Math.log(tmpAB) + known[BA]*Math.log(tmpBA) + known[BB]*Math.log(tmpBB) + (double)unknownDH*Math.log(tmpAA*tmpBB + tmpAB*tmpBA))/LN10; } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
public PairwiseLinkage computeDPrime(int pos1, int pos2){ int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } //check for non-polymorphic markers if (Chromosome.getUnfilteredMarker(pos1).getMAF() == 0 || Chromosome.getUnfilteredMarker(pos2).getMAF() == 0){ return null; } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[Chromosome.getUnfilteredMarker(pos1).getMajor()]=1; marker1num[Chromosome.getUnfilteredMarker(pos1).getMinor()]=2; marker2num[0]=0; marker2num[Chromosome.getUnfilteredMarker(pos2).getMajor()]=1; marker2num[Chromosome.getUnfilteredMarker(pos2).getMinor()]=2; byte a1,a2,b1,b2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables a1 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos1]; a2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; b1 = ((Chromosome) chromosomes.elementAt(++i)).genotypes[pos1]; b2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 >= 5 && a2 >= 5) || (a1 >= 5 && !(a2 == b2)) || (a2 >= 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 >= 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 >= 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = 
twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[101]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]))/LN10 + ((double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + 
probHaps[AB]*probHaps[BA]))/LN10; if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + (double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10; loglike0 = (known[AA]*Math.log(pA1*pA2) + known[AB]*Math.log(pA1*pB2) + known[BA]*Math.log(pB1*pA2) + known[BB]*Math.log(pB1*pB2) + (double)unknownDH*Math.log(2*pA1*pA2*pB1*pB2))/LN10; num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ //done in this slightly asinine way because of a compiler bugz0r in the dec-alpha version of java //which causes it to try to parallelize the swapping operations and mis-schedules them pA2 = pA2 + pB2; pB2 = pA2 - pB2; pA2 = pA2 - pB2; //pA2=pB2;pB2=temp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = 
num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = (known[AA]*Math.log(tmpAA) + known[AB]*Math.log(tmpAB) + known[BA]*Math.log(tmpBA) + known[BB]*Math.log(tmpBB) + (double)unknownDH*Math.log(tmpAA*tmpBB + tmpAB*tmpBA))/LN10; } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
1,110,745
public PairwiseLinkage computeDPrime(int pos1, int pos2){ int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } //check for non-polymorphic markers if (Chromosome.getUnfilteredMarker(pos1).getMAF() == 0 || Chromosome.getUnfilteredMarker(pos2).getMAF() == 0){ return null; } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[Chromosome.getUnfilteredMarker(pos1).getMajor()]=1; marker1num[Chromosome.getUnfilteredMarker(pos1).getMinor()]=2; marker2num[0]=0; marker2num[Chromosome.getUnfilteredMarker(pos2).getMajor()]=1; marker2num[Chromosome.getUnfilteredMarker(pos2).getMinor()]=2; byte a1,a2,b1,b2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables a1 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos1]; a2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; b1 = ((Chromosome) chromosomes.elementAt(++i)).genotypes[pos1]; b2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 >= 5 && a2 >= 5) || (a1 >= 5 && !(a2 == b2)) || (a2 >= 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 >= 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 >= 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = 
twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[101]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]))/LN10 + ((double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + 
probHaps[AB]*probHaps[BA]))/LN10; if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + (double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10; loglike0 = (known[AA]*Math.log(pA1*pA2) + known[AB]*Math.log(pA1*pB2) + known[BA]*Math.log(pB1*pA2) + known[BB]*Math.log(pB1*pB2) + (double)unknownDH*Math.log(2*pA1*pA2*pB1*pB2))/LN10; num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ //done in this slightly asinine way because of a compiler bugz0r in the dec-alpha version of java //which causes it to try to parallelize the swapping operations and mis-schedules them pA2 = pA2 + pB2; pB2 = pA2 - pB2; pA2 = pA2 - pB2; //pA2=pB2;pB2=temp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = 
num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = (known[AA]*Math.log(tmpAA) + known[AB]*Math.log(tmpAB) + known[BA]*Math.log(tmpBA) + known[BB]*Math.log(tmpBB) + (double)unknownDH*Math.log(tmpAA*tmpBB + tmpAB*tmpBA))/LN10; } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
/**
 * Computes pairwise linkage-disequilibrium statistics between the markers at
 * pos1 and pos2: D', the LOD score, r^2 and confidence bounds on D'
 * (posterior-distribution method referenced below as Gabriel et al. 2002).
 * Two-marker haplotype frequencies are fitted by EM; double heterozygotes are
 * phase-ambiguous and treated as unknowns.  Results are stored into the
 * instance fields known[], probHaps[], numHaps[], unknownDH and total_chroms.
 *
 * @param pos1 index of the first marker (unfiltered numbering)
 * @param pos2 index of the second marker (unfiltered numbering)
 * @return the pairwise statistics, or null if either marker is monomorphic
 */
public PairwiseLinkage computeDPrime(int pos1, int pos2){
    int doublehet = 0;
    // counts of phase-resolved two-marker haplotypes, indexed
    // [marker1 allele][marker2 allele] with 1 = major, 2 = minor (index 0 unused)
    int[][] twoMarkerHaplos = new int[3][3];
    for (int i = 0; i < twoMarkerHaplos.length; i++){
        for (int j = 0; j < twoMarkerHaplos[i].length; j++){
            twoMarkerHaplos[i][j] = 0;
        }
    }
    //check for non-polymorphic markers
    if (Chromosome.getUnfilteredMarker(pos1).getMAF() == 0 || Chromosome.getUnfilteredMarker(pos2).getMAF() == 0){
        return null;
    }
    // maps raw allele codes to 1 (major) / 2 (minor) for each marker
    int[] marker1num = new int[5];
    int[] marker2num = new int[5];
    marker1num[0]=0;
    marker1num[Chromosome.getUnfilteredMarker(pos1).getMajor()]=1;
    marker1num[Chromosome.getUnfilteredMarker(pos1).getMinor()]=2;
    marker2num[0]=0;
    marker2num[Chromosome.getUnfilteredMarker(pos2).getMajor()]=1;
    marker2num[Chromosome.getUnfilteredMarker(pos2).getMinor()]=2;
    byte a1,a2,b1,b2;
    //iterate through all chromosomes in dataset
    // (pairs of consecutive elements; the ++i below consumes the partner
    // chromosome in the same pass)
    for (int i = 0; i < chromosomes.size(); i++){
        //System.out.println(i + " " + pos1 + " " + pos2);
        //assign alleles for each of a pair of chromosomes at a marker to four variables
        a1 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos1];
        a2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2];
        b1 = ((Chromosome) chromosomes.elementAt(++i)).genotypes[pos1];
        b2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2];
        if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){
            //skip missing data
        } else if ((a1 >= 5 && a2 >= 5) || (a1 >= 5 && !(a2 == b2)) || (a2 >= 5 && !(a1 == b1))) doublehet++;
        //find doublehets and resolved haplotypes
        // (allele codes >= 5 mark heterozygous/unphased positions)
        else if (a1 >= 5){
            twoMarkerHaplos[1][marker2num[a2]]++;
            twoMarkerHaplos[2][marker2num[a2]]++;
        } else if (a2 >= 5 || b2 >= 5){
            // het at marker 2 only; b2 is tested too because the het code may
            // sit on the partner chromosome while a2 itself is < 5
            twoMarkerHaplos[marker1num[a1]][1]++;
            twoMarkerHaplos[marker1num[a1]][2]++;
        } else {
            twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++;
            twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++;
        }
    }
    //another monomorphic marker check
    int r1, r2, c1, c2;
    r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2];
    r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2];
    c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1];
    c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2];
    if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){
        return new PairwiseLinkage(1,0,0,0,0,new double[0]);
    }
    //compute D Prime for this pair of markers.
    //return is a tab delimited string of d', lod, r^2, CI(low), CI(high)
    int i,count;
    //int j,k,itmp;
    int low_i = 0;
    int high_i = 0;
    double loglike, oldloglike;// meand, mean2d, sd;
    double tmp;//g,h,m,tmp,r;
    double num, denom1, denom2, denom, dprime;//, real_dprime;
    double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq;
    double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB;
    double total_prob, sum_prob;
    double lsurface[] = new double[101];
    /* store arguments in externals and compute allele frequencies */
    known[AA]=twoMarkerHaplos[1][1];
    known[AB]=twoMarkerHaplos[1][2];
    known[BA]=twoMarkerHaplos[2][1];
    known[BB]=twoMarkerHaplos[2][2];
    unknownDH=doublehet;
    total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH));
    pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms;
    pB1 = 1.0-pA1;
    pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms;
    pB2 = 1.0-pA2;
    const_prob = 0.1;
    /* set initial conditions */
    if (const_prob < 0.00) {
        probHaps[AA]=pA1*pA2;
        probHaps[AB]=pA1*pB2;
        probHaps[BA]=pB1*pA2;
        probHaps[BB]=pB1*pB2;
    } else {
        probHaps[AA]=const_prob;
        probHaps[AB]=const_prob;
        probHaps[BA]=const_prob;
        probHaps[BB]=const_prob;;
        /* so that the first count step will produce an initial
           estimate without inferences (this should be closer and
           therefore speedier than assuming they are all at equal frequency) */
        count_haps(0);
        estimate_p();
    }
    /* now we have an initial reasonable guess at p we
       can start the EM - let the fun begin */
    const_prob=0.0;
    count=1;
    loglike=-999999999.0;
    do {
        oldloglike=loglike;
        count_haps(count);
        // log10-likelihood of the current haplotype-frequency estimate
        loglike = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]))/LN10 + ((double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10;
        if (Math.abs(loglike-oldloglike) < TOLERANCE) break;
        estimate_p();
        count++;
    } while(count < 1000);
    /* in reality I've never seen it need more than 10 or so iterations
       to converge so this is really here just to keep it from running
       off into eternity */
    // likelihood at the EM solution (loglike1) vs. linkage equilibrium (loglike0)
    loglike1 = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + (double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10;
    loglike0 = (known[AA]*Math.log(pA1*pA2) + known[AB]*Math.log(pA1*pB2) + known[BA]*Math.log(pB1*pA2) + known[BB]*Math.log(pB1*pB2) + (double)unknownDH*Math.log(2*pA1*pA2*pB1*pB2))/LN10;
    num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA];
    if (num < 0) {
        /* flip matrix so we get the positive D' */
        /* flip AA with AB and BA with BB */
        tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp;
        tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp;
        /* flip frequency of second allele */
        //done in this slightly asinine way because of a compiler bugz0r in the dec-alpha version of java
        //which causes it to try to parallelize the swapping operations and mis-schedules them
        pA2 = pA2 + pB2;
        pB2 = pA2 - pB2;
        pA2 = pA2 - pB2;
        //pA2=pB2;pB2=temp;
        /* flip counts in the same fashion as p's */
        tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp;
        tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp;
        /* num has now undergone a sign change */
        num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA];
        /* flip known array for likelihood computation */
        tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp;
        tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp;
    }
    denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]);
    denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]);
    if (denom1 < denom2) { denom = denom1; } else { denom = denom2; }
    dprime = num/denom;
    /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */
    rsq = num*num/(pA1*pB1*pA2*pB2);
    //real_dprime=dprime;
    // likelihood surface over D' in [0,1] in steps of 0.01
    for (i=0; i<=100; i++) {
        dpr = (double)i*0.01;
        tmpAA = dpr*denom + pA1*pA2;
        tmpAB = pA1-tmpAA;
        tmpBA = pA2-tmpAA;
        tmpBB = pB1-tmpBA;
        if (i==100) {
            /* one value will be 0 */
            if (tmpAA < 1e-10) tmpAA=1e-10;
            if (tmpAB < 1e-10) tmpAB=1e-10;
            if (tmpBA < 1e-10) tmpBA=1e-10;
            if (tmpBB < 1e-10) tmpBB=1e-10;
        }
        lsurface[i] = (known[AA]*Math.log(tmpAA) + known[AB]*Math.log(tmpAB) + known[BA]*Math.log(tmpBA) + known[BB]*Math.log(tmpBB) + (double)unknownDH*Math.log(tmpAA*tmpBB + tmpAB*tmpBA))/LN10;
    }
    /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into
       posterior dist of D' - assumes a flat prior dist. of D' - someday we
       may be able to make this even more clever by adjusting given the
       distribution of observed D' values for any given distance after some
       large scale studies are complete */
    total_prob=sum_prob=0.0;
    for (i=0; i<=100; i++) {
        lsurface[i] -= loglike1;
        lsurface[i] = Math.pow(10.0,lsurface[i]);
        total_prob += lsurface[i];
    }
    // lower bound: first index where the cumulative posterior crosses 5%
    for (i=0; i<=100; i++) {
        sum_prob += lsurface[i];
        if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) {
            low_i = i-1;
            break;
        }
    }
    sum_prob=0.0;
    // upper bound: scan down from D' = 1 for the opposite 5% crossing
    for (i=100; i>=0; i--) {
        sum_prob += lsurface[i];
        if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) {
            high_i = i+1;
            break;
        }
    }
    if (high_i > 100){
        high_i = 100;
    }
    double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]};
    return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray);
}
1,110,746
/**
 * Builds the SMTP service test configuration: enables the service, sets the
 * listener port, and assembles the nested "handler" configuration (HELO name,
 * connection timeout, authorized addresses, message size limit, auth mode and
 * the handler chain).
 */
public void init() {
    setAttribute("enabled", true);
    addChild(getValuedConfiguration("port", "" + m_smtpListenerPort));

    DefaultConfiguration handlerConfig = new DefaultConfiguration("handler");
    handlerConfig.addChild(getValuedConfiguration("helloName", "myMailServer"));
    handlerConfig.addChild(getValuedConfiguration("connectiontimeout", "360000"));
    handlerConfig.addChild(getValuedConfiguration("authorizedAddresses", m_authorizedAddresses));
    handlerConfig.addChild(getValuedConfiguration("maxmessagesize", "" + 0));
    handlerConfig.addChild(getValuedConfiguration("authRequired", m_authorizingMode));
    // FIX: use this service's own handler chain.  The previous call to
    // createRemoteManagerHandlerChainConfiguration() attached the
    // RemoteManager handler chain to the SMTP handler (apparent copy-paste
    // from the RemoteManager test configuration).
    handlerConfig.addChild(createHandlerChainConfiguration());
    addChild(handlerConfig);
}
/**
 * Assembles the SMTP service test configuration: the service is enabled,
 * bound to the configured listener port, and given a nested "handler"
 * element carrying the HELO name, connection timeout, authorized addresses,
 * message size limit, auth mode and the handler chain.
 */
public void init() {
    // Enable the service and bind the listener port.
    setAttribute("enabled", true);
    addChild(getValuedConfiguration("port", "" + m_smtpListenerPort));

    // Build the nested <handler> element child by child.
    DefaultConfiguration handler = new DefaultConfiguration("handler");
    handler.addChild(getValuedConfiguration("helloName", "myMailServer"));
    handler.addChild(getValuedConfiguration("connectiontimeout", "360000"));
    handler.addChild(getValuedConfiguration("authorizedAddresses", m_authorizedAddresses));
    handler.addChild(getValuedConfiguration("maxmessagesize", "" + 0));
    handler.addChild(getValuedConfiguration("authRequired", m_authorizingMode));
    handler.addChild(createHandlerChainConfiguration());

    addChild(handler);
}
1,110,747
/**
 * Returns the Tag instance bound to this script in the given context,
 * creating and caching one on first use.
 *
 * @param context the evaluation context that caches per-script tag instances
 * @return the cached or newly created tag; may be null if createTag() returns null
 * @throws JellyException if tag creation fails
 */
public Tag getTag(JellyContext context) throws JellyException {
    Tag tag = context.getTagOfTagScript(this);
    if ( tag == null ) {
        tag = createTag();
        if ( tag != null ) {
            context.setTagForScript(this,tag);
        }
    }
    // FIX: removed the unconditional configureTag(tag, context) call that ran
    // on every lookup — it re-applied configuration even when returning a
    // previously cached tag instance.  This matches the corrected variant of
    // this method elsewhere in this file; tag configuration is expected to be
    // performed by the caller at evaluation time (TODO confirm against
    // TagScript.run()).
    return tag;
}
/**
 * Looks up the Tag instance associated with this script in the given
 * context, lazily creating and caching one the first time it is needed.
 *
 * @param context the evaluation context that caches per-script tag instances
 * @return the cached or freshly created tag; may be null if createTag() yields null
 * @throws JellyException if tag creation fails
 */
public Tag getTag(JellyContext context) throws JellyException {
    // Fast path: reuse the instance already cached for this script.
    Tag cached = context.getTagOfTagScript(this);
    if (cached != null) {
        return cached;
    }
    // Slow path: create a new tag and remember it (only if non-null).
    Tag created = createTag();
    if (created != null) {
        context.setTagForScript(this, created);
    }
    return created;
}
1,110,749
void prepareMarkerInput(File infile, long maxdist) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; Vector markers = new Vector(); long negMaxdist = -1 * maxdist; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } if (lineCount > Chromosome.markers.length){ throw(new HaploViewException("Info file error:\nToo many markers")); } //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).getGenotype(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; String name = st.nextToken(); String loc = st.nextToken(); try{ markers.add(new SNP(name, 
Long.parseLong(loc), infile.getName(), Math.rint(maf*100.0)/100.0)); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + loc + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } snpcount ++; } if (lineCount < Chromosome.markers.length){ throw(new HaploViewException("Info file error:\nNot enough markers")); } Chromosome.markers = markers.toArray(); if (dPrimeTable != null){ //loop through the dprime table to null-out distant markers for (int pos2 = 1; pos2 < dPrimeTable.length; pos2++){ for (int pos1 = 0; pos1 < pos2; pos1++){ long sep = Chromosome.getMarker(pos1).getPosition() - Chromosome.getMarker(pos2).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ dPrimeTable[pos1][pos2] = null; continue; } } } } filteredDPrimeTable = getFilteredTable(); } infoKnown=true; }else{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; markerInfo.add(new SNP(String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
void prepareMarkerInput(File infile, long maxdist) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; Vector markers = new Vector(); long negMaxdist = -1 * maxdist; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } if (lineCount > Chromosome.markers.length){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file.")); } //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).getGenotype(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; String name = st.nextToken(); String loc = st.nextToken(); 
try{ markers.add(new SNP(name, Long.parseLong(loc), infile.getName(), Math.rint(maf*100.0)/100.0)); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + loc + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } snpcount ++; } if (lineCount < Chromosome.markers.length){ throw(new HaploViewException("Info file error:\nNot enough markers")); } Chromosome.markers = markers.toArray(); if (dPrimeTable != null){ //loop through the dprime table to null-out distant markers for (int pos2 = 1; pos2 < dPrimeTable.length; pos2++){ for (int pos1 = 0; pos1 < pos2; pos1++){ long sep = Chromosome.getMarker(pos1).getPosition() - Chromosome.getMarker(pos2).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ dPrimeTable[pos1][pos2] = null; continue; } } } } filteredDPrimeTable = getFilteredTable(); } infoKnown=true; }else{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; markerInfo.add(new SNP(String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
1,110,751
void prepareMarkerInput(File infile, long maxdist) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; Vector markers = new Vector(); long negMaxdist = -1 * maxdist; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } if (lineCount > Chromosome.markers.length){ throw(new HaploViewException("Info file error:\nToo many markers")); } //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).getGenotype(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; String name = st.nextToken(); String loc = st.nextToken(); try{ markers.add(new SNP(name, 
Long.parseLong(loc), infile.getName(), Math.rint(maf*100.0)/100.0)); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + loc + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } snpcount ++; } if (lineCount < Chromosome.markers.length){ throw(new HaploViewException("Info file error:\nNot enough markers")); } Chromosome.markers = markers.toArray(); if (dPrimeTable != null){ //loop through the dprime table to null-out distant markers for (int pos2 = 1; pos2 < dPrimeTable.length; pos2++){ for (int pos1 = 0; pos1 < pos2; pos1++){ long sep = Chromosome.getMarker(pos1).getPosition() - Chromosome.getMarker(pos2).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ dPrimeTable[pos1][pos2] = null; continue; } } } } filteredDPrimeTable = getFilteredTable(); } infoKnown=true; }else{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; markerInfo.add(new SNP(String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
void prepareMarkerInput(File infile, long maxdist) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; Vector markers = new Vector(); long negMaxdist = -1 * maxdist; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } if (lineCount > Chromosome.markers.length){ throw(new HaploViewException("Info file error:\nToo many markers")); } //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).getGenotype(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; String name = st.nextToken(); String loc = st.nextToken(); try{ markers.add(new SNP(name, 
Long.parseLong(loc), infile.getName(), Math.rint(maf*100.0)/100.0)); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + loc + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } snpcount ++; } if (lineCount < Chromosome.markers.length){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file.")); } Chromosome.markers = markers.toArray(); if (dPrimeTable != null){ //loop through the dprime table to null-out distant markers for (int pos2 = 1; pos2 < dPrimeTable.length; pos2++){ for (int pos1 = 0; pos1 < pos2; pos1++){ long sep = Chromosome.getMarker(pos1).getPosition() - Chromosome.getMarker(pos2).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ dPrimeTable[pos1][pos2] = null; continue; } } } } filteredDPrimeTable = getFilteredTable(); } infoKnown=true; }else{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; markerInfo.add(new SNP(String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
1,110,752
public void saveHapsToText(Haplotype[][] finishedHaplos, double[] multidprime, File saveHapsFile) throws IOException{ if (finishedHaplos == null) return; NumberFormat nf = NumberFormat.getInstance(); nf.setMinimumFractionDigits(3); nf.setMaximumFractionDigits(3); //open file for saving haps text FileWriter saveHapsWriter = new FileWriter(saveHapsFile); //go through each block and print haplos for (int i = 0; i < finishedHaplos.length; i++){ //write block header saveHapsWriter.write("BLOCK " + (i+1) + ". MARKERS:"); int[] markerNums = finishedHaplos[i][0].getMarkers(); boolean[] tags = finishedHaplos[i][0].getTags(); for (int j = 0; j < markerNums.length; j++){ saveHapsWriter.write(" " + (markerNums[j]+1)); if (tags[j]) saveHapsWriter.write("!"); } saveHapsWriter.write("\n"); //write haps and crossover percentages for (int j = 0; j < finishedHaplos[i].length; j++){ int[] theGeno = finishedHaplos[i][j].getGeno(); StringBuffer theHap = new StringBuffer(theGeno.length); for (int k = 0; k < theGeno.length; k++){ theHap.append(theGeno[k]); } saveHapsWriter.write(theHap.toString() + " (" + nf.format(finishedHaplos[i][j].getPercentage()) + ")"); if (i < finishedHaplos.length-1){ saveHapsWriter.write("\t|"); for (int crossCount = 0; crossCount < finishedHaplos[i+1].length; crossCount++){ if (crossCount != 0) saveHapsWriter.write("\t"); saveHapsWriter.write(nf.format(finishedHaplos[i][j].getCrossover(crossCount))); } saveHapsWriter.write("|"); } saveHapsWriter.write("\n"); } if (i < finishedHaplos.length - 1){ saveHapsWriter.write("Multiallelic Dprime: " + multidprime[i] + "\n"); }else{ saveHapsWriter.write("\n"); } } saveHapsWriter.close(); }
public void saveHapsToText(Haplotype[][] finishedHaplos, double[] multidprime, File saveHapsFile) throws IOException{ if (finishedHaplos == null) return; NumberFormat nf = NumberFormat.getInstance(Locale.US); nf.setMinimumFractionDigits(3); nf.setMaximumFractionDigits(3); //open file for saving haps text FileWriter saveHapsWriter = new FileWriter(saveHapsFile); //go through each block and print haplos for (int i = 0; i < finishedHaplos.length; i++){ //write block header saveHapsWriter.write("BLOCK " + (i+1) + ". MARKERS:"); int[] markerNums = finishedHaplos[i][0].getMarkers(); boolean[] tags = finishedHaplos[i][0].getTags(); for (int j = 0; j < markerNums.length; j++){ saveHapsWriter.write(" " + (markerNums[j]+1)); if (tags[j]) saveHapsWriter.write("!"); } saveHapsWriter.write("\n"); //write haps and crossover percentages for (int j = 0; j < finishedHaplos[i].length; j++){ int[] theGeno = finishedHaplos[i][j].getGeno(); StringBuffer theHap = new StringBuffer(theGeno.length); for (int k = 0; k < theGeno.length; k++){ theHap.append(theGeno[k]); } saveHapsWriter.write(theHap.toString() + " (" + nf.format(finishedHaplos[i][j].getPercentage()) + ")"); if (i < finishedHaplos.length-1){ saveHapsWriter.write("\t|"); for (int crossCount = 0; crossCount < finishedHaplos[i+1].length; crossCount++){ if (crossCount != 0) saveHapsWriter.write("\t"); saveHapsWriter.write(nf.format(finishedHaplos[i][j].getCrossover(crossCount))); } saveHapsWriter.write("|"); } saveHapsWriter.write("\n"); } if (i < finishedHaplos.length - 1){ saveHapsWriter.write("Multiallelic Dprime: " + multidprime[i] + "\n"); }else{ saveHapsWriter.write("\n"); } } saveHapsWriter.close(); }
1,110,753
public CheckDataPanel(HaploData hd, boolean disp) throws IOException, PedFileException{ STATUS_COL = 8; setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); pedfile = hd.getPedFile(); theData = hd; Vector result = pedfile.getResults(); int numResults = result.size(); Vector tableColumnNames = new Vector(); tableColumnNames.add("#"); if (theData.infoKnown){ tableColumnNames.add("Name"); tableColumnNames.add("Position"); STATUS_COL += 2; } tableColumnNames.add("ObsHET"); tableColumnNames.add("PredHET"); tableColumnNames.add("HWpval"); tableColumnNames.add("%Geno"); tableColumnNames.add("FamTrio"); tableColumnNames.add("MendErr"); tableColumnNames.add("MAF"); tableColumnNames.add("Rating"); Vector tableData = new Vector(); int[] markerRatings = new int[numResults]; for (int i = 0; i < numResults; i++){ Vector tempVect = new Vector(); MarkerResult currentResult = (MarkerResult)result.get(i); tempVect.add(new Integer(i+1)); if (theData.infoKnown){ tempVect.add(Chromosome.getUnfilteredMarker(i).getName()); tempVect.add(new Long(Chromosome.getUnfilteredMarker(i).getPosition())); } tempVect.add(new Double(currentResult.getObsHet())); tempVect.add(new Double(currentResult.getPredHet())); tempVect.add(new Double(currentResult.getHWpvalue())); tempVect.add(new Double(currentResult.getGenoPercent())); tempVect.add(new Integer(currentResult.getFamTrioNum())); tempVect.add(new Integer(currentResult.getMendErrNum())); tempVect.add(new Double(currentResult.getMAF())); if (currentResult.getRating() > 0){ tempVect.add(new Boolean(true)); }else{ tempVect.add(new Boolean(false)); } //this value is never displayed, just kept for bookkeeping markerRatings[i] = currentResult.getRating(); tableData.add(tempVect.clone()); } tableModel = new CheckDataTableModel(tableColumnNames, tableData, markerRatings); tableModel.addTableModelListener(this); if (disp){ table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ 
table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Long"), renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(30); if (theData.infoKnown){ table.getColumnModel().getColumn(1).setPreferredWidth(100); } JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); } }
public CheckDataPanel(HaploData hd, boolean disp) throws IOException, PedFileException{ STATUS_COL = 8; setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); pedfile = hd.getPedFile(); theData = hd; Vector result = pedfile.getResults(); int numResults = result.size(); Vector tableColumnNames = new Vector(); tableColumnNames.add("#"); if (theData.infoKnown){ tableColumnNames.add("Name"); tableColumnNames.add("Position"); STATUS_COL += 2; } tableColumnNames.add("ObsHET"); tableColumnNames.add("PredHET"); tableColumnNames.add("HWpval"); tableColumnNames.add("%Geno"); tableColumnNames.add("FamTrio"); tableColumnNames.add("MendErr"); tableColumnNames.add("MAF"); tableColumnNames.add("Rating"); Vector tableData = new Vector(); int[] markerRatings = new int[numResults]; for (int i = 0; i < numResults; i++){ Vector tempVect = new Vector(); MarkerResult currentResult = (MarkerResult)result.get(i); tempVect.add(new Integer(i+1)); if (theData.infoKnown){ tempVect.add(Chromosome.getUnfilteredMarker(i).getName()); tempVect.add(new Long(Chromosome.getUnfilteredMarker(i).getPosition())); } tempVect.add(new Double(currentResult.getObsHet())); tempVect.add(new Double(currentResult.getPredHet())); tempVect.add(new Double(currentResult.getHWpvalue())); tempVect.add(new Double(currentResult.getGenoPercent())); tempVect.add(new Integer(currentResult.getFamTrioNum())); tempVect.add(new Integer(currentResult.getMendErrNum())); tempVect.add(new Double(currentResult.getMAF())); if (currentResult.getRating() > 0){ tempVect.add(new Boolean(true)); }else{ tempVect.add(new Boolean(false)); } //this value is never displayed, just kept for bookkeeping markerRatings[i] = currentResult.getRating(); tableData.add(tempVect.clone()); } tableModel = new CheckDataTableModel(tableColumnNames, tableData, markerRatings); tableModel.addTableModelListener(this); if (disp){ table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ 
table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Long"), renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(30); if (theData.infoKnown){ table.getColumnModel().getColumn(1).setMinWidth(100); } JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); } }
1,110,754
public void registerTagLibrary(String namespaceURI, TagLibrary taglib) { log.info("Registering tag library to: " + namespaceURI + " taglib: " + taglib); taglibs.put(namespaceURI, taglib); }
public void registerTagLibrary(String namespaceURI, TagLibrary taglib) { log.info("Registering tag library to: " + namespaceURI + " taglib: " + taglib); taglibs.put(namespaceURI, taglib); }
1,110,756
public void addBlock(int firstMarker, int lastMarker) { if (firstMarker < 0){ firstMarker = 0; } if (lastMarker >= Chromosome.realIndex.length){ lastMarker = Chromosome.realIndex.length-1; } if (lastMarker - firstMarker < 1){ return; } int inArray[] = new int[lastMarker-firstMarker+1]; for (int i = 0; i < inArray.length; i++){ inArray[i] = firstMarker+i; this.isInBlock[firstMarker+i] = true; } blocksChanged = true; if (blocks.size() != 0){ boolean placed = false; for (int i = 0; i < blocks.size(); i++){ int currentBlock[] = (int[])blocks.elementAt(i); //trim out any blocks that are overlapped if ((lastMarker >= currentBlock[0] && firstMarker <= currentBlock[currentBlock.length-1]) || firstMarker <= currentBlock[currentBlock.length-1] && firstMarker >= currentBlock[0]){ blocks.removeElementAt(i); i--; } } for (int i = 0; i < blocks.size(); i++){ int currentBlock[] = (int[])blocks.elementAt(i); if (firstMarker <= currentBlock[0] && !placed){ blocks.insertElementAt(inArray,i); placed = true; } } if (!placed){ blocks.add(inArray); } }else{ blocks.add(inArray); } }
public void addBlock(int firstMarker, int lastMarker) { if (firstMarker < 0){ firstMarker = 0; } if (lastMarker >= Chromosome.realIndex.length){ lastMarker = Chromosome.realIndex.length-1; } if (lastMarker - firstMarker < 1){ return; } int inArray[] = new int[lastMarker-firstMarker+1]; for (int i = 0; i < inArray.length; i++){ inArray[i] = firstMarker+i; this.isInBlock[firstMarker+i] = true; } blocksChanged = true; if (blocks.size() != 0){ boolean placed = false; for (int i = 0; i < blocks.size(); i++){ int currentBlock[] = (int[])blocks.elementAt(i); //trim out any blocks that are overlapped if ((lastMarker >= currentBlock[0] && firstMarker <= currentBlock[currentBlock.length-1]) || firstMarker <= currentBlock[currentBlock.length-1] && firstMarker >= currentBlock[0]){ blocks.removeElementAt(i); i--; } } for (int i = 0; i < blocks.size(); i++){ int currentBlock[] = (int[])blocks.elementAt(i); if (firstMarker <= currentBlock[0] && !placed){ blocks.insertElementAt(inArray,i); placed = true; } } if (!placed){ blocks.add(inArray); } }else{ blocks.add(inArray); } }
1,110,757
public Vector check() throws PedFileException{ Vector results = new Vector(); //_size = _pedFile.getNumIndividuals(); int numOfMarkers = _pedFile.getNumMarkers(); //not worrying about names right now //TODO: store and use marker names //Vector names = this._pedFile.getMarkerNames(); Vector names = null; boolean withName=false; if(names!=null && names.size()==numOfMarkers) { withName = true; } for(int i= 0; i < numOfMarkers; i++){ MarkerResult markerResult; if(withName) { markerResult = checkMarker(i, (String)names.get(i)); }else{ markerResult = checkMarker(i, new String("Marker " + (i+1))); } results.add(markerResult); } return results; }
public Vector check() throws PedFileException{ Vector results = new Vector(); //_size = _pedFile.getNumIndividuals(); int numOfMarkers = _pedFile.getNumMarkers(); //not worrying about names right now //TODO: store and use marker names //Vector names = this._pedFile.getMarkerNames(); Vector names = null; boolean withName=false; if(names!=null && names.size()==numOfMarkers) { withName = true; } for(int i= 0; i < numOfMarkers; i++){ MarkerResult markerResult; if(withName) { markerResult = checkMarker(i, (String)names.get(i)); }else{ markerResult = checkMarker(i, new String("Marker " + (i+1))); } results.add(markerResult); } return results; }
1,110,760
public Vector check() throws PedFileException{ Vector results = new Vector(); //_size = _pedFile.getNumIndividuals(); int numOfMarkers = _pedFile.getNumMarkers(); //not worrying about names right now //TODO: store and use marker names //Vector names = this._pedFile.getMarkerNames(); Vector names = null; boolean withName=false; if(names!=null && names.size()==numOfMarkers) { withName = true; } for(int i= 0; i < numOfMarkers; i++){ MarkerResult markerResult; if(withName) { markerResult = checkMarker(i, (String)names.get(i)); }else{ markerResult = checkMarker(i, new String("Marker " + (i+1))); } results.add(markerResult); } return results; }
public Vector check() throws PedFileException{ Vector results = new Vector(); //_size = _pedFile.getNumIndividuals(); int numOfMarkers = _pedFile.getNumMarkers(); //not worrying about names right now //TODO: store and use marker names //Vector names = this._pedFile.getMarkerNames(); Vector names = null; boolean withName=false; if(names!=null && names.size()==numOfMarkers) { withName = true; } for(int i= 0; i < numOfMarkers; i++){ MarkerResult markerResult; if(withName) { markerResult = checkMarker(i, (String)names.get(i)); }else{ markerResult = checkMarker(i, new String("Marker " + (i+1))); } results.add(markerResult); } return results; }
1,110,761
private MarkerResult checkMarker(int loc, String name)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; //int indivgeno=0, int missing=0, parenthet=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0; //Hashtable allgenos = new Hashtable(); Hashtable numindivs=new Hashtable(); Hashtable parentgeno = new Hashtable(); Hashtable kidgeno = new Hashtable(); Hashtable parenthom = new Hashtable(); Hashtable count = new Hashtable(); String allele1_string, allele2_string; //loop through each family, check data for marker loc Enumeration famList = _pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = _pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getIsTyped()){ byte[] markers = currentInd.getMarker(loc); allele1 = markers[0]; allele1_string = Integer.toString(allele1); allele2 = markers[1]; allele2_string = Integer.toString(allele2); String familyID = currentInd.getFamilyID(); incOrSetOne(numindivs,familyID); //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if(!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0"))){ //do mendel check //byte[] marker = ((Individual)pedFileHash.get(familyID + " " + currentInd.getMomID())).getMarker(loc); byte[] marker = (currentFamily.getMember(currentInd.getMomID())).getMarker(loc); int momAllele1 = marker[0]; int momAllele2 = marker[1]; //marker = ((Individual)pedFileHash.get(familyID + " " + currentInd.getDadID())).getMarker(loc); marker = (currentFamily.getMember(currentInd.getDadID())).getMarker(loc); int dadAllele1 = marker[0]; int dadAllele2 = marker[1]; //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom 
if(momAllele1 == momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != momAllele1) { mendErrNum ++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } //end mendel check } } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getIsTyped()){ byte[] markers; byte[] zeroArray = {0,0}; if (currentInd.getZeroed(loc)){ markers = zeroArray; }else{ markers = currentInd.getMarker(loc); } allele1 = markers[0]; allele1_string = Integer.toString(allele1); allele2 = markers[1]; allele2_string = Integer.toString(allele2); String familyID = currentInd.getFamilyID(); incOrSetOne(numindivs,familyID); //no allele data missing if(allele1 > 0 && allele2 >0){ 
//indiv has parents if(currentInd.getMomID().compareTo(Individual.DATA_MISSING)==0 && currentInd.getDadID().compareTo(Individual.DATA_MISSING)==0){ //$parentgeno{$ped}++ //set parentgeno incOrSetOne(parentgeno,familyID); if(allele1 != allele2) { parenthet++; } else{ incOrSetOne(parenthom,allele1_string); } } else{//$kidgeno{$ped}++ incOrSetOne(kidgeno,familyID); } if(allele1 == allele2) { hom++; } else { het++; } //count number of allele incOrSetOne(count,allele1_string); incOrSetOne(count,allele2_string); } //missing data else missing++; } } } double obsHET = getObsHET(het, hom); double[] freqStuff = getFreqStuff(count); double preHET = freqStuff[0]; double maf = freqStuff[1]; //HW p value double pvalue = getPValue(parenthom, parenthet); //geno percent double genopct = getGenoPercent(het, hom, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = getNumOfFamTrio(numindivs, parentgeno, kidgeno); //rating int rating = this.getRating(genopct, pvalue, obsHET, mendErrNum,maf); result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); result.setName(name); return result; }
private MarkerResult checkMarker(int loc)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; //int indivgeno=0, int missing=0, parenthet=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0; //Hashtable allgenos = new Hashtable(); Hashtable numindivs=new Hashtable(); Hashtable parentgeno = new Hashtable(); Hashtable kidgeno = new Hashtable(); Hashtable parenthom = new Hashtable(); Hashtable count = new Hashtable(); String allele1_string, allele2_string; //loop through each family, check data for marker loc Enumeration famList = _pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = _pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getIsTyped()){ byte[] markers = currentInd.getMarker(loc); allele1 = markers[0]; allele1_string = Integer.toString(allele1); allele2 = markers[1]; allele2_string = Integer.toString(allele2); String familyID = currentInd.getFamilyID(); incOrSetOne(numindivs,familyID); //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if(!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0"))){ //do mendel check //byte[] marker = ((Individual)pedFileHash.get(familyID + " " + currentInd.getMomID())).getMarker(loc); byte[] marker = (currentFamily.getMember(currentInd.getMomID())).getMarker(loc); int momAllele1 = marker[0]; int momAllele2 = marker[1]; //marker = ((Individual)pedFileHash.get(familyID + " " + currentInd.getDadID())).getMarker(loc); marker = (currentFamily.getMember(currentInd.getDadID())).getMarker(loc); int dadAllele1 = marker[0]; int dadAllele2 = marker[1]; //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom if(momAllele1 
== momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != momAllele1) { mendErrNum ++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } //end mendel check } } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getIsTyped()){ byte[] markers; byte[] zeroArray = {0,0}; if (currentInd.getZeroed(loc)){ markers = zeroArray; }else{ markers = currentInd.getMarker(loc); } allele1 = markers[0]; allele1_string = Integer.toString(allele1); allele2 = markers[1]; allele2_string = Integer.toString(allele2); String familyID = currentInd.getFamilyID(); incOrSetOne(numindivs,familyID); //no allele data missing if(allele1 > 0 && allele2 >0){ //indiv has 
parents if(currentInd.getMomID().compareTo(Individual.DATA_MISSING)==0 && currentInd.getDadID().compareTo(Individual.DATA_MISSING)==0){ //$parentgeno{$ped}++ //set parentgeno incOrSetOne(parentgeno,familyID); if(allele1 != allele2) { parenthet++; } else{ incOrSetOne(parenthom,allele1_string); } } else{//$kidgeno{$ped}++ incOrSetOne(kidgeno,familyID); } if(allele1 == allele2) { hom++; } else { het++; } //count number of allele incOrSetOne(count,allele1_string); incOrSetOne(count,allele2_string); } //missing data else missing++; } } } double obsHET = getObsHET(het, hom); double[] freqStuff = getFreqStuff(count); double preHET = freqStuff[0]; double maf = freqStuff[1]; //HW p value double pvalue = getPValue(parenthom, parenthet); //geno percent double genopct = getGenoPercent(het, hom, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = getNumOfFamTrio(numindivs, parentgeno, kidgeno); //rating int rating = this.getRating(genopct, pvalue, obsHET, mendErrNum,maf); result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); result.setName(name); return result; }
1,110,762
private MarkerResult checkMarker(int loc, String name)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; //int indivgeno=0, int missing=0, parenthet=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0; //Hashtable allgenos = new Hashtable(); Hashtable numindivs=new Hashtable(); Hashtable parentgeno = new Hashtable(); Hashtable kidgeno = new Hashtable(); Hashtable parenthom = new Hashtable(); Hashtable count = new Hashtable(); String allele1_string, allele2_string; //loop through each family, check data for marker loc Enumeration famList = _pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = _pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getIsTyped()){ byte[] markers = currentInd.getMarker(loc); allele1 = markers[0]; allele1_string = Integer.toString(allele1); allele2 = markers[1]; allele2_string = Integer.toString(allele2); String familyID = currentInd.getFamilyID(); incOrSetOne(numindivs,familyID); //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if(!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0"))){ //do mendel check //byte[] marker = ((Individual)pedFileHash.get(familyID + " " + currentInd.getMomID())).getMarker(loc); byte[] marker = (currentFamily.getMember(currentInd.getMomID())).getMarker(loc); int momAllele1 = marker[0]; int momAllele2 = marker[1]; //marker = ((Individual)pedFileHash.get(familyID + " " + currentInd.getDadID())).getMarker(loc); marker = (currentFamily.getMember(currentInd.getDadID())).getMarker(loc); int dadAllele1 = marker[0]; int dadAllele2 = marker[1]; //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom 
if(momAllele1 == momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != momAllele1) { mendErrNum ++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } //end mendel check } } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getIsTyped()){ byte[] markers; byte[] zeroArray = {0,0}; if (currentInd.getZeroed(loc)){ markers = zeroArray; }else{ markers = currentInd.getMarker(loc); } allele1 = markers[0]; allele1_string = Integer.toString(allele1); allele2 = markers[1]; allele2_string = Integer.toString(allele2); String familyID = currentInd.getFamilyID(); incOrSetOne(numindivs,familyID); //no allele data missing if(allele1 > 0 && allele2 >0){ 
//indiv has parents if(currentInd.getMomID().compareTo(Individual.DATA_MISSING)==0 && currentInd.getDadID().compareTo(Individual.DATA_MISSING)==0){ //$parentgeno{$ped}++ //set parentgeno incOrSetOne(parentgeno,familyID); if(allele1 != allele2) { parenthet++; } else{ incOrSetOne(parenthom,allele1_string); } } else{//$kidgeno{$ped}++ incOrSetOne(kidgeno,familyID); } if(allele1 == allele2) { hom++; } else { het++; } //count number of allele incOrSetOne(count,allele1_string); incOrSetOne(count,allele2_string); } //missing data else missing++; } } } double obsHET = getObsHET(het, hom); double[] freqStuff = getFreqStuff(count); double preHET = freqStuff[0]; double maf = freqStuff[1]; //HW p value double pvalue = getPValue(parenthom, parenthet); //geno percent double genopct = getGenoPercent(het, hom, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = getNumOfFamTrio(numindivs, parentgeno, kidgeno); //rating int rating = this.getRating(genopct, pvalue, obsHET, mendErrNum,maf); result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); result.setName(name); return result; }
private MarkerResult checkMarker(int loc, String name)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; //int indivgeno=0, int missing=0, parenthet=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0; //Hashtable allgenos = new Hashtable(); Hashtable numindivs=new Hashtable(); Hashtable parentgeno = new Hashtable(); Hashtable kidgeno = new Hashtable(); Hashtable parenthom = new Hashtable(); Hashtable count = new Hashtable(); String allele1_string, allele2_string; //loop through each family, check data for marker loc Enumeration famList = _pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = _pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getIsTyped()){ byte[] markers = currentInd.getMarker(loc); allele1 = markers[0]; allele1_string = Integer.toString(allele1); allele2 = markers[1]; allele2_string = Integer.toString(allele2); String familyID = currentInd.getFamilyID(); incOrSetOne(numindivs,familyID); //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if(!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0"))){ //do mendel check //byte[] marker = ((Individual)pedFileHash.get(familyID + " " + currentInd.getMomID())).getMarker(loc); byte[] marker = (currentFamily.getMember(currentInd.getMomID())).getMarker(loc); int momAllele1 = marker[0]; int momAllele2 = marker[1]; //marker = ((Individual)pedFileHash.get(familyID + " " + currentInd.getDadID())).getMarker(loc); marker = (currentFamily.getMember(currentInd.getDadID())).getMarker(loc); int dadAllele1 = marker[0]; int dadAllele2 = marker[1]; //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom 
if(momAllele1 == momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != momAllele1) { mendErrNum ++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } //end mendel check } } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getIsTyped()){ byte[] markers; byte[] zeroArray = {0,0}; if (currentInd.getZeroed(loc)){ markers = zeroArray; }else{ markers = currentInd.getMarker(loc); } allele1 = markers[0]; allele1_string = Integer.toString(allele1); allele2 = markers[1]; allele2_string = Integer.toString(allele2); String familyID = currentInd.getFamilyID(); incOrSetOne(numindivs,familyID); //no allele data missing if(allele1 > 0 && allele2 >0){ 
//indiv has parents if(currentInd.getMomID().compareTo(Individual.DATA_MISSING)==0 && currentInd.getDadID().compareTo(Individual.DATA_MISSING)==0){ //$parentgeno{$ped}++ //set parentgeno incOrSetOne(parentgeno,familyID); if(allele1 != allele2) { parenthet++; } else{ incOrSetOne(parenthom,allele1_string); } } else{//$kidgeno{$ped}++ incOrSetOne(kidgeno,familyID); } if(allele1 == allele2) { hom++; } else { het++; } //count number of allele incOrSetOne(count,allele1_string); incOrSetOne(count,allele2_string); } //missing data else missing++; } } } double obsHET = getObsHET(het, hom); double[] freqStuff = getFreqStuff(count); double preHET = freqStuff[0]; double maf = freqStuff[1]; //HW p value double pvalue = getPValue(parenthom, parenthet); //geno percent double genopct = getGenoPercent(het, hom, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = getNumOfFamTrio(numindivs, parentgeno, kidgeno); //rating int rating = this.getRating(genopct, pvalue, obsHET, mendErrNum,maf); result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); return result; }
1,110,763
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == "Open"){ int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { try{ theData = new HaploData(fc.getSelectedFile()); infileName = fc.getSelectedFile().getName(); //compute D primes and monitor progress progressMonitor = new ProgressMonitor(this, "Computing " + theData.getToBeCompleted() + " values of D prime","", 0, theData.getToBeCompleted()); progressMonitor.setProgress(0); progressMonitor.setMillisToDecideToPopup(2000); final SwingWorker worker = new SwingWorker(){ public Object construct(){ theData.doMonitoredComputation(); return ""; } }; timer = new javax.swing.Timer(500, new ActionListener(){ public void actionPerformed(ActionEvent evt){ progressMonitor.setProgress(theData.getComplete()); if (theData.getComplete() == theData.getToBeCompleted()){ timer.stop(); progressMonitor.close(); infoKnown=false; drawPicture(theData); loadInfoMenuItem.setEnabled(true); hapMenuItem.setEnabled(true); customizeHapsMenuItem.setEnabled(true); exportMenuItem.setEnabled(true); saveDprimeMenuItem.setEnabled(true); clearBlocksMenuItem.setEnabled(true); guessBlocksMenuItem.setEnabled(true); } } }); worker.start(); timer.start(); }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. 
It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } } } else if (command == loadInfoStr){ int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { try{ int good = theData.prepareMarkerInput(fc.getSelectedFile()); if (good == -1){ JOptionPane.showMessageDialog(this, "Number of markers in info file does not match number of markers in dataset.", "Error", JOptionPane.ERROR_MESSAGE); }else{ infoKnown=true; // loadInfoMenuItem.setEnabled(false); drawPicture(theData); } }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } } }else if (command == "Clear All Blocks"){ theBlocks.clearBlocks(); }else if (command == "Define Blocks"){ defineBlocks(); }else if (command == "Customize Haplotype Output"){ customizeHaps(); }else if (command == "Tutorial"){ showHelp(); }else if (command == "Export LD Picture to JPG"){ doExportDPrime(); }else if (command == "Dump LD Output to Text"){ saveDprimeToText(); }else if (command == "Save Haplotypes to Text"){ saveHapsToText(); }else if (command == "Save Haplotypes to JPG"){ saveHapsPic(); }else if (command == "Generate Haplotypes"){ try{ drawHaplos(theData.generateHaplotypes(theData.blocks, haploThresh)); saveHapsMenuItem.setEnabled(true); saveHapsPicMenuItem.setEnabled(true); }catch (IOException ioe){} } else if (command == "Exit"){ System.exit(0); } }
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == "Open"){ int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { try{ theData = new HaploData(fc.getSelectedFile()); infileName = fc.getSelectedFile().getName(); //compute D primes and monitor progress progressMonitor = new ProgressMonitor(this, "Computing " + theData.getToBeCompleted() + " values of D prime","", 0, theData.getToBeCompleted()); progressMonitor.setProgress(0); progressMonitor.setMillisToDecideToPopup(2000); final SwingWorker worker = new SwingWorker(){ public Object construct(){ theData.doMonitoredComputation(); return ""; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ progressMonitor.setProgress(theData.getComplete()); if (theData.getComplete() == theData.getToBeCompleted()){ timer.stop(); progressMonitor.close(); infoKnown=false; drawPicture(theData); loadInfoMenuItem.setEnabled(true); hapMenuItem.setEnabled(true); customizeHapsMenuItem.setEnabled(true); exportMenuItem.setEnabled(true); saveDprimeMenuItem.setEnabled(true); clearBlocksMenuItem.setEnabled(true); guessBlocksMenuItem.setEnabled(true); } } }); worker.start(); timer.start(); }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. 
It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } } } else if (command == loadInfoStr){ int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { try{ int good = theData.prepareMarkerInput(fc.getSelectedFile()); if (good == -1){ JOptionPane.showMessageDialog(this, "Number of markers in info file does not match number of markers in dataset.", "Error", JOptionPane.ERROR_MESSAGE); }else{ infoKnown=true; // loadInfoMenuItem.setEnabled(false); drawPicture(theData); } }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } } }else if (command == "Clear All Blocks"){ theBlocks.clearBlocks(); }else if (command == "Define Blocks"){ defineBlocks(); }else if (command == "Customize Haplotype Output"){ customizeHaps(); }else if (command == "Tutorial"){ showHelp(); }else if (command == "Export LD Picture to JPG"){ doExportDPrime(); }else if (command == "Dump LD Output to Text"){ saveDprimeToText(); }else if (command == "Save Haplotypes to Text"){ saveHapsToText(); }else if (command == "Save Haplotypes to JPG"){ saveHapsPic(); }else if (command == "Generate Haplotypes"){ try{ drawHaplos(theData.generateHaplotypes(theData.blocks, haploThresh)); saveHapsMenuItem.setEnabled(true); saveHapsPicMenuItem.setEnabled(true); }catch (IOException ioe){} } else if (command == "Exit"){ System.exit(0); } }
1,110,767
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == "Open"){ int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { try{ theData = new HaploData(fc.getSelectedFile()); infileName = fc.getSelectedFile().getName(); //compute D primes and monitor progress progressMonitor = new ProgressMonitor(this, "Computing " + theData.getToBeCompleted() + " values of D prime","", 0, theData.getToBeCompleted()); progressMonitor.setProgress(0); progressMonitor.setMillisToDecideToPopup(2000); final SwingWorker worker = new SwingWorker(){ public Object construct(){ theData.doMonitoredComputation(); return ""; } }; timer = new javax.swing.Timer(500, new ActionListener(){ public void actionPerformed(ActionEvent evt){ progressMonitor.setProgress(theData.getComplete()); if (theData.getComplete() == theData.getToBeCompleted()){ timer.stop(); progressMonitor.close(); infoKnown=false; drawPicture(theData); loadInfoMenuItem.setEnabled(true); hapMenuItem.setEnabled(true); customizeHapsMenuItem.setEnabled(true); exportMenuItem.setEnabled(true); saveDprimeMenuItem.setEnabled(true); clearBlocksMenuItem.setEnabled(true); guessBlocksMenuItem.setEnabled(true); } } }); worker.start(); timer.start(); }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. 
It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } } } else if (command == loadInfoStr){ int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { try{ int good = theData.prepareMarkerInput(fc.getSelectedFile()); if (good == -1){ JOptionPane.showMessageDialog(this, "Number of markers in info file does not match number of markers in dataset.", "Error", JOptionPane.ERROR_MESSAGE); }else{ infoKnown=true; // loadInfoMenuItem.setEnabled(false); drawPicture(theData); } }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } } }else if (command == "Clear All Blocks"){ theBlocks.clearBlocks(); }else if (command == "Define Blocks"){ defineBlocks(); }else if (command == "Customize Haplotype Output"){ customizeHaps(); }else if (command == "Tutorial"){ showHelp(); }else if (command == "Export LD Picture to JPG"){ doExportDPrime(); }else if (command == "Dump LD Output to Text"){ saveDprimeToText(); }else if (command == "Save Haplotypes to Text"){ saveHapsToText(); }else if (command == "Save Haplotypes to JPG"){ saveHapsPic(); }else if (command == "Generate Haplotypes"){ try{ drawHaplos(theData.generateHaplotypes(theData.blocks, haploThresh)); saveHapsMenuItem.setEnabled(true); saveHapsPicMenuItem.setEnabled(true); }catch (IOException ioe){} } else if (command == "Exit"){ System.exit(0); } }
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == "Open"){ int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { try{ theData = new HaploData(fc.getSelectedFile()); infileName = fc.getSelectedFile().getName(); //compute D primes and monitor progress progressMonitor = new ProgressMonitor(this, "Computing " + theData.getToBeCompleted() + " values of D prime","", 0, theData.getToBeCompleted()); progressMonitor.setProgress(0); progressMonitor.setMillisToDecideToPopup(2000); final SwingWorker worker = new SwingWorker(){ public Object construct(){ theData.doMonitoredComputation(); return ""; } }; timer = new javax.swing.Timer(500, new ActionListener(){ public void actionPerformed(ActionEvent evt){ progressMonitor.setProgress(theData.getComplete()); if (theData.finished){ timer.stop(); progressMonitor.close(); infoKnown=false; drawPicture(theData); loadInfoMenuItem.setEnabled(true); hapMenuItem.setEnabled(true); customizeHapsMenuItem.setEnabled(true); exportMenuItem.setEnabled(true); saveDprimeMenuItem.setEnabled(true); clearBlocksMenuItem.setEnabled(true); guessBlocksMenuItem.setEnabled(true); } } }); worker.start(); timer.start(); }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. 
It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } } } else if (command == loadInfoStr){ int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { try{ int good = theData.prepareMarkerInput(fc.getSelectedFile()); if (good == -1){ JOptionPane.showMessageDialog(this, "Number of markers in info file does not match number of markers in dataset.", "Error", JOptionPane.ERROR_MESSAGE); }else{ infoKnown=true; // loadInfoMenuItem.setEnabled(false); drawPicture(theData); } }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } } }else if (command == "Clear All Blocks"){ theBlocks.clearBlocks(); }else if (command == "Define Blocks"){ defineBlocks(); }else if (command == "Customize Haplotype Output"){ customizeHaps(); }else if (command == "Tutorial"){ showHelp(); }else if (command == "Export LD Picture to JPG"){ doExportDPrime(); }else if (command == "Dump LD Output to Text"){ saveDprimeToText(); }else if (command == "Save Haplotypes to Text"){ saveHapsToText(); }else if (command == "Save Haplotypes to JPG"){ saveHapsPic(); }else if (command == "Generate Haplotypes"){ try{ drawHaplos(theData.generateHaplotypes(theData.blocks, haploThresh)); saveHapsMenuItem.setEnabled(true); saveHapsPicMenuItem.setEnabled(true); }catch (IOException ioe){} } else if (command == "Exit"){ System.exit(0); } }
1,110,768
public void actionPerformed(ActionEvent evt){ progressMonitor.setProgress(theData.getComplete()); if (theData.getComplete() == theData.getToBeCompleted()){ timer.stop(); progressMonitor.close(); infoKnown=false; drawPicture(theData); loadInfoMenuItem.setEnabled(true); hapMenuItem.setEnabled(true); customizeHapsMenuItem.setEnabled(true); exportMenuItem.setEnabled(true); saveDprimeMenuItem.setEnabled(true); clearBlocksMenuItem.setEnabled(true); guessBlocksMenuItem.setEnabled(true); } }
public void actionPerformed(ActionEvent evt){ progressMonitor.setProgress(theData.getComplete()); if (theData.finished){ timer.stop(); progressMonitor.close(); infoKnown=false; drawPicture(theData); loadInfoMenuItem.setEnabled(true); hapMenuItem.setEnabled(true); customizeHapsMenuItem.setEnabled(true); exportMenuItem.setEnabled(true); saveDprimeMenuItem.setEnabled(true); clearBlocksMenuItem.setEnabled(true); guessBlocksMenuItem.setEnabled(true); } }
1,110,769
public String draw(DashboardContext context) { StringBuffer output = new StringBuffer(); output.append(component.draw(context)); // append script final String dashboardId = context.getDashboardConfig().getDashboardId(); String appId = context.getWebContext().getApplicationConfig().getApplicationId(); output.append("\n<script>"); output.append("self.setTimeout(\"refreshDBComponent("); output.append("''"); output.append(dashboardId); output.append("'', ''"); output.append(getId()); output.append("'', "); output.append(refreshInterval + ", " + appId + ")\", " + refreshInterval + ");"); output.append("</script>"); return output.toString(); }
public String draw(DashboardContext context) { StringBuffer output = new StringBuffer(); output.append(component.draw(context)); // append script final String dashboardId = context.getDashboardConfig().getDashboardId(); String appId = context.getWebContext().getApplicationConfig().getApplicationId(); output.append("\n<script>"); output.append("self.setTimeout(\"refreshDBComponent("); output.append("''"); output.append(dashboardId); output.append("'', ''"); output.append(getId()); output.append("'', "); output.append(refreshInterval + ", " + appId + ",''dummy'',''dummy'')\", " + refreshInterval + ");"); output.append("</script>"); return output.toString(); }
1,110,770
public void colorDPrime(int scheme){ PairwiseLinkage dPrime[][] = theData.filteredDPrimeTable; if (scheme == STD_SCHEME){ // set coloring based on LOD and D' for (int i = 0; i < dPrime.length; i++){ for (int j = i+1; j < dPrime[i].length; j++){ PairwiseLinkage thisPair = dPrime[i][j]; if (thisPair == null){ continue; } double d = thisPair.getDPrime(); double l = thisPair.getLOD(); Color boxColor = null; if (l > 2) { if (d < 0.5) { //high LOD, low D' boxColor = new Color(255, 224, 224); } else { //high LOD, high D' shades of red double blgr = (255-32)*2*(1-d); boxColor = new Color(255, (int) blgr, (int) blgr); //boxColor = new Color(224, (int) blgr, (int) blgr); } } else if (d > 0.99) { //high D', low LOD blueish color boxColor = new Color(192, 192, 240); } else { //no LD boxColor = Color.white; } thisPair.setColor(boxColor); } } }else if (scheme == SFS_SCHEME){ for (int x = 0; x < dPrime.length-1; x++){ for (int y = x+1; y < dPrime.length; y++){ PairwiseLinkage thisPair = dPrime[x][y]; if (thisPair == null){ continue; } //get the right bits double lowCI = thisPair.getConfidenceLow(); double highCI = thisPair.getConfidenceHigh(); //color in squares if (lowCI >= FindBlocks.cutLowCI && highCI >= FindBlocks.cutHighCI) { thisPair.setColor(Color.darkGray); //strong LD }else if (highCI >= FindBlocks.recHighCI) { thisPair.setColor(Color.lightGray); //uninformative } else { thisPair.setColor(Color.white); //recomb } } } }else if (scheme == GAM_SCHEME){ for (int x = 0; x < dPrime.length-1; x++){ for (int y = x+1; y < dPrime.length; y++){ PairwiseLinkage thisPair = dPrime[x][y]; if (thisPair == null) { continue; } double[] freqs = thisPair.getFreqs(); int numGam = 0; for (int i = 0; i < freqs.length; i++){ if (freqs[i] > FindBlocks.fourGameteCutoff) numGam++; } //color in squares if(numGam > 3){ thisPair.setColor(Color.white); }else{ thisPair.setColor(Color.darkGray); } } } } }
public void colorDPrime(int scheme){ PairwiseLinkage dPrime[][] = theData.filteredDPrimeTable; if (scheme == STD_SCHEME){ // set coloring based on LOD and D' for (int i = 0; i < dPrime.length; i++){ for (int j = i+1; j < dPrime[i].length; j++){ PairwiseLinkage thisPair = dPrime[i][j]; if (thisPair == null){ continue; } double d = thisPair.getDPrime(); double l = thisPair.getLOD(); Color boxColor = null; if (l > 2) { if (d < 0.5) { //high LOD, low D' boxColor = new Color(255, 224, 224); } else { //high LOD, high D' shades of red double blgr = (255-32)*2*(1-d); boxColor = new Color(255, (int) blgr, (int) blgr); //boxColor = new Color(224, (int) blgr, (int) blgr); } } else if (d > 0.99) { //high D', low LOD blueish color boxColor = new Color(192, 192, 240); } else { //no LD boxColor = Color.white; } thisPair.setColor(boxColor); } } }else if (scheme == SFS_SCHEME){ for (int x = 0; x < dPrime.length-1; x++){ for (int y = x+1; y < dPrime.length; y++){ PairwiseLinkage thisPair = dPrime[x][y]; if (thisPair == null){ continue; } //get the right bits double lowCI = thisPair.getConfidenceLow(); double highCI = thisPair.getConfidenceHigh(); //color in squares if (lowCI >= FindBlocks.cutLowCI && highCI >= FindBlocks.cutHighCI) { thisPair.setColor(Color.darkGray); //strong LD }else if (highCI >= FindBlocks.recHighCI) { thisPair.setColor(Color.lightGray); //uninformative } else { thisPair.setColor(Color.white); //recomb } } } }else if (scheme == GAM_SCHEME){ for (int x = 0; x < dPrime.length-1; x++){ for (int y = x+1; y < dPrime.length; y++){ PairwiseLinkage thisPair = dPrime[x][y]; if (thisPair == null) { continue; } double[] freqs = thisPair.getFreqs(); int numGam = 0; for (int i = 0; i < freqs.length; i++){ if (freqs[i] > FindBlocks.fourGameteCutoff + 1E-8) numGam++; } //color in squares if(numGam > 3){ thisPair.setColor(Color.white); }else{ thisPair.setColor(Color.darkGray); } } } } }
1,110,772
public void mouseClicked(MouseEvent e) { if ((e.getModifiers() & InputEvent.BUTTON1_MASK) == InputEvent.BUTTON1_MASK) { int clickX = e.getX(); int clickY = e.getY(); if (showWM && wmInteriorRect.contains(clickX,clickY)){ //convert a click on the worldmap to a point on the big picture int bigClickX = (((clickX - getVisibleRect().x - (worldmap.getWidth()-wmInteriorRect.width)/2) * chartSize.width) / wmInteriorRect.width)-getVisibleRect().width/2; int bigClickY = (((clickY - getVisibleRect().y - (worldmap.getHeight() - wmInteriorRect.height)/2 - (getVisibleRect().height-worldmap.getHeight())) * chartSize.height) / wmInteriorRect.height) - getVisibleRect().height/2 + infoHeight; //System.out.println(chartSize.height); //if the clicks are near the edges, correct values if (bigClickX > chartSize.width - getVisibleRect().width){ bigClickX = chartSize.width - getVisibleRect().width; } if (bigClickX < 0){ bigClickX = 0; } if (bigClickY > chartSize.height - getVisibleRect().height + infoHeight){ bigClickY = chartSize.height - getVisibleRect().height + infoHeight; } if (bigClickY < 0){ bigClickY = 0; } ((JViewport)getParent()).setViewPosition(new Point(bigClickX,bigClickY)); }else{ Rectangle blockselector = new Rectangle(clickXShift-boxRadius,clickYShift - boxRadius, (Chromosome.getFilteredSize()*boxSize), boxSize); if(blockselector.contains(clickX,clickY)){ int whichMarker = (int)(0.5 + (double)((clickX - clickXShift))/boxSize); if (theData.isInBlock[whichMarker]){ theData.removeFromBlock(whichMarker); refresh(0); } else if (whichMarker > 0 && whichMarker < Chromosome.realIndex.length){ theData.addMarkerIntoSurroundingBlock(whichMarker); } } } } }
// Left-button click handler (variant of the method above that calls the
// no-argument refresh() -- presumably a repaint without recoloring; confirm
// the no-arg overload exists elsewhere in this class).
// A click inside the worldmap thumbnail recenters the main viewport on the
// corresponding point of the full chart (coordinates scaled from thumbnail to
// chart space, then clamped to the chart edges).
// A click in the block-selector strip toggles marker membership: a marker in a
// block is removed and the display refreshed; otherwise, if the index is in
// range, the marker is added into the surrounding block.
// NOTE(review): theData.isInBlock[whichMarker] is indexed before whichMarker
// is range-checked -- potential ArrayIndexOutOfBoundsException; verify.
public void mouseClicked(MouseEvent e) { if ((e.getModifiers() & InputEvent.BUTTON1_MASK) == InputEvent.BUTTON1_MASK) { int clickX = e.getX(); int clickY = e.getY(); if (showWM && wmInteriorRect.contains(clickX,clickY)){ //convert a click on the worldmap to a point on the big picture int bigClickX = (((clickX - getVisibleRect().x - (worldmap.getWidth()-wmInteriorRect.width)/2) * chartSize.width) / wmInteriorRect.width)-getVisibleRect().width/2; int bigClickY = (((clickY - getVisibleRect().y - (worldmap.getHeight() - wmInteriorRect.height)/2 - (getVisibleRect().height-worldmap.getHeight())) * chartSize.height) / wmInteriorRect.height) - getVisibleRect().height/2 + infoHeight; //System.out.println(chartSize.height); //if the clicks are near the edges, correct values if (bigClickX > chartSize.width - getVisibleRect().width){ bigClickX = chartSize.width - getVisibleRect().width; } if (bigClickX < 0){ bigClickX = 0; } if (bigClickY > chartSize.height - getVisibleRect().height + infoHeight){ bigClickY = chartSize.height - getVisibleRect().height + infoHeight; } if (bigClickY < 0){ bigClickY = 0; } ((JViewport)getParent()).setViewPosition(new Point(bigClickX,bigClickY)); }else{ Rectangle blockselector = new Rectangle(clickXShift-boxRadius,clickYShift - boxRadius, (Chromosome.getFilteredSize()*boxSize), boxSize); if(blockselector.contains(clickX,clickY)){ int whichMarker = (int)(0.5 + (double)((clickX - clickXShift))/boxSize); if (theData.isInBlock[whichMarker]){ theData.removeFromBlock(whichMarker); refresh(); } else if (whichMarker > 0 && whichMarker < Chromosome.realIndex.length){ theData.addMarkerIntoSurroundingBlock(whichMarker); } } } } }
1,110,773
// Mouse-release handler. Right button: dismiss the popup window and repaint.
// Left button ends a drag gesture:
//  - NE_RESIZE cursor (worldmap resize): commit the dragged width if it is
//    over 20px, force the cached image to rebuild, restore the default cursor.
//  - E_RESIZE cursor (block definition drag): convert the drag start/end x
//    coordinates to marker indices (swapping if the drag went right-to-left),
//    create the block from that range, and refresh (refresh(0) skips
//    recoloring of the D' table).
public void mouseReleased(MouseEvent e) { //remove popped up window if ((e.getModifiers() & InputEvent.BUTTON3_MASK) == InputEvent.BUTTON3_MASK){ popupExists = false; repaint(); //resize window once user has ceased dragging } else if ((e.getModifiers() & InputEvent.BUTTON1_MASK) == InputEvent.BUTTON1_MASK){ if (getCursor() == Cursor.getPredefinedCursor(Cursor.NE_RESIZE_CURSOR)){ resizeRectExists = false; noImage = true; if (resizeWMRect.width > 20){ wmMaxWidth = resizeWMRect.width; } setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); repaint(); } if (getCursor() == Cursor.getPredefinedCursor(Cursor.E_RESIZE_CURSOR)){ setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); blockRectExists = false; int firstMarker = (int)(0.5 + (double)((blockStartX - clickXShift))/boxSize); int lastMarker = (int)(0.5 + (double)((e.getX() - clickXShift))/boxSize); if (firstMarker > lastMarker){ int temp = firstMarker; firstMarker = lastMarker; lastMarker = temp; } theData.addBlock(firstMarker, lastMarker); refresh(0); } } }
// Mouse-release handler (variant of the method above calling the no-argument
// refresh() -- presumably a repaint without recoloring; confirm the no-arg
// overload exists elsewhere in this class).
// Right button: dismiss the popup window and repaint. Left button ends a drag:
//  - NE_RESIZE cursor (worldmap resize): commit the dragged width if over
//    20px, force the cached image to rebuild, restore the default cursor.
//  - E_RESIZE cursor (block definition drag): convert the drag start/end x
//    coordinates to marker indices (swapping if dragged right-to-left), create
//    the block from that range, and refresh the display.
public void mouseReleased(MouseEvent e) { //remove popped up window if ((e.getModifiers() & InputEvent.BUTTON3_MASK) == InputEvent.BUTTON3_MASK){ popupExists = false; repaint(); //resize window once user has ceased dragging } else if ((e.getModifiers() & InputEvent.BUTTON1_MASK) == InputEvent.BUTTON1_MASK){ if (getCursor() == Cursor.getPredefinedCursor(Cursor.NE_RESIZE_CURSOR)){ resizeRectExists = false; noImage = true; if (resizeWMRect.width > 20){ wmMaxWidth = resizeWMRect.width; } setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); repaint(); } if (getCursor() == Cursor.getPredefinedCursor(Cursor.E_RESIZE_CURSOR)){ setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); blockRectExists = false; int firstMarker = (int)(0.5 + (double)((blockStartX - clickXShift))/boxSize); int lastMarker = (int)(0.5 + (double)((e.getX() - clickXShift))/boxSize); if (firstMarker > lastMarker){ int temp = firstMarker; firstMarker = lastMarker; lastMarker = temp; } theData.addBlock(firstMarker, lastMarker); refresh(); } } }
1,110,774
/**
 * Redraws the LD chart. A nonzero scheme first recolors the pairwise D' table
 * via colorDPrime; the cached chart image is then invalidated and a repaint
 * is scheduled.
 *
 * @param scheme coloring scheme constant forwarded to colorDPrime; pass 0 to
 *               repaint without recoloring
 */
public void refresh(int scheme){
    boolean recolorNeeded = scheme != 0;
    if (recolorNeeded){
        colorDPrime(scheme);
    }
    // Drop the cached image so the next paint regenerates it from scratch.
    noImage = true;
    repaint();
}
// Redraws the LD chart. A nonzero scheme first recolors the pairwise D' table
// via colorDPrime; setting noImage forces the cached chart image to be
// regenerated on the next paint.
public void refresh(int scheme){ if (scheme != 0){ colorDPrime(scheme); } noImage = true; repaint(); }
1,110,775
// For each gap between adjacent haplotype blocks this method:
//  1) picks a minimal "best subset" of tag SNPs for the blocks on either side
//     (a lone block, when there is only one, just gets its tags marked),
//  2) builds a temporary block from those tag markers and phases it via
//     generateHaplotypes to obtain cross-gap haplotypes,
//  3) records, for every pre-gap haplotype, the frequency with which it
//     connects to each post-gap haplotype (connections whose crossover hap
//     frequency is below CROSSOVER_THRESHOLD are ignored),
//  4) greedily reorders the post-gap haplotypes so the strongest connections
//     draw as straight lines in the display (unmatched haps go to the first
//     free slot), and
//  5) folds the connection table into a multilocus D' stored in
//     multidprimeArray[gap] (1.0 when every normalization denominator
//     degenerates; zero marginals are floored at 0.0001).
// Mutates the input (list order, tags, crossover percentages) and returns it;
// returns null for an empty input array.
Haplotype[][] generateCrossovers(Haplotype[][] haplos) throws HaploViewException{ Vector crossBlock = new Vector(); double CROSSOVER_THRESHOLD = 0.001; //to what percentage do we want to consider crossings? if (haplos.length == 0) return null; //seed first block with ordering numbers for (int u = 0; u < haplos[0].length; u++){ haplos[0][u].setListOrder(u); } for (int i = 0; i < haplos.length; i++){ haplos[i][0].clearTags(); } multidprimeArray = new double[haplos.length]; //get "tag" SNPS if there is only one block: if (haplos.length==1){ Vector theBestSubset = getBestSubset(haplos[0]); for (int i = 0; i < theBestSubset.size(); i++){ haplos[0][0].addTag(((Integer)theBestSubset.elementAt(i)).intValue()); } } for (int gap = 0; gap < haplos.length - 1; gap++){ //compute crossovers for each inter-block gap Vector preGapSubset = getBestSubset(haplos[gap]); Vector postGapSubset = getBestSubset(haplos[gap+1]); int[] preMarkerID = haplos[gap][0].getMarkers(); //index haplos to markers in whole dataset int[] postMarkerID = haplos[gap+1][0].getMarkers(); crossBlock.clear(); //make a "block" of the markers which id the pre- and post- gap haps for (int i = 0; i < preGapSubset.size(); i++){ crossBlock.add(new Integer(preMarkerID[((Integer)preGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap][0].addTag(((Integer)preGapSubset.elementAt(i)).intValue()); } for (int i = 0; i < postGapSubset.size(); i++){ crossBlock.add(new Integer(postMarkerID[((Integer)postGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap+1][0].addTag(((Integer)postGapSubset.elementAt(i)).intValue()); } Vector inputVector = new Vector(); int[] intArray = new int[crossBlock.size()]; for (int i = 0; i < crossBlock.size(); i++){ //input format for hap generating routine intArray[i] = ((Integer)crossBlock.elementAt(i)).intValue(); } inputVector.add(intArray); Haplotype[] crossHaplos = generateHaplotypes(inputVector,true)[0]; //get haplos of gap double[][] multilocusTable = new 
double[haplos[gap].length][]; double[] rowSum = new double[haplos[gap].length]; double[] colSum = new double[haplos[gap+1].length]; double multilocusTotal = 0; for (int i = 0; i < haplos[gap].length; i++){ double[] crossPercentages = new double[haplos[gap+1].length]; StringBuffer firstHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string out of uniquely identifying genotypes for this hap firstHapCodeB.append(haplos[gap][i].getGeno()[((Integer)preGapSubset.elementAt(j)).intValue()]); } String firstHapCode = firstHapCodeB.toString(); for (int gapHaplo = 0; gapHaplo < crossHaplos.length; gapHaplo++){ //look at each crossover hap if (crossHaplos[gapHaplo].getPercentage() > CROSSOVER_THRESHOLD){ StringBuffer gapBeginHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string as above gapBeginHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapBeginHapCode = gapBeginHapCodeB.toString(); if (gapBeginHapCode.equals(firstHapCode)){ //if this crossover hap corresponds to this pregap hap StringBuffer gapEndHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = preGapSubset.size(); j < crossHaplos[gapHaplo].getGeno().length; j++){ gapEndHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapEndHapCode = gapEndHapCodeB.toString(); for (int j = 0; j < haplos[gap+1].length; j++){ StringBuffer endHapCodeB = new StringBuffer(); for (int k = 0; k < postGapSubset.size(); k++){ endHapCodeB.append(haplos[gap+1][j].getGeno()[((Integer)postGapSubset.elementAt(k)).intValue()]); } String endHapCode = endHapCodeB.toString(); if (gapEndHapCode.equals(endHapCode)){ crossPercentages[j] = crossHaplos[gapHaplo].getPercentage(); } } } } } //thought i needed to fix these percentages, but the raw values are just as good. 
/* double percentageSum = 0; double[] fixedCross = new double[crossPercentages.length]; for (int y = 0; y < crossPercentages.length; y++){ percentageSum += crossPercentages[y]; } for (int y = 0; y < crossPercentages.length; y++){ fixedCross[y] = crossPercentages[y]/percentageSum; }*/ haplos[gap][i].addCrossovers(crossPercentages); multilocusTable[i] = crossPercentages; } //sort based on "straight line" crossings int hilimit; int lolimit; if (haplos[gap+1].length > haplos[gap].length) { hilimit = haplos[gap+1].length; lolimit = haplos[gap].length; }else{ hilimit = haplos[gap].length; lolimit = haplos[gap+1].length; } boolean[] unavailable = new boolean[hilimit]; int[] prevBlockLocs = new int[haplos[gap].length]; for (int q = 0; q < prevBlockLocs.length; q++){ prevBlockLocs[haplos[gap][q].getListOrder()] = q; } for (int u = 0; u < haplos[gap+1].length; u++){ double currentBestVal = 0; int currentBestLoc = -1; for (int v = 0; v < lolimit; v++){ if (!(unavailable[v])){ if (haplos[gap][prevBlockLocs[v]].getCrossover(u) >= currentBestVal) { currentBestLoc = haplos[gap][prevBlockLocs[v]].getListOrder(); currentBestVal = haplos[gap][prevBlockLocs[v]].getCrossover(u); } } } //it didn't get lined up with any of the previous block's markers //put it at the end of the list if (currentBestLoc == -1){ for (int v = 0; v < unavailable.length; v++){ if (!(unavailable[v])){ currentBestLoc = v; break; } } } haplos[gap+1][u].setListOrder(currentBestLoc); unavailable[currentBestLoc] = true; } //compute multilocus D' for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ rowSum[i] += multilocusTable[i][j]; colSum[j] += multilocusTable[i][j]; multilocusTotal += multilocusTable[i][j]; if (rowSum[i] == 0) rowSum[i] = 0.0001; if (colSum[j] == 0) colSum[j] = 0.0001; } } double multidprime = 0; boolean noDivByZero = false; for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ double num = (multilocusTable[i][j]/multilocusTotal) - 
(rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom; if (num < 0){ double denom1 = (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(1.0 - (colSum[j]/multilocusTotal)); if (denom1 < denom2) { denom = denom1; }else{ denom = denom2; } }else{ double denom1 = (rowSum[i]/multilocusTotal)*(1.0 -(colSum[j]/multilocusTotal)); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(colSum[j]/multilocusTotal); if (denom1 < denom2){ denom = denom1; }else{ denom = denom2; } } if (denom != 0){ noDivByZero = true; multidprime += (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal)*Math.abs(num/denom); } } } if (noDivByZero){ multidprimeArray[gap] = multidprime; }else{ multidprimeArray[gap] = 1.00; } } return haplos; }
// Duplicate copy of generateCrossovers (see notes on the copy above).
// Per inter-block gap: selects tag-SNP subsets on both sides, phases a
// synthetic block of those tags via generateHaplotypes, records cross-gap
// connection frequencies per pre-gap haplotype (ignoring crossover haps below
// CROSSOVER_THRESHOLD), greedily reorders post-gap haplotypes for straight-
// line display, and stores a multilocus D' in multidprimeArray[gap] (1.0 when
// all denominators degenerate). Mutates and returns haplos; null for empty.
Haplotype[][] generateCrossovers(Haplotype[][] haplos) throws HaploViewException{ Vector crossBlock = new Vector(); double CROSSOVER_THRESHOLD = 0.001; //to what percentage do we want to consider crossings? if (haplos.length == 0) return null; //seed first block with ordering numbers for (int u = 0; u < haplos[0].length; u++){ haplos[0][u].setListOrder(u); } for (int i = 0; i < haplos.length; i++){ haplos[i][0].clearTags(); } multidprimeArray = new double[haplos.length]; //get "tag" SNPS if there is only one block: if (haplos.length==1){ Vector theBestSubset = getBestSubset(haplos[0]); for (int i = 0; i < theBestSubset.size(); i++){ haplos[0][0].addTag(((Integer)theBestSubset.elementAt(i)).intValue()); } } for (int gap = 0; gap < haplos.length - 1; gap++){ //compute crossovers for each inter-block gap Vector preGapSubset = getBestSubset(haplos[gap]); Vector postGapSubset = getBestSubset(haplos[gap+1]); int[] preMarkerID = haplos[gap][0].getMarkers(); //index haplos to markers in whole dataset int[] postMarkerID = haplos[gap+1][0].getMarkers(); crossBlock.clear(); //make a "block" of the markers which id the pre- and post- gap haps for (int i = 0; i < preGapSubset.size(); i++){ crossBlock.add(new Integer(preMarkerID[((Integer)preGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap][0].addTag(((Integer)preGapSubset.elementAt(i)).intValue()); } for (int i = 0; i < postGapSubset.size(); i++){ crossBlock.add(new Integer(postMarkerID[((Integer)postGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap+1][0].addTag(((Integer)postGapSubset.elementAt(i)).intValue()); } Vector inputVector = new Vector(); int[] intArray = new int[crossBlock.size()]; for (int i = 0; i < crossBlock.size(); i++){ //input format for hap generating routine intArray[i] = ((Integer)crossBlock.elementAt(i)).intValue(); } inputVector.add(intArray); Haplotype[] crossHaplos = generateHaplotypes(inputVector,true)[0]; //get haplos of gap double[][] multilocusTable = new 
double[haplos[gap].length][]; double[] rowSum = new double[haplos[gap].length]; double[] colSum = new double[haplos[gap+1].length]; double multilocusTotal = 0; for (int i = 0; i < haplos[gap].length; i++){ double[] crossPercentages = new double[haplos[gap+1].length]; StringBuffer firstHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string out of uniquely identifying genotypes for this hap firstHapCodeB.append(haplos[gap][i].getGeno()[((Integer)preGapSubset.elementAt(j)).intValue()]); } String firstHapCode = firstHapCodeB.toString(); for (int gapHaplo = 0; gapHaplo < crossHaplos.length; gapHaplo++){ //look at each crossover hap if (crossHaplos[gapHaplo].getPercentage() > CROSSOVER_THRESHOLD){ StringBuffer gapBeginHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string as above gapBeginHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapBeginHapCode = gapBeginHapCodeB.toString(); if (gapBeginHapCode.equals(firstHapCode)){ //if this crossover hap corresponds to this pregap hap StringBuffer gapEndHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = preGapSubset.size(); j < crossHaplos[gapHaplo].getGeno().length; j++){ gapEndHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapEndHapCode = gapEndHapCodeB.toString(); for (int j = 0; j < haplos[gap+1].length; j++){ StringBuffer endHapCodeB = new StringBuffer(); for (int k = 0; k < postGapSubset.size(); k++){ endHapCodeB.append(haplos[gap+1][j].getGeno()[((Integer)postGapSubset.elementAt(k)).intValue()]); } String endHapCode = endHapCodeB.toString(); if (gapEndHapCode.equals(endHapCode)){ crossPercentages[j] = crossHaplos[gapHaplo].getPercentage(); } } } } } //thought i needed to fix these percentages, but the raw values are just as good. 
/* double percentageSum = 0; double[] fixedCross = new double[crossPercentages.length]; for (int y = 0; y < crossPercentages.length; y++){ percentageSum += crossPercentages[y]; } for (int y = 0; y < crossPercentages.length; y++){ fixedCross[y] = crossPercentages[y]/percentageSum; }*/ haplos[gap][i].addCrossovers(crossPercentages); multilocusTable[i] = crossPercentages; } //sort based on "straight line" crossings int hilimit; int lolimit; if (haplos[gap+1].length > haplos[gap].length) { hilimit = haplos[gap+1].length; lolimit = haplos[gap].length; }else{ hilimit = haplos[gap].length; lolimit = haplos[gap+1].length; } boolean[] unavailable = new boolean[hilimit]; int[] prevBlockLocs = new int[haplos[gap].length]; for (int q = 0; q < prevBlockLocs.length; q++){ prevBlockLocs[haplos[gap][q].getListOrder()] = q; } for (int u = 0; u < haplos[gap+1].length; u++){ double currentBestVal = 0; int currentBestLoc = -1; for (int v = 0; v < lolimit; v++){ if (!(unavailable[v])){ if (haplos[gap][prevBlockLocs[v]].getCrossover(u) >= currentBestVal) { currentBestLoc = haplos[gap][prevBlockLocs[v]].getListOrder(); currentBestVal = haplos[gap][prevBlockLocs[v]].getCrossover(u); } } } //it didn't get lined up with any of the previous block's markers //put it at the end of the list if (currentBestLoc == -1){ for (int v = 0; v < unavailable.length; v++){ if (!(unavailable[v])){ currentBestLoc = v; break; } } } haplos[gap+1][u].setListOrder(currentBestLoc); unavailable[currentBestLoc] = true; } //compute multilocus D' for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ rowSum[i] += multilocusTable[i][j]; colSum[j] += multilocusTable[i][j]; multilocusTotal += multilocusTable[i][j]; if (rowSum[i] == 0) rowSum[i] = 0.0001; if (colSum[j] == 0) colSum[j] = 0.0001; } } double multidprime = 0; boolean noDivByZero = false; for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ double num = (multilocusTable[i][j]/multilocusTotal) - 
(rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom; if (num < 0){ double denom1 = (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(1.0 - (colSum[j]/multilocusTotal)); if (denom1 < denom2) { denom = denom1; }else{ denom = denom2; } }else{ double denom1 = (rowSum[i]/multilocusTotal)*(1.0 -(colSum[j]/multilocusTotal)); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(colSum[j]/multilocusTotal); if (denom1 < denom2){ denom = denom1; }else{ denom = denom2; } } if (denom != 0){ noDivByZero = true; multidprime += (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal)*Math.abs(num/denom); } } } if (noDivByZero){ multidprimeArray[gap] = multidprime; }else{ multidprimeArray[gap] = 1.00; } } return haplos; }
1,110,776
// Duplicate copy of generateCrossovers (see notes on the first copy).
// Per inter-block gap: selects tag-SNP subsets on both sides, phases a
// synthetic block of those tags via generateHaplotypes, records cross-gap
// connection frequencies per pre-gap haplotype (ignoring crossover haps below
// CROSSOVER_THRESHOLD), greedily reorders post-gap haplotypes for straight-
// line display, and stores a multilocus D' in multidprimeArray[gap] (1.0 when
// all denominators degenerate). Mutates and returns haplos; null for empty.
Haplotype[][] generateCrossovers(Haplotype[][] haplos) throws HaploViewException{ Vector crossBlock = new Vector(); double CROSSOVER_THRESHOLD = 0.001; //to what percentage do we want to consider crossings? if (haplos.length == 0) return null; //seed first block with ordering numbers for (int u = 0; u < haplos[0].length; u++){ haplos[0][u].setListOrder(u); } for (int i = 0; i < haplos.length; i++){ haplos[i][0].clearTags(); } multidprimeArray = new double[haplos.length]; //get "tag" SNPS if there is only one block: if (haplos.length==1){ Vector theBestSubset = getBestSubset(haplos[0]); for (int i = 0; i < theBestSubset.size(); i++){ haplos[0][0].addTag(((Integer)theBestSubset.elementAt(i)).intValue()); } } for (int gap = 0; gap < haplos.length - 1; gap++){ //compute crossovers for each inter-block gap Vector preGapSubset = getBestSubset(haplos[gap]); Vector postGapSubset = getBestSubset(haplos[gap+1]); int[] preMarkerID = haplos[gap][0].getMarkers(); //index haplos to markers in whole dataset int[] postMarkerID = haplos[gap+1][0].getMarkers(); crossBlock.clear(); //make a "block" of the markers which id the pre- and post- gap haps for (int i = 0; i < preGapSubset.size(); i++){ crossBlock.add(new Integer(preMarkerID[((Integer)preGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap][0].addTag(((Integer)preGapSubset.elementAt(i)).intValue()); } for (int i = 0; i < postGapSubset.size(); i++){ crossBlock.add(new Integer(postMarkerID[((Integer)postGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap+1][0].addTag(((Integer)postGapSubset.elementAt(i)).intValue()); } Vector inputVector = new Vector(); int[] intArray = new int[crossBlock.size()]; for (int i = 0; i < crossBlock.size(); i++){ //input format for hap generating routine intArray[i] = ((Integer)crossBlock.elementAt(i)).intValue(); } inputVector.add(intArray); Haplotype[] crossHaplos = generateHaplotypes(inputVector,true)[0]; //get haplos of gap double[][] multilocusTable = new 
double[haplos[gap].length][]; double[] rowSum = new double[haplos[gap].length]; double[] colSum = new double[haplos[gap+1].length]; double multilocusTotal = 0; for (int i = 0; i < haplos[gap].length; i++){ double[] crossPercentages = new double[haplos[gap+1].length]; StringBuffer firstHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string out of uniquely identifying genotypes for this hap firstHapCodeB.append(haplos[gap][i].getGeno()[((Integer)preGapSubset.elementAt(j)).intValue()]); } String firstHapCode = firstHapCodeB.toString(); for (int gapHaplo = 0; gapHaplo < crossHaplos.length; gapHaplo++){ //look at each crossover hap if (crossHaplos[gapHaplo].getPercentage() > CROSSOVER_THRESHOLD){ StringBuffer gapBeginHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string as above gapBeginHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapBeginHapCode = gapBeginHapCodeB.toString(); if (gapBeginHapCode.equals(firstHapCode)){ //if this crossover hap corresponds to this pregap hap StringBuffer gapEndHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = preGapSubset.size(); j < crossHaplos[gapHaplo].getGeno().length; j++){ gapEndHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapEndHapCode = gapEndHapCodeB.toString(); for (int j = 0; j < haplos[gap+1].length; j++){ StringBuffer endHapCodeB = new StringBuffer(); for (int k = 0; k < postGapSubset.size(); k++){ endHapCodeB.append(haplos[gap+1][j].getGeno()[((Integer)postGapSubset.elementAt(k)).intValue()]); } String endHapCode = endHapCodeB.toString(); if (gapEndHapCode.equals(endHapCode)){ crossPercentages[j] = crossHaplos[gapHaplo].getPercentage(); } } } } } //thought i needed to fix these percentages, but the raw values are just as good. 
/* double percentageSum = 0; double[] fixedCross = new double[crossPercentages.length]; for (int y = 0; y < crossPercentages.length; y++){ percentageSum += crossPercentages[y]; } for (int y = 0; y < crossPercentages.length; y++){ fixedCross[y] = crossPercentages[y]/percentageSum; }*/ haplos[gap][i].addCrossovers(crossPercentages); multilocusTable[i] = crossPercentages; } //sort based on "straight line" crossings int hilimit; int lolimit; if (haplos[gap+1].length > haplos[gap].length) { hilimit = haplos[gap+1].length; lolimit = haplos[gap].length; }else{ hilimit = haplos[gap].length; lolimit = haplos[gap+1].length; } boolean[] unavailable = new boolean[hilimit]; int[] prevBlockLocs = new int[haplos[gap].length]; for (int q = 0; q < prevBlockLocs.length; q++){ prevBlockLocs[haplos[gap][q].getListOrder()] = q; } for (int u = 0; u < haplos[gap+1].length; u++){ double currentBestVal = 0; int currentBestLoc = -1; for (int v = 0; v < lolimit; v++){ if (!(unavailable[v])){ if (haplos[gap][prevBlockLocs[v]].getCrossover(u) >= currentBestVal) { currentBestLoc = haplos[gap][prevBlockLocs[v]].getListOrder(); currentBestVal = haplos[gap][prevBlockLocs[v]].getCrossover(u); } } } //it didn't get lined up with any of the previous block's markers //put it at the end of the list if (currentBestLoc == -1){ for (int v = 0; v < unavailable.length; v++){ if (!(unavailable[v])){ currentBestLoc = v; break; } } } haplos[gap+1][u].setListOrder(currentBestLoc); unavailable[currentBestLoc] = true; } //compute multilocus D' for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ rowSum[i] += multilocusTable[i][j]; colSum[j] += multilocusTable[i][j]; multilocusTotal += multilocusTable[i][j]; if (rowSum[i] == 0) rowSum[i] = 0.0001; if (colSum[j] == 0) colSum[j] = 0.0001; } } double multidprime = 0; boolean noDivByZero = false; for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ double num = (multilocusTable[i][j]/multilocusTotal) - 
(rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom; if (num < 0){ double denom1 = (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(1.0 - (colSum[j]/multilocusTotal)); if (denom1 < denom2) { denom = denom1; }else{ denom = denom2; } }else{ double denom1 = (rowSum[i]/multilocusTotal)*(1.0 -(colSum[j]/multilocusTotal)); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(colSum[j]/multilocusTotal); if (denom1 < denom2){ denom = denom1; }else{ denom = denom2; } } if (denom != 0){ noDivByZero = true; multidprime += (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal)*Math.abs(num/denom); } } } if (noDivByZero){ multidprimeArray[gap] = multidprime; }else{ multidprimeArray[gap] = 1.00; } } return haplos; }
Haplotype[][] generateCrossovers(Haplotype[][] haplos) throws HaploViewException{ Vector crossBlock = new Vector(); double CROSSOVER_THRESHOLD = 0.001; //to what percentage do we want to consider crossings? if (haplos.length == 0) return null; //seed first block with ordering numbers for (int u = 0; u < haplos[0].length; u++){ haplos[0][u].setListOrder(u); } for (int i = 0; i < haplos.length; i++){ haplos[i][0].clearTags(); } multidprimeArray = new double[haplos.length]; //get "tag" SNPS if there is only one block: if (haplos.length==1){ Vector theBestSubset = getBestSubset(haplos[0]); for (int i = 0; i < theBestSubset.size(); i++){ haplos[0][0].addTag(((Integer)theBestSubset.elementAt(i)).intValue()); } } for (int gap = 0; gap < haplos.length - 1; gap++){ //compute crossovers for each inter-block gap Vector preGapSubset = getBestSubset(haplos[gap]); Vector postGapSubset = getBestSubset(haplos[gap+1]); int[] preMarkerID = haplos[gap][0].getMarkers(); //index haplos to markers in whole dataset int[] postMarkerID = haplos[gap+1][0].getMarkers(); crossBlock.clear(); //make a "block" of the markers which id the pre- and post- gap haps for (int i = 0; i < preGapSubset.size(); i++){ crossBlock.add(new Integer(preMarkerID[((Integer)preGapSubset.elementAt(i)).intValue()])); //mark tags } for (int i = 0; i < postGapSubset.size(); i++){ crossBlock.add(new Integer(postMarkerID[((Integer)postGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap+1][0].addTag(((Integer)postGapSubset.elementAt(i)).intValue()); } Vector inputVector = new Vector(); int[] intArray = new int[crossBlock.size()]; for (int i = 0; i < crossBlock.size(); i++){ //input format for hap generating routine intArray[i] = ((Integer)crossBlock.elementAt(i)).intValue(); } inputVector.add(intArray); Haplotype[] crossHaplos = generateHaplotypes(inputVector,true)[0]; //get haplos of gap double[][] multilocusTable = new double[haplos[gap].length][]; double[] rowSum = new double[haplos[gap].length]; double[] 
colSum = new double[haplos[gap+1].length]; double multilocusTotal = 0; for (int i = 0; i < haplos[gap].length; i++){ double[] crossPercentages = new double[haplos[gap+1].length]; StringBuffer firstHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string out of uniquely identifying genotypes for this hap firstHapCodeB.append(haplos[gap][i].getGeno()[((Integer)preGapSubset.elementAt(j)).intValue()]); } String firstHapCode = firstHapCodeB.toString(); for (int gapHaplo = 0; gapHaplo < crossHaplos.length; gapHaplo++){ //look at each crossover hap if (crossHaplos[gapHaplo].getPercentage() > CROSSOVER_THRESHOLD){ StringBuffer gapBeginHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string as above gapBeginHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapBeginHapCode = gapBeginHapCodeB.toString(); if (gapBeginHapCode.equals(firstHapCode)){ //if this crossover hap corresponds to this pregap hap StringBuffer gapEndHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = preGapSubset.size(); j < crossHaplos[gapHaplo].getGeno().length; j++){ gapEndHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapEndHapCode = gapEndHapCodeB.toString(); for (int j = 0; j < haplos[gap+1].length; j++){ StringBuffer endHapCodeB = new StringBuffer(); for (int k = 0; k < postGapSubset.size(); k++){ endHapCodeB.append(haplos[gap+1][j].getGeno()[((Integer)postGapSubset.elementAt(k)).intValue()]); } String endHapCode = endHapCodeB.toString(); if (gapEndHapCode.equals(endHapCode)){ crossPercentages[j] = crossHaplos[gapHaplo].getPercentage(); } } } } } //thought i needed to fix these percentages, but the raw values are just as good. 
/* double percentageSum = 0; double[] fixedCross = new double[crossPercentages.length]; for (int y = 0; y < crossPercentages.length; y++){ percentageSum += crossPercentages[y]; } for (int y = 0; y < crossPercentages.length; y++){ fixedCross[y] = crossPercentages[y]/percentageSum; }*/ haplos[gap][i].addCrossovers(crossPercentages); multilocusTable[i] = crossPercentages; } //sort based on "straight line" crossings int hilimit; int lolimit; if (haplos[gap+1].length > haplos[gap].length) { hilimit = haplos[gap+1].length; lolimit = haplos[gap].length; }else{ hilimit = haplos[gap].length; lolimit = haplos[gap+1].length; } boolean[] unavailable = new boolean[hilimit]; int[] prevBlockLocs = new int[haplos[gap].length]; for (int q = 0; q < prevBlockLocs.length; q++){ prevBlockLocs[haplos[gap][q].getListOrder()] = q; } for (int u = 0; u < haplos[gap+1].length; u++){ double currentBestVal = 0; int currentBestLoc = -1; for (int v = 0; v < lolimit; v++){ if (!(unavailable[v])){ if (haplos[gap][prevBlockLocs[v]].getCrossover(u) >= currentBestVal) { currentBestLoc = haplos[gap][prevBlockLocs[v]].getListOrder(); currentBestVal = haplos[gap][prevBlockLocs[v]].getCrossover(u); } } } //it didn't get lined up with any of the previous block's markers //put it at the end of the list if (currentBestLoc == -1){ for (int v = 0; v < unavailable.length; v++){ if (!(unavailable[v])){ currentBestLoc = v; break; } } } haplos[gap+1][u].setListOrder(currentBestLoc); unavailable[currentBestLoc] = true; } //compute multilocus D' for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ rowSum[i] += multilocusTable[i][j]; colSum[j] += multilocusTable[i][j]; multilocusTotal += multilocusTable[i][j]; if (rowSum[i] == 0) rowSum[i] = 0.0001; if (colSum[j] == 0) colSum[j] = 0.0001; } } double multidprime = 0; boolean noDivByZero = false; for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ double num = (multilocusTable[i][j]/multilocusTotal) - 
(rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom; if (num < 0){ double denom1 = (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(1.0 - (colSum[j]/multilocusTotal)); if (denom1 < denom2) { denom = denom1; }else{ denom = denom2; } }else{ double denom1 = (rowSum[i]/multilocusTotal)*(1.0 -(colSum[j]/multilocusTotal)); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(colSum[j]/multilocusTotal); if (denom1 < denom2){ denom = denom1; }else{ denom = denom2; } } if (denom != 0){ noDivByZero = true; multidprime += (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal)*Math.abs(num/denom); } } } if (noDivByZero){ multidprimeArray[gap] = multidprime; }else{ multidprimeArray[gap] = 1.00; } } return haplos; }
1,110,777
Haplotype[][] generateCrossovers(Haplotype[][] haplos) throws HaploViewException{ Vector crossBlock = new Vector(); double CROSSOVER_THRESHOLD = 0.001; //to what percentage do we want to consider crossings? if (haplos.length == 0) return null; //seed first block with ordering numbers for (int u = 0; u < haplos[0].length; u++){ haplos[0][u].setListOrder(u); } for (int i = 0; i < haplos.length; i++){ haplos[i][0].clearTags(); } multidprimeArray = new double[haplos.length]; //get "tag" SNPS if there is only one block: if (haplos.length==1){ Vector theBestSubset = getBestSubset(haplos[0]); for (int i = 0; i < theBestSubset.size(); i++){ haplos[0][0].addTag(((Integer)theBestSubset.elementAt(i)).intValue()); } } for (int gap = 0; gap < haplos.length - 1; gap++){ //compute crossovers for each inter-block gap Vector preGapSubset = getBestSubset(haplos[gap]); Vector postGapSubset = getBestSubset(haplos[gap+1]); int[] preMarkerID = haplos[gap][0].getMarkers(); //index haplos to markers in whole dataset int[] postMarkerID = haplos[gap+1][0].getMarkers(); crossBlock.clear(); //make a "block" of the markers which id the pre- and post- gap haps for (int i = 0; i < preGapSubset.size(); i++){ crossBlock.add(new Integer(preMarkerID[((Integer)preGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap][0].addTag(((Integer)preGapSubset.elementAt(i)).intValue()); } for (int i = 0; i < postGapSubset.size(); i++){ crossBlock.add(new Integer(postMarkerID[((Integer)postGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap+1][0].addTag(((Integer)postGapSubset.elementAt(i)).intValue()); } Vector inputVector = new Vector(); int[] intArray = new int[crossBlock.size()]; for (int i = 0; i < crossBlock.size(); i++){ //input format for hap generating routine intArray[i] = ((Integer)crossBlock.elementAt(i)).intValue(); } inputVector.add(intArray); Haplotype[] crossHaplos = generateHaplotypes(inputVector,true)[0]; //get haplos of gap double[][] multilocusTable = new 
double[haplos[gap].length][]; double[] rowSum = new double[haplos[gap].length]; double[] colSum = new double[haplos[gap+1].length]; double multilocusTotal = 0; for (int i = 0; i < haplos[gap].length; i++){ double[] crossPercentages = new double[haplos[gap+1].length]; StringBuffer firstHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string out of uniquely identifying genotypes for this hap firstHapCodeB.append(haplos[gap][i].getGeno()[((Integer)preGapSubset.elementAt(j)).intValue()]); } String firstHapCode = firstHapCodeB.toString(); for (int gapHaplo = 0; gapHaplo < crossHaplos.length; gapHaplo++){ //look at each crossover hap if (crossHaplos[gapHaplo].getPercentage() > CROSSOVER_THRESHOLD){ StringBuffer gapBeginHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string as above gapBeginHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapBeginHapCode = gapBeginHapCodeB.toString(); if (gapBeginHapCode.equals(firstHapCode)){ //if this crossover hap corresponds to this pregap hap StringBuffer gapEndHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = preGapSubset.size(); j < crossHaplos[gapHaplo].getGeno().length; j++){ gapEndHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapEndHapCode = gapEndHapCodeB.toString(); for (int j = 0; j < haplos[gap+1].length; j++){ StringBuffer endHapCodeB = new StringBuffer(); for (int k = 0; k < postGapSubset.size(); k++){ endHapCodeB.append(haplos[gap+1][j].getGeno()[((Integer)postGapSubset.elementAt(k)).intValue()]); } String endHapCode = endHapCodeB.toString(); if (gapEndHapCode.equals(endHapCode)){ crossPercentages[j] = crossHaplos[gapHaplo].getPercentage(); } } } } } //thought i needed to fix these percentages, but the raw values are just as good. 
/* double percentageSum = 0; double[] fixedCross = new double[crossPercentages.length]; for (int y = 0; y < crossPercentages.length; y++){ percentageSum += crossPercentages[y]; } for (int y = 0; y < crossPercentages.length; y++){ fixedCross[y] = crossPercentages[y]/percentageSum; }*/ haplos[gap][i].addCrossovers(crossPercentages); multilocusTable[i] = crossPercentages; } //sort based on "straight line" crossings int hilimit; int lolimit; if (haplos[gap+1].length > haplos[gap].length) { hilimit = haplos[gap+1].length; lolimit = haplos[gap].length; }else{ hilimit = haplos[gap].length; lolimit = haplos[gap+1].length; } boolean[] unavailable = new boolean[hilimit]; int[] prevBlockLocs = new int[haplos[gap].length]; for (int q = 0; q < prevBlockLocs.length; q++){ prevBlockLocs[haplos[gap][q].getListOrder()] = q; } for (int u = 0; u < haplos[gap+1].length; u++){ double currentBestVal = 0; int currentBestLoc = -1; for (int v = 0; v < lolimit; v++){ if (!(unavailable[v])){ if (haplos[gap][prevBlockLocs[v]].getCrossover(u) >= currentBestVal) { currentBestLoc = haplos[gap][prevBlockLocs[v]].getListOrder(); currentBestVal = haplos[gap][prevBlockLocs[v]].getCrossover(u); } } } //it didn't get lined up with any of the previous block's markers //put it at the end of the list if (currentBestLoc == -1){ for (int v = 0; v < unavailable.length; v++){ if (!(unavailable[v])){ currentBestLoc = v; break; } } } haplos[gap+1][u].setListOrder(currentBestLoc); unavailable[currentBestLoc] = true; } //compute multilocus D' for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ rowSum[i] += multilocusTable[i][j]; colSum[j] += multilocusTable[i][j]; multilocusTotal += multilocusTable[i][j]; if (rowSum[i] == 0) rowSum[i] = 0.0001; if (colSum[j] == 0) colSum[j] = 0.0001; } } double multidprime = 0; boolean noDivByZero = false; for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ double num = (multilocusTable[i][j]/multilocusTotal) - 
(rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom; if (num < 0){ double denom1 = (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(1.0 - (colSum[j]/multilocusTotal)); if (denom1 < denom2) { denom = denom1; }else{ denom = denom2; } }else{ double denom1 = (rowSum[i]/multilocusTotal)*(1.0 -(colSum[j]/multilocusTotal)); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(colSum[j]/multilocusTotal); if (denom1 < denom2){ denom = denom1; }else{ denom = denom2; } } if (denom != 0){ noDivByZero = true; multidprime += (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal)*Math.abs(num/denom); } } } if (noDivByZero){ multidprimeArray[gap] = multidprime; }else{ multidprimeArray[gap] = 1.00; } } return haplos; }
Haplotype[][] generateCrossovers(Haplotype[][] haplos) throws HaploViewException{ Vector crossBlock = new Vector(); double CROSSOVER_THRESHOLD = 0.001; //to what percentage do we want to consider crossings? if (haplos.length == 0) return null; //seed first block with ordering numbers for (int u = 0; u < haplos[0].length; u++){ haplos[0][u].setListOrder(u); } for (int i = 0; i < haplos.length; i++){ haplos[i][0].clearTags(); } multidprimeArray = new double[haplos.length]; //get "tag" SNPS if there is only one block: if (haplos.length==1){ Vector theBestSubset = getBestSubset(haplos[0]); for (int i = 0; i < theBestSubset.size(); i++){ haplos[0][0].addTag(((Integer)theBestSubset.elementAt(i)).intValue()); } } for (int gap = 0; gap < haplos.length - 1; gap++){ //compute crossovers for each inter-block gap Vector preGapSubset = getBestSubset(haplos[gap]); Vector postGapSubset = getBestSubset(haplos[gap+1]); int[] preMarkerID = haplos[gap][0].getMarkers(); //index haplos to markers in whole dataset int[] postMarkerID = haplos[gap+1][0].getMarkers(); crossBlock.clear(); //make a "block" of the markers which id the pre- and post- gap haps for (int i = 0; i < preGapSubset.size(); i++){ crossBlock.add(new Integer(preMarkerID[((Integer)preGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap][0].addTag(((Integer)preGapSubset.elementAt(i)).intValue()); } for (int i = 0; i < postGapSubset.size(); i++){ crossBlock.add(new Integer(postMarkerID[((Integer)postGapSubset.elementAt(i)).intValue()])); //mark tags } Vector inputVector = new Vector(); int[] intArray = new int[crossBlock.size()]; for (int i = 0; i < crossBlock.size(); i++){ //input format for hap generating routine intArray[i] = ((Integer)crossBlock.elementAt(i)).intValue(); } inputVector.add(intArray); Haplotype[] crossHaplos = generateHaplotypes(inputVector,true)[0]; //get haplos of gap double[][] multilocusTable = new double[haplos[gap].length][]; double[] rowSum = new double[haplos[gap].length]; double[] 
colSum = new double[haplos[gap+1].length]; double multilocusTotal = 0; for (int i = 0; i < haplos[gap].length; i++){ double[] crossPercentages = new double[haplos[gap+1].length]; StringBuffer firstHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string out of uniquely identifying genotypes for this hap firstHapCodeB.append(haplos[gap][i].getGeno()[((Integer)preGapSubset.elementAt(j)).intValue()]); } String firstHapCode = firstHapCodeB.toString(); for (int gapHaplo = 0; gapHaplo < crossHaplos.length; gapHaplo++){ //look at each crossover hap if (crossHaplos[gapHaplo].getPercentage() > CROSSOVER_THRESHOLD){ StringBuffer gapBeginHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string as above gapBeginHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapBeginHapCode = gapBeginHapCodeB.toString(); if (gapBeginHapCode.equals(firstHapCode)){ //if this crossover hap corresponds to this pregap hap StringBuffer gapEndHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = preGapSubset.size(); j < crossHaplos[gapHaplo].getGeno().length; j++){ gapEndHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapEndHapCode = gapEndHapCodeB.toString(); for (int j = 0; j < haplos[gap+1].length; j++){ StringBuffer endHapCodeB = new StringBuffer(); for (int k = 0; k < postGapSubset.size(); k++){ endHapCodeB.append(haplos[gap+1][j].getGeno()[((Integer)postGapSubset.elementAt(k)).intValue()]); } String endHapCode = endHapCodeB.toString(); if (gapEndHapCode.equals(endHapCode)){ crossPercentages[j] = crossHaplos[gapHaplo].getPercentage(); } } } } } //thought i needed to fix these percentages, but the raw values are just as good. 
/* double percentageSum = 0; double[] fixedCross = new double[crossPercentages.length]; for (int y = 0; y < crossPercentages.length; y++){ percentageSum += crossPercentages[y]; } for (int y = 0; y < crossPercentages.length; y++){ fixedCross[y] = crossPercentages[y]/percentageSum; }*/ haplos[gap][i].addCrossovers(crossPercentages); multilocusTable[i] = crossPercentages; } //sort based on "straight line" crossings int hilimit; int lolimit; if (haplos[gap+1].length > haplos[gap].length) { hilimit = haplos[gap+1].length; lolimit = haplos[gap].length; }else{ hilimit = haplos[gap].length; lolimit = haplos[gap+1].length; } boolean[] unavailable = new boolean[hilimit]; int[] prevBlockLocs = new int[haplos[gap].length]; for (int q = 0; q < prevBlockLocs.length; q++){ prevBlockLocs[haplos[gap][q].getListOrder()] = q; } for (int u = 0; u < haplos[gap+1].length; u++){ double currentBestVal = 0; int currentBestLoc = -1; for (int v = 0; v < lolimit; v++){ if (!(unavailable[v])){ if (haplos[gap][prevBlockLocs[v]].getCrossover(u) >= currentBestVal) { currentBestLoc = haplos[gap][prevBlockLocs[v]].getListOrder(); currentBestVal = haplos[gap][prevBlockLocs[v]].getCrossover(u); } } } //it didn't get lined up with any of the previous block's markers //put it at the end of the list if (currentBestLoc == -1){ for (int v = 0; v < unavailable.length; v++){ if (!(unavailable[v])){ currentBestLoc = v; break; } } } haplos[gap+1][u].setListOrder(currentBestLoc); unavailable[currentBestLoc] = true; } //compute multilocus D' for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ rowSum[i] += multilocusTable[i][j]; colSum[j] += multilocusTable[i][j]; multilocusTotal += multilocusTable[i][j]; if (rowSum[i] == 0) rowSum[i] = 0.0001; if (colSum[j] == 0) colSum[j] = 0.0001; } } double multidprime = 0; boolean noDivByZero = false; for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ double num = (multilocusTable[i][j]/multilocusTotal) - 
(rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom; if (num < 0){ double denom1 = (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(1.0 - (colSum[j]/multilocusTotal)); if (denom1 < denom2) { denom = denom1; }else{ denom = denom2; } }else{ double denom1 = (rowSum[i]/multilocusTotal)*(1.0 -(colSum[j]/multilocusTotal)); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(colSum[j]/multilocusTotal); if (denom1 < denom2){ denom = denom1; }else{ denom = denom2; } } if (denom != 0){ noDivByZero = true; multidprime += (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal)*Math.abs(num/denom); } } } if (noDivByZero){ multidprimeArray[gap] = multidprime; }else{ multidprimeArray[gap] = 1.00; } } return haplos; }
1,110,778
Haplotype[][] generateCrossovers(Haplotype[][] haplos) throws HaploViewException{ Vector crossBlock = new Vector(); double CROSSOVER_THRESHOLD = 0.001; //to what percentage do we want to consider crossings? if (haplos.length == 0) return null; //seed first block with ordering numbers for (int u = 0; u < haplos[0].length; u++){ haplos[0][u].setListOrder(u); } for (int i = 0; i < haplos.length; i++){ haplos[i][0].clearTags(); } multidprimeArray = new double[haplos.length]; //get "tag" SNPS if there is only one block: if (haplos.length==1){ Vector theBestSubset = getBestSubset(haplos[0]); for (int i = 0; i < theBestSubset.size(); i++){ haplos[0][0].addTag(((Integer)theBestSubset.elementAt(i)).intValue()); } } for (int gap = 0; gap < haplos.length - 1; gap++){ //compute crossovers for each inter-block gap Vector preGapSubset = getBestSubset(haplos[gap]); Vector postGapSubset = getBestSubset(haplos[gap+1]); int[] preMarkerID = haplos[gap][0].getMarkers(); //index haplos to markers in whole dataset int[] postMarkerID = haplos[gap+1][0].getMarkers(); crossBlock.clear(); //make a "block" of the markers which id the pre- and post- gap haps for (int i = 0; i < preGapSubset.size(); i++){ crossBlock.add(new Integer(preMarkerID[((Integer)preGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap][0].addTag(((Integer)preGapSubset.elementAt(i)).intValue()); } for (int i = 0; i < postGapSubset.size(); i++){ crossBlock.add(new Integer(postMarkerID[((Integer)postGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap+1][0].addTag(((Integer)postGapSubset.elementAt(i)).intValue()); } Vector inputVector = new Vector(); int[] intArray = new int[crossBlock.size()]; for (int i = 0; i < crossBlock.size(); i++){ //input format for hap generating routine intArray[i] = ((Integer)crossBlock.elementAt(i)).intValue(); } inputVector.add(intArray); Haplotype[] crossHaplos = generateHaplotypes(inputVector,true)[0]; //get haplos of gap double[][] multilocusTable = new 
double[haplos[gap].length][]; double[] rowSum = new double[haplos[gap].length]; double[] colSum = new double[haplos[gap+1].length]; double multilocusTotal = 0; for (int i = 0; i < haplos[gap].length; i++){ double[] crossPercentages = new double[haplos[gap+1].length]; StringBuffer firstHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string out of uniquely identifying genotypes for this hap firstHapCodeB.append(haplos[gap][i].getGeno()[((Integer)preGapSubset.elementAt(j)).intValue()]); } String firstHapCode = firstHapCodeB.toString(); for (int gapHaplo = 0; gapHaplo < crossHaplos.length; gapHaplo++){ //look at each crossover hap if (crossHaplos[gapHaplo].getPercentage() > CROSSOVER_THRESHOLD){ StringBuffer gapBeginHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string as above gapBeginHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapBeginHapCode = gapBeginHapCodeB.toString(); if (gapBeginHapCode.equals(firstHapCode)){ //if this crossover hap corresponds to this pregap hap StringBuffer gapEndHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = preGapSubset.size(); j < crossHaplos[gapHaplo].getGeno().length; j++){ gapEndHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapEndHapCode = gapEndHapCodeB.toString(); for (int j = 0; j < haplos[gap+1].length; j++){ StringBuffer endHapCodeB = new StringBuffer(); for (int k = 0; k < postGapSubset.size(); k++){ endHapCodeB.append(haplos[gap+1][j].getGeno()[((Integer)postGapSubset.elementAt(k)).intValue()]); } String endHapCode = endHapCodeB.toString(); if (gapEndHapCode.equals(endHapCode)){ crossPercentages[j] = crossHaplos[gapHaplo].getPercentage(); } } } } } //thought i needed to fix these percentages, but the raw values are just as good. 
/* double percentageSum = 0; double[] fixedCross = new double[crossPercentages.length]; for (int y = 0; y < crossPercentages.length; y++){ percentageSum += crossPercentages[y]; } for (int y = 0; y < crossPercentages.length; y++){ fixedCross[y] = crossPercentages[y]/percentageSum; }*/ haplos[gap][i].addCrossovers(crossPercentages); multilocusTable[i] = crossPercentages; } //sort based on "straight line" crossings int hilimit; int lolimit; if (haplos[gap+1].length > haplos[gap].length) { hilimit = haplos[gap+1].length; lolimit = haplos[gap].length; }else{ hilimit = haplos[gap].length; lolimit = haplos[gap+1].length; } boolean[] unavailable = new boolean[hilimit]; int[] prevBlockLocs = new int[haplos[gap].length]; for (int q = 0; q < prevBlockLocs.length; q++){ prevBlockLocs[haplos[gap][q].getListOrder()] = q; } for (int u = 0; u < haplos[gap+1].length; u++){ double currentBestVal = 0; int currentBestLoc = -1; for (int v = 0; v < lolimit; v++){ if (!(unavailable[v])){ if (haplos[gap][prevBlockLocs[v]].getCrossover(u) >= currentBestVal) { currentBestLoc = haplos[gap][prevBlockLocs[v]].getListOrder(); currentBestVal = haplos[gap][prevBlockLocs[v]].getCrossover(u); } } } //it didn't get lined up with any of the previous block's markers //put it at the end of the list if (currentBestLoc == -1){ for (int v = 0; v < unavailable.length; v++){ if (!(unavailable[v])){ currentBestLoc = v; break; } } } haplos[gap+1][u].setListOrder(currentBestLoc); unavailable[currentBestLoc] = true; } //compute multilocus D' for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ rowSum[i] += multilocusTable[i][j]; colSum[j] += multilocusTable[i][j]; multilocusTotal += multilocusTable[i][j]; if (rowSum[i] == 0) rowSum[i] = 0.0001; if (colSum[j] == 0) colSum[j] = 0.0001; } } double multidprime = 0; boolean noDivByZero = false; for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ double num = (multilocusTable[i][j]/multilocusTotal) - 
(rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom; if (num < 0){ double denom1 = (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(1.0 - (colSum[j]/multilocusTotal)); if (denom1 < denom2) { denom = denom1; }else{ denom = denom2; } }else{ double denom1 = (rowSum[i]/multilocusTotal)*(1.0 -(colSum[j]/multilocusTotal)); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(colSum[j]/multilocusTotal); if (denom1 < denom2){ denom = denom1; }else{ denom = denom2; } } if (denom != 0){ noDivByZero = true; multidprime += (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal)*Math.abs(num/denom); } } } if (noDivByZero){ multidprimeArray[gap] = multidprime; }else{ multidprimeArray[gap] = 1.00; } } return haplos; }
Haplotype[][] generateCrossovers(Haplotype[][] haplos) throws HaploViewException{ Vector crossBlock = new Vector(); double CROSSOVER_THRESHOLD = 0.001; //to what percentage do we want to consider crossings? if (haplos.length == 0) return null; //seed first block with ordering numbers for (int u = 0; u < haplos[0].length; u++){ haplos[0][u].setListOrder(u); } for (int i = 0; i < haplos.length; i++){ haplos[i][0].clearTags(); } multidprimeArray = new double[haplos.length]; //get "tag" SNPS if there is only one block: if (haplos.length==1){ Vector theBestSubset = getBestSubset(haplos[0]); for (int i = 0; i < theBestSubset.size(); i++){ haplos[0][0].addTag(((Integer)theBestSubset.elementAt(i)).intValue()); } } for (int gap = 0; gap < haplos.length - 1; gap++){ //compute crossovers for each inter-block gap Vector preGapSubset = getBestSubset(haplos[gap]); Vector postGapSubset = getBestSubset(haplos[gap+1]); int[] preMarkerID = haplos[gap][0].getMarkers(); //index haplos to markers in whole dataset int[] postMarkerID = haplos[gap+1][0].getMarkers(); crossBlock.clear(); //make a "block" of the markers which id the pre- and post- gap haps for (int i = 0; i < preGapSubset.size(); i++){ crossBlock.add(new Integer(preMarkerID[((Integer)preGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap][0].addTag(((Integer)preGapSubset.elementAt(i)).intValue()); } for (int i = 0; i < postGapSubset.size(); i++){ crossBlock.add(new Integer(postMarkerID[((Integer)postGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap+1][0].addTag(((Integer)postGapSubset.elementAt(i)).intValue()); } Vector inputVector = new Vector(); int[] intArray = new int[crossBlock.size()]; for (int i = 0; i < crossBlock.size(); i++){ //input format for hap generating routine intArray[i] = ((Integer)crossBlock.elementAt(i)).intValue(); } inputVector.add(intArray); Haplotype[] crossHaplos = generateHaplotypes(inputVector,true)[0]; //get haplos of gap double[][] multilocusTable = new 
double[haplos[gap].length][]; double[] rowSum = new double[haplos[gap].length]; double[] colSum = new double[haplos[gap+1].length]; double multilocusTotal = 0; for (int i = 0; i < haplos[gap].length; i++){ double[] crossPercentages = new double[haplos[gap+1].length]; StringBuffer firstHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string out of uniquely identifying genotypes for this hap firstHapCodeB.append(haplos[gap][i].getGeno()[((Integer)preGapSubset.elementAt(j)).intValue()]); } String firstHapCode = firstHapCodeB.toString(); for (int gapHaplo = 0; gapHaplo < crossHaplos.length; gapHaplo++){ //look at each crossover hap if (crossHaplos[gapHaplo].getPercentage() > CROSSOVER_THRESHOLD){ StringBuffer gapBeginHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string as above gapBeginHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapBeginHapCode = gapBeginHapCodeB.toString(); if (gapBeginHapCode.equals(firstHapCode)){ //if this crossover hap corresponds to this pregap hap StringBuffer gapEndHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = preGapSubset.size(); j < crossHaplos[gapHaplo].getGeno().length; j++){ gapEndHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapEndHapCode = gapEndHapCodeB.toString(); for (int j = 0; j < haplos[gap+1].length; j++){ StringBuffer endHapCodeB = new StringBuffer(); for (int k = 0; k < postGapSubset.size(); k++){ endHapCodeB.append(haplos[gap+1][j].getGeno()[((Integer)postGapSubset.elementAt(k)).intValue()]); } String endHapCode = endHapCodeB.toString(); if (gapEndHapCode.equals(endHapCode)){ crossPercentages[j] = crossHaplos[gapHaplo].getPercentage(); } } } } } //thought i needed to fix these percentages, but the raw values are just as good. 
/* double percentageSum = 0; double[] fixedCross = new double[crossPercentages.length]; for (int y = 0; y < crossPercentages.length; y++){ percentageSum += crossPercentages[y]; } for (int y = 0; y < crossPercentages.length; y++){ fixedCross[y] = crossPercentages[y]/percentageSum; }*/ haplos[gap][i].addCrossovers(crossPercentages); multilocusTable[i] = crossPercentages; } //sort based on "straight line" crossings int hilimit; int lolimit; if (haplos[gap+1].length > haplos[gap].length) { hilimit = haplos[gap+1].length; lolimit = haplos[gap].length; }else{ hilimit = haplos[gap].length; lolimit = haplos[gap+1].length; } boolean[] unavailable = new boolean[hilimit]; int[] prevBlockLocs = new int[haplos[gap].length]; for (int q = 0; q < prevBlockLocs.length; q++){ prevBlockLocs[haplos[gap][q].getListOrder()] = q; } for (int u = 0; u < haplos[gap+1].length; u++){ double currentBestVal = 0; int currentBestLoc = -1; for (int v = 0; v < lolimit; v++){ if (!(unavailable[v])){ if (haplos[gap][prevBlockLocs[v]].getCrossover(u) >= currentBestVal) { currentBestLoc = haplos[gap][prevBlockLocs[v]].getListOrder(); currentBestVal = haplos[gap][prevBlockLocs[v]].getCrossover(u); } } } //it didn't get lined up with any of the previous block's markers //put it at the end of the list if (currentBestLoc == -1){ for (int v = 0; v < unavailable.length; v++){ if (!(unavailable[v])){ currentBestLoc = v; break; } } } haplos[gap+1][u].setListOrder(currentBestLoc); unavailable[currentBestLoc] = true; } //compute multilocus D' for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ rowSum[i] += multilocusTable[i][j]; colSum[j] += multilocusTable[i][j]; multilocusTotal += multilocusTable[i][j]; if (rowSum[i] == 0) rowSum[i] = 0.0001; if (colSum[j] == 0) colSum[j] = 0.0001; } } double multidprime = 0; boolean noDivByZero = false; for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ double num = (multilocusTable[i][j]/multilocusTotal) - 
(rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom; if (num < 0){ double denom1 = (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(1.0 - (colSum[j]/multilocusTotal)); if (denom1 < denom2) { denom = denom1; }else{ denom = denom2; } }else{ double denom1 = (rowSum[i]/multilocusTotal)*(1.0 -(colSum[j]/multilocusTotal)); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(colSum[j]/multilocusTotal); if (denom1 < denom2){ denom = denom1; }else{ denom = denom2; } } if (denom != 0){ noDivByZero = true; multidprime += (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal)*Math.abs(num/denom); } } } if (noDivByZero){ multidprimeArray[gap] = multidprime; }else{ multidprimeArray[gap] = 1.00; } } return haplos; }
1,110,779
Haplotype[][] generateCrossovers(Haplotype[][] haplos) throws HaploViewException{ Vector crossBlock = new Vector(); double CROSSOVER_THRESHOLD = 0.001; //to what percentage do we want to consider crossings? if (haplos.length == 0) return null; //seed first block with ordering numbers for (int u = 0; u < haplos[0].length; u++){ haplos[0][u].setListOrder(u); } for (int i = 0; i < haplos.length; i++){ haplos[i][0].clearTags(); } multidprimeArray = new double[haplos.length]; //get "tag" SNPS if there is only one block: if (haplos.length==1){ Vector theBestSubset = getBestSubset(haplos[0]); for (int i = 0; i < theBestSubset.size(); i++){ haplos[0][0].addTag(((Integer)theBestSubset.elementAt(i)).intValue()); } } for (int gap = 0; gap < haplos.length - 1; gap++){ //compute crossovers for each inter-block gap Vector preGapSubset = getBestSubset(haplos[gap]); Vector postGapSubset = getBestSubset(haplos[gap+1]); int[] preMarkerID = haplos[gap][0].getMarkers(); //index haplos to markers in whole dataset int[] postMarkerID = haplos[gap+1][0].getMarkers(); crossBlock.clear(); //make a "block" of the markers which id the pre- and post- gap haps for (int i = 0; i < preGapSubset.size(); i++){ crossBlock.add(new Integer(preMarkerID[((Integer)preGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap][0].addTag(((Integer)preGapSubset.elementAt(i)).intValue()); } for (int i = 0; i < postGapSubset.size(); i++){ crossBlock.add(new Integer(postMarkerID[((Integer)postGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap+1][0].addTag(((Integer)postGapSubset.elementAt(i)).intValue()); } Vector inputVector = new Vector(); int[] intArray = new int[crossBlock.size()]; for (int i = 0; i < crossBlock.size(); i++){ //input format for hap generating routine intArray[i] = ((Integer)crossBlock.elementAt(i)).intValue(); } inputVector.add(intArray); Haplotype[] crossHaplos = generateHaplotypes(inputVector,true)[0]; //get haplos of gap double[][] multilocusTable = new 
double[haplos[gap].length][]; double[] rowSum = new double[haplos[gap].length]; double[] colSum = new double[haplos[gap+1].length]; double multilocusTotal = 0; for (int i = 0; i < haplos[gap].length; i++){ double[] crossPercentages = new double[haplos[gap+1].length]; StringBuffer firstHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string out of uniquely identifying genotypes for this hap firstHapCodeB.append(haplos[gap][i].getGeno()[((Integer)preGapSubset.elementAt(j)).intValue()]); } String firstHapCode = firstHapCodeB.toString(); for (int gapHaplo = 0; gapHaplo < crossHaplos.length; gapHaplo++){ //look at each crossover hap if (crossHaplos[gapHaplo].getPercentage() > CROSSOVER_THRESHOLD){ StringBuffer gapBeginHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string as above gapBeginHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapBeginHapCode = gapBeginHapCodeB.toString(); if (gapBeginHapCode.equals(firstHapCode)){ //if this crossover hap corresponds to this pregap hap StringBuffer gapEndHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = preGapSubset.size(); j < crossHaplos[gapHaplo].getGeno().length; j++){ gapEndHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapEndHapCode = gapEndHapCodeB.toString(); for (int j = 0; j < haplos[gap+1].length; j++){ StringBuffer endHapCodeB = new StringBuffer(); for (int k = 0; k < postGapSubset.size(); k++){ endHapCodeB.append(haplos[gap+1][j].getGeno()[((Integer)postGapSubset.elementAt(k)).intValue()]); } String endHapCode = endHapCodeB.toString(); if (gapEndHapCode.equals(endHapCode)){ crossPercentages[j] = crossHaplos[gapHaplo].getPercentage(); } } } } } //thought i needed to fix these percentages, but the raw values are just as good. 
/* double percentageSum = 0; double[] fixedCross = new double[crossPercentages.length]; for (int y = 0; y < crossPercentages.length; y++){ percentageSum += crossPercentages[y]; } for (int y = 0; y < crossPercentages.length; y++){ fixedCross[y] = crossPercentages[y]/percentageSum; }*/ haplos[gap][i].addCrossovers(crossPercentages); multilocusTable[i] = crossPercentages; } //sort based on "straight line" crossings int hilimit; int lolimit; if (haplos[gap+1].length > haplos[gap].length) { hilimit = haplos[gap+1].length; lolimit = haplos[gap].length; }else{ hilimit = haplos[gap].length; lolimit = haplos[gap+1].length; } boolean[] unavailable = new boolean[hilimit]; int[] prevBlockLocs = new int[haplos[gap].length]; for (int q = 0; q < prevBlockLocs.length; q++){ prevBlockLocs[haplos[gap][q].getListOrder()] = q; } for (int u = 0; u < haplos[gap+1].length; u++){ double currentBestVal = 0; int currentBestLoc = -1; for (int v = 0; v < lolimit; v++){ if (!(unavailable[v])){ if (haplos[gap][prevBlockLocs[v]].getCrossover(u) >= currentBestVal) { currentBestLoc = haplos[gap][prevBlockLocs[v]].getListOrder(); currentBestVal = haplos[gap][prevBlockLocs[v]].getCrossover(u); } } } //it didn't get lined up with any of the previous block's markers //put it at the end of the list if (currentBestLoc == -1){ for (int v = 0; v < unavailable.length; v++){ if (!(unavailable[v])){ currentBestLoc = v; break; } } } haplos[gap+1][u].setListOrder(currentBestLoc); unavailable[currentBestLoc] = true; } //compute multilocus D' for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ rowSum[i] += multilocusTable[i][j]; colSum[j] += multilocusTable[i][j]; multilocusTotal += multilocusTable[i][j]; if (rowSum[i] == 0) rowSum[i] = 0.0001; if (colSum[j] == 0) colSum[j] = 0.0001; } } double multidprime = 0; boolean noDivByZero = false; for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ double num = (multilocusTable[i][j]/multilocusTotal) - 
(rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom; if (num < 0){ double denom1 = (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(1.0 - (colSum[j]/multilocusTotal)); if (denom1 < denom2) { denom = denom1; }else{ denom = denom2; } }else{ double denom1 = (rowSum[i]/multilocusTotal)*(1.0 -(colSum[j]/multilocusTotal)); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(colSum[j]/multilocusTotal); if (denom1 < denom2){ denom = denom1; }else{ denom = denom2; } } if (denom != 0){ noDivByZero = true; multidprime += (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal)*Math.abs(num/denom); } } } if (noDivByZero){ multidprimeArray[gap] = multidprime; }else{ multidprimeArray[gap] = 1.00; } } return haplos; }
Haplotype[][] generateCrossovers(Haplotype[][] haplos) throws HaploViewException{ Vector crossBlock = new Vector(); double CROSSOVER_THRESHOLD = 0.001; //to what percentage do we want to consider crossings? if (haplos.length == 0) return null; //seed first block with ordering numbers for (int u = 0; u < haplos[0].length; u++){ haplos[0][u].setListOrder(u); } for (int i = 0; i < haplos.length; i++){ haplos[i][0].clearTags(); } multidprimeArray = new double[haplos.length]; //get "tag" SNPS if there is only one block: if (haplos.length==1){ Vector theBestSubset = getBestSubset(haplos[0]); for (int i = 0; i < theBestSubset.size(); i++){ haplos[0][0].addTag(((Integer)theBestSubset.elementAt(i)).intValue()); } } for (int gap = 0; gap < haplos.length - 1; gap++){ //compute crossovers for each inter-block gap Vector preGapSubset = getBestSubset(haplos[gap]); Vector postGapSubset = getBestSubset(haplos[gap+1]); int[] preMarkerID = haplos[gap][0].getMarkers(); //index haplos to markers in whole dataset int[] postMarkerID = haplos[gap+1][0].getMarkers(); crossBlock.clear(); //make a "block" of the markers which id the pre- and post- gap haps for (int i = 0; i < preGapSubset.size(); i++){ crossBlock.add(new Integer(preMarkerID[((Integer)preGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap][0].addTag(((Integer)preGapSubset.elementAt(i)).intValue()); } for (int i = 0; i < postGapSubset.size(); i++){ crossBlock.add(new Integer(postMarkerID[((Integer)postGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap+1][0].addTag(((Integer)postGapSubset.elementAt(i)).intValue()); } Vector inputVector = new Vector(); int[] intArray = new int[crossBlock.size()]; for (int i = 0; i < crossBlock.size(); i++){ //input format for hap generating routine intArray[i] = ((Integer)crossBlock.elementAt(i)).intValue(); } inputVector.add(intArray); Haplotype[] crossHaplos = generateHaplotypes(inputVector,true)[0]; //get haplos of gap double[][] multilocusTable = new 
double[haplos[gap].length][]; double[] rowSum = new double[haplos[gap].length]; double[] colSum = new double[haplos[gap+1].length]; double multilocusTotal = 0; for (int i = 0; i < haplos[gap].length; i++){ double[] crossPercentages = new double[haplos[gap+1].length]; StringBuffer firstHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string out of uniquely identifying genotypes for this hap firstHapCodeB.append(haplos[gap][i].getGeno()[((Integer)preGapSubset.elementAt(j)).intValue()]); } String firstHapCode = firstHapCodeB.toString(); for (int gapHaplo = 0; gapHaplo < crossHaplos.length; gapHaplo++){ //look at each crossover hap if (crossHaplos[gapHaplo].getPercentage() > CROSSOVER_THRESHOLD){ StringBuffer gapBeginHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string as above gapBeginHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapBeginHapCode = gapBeginHapCodeB.toString(); if (gapBeginHapCode.equals(firstHapCode)){ //if this crossover hap corresponds to this pregap hap StringBuffer gapEndHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = preGapSubset.size(); j < crossHaplos[gapHaplo].getGeno().length; j++){ gapEndHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapEndHapCode = gapEndHapCodeB.toString(); for (int j = 0; j < haplos[gap+1].length; j++){ StringBuffer endHapCodeB = new StringBuffer(); for (int k = 0; k < postGapSubset.size(); k++){ endHapCodeB.append(haplos[gap+1][j].getGeno()[((Integer)postGapSubset.elementAt(k)).intValue()]); } String endHapCode = endHapCodeB.toString(); if (gapEndHapCode.equals(endHapCode)){ crossPercentages[j] = crossHaplos[gapHaplo].getPercentage(); } } } } } //thought i needed to fix these percentages, but the raw values are just as good. 
/* double percentageSum = 0; double[] fixedCross = new double[crossPercentages.length]; for (int y = 0; y < crossPercentages.length; y++){ percentageSum += crossPercentages[y]; } for (int y = 0; y < crossPercentages.length; y++){ fixedCross[y] = crossPercentages[y]/percentageSum; }*/ haplos[gap][i].addCrossovers(crossPercentages); multilocusTable[i] = crossPercentages; } //sort based on "straight line" crossings int hilimit; int lolimit; if (haplos[gap+1].length > haplos[gap].length) { hilimit = haplos[gap+1].length; lolimit = haplos[gap].length; }else{ hilimit = haplos[gap].length; lolimit = haplos[gap+1].length; } boolean[] unavailable = new boolean[hilimit]; int[] prevBlockLocs = new int[haplos[gap].length]; for (int q = 0; q < prevBlockLocs.length; q++){ prevBlockLocs[haplos[gap][q].getListOrder()] = q; } for (int u = 0; u < haplos[gap+1].length; u++){ double currentBestVal = 0; int currentBestLoc = -1; for (int v = 0; v < lolimit; v++){ if (!(unavailable[v])){ if (haplos[gap][prevBlockLocs[v]].getCrossover(u) >= currentBestVal) { currentBestLoc = haplos[gap][prevBlockLocs[v]].getListOrder(); currentBestVal = haplos[gap][prevBlockLocs[v]].getCrossover(u); } } } //it didn't get lined up with any of the previous block's markers //put it at the end of the list if (currentBestLoc == -1){ for (int v = 0; v < unavailable.length; v++){ if (!(unavailable[v])){ currentBestLoc = v; break; } } } haplos[gap+1][u].setListOrder(currentBestLoc); unavailable[currentBestLoc] = true; } //compute multilocus D' for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ rowSum[i] += multilocusTable[i][j]; colSum[j] += multilocusTable[i][j]; multilocusTotal += multilocusTable[i][j]; if (rowSum[i] == 0) rowSum[i] = 0.0001; if (colSum[j] == 0) colSum[j] = 0.0001; } } double multidprime = 0; boolean noDivByZero = false; for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ double num = (multilocusTable[i][j]/multilocusTotal) - 
(rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom; if (num < 0){ double denom1 = (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(1.0 - (colSum[j]/multilocusTotal)); if (denom1 < denom2) { denom = denom1; }else{ denom = denom2; } }else{ double denom1 = (rowSum[i]/multilocusTotal)*(1.0 -(colSum[j]/multilocusTotal)); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(colSum[j]/multilocusTotal); if (denom1 < denom2){ denom = denom1; }else{ denom = denom2; } } if (denom != 0){ noDivByZero = true; multidprime += (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal)*Math.abs(num/denom); } } } if (noDivByZero){ multidprimeArray[gap] = multidprime; }else{ multidprimeArray[gap] = 1.00; } } return haplos; }
1,110,780
Haplotype[][] generateHaplotypes(Vector blocks, boolean crossover) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] preFiltBlock = (int[])blocks.elementAt(k); int[] theBlock; int[] selectedMarkers = new int[0]; int[] equivClass = new int[0]; if (preFiltBlock.length > 30){ equivClass = new int[preFiltBlock.length]; int classCounter = 0; for (int x = 0; x < preFiltBlock.length; x++){ int marker1 = preFiltBlock[x]; //already been lumped into an equivalency class if (equivClass[x] != 0){ continue; } //start a new equivalency class for this SNP classCounter ++; equivClass[x] = classCounter; for (int y = x+1; y < preFiltBlock.length; y++){ int marker2 = preFiltBlock[y]; if (marker1 > marker2){ int tmp = marker1; marker1 = marker2; marker2 = tmp; } if ( dpTable.getLDStats(marker1,marker2) != null && dpTable.getLDStats(marker1,marker2).getRSquared() == 1.0){ //these two SNPs are redundant equivClass[y] = classCounter; } } } //parse equivalency classes selectedMarkers = new int[classCounter]; for (int x = 0; x < selectedMarkers.length; x++){ selectedMarkers[x] = -1; } for (int x = 0; x < classCounter; x++){ double genoPC = 1.0; for (int y = 0; y < equivClass.length; y++){ if (equivClass[y] == x+1){ if (percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]] <= genoPC){ selectedMarkers[x] = preFiltBlock[y]; genoPC = percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]]; } } } } theBlock = selectedMarkers; }else{ theBlock = preFiltBlock; } //kirby patch EM theEM = new EM(chromosomes,numTrios); theEM.doEM(theBlock); //int p = 0; Haplotype[] tempArray = new Haplotype[theEM.numHaplos()]; int[][] returnedHaplos = theEM.getHaplotypes(); double[] returnedFreqs = theEM.getFrequencies(); for (int i = 0; i < theEM.numHaplos(); i++){ int[] 
genos = new int[returnedHaplos[i].length]; for (int j = 0; j < genos.length; j++){ if (returnedHaplos[i][j] == 1){ genos[j] = Chromosome.getMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getMarker(theBlock[j]).getMinor(); } } } if (selectedMarkers.length > 0){ //we need to reassemble the haplotypes Hashtable hapsHash = new Hashtable(); //add to hash all the genotypes we phased for (int q = 0; q < genos.length; q++){ hapsHash.put(new Integer(theBlock[q]), new Integer(genos[q])); } //now add all the genotypes we didn't bother phasing, based on //which marker they are identical to for (int q = 0; q < equivClass.length; q++){ int currentClass = equivClass[q]-1; if (selectedMarkers[currentClass] == preFiltBlock[q]){ //we alredy added the phased genotypes above continue; } int indexIntoBlock=0; for (int x = 0; x < theBlock.length; x++){ if (theBlock[x] == selectedMarkers[currentClass]){ indexIntoBlock = x; break; } } //this (somewhat laboriously) reconstructs whether to add the minor or major allele //for markers with MAF close to 0.50 we can't use major/minor alleles to match //'em up 'cause these might change given missing data if (Chromosome.getMarker(selectedMarkers[currentClass]).getMAF() > 0.4){ for (int z = 0; z < chromosomes.size(); z++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(z); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++z); int theGeno = thisChrom.getGenotype(selectedMarkers[currentClass]); int nextGeno = nextChrom.getGenotype(selectedMarkers[currentClass]); if (theGeno == nextGeno && theGeno == genos[indexIntoBlock] && thisChrom.getGenotype(preFiltBlock[q]) != 0){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(thisChrom.getGenotype(preFiltBlock[q]))); } } }else{ if (Chromosome.getMarker(selectedMarkers[currentClass]).getMajor() == genos[indexIntoBlock]){ hapsHash.put(new Integer(preFiltBlock[q]), new 
Integer(Chromosome.getMarker(preFiltBlock[q]).getMajor())); }else{ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMinor())); } } } genos = new int[preFiltBlock.length]; for (int q = 0; q < preFiltBlock.length; q++){ genos[q] = ((Integer)hapsHash.get(new Integer(preFiltBlock[q]))).intValue(); } } //if (tempPerc*100 > hapthresh){ tempArray[i] = new Haplotype(genos, returnedFreqs[i], preFiltBlock); //if we are performing association tests, then store the results if (Options.getAssocTest() == ASSOC_TRIO){ tempArray[i].setTransCount(theEM.getTransCount(i)); tempArray[i].setUntransCount(theEM.getUntransCount(i)); }else if (Options.getAssocTest() == ASSOC_CC){ tempArray[i].setCaseFreq(theEM.getCaseFreq(i)); tempArray[i].setControlFreq(theEM.getControlFreq(i)); } //p++; //} } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[theEM.numHaplos()]; for (int z = 0; z < theEM.numHaplos(); z++){ results[k][z] = tempArray[z]; } } if (!crossover){ haplotypes = results; } return results; }
Haplotype[][] generateHaplotypes(Vector blocks, boolean crossover) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] rawHaplotypes = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] preFiltBlock = (int[])blocks.elementAt(k); int[] theBlock; int[] selectedMarkers = new int[0]; int[] equivClass = new int[0]; if (preFiltBlock.length > 30){ equivClass = new int[preFiltBlock.length]; int classCounter = 0; for (int x = 0; x < preFiltBlock.length; x++){ int marker1 = preFiltBlock[x]; //already been lumped into an equivalency class if (equivClass[x] != 0){ continue; } //start a new equivalency class for this SNP classCounter ++; equivClass[x] = classCounter; for (int y = x+1; y < preFiltBlock.length; y++){ int marker2 = preFiltBlock[y]; if (marker1 > marker2){ int tmp = marker1; marker1 = marker2; marker2 = tmp; } if ( dpTable.getLDStats(marker1,marker2) != null && dpTable.getLDStats(marker1,marker2).getRSquared() == 1.0){ //these two SNPs are redundant equivClass[y] = classCounter; } } } //parse equivalency classes selectedMarkers = new int[classCounter]; for (int x = 0; x < selectedMarkers.length; x++){ selectedMarkers[x] = -1; } for (int x = 0; x < classCounter; x++){ double genoPC = 1.0; for (int y = 0; y < equivClass.length; y++){ if (equivClass[y] == x+1){ if (percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]] <= genoPC){ selectedMarkers[x] = preFiltBlock[y]; genoPC = percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]]; } } } } theBlock = selectedMarkers; }else{ theBlock = preFiltBlock; } //kirby patch EM theEM = new EM(chromosomes,numTrios); theEM.doEM(theBlock); //int p = 0; Haplotype[] tempArray = new Haplotype[theEM.numHaplos()]; int[][] returnedHaplos = theEM.getHaplotypes(); double[] returnedFreqs = theEM.getFrequencies(); for (int i = 0; i < theEM.numHaplos(); i++){ 
int[] genos = new int[returnedHaplos[i].length]; for (int j = 0; j < genos.length; j++){ if (returnedHaplos[i][j] == 1){ genos[j] = Chromosome.getMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getMarker(theBlock[j]).getMinor(); } } } if (selectedMarkers.length > 0){ //we need to reassemble the haplotypes Hashtable hapsHash = new Hashtable(); //add to hash all the genotypes we phased for (int q = 0; q < genos.length; q++){ hapsHash.put(new Integer(theBlock[q]), new Integer(genos[q])); } //now add all the genotypes we didn't bother phasing, based on //which marker they are identical to for (int q = 0; q < equivClass.length; q++){ int currentClass = equivClass[q]-1; if (selectedMarkers[currentClass] == preFiltBlock[q]){ //we alredy added the phased genotypes above continue; } int indexIntoBlock=0; for (int x = 0; x < theBlock.length; x++){ if (theBlock[x] == selectedMarkers[currentClass]){ indexIntoBlock = x; break; } } //this (somewhat laboriously) reconstructs whether to add the minor or major allele //for markers with MAF close to 0.50 we can't use major/minor alleles to match //'em up 'cause these might change given missing data if (Chromosome.getMarker(selectedMarkers[currentClass]).getMAF() > 0.4){ for (int z = 0; z < chromosomes.size(); z++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(z); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++z); int theGeno = thisChrom.getGenotype(selectedMarkers[currentClass]); int nextGeno = nextChrom.getGenotype(selectedMarkers[currentClass]); if (theGeno == nextGeno && theGeno == genos[indexIntoBlock] && thisChrom.getGenotype(preFiltBlock[q]) != 0){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(thisChrom.getGenotype(preFiltBlock[q]))); } } }else{ if (Chromosome.getMarker(selectedMarkers[currentClass]).getMajor() == genos[indexIntoBlock]){ hapsHash.put(new Integer(preFiltBlock[q]), new 
Integer(Chromosome.getMarker(preFiltBlock[q]).getMajor())); }else{ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMinor())); } } } genos = new int[preFiltBlock.length]; for (int q = 0; q < preFiltBlock.length; q++){ genos[q] = ((Integer)hapsHash.get(new Integer(preFiltBlock[q]))).intValue(); } } //if (tempPerc*100 > hapthresh){ tempArray[i] = new Haplotype(genos, returnedFreqs[i], preFiltBlock); //if we are performing association tests, then store the results if (Options.getAssocTest() == ASSOC_TRIO){ tempArray[i].setTransCount(theEM.getTransCount(i)); tempArray[i].setUntransCount(theEM.getUntransCount(i)); }else if (Options.getAssocTest() == ASSOC_CC){ tempArray[i].setCaseFreq(theEM.getCaseFreq(i)); tempArray[i].setControlFreq(theEM.getControlFreq(i)); } //p++; //} } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[theEM.numHaplos()]; for (int z = 0; z < theEM.numHaplos(); z++){ results[k][z] = tempArray[z]; } } if (!crossover){ haplotypes = results; } return results; }
1,110,781
Haplotype[][] generateHaplotypes(Vector blocks, boolean crossover) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] preFiltBlock = (int[])blocks.elementAt(k); int[] theBlock; int[] selectedMarkers = new int[0]; int[] equivClass = new int[0]; if (preFiltBlock.length > 30){ equivClass = new int[preFiltBlock.length]; int classCounter = 0; for (int x = 0; x < preFiltBlock.length; x++){ int marker1 = preFiltBlock[x]; //already been lumped into an equivalency class if (equivClass[x] != 0){ continue; } //start a new equivalency class for this SNP classCounter ++; equivClass[x] = classCounter; for (int y = x+1; y < preFiltBlock.length; y++){ int marker2 = preFiltBlock[y]; if (marker1 > marker2){ int tmp = marker1; marker1 = marker2; marker2 = tmp; } if ( dpTable.getLDStats(marker1,marker2) != null && dpTable.getLDStats(marker1,marker2).getRSquared() == 1.0){ //these two SNPs are redundant equivClass[y] = classCounter; } } } //parse equivalency classes selectedMarkers = new int[classCounter]; for (int x = 0; x < selectedMarkers.length; x++){ selectedMarkers[x] = -1; } for (int x = 0; x < classCounter; x++){ double genoPC = 1.0; for (int y = 0; y < equivClass.length; y++){ if (equivClass[y] == x+1){ if (percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]] <= genoPC){ selectedMarkers[x] = preFiltBlock[y]; genoPC = percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]]; } } } } theBlock = selectedMarkers; }else{ theBlock = preFiltBlock; } //kirby patch EM theEM = new EM(chromosomes,numTrios); theEM.doEM(theBlock); //int p = 0; Haplotype[] tempArray = new Haplotype[theEM.numHaplos()]; int[][] returnedHaplos = theEM.getHaplotypes(); double[] returnedFreqs = theEM.getFrequencies(); for (int i = 0; i < theEM.numHaplos(); i++){ int[] 
genos = new int[returnedHaplos[i].length]; for (int j = 0; j < genos.length; j++){ if (returnedHaplos[i][j] == 1){ genos[j] = Chromosome.getMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getMarker(theBlock[j]).getMinor(); } } } if (selectedMarkers.length > 0){ //we need to reassemble the haplotypes Hashtable hapsHash = new Hashtable(); //add to hash all the genotypes we phased for (int q = 0; q < genos.length; q++){ hapsHash.put(new Integer(theBlock[q]), new Integer(genos[q])); } //now add all the genotypes we didn't bother phasing, based on //which marker they are identical to for (int q = 0; q < equivClass.length; q++){ int currentClass = equivClass[q]-1; if (selectedMarkers[currentClass] == preFiltBlock[q]){ //we alredy added the phased genotypes above continue; } int indexIntoBlock=0; for (int x = 0; x < theBlock.length; x++){ if (theBlock[x] == selectedMarkers[currentClass]){ indexIntoBlock = x; break; } } //this (somewhat laboriously) reconstructs whether to add the minor or major allele //for markers with MAF close to 0.50 we can't use major/minor alleles to match //'em up 'cause these might change given missing data if (Chromosome.getMarker(selectedMarkers[currentClass]).getMAF() > 0.4){ for (int z = 0; z < chromosomes.size(); z++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(z); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++z); int theGeno = thisChrom.getGenotype(selectedMarkers[currentClass]); int nextGeno = nextChrom.getGenotype(selectedMarkers[currentClass]); if (theGeno == nextGeno && theGeno == genos[indexIntoBlock] && thisChrom.getGenotype(preFiltBlock[q]) != 0){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(thisChrom.getGenotype(preFiltBlock[q]))); } } }else{ if (Chromosome.getMarker(selectedMarkers[currentClass]).getMajor() == genos[indexIntoBlock]){ hapsHash.put(new Integer(preFiltBlock[q]), new 
Integer(Chromosome.getMarker(preFiltBlock[q]).getMajor())); }else{ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMinor())); } } } genos = new int[preFiltBlock.length]; for (int q = 0; q < preFiltBlock.length; q++){ genos[q] = ((Integer)hapsHash.get(new Integer(preFiltBlock[q]))).intValue(); } } //if (tempPerc*100 > hapthresh){ tempArray[i] = new Haplotype(genos, returnedFreqs[i], preFiltBlock); //if we are performing association tests, then store the results if (Options.getAssocTest() == ASSOC_TRIO){ tempArray[i].setTransCount(theEM.getTransCount(i)); tempArray[i].setUntransCount(theEM.getUntransCount(i)); }else if (Options.getAssocTest() == ASSOC_CC){ tempArray[i].setCaseFreq(theEM.getCaseFreq(i)); tempArray[i].setControlFreq(theEM.getControlFreq(i)); } //p++; //} } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[theEM.numHaplos()]; for (int z = 0; z < theEM.numHaplos(); z++){ results[k][z] = tempArray[z]; } } if (!crossover){ haplotypes = results; } return results; }
Haplotype[][] generateHaplotypes(Vector blocks, boolean crossover) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] preFiltBlock = (int[])blocks.elementAt(k); int[] theBlock; int[] selectedMarkers = new int[0]; int[] equivClass = new int[0]; if (preFiltBlock.length > 30){ equivClass = new int[preFiltBlock.length]; int classCounter = 0; for (int x = 0; x < preFiltBlock.length; x++){ int marker1 = preFiltBlock[x]; //already been lumped into an equivalency class if (equivClass[x] != 0){ continue; } //start a new equivalency class for this SNP classCounter ++; equivClass[x] = classCounter; for (int y = x+1; y < preFiltBlock.length; y++){ int marker2 = preFiltBlock[y]; if (marker1 > marker2){ int tmp = marker1; marker1 = marker2; marker2 = tmp; } if ( dpTable.getLDStats(marker1,marker2) != null && dpTable.getLDStats(marker1,marker2).getRSquared() == 1.0){ //these two SNPs are redundant equivClass[y] = classCounter; } } } //parse equivalency classes selectedMarkers = new int[classCounter]; for (int x = 0; x < selectedMarkers.length; x++){ selectedMarkers[x] = -1; } for (int x = 0; x < classCounter; x++){ double genoPC = 1.0; for (int y = 0; y < equivClass.length; y++){ if (equivClass[y] == x+1){ if (percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]] <= genoPC){ selectedMarkers[x] = preFiltBlock[y]; genoPC = percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]]; } } } } theBlock = selectedMarkers; }else{ theBlock = preFiltBlock; } //kirby patch EM theEM = new EM(chromosomes,numTrios); theEM.doEM(theBlock); //int p = 0; Haplotype[] tempArray = new Haplotype[theEM.numHaplos()]; int[][] returnedHaplos = theEM.getHaplotypes(); double[] returnedFreqs = theEM.getFrequencies(); for (int i = 0; i < theEM.numHaplos(); i++){ int[] 
genos = new int[returnedHaplos[i].length]; for (int j = 0; j < genos.length; j++){ if (returnedHaplos[i][j] == 1){ genos[j] = Chromosome.getMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getMarker(theBlock[j]).getMinor(); } } } if (selectedMarkers.length > 0){ //we need to reassemble the haplotypes Hashtable hapsHash = new Hashtable(); //add to hash all the genotypes we phased for (int q = 0; q < genos.length; q++){ hapsHash.put(new Integer(theBlock[q]), new Integer(genos[q])); } //now add all the genotypes we didn't bother phasing, based on //which marker they are identical to for (int q = 0; q < equivClass.length; q++){ int currentClass = equivClass[q]-1; if (selectedMarkers[currentClass] == preFiltBlock[q]){ //we alredy added the phased genotypes above continue; } int indexIntoBlock=0; for (int x = 0; x < theBlock.length; x++){ if (theBlock[x] == selectedMarkers[currentClass]){ indexIntoBlock = x; break; } } //this (somewhat laboriously) reconstructs whether to add the minor or major allele //for markers with MAF close to 0.50 we can't use major/minor alleles to match //'em up 'cause these might change given missing data if (Chromosome.getMarker(selectedMarkers[currentClass]).getMAF() > 0.4){ for (int z = 0; z < chromosomes.size(); z++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(z); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++z); int theGeno = thisChrom.getGenotype(selectedMarkers[currentClass]); int nextGeno = nextChrom.getGenotype(selectedMarkers[currentClass]); if (theGeno == nextGeno && theGeno == genos[indexIntoBlock] && thisChrom.getGenotype(preFiltBlock[q]) != 0){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(thisChrom.getGenotype(preFiltBlock[q]))); } } }else{ if (Chromosome.getMarker(selectedMarkers[currentClass]).getMajor() == genos[indexIntoBlock]){ hapsHash.put(new Integer(preFiltBlock[q]), new 
Integer(Chromosome.getMarker(preFiltBlock[q]).getMajor())); }else{ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMinor())); } } } genos = new int[preFiltBlock.length]; for (int q = 0; q < preFiltBlock.length; q++){ genos[q] = ((Integer)hapsHash.get(new Integer(preFiltBlock[q]))).intValue(); } } //if (tempPerc*100 > hapthresh){ tempArray[i] = new Haplotype(genos, returnedFreqs[i], preFiltBlock); //if we are performing association tests, then store the results if (Options.getAssocTest() == ASSOC_TRIO){ tempArray[i].setTransCount(theEM.getTransCount(i)); tempArray[i].setUntransCount(theEM.getUntransCount(i)); }else if (Options.getAssocTest() == ASSOC_CC){ tempArray[i].setCaseFreq(theEM.getCaseFreq(i)); tempArray[i].setControlFreq(theEM.getControlFreq(i)); } //p++; //} } //make the results array only large enough to hold haps //which pass threshold above rawHaplotypes[k] = new Haplotype[theEM.numHaplos()]; for (int z = 0; z < theEM.numHaplos(); z++){ results[k][z] = tempArray[z]; } } if (!crossover){ haplotypes = results; } return results; }
1,110,782
Haplotype[][] generateHaplotypes(Vector blocks, boolean crossover) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] preFiltBlock = (int[])blocks.elementAt(k); int[] theBlock; int[] selectedMarkers = new int[0]; int[] equivClass = new int[0]; if (preFiltBlock.length > 30){ equivClass = new int[preFiltBlock.length]; int classCounter = 0; for (int x = 0; x < preFiltBlock.length; x++){ int marker1 = preFiltBlock[x]; //already been lumped into an equivalency class if (equivClass[x] != 0){ continue; } //start a new equivalency class for this SNP classCounter ++; equivClass[x] = classCounter; for (int y = x+1; y < preFiltBlock.length; y++){ int marker2 = preFiltBlock[y]; if (marker1 > marker2){ int tmp = marker1; marker1 = marker2; marker2 = tmp; } if ( dpTable.getLDStats(marker1,marker2) != null && dpTable.getLDStats(marker1,marker2).getRSquared() == 1.0){ //these two SNPs are redundant equivClass[y] = classCounter; } } } //parse equivalency classes selectedMarkers = new int[classCounter]; for (int x = 0; x < selectedMarkers.length; x++){ selectedMarkers[x] = -1; } for (int x = 0; x < classCounter; x++){ double genoPC = 1.0; for (int y = 0; y < equivClass.length; y++){ if (equivClass[y] == x+1){ if (percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]] <= genoPC){ selectedMarkers[x] = preFiltBlock[y]; genoPC = percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]]; } } } } theBlock = selectedMarkers; }else{ theBlock = preFiltBlock; } //kirby patch EM theEM = new EM(chromosomes,numTrios); theEM.doEM(theBlock); //int p = 0; Haplotype[] tempArray = new Haplotype[theEM.numHaplos()]; int[][] returnedHaplos = theEM.getHaplotypes(); double[] returnedFreqs = theEM.getFrequencies(); for (int i = 0; i < theEM.numHaplos(); i++){ int[] 
genos = new int[returnedHaplos[i].length]; for (int j = 0; j < genos.length; j++){ if (returnedHaplos[i][j] == 1){ genos[j] = Chromosome.getMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getMarker(theBlock[j]).getMinor(); } } } if (selectedMarkers.length > 0){ //we need to reassemble the haplotypes Hashtable hapsHash = new Hashtable(); //add to hash all the genotypes we phased for (int q = 0; q < genos.length; q++){ hapsHash.put(new Integer(theBlock[q]), new Integer(genos[q])); } //now add all the genotypes we didn't bother phasing, based on //which marker they are identical to for (int q = 0; q < equivClass.length; q++){ int currentClass = equivClass[q]-1; if (selectedMarkers[currentClass] == preFiltBlock[q]){ //we alredy added the phased genotypes above continue; } int indexIntoBlock=0; for (int x = 0; x < theBlock.length; x++){ if (theBlock[x] == selectedMarkers[currentClass]){ indexIntoBlock = x; break; } } //this (somewhat laboriously) reconstructs whether to add the minor or major allele //for markers with MAF close to 0.50 we can't use major/minor alleles to match //'em up 'cause these might change given missing data if (Chromosome.getMarker(selectedMarkers[currentClass]).getMAF() > 0.4){ for (int z = 0; z < chromosomes.size(); z++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(z); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++z); int theGeno = thisChrom.getGenotype(selectedMarkers[currentClass]); int nextGeno = nextChrom.getGenotype(selectedMarkers[currentClass]); if (theGeno == nextGeno && theGeno == genos[indexIntoBlock] && thisChrom.getGenotype(preFiltBlock[q]) != 0){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(thisChrom.getGenotype(preFiltBlock[q]))); } } }else{ if (Chromosome.getMarker(selectedMarkers[currentClass]).getMajor() == genos[indexIntoBlock]){ hapsHash.put(new Integer(preFiltBlock[q]), new 
Integer(Chromosome.getMarker(preFiltBlock[q]).getMajor())); }else{ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMinor())); } } } genos = new int[preFiltBlock.length]; for (int q = 0; q < preFiltBlock.length; q++){ genos[q] = ((Integer)hapsHash.get(new Integer(preFiltBlock[q]))).intValue(); } } //if (tempPerc*100 > hapthresh){ tempArray[i] = new Haplotype(genos, returnedFreqs[i], preFiltBlock); //if we are performing association tests, then store the results if (Options.getAssocTest() == ASSOC_TRIO){ tempArray[i].setTransCount(theEM.getTransCount(i)); tempArray[i].setUntransCount(theEM.getUntransCount(i)); }else if (Options.getAssocTest() == ASSOC_CC){ tempArray[i].setCaseFreq(theEM.getCaseFreq(i)); tempArray[i].setControlFreq(theEM.getControlFreq(i)); } //p++; //} } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[theEM.numHaplos()]; for (int z = 0; z < theEM.numHaplos(); z++){ results[k][z] = tempArray[z]; } } if (!crossover){ haplotypes = results; } return results; }
Haplotype[][] generateHaplotypes(Vector blocks, boolean crossover) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] preFiltBlock = (int[])blocks.elementAt(k); int[] theBlock; int[] selectedMarkers = new int[0]; int[] equivClass = new int[0]; if (preFiltBlock.length > 30){ equivClass = new int[preFiltBlock.length]; int classCounter = 0; for (int x = 0; x < preFiltBlock.length; x++){ int marker1 = preFiltBlock[x]; //already been lumped into an equivalency class if (equivClass[x] != 0){ continue; } //start a new equivalency class for this SNP classCounter ++; equivClass[x] = classCounter; for (int y = x+1; y < preFiltBlock.length; y++){ int marker2 = preFiltBlock[y]; if (marker1 > marker2){ int tmp = marker1; marker1 = marker2; marker2 = tmp; } if ( dpTable.getLDStats(marker1,marker2) != null && dpTable.getLDStats(marker1,marker2).getRSquared() == 1.0){ //these two SNPs are redundant equivClass[y] = classCounter; } } } //parse equivalency classes selectedMarkers = new int[classCounter]; for (int x = 0; x < selectedMarkers.length; x++){ selectedMarkers[x] = -1; } for (int x = 0; x < classCounter; x++){ double genoPC = 1.0; for (int y = 0; y < equivClass.length; y++){ if (equivClass[y] == x+1){ if (percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]] <= genoPC){ selectedMarkers[x] = preFiltBlock[y]; genoPC = percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]]; } } } } theBlock = selectedMarkers; }else{ theBlock = preFiltBlock; } //kirby patch EM theEM = new EM(chromosomes,numTrios); theEM.doEM(theBlock); //int p = 0; Haplotype[] tempArray = new Haplotype[theEM.numHaplos()]; int[][] returnedHaplos = theEM.getHaplotypes(); double[] returnedFreqs = theEM.getFrequencies(); for (int i = 0; i < theEM.numHaplos(); i++){ int[] 
genos = new int[returnedHaplos[i].length]; for (int j = 0; j < genos.length; j++){ if (returnedHaplos[i][j] == 1){ genos[j] = Chromosome.getMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getMarker(theBlock[j]).getMinor(); } } } if (selectedMarkers.length > 0){ //we need to reassemble the haplotypes Hashtable hapsHash = new Hashtable(); //add to hash all the genotypes we phased for (int q = 0; q < genos.length; q++){ hapsHash.put(new Integer(theBlock[q]), new Integer(genos[q])); } //now add all the genotypes we didn't bother phasing, based on //which marker they are identical to for (int q = 0; q < equivClass.length; q++){ int currentClass = equivClass[q]-1; if (selectedMarkers[currentClass] == preFiltBlock[q]){ //we alredy added the phased genotypes above continue; } int indexIntoBlock=0; for (int x = 0; x < theBlock.length; x++){ if (theBlock[x] == selectedMarkers[currentClass]){ indexIntoBlock = x; break; } } //this (somewhat laboriously) reconstructs whether to add the minor or major allele //for markers with MAF close to 0.50 we can't use major/minor alleles to match //'em up 'cause these might change given missing data if (Chromosome.getMarker(selectedMarkers[currentClass]).getMAF() > 0.4){ for (int z = 0; z < chromosomes.size(); z++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(z); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++z); int theGeno = thisChrom.getGenotype(selectedMarkers[currentClass]); int nextGeno = nextChrom.getGenotype(selectedMarkers[currentClass]); if (theGeno == nextGeno && theGeno == genos[indexIntoBlock] && thisChrom.getGenotype(preFiltBlock[q]) != 0){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(thisChrom.getGenotype(preFiltBlock[q]))); } } }else{ if (Chromosome.getMarker(selectedMarkers[currentClass]).getMajor() == genos[indexIntoBlock]){ hapsHash.put(new Integer(preFiltBlock[q]), new 
Integer(Chromosome.getMarker(preFiltBlock[q]).getMajor())); }else{ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMinor())); } } } genos = new int[preFiltBlock.length]; for (int q = 0; q < preFiltBlock.length; q++){ genos[q] = ((Integer)hapsHash.get(new Integer(preFiltBlock[q]))).intValue(); } } //if (tempPerc*100 > hapthresh){ tempArray[i] = new Haplotype(genos, returnedFreqs[i], preFiltBlock); //if we are performing association tests, then store the results if (Options.getAssocTest() == ASSOC_TRIO){ tempArray[i].setTransCount(theEM.getTransCount(i)); tempArray[i].setUntransCount(theEM.getUntransCount(i)); }else if (Options.getAssocTest() == ASSOC_CC){ tempArray[i].setCaseFreq(theEM.getCaseFreq(i)); tempArray[i].setControlFreq(theEM.getControlFreq(i)); } //p++; //} } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[theEM.numHaplos()]; for (int z = 0; z < theEM.numHaplos(); z++){ rawHaplotypes[k][z] = tempArray[z]; } } if (!crossover){ haplotypes = results; } return results; }
1,110,783
Haplotype[][] generateHaplotypes(Vector blocks, boolean crossover) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] preFiltBlock = (int[])blocks.elementAt(k); int[] theBlock; int[] selectedMarkers = new int[0]; int[] equivClass = new int[0]; if (preFiltBlock.length > 30){ equivClass = new int[preFiltBlock.length]; int classCounter = 0; for (int x = 0; x < preFiltBlock.length; x++){ int marker1 = preFiltBlock[x]; //already been lumped into an equivalency class if (equivClass[x] != 0){ continue; } //start a new equivalency class for this SNP classCounter ++; equivClass[x] = classCounter; for (int y = x+1; y < preFiltBlock.length; y++){ int marker2 = preFiltBlock[y]; if (marker1 > marker2){ int tmp = marker1; marker1 = marker2; marker2 = tmp; } if ( dpTable.getLDStats(marker1,marker2) != null && dpTable.getLDStats(marker1,marker2).getRSquared() == 1.0){ //these two SNPs are redundant equivClass[y] = classCounter; } } } //parse equivalency classes selectedMarkers = new int[classCounter]; for (int x = 0; x < selectedMarkers.length; x++){ selectedMarkers[x] = -1; } for (int x = 0; x < classCounter; x++){ double genoPC = 1.0; for (int y = 0; y < equivClass.length; y++){ if (equivClass[y] == x+1){ if (percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]] <= genoPC){ selectedMarkers[x] = preFiltBlock[y]; genoPC = percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]]; } } } } theBlock = selectedMarkers; }else{ theBlock = preFiltBlock; } //kirby patch EM theEM = new EM(chromosomes,numTrios); theEM.doEM(theBlock); //int p = 0; Haplotype[] tempArray = new Haplotype[theEM.numHaplos()]; int[][] returnedHaplos = theEM.getHaplotypes(); double[] returnedFreqs = theEM.getFrequencies(); for (int i = 0; i < theEM.numHaplos(); i++){ int[] 
genos = new int[returnedHaplos[i].length]; for (int j = 0; j < genos.length; j++){ if (returnedHaplos[i][j] == 1){ genos[j] = Chromosome.getMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getMarker(theBlock[j]).getMinor(); } } } if (selectedMarkers.length > 0){ //we need to reassemble the haplotypes Hashtable hapsHash = new Hashtable(); //add to hash all the genotypes we phased for (int q = 0; q < genos.length; q++){ hapsHash.put(new Integer(theBlock[q]), new Integer(genos[q])); } //now add all the genotypes we didn't bother phasing, based on //which marker they are identical to for (int q = 0; q < equivClass.length; q++){ int currentClass = equivClass[q]-1; if (selectedMarkers[currentClass] == preFiltBlock[q]){ //we alredy added the phased genotypes above continue; } int indexIntoBlock=0; for (int x = 0; x < theBlock.length; x++){ if (theBlock[x] == selectedMarkers[currentClass]){ indexIntoBlock = x; break; } } //this (somewhat laboriously) reconstructs whether to add the minor or major allele //for markers with MAF close to 0.50 we can't use major/minor alleles to match //'em up 'cause these might change given missing data if (Chromosome.getMarker(selectedMarkers[currentClass]).getMAF() > 0.4){ for (int z = 0; z < chromosomes.size(); z++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(z); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++z); int theGeno = thisChrom.getGenotype(selectedMarkers[currentClass]); int nextGeno = nextChrom.getGenotype(selectedMarkers[currentClass]); if (theGeno == nextGeno && theGeno == genos[indexIntoBlock] && thisChrom.getGenotype(preFiltBlock[q]) != 0){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(thisChrom.getGenotype(preFiltBlock[q]))); } } }else{ if (Chromosome.getMarker(selectedMarkers[currentClass]).getMajor() == genos[indexIntoBlock]){ hapsHash.put(new Integer(preFiltBlock[q]), new 
Integer(Chromosome.getMarker(preFiltBlock[q]).getMajor())); }else{ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMinor())); } } } genos = new int[preFiltBlock.length]; for (int q = 0; q < preFiltBlock.length; q++){ genos[q] = ((Integer)hapsHash.get(new Integer(preFiltBlock[q]))).intValue(); } } //if (tempPerc*100 > hapthresh){ tempArray[i] = new Haplotype(genos, returnedFreqs[i], preFiltBlock); //if we are performing association tests, then store the results if (Options.getAssocTest() == ASSOC_TRIO){ tempArray[i].setTransCount(theEM.getTransCount(i)); tempArray[i].setUntransCount(theEM.getUntransCount(i)); }else if (Options.getAssocTest() == ASSOC_CC){ tempArray[i].setCaseFreq(theEM.getCaseFreq(i)); tempArray[i].setControlFreq(theEM.getControlFreq(i)); } //p++; //} } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[theEM.numHaplos()]; for (int z = 0; z < theEM.numHaplos(); z++){ results[k][z] = tempArray[z]; } } if (!crossover){ haplotypes = results; } return results; }
Haplotype[][] generateHaplotypes(Vector blocks, boolean crossover) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] preFiltBlock = (int[])blocks.elementAt(k); int[] theBlock; int[] selectedMarkers = new int[0]; int[] equivClass = new int[0]; if (preFiltBlock.length > 30){ equivClass = new int[preFiltBlock.length]; int classCounter = 0; for (int x = 0; x < preFiltBlock.length; x++){ int marker1 = preFiltBlock[x]; //already been lumped into an equivalency class if (equivClass[x] != 0){ continue; } //start a new equivalency class for this SNP classCounter ++; equivClass[x] = classCounter; for (int y = x+1; y < preFiltBlock.length; y++){ int marker2 = preFiltBlock[y]; if (marker1 > marker2){ int tmp = marker1; marker1 = marker2; marker2 = tmp; } if ( dpTable.getLDStats(marker1,marker2) != null && dpTable.getLDStats(marker1,marker2).getRSquared() == 1.0){ //these two SNPs are redundant equivClass[y] = classCounter; } } } //parse equivalency classes selectedMarkers = new int[classCounter]; for (int x = 0; x < selectedMarkers.length; x++){ selectedMarkers[x] = -1; } for (int x = 0; x < classCounter; x++){ double genoPC = 1.0; for (int y = 0; y < equivClass.length; y++){ if (equivClass[y] == x+1){ if (percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]] <= genoPC){ selectedMarkers[x] = preFiltBlock[y]; genoPC = percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]]; } } } } theBlock = selectedMarkers; }else{ theBlock = preFiltBlock; } //kirby patch EM theEM = new EM(chromosomes,numTrios); theEM.doEM(theBlock); //int p = 0; Haplotype[] tempArray = new Haplotype[theEM.numHaplos()]; int[][] returnedHaplos = theEM.getHaplotypes(); double[] returnedFreqs = theEM.getFrequencies(); for (int i = 0; i < theEM.numHaplos(); i++){ int[] 
genos = new int[returnedHaplos[i].length]; for (int j = 0; j < genos.length; j++){ if (returnedHaplos[i][j] == 1){ genos[j] = Chromosome.getMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getMarker(theBlock[j]).getMinor(); } } } if (selectedMarkers.length > 0){ //we need to reassemble the haplotypes Hashtable hapsHash = new Hashtable(); //add to hash all the genotypes we phased for (int q = 0; q < genos.length; q++){ hapsHash.put(new Integer(theBlock[q]), new Integer(genos[q])); } //now add all the genotypes we didn't bother phasing, based on //which marker they are identical to for (int q = 0; q < equivClass.length; q++){ int currentClass = equivClass[q]-1; if (selectedMarkers[currentClass] == preFiltBlock[q]){ //we alredy added the phased genotypes above continue; } int indexIntoBlock=0; for (int x = 0; x < theBlock.length; x++){ if (theBlock[x] == selectedMarkers[currentClass]){ indexIntoBlock = x; break; } } //this (somewhat laboriously) reconstructs whether to add the minor or major allele //for markers with MAF close to 0.50 we can't use major/minor alleles to match //'em up 'cause these might change given missing data if (Chromosome.getMarker(selectedMarkers[currentClass]).getMAF() > 0.4){ for (int z = 0; z < chromosomes.size(); z++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(z); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++z); int theGeno = thisChrom.getGenotype(selectedMarkers[currentClass]); int nextGeno = nextChrom.getGenotype(selectedMarkers[currentClass]); if (theGeno == nextGeno && theGeno == genos[indexIntoBlock] && thisChrom.getGenotype(preFiltBlock[q]) != 0){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(thisChrom.getGenotype(preFiltBlock[q]))); } } }else{ if (Chromosome.getMarker(selectedMarkers[currentClass]).getMajor() == genos[indexIntoBlock]){ hapsHash.put(new Integer(preFiltBlock[q]), new 
Integer(Chromosome.getMarker(preFiltBlock[q]).getMajor())); }else{ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMinor())); } } } genos = new int[preFiltBlock.length]; for (int q = 0; q < preFiltBlock.length; q++){ genos[q] = ((Integer)hapsHash.get(new Integer(preFiltBlock[q]))).intValue(); } } //if (tempPerc*100 > hapthresh){ tempArray[i] = new Haplotype(genos, returnedFreqs[i], preFiltBlock); //if we are performing association tests, then store the results if (Options.getAssocTest() == ASSOC_TRIO){ tempArray[i].setTransCount(theEM.getTransCount(i)); tempArray[i].setUntransCount(theEM.getUntransCount(i)); }else if (Options.getAssocTest() == ASSOC_CC){ tempArray[i].setCaseFreq(theEM.getCaseFreq(i)); tempArray[i].setControlFreq(theEM.getControlFreq(i)); } //p++; //} } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[theEM.numHaplos()]; for (int z = 0; z < theEM.numHaplos(); z++){ results[k][z] = tempArray[z]; } } if (!crossover){ haplotypes = new Haplotype[rawHaplotypes.length][]; for (int i = 0; i < rawHaplotypes.length; i++) { Vector orderedHaps = new Vector(); for (int hapCount = 0; hapCount < rawHaplotypes[i].length; hapCount++) { if (orderedHaps.size() == 0) { orderedHaps.add(rawHaplotypes[i][hapCount]); } else { for (int j = 0; j < orderedHaps.size(); j++) { if (((Haplotype)(orderedHaps.elementAt(j))).getPercentage() < rawHaplotypes[i][hapCount].getPercentage()) { orderedHaps.add(j, rawHaplotypes[i][hapCount]); break; } if ((j+1) == orderedHaps.size()) { orderedHaps.add(rawHaplotypes[i][hapCount]); break; } } } } haplotypes[i] = new Haplotype[orderedHaps.size()]; orderedHaps.copyInto(haplotypes[i]); } haplotypes = generateCrossovers(haplotypes); return haplotypes; } return results; }
1,110,784
Haplotype[][] generateHaplotypes(Vector blocks, boolean crossover) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] preFiltBlock = (int[])blocks.elementAt(k); int[] theBlock; int[] selectedMarkers = new int[0]; int[] equivClass = new int[0]; if (preFiltBlock.length > 30){ equivClass = new int[preFiltBlock.length]; int classCounter = 0; for (int x = 0; x < preFiltBlock.length; x++){ int marker1 = preFiltBlock[x]; //already been lumped into an equivalency class if (equivClass[x] != 0){ continue; } //start a new equivalency class for this SNP classCounter ++; equivClass[x] = classCounter; for (int y = x+1; y < preFiltBlock.length; y++){ int marker2 = preFiltBlock[y]; if (marker1 > marker2){ int tmp = marker1; marker1 = marker2; marker2 = tmp; } if ( dpTable.getLDStats(marker1,marker2) != null && dpTable.getLDStats(marker1,marker2).getRSquared() == 1.0){ //these two SNPs are redundant equivClass[y] = classCounter; } } } //parse equivalency classes selectedMarkers = new int[classCounter]; for (int x = 0; x < selectedMarkers.length; x++){ selectedMarkers[x] = -1; } for (int x = 0; x < classCounter; x++){ double genoPC = 1.0; for (int y = 0; y < equivClass.length; y++){ if (equivClass[y] == x+1){ if (percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]] <= genoPC){ selectedMarkers[x] = preFiltBlock[y]; genoPC = percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]]; } } } } theBlock = selectedMarkers; }else{ theBlock = preFiltBlock; } //kirby patch EM theEM = new EM(chromosomes,numTrios); theEM.doEM(theBlock); //int p = 0; Haplotype[] tempArray = new Haplotype[theEM.numHaplos()]; int[][] returnedHaplos = theEM.getHaplotypes(); double[] returnedFreqs = theEM.getFrequencies(); for (int i = 0; i < theEM.numHaplos(); i++){ int[] 
genos = new int[returnedHaplos[i].length]; for (int j = 0; j < genos.length; j++){ if (returnedHaplos[i][j] == 1){ genos[j] = Chromosome.getMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getMarker(theBlock[j]).getMinor(); } } } if (selectedMarkers.length > 0){ //we need to reassemble the haplotypes Hashtable hapsHash = new Hashtable(); //add to hash all the genotypes we phased for (int q = 0; q < genos.length; q++){ hapsHash.put(new Integer(theBlock[q]), new Integer(genos[q])); } //now add all the genotypes we didn't bother phasing, based on //which marker they are identical to for (int q = 0; q < equivClass.length; q++){ int currentClass = equivClass[q]-1; if (selectedMarkers[currentClass] == preFiltBlock[q]){ //we alredy added the phased genotypes above continue; } int indexIntoBlock=0; for (int x = 0; x < theBlock.length; x++){ if (theBlock[x] == selectedMarkers[currentClass]){ indexIntoBlock = x; break; } } //this (somewhat laboriously) reconstructs whether to add the minor or major allele //for markers with MAF close to 0.50 we can't use major/minor alleles to match //'em up 'cause these might change given missing data if (Chromosome.getMarker(selectedMarkers[currentClass]).getMAF() > 0.4){ for (int z = 0; z < chromosomes.size(); z++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(z); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++z); int theGeno = thisChrom.getGenotype(selectedMarkers[currentClass]); int nextGeno = nextChrom.getGenotype(selectedMarkers[currentClass]); if (theGeno == nextGeno && theGeno == genos[indexIntoBlock] && thisChrom.getGenotype(preFiltBlock[q]) != 0){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(thisChrom.getGenotype(preFiltBlock[q]))); } } }else{ if (Chromosome.getMarker(selectedMarkers[currentClass]).getMajor() == genos[indexIntoBlock]){ hapsHash.put(new Integer(preFiltBlock[q]), new 
Integer(Chromosome.getMarker(preFiltBlock[q]).getMajor())); }else{ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMinor())); } } } genos = new int[preFiltBlock.length]; for (int q = 0; q < preFiltBlock.length; q++){ genos[q] = ((Integer)hapsHash.get(new Integer(preFiltBlock[q]))).intValue(); } } //if (tempPerc*100 > hapthresh){ tempArray[i] = new Haplotype(genos, returnedFreqs[i], preFiltBlock); //if we are performing association tests, then store the results if (Options.getAssocTest() == ASSOC_TRIO){ tempArray[i].setTransCount(theEM.getTransCount(i)); tempArray[i].setUntransCount(theEM.getUntransCount(i)); }else if (Options.getAssocTest() == ASSOC_CC){ tempArray[i].setCaseFreq(theEM.getCaseFreq(i)); tempArray[i].setControlFreq(theEM.getControlFreq(i)); } //p++; //} } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[theEM.numHaplos()]; for (int z = 0; z < theEM.numHaplos(); z++){ results[k][z] = tempArray[z]; } } if (!crossover){ haplotypes = results; } return results; }
Haplotype[][] generateHaplotypes(Vector blocks, boolean crossover) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] preFiltBlock = (int[])blocks.elementAt(k); int[] theBlock; int[] selectedMarkers = new int[0]; int[] equivClass = new int[0]; if (preFiltBlock.length > 30){ equivClass = new int[preFiltBlock.length]; int classCounter = 0; for (int x = 0; x < preFiltBlock.length; x++){ int marker1 = preFiltBlock[x]; //already been lumped into an equivalency class if (equivClass[x] != 0){ continue; } //start a new equivalency class for this SNP classCounter ++; equivClass[x] = classCounter; for (int y = x+1; y < preFiltBlock.length; y++){ int marker2 = preFiltBlock[y]; if (marker1 > marker2){ int tmp = marker1; marker1 = marker2; marker2 = tmp; } if ( dpTable.getLDStats(marker1,marker2) != null && dpTable.getLDStats(marker1,marker2).getRSquared() == 1.0){ //these two SNPs are redundant equivClass[y] = classCounter; } } } //parse equivalency classes selectedMarkers = new int[classCounter]; for (int x = 0; x < selectedMarkers.length; x++){ selectedMarkers[x] = -1; } for (int x = 0; x < classCounter; x++){ double genoPC = 1.0; for (int y = 0; y < equivClass.length; y++){ if (equivClass[y] == x+1){ if (percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]] <= genoPC){ selectedMarkers[x] = preFiltBlock[y]; genoPC = percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]]; } } } } theBlock = selectedMarkers; }else{ theBlock = preFiltBlock; } //kirby patch EM theEM = new EM(chromosomes,numTrios); theEM.doEM(theBlock); //int p = 0; Haplotype[] tempArray = new Haplotype[theEM.numHaplos()]; int[][] returnedHaplos = theEM.getHaplotypes(); double[] returnedFreqs = theEM.getFrequencies(); for (int i = 0; i < theEM.numHaplos(); i++){ int[] 
genos = new int[returnedHaplos[i].length]; for (int j = 0; j < genos.length; j++){ if (returnedHaplos[i][j] == 1){ genos[j] = Chromosome.getMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getMarker(theBlock[j]).getMinor(); } } } if (selectedMarkers.length > 0){ //we need to reassemble the haplotypes Hashtable hapsHash = new Hashtable(); //add to hash all the genotypes we phased for (int q = 0; q < genos.length; q++){ hapsHash.put(new Integer(theBlock[q]), new Integer(genos[q])); } //now add all the genotypes we didn't bother phasing, based on //which marker they are identical to for (int q = 0; q < equivClass.length; q++){ int currentClass = equivClass[q]-1; if (selectedMarkers[currentClass] == preFiltBlock[q]){ //we alredy added the phased genotypes above continue; } int indexIntoBlock=0; for (int x = 0; x < theBlock.length; x++){ if (theBlock[x] == selectedMarkers[currentClass]){ indexIntoBlock = x; break; } } //this (somewhat laboriously) reconstructs whether to add the minor or major allele //for markers with MAF close to 0.50 we can't use major/minor alleles to match //'em up 'cause these might change given missing data if (Chromosome.getMarker(selectedMarkers[currentClass]).getMAF() > 0.4){ for (int z = 0; z < chromosomes.size(); z++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(z); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++z); int theGeno = thisChrom.getGenotype(selectedMarkers[currentClass]); int nextGeno = nextChrom.getGenotype(selectedMarkers[currentClass]); if (theGeno == nextGeno && theGeno == genos[indexIntoBlock] && thisChrom.getGenotype(preFiltBlock[q]) != 0){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(thisChrom.getGenotype(preFiltBlock[q]))); } } }else{ if (Chromosome.getMarker(selectedMarkers[currentClass]).getMajor() == genos[indexIntoBlock]){ hapsHash.put(new Integer(preFiltBlock[q]), new 
Integer(Chromosome.getMarker(preFiltBlock[q]).getMajor())); }else{ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMinor())); } } } genos = new int[preFiltBlock.length]; for (int q = 0; q < preFiltBlock.length; q++){ genos[q] = ((Integer)hapsHash.get(new Integer(preFiltBlock[q]))).intValue(); } } //if (tempPerc*100 > hapthresh){ tempArray[i] = new Haplotype(genos, returnedFreqs[i], preFiltBlock); //if we are performing association tests, then store the results if (Options.getAssocTest() == ASSOC_TRIO){ tempArray[i].setTransCount(theEM.getTransCount(i)); tempArray[i].setUntransCount(theEM.getUntransCount(i)); }else if (Options.getAssocTest() == ASSOC_CC){ tempArray[i].setCaseFreq(theEM.getCaseFreq(i)); tempArray[i].setControlFreq(theEM.getControlFreq(i)); } //p++; //} } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[theEM.numHaplos()]; for (int z = 0; z < theEM.numHaplos(); z++){ results[k][z] = tempArray[z]; } } if (!crossover){ haplotypes = results; } return rawHaplotypes; }
1,110,785
public Tag createTag(String name, Attributes attributes) throws JellyException { return new MenuManagerTag(); }
public Tag createTag(String name, Attributes attributes) throws JellyException { return new MenuManagerTag(); }
1,110,787
public Tag createTag(String name, Attributes attributes) throws JellyException { return new ViewerTag(theclass, style); }
public Tag createTag(String name, Attributes attributes) throws JellyException { return new ViewerTag(theclass, style); }
1,110,788
public void mouseMoved(MouseEvent evt) { if (rubberBand != null) { // repaint old region in case of shrinkage Rectangle dirtyRegion = zoomRect(new Rectangle(rubberBand)); Point p = unzoomPoint(new Point(evt.getPoint())); rubberBand.setBounds(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); rubberBand.add(p); // update selected items Rectangle temp = new Rectangle(); // avoids multiple allocations in getBounds for (int i = 0, n = contentPane.getComponentCount(); i < n; i++) { Component c = contentPane.getComponent(i); if (c instanceof Selectable) { ((Selectable) c).setSelected(rubberBand.intersects(c.getBounds(temp))); } } // Add the new rubberband to the dirty region and grow // it in case the line is thick due to extreme zoom dirtyRegion.add(zoomRect(new Rectangle(rubberBand))); dirtyRegion.x -= 3; dirtyRegion.y -= 3; dirtyRegion.width += 6; dirtyRegion.height += 6; repaint(dirtyRegion); } else { retargetToContentPane(evt); } }
public void mouseMoved(MouseEvent evt) { if (rubberBand != null) { // repaint old region in case of shrinkage Rectangle dirtyRegion = zoomRect(new Rectangle(rubberBand)); Point p = unzoomPoint(new Point(evt.getPoint())); rubberBand.setBounds(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); rubberBand.add(p); // update selected items Rectangle temp = new Rectangle(); // avoids multiple allocations in getBounds for (int i = 0, n = contentPane.getComponentCount(); i < n; i++) { Component c = contentPane.getComponent(i); if (c instanceof Relationship) { ((Relationship) c).setSelected(((Relationship) c).intersects(rubberBand)); } else if (c instanceof Selectable) { ((Selectable) c).setSelected(rubberBand.intersects(c.getBounds(temp))); } } // Add the new rubberband to the dirty region and grow // it in case the line is thick due to extreme zoom dirtyRegion.add(zoomRect(new Rectangle(rubberBand))); dirtyRegion.x -= 3; dirtyRegion.y -= 3; dirtyRegion.width += 6; dirtyRegion.height += 6; repaint(dirtyRegion); } else { retargetToContentPane(evt); } }
1,110,789
public void mouseReleased(MouseEvent evt) { if (rubberBand != null) { if (evt.getButton() == MouseEvent.BUTTON1) { Rectangle dirtyRegion = rubberBand; rubberBandOrigin = null; rubberBand = null; repaint(zoomRect(new Rectangle(dirtyRegion))); } } else if (!retargetToContentPane(evt)) { //((PlayPen) evt.getSource()).selectNone(); maybeShowPopup(evt); } }
public void mouseReleased(MouseEvent evt) { if (rubberBand != null) { if (evt.getButton() == MouseEvent.BUTTON1) { Rectangle dirtyRegion = new Rectangle(rubberBand); dirtyRegion.width += (int) (dirtyRegion.width * 0.1); dirtyRegion.height += (int) (dirtyRegion.height * 0.1); rubberBandOrigin = null; rubberBand = null; repaint(zoomRect(new Rectangle(dirtyRegion))); } } else if (!retargetToContentPane(evt)) { //((PlayPen) evt.getSource()).selectNone(); maybeShowPopup(evt); } }
1,110,790
public void mouseReleased(MouseEvent evt) { if (rubberBand != null) { if (evt.getButton() == MouseEvent.BUTTON1) { Rectangle dirtyRegion = rubberBand; rubberBandOrigin = null; rubberBand = null; repaint(zoomRect(new Rectangle(dirtyRegion))); } } else if (!retargetToContentPane(evt)) { //((PlayPen) evt.getSource()).selectNone(); maybeShowPopup(evt); } }
public void mouseReleased(MouseEvent evt) { if (rubberBand != null) { if (evt.getButton() == MouseEvent.BUTTON1) { Rectangle dirtyRegion = rubberBand; rubberBandOrigin = null; rubberBand = null; repaint(zoomRect(dirtyRegion)); } } else if (!retargetToContentPane(evt)) { //((PlayPen) evt.getSource()).selectNone(); maybeShowPopup(evt); } }
1,110,791
public Object convert(Class type, Object value) { if ( value != null ) { String text = value.toString(); StringTokenizer enum = new StringTokenizer( text, "," ); int x = 0; int y = 0; if ( enum.hasMoreTokens() ) { x = parseNumber( enum.nextToken() ); } if ( enum.hasMoreTokens() ) { y = parseNumber( enum.nextToken() ); }
public Object convert(Class type, Object value) { if ( value != null ) { String text = value.toString(); StringTokenizer pointEnum = new StringTokenizer( text, "," ); int x = 0; int y = 0; if ( enum.hasMoreTokens() ) { x = parseNumber( enum.nextToken() ); } if ( enum.hasMoreTokens() ) { y = parseNumber( enum.nextToken() ); }
1,110,794
public Object convert(Class type, Object value) { if ( value != null ) { String text = value.toString(); StringTokenizer enum = new StringTokenizer( text, "," ); int x = 0; int y = 0; if ( enum.hasMoreTokens() ) { x = parseNumber( enum.nextToken() ); } if ( enum.hasMoreTokens() ) { y = parseNumber( enum.nextToken() ); }
public Object convert(Class type, Object value) { if ( value != null ) { String text = value.toString(); StringTokenizer enum = new StringTokenizer( text, "," ); int x = 0; int y = 0; if ( enum.hasMoreTokens() ) { x = parseNumber( enum.nextToken() ); } if ( enum.hasMoreTokens() ) { y = parseNumber( enum.nextToken() ); }
1,110,795
public Object convert(Class type, Object value) { if ( value != null ) { String text = value.toString(); StringTokenizer enum = new StringTokenizer( text, "," ); int x = 0; int y = 0; if ( enum.hasMoreTokens() ) { x = parseNumber( enum.nextToken() ); } if ( enum.hasMoreTokens() ) { y = parseNumber( enum.nextToken() ); }
public Object convert(Class type, Object value) { if ( value != null ) { String text = value.toString(); StringTokenizer enum = new StringTokenizer( text, "," ); int x = 0; int y = 0; if ( enum.hasMoreTokens() ) { x = parseNumber( enum.nextToken() ); } if ( enum.hasMoreTokens() ) { y = parseNumber( enum.nextToken() ); }
1,110,796
public FileScannerTag(FileScanner fileScanner) { this.fileScanner = fileScanner; this.dynaBean = new ConvertingWrapDynaBean(fileScanner); }
public FileScannerTag(FileScanner fileScanner) { this.fileScanner = fileScanner; }
1,110,798
public Vector linkageToChrom(File infile, int type) throws IllegalArgumentException, HaploViewException, PedFileException, IOException{ pedFile = new PedFile(); if (type == PED_FILE){ pedFile.parseLinkage(infile); }else{ pedFile.parseHapMap(infile); } Vector result = pedFile.check(); Vector indList = pedFile.getUnrelatedIndividuals(); Vector indsInTrio = new Vector(); int numMarkers = 0; numSingletons = 0; numTrios = 0; numPeds = pedFile.getNumFamilies(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); byte[] zeroArray = {0,0}; //first time through we deal with trios. for(int x=0; x < indList.size(); x++){ currentInd = (Individual)indList.get(x); currentFamily = pedFile.getFamily(currentInd.getFamilyID()); if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //if indiv has both parents Individual mom = currentFamily.getMember(currentInd.getMomID()); Individual dad = currentFamily.getMember(currentInd.getDadID()); if (indList.contains(mom) && indList.contains(dad)){ numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ byte[] thisMarker; if (currentInd.getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = currentInd.getMarker(i); } byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; if (currentFamily.getMember(currentInd.getMomID()).getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); } byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; if (currentFamily.getMember(currentInd.getDadID()).getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = (currentFamily.getMember(currentInd.getDadID())).getMarker(i); } byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = 
dad1; dadUb[i] = dad1; } else if (dad1 != 0 && dad2 != 0) { dadTb[i] = (byte)(4+dad1); dadUb[i] = (byte)(4+dad2); } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else if (mom1 != 0 && mom2 != 0){ momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = (byte)(4+dad1); dadUb[i] = (byte)(4+dad2); momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = (byte)(4+dad1); dadUb[i] = 
(byte)(4+dad2); momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb, dad.getAffectedStatus(), currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb, dad.getAffectedStatus(),currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb, mom.getAffectedStatus(),currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb, mom.getAffectedStatus(),currentInd.getAffectedStatus())); numTrios++; indsInTrio.add(mom); indsInTrio.add(dad); indsInTrio.add(currentInd); } } } for (int x=0; x<indList.size(); x++){ currentInd = (Individual)indList.get(x); if (!indsInTrio.contains(currentInd)){ //ind has no parents or kids -- he's a singleton numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ byte[] thisMarker; if (currentInd.getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = currentInd.getMarker(i); } if (thisMarker[0] == thisMarker[1] || thisMarker[0] == 0 || thisMarker[1] == 0){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = (byte)(4+thisMarker[0]); chrom2[i] = (byte)(4+thisMarker[1]); } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1, currentInd.getAffectedStatus(), -1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2,currentInd.getAffectedStatus(), -1)); numSingletons++; } } chromosomes = chrom; //wipe clean any existing marker info so we know we're starting with a new file Chromosome.markers = null; return result; }
public Vector linkageToChrom(File infile, int type) throws IllegalArgumentException, HaploViewException, PedFileException, IOException{ pedFile = new PedFile(); if (type == PED_FILE){ pedFile.parseLinkage(infile); }else{ pedFile.parseHapMap(infile); } Vector result = pedFile.check(); Vector indList = pedFile.getUnrelatedIndividuals(); Vector indsInTrio = new Vector(); int numMarkers = 0; numSingletons = 0; numTrios = 0; numPeds = pedFile.getNumFamilies(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); //first time through we deal with trios. for(int x=0; x < indList.size(); x++){ currentInd = (Individual)indList.get(x); currentFamily = pedFile.getFamily(currentInd.getFamilyID()); if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //if indiv has both parents Individual mom = currentFamily.getMember(currentInd.getMomID()); Individual dad = currentFamily.getMember(currentInd.getDadID()); if (indList.contains(mom) && indList.contains(dad)){ numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ byte[] thisMarker; if (currentInd.getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = currentInd.getMarker(i); } byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; if (currentFamily.getMember(currentInd.getMomID()).getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); } byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; if (currentFamily.getMember(currentInd.getDadID()).getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = (currentFamily.getMember(currentInd.getDadID())).getMarker(i); } byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; } 
else if (dad1 != 0 && dad2 != 0) { dadTb[i] = (byte)(4+dad1); dadUb[i] = (byte)(4+dad2); } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else if (mom1 != 0 && mom2 != 0){ momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = (byte)(4+dad1); dadUb[i] = (byte)(4+dad2); momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = (byte)(4+dad1); dadUb[i] = (byte)(4+dad2); momTb[i] = 
(byte)(4+mom1); momUb[i] = (byte)(4+mom2); } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb, dad.getAffectedStatus(), currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb, dad.getAffectedStatus(),currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb, mom.getAffectedStatus(),currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb, mom.getAffectedStatus(),currentInd.getAffectedStatus())); numTrios++; indsInTrio.add(mom); indsInTrio.add(dad); indsInTrio.add(currentInd); } } } for (int x=0; x<indList.size(); x++){ currentInd = (Individual)indList.get(x); if (!indsInTrio.contains(currentInd)){ //ind has no parents or kids -- he's a singleton numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ byte[] thisMarker; if (currentInd.getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = currentInd.getMarker(i); } if (thisMarker[0] == thisMarker[1] || thisMarker[0] == 0 || thisMarker[1] == 0){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = (byte)(4+thisMarker[0]); chrom2[i] = (byte)(4+thisMarker[1]); } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1, currentInd.getAffectedStatus(), -1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2,currentInd.getAffectedStatus(), -1)); numSingletons++; } } chromosomes = chrom; //wipe clean any existing marker info so we know we're starting with a new file Chromosome.markers = null; return result; }
1,110,799
public Vector linkageToChrom(File infile, int type) throws IllegalArgumentException, HaploViewException, PedFileException, IOException{ pedFile = new PedFile(); if (type == PED_FILE){ pedFile.parseLinkage(infile); }else{ pedFile.parseHapMap(infile); } Vector result = pedFile.check(); Vector indList = pedFile.getUnrelatedIndividuals(); Vector indsInTrio = new Vector(); int numMarkers = 0; numSingletons = 0; numTrios = 0; numPeds = pedFile.getNumFamilies(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); byte[] zeroArray = {0,0}; //first time through we deal with trios. for(int x=0; x < indList.size(); x++){ currentInd = (Individual)indList.get(x); currentFamily = pedFile.getFamily(currentInd.getFamilyID()); if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //if indiv has both parents Individual mom = currentFamily.getMember(currentInd.getMomID()); Individual dad = currentFamily.getMember(currentInd.getDadID()); if (indList.contains(mom) && indList.contains(dad)){ numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ byte[] thisMarker; if (currentInd.getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = currentInd.getMarker(i); } byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; if (currentFamily.getMember(currentInd.getMomID()).getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); } byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; if (currentFamily.getMember(currentInd.getDadID()).getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = (currentFamily.getMember(currentInd.getDadID())).getMarker(i); } byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = 
dad1; dadUb[i] = dad1; } else if (dad1 != 0 && dad2 != 0) { dadTb[i] = (byte)(4+dad1); dadUb[i] = (byte)(4+dad2); } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else if (mom1 != 0 && mom2 != 0){ momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = (byte)(4+dad1); dadUb[i] = (byte)(4+dad2); momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = (byte)(4+dad1); dadUb[i] = 
(byte)(4+dad2); momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb, dad.getAffectedStatus(), currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb, dad.getAffectedStatus(),currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb, mom.getAffectedStatus(),currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb, mom.getAffectedStatus(),currentInd.getAffectedStatus())); numTrios++; indsInTrio.add(mom); indsInTrio.add(dad); indsInTrio.add(currentInd); } } } for (int x=0; x<indList.size(); x++){ currentInd = (Individual)indList.get(x); if (!indsInTrio.contains(currentInd)){ //ind has no parents or kids -- he's a singleton numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ byte[] thisMarker; if (currentInd.getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = currentInd.getMarker(i); } if (thisMarker[0] == thisMarker[1] || thisMarker[0] == 0 || thisMarker[1] == 0){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = (byte)(4+thisMarker[0]); chrom2[i] = (byte)(4+thisMarker[1]); } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1, currentInd.getAffectedStatus(), -1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2,currentInd.getAffectedStatus(), -1)); numSingletons++; } } chromosomes = chrom; //wipe clean any existing marker info so we know we're starting with a new file Chromosome.markers = null; return result; }
public Vector linkageToChrom(File infile, int type) throws IllegalArgumentException, HaploViewException, PedFileException, IOException{ pedFile = new PedFile(); if (type == PED_FILE){ pedFile.parseLinkage(infile); }else{ pedFile.parseHapMap(infile); } Vector result = pedFile.check(); Vector indList = pedFile.getUnrelatedIndividuals(); Vector indsInTrio = new Vector(); int numMarkers = 0; numSingletons = 0; numTrios = 0; numPeds = pedFile.getNumFamilies(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); byte[] zeroArray = {0,0}; //first time through we deal with trios. for(int x=0; x < indList.size(); x++){ currentInd = (Individual)indList.get(x); currentFamily = pedFile.getFamily(currentInd.getFamilyID()); if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //if indiv has both parents Individual mom = currentFamily.getMember(currentInd.getMomID()); Individual dad = currentFamily.getMember(currentInd.getDadID()); if (indList.contains(mom) && indList.contains(dad)){ numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ byte kid1, kid2; if (currentInd.getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = currentInd.getMarker(i); } byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; if (currentFamily.getMember(currentInd.getMomID()).getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); } byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; if (currentFamily.getMember(currentInd.getDadID()).getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = (currentFamily.getMember(currentInd.getDadID())).getMarker(i); } byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = 
dad1; dadUb[i] = dad1; } else if (dad1 != 0 && dad2 != 0) { dadTb[i] = (byte)(4+dad1); dadUb[i] = (byte)(4+dad2); } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else if (mom1 != 0 && mom2 != 0){ momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = (byte)(4+dad1); dadUb[i] = (byte)(4+dad2); momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = (byte)(4+dad1); dadUb[i] = 
(byte)(4+dad2); momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb, dad.getAffectedStatus(), currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb, dad.getAffectedStatus(),currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb, mom.getAffectedStatus(),currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb, mom.getAffectedStatus(),currentInd.getAffectedStatus())); numTrios++; indsInTrio.add(mom); indsInTrio.add(dad); indsInTrio.add(currentInd); } } } for (int x=0; x<indList.size(); x++){ currentInd = (Individual)indList.get(x); if (!indsInTrio.contains(currentInd)){ //ind has no parents or kids -- he's a singleton numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ byte kid1, kid2; if (currentInd.getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = currentInd.getMarker(i); } if (thisMarker[0] == thisMarker[1] || thisMarker[0] == 0 || thisMarker[1] == 0){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = (byte)(4+thisMarker[0]); chrom2[i] = (byte)(4+thisMarker[1]); } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1, currentInd.getAffectedStatus(), -1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2,currentInd.getAffectedStatus(), -1)); numSingletons++; } } chromosomes = chrom; //wipe clean any existing marker info so we know we're starting with a new file Chromosome.markers = null; return result; }
1,110,800
public Vector linkageToChrom(File infile, int type) throws IllegalArgumentException, HaploViewException, PedFileException, IOException{ pedFile = new PedFile(); if (type == PED_FILE){ pedFile.parseLinkage(infile); }else{ pedFile.parseHapMap(infile); } Vector result = pedFile.check(); Vector indList = pedFile.getUnrelatedIndividuals(); Vector indsInTrio = new Vector(); int numMarkers = 0; numSingletons = 0; numTrios = 0; numPeds = pedFile.getNumFamilies(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); byte[] zeroArray = {0,0}; //first time through we deal with trios. for(int x=0; x < indList.size(); x++){ currentInd = (Individual)indList.get(x); currentFamily = pedFile.getFamily(currentInd.getFamilyID()); if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //if indiv has both parents Individual mom = currentFamily.getMember(currentInd.getMomID()); Individual dad = currentFamily.getMember(currentInd.getDadID()); if (indList.contains(mom) && indList.contains(dad)){ numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ byte[] thisMarker; if (currentInd.getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = currentInd.getMarker(i); } byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; if (currentFamily.getMember(currentInd.getMomID()).getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); } byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; if (currentFamily.getMember(currentInd.getDadID()).getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = (currentFamily.getMember(currentInd.getDadID())).getMarker(i); } byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = 
dad1; dadUb[i] = dad1; } else if (dad1 != 0 && dad2 != 0) { dadTb[i] = (byte)(4+dad1); dadUb[i] = (byte)(4+dad2); } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else if (mom1 != 0 && mom2 != 0){ momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = (byte)(4+dad1); dadUb[i] = (byte)(4+dad2); momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = (byte)(4+dad1); dadUb[i] = 
(byte)(4+dad2); momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb, dad.getAffectedStatus(), currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb, dad.getAffectedStatus(),currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb, mom.getAffectedStatus(),currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb, mom.getAffectedStatus(),currentInd.getAffectedStatus())); numTrios++; indsInTrio.add(mom); indsInTrio.add(dad); indsInTrio.add(currentInd); } } } for (int x=0; x<indList.size(); x++){ currentInd = (Individual)indList.get(x); if (!indsInTrio.contains(currentInd)){ //ind has no parents or kids -- he's a singleton numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ byte[] thisMarker; if (currentInd.getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = currentInd.getMarker(i); } if (thisMarker[0] == thisMarker[1] || thisMarker[0] == 0 || thisMarker[1] == 0){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = (byte)(4+thisMarker[0]); chrom2[i] = (byte)(4+thisMarker[1]); } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1, currentInd.getAffectedStatus(), -1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2,currentInd.getAffectedStatus(), -1)); numSingletons++; } } chromosomes = chrom; //wipe clean any existing marker info so we know we're starting with a new file Chromosome.markers = null; return result; }
/**
 * Parses a pedigree input file and converts it into phased {@code Chromosome}
 * objects stored in the {@code chromosomes} field.
 *
 * Processing happens in two passes over the unrelated individuals:
 * (1) trios — an individual whose mother and father are both present in the
 *     family and in the unrelated list contributes four parental haplotypes
 *     (dad transmitted/untransmitted, mom transmitted/untransmitted), phased
 *     from the kid/mom/dad genotypes where possible;
 * (2) singletons — everyone not consumed by a trio contributes two haplotypes
 *     of their own.
 *
 * @param infile the pedigree file to parse
 * @param type   {@code PED_FILE} for linkage format; anything else is parsed as HapMap
 * @return the Vector produced by {@code pedFile.check()} (per-marker check results)
 * @throws IllegalArgumentException propagated from parsing
 * @throws HaploViewException      propagated from parsing/checking
 * @throws PedFileException        propagated from parsing/checking
 * @throws IOException             if the file cannot be read
 */
public Vector linkageToChrom(File infile, int type)
        throws IllegalArgumentException, HaploViewException, PedFileException, IOException{
    pedFile = new PedFile();
    // Choose the parser based on the caller-supplied file type flag.
    if (type == PED_FILE){
        pedFile.parseLinkage(infile);
    }else{
        pedFile.parseHapMap(infile);
    }
    // check() both validates the data and produces the result returned to the caller.
    Vector result = pedFile.check();
    Vector indList = pedFile.getUnrelatedIndividuals();
    Vector indsInTrio = new Vector();  // individuals consumed by the trio pass (skipped in the singleton pass)
    int numMarkers = 0;
    // Reset per-file counters (instance fields).
    numSingletons = 0;
    numTrios = 0;
    numPeds = pedFile.getNumFamilies();
    Individual currentInd;
    Family currentFamily;
    Vector chrom = new Vector();  // accumulates all Chromosome objects produced below
    byte[] zeroArray = {0,0};     // stand-in genotype for markers zeroed out by QC; 0 = missing allele

    //first time through we deal with trios.
    for(int x=0; x < indList.size(); x++){
        currentInd = (Individual)indList.get(x);
        currentFamily = pedFile.getFamily(currentInd.getFamilyID());
        if (currentFamily.containsMember(currentInd.getMomID()) &&
                currentFamily.containsMember(currentInd.getDadID())){
            //if indiv has both parents
            Individual mom = currentFamily.getMember(currentInd.getMomID());
            Individual dad = currentFamily.getMember(currentInd.getDadID());
            // Only treat as a trio when both parents are themselves in the unrelated list.
            if (indList.contains(mom) && indList.contains(dad)){
                numMarkers = currentInd.getNumMarkers();
                // Parental haplotypes: T = transmitted to the kid, U = untransmitted.
                byte[] dadTb = new byte[numMarkers];
                byte[] dadUb = new byte[numMarkers];
                byte[] momTb = new byte[numMarkers];
                byte[] momUb = new byte[numMarkers];
                for (int i = 0; i < numMarkers; i++){
                    byte[] thisMarker;
                    // A zeroed (QC-failed) marker is treated as missing for that person.
                    if (currentInd.getZeroed(i)){
                        thisMarker = zeroArray;
                    }else{
                        thisMarker = currentInd.getMarker(i);
                    }
                    byte kid1 = thisMarker[0];
                    byte kid2 = thisMarker[1];
                    if (currentFamily.getMember(currentInd.getMomID()).getZeroed(i)){
                        thisMarker = zeroArray;
                    }else{
                        thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i);
                    }
                    byte mom1 = thisMarker[0];
                    byte mom2 = thisMarker[1];
                    if (currentFamily.getMember(currentInd.getDadID()).getZeroed(i)){
                        thisMarker = zeroArray;
                    }else{
                        thisMarker = (currentFamily.getMember(currentInd.getDadID())).getMarker(i);
                    }
                    byte dad1 = thisMarker[0];
                    byte dad2 = thisMarker[1];

                    // Phase resolution. Encoding used throughout: 0 = missing allele;
                    // a value of (4+allele) appears to mark an allele whose transmission
                    // phase could not be resolved — confirm against Chromosome's decoding.
                    // NOTE(review): half-missing parents fall through some branches and
                    // rely on the byte arrays' default 0 (missing) — presumably intentional.
                    if (kid1 == 0 || kid2 == 0) {
                        //kid missing
                        // No transmission info: parents keep their own genotypes,
                        // hom as-is, het marked phase-unknown (4+allele).
                        if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; }
                        else if (dad1 != 0 && dad2 != 0) { dadTb[i] = (byte)(4+dad1); dadUb[i] = (byte)(4+dad2); }
                        if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; }
                        else if (mom1 != 0 && mom2 != 0){ momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); }
                    } else if (kid1 == kid2) {
                        //kid homozygous
                        // Each parent must have transmitted kid1; the other allele is untransmitted.
                        if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; }
                        else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; }
                        else { dadTb[i] = dad2; dadUb[i] = dad1; }
                        if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; }
                        else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; }
                        else { momTb[i] = mom2; momUb[i] = mom1; }
                    } else {
                        //kid heterozygous and this if tree's a bitch
                        // Exhaustive case split on parental missingness/zygosity; the
                        // branch order matters (earlier conditions shadow later ones).
                        if (dad1 == 0 && mom1 == 0) {
                            //both missing
                            dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0;
                        } else if (dad1 == 0 && mom1 != mom2) {
                            //dad missing mom het
                            dadTb[i] = 0; dadUb[i] = 0;
                            momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2);
                        } else if (mom1 == 0 && dad1 != dad2) {
                            //dad het mom missing
                            dadTb[i] = (byte)(4+dad1); dadUb[i] = (byte)(4+dad2);
                            momTb[i] = 0; momUb[i] = 0;
                        } else if (dad1 == 0 && mom1 == mom2) {
                            //dad missing mom hom
                            // Mom must have transmitted her allele, so dad transmitted
                            // the kid's other allele; dad's untransmitted is unknown.
                            momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0;
                            if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; }
                        } else if (mom1 == 0 && dad1 == dad2) {
                            //mom missing dad hom
                            dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0;
                            if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; }
                        } else if (dad1 == dad2 && mom1 != mom2) {
                            //dad hom mom het
                            // Dad's transmission is fixed, which phases mom's het.
                            dadTb[i] = dad1; dadUb[i] = dad2;
                            if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; }
                            else { momTb[i] = kid1; momUb[i] = kid2; }
                        } else if (mom1 == mom2 && dad1 != dad2) {
                            //dad het mom hom
                            momTb[i] = mom1; momUb[i] = mom2;
                            if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; }
                            else { dadTb[i] = kid1; dadUb[i] = kid2; }
                        } else if (dad1 == dad2 && mom1 == mom2) {
                            //mom & dad hom
                            dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1;
                        } else {
                            //everybody het
                            // Fully ambiguous: mark all four alleles phase-unknown.
                            dadTb[i] = (byte)(4+dad1); dadUb[i] = (byte)(4+dad2);
                            momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2);
                        }
                    }
                }
                // Emit the four parental haplotypes tagged with parental and kid affected status.
                chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb, dad.getAffectedStatus(), currentInd.getAffectedStatus()));
                chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb, dad.getAffectedStatus(),currentInd.getAffectedStatus()));
                chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb, mom.getAffectedStatus(),currentInd.getAffectedStatus()));
                chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb, mom.getAffectedStatus(),currentInd.getAffectedStatus()));
                numTrios++;
                // Mark all three members as consumed so the singleton pass skips them.
                indsInTrio.add(mom);
                indsInTrio.add(dad);
                indsInTrio.add(currentInd);
            }
        }
    }

    // Second pass: everyone not already used in a trio is a singleton.
    for (int x=0; x<indList.size(); x++){
        currentInd = (Individual)indList.get(x);
        if (!indsInTrio.contains(currentInd)){
            //ind has no parents or kids -- he's a singleton
            numMarkers = currentInd.getNumMarkers();
            byte[] chrom1 = new byte[numMarkers];
            byte[] chrom2 = new byte[numMarkers];
            for (int i = 0; i < numMarkers; i++){
                byte[] thisMarker;
                if (currentInd.getZeroed(i)){
                    thisMarker = zeroArray;
                }else{
                    thisMarker = currentInd.getMarker(i);
                }
                // Hom or half-missing genotypes are kept as-is; a het singleton has
                // no phase information, so both alleles get the 4+allele marking.
                if (thisMarker[0] == thisMarker[1] || thisMarker[0] == 0 || thisMarker[1] == 0){
                    chrom1[i] = thisMarker[0];
                    chrom2[i] = thisMarker[1];
                }else{
                    chrom1[i] = (byte)(4+thisMarker[0]);
                    chrom2[i] = (byte)(4+thisMarker[1]);
                }
            }
            // Singleton haplotypes carry the individual's own affected status; -1
            // in the last slot presumably means "no kid status" — confirm against Chromosome.
            chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1, currentInd.getAffectedStatus(), -1));
            chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2,currentInd.getAffectedStatus(), -1));
            numSingletons++;
        }
    }
    chromosomes = chrom;
    //wipe clean any existing marker info so we know we're starting with a new file
    Chromosome.markers = null;
    return result;
}
1,110,801