Dataset schema (one row = three columns):
- bugged: string, length 6 to 599k
- fixed: string, length 10 to 599k
- __index_level_0__: int64, 0 to 1.13M

Each row below is flattened in column order: the bugged snippet, then its fixed counterpart, then the row index.
public void drop(DropTargetDropEvent dtde) { PlayPen pp = tp.getPlayPen(); Point loc = pp.unzoomPoint(new Point(dtde.getLocation())); loc.x -= tp.getX(); loc.y -= tp.getY(); logger.debug("Drop target drop event on "+tp.getName()+": "+dtde); Transferable t = dtde.getTransferable(); DataFlavor importFlavor = bestImportFlavor(pp, t.getTransferDataFlavors()); if (importFlavor == null) { dtde.rejectDrop(); tp.setInsertionPoint(COLUMN_INDEX_NONE); } else { try { DBTree dbtree = ArchitectFrame.getMainInstance().dbTree; // XXX: bad int insertionPoint = tp.pointToColumnIndex(loc); boolean newColumnsInPk = false; if (insertionPoint == COLUMN_INDEX_END_OF_PK) { insertionPoint = tp.getModel().getPkSize(); newColumnsInPk = true; } else if (insertionPoint == COLUMN_INDEX_START_OF_NON_PK) { insertionPoint = tp.getModel().getPkSize(); newColumnsInPk = false; } else if (insertionPoint < 0) { insertionPoint = tp.getModel().getColumns().size(); newColumnsInPk = false; } else if (insertionPoint < tp.getModel().getPkSize()) { newColumnsInPk = true; } ArrayList paths = (ArrayList) t.getTransferData(importFlavor); logger.debug("Importing items from tree: "+paths);// Used to put the undo event adapter into a drag and drop state ArchitectFrame.getMainInstance().playpen.fireUndoCompoundEvent( new UndoCompoundEvent( this,EventTypes.DRAG_AND_DROP_START, "Starting drag and drop")); Iterator removeIt = paths.iterator(); // Create a list so we don't have a comodification error ArrayList removeList = new ArrayList(); while (removeIt.hasNext()) { removeList.add(dbtree.getNodeForDnDPath((int[]) removeIt.next())); } for(int ii = removeList.size()-1; ii > -1; ii--) { Object someData = removeList.get(ii); logger.debug("drop: got object of type "+someData.getClass().getName()); if (someData instanceof SQLTable) { SQLTable table = (SQLTable) someData; if (table.getParentDatabase() == tp.getModel().getParentDatabase()) { // can't import table from target into target!! 
dtde.rejectDrop(); } else { dtde.acceptDrop(DnDConstants.ACTION_COPY); tp.getModel().inherit(insertionPoint, table); dtde.dropComplete(true); } } else if (someData instanceof SQLColumn) { SQLColumn col = (SQLColumn) someData; if (col.getParentTable() == tp.getModel()) { // moving column inside the same table dtde.acceptDrop(DnDConstants.ACTION_MOVE); int oldIndex = col.getParent().getChildren().indexOf(col); if (insertionPoint > oldIndex) { insertionPoint--; } tp.getModel().changeColumnIndex(oldIndex, insertionPoint); dtde.dropComplete(true); } else if (col.getParentTable().getParentDatabase() == tp.getModel().getParentDatabase()) { // moving column within playpen dtde.acceptDrop(DnDConstants.ACTION_MOVE); col.getParentTable().removeColumn(col); logger.debug("Moving column '"+col.getName() +"' to table '"+tp.getModel().getName() +"' at position "+insertionPoint); tp.getModel().addColumn(insertionPoint, col); if (newColumnsInPk) { col.setPrimaryKeySeq(new Integer(1)); } else { col.setPrimaryKeySeq(null); } dtde.dropComplete(true); } else { // importing column from a source database dtde.acceptDrop(DnDConstants.ACTION_COPY); tp.getModel().inherit(insertionPoint, col, newColumnsInPk); logger.debug("Inherited "+col.getName()+" to table"); dtde.dropComplete(true); } } else { dtde.rejectDrop(); } } } catch (Exception ex) { // Trying to show this dialog sometimes hangs the app in OS X //JOptionPane.showMessageDialog(tp, "Drop failed: "+ex.getMessage()); logger.error("Error processing drop operation", ex); dtde.rejectDrop(); } finally { tp.setInsertionPoint(COLUMN_INDEX_NONE); tp.getModel().normalizePrimaryKey();// Used to put the undo event adapter into a // regular state ArchitectFrame.getMainInstance().playpen.fireUndoCompoundEvent( new UndoCompoundEvent( this,EventTypes.DRAG_AND_DROP_END, "End drag and drop")); } } }
public void drop(DropTargetDropEvent dtde) { PlayPen pp = tp.getPlayPen(); Point loc = pp.unzoomPoint(new Point(dtde.getLocation())); loc.x -= tp.getX(); loc.y -= tp.getY(); logger.debug("Drop target drop event on "+tp.getName()+": "+dtde); Transferable t = dtde.getTransferable(); DataFlavor importFlavor = bestImportFlavor(pp, t.getTransferDataFlavors()); if (importFlavor == null) { dtde.rejectDrop(); tp.setInsertionPoint(COLUMN_INDEX_NONE); } else { try { DBTree dbtree = ArchitectFrame.getMainInstance().dbTree; // XXX: bad int insertionPoint = tp.pointToColumnIndex(loc); boolean newColumnsInPk = false; if (insertionPoint == COLUMN_INDEX_END_OF_PK) { insertionPoint = tp.getModel().getPkSize(); newColumnsInPk = true; } else if (insertionPoint == COLUMN_INDEX_START_OF_NON_PK) { insertionPoint = tp.getModel().getPkSize(); newColumnsInPk = false; } else if (insertionPoint < 0) { insertionPoint = tp.getModel().getColumns().size(); newColumnsInPk = false; } else if (insertionPoint < tp.getModel().getPkSize()) { newColumnsInPk = true; } ArrayList paths = (ArrayList) t.getTransferData(importFlavor); logger.debug("Importing items from tree: "+paths);// Used to put the undo event adapter into a drag and drop state ArchitectFrame.getMainInstance().playpen.fireUndoCompoundEvent( new UndoCompoundEvent( this,EventTypes.DRAG_AND_DROP_START, "Starting drag and drop")); Iterator removeIt = paths.iterator(); // Create a list so we don't have a comodification error ArrayList removeList = new ArrayList(); while (removeIt.hasNext()) { removeList.add(dbtree.getNodeForDnDPath((int[]) removeIt.next())); } for(int ii = removeList.size()-1; ii > -1; ii--) { Object someData = removeList.get(ii); logger.debug("drop: got object of type "+someData.getClass().getName()); if (someData instanceof SQLTable) { SQLTable table = (SQLTable) someData; if (table.getParentDatabase() == tp.getModel().getParentDatabase()) { // can't import table from target into target!! 
dtde.rejectDrop(); } else { dtde.acceptDrop(DnDConstants.ACTION_COPY); tp.getModel().inherit(insertionPoint, table); dtde.dropComplete(true); } } else if (someData instanceof SQLColumn) { SQLColumn col = (SQLColumn) someData; if (col.getParentTable() == tp.getModel()) { // moving column inside the same table dtde.acceptDrop(DnDConstants.ACTION_MOVE); int oldIndex = col.getParent().getChildren().indexOf(col); if (insertionPoint > oldIndex) { insertionPoint--; } tp.getModel().changeColumnIndex(oldIndex, insertionPoint); dtde.dropComplete(true); } else if (col.getParentTable().getParentDatabase() == tp.getModel().getParentDatabase()) { // moving column within playpen dtde.acceptDrop(DnDConstants.ACTION_MOVE); col.getParentTable().removeColumn(col); logger.debug("Moving column '"+col.getName() +"' to table '"+tp.getModel().getName() +"' at position "+insertionPoint); tp.getModel().addColumn(insertionPoint, col); if (newColumnsInPk) { col.setPrimaryKeySeq(new Integer(1)); } else { col.setPrimaryKeySeq(null); } dtde.dropComplete(true); } else { // importing column from a source database dtde.acceptDrop(DnDConstants.ACTION_COPY); tp.getModel().inherit(insertionPoint, col, newColumnsInPk); logger.debug("Inherited "+col.getName()+" to table"); dtde.dropComplete(true); } } else { dtde.rejectDrop(); } } } catch (Exception ex) { // Trying to show this dialog sometimes hangs the app in OS X //JOptionPane.showMessageDialog(tp, "Drop failed: "+ex.getMessage()); logger.error("Error processing drop operation", ex); dtde.rejectDrop(); } finally { tp.setInsertionPoint(COLUMN_INDEX_NONE); tp.getModel().normalizePrimaryKey();// Used to put the undo event adapter into a // regular state ArchitectFrame.getMainInstance().playpen.fireUndoCompoundEvent( new UndoCompoundEvent( this,EventTypes.DRAG_AND_DROP_END, "End drag and drop")); } } }
__index_level_0__: 1,110,557
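This row and the next show the same TablePane drop() handler, which follows the standard AWT drag-and-drop protocol: query the Transferable for a usable DataFlavor, then either rejectDrop() or acceptDrop(...) followed by dropComplete(...), with cleanup in a finally block. A minimal sketch of that protocol in isolation, independent of the Architect-specific classes (PlayPen, TablePane, DBTree):

    import java.awt.datatransfer.DataFlavor;
    import java.awt.datatransfer.Transferable;
    import java.awt.dnd.DnDConstants;
    import java.awt.dnd.DropTargetAdapter;
    import java.awt.dnd.DropTargetDropEvent;

    public class StringDropTarget extends DropTargetAdapter {
        public void drop(DropTargetDropEvent dtde) {
            Transferable t = dtde.getTransferable();
            if (!t.isDataFlavorSupported(DataFlavor.stringFlavor)) {
                dtde.rejectDrop();                         // no flavor we can import
                return;
            }
            try {
                dtde.acceptDrop(DnDConstants.ACTION_COPY); // accept before reading the data
                String data = (String) t.getTransferData(DataFlavor.stringFlavor);
                System.out.println("Dropped: " + data);
                dtde.dropComplete(true);                   // tell the drag source we succeeded
            } catch (Exception ex) {
                dtde.dropComplete(false);                  // report failure rather than leaving the DnD hanging
            }
        }
    }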
public void drop(DropTargetDropEvent dtde) { PlayPen pp = tp.getPlayPen(); Point loc = pp.unzoomPoint(new Point(dtde.getLocation())); loc.x -= tp.getX(); loc.y -= tp.getY(); logger.debug("Drop target drop event on "+tp.getName()+": "+dtde); Transferable t = dtde.getTransferable(); DataFlavor importFlavor = bestImportFlavor(pp, t.getTransferDataFlavors()); if (importFlavor == null) { dtde.rejectDrop(); tp.setInsertionPoint(COLUMN_INDEX_NONE); } else { try { DBTree dbtree = ArchitectFrame.getMainInstance().dbTree; // XXX: bad int insertionPoint = tp.pointToColumnIndex(loc); boolean newColumnsInPk = false; if (insertionPoint == COLUMN_INDEX_END_OF_PK) { insertionPoint = tp.getModel().getPkSize(); newColumnsInPk = true; } else if (insertionPoint == COLUMN_INDEX_START_OF_NON_PK) { insertionPoint = tp.getModel().getPkSize(); newColumnsInPk = false; } else if (insertionPoint < 0) { insertionPoint = tp.getModel().getColumns().size(); newColumnsInPk = false; } else if (insertionPoint < tp.getModel().getPkSize()) { newColumnsInPk = true; } ArrayList paths = (ArrayList) t.getTransferData(importFlavor); logger.debug("Importing items from tree: "+paths);// Used to put the undo event adapter into a drag and drop state ArchitectFrame.getMainInstance().playpen.fireUndoCompoundEvent( new UndoCompoundEvent( this,EventTypes.DRAG_AND_DROP_START, "Starting drag and drop")); Iterator removeIt = paths.iterator(); // Create a list so we don't have a comodification error ArrayList removeList = new ArrayList(); while (removeIt.hasNext()) { removeList.add(dbtree.getNodeForDnDPath((int[]) removeIt.next())); } for(int ii = removeList.size()-1; ii > -1; ii--) { Object someData = removeList.get(ii); logger.debug("drop: got object of type "+someData.getClass().getName()); if (someData instanceof SQLTable) { SQLTable table = (SQLTable) someData; if (table.getParentDatabase() == tp.getModel().getParentDatabase()) { // can't import table from target into target!! 
dtde.rejectDrop(); } else { dtde.acceptDrop(DnDConstants.ACTION_COPY); tp.getModel().inherit(insertionPoint, table); dtde.dropComplete(true); } } else if (someData instanceof SQLColumn) { SQLColumn col = (SQLColumn) someData; if (col.getParentTable() == tp.getModel()) { // moving column inside the same table dtde.acceptDrop(DnDConstants.ACTION_MOVE); int oldIndex = col.getParent().getChildren().indexOf(col); if (insertionPoint > oldIndex) { insertionPoint--; } tp.getModel().changeColumnIndex(oldIndex, insertionPoint); dtde.dropComplete(true); } else if (col.getParentTable().getParentDatabase() == tp.getModel().getParentDatabase()) { // moving column within playpen dtde.acceptDrop(DnDConstants.ACTION_MOVE); col.getParentTable().removeColumn(col); logger.debug("Moving column '"+col.getName() +"' to table '"+tp.getModel().getName() +"' at position "+insertionPoint); tp.getModel().addColumn(insertionPoint, col); if (newColumnsInPk) { col.setPrimaryKeySeq(new Integer(1)); } else { col.setPrimaryKeySeq(null); } dtde.dropComplete(true); } else { // importing column from a source database dtde.acceptDrop(DnDConstants.ACTION_COPY); tp.getModel().inherit(insertionPoint, col, newColumnsInPk); logger.debug("Inherited "+col.getName()+" to table"); dtde.dropComplete(true); } } else { dtde.rejectDrop(); } } } catch (Exception ex) { // Trying to show this dialog sometimes hangs the app in OS X //JOptionPane.showMessageDialog(tp, "Drop failed: "+ex.getMessage()); logger.error("Error processing drop operation", ex); dtde.rejectDrop(); } finally { tp.setInsertionPoint(COLUMN_INDEX_NONE); tp.getModel().normalizePrimaryKey();// Used to put the undo event adapter into a // regular state ArchitectFrame.getMainInstance().playpen.fireUndoCompoundEvent( new UndoCompoundEvent( this,EventTypes.DRAG_AND_DROP_END, "End drag and drop")); } } }
public void drop(DropTargetDropEvent dtde) { PlayPen pp = tp.getPlayPen(); Point loc = pp.unzoomPoint(new Point(dtde.getLocation())); loc.x -= tp.getX(); loc.y -= tp.getY(); logger.debug("Drop target drop event on "+tp.getName()+": "+dtde); Transferable t = dtde.getTransferable(); DataFlavor importFlavor = bestImportFlavor(pp, t.getTransferDataFlavors()); if (importFlavor == null) { dtde.rejectDrop(); tp.setInsertionPoint(COLUMN_INDEX_NONE); } else { try { DBTree dbtree = ArchitectFrame.getMainInstance().dbTree; // XXX: bad int insertionPoint = tp.pointToColumnIndex(loc); boolean newColumnsInPk = false; if (insertionPoint == COLUMN_INDEX_END_OF_PK) { insertionPoint = tp.getModel().getPkSize(); newColumnsInPk = true; } else if (insertionPoint == COLUMN_INDEX_START_OF_NON_PK) { insertionPoint = tp.getModel().getPkSize(); newColumnsInPk = false; } else if (insertionPoint < 0) { insertionPoint = tp.getModel().getColumns().size(); newColumnsInPk = false; } else if (insertionPoint < tp.getModel().getPkSize()) { newColumnsInPk = true; } ArrayList paths = (ArrayList) t.getTransferData(importFlavor); logger.debug("Importing items from tree: "+paths);// Used to put the undo event adapter into a drag and drop state ArchitectFrame.getMainInstance().playpen.fireUndoCompoundEvent( new UndoCompoundEvent( this,EventTypes.DRAG_AND_DROP_START, "Starting drag and drop")); Iterator removeIt = paths.iterator(); // Create a list so we don't have a comodification error ArrayList removeList = new ArrayList(); while (removeIt.hasNext()) { removeList.add(dbtree.getNodeForDnDPath((int[]) removeIt.next())); } for(int ii = removeList.size()-1; ii > -1; ii--) { Object someData = removeList.get(ii); logger.debug("drop: got object of type "+someData.getClass().getName()); if (someData instanceof SQLTable) { SQLTable table = (SQLTable) someData; if (table.getParentDatabase() == tp.getModel().getParentDatabase()) { // can't import table from target into target!! 
dtde.rejectDrop(); } else { dtde.acceptDrop(DnDConstants.ACTION_COPY); tp.getModel().inherit(insertionPoint, table); dtde.dropComplete(true); } } else if (someData instanceof SQLColumn) { SQLColumn col = (SQLColumn) someData; if (col.getParentTable() == tp.getModel()) { // moving column inside the same table dtde.acceptDrop(DnDConstants.ACTION_MOVE); int oldIndex = col.getParent().getChildren().indexOf(col); if (insertionPoint > oldIndex) { insertionPoint--; } tp.getModel().changeColumnIndex(oldIndex, insertionPoint); dtde.dropComplete(true); } else if (col.getParentTable().getParentDatabase() == tp.getModel().getParentDatabase()) { // moving column within playpen dtde.acceptDrop(DnDConstants.ACTION_MOVE); col.getParentTable().removeColumn(col); logger.debug("Moving column '"+col.getName() +"' to table '"+tp.getModel().getName() +"' at position "+insertionPoint); tp.getModel().addColumn(insertionPoint, col); if (newColumnsInPk) { col.setPrimaryKeySeq(new Integer(1)); } else { col.setPrimaryKeySeq(null); } dtde.dropComplete(true); } else { // importing column from a source database dtde.acceptDrop(DnDConstants.ACTION_COPY); tp.getModel().inherit(insertionPoint, col, newColumnsInPk); logger.debug("Inherited "+col.getName()+" to table"); dtde.dropComplete(true); } } else { dtde.rejectDrop(); } } } catch (Exception ex) { // Trying to show this dialog sometimes hangs the app in OS X //JOptionPane.showMessageDialog(tp, "Drop failed: "+ex.getMessage()); logger.error("Error processing drop operation", ex); dtde.rejectDrop(); } finally { tp.setInsertionPoint(COLUMN_INDEX_NONE); tp.getModel().normalizePrimaryKey();// Used to put the undo event adapter into a // regular state ArchitectFrame.getMainInstance().playpen.fireUndoCompoundEvent( new UndoCompoundEvent( this,EventTypes.DRAG_AND_DROP_END, "End drag and drop")); } } }
__index_level_0__: 1,110,558
public void mouseClicked(MouseEvent evt) { if (evt.getClickCount() == 2) { TablePane tp = (TablePane) evt.getSource(); if (tp.isSelected()) { ArchitectFrame af = ArchitectFrame.getMainInstance(); int selectedColIndex = tp.getSelectedColumnIndex(); if (selectedColIndex == COLUMN_INDEX_NONE) { af.editTableAction.actionPerformed (new ActionEvent(tp, ActionEvent.ACTION_PERFORMED, "DoubleClick")); } else if (selectedColIndex >= 0) { af.editColumnAction.actionPerformed (new ActionEvent(tp, ActionEvent.ACTION_PERFORMED, "DoubleClick")); } } } }
public void mouseClicked(MouseEvent evt) { if ((evt.getModifiers() & MouseEvent.BUTTON1_MASK) != 0 && evt.getClickCount() == 2) { TablePane tp = (TablePane) evt.getSource(); if (tp.isSelected()) { ArchitectFrame af = ArchitectFrame.getMainInstance(); int selectedColIndex = tp.getSelectedColumnIndex(); if (selectedColIndex == COLUMN_INDEX_NONE) { af.editTableAction.actionPerformed (new ActionEvent(tp, ActionEvent.ACTION_PERFORMED, "DoubleClick")); } else if (selectedColIndex >= 0) { af.editColumnAction.actionPerformed (new ActionEvent(tp, ActionEvent.ACTION_PERFORMED, "DoubleClick")); } } } }
__index_level_0__: 1,110,559
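The fix in this row guards the double-click action on the mouse button: getClickCount() == 2 also becomes true for right- and middle-button double clicks, so without the BUTTON1_MASK test those would open the table or column editor too. A minimal sketch of the same guard, using the SwingUtilities helper instead of masking getModifiers() by hand:

    import java.awt.event.MouseAdapter;
    import java.awt.event.MouseEvent;
    import javax.swing.SwingUtilities;

    public class DoubleClickListener extends MouseAdapter {
        public void mouseClicked(MouseEvent evt) {
            // Check the button first; click count alone is not enough.
            if (SwingUtilities.isLeftMouseButton(evt) && evt.getClickCount() == 2) {
                System.out.println("Left double-click at " + evt.getPoint());
            }
        }
    }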
void browse(int browseType){ String name; String markerInfoName = ""; HaploView h = (HaploView) this.getParent(); h.fc.setSelectedFile(new File("")); int returned = h.fc.showOpenDialog(this); if (returned != JFileChooser.APPROVE_OPTION) return; File file = h.fc.getSelectedFile(); if (browseType == GENO){ name = file.getName(); genoFileField.setText(file.getParent()+File.separator+name); if(infoFileField.getText().equals("")){ //baseName should be everything but the final ".XXX" extension StringTokenizer st = new StringTokenizer(name,"."); String baseName = st.nextToken(); for (int i = 0; i < st.countTokens()-1; i++){ baseName = baseName.concat(".").concat(st.nextToken()); } //check for info file for original file sample.haps //either sample.haps.info or sample.info File maybeMarkers1 = new File(file.getParent(), name + MARKER_DATA_EXT); File maybeMarkers2 = new File(file.getParent(), baseName + MARKER_DATA_EXT); if (maybeMarkers1.exists()){ markerInfoName = maybeMarkers1.getName(); }else if (maybeMarkers2.exists()){ markerInfoName = maybeMarkers2.getName(); }else{ return; } infoFileField.setText(file.getParent()+File.separator+markerInfoName); } }else if (browseType==INFO){ markerInfoName = file.getName(); infoFileField.setText(file.getParent()+File.separator+markerInfoName); } }
void browse(int browseType){ String name; String markerInfoName = ""; HaploView h = (HaploView) this.getParent(); h.fc.setSelectedFile(new File("")); int returned = h.fc.showOpenDialog(this); if (returned != JFileChooser.APPROVE_OPTION) return; File file = h.fc.getSelectedFile(); if (browseType == GENO){ name = file.getName(); genoFileField.setText(file.getParent()+File.separator+name); if(infoFileField.getText().equals("")){ //baseName should be everything but the final ".XXX" extension StringTokenizer st = new StringTokenizer(name,"."); String baseName = st.nextToken(); int numPieces = st.countTokens()-1; for (int i = 0; i < numPieces; i++){ baseName = baseName.concat(".").concat(st.nextToken()); } //check for info file for original file sample.haps //either sample.haps.info or sample.info File maybeMarkers1 = new File(file.getParent(), name + MARKER_DATA_EXT); File maybeMarkers2 = new File(file.getParent(), baseName + MARKER_DATA_EXT); if (maybeMarkers1.exists()){ markerInfoName = maybeMarkers1.getName(); }else if (maybeMarkers2.exists()){ markerInfoName = maybeMarkers2.getName(); }else{ return; } infoFileField.setText(file.getParent()+File.separator+markerInfoName); } }else if (browseType==INFO){ markerInfoName = file.getName(); infoFileField.setText(file.getParent()+File.separator+markerInfoName); } }
__index_level_0__: 1,110,560
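The bug in this row is subtle: StringTokenizer.countTokens() returns the number of tokens remaining, so using it in the loop condition while nextToken() consumes tokens makes the bound shrink on every iteration, and multi-dot file names lose pieces of their base name. The fix snapshots the count once before the loop. A small self-contained demonstration:

    import java.util.StringTokenizer;

    public class BaseNameDemo {
        // Strip only the final ".ext", keeping earlier dots (e.g. "my.data.haps" -> "my.data").
        static String baseName(String name) {
            StringTokenizer st = new StringTokenizer(name, ".");
            String base = st.nextToken();
            int numPieces = st.countTokens() - 1;  // snapshot: countTokens() shrinks as tokens are consumed
            for (int i = 0; i < numPieces; i++) {
                base = base + "." + st.nextToken();
            }
            return base;
        }

        public static void main(String[] args) {
            System.out.println(baseName("sample.haps"));      // sample
            System.out.println(baseName("my.data.v2.haps"));  // my.data.v2 (the buggy loop would yield my.data)
        }
    }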
public static String appendURLParam(String url, String param, String value){ StringBuffer urlString = new StringBuffer(url); if (url.indexOf("?") == -1) { urlString.append("?"); }else if(url.endsWith("&") == false) { urlString.append("&"); } urlString.append(param); urlString.append("="); urlString.append(value); return urlString.toString(); }
public static String appendURLParam(String url, String param, String value){ StringBuffer urlString = new StringBuffer(url); if (url.indexOf("?") == -1) { urlString.append("?"); }else if(url.endsWith("&") == false) { urlString.append("&"); } urlString.append(param); urlString.append("="); try { urlString.append(URLEncoder.encode(value, "UTF-8")); } catch (UnsupportedEncodingException e) { throw new RuntimeException(e); } return urlString.toString(); }
__index_level_0__: 1,110,563
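The fix here URL-encodes the parameter value before appending it; a raw value containing '&', '=', spaces, or non-ASCII characters would otherwise corrupt the query string. A simplified sketch of the pattern (it omits the original's trailing-'&' check):

    import java.io.UnsupportedEncodingException;
    import java.net.URLEncoder;

    public class UrlParamDemo {
        static String appendParam(String url, String param, String value) {
            StringBuilder sb = new StringBuilder(url);
            sb.append(url.indexOf('?') == -1 ? '?' : '&');
            sb.append(param).append('=');
            try {
                sb.append(URLEncoder.encode(value, "UTF-8"));   // escapes '&', '=', spaces, ...
            } catch (UnsupportedEncodingException e) {
                throw new RuntimeException(e);                  // UTF-8 is guaranteed to exist
            }
            return sb.toString();
        }

        public static void main(String[] args) {
            // Unencoded, the '&' in the value would start a bogus second parameter.
            System.out.println(appendParam("http://example.com/search", "q", "fish & chips"));
            // -> http://example.com/search?q=fish+%26+chips
        }
    }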
public void doTag(XMLOutput output) throws Exception { invokeBody(output); if (name == null) { throw new MissingAttributeException("name"); } if (dynaClass == null) { throw new MissingAttributeException("dynaClass"); } final DynaClass theDynaClass = dynaClass; final Map beanAttributes = (attributes != null) ? attributes : EMPTY_MAP; TagFactory factory = new TagFactory() { public Tag createTag() { return new DynamicDynaBeanTag(theDynaClass, beanAttributes, varAttribute); } }; getTagLibrary().registerBeanTag(name, factory); // now lets clear the attributes for next invocation and help the GC attributes = null; }
public void doTag(XMLOutput output) throws Exception { invokeBody(output); if (name == null) { throw new MissingAttributeException("name"); } if (dynaClass == null) { throw new MissingAttributeException("dynaClass"); } final DynaClass theDynaClass = dynaClass; final Map beanAttributes = (attributes != null) ? attributes : EMPTY_MAP; TagFactory factory = new TagFactory() { public Tag createTag(String name, Attributes attributes) { return new DynamicDynaBeanTag(theDynaClass, beanAttributes, varAttribute); } }; getTagLibrary().registerBeanTag(name, factory); // now lets clear the attributes for next invocation and help the GC attributes = null; }
__index_level_0__: 1,110,564
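The change in this row (isolated again in the next row) widens the anonymous factory's method from createTag() to createTag(String name, Attributes attributes), evidently tracking a change to Jelly's TagFactory callback; the extra arguments are simply ignored by this factory. The general hazard with such signature drift is that, when the supertype still compiles, the old method silently becomes an unused overload instead of an override. A small illustration with hypothetical Base/Sub classes, and why @Override catches it:

    public class OverrideDemo {
        static class Base {
            Object createTag(String name) { return "base:" + name; }
        }

        static class Sub extends Base {
            // Intended as an override, but the signature differs, so this is
            // just a new overload; callers that pass a name still reach Base.
            // Writing @Override here would turn the mistake into a compile error.
            Object createTag() { return "sub"; }
        }

        public static void main(String[] args) {
            Base b = new Sub();
            System.out.println(b.createTag("x"));  // prints base:x, not sub
        }
    }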
public Tag createTag() { return new DynamicDynaBeanTag(theDynaClass, beanAttributes, varAttribute); }
public Tag createTag(String name, Attributes attributes) { return new DynamicDynaBeanTag(theDynaClass, beanAttributes, varAttribute); }
__index_level_0__: 1,110,565
public String computeDPrime(int a, int b, int c, int d, int e, double f){ int i,j,k,count,itmp; int low_i = 0; int high_i = 0; double[] nAA = new double[1]; double[] nBB = new double[1]; double[] nAB = new double[1]; double[] nBA = new double[1]; double[] pAA = new double[1]; double[] pBB = new double[1]; double[] pAB = new double[1]; double[] pBA = new double[1]; double loglike, oldloglike, meand, mean2d, sd; double g,h,m,tmp,r; double num, denom1, denom2, denom, dprime, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, r2; double tmpAA, tmpAB, tmpBA, tmpBB, dpr, tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=(double)a; known[AB]=(double)b; known[BA]=(double)c; known[BB]=(double)d; unknownDH=e; total_chroms= a+b+c+d+(2*unknownDH); pA1 = (double) (a+b+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (double) (a+c+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = f; /* set initial conditions */ if (const_prob < 0.00) { pAA[0]=pA1*pA2; pAB[0]=pA1*pB2; pBA[0]=pB1*pA2; pBB[0]=pB1*pB2; } else { pAA[0]=const_prob; pAB[0]=const_prob; pBA[0]=const_prob; pBB[0]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(pAA[0],pAB[0],pBA[0],pBB[0],nAA,nAB,nBA,nBB,0); estimate_p(nAA[0],nAB[0],nBA[0],nBB[0],pAA,pAB,pBA,pBB); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(pAA[0],pAB[0],pBA[0],pBB[0],nAA,nAB,nBA,nBB,count); loglike = known[AA]*log10(pAA[0]) + known[AB]*log10(pAB[0]) + known[BA]*log10(pBA[0]) + known[BB]*log10(pBB[0]) + (double)unknownDH*log10(pAA[0]*pBB[0] + pAB[0]*pBA[0]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(nAA[0],nAB[0],nBA[0],nBB[0],pAA,pAB,pBA,pBB); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(pAA[0]) + known[AB]*log10(pAB[0]) + known[BA]*log10(pBA[0]) + known[BB]*log10(pBB[0]) + (double)unknownDH*log10(pAA[0]*pBB[0] + pAB[0]*pBA[0]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = pAA[0]*pBB[0] - pAB[0]*pBA[0]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=pAA[0]; pAA[0]=pAB[0]; pAB[0]=tmp; tmp=pBB[0]; pBB[0]=pBA[0]; pBA[0]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=nAA[0]; nAA[0]=nAB[0]; nAB[0]=tmp; tmp=nBB[0]; nBB[0]=nBA[0]; nBA[0]=tmp; /* num has now undergone a sign change */ num = pAA[0]*pBB[0] - pAB[0]*pBA[0]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (pAA[0]+pBA[0])*(pBA[0]+pBB[0]); denom2 = (pAA[0]+pAB[0])*(pAB[0]+pBB[0]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ r2 = num*num/(pA1*pB1*pA2*pB2); /* we've computed D', its' LOD, and r^2 - let's store them and then compute confidence intervals */ String 
returnStr = new String(""); returnStr += dprime; returnStr += "\t"; returnStr += loglike1-loglike0; returnStr += "\t"; returnStr += r2; returnStr += "\t"; real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } returnStr += (double) low_i/100.0; returnStr += "\t"; returnStr += (double) high_i/100.0; return returnStr; }
public String computeDPrime(int a, int b, int c, int d, int e, double f){ int i,j,k,count,itmp; int low_i = 0; int high_i = 0; double[] nAA = new double[1]; double[] nBB = new double[1]; double[] nAB = new double[1]; double[] nBA = new double[1]; double[] pAA = new double[1]; double[] pBB = new double[1]; double[] pAB = new double[1]; double[] pBA = new double[1]; double loglike, oldloglike, meand, mean2d, sd; double g,h,m,tmp,r; double num, denom1, denom2, denom, dprime, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, r2; double tmpAA, tmpAB, tmpBA, tmpBB, dpr, tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=(double)a; known[AB]=(double)b; known[BA]=(double)c; known[BB]=(double)d; unknownDH=e; total_chroms= a+b+c+d+(2*unknownDH); pA1 = (double) (a+b+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (double) (a+c+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = f; /* set initial conditions */ if (const_prob < 0.00) { pAA[0]=pA1*pA2; pAB[0]=pA1*pB2; pBA[0]=pB1*pA2; pBB[0]=pB1*pB2; } else { pAA[0]=const_prob; pAB[0]=const_prob; pBA[0]=const_prob; pBB[0]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(pAA[0],pAB[0],pBA[0],pBB[0],nAA,nAB,nBA,nBB,0); estimate_p(nAA[0],nAB[0],nBA[0],nBB[0],pAA,pAB,pBA,pBB); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(pAA[0],pAB[0],pBA[0],pBB[0],nAA,nAB,nBA,nBB,count); loglike = known[AA]*log10(pAA[0]) + known[AB]*log10(pAB[0]) + known[BA]*log10(pBA[0]) + known[BB]*log10(pBB[0]) + (double)unknownDH*log10(pAA[0]*pBB[0] + pAB[0]*pBA[0]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(nAA[0],nAB[0],nBA[0],nBB[0],pAA,pAB,pBA,pBB); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(pAA[0]) + known[AB]*log10(pAB[0]) + known[BA]*log10(pBA[0]) + known[BB]*log10(pBB[0]) + (double)unknownDH*log10(pAA[0]*pBB[0] + pAB[0]*pBA[0]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = pAA[0]*pBB[0] - pAB[0]*pBA[0]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=pAA[0]; pAA[0]=pAB[0]; pAB[0]=tmp; tmp=pBB[0]; pBB[0]=pBA[0]; pBA[0]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=nAA[0]; nAA[0]=nAB[0]; nAB[0]=tmp; tmp=nBB[0]; nBB[0]=nBA[0]; nBA[0]=tmp; /* num has now undergone a sign change */ num = pAA[0]*pBB[0] - pAB[0]*pBA[0]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (pAA[0]+pBA[0])*(pBA[0]+pBB[0]); denom2 = (pAA[0]+pAB[0])*(pAB[0]+pBB[0]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ r2 = num*num/(pA1*pB1*pA2*pB2); /* we've computed D', its' LOD, and r^2 - let's store them and then compute confidence intervals */ String 
returnStr = new String(""); NumberFormat nf = NumberFormat.getInstance(); nf.setMinimumFractionDigits(2); nf.setMaximumFractionDigits(2); returnStr += nf.format(dprime); returnStr += "\t"; returnStr += loglike1-loglike0; returnStr += "\t"; returnStr += r2; returnStr += "\t"; real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } returnStr += (double) low_i/100.0; returnStr += "\t"; returnStr += (double) high_i/100.0; return returnStr; }
__index_level_0__: 1,110,566
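This row's fix builds a NumberFormat limited to two fraction digits and routes dprime through it instead of concatenating the raw double; the next two rows apply the same nf formatter to the LOD (loglike1-loglike0) and to r2 (their snippets use nf without re-declaring it, since the declaration arrives with this row's fix). A minimal sketch of the difference; note that NumberFormat.getInstance() is locale-sensitive, so the decimal separator can vary:

    import java.text.NumberFormat;

    public class TwoDecimalsDemo {
        public static void main(String[] args) {
            NumberFormat nf = NumberFormat.getInstance();
            nf.setMinimumFractionDigits(2);
            nf.setMaximumFractionDigits(2);

            double dprime = 0.9666666666666667;
            System.out.println("raw:       " + dprime);            // full double precision
            System.out.println("formatted: " + nf.format(dprime)); // 0.97 in an English locale
        }
    }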
public String computeDPrime(int a, int b, int c, int d, int e, double f){ int i,j,k,count,itmp; int low_i = 0; int high_i = 0; double[] nAA = new double[1]; double[] nBB = new double[1]; double[] nAB = new double[1]; double[] nBA = new double[1]; double[] pAA = new double[1]; double[] pBB = new double[1]; double[] pAB = new double[1]; double[] pBA = new double[1]; double loglike, oldloglike, meand, mean2d, sd; double g,h,m,tmp,r; double num, denom1, denom2, denom, dprime, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, r2; double tmpAA, tmpAB, tmpBA, tmpBB, dpr, tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=(double)a; known[AB]=(double)b; known[BA]=(double)c; known[BB]=(double)d; unknownDH=e; total_chroms= a+b+c+d+(2*unknownDH); pA1 = (double) (a+b+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (double) (a+c+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = f; /* set initial conditions */ if (const_prob < 0.00) { pAA[0]=pA1*pA2; pAB[0]=pA1*pB2; pBA[0]=pB1*pA2; pBB[0]=pB1*pB2; } else { pAA[0]=const_prob; pAB[0]=const_prob; pBA[0]=const_prob; pBB[0]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(pAA[0],pAB[0],pBA[0],pBB[0],nAA,nAB,nBA,nBB,0); estimate_p(nAA[0],nAB[0],nBA[0],nBB[0],pAA,pAB,pBA,pBB); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(pAA[0],pAB[0],pBA[0],pBB[0],nAA,nAB,nBA,nBB,count); loglike = known[AA]*log10(pAA[0]) + known[AB]*log10(pAB[0]) + known[BA]*log10(pBA[0]) + known[BB]*log10(pBB[0]) + (double)unknownDH*log10(pAA[0]*pBB[0] + pAB[0]*pBA[0]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(nAA[0],nAB[0],nBA[0],nBB[0],pAA,pAB,pBA,pBB); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(pAA[0]) + known[AB]*log10(pAB[0]) + known[BA]*log10(pBA[0]) + known[BB]*log10(pBB[0]) + (double)unknownDH*log10(pAA[0]*pBB[0] + pAB[0]*pBA[0]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = pAA[0]*pBB[0] - pAB[0]*pBA[0]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=pAA[0]; pAA[0]=pAB[0]; pAB[0]=tmp; tmp=pBB[0]; pBB[0]=pBA[0]; pBA[0]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=nAA[0]; nAA[0]=nAB[0]; nAB[0]=tmp; tmp=nBB[0]; nBB[0]=nBA[0]; nBA[0]=tmp; /* num has now undergone a sign change */ num = pAA[0]*pBB[0] - pAB[0]*pBA[0]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (pAA[0]+pBA[0])*(pBA[0]+pBB[0]); denom2 = (pAA[0]+pAB[0])*(pAB[0]+pBB[0]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ r2 = num*num/(pA1*pB1*pA2*pB2); /* we've computed D', its' LOD, and r^2 - let's store them and then compute confidence intervals */ String 
returnStr = new String(""); returnStr += dprime; returnStr += "\t"; returnStr += loglike1-loglike0; returnStr += "\t"; returnStr += r2; returnStr += "\t"; real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } returnStr += (double) low_i/100.0; returnStr += "\t"; returnStr += (double) high_i/100.0; return returnStr; }
public String computeDPrime(int a, int b, int c, int d, int e, double f){ int i,j,k,count,itmp; int low_i = 0; int high_i = 0; double[] nAA = new double[1]; double[] nBB = new double[1]; double[] nAB = new double[1]; double[] nBA = new double[1]; double[] pAA = new double[1]; double[] pBB = new double[1]; double[] pAB = new double[1]; double[] pBA = new double[1]; double loglike, oldloglike, meand, mean2d, sd; double g,h,m,tmp,r; double num, denom1, denom2, denom, dprime, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, r2; double tmpAA, tmpAB, tmpBA, tmpBB, dpr, tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=(double)a; known[AB]=(double)b; known[BA]=(double)c; known[BB]=(double)d; unknownDH=e; total_chroms= a+b+c+d+(2*unknownDH); pA1 = (double) (a+b+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (double) (a+c+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = f; /* set initial conditions */ if (const_prob < 0.00) { pAA[0]=pA1*pA2; pAB[0]=pA1*pB2; pBA[0]=pB1*pA2; pBB[0]=pB1*pB2; } else { pAA[0]=const_prob; pAB[0]=const_prob; pBA[0]=const_prob; pBB[0]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(pAA[0],pAB[0],pBA[0],pBB[0],nAA,nAB,nBA,nBB,0); estimate_p(nAA[0],nAB[0],nBA[0],nBB[0],pAA,pAB,pBA,pBB); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(pAA[0],pAB[0],pBA[0],pBB[0],nAA,nAB,nBA,nBB,count); loglike = known[AA]*log10(pAA[0]) + known[AB]*log10(pAB[0]) + known[BA]*log10(pBA[0]) + known[BB]*log10(pBB[0]) + (double)unknownDH*log10(pAA[0]*pBB[0] + pAB[0]*pBA[0]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(nAA[0],nAB[0],nBA[0],nBB[0],pAA,pAB,pBA,pBB); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(pAA[0]) + known[AB]*log10(pAB[0]) + known[BA]*log10(pBA[0]) + known[BB]*log10(pBB[0]) + (double)unknownDH*log10(pAA[0]*pBB[0] + pAB[0]*pBA[0]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = pAA[0]*pBB[0] - pAB[0]*pBA[0]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=pAA[0]; pAA[0]=pAB[0]; pAB[0]=tmp; tmp=pBB[0]; pBB[0]=pBA[0]; pBA[0]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=nAA[0]; nAA[0]=nAB[0]; nAB[0]=tmp; tmp=nBB[0]; nBB[0]=nBA[0]; nBA[0]=tmp; /* num has now undergone a sign change */ num = pAA[0]*pBB[0] - pAB[0]*pBA[0]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (pAA[0]+pBA[0])*(pBA[0]+pBB[0]); denom2 = (pAA[0]+pAB[0])*(pAB[0]+pBB[0]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ r2 = num*num/(pA1*pB1*pA2*pB2); /* we've computed D', its' LOD, and r^2 - let's store them and then compute confidence intervals */ String 
returnStr = new String(""); returnStr += dprime; returnStr += "\t"; returnStr += nf.format(loglike1-loglike0); returnStr += "\t"; returnStr += r2; returnStr += "\t"; real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } returnStr += (double) low_i/100.0; returnStr += "\t"; returnStr += (double) high_i/100.0; return returnStr; }
__index_level_0__: 1,110,567
public String computeDPrime(int a, int b, int c, int d, int e, double f){ int i,j,k,count,itmp; int low_i = 0; int high_i = 0; double[] nAA = new double[1]; double[] nBB = new double[1]; double[] nAB = new double[1]; double[] nBA = new double[1]; double[] pAA = new double[1]; double[] pBB = new double[1]; double[] pAB = new double[1]; double[] pBA = new double[1]; double loglike, oldloglike, meand, mean2d, sd; double g,h,m,tmp,r; double num, denom1, denom2, denom, dprime, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, r2; double tmpAA, tmpAB, tmpBA, tmpBB, dpr, tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=(double)a; known[AB]=(double)b; known[BA]=(double)c; known[BB]=(double)d; unknownDH=e; total_chroms= a+b+c+d+(2*unknownDH); pA1 = (double) (a+b+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (double) (a+c+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = f; /* set initial conditions */ if (const_prob < 0.00) { pAA[0]=pA1*pA2; pAB[0]=pA1*pB2; pBA[0]=pB1*pA2; pBB[0]=pB1*pB2; } else { pAA[0]=const_prob; pAB[0]=const_prob; pBA[0]=const_prob; pBB[0]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(pAA[0],pAB[0],pBA[0],pBB[0],nAA,nAB,nBA,nBB,0); estimate_p(nAA[0],nAB[0],nBA[0],nBB[0],pAA,pAB,pBA,pBB); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(pAA[0],pAB[0],pBA[0],pBB[0],nAA,nAB,nBA,nBB,count); loglike = known[AA]*log10(pAA[0]) + known[AB]*log10(pAB[0]) + known[BA]*log10(pBA[0]) + known[BB]*log10(pBB[0]) + (double)unknownDH*log10(pAA[0]*pBB[0] + pAB[0]*pBA[0]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(nAA[0],nAB[0],nBA[0],nBB[0],pAA,pAB,pBA,pBB); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(pAA[0]) + known[AB]*log10(pAB[0]) + known[BA]*log10(pBA[0]) + known[BB]*log10(pBB[0]) + (double)unknownDH*log10(pAA[0]*pBB[0] + pAB[0]*pBA[0]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = pAA[0]*pBB[0] - pAB[0]*pBA[0]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=pAA[0]; pAA[0]=pAB[0]; pAB[0]=tmp; tmp=pBB[0]; pBB[0]=pBA[0]; pBA[0]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=nAA[0]; nAA[0]=nAB[0]; nAB[0]=tmp; tmp=nBB[0]; nBB[0]=nBA[0]; nBA[0]=tmp; /* num has now undergone a sign change */ num = pAA[0]*pBB[0] - pAB[0]*pBA[0]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (pAA[0]+pBA[0])*(pBA[0]+pBB[0]); denom2 = (pAA[0]+pAB[0])*(pAB[0]+pBB[0]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ r2 = num*num/(pA1*pB1*pA2*pB2); /* we've computed D', its' LOD, and r^2 - let's store them and then compute confidence intervals */ String 
returnStr = new String(""); returnStr += dprime; returnStr += "\t"; returnStr += loglike1-loglike0; returnStr += "\t"; returnStr += r2; returnStr += "\t"; real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } returnStr += (double) low_i/100.0; returnStr += "\t"; returnStr += (double) high_i/100.0; return returnStr; }
public String computeDPrime(int a, int b, int c, int d, int e, double f){ int i,j,k,count,itmp; int low_i = 0; int high_i = 0; double[] nAA = new double[1]; double[] nBB = new double[1]; double[] nAB = new double[1]; double[] nBA = new double[1]; double[] pAA = new double[1]; double[] pBB = new double[1]; double[] pAB = new double[1]; double[] pBA = new double[1]; double loglike, oldloglike, meand, mean2d, sd; double g,h,m,tmp,r; double num, denom1, denom2, denom, dprime, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, r2; double tmpAA, tmpAB, tmpBA, tmpBB, dpr, tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=(double)a; known[AB]=(double)b; known[BA]=(double)c; known[BB]=(double)d; unknownDH=e; total_chroms= a+b+c+d+(2*unknownDH); pA1 = (double) (a+b+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (double) (a+c+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = f; /* set initial conditions */ if (const_prob < 0.00) { pAA[0]=pA1*pA2; pAB[0]=pA1*pB2; pBA[0]=pB1*pA2; pBB[0]=pB1*pB2; } else { pAA[0]=const_prob; pAB[0]=const_prob; pBA[0]=const_prob; pBB[0]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(pAA[0],pAB[0],pBA[0],pBB[0],nAA,nAB,nBA,nBB,0); estimate_p(nAA[0],nAB[0],nBA[0],nBB[0],pAA,pAB,pBA,pBB); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(pAA[0],pAB[0],pBA[0],pBB[0],nAA,nAB,nBA,nBB,count); loglike = known[AA]*log10(pAA[0]) + known[AB]*log10(pAB[0]) + known[BA]*log10(pBA[0]) + known[BB]*log10(pBB[0]) + (double)unknownDH*log10(pAA[0]*pBB[0] + pAB[0]*pBA[0]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(nAA[0],nAB[0],nBA[0],nBB[0],pAA,pAB,pBA,pBB); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(pAA[0]) + known[AB]*log10(pAB[0]) + known[BA]*log10(pBA[0]) + known[BB]*log10(pBB[0]) + (double)unknownDH*log10(pAA[0]*pBB[0] + pAB[0]*pBA[0]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = pAA[0]*pBB[0] - pAB[0]*pBA[0]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=pAA[0]; pAA[0]=pAB[0]; pAB[0]=tmp; tmp=pBB[0]; pBB[0]=pBA[0]; pBA[0]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=nAA[0]; nAA[0]=nAB[0]; nAB[0]=tmp; tmp=nBB[0]; nBB[0]=nBA[0]; nBA[0]=tmp; /* num has now undergone a sign change */ num = pAA[0]*pBB[0] - pAB[0]*pBA[0]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (pAA[0]+pBA[0])*(pBA[0]+pBB[0]); denom2 = (pAA[0]+pAB[0])*(pAB[0]+pBB[0]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ r2 = num*num/(pA1*pB1*pA2*pB2); /* we've computed D', its' LOD, and r^2 - let's store them and then compute confidence intervals */ String 
returnStr = new String(""); returnStr += dprime; returnStr += "\t"; returnStr += loglike1-loglike0; returnStr += "\t"; returnStr += nf.format(r2); returnStr += "\t"; real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } returnStr += (double) low_i/100.0; returnStr += "\t"; returnStr += (double) high_i/100.0; return returnStr; }
__index_level_0__: 1,110,568
private File getNewFname( java.util.Date date, String strExtension ) { System.err.println( "getNewFname " + date + " " + strExtension ); SimpleDateFormat fmt = new SimpleDateFormat( "yyyy" ); String strYear = fmt.format( date ); fmt.applyPattern( "yyyyMM" ); String strMonth = fmt.format( date ); fmt.applyPattern( "yyyyMMdd" ); String strDate = fmt.format( date ); File yearDir = new File( volumeBaseDir, strYear ); System.err.println( "YearDir: " + yearDir ); if ( !yearDir.exists() ) { System.err.println( "making yeardir" ); if ( !yearDir.mkdir() ) { log.error( "Failed to create directory " + yearDir.getAbsoluteFile() ); } } // Create the month directeory if it does not exist yet File monthDir = new File ( yearDir, strMonth ); System.err.println( "MontDir: " + monthDir ); if ( !monthDir.exists() ) { System.err.println( "making yeardir" ); if ( !monthDir.mkdir() ) { log.error( "Failed to create " + monthDir.getAbsolutePath() ); } } // Find a free order num for this file String monthFiles[] = monthDir.list(); int orderNum = 1; for ( int n = 0; n < monthFiles.length; n++ ) { if ( monthFiles[n].startsWith( strDate ) ) { int delimiterLoc = monthFiles[n].indexOf( "." ); String strFileNum = monthFiles[n].substring( strDate.length()+1, delimiterLoc ); int i = 0; try { i = Integer.parseInt( strFileNum ); } catch ( NumberFormatException e ) {} if ( i >= orderNum ) { orderNum = i+1; } } } String strOrderNum = String.valueOf( orderNum ); // Find the file extension String fname = strDate + "_"+ "00000".substring( 0, 5-strOrderNum.length())+ strOrderNum + "." + strExtension; File archiveFile = new File( monthDir, fname ); return archiveFile; }
private File getNewFname( java.util.Date date, String strExtension ) { log.debug( "getNewFname " + date + " " + strExtension ); SimpleDateFormat fmt = new SimpleDateFormat( "yyyy" ); String strYear = fmt.format( date ); fmt.applyPattern( "yyyyMM" ); String strMonth = fmt.format( date ); fmt.applyPattern( "yyyyMMdd" ); String strDate = fmt.format( date ); File yearDir = new File( volumeBaseDir, strYear ); System.err.println( "YearDir: " + yearDir ); if ( !yearDir.exists() ) { System.err.println( "making yeardir" ); if ( !yearDir.mkdir() ) { log.error( "Failed to create directory " + yearDir.getAbsoluteFile() ); } } // Create the month directeory if it does not exist yet File monthDir = new File ( yearDir, strMonth ); System.err.println( "MontDir: " + monthDir ); if ( !monthDir.exists() ) { System.err.println( "making yeardir" ); if ( !monthDir.mkdir() ) { log.error( "Failed to create " + monthDir.getAbsolutePath() ); } } // Find a free order num for this file String monthFiles[] = monthDir.list(); int orderNum = 1; for ( int n = 0; n < monthFiles.length; n++ ) { if ( monthFiles[n].startsWith( strDate ) ) { int delimiterLoc = monthFiles[n].indexOf( "." ); String strFileNum = monthFiles[n].substring( strDate.length()+1, delimiterLoc ); int i = 0; try { i = Integer.parseInt( strFileNum ); } catch ( NumberFormatException e ) {} if ( i >= orderNum ) { orderNum = i+1; } } } String strOrderNum = String.valueOf( orderNum ); // Find the file extension String fname = strDate + "_"+ "00000".substring( 0, 5-strOrderNum.length())+ strOrderNum + "." + strExtension; File archiveFile = new File( monthDir, fname ); return archiveFile; }
__index_level_0__: 1,110,569
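This row and the two after it are one cleanup applied a call at a time: ad-hoc System.err.println debugging replaced with log.debug, keeping log.error for genuine failures, so the output is routed through the logging configuration instead of unconditionally hitting stderr. A sketch of the pattern, assuming Apache commons-logging (an assumption: its Log interface matches the log.debug/log.error calls used here, but the project's actual logging setup is not shown):

    import java.io.File;
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class DirMaker {
        private static final Log log = LogFactory.getLog(DirMaker.class);  // backend choice is an assumption

        static void ensureDir(File dir) {
            log.debug("ensuring directory: " + dir);   // filterable per category, unlike System.err.println
            if (!dir.exists() && !dir.mkdir()) {
                log.error("Failed to create directory " + dir.getAbsolutePath());
            }
        }
    }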
private File getNewFname( java.util.Date date, String strExtension ) { System.err.println( "getNewFname " + date + " " + strExtension ); SimpleDateFormat fmt = new SimpleDateFormat( "yyyy" ); String strYear = fmt.format( date ); fmt.applyPattern( "yyyyMM" ); String strMonth = fmt.format( date ); fmt.applyPattern( "yyyyMMdd" ); String strDate = fmt.format( date ); File yearDir = new File( volumeBaseDir, strYear ); System.err.println( "YearDir: " + yearDir ); if ( !yearDir.exists() ) { System.err.println( "making yeardir" ); if ( !yearDir.mkdir() ) { log.error( "Failed to create directory " + yearDir.getAbsoluteFile() ); } } // Create the month directeory if it does not exist yet File monthDir = new File ( yearDir, strMonth ); System.err.println( "MontDir: " + monthDir ); if ( !monthDir.exists() ) { System.err.println( "making yeardir" ); if ( !monthDir.mkdir() ) { log.error( "Failed to create " + monthDir.getAbsolutePath() ); } } // Find a free order num for this file String monthFiles[] = monthDir.list(); int orderNum = 1; for ( int n = 0; n < monthFiles.length; n++ ) { if ( monthFiles[n].startsWith( strDate ) ) { int delimiterLoc = monthFiles[n].indexOf( "." ); String strFileNum = monthFiles[n].substring( strDate.length()+1, delimiterLoc ); int i = 0; try { i = Integer.parseInt( strFileNum ); } catch ( NumberFormatException e ) {} if ( i >= orderNum ) { orderNum = i+1; } } } String strOrderNum = String.valueOf( orderNum ); // Find the file extension String fname = strDate + "_"+ "00000".substring( 0, 5-strOrderNum.length())+ strOrderNum + "." + strExtension; File archiveFile = new File( monthDir, fname ); return archiveFile; }
private File getNewFname( java.util.Date date, String strExtension ) { System.err.println( "getNewFname " + date + " " + strExtension ); SimpleDateFormat fmt = new SimpleDateFormat( "yyyy" ); String strYear = fmt.format( date ); fmt.applyPattern( "yyyyMM" ); String strMonth = fmt.format( date ); fmt.applyPattern( "yyyyMMdd" ); String strDate = fmt.format( date ); File yearDir = new File( volumeBaseDir, strYear ); log.debug( "YearDir: " + yearDir ); if ( !yearDir.exists() ) { System.err.println( "making yeardir" ); if ( !yearDir.mkdir() ) { log.error( "Failed to create directory " + yearDir.getAbsoluteFile() ); } } // Create the month directeory if it does not exist yet File monthDir = new File ( yearDir, strMonth ); System.err.println( "MontDir: " + monthDir ); if ( !monthDir.exists() ) { System.err.println( "making yeardir" ); if ( !monthDir.mkdir() ) { log.error( "Failed to create " + monthDir.getAbsolutePath() ); } } // Find a free order num for this file String monthFiles[] = monthDir.list(); int orderNum = 1; for ( int n = 0; n < monthFiles.length; n++ ) { if ( monthFiles[n].startsWith( strDate ) ) { int delimiterLoc = monthFiles[n].indexOf( "." ); String strFileNum = monthFiles[n].substring( strDate.length()+1, delimiterLoc ); int i = 0; try { i = Integer.parseInt( strFileNum ); } catch ( NumberFormatException e ) {} if ( i >= orderNum ) { orderNum = i+1; } } } String strOrderNum = String.valueOf( orderNum ); // Find the file extension String fname = strDate + "_"+ "00000".substring( 0, 5-strOrderNum.length())+ strOrderNum + "." + strExtension; File archiveFile = new File( monthDir, fname ); return archiveFile; }
1,110,570
private File getNewFname( java.util.Date date, String strExtension ) { System.err.println( "getNewFname " + date + " " + strExtension ); SimpleDateFormat fmt = new SimpleDateFormat( "yyyy" ); String strYear = fmt.format( date ); fmt.applyPattern( "yyyyMM" ); String strMonth = fmt.format( date ); fmt.applyPattern( "yyyyMMdd" ); String strDate = fmt.format( date ); File yearDir = new File( volumeBaseDir, strYear ); System.err.println( "YearDir: " + yearDir ); if ( !yearDir.exists() ) { System.err.println( "making yeardir" ); if ( !yearDir.mkdir() ) { log.error( "Failed to create directory " + yearDir.getAbsoluteFile() ); } } // Create the month directeory if it does not exist yet File monthDir = new File ( yearDir, strMonth ); System.err.println( "MontDir: " + monthDir ); if ( !monthDir.exists() ) { System.err.println( "making yeardir" ); if ( !monthDir.mkdir() ) { log.error( "Failed to create " + monthDir.getAbsolutePath() ); } } // Find a free order num for this file String monthFiles[] = monthDir.list(); int orderNum = 1; for ( int n = 0; n < monthFiles.length; n++ ) { if ( monthFiles[n].startsWith( strDate ) ) { int delimiterLoc = monthFiles[n].indexOf( "." ); String strFileNum = monthFiles[n].substring( strDate.length()+1, delimiterLoc ); int i = 0; try { i = Integer.parseInt( strFileNum ); } catch ( NumberFormatException e ) {} if ( i >= orderNum ) { orderNum = i+1; } } } String strOrderNum = String.valueOf( orderNum ); // Find the file extension String fname = strDate + "_"+ "00000".substring( 0, 5-strOrderNum.length())+ strOrderNum + "." + strExtension; File archiveFile = new File( monthDir, fname ); return archiveFile; }
private File getNewFname( java.util.Date date, String strExtension ) { System.err.println( "getNewFname " + date + " " + strExtension ); SimpleDateFormat fmt = new SimpleDateFormat( "yyyy" ); String strYear = fmt.format( date ); fmt.applyPattern( "yyyyMM" ); String strMonth = fmt.format( date ); fmt.applyPattern( "yyyyMMdd" ); String strDate = fmt.format( date ); File yearDir = new File( volumeBaseDir, strYear ); System.err.println( "YearDir: " + yearDir ); if ( !yearDir.exists() ) { log.debug( "making yeardir" ); if ( !yearDir.mkdir() ) { log.error( "Failed to create directory " + yearDir.getAbsoluteFile() ); } } // Create the month directeory if it does not exist yet File monthDir = new File ( yearDir, strMonth ); System.err.println( "MontDir: " + monthDir ); if ( !monthDir.exists() ) { log.debug( "making yeardir" ); if ( !monthDir.mkdir() ) { log.error( "Failed to create " + monthDir.getAbsolutePath() ); } } // Find a free order num for this file String monthFiles[] = monthDir.list(); int orderNum = 1; for ( int n = 0; n < monthFiles.length; n++ ) { if ( monthFiles[n].startsWith( strDate ) ) { int delimiterLoc = monthFiles[n].indexOf( "." ); String strFileNum = monthFiles[n].substring( strDate.length()+1, delimiterLoc ); int i = 0; try { i = Integer.parseInt( strFileNum ); } catch ( NumberFormatException e ) {} if ( i >= orderNum ) { orderNum = i+1; } } } String strOrderNum = String.valueOf( orderNum ); // Find the file extension String fname = strDate + "_"+ "00000".substring( 0, 5-strOrderNum.length())+ strOrderNum + "." + strExtension; File archiveFile = new File( monthDir, fname ); return archiveFile; }
1,110,571
private File getNewFname( java.util.Date date, String strExtension ) { System.err.println( "getNewFname " + date + " " + strExtension ); SimpleDateFormat fmt = new SimpleDateFormat( "yyyy" ); String strYear = fmt.format( date ); fmt.applyPattern( "yyyyMM" ); String strMonth = fmt.format( date ); fmt.applyPattern( "yyyyMMdd" ); String strDate = fmt.format( date ); File yearDir = new File( volumeBaseDir, strYear ); System.err.println( "YearDir: " + yearDir ); if ( !yearDir.exists() ) { System.err.println( "making yeardir" ); if ( !yearDir.mkdir() ) { log.error( "Failed to create directory " + yearDir.getAbsoluteFile() ); } } // Create the month directeory if it does not exist yet File monthDir = new File ( yearDir, strMonth ); System.err.println( "MontDir: " + monthDir ); if ( !monthDir.exists() ) { System.err.println( "making yeardir" ); if ( !monthDir.mkdir() ) { log.error( "Failed to create " + monthDir.getAbsolutePath() ); } } // Find a free order num for this file String monthFiles[] = monthDir.list(); int orderNum = 1; for ( int n = 0; n < monthFiles.length; n++ ) { if ( monthFiles[n].startsWith( strDate ) ) { int delimiterLoc = monthFiles[n].indexOf( "." ); String strFileNum = monthFiles[n].substring( strDate.length()+1, delimiterLoc ); int i = 0; try { i = Integer.parseInt( strFileNum ); } catch ( NumberFormatException e ) {} if ( i >= orderNum ) { orderNum = i+1; } } } String strOrderNum = String.valueOf( orderNum ); // Find the file extension String fname = strDate + "_"+ "00000".substring( 0, 5-strOrderNum.length())+ strOrderNum + "." + strExtension; File archiveFile = new File( monthDir, fname ); return archiveFile; }
private File getNewFname( java.util.Date date, String strExtension ) { System.err.println( "getNewFname " + date + " " + strExtension ); SimpleDateFormat fmt = new SimpleDateFormat( "yyyy" ); String strYear = fmt.format( date ); fmt.applyPattern( "yyyyMM" ); String strMonth = fmt.format( date ); fmt.applyPattern( "yyyyMMdd" ); String strDate = fmt.format( date ); File yearDir = new File( volumeBaseDir, strYear ); System.err.println( "YearDir: " + yearDir ); if ( !yearDir.exists() ) { System.err.println( "making yeardir" ); if ( !yearDir.mkdir() ) { log.error( "Failed to create directory " + yearDir.getAbsoluteFile() ); } } // Create the month directeory if it does not exist yet File monthDir = new File ( yearDir, strMonth ); log.debug( "MontDir: " + monthDir ); if ( !monthDir.exists() ) { System.err.println( "making yeardir" ); if ( !monthDir.mkdir() ) { log.error( "Failed to create " + monthDir.getAbsolutePath() ); } } // Find a free order num for this file String monthFiles[] = monthDir.list(); int orderNum = 1; for ( int n = 0; n < monthFiles.length; n++ ) { if ( monthFiles[n].startsWith( strDate ) ) { int delimiterLoc = monthFiles[n].indexOf( "." ); String strFileNum = monthFiles[n].substring( strDate.length()+1, delimiterLoc ); int i = 0; try { i = Integer.parseInt( strFileNum ); } catch ( NumberFormatException e ) {} if ( i >= orderNum ) { orderNum = i+1; } } } String strOrderNum = String.valueOf( orderNum ); // Find the file extension String fname = strDate + "_"+ "00000".substring( 0, 5-strOrderNum.length())+ strOrderNum + "." + strExtension; File archiveFile = new File( monthDir, fname ); return archiveFile; }
1,110,572
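The four getNewFname rows above differ only in which System.err.println is promoted to log.debug; the archive-naming logic itself is unchanged: files land under volumeBaseDir/yyyy/yyyyMM/ as yyyyMMdd_NNNNN.ext, where NNNNN is one higher than the largest existing suffix for that date, left-padded to five digits via "00000".substring(0, 5-strOrderNum.length()). A self-contained sketch of that padding step, using String.format as a hypothetical equivalent (not taken from the rows above):

import java.text.SimpleDateFormat;
import java.util.Date;

public class ArchiveNameSketch {
    public static void main(String[] args) {
        String strDate = new SimpleDateFormat("yyyyMMdd").format(new Date());
        int orderNum = 7; // one higher than the largest suffix already on disk for strDate
        // Same result as "00000".substring(0, 5 - String.valueOf(orderNum).length()) + orderNum
        String fname = strDate + "_" + String.format("%05d", orderNum) + ".jpg";
        System.out.println(fname); // e.g. 20240101_00007.jpg
    }
}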
public String getRatios() { //if the array is null this is a block-title node, not an actual hap if (counts == null){ return (""); } nf.setMinimumFractionDigits(1); nf.setMaximumFractionDigits(1); for(int i= 0;i<counts.length;i++) { for(int j= 0;j<counts[i].length;j++) { counts[i][j] = (new Double(nf.format(counts[i][j]))).doubleValue(); } } if (counts.length == 1){ //TDT if (this.counts[0][0] > this.counts[0][1]){ return this.counts[0][0] + " : " + this.counts[0][1]; }else{ return this.counts[0][1] + " : " + this.counts[0][0]; } }else{ //case-control if (this.counts[0][0] > this.counts[0][1]){ if (this.counts[1][0] > this.counts[1][1]){ return this.counts[0][0] + " : " + this.counts[0][1] + ", " + this.counts[1][0] + " : " + this.counts[1][1]; }else{ return this.counts[0][0] + " : " + this.counts[0][1] + "," + this.counts[1][1] + " : " + this.counts[1][0]; } }else{ if (this.counts[1][0] > this.counts[1][1]){ return this.counts[0][1] + " : " + this.counts[0][0] + ", " + this.counts[1][0] + " : " + this.counts[1][1]; }else{ return this.counts[0][1] + " : " + this.counts[0][0] + "," + this.counts[1][1] + " : " + this.counts[1][0]; } } } }
public String getRatios() { //if the array is null this is a block-title node, not an actual hap if (counts == null){ return (""); } nf.setMinimumFractionDigits(1); nf.setMaximumFractionDigits(1); for(int i= 0;i<counts.length;i++) { for(int j= 0;j<counts[i].length;j++) { counts[i][j] = (new Double(nf.format(counts[i][j]))).doubleValue(); } } if (counts.length == 1){ //TDT if (this.counts[0][0] > this.counts[0][1]){ return this.counts[0][0] + " : " + this.counts[0][1]; }else{ return this.counts[0][1] + " : " + this.counts[0][0]; } }else{ //case-control if (this.counts[0][0] > this.counts[0][1]){ if (this.counts[1][0] > this.counts[1][1]){ return this.counts[0][0] + " : " + this.counts[0][1] + ", " + this.counts[1][0] + " : " + this.counts[1][1]; }else{ return this.counts[0][0] + " : " + this.counts[0][1] + "," + this.counts[1][1] + " : " + this.counts[1][0]; } }else{ if (this.counts[1][0] > this.counts[1][1]){ return this.counts[0][1] + " : " + this.counts[0][0] + ", " + this.counts[1][0] + " : " + this.counts[1][1]; }else{ return this.counts[0][1] + " : " + this.counts[0][0] + "," + this.counts[1][1] + " : " + this.counts[1][0]; } } } }
1,110,573
public void mouseClicked(MouseEvent evt) { if (evt.getClickCount() == 2) { ArchitectFrame.getMainInstance().editRelationshipAction.actionPerformed (new ActionEvent(evt.getSource(), ActionEvent.ACTION_PERFORMED, "DoubleClick")); } }
public void mouseClicked(MouseEvent evt) { if (evt.getClickCount() == 2) { ArchitectFrame.getMainInstance().editRelationshipAction.actionPerformed (new ActionEvent(evt.getSource(), ActionEvent.ACTION_PERFORMED, ArchitectSwingConstants.ACTION_COMMAND_SRC_PLAYPEN)); } }
1,110,576
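The mouseClicked row above replaces the ad-hoc "DoubleClick" action command with a shared constant, so the receiving Action can tell which UI surface invoked it. A minimal sketch of dispatching on such a constant (the class name and constant value here are stand-ins, not taken from the rows above):

import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;

class CommandAwareListener implements ActionListener {
    // Stand-in for ArchitectSwingConstants.ACTION_COMMAND_SRC_PLAYPEN
    static final String SRC_PLAYPEN = "Playpen";

    public void actionPerformed(ActionEvent evt) {
        if (SRC_PLAYPEN.equals(evt.getActionCommand())) {
            System.out.println("invoked by a double-click in the playpen");
        } else {
            System.out.println("invoked from a menu or toolbar");
        }
    }
}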
public PhotoCollection getCollection() { return photoCollection; }
public PhotoCollection getCollection() { if ( photoCollection != null ) { return photoCollection.getOrigCollection(); } return null; }
1,110,577
public Tag createTag ( ) throws Exception { return new ConstraintTag ( this ); }
public Tag createTag(String name, Attributes attributes) throws Exception { return new ConstraintTag ( this ); }
1,110,578
public Tag createTag ( ) { return new ConstraintTag ( this ); // still scratching my head about "this" usage... }
public Tag createTag(String name, Attributes attributes) { return new ConstraintTag ( this ); // still scratching my head about "this" usage... }
1,110,579
public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception { ApplicationForm appForm = (ApplicationForm)actionForm; ApplicationConfig config = ApplicationConfigManager.getApplicationConfig( appForm.getApplicationId()); assert config != null; config.setName(appForm.getName()); config.setHost(appForm.getHost()); if(appForm.getPort() != null) config.setPort(new Integer(appForm.getPort())); config.setUsername(appForm.getUsername()); final String password = appForm.getPassword(); if(password != null && !password.equals(config.getPassword())){ config.setPassword(password); } ApplicationConfigManager.updateApplication(config); UserActivityLogger.getInstance().logActivity( context.getUser().getUsername(), "Updated application with ID "+config.getApplicationId()); return mapping.findForward(Forwards.SUCCESS); }
public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception { ApplicationForm appForm = (ApplicationForm)actionForm; ApplicationConfig config = ApplicationConfigManager.getApplicationConfig( appForm.getApplicationId()); assert config != null; config.setName(appForm.getName()); config.setHost(appForm.getHost()); if(appForm.getPort() != null) config.setPort(new Integer(appForm.getPort())); config.setUsername(appForm.getUsername()); final String password = appForm.getPassword(); if(password != null && !password.equals(config.getPassword())){ config.setPassword(password); } ApplicationConfigManager.updateApplication(config); UserActivityLogger.getInstance().logActivity( context.getUser().getUsername(), "Updated application "+"\""+config.getName()+"\""); return mapping.findForward(Forwards.SUCCESS); }
1,110,581
public String toIdentifier(String logicalName, String physicalName) { // replace spaces with underscores if (logicalName == null) return null; logger.debug("getting physical name for: " + logicalName); String ident = logicalName.replace(' ','_'); logger.debug("after replace of spaces: " + ident); // see if it's a reserved word, and add something alpha to front if it is... if (isReservedWord(ident)) { ident = "X" + ident; logger.debug("identifier was reserved word, appending X: " + ident); } // replace anything that is not a letter, character, or underscore with an underscore... ident = ident.replaceAll("[^a-zA-Z0-9_@$#]", "_"); // first time through if (physicalName == null) { // length is ok if (ident.length() < 129) { return ident; } else { // length is too big logger.debug("truncating identifier: " + ident); String base = ident.substring(0,125); int tiebreaker = ((ident.hashCode() % 1000) + 1000) % 1000; logger.debug("new identifier: " + base + tiebreaker); return (base + tiebreaker); } } else { // back for more, which means that we probably // had a namespace conflict. Hack the ident down // to size if it's too big, and then generate // a hash tiebreaker using the ident and the // passed value physicalName logger.debug("physical identifier is not unique, regenerating: " + physicalName); String base = ident; if (ident.length() > 125) { base = ident.substring(0,125); } int tiebreaker = (((ident + physicalName).hashCode() % 1000) + 1000) % 1000; logger.debug("regenerated identifier is: " + (base + tiebreaker)); return (base + tiebreaker); } }
private String toIdentifier(String logicalName, String physicalName) { // replace spaces with underscores if (logicalName == null) return null; logger.debug("getting physical name for: " + logicalName); String ident = logicalName.replace(' ','_'); logger.debug("after replace of spaces: " + ident); // see if it's a reserved word, and add something alpha to front if it is... if (isReservedWord(ident)) { ident = "X" + ident; logger.debug("identifier was reserved word, appending X: " + ident); } // replace anything that is not a letter, character, or underscore with an underscore... ident = ident.replaceAll("[^a-zA-Z0-9_@$#]", "_"); // first time through if (physicalName == null) { // length is ok if (ident.length() < 129) { return ident; } else { // length is too big logger.debug("truncating identifier: " + ident); String base = ident.substring(0,125); int tiebreaker = ((ident.hashCode() % 1000) + 1000) % 1000; logger.debug("new identifier: " + base + tiebreaker); return (base + tiebreaker); } } else { // back for more, which means that we probably // had a namespace conflict. Hack the ident down // to size if it's too big, and then generate // a hash tiebreaker using the ident and the // passed value physicalName logger.debug("physical identifier is not unique, regenerating: " + physicalName); String base = ident; if (ident.length() > 125) { base = ident.substring(0,125); } int tiebreaker = (((ident + physicalName).hashCode() % 1000) + 1000) % 1000; logger.debug("regenerated identifier is: " + (base + tiebreaker)); return (base + tiebreaker); } }
1,110,582
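The toIdentifier row above narrows the method's visibility; the tiebreaker arithmetic it keeps is worth spelling out: String.hashCode() can be negative in Java, so a plain % 1000 can yield a negative suffix, and ((hash % 1000) + 1000) % 1000 folds it into 0..999. A runnable check of the idiom:

public class TiebreakerDemo {
    public static void main(String[] args) {
        String ident = "some_very_long_identifier";
        int raw = ident.hashCode() % 1000;                           // may be negative
        int tiebreaker = ((ident.hashCode() % 1000) + 1000) % 1000; // always 0..999
        System.out.println(raw + " -> " + tiebreaker);
        // Math.floorMod(ident.hashCode(), 1000) gives the same value on Java 8+.
    }
}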
public void doTag(XMLOutput output) throws JellyTagException { StylesheetTag tag = (StylesheetTag) findAncestorWithClass( StylesheetTag.class ); if (tag == null) { throw new JellyTagException( "<applyTemplates> tag must be inside a <stylesheet> tag" ); } Stylesheet stylesheet = tag.getStylesheet(); XMLOutput oldOutput = tag.getStylesheetOutput(); tag.setStylesheetOutput(output); Object source = tag.getXPathSource(); // for some reason, these DOM4J methods only throw Exception try { if ( select != null ) { stylesheet.applyTemplates( source, select ); } else { stylesheet.applyTemplates( source ); } } catch (Exception e) { throw new JellyTagException(e); } tag.setStylesheetOutput(oldOutput); // #### should support MODE!!! }
public void doTag(XMLOutput output) throws JellyTagException { StylesheetTag tag = (StylesheetTag) findAncestorWithClass( StylesheetTag.class ); if (tag == null) { throw new JellyTagException( "<applyTemplates> tag must be inside a <stylesheet> tag" ); } Stylesheet stylesheet = tag.getStylesheet(); XMLOutput oldOutput = tag.getStylesheetOutput(); tag.setStylesheetOutput(output); Object source = tag.getXPathSource(); // for some reason, these DOM4J methods only throw Exception try { if ( select != null ) { stylesheet.applyTemplates( source, select, mode ); } else { stylesheet.applyTemplates( source ); } } catch (Exception e) { throw new JellyTagException(e); } tag.setStylesheetOutput(oldOutput); // #### should support MODE!!! }
1,110,583
public void doTag(XMLOutput output) throws JellyTagException { StylesheetTag tag = (StylesheetTag) findAncestorWithClass( StylesheetTag.class ); if (tag == null) { throw new JellyTagException( "<applyTemplates> tag must be inside a <stylesheet> tag" ); } Stylesheet stylesheet = tag.getStylesheet(); XMLOutput oldOutput = tag.getStylesheetOutput(); tag.setStylesheetOutput(output); Object source = tag.getXPathSource(); // for some reason, these DOM4J methods only throw Exception try { if ( select != null ) { stylesheet.applyTemplates( source, select ); } else { stylesheet.applyTemplates( source ); } } catch (Exception e) { throw new JellyTagException(e); } tag.setStylesheetOutput(oldOutput); // #### should support MODE!!! }
public void doTag(XMLOutput output) throws JellyTagException { StylesheetTag tag = (StylesheetTag) findAncestorWithClass( StylesheetTag.class ); if (tag == null) { throw new JellyTagException( "<applyTemplates> tag must be inside a <stylesheet> tag" ); } Stylesheet stylesheet = tag.getStylesheet(); XMLOutput oldOutput = tag.getStylesheetOutput(); tag.setStylesheetOutput(output); Object source = tag.getXPathSource(); // for some reason, these DOM4J methods only throw Exception try { if ( select != null ) { stylesheet.applyTemplates( source, select ); } else { stylesheet.applyTemplates( source, mode ); } } catch (Exception e) { throw new JellyTagException(e); } tag.setStylesheetOutput(oldOutput); // #### should support MODE!!! }
1,110,584
public static void setDataChrom(String chrom) { dataChrom = chrom.toLowerCase(); }
public static void setDataChrom(String chrom) { if (chrom != null){ dataChrom = chrom.toLowerCase(); }else{ dataChrom = null; } }
1,110,585
public static Vector calcCCTDT(Vector chromosomes){ Vector results = new Vector(); int numMarkers = Chromosome.getUnfilteredSize(); for (int i = 0; i < numMarkers; i++){ TDTResult thisResult = new TDTResult(Chromosome.getUnfilteredMarker(i)); for (int j = 0; j < chromosomes.size()-1; j++){ Chromosome theChrom = (Chromosome)chromosomes.get(j); j++; Chromosome nextChrom = (Chromosome)chromosomes.get(j); if (theChrom.getAffected()){ thisResult.tallyCCInd(theChrom.getUnfilteredGenotype(i), nextChrom.getUnfilteredGenotype(i), 0); }else{ thisResult.tallyCCInd(theChrom.getUnfilteredGenotype(i), nextChrom.getUnfilteredGenotype(i), 1); } } results.add(thisResult); } return results; }
public static Vector calcCCTDT(PedFile pf){ Vector results = new Vector(); int numMarkers = Chromosome.getUnfilteredSize(); for (int i = 0; i < numMarkers; i++){ TDTResult thisResult = new TDTResult(Chromosome.getUnfilteredMarker(i)); for (int j = 0; j < chromosomes.size()-1; j++){ Chromosome theChrom = (Chromosome)chromosomes.get(j); j++; Chromosome nextChrom = (Chromosome)chromosomes.get(j); if (theChrom.getAffected()){ thisResult.tallyCCInd(theChrom.getUnfilteredGenotype(i), nextChrom.getUnfilteredGenotype(i), 0); }else{ thisResult.tallyCCInd(theChrom.getUnfilteredGenotype(i), nextChrom.getUnfilteredGenotype(i), 1); } } results.add(thisResult); } return results; }
1,110,586
public static Vector calcCCTDT(Vector chromosomes){ Vector results = new Vector(); int numMarkers = Chromosome.getUnfilteredSize(); for (int i = 0; i < numMarkers; i++){ TDTResult thisResult = new TDTResult(Chromosome.getUnfilteredMarker(i)); for (int j = 0; j < chromosomes.size()-1; j++){ Chromosome theChrom = (Chromosome)chromosomes.get(j); j++; Chromosome nextChrom = (Chromosome)chromosomes.get(j); if (theChrom.getAffected()){ thisResult.tallyCCInd(theChrom.getUnfilteredGenotype(i), nextChrom.getUnfilteredGenotype(i), 0); }else{ thisResult.tallyCCInd(theChrom.getUnfilteredGenotype(i), nextChrom.getUnfilteredGenotype(i), 1); } } results.add(thisResult); } return results; }
public static Vector calcCCTDT(Vector chromosomes){ Vector results = new Vector(); int numMarkers = Chromosome.getUnfilteredSize(); for (int i = 0; i < numMarkers; i++){ TDTResult thisResult = new TDTResult(Chromosome.getUnfilteredMarker(i)); for (int j = 0; j < chromosomes.size()-1; j++){ Chromosome theChrom = (Chromosome)chromosomes.get(j); j++; Chromosome nextChrom = (Chromosome)chromosomes.get(j); if (theChrom.getAffected()){ thisResult.tallyCCInd(theChrom.getUnfilteredGenotype(i), nextChrom.getUnfilteredGenotype(i), 0); }else{ thisResult.tallyCCInd(theChrom.getUnfilteredGenotype(i), nextChrom.getUnfilteredGenotype(i), 1); } } results.add(thisResult); } return results; }
1,110,587
public static Vector calcTrioTDT(Vector chromosomes) { Vector results = new Vector(); int numMarkers = Chromosome.getUnfilteredSize(); for(int k=0;k<numMarkers;k++){ results.add(new TDTResult(Chromosome.getUnfilteredMarker(k))); } for(int i=0;i<chromosomes.size()-3;i++){ Chromosome chrom1T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom1U = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2U = (Chromosome)chromosomes.get(i); //System.out.println("ind1T: " + chrom1T.getPed() + "\t" + chrom1T.getIndividual() ); //System.out.println("ind1U: " + chrom1U.getPed() + "\t" + chrom1U.getIndividual() ); //System.out.println("ind2T: " + chrom2T.getPed() + "\t" + chrom2T.getIndividual() ); //System.out.println("ind2U: " + chrom2U.getPed() + "\t" + chrom2U.getIndividual() ); for(int j=0;j<numMarkers;j++){ if(!chrom1T.kidMissing[j] && !chrom2T.kidMissing[j]) { byte allele1T = chrom1T.getUnfilteredGenotype(j); byte allele1U = chrom1U.getUnfilteredGenotype(j); byte allele2T = chrom2T.getUnfilteredGenotype(j); byte allele2U = chrom2U.getUnfilteredGenotype(j); if( !(allele1T == 0 || allele1U == 0 || allele2T == 0 || allele2U == 0) ){ TDTResult curRes = (TDTResult)results.get(j); curRes.tallyTrioInd(allele1T,allele1U); curRes.tallyTrioInd(allele2T,allele2U); } } } } return results; }
public static Vector calcTrioTDT(PedFile pf) throws PedFileException{ Vector results = new Vector(); int numMarkers = Chromosome.getUnfilteredSize(); for(int k=0;k<numMarkers;k++){ results.add(new TDTResult(Chromosome.getUnfilteredMarker(k))); } for(int i=0;i<chromosomes.size()-3;i++){ Chromosome chrom1T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom1U = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2U = (Chromosome)chromosomes.get(i); //System.out.println("ind1T: " + chrom1T.getPed() + "\t" + chrom1T.getIndividual() ); //System.out.println("ind1U: " + chrom1U.getPed() + "\t" + chrom1U.getIndividual() ); //System.out.println("ind2T: " + chrom2T.getPed() + "\t" + chrom2T.getIndividual() ); //System.out.println("ind2U: " + chrom2U.getPed() + "\t" + chrom2U.getIndividual() ); for(int j=0;j<numMarkers;j++){ if(!chrom1T.kidMissing[j] && !chrom2T.kidMissing[j]) { byte allele1T = chrom1T.getUnfilteredGenotype(j); byte allele1U = chrom1U.getUnfilteredGenotype(j); byte allele2T = chrom2T.getUnfilteredGenotype(j); byte allele2U = chrom2U.getUnfilteredGenotype(j); if( !(allele1T == 0 || allele1U == 0 || allele2T == 0 || allele2U == 0) ){ TDTResult curRes = (TDTResult)results.get(j); curRes.tallyTrioInd(allele1T,allele1U); curRes.tallyTrioInd(allele2T,allele2U); } } } } return results; }
1,110,588
public static Vector calcTrioTDT(Vector chromosomes) { Vector results = new Vector(); int numMarkers = Chromosome.getUnfilteredSize(); for(int k=0;k<numMarkers;k++){ results.add(new TDTResult(Chromosome.getUnfilteredMarker(k))); } for(int i=0;i<chromosomes.size()-3;i++){ Chromosome chrom1T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom1U = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2U = (Chromosome)chromosomes.get(i); //System.out.println("ind1T: " + chrom1T.getPed() + "\t" + chrom1T.getIndividual() ); //System.out.println("ind1U: " + chrom1U.getPed() + "\t" + chrom1U.getIndividual() ); //System.out.println("ind2T: " + chrom2T.getPed() + "\t" + chrom2T.getIndividual() ); //System.out.println("ind2U: " + chrom2U.getPed() + "\t" + chrom2U.getIndividual() ); for(int j=0;j<numMarkers;j++){ if(!chrom1T.kidMissing[j] && !chrom2T.kidMissing[j]) { byte allele1T = chrom1T.getUnfilteredGenotype(j); byte allele1U = chrom1U.getUnfilteredGenotype(j); byte allele2T = chrom2T.getUnfilteredGenotype(j); byte allele2U = chrom2U.getUnfilteredGenotype(j); if( !(allele1T == 0 || allele1U == 0 || allele2T == 0 || allele2U == 0) ){ TDTResult curRes = (TDTResult)results.get(j); curRes.tallyTrioInd(allele1T,allele1U); curRes.tallyTrioInd(allele2T,allele2U); } } } } return results; }
public static Vector calcTrioTDT(Vector chromosomes) { Vector results = new Vector(); int numMarkers = Chromosome.getUnfilteredSize(); for(int k=0;k<numMarkers;k++){ results.add(new TDTResult(Chromosome.getUnfilteredMarker(k))); } for(int i=0;i<chromosomes.size()-3;i++){ Chromosome chrom1T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom1U = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2U = (Chromosome)chromosomes.get(i); //System.out.println("ind1T: " + chrom1T.getPed() + "\t" + chrom1T.getIndividual() ); //System.out.println("ind1U: " + chrom1U.getPed() + "\t" + chrom1U.getIndividual() ); //System.out.println("ind2T: " + chrom2T.getPed() + "\t" + chrom2T.getIndividual() ); //System.out.println("ind2U: " + chrom2U.getPed() + "\t" + chrom2U.getIndividual() ); for(int j=0;j<numMarkers;j++){ if(!chrom1T.kidMissing[j] && !chrom2T.kidMissing[j]) { byte allele1T = chrom1T.getUnfilteredGenotype(j); byte allele1U = chrom1U.getUnfilteredGenotype(j); byte allele2T = chrom2T.getUnfilteredGenotype(j); byte allele2U = chrom2U.getUnfilteredGenotype(j); if( !(allele1T == 0 || allele1U == 0 || allele2T == 0 || allele2U == 0) ){ TDTResult curRes = (TDTResult)results.get(j); curRes.tallyTrioInd(allele1T,allele1U); curRes.tallyTrioInd(allele2T,allele2U); } } } } return results; }
1,110,589
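The calcTrioTDT rows walk the chromosomes vector four entries at a time (transmitted and untransmitted chromosome for each of the two parents), advancing the loop index inside the body rather than in the for-step. A small sketch of that stride-4 traversal on plain strings, with made-up labels:

import java.util.Vector;

public class StrideFourDemo {
    public static void main(String[] args) {
        Vector<String> chromosomes = new Vector<String>();
        for (int n = 0; n < 8; n++) chromosomes.add("chrom" + n);
        // Mirrors the rows above: each trio contributes 4 consecutive entries.
        for (int i = 0; i < chromosomes.size() - 3; i++) {
            String t1 = chromosomes.get(i); i++;
            String u1 = chromosomes.get(i); i++;
            String t2 = chromosomes.get(i); i++;
            String u2 = chromosomes.get(i);
            System.out.println(t1 + "," + u1 + " / " + t2 + "," + u2);
        }
    }
}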
public static Vector calcTrioTDT(Vector chromosomes) { Vector results = new Vector(); int numMarkers = Chromosome.getUnfilteredSize(); for(int k=0;k<numMarkers;k++){ results.add(new TDTResult(Chromosome.getUnfilteredMarker(k))); results.add(thisResult); } for(int i=0;i<chromosomes.size()-3;i++){ Chromosome chrom1T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom1U = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2U = (Chromosome)chromosomes.get(i); //System.out.println("ind1T: " + chrom1T.getPed() + "\t" + chrom1T.getIndividual() ); //System.out.println("ind1U: " + chrom1U.getPed() + "\t" + chrom1U.getIndividual() ); //System.out.println("ind2T: " + chrom2T.getPed() + "\t" + chrom2T.getIndividual() ); //System.out.println("ind2U: " + chrom2U.getPed() + "\t" + chrom2U.getIndividual() ); for(int j=0;j<numMarkers;j++){ if(!chrom1T.kidMissing[j] && !chrom2T.kidMissing[j]) { byte allele1T = chrom1T.getUnfilteredGenotype(j); byte allele1U = chrom1U.getUnfilteredGenotype(j); byte allele2T = chrom2T.getUnfilteredGenotype(j); byte allele2U = chrom2U.getUnfilteredGenotype(j); if( !(allele1T == 0 || allele1U == 0 || allele2T == 0 || allele2U == 0) ){ TDTResult curRes = (TDTResult)results.get(j); curRes.tallyTrioInd(allele1T,allele1U); curRes.tallyTrioInd(allele2T,allele2U); results.add(thisResult); } results.add(thisResult); } results.add(thisResult); } results.add(thisResult); } return results; results.add(thisResult); }
public static Vector calcTrioTDT(Vector chromosomes) { Vector results = new Vector(); int numMarkers = Chromosome.getUnfilteredSize(); for(int k=0;k<numMarkers;k++){ results.add(new TDTResult(Chromosome.getUnfilteredMarker(k))); } for(int i=0;i<chromosomes.size()-3;i++){ Chromosome chrom1T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom1U = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2U = (Chromosome)chromosomes.get(i); //System.out.println("ind1T: " + chrom1T.getPed() + "\t" + chrom1T.getIndividual() ); //System.out.println("ind1U: " + chrom1U.getPed() + "\t" + chrom1U.getIndividual() ); //System.out.println("ind2T: " + chrom2T.getPed() + "\t" + chrom2T.getIndividual() ); //System.out.println("ind2U: " + chrom2U.getPed() + "\t" + chrom2U.getIndividual() ); for(int j=0;j<numMarkers;j++){ if(!chrom1T.kidMissing[j] && !chrom2T.kidMissing[j]) { byte allele1T = chrom1T.getUnfilteredGenotype(j); byte allele1U = chrom1U.getUnfilteredGenotype(j); byte allele2T = chrom2T.getUnfilteredGenotype(j); byte allele2U = chrom2U.getUnfilteredGenotype(j); if( !(allele1T == 0 || allele1U == 0 || allele2T == 0 || allele2U == 0) ){ TDTResult curRes = (TDTResult)results.get(j); curRes.tallyTrioInd(allele1T,allele1U); curRes.tallyTrioInd(allele2T,allele2U); } } } } return results; }
1,110,590
public Statement createStatement() throws SQLException { return connection.createStatement(); }
public Statement createStatement() throws SQLException { return connection.createStatement(); }
1,110,591
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; //double[] prob = new double[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); } } probMap.normalize(total); iter++; } Iterator pitr = probMap.getKeySet().iterator(); int m=0; while(pitr.hasNext()) { Long next = (Long) pitr.next(); hint[next.intValue()]=-1; //todo: this hard coded threshold suxorz (probably elsewhere too) if (probMap.get(next) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=next.intValue(); hprob[block][m]=probMap.get(next); 
hint[next.intValue()]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total=(double)poss_full; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; doAssociationTests(affStatus, null); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (fullProbMap.get(new Long(j)) > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(new Long(j)))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = 
(int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; long poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; //double[] prob = new double[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); } } probMap.normalize(total); iter++; } Iterator pitr = probMap.getKeySet().iterator(); int m=0; while(pitr.hasNext()) { Long next = (Long) pitr.next(); hint[next.intValue()]=-1; //todo: this hard coded threshold suxorz (probably elsewhere too) if (probMap.get(next) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=next.intValue(); hprob[block][m]=probMap.get(next); 
hint[next.intValue()]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total=(double)poss_full; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; doAssociationTests(affStatus, null); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (fullProbMap.get(new Long(j)) > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(new Long(j)))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = 
(int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
1,110,592
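Both full_em_breakup variants above repeat the same EM bookkeeping: every possible haplotype starts at a small pseudocount, phase-known observations add full counts, and the map is then normalized by the running total so probabilities sum to one. A self-contained sketch of that seed-count-normalize pattern using a plain HashMap in place of the project's MapWrap (which is not shown in these rows):

import java.util.HashMap;
import java.util.Map;

public class PseudocountEMStep {
    static final double PSEUDOCOUNT = 0.1;

    public static void main(String[] args) {
        int numPoss = 4;                      // possible haplotype codes for a block
        long[] phaseKnown = {0L, 0L, 3L};     // unambiguous observations
        Map<Long, Double> prob = new HashMap<Long, Double>();
        double total = numPoss * PSEUDOCOUNT; // every code implicitly starts at PSEUDOCOUNT
        for (long h : phaseKnown) {
            Double cur = prob.get(h);
            prob.put(h, (cur == null ? PSEUDOCOUNT : cur) + 1.0);
            total += 1.0;
        }
        for (Map.Entry<Long, Double> e : prob.entrySet()) {
            e.setValue(e.getValue() / total); // normalize the codes actually seen
        }
        // Codes never observed keep the implicit probability PSEUDOCOUNT / total.
        System.out.println(prob + ", unseen code prob = " + (PSEUDOCOUNT / total));
    }
}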
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; //double[] prob = new double[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); } } probMap.normalize(total); iter++; } Iterator pitr = probMap.getKeySet().iterator(); int m=0; while(pitr.hasNext()) { Long next = (Long) pitr.next(); hint[next.intValue()]=-1; //todo: this hard coded threshold suxorz (probably elsewhere too) if (probMap.get(next) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=next.intValue(); hprob[block][m]=probMap.get(next); 
hint[next.intValue()]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total=(double)poss_full; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; doAssociationTests(affStatus, null); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (fullProbMap.get(new Long(j)) > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(new Long(j)))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = 
(int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; //double[] prob = new double[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); } } probMap.normalize(total); iter++; } Iterator pitr = probMap.getKeySet().iterator(); int m=0; while(pitr.hasNext()) { Long next = (Long) pitr.next(); hint[next.intValue()]=-1; //todo: this hard coded threshold suxorz (probably elsewhere too) if (probMap.get(next) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=next.intValue(); hprob[block][m]=probMap.get(next); hint[next.intValue()]=m; m++; } } num_hlist[block]=m; // store
current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total=(double)poss_full; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; doAssociationTests(affStatus, null); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (fullProbMap.get(new Long(j)) > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(new Long(j)))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if 
(dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
1,110,593
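Both copies of full_em_breakup above lean on a small MapWrap helper that is not shown in this excerpt. From the call sites (new MapWrap(PSEUDOCOUNT), get/put keyed by Long, normalize(total), getKeySet()) it behaves like a hash map with a default value for absent keys. The following is a minimal sketch under that assumption; the class body, and in particular whether normalize() also scales the default value, are inferred rather than confirmed by the source.
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

// Minimal sketch of MapWrap as used above. The default value stands in for
// the pseudocount assigned to haplotypes that were never observed.
class MapWrap {
    private final Map<Long, Double> map = new HashMap<Long, Double>();
    private double defaultValue;

    MapWrap(double defaultValue) { this.defaultValue = defaultValue; }

    double get(Long key) {
        Double v = map.get(key);
        return (v == null) ? defaultValue : v.doubleValue();
    }

    void put(Long key, double value) { map.put(key, new Double(value)); }

    // Turn accumulated counts into frequencies by dividing through by the
    // running total the caller maintained alongside its put() calls.
    void normalize(double total) {
        for (Map.Entry<Long, Double> e : map.entrySet()) {
            e.setValue(new Double(e.getValue().doubleValue() / total));
        }
        defaultValue /= total; // assumption: unseen keys are normalized too
    }

    Set<Long> getKeySet() { return map.keySet(); }
}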
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; //double[] prob = new double[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); } } probMap.normalize(total); iter++; } Iterator pitr = probMap.getKeySet().iterator(); int m=0; while(pitr.hasNext()) { Long next = (Long) pitr.next(); hint[next.intValue()]=-1; //todo: this hard coded threshold suxorz (probably elsewhere too) if (probMap.get(next) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=next.intValue(); hprob[block][m]=probMap.get(next); 
hint[next.intValue()]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total=(double)poss_full; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; doAssociationTests(affStatus, null); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (fullProbMap.get(new Long(j)) > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(new Long(j)))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = 
(int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; //double[] prob = new double[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); } } probMap.normalize(total); iter++; } Iterator pitr = probMap.getKeySet().iterator(); int m=0; while(pitr.hasNext()) { Long next = (Long) pitr.next(); hint[next.intValue()]=-1; //todo: this hard coded threshold suxorz (probably elsewhere too) if (probMap.get(next) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=next.intValue(); hprob[block][m]=probMap.get(next); 
hint[next.intValue()]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total=(double)poss_full; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; doAssociationTests(affStatus, null); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (fullProbMap.get(new Long(j)) > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(new Long(j)))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = 
(int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
1,110,594
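The 20-iteration loop in each copy is a textbook EM update: the E-step scores every candidate phasing of an individual by the product of its two haplotype frequencies and normalizes per individual, and the M-step re-accumulates those posterior weights into new frequency estimates. A self-contained toy version of one such loop, with one invented two-candidate individual, looks like this (all names and data here are illustrative only):
import java.util.HashMap;
import java.util.Map;

public class TinyEmStep {
    public static void main(String[] args) {
        // One individual whose genotype is consistent with two phasings:
        // haplotype pair (0,3) or pair (1,2). Codes mirror the Long keys above.
        long[][] candidates = { {0L, 3L}, {1L, 2L} };
        Map<Long, Double> freq = new HashMap<Long, Double>();
        // A slight initial skew, standing in for phase-known counts, lets EM
        // lock onto the (0,3) phasing instead of staying symmetric forever.
        freq.put(new Long(0), new Double(0.3));
        freq.put(new Long(3), new Double(0.3));
        freq.put(new Long(1), new Double(0.2));
        freq.put(new Long(2), new Double(0.2));
        for (int iter = 0; iter < 20; iter++) {
            // E-step: weight each phasing by the current frequency estimates.
            double total = 0.0;
            double[] p = new double[candidates.length];
            for (int k = 0; k < candidates.length; k++) {
                p[k] = freq.get(new Long(candidates[k][0])).doubleValue()
                     * freq.get(new Long(candidates[k][1])).doubleValue();
                total += p[k];
            }
            // M-step: re-accumulate frequencies from the posterior weights.
            Map<Long, Double> next = new HashMap<Long, Double>();
            double grand = 0.0;
            for (int k = 0; k < candidates.length; k++) {
                double w = p[k] / total;
                for (long h : candidates[k]) {
                    Double cur = next.get(new Long(h));
                    next.put(new Long(h),
                             new Double((cur == null ? 0.0 : cur.doubleValue()) + w));
                }
                grand += 2.0 * w;
            }
            for (Map.Entry<Long, Double> e : next.entrySet()) {
                e.setValue(new Double(e.getValue().doubleValue() / grand));
            }
            freq = next;
        }
        System.out.println(freq); // the (0,3) phasing dominates after 20 rounds
    }
}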
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; //double[] prob = new double[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); } } probMap.normalize(total); iter++; } Iterator pitr = probMap.getKeySet().iterator(); int m=0; while(pitr.hasNext()) { Long next = (Long) pitr.next(); hint[next.intValue()]=-1; //todo: this hard coded threshold suxorz (probably elsewhere too) if (probMap.get(next) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=next.intValue(); hprob[block][m]=probMap.get(next); 
hint[next.intValue()]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total=(double)poss_full; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; doAssociationTests(affStatus, null); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (fullProbMap.get(new Long(j)) > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(new Long(j)))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = 
(int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; //double[] prob = new double[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); } } probMap.normalize(total); iter++; } Iterator pitr = probMap.getKeySet().iterator(); int m=0; while(pitr.hasNext()) { Long next = (Long) pitr.next(); hint[next.intValue()]=-1; //todo: this hard coded threshold suxorz (probably elsewhere too) if (probMap.get(next) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=next.intValue(); hprob[block][m]=probMap.get(next); 
hint[next.intValue()]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total=(double)poss_full; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; doAssociationTests(affStatus, null); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (fullProbMap.get(new Long(j)) > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(new Long(j)))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = 
(int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
1,110,595
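After ligation, each super-haplotype index j in [0, poss_full) encodes one choice of haplotype per block, since poss_full is the product of the per-block list sizes num_hlist[block]. decode_haplo_str in the source turns such an index into an allele string; the mixed-radix unpacking underneath plausibly looks like the sketch below. The digit order (last block least significant) is an assumption here, as create_super_haplos is not shown in this excerpt.
// Hedged sketch of the index arithmetic behind decode_haplo_str: recover the
// per-block hlist entries from a packed super-haplotype index. Only the
// mixed-radix layout is taken from the surrounding code; the digit order is
// assumed.
class SuperIndexDecoder {
    static int[] decodeSuperIndex(long j, int numBlocks, int[][] hlist, int[] numHlist) {
        int[] perBlock = new int[numBlocks];
        for (int b = numBlocks - 1; b >= 0; b--) {
            int digit = (int) (j % numHlist[b]); // position in block b's list
            perBlock[b] = hlist[b][digit];       // the block-local haplotype code
            j /= numHlist[b];
        }
        return perBlock;
    }
}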
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; //double[] prob = new double[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); } } probMap.normalize(total); iter++; } Iterator pitr = probMap.getKeySet().iterator(); int m=0; while(pitr.hasNext()) { Long next = (Long) pitr.next(); hint[next.intValue()]=-1; //todo: this hard coded threshold suxorz (probably elsewhere too) if (probMap.get(next) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=next.intValue(); hprob[block][m]=probMap.get(next); 
hint[next.intValue()]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total=(double)poss_full; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; doAssociationTests(affStatus, null); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (fullProbMap.get(new Long(j)) > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(new Long(j)))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = 
(int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; //double[] prob = new double[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); } } probMap.normalize(total); iter++; } Iterator pitr = probMap.getKeySet().iterator(); int m=0; while(pitr.hasNext()) { Long next = (Long) pitr.next(); hint[next.intValue()]=-1; //todo: this hard coded threshold suxorz (probably elsewhere too) if (probMap.get(next) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=next.intValue(); hprob[block][m]=probMap.get(next); 
hint[next.intValue()]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total=(double)poss_full; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; doAssociationTests(affStatus, null); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (long j=0; j<poss_full; j++) { if (fullProbMap.get(new Long(j)) > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(new Long(j)))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = 
(int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
1,110,596
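The only visible difference inside this last full_em_breakup pair is the type of the final loop counter, for (int j=0; ...) against for (long j=0; ...), when sweeping the poss_full super-haplotype combinations. Since poss_full itself is accumulated as an int product of the per-block counts, the product can silently wrap before the loop even starts; a defensive variant (a sketch, not the source's approach) accumulates in 64 bits and fails fast:
class PossFullGuard {
    // Accumulate the number of super-haplotype combinations in a long and
    // refuse to continue once it can no longer be represented as an int.
    static long possFull(int[] numHlist) {
        long poss = 1L;
        for (int n : numHlist) {
            poss *= n;
            if (poss > Integer.MAX_VALUE) {
                throw new ArithmeticException(
                        "too many super-haplotype combinations: " + poss);
            }
        }
        return poss;
    }
}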
public void actionPerformed(ActionEvent e) { String cmd = e.getActionCommand(); if ( cmd == PHOTO_PROPS_CMD ) { showSelectionPropsDialog(); } else if ( cmd == PHOTO_SHOW_CMD ) { showSelectedPhoto(); } else if ( cmd == PHOTO_ROTATE_CW_CMD ) { rotateSelectedPhoto( 90 ); } else if ( cmd == PHOTO_ROTATE_CCW_CMD ) { rotateSelectedPhoto( -90 ); } else if ( cmd == PHOTO_ROTATE_180_CMD ) { rotateSelectedPhoto( 180 ); } else if ( cmd == PHOTO_ADD_TO_FOLDER_CMD ) { queryForNewFolder(); } }
public void actionPerformed(ActionEvent e) { String cmd = e.getActionCommand(); if ( cmd == PHOTO_PROPS_CMD ) { showSelectionPropsDialog(); } else if ( cmd == PHOTO_SHOW_CMD ) { showSelectedPhoto(); } else if ( cmd == PHOTO_ROTATE_CW_CMD ) { rotateSelectedPhoto( 90 ); } else if ( cmd == PHOTO_ROTATE_CCW_CMD ) { rotateSelectedPhoto( -90 ); } else if ( cmd == PHOTO_ROTATE_180_CMD ) { rotateSelectedPhoto( 180 ); } else if ( cmd == PHOTO_ADD_TO_FOLDER_CMD ) { queryForNewFolder(); } }
1,110,598
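In the actionPerformed pair above, the command strings are compared with ==. That only works because the very same String constants (PHOTO_PROPS_CMD and friends) are installed via setActionCommand in createUI below; a command string assembled at runtime would fail the identity check even when its characters match. A more defensive dispatch, shown here as a sketch rather than the project's code, compares by value:
public void actionPerformed(ActionEvent e) {
    String cmd = e.getActionCommand();
    if (PHOTO_PROPS_CMD.equals(cmd)) {             // value equality, and
        showSelectionPropsDialog();                // null-safe with the
    } else if (PHOTO_SHOW_CMD.equals(cmd)) {       // constant on the left
        showSelectedPhoto();
    } else if (PHOTO_ROTATE_CW_CMD.equals(cmd)) {
        rotateSelectedPhoto(90);
    } else if (PHOTO_ROTATE_CCW_CMD.equals(cmd)) {
        rotateSelectedPhoto(-90);
    } else if (PHOTO_ROTATE_180_CMD.equals(cmd)) {
        rotateSelectedPhoto(180);
    } else if (PHOTO_ADD_TO_FOLDER_CMD.equals(cmd)) {
        queryForNewFolder();
    }
}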
void createUI() { photoTransferHandler = new PhotoCollectionTransferHandler( this ); setTransferHandler( photoTransferHandler ); addMouseListener( this ); addMouseMotionListener( this ); // Create the popup menu popup = new JPopupMenu(); JMenuItem propsItem = new JMenuItem( "Properties" ); propsItem.addActionListener( this ); propsItem.setActionCommand( PHOTO_PROPS_CMD ); JMenuItem showItem = new JMenuItem( "Show image" ); showItem.addActionListener( this ); showItem.setActionCommand( PHOTO_SHOW_CMD ); JMenuItem rotateCW = new JMenuItem( "Rotate 90 deg CW" ); rotateCW.addActionListener( this ); rotateCW.setActionCommand( PHOTO_ROTATE_CW_CMD ); JMenuItem rotateCCW = new JMenuItem( "Rotate 90 deg CCW" ); rotateCCW.addActionListener( this ); rotateCCW.setActionCommand( PHOTO_ROTATE_CCW_CMD ); JMenuItem rotate180deg = new JMenuItem( "Rotate 180 degrees" ); rotate180deg.addActionListener( this ); rotate180deg.setActionCommand( PHOTO_ROTATE_180_CMD ); JMenuItem addToFolder = new JMenuItem( "Add to folder..." ); addToFolder.addActionListener( this ); addToFolder.setActionCommand( PHOTO_ADD_TO_FOLDER_CMD ); exportSelectedAction = new ExportSelectedAction( this, "Export selected...", null, "Export the selected photos from the archive database to image files", KeyEvent.VK_X ); JMenuItem exportSelected = new JMenuItem( exportSelectedAction ); popup.add( showItem ); popup.add( propsItem ); popup.add( rotateCW ); popup.add( rotateCCW ); popup.add( rotate180deg ); popup.add( addToFolder ); popup.add( exportSelected ); MouseListener popupListener = new PopupListener(); addMouseListener( popupListener ); }
void createUI() { photoTransferHandler = new PhotoCollectionTransferHandler( this ); setTransferHandler( photoTransferHandler ); addMouseListener( this ); addMouseMotionListener( this ); // Create the popup menu popup = new JPopupMenu(); JMenuItem propsItem = new JMenuItem( "Properties" ); propsItem.addActionListener( this ); propsItem.setActionCommand( PHOTO_PROPS_CMD ); JMenuItem showItem = new JMenuItem( "Show image" ); showItem.addActionListener( this ); showItem.setActionCommand( PHOTO_SHOW_CMD ); JMenuItem rotateCW = new JMenuItem( "Rotate 90 deg CW" ); rotateCW.addActionListener( this ); rotateCW.setActionCommand( PHOTO_ROTATE_CW_CMD ); JMenuItem rotateCCW = new JMenuItem( "Rotate 90 deg CCW" ); rotateCCW.addActionListener( this ); rotateCCW.setActionCommand( PHOTO_ROTATE_CCW_CMD ); JMenuItem rotate180deg = new JMenuItem( "Rotate 180 degrees" ); rotate180deg.addActionListener( this ); rotate180deg.setActionCommand( PHOTO_ROTATE_180_CMD ); JMenuItem addToFolder = new JMenuItem( "Add to folder..." ); addToFolder.addActionListener( this ); addToFolder.setActionCommand( PHOTO_ADD_TO_FOLDER_CMD ); exportSelectedAction = new ExportSelectedAction( this, "Export selected...", null, "Export the selected photos from the archive database to image files", KeyEvent.VK_X ); JMenuItem exportSelected = new JMenuItem( exportSelectedAction ); popup.add( showItem ); popup.add( propsItem ); popup.add( rotateCW ); popup.add( rotateCCW ); popup.add( rotate180deg ); popup.add( addToFolder ); popup.add( exportSelected ); MouseListener popupListener = new PopupListener(); addMouseListener( popupListener ); }
1,110,600
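createUI() routes the popup through a PopupListener class that is not shown in this row. A sketch of what such a listener conventionally looks like in Swing (an assumption about this project, not code taken from it): popup triggers fire on mouse press on some platforms and on release on others, so both callbacks must be checked.
    class PopupListener extends MouseAdapter {
        public void mousePressed( MouseEvent e ) { maybeShowPopup( e ); }
        public void mouseReleased( MouseEvent e ) { maybeShowPopup( e ); }
        private void maybeShowPopup( MouseEvent e ) {
            if ( e.isPopupTrigger() ) {
                /* popup is assumed to be the JPopupMenu field built in createUI() */
                popup.show( e.getComponent(), e.getX(), e.getY() );
            }
        }
    }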
private void paintThumbnail( Graphics2D g2, PhotoInfo photo, int startx, int starty, boolean isSelected ) { /* Current position in which attributes can be drawn */ int ypos = starty + rowHeight/2; if ( photo != null ) { Thumbnail thumbnail = photo.getThumbnail(); if ( thumbnail != null ) { /* Find the position for the thumbnail */ BufferedImage img = thumbnail.getImage(); int x = startx + (columnWidth - img.getWidth())/2; int y = starty + (rowHeight - img.getHeight())/2; g2.drawImage( img, new AffineTransform( 1f, 0f, 0f, 1f, x, y ), null ); if ( isSelected ) { Stroke prevStroke = g2.getStroke(); Color prevColor = g2.getColor(); g2.setStroke( new BasicStroke( 3.0f ) ); g2.setColor( Color.BLUE ); g2.drawRect( x, y, img.getWidth(), img.getHeight() ); g2.setColor( prevColor ); g2.setStroke( prevStroke ); } /* Increase ypos so that attributes are drawn under the image */ ypos += img.getHeight()/2 + 4; } /* Draw the attributes */ Color prevBkg = g2.getBackground(); if ( isSelected ) { g2.setBackground( Color.BLUE ); } Font attrFont = new Font( "Arial", Font.PLAIN, 10 ); FontRenderContext frc = g2.getFontRenderContext(); if ( showDate && photo.getShootTime() != null ) { DateFormat df = new SimpleDateFormat( "dd.MM.yyyy k:mm" ); String dateStr = df.format( photo.getShootTime() ); TextLayout txt = new TextLayout( dateStr, attrFont, frc ); /* Calculate the position for the text */ Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth - bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } String shootPlace = photo.getShootingPlace(); if ( showPlace && shootPlace != null && shootPlace.length() > 0 ) { TextLayout txt = new TextLayout( photo.getShootingPlace(), attrFont, frc ); /* Calculate the position for the text */ Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth-bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } g2.setBackground( prevBkg ); } }
private void paintThumbnail( Graphics2D g2, PhotoInfo photo, int startx, int starty, boolean isSelected ) { /* Current position in which attributes can be drawn */ int ypos = starty + rowHeight/2; if ( photo != null ) { Thumbnail thumbnail = photo.getThumbnail(); if ( thumbnail != null ) { /* Find the position for the thumbnail */ BufferedImage img = thumbnail.getImage(); int x = startx + (columnWidth - img.getWidth())/2; int y = starty + (rowHeight - img.getHeight())/2; g2.drawImage( img, new AffineTransform( 1f, 0f, 0f, 1f, x, y ), null ); if ( isSelected ) { Stroke prevStroke = g2.getStroke(); Color prevColor = g2.getColor(); g2.setStroke( new BasicStroke( 3.0f ) ); g2.setColor( Color.BLUE ); g2.drawRect( x, y, img.getWidth(), img.getHeight() ); g2.setColor( prevColor ); g2.setStroke( prevStroke ); } /* Increase ypos so that attributes are drawn under the image */ ypos += img.getHeight()/2 + 4; } /* Draw the attributes */ Color prevBkg = g2.getBackground(); if ( isSelected ) { g2.setBackground( Color.BLUE ); } Font attrFont = new Font( "Arial", Font.PLAIN, 10 ); FontRenderContext frc = g2.getFontRenderContext(); if ( showDate && photo.getShootTime() != null ) { DateFormat df = new SimpleDateFormat( "dd.MM.yyyy k:mm" ); String dateStr = df.format( photo.getShootTime() ); TextLayout txt = new TextLayout( dateStr, attrFont, frc ); /* Calculate the position for the text */ Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth - bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } String shootPlace = photo.getShootingPlace(); if ( showPlace && shootPlace != null && shootPlace.length() > 0 ) { TextLayout txt = new TextLayout( photo.getShootingPlace(), attrFont, frc ); /* Calculate the position for the text */ Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth-bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } g2.setBackground( prevBkg ); } }
1,110,602
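The thumbnail placement in paintThumbnail() is plain integer midpoint centering; the (int) casts on the literal 2 were redundant because every operand is already an int (the cast on bounds.getWidth() is different and necessary, since TextLayout bounds are doubles). Equivalent arithmetic:
    int x = startx + ( columnWidth - img.getWidth() ) / 2;
    int y = starty + ( rowHeight - img.getHeight() ) / 2;
An alternative to manually saving and restoring the stroke and color, shown here as a design option rather than what the code above does, is to paint the selection highlight through a disposable copy of the graphics context so state changes cannot leak:
    Graphics2D scratch = (Graphics2D) g2.create();   /* state changes stay local to the copy */
    scratch.setStroke( new BasicStroke( 3.0f ) );
    scratch.setColor( Color.BLUE );
    scratch.drawRect( x, y, img.getWidth(), img.getHeight() );
    scratch.dispose();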
public void postReport() { logger.debug(toString()); if (numReportsThisRun++ > MAX_REPORT_TRIES) { logger.info( String.format( "Not logging this error, threshold of %d exceeded", MAX_REPORT_TRIES)); return; } exception.printStackTrace(); String url = System.getProperty(REPORT_URL_SYSTEM_PROP); if (url == null) { url = DEFAULT_REPORT_URL; } /* TODO decouple this from the main frame */ UserSettings settings = ArchitectFrame.getMainInstance().getUserSettings().getQfaUserSettings(); if (!settings.getBoolean(QFAUserSettings.EXCEPTION_REPORTING,true)) return; logger.info("Posting error report to SQL Power at URL <"+url+">"); try { HttpURLConnection dest = (HttpURLConnection) new URL(url).openConnection(); dest.setDoOutput(true); dest.setDoInput(true); dest.setUseCaches(false); dest.setRequestMethod("POST"); dest.setRequestProperty("Content-Type", "text/xml"); dest.connect(); OutputStream out = null; try { out = new BufferedOutputStream(dest.getOutputStream()); out.write(toXML().getBytes("ISO-8859-1")); out.flush(); } finally { if (out != null) out.close(); } /* Note: the error report will only get sent if we attempt to read from the URL Connection (!??!?) */ BufferedReader in = new BufferedReader(new InputStreamReader(dest.getInputStream())); StringBuffer response = new StringBuffer(); String line; while ((line = in.readLine()) != null) { response.append(line); } in.close(); logger.info("Error report servlet response: "+response); } catch (Exception e) { /* Just catch-and-squash everything because we're already in up to our necks at this point. */ logger.error("Couldn't send exception report to <\""+url+"\">", e); } }
public void postReport() { logger.debug("posting report: "+toString()); if (numReportsThisRun++ > MAX_REPORT_TRIES) { logger.info( String.format( "Not logging this error, threshold of %d exceeded", MAX_REPORT_TRIES)); return; } exception.printStackTrace(); String url = System.getProperty(REPORT_URL_SYSTEM_PROP); if (url == null) { url = DEFAULT_REPORT_URL; } /* TODO decouple this from the main frame */ UserSettings settings = ArchitectFrame.getMainInstance().getUserSettings().getQfaUserSettings(); if (!settings.getBoolean(QFAUserSettings.EXCEPTION_REPORTING,true)) return; logger.info("Posting error report to SQL Power at URL <"+url+">"); try { HttpURLConnection dest = (HttpURLConnection) new URL(url).openConnection(); dest.setDoOutput(true); dest.setDoInput(true); dest.setUseCaches(false); dest.setRequestMethod("POST"); dest.setRequestProperty("Content-Type", "text/xml"); dest.connect(); OutputStream out = null; try { out = new BufferedOutputStream(dest.getOutputStream()); out.write(toXML().getBytes("ISO-8859-1")); out.flush(); } finally { if (out != null) out.close(); } /* Note: the error report will only get sent if we attempt to read from the URL Connection (!??!?) */ BufferedReader in = new BufferedReader(new InputStreamReader(dest.getInputStream())); StringBuffer response = new StringBuffer(); String line; while ((line = in.readLine()) != null) { response.append(line); } in.close(); logger.info("Error report servlet response: "+response); } catch (Exception e) { /* Just catch-and-squash everything because we're already in up to our necks at this point. */ logger.error("Couldn't send exception report to <\""+url+"\">", e); } }
1,110,604
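The write path in postReport() closes the stream through an explicit finally block, which was the required idiom at the time. On Java 7 or later the same flow collapses to try-with-resources; a sketch under that assumption, reusing dest and toXML() from the snippet and sitting inside the same outer try/catch:
    try ( OutputStream out = new BufferedOutputStream( dest.getOutputStream() ) ) {
        out.write( toXML().getBytes( "ISO-8859-1" ) );
    }   /* flush and close happen automatically, even on exception */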
public void postReport() { logger.debug(toString()); if (numReportsThisRun++ > MAX_REPORT_TRIES) { logger.info( String.format( "Not logging this error, threshold of %d exceeded", MAX_REPORT_TRIES)); return; } exception.printStackTrace(); String url = System.getProperty(REPORT_URL_SYSTEM_PROP); if (url == null) { url = DEFAULT_REPORT_URL; } /* TODO decouple this from the main frame */ UserSettings settings = ArchitectFrame.getMainInstance().getUserSettings().getQfaUserSettings(); if (!settings.getBoolean(QFAUserSettings.EXCEPTION_REPORTING,true)) return; logger.info("Posting error report to SQL Power at URL <"+url+">"); try { HttpURLConnection dest = (HttpURLConnection) new URL(url).openConnection(); dest.setDoOutput(true); dest.setDoInput(true); dest.setUseCaches(false); dest.setRequestMethod("POST"); dest.setRequestProperty("Content-Type", "text/xml"); dest.connect(); OutputStream out = null; try { out = new BufferedOutputStream(dest.getOutputStream()); out.write(toXML().getBytes("ISO-8859-1")); out.flush(); } finally { if (out != null) out.close(); } /* Note: the error report will only get sent if we attempt to read from the URL Connection (!??!?) */ BufferedReader in = new BufferedReader(new InputStreamReader(dest.getInputStream())); StringBuffer response = new StringBuffer(); String line; while ((line = in.readLine()) != null) { response.append(line); } in.close(); logger.info("Error report servlet response: "+response); } catch (Exception e) { /* Just catch-and-squash everything because we're already in up to our necks at this point. */ logger.error("Couldn't send exception report to <\""+url+"\">", e); } }
public void postReport() { logger.debug(toString()); if (numReportsThisRun++ > MAX_REPORT_TRIES) { logger.info( String.format( "Not logging this error, threshold of %d exceeded", MAX_REPORT_TRIES)); return; } exception.printStackTrace(); String url = System.getProperty(REPORT_URL_SYSTEM_PROP); if (url == null) { url = DEFAULT_REPORT_URL; } /* TODO decouple this from the main frame */ UserSettings settings = ArchitectFrame.getMainInstance().getUserSettings().getQfaUserSettings(); if (!settings.getBoolean(QFAUserSettings.EXCEPTION_REPORTING,true)) return; logger.info("Posting error report to SQL Power at URL <"+url+">"); try { HttpURLConnection dest = (HttpURLConnection) new URL(url).openConnection(); dest.setDoOutput(true); dest.setDoInput(true); dest.setUseCaches(false); dest.setRequestMethod("POST"); dest.setRequestProperty("Content-Type", "text/xml"); dest.connect(); OutputStream out = null; try { out = new BufferedOutputStream(dest.getOutputStream()); out.write(toXML().getBytes("ISO-8859-1")); out.flush(); } finally { if (out != null) out.close(); } /* Note: the error report will only get sent if we attempt to read from the URL Connection (!??!?) */ InputStreamReader inputStreamReader = new InputStreamReader(dest.getInputStream()); BufferedReader in = new BufferedReader(inputStreamReader); StringBuffer response = new StringBuffer(); String line; while ((line = in.readLine()) != null) { response.append(line); } in.close(); logger.info("Error report servlet response: "+response); } catch (Exception e) { /* Just catch-and-squash everything because we're already in up to our necks at this point. */ logger.error("Couldn't send exception report to <\""+url+"\">", e); } }
1,110,605
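Both versions of postReport() depend on the quirk flagged in the code's own comment: HttpURLConnection buffers the request body and only transmits it once the response is consumed. Reading the status code makes that explicit and also surfaces server-side failures; a sketch reusing dest and logger from the snippet above:
    int status = dest.getResponseCode();   /* forces the buffered POST to be sent */
    if ( status != HttpURLConnection.HTTP_OK ) {
        logger.error( "Report servlet returned HTTP " + status );
    }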
public PresentationObject getTopicsOverView(IWContext iwc){ Table T = new Table(); int row = 1; T.add(getTopicLink(-1,iwrb.getLocalizedString("new_topic","New topic")),1,row); row++; T.add(tf.format(iwrb.getLocalizedString("name","Name"),tf.HEADER),1,row); T.add(tf.format(iwrb.getLocalizedString("category","Category"),tf.HEADER),2,row); T.add(tf.format(iwrb.getLocalizedString("mail_server","Mail server"),tf.HEADER),3,row); T.add(tf.format(iwrb.getLocalizedString("subscribers","Subscribers"),tf.HEADER),4,row); T.add(tf.format(iwrb.getLocalizedString("welcome","Welcome"),tf.HEADER),5,row); row++; if(!topics.isEmpty()){ Iterator iter = topics.values().iterator(); EmailTopic topic; ICCategory category; EmailAccount account; EmailLetter welcome; Collection welcomes; Collection accounts; int emailCount; int topicID; while(iter.hasNext()){ topic = (EmailTopic) iter.next(); topicID = topic.getIdentifier().intValue(); T.add(getTopicLink(topicID,topic.getName()),1,row); category = (ICCategory) categories.get(Integer.toString(topic.getCategoryId())); T.add(tf.format(category.getName()),2,row); accounts = MailFinder.getInstance().getTopicAccounts(topicID,MailProtocol.SMTP); if(accounts!=null && !accounts.isEmpty()){ account = (EmailAccount) accounts.iterator().next(); T.add(getAccountLink(topicID, account.getIdentifier().intValue(), account.getHost()),3,row); } else{ T.add(getAccountLink(topicID,-1,"X"),3,row); } emailCount = MailFinder.getInstance().getListEmailsCount(topic.getListId()); T.add(getSubscribersLink(topicID,String.valueOf(emailCount)),4,row); welcomes = MailFinder.getInstance().getEmailLetters(topicID,MailLetter.TYPE_SUBSCRIPTION); if(welcomes!=null && !welcomes.isEmpty()){ welcome = (MailLetter) welcomes.iterator().next(); T.add(getWelcomeLetterLink(welcome.getIdentifier().intValue(),topicID,welcome.getSubject()),5,row); /* T.add(tf.format(welcome.getSubject()),5,row); */ } else{ T.add(getWelcomeLetterLink(-1,topicID,"X"),5,row); } row++; } } return T; }
public PresentationObject getTopicsOverView(IWContext iwc){ Table T = new Table(); int row = 1; T.add(getTopicLink(-1,iwrb.getLocalizedString("new_topic","New topic")),1,row); row++; T.add(tf.format(iwrb.getLocalizedString("name","Name"),tf.HEADER),1,row); T.add(tf.format(iwrb.getLocalizedString("category","Category"),tf.HEADER),2,row); T.add(tf.format(iwrb.getLocalizedString("mail_server","Mail server"),tf.HEADER),3,row); T.add(tf.format(iwrb.getLocalizedString("subscribers","Subscribers"),tf.HEADER),4,row); T.add(tf.format(iwrb.getLocalizedString("welcome","Welcome"),tf.HEADER),5,row); row++; if(!topics.isEmpty()){ Iterator iter = topics.values().iterator(); EmailTopic topic; ICCategory category; EmailAccount account; EmailLetter welcome; Collection welcomes; Collection accounts; int emailCount; int topicID; while(iter.hasNext()){ topic = (EmailTopic) iter.next(); topicID = topic.getIdentifier().intValue(); T.add(getTopicLink(topicID,topic.getName()),1,row); category = (ICCategory) categories.get(Integer.toString(topic.getCategoryId())); T.add(tf.format(category.getName()),2,row); accounts = MailFinder.getInstance().getTopicAccounts(topicID,MailProtocol.SMTP); if(accounts!=null && !accounts.isEmpty()){ account = (EmailAccount) accounts.iterator().next(); T.add(getAccountLink(topicID, account.getIdentifier().intValue(), account.getHost()),3,row); } else{ T.add(getAccountLink(topicID,-1,"X"),3,row); } emailCount = MailFinder.getInstance().getListEmailsCount(topic.getListId()); T.add(getSubscribersLink(topic.getListId(),String.valueOf(emailCount)),4,row); welcomes = MailFinder.getInstance().getEmailLetters(topicID,MailLetter.TYPE_SUBSCRIPTION); if(welcomes!=null && !welcomes.isEmpty()){ welcome = (MailLetter) welcomes.iterator().next(); T.add(getWelcomeLetterLink(welcome.getIdentifier().intValue(),topicID,welcome.getSubject()),5,row); /* T.add(tf.format(welcome.getSubject()),5,row); */ } else{ T.add(getWelcomeLetterLink(-1,topicID,"X"),5,row); } row++; } } return T; }
1,110,607
public void refreshTable(){ this.removeAll(); snpsByName = new Hashtable(); this.setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); Vector columnNames = new Vector(); Vector tableData = new Vector(); columnNames.add("#"); columnNames.add("Name"); columnNames.add("Position"); columnNames.add("Force Include"); columnNames.add("Force Exclude"); columnNames.add("Tag this SNP?"); for (int i = 0; i < Chromosome.getSize(); i++){ SNP tempSNP = Chromosome.getMarker(i); snpsByName.put(tempSNP.getName(), tempSNP); Vector tempData = new Vector(); tempData.add(Integer.toString(Chromosome.realIndex[i]+1)); tempData.add(tempSNP.getName()); tempData.add(String.valueOf(tempSNP.getPosition())); tempData.add(new Boolean(false)); tempData.add(new Boolean(false)); tempData.add(new Boolean(true)); tableData.add(tempData); } TagConfigTableModel tableModel = new TagConfigTableModel(columnNames,tableData); tableModel.addTableModelListener(this); table = new JTable(tableModel); table.getColumnModel().getColumn(0).setPreferredWidth(30); JScrollPane scrollPane = new JScrollPane(table); scrollPane.setMaximumSize(scrollPane.getPreferredSize()); add(scrollPane); JPanel optsRightPanel = new JPanel(); optsRightPanel.setLayout(new BoxLayout(optsRightPanel, BoxLayout.Y_AXIS)); JPanel rsqPanel = new JPanel(); JLabel rsqLabel = new JLabel("r\u00b2 threshold"); rsqPanel.add(rsqLabel); rsqField = new NumberTextField(String.valueOf(Options.getTaggerRsqCutoff()),5,true); rsqPanel.add(rsqField); optsRightPanel.add(rsqPanel); JPanel lodPanel = new JPanel(); JLabel lodLabel = new JLabel("LOD threshold for multi-marker tests"); lodPanel.add(lodLabel); lodField = new NumberTextField(String.valueOf(Options.getTaggerLODCutoff()),5,true); lodPanel.add(lodField); optsRightPanel.add(lodPanel); JPanel optsLeftPanel = new JPanel(); optsLeftPanel.setLayout(new BoxLayout(optsLeftPanel, BoxLayout.Y_AXIS)); JRadioButton pairwiseButton = new JRadioButton("pairwise tagging only"); pairwiseButton.setActionCommand(String.valueOf(Tagger.PAIRWISE_ONLY)); optsLeftPanel.add(pairwiseButton); JRadioButton dupleButton = new JRadioButton("aggressive tagging: use 2-marker haplotypes"); dupleButton.setActionCommand(String.valueOf(Tagger.AGGRESSIVE_DUPLE)); optsLeftPanel.add(dupleButton); JRadioButton tripleButton = new JRadioButton("aggressive tagging: use 2- and 3-marker haplotypes"); tripleButton.setActionCommand(String.valueOf(Tagger.AGGRESSIVE_TRIPLE)); optsLeftPanel.add(tripleButton); aggressiveGroup = new ButtonGroup(); aggressiveGroup.add(pairwiseButton); aggressiveGroup.add(dupleButton); aggressiveGroup.add(tripleButton); pairwiseButton.setSelected(true); JPanel optsPanel = new JPanel(); optsPanel.add(optsLeftPanel); optsPanel.add(optsRightPanel); add(optsPanel); runTaggerButton = new JButton("Run Tagger"); runTaggerButton.addActionListener(this); resetTableButton = new JButton("Reset Table"); resetTableButton.addActionListener(this); JPanel buttonPanel = new JPanel(); buttonPanel.add(runTaggerButton); buttonPanel.add(resetTableButton); add(buttonPanel); }
public void refreshTable(){ this.removeAll(); snpsByName = new Hashtable(); this.setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); Vector columnNames = new Vector(); Vector tableData = new Vector(); columnNames.add("#"); columnNames.add("Name"); columnNames.add("Position"); columnNames.add("Force Include"); columnNames.add("Force Exclude"); columnNames.add("Tag this SNP?"); for (int i = 0; i < Chromosome.getSize(); i++){ SNP tempSNP = Chromosome.getMarker(i); snpsByName.put(tempSNP.getName(), tempSNP); Vector tempData = new Vector(); tempData.add(Integer.toString(Chromosome.realIndex[i]+1)); tempData.add(tempSNP.getName()); tempData.add(String.valueOf(tempSNP.getPosition())); tempData.add(new Boolean(false)); tempData.add(new Boolean(false)); tempData.add(new Boolean(true)); tableData.add(tempData); } TagConfigTableModel tableModel = new TagConfigTableModel(columnNames,tableData); tableModel.addTableModelListener(this); table = new JTable(tableModel); table.getColumnModel().getColumn(0).setPreferredWidth(30); JScrollPane scrollPane = new JScrollPane(table); scrollPane.setMaximumSize(scrollPane.getPreferredSize()); add(scrollPane); JPanel optsRightPanel = new JPanel(); optsRightPanel.setLayout(new BoxLayout(optsRightPanel, BoxLayout.Y_AXIS)); JPanel rsqPanel = new JPanel(); JLabel rsqLabel = new JLabel("r\u00b2 threshold"); rsqPanel.add(rsqLabel); rsqField = new NumberTextField(String.valueOf(Options.getTaggerRsqCutoff()),5,true); rsqPanel.add(rsqField); optsRightPanel.add(rsqPanel); JPanel lodPanel = new JPanel(); JLabel lodLabel = new JLabel("LOD threshold for multi-marker tests"); lodPanel.add(lodLabel); lodField = new NumberTextField(String.valueOf(Options.getTaggerLODCutoff()),5,true); lodPanel.add(lodField); optsRightPanel.add(lodPanel); JPanel optsLeftPanel = new JPanel(); optsLeftPanel.setLayout(new BoxLayout(optsLeftPanel, BoxLayout.Y_AXIS)); JRadioButton pairwiseButton = new JRadioButton("pairwise tagging only"); pairwiseButton.setActionCommand(String.valueOf(Tagger.PAIRWISE_ONLY)); optsLeftPanel.add(pairwiseButton); JRadioButton dupleButton = new JRadioButton("aggressive tagging: use 2-marker haplotypes"); dupleButton.setActionCommand(String.valueOf(Tagger.AGGRESSIVE_DUPLE)); optsLeftPanel.add(dupleButton); JRadioButton tripleButton = new JRadioButton("aggressive tagging: use 2- and 3-marker haplotypes"); tripleButton.setActionCommand(String.valueOf(Tagger.AGGRESSIVE_TRIPLE)); optsLeftPanel.add(tripleButton); aggressiveGroup = new ButtonGroup(); aggressiveGroup.add(pairwiseButton); aggressiveGroup.add(dupleButton); aggressiveGroup.add(tripleButton); tripleButton.setSelected(true); JPanel optsPanel = new JPanel(); optsPanel.add(optsLeftPanel); optsPanel.add(optsRightPanel); add(optsPanel); runTaggerButton = new JButton("Run Tagger"); runTaggerButton.addActionListener(this); resetTableButton = new JButton("Reset Table"); resetTableButton.addActionListener(this); JPanel buttonPanel = new JPanel(); buttonPanel.add(runTaggerButton); buttonPanel.add(resetTableButton); add(buttonPanel); }
1,110,608
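The only difference between the two refreshTable() versions above is which radio button is preselected. A ButtonGroup enforces mutual exclusion but selects nothing on its own, so exactly one setSelected(true) call is needed and it alone determines the initial tagging mode:
    aggressiveGroup = new ButtonGroup();
    aggressiveGroup.add( pairwiseButton );
    aggressiveGroup.add( dupleButton );
    aggressiveGroup.add( tripleButton );
    tripleButton.setSelected( true );   /* the fixed version defaults to aggressive 2- and 3-marker tagging */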
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ /* this method is called to gather data about the markers used. It is assumed that the input file is two columns, the first being the name and the second the absolute position. the maxdist is used to determine beyond what distance comparisons will not be made. if the infile param is null, loads up "dummy info" for situation where no info file exists. An optional third column is supported which is designed to hold association study data. If there is a third column there will be a visual indicator in the D' display that there is additional data and the detailed data can be viewed with a mouse press. */ Vector names = new Vector(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; /* read the input file: */ BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ /* complain if only one field found */ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ /* skip blank lines */ continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } /* basically if anyone is crazy enough to load a dataset, then go back and load an out-of-order info file we tell them to bugger off and start over. */ if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; names.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ /* we know some stuff from the hapmap so we'll add it here */ for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ /* we only sort if we read the info from an info file. if it is from a hapmap file, then the markers were already sorted when they were read in (in class Pedfile). */ int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; /* this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. it also creates a hashtable with the positions as keys and their index in the pos[] array as the value */ for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } /* loop through and check if any markers are out of order */ for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } /* if any were out of order, then we need to put them in order */ if(needSort) { /* sort the positions */ Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; /* reorder the vectors names and extras so that they have the same order as the sorted markers */ for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; /* now we reorder all the individuals genotypes according to the sorted marker order */ for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = pedFile.getResults(); long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = (MarkerResult)results.elementAt(i); /* to compute minor/major alleles, browse chrom list and count instances of each allele */ byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ /* if there is a data point for this marker on this chromosome */ byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; a1 = a2; a2 = temp; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, Math.rint(mr.getMAF()*100.0)/100.0, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); double genoPC = mr.getGenoPercent(); /* check to make sure adjacent SNPs do not have identical positions */ if (prevPosition != Long.MIN_VALUE){ /* only do this for markers 2..N, since we're comparing to the previous location */ if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= mr.getGenoPercent()){ /* use this one because it has more genotypes */ thisMarker.setDup(1); prevMarker.setDup(2); }else{ /* use the other one because it has more genotypes */ thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(mr.getMAF()*100.0)/100.0,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ /* this method is called to gather data about the markers used. It is assumed that the input file is two columns, the first being the name and the second the absolute position. the maxdist is used to determine beyond what distance comparisons will not be made. if the infile param is null, loads up "dummy info" for situation where no info file exists. An optional third column is supported which is designed to hold association study data. If there is a third column there will be a visual indicator in the D' display that there is additional data and the detailed data can be viewed with a mouse press. */ Vector names = new Vector(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; /* read the input file: */ BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ /* complain if only one field found */ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ /* skip blank lines */ continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } /* basically if anyone is crazy enough to load a dataset, then go back and load an out-of-order info file we tell them to bugger off and start over. */ if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; names.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ /* we know some stuff from the hapmap so we'll add it here */ for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ /* we only sort if we read the info from an info file. if it is from a hapmap file, then the markers were already sorted when they were read in (in class Pedfile). */ int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; /* this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. it also creates a hashtable with the positions as keys and their index in the pos[] array as the value */ for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } /* loop through and check if any markers are out of order */ for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } /* if any were out of order, then we need to put them in order */ if(needSort) { /* sort the positions */ Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; /* reorder the vectors names and extras so that they have the same order as the sorted markers */ for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; /* now we reorder all the individuals genotypes according to the sorted marker order */ for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = null; if (pedFile != null){ results = pedFile.getResults(); } long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = (MarkerResult)results.elementAt(i); /* to compute minor/major alleles, browse chrom list and count instances of each allele */ byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ /* if there is a data point for this marker on this chromosome */ byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; a1 = a2; a2 = temp; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, Math.rint(mr.getMAF()*100.0)/100.0, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); double genoPC = mr.getGenoPercent(); /* check to make sure adjacent SNPs do not have identical positions */ if (prevPosition != Long.MIN_VALUE){ /* only do this for markers 2..N, since we're comparing to the previous location */ if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= mr.getGenoPercent()){ /* use this one because it has more genotypes */ thisMarker.setDup(1); prevMarker.setDup(2); }else{ /* use the other one because it has more genotypes */ thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(mr.getMAF()*100.0)/100.0,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
1,110,609
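The sort in prepareMarkerInput() keys a Hashtable on marker position to recover each position's original index, which silently drops entries when two markers share a position, even though duplicate positions are explicitly anticipated later (dupsToBeFlagged). A sketch of an index sort that tolerates ties, in the same pre-generics style; this is a hypothetical helper, not part of the original class, and needs java.util.Arrays and java.util.Comparator:
    static Integer[] sortOrderByPosition( final long[] pos ) {
        Integer[] order = new Integer[pos.length];
        for ( int k = 0; k < pos.length; k++ ) order[k] = new Integer( k );
        Arrays.sort( order, new Comparator() {
            public int compare( Object a, Object b ) {
                long pa = pos[((Integer) a).intValue()];
                long pb = pos[((Integer) b).intValue()];
                return pa < pb ? -1 : ( pa > pb ? 1 : 0 );
            }
        } );
        return order;   /* order[i] is the original index of the i-th marker by position */
    }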
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ /* this method is called to gather data about the markers used. It is assumed that the input file is two columns, the first being the name and the second the absolute position. the maxdist is used to determine beyond what distance comparisons will not be made. if the infile param is null, loads up "dummy info" for situation where no info file exists. An optional third column is supported which is designed to hold association study data. If there is a third column there will be a visual indicator in the D' display that there is additional data and the detailed data can be viewed with a mouse press. */ Vector names = new Vector(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; /* read the input file: */ BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ /* complain if only one field found */ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ /* skip blank lines */ continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } /* basically if anyone is crazy enough to load a dataset, then go back and load an out-of-order info file we tell them to bugger off and start over. */ if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; names.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ /* we know some stuff from the hapmap so we'll add it here */ for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ /* we only sort if we read the info from an info file. if it is from a hapmap file, then the markers were already sorted when they were read in (in class Pedfile). */ int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; /* this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. it also creates a hashtable with the positions as keys and their index in the pos[] array as the value */ for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } /* loop through and check if any markers are out of order */ for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } /* if any were out of order, then we need to put them in order */ if(needSort) { /* sort the positions */ Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; /* reorder the vectors names and extras so that they have the same order as the sorted markers */ for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; /* now we reorder all the individuals genotypes according to the sorted marker order */ for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = pedFile.getResults(); long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = (MarkerResult)results.elementAt(i); /* to compute minor/major alleles, browse chrom list and count instances of each allele */ byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ /* if there is a data point for this marker on this chromosome */ byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; a1 = a2; a2 = temp; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, Math.rint(mr.getMAF()*100.0)/100.0, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); double genoPC = mr.getGenoPercent(); /* check to make sure adjacent SNPs do not have identical positions */ if (prevPosition != Long.MIN_VALUE){ /* only do this for markers 2..N, since we're comparing to the previous location */ if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= mr.getGenoPercent()){ /* use this one because it has more genotypes */ thisMarker.setDup(1); prevMarker.setDup(2); }else{ /* use the other one because it has more genotypes */ thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(mr.getMAF()*100.0)/100.0,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ /* this method is called to gather data about the markers used. It is assumed that the input file is two columns, the first being the name and the second the absolute position. the maxdist is used to determine beyond what distance comparisons will not be made. if the infile param is null, loads up "dummy info" for situation where no info file exists. An optional third column is supported which is designed to hold association study data. If there is a third column there will be a visual indicator in the D' display that there is additional data and the detailed data can be viewed with a mouse press. */ Vector names = new Vector(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; /* read the input file: */ BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ /* complain if only one field found */ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ /* skip blank lines */ continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } /* basically if anyone is crazy enough to load a dataset, then go back and load an out-of-order info file we tell them to bugger off and start over. */ if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; names.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ /* we know some stuff from the hapmap so we'll add it here */ for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ /* we only sort if we read the info from an info file. if it is from a hapmap file, then the markers were already sorted when they were read in (in class Pedfile). */ int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; /* this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. it also creates a hashtable with the positions as keys and their index in the pos[] array as the value */ for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } /* loop through and check if any markers are out of order */ for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } /* if any were out of order, then we need to put them in order */ if(needSort) { /* sort the positions */ Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; /* reorder the vectors names and extras so that they have the same order as the sorted markers */ for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; /* now we reorder all the individuals genotypes according to the sorted marker order */ for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = pedFile.getResults(); long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = null; if (results != null){ mr = (MarkerResult)results.elementAt(i); } /* to compute minor/major alleles, browse chrom list and count instances of each allele */ byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ /* if there is a data point for this marker on this chromosome */ byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; a1 = a2; a2 = temp; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, Math.rint(mr.getMAF()*100.0)/100.0, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); double genoPC = mr.getGenoPercent(); /* check to make sure adjacent SNPs do not have identical positions */ if (prevPosition != Long.MIN_VALUE){ /* only do this for markers 2..N, since we're comparing to the previous location */ if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= mr.getGenoPercent()){ /* use this one because it has more genotypes */ thisMarker.setDup(1); prevMarker.setDup(2); }else{ /* use the other one because it has more genotypes */ thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(mr.getMAF()*100.0)/100.0,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
1,110,610
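One flaw survives in every copy of prepareMarkerInput() shown here: genoPC >= mr.getGenoPercent() compares the current marker's genotyping rate to itself, so it is always true and the first of two same-position markers is always kept. A hedged sketch of the presumable intent, carrying the previous marker's rate alongside prevMarker and prevPosition; this is a guess at intent, not the dataset's recorded fix:
    /* inside the marker loop, after genoPC has been computed;
       double prevGenoPC = -1; would be declared beside prevMarker */
    if ( pos == prevPosition ) {
        dupsToBeFlagged = true;
        if ( genoPC >= prevGenoPC ) {   /* compare against the PREVIOUS marker's rate */
            thisMarker.setDup( 1 );     /* keep this one: at least as many genotypes */
            prevMarker.setDup( 2 );
        } else {
            thisMarker.setDup( 2 );
            prevMarker.setDup( 1 );
        }
    }
    prevGenoPC = genoPC;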
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ /* this method is called to gather data about the markers used. It is assumed that the input file is two columns, the first being the name and the second the absolute position. the maxdist is used to determine beyond what distance comparisons will not be made. if the infile param is null, loads up "dummy info" for situation where no info file exists. An optional third column is supported which is designed to hold association study data. If there is a third column there will be a visual indicator in the D' display that there is additional data and the detailed data can be viewed with a mouse press. */ Vector names = new Vector(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; /* read the input file: */ BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ /* complain if only one field found */ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ /* skip blank lines */ continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } /* basically if anyone is crazy enough to load a dataset, then go back and load an out-of-order info file we tell them to bugger off and start over. */ if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; names.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ /* we know some stuff from the hapmap so we'll add it here */ for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ /* we only sort if we read the info from an info file. if it is from a hapmap file, then the markers were already sorted when they were read in (in class Pedfile). */ int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; /* this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. it also creates a hashtable with the positions as keys and their index in the pos[] array as the value */ for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } /* loop through and check if any markers are out of order */ for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } /* if any were out of order, then we need to put them in order */ if(needSort) { /* sort the positions */ Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; /* reorder the vectors names and extras so that they have the same order as the sorted markers */ for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; /* now we reorder all the individuals genotypes according to the sorted marker order */ for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = pedFile.getResults(); long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = (MarkerResult)results.elementAt(i); /* to compute minor/major alleles, browse chrom list and count instances of each allele */ byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ /* if there is a data point for this marker on this chromosome */ byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; a1 = a2; a2 = temp; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, Math.rint(mr.getMAF()*100.0)/100.0, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); double genoPC = mr.getGenoPercent(); /* check to make sure adjacent SNPs do not have identical positions */ if (prevPosition != Long.MIN_VALUE){ /* only do this for markers 2..N, since we're comparing to the previous location */ if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= mr.getGenoPercent()){ /* use this one because it has more genotypes */ thisMarker.setDup(1); prevMarker.setDup(2); }else{ /* use the other one because it has more genotypes */ thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(mr.getMAF()*100.0)/100.0,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; names.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; //this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. 
//it also creates a hashtable with the positions as keys and their index in the pos[] array as the value for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = pedFile.getResults(); long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = (MarkerResult)results.elementAt(i); //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; a1 = a2; a2 = temp; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, Math.rint(mr.getMAF()*100.0)/100.0, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= mr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), 
Math.rint(mr.getMAF()*100.0)/100.0,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
1,110,611
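Row 1,110,611 pairs two copies of Haploview's prepareMarkerInput; no difference between the bugged and fixed columns is apparent in this excerpt. The method reads a whitespace-delimited info file of marker name, position, and an optional extra column, validates ordering and marker counts against the loaded data, and, when markers arrive out of order, sorts them by position and reorders every chromosome's genotypes to match. The reordering step can be summarized with the following self-contained sketch; it assumes unique positions, as the original Hashtable-based code implicitly does, and the class and variable names are illustrative rather than taken from the dataset.

import java.util.*;

public class MarkerSortSketch {
    public static void main(String[] args) {
        // Positions as read from the info file, with parallel marker names.
        long[] pos = {300, 100, 200};
        String[] names = {"rs3", "rs1", "rs2"};

        // Map each position to its original index, mirroring sortHelp.
        Map<Long, Integer> sortHelp = new HashMap<>();
        for (int k = 0; k < pos.length; k++) {
            sortHelp.put(pos[k], k);
        }

        // Sort the positions, then walk them to recover each original
        // index (realPos in the dataset rows) and emit markers in order.
        long[] sorted = pos.clone();
        Arrays.sort(sorted);
        for (long p : sorted) {
            int realPos = sortHelp.get(p);
            System.out.println(p + " -> " + names[realPos]);
        }
    }
}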
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; names.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; //this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. 
//it also creates a hashtable with the positions as keys and their index in the pos[] array as the value for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = pedFile.getResults(); long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = (MarkerResult)results.elementAt(i); //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; a1 = a2; a2 = temp; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, Math.rint(mr.getMAF()*100.0)/100.0, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= mr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), 
Math.rint(mr.getMAF()*100.0)/100.0,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; names.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; //this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. 
//it also creates a hashtable with the positions as keys and their index in the pos[] array as the value for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = pedFile.getResults(); long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = (MarkerResult)results.elementAt(i); //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; a1 = a2; a2 = temp; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, Math.rint(mr.getMAF()*100.0)/100.0, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= mr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), maf,a1,a2)); } 
percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
1,110,614
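Row 1,110,614 repeats the same bugged method, but its fixed column does differ: in the no-info-file branch, the inline Math.rint(mr.getMAF()*100.0)/100.0 argument to the dummy SNP constructor is replaced by a maf variable (whose declaration falls outside what is visible in this excerpt). The expression itself rounds the minor-allele frequency to two decimal places, as this runnable one-off shows; the class name is illustrative.

public class MafRounding {
    public static void main(String[] args) {
        double maf = 0.123456;
        // Math.rint(x * 100.0) / 100.0 rounds to the nearest hundredth.
        System.out.println(Math.rint(maf * 100.0) / 100.0); // prints 0.12
    }
}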
public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception{ ApplicationForm appForm = (ApplicationForm)actionForm; /* create ApplicationConfigData from this form */ ApplicationConfigData appConfigData = new ApplicationConfigData(); CoreUtils.copyProperties(appConfigData, appForm); Map<String, String> paramValues = new HashMap<String, String>(); if(appForm.getJndiFactory() != null) paramValues.put(ApplicationConfig.JNDI_FACTORY, appForm.getJndiFactory()); if(appForm.getJndiURL() != null) paramValues.put(ApplicationConfig.JNDI_URL, appForm.getJndiURL()); appConfigData.setParamValues(paramValues); ConfigurationService service = ServiceFactory.getConfigurationService(); service.addApplication(Utils.getServiceContext(context), appConfigData); return mapping.findForward(Forwards.SUCCESS); }
public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception{ ApplicationForm appForm = (ApplicationForm)actionForm; /* create ApplicationConfigData from this form */ ApplicationConfigData appConfigData = new ApplicationConfigData(); CoreUtils.copyProperties(appConfigData, appForm); Map<String, String> paramValues = new HashMap<String, String>(); if(appForm.getJndiFactory() != null) paramValues.put(ApplicationConfig.JNDI_FACTORY, appForm.getJndiFactory()); if(appForm.getJndiURL() != null) paramValues.put(ApplicationConfig.JNDI_URL, appForm.getJndiURL()); appConfigData.setParamValues(paramValues); ConfigurationService service = ServiceFactory.getConfigurationService(); service.addAppWithDashboard(Utils.getServiceContext(context), appConfigData); return mapping.findForward(Forwards.SUCCESS); }
1,110,615
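Row 1,110,615: the only change is the service call, from service.addApplication(...) to service.addAppWithDashboard(...); the form-to-config copying and the JNDI parameter map are untouched.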
public UserList(int count, String namePrefix, String domain, String password) { this(count, namePrefix, domain); m_password = password; }
public UserList(int count, String namePrefix, String domain, String password) { this(count, namePrefix, domain); m_password = password; }
1,110,616
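Row 1,110,616: the bugged and fixed columns are byte-identical UserList constructors, so this row records no actual change.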
protected void setup() { GenericDDLGenerator ddlg = project.getDDLGenerator(); setLayout(new FormLayout()); add(new JLabel("Allow Connection?")); add(allowConnection = new JCheckBox()); allowConnection.setSelected(ddlg.getAllowConnection()); add(new JLabel("Output File")); JPanel p = new JPanel(new FlowLayout()); p.add(filename = new JTextField((ddlg.getFile() == null ? "" : ddlg.getFile().getPath()), 35)); p.add(fileChooserButton = new JButton("...")); fileChooserButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { JFileChooser fc = new JFileChooser(); fc.addChoosableFileFilter(ASUtils.sqlFileFilter); fc.setSelectedFile(new File(filename.getText())); int rv = fc.showDialog(DDLExportPanel.this, "Ok"); if (rv == JFileChooser.APPROVE_OPTION) { filename.setText(fc.getSelectedFile().getPath()); } } }); add(p); Vector dbTypeList = new Vector(); dbTypeList.add(ASUtils.lvb("Generic JDBC", GenericDDLGenerator.class)); dbTypeList.add(ASUtils.lvb("DB2", DB2DDLGenerator.class)); dbTypeList.add(ASUtils.lvb("Oracle 8i/9i", OracleDDLGenerator.class)); dbTypeList.add(ASUtils.lvb("SQLServer 2000", SQLServerDDLGenerator.class)); add(new JLabel("Database Type")); add(dbType = new JComboBox(dbTypeList)); if (ddlg.getClass() == GenericDDLGenerator.class) { dbType.setSelectedIndex(0); } else if (ddlg.getClass() == DB2DDLGenerator.class) { dbType.setSelectedIndex(1); } else if (ddlg.getClass() == OracleDDLGenerator.class) { dbType.setSelectedIndex(2); } else if (ddlg.getClass() == SQLServerDDLGenerator.class) { dbType.setSelectedIndex(3); } else { logger.error("Unknown DDL generator class "+ddlg.getClass()); dbType.addItem(ASUtils.lvb("Unknown Generator", ddlg.getClass())); } }
protected void setup() { GenericDDLGenerator ddlg = project.getDDLGenerator(); setLayout(new FormLayout()); add(new JLabel("Allow Connection?")); add(allowConnection = new JCheckBox()); allowConnection.setSelected(ddlg.getAllowConnection()); add(new JLabel("Output File")); JPanel p = new JPanel(new FlowLayout()); File outFile = ddlg.getFile(); if (outFile == null) { outFile = new File(System.getProperty("user.dir"), project.getName()+".ddl"); } p.add(filename = new JTextField((outFile.getPath()), 35)); p.add(fileChooserButton = new JButton("...")); fileChooserButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { JFileChooser fc = new JFileChooser(); fc.addChoosableFileFilter(ASUtils.sqlFileFilter); fc.setSelectedFile(new File(filename.getText())); int rv = fc.showDialog(DDLExportPanel.this, "Ok"); if (rv == JFileChooser.APPROVE_OPTION) { filename.setText(fc.getSelectedFile().getPath()); } } }); add(p); Vector dbTypeList = new Vector(); dbTypeList.add(ASUtils.lvb("Generic JDBC", GenericDDLGenerator.class)); dbTypeList.add(ASUtils.lvb("DB2", DB2DDLGenerator.class)); dbTypeList.add(ASUtils.lvb("Oracle 8i/9i", OracleDDLGenerator.class)); dbTypeList.add(ASUtils.lvb("SQLServer 2000", SQLServerDDLGenerator.class)); add(new JLabel("Database Type")); add(dbType = new JComboBox(dbTypeList)); if (ddlg.getClass() == GenericDDLGenerator.class) { dbType.setSelectedIndex(0); } else if (ddlg.getClass() == DB2DDLGenerator.class) { dbType.setSelectedIndex(1); } else if (ddlg.getClass() == OracleDDLGenerator.class) { dbType.setSelectedIndex(2); } else if (ddlg.getClass() == SQLServerDDLGenerator.class) { dbType.setSelectedIndex(3); } else { logger.error("Unknown DDL generator class "+ddlg.getClass()); dbType.addItem(ASUtils.lvb("Unknown Generator", ddlg.getClass())); } }
1,110,617
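The fix above replaces an empty filename field with a computed default when ddlg.getFile() is null. The fallback in isolation, as a minimal sketch; the helper name DdlFileDefaults and the projectName parameter are illustrative, while the user.dir/<name>.ddl default comes from the fixed version:

import java.io.File;

class DdlFileDefaults {
    // Fallback mirroring the fixed setup(): prefer the generator's configured
    // file, otherwise default to <working dir>/<projectName>.ddl.
    static File defaultOutputFile(File configured, String projectName) {
        if (configured != null) {
            return configured;
        }
        return new File(System.getProperty("user.dir"), projectName + ".ddl");
    }
}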
protected void fireDbChildrenInserted(int[] newIndices, List newChildren) { System.out.println("SQLObject: firing dbChildrenInserted event"); SQLObjectEvent e = new SQLObjectEvent (this, newIndices, (SQLObject[]) newChildren.toArray(new SQLObject[newChildren.size()])); Iterator it = getSqlObjectListeners().iterator(); int count = 0; while (it.hasNext()) { count ++; ((SQLObjectListener) it.next()).dbChildrenInserted(e); } System.out.println("SQLObject: notified "+count+" listeners"); }
protected void fireDbChildrenInserted(int[] newIndices, List newChildren) { System.out.println(getClass().getName()+": firing dbChildrenInserted event"); SQLObjectEvent e = new SQLObjectEvent (this, newIndices, (SQLObject[]) newChildren.toArray(new SQLObject[newChildren.size()])); Iterator it = getSqlObjectListeners().iterator(); int count = 0; while (it.hasNext()) { count ++; ((SQLObjectListener) it.next()).dbChildrenInserted(e); } System.out.println("SQLObject: notified "+count+" listeners"); }
1,110,619
protected void fireDbChildrenInserted(int[] newIndices, List newChildren) { System.out.println("SQLObject: firing dbChildrenInserted event"); SQLObjectEvent e = new SQLObjectEvent (this, newIndices, (SQLObject[]) newChildren.toArray(new SQLObject[newChildren.size()])); Iterator it = getSqlObjectListeners().iterator(); int count = 0; while (it.hasNext()) { count ++; ((SQLObjectListener) it.next()).dbChildrenInserted(e); } System.out.println("SQLObject: notified "+count+" listeners"); }
protected void fireDbChildrenInserted(int[] newIndices, List newChildren) { System.out.println("SQLObject: firing dbChildrenInserted event"); SQLObjectEvent e = new SQLObjectEvent (this, newIndices, (SQLObject[]) newChildren.toArray(new SQLObject[newChildren.size()])); Iterator it = getSqlObjectListeners().iterator(); int count = 0; while (it.hasNext()) { count ++; ((SQLObjectListener) it.next()).dbChildrenInserted(e); } System.out.println(getClass().getName()+": notified "+count+" listeners"); }
1,110,620
protected TagScript createStaticTag( String namespaceURI, String localName, Attributes list ) throws SAXException { try { Tag tag = new StaticTag( namespaceURI, localName ); TagScript script = new TagScript( tag ); // now iterate through the expressions int size = list.getLength(); for ( int i = 0; i < size; i++ ) { String attributeName = list.getLocalName(i); String attributeValue = list.getValue(i); Expression expression = getExpressionFactory().createExpression( attributeValue ); if ( expression == null ) { expression = createExpression( localName, attributeName, attributeValue ); } script.addAttribute( attributeName, expression ); } return script; } catch (Exception e) { log.warn( "Could not create static tag for URI: " + namespaceURI + " tag name: " + localName, e ); throw createSAXException(e); } }
protected TagScript createStaticTag( String namespaceURI, String localName, Attributes list ) throws SAXException { try { Tag tag = new StaticTag( namespaceURI, localName ); TagScript script = new TagScript( tag ); // now iterate through the expressions int size = list.getLength(); for ( int i = 0; i < size; i++ ) { String attributeName = list.getLocalName(i); String attributeValue = list.getValue(i); Expression expression = getExpressionFactory().createExpression( attributeValue ); if ( expression == null ) { expression = createConstantExpression( localName, attributeName, attributeValue ); } script.addAttribute( attributeName, expression ); } return script; } catch (Exception e) { log.warn( "Could not create static tag for URI: " + namespaceURI + " tag name: " + localName, e ); throw createSAXException(e); } }
1,110,621
protected TagScript createTag( String namespaceURI, String localName, Attributes list ) throws SAXException { try { // use the URI to load a taglib TagLibrary taglib = (TagLibrary) taglibs.get( namespaceURI ); if ( taglib == null ) { if ( namespaceURI != null && namespaceURI.startsWith( "jelly:" ) ) { String uri = namespaceURI.substring(6); // try to find the class on the classpath try { Class taglibClass = getClassLoader().loadClass( uri ); taglib = (TagLibrary) taglibClass.newInstance(); } catch (ClassNotFoundException e) { log.warn( "Could not load class: " + uri + " so disabling the taglib" ); } } } if ( taglib != null ) { TagScript script = taglib.createTagScript( localName, list ); // now iterate through the expressions int size = list.getLength(); for ( int i = 0; i < size; i++ ) { String attributeName = list.getLocalName(i); String attributeValue = list.getValue(i); Expression expression = taglib.createExpression( getExpressionFactory(), localName, attributeName, attributeValue ); if ( expression == null ) { expression = createExpression( localName, attributeName, attributeValue ); } script.addAttribute( attributeName, expression ); } return script; } return null; } catch (Exception e) { log.warn( "Could not create taglib or URI: " + namespaceURI + " tag name: " + localName, e ); throw createSAXException(e); } catch (Throwable e) { log.warn( "Could not create taglib or URI: " + namespaceURI + " tag name: " + localName, e ); return null; } }
protected TagScript createTag( String namespaceURI, String localName, Attributes list ) throws SAXException { try { // use the URI to load a taglib TagLibrary taglib = (TagLibrary) taglibs.get( namespaceURI ); if ( taglib == null ) { if ( namespaceURI != null && namespaceURI.startsWith( "jelly:" ) ) { String uri = namespaceURI.substring(6); // try to find the class on the classpath try { Class taglibClass = getClassLoader().loadClass( uri ); taglib = (TagLibrary) taglibClass.newInstance(); } catch (ClassNotFoundException e) { log.warn( "Could not load class: " + uri + " so disabling the taglib" ); } } } if ( taglib != null ) { TagScript script = taglib.createTagScript( localName, list ); // now iterate through the expressions int size = list.getLength(); for ( int i = 0; i < size; i++ ) { String attributeName = list.getLocalName(i); String attributeValue = list.getValue(i); Expression expression = taglib.createExpression( getExpressionFactory(), localName, attributeName, attributeValue ); if ( expression == null ) { expression = createConstantExpression( localName, attributeName, attributeValue ); } script.addAttribute( attributeName, expression ); } return script; } return null; } catch (Exception e) { log.warn( "Could not create taglib or URI: " + namespaceURI + " tag name: " + localName, e ); throw createSAXException(e); } catch (Throwable e) { log.warn( "Could not create taglib or URI: " + namespaceURI + " tag name: " + localName, e ); return null; } }
1,110,622
public Script compileScript() throws Exception { XMLParser parser = new XMLParser(); parser.setContext( getContext() ); Script script = parser.parse( getUrl().openStream() ); script = script.compile(); if ( log.isDebugEnabled() ) { log.debug( "Compiled script: " + getUrl() ); } return script; }
public Script compileScript() throws Exception { XMLParser parser = new XMLParser(); parser.setContext( getContext() ); Script script = parser.parse( getUrl().openStream() ); script = script.compile(); if ( log.isDebugEnabled() ) { log.debug( "Compiled script: " + getUrl() ); } return script; }
1,110,623
public static void main(String[] args) throws Exception { if ( args.length <= 0 ) { System.out.println( "Usage: Jelly scriptFile [outputFile]" ); return; } Jelly jelly = new Jelly(); jelly.setScript( args[0] ); /* // later we might wanna add some command line arguments // checking stuff using commons-cli to specify the output file // and input file via command line arguments Writer writer = ( args.length > 1 ) ? new FileWriter( args[1] ) : new OutputStreamWriter( System.out ); BufferedWriter output = new BufferedWriter( writer );*/ Writer writer = new BufferedWriter( new OutputStreamWriter( System.out ) ); Script script = jelly.compileScript(); XMLOutput output = XMLOutput.createXMLOutput( writer ); // add the system properties and the command line arguments Context context = jelly.getContext(); context.setVariable( "args", args ); script.run( context, output ); writer.close(); }
public static void main(String[] args) throws Exception { if ( args.length <= 0 ) { System.out.println( "Usage: Jelly scriptFile [outputFile]" ); return; } Jelly jelly = new Jelly(); jelly.setScript( args[0] ); /* // later we might wanna add some command line arguments // checking stuff using commons-cli to specify the output file // and input file via command line arguments Writer writer = ( args.length > 1 ) ? new FileWriter( args[1] ) : new OutputStreamWriter( System.out ); BufferedWriter output = new BufferedWriter( writer );*/ Writer writer = new BufferedWriter( new OutputStreamWriter( System.out ) ); Script script = jelly.compileScript(); XMLOutput output = XMLOutput.createXMLOutput( writer ); // add the system properties and the command line arguments Context context = jelly.getContext(); context.setVariable( "args", args ); script.run( context, output ); writer.close(); }
1,110,624
public void setCropBounds( Rectangle2D cropBounds ) { ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); if ( !cropBounds.equals( getCropBounds() ) ) { // Crop bounds change, invalidate the thumbnail thumbnail = null; } cropMinX = cropBounds.getMinX(); cropMinY = cropBounds.getMinY(); cropMaxX = cropBounds.getMaxX(); cropMaxY = cropBounds.getMaxY(); checkCropBounds(); modified(); txw.commit(); }
public void setCropBounds( Rectangle2D cropBounds ) { ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); if ( !cropBounds.equals( getCropBounds() ) ) { // Crop bounds change, invalidate the thumbnail invalidateThumbnail(); } cropMinX = cropBounds.getMinX(); cropMinY = cropBounds.getMinY(); cropMaxX = cropBounds.getMaxX(); cropMaxY = cropBounds.getMaxY(); checkCropBounds(); modified(); txw.commit(); }
1,110,628
public void setPrefRotation(double v) { ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); if ( v != prefRotation ) { // Rotation changes, invalidate the thumbnail thumbnail = null; } this.prefRotation = v; modified(); txw.commit(); }
public void setPrefRotation(double v) { ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); if ( v != prefRotation ) { // Rotation changes, invalidate the thumbnail invalidateThumbnail(); } this.prefRotation = v; modified(); txw.commit(); }
1,110,629
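The two pairs above stop clearing the thumbnail field directly and route invalidation through invalidateThumbnail(). A minimal sketch of why centralizing this helps; only the method name comes from the fixed versions, the rest of the class is assumed for illustration:

// Sketch only: a PhotoInfo-style class with invalidation centralized in one
// method, so every mutator picks up the same cache bookkeeping.
class ThumbnailHolder {
    private Object thumbnail;   // cached thumbnail image, rebuilt on demand
    private double prefRotation;

    protected void invalidateThumbnail() {
        // one place to grow extra bookkeeping later (disk caches, listeners, ...)
        thumbnail = null;
    }

    public void setPrefRotation(double v) {
        if (v != prefRotation) {
            invalidateThumbnail(); // single point of truth, as in the fixes above
        }
        this.prefRotation = v;
    }
}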
public void run(JellyContext context, XMLOutput output) throws JellyTagException { try { startNamespacePrefixes(output); } catch (SAXException e) { throw new JellyTagException("could not start namespace prefixes",e); } Tag tag = null; try { tag = getTag(context); // lets see if we have a dynamic tag if (tag instanceof StaticTag) { tag = findDynamicTag(context, (StaticTag) tag); } setTag(tag,context); } catch (JellyException e) { throw new JellyTagException(e); } URL rootURL = context.getRootURL(); URL currentURL = context.getCurrentURL(); try { if ( tag == null ) { return; } tag.setContext(context); setContextURLs(context); DynaTag dynaTag = (DynaTag) tag; // ### probably compiling this to 2 arrays might be quicker and smaller for (Iterator iter = attributes.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); Expression expression = (Expression) entry.getValue(); Object value = null; if ( Expression.class.isAssignableFrom( dynaTag.getAttributeType(name) ) ) { value = expression; } else { value = expression.evaluate(context); } dynaTag.setAttribute(name, value); } tag.doTag(output); } catch (JellyTagException e) { handleException(e); } catch (RuntimeException e) { handleException(e); } finally { context.setCurrentURL(currentURL); context.setRootURL(rootURL); } try { endNamespacePrefixes(output); } catch (SAXException e) { throw new JellyTagException("could not end namespace prefixes",e); } }
public void run(JellyContext context, XMLOutput output) throws JellyTagException { try { startNamespacePrefixes(output); } catch (SAXException e) { throw new JellyTagException("could not start namespace prefixes",e); } Tag tag = null; try { tag = getTag(context); // lets see if we have a dynamic tag if (tag instanceof StaticTag) { tag = findDynamicTag(context, (StaticTag) tag); } setTag(tag,context); } catch (JellyException e) { throw new JellyTagException(e); } URL rootURL = context.getRootURL(); URL currentURL = context.getCurrentURL(); try { if ( tag == null ) { return; } tag.setContext(context); setContextURLs(context); DynaTag dynaTag = (DynaTag) tag; // ### probably compiling this to 2 arrays might be quicker and smaller for (Iterator iter = attributes.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); if(name.indexOf(':')!=-1) name = name.substring(name.indexOf(':')+1); ExpressionAttribute expat = (ExpressionAttribute) entry.getValue(); Expression expression = expat.exp; Object value = null; if ( Expression.class.isAssignableFrom( dynaTag.getAttributeType(name) ) ) { value = expression; } else { value = expression.evaluate(context); } dynaTag.setAttribute(name, value); } tag.doTag(output); } catch (JellyTagException e) { handleException(e); } catch (RuntimeException e) { handleException(e); } finally { context.setCurrentURL(currentURL); context.setRootURL(rootURL); } try { endNamespacePrefixes(output); } catch (SAXException e) { throw new JellyTagException("could not end namespace prefixes",e); } }
1,110,631
public void run(JellyContext context, XMLOutput output) throws JellyTagException { try { startNamespacePrefixes(output); } catch (SAXException e) { throw new JellyTagException("could not start namespace prefixes",e); } Tag tag = null; try { tag = getTag(context); // lets see if we have a dynamic tag if (tag instanceof StaticTag) { tag = findDynamicTag(context, (StaticTag) tag); } setTag(tag,context); } catch (JellyException e) { throw new JellyTagException(e); } URL rootURL = context.getRootURL(); URL currentURL = context.getCurrentURL(); try { if ( tag == null ) { return; } tag.setContext(context); setContextURLs(context); DynaTag dynaTag = (DynaTag) tag; // ### probably compiling this to 2 arrays might be quicker and smaller for (Iterator iter = attributes.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); Expression expression = (Expression) entry.getValue(); Object value = null; if ( Expression.class.isAssignableFrom( dynaTag.getAttributeType(name) ) ) { value = expression; } else { value = expression.evaluate(context); } dynaTag.setAttribute(name, value); } tag.doTag(output); } catch (JellyTagException e) { handleException(e); } catch (RuntimeException e) { handleException(e); } finally { context.setCurrentURL(currentURL); context.setRootURL(rootURL); } try { endNamespacePrefixes(output); } catch (SAXException e) { throw new JellyTagException("could not end namespace prefixes",e); } }
public void run(JellyContext context, XMLOutput output) throws JellyTagException { try { startNamespacePrefixes(output); } catch (SAXException e) { throw new JellyTagException("could not start namespace prefixes",e); } Tag tag = null; try { tag = getTag(context); // lets see if we have a dynamic tag if (tag instanceof StaticTag) { tag = findDynamicTag(context, (StaticTag) tag); } setTag(tag,context); } catch (JellyException e) { throw new JellyTagException(e); } URL rootURL = context.getRootURL(); URL currentURL = context.getCurrentURL(); try { if ( tag == null ) { return; } tag.setContext(context); setContextURLs(context); DynaTag dynaTag = (DynaTag) tag; // ### probably compiling this to 2 arrays might be quicker and smaller for (Iterator iter = attributes.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); ExpressionAttribute expat = (ExpressionAttribute) entry.getValue(); Expression expression = expat.exp; Object value = null; if ( Expression.class.isAssignableFrom( dynaTag.getAttributeType(name) ) ) { value = expression; } else { value = expression.evaluate(context); } if(expat.prefix!=null && expat.prefix.length()>0 && tag instanceof StaticTag) ((StaticTag) dynaTag).setAttribute(name,expat.prefix, expat.nsURI,value); else dynaTag.setAttribute(name, value); } tag.doTag(output); } catch (JellyTagException e) { handleException(e); } catch (RuntimeException e) { handleException(e); } finally { context.setCurrentURL(currentURL); context.setRootURL(rootURL); } try { endNamespacePrefixes(output); } catch (SAXException e) { throw new JellyTagException("could not end namespace prefixes",e); } }
1,110,632
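Both run() pairs above hinge on the attribute map holding ExpressionAttribute entries that carry the expression together with its namespace prefix and URI. The holder itself never appears in this section; a minimal sketch of what the fixed code reads (the field names exp, prefix, and nsURI come from the fixed versions, while the constructor and the Expression import are assumptions):

import org.apache.commons.jelly.expression.Expression;

// Minimal holder matching the three fields the fixed run() methods read
// (expat.exp, expat.prefix, expat.nsURI).
class ExpressionAttribute {
    final Expression exp;  // compiled attribute expression
    final String prefix;   // namespace prefix, "" when the attribute is unqualified
    final String nsURI;    // namespace URI, "" when the attribute is unqualified

    ExpressionAttribute(Expression exp, String prefix, String nsURI) {
        this.exp = exp;
        this.prefix = prefix;
        this.nsURI = nsURI;
    }
}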
public void actionPerformed(ActionEvent e) { logger.debug("DBCS Action invoked"); ArchitectDataSource newDS = dbcsPanel.getDbcs(); String curName = null; for (Component c : ((TextPanel)dbcsPanel.getComponents()[0]).getComponents()) { if ("dbNameField".equals(c.getName())){ curName = ((JTextField) c).getText(); } } if (curName == null ) { throw new ArchitectRuntimeException(new ArchitectException("DBCS Panel improperly initialized")); } if (isNew) { dbcsPanel.applyChanges(); if ("".equals(newDS.getName().trim())) { JOptionPane.showMessageDialog(newConnectionDialog,"A connection must have at least 1 character that is not whitespace"); newConnectionDialog.setVisible(true); } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); if (plDotIni.getDataSource(newDS.getName()) == null ) { plDotIni.addDataSource(newDS); if (connectionSelectionCallBack != null) { connectionSelectionCallBack.selectDBConnection(newDS); } } else { JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+curName+"\" already exists"); newConnectionDialog.setVisible(true); } } } else if ("".equals(curName.trim())) { JOptionPane.showMessageDialog(newConnectionDialog,"A connection must have at least 1 character that is not whitespace"); newConnectionDialog.setVisible(true); } else if (curName.equals(oldName)) { System.out.println("The current Name is the same as the old name"); dbcsPanel.applyChanges(); } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); ArchitectDataSource dataSource = plDotIni.getDataSource(curName); if (dataSource == null ) { dbcsPanel.applyChanges(); } else { JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+curName+"\" already exists"); newConnectionDialog.setVisible(true); } } }
public void actionPerformed(ActionEvent e) { logger.debug("DBCS Action invoked"); ArchitectDataSource newDS = dbcsPanel.getDbcs(); String curName = null; for (Component c : ((TextPanel)dbcsPanel.getComponents()[0]).getComponents()) { if ("dbNameField".equals(c.getName())){ curName = ((JTextField) c).getText(); } } if (curName == null ) { throw new ArchitectRuntimeException(new ArchitectException("DBCS Panel improperly initialized")); } if (isNew) { dbcsPanel.applyChanges(); if ("".equals(newDS.getName().trim())) { JOptionPane.showMessageDialog(newConnectionDialog,"A connection must have at least 1 character that is not whitespace"); newConnectionDialog.setVisible(true); } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); if (plDotIni.getDataSource(newDS.getName()) == null ) { plDotIni.addDataSource(newDS); if (connectionSelectionCallBack != null) { connectionSelectionCallBack.selectDBConnection(newDS); } } else { JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+curName+"\" already exists"); newConnectionDialog.setVisible(true); } } } else if ("".equals(curName.trim())) { JOptionPane.showMessageDialog(newConnectionDialog,"A connection must have at least 1 character that is not whitespace"); newConnectionDialog.setVisible(true); } else if (curName.equals(oldName)) { logger.debug("The current Name is the same as the old name"); dbcsPanel.applyChanges(); } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); ArchitectDataSource dataSource = plDotIni.getDataSource(curName); if (dataSource == null ) { dbcsPanel.applyChanges(); } else { JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+curName+"\" already exists"); newConnectionDialog.setVisible(true); } } }
1,110,633
public void characters(char ch[], int start, int length) throws SAXException { contentHandler.characters(ch, start, length); }
public void characters(char[] ch, int start, int length) throws SAXException { contentHandler.characters(ch, start, length); }
1,110,636
public static XMLOutput createXMLOutput(XMLReader xmlReader) { XMLOutput output = new XMLOutput(xmlReader.getContentHandler()); // isn't it lovely what we've got to do to find the LexicalHandler... ;-) for (int i = 0; i < LEXICAL_HANDLER_NAMES.length; i++) { try { Object value = xmlReader.getProperty(LEXICAL_HANDLER_NAMES[i]); if (value instanceof LexicalHandler) { output.setLexicalHandler((LexicalHandler) value); break; } } catch (Exception e) { // ignore any unsupported-operation exceptions if (log.isDebugEnabled()) log.debug("error setting lexical handler properties", e); } } return output; }
public static XMLOutput createXMLOutput(XMLReader xmlReader) { XMLOutput output = new XMLOutput(xmlReader.getContentHandler()); // isn't it lovely what we've got to do to find the LexicalHandler... ;-) for (int i = 0; i < LEXICAL_HANDLER_NAMES.length; i++) { try { Object value = xmlReader.getProperty(LEXICAL_HANDLER_NAMES[i]); if (value instanceof LexicalHandler) { output.setLexicalHandler((LexicalHandler) value); break; } } catch (Exception e) { // ignore any unsupported-operation exceptions if (log.isDebugEnabled()) { log.debug("error setting lexical handler properties", e); } } } return output; }
1,110,637
public void endPrefixMapping(String prefix) throws SAXException { contentHandler.endPrefixMapping(prefix); }
public void endPrefixMapping(String prefix) throws SAXException { namespaceStack.popNamespace(prefix); }
1,110,638
public void ignorableWhitespace(char ch[], int start, int length) throws SAXException { contentHandler.ignorableWhitespace(ch, start, length); }
public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException { contentHandler.ignorableWhitespace(ch, start, length); }
1,110,640
public void startPrefixMapping(String prefix, String uri) throws SAXException { contentHandler.startPrefixMapping(prefix, uri); }
public void startPrefixMapping(String prefix, String uri) throws SAXException { namespaceStack.pushNamespace(prefix, uri); }
1,110,641
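The startPrefixMapping/endPrefixMapping pairs above route prefix bookkeeping through a namespace stack instead of forwarding straight to the content handler, so mappings can be replayed when elements are actually emitted; pushes and pops must stay balanced per prefix. A hypothetical stand-in showing just the two calls the fixed methods make (the real field is presumably a library class):

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;

// Hypothetical sketch of the namespaceStack contract: pushNamespace records a
// prefix->URI mapping, popNamespace retires the most recent mapping for that prefix.
class SimpleNamespaceStack {
    private static final class Entry {
        final String prefix, uri;
        Entry(String prefix, String uri) { this.prefix = prefix; this.uri = uri; }
    }

    private final Deque<Entry> stack = new ArrayDeque<Entry>();

    public void pushNamespace(String prefix, String uri) {
        stack.push(new Entry(prefix, uri));
    }

    public void popNamespace(String prefix) {
        // SAX guarantees start/end prefix events nest, so the matching
        // mapping is normally on top; search defensively anyway.
        Iterator<Entry> it = stack.iterator();
        while (it.hasNext()) {
            if (it.next().prefix.equals(prefix)) {
                it.remove();
                return;
            }
        }
    }
}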
private void full_em_breakup( byte[][] input_haplos, int[] block_size, int numTrios, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (Options.getAssocTest() == 2) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; 
block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ double[] superprob = new double[poss_full]; create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); System.out.println(tempHap); } */ Vector caseFreqs = new Vector(); Vector controlFreqs = new Vector(); //suffers from OCD :) double[] tempCase, tempControl, totalCase, totalControl; if (Options.getAssocTest() == 1){ tempCase = new double[poss_full]; tempControl = new double[poss_full]; totalCase = new double[poss_full]; totalControl = new double[poss_full]; double tempnorm=0; for (int i = numTrios*2; i < num_indivs; i++){ for (int n=0; n<superdata[i].nsuper; n++) { if (((Integer)affStatus.elementAt(i)).intValue() == 1){ tempControl[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempControl[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; }else if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempCase[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempCase[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; } tempnorm += superdata[i].superposs[n].p; } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempCase[j] > 0.0000 || tempControl[j] > 0.0000) { totalCase[j] += (tempCase[j]/tempnorm); totalControl[j] += (tempControl[j]/tempnorm); tempCase[j]=tempControl[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j 
<poss_full; j++){ if (superprob[j] > .001) { caseFreqs.add(new Double(totalCase[j])); controlFreqs.add(new Double(totalControl[j])); } } } double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(Options.getAssocTest() == 2) { double tempnorm=0,product; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; if (Options.getAssocTest() == 2){ this.obsT = obsT; this.obsU = obsU; } else if (Options.getAssocTest() == 1){ this.caseFreqs = caseFreqs; this.controlFreqs = controlFreqs; } /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
private void full_em_breakup( byte[][] input_haplos, int[] block_size, int numTrios, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for 
(block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ double[] superprob = new double[poss_full]; create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); System.out.println(tempHap); } */ Vector caseFreqs = new Vector(); Vector controlFreqs = new Vector(); //suffers from OCD :) double[] tempCase, tempControl, totalCase, totalControl; if (Options.getAssocTest() == 1){ tempCase = new double[poss_full]; tempControl = new double[poss_full]; totalCase = new double[poss_full]; totalControl = new double[poss_full]; double tempnorm=0; for (int i = numTrios*2; i < num_indivs; i++){ for (int n=0; n<superdata[i].nsuper; n++) { if (((Integer)affStatus.elementAt(i)).intValue() == 1){ tempControl[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempControl[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; }else if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempCase[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempCase[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; } tempnorm += superdata[i].superposs[n].p; } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempCase[j] > 0.0000 || tempControl[j] > 0.0000) { totalCase[j] += (tempCase[j]/tempnorm); totalControl[j] += (tempControl[j]/tempnorm); tempCase[j]=tempControl[j]=0.0000; } } tempnorm=0.00; } } for (int 
j = 0; j <poss_full; j++){ if (superprob[j] > .001) { caseFreqs.add(new Double(totalCase[j])); controlFreqs.add(new Double(totalControl[j])); } } } double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(Options.getAssocTest() == 2) { double tempnorm=0,product; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; if (Options.getAssocTest() == 2){ this.obsT = obsT; this.obsU = obsU; } else if (Options.getAssocTest() == 1){ this.caseFreqs = caseFreqs; this.controlFreqs = controlFreqs; } /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
1,110,642
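The fix above swaps the magic number in Options.getAssocTest() == 2 for the named constant ASSOC_TRIO, and the following pair does the same with ASSOC_CC for the case/control branch. A minimal sketch of the pattern; the constant values are assumed from the literals they replace, and the wrapper class name is illustrative:

// Hypothetical constants mirroring the literals replaced in the fixes:
// the trio branch compared against 2, the case/control branch against 1.
final class AssocTests {
    static final int ASSOC_CC   = 1; // case/control association test
    static final int ASSOC_TRIO = 2; // trio (family-based) association test

    private AssocTests() {}
}

// Call sites then read as intent rather than magic numbers:
//   if (Options.getAssocTest() == ASSOC_TRIO) { ... }
//   if (Options.getAssocTest() == ASSOC_CC)   { ... }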
private void full_em_breakup( byte[][] input_haplos, int[] block_size, int numTrios, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (Options.getAssocTest() == 2) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; 
block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ double[] superprob = new double[poss_full]; create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); System.out.println(tempHap); } */ Vector caseFreqs = new Vector(); Vector controlFreqs = new Vector(); //suffers from OCD :) double[] tempCase, tempControl, totalCase, totalControl; if (Options.getAssocTest() == 1){ tempCase = new double[poss_full]; tempControl = new double[poss_full]; totalCase = new double[poss_full]; totalControl = new double[poss_full]; double tempnorm=0; for (int i = numTrios*2; i < num_indivs; i++){ for (int n=0; n<superdata[i].nsuper; n++) { if (((Integer)affStatus.elementAt(i)).intValue() == 1){ tempControl[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempControl[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; }else if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempCase[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempCase[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; } tempnorm += superdata[i].superposs[n].p; } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempCase[j] > 0.0000 || tempControl[j] > 0.0000) { totalCase[j] += (tempCase[j]/tempnorm); totalControl[j] += (tempControl[j]/tempnorm); tempCase[j]=tempControl[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j 
<poss_full; j++){ if (superprob[j] > .001) { caseFreqs.add(new Double(totalCase[j])); controlFreqs.add(new Double(totalControl[j])); } } } double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(Options.getAssocTest() == 2) { double tempnorm=0,product; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; if (Options.getAssocTest() == 2){ this.obsT = obsT; this.obsU = obsU; } else if (Options.getAssocTest() == 1){ this.caseFreqs = caseFreqs; this.controlFreqs = controlFreqs; } /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
private void full_em_breakup( byte[][] input_haplos, int[] block_size, int numTrios, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (Options.getAssocTest() == 2) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; 
block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ double[] superprob = new double[poss_full]; create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); System.out.println(tempHap); } */ Vector caseFreqs = new Vector(); Vector controlFreqs = new Vector(); //suffers from OCD :) double[] tempCase, tempControl, totalCase, totalControl; if (Options.getAssocTest() == ASSOC_CC){ tempCase = new double[poss_full]; tempControl = new double[poss_full]; totalCase = new double[poss_full]; totalControl = new double[poss_full]; double tempnorm=0; for (int i = numTrios*2; i < num_indivs; i++){ for (int n=0; n<superdata[i].nsuper; n++) { if (((Integer)affStatus.elementAt(i)).intValue() == 1){ tempControl[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempControl[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; }else if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempCase[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempCase[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; } tempnorm += superdata[i].superposs[n].p; } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempCase[j] > 0.0000 || tempControl[j] > 0.0000) { totalCase[j] += (tempCase[j]/tempnorm); totalControl[j] += (tempControl[j]/tempnorm); tempCase[j]=tempControl[j]=0.0000; } } tempnorm=0.00; } } for (int j = 
0; j <poss_full; j++){ if (superprob[j] > .001) { caseFreqs.add(new Double(totalCase[j])); controlFreqs.add(new Double(totalControl[j])); } } } double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(Options.getAssocTest() == 2) { double tempnorm=0,product; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; if (Options.getAssocTest() == 2){ this.obsT = obsT; this.obsU = obsU; } else if (Options.getAssocTest() == ASSOC_CC){ this.caseFreqs = caseFreqs; this.controlFreqs = controlFreqs; } /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
1,110,643
private void full_em_breakup( byte[][] input_haplos, int[] block_size, int numTrios, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (Options.getAssocTest() == 2) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; 
block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ double[] superprob = new double[poss_full]; create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); System.out.println(tempHap); } */ Vector caseFreqs = new Vector(); Vector controlFreqs = new Vector(); //suffers from OCD :) double[] tempCase, tempControl, totalCase, totalControl; if (Options.getAssocTest() == 1){ tempCase = new double[poss_full]; tempControl = new double[poss_full]; totalCase = new double[poss_full]; totalControl = new double[poss_full]; double tempnorm=0; for (int i = numTrios*2; i < num_indivs; i++){ for (int n=0; n<superdata[i].nsuper; n++) { if (((Integer)affStatus.elementAt(i)).intValue() == 1){ tempControl[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempControl[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; }else if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempCase[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempCase[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; } tempnorm += superdata[i].superposs[n].p; } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempCase[j] > 0.0000 || tempControl[j] > 0.0000) { totalCase[j] += (tempCase[j]/tempnorm); totalControl[j] += (tempControl[j]/tempnorm); tempCase[j]=tempControl[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j 
<poss_full; j++){ if (superprob[j] > .001) { caseFreqs.add(new Double(totalCase[j])); controlFreqs.add(new Double(totalControl[j])); } } } double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(Options.getAssocTest() == 2) { double tempnorm=0,product; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; if (Options.getAssocTest() == 2){ this.obsT = obsT; this.obsU = obsU; } else if (Options.getAssocTest() == 1){ this.caseFreqs = caseFreqs; this.controlFreqs = controlFreqs; } /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
private void full_em_breakup( byte[][] input_haplos, int[] block_size, int numTrios, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (Options.getAssocTest() == 2) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; 
block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ double[] superprob = new double[poss_full]; create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); System.out.println(tempHap); } */ Vector caseFreqs = new Vector(); Vector controlFreqs = new Vector(); //suffers from OCD :) double[] tempCase, tempControl, totalCase, totalControl; if (Options.getAssocTest() == 1){ tempCase = new double[poss_full]; tempControl = new double[poss_full]; totalCase = new double[poss_full]; totalControl = new double[poss_full]; double tempnorm=0; for (int i = numTrios*2; i < num_indivs; i++){ for (int n=0; n<superdata[i].nsuper; n++) { if (((Integer)affStatus.elementAt(i)).intValue() == 1){ tempControl[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempControl[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; }else if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempCase[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempCase[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; } tempnorm += superdata[i].superposs[n].p; } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempCase[j] > 0.0000 || tempControl[j] > 0.0000) { totalCase[j] += (tempCase[j]/tempnorm); totalControl[j] += (tempControl[j]/tempnorm); tempCase[j]=tempControl[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j 
<poss_full; j++){ if (superprob[j] > .001) { caseFreqs.add(new Double(totalCase[j])); controlFreqs.add(new Double(totalControl[j])); } } } double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(Options.getAssocTest() == ASSOC_TRIO) { double tempnorm=0,product; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; if (Options.getAssocTest() == 2){ this.obsT = obsT; this.obsU = obsU; } else if (Options.getAssocTest() == 1){ this.caseFreqs = caseFreqs; this.controlFreqs = controlFreqs; } /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
1,110,644
private void full_em_breakup( byte[][] input_haplos, int[] block_size, int numTrios, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (Options.getAssocTest() == 2) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; 
block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ double[] superprob = new double[poss_full]; create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); System.out.println(tempHap); } */ Vector caseFreqs = new Vector(); Vector controlFreqs = new Vector(); //suffers from OCD :) double[] tempCase, tempControl, totalCase, totalControl; if (Options.getAssocTest() == 1){ tempCase = new double[poss_full]; tempControl = new double[poss_full]; totalCase = new double[poss_full]; totalControl = new double[poss_full]; double tempnorm=0; for (int i = numTrios*2; i < num_indivs; i++){ for (int n=0; n<superdata[i].nsuper; n++) { if (((Integer)affStatus.elementAt(i)).intValue() == 1){ tempControl[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempControl[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; }else if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempCase[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempCase[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; } tempnorm += superdata[i].superposs[n].p; } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempCase[j] > 0.0000 || tempControl[j] > 0.0000) { totalCase[j] += (tempCase[j]/tempnorm); totalControl[j] += (tempControl[j]/tempnorm); tempCase[j]=tempControl[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j 
<poss_full; j++){ if (superprob[j] > .001) { caseFreqs.add(new Double(totalCase[j])); controlFreqs.add(new Double(totalControl[j])); } } } double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(Options.getAssocTest() == 2) { double tempnorm=0,product; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; if (Options.getAssocTest() == 2){ this.obsT = obsT; this.obsU = obsU; } else if (Options.getAssocTest() == 1){ this.caseFreqs = caseFreqs; this.controlFreqs = controlFreqs; } /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
private void full_em_breakup( byte[][] input_haplos, int[] block_size, int numTrios, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (Options.getAssocTest() == 2) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; 
block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ double[] superprob = new double[poss_full]; create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); System.out.println(tempHap); } */ Vector caseFreqs = new Vector(); Vector controlFreqs = new Vector(); //suffers from OCD :) double[] tempCase, tempControl, totalCase, totalControl; if (Options.getAssocTest() == 1){ tempCase = new double[poss_full]; tempControl = new double[poss_full]; totalCase = new double[poss_full]; totalControl = new double[poss_full]; double tempnorm=0; for (int i = numTrios*2; i < num_indivs; i++){ for (int n=0; n<superdata[i].nsuper; n++) { if (((Integer)affStatus.elementAt(i)).intValue() == 1){ tempControl[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempControl[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; }else if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempCase[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempCase[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; } tempnorm += superdata[i].superposs[n].p; } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempCase[j] > 0.0000 || tempControl[j] > 0.0000) { totalCase[j] += (tempCase[j]/tempnorm); totalControl[j] += (tempControl[j]/tempnorm); tempCase[j]=tempControl[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j 
<poss_full; j++){ if (superprob[j] > .001) { caseFreqs.add(new Double(totalCase[j])); controlFreqs.add(new Double(totalControl[j])); } } } double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(Options.getAssocTest() == 2) { double tempnorm=0,product; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; if (Options.getAssocTest() == 2){ this.obsT = obsT; this.obsU = obsU; } else if (Options.getAssocTest() == ASSOC_CC){ this.caseFreqs = caseFreqs; this.controlFreqs = controlFreqs; } /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
1,110,645
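The three records above (1,110,643 through 1,110,645) all make the same one-token change to full_em_breakup: a literal association-test code is replaced by a named constant (== 1 becomes == ASSOC_CC, == 2 becomes == ASSOC_TRIO). A minimal self-contained sketch of that pattern follows; it is not the HaploView class itself, and the constant values 1 and 2 are assumptions read off the literal comparisons in the bugged cells.

// Hypothetical sketch of the named-constant pattern these fixes move toward;
// not the original class. Constant values are assumed from the bugged cells.
public class AssocConstantsSketch {
    static final int ASSOC_NONE = 0; // assumed: no association test selected
    static final int ASSOC_CC   = 1; // case/control test (literal 1 in the bugged cells)
    static final int ASSOC_TRIO = 2; // trio/TDT test (literal 2 in the bugged cells)

    static String describe(int assocTest) {
        // Comparing against named constants instead of bare literals is the
        // entire content of the fixes in the three records above.
        if (assocTest == ASSOC_CC)   return "case/control frequencies";
        if (assocTest == ASSOC_TRIO) return "transmitted/untransmitted counts";
        return "no association test";
    }

    public static void main(String[] args) {
        System.out.println(describe(ASSOC_TRIO)); // prints: transmitted/untransmitted counts
    }
}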
public void run(Context context, XMLOutput output) throws Exception { getBody().run( context, output ); }
public void run(JellyContext context, XMLOutput output) throws Exception { getBody().run( context, output ); }
1,110,647
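This record (and record 1,110,650 below) tracks an API rename in Commons Jelly: the run method's first parameter type changes from Context to JellyContext while the body delegation stays identical. A minimal sketch against the renamed type, assuming a commons-jelly dependency and the early Script-style interface these cells appear to use; the wrapper class and its field are stand-ins, not the original code.

import org.apache.commons.jelly.JellyContext;
import org.apache.commons.jelly.Script;
import org.apache.commons.jelly.XMLOutput;

// Hypothetical wrapper; 'body' stands in for the getBody() result in the record.
public class PassThroughScript {
    private Script body;

    public PassThroughScript(Script body) {
        this.body = body;
    }

    public void run(JellyContext context, XMLOutput output) throws Exception {
        body.run(context, output); // unchanged call; only the declared parameter type moved
    }
}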
private double getGenoPercent(int het, int hom, int missing){ double genoPct = 100.0*(het+hom)/(het+hom+missing); return genoPct; }
private double getGenoPercent(int het, int hom, int missing){ double genoPct = 100.0*(het+hom)/(het+hom+missing); return genoPct; }
1,110,648
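For reference, the genotyping-rate helper in this record computes completeness as 100.0*(het+hom)/(het+hom+missing); the 100.0 literal promotes the whole expression to double before the division, so integer truncation cannot occur. A worked check with made-up counts:

// Self-contained check of the formula; the counts are hypothetical.
public class GenoPercentExample {
    static double getGenoPercent(int het, int hom, int missing) {
        return 100.0 * (het + hom) / (het + hom + missing);
    }

    public static void main(String[] args) {
        System.out.println(getGenoPercent(40, 55, 5)); // 95.0
    }
}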
public void mousePressed (MouseEvent e) { Rectangle blockselector = new Rectangle(clickXShift-boxRadius,clickYShift - boxRadius, (Chromosome.getFilteredSize()*boxSize), boxSize); //if users right clicks & holds, pop up the info if ((e.getModifiers() & InputEvent.BUTTON3_MASK) == InputEvent.BUTTON3_MASK){ Graphics g = getGraphics(); g.setFont(boxFont); FontMetrics metrics = g.getFontMetrics(); PairwiseLinkage[][] dPrimeTable = theData.filteredDPrimeTable; final int clickX = e.getX(); final int clickY = e.getY(); double dboxX = (double)(clickX - clickXShift - (clickY-clickYShift))/boxSize; double dboxY = (double)(clickX - clickXShift + (clickY-clickYShift))/boxSize; final int boxX, boxY; if (dboxX < 0){ boxX = (int)(dboxX - 0.5); } else{ boxX = (int)(dboxX + 0.5); } if (dboxY < 0){ boxY = (int)(dboxY - 0.5); }else{ boxY = (int)(dboxY + 0.5); } if ((boxX >= lowX && boxX <= highX) && (boxY > boxX && boxY < highY) && !(worldmapRect.contains(clickX,clickY))){ if (dPrimeTable[boxX][boxY] != null){ displayStrings = new String[5]; if (theData.infoKnown){ displayStrings[0] = new String ("(" +Chromosome.getFilteredMarker(boxX).getName() + ", " + Chromosome.getFilteredMarker(boxY).getName() + ")"); }else{ displayStrings[0] = new String("(" + (Chromosome.realIndex[boxX]+1) + ", " + (Chromosome.realIndex[boxY]+1) + ")"); } displayStrings[1] = new String ("D': " + dPrimeTable[boxX][boxY].getDPrime()); displayStrings[2] = new String ("LOD: " + dPrimeTable[boxX][boxY].getLOD()); displayStrings[3] = new String ("r^2: " + dPrimeTable[boxX][boxY].getRSquared()); displayStrings[4] = new String ("D' conf. bounds: " + dPrimeTable[boxX][boxY].getConfidenceLow() + "-" + dPrimeTable[boxX][boxY].getConfidenceHigh()); popupExists = true; } } else if (blockselector.contains(clickX, clickY)){ int marker = (int)(0.5 + (double)((clickX - clickXShift))/boxSize); displayStrings = new String[2]; if (theData.infoKnown){ displayStrings[0] = new String (Chromosome.getFilteredMarker(marker).getName()); }else{ displayStrings[0] = new String("Marker " + (Chromosome.realIndex[marker]+1)); } displayStrings[1] = new String ("MAF: " + Chromosome.getFilteredMarker(marker).getMAF()); popupExists = true; } if (popupExists){ int strlen = 0; for (int x = 0; x < displayStrings.length; x++){ if (strlen < metrics.stringWidth(displayStrings[x])){ strlen = metrics.stringWidth(displayStrings[x]); } } //edge shifts prevent window from popping up partially offscreen int visRightBound = (int)(getVisibleRect().getWidth() + getVisibleRect().getX()); int visBotBound = (int)(getVisibleRect().getHeight() + getVisibleRect().getY()); int rightEdgeShift = 0; if (clickX + strlen + popupLeftMargin +5 > visRightBound){ rightEdgeShift = clickX + strlen + popupLeftMargin + 10 - visRightBound; } int botEdgeShift = 0; if (clickY + 5*metrics.getHeight()+10 > visBotBound){ botEdgeShift = clickY + 5*metrics.getHeight()+15 - visBotBound; } popupDrawRect = new Rectangle(clickX-rightEdgeShift, clickY-botEdgeShift, strlen+popupLeftMargin+5, displayStrings.length*metrics.getHeight()+10); repaint(); } }else if ((e.getModifiers() & InputEvent.BUTTON1_MASK) == InputEvent.BUTTON1_MASK){ int x = e.getX(); int y = e.getY(); if (blockselector.contains(x,y)){ setCursor(Cursor.getPredefinedCursor(Cursor.E_RESIZE_CURSOR)); blockStartX = x; } } }
public void mousePressed (MouseEvent e) { Rectangle blockselector = new Rectangle(clickXShift-boxRadius,clickYShift - boxRadius, (Chromosome.getFilteredSize()*boxSize), boxSize); //if users right clicks & holds, pop up the info if ((e.getModifiers() & InputEvent.BUTTON3_MASK) == InputEvent.BUTTON3_MASK){ Graphics g = getGraphics(); g.setFont(boxFont); FontMetrics metrics = g.getFontMetrics(); PairwiseLinkage[][] dPrimeTable = theData.filteredDPrimeTable; final int clickX = e.getX(); final int clickY = e.getY(); double dboxX = (double)(clickX - clickXShift - (clickY-clickYShift))/boxSize; double dboxY = (double)(clickX - clickXShift + (clickY-clickYShift))/boxSize; final int boxX, boxY; if (dboxX < 0){ boxX = (int)(dboxX - 0.5); } else{ boxX = (int)(dboxX + 0.5); } if (dboxY < 0){ boxY = (int)(dboxY - 0.5); }else{ boxY = (int)(dboxY + 0.5); } if ((boxX >= lowX && boxX <= highX) && (boxY > boxX && boxY < highY) && !(worldmapRect.contains(clickX,clickY))){ if (dPrimeTable[boxX][boxY] != null){ displayStrings = new String[5]; if (theData.infoKnown){ displayStrings[0] = new String ("(" +Chromosome.getFilteredMarker(boxX).getName() + ", " + Chromosome.getFilteredMarker(boxY).getName() + ")"); }else{ displayStrings[0] = new String("(" + (Chromosome.realIndex[boxX]+1) + ", " + (Chromosome.realIndex[boxY]+1) + ")"); } displayStrings[1] = new String ("D': " + dPrimeTable[boxX][boxY].getDPrime()); displayStrings[2] = new String ("LOD: " + dPrimeTable[boxX][boxY].getLOD()); displayStrings[3] = new String ("r^2: " + dPrimeTable[boxX][boxY].getRSquared()); displayStrings[4] = new String ("D' conf. bounds: " + dPrimeTable[boxX][boxY].getConfidenceLow() + "-" + dPrimeTable[boxX][boxY].getConfidenceHigh()); popupExists = true; } } else if (blockselector.contains(clickX, clickY)){ int marker = (int)(0.5 + (double)((clickX - clickXShift))/boxSize); displayStrings = new String[2]; if (theData.infoKnown){ displayStrings[0] = new String (Chromosome.getFilteredMarker(marker).getName()); }else{ displayStrings[0] = new String("Marker " + (Chromosome.realIndex[marker]+1)); } displayStrings[1] = new String ("MAF: " + Chromosome.getFilteredMarker(marker).getMAF()); popupExists = true; } if (popupExists){ int strlen = 0; for (int x = 0; x < displayStrings.length; x++){ if (strlen < metrics.stringWidth(displayStrings[x])){ strlen = metrics.stringWidth(displayStrings[x]); } } //edge shifts prevent window from popping up partially offscreen int visRightBound = (int)(getVisibleRect().getWidth() + getVisibleRect().getX()); int visBotBound = (int)(getVisibleRect().getHeight() + getVisibleRect().getY()); int rightEdgeShift = 0; if (clickX + strlen + popupLeftMargin +5 > visRightBound){ rightEdgeShift = clickX + strlen + popupLeftMargin + 10 - visRightBound; } int botEdgeShift = 0; if (clickY + 5*metrics.getHeight()+10 > visBotBound){ botEdgeShift = clickY + 5*metrics.getHeight()+15 - visBotBound; } popupDrawRect = new Rectangle(clickX-rightEdgeShift, clickY-botEdgeShift+smallDataVertSlop, strlen+popupLeftMargin+5, displayStrings.length*metrics.getHeight()+10); repaint(); } }else if ((e.getModifiers() & InputEvent.BUTTON1_MASK) == InputEvent.BUTTON1_MASK){ int x = e.getX(); int y = e.getY(); if (blockselector.contains(x,y)){ setCursor(Cursor.getPredefinedCursor(Cursor.E_RESIZE_CURSOR)); blockStartX = x; } } }
1,110,649
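The single visible change in the pair above is the added smallDataVertSlop term in the popup rectangle's y-coordinate, which (judging from the field name) nudges the tooltip down to match the vertically shifted rendering used for small datasets. An isolated sketch with stand-in values:

import java.awt.Rectangle;

// All values here are stand-ins; only the '+ smallDataVertSlop' term is the point.
public class PopupOffsetSketch {
    public static void main(String[] args) {
        int clickX = 120, clickY = 80, rightEdgeShift = 0, botEdgeShift = 0;
        int strlen = 90, popupLeftMargin = 8, lineHeight = 14, lines = 5;
        int smallDataVertSlop = 20; // assumed vertical shift used for small datasets
        Rectangle popup = new Rectangle(
                clickX - rightEdgeShift,
                clickY - botEdgeShift + smallDataVertSlop, // the term the fix adds
                strlen + popupLeftMargin + 5,
                lines * lineHeight + 10);
        System.out.println(popup); // java.awt.Rectangle[x=120,y=100,width=103,height=80]
    }
}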
public void run(Context context, XMLOutput output) throws Exception { DynaTag dynaTag = (DynaTag) tag; // ### probably compiling this to 2 arrays might be quicker and smaller for ( Iterator iter = attributes.entrySet().iterator(); iter.hasNext(); ) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); Expression expression = (Expression) entry.getValue(); Object value = expression.evaluate( context ); dynaTag.setAttribute( name, value ); } tag.run( context, output ); }
public void run(JellyContext context, XMLOutput output) throws Exception { DynaTag dynaTag = (DynaTag) tag; // ### probably compiling this to 2 arrays might be quicker and smaller for ( Iterator iter = attributes.entrySet().iterator(); iter.hasNext(); ) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); Expression expression = (Expression) entry.getValue(); Object value = expression.evaluate( context ); dynaTag.setAttribute( name, value ); } tag.run( context, output ); }
1,110,650
protected void cleanup() { if(addToPP) { pp.unzoomPoint(p); logger.debug("Placing table at: "+p); pp.addImpl(tp,p,pp.getPPComponentCount()); try { pp.db.addChild(tp.getModel()); pp.selectNone(); tp.setSelected(true); mouseMode = mouseModeType.SELECT_TABLE; } catch (ArchitectException e) { logger.error("Couldn't add table \""+tp.getModel()+"\" to play pen:", e); JOptionPane.showMessageDialog(null, "Failed to add table:\n"+e.getMessage()); return; } } else { tp.setMoving(false); } pp.setCursor(null); pp.removeMouseMotionListener(this); pp.removeMouseListener(this); pp.revalidate(); }
protected void cleanup() { if(addToPP) { pp.unzoomPoint(p); logger.debug("Placing table at: "+p); pp.addImpl(tp,p,pp.getPPComponentCount()); try { pp.db.addChild(tp.getModel()); pp.selectNone(); tp.setSelected(true); mouseMode = MouseModeType.SELECT_TABLE; } catch (ArchitectException e) { logger.error("Couldn't add table \""+tp.getModel()+"\" to play pen:", e); JOptionPane.showMessageDialog(null, "Failed to add table:\n"+e.getMessage()); return; } } else { tp.setMoving(false); } pp.setCursor(null); pp.removeMouseMotionListener(this); pp.removeMouseListener(this); pp.revalidate(); }
1,110,651
public void mouseMoved(MouseEvent evt) { if (rubberBand != null) { // repaint old region in case of shrinkage Rectangle dirtyRegion = zoomRect(new Rectangle(rubberBand)); Point p = unzoomPoint(evt.getPoint()); rubberBand.setBounds(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); rubberBand.add(p); mouseMode = mouseModeType.RUBBERBAND_MOVE; // update selected items Rectangle temp = new Rectangle(); // avoids multiple allocations in getBounds for (int i = 0, n = contentPane.getComponentCount(); i < n; i++) { PlayPenComponent c = contentPane.getComponent(i); if (c instanceof Relationship) { // relationship is non-rectangular so we can't use getBounds for intersection testing ((Relationship) c).setSelected(((Relationship) c).intersects(rubberBand)); } else if (c instanceof Selectable) { ((Selectable) c).setSelected(rubberBand.intersects(c.getBounds(temp))); } } // Add the new rubberband to the dirty region and grow // it in case the line is thick due to extreme zoom dirtyRegion.add(zoomRect(new Rectangle(rubberBand))); repaintRubberBandRegion(dirtyRegion); } }
public void mouseMoved(MouseEvent evt) { if (rubberBand != null) { // repaint old region in case of shrinkage Rectangle dirtyRegion = zoomRect(new Rectangle(rubberBand)); Point p = unzoomPoint(evt.getPoint()); rubberBand.setBounds(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); rubberBand.add(p); mouseMode = MouseModeType.RUBBERBAND_MOVE; // update selected items Rectangle temp = new Rectangle(); // avoids multiple allocations in getBounds for (int i = 0, n = contentPane.getComponentCount(); i < n; i++) { PlayPenComponent c = contentPane.getComponent(i); if (c instanceof Relationship) { // relationship is non-rectangular so we can't use getBounds for intersection testing ((Relationship) c).setSelected(((Relationship) c).intersects(rubberBand)); } else if (c instanceof Selectable) { ((Selectable) c).setSelected(rubberBand.intersects(c.getBounds(temp))); } } // Add the new rubberband to the dirty region and grow // it in case the line is thick due to extreme zoom dirtyRegion.add(zoomRect(new Rectangle(rubberBand))); repaintRubberBandRegion(dirtyRegion); } }
1,110,652
public void mousePressed(MouseEvent evt) { requestFocus(); maybeShowPopup(evt); Point p = evt.getPoint(); unzoomPoint(p); PlayPenComponent c = contentPane.getComponentAt(p); if (c != null) p.translate(-c.getX(), -c.getY()); if (c instanceof Relationship) { Relationship r = (Relationship) c; PlayPen pp = (PlayPen) r.getPlayPen(); if ( mouseMode == mouseModeType.CREATING_RELATIONSHIP ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) != 0) { mouseMode = mouseModeType.MULTI_SELECT; } else { mouseMode = mouseModeType.SELECT_RELATIONSHIP; if ( !r.isSelected() ) { pp.selectNone(); } } } r.setSelected(true); // moving pk/fk decoration boolean overPkDec = ((RelationshipUI) r.getUI()).isOverPkDecoration(p); if (overPkDec || ((RelationshipUI) r.getUI()).isOverFkDecoration(p)) { new RelationshipDecorationMover(r, overPkDec); } } else if (c instanceof TablePane) { evt.getComponent().requestFocus(); TablePane tp = (TablePane) c; PlayPen pp = (PlayPen) tp.getPlayPen(); try { int clickCol = tp.pointToColumnIndex(p); if ( mouseMode == mouseModeType.CREATING_TABLE ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isSelected() || mouseMode == mouseModeType.IDLE ) { mouseMode = mouseModeType.SELECT_TABLE; pp.selectNone(); } } else { mouseMode = mouseModeType.MULTI_SELECT; } if ( clickCol > TablePane.COLUMN_INDEX_TITLE && clickCol < tp.getModel().getColumns().size()) { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isColumnSelected(clickCol) ){ tp.deSelectEverythingElse(evt); tp.selectNone(); } mouseMode = mouseModeType.SELECT_COLUMN; } tp.selectColumn(clickCol); tp.fireSelectionEvent(new SelectionEvent(tp, SelectionEvent.SELECTION_EVENT)); tp.repaint(); } tp.setSelected(true); } if (clickCol == TablePane.COLUMN_INDEX_TITLE && !ArchitectFrame.getMainInstance().createRelationshipIsActive()) { Iterator it = pp.getSelectedTables().iterator(); logger.debug("event point: " + p); logger.debug("zoomed event point: " + pp.zoomPoint(new Point(p))); draggingTablePanes = true; while (it.hasNext()) { // create FloatingTableListener for each selected item TablePane t3 = (TablePane)it.next(); logger.debug("(" + t3.getModel().getName() + ") zoomed selected table point: " + t3.getLocationOnScreen()); logger.debug("(" + t3.getModel().getName() + ") unzoomed selected table point: " + pp.unzoomPoint(t3.getLocationOnScreen())); /* the floating table listener expects zoomed handles which are relative to the location of the table column which was clicked on. */ Point clickedColumn = tp.getLocationOnScreen(); Point otherTable = t3.getLocationOnScreen(); Point handle = pp.zoomPoint(new Point(p)); logger.debug("(" + t3.getModel().getName() + ") translation x=" + (otherTable.getX() - clickedColumn.getX()) + ",y=" + (otherTable.getY() - clickedColumn.getY())); handle.translate((int)(clickedColumn.getX() - otherTable.getX()), (int) (clickedColumn.getY() - otherTable.getY())); new PlayPen.FloatingTableListener(pp, t3, handle,false); } } } catch (ArchitectException e) { logger.error("Exception converting point to column", e); } } else { if ((evt.getModifiersEx() & InputEvent.BUTTON1_DOWN_MASK) != 0 && !evt.isPopupTrigger()) { mouseMode = mouseModeType.IDLE; selectNone(); rubberBandOrigin = new Point(p); rubberBand = new Rectangle(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); } } }
public void mousePressed(MouseEvent evt) { requestFocus(); maybeShowPopup(evt); Point p = evt.getPoint(); unzoomPoint(p); PlayPenComponent c = contentPane.getComponentAt(p); if (c != null) p.translate(-c.getX(), -c.getY()); if (c instanceof Relationship) { Relationship r = (Relationship) c; PlayPen pp = (PlayPen) r.getPlayPen(); if ( mouseMode == mouseModeType.CREATING_RELATIONSHIP ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) != 0) { mouseMode = mouseModeType.MULTI_SELECT; } else { mouseMode = mouseModeType.SELECT_RELATIONSHIP; if ( !r.isSelected() ) { pp.selectNone(); } } } r.setSelected(true); // moving pk/fk decoration boolean overPkDec = ((RelationshipUI) r.getUI()).isOverPkDecoration(p); if (overPkDec || ((RelationshipUI) r.getUI()).isOverFkDecoration(p)) { new RelationshipDecorationMover(r, overPkDec); } } else if (c instanceof TablePane) { evt.getComponent().requestFocus(); TablePane tp = (TablePane) c; PlayPen pp = (PlayPen) tp.getPlayPen(); try { int clickCol = tp.pointToColumnIndex(p); if ( mouseMode == mouseModeType.CREATING_TABLE ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isSelected() || mouseMode == mouseModeType.IDLE ) { mouseMode = mouseModeType.SELECT_TABLE; pp.selectNone(); } } else { mouseMode = mouseModeType.MULTI_SELECT; } if ( clickCol > TablePane.COLUMN_INDEX_TITLE && clickCol < tp.getModel().getColumns().size()) { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isColumnSelected(clickCol) ){ tp.deSelectEverythingElse(evt); tp.selectNone(); } mouseMode = mouseModeType.SELECT_COLUMN; } tp.selectColumn(clickCol); tp.fireSelectionEvent(new SelectionEvent(tp, SelectionEvent.SELECTION_EVENT)); tp.repaint(); } tp.setSelected(true); } if (clickCol == TablePane.COLUMN_INDEX_TITLE && !ArchitectFrame.getMainInstance().createRelationshipIsActive()) { Iterator it = pp.getSelectedTables().iterator(); logger.debug("event point: " + p); logger.debug("zoomed event point: " + pp.zoomPoint(new Point(p))); draggingTablePanes = true; while (it.hasNext()) { // create FloatingTableListener for each selected item TablePane t3 = (TablePane)it.next(); logger.debug("(" + t3.getModel().getName() + ") zoomed selected table point: " + t3.getLocationOnScreen()); logger.debug("(" + t3.getModel().getName() + ") unzoomed selected table point: " + pp.unzoomPoint(t3.getLocationOnScreen())); /* the floating table listener expects zoomed handles which are relative to the location of the table column which was clicked on. */ Point clickedColumn = tp.getLocationOnScreen(); Point otherTable = t3.getLocationOnScreen(); Point handle = pp.zoomPoint(new Point(p)); logger.debug("(" + t3.getModel().getName() + ") translation x=" + (otherTable.getX() - clickedColumn.getX()) + ",y=" + (otherTable.getY() - clickedColumn.getY())); handle.translate((int)(clickedColumn.getX() - otherTable.getX()), (int) (clickedColumn.getY() - otherTable.getY())); new PlayPen.FloatingTableListener(pp, t3, handle,false); } } } catch (ArchitectException e) { logger.error("Exception converting point to column", e); } } else { if ((evt.getModifiersEx() & InputEvent.BUTTON1_DOWN_MASK) != 0 && !evt.isPopupTrigger()) { mouseMode = mouseModeType.IDLE; selectNone(); rubberBandOrigin = new Point(p); rubberBand = new Rectangle(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); } } }
1,110,653
public void mousePressed(MouseEvent evt) { requestFocus(); maybeShowPopup(evt); Point p = evt.getPoint(); unzoomPoint(p); PlayPenComponent c = contentPane.getComponentAt(p); if (c != null) p.translate(-c.getX(), -c.getY()); if (c instanceof Relationship) { Relationship r = (Relationship) c; PlayPen pp = (PlayPen) r.getPlayPen(); if ( mouseMode == mouseModeType.CREATING_RELATIONSHIP ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) != 0) { mouseMode = mouseModeType.MULTI_SELECT; } else { mouseMode = mouseModeType.SELECT_RELATIONSHIP; if ( !r.isSelected() ) { pp.selectNone(); } } } r.setSelected(true); // moving pk/fk decoration boolean overPkDec = ((RelationshipUI) r.getUI()).isOverPkDecoration(p); if (overPkDec || ((RelationshipUI) r.getUI()).isOverFkDecoration(p)) { new RelationshipDecorationMover(r, overPkDec); } } else if (c instanceof TablePane) { evt.getComponent().requestFocus(); TablePane tp = (TablePane) c; PlayPen pp = (PlayPen) tp.getPlayPen(); try { int clickCol = tp.pointToColumnIndex(p); if ( mouseMode == mouseModeType.CREATING_TABLE ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isSelected() || mouseMode == mouseModeType.IDLE ) { mouseMode = mouseModeType.SELECT_TABLE; pp.selectNone(); } } else { mouseMode = mouseModeType.MULTI_SELECT; } if ( clickCol > TablePane.COLUMN_INDEX_TITLE && clickCol < tp.getModel().getColumns().size()) { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isColumnSelected(clickCol) ){ tp.deSelectEverythingElse(evt); tp.selectNone(); } mouseMode = mouseModeType.SELECT_COLUMN; } tp.selectColumn(clickCol); tp.fireSelectionEvent(new SelectionEvent(tp, SelectionEvent.SELECTION_EVENT)); tp.repaint(); } tp.setSelected(true); } if (clickCol == TablePane.COLUMN_INDEX_TITLE && !ArchitectFrame.getMainInstance().createRelationshipIsActive()) { Iterator it = pp.getSelectedTables().iterator(); logger.debug("event point: " + p); logger.debug("zoomed event point: " + pp.zoomPoint(new Point(p))); draggingTablePanes = true; while (it.hasNext()) { // create FloatingTableListener for each selected item TablePane t3 = (TablePane)it.next(); logger.debug("(" + t3.getModel().getName() + ") zoomed selected table point: " + t3.getLocationOnScreen()); logger.debug("(" + t3.getModel().getName() + ") unzoomed selected table point: " + pp.unzoomPoint(t3.getLocationOnScreen())); /* the floating table listener expects zoomed handles which are relative to the location of the table column which was clicked on. */ Point clickedColumn = tp.getLocationOnScreen(); Point otherTable = t3.getLocationOnScreen(); Point handle = pp.zoomPoint(new Point(p)); logger.debug("(" + t3.getModel().getName() + ") translation x=" + (otherTable.getX() - clickedColumn.getX()) + ",y=" + (otherTable.getY() - clickedColumn.getY())); handle.translate((int)(clickedColumn.getX() - otherTable.getX()), (int) (clickedColumn.getY() - otherTable.getY())); new PlayPen.FloatingTableListener(pp, t3, handle,false); } } } catch (ArchitectException e) { logger.error("Exception converting point to column", e); } } else { if ((evt.getModifiersEx() & InputEvent.BUTTON1_DOWN_MASK) != 0 && !evt.isPopupTrigger()) { mouseMode = mouseModeType.IDLE; selectNone(); rubberBandOrigin = new Point(p); rubberBand = new Rectangle(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); } } }
public void mousePressed(MouseEvent evt) { requestFocus(); maybeShowPopup(evt); Point p = evt.getPoint(); unzoomPoint(p); PlayPenComponent c = contentPane.getComponentAt(p); if (c != null) p.translate(-c.getX(), -c.getY()); if (c instanceof Relationship) { Relationship r = (Relationship) c; PlayPen pp = (PlayPen) r.getPlayPen(); if ( mouseMode == mouseModeType.CREATING_RELATIONSHIP ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) != 0) { mouseMode = mouseModeType.MULTI_SELECT; } else { mouseMode = mouseModeType.SELECT_RELATIONSHIP; if ( !r.isSelected() ) { pp.selectNone(); } } } r.setSelected(true); // moving pk/fk decoration boolean overPkDec = ((RelationshipUI) r.getUI()).isOverPkDecoration(p); if (overPkDec || ((RelationshipUI) r.getUI()).isOverFkDecoration(p)) { new RelationshipDecorationMover(r, overPkDec); } } else if (c instanceof TablePane) { evt.getComponent().requestFocus(); TablePane tp = (TablePane) c; PlayPen pp = (PlayPen) tp.getPlayPen(); try { int clickCol = tp.pointToColumnIndex(p); if ( mouseMode == mouseModeType.CREATING_TABLE ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isSelected() || mouseMode == mouseModeType.IDLE ) { mouseMode = mouseModeType.SELECT_TABLE; pp.selectNone(); } } else { mouseMode = mouseModeType.MULTI_SELECT; } if ( clickCol > TablePane.COLUMN_INDEX_TITLE && clickCol < tp.getModel().getColumns().size()) { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isColumnSelected(clickCol) ){ tp.deSelectEverythingElse(evt); tp.selectNone(); } mouseMode = mouseModeType.SELECT_COLUMN; } tp.selectColumn(clickCol); tp.fireSelectionEvent(new SelectionEvent(tp, SelectionEvent.SELECTION_EVENT)); tp.repaint(); } tp.setSelected(true); } if (clickCol == TablePane.COLUMN_INDEX_TITLE && !ArchitectFrame.getMainInstance().createRelationshipIsActive()) { Iterator it = pp.getSelectedTables().iterator(); logger.debug("event point: " + p); logger.debug("zoomed event point: " + pp.zoomPoint(new Point(p))); draggingTablePanes = true; while (it.hasNext()) { // create FloatingTableListener for each selected item TablePane t3 = (TablePane)it.next(); logger.debug("(" + t3.getModel().getName() + ") zoomed selected table point: " + t3.getLocationOnScreen()); logger.debug("(" + t3.getModel().getName() + ") unzoomed selected table point: " + pp.unzoomPoint(t3.getLocationOnScreen())); /* the floating table listener expects zoomed handles which are relative to the location of the table column which was clicked on. */ Point clickedColumn = tp.getLocationOnScreen(); Point otherTable = t3.getLocationOnScreen(); Point handle = pp.zoomPoint(new Point(p)); logger.debug("(" + t3.getModel().getName() + ") translation x=" + (otherTable.getX() - clickedColumn.getX()) + ",y=" + (otherTable.getY() - clickedColumn.getY())); handle.translate((int)(clickedColumn.getX() - otherTable.getX()), (int) (clickedColumn.getY() - otherTable.getY())); new PlayPen.FloatingTableListener(pp, t3, handle,false); } } } catch (ArchitectException e) { logger.error("Exception converting point to column", e); } } else { if ((evt.getModifiersEx() & InputEvent.BUTTON1_DOWN_MASK) != 0 && !evt.isPopupTrigger()) { mouseMode = mouseModeType.IDLE; selectNone(); rubberBandOrigin = new Point(p); rubberBand = new Rectangle(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); } } }
1,110,654
public void mousePressed(MouseEvent evt) { requestFocus(); maybeShowPopup(evt); Point p = evt.getPoint(); unzoomPoint(p); PlayPenComponent c = contentPane.getComponentAt(p); if (c != null) p.translate(-c.getX(), -c.getY()); if (c instanceof Relationship) { Relationship r = (Relationship) c; PlayPen pp = (PlayPen) r.getPlayPen(); if ( mouseMode == mouseModeType.CREATING_RELATIONSHIP ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) != 0) { mouseMode = mouseModeType.MULTI_SELECT; } else { mouseMode = mouseModeType.SELECT_RELATIONSHIP; if ( !r.isSelected() ) { pp.selectNone(); } } } r.setSelected(true); // moving pk/fk decoration boolean overPkDec = ((RelationshipUI) r.getUI()).isOverPkDecoration(p); if (overPkDec || ((RelationshipUI) r.getUI()).isOverFkDecoration(p)) { new RelationshipDecorationMover(r, overPkDec); } } else if (c instanceof TablePane) { evt.getComponent().requestFocus(); TablePane tp = (TablePane) c; PlayPen pp = (PlayPen) tp.getPlayPen(); try { int clickCol = tp.pointToColumnIndex(p); if ( mouseMode == mouseModeType.CREATING_TABLE ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isSelected() || mouseMode == mouseModeType.IDLE ) { mouseMode = mouseModeType.SELECT_TABLE; pp.selectNone(); } } else { mouseMode = mouseModeType.MULTI_SELECT; } if ( clickCol > TablePane.COLUMN_INDEX_TITLE && clickCol < tp.getModel().getColumns().size()) { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isColumnSelected(clickCol) ){ tp.deSelectEverythingElse(evt); tp.selectNone(); } mouseMode = mouseModeType.SELECT_COLUMN; } tp.selectColumn(clickCol); tp.fireSelectionEvent(new SelectionEvent(tp, SelectionEvent.SELECTION_EVENT)); tp.repaint(); } tp.setSelected(true); } if (clickCol == TablePane.COLUMN_INDEX_TITLE && !ArchitectFrame.getMainInstance().createRelationshipIsActive()) { Iterator it = pp.getSelectedTables().iterator(); logger.debug("event point: " + p); logger.debug("zoomed event point: " + pp.zoomPoint(new Point(p))); draggingTablePanes = true; while (it.hasNext()) { // create FloatingTableListener for each selected item TablePane t3 = (TablePane)it.next(); logger.debug("(" + t3.getModel().getName() + ") zoomed selected table point: " + t3.getLocationOnScreen()); logger.debug("(" + t3.getModel().getName() + ") unzoomed selected table point: " + pp.unzoomPoint(t3.getLocationOnScreen())); /* the floating table listener expects zoomed handles which are relative to the location of the table column which was clicked on. */ Point clickedColumn = tp.getLocationOnScreen(); Point otherTable = t3.getLocationOnScreen(); Point handle = pp.zoomPoint(new Point(p)); logger.debug("(" + t3.getModel().getName() + ") translation x=" + (otherTable.getX() - clickedColumn.getX()) + ",y=" + (otherTable.getY() - clickedColumn.getY())); handle.translate((int)(clickedColumn.getX() - otherTable.getX()), (int) (clickedColumn.getY() - otherTable.getY())); new PlayPen.FloatingTableListener(pp, t3, handle,false); } } } catch (ArchitectException e) { logger.error("Exception converting point to column", e); } } else { if ((evt.getModifiersEx() & InputEvent.BUTTON1_DOWN_MASK) != 0 && !evt.isPopupTrigger()) { mouseMode = mouseModeType.IDLE; selectNone(); rubberBandOrigin = new Point(p); rubberBand = new Rectangle(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); } } }
public void mousePressed(MouseEvent evt) { requestFocus(); maybeShowPopup(evt); Point p = evt.getPoint(); unzoomPoint(p); PlayPenComponent c = contentPane.getComponentAt(p); if (c != null) p.translate(-c.getX(), -c.getY()); if (c instanceof Relationship) { Relationship r = (Relationship) c; PlayPen pp = (PlayPen) r.getPlayPen(); if ( mouseMode == mouseModeType.CREATING_RELATIONSHIP ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) != 0) { mouseMode = mouseModeType.MULTI_SELECT; } else { mouseMode = mouseModeType.SELECT_RELATIONSHIP; if ( !r.isSelected() ) { pp.selectNone(); } } } r.setSelected(true); // moving pk/fk decoration boolean overPkDec = ((RelationshipUI) r.getUI()).isOverPkDecoration(p); if (overPkDec || ((RelationshipUI) r.getUI()).isOverFkDecoration(p)) { new RelationshipDecorationMover(r, overPkDec); } } else if (c instanceof TablePane) { evt.getComponent().requestFocus(); TablePane tp = (TablePane) c; PlayPen pp = (PlayPen) tp.getPlayPen(); try { int clickCol = tp.pointToColumnIndex(p); if ( mouseMode == MouseModeType.CREATING_TABLE ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isSelected() || mouseMode == mouseModeType.IDLE ) { mouseMode = mouseModeType.SELECT_TABLE; pp.selectNone(); } } else { mouseMode = mouseModeType.MULTI_SELECT; } if ( clickCol > TablePane.COLUMN_INDEX_TITLE && clickCol < tp.getModel().getColumns().size()) { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isColumnSelected(clickCol) ){ tp.deSelectEverythingElse(evt); tp.selectNone(); } mouseMode = mouseModeType.SELECT_COLUMN; } tp.selectColumn(clickCol); tp.fireSelectionEvent(new SelectionEvent(tp, SelectionEvent.SELECTION_EVENT)); tp.repaint(); } tp.setSelected(true); } if (clickCol == TablePane.COLUMN_INDEX_TITLE && !ArchitectFrame.getMainInstance().createRelationshipIsActive()) { Iterator it = pp.getSelectedTables().iterator(); logger.debug("event point: " + p); logger.debug("zoomed event point: " + pp.zoomPoint(new Point(p))); draggingTablePanes = true; while (it.hasNext()) { // create FloatingTableListener for each selected item TablePane t3 = (TablePane)it.next(); logger.debug("(" + t3.getModel().getName() + ") zoomed selected table point: " + t3.getLocationOnScreen()); logger.debug("(" + t3.getModel().getName() + ") unzoomed selected table point: " + pp.unzoomPoint(t3.getLocationOnScreen())); /* the floating table listener expects zoomed handles which are relative to the location of the table column which was clicked on. */ Point clickedColumn = tp.getLocationOnScreen(); Point otherTable = t3.getLocationOnScreen(); Point handle = pp.zoomPoint(new Point(p)); logger.debug("(" + t3.getModel().getName() + ") translation x=" + (otherTable.getX() - clickedColumn.getX()) + ",y=" + (otherTable.getY() - clickedColumn.getY())); handle.translate((int)(clickedColumn.getX() - otherTable.getX()), (int) (clickedColumn.getY() - otherTable.getY())); new PlayPen.FloatingTableListener(pp, t3, handle,false); } } } catch (ArchitectException e) { logger.error("Exception converting point to column", e); } } else { if ((evt.getModifiersEx() & InputEvent.BUTTON1_DOWN_MASK) != 0 && !evt.isPopupTrigger()) { mouseMode = mouseModeType.IDLE; selectNone(); rubberBandOrigin = new Point(p); rubberBand = new Rectangle(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); } } }
1,110,655
public void mousePressed(MouseEvent evt) { requestFocus(); maybeShowPopup(evt); Point p = evt.getPoint(); unzoomPoint(p); PlayPenComponent c = contentPane.getComponentAt(p); if (c != null) p.translate(-c.getX(), -c.getY()); if (c instanceof Relationship) { Relationship r = (Relationship) c; PlayPen pp = (PlayPen) r.getPlayPen(); if ( mouseMode == mouseModeType.CREATING_RELATIONSHIP ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) != 0) { mouseMode = mouseModeType.MULTI_SELECT; } else { mouseMode = mouseModeType.SELECT_RELATIONSHIP; if ( !r.isSelected() ) { pp.selectNone(); } } } r.setSelected(true); // moving pk/fk decoration boolean overPkDec = ((RelationshipUI) r.getUI()).isOverPkDecoration(p); if (overPkDec || ((RelationshipUI) r.getUI()).isOverFkDecoration(p)) { new RelationshipDecorationMover(r, overPkDec); } } else if (c instanceof TablePane) { evt.getComponent().requestFocus(); TablePane tp = (TablePane) c; PlayPen pp = (PlayPen) tp.getPlayPen(); try { int clickCol = tp.pointToColumnIndex(p); if ( mouseMode == mouseModeType.CREATING_TABLE ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isSelected() || mouseMode == mouseModeType.IDLE ) { mouseMode = mouseModeType.SELECT_TABLE; pp.selectNone(); } } else { mouseMode = mouseModeType.MULTI_SELECT; } if ( clickCol > TablePane.COLUMN_INDEX_TITLE && clickCol < tp.getModel().getColumns().size()) { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isColumnSelected(clickCol) ){ tp.deSelectEverythingElse(evt); tp.selectNone(); } mouseMode = mouseModeType.SELECT_COLUMN; } tp.selectColumn(clickCol); tp.fireSelectionEvent(new SelectionEvent(tp, SelectionEvent.SELECTION_EVENT)); tp.repaint(); } tp.setSelected(true); } if (clickCol == TablePane.COLUMN_INDEX_TITLE && !ArchitectFrame.getMainInstance().createRelationshipIsActive()) { Iterator it = pp.getSelectedTables().iterator(); logger.debug("event point: " + p); logger.debug("zoomed event point: " + pp.zoomPoint(new Point(p))); draggingTablePanes = true; while (it.hasNext()) { // create FloatingTableListener for each selected item TablePane t3 = (TablePane)it.next(); logger.debug("(" + t3.getModel().getName() + ") zoomed selected table point: " + t3.getLocationOnScreen()); logger.debug("(" + t3.getModel().getName() + ") unzoomed selected table point: " + pp.unzoomPoint(t3.getLocationOnScreen())); /* the floating table listener expects zoomed handles which are relative to the location of the table column which was clicked on. */ Point clickedColumn = tp.getLocationOnScreen(); Point otherTable = t3.getLocationOnScreen(); Point handle = pp.zoomPoint(new Point(p)); logger.debug("(" + t3.getModel().getName() + ") translation x=" + (otherTable.getX() - clickedColumn.getX()) + ",y=" + (otherTable.getY() - clickedColumn.getY())); handle.translate((int)(clickedColumn.getX() - otherTable.getX()), (int) (clickedColumn.getY() - otherTable.getY())); new PlayPen.FloatingTableListener(pp, t3, handle,false); } } } catch (ArchitectException e) { logger.error("Exception converting point to column", e); } } else { if ((evt.getModifiersEx() & InputEvent.BUTTON1_DOWN_MASK) != 0 && !evt.isPopupTrigger()) { mouseMode = mouseModeType.IDLE; selectNone(); rubberBandOrigin = new Point(p); rubberBand = new Rectangle(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); } } }
public void mousePressed(MouseEvent evt) { requestFocus(); maybeShowPopup(evt); Point p = evt.getPoint(); unzoomPoint(p); PlayPenComponent c = contentPane.getComponentAt(p); if (c != null) p.translate(-c.getX(), -c.getY()); if (c instanceof Relationship) { Relationship r = (Relationship) c; PlayPen pp = (PlayPen) r.getPlayPen(); if ( mouseMode == mouseModeType.CREATING_RELATIONSHIP ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) != 0) { mouseMode = mouseModeType.MULTI_SELECT; } else { mouseMode = mouseModeType.SELECT_RELATIONSHIP; if ( !r.isSelected() ) { pp.selectNone(); } } } r.setSelected(true); // moving pk/fk decoration boolean overPkDec = ((RelationshipUI) r.getUI()).isOverPkDecoration(p); if (overPkDec || ((RelationshipUI) r.getUI()).isOverFkDecoration(p)) { new RelationshipDecorationMover(r, overPkDec); } } else if (c instanceof TablePane) { evt.getComponent().requestFocus(); TablePane tp = (TablePane) c; PlayPen pp = (PlayPen) tp.getPlayPen(); try { int clickCol = tp.pointToColumnIndex(p); if ( mouseMode == mouseModeType.CREATING_TABLE ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isSelected() || mouseMode == mouseModeType.IDLE ) { mouseMode = mouseModeType.SELECT_TABLE; pp.selectNone(); } } else { mouseMode = mouseModeType.MULTI_SELECT; } if ( clickCol > TablePane.COLUMN_INDEX_TITLE && clickCol < tp.getModel().getColumns().size()) { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isColumnSelected(clickCol) ){ tp.deSelectEverythingElse(evt); tp.selectNone(); } mouseMode = mouseModeType.SELECT_COLUMN; } tp.selectColumn(clickCol); tp.fireSelectionEvent(new SelectionEvent(tp, SelectionEvent.SELECTION_EVENT)); tp.repaint(); } tp.setSelected(true); } if (clickCol == TablePane.COLUMN_INDEX_TITLE && !ArchitectFrame.getMainInstance().createRelationshipIsActive()) { Iterator it = pp.getSelectedTables().iterator(); logger.debug("event point: " + p); logger.debug("zoomed event point: " + pp.zoomPoint(new Point(p))); draggingTablePanes = true; while (it.hasNext()) { // create FloatingTableListener for each selected item TablePane t3 = (TablePane)it.next(); logger.debug("(" + t3.getModel().getName() + ") zoomed selected table point: " + t3.getLocationOnScreen()); logger.debug("(" + t3.getModel().getName() + ") unzoomed selected table point: " + pp.unzoomPoint(t3.getLocationOnScreen())); /* the floating table listener expects zoomed handles which are relative to the location of the table column which was clicked on. */ Point clickedColumn = tp.getLocationOnScreen(); Point otherTable = t3.getLocationOnScreen(); Point handle = pp.zoomPoint(new Point(p)); logger.debug("(" + t3.getModel().getName() + ") translation x=" + (otherTable.getX() - clickedColumn.getX()) + ",y=" + (otherTable.getY() - clickedColumn.getY())); handle.translate((int)(clickedColumn.getX() - otherTable.getX()), (int) (clickedColumn.getY() - otherTable.getY())); new PlayPen.FloatingTableListener(pp, t3, handle,false); } } } catch (ArchitectException e) { logger.error("Exception converting point to column", e); } } else { if ((evt.getModifiersEx() & InputEvent.BUTTON1_DOWN_MASK) != 0 && !evt.isPopupTrigger()) { mouseMode = mouseModeType.IDLE; selectNone(); rubberBandOrigin = new Point(p); rubberBand = new Rectangle(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); } } }
1,110,656
public void mousePressed(MouseEvent evt) { requestFocus(); maybeShowPopup(evt); Point p = evt.getPoint(); unzoomPoint(p); PlayPenComponent c = contentPane.getComponentAt(p); if (c != null) p.translate(-c.getX(), -c.getY()); if (c instanceof Relationship) { Relationship r = (Relationship) c; PlayPen pp = (PlayPen) r.getPlayPen(); if ( mouseMode == mouseModeType.CREATING_RELATIONSHIP ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) != 0) { mouseMode = mouseModeType.MULTI_SELECT; } else { mouseMode = mouseModeType.SELECT_RELATIONSHIP; if ( !r.isSelected() ) { pp.selectNone(); } } } r.setSelected(true); // moving pk/fk decoration boolean overPkDec = ((RelationshipUI) r.getUI()).isOverPkDecoration(p); if (overPkDec || ((RelationshipUI) r.getUI()).isOverFkDecoration(p)) { new RelationshipDecorationMover(r, overPkDec); } } else if (c instanceof TablePane) { evt.getComponent().requestFocus(); TablePane tp = (TablePane) c; PlayPen pp = (PlayPen) tp.getPlayPen(); try { int clickCol = tp.pointToColumnIndex(p); if ( mouseMode == mouseModeType.CREATING_TABLE ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isSelected() || mouseMode == mouseModeType.IDLE ) { mouseMode = mouseModeType.SELECT_TABLE; pp.selectNone(); } } else { mouseMode = mouseModeType.MULTI_SELECT; } if ( clickCol > TablePane.COLUMN_INDEX_TITLE && clickCol < tp.getModel().getColumns().size()) { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isColumnSelected(clickCol) ){ tp.deSelectEverythingElse(evt); tp.selectNone(); } mouseMode = mouseModeType.SELECT_COLUMN; } tp.selectColumn(clickCol); tp.fireSelectionEvent(new SelectionEvent(tp, SelectionEvent.SELECTION_EVENT)); tp.repaint(); } tp.setSelected(true); } if (clickCol == TablePane.COLUMN_INDEX_TITLE && !ArchitectFrame.getMainInstance().createRelationshipIsActive()) { Iterator it = pp.getSelectedTables().iterator(); logger.debug("event point: " + p); logger.debug("zoomed event point: " + pp.zoomPoint(new Point(p))); draggingTablePanes = true; while (it.hasNext()) { // create FloatingTableListener for each selected item TablePane t3 = (TablePane)it.next(); logger.debug("(" + t3.getModel().getName() + ") zoomed selected table point: " + t3.getLocationOnScreen()); logger.debug("(" + t3.getModel().getName() + ") unzoomed selected table point: " + pp.unzoomPoint(t3.getLocationOnScreen())); /* the floating table listener expects zoomed handles which are relative to the location of the table column which was clicked on. */ Point clickedColumn = tp.getLocationOnScreen(); Point otherTable = t3.getLocationOnScreen(); Point handle = pp.zoomPoint(new Point(p)); logger.debug("(" + t3.getModel().getName() + ") translation x=" + (otherTable.getX() - clickedColumn.getX()) + ",y=" + (otherTable.getY() - clickedColumn.getY())); handle.translate((int)(clickedColumn.getX() - otherTable.getX()), (int) (clickedColumn.getY() - otherTable.getY())); new PlayPen.FloatingTableListener(pp, t3, handle,false); } } } catch (ArchitectException e) { logger.error("Exception converting point to column", e); } } else { if ((evt.getModifiersEx() & InputEvent.BUTTON1_DOWN_MASK) != 0 && !evt.isPopupTrigger()) { mouseMode = mouseModeType.IDLE; selectNone(); rubberBandOrigin = new Point(p); rubberBand = new Rectangle(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); } } }
public void mousePressed(MouseEvent evt) { requestFocus(); maybeShowPopup(evt); Point p = evt.getPoint(); unzoomPoint(p); PlayPenComponent c = contentPane.getComponentAt(p); if (c != null) p.translate(-c.getX(), -c.getY()); if (c instanceof Relationship) { Relationship r = (Relationship) c; PlayPen pp = (PlayPen) r.getPlayPen(); if ( mouseMode == mouseModeType.CREATING_RELATIONSHIP ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) != 0) { mouseMode = MouseModeType.MULTI_SELECT; } else { mouseMode = mouseModeType.SELECT_RELATIONSHIP; if ( !r.isSelected() ) { pp.selectNone(); } } } r.setSelected(true); // moving pk/fk decoration boolean overPkDec = ((RelationshipUI) r.getUI()).isOverPkDecoration(p); if (overPkDec || ((RelationshipUI) r.getUI()).isOverFkDecoration(p)) { new RelationshipDecorationMover(r, overPkDec); } } else if (c instanceof TablePane) { evt.getComponent().requestFocus(); TablePane tp = (TablePane) c; PlayPen pp = (PlayPen) tp.getPlayPen(); try { int clickCol = tp.pointToColumnIndex(p); if ( mouseMode == mouseModeType.CREATING_TABLE ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isSelected() || mouseMode == mouseModeType.IDLE ) { mouseMode = mouseModeType.SELECT_TABLE; pp.selectNone(); } } else { mouseMode = MouseModeType.MULTI_SELECT; } if ( clickCol > TablePane.COLUMN_INDEX_TITLE && clickCol < tp.getModel().getColumns().size()) { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isColumnSelected(clickCol) ){ tp.deSelectEverythingElse(evt); tp.selectNone(); } mouseMode = mouseModeType.SELECT_COLUMN; } tp.selectColumn(clickCol); tp.fireSelectionEvent(new SelectionEvent(tp, SelectionEvent.SELECTION_EVENT)); tp.repaint(); } tp.setSelected(true); } if (clickCol == TablePane.COLUMN_INDEX_TITLE && !ArchitectFrame.getMainInstance().createRelationshipIsActive()) { Iterator it = pp.getSelectedTables().iterator(); logger.debug("event point: " + p); logger.debug("zoomed event point: " + pp.zoomPoint(new Point(p))); draggingTablePanes = true; while (it.hasNext()) { // create FloatingTableListener for each selected item TablePane t3 = (TablePane)it.next(); logger.debug("(" + t3.getModel().getName() + ") zoomed selected table point: " + t3.getLocationOnScreen()); logger.debug("(" + t3.getModel().getName() + ") unzoomed selected table point: " + pp.unzoomPoint(t3.getLocationOnScreen())); /* the floating table listener expects zoomed handles which are relative to the location of the table column which was clicked on. */ Point clickedColumn = tp.getLocationOnScreen(); Point otherTable = t3.getLocationOnScreen(); Point handle = pp.zoomPoint(new Point(p)); logger.debug("(" + t3.getModel().getName() + ") translation x=" + (otherTable.getX() - clickedColumn.getX()) + ",y=" + (otherTable.getY() - clickedColumn.getY())); handle.translate((int)(clickedColumn.getX() - otherTable.getX()), (int) (clickedColumn.getY() - otherTable.getY())); new PlayPen.FloatingTableListener(pp, t3, handle,false); } } } catch (ArchitectException e) { logger.error("Exception converting point to column", e); } } else { if ((evt.getModifiersEx() & InputEvent.BUTTON1_DOWN_MASK) != 0 && !evt.isPopupTrigger()) { mouseMode = mouseModeType.IDLE; selectNone(); rubberBandOrigin = new Point(p); rubberBand = new Rectangle(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); } } }
1,110,657
public void mousePressed(MouseEvent evt) { requestFocus(); maybeShowPopup(evt); Point p = evt.getPoint(); unzoomPoint(p); PlayPenComponent c = contentPane.getComponentAt(p); if (c != null) p.translate(-c.getX(), -c.getY()); if (c instanceof Relationship) { Relationship r = (Relationship) c; PlayPen pp = (PlayPen) r.getPlayPen(); if ( mouseMode == mouseModeType.CREATING_RELATIONSHIP ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) != 0) { mouseMode = mouseModeType.MULTI_SELECT; } else { mouseMode = mouseModeType.SELECT_RELATIONSHIP; if ( !r.isSelected() ) { pp.selectNone(); } } } r.setSelected(true); // moving pk/fk decoration boolean overPkDec = ((RelationshipUI) r.getUI()).isOverPkDecoration(p); if (overPkDec || ((RelationshipUI) r.getUI()).isOverFkDecoration(p)) { new RelationshipDecorationMover(r, overPkDec); } } else if (c instanceof TablePane) { evt.getComponent().requestFocus(); TablePane tp = (TablePane) c; PlayPen pp = (PlayPen) tp.getPlayPen(); try { int clickCol = tp.pointToColumnIndex(p); if ( mouseMode == mouseModeType.CREATING_TABLE ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isSelected() || mouseMode == mouseModeType.IDLE ) { mouseMode = mouseModeType.SELECT_TABLE; pp.selectNone(); } } else { mouseMode = mouseModeType.MULTI_SELECT; } if ( clickCol > TablePane.COLUMN_INDEX_TITLE && clickCol < tp.getModel().getColumns().size()) { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isColumnSelected(clickCol) ){ tp.deSelectEverythingElse(evt); tp.selectNone(); } mouseMode = mouseModeType.SELECT_COLUMN; } tp.selectColumn(clickCol); tp.fireSelectionEvent(new SelectionEvent(tp, SelectionEvent.SELECTION_EVENT)); tp.repaint(); } tp.setSelected(true); } if (clickCol == TablePane.COLUMN_INDEX_TITLE && !ArchitectFrame.getMainInstance().createRelationshipIsActive()) { Iterator it = pp.getSelectedTables().iterator(); logger.debug("event point: " + p); logger.debug("zoomed event point: " + pp.zoomPoint(new Point(p))); draggingTablePanes = true; while (it.hasNext()) { // create FloatingTableListener for each selected item TablePane t3 = (TablePane)it.next(); logger.debug("(" + t3.getModel().getName() + ") zoomed selected table point: " + t3.getLocationOnScreen()); logger.debug("(" + t3.getModel().getName() + ") unzoomed selected table point: " + pp.unzoomPoint(t3.getLocationOnScreen())); /* the floating table listener expects zoomed handles which are relative to the location of the table column which was clicked on. */ Point clickedColumn = tp.getLocationOnScreen(); Point otherTable = t3.getLocationOnScreen(); Point handle = pp.zoomPoint(new Point(p)); logger.debug("(" + t3.getModel().getName() + ") translation x=" + (otherTable.getX() - clickedColumn.getX()) + ",y=" + (otherTable.getY() - clickedColumn.getY())); handle.translate((int)(clickedColumn.getX() - otherTable.getX()), (int) (clickedColumn.getY() - otherTable.getY())); new PlayPen.FloatingTableListener(pp, t3, handle,false); } } } catch (ArchitectException e) { logger.error("Exception converting point to column", e); } } else { if ((evt.getModifiersEx() & InputEvent.BUTTON1_DOWN_MASK) != 0 && !evt.isPopupTrigger()) { mouseMode = mouseModeType.IDLE; selectNone(); rubberBandOrigin = new Point(p); rubberBand = new Rectangle(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); } } }
public void mousePressed(MouseEvent evt) { requestFocus(); maybeShowPopup(evt); Point p = evt.getPoint(); unzoomPoint(p); PlayPenComponent c = contentPane.getComponentAt(p); if (c != null) p.translate(-c.getX(), -c.getY()); if (c instanceof Relationship) { Relationship r = (Relationship) c; PlayPen pp = (PlayPen) r.getPlayPen(); if ( mouseMode == mouseModeType.CREATING_RELATIONSHIP ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) != 0) { mouseMode = mouseModeType.MULTI_SELECT; } else { mouseMode = mouseModeType.SELECT_RELATIONSHIP; if ( !r.isSelected() ) { pp.selectNone(); } } } r.setSelected(true); // moving pk/fk decoration boolean overPkDec = ((RelationshipUI) r.getUI()).isOverPkDecoration(p); if (overPkDec || ((RelationshipUI) r.getUI()).isOverFkDecoration(p)) { new RelationshipDecorationMover(r, overPkDec); } } else if (c instanceof TablePane) { evt.getComponent().requestFocus(); TablePane tp = (TablePane) c; PlayPen pp = (PlayPen) tp.getPlayPen(); try { int clickCol = tp.pointToColumnIndex(p); if ( mouseMode == mouseModeType.CREATING_TABLE ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isSelected() || mouseMode == mouseModeType.IDLE ) { mouseMode = mouseModeType.SELECT_TABLE; pp.selectNone(); } } else { mouseMode = mouseModeType.MULTI_SELECT; } if ( clickCol > TablePane.COLUMN_INDEX_TITLE && clickCol < tp.getModel().getColumns().size()) { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isColumnSelected(clickCol) ){ tp.deSelectEverythingElse(evt); tp.selectNone(); } mouseMode = MouseModeType.SELECT_COLUMN; } tp.selectColumn(clickCol); tp.fireSelectionEvent(new SelectionEvent(tp, SelectionEvent.SELECTION_EVENT)); tp.repaint(); } tp.setSelected(true); } if (clickCol == TablePane.COLUMN_INDEX_TITLE && !ArchitectFrame.getMainInstance().createRelationshipIsActive()) { Iterator it = pp.getSelectedTables().iterator(); logger.debug("event point: " + p); logger.debug("zoomed event point: " + pp.zoomPoint(new Point(p))); draggingTablePanes = true; while (it.hasNext()) { // create FloatingTableListener for each selected item TablePane t3 = (TablePane)it.next(); logger.debug("(" + t3.getModel().getName() + ") zoomed selected table point: " + t3.getLocationOnScreen()); logger.debug("(" + t3.getModel().getName() + ") unzoomed selected table point: " + pp.unzoomPoint(t3.getLocationOnScreen())); /* the floating table listener expects zoomed handles which are relative to the location of the table column which was clicked on. */ Point clickedColumn = tp.getLocationOnScreen(); Point otherTable = t3.getLocationOnScreen(); Point handle = pp.zoomPoint(new Point(p)); logger.debug("(" + t3.getModel().getName() + ") translation x=" + (otherTable.getX() - clickedColumn.getX()) + ",y=" + (otherTable.getY() - clickedColumn.getY())); handle.translate((int)(clickedColumn.getX() - otherTable.getX()), (int) (clickedColumn.getY() - otherTable.getY())); new PlayPen.FloatingTableListener(pp, t3, handle,false); } } } catch (ArchitectException e) { logger.error("Exception converting point to column", e); } } else { if ((evt.getModifiersEx() & InputEvent.BUTTON1_DOWN_MASK) != 0 && !evt.isPopupTrigger()) { mouseMode = mouseModeType.IDLE; selectNone(); rubberBandOrigin = new Point(p); rubberBand = new Rectangle(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); } } }
1,110,658
public void mousePressed(MouseEvent evt) { requestFocus(); maybeShowPopup(evt); Point p = evt.getPoint(); unzoomPoint(p); PlayPenComponent c = contentPane.getComponentAt(p); if (c != null) p.translate(-c.getX(), -c.getY()); if (c instanceof Relationship) { Relationship r = (Relationship) c; PlayPen pp = (PlayPen) r.getPlayPen(); if ( mouseMode == mouseModeType.CREATING_RELATIONSHIP ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) != 0) { mouseMode = mouseModeType.MULTI_SELECT; } else { mouseMode = mouseModeType.SELECT_RELATIONSHIP; if ( !r.isSelected() ) { pp.selectNone(); } } } r.setSelected(true); // moving pk/fk decoration boolean overPkDec = ((RelationshipUI) r.getUI()).isOverPkDecoration(p); if (overPkDec || ((RelationshipUI) r.getUI()).isOverFkDecoration(p)) { new RelationshipDecorationMover(r, overPkDec); } } else if (c instanceof TablePane) { evt.getComponent().requestFocus(); TablePane tp = (TablePane) c; PlayPen pp = (PlayPen) tp.getPlayPen(); try { int clickCol = tp.pointToColumnIndex(p); if ( mouseMode == mouseModeType.CREATING_TABLE ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isSelected() || mouseMode == mouseModeType.IDLE ) { mouseMode = mouseModeType.SELECT_TABLE; pp.selectNone(); } } else { mouseMode = mouseModeType.MULTI_SELECT; } if ( clickCol > TablePane.COLUMN_INDEX_TITLE && clickCol < tp.getModel().getColumns().size()) { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isColumnSelected(clickCol) ){ tp.deSelectEverythingElse(evt); tp.selectNone(); } mouseMode = mouseModeType.SELECT_COLUMN; } tp.selectColumn(clickCol); tp.fireSelectionEvent(new SelectionEvent(tp, SelectionEvent.SELECTION_EVENT)); tp.repaint(); } tp.setSelected(true); } if (clickCol == TablePane.COLUMN_INDEX_TITLE && !ArchitectFrame.getMainInstance().createRelationshipIsActive()) { Iterator it = pp.getSelectedTables().iterator(); logger.debug("event point: " + p); logger.debug("zoomed event point: " + pp.zoomPoint(new Point(p))); draggingTablePanes = true; while (it.hasNext()) { // create FloatingTableListener for each selected item TablePane t3 = (TablePane)it.next(); logger.debug("(" + t3.getModel().getName() + ") zoomed selected table point: " + t3.getLocationOnScreen()); logger.debug("(" + t3.getModel().getName() + ") unzoomed selected table point: " + pp.unzoomPoint(t3.getLocationOnScreen())); /* the floating table listener expects zoomed handles which are relative to the location of the table column which was clicked on. */ Point clickedColumn = tp.getLocationOnScreen(); Point otherTable = t3.getLocationOnScreen(); Point handle = pp.zoomPoint(new Point(p)); logger.debug("(" + t3.getModel().getName() + ") translation x=" + (otherTable.getX() - clickedColumn.getX()) + ",y=" + (otherTable.getY() - clickedColumn.getY())); handle.translate((int)(clickedColumn.getX() - otherTable.getX()), (int) (clickedColumn.getY() - otherTable.getY())); new PlayPen.FloatingTableListener(pp, t3, handle,false); } } } catch (ArchitectException e) { logger.error("Exception converting point to column", e); } } else { if ((evt.getModifiersEx() & InputEvent.BUTTON1_DOWN_MASK) != 0 && !evt.isPopupTrigger()) { mouseMode = mouseModeType.IDLE; selectNone(); rubberBandOrigin = new Point(p); rubberBand = new Rectangle(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); } } }
public void mousePressed(MouseEvent evt) { requestFocus(); maybeShowPopup(evt); Point p = evt.getPoint(); unzoomPoint(p); PlayPenComponent c = contentPane.getComponentAt(p); if (c != null) p.translate(-c.getX(), -c.getY()); if (c instanceof Relationship) { Relationship r = (Relationship) c; PlayPen pp = (PlayPen) r.getPlayPen(); if ( mouseMode == mouseModeType.CREATING_RELATIONSHIP ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) != 0) { mouseMode = mouseModeType.MULTI_SELECT; } else { mouseMode = mouseModeType.SELECT_RELATIONSHIP; if ( !r.isSelected() ) { pp.selectNone(); } } } r.setSelected(true); // moving pk/fk decoration boolean overPkDec = ((RelationshipUI) r.getUI()).isOverPkDecoration(p); if (overPkDec || ((RelationshipUI) r.getUI()).isOverFkDecoration(p)) { new RelationshipDecorationMover(r, overPkDec); } } else if (c instanceof TablePane) { evt.getComponent().requestFocus(); TablePane tp = (TablePane) c; PlayPen pp = (PlayPen) tp.getPlayPen(); try { int clickCol = tp.pointToColumnIndex(p); if ( mouseMode == mouseModeType.CREATING_TABLE ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isSelected() || mouseMode == mouseModeType.IDLE ) { mouseMode = mouseModeType.SELECT_TABLE; pp.selectNone(); } } else { mouseMode = mouseModeType.MULTI_SELECT; } if ( clickCol > TablePane.COLUMN_INDEX_TITLE && clickCol < tp.getModel().getColumns().size()) { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isColumnSelected(clickCol) ){ tp.deSelectEverythingElse(evt); tp.selectNone(); } mouseMode = mouseModeType.SELECT_COLUMN; } tp.selectColumn(clickCol); tp.fireSelectionEvent(new SelectionEvent(tp, SelectionEvent.SELECTION_EVENT)); tp.repaint(); } tp.setSelected(true); } if (clickCol == TablePane.COLUMN_INDEX_TITLE && !ArchitectFrame.getMainInstance().createRelationshipIsActive()) { Iterator it = pp.getSelectedTables().iterator(); logger.debug("event point: " + p); logger.debug("zoomed event point: " + pp.zoomPoint(new Point(p))); draggingTablePanes = true; while (it.hasNext()) { // create FloatingTableListener for each selected item TablePane t3 = (TablePane)it.next(); logger.debug("(" + t3.getModel().getName() + ") zoomed selected table point: " + t3.getLocationOnScreen()); logger.debug("(" + t3.getModel().getName() + ") unzoomed selected table point: " + pp.unzoomPoint(t3.getLocationOnScreen())); /* the floating table listener expects zoomed handles which are relative to the location of the table column which was clicked on. */ Point clickedColumn = tp.getLocationOnScreen(); Point otherTable = t3.getLocationOnScreen(); Point handle = pp.zoomPoint(new Point(p)); logger.debug("(" + t3.getModel().getName() + ") translation x=" + (otherTable.getX() - clickedColumn.getX()) + ",y=" + (otherTable.getY() - clickedColumn.getY())); handle.translate((int)(clickedColumn.getX() - otherTable.getX()), (int) (clickedColumn.getY() - otherTable.getY())); new PlayPen.FloatingTableListener(pp, t3, handle,false); } } } catch (ArchitectException e) { logger.error("Exception converting point to column", e); } } else { if ((evt.getModifiersEx() & InputEvent.BUTTON1_DOWN_MASK) != 0 && !evt.isPopupTrigger()) { mouseMode = MouseModeType.IDLE; selectNone(); rubberBandOrigin = new Point(p); rubberBand = new Rectangle(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); } } }
1,110,659
public void mouseReleased(MouseEvent evt) { draggingTablePanes = false; if (rubberBand != null) { if (evt.getButton() == MouseEvent.BUTTON1) { Rectangle dirtyRegion = rubberBand; rubberBandOrigin = null; rubberBand = null; zoomRect(dirtyRegion); repaintRubberBandRegion(dirtyRegion); if ( getSelectedItems().size() > 0 ) mouseMode = mouseModeType.MULTI_SELECT; else mouseMode = mouseModeType.IDLE; } } maybeShowPopup(evt); }
public void mouseReleased(MouseEvent evt) { draggingTablePanes = false; if (rubberBand != null) { if (evt.getButton() == MouseEvent.BUTTON1) { Rectangle dirtyRegion = rubberBand; rubberBandOrigin = null; rubberBand = null; zoomRect(dirtyRegion); repaintRubberBandRegion(dirtyRegion); if ( getSelectedItems().size() > 0 ) mouseMode = MouseModeType.MULTI_SELECT; else mouseMode = mouseModeType.IDLE; } } maybeShowPopup(evt); }
1,110,660
public void mouseReleased(MouseEvent evt) { draggingTablePanes = false; if (rubberBand != null) { if (evt.getButton() == MouseEvent.BUTTON1) { Rectangle dirtyRegion = rubberBand; rubberBandOrigin = null; rubberBand = null; zoomRect(dirtyRegion); repaintRubberBandRegion(dirtyRegion); if ( getSelectedItems().size() > 0 ) mouseMode = mouseModeType.MULTI_SELECT; else mouseMode = mouseModeType.IDLE; } } maybeShowPopup(evt); }
public void mouseReleased(MouseEvent evt) { draggingTablePanes = false; if (rubberBand != null) { if (evt.getButton() == MouseEvent.BUTTON1) { Rectangle dirtyRegion = rubberBand; rubberBandOrigin = null; rubberBand = null; zoomRect(dirtyRegion); repaintRubberBandRegion(dirtyRegion); if ( getSelectedItems().size() > 0 ) mouseMode = mouseModeType.MULTI_SELECT; else mouseMode = MouseModeType.IDLE; } } maybeShowPopup(evt); }
1,110,661
public static void setMouseMode(mouseModeType mouseMode) { PlayPen.mouseMode = mouseMode; }
public static void setMouseMode(MouseModeType mouseMode) { PlayPen.mouseMode = mouseMode; }
1,110,663
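The rows above all apply the same one-token repair: references through the nonconforming lowercase type name mouseModeType become the properly capitalized MouseModeType, both at constant use sites (e.g. MouseModeType.IDLE) and in the setMouseMode parameter. A minimal sketch of the convention, assuming an illustrative enum body (the real type belongs to PlayPen):

public class MouseModeSketch {
    // Java type names are UpperCamelCase; enum constants are reached
    // through the type name, e.g. MouseModeType.IDLE.
    enum MouseModeType {
        IDLE, MULTI_SELECT, SELECT_TABLE, SELECT_COLUMN,
        SELECT_RELATIONSHIP, CREATING_TABLE, CREATING_RELATIONSHIP,
        RUBBERBAND_MOVE
    }

    private static MouseModeType mouseMode = MouseModeType.IDLE;

    // Mirrors the fixed setMouseMode row: the parameter is declared with
    // the type name, not the field-style spelling used by the bugged rows.
    public static void setMouseMode(MouseModeType mode) { mouseMode = mode; }

    public static void main(String[] args) {
        setMouseMode(MouseModeType.MULTI_SELECT);
        System.out.println(mouseMode); // prints MULTI_SELECT
    }
}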
public void paintComponent(Graphics g){ Graphics2D g2 = (Graphics2D) g; Dimension size = getSize(); Dimension pref = getPreferredSize(); Rectangle visRect = getVisibleRect(); /* boxSize = ((clipRect.width-2*H_BORDER)/dPrimeTable.length-1); if (boxSize < 12){boxSize=12;} if (boxSize < 25){ printDetails = false; boxRadius = boxSize/2; }else{ boxRadius = boxSize/2 - 1; } */ //okay so this dumb if block is to prevent the ugly repainting //bug when loading markers after the data are already being displayed, //results in a little off-centering for small datasets, but not too bad. //clickxshift and clickyshift are used later to translate from x,y coords //to the pair of markers comparison at those coords if (!(markersLoaded)){ g2.translate((size.width - pref.width) / 2, (size.height - pref.height) / 2); clickXShift = left + (size.width-pref.width)/2; clickYShift = top + (size.height - pref.height)/2; } else { g2.translate((size.width - pref.width) / 2, 0); clickXShift = left + (size.width-pref.width)/2; clickYShift = top; } FontMetrics boxFontMetrics = g.getFontMetrics(boxFont); int diamondX[] = new int[4]; int diamondY[] = new int[4]; Polygon diamond; left = H_BORDER; top = V_BORDER; FontMetrics metrics; int ascent; g2.setColor(this.getBackground()); g2.fillRect(0,0,pref.width,pref.height); g2.setColor(Color.BLACK); if (markersLoaded) { g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); //// draw the marker locations BasicStroke thickerStroke = new BasicStroke(1); BasicStroke thinnerStroke = new BasicStroke(0.25f); int wide = (dPrimeTable.length-1) * boxSize; //TODO: talk to kirby about locusview scaling gizmo int lineLeft = wide/20; int lineSpan = (wide/10)*9; long minpos = Chromosome.getMarker(0).getPosition(); long maxpos = Chromosome.getMarker(Chromosome.size()-1).getPosition(); double spanpos = maxpos - minpos; g2.setStroke(thinnerStroke); g2.setColor(Color.white); g2.fillRect(left + lineLeft, 5, lineSpan, TICK_HEIGHT); g2.setColor(Color.black); g2.drawRect(left + lineLeft, 5, lineSpan, TICK_HEIGHT); for (int i = 0; i < Chromosome.size(); i++) { double pos = (Chromosome.getMarker(i).getPosition() - minpos) / spanpos; int xx = (int) (left + lineLeft + lineSpan*pos); g2.setStroke(thickerStroke); g.drawLine(xx, 5, xx, 5 + TICK_HEIGHT); g2.setStroke(thinnerStroke); g.drawLine(xx, 5 + TICK_HEIGHT, left + i*boxSize, TICK_BOTTOM); } top += TICK_BOTTOM; //// draw the marker names if (printDetails){ g.setFont(markerNameFont); metrics = g.getFontMetrics(); ascent = metrics.getAscent(); widestMarkerName = metrics.stringWidth(Chromosome.getMarker(0).getName()); for (int x = 1; x < dPrimeTable.length; x++) { int thiswide = metrics.stringWidth(Chromosome.getMarker(x).getName()); if (thiswide > widestMarkerName) widestMarkerName = thiswide; } //System.out.println(widest); g2.translate(left, top + widestMarkerName); g2.rotate(-Math.PI / 2.0); for (int x = 0; x < dPrimeTable.length; x++) { g2.drawString(Chromosome.getMarker(x).getName(),TEXT_NUMBER_GAP, x*boxSize + ascent/3); } g2.rotate(Math.PI / 2.0); g2.translate(-left, -(top + widestMarkerName)); // move everybody down top += widestMarkerName + TEXT_NUMBER_GAP; } g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); } //// draw the marker numbers if (printDetails){ g.setFont(markerNumFont); metrics = g.getFontMetrics(); ascent = metrics.getAscent(); for (int x = 0; x < dPrimeTable.length; x++) { String mark = String.valueOf(Chromosome.realIndex[x] + 1); g.drawString(mark, left + x*boxSize - metrics.stringWidth(mark)/2, top + ascent); } top += boxRadius/2; // give a little space between numbers and boxes } //the following values are the bounds on the boxes we want to //display given that the current window is 'visRect' lowX = (visRect.x-clickXShift-(visRect.y + visRect.height-clickYShift))/boxSize; if (lowX < 0) { lowX = 0; } highX = ((visRect.x + visRect.width)/boxSize)+1; if (highX > dPrimeTable.length-1){ highX = dPrimeTable.length-1; } lowY = ((visRect.x-clickXShift)+(visRect.y-clickYShift))/boxSize; if (lowY < lowX+1){ lowY = lowX+1; } highY = (((visRect.x-clickXShift+visRect.width) + (visRect.y-clickYShift+visRect.height))/boxSize)+1; if (highY > dPrimeTable.length){ highY = dPrimeTable.length; } // draw table column by column for (int x = lowX; x < highX; x++) { //always draw the fewest possible boxes if (lowY < x+1){ lowY = x+1; } for (int y = lowY; y < highY; y++) { if (dPrimeTable[x][y] == null){ continue; } //TODO:if you load data then info it doesn't handle selective drawing correctly double d = dPrimeTable[x][y].getDPrime(); //double l = dPrimeTable[x][y].getLOD(); Color boxColor = dPrimeTable[x][y].getColor(); // draw markers above int xx = left + (x + y) * boxSize / 2; int yy = top + (y - x) * boxSize / 2; diamondX[0] = xx; diamondY[0] = yy - boxRadius; diamondX[1] = xx + boxRadius; diamondY[1] = yy; diamondX[2] = xx; diamondY[2] = yy + boxRadius; diamondX[3] = xx - boxRadius; diamondY[3] = yy; diamond = new Polygon(diamondX, diamondY, 4); g.setColor(boxColor); g.fillPolygon(diamond); if (boxColor == Color.white) { g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); g.setColor(Color.lightGray); g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); } if(printDetails){ g.setFont(boxFont); ascent = boxFontMetrics.getAscent(); int val = (int) (d * 100); g.setColor((val < 50) ? Color.gray : Color.black); if (val != 100) { String valu = String.valueOf(val); int widf = boxFontMetrics.stringWidth(valu); g.drawString(valu, xx - widf/2, yy + ascent/2); } } } } if (pref.getWidth() > visRect.width){ if (noImage){ //first time through draw a worldmap if dataset is big: final int WM_MAX_WIDTH = 300; double scalefactor; scalefactor = (double)(chartSize.width)/WM_MAX_WIDTH; CompoundBorder wmBorder = new CompoundBorder(BorderFactory.createRaisedBevelBorder(), BorderFactory.createLoweredBevelBorder()); worldmap = new BufferedImage((int)(chartSize.width/scalefactor)+wmBorder.getBorderInsets(this).left*2, (int)(chartSize.height/scalefactor)+wmBorder.getBorderInsets(this).top*2, BufferedImage.TYPE_3BYTE_BGR); Graphics gw = worldmap.getGraphics(); Graphics2D gw2 = (Graphics2D)(gw); gw2.setColor(this.getBackground()); gw2.fillRect(1,1,worldmap.getWidth()-2,worldmap.getHeight()-2); //make a pretty border gw2.setColor(Color.BLACK); wmBorder.paintBorder(this,gw2,0,0,worldmap.getWidth()-1,worldmap.getHeight()-1); ir = wmBorder.getInteriorRectangle(this,0,0,worldmap.getWidth()-1, worldmap.getHeight()-1); double prefBoxSize = boxSize/scalefactor; float[] smallDiamondX = new float[4]; float[] smallDiamondY = new float[4]; GeneralPath gp; System.out.println(System.currentTimeMillis()); for (int x = 0; x < dPrimeTable.length-1; x++){ for (int y = x+1; y < dPrimeTable.length; y++){ if (dPrimeTable[x][y] == null){ continue; } double xx = (x + y)*prefBoxSize/2+wmBorder.getBorderInsets(this).left; double yy = (y - x)*prefBoxSize/2+wmBorder.getBorderInsets(this).top; smallDiamondX[0] = (float)xx; smallDiamondY[0] = (float)(yy - prefBoxSize/2); smallDiamondX[1] = (float)(xx + prefBoxSize/2); smallDiamondY[1] = (float)yy; smallDiamondX[2] = (float)xx; smallDiamondY[2] = (float)(yy + prefBoxSize/2); smallDiamondX[3] = (float)(xx - prefBoxSize/2); smallDiamondY[3] = (float)yy; gp = new GeneralPath(GeneralPath.WIND_EVEN_ODD, smallDiamondX.length); gp.moveTo(smallDiamondX[0],smallDiamondY[0]); for (int i = 1; i < smallDiamondX.length; i++){ gp.lineTo(smallDiamondX[i], smallDiamondY[i]); } gp.closePath(); gw2.setColor(dPrimeTable[x][y].getColor()); gw2.fill(gp); } } System.out.println(System.currentTimeMillis()); noImage = false; } paintWorldMap(g); } }
public void paintComponent(Graphics g){ Graphics2D g2 = (Graphics2D) g; Dimension size = getSize(); Dimension pref = getPreferredSize(); Rectangle visRect = getVisibleRect(); /* boxSize = ((clipRect.width-2*H_BORDER)/dPrimeTable.length-1); if (boxSize < 12){boxSize=12;} if (boxSize < 25){ printDetails = false; boxRadius = boxSize/2; }else{ boxRadius = boxSize/2 - 1; } */ //okay so this dumb if block is to prevent the ugly repainting //bug when loading markers after the data are already being displayed, //results in a little off-centering for small datasets, but not too bad. //clickxshift and clickyshift are used later to translate from x,y coords //to the pair of markers comparison at those coords if (!(markersLoaded)){ g2.translate((size.width - pref.width) / 2, (size.height - pref.height) / 2); clickXShift = left + (size.width-pref.width)/2; clickYShift = top + (size.height - pref.height)/2; } else { g2.translate((size.width - pref.width) / 2, 0); clickXShift = left + (size.width-pref.width)/2; clickYShift = top; } FontMetrics boxFontMetrics = g.getFontMetrics(boxFont); int diamondX[] = new int[4]; int diamondY[] = new int[4]; Polygon diamond; left = H_BORDER; top = V_BORDER; FontMetrics metrics; int ascent; g2.setColor(this.getBackground()); g2.fillRect(0,0,pref.width,pref.height); g2.setColor(Color.BLACK); if (markersLoaded) { g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); //// draw the marker locations BasicStroke thickerStroke = new BasicStroke(1); BasicStroke thinnerStroke = new BasicStroke(0.25f); int wide = (dPrimeTable.length-1) * boxSize; //TODO: talk to kirby about locusview scaling gizmo int lineLeft = wide/20; int lineSpan = (wide/10)*9; long minpos = Chromosome.getMarker(0).getPosition(); long maxpos = Chromosome.getMarker(Chromosome.size()-1).getPosition(); double spanpos = maxpos - minpos; g2.setStroke(thinnerStroke); g2.setColor(Color.white); g2.fillRect(left + lineLeft, 5, lineSpan, TICK_HEIGHT); g2.setColor(Color.black); g2.drawRect(left + lineLeft, 5, lineSpan, TICK_HEIGHT); for (int i = 0; i < Chromosome.size(); i++) { double pos = (Chromosome.getMarker(i).getPosition() - minpos) / spanpos; int xx = (int) (left + lineLeft + lineSpan*pos); g2.setStroke(thickerStroke); g.drawLine(xx, 5, xx, 5 + TICK_HEIGHT); g2.setStroke(thinnerStroke); g.drawLine(xx, 5 + TICK_HEIGHT, left + i*boxSize, TICK_BOTTOM); } top += TICK_BOTTOM; //// draw the marker names if (printDetails){ g.setFont(markerNameFont); metrics = g.getFontMetrics(); ascent = metrics.getAscent(); widestMarkerName = metrics.stringWidth(Chromosome.getMarker(0).getName()); for (int x = 1; x < dPrimeTable.length; x++) { int thiswide = metrics.stringWidth(Chromosome.getMarker(x).getName()); if (thiswide > widestMarkerName) widestMarkerName = thiswide; } //System.out.println(widest); g2.translate(left, top + widestMarkerName); g2.rotate(-Math.PI / 2.0); for (int x = 0; x < dPrimeTable.length; x++) { g2.drawString(Chromosome.getMarker(x).getName(),TEXT_NUMBER_GAP, x*boxSize + ascent/3); } g2.rotate(Math.PI / 2.0); g2.translate(-left, -(top + widestMarkerName)); // move everybody down top += widestMarkerName + TEXT_NUMBER_GAP; } g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); } //// draw the marker numbers if (printDetails){ g.setFont(markerNumFont); metrics = g.getFontMetrics(); ascent = metrics.getAscent(); for (int x = 0; x < dPrimeTable.length; x++) { String mark = String.valueOf(Chromosome.realIndex[x] + 1); g.drawString(mark, left + 
x*boxSize - metrics.stringWidth(mark)/2, top + ascent); } top += boxRadius/2; // give a little space between numbers and boxes } //the following values are the bounds on the boxes we want to //display given that the current window is 'visRect' lowX = (visRect.x-clickXShift-(visRect.y + visRect.height-clickYShift))/boxSize; if (lowX < 0) { lowX = 0; } highX = ((visRect.x + visRect.width)/boxSize)+1; if (highX > dPrimeTable.length-1){ highX = dPrimeTable.length-1; } lowY = ((visRect.x-clickXShift)+(visRect.y-clickYShift))/boxSize; if (lowY < lowX+1){ lowY = lowX+1; } highY = (((visRect.x-clickXShift+visRect.width) + (visRect.y-clickYShift+visRect.height))/boxSize)+1; if (highY > dPrimeTable.length){ highY = dPrimeTable.length; } // draw table column by column for (int x = lowX; x < highX; x++) { //always draw the fewest possible boxes if (lowY < x+1){ lowY = x+1; } for (int y = lowY; y < highY; y++) { if (dPrimeTable[x][y] == null){ continue; } //TODO:if you load data then info it doesn't handle selective drawing correctly double d = dPrimeTable[x][y].getDPrime(); //double l = dPrimeTable[x][y].getLOD(); Color boxColor = dPrimeTable[x][y].getColor(); // draw markers above int xx = left + (x + y) * boxSize / 2; int yy = top + (y - x) * boxSize / 2; diamondX[0] = xx; diamondY[0] = yy - boxRadius; diamondX[1] = xx + boxRadius; diamondY[1] = yy; diamondX[2] = xx; diamondY[2] = yy + boxRadius; diamondX[3] = xx - boxRadius; diamondY[3] = yy; diamond = new Polygon(diamondX, diamondY, 4); g.setColor(boxColor); g.fillPolygon(diamond); if (boxColor == Color.white) { g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); g.setColor(Color.lightGray); g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); } if(printDetails){ g.setFont(boxFont); ascent = boxFontMetrics.getAscent(); int val = (int) (d * 100); g.setColor((val < 50) ? 
Color.gray : Color.black); if (val != 100) { String valu = String.valueOf(val); int widf = boxFontMetrics.stringWidth(valu); g.drawString(valu, xx - widf/2, yy + ascent/2); } } } } if (pref.getWidth() > visRect.width){ if (noImage){ //first time through draw a worldmap if dataset is big: final int WM_MAX_WIDTH = 300; double scalefactor; scalefactor = (double)(chartSize.width)/WM_MAX_WIDTH; CompoundBorder wmBorder = new CompoundBorder(BorderFactory.createRaisedBevelBorder(), BorderFactory.createLoweredBevelBorder()); worldmap = new BufferedImage((int)(chartSize.width/scalefactor)+wmBorder.getBorderInsets(this).left*2, (int)(chartSize.height/scalefactor)+wmBorder.getBorderInsets(this).top*2, BufferedImage.TYPE_3BYTE_BGR); Graphics gw = worldmap.getGraphics(); Graphics2D gw2 = (Graphics2D)(gw); gw2.setColor(this.getBackground()); gw2.fillRect(1,1,worldmap.getWidth()-2,worldmap.getHeight()-2); //make a pretty border gw2.setColor(Color.BLACK); wmBorder.paintBorder(this,gw2,0,0,worldmap.getWidth()-1,worldmap.getHeight()-1); ir = wmBorder.getInteriorRectangle(this,0,0,worldmap.getWidth()-1, worldmap.getHeight()-1); double prefBoxSize = boxSize/scalefactor; float[] smallDiamondX = new float[4]; float[] smallDiamondY = new float[4]; GeneralPath gp; for (int x = 0; x < dPrimeTable.length-1; x++){ for (int y = x+1; y < dPrimeTable.length; y++){ if (dPrimeTable[x][y] == null){ continue; } double xx = (x + y)*prefBoxSize/2+wmBorder.getBorderInsets(this).left; double yy = (y - x)*prefBoxSize/2+wmBorder.getBorderInsets(this).top; smallDiamondX[0] = (float)xx; smallDiamondY[0] = (float)(yy - prefBoxSize/2); smallDiamondX[1] = (float)(xx + prefBoxSize/2); smallDiamondY[1] = (float)yy; smallDiamondX[2] = (float)xx; smallDiamondY[2] = (float)(yy + prefBoxSize/2); smallDiamondX[3] = (float)(xx - prefBoxSize/2); smallDiamondY[3] = (float)yy; gp = new GeneralPath(GeneralPath.WIND_EVEN_ODD, smallDiamondX.length); gp.moveTo(smallDiamondX[0],smallDiamondY[0]); for (int i = 1; i < smallDiamondX.length; i++){ gp.lineTo(smallDiamondX[i], smallDiamondY[i]); } gp.closePath(); gw2.setColor(dPrimeTable[x][y].getColor()); gw2.fill(gp); } } noImage = false; } paintWorldMap(g); } }
1,110,665
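The only change between the bugged and fixed paintComponent above is the removal of the two System.out.println(System.currentTimeMillis()) calls bracketing the worldmap fill loop. A minimal sketch of how that timing could be kept without writing to stdout; the class and method names here are hypothetical, not part of the source:

import java.util.logging.Level;
import java.util.logging.Logger;

// Hypothetical helper, not from the source: times a task on the monotonic
// nanoTime clock and reports through a logger instead of System.out.
public final class RenderTimer {
    private static final Logger LOG = Logger.getLogger(RenderTimer.class.getName());

    private RenderTimer() {}

    public static void timed(String label, Runnable task) {
        long start = System.nanoTime();
        task.run();
        long elapsedMs = (System.nanoTime() - start) / 1_000_000;
        LOG.log(Level.FINE, "{0} took {1} ms", new Object[]{label, elapsedMs});
    }
}

A call such as RenderTimer.timed("worldmap", () -> fillWorldmap(gw2)) (fillWorldmap being a hypothetical extraction of the loop) would keep the measurement available at FINE level while leaving normal output clean.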
private void processFile(String fileName,int fileType,String infoFileName){ try { int outputType; long maxDistance; long negMaxDistance; HaploData textData; File OutputFile; File inputFile; inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } maxDistance = this.arg_distance * 1000; negMaxDistance = -maxDistance; outputType = this.arg_output; textData = new HaploData(); Vector result = null; if(fileType == 0){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == 1) { //read in ped file /* if(this.arg_ignoreMarkers.size()>0) { for(int i=0;i<this.arg_ignoreMarkers.size();i++){ int index = Integer.parseInt((String)this.arg_ignoreMarkers.get(i)); if(index>0 && index<markerResultArray.length){ markerResultArray[index] = false; if(!this.arg_quiet) { System.out.println("Ignoring marker " + (index)); } } } }*/ result = textData.linkageToChrom(inputFile, 3, arg_skipCheck); }else{ //read in hapmapfile result = textData.linkageToChrom(inputFile,4,arg_skipCheck); } File infoFile; if(infoFileName.equals("")) { infoFile = null; }else{ infoFile = new File(infoFileName); } textData.prepareMarkerInput(infoFile,maxDistance,textData.getPedFile().getHMInfo()); if(!arg_quiet && infoFile != null){ System.out.println("Using marker file " + infoFile.getName()); } textData.infoKnown = true; if(this.arg_showCheck && result != null) { System.out.println("Data check results:\n" + "Name\tObsHET\tPredHET\tHWpval\t%Geno\tFamTrio\tMendErr"); for(int i=0;i<result.size();i++){ MarkerResult currentResult = (MarkerResult)result.get(i); System.out.println( Chromosome.getMarker(i).getName() +"\t"+ currentResult.getObsHet() +"\t"+ currentResult.getPredHet() +"\t"+ currentResult.getHWpvalue() +"\t"+ currentResult.getGenoPercent() +"\t"+ currentResult.getFamTrioNum() +"\t"+ currentResult.getMendErrNum()); } } if(this.arg_check && result != null){ OutputFile = new File (fileName + ".CHECK"); FileWriter saveCheckWriter = new FileWriter(OutputFile); saveCheckWriter.write("Name\tObsHET\tPredHET\tHWpval\t%Geno\tFamTrio\tMendErr\n"); for(int i=0;i<result.size();i++){ MarkerResult currentResult = (MarkerResult)result.get(i); saveCheckWriter.write( Chromosome.getMarker(i).getName() +"\t"+ currentResult.getObsHet() +"\t"+ currentResult.getPredHet() +"\t"+ currentResult.getHWpvalue() +"\t"+ currentResult.getGenoPercent() +"\t"+ currentResult.getFamTrioNum() +"\t"+ currentResult.getMendErrNum() +"\n"); } saveCheckWriter.close(); } if(outputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; switch(outputType){ case BLOX_GABRIEL: OutputFile = new File(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = new File(fileName + ".4GAMblocks"); break; case BLOX_SPINE: OutputFile = new File(fileName + ".SPINEblocks"); break; default: OutputFile = new File(fileName + ".GABRIELblocks"); break; } //this handles output type ALL int start = 0; int stop = Chromosome.getFilteredSize(); if(outputType == BLOX_ALL) { OutputFile = new File(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + 
".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); }else{ textData.guessBlocks(outputType); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); } } if(this.arg_dprime) { OutputFile = new File(fileName + ".DPRIME"); if (textData.filteredDPrimeTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getFilteredSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getFilteredSize()); } } //if(fileType){ //TDT.calcTrioTDT(textData.chromosomes); //TODO: Deal with this. why do we calc TDT? and make sure not to do it except when appropriate //} } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } }
private void processFile(String fileName,int fileType,String infoFileName){ try { int outputType; long maxDistance; long negMaxDistance; HaploData textData; File OutputFile; File inputFile; inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } maxDistance = this.arg_distance * 1000; negMaxDistance = -maxDistance; outputType = this.arg_output; textData = new HaploData(0); Vector result = null; if(fileType == 0){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == 1) { //read in ped file /* if(this.arg_ignoreMarkers.size()>0) { for(int i=0;i<this.arg_ignoreMarkers.size();i++){ int index = Integer.parseInt((String)this.arg_ignoreMarkers.get(i)); if(index>0 && index<markerResultArray.length){ markerResultArray[index] = false; if(!this.arg_quiet) { System.out.println("Ignoring marker " + (index)); } } } }*/ result = textData.linkageToChrom(inputFile, 3, arg_skipCheck); }else{ //read in hapmapfile result = textData.linkageToChrom(inputFile,4,arg_skipCheck); } File infoFile; if(infoFileName.equals("")) { infoFile = null; }else{ infoFile = new File(infoFileName); } textData.prepareMarkerInput(infoFile,maxDistance,textData.getPedFile().getHMInfo()); if(!arg_quiet && infoFile != null){ System.out.println("Using marker file " + infoFile.getName()); } textData.infoKnown = true; if(this.arg_showCheck && result != null) { System.out.println("Data check results:\n" + "Name\tObsHET\tPredHET\tHWpval\t%Geno\tFamTrio\tMendErr"); for(int i=0;i<result.size();i++){ MarkerResult currentResult = (MarkerResult)result.get(i); System.out.println( Chromosome.getMarker(i).getName() +"\t"+ currentResult.getObsHet() +"\t"+ currentResult.getPredHet() +"\t"+ currentResult.getHWpvalue() +"\t"+ currentResult.getGenoPercent() +"\t"+ currentResult.getFamTrioNum() +"\t"+ currentResult.getMendErrNum()); } } if(this.arg_check && result != null){ OutputFile = new File (fileName + ".CHECK"); FileWriter saveCheckWriter = new FileWriter(OutputFile); saveCheckWriter.write("Name\tObsHET\tPredHET\tHWpval\t%Geno\tFamTrio\tMendErr\n"); for(int i=0;i<result.size();i++){ MarkerResult currentResult = (MarkerResult)result.get(i); saveCheckWriter.write( Chromosome.getMarker(i).getName() +"\t"+ currentResult.getObsHet() +"\t"+ currentResult.getPredHet() +"\t"+ currentResult.getHWpvalue() +"\t"+ currentResult.getGenoPercent() +"\t"+ currentResult.getFamTrioNum() +"\t"+ currentResult.getMendErrNum() +"\n"); } saveCheckWriter.close(); } if(outputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; switch(outputType){ case BLOX_GABRIEL: OutputFile = new File(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = new File(fileName + ".4GAMblocks"); break; case BLOX_SPINE: OutputFile = new File(fileName + ".SPINEblocks"); break; default: OutputFile = new File(fileName + ".GABRIELblocks"); break; } //this handles output type ALL int start = 0; int stop = Chromosome.getFilteredSize(); if(outputType == BLOX_ALL) { OutputFile = new File(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + 
".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); }else{ textData.guessBlocks(outputType); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); } } if(this.arg_dprime) { OutputFile = new File(fileName + ".DPRIME"); if (textData.filteredDPrimeTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getFilteredSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getFilteredSize()); } } //if(fileType){ //TDT.calcTrioTDT(textData.chromosomes); //TODO: Deal with this. why do we calc TDT? and make sure not to do it except when appropriate //} } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } }
1,110,666
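The one-token fix in the pair above replaces new HaploData() with new HaploData(0). A delegating overload is the usual way to make the two forms equivalent; this is a hypothetical reconstruction, since the real HaploData constructors are not shown in the source:

// Hypothetical sketch only; the actual Haploview HaploData class may differ.
class HaploData {
    private final int viewIndex;

    HaploData(int viewIndex) {      // the form the fixed code calls
        this.viewIndex = viewIndex;
    }

    HaploData() {                   // delegating no-arg form; with this,
        this(0);                    // new HaploData() and new HaploData(0)
    }                               // behave identically
}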
public void login(ServiceContext context, String username, String password) throws ServiceException{ LoginCallbackHandler callbackHandler = new LoginCallbackHandler(username, password); User user = null; UserManager userManager = UserManager.getInstance(); UserActivityLogger logger = UserActivityLogger.getInstance(); try{ loginContext = new LoginContext(AuthConstants.AUTH_CONFIG_INDEX, callbackHandler); loginContext.login(); /* Need this for external login modules, user is really authenticated after this step */ Set principals = loginContext.getSubject().getPrincipals(); Object obj = null; for(Iterator principalIt = principals.iterator(); principalIt.hasNext();){ if((obj = principalIt.next()) instanceof User){ user = (User)obj; break; } } /* Successful login: - Add new users authenticated through external LoginModules. - Update the lock count and status of existing users */ if(user == null){ user = new User(); user.setUsername(username); user.setExternalUser(true); List roles = new ArrayList(); roles.add(new Role(org.jmanage.core.auth.ExternalUserRolesConfig.getInstance().getUserRole(username))); user.setRoles(roles); }else{ user = userManager.getUser(user.getName()); user.setLockCount(0); user.setStatus(null); userManager.updateUser(user); } /* set Subject in session */ context._setUser(user); logger.logActivity(user.getName(), "logged in successfully"); }catch(LoginException lex){ user = userManager.getUser(username); String errorCode = ErrorCodes.UNKNOWN_ERROR; Object[] values = null; /* Conditionalize the error message */ if(user == null){ errorCode = ErrorCodes.INVALID_CREDENTIALS; }else if(User.STATUS_LOCKED.equals(user.getStatus())){ errorCode = ErrorCodes.ACCOUNT_LOCKED; }else if(user.getLockCount() < MAX_LOGIN_ATTEMPTS_ALLOWED){ int thisAttempt = user.getLockCount()+1; user.setLockCount(thisAttempt); if(thisAttempt == MAX_LOGIN_ATTEMPTS_ALLOWED){ user.setStatus(User.STATUS_LOCKED); userManager.updateUser(user); errorCode = ErrorCodes.ACCOUNT_LOCKED; }else{ userManager.updateUser(user); errorCode = ErrorCodes.INVALID_LOGIN_ATTEMPTS; values = new Object[]{ String.valueOf(MAX_LOGIN_ATTEMPTS_ALLOWED - thisAttempt)}; } } if(user != null) logger.logActivity(username, user.getName()+" failed to login"); throw new ServiceException(errorCode, values); } }
public void login(ServiceContext context, String username, String password) throws ServiceException{ LoginCallbackHandler callbackHandler = new LoginCallbackHandler(username, password); User user = null; UserManager userManager = UserManager.getInstance(); UserActivityLogger logger = UserActivityLogger.getInstance(); try{ loginContext = new LoginContext(AuthConstants.AUTH_CONFIG_INDEX, callbackHandler); loginContext.login(); /* Need this for external login modules, user is really authenticated after this step */ Set principals = loginContext.getSubject().getPrincipals(); Object obj = null; for(Iterator principalIt = principals.iterator(); principalIt.hasNext();){ if((obj = principalIt.next()) instanceof User){ user = (User)obj; break; } } /* Successful login: - Add new users authenticated through external LoginModules. - Update the lock count and status of existing users */ if(user == null){ user = new User(); user.setUsername(username); user.setExternalUser(true); List roles = new ArrayList(); roles.add(new Role(org.jmanage.core.auth.ExternalUserRolesConfig.getInstance().getUserRole(username))); user.setRoles(roles); }else{ user = userManager.getUser(user.getName()); user.setLockCount(0); user.setStatus(null); userManager.updateUser(user); } /* set Subject in session */ context._setUser(user); logger.logActivity(user.getName(), "logged in successfully"); }catch(LoginException lex){ user = userManager.getUser(username); String errorCode = ErrorCodes.UNKNOWN_ERROR; Object[] values = null; /* Conditionalize the error message */ if(user == null){ errorCode = ErrorCodes.INVALID_CREDENTIALS; }else if(User.STATUS_LOCKED.equals(user.getStatus())){ errorCode = ErrorCodes.ACCOUNT_LOCKED; }else if(user.getLockCount() < MAX_LOGIN_ATTEMPTS_ALLOWED){ int thisAttempt = user.getLockCount()+1; user.setLockCount(thisAttempt); if(thisAttempt == MAX_LOGIN_ATTEMPTS_ALLOWED){ user.setStatus(User.STATUS_LOCKED); userManager.updateUser(user); errorCode = ErrorCodes.ACCOUNT_LOCKED; }else{ userManager.updateUser(user); errorCode = ErrorCodes.INVALID_LOGIN_ATTEMPTS; values = new Object[]{ String.valueOf(MAX_LOGIN_ATTEMPTS_ALLOWED - thisAttempt)}; } } if(user != null) logger.logActivity(username, user.getName()+" failed to login"); throw new ServiceException(errorCode, values); } }
1,110,667
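The two login listings above are identical; the subtle part of the method is the lockout accounting in the catch block. A standalone sketch of that counting, with a stub standing in for the jManage User class and an assumed value for MAX_LOGIN_ATTEMPTS_ALLOWED (the source never shows it):

// Stub types and the limit value are assumptions for illustration.
final class LockoutSketch {
    static final int MAX_LOGIN_ATTEMPTS_ALLOWED = 3; // assumed value

    static final class UserStub {
        int lockCount;
        boolean locked;
    }

    /** Records one failed login; returns true when the account locks. */
    static boolean recordFailure(UserStub user) {
        int thisAttempt = user.lockCount + 1;
        user.lockCount = thisAttempt;
        if (thisAttempt == MAX_LOGIN_ATTEMPTS_ALLOWED) {
            user.locked = true;   // mirrors user.setStatus(User.STATUS_LOCKED)
            return true;
        }
        return false;             // mirrors the INVALID_LOGIN_ATTEMPTS branch
    }
}

Note that the success path above resets the count with setLockCount(0), so the limit only applies to consecutive failures.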
public void logout(ServiceContext context, User user)throws ServiceException{ try{ loginContext.logout(); UserActivityLogger.getInstance().logActivity(user.getName(), "logged out successfully"); }catch(LoginException lex){ throw new ServiceException(ErrorCodes.UNKNOWN_ERROR); } }
public void logout(ServiceContext context, User user)throws ServiceException{ try{ loginContext.logout(); UserActivityLogger.getInstance().logActivity(user.getName(), "logged out successfully"); }catch(LoginException lex){ throw new ServiceException(ErrorCodes.UNKNOWN_ERROR); } }
1,110,668
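logout() above only works because loginContext is the same instance that performed the login; a freshly constructed LoginContext has no authenticated Subject to dispose of. A compact sketch of the JAAS pairing, with a placeholder configuration name standing in for AuthConstants.AUTH_CONFIG_INDEX:

import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;

// "app-config" is a placeholder entry name, not the value of
// AuthConstants.AUTH_CONFIG_INDEX from the source.
class SessionAuthSketch {
    private LoginContext loginContext;

    void login(CallbackHandler handler) throws LoginException {
        loginContext = new LoginContext("app-config", handler);
        loginContext.login();     // populates the Subject with principals
    }

    void logout() throws LoginException {
        loginContext.logout();    // must be the instance that logged in
    }
}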
public Point closestEdgePoint(TablePane tp, Point p) { Dimension tpsize = tp.getSize(); // clip point p to inside of tp Point bcp = new Point(Math.max(0, Math.min(tpsize.width, p.x)), Math.max(0, Math.min(tpsize.height, p.y))); boolean adjustX = bcp.y != 0 && bcp.y != tpsize.height; boolean adjustY = bcp.x != 0 && bcp.x != tpsize.width; // push x-coordinate to left or right edge of tp, if y-coord is inside tp if (adjustX) { if (bcp.x < (tpsize.width/2)) { bcp.x = 0; } else { bcp.x = tpsize.width; } } // push y-coordinate to top or bottom edge of tp, if x-coord is inside tp if (adjustY) { if (bcp.y < (tpsize.height/2)) { bcp.y = 0; } else { bcp.y = tpsize.height; } } return bcp; }
public Point closestEdgePoint(TablePane tp, Point p) { Dimension tpsize = tp.getSize(); // clip point p to inside of tp Point bcp = new Point(Math.max(0, Math.min(tpsize.width, p.x)), Math.max(0, Math.min(tpsize.height, p.y))); boolean adjustX = bcp.y != 0 && bcp.y != tpsize.height; boolean adjustY = bcp.x != 0 && bcp.x != tpsize.width; // push x-coordinate to left or right edge of tp, if y-coord is inside tp if (adjustX) { if (bcp.x < (tpsize.width/2)) { bcp.x = 0; } else { bcp.x = tpsize.width; } } // push y-coordinate to top or bottom edge of tp, if x-coord is inside tp if (adjustY) { if (bcp.y < (tpsize.height/2)) { bcp.y = 0; } else { bcp.y = tpsize.height; } } return bcp; }
1,110,669
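closestEdgePoint (repeated unchanged in the rows that follow) clamps the point into the pane's rectangle and then snaps each coordinate that lands strictly inside to the nearer edge. The same logic, extracted so it runs without a TablePane:

import java.awt.Dimension;
import java.awt.Point;

// Standalone restatement of the method above; only the TablePane parameter
// is replaced by an explicit Dimension.
public class EdgeSnapSketch {
    static Point closestEdgePoint(Dimension size, Point p) {
        // clamp p into [0, width] x [0, height]
        Point bcp = new Point(Math.max(0, Math.min(size.width, p.x)),
                              Math.max(0, Math.min(size.height, p.y)));
        boolean adjustX = bcp.y != 0 && bcp.y != size.height;
        boolean adjustY = bcp.x != 0 && bcp.x != size.width;
        if (adjustX) bcp.x = (bcp.x < size.width / 2) ? 0 : size.width;
        if (adjustY) bcp.y = (bcp.y < size.height / 2) ? 0 : size.height;
        return bcp;
    }

    public static void main(String[] args) {
        // (10, 20) inside a 100x50 pane snaps to the top-left corner (0, 0)
        System.out.println(closestEdgePoint(new Dimension(100, 50), new Point(10, 20)));
    }
}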
public Point closestEdgePoint(TablePane tp, Point p) { Dimension tpsize = tp.getSize(); // clip point p to inside of tp Point bcp = new Point(Math.max(0, Math.min(tpsize.width, p.x)), Math.max(0, Math.min(tpsize.height, p.y))); boolean adjustX = bcp.y != 0 && bcp.y != tpsize.height; boolean adjustY = bcp.x != 0 && bcp.x != tpsize.width; // push x-coordinate to left or right edge of tp, if y-coord is inside tp if (adjustX) { if (bcp.x < (tpsize.width/2)) { bcp.x = 0; } else { bcp.x = tpsize.width; } } // push y-coordinate to top or bottom edge of tp, if x-coord is inside tp if (adjustY) { if (bcp.y < (tpsize.height/2)) { bcp.y = 0; } else { bcp.y = tpsize.height; } } return bcp; }
public Point closestEdgePoint(TablePane tp, Point p) { Dimension tpsize = tp.getSize(); // clip point p to inside of tp Point bcp = new Point(Math.max(0, Math.min(tpsize.width, p.x)), Math.max(0, Math.min(tpsize.height, p.y))); boolean adjustX = bcp.y != 0 && bcp.y != tpsize.height; boolean adjustY = bcp.x != 0 && bcp.x != tpsize.width; // push x-coordinate to left or right edge of tp, if y-coord is inside tp if (adjustX) { if (bcp.x < (tpsize.width/2)) { bcp.x = 0; } else { bcp.x = tpsize.width; } } // push y-coordinate to top or bottom edge of tp, if x-coord is inside tp if (adjustY) { if (bcp.y < (tpsize.height/2)) { bcp.y = 0; } else { bcp.y = tpsize.height; } } return bcp; }
1,110,670
public Point closestEdgePoint(TablePane tp, Point p) { Dimension tpsize = tp.getSize(); // clip point p to inside of tp Point bcp = new Point(Math.max(0, Math.min(tpsize.width, p.x)), Math.max(0, Math.min(tpsize.height, p.y))); boolean adjustX = bcp.y != 0 && bcp.y != tpsize.height; boolean adjustY = bcp.x != 0 && bcp.x != tpsize.width; // push x-coordinate to left or right edge of tp, if y-coord is inside tp if (adjustX) { if (bcp.x < (tpsize.width/2)) { bcp.x = 0; } else { bcp.x = tpsize.width; } } // push y-coordinate to top or bottom edge of tp, if x-coord is inside tp if (adjustY) { if (bcp.y < (tpsize.height/2)) { bcp.y = 0; } else { bcp.y = tpsize.height; } } return bcp; }
public Point closestEdgePoint(TablePane tp, Point p) { Dimension tpsize = tp.getSize(); // clip point p to inside of tp Point bcp = new Point(Math.max(0, Math.min(tpsize.width, p.x)), Math.max(0, Math.min(tpsize.height, p.y))); boolean adjustX = bcp.y != 0 && bcp.y != tpsize.height; boolean adjustY = bcp.x != 0 && bcp.x != tpsize.width; // push x-coordinate to left or right edge of tp, if y-coord is inside tp if (adjustX) { if (bcp.x < (tpsize.width/2)) { bcp.x = 0; } else { bcp.x = tpsize.width; } } // push y-coordinate to top or bottom edge of tp, if x-coord is inside tp if (adjustY) { if (bcp.y < (tpsize.height/2)) { bcp.y = 0; } else { bcp.y = tpsize.height; } } return bcp; }
1,110,671
public Point closestEdgePoint(TablePane tp, Point p) { Dimension tpsize = tp.getSize(); // clip point p to inside of tp Point bcp = new Point(Math.max(0, Math.min(tpsize.width, p.x)), Math.max(0, Math.min(tpsize.height, p.y))); boolean adjustX = bcp.y != 0 && bcp.y != tpsize.height; boolean adjustY = bcp.x != 0 && bcp.x != tpsize.width; // push x-coordinate to left or right edge of tp, if y-coord is inside tp if (adjustX) { if (bcp.x < (tpsize.width/2)) { bcp.x = 0; } else { bcp.x = tpsize.width; } } // push y-coordinate to top or bottom edge of tp, if x-coord is inside tp if (adjustY) { if (bcp.y < (tpsize.height/2)) { bcp.y = 0; } else { bcp.y = tpsize.height; } } return bcp; }
public Point closestEdgePoint(TablePane tp, Point p) { Dimension tpsize = tp.getSize(); // clip point p to inside of tp Point bcp = new Point(Math.max(0, Math.min(tpsize.width, p.x)), Math.max(0, Math.min(tpsize.height, p.y))); boolean adjustX = bcp.y != 0 && bcp.y != tpsize.height; boolean adjustY = bcp.x != 0 && bcp.x != tpsize.width; // push x-coordinate to left or right edge of tp, if y-coord is inside tp if (adjustX) { if (bcp.x < (tpsize.width/2)) { bcp.x = 0; } else { bcp.x = tpsize.width; } } // push y-coordinate to top or bottom edge of tp, if x-coord is inside tp if (adjustY) { if (bcp.y < (tpsize.height/2)) { bcp.y = 0; } else { bcp.y = tpsize.height; } } return bcp; }
1,110,672
void createNewFolder() { if ( selected != null ) { String newName = (String) JOptionPane.showInputDialog( this, "Enter name for new folder", "New folder", JOptionPane.PLAIN_MESSAGE, null, null, "New folder" ); if ( newName != null ) { PhotoFolder newFolder = PhotoFolder.create( newName, selected ); } } }
void createNewFolder() { if ( selected != null ) { String newName = (String) JOptionPane.showInputDialog( this, "Enter name for new folder", "New folder", JOptionPane.PLAIN_MESSAGE, null, null, "New folder" ); if ( newName != null ) { PhotoFolder newFolder = PhotoFolder.create( newName, selected ); } } }
1,110,674
void renameSelectedFolder() { if ( selected != null ) { String origName = selected.getName(); String newName = (String) JOptionPane.showInputDialog( this, "Enter new name", "Rename folder", JOptionPane.PLAIN_MESSAGE, null, null, origName ); if ( newName != null ) { PhotoFolder f = selected; f.setName( newName ); log.debug( "Changed name to " + newName ); } } }
void renameSelectedFolder() { if ( selected != null ) { String origName = selected.getName(); String newName = (String) JOptionPane.showInputDialog( this, "Enter new name", "Rename folder", JOptionPane.PLAIN_MESSAGE, null, null, origName ); if ( newName != null ) { PhotoFolder f = selected; f.setName( newName ); log.debug( "Changed name to " + newName ); } } }
1,110,675
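Both folder methods above funnel through the same JOptionPane call. With a null selectionValues argument it presents a free-form text field, and it returns null when the user cancels, which is exactly what the newName != null guards check. A minimal runnable form of that contract:

import javax.swing.JOptionPane;

// Demonstrates only the dialog contract the two methods rely on.
public class FolderDialogSketch {
    public static void main(String[] args) {
        String newName = (String) JOptionPane.showInputDialog(
                null,                        // no parent component in this sketch
                "Enter name for new folder", // message
                "New folder",                // title
                JOptionPane.PLAIN_MESSAGE,
                null,                        // no icon
                null,                        // null selection values: free-form text field
                "New folder");               // initial value
        if (newName != null) {               // null means the user cancelled
            System.out.println("create folder: " + newName);
        }
    }
}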