bugged
stringlengths 6
599k
| fixed
stringlengths 10
599k
| __index_level_0__
int64 0
1.13M
|
|---|---|---|
public void put(PlottableChunk[] chunks) throws SQLException, IOException { MicroSecondTimeRange stuffInDB = RangeTool.getFullTime(chunks); logger.debug("stuffInDB timeRange: " + stuffInDB); MicroSecondDate startTime = PlottableChunk.stripToDay(stuffInDB.getBeginTime()); logger.debug("start time of chunks: " + startTime); MicroSecondDate strippedEnd = PlottableChunk.stripToDay(stuffInDB.getEndTime()); logger.debug("end time of chunks: " + strippedEnd); if(!strippedEnd.equals(stuffInDB.getEndTime())) { logger.debug("!strippedEnd.equals(stuffInDB.getEndTime())"); strippedEnd = strippedEnd.add(PlottableChunk.ONE_DAY); logger.debug("strippedEnd now: " + strippedEnd); } stuffInDB = new MicroSecondTimeRange(startTime, strippedEnd); PlottableChunk[] dbChunks = get(stuffInDB, chunks[0].getChannel(), chunks[0].getPixelsPerDay()); logger.debug("got " + dbChunks.length + " chunks from stuff that was already in the database"); PlottableChunk[] everything = new PlottableChunk[chunks.length + dbChunks.length]; System.arraycopy(dbChunks, 0, everything, 0, dbChunks.length); System.arraycopy(chunks, 0, everything, dbChunks.length, chunks.length); logger.debug("Merging " + everything.length + " chunks"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } everything = ReduceTool.merge(everything); logger.debug("Breaking " + everything.length + " remaining chunks after merge into seperate chunks based on day"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } everything = breakIntoDays(everything); logger.debug("Adding " + everything.length + " chunks split on days"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } int rowsDropped = drop(stuffInDB, chunks[0].getChannel(), chunks[0].getPixelsPerDay()); logger.debug("Dropped " + rowsDropped + " rows of stuff that new data covered"); for(int i = 0; i < everything.length; i++) { logger.debug("Adding chunk " + i + ": " + everything[i]); int stmtIndex = 1; 
PlottableChunk chunk = everything[i]; synchronized(put) { try { put.setInt(stmtIndex++, chanTable.put(chunk.getChannel())); put.setInt(stmtIndex++, chunk.getPixelsPerDay()); put.setTimestamp(stmtIndex++, chunk.getBeginTime() .getTimestamp()); put.setTimestamp(stmtIndex++, chunk.getEndTime() .getTimestamp()); int[] y = chunk.getData().y_coor; put.setInt(stmtIndex++, y.length / 2); ByteArrayOutputStream out = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(out); for(int k = 0; k < y.length; k++) { dos.writeInt(y[k]); } put.setBytes(stmtIndex++, out.toByteArray()); put.executeUpdate(); } catch(SQLException ex) { logger.warn("problem with sql query: " + put); throw ex; } } } }
|
public void put(PlottableChunk[] chunks) throws SQLException, IOException { MicroSecondTimeRange stuffInDB = RangeTool.getFullTime(chunks); logger.debug("stuffInDB timeRange: " + stuffInDB); MicroSecondDate startTime = PlottableChunk.stripToDay(stuffInDB.getBeginTime()); logger.debug("start time of chunks: " + startTime); MicroSecondDate strippedEnd = PlottableChunk.stripToDay(stuffInDB.getEndTime()); logger.debug("end time of chunks: " + strippedEnd); if(!strippedEnd.equals(stuffInDB.getEndTime())) { logger.debug("!strippedEnd.equals(stuffInDB.getEndTime())"); strippedEnd = strippedEnd.add(PlottableChunk.ONE_DAY); logger.debug("strippedEnd now: " + strippedEnd); } stuffInDB = new MicroSecondTimeRange(startTime, strippedEnd); PlottableChunk[] dbChunks = get(stuffInDB, chunks[0].getChannel(), chunks[0].getPixelsPerDay()); logger.debug("got " + dbChunks.length + " chunks from stuff that was already in the database"); PlottableChunk[] everything = new PlottableChunk[chunks.length + dbChunks.length]; System.arraycopy(dbChunks, 0, everything, 0, dbChunks.length); System.arraycopy(chunks, 0, everything, dbChunks.length, chunks.length); logger.debug("Merging " + everything.length + " chunks"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } everything = ReduceTool.merge(everything); logger.debug("Breaking " + everything.length + " remaining chunks after merge into seperate chunks based on day"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } everything = breakIntoDays(everything); logger.debug("Adding " + everything.length + " chunks split on days"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } int rowsDropped = drop(stuffInDB, chunks[0].getChannel(), chunks[0].getPixelsPerDay()); logger.debug("Dropped " + rowsDropped + " rows of stuff that new data covered"); for(int i = 0; i < everything.length; i++) { logger.debug("Adding chunk " + i + ": " + everything[i]); int stmtIndex = 1; 
PlottableChunk chunk = everything[i]; synchronized(put) { try { put.setInt(stmtIndex++, chanTable.put(chunk.getChannel())); put.setInt(stmtIndex++, chunk.getPixelsPerDay()); put.setTimestamp(stmtIndex++, chunk.getBeginTime() .getTimestamp()); put.setTimestamp(stmtIndex++, chunk.getEndTime() .getTimestamp()); int[] y = chunk.getData().y_coor; put.setInt(stmtIndex++, y.length / 2); ByteArrayOutputStream out = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(out); for(int k = 0; k < y.length; k++) { dos.writeInt(y[k]); } put.setBytes(stmtIndex++, out.toByteArray()); put.executeUpdate(); } catch(SQLException ex) { logger.warn("problem with sql query: " + put); throw ex; } } } }
| 1,117,652
|
public void put(PlottableChunk[] chunks) throws SQLException, IOException { MicroSecondTimeRange stuffInDB = RangeTool.getFullTime(chunks); logger.debug("stuffInDB timeRange: " + stuffInDB); MicroSecondDate startTime = PlottableChunk.stripToDay(stuffInDB.getBeginTime()); logger.debug("start time of chunks: " + startTime); MicroSecondDate strippedEnd = PlottableChunk.stripToDay(stuffInDB.getEndTime()); logger.debug("end time of chunks: " + strippedEnd); if(!strippedEnd.equals(stuffInDB.getEndTime())) { logger.debug("!strippedEnd.equals(stuffInDB.getEndTime())"); strippedEnd = strippedEnd.add(PlottableChunk.ONE_DAY); logger.debug("strippedEnd now: " + strippedEnd); } stuffInDB = new MicroSecondTimeRange(startTime, strippedEnd); PlottableChunk[] dbChunks = get(stuffInDB, chunks[0].getChannel(), chunks[0].getPixelsPerDay()); logger.debug("got " + dbChunks.length + " chunks from stuff that was already in the database"); PlottableChunk[] everything = new PlottableChunk[chunks.length + dbChunks.length]; System.arraycopy(dbChunks, 0, everything, 0, dbChunks.length); System.arraycopy(chunks, 0, everything, dbChunks.length, chunks.length); logger.debug("Merging " + everything.length + " chunks"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } everything = ReduceTool.merge(everything); logger.debug("Breaking " + everything.length + " remaining chunks after merge into seperate chunks based on day"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } everything = breakIntoDays(everything); logger.debug("Adding " + everything.length + " chunks split on days"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } int rowsDropped = drop(stuffInDB, chunks[0].getChannel(), chunks[0].getPixelsPerDay()); logger.debug("Dropped " + rowsDropped + " rows of stuff that new data covered"); for(int i = 0; i < everything.length; i++) { logger.debug("Adding chunk " + i + ": " + everything[i]); int stmtIndex = 1; 
PlottableChunk chunk = everything[i]; synchronized(put) { try { put.setInt(stmtIndex++, chanTable.put(chunk.getChannel())); put.setInt(stmtIndex++, chunk.getPixelsPerDay()); put.setTimestamp(stmtIndex++, chunk.getBeginTime() .getTimestamp()); put.setTimestamp(stmtIndex++, chunk.getEndTime() .getTimestamp()); int[] y = chunk.getData().y_coor; put.setInt(stmtIndex++, y.length / 2); ByteArrayOutputStream out = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(out); for(int k = 0; k < y.length; k++) { dos.writeInt(y[k]); } put.setBytes(stmtIndex++, out.toByteArray()); put.executeUpdate(); } catch(SQLException ex) { logger.warn("problem with sql query: " + put); throw ex; } } } }
|
public void put(PlottableChunk[] chunks) throws SQLException, IOException { MicroSecondTimeRange stuffInDB = RangeTool.getFullTime(chunks); logger.debug("stuffInDB timeRange: " + stuffInDB); MicroSecondDate startTime = PlottableChunk.stripToDay(stuffInDB.getBeginTime()); logger.debug("start time of chunks: " + startTime); MicroSecondDate strippedEnd = PlottableChunk.stripToDay(stuffInDB.getEndTime()); logger.debug("end time of chunks: " + strippedEnd); if(!strippedEnd.equals(stuffInDB.getEndTime())) { logger.debug("!strippedEnd.equals(stuffInDB.getEndTime())"); strippedEnd = strippedEnd.add(PlottableChunk.ONE_DAY); logger.debug("strippedEnd now: " + strippedEnd); } stuffInDB = new MicroSecondTimeRange(startTime, strippedEnd); PlottableChunk[] dbChunks = get(stuffInDB, chunks[0].getChannel(), chunks[0].getPixelsPerDay()); logger.debug("got " + dbChunks.length + " chunks from stuff that was already in the database"); PlottableChunk[] everything = new PlottableChunk[chunks.length + dbChunks.length]; System.arraycopy(dbChunks, 0, everything, 0, dbChunks.length); System.arraycopy(chunks, 0, everything, dbChunks.length, chunks.length); logger.debug("Merging " + everything.length + " chunks"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } everything = ReduceTool.merge(everything); logger.debug("Breaking " + everything.length + " remaining chunks after merge into seperate chunks based on day"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } everything = breakIntoDays(everything); logger.debug("Adding " + everything.length + " chunks split on days"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } int rowsDropped = drop(stuffInDB, chunks[0].getChannel(), chunks[0].getPixelsPerDay()); logger.debug("Dropped " + rowsDropped + " rows of stuff that new data covered"); for(int i = 0; i < everything.length; i++) { logger.debug("Adding chunk " + i + ": " + everything[i]); int stmtIndex = 1; 
PlottableChunk chunk = everything[i]; synchronized(put) { try { put.setInt(stmtIndex++, chanTable.put(chunk.getChannel())); put.setInt(stmtIndex++, chunk.getPixelsPerDay()); put.setTimestamp(stmtIndex++, chunk.getBeginTime() .getTimestamp()); put.setTimestamp(stmtIndex++, chunk.getEndTime() .getTimestamp()); int[] y = chunk.getData().y_coor; put.setInt(stmtIndex++, y.length / 2); ByteArrayOutputStream out = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(out); for(int k = 0; k < y.length; k++) { dos.writeInt(y[k]); } put.setBytes(stmtIndex++, out.toByteArray()); put.executeUpdate(); } catch(SQLException ex) { logger.warn("problem with sql query: " + put); throw ex; } } } }
| 1,117,653
|
public void put(PlottableChunk[] chunks) throws SQLException, IOException { MicroSecondTimeRange stuffInDB = RangeTool.getFullTime(chunks); logger.debug("stuffInDB timeRange: " + stuffInDB); MicroSecondDate startTime = PlottableChunk.stripToDay(stuffInDB.getBeginTime()); logger.debug("start time of chunks: " + startTime); MicroSecondDate strippedEnd = PlottableChunk.stripToDay(stuffInDB.getEndTime()); logger.debug("end time of chunks: " + strippedEnd); if(!strippedEnd.equals(stuffInDB.getEndTime())) { logger.debug("!strippedEnd.equals(stuffInDB.getEndTime())"); strippedEnd = strippedEnd.add(PlottableChunk.ONE_DAY); logger.debug("strippedEnd now: " + strippedEnd); } stuffInDB = new MicroSecondTimeRange(startTime, strippedEnd); PlottableChunk[] dbChunks = get(stuffInDB, chunks[0].getChannel(), chunks[0].getPixelsPerDay()); logger.debug("got " + dbChunks.length + " chunks from stuff that was already in the database"); PlottableChunk[] everything = new PlottableChunk[chunks.length + dbChunks.length]; System.arraycopy(dbChunks, 0, everything, 0, dbChunks.length); System.arraycopy(chunks, 0, everything, dbChunks.length, chunks.length); logger.debug("Merging " + everything.length + " chunks"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } everything = ReduceTool.merge(everything); logger.debug("Breaking " + everything.length + " remaining chunks after merge into seperate chunks based on day"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } everything = breakIntoDays(everything); logger.debug("Adding " + everything.length + " chunks split on days"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } int rowsDropped = drop(stuffInDB, chunks[0].getChannel(), chunks[0].getPixelsPerDay()); logger.debug("Dropped " + rowsDropped + " rows of stuff that new data covered"); for(int i = 0; i < everything.length; i++) { logger.debug("Adding chunk " + i + ": " + everything[i]); int stmtIndex = 1; 
PlottableChunk chunk = everything[i]; synchronized(put) { try { put.setInt(stmtIndex++, chanTable.put(chunk.getChannel())); put.setInt(stmtIndex++, chunk.getPixelsPerDay()); put.setTimestamp(stmtIndex++, chunk.getBeginTime() .getTimestamp()); put.setTimestamp(stmtIndex++, chunk.getEndTime() .getTimestamp()); int[] y = chunk.getData().y_coor; put.setInt(stmtIndex++, y.length / 2); ByteArrayOutputStream out = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(out); for(int k = 0; k < y.length; k++) { dos.writeInt(y[k]); } put.setBytes(stmtIndex++, out.toByteArray()); put.executeUpdate(); } catch(SQLException ex) { logger.warn("problem with sql query: " + put); throw ex; } } } }
|
public void put(PlottableChunk[] chunks) throws SQLException, IOException { MicroSecondTimeRange stuffInDB = RangeTool.getFullTime(chunks); logger.debug("stuffInDB timeRange: " + stuffInDB); MicroSecondDate startTime = PlottableChunk.stripToDay(stuffInDB.getBeginTime()); logger.debug("start time of chunks: " + startTime); MicroSecondDate strippedEnd = PlottableChunk.stripToDay(stuffInDB.getEndTime()); logger.debug("end time of chunks: " + strippedEnd); if(!strippedEnd.equals(stuffInDB.getEndTime())) { logger.debug("!strippedEnd.equals(stuffInDB.getEndTime())"); strippedEnd = strippedEnd.add(PlottableChunk.ONE_DAY); logger.debug("strippedEnd now: " + strippedEnd); } stuffInDB = new MicroSecondTimeRange(startTime, strippedEnd); PlottableChunk[] dbChunks = get(stuffInDB, chunks[0].getChannel(), chunks[0].getPixelsPerDay()); logger.debug("got " + dbChunks.length + " chunks from stuff that was already in the database"); PlottableChunk[] everything = new PlottableChunk[chunks.length + dbChunks.length]; System.arraycopy(dbChunks, 0, everything, 0, dbChunks.length); System.arraycopy(chunks, 0, everything, dbChunks.length, chunks.length); logger.debug("Merging " + everything.length + " chunks"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } everything = ReduceTool.merge(everything); logger.debug("Breaking " + everything.length + " remaining chunks after merge into seperate chunks based on day"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } everything = breakIntoDays(everything); logger.debug("Adding " + everything.length + " chunks split on days"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } int rowsDropped = drop(stuffInDB, chunks[0].getChannel(), chunks[0].getPixelsPerDay()); logger.debug("Dropped " + rowsDropped + " rows of stuff that new data covered"); for(int i = 0; i < everything.length; i++) { logger.debug("Adding chunk " + i + ": " + everything[i]); int stmtIndex = 1; 
PlottableChunk chunk = everything[i]; synchronized(put) { try { put.setInt(stmtIndex++, chanTable.put(chunk.getChannel())); put.setInt(stmtIndex++, chunk.getPixelsPerDay()); put.setTimestamp(stmtIndex++, chunk.getBeginTime() .getTimestamp()); put.setTimestamp(stmtIndex++, chunk.getEndTime() .getTimestamp()); int[] y = chunk.getData().y_coor; put.setInt(stmtIndex++, y.length / 2); ByteArrayOutputStream out = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(out); for(int k = 0; k < y.length; k++) { dos.writeInt(y[k]); } put.setBytes(stmtIndex++, out.toByteArray()); put.executeUpdate(); } catch(SQLException ex) { logger.warn("problem with sql query: " + put); throw ex; } } } }
| 1,117,654
|
public void put(PlottableChunk[] chunks) throws SQLException, IOException { MicroSecondTimeRange stuffInDB = RangeTool.getFullTime(chunks); logger.debug("stuffInDB timeRange: " + stuffInDB); MicroSecondDate startTime = PlottableChunk.stripToDay(stuffInDB.getBeginTime()); logger.debug("start time of chunks: " + startTime); MicroSecondDate strippedEnd = PlottableChunk.stripToDay(stuffInDB.getEndTime()); logger.debug("end time of chunks: " + strippedEnd); if(!strippedEnd.equals(stuffInDB.getEndTime())) { logger.debug("!strippedEnd.equals(stuffInDB.getEndTime())"); strippedEnd = strippedEnd.add(PlottableChunk.ONE_DAY); logger.debug("strippedEnd now: " + strippedEnd); } stuffInDB = new MicroSecondTimeRange(startTime, strippedEnd); PlottableChunk[] dbChunks = get(stuffInDB, chunks[0].getChannel(), chunks[0].getPixelsPerDay()); logger.debug("got " + dbChunks.length + " chunks from stuff that was already in the database"); PlottableChunk[] everything = new PlottableChunk[chunks.length + dbChunks.length]; System.arraycopy(dbChunks, 0, everything, 0, dbChunks.length); System.arraycopy(chunks, 0, everything, dbChunks.length, chunks.length); logger.debug("Merging " + everything.length + " chunks"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } everything = ReduceTool.merge(everything); logger.debug("Breaking " + everything.length + " remaining chunks after merge into seperate chunks based on day"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } everything = breakIntoDays(everything); logger.debug("Adding " + everything.length + " chunks split on days"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } int rowsDropped = drop(stuffInDB, chunks[0].getChannel(), chunks[0].getPixelsPerDay()); logger.debug("Dropped " + rowsDropped + " rows of stuff that new data covered"); for(int i = 0; i < everything.length; i++) { logger.debug("Adding chunk " + i + ": " + everything[i]); int stmtIndex = 1; 
PlottableChunk chunk = everything[i]; synchronized(put) { try { put.setInt(stmtIndex++, chanTable.put(chunk.getChannel())); put.setInt(stmtIndex++, chunk.getPixelsPerDay()); put.setTimestamp(stmtIndex++, chunk.getBeginTime() .getTimestamp()); put.setTimestamp(stmtIndex++, chunk.getEndTime() .getTimestamp()); int[] y = chunk.getData().y_coor; put.setInt(stmtIndex++, y.length / 2); ByteArrayOutputStream out = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(out); for(int k = 0; k < y.length; k++) { dos.writeInt(y[k]); } put.setBytes(stmtIndex++, out.toByteArray()); put.executeUpdate(); } catch(SQLException ex) { logger.warn("problem with sql query: " + put); throw ex; } } } }
|
public void put(PlottableChunk[] chunks) throws SQLException, IOException { MicroSecondTimeRange stuffInDB = RangeTool.getFullTime(chunks); logger.debug("stuffInDB timeRange: " + stuffInDB); MicroSecondDate startTime = PlottableChunk.stripToDay(stuffInDB.getBeginTime()); logger.debug("start time of chunks: " + startTime); MicroSecondDate strippedEnd = PlottableChunk.stripToDay(stuffInDB.getEndTime()); logger.debug("end time of chunks: " + strippedEnd); if(!strippedEnd.equals(stuffInDB.getEndTime())) { logger.debug("!strippedEnd.equals(stuffInDB.getEndTime())"); strippedEnd = strippedEnd.add(PlottableChunk.ONE_DAY); logger.debug("strippedEnd now: " + strippedEnd); } stuffInDB = new MicroSecondTimeRange(startTime, strippedEnd); PlottableChunk[] dbChunks = get(stuffInDB, chunks[0].getChannel(), chunks[0].getPixelsPerDay()); logger.debug("got " + dbChunks.length + " chunks from stuff that was already in the database"); PlottableChunk[] everything = new PlottableChunk[chunks.length + dbChunks.length]; System.arraycopy(dbChunks, 0, everything, 0, dbChunks.length); System.arraycopy(chunks, 0, everything, dbChunks.length, chunks.length); logger.debug("Merging " + everything.length + " chunks"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } everything = ReduceTool.merge(everything); logger.debug("Breaking " + everything.length + " remaining chunks after merge into seperate chunks based on day"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } everything = breakIntoDays(everything); logger.debug("Adding " + everything.length + " chunks split on days"); for(int i = 0; i < everything.length; i++) { logger.debug(everything[i]); } int rowsDropped = drop(stuffInDB, chunks[0].getChannel(), chunks[0].getPixelsPerDay()); logger.debug("Dropped " + rowsDropped + " rows of stuff that new data covered"); for(int i = 0; i < everything.length; i++) { logger.debug("putting chunk " + i + ": " + everything[i]); int stmtIndex = 1; 
PlottableChunk chunk = everything[i]; synchronized(put) { try { put.setInt(stmtIndex++, chanTable.put(chunk.getChannel())); put.setInt(stmtIndex++, chunk.getPixelsPerDay()); put.setTimestamp(stmtIndex++, chunk.getBeginTime() .getTimestamp()); put.setTimestamp(stmtIndex++, chunk.getEndTime() .getTimestamp()); int[] y = chunk.getData().y_coor; put.setInt(stmtIndex++, y.length / 2); ByteArrayOutputStream out = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(out); for(int k = 0; k < y.length; k++) { dos.writeInt(y[k]); } put.setBytes(stmtIndex++, out.toByteArray()); put.executeUpdate(); } catch(SQLException ex) { logger.warn("problem with sql query: " + put); throw ex; } } } }
| 1,117,655
|
public void copyTo(BwEvent ev) { super.copyTo(ev); ev.setName(getName()); ev.setSummary(getSummary()); ev.setDescription(getDescription()); ev.setDtstart(getDtstart()); ev.setDtend(getDtend()); ev.setEndType(getEndType()); ev.setDuration(getDuration()); ev.setLink(getLink()); ev.setDeleted(getDeleted()); ev.setStatus(getStatus()); ev.setCost(getCost()); BwOrganizer org = getOrganizer(); if (org != null) { org = (BwOrganizer)org.clone(); } ev.setOrganizer(org); ev.setDtstamp(getDtstamp()); ev.setLastmod(getLastmod()); ev.setCreated(getCreated()); ev.setPriority(getPriority()); ev.setSequence(getSequence()); BwSponsor sp = getSponsor(); if (sp != null) { sp = (BwSponsor)sp.clone(); } ev.setSponsor(sp); BwLocation loc = getLocation(); if (loc != null) { loc = (BwLocation)loc.clone(); } ev.setLocation(loc); ev.setGuid(getGuid()); ev.setTransparency(getTransparency()); /* categories */ Iterator it = iterateCategories(); TreeSet cs = new TreeSet(); while (it.hasNext()) { BwCategory c = (BwCategory)it.next(); cs.add((BwCategory)c.clone()); } ev.setCategories(cs); ev.setAttendees(cloneAttendees()); ev.setRecurring(getRecurring()); /* This ought to be cloned but it brings with it a whole set of instances. Leave for the moment */ ev.setRecurrence(getRecurrence()); }
|
public void copyTo(BwEvent ev) { super.copyTo(ev); ev.setName(getName()); ev.setSummary(getSummary()); ev.setDescription(getDescription()); ev.setDtstart(getDtstart()); ev.setDtend(getDtend()); ev.setEndType(getEndType()); ev.setDuration(getDuration()); ev.setLink(getLink()); ev.setDeleted(getDeleted()); ev.setStatus(getStatus()); ev.setCost(getCost()); BwOrganizer org = getOrganizer(); if (org != null) { org = (BwOrganizer)org.clone(); } ev.setOrganizer(org); ev.setDtstamp(getDtstamp()); ev.setLastmod(getLastmod()); ev.setCreated(getCreated()); ev.setPriority(getPriority()); ev.setSequence(getSequence()); BwSponsor sp = getSponsor(); if (sp != null) { sp = (BwSponsor)sp.clone(); } ev.setSponsor(sp); BwLocation loc = getLocation(); if (loc != null) { loc = (BwLocation)loc.clone(); } ev.setLocation(loc); ev.setGuid(getGuid()); ev.setTransparency(getTransparency()); /* categories */ Iterator it = iterateCategories(); TreeSet cs = new TreeSet(); while (it.hasNext()) { BwCategory c = (BwCategory)it.next(); cs.add((BwCategory)c.clone()); } ev.setCategories(cs); ev.setAttendees(cloneAttendees()); ev.setRecurring(getRecurring()); /* This ought to be cloned but it brings with it a whole set of instances. Leave for the moment */ ev.setRecurrence((BwRecurrence)getRecurrence().clone()); }
| 1,117,657
|
public void copyTo(BwShareableContainedDbentity val) { super.copyTo(val); val.setCalendar((BwCalendar)getCalendar()); }
|
public void copyTo(BwShareableContainedDbentity val) { super.copyTo(val); val.setCalendar((BwCalendar)getCalendar().clone()); }
| 1,117,658
|
public Object clone() { BwOrganizer nobj = new BwOrganizer(getId(), getCn(), getDir(), getLanguage(), getSentBy(), getOrganizerUri()); nobj.setId(getId()); return nobj; }
|
public Object clone() { BwOrganizer nobj = new BwOrganizer((BwUser)getOwner().clone(), getPublick(), getCn(), getDir(), getLanguage(), getSentBy(), getOrganizerUri()); nobj.setId(getId()); return nobj; }
| 1,117,659
|
protected String calculateURL() throws JspException { String urlStr = super.calculateURL(); if (!PortletServlet.isPortletRequest(pageContext.getRequest())) { return urlStr; } try { URL url = new URL(urlStr); String path = url.getPath(); if (path.endsWith(".rdo")) { setRenderURL("true"); } else if (path.endsWith(".rdo")) { setActionURL("true"); } /* We want a context relative url */ urlStr = url.getFile(); //System.out.println("LLLLLLLLLLLLLLLLLLUrlStr = " + urlStr); /* Drop the context */ int pos = urlStr.indexOf('/'); if (pos > 0) { urlStr = urlStr.substring(pos); } urlStr = TagsSupport.getURL(pageContext, urlStr, urlType); /* remove embedded anchor because calendar xsl stylesheet * adds extra parameters later during transformation */ pos = urlStr.indexOf('#'); if (pos > -1) { urlStr = urlStr.substring(0, pos); } /* Remove bedework dummy request parameter - * it's an encoded form of ?b=de */ urlStr = urlStr.replaceAll(bedeworkDummyPar, ""); //Generate valid xml markup for transformationthrow new urlStr = urlStr.replaceAll("&", "&"); //System.out.println("LLLLLLLLLLLLLLLLLLLLLLLUrlStr = " + urlStr); } catch (MalformedURLException mue) { throw new JspException(mue); } return urlStr; }
|
protected String calculateURL() throws JspException { String urlStr = super.calculateURL(); if (!PortletServlet.isPortletRequest(pageContext.getRequest())) { return urlStr; } try { URL url = new URL(urlStr); String path = url.getPath(); if (path.endsWith(".rdo")) { setRenderURL("true"); } else if (path.endsWith(".do")) { setActionURL("true"); } /* We want a context relative url */ urlStr = url.getFile(); //System.out.println("LLLLLLLLLLLLLLLLLLUrlStr = " + urlStr); /* Drop the context */ int pos = urlStr.indexOf('/'); if (pos > 0) { urlStr = urlStr.substring(pos); } urlStr = TagsSupport.getURL(pageContext, urlStr, urlType); /* remove embedded anchor because calendar xsl stylesheet * adds extra parameters later during transformation */ pos = urlStr.indexOf('#'); if (pos > -1) { urlStr = urlStr.substring(0, pos); } /* Remove bedework dummy request parameter - * it's an encoded form of ?b=de */ urlStr = urlStr.replaceAll(bedeworkDummyPar, ""); //Generate valid xml markup for transformationthrow new urlStr = urlStr.replaceAll("&", "&"); //System.out.println("LLLLLLLLLLLLLLLLLLLLLLLUrlStr = " + urlStr); } catch (MalformedURLException mue) { throw new JspException(mue); } return urlStr; }
| 1,117,660
|
public static LocalSeismogramImpl toFissures(DataRecord seed) throws SeedFormatException { DataHeader header = seed.getHeader(); edu.iris.Fissures.Time time = new edu.iris.Fissures.Time(header.getISOStartTime(), -1); // the network id isn't correct, but network start is not stored // in miniseed ChannelId channelId = new ChannelId(new NetworkId(header.getNetworkCode().trim(), time), header.getStationIdentifier().trim(), header.getLocationIdentifier().trim(), header.getChannelIdentifier().trim(), time); String seisId = channelId.network_id.network_code+":" +channelId.station_code+":" +channelId.site_code+":" +channelId.channel_code+":" +header.getISOStartTime(); Property[] props = new Property[1]; props[0] = new Property("Name", seisId); Blockette[] blocketts = seed.getBlockettes(100); int numPerSampling; TimeInterval timeInterval; if (blocketts.length != 0) { Blockette100 b100 = (Blockette100)blocketts[0]; float f = b100.getActualSampleRate(); numPerSampling = 1; timeInterval = new TimeInterval(1/f, UnitImpl.SECOND); } else { if (header.getSampleRateFactor() > 0) { numPerSampling = header.getSampleRateFactor(); timeInterval = new TimeInterval(1, UnitImpl.SECOND); if (header.getSampleRateMultiplier() > 0) { numPerSampling *= header.getSampleRateMultiplier(); } else { timeInterval = (TimeInterval)timeInterval.multiplyBy(-1 * header.getSampleRateMultiplier()); } } else { numPerSampling = 1; timeInterval = new TimeInterval(-1 * header.getSampleRateFactor(), UnitImpl.SECOND); if (header.getSampleRateMultiplier() > 0) { numPerSampling *= header.getSampleRateMultiplier(); } else { timeInterval = (TimeInterval)timeInterval.multiplyBy(-1 * header.getSampleRateMultiplier()); } } } SamplingImpl sampling = new SamplingImpl(numPerSampling, timeInterval); TimeSeriesDataSel bits = convertData(seed); return new LocalSeismogramImpl(seisId, props, time, header.getNumSamples(), sampling, UnitImpl.COUNT, channelId, new edu.iris.Fissures.IfParameterMgr.ParameterRef[0], new 
QuantityImpl[0], new SamplingImpl[0], bits); }
|
public static LocalSeismogramImpl toFissures(DataRecord seed) throws SeedFormatException { DataHeader header = seed.getHeader(); edu.iris.Fissures.Time time = new edu.iris.Fissures.Time(header.getISOStartTime(), -1); // the network id isn't correct, but network start is not stored // in miniseed ChannelId channelId = new ChannelId(new NetworkId(header.getNetworkCode().trim(), time), header.getStationIdentifier().trim(), header.getLocationIdentifier().trim(), header.getChannelIdentifier().trim(), time); String seisId = channelId.network_id.network_code+":" +channelId.station_code+":" +channelId.site_code+":" +channelId.channel_code+":" +header.getISOStartTime(); Property[] props = new Property[1]; props[0] = new Property("Name", seisId); Blockette[] blocketts = seed.getBlockettes(100); int numPerSampling; TimeInterval timeInterval; if (blocketts.length != 0) { Blockette100 b100 = (Blockette100)blocketts[0]; float f = b100.getActualSampleRate(); numPerSampling = 1; timeInterval = new TimeInterval(1/f, UnitImpl.SECOND); } else { if (header.getSampleRateFactor() > 0) { numPerSampling = header.getSampleRateFactor(); timeInterval = new TimeInterval(1, UnitImpl.SECOND); if (header.getSampleRateMultiplier() > 0) { numPerSampling *= header.getSampleRateMultiplier(); } else { timeInterval = (TimeInterval)timeInterval.multiplyBy(-1 * header.getSampleRateMultiplier()); } } else { numPerSampling = 1; timeInterval = new TimeInterval(-1 * header.getSampleRateFactor(), UnitImpl.SECOND); if (header.getSampleRateMultiplier() > 0) { numPerSampling *= header.getSampleRateMultiplier(); } else { timeInterval = (TimeInterval)timeInterval.multiplyBy(-1 * header.getSampleRateMultiplier()); } } } SamplingImpl sampling = new SamplingImpl(numPerSampling, timeInterval); TimeSeriesDataSel bits = convertData(seed); return new LocalSeismogramImpl(seisId, props, time, header.getNumSamples(), sampling, UnitImpl.COUNT, channelId, new edu.iris.Fissures.IfParameterMgr.ParameterRef[0], new 
QuantityImpl[0], new SamplingImpl[0], bits); }
| 1,117,661
|
public static LocalSeismogramImpl toFissures(DataRecord seed) throws SeedFormatException { DataHeader header = seed.getHeader(); edu.iris.Fissures.Time time = new edu.iris.Fissures.Time(header.getISOStartTime(), -1); // the network id isn't correct, but network start is not stored // in miniseed ChannelId channelId = new ChannelId(new NetworkId(header.getNetworkCode().trim(), time), header.getStationIdentifier().trim(), header.getLocationIdentifier().trim(), header.getChannelIdentifier().trim(), time); String seisId = channelId.network_id.network_code+":" +channelId.station_code+":" +channelId.site_code+":" +channelId.channel_code+":" +header.getISOStartTime(); Property[] props = new Property[1]; props[0] = new Property("Name", seisId); Blockette[] blocketts = seed.getBlockettes(100); int numPerSampling; TimeInterval timeInterval; if (blocketts.length != 0) { Blockette100 b100 = (Blockette100)blocketts[0]; float f = b100.getActualSampleRate(); numPerSampling = 1; timeInterval = new TimeInterval(1/f, UnitImpl.SECOND); } else { if (header.getSampleRateFactor() > 0) { numPerSampling = header.getSampleRateFactor(); timeInterval = new TimeInterval(1, UnitImpl.SECOND); if (header.getSampleRateMultiplier() > 0) { numPerSampling *= header.getSampleRateMultiplier(); } else { timeInterval = (TimeInterval)timeInterval.multiplyBy(-1 * header.getSampleRateMultiplier()); } } else { numPerSampling = 1; timeInterval = new TimeInterval(-1 * header.getSampleRateFactor(), UnitImpl.SECOND); if (header.getSampleRateMultiplier() > 0) { numPerSampling *= header.getSampleRateMultiplier(); } else { timeInterval = (TimeInterval)timeInterval.multiplyBy(-1 * header.getSampleRateMultiplier()); } } } SamplingImpl sampling = new SamplingImpl(numPerSampling, timeInterval); TimeSeriesDataSel bits = convertData(seed); return new LocalSeismogramImpl(seisId, props, time, header.getNumSamples(), sampling, UnitImpl.COUNT, channelId, new edu.iris.Fissures.IfParameterMgr.ParameterRef[0], new 
QuantityImpl[0], new SamplingImpl[0], bits); }
|
public static LocalSeismogramImpl toFissures(DataRecord seed) throws SeedFormatException { DataHeader header = seed.getHeader(); edu.iris.Fissures.Time time = new edu.iris.Fissures.Time(header.getISOStartTime(), -1); // the network id isn't correct, but network start is not stored // in miniseed ChannelId channelId = new ChannelId(new NetworkId(header.getNetworkCode().trim(), time), header.getStationIdentifier().trim(), header.getLocationIdentifier().trim(), header.getChannelIdentifier().trim(), time); String seisId = channelId.network_id.network_code+":" +channelId.station_code+":" +channelId.site_code+":" +channelId.channel_code+":" +header.getISOStartTime(); Property[] props = new Property[1]; props[0] = new Property("Name", seisId); Blockette[] blocketts = seed.getBlockettes(100); int numPerSampling; TimeInterval timeInterval; if (blocketts.length != 0) { Blockette100 b100 = (Blockette100)blocketts[0]; float f = b100.getActualSampleRate(); numPerSampling = 1; timeInterval = new TimeInterval(1/f, UnitImpl.SECOND); } else { if (header.getSampleRateFactor() > 0) { numPerSampling = header.getSampleRateFactor(); timeInterval = new TimeInterval(1, UnitImpl.SECOND); if (header.getSampleRateMultiplier() > 0) { numPerSampling *= header.getSampleRateMultiplier(); } else { timeInterval = (TimeInterval)timeInterval.multiplyBy(-1 * header.getSampleRateMultiplier()); } } else { numPerSampling = 1; timeInterval = new TimeInterval(-1 * header.getSampleRateFactor(), UnitImpl.SECOND); if (header.getSampleRateMultiplier() > 0) { numPerSampling *= header.getSampleRateMultiplier(); } else { timeInterval = (TimeInterval)timeInterval.multiplyBy(-1 * header.getSampleRateMultiplier()); } } } SamplingImpl sampling = new SamplingImpl(numPerSampling, timeInterval); TimeSeriesDataSel bits = convertData(seed); return new LocalSeismogramImpl(seisId, props, time, header.getNumSamples(), sampling, UnitImpl.COUNT, channelId, new edu.iris.Fissures.IfParameterMgr.ParameterRef[0], new 
QuantityImpl[0], new SamplingImpl[0], bits); }
| 1,117,662
|
public boolean isError() { return (error == null); }
|
public boolean isError() { return (error != null); }
| 1,117,663
|
public static SacTimeSeries getSAC(LocalSeismogramImpl seis, Channel channel, Origin origin) throws CodecException { SacTimeSeries sac = getSAC(seis); addChannel(sac, channel); addOrigin(sac, origin); return sac; }
|
public static SacTimeSeries getSAC(LocalSeismogramImpl seis, Channel channel, Origin origin) throws CodecException { SacTimeSeries sac = getSAC(seis); addChannel(sac, channel); addOrigin(sac, origin); return sac; }
| 1,117,664
|
public static SacTimeSeries getSAC(LocalSeismogramImpl seis, Channel channel, Origin origin) throws CodecException { SacTimeSeries sac = getSAC(seis); addChannel(sac, channel); addOrigin(sac, origin); return sac; }
|
public static SacTimeSeries getSAC(LocalSeismogramImpl seis, Channel channel, Origin origin) throws CodecException { SacTimeSeries sac = getSAC(seis); addChannel(sac, channel); addOrigin(sac, origin); return sac; }
| 1,117,665
|
public void write(File file) throws FileNotFoundException, IOException { DataOutputStream dos = new DataOutputStream( new BufferedOutputStream( new FileOutputStream(file))); writeHeader(dos); writeData(dos); dos.close(); }
|
public void write(File file) throws FileNotFoundException, IOException { DataOutputStream dos = new DataOutputStream( new BufferedOutputStream( new FileOutputStream(file))); writeHeader(dos); writeData(dos); dos.close(); }
| 1,117,666
|
TimezonesImpl(boolean debug, BwUser user, RestoreIntf ri) throws CalFacadeException { this.debug = debug; this.user = user; this.ri = ri; // Force fetch of timezones //lookup("not-a-timezone"); }
|
TimezonesImpl(boolean debug, BwUser user, RestoreIntf ri) throws CalFacadeException { super(debug); this.user = user; this.ri = ri; // Force fetch of timezones //lookup("not-a-timezone"); }
| 1,117,667
|
public VTimeZone findTimeZone(final String id, BwUser owner) throws CalFacadeException { if (debug) { trace("find timezone with id " + id + " for owner " + owner); } TimezoneInfo tzinfo = lookup(id); if ((tzinfo != null) && (tzinfo.vtz != null)) { return tzinfo.vtz; } /* Do we need to look up anything? VTimeZone vTimeZone = cal.getTimeZone(id, owner); if (vTimeZone == null) { return null; } tzinfo = new TimezoneInfo(); tzinfo.vtz = vTimeZone; tzinfo.tz = new TimeZone(vTimeZone); timezones.put(id, tzinfo); return vTimeZone; */ return null; }
|
public VTimeZone findTimeZone(final String id, BwUser owner) throws CalFacadeException { if (debug) { trace("find timezone with id " + id + " for owner " + owner); } TimezoneInfo tzinfo = lookup(id); if ((tzinfo != null) && (tzinfo.vtz != null)) { return tzinfo.vtz; } /* Do we need to look up anything? VTimeZone vTimeZone = cal.getTimeZone(id, owner); if (vTimeZone == null) { return null; } tzinfo = new TimezoneInfo(); tzinfo.vtz = vTimeZone; tzinfo.tz = new TimeZone(vTimeZone); timezones.put(id, tzinfo); return vTimeZone; */ return null; }
| 1,117,668
|
public TimeZone getTimeZone(final String id) throws CalFacadeException { TimezoneInfo tzinfo = lookup(id); /* Do we need to look up anything? if (tzinfo == null) { VTimeZone vTimeZone = cal.getTimeZone(id, null); if (vTimeZone == null) { return null; } tzinfo = new TimezoneInfo(); tzinfo.vtz = vTimeZone; tzinfo.tz = new TimeZone(vTimeZone); timezones.put(id, tzinfo); } */ return tzinfo.tz; }
|
public TimeZone getTimeZone(final String id) throws CalFacadeException { TimezoneInfo tzinfo = lookup(id); /* Do we need to look up anything? if (tzinfo == null) { VTimeZone vTimeZone = cal.getTimeZone(id, null); if (vTimeZone == null) { return null; } tzinfo = new TimezoneInfo(); tzinfo.vtz = vTimeZone; tzinfo.tz = new TimeZone(vTimeZone); timezones.put(id, tzinfo); } */ return tzinfo.getTz(); }
| 1,117,669
|
private TimezoneInfo lookup(String id) throws CalFacadeException { TimezoneInfo tzinfo; /* if (!systemTimezonesInitialised) { // First call (after reinit) synchronized (this) { if (!systemTimezonesInitialised) { Collection tzs = cal.getPublicTimeZones(); Iterator it = tzs.iterator(); while (it.hasNext()) { BwTimeZone btz = (BwTimeZone)it.next(); Calendar cal = IcalTranslator.getCalendar(btz.getVtimezone()); VTimeZone vtz = (VTimeZone)cal.getComponents().getComponent(Component.VTIMEZONE); if (vtz == null) { throw new CalFacadeException("Incorrectly stored timezone"); } tzinfo = new TimezoneInfo(); tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); systemTimezones.put(btz.getTzid(), tzinfo); } systemTimezonesInitialised = true; } } } */ tzinfo = (TimezoneInfo)systemTimezones.get(id); if (tzinfo != null) { tzinfo.publick = true; } else { tzinfo = (TimezoneInfo)timezones.get(id); } return tzinfo; }
|
private TimezoneInfo lookup(String id) throws CalFacadeException { TimezoneInfo tzinfo; /* if (!systemTimezonesInitialised) { // First call (after reinit) synchronized (this) { if (!systemTimezonesInitialised) { Collection tzs = cal.getPublicTimeZones(); Iterator it = tzs.iterator(); while (it.hasNext()) { BwTimeZone btz = (BwTimeZone)it.next(); Calendar cal = IcalTranslator.getCalendar(btz.getVtimezone()); VTimeZone vtz = (VTimeZone)cal.getComponents().getComponent(Component.VTIMEZONE); if (vtz == null) { throw new CalFacadeException("Incorrectly stored timezone"); } tzinfo = new TimezoneInfo(); tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); systemTimezones.put(btz.getTzid(), tzinfo); } systemTimezonesInitialised = true; } } } */ if (tzinfo != null) { tzinfo.publick = true; } else { tzinfo = (TimezoneInfo)timezones.get(id); } return tzinfo; }
| 1,117,670
|
private TimezoneInfo lookup(String id) throws CalFacadeException { TimezoneInfo tzinfo; /* if (!systemTimezonesInitialised) { // First call (after reinit) synchronized (this) { if (!systemTimezonesInitialised) { Collection tzs = cal.getPublicTimeZones(); Iterator it = tzs.iterator(); while (it.hasNext()) { BwTimeZone btz = (BwTimeZone)it.next(); Calendar cal = IcalTranslator.getCalendar(btz.getVtimezone()); VTimeZone vtz = (VTimeZone)cal.getComponents().getComponent(Component.VTIMEZONE); if (vtz == null) { throw new CalFacadeException("Incorrectly stored timezone"); } tzinfo = new TimezoneInfo(); tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); systemTimezones.put(btz.getTzid(), tzinfo); } systemTimezonesInitialised = true; } } } */ tzinfo = (TimezoneInfo)systemTimezones.get(id); if (tzinfo != null) { tzinfo.publick = true; } else { tzinfo = (TimezoneInfo)timezones.get(id); } return tzinfo; }
|
private TimezoneInfo lookup(String id) throws CalFacadeException { TimezoneInfo tzinfo; /* if (!systemTimezonesInitialised) { // First call (after reinit) synchronized (this) { if (!systemTimezonesInitialised) { Collection tzs = cal.getPublicTimeZones(); Iterator it = tzs.iterator(); while (it.hasNext()) { BwTimeZone btz = (BwTimeZone)it.next(); Calendar cal = IcalTranslator.getCalendar(btz.getVtimezone()); VTimeZone vtz = (VTimeZone)cal.getComponents().getComponent(Component.VTIMEZONE); if (vtz == null) { throw new CalFacadeException("Incorrectly stored timezone"); } tzinfo = new TimezoneInfo(); tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); systemTimezones.put(btz.getTzid(), tzinfo); } systemTimezonesInitialised = true; } } } */ tzinfo = (TimezoneInfo)systemTimezones.get(id); if (tzinfo != null) { tzinfo.publick = true; } else { tzinfo = (TimezoneInfo)timezones.get(id); } return tzinfo; }
| 1,117,671
|
private TimezoneInfo lookup(String id) throws CalFacadeException { TimezoneInfo tzinfo; /* if (!systemTimezonesInitialised) { // First call (after reinit) synchronized (this) { if (!systemTimezonesInitialised) { Collection tzs = cal.getPublicTimeZones(); Iterator it = tzs.iterator(); while (it.hasNext()) { BwTimeZone btz = (BwTimeZone)it.next(); Calendar cal = IcalTranslator.getCalendar(btz.getVtimezone()); VTimeZone vtz = (VTimeZone)cal.getComponents().getComponent(Component.VTIMEZONE); if (vtz == null) { throw new CalFacadeException("Incorrectly stored timezone"); } tzinfo = new TimezoneInfo(); tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); systemTimezones.put(btz.getTzid(), tzinfo); } systemTimezonesInitialised = true; } } } */ tzinfo = (TimezoneInfo)systemTimezones.get(id); if (tzinfo != null) { tzinfo.publick = true; } else { tzinfo = (TimezoneInfo)timezones.get(id); } return tzinfo; }
|
private TimezoneInfo lookup(String id) throws CalFacadeException { TimezoneInfo tzinfo; /* if (!systemTimezonesInitialised) { // First call (after reinit) synchronized (this) { if (!systemTimezonesInitialised) { Collection tzs = cal.getPublicTimeZones(); Iterator it = tzs.iterator(); while (it.hasNext()) { BwTimeZone btz = (BwTimeZone)it.next(); Calendar cal = IcalTranslator.getCalendar(btz.getVtimezone()); VTimeZone vtz = (VTimeZone)cal.getComponents().getComponent(Component.VTIMEZONE); if (vtz == null) { throw new CalFacadeException("Incorrectly stored timezone"); tzinfo = new TimezoneInfo(); tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); systemTimezones.put(btz.getTzid(), tzinfo); systemTimezonesInitialised = true; */ tzinfo = (TimezoneInfo)systemTimezones.get(id); if (tzinfo != null) { tzinfo.publick = true; else { tzinfo = (TimezoneInfo)timezones.get(id); return tzinfo;
| 1,117,672
|
public void refreshTimezones() throws CalFacadeException { synchronized (this) { //systemTimezonesInitialised = false; systemTimezones = new HashMap(); } // force refresh now lookup("not-a-timezone"); }
|
public void refreshTimezones() throws CalFacadeException { synchronized (this) { //systemTimezonesInitialised = false; } // force refresh now lookup("not-a-timezone"); }
| 1,117,673
|
public void saveTimeZone(String tzid, VTimeZone vtz) throws CalFacadeException { /* For a user update the map to avoid a refetch. For system timezones we will force a refresh when we're done. */ BwTimeZone tz = new BwTimeZone(); tz.setTzid(tzid); tz.setPublick(publick); tz.setOwner(user); StringBuffer sb = new StringBuffer(); sb.append("BEGIN:VCALENDAR\n"); sb.append("PRODID:-//RPI//BEDEWORK//US\n"); sb.append("VERSION:2.0\n"); sb.append(vtz.toString()); sb.append("END:VCALENDAR\n"); tz.setVtimezone(sb.toString()); try { ri.restoreTimezone(tz); } catch (Throwable t) { throw new CalFacadeException(t); } TimezoneInfo tzinfo = (TimezoneInfo)timezones.get(tzid); if (tzinfo == null) { tzinfo = new TimezoneInfo(); } tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); timezones.put(tzid, tzinfo); }
|
public void saveTimeZone(String tzid, VTimeZone vtz) throws CalFacadeException { /* For a user update the map to avoid a refetch. For system timezones we will force a refresh when we're done. */ BwTimeZone btz = new BwTimeZone(); tz.setTzid(tzid); tz.setPublick(publick); tz.setOwner(user); StringBuffer sb = new StringBuffer(); sb.append("BEGIN:VCALENDAR\n"); sb.append("PRODID:-//RPI//BEDEWORK//US\n"); sb.append("VERSION:2.0\n"); sb.append(vtz.toString()); sb.append("END:VCALENDAR\n"); tz.setVtimezone(sb.toString()); try { ri.restoreTimezone(tz); } catch (Throwable t) { throw new CalFacadeException(t); } TimezoneInfo tzinfo = (TimezoneInfo)timezones.get(tzid); if (tzinfo == null) { tzinfo = new TimezoneInfo(); } tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); timezones.put(tzid, tzinfo); }
| 1,117,674
|
public void saveTimeZone(String tzid, VTimeZone vtz) throws CalFacadeException { /* For a user update the map to avoid a refetch. For system timezones we will force a refresh when we're done. */ BwTimeZone tz = new BwTimeZone(); tz.setTzid(tzid); tz.setPublick(publick); tz.setOwner(user); StringBuffer sb = new StringBuffer(); sb.append("BEGIN:VCALENDAR\n"); sb.append("PRODID:-//RPI//BEDEWORK//US\n"); sb.append("VERSION:2.0\n"); sb.append(vtz.toString()); sb.append("END:VCALENDAR\n"); tz.setVtimezone(sb.toString()); try { ri.restoreTimezone(tz); } catch (Throwable t) { throw new CalFacadeException(t); } TimezoneInfo tzinfo = (TimezoneInfo)timezones.get(tzid); if (tzinfo == null) { tzinfo = new TimezoneInfo(); } tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); timezones.put(tzid, tzinfo); }
|
public void saveTimeZone(String tzid, VTimeZone vtz) throws CalFacadeException { /* For a user update the map to avoid a refetch. For system timezones we will force a refresh when we're done. */ BwTimeZone tz = new BwTimeZone(); tz.setTzid(tzid); tz.setPublick(publick); tz.setOwner(user); StringBuffer sb = new StringBuffer(); sb.append("BEGIN:VCALENDAR\n"); sb.append("PRODID:-//RPI//BEDEWORK//US\n"); sb.append("VERSION:2.0\n"); sb.append(vtz.toString()); sb.append("END:VCALENDAR\n"); tz.setVtimezone(sb.toString()); try { ri.restoreTimezone(tz); } catch (Throwable t) { throw new CalFacadeException(t); } TimezoneInfo tzinfo = (TimezoneInfo)timezones.get(tzid); if (tzinfo == null) { tzinfo = new TimezoneInfo(); } tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); timezones.put(tzid, tzinfo); }
| 1,117,675
|
public void saveTimeZone(String tzid, VTimeZone vtz) throws CalFacadeException { /* For a user update the map to avoid a refetch. For system timezones we will force a refresh when we're done. */ BwTimeZone tz = new BwTimeZone(); tz.setTzid(tzid); tz.setPublick(publick); tz.setOwner(user); StringBuffer sb = new StringBuffer(); sb.append("BEGIN:VCALENDAR\n"); sb.append("PRODID:-//RPI//BEDEWORK//US\n"); sb.append("VERSION:2.0\n"); sb.append(vtz.toString()); sb.append("END:VCALENDAR\n"); tz.setVtimezone(sb.toString()); try { ri.restoreTimezone(tz); } catch (Throwable t) { throw new CalFacadeException(t); } TimezoneInfo tzinfo = (TimezoneInfo)timezones.get(tzid); if (tzinfo == null) { tzinfo = new TimezoneInfo(); } tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); timezones.put(tzid, tzinfo); }
|
public void saveTimeZone(String tzid, VTimeZone vtz) throws CalFacadeException { /* For a user update the map to avoid a refetch. For system timezones we will force a refresh when we're done. */ BwTimeZone tz = new BwTimeZone(); tz.setTzid(tzid); tz.setPublick(publick); tz.setOwner(user); StringBuffer sb = new StringBuffer(); sb.append("BEGIN:VCALENDAR\n"); sb.append("PRODID:-//RPI//BEDEWORK//US\n"); sb.append("VERSION:2.0\n"); sb.append(vtz.toString()); sb.append("END:VCALENDAR\n"); btz.setVtimezone(sb.toString()); try { ri.restoreTimezone(tz); } catch (Throwable t) { throw new CalFacadeException(t); } TimezoneInfo tzinfo = (TimezoneInfo)timezones.get(tzid); if (tzinfo == null) { tzinfo = new TimezoneInfo(); } tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); timezones.put(tzid, tzinfo); }
| 1,117,676
|
public void saveTimeZone(String tzid, VTimeZone vtz) throws CalFacadeException { /* For a user update the map to avoid a refetch. For system timezones we will force a refresh when we're done. */ BwTimeZone tz = new BwTimeZone(); tz.setTzid(tzid); tz.setPublick(publick); tz.setOwner(user); StringBuffer sb = new StringBuffer(); sb.append("BEGIN:VCALENDAR\n"); sb.append("PRODID:-//RPI//BEDEWORK//US\n"); sb.append("VERSION:2.0\n"); sb.append(vtz.toString()); sb.append("END:VCALENDAR\n"); tz.setVtimezone(sb.toString()); try { ri.restoreTimezone(tz); } catch (Throwable t) { throw new CalFacadeException(t); } TimezoneInfo tzinfo = (TimezoneInfo)timezones.get(tzid); if (tzinfo == null) { tzinfo = new TimezoneInfo(); } tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); timezones.put(tzid, tzinfo); }
|
public void saveTimeZone(String tzid, VTimeZone vtz) throws CalFacadeException { /* For a user update the map to avoid a refetch. For system timezones we will force a refresh when we're done. */ BwTimeZone tz = new BwTimeZone(); tz.setTzid(tzid); tz.setPublick(publick); tz.setOwner(user); StringBuffer sb = new StringBuffer(); sb.append("BEGIN:VCALENDAR\n"); sb.append("PRODID:-//RPI//BEDEWORK//US\n"); sb.append("VERSION:2.0\n"); sb.append(vtz.toString()); sb.append("END:VCALENDAR\n"); tz.setVtimezone(sb.toString()); try { ri.restoreTimezone(btz); } catch (Throwable t) { throw new CalFacadeException(t); } TimezoneInfo tzinfo = (TimezoneInfo)timezones.get(tzid); if (tzinfo == null) { tzinfo = new TimezoneInfo(); } tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); timezones.put(tzid, tzinfo); }
| 1,117,677
|
public void saveTimeZone(String tzid, VTimeZone vtz) throws CalFacadeException { /* For a user update the map to avoid a refetch. For system timezones we will force a refresh when we're done. */ BwTimeZone tz = new BwTimeZone(); tz.setTzid(tzid); tz.setPublick(publick); tz.setOwner(user); StringBuffer sb = new StringBuffer(); sb.append("BEGIN:VCALENDAR\n"); sb.append("PRODID:-//RPI//BEDEWORK//US\n"); sb.append("VERSION:2.0\n"); sb.append(vtz.toString()); sb.append("END:VCALENDAR\n"); tz.setVtimezone(sb.toString()); try { ri.restoreTimezone(tz); } catch (Throwable t) { throw new CalFacadeException(t); } TimezoneInfo tzinfo = (TimezoneInfo)timezones.get(tzid); if (tzinfo == null) { tzinfo = new TimezoneInfo(); } tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); timezones.put(tzid, tzinfo); }
|
public void saveTimeZone(String tzid, VTimeZone vtz) throws CalFacadeException { /* For a user update the map to avoid a refetch. For system timezones we will force a refresh when we're done. */ BwTimeZone tz = new BwTimeZone(); tz.setTzid(tzid); tz.setPublick(publick); tz.setOwner(user); StringBuffer sb = new StringBuffer(); sb.append("BEGIN:VCALENDAR\n"); sb.append("PRODID:-//RPI//BEDEWORK//US\n"); sb.append("VERSION:2.0\n"); sb.append(vtz.toString()); sb.append("END:VCALENDAR\n"); tz.setVtimezone(sb.toString()); try { ri.restoreTimezone(tz); } catch (Throwable t) { throw new CalFacadeException(t); } TimezoneInfo tzinfo = (TimezoneInfo)timezones.get(tzid); if (tzinfo == null) { tzinfo = new TimezoneInfo(tz, vtz); timezones.put(tzid, tzinfo); } else { tzinfo.init(tz, vtz); } tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); timezones.put(tzid, tzinfo); }
| 1,117,678
|
public void saveTimeZone(String tzid, VTimeZone vtz) throws CalFacadeException { /* For a user update the map to avoid a refetch. For system timezones we will force a refresh when we're done. */ BwTimeZone tz = new BwTimeZone(); tz.setTzid(tzid); tz.setPublick(publick); tz.setOwner(user); StringBuffer sb = new StringBuffer(); sb.append("BEGIN:VCALENDAR\n"); sb.append("PRODID:-//RPI//BEDEWORK//US\n"); sb.append("VERSION:2.0\n"); sb.append(vtz.toString()); sb.append("END:VCALENDAR\n"); tz.setVtimezone(sb.toString()); try { ri.restoreTimezone(tz); } catch (Throwable t) { throw new CalFacadeException(t); } TimezoneInfo tzinfo = (TimezoneInfo)timezones.get(tzid); if (tzinfo == null) { tzinfo = new TimezoneInfo(); } tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); timezones.put(tzid, tzinfo); }
|
public void saveTimeZone(String tzid, VTimeZone vtz) throws CalFacadeException { /* For a user update the map to avoid a refetch. For system timezones we will force a refresh when we're done. */ BwTimeZone tz = new BwTimeZone(); tz.setTzid(tzid); tz.setPublick(publick); tz.setOwner(user); StringBuffer sb = new StringBuffer(); sb.append("BEGIN:VCALENDAR\n"); sb.append("PRODID:-//RPI//BEDEWORK//US\n"); sb.append("VERSION:2.0\n"); sb.append(vtz.toString()); sb.append("END:VCALENDAR\n"); tz.setVtimezone(sb.toString()); try { ri.restoreTimezone(tz); } catch (Throwable t) { throw new CalFacadeException(t); } TimezoneInfo tzinfo = (TimezoneInfo)timezones.get(tzid); if (tzinfo == null) { tzinfo = new TimezoneInfo(); } tzinfo.vtz = vtz; tzinfo.tz = new TimeZone(vtz); timezones.put(tzid, tzinfo); }
| 1,117,679
|
public void write(IdeaDocument ideaDocument) throws Exception;
|
void write(IdeaDocument ideaDocument) throws Exception;
| 1,117,680
|
public void addSeismogram(LocalSeismogramImpl seis, AuditInfo[] audit) { seismogramNameCache = null; // Note this does not set the xlink, as the seis has not been saved anywhere yet. Document doc = config.getOwnerDocument(); Element localSeismogram = doc.createElement("localSeismogram");//doc.createElement("SacSeismogram"); String name =seis.getProperty(seisNameKey); if (name == null || name.length() == 0) { name = seis.channel_id.network_id.network_code+"."+ seis.channel_id.station_code+"."+ seis.channel_id.channel_code; //edu.iris.Fissures.network.ChannelIdUtil.toStringNoDates(seis.channel_id); } name = getUniqueName(getSeismogramNames(), name); seis.setName(name); Element seismogramAttr = doc.createElement("seismogramAttr"); XMLSeismogramAttr.insert(seismogramAttr, (LocalSeismogram)seis); //localSeismogram.appendChild(seismogramAttr);// Element propertyElement = doc.createElement("property");// propertyElement.appendChild(XMLUtil.createTextElement(doc, "name",// "Name"));// propertyElement.appendChild(XMLUtil.createTextElement(doc, "value",// name)); ///seismogramAttr.appendChild(propertyElement); localSeismogram.appendChild(seismogramAttr); /*Property[] props = seis.getProperties(); //logger.debug("the length of the Properties of the seismogram are "+props.length); Element propE, propNameE, propValueE; for (int i=0; i<props.length; i++) { if (props[i] != null && props[i].name != seisNameKey) { propE = doc.createElement("property"); propNameE = doc.createElement("name"); propNameE.setNodeValue(props[i].name); propValueE = doc.createElement("value"); propValueE.setNodeValue(props[i].value); propE.appendChild(propNameE); propE.appendChild(propValueE); localSeismogram.appendChild(propE); } }*/ config.appendChild(localSeismogram); seismogramCache.put(name, new SoftReference(seis)); //logger.debug("added seis now "+getSeismogramNames().length+" seisnogram names."); seismogramNameCache = null; //xpath = new XPathAPI(); //xpath = new CachedXPathAPI(xpath); 
//logger.debug("2 added seis now "+getSeismogramNames().length+" seisnogram names."); }
|
public void addSeismogram(LocalSeismogramImpl seis, AuditInfo[] audit) { seismogramNameCache = null; // Note this does not set the xlink, as the seis has not been saved anywhere yet. Document doc = config.getOwnerDocument(); Element localSeismogram = doc.createElement("localSeismogram");//doc.createElement("SacSeismogram"); String name =seis.getProperty(seisNameKey); if (name == null || name.length() == 0) { name = seis.channel_id.network_id.network_code+"."+ seis.channel_id.station_code+"."+ seis.channel_id.channel_code; //edu.iris.Fissures.network.ChannelIdUtil.toStringNoDates(seis.channel_id); } name = getUniqueName(getSeismogramNames(), name); seis.setName(name); Element seismogramAttr = doc.createElement("seismogramAttr"); XMLSeismogramAttr.insert(seismogramAttr, (LocalSeismogram)seis); //localSeismogram.appendChild(seismogramAttr);// Element propertyElement = doc.createElement("property");// propertyElement.appendChild(XMLUtil.createTextElement(doc, "name",// "Name"));// propertyElement.appendChild(XMLUtil.createTextElement(doc, "value",// name)); ///seismogramAttr.appendChild(propertyElement); localSeismogram.appendChild(seismogramAttr); /*Property[] props = seis.getProperties(); //logger.debug("the length of the Properties of the seismogram are "+props.length); Element propE, propNameE, propValueE; for (int i=0; i<props.length; i++) { if (props[i] != null && props[i].name != seisNameKey) { propE = doc.createElement("property"); propNameE = doc.createElement("name"); propNameE.setNodeValue(props[i].name); propValueE = doc.createElement("value"); propValueE.setNodeValue(props[i].value); propE.appendChild(propNameE); propE.appendChild(propValueE); localSeismogram.appendChild(propE); } }*/ config.appendChild(localSeismogram); seismogramCache.put(name, seis); //logger.debug("added seis now "+getSeismogramNames().length+" seisnogram names."); seismogramNameCache = null; //xpath = new XPathAPI(); //xpath = new CachedXPathAPI(xpath); //logger.debug("2 added seis 
now "+getSeismogramNames().length+" seisnogram names."); }
| 1,117,682
|
public Object getParameter(String name) { System.out.println("IN THE METHOD GET PARAMETER ****************************************************"); if (parameterCache.containsKey(name)) { SoftReference softReference = (SoftReference)parameterCache.get(name); if(softReference.get() != null) return softReference.get(); else parameterCache.remove(name); } // end of if (parameterCache.containsKey(name)) NodeList nList = evalNodeList(config, "parameter[name/text()="+ dquote+name+dquote+"]"); if (nList != null && nList.getLength() != 0) { //logger.debug("getting the parameter "+name); Node n = nList.item(0); if (n instanceof Element) { Object r = XMLParameter.getParameter((Element)n); parameterCache.put(name, new SoftReference(r)); return r; } } else { logger.debug("THE NODE LIST IS NULL for parameter "+name); } System.out.println("GO AND GET THE PARAMETER REF"); // not a parameter, try parameterRef nList = evalNodeList(config, "parameterRef");//[text()="+dquote+name+dquote+"]"); if (nList != null && nList.getLength() != 0) { for(int counter = 0 ; counter < nList.getLength() ; counter++) { Node n = nList.item(counter); if (n instanceof Element) { if(!((Element)n).getAttribute("name").equals(name)) continue; SimpleXLink sl = new SimpleXLink(docBuilder, (Element)n, getBase()); try { Element e = sl.retrieve(); //parameterCache.put(name, e); Object obj = XMLParameter.getParameter(e); parameterCache.put(name, new SoftReference(obj)); return obj; } catch (Exception e) { logger.error("can't get paramterRef for "+name, e); } // end of try-catch } } } logger.warn("can't find paramter for "+name); //can't find that name??? return null; }
|
public Object getParameter(String name) { System.out.println("IN THE METHOD GET PARAMETER ****************************************************"); if (parameterCache.containsKey(name)) { SoftReference softReference = (SoftReference)parameterCache.get(name); if(softReference.get() != null) return softReference.get(); else parameterCache.remove(name); } // end of if (parameterCache.containsKey(name)) NodeList nList = evalNodeList(config, "parameter[name/text()="+ dquote+name+dquote+"]"); if (nList != null && nList.getLength() != 0) { //logger.debug("getting the parameter "+name); Node n = nList.item(0); if (n instanceof Element) { Object r = XMLParameter.getParameter((Element)n); parameterCache.put(name, new SoftReference(r)); return r; } } else { logger.debug("THE NODE LIST IS NULL for parameter "+name); } System.out.println("GO AND GET THE PARAMETER REF"); // not a parameter, try parameterRef nList = evalNodeList(config, "parameterRef");//[text()="+dquote+name+dquote+"]"); if (nList != null && nList.getLength() != 0) { for(int counter = 0 ; counter < nList.getLength() ; counter++) { Node n = nList.item(counter); if (n instanceof Element) { if(!((Element)n).getAttribute("name").equals(name)) continue; SimpleXLink sl = new SimpleXLink(docBuilder, (Element)n, getBase()); try { Element e = sl.retrieve(); //parameterCache.put(name, e); Object obj = XMLParameter.getParameter(e); parameterCache.put(name, new SoftReference(obj)); return obj; } catch (Exception e) { logger.error("can't get paramterRef for "+name, e); } // end of try-catch } } } logger.warn("can't find paramter for "+name); //can't find that name??? return null; }
| 1,117,683
|
public boolean getData(SeisDataChangeListener listener, RequestFilter[] requestFilters){ for (int i = 0; i < requestFilters.length; i++){ boolean found = false; for (int j = 0; j < this.requestFilters.length && !found; j++){ if(requestFilters[i] == this.requestFilters[j]){ found = true; } } if(!found){ return false; } } if(!pushed){ synchronized(initiators){ initiators.add(listener); } return true; } LocalSeismogramImpl[] seis = (LocalSeismogramImpl[])seisRef.get(); if(seis != null){ a_client.pushData(seis, listener); a_client.finished(listener); return true; }else{ return false; } }
|
public boolean getData(SeisDataChangeListener listener, RequestFilter[] requestFilters){ for (int i = 0; i < requestFilters.length; i++){ boolean found = false; for (int j = 0; j < this.requestFilters.length && !found; j++){ if(requestFilters[i] == this.requestFilters[j]){ found = true; } } if(!found){ return false; } } if(!pushed){ synchronized(initiators){ initiators.add(listener); } return true; } LocalSeismogramImpl[] seis = (LocalSeismogramImpl[])seisRef.get(); if(seis != null){ a_client.pushData(seis, listener); a_client.finished(listener); return true; }else{ return false; } }
| 1,117,684
|
public boolean getData(SeisDataChangeListener listener, RequestFilter[] requestFilters){ for (int i = 0; i < requestFilters.length; i++){ boolean found = false; for (int j = 0; j < this.requestFilters.length && !found; j++){ if(requestFilters[i] == this.requestFilters[j]){ found = true; } } if(!found){ return false; } } if(!pushed){ synchronized(initiators){ initiators.add(listener); } return true; } LocalSeismogramImpl[] seis = (LocalSeismogramImpl[])seisRef.get(); if(seis != null){ a_client.pushData(seis, listener); a_client.finished(listener); return true; }else{ return false; } }
|
public boolean getData(SeisDataChangeListener listener, RequestFilter[] requestFilters){ for (int i = 0; i < requestFilters.length; i++){ boolean found = false; for (int j = 0; j < this.requestFilters.length && !found; j++){ if(requestFilters[i] == this.requestFilters[j]){ found = true; } } if(!found){ return false; } } if(!pushed){ synchronized(initiators){ initiators.add(listener); } } LocalSeismogramImpl[] seis = (LocalSeismogramImpl[])seisRef.get(); if(seis != null){ a_client.pushData(seis, listener); a_client.finished(listener); }else{ return false; } }
| 1,117,685
|
public boolean getData(SeisDataChangeListener listener, RequestFilter[] requestFilters){ for (int i = 0; i < requestFilters.length; i++){ boolean found = false; for (int j = 0; j < this.requestFilters.length && !found; j++){ if(requestFilters[i] == this.requestFilters[j]){ found = true; } } if(!found){ return false; } } if(!pushed){ synchronized(initiators){ initiators.add(listener); } return true; } LocalSeismogramImpl[] seis = (LocalSeismogramImpl[])seisRef.get(); if(seis != null){ a_client.pushData(seis, listener); a_client.finished(listener); return true; }else{ return false; } }
|
public boolean getData(SeisDataChangeListener listener, RequestFilter[] requestFilters){ for (int i = 0; i < requestFilters.length; i++){ boolean found = false; for (int j = 0; j < this.requestFilters.length && !found; j++){ if(requestFilters[i] == this.requestFilters[j]){ found = true; } } if(!found){ return false; } } if(!pushed){ synchronized(initiators){ initiators.add(listener); } return true; } LocalSeismogramImpl[] seis = (LocalSeismogramImpl[])seisRef.get(); if(seis != null){ a_client.pushData(seis, listener); a_client.finished(listener); return true; }else{ return false; } }
| 1,117,686
|
public void run() { List seismograms = new ArrayList(); for(int counter = 0; counter < requestFilters.length; counter++) { try { RequestFilter[] temp = { requestFilters[counter] }; LocalSeismogram[] seis = dbDataCenter.retrieve_seismograms(temp); LocalSeismogramImpl[] seisImpl = castToLocalSeismogramImplArray(seis); synchronized(initiators){ pushed = true; Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.pushData(seisImpl, ((SeisDataChangeListener)it.next())); } } for (int i = 0; i < seisImpl.length; i++){ seismograms.add(seisImpl[i]); } } catch(FissuresException fe) { synchronized(initiators){ pushed = true; Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.error(((SeisDataChangeListener)it.next()), fe); } continue; } } catch(org.omg.CORBA.SystemException fe) { synchronized(initiators){ pushed = true; Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.error(((SeisDataChangeListener)it.next()), fe); } continue; } } } LocalSeismogramImpl[] seisArray = new LocalSeismogramImpl[seismograms.size()]; seisRef = new SoftReference(seismograms.toArray(seisArray)); synchronized(initiators){ Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.finished(((SeisDataChangeListener)it.next())); } } }
|
public void run() { List seismograms = new ArrayList(); for(int counter = 0; counter < requestFilters.length; counter++) { try { RequestFilter[] temp = { requestFilters[counter] }; LocalSeismogram[] seis = dbDataCenter.retrieve_seismograms(temp); LocalSeismogramImpl[] seisImpl = castToLocalSeismogramImplArray(seis); synchronized(initiators){ Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.pushData(seisImpl, ((SeisDataChangeListener)it.next())); } } for (int i = 0; i < seisImpl.length; i++){ seismograms.add(seisImpl[i]); } } catch(FissuresException fe) { synchronized(initiators){ Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.error(((SeisDataChangeListener)it.next()), fe); } continue; } } catch(org.omg.CORBA.SystemException fe) { synchronized(initiators){ Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.error(((SeisDataChangeListener)it.next()), fe); } continue; } } } LocalSeismogramImpl[] seisArray = new LocalSeismogramImpl[seismograms.size()]; seisRef = new SoftReference(seismograms.toArray(seisArray)); synchronized(initiators){ Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.finished(((SeisDataChangeListener)it.next())); } } }
| 1,117,687
|
public void run() { List seismograms = new ArrayList(); for(int counter = 0; counter < requestFilters.length; counter++) { try { RequestFilter[] temp = { requestFilters[counter] }; LocalSeismogram[] seis = dbDataCenter.retrieve_seismograms(temp); LocalSeismogramImpl[] seisImpl = castToLocalSeismogramImplArray(seis); synchronized(initiators){ pushed = true; Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.pushData(seisImpl, ((SeisDataChangeListener)it.next())); } } for (int i = 0; i < seisImpl.length; i++){ seismograms.add(seisImpl[i]); } } catch(FissuresException fe) { synchronized(initiators){ pushed = true; Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.error(((SeisDataChangeListener)it.next()), fe); } continue; } } catch(org.omg.CORBA.SystemException fe) { synchronized(initiators){ pushed = true; Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.error(((SeisDataChangeListener)it.next()), fe); } continue; } } } LocalSeismogramImpl[] seisArray = new LocalSeismogramImpl[seismograms.size()]; seisRef = new SoftReference(seismograms.toArray(seisArray)); synchronized(initiators){ Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.finished(((SeisDataChangeListener)it.next())); } } }
|
public void run() { List seismograms = new ArrayList(); for(int counter = 0; counter < requestFilters.length; counter++) { try { RequestFilter[] temp = { requestFilters[counter] }; LocalSeismogram[] seis = dbDataCenter.retrieve_seismograms(temp); LocalSeismogramImpl[] seisImpl = castToLocalSeismogramImplArray(seis); synchronized(initiators){ failed = true; System.out.println("FISSURES FAILED"); Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.pushData(seisImpl, ((SeisDataChangeListener)it.next())); } } for (int i = 0; i < seisImpl.length; i++){ seismograms.add(seisImpl[i]); } } catch(FissuresException fe) { synchronized(initiators){ failed = true; System.out.println("FISSURES FAILED"); Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.error(((SeisDataChangeListener)it.next()), fe); } continue; } } catch(org.omg.CORBA.SystemException fe) { synchronized(initiators){ failed = true; System.out.println("FISSURES FAILED"); Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.error(((SeisDataChangeListener)it.next()), fe); } continue; } } } LocalSeismogramImpl[] seisArray = new LocalSeismogramImpl[seismograms.size()]; seisRef = new SoftReference(seismograms.toArray(seisArray)); synchronized(initiators){ Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.finished(((SeisDataChangeListener)it.next())); } } }
| 1,117,688
|
public void run() { List seismograms = new ArrayList(); for(int counter = 0; counter < requestFilters.length; counter++) { try { RequestFilter[] temp = { requestFilters[counter] }; LocalSeismogram[] seis = dbDataCenter.retrieve_seismograms(temp); LocalSeismogramImpl[] seisImpl = castToLocalSeismogramImplArray(seis); synchronized(initiators){ pushed = true; Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.pushData(seisImpl, ((SeisDataChangeListener)it.next())); } } for (int i = 0; i < seisImpl.length; i++){ seismograms.add(seisImpl[i]); } } catch(FissuresException fe) { synchronized(initiators){ pushed = true; Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.error(((SeisDataChangeListener)it.next()), fe); } continue; } } catch(org.omg.CORBA.SystemException fe) { synchronized(initiators){ pushed = true; Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.error(((SeisDataChangeListener)it.next()), fe); } continue; } } } LocalSeismogramImpl[] seisArray = new LocalSeismogramImpl[seismograms.size()]; seisRef = new SoftReference(seismograms.toArray(seisArray)); synchronized(initiators){ Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.finished(((SeisDataChangeListener)it.next())); } } }
|
public void run() { List seismograms = new ArrayList(); for(int counter = 0; counter < requestFilters.length; counter++) { try { RequestFilter[] temp = { requestFilters[counter] }; LocalSeismogram[] seis = dbDataCenter.retrieve_seismograms(temp); LocalSeismogramImpl[] seisImpl = castToLocalSeismogramImplArray(seis); synchronized(initiators){ failed = true; System.out.println("CORBA FAILED"); Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.pushData(seisImpl, ((SeisDataChangeListener)it.next())); } } for (int i = 0; i < seisImpl.length; i++){ seismograms.add(seisImpl[i]); } } catch(FissuresException fe) { synchronized(initiators){ failed = true; System.out.println("CORBA FAILED"); Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.error(((SeisDataChangeListener)it.next()), fe); } continue; } } catch(org.omg.CORBA.SystemException fe) { synchronized(initiators){ failed = true; System.out.println("CORBA FAILED"); Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.error(((SeisDataChangeListener)it.next()), fe); } continue; } } } LocalSeismogramImpl[] seisArray = new LocalSeismogramImpl[seismograms.size()]; seisRef = new SoftReference(seismograms.toArray(seisArray)); synchronized(initiators){ Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.finished(((SeisDataChangeListener)it.next())); } } }
| 1,117,689
|
public void run() { List seismograms = new ArrayList(); for(int counter = 0; counter < requestFilters.length; counter++) { try { RequestFilter[] temp = { requestFilters[counter] }; LocalSeismogram[] seis = dbDataCenter.retrieve_seismograms(temp); LocalSeismogramImpl[] seisImpl = castToLocalSeismogramImplArray(seis); synchronized(initiators){ pushed = true; Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.pushData(seisImpl, ((SeisDataChangeListener)it.next())); } } for (int i = 0; i < seisImpl.length; i++){ seismograms.add(seisImpl[i]); } } catch(FissuresException fe) { synchronized(initiators){ pushed = true; Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.error(((SeisDataChangeListener)it.next()), fe); } continue; } } catch(org.omg.CORBA.SystemException fe) { synchronized(initiators){ pushed = true; Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.error(((SeisDataChangeListener)it.next()), fe); } continue; } } } LocalSeismogramImpl[] seisArray = new LocalSeismogramImpl[seismograms.size()]; seisRef = new SoftReference(seismograms.toArray(seisArray)); synchronized(initiators){ Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.finished(((SeisDataChangeListener)it.next())); } } }
|
public void run() { List seismograms = new ArrayList(); for(int counter = 0; counter < requestFilters.length; counter++) { try { RequestFilter[] temp = { requestFilters[counter] }; LocalSeismogram[] seis = dbDataCenter.retrieve_seismograms(temp); LocalSeismogramImpl[] seisImpl = castToLocalSeismogramImplArray(seis); synchronized(initiators){ pushed = true; Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.pushData(seisImpl, ((SeisDataChangeListener)it.next())); } } for (int i = 0; i < seisImpl.length; i++){ seismograms.add(seisImpl[i]); } } catch(FissuresException fe) { synchronized(initiators){ pushed = true; Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.error(((SeisDataChangeListener)it.next()), fe); } continue; } } catch(org.omg.CORBA.SystemException fe) { synchronized(initiators){ pushed = true; Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.error(((SeisDataChangeListener)it.next()), fe); } continue; } } } LocalSeismogramImpl[] seisArray = new LocalSeismogramImpl[seismograms.size()]; seisRef = new SoftReference(seismograms.toArray(seisArray)); synchronized(initiators){ Iterator it = initiators.iterator(); while(it.hasNext()){ a_client.finished(((SeisDataChangeListener)it.next())); } } }
| 1,117,690
|
public void runChecks() throws IOException { int sizeofCollection = ConnCheckerCollection.size(); Iterator collectionExe = ConnCheckerCollection.iterator(); while(collectionExe.hasNext()){ ConnChecker connChecker = (ConnChecker)collectionExe.next(); Thread th = new Thread(connChecker); th.start(); } } // close runChecks
|
public void runChecks() throws IOException { int sizeofCollection = ConnCheckerCollection.size(); Iterator collectionExe = ConnCheckerCollection.iterator(); while(collectionExe.hasNext()){ ConnChecker connChecker = (ConnChecker)collectionExe.next(); Thread th = new Thread(checkerThreadGroup, connChecker, "ConnChecker"+getThreadNum()); th.start(); } } // close runChecks
| 1,117,692
|
public static boolean checkStatus(String val) { return BwEvent.statusConfirmed.equals(val) || BwEvent.statusTentative.equals(val) || BwEvent.statusCancelled.equals(val); }
|
public static boolean checkStatus(String val) { return BwEvent.statusConfirmed.equals(val) || BwEvent.statusTentative.equals(val) || BwEvent.statusCancelled.equals(val); }
| 1,117,693
|
public static boolean checkTransparency(String val) { return BwEvent.transparencyOpaque.equals(val) || BwEvent.transparencyTransparent.equals(val); }
|
public static boolean checkTransparency(String val) { return BwEvent.transparencyOpaque.equals(val) || BwEvent.transparencyTransparent.equals(val); }
| 1,117,694
|
public void setString(String key, String value) { setProperty(key, value); }
|
public void setString(String key, String value) { if (value != null) { setProperty(key, value); } }
| 1,117,695
|
public LocalSeismogramImpl apply(LocalSeismogramImpl seis) throws Exception { LocalSeismogramImpl outSeis; TimeSeriesDataSel outData = new TimeSeriesDataSel(); int numPts = seis.num_points / factor; if(seis.can_convert_to_short()) { short[] inS = seis.get_as_shorts(); short[] outS = new short[numPts]; for(int i = 0; i < outS.length; i++) { outS[i] = inS[i * factor]; } outData.sht_values(outS); } else if(seis.can_convert_to_long()) { int[] outI = new int[numPts]; int[] inI = seis.get_as_longs(); for(int i = 0; i < outI.length; i++) { outI[i] = inI[i * factor]; } outData.int_values(outI); } else if(seis.can_convert_to_float()) { float[] outF = new float[numPts]; float[] inF = seis.get_as_floats(); for(int i = 0; i < outF.length; i++) { outF[i] = inF[i * factor]; } outData.flt_values(outF); } else { double[] outD = new double[numPts]; double[] inD = seis.get_as_doubles(); for(int i = 0; i < outD.length; i++) { outD[i] = inD[i * factor]; } outData.dbl_values(outD); } // end of else outSeis = new LocalSeismogramImpl(seis.get_id(), seis.properties, seis.begin_time, numPts, new SamplingImpl(numPts - 1, seis.getTimeInterval()), seis.y_unit, seis.channel_id, seis.parm_ids, seis.time_corrections, seis.sample_rate_history, outData); return outSeis; }
|
public LocalSeismogramImpl apply(LocalSeismogramImpl seis) throws Exception { LocalSeismogramImpl outSeis; TimeSeriesDataSel outData = new TimeSeriesDataSel(); int numPts = seis.num_points / factor; if(seis.can_convert_to_short()) { short[] inS = seis.get_as_shorts(); short[] outS = new short[numPts]; for(int i = 0; i < outS.length; i++) { outS[i] = inS[i * factor]; } outData.sht_values(outS); } else if(seis.can_convert_to_long()) { int[] outI = new int[numPts]; int[] inI = seis.get_as_longs(); for(int i = 0; i < outI.length; i++) { outI[i] = inI[i * factor]; } outData.int_values(outI); } else if(seis.can_convert_to_float()) { float[] outF = new float[numPts]; float[] inF = seis.get_as_floats(); for(int i = 0; i < outF.length; i++) { outF[i] = inF[i * factor]; } outData.flt_values(outF); } else { double[] outD = new double[numPts]; double[] inD = seis.get_as_doubles(); for(int i = 0; i < outD.length; i++) { outD[i] = inD[i * factor]; } outData.dbl_values(outD); } // end of else outSeis = new LocalSeismogramImpl(seis.get_id(), seis.properties, seis.begin_time, numPts, new SamplingImpl(numPts - 1, seis.getTimeInterval()), seis.y_unit, seis.channel_id, seis.parm_ids, seis.time_corrections, seis.sample_rate_history, outData); return outSeis; }
| 1,117,696
|
protected Command getCreateCommand(CreateElementRequest req) { if (UMLElementTypes.InputPin_3003 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getInsertAt() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getAddStructuralFeatureValueAction_InsertAt()); } return getMSLWrapper(new CreateInputPin_3003Command(req)); } if (UMLElementTypes.InputPin_3004 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getValue() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getWriteStructuralFeatureAction_Value()); } return getMSLWrapper(new CreateInputPin_3004Command(req)); } if (UMLElementTypes.InputPin_3005 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getObject() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getStructuralFeatureAction_Object()); } return getMSLWrapper(new CreateInputPin_3005Command(req)); } return super.getCreateCommand(req); }
|
protected Command getCreateCommand(CreateElementRequest req) { if (UMLElementTypes.InputPin_3003 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getInsertAt() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getAddStructuralFeatureValueAction_InsertAt()); } return getMSLWrapper(new CreateInputPin_3003Command(req)); } if (UMLElementTypes.InputPin_3004 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getValue() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getWriteStructuralFeatureAction_Value()); } return getMSLWrapper(new CreateInputPin_3004Command(req)); } if (UMLElementTypes.InputPin_3005 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getObject() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getStructuralFeatureAction_Object()); } return getMSLWrapper(new CreateInputPin_3005Command(req)); } return super.getCreateCommand(req); }
| 1,117,697
|
protected Command getCreateCommand(CreateElementRequest req) { if (UMLElementTypes.InputPin_3003 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getInsertAt() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getAddStructuralFeatureValueAction_InsertAt()); } return getMSLWrapper(new CreateInputPin_3003Command(req)); } if (UMLElementTypes.InputPin_3004 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getValue() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getWriteStructuralFeatureAction_Value()); } return getMSLWrapper(new CreateInputPin_3004Command(req)); } if (UMLElementTypes.InputPin_3005 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getObject() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getStructuralFeatureAction_Object()); } return getMSLWrapper(new CreateInputPin_3005Command(req)); } return super.getCreateCommand(req); }
|
protected Command getCreateCommand(CreateElementRequest req) { if (UMLElementTypes.InputPin_3003 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getInsertAt() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getAddStructuralFeatureValueAction_InsertAt()); } return getMSLWrapper(new InputPinCreateCommand(req)); } if (UMLElementTypes.InputPin_3004 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getValue() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getWriteStructuralFeatureAction_Value()); } return getMSLWrapper(new CreateInputPin_3004Command(req)); } if (UMLElementTypes.InputPin_3005 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getObject() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getStructuralFeatureAction_Object()); } return getMSLWrapper(new CreateInputPin_3005Command(req)); } return super.getCreateCommand(req); }
| 1,117,698
|
protected Command getCreateCommand(CreateElementRequest req) { if (UMLElementTypes.InputPin_3003 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getInsertAt() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getAddStructuralFeatureValueAction_InsertAt()); } return getMSLWrapper(new CreateInputPin_3003Command(req)); } if (UMLElementTypes.InputPin_3004 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getValue() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getWriteStructuralFeatureAction_Value()); } return getMSLWrapper(new CreateInputPin_3004Command(req)); } if (UMLElementTypes.InputPin_3005 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getObject() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getStructuralFeatureAction_Object()); } return getMSLWrapper(new CreateInputPin_3005Command(req)); } return super.getCreateCommand(req); }
|
protected Command getCreateCommand(CreateElementRequest req) { if (UMLElementTypes.InputPin_3003 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getInsertAt() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getAddStructuralFeatureValueAction_InsertAt()); } return getMSLWrapper(new CreateInputPin_3003Command(req)); } if (UMLElementTypes.InputPin_3004 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getValue() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getWriteStructuralFeatureAction_Value()); } return getMSLWrapper(new CreateInputPin_3004Command(req)); } if (UMLElementTypes.InputPin_3005 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getObject() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getStructuralFeatureAction_Object()); } return getMSLWrapper(new CreateInputPin_3005Command(req)); } return super.getCreateCommand(req); }
| 1,117,699
|
protected Command getCreateCommand(CreateElementRequest req) { if (UMLElementTypes.InputPin_3003 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getInsertAt() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getAddStructuralFeatureValueAction_InsertAt()); } return getMSLWrapper(new CreateInputPin_3003Command(req)); } if (UMLElementTypes.InputPin_3004 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getValue() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getWriteStructuralFeatureAction_Value()); } return getMSLWrapper(new CreateInputPin_3004Command(req)); } if (UMLElementTypes.InputPin_3005 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getObject() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getStructuralFeatureAction_Object()); } return getMSLWrapper(new CreateInputPin_3005Command(req)); } return super.getCreateCommand(req); }
|
protected Command getCreateCommand(CreateElementRequest req) { if (UMLElementTypes.InputPin_3003 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getInsertAt() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getAddStructuralFeatureValueAction_InsertAt()); } return getMSLWrapper(new CreateInputPin_3003Command(req)); } if (UMLElementTypes.InputPin_3004 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getValue() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getWriteStructuralFeatureAction_Value()); } return getMSLWrapper(new InputPin2CreateCommand(req)); } if (UMLElementTypes.InputPin_3005 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getObject() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getStructuralFeatureAction_Object()); } return getMSLWrapper(new CreateInputPin_3005Command(req)); } return super.getCreateCommand(req); }
| 1,117,700
|
protected Command getCreateCommand(CreateElementRequest req) { if (UMLElementTypes.InputPin_3003 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getInsertAt() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getAddStructuralFeatureValueAction_InsertAt()); } return getMSLWrapper(new CreateInputPin_3003Command(req)); } if (UMLElementTypes.InputPin_3004 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getValue() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getWriteStructuralFeatureAction_Value()); } return getMSLWrapper(new CreateInputPin_3004Command(req)); } if (UMLElementTypes.InputPin_3005 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getObject() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getStructuralFeatureAction_Object()); } return getMSLWrapper(new CreateInputPin_3005Command(req)); } return super.getCreateCommand(req); }
|
protected Command getCreateCommand(CreateElementRequest req) { if (UMLElementTypes.InputPin_3003 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getInsertAt() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getAddStructuralFeatureValueAction_InsertAt()); } return getMSLWrapper(new CreateInputPin_3003Command(req)); } if (UMLElementTypes.InputPin_3004 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getValue() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getWriteStructuralFeatureAction_Value()); } return getMSLWrapper(new CreateInputPin_3004Command(req)); } if (UMLElementTypes.InputPin_3005 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getObject() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getStructuralFeatureAction_Object()); } return getMSLWrapper(new CreateInputPin_3005Command(req)); } return super.getCreateCommand(req); }
| 1,117,701
|
protected Command getCreateCommand(CreateElementRequest req) { if (UMLElementTypes.InputPin_3003 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getInsertAt() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getAddStructuralFeatureValueAction_InsertAt()); } return getMSLWrapper(new CreateInputPin_3003Command(req)); } if (UMLElementTypes.InputPin_3004 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getValue() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getWriteStructuralFeatureAction_Value()); } return getMSLWrapper(new CreateInputPin_3004Command(req)); } if (UMLElementTypes.InputPin_3005 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getObject() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getStructuralFeatureAction_Object()); } return getMSLWrapper(new CreateInputPin_3005Command(req)); } return super.getCreateCommand(req); }
|
protected Command getCreateCommand(CreateElementRequest req) { if (UMLElementTypes.InputPin_3003 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getInsertAt() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getAddStructuralFeatureValueAction_InsertAt()); } return getMSLWrapper(new CreateInputPin_3003Command(req)); } if (UMLElementTypes.InputPin_3004 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getValue() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getWriteStructuralFeatureAction_Value()); } return getMSLWrapper(new CreateInputPin_3004Command(req)); } if (UMLElementTypes.InputPin_3005 == req.getElementType()) { AddStructuralFeatureValueAction container = (AddStructuralFeatureValueAction) (req.getContainer() instanceof View ? ((View) req.getContainer()).getElement() : req.getContainer()); if (container.getObject() != null) { return super.getCreateCommand(req); } if (req.getContainmentFeature() == null) { req.setContainmentFeature(UMLPackage.eINSTANCE.getStructuralFeatureAction_Object()); } return getMSLWrapper(new InputPin3CreateCommand(req)); } return super.getCreateCommand(req); }
| 1,117,702
|
public edu.sc.seis.fissuresUtil.bag.Statistics[] createInstanceArray() throws Exception { short size = 4; int[] intTestData = new int[size]; short[] shortTestData = new short[size]; float[] floatTestData = new float[size]; double[] doubleTestData = new double[size]; for (short i=0; i<size; i++) { shortTestData[i] = i; intTestData[i] = i; floatTestData[i] = i; doubleTestData[i] = i; } // end of for (int i=0; i<intTestData.length; i++) Statistics[] out = new edu.sc.seis.fissuresUtil.bag.Statistics[4]; out[0] = new Statistics(shortTestData); out[1] = new Statistics(intTestData); out[2] = new Statistics(floatTestData); out[3] = new Statistics(doubleTestData); return out; }
|
public edu.sc.seis.fissuresUtil.bag.Statistics[] createInstanceArray() throws Exception { short size = 4; int[] intTestData = new int[size]; short[] shortTestData = new short[size]; float[] floatTestData = new float[size]; double[] doubleTestData = new double[size]; for (short i=0; i<size; i++) { shortTestData[i] = i; intTestData[i] = i; floatTestData[i] = i; doubleTestData[i] = i; } // end of for (int i=0; i<intTestData.length; i++) Statistics[] out = new edu.sc.seis.fissuresUtil.bag.Statistics[4]; out[0] = new Statistics(shortTestData); out[1] = new Statistics(intTestData); out[2] = new Statistics(floatTestData); out[3] = new Statistics(doubleTestData); return out; }
| 1,117,703
|
public void testBinarySum() throws Exception { // JUnitDoclet begin method binarySum // JUnitDoclet end method binarySum }
|
public void testBinarySum() throws Exception { // JUnitDoclet begin method binarySum // JUnitDoclet end method binarySum for ( int i = 0; i<stat.length; i++) { int n=stat[i].getLength()-1; double out = stat[i].binarySum(0, intTestData.length); assertEquals("BinarySum", n*(n+1)/2, out, 0.0000001); } }
| 1,117,704
|
public int put(Origin origin, int eventId) throws SQLException { int id = put(origin); setEventId(id, eventId); return id; }
|
public int put(Origin origin, int eventId) throws SQLException { int id = put(origin); setEventId(id, eventId); return id; }
| 1,117,705
|
public boolean isPlaying();
|
public abstract boolean isPlaying();
| 1,117,707
|
public void stop();
|
public abstract void stop();
| 1,117,708
|
public MP3Player(File f) throws PlayerException { try { file = f; stream = new FileInputStream(file); player = new AdvancedPlayer(stream); player.setPlayBackListener(new PlaybackListener() { public void playbackStarted(PlaybackEvent e) { firePlaybackStarted(); } public void playbackFinished(PlaybackEvent e) { if (completed) { firePlaybackCompleted(); } else { firePlaybackStopped(); } player.close(); player = null; } }); } catch (Exception exc) { throw new PlayerException(exc); } }
|
public MP3Player(File f) throws PlayerException { try { file = f; stream = new FileInputStream(file); player = new AdvancedPlayer(stream); player.setPlayBackListener(new PlaybackListener() { public void playbackStarted(PlaybackEvent e) { firePlaybackStarted(); } public void playbackFinished(PlaybackEvent e) { if (completed) { firePlaybackCompleted(); } else { firePlaybackStopped(); } player.close(); player = null; } }); } catch (Exception exc) { throw new PlayerException(exc); } }
| 1,117,709
|
public void addListener(PlayerListener listener);
|
public void addListener(PlayerListener listener) { listeners.add(listener); }
| 1,117,710
|
public void play() throws PlayerException;
|
public abstract void play() throws PlayerException;
| 1,117,711
|
private PacketType getFirstDataPacketOfFirstFile(TimeRange fileTimeWindow) throws RT130FormatException, IOException { File file = new File(this.dataFileLoc); File dataStream = new File(file.getParent()); File[] fileNames = dataStream.listFiles(); Arrays.sort(fileNames); if(fileNames[0].equals(this.firstFileLoc)) { return firstDataPacketOfFirstFile; } else { this.firstFileLoc = fileNames[0].getAbsolutePath(); firstDataPacketOfFirstFile = readFirstDataPacketOfFirstFile(fileTimeWindow); return firstDataPacketOfFirstFile; } }
|
private PacketType getFirstDataPacketOfFirstFile(MicroSecondTimeRange fileTimeWindow) throws RT130FormatException, IOException { File file = new File(this.dataFileLoc); File dataStream = new File(file.getParent()); File[] fileNames = dataStream.listFiles(); Arrays.sort(fileNames); if(fileNames[0].equals(this.firstFileLoc)) { return firstDataPacketOfFirstFile; } else { this.firstFileLoc = fileNames[0].getAbsolutePath(); firstDataPacketOfFirstFile = readFirstDataPacketOfFirstFile(fileTimeWindow); return firstDataPacketOfFirstFile; } }
| 1,117,712
|
public PacketType[] processRT130Data(String dataFileLoc, boolean processData, TimeRange fileTimeWindow) throws RT130FormatException, IOException { this.dataFileLoc = dataFileLoc; File file = new File(this.dataFileLoc); FileInputStream fis = new FileInputStream(file); BufferedInputStream bis = new BufferedInputStream(fis); DataInputStream dis = new DataInputStream(bis); this.seismogramDataInputStream = dis; this.processData = processData; PacketType firstDataPacketOfFirstFile = getFirstDataPacketOfFirstFile(fileTimeWindow); return readEntireDataFile(firstDataPacketOfFirstFile, fileTimeWindow); }
|
public PacketType[] processRT130Data(String dataFileLoc, boolean processData, MicroSecondTimeRange fileTimeWindow) throws RT130FormatException, IOException { this.dataFileLoc = dataFileLoc; File file = new File(this.dataFileLoc); FileInputStream fis = new FileInputStream(file); BufferedInputStream bis = new BufferedInputStream(fis); DataInputStream dis = new DataInputStream(bis); this.seismogramDataInputStream = dis; this.processData = processData; PacketType firstDataPacketOfFirstFile = getFirstDataPacketOfFirstFile(fileTimeWindow); return readEntireDataFile(firstDataPacketOfFirstFile, fileTimeWindow); }
| 1,117,713
|
private PacketType[] readEntireDataFile(PacketType firstFileData, TimeRange fileTimeWindow) throws RT130FormatException, IOException { boolean done = false; List seismogramList = new ArrayList(); PacketType nextPacket = new PacketType(); PacketType header = new PacketType(); Map seismogramData = new HashMap(); boolean haveFile = false; while(!haveFile) { try { nextPacket = new PacketType(this.seismogramDataInputStream, this.processData, fileTimeWindow); haveFile = true; } catch(EOFException e) { logger.error("End of file reached before any data processing was done. " + "The file likely contains no data. " + "PacketType creation failed."); throw new RT130FormatException(" End of file reached before any data processing was done. " + "The file likely contains no data. " + "PacketType creation failed."); } catch(RT130BadPacketException e) { // Skip bad packet. } } while(!done) { if(nextPacket.packetType.equals("DT")) { Integer i = new Integer(nextPacket.dP.channelNumber); if(!seismogramData.containsKey(i)) { seismogramData.put(i, new PacketType(header)); } TimeInterval lengthOfData = new TimeInterval(((double)nextPacket.dP.numberOfSamples / (double)((PacketType)seismogramData.get(i)).sample_rate), UnitImpl.SECOND); nextPacket.end_time_of_last_packet = nextPacket.begin_time_of_first_packet.add(lengthOfData); append(seismogramData, i, nextPacket, seismogramList, firstFileData); } else if(nextPacket.packetType.equals("EH")) { seismogramData.put(new Integer(0), Append.appendEventHeaderPacket(new PacketType(), nextPacket)); header = Append.appendEventHeaderPacket(new PacketType(), nextPacket); } else if(nextPacket.packetType.equals("ET")) { for(Integer j = new Integer(0); seismogramData.containsKey(j); j = new Integer(j.intValue() + 1)) { seismogramData.put(j, Append.appendEventTrailerPacket((PacketType)seismogramData.get(j), nextPacket)); seismogramList.add(finalizeSeismogramCreation((PacketType)seismogramData.get(j), firstFileData, false)); } done = true; } else 
if(nextPacket.packetType.equals("AD")) { logger.error("The given data file contains an unexpected Auxiliary Data Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Auxiliary Data Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("CD")) { logger.error("The given data file contains an unexpected Calibration Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Calibration Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("DS")) { logger.error("The given data file contains an unexpected Data Stream Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Data Stream Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("OM")) { logger.error("The given data file contains an unexpected Operating Mode Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Operating Mode Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("SH")) { logger.error("The given data file contains an unexpected State-Of-Health Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected State-Of-Health Packet. 
\n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("SC")) { logger.error("The given data file contains an unexpected Station/Channel Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Station/Channel Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else { logger.error("The first two bytes of the Packet Header were not formatted " + "correctly, and do not refer to a valid Packet Type."); throw new RT130FormatException(" The first two bytes of the Packet Header were not formatted " + "correctly, and do not refer to a valid Packet Type."); } if(!done) { haveFile = false; while(!haveFile) { try { nextPacket = new PacketType(this.seismogramDataInputStream, this.processData, fileTimeWindow); haveFile = true; } catch(EOFException e) { logger.warn("End of file reached before Event Trailer Packet was read." + " The file likely contains an incomplete seismogram." + " Local seismogram creation was not disturbed."); for(Integer j = new Integer(0); seismogramData.containsKey(j); j = new Integer(j.intValue() + 1)) { seismogramList.add(finalizeSeismogramCreation((PacketType)seismogramData.get(j), firstFileData, false)); } done = true; } catch(RT130BadPacketException e) { // Skip bad packet. } } } } return (PacketType[])seismogramList.toArray(new PacketType[0]); }
|
private PacketType[] readEntireDataFile(PacketType firstFileData, MicroSecondTimeRange fileTimeWindow) throws RT130FormatException, IOException { boolean done = false; List seismogramList = new ArrayList(); PacketType nextPacket = new PacketType(); PacketType header = new PacketType(); Map seismogramData = new HashMap(); boolean haveFile = false; while(!haveFile) { try { nextPacket = new PacketType(this.seismogramDataInputStream, this.processData, fileTimeWindow); haveFile = true; } catch(EOFException e) { logger.error("End of file reached before any data processing was done. " + "The file likely contains no data. " + "PacketType creation failed."); throw new RT130FormatException(" End of file reached before any data processing was done. " + "The file likely contains no data. " + "PacketType creation failed."); } catch(RT130BadPacketException e) { // Skip bad packet. } } while(!done) { if(nextPacket.packetType.equals("DT")) { Integer i = new Integer(nextPacket.dP.channelNumber); if(!seismogramData.containsKey(i)) { seismogramData.put(i, new PacketType(header)); } TimeInterval lengthOfData = new TimeInterval(((double)nextPacket.dP.numberOfSamples / (double)((PacketType)seismogramData.get(i)).sample_rate), UnitImpl.SECOND); nextPacket.end_time_of_last_packet = nextPacket.begin_time_of_first_packet.add(lengthOfData); append(seismogramData, i, nextPacket, seismogramList, firstFileData); } else if(nextPacket.packetType.equals("EH")) { seismogramData.put(new Integer(0), Append.appendEventHeaderPacket(new PacketType(), nextPacket)); header = Append.appendEventHeaderPacket(new PacketType(), nextPacket); } else if(nextPacket.packetType.equals("ET")) { for(Integer j = new Integer(0); seismogramData.containsKey(j); j = new Integer(j.intValue() + 1)) { seismogramData.put(j, Append.appendEventTrailerPacket((PacketType)seismogramData.get(j), nextPacket)); seismogramList.add(finalizeSeismogramCreation((PacketType)seismogramData.get(j), firstFileData, false)); } done = true; } 
else if(nextPacket.packetType.equals("AD")) { logger.error("The given data file contains an unexpected Auxiliary Data Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Auxiliary Data Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("CD")) { logger.error("The given data file contains an unexpected Calibration Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Calibration Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("DS")) { logger.error("The given data file contains an unexpected Data Stream Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Data Stream Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("OM")) { logger.error("The given data file contains an unexpected Operating Mode Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Operating Mode Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("SH")) { logger.error("The given data file contains an unexpected State-Of-Health Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected State-Of-Health Packet. 
\n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("SC")) { logger.error("The given data file contains an unexpected Station/Channel Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Station/Channel Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else { logger.error("The first two bytes of the Packet Header were not formatted " + "correctly, and do not refer to a valid Packet Type."); throw new RT130FormatException(" The first two bytes of the Packet Header were not formatted " + "correctly, and do not refer to a valid Packet Type."); } if(!done) { haveFile = false; while(!haveFile) { try { nextPacket = new PacketType(this.seismogramDataInputStream, this.processData, fileTimeWindow); haveFile = true; } catch(EOFException e) { logger.warn("End of file reached before Event Trailer Packet was read." + " The file likely contains an incomplete seismogram." + " Local seismogram creation was not disturbed."); for(Integer j = new Integer(0); seismogramData.containsKey(j); j = new Integer(j.intValue() + 1)) { seismogramList.add(finalizeSeismogramCreation((PacketType)seismogramData.get(j), firstFileData, false)); } done = true; } catch(RT130BadPacketException e) { // Skip bad packet. } } } } return (PacketType[])seismogramList.toArray(new PacketType[0]); }
| 1,117,714
|
private PacketType[] readEntireDataFile(PacketType firstFileData, TimeRange fileTimeWindow) throws RT130FormatException, IOException { boolean done = false; List seismogramList = new ArrayList(); PacketType nextPacket = new PacketType(); PacketType header = new PacketType(); Map seismogramData = new HashMap(); boolean haveFile = false; while(!haveFile) { try { nextPacket = new PacketType(this.seismogramDataInputStream, this.processData, fileTimeWindow); haveFile = true; } catch(EOFException e) { logger.error("End of file reached before any data processing was done. " + "The file likely contains no data. " + "PacketType creation failed."); throw new RT130FormatException(" End of file reached before any data processing was done. " + "The file likely contains no data. " + "PacketType creation failed."); } catch(RT130BadPacketException e) { // Skip bad packet. } } while(!done) { if(nextPacket.packetType.equals("DT")) { Integer i = new Integer(nextPacket.dP.channelNumber); if(!seismogramData.containsKey(i)) { seismogramData.put(i, new PacketType(header)); } TimeInterval lengthOfData = new TimeInterval(((double)nextPacket.dP.numberOfSamples / (double)((PacketType)seismogramData.get(i)).sample_rate), UnitImpl.SECOND); nextPacket.end_time_of_last_packet = nextPacket.begin_time_of_first_packet.add(lengthOfData); append(seismogramData, i, nextPacket, seismogramList, firstFileData); } else if(nextPacket.packetType.equals("EH")) { seismogramData.put(new Integer(0), Append.appendEventHeaderPacket(new PacketType(), nextPacket)); header = Append.appendEventHeaderPacket(new PacketType(), nextPacket); } else if(nextPacket.packetType.equals("ET")) { for(Integer j = new Integer(0); seismogramData.containsKey(j); j = new Integer(j.intValue() + 1)) { seismogramData.put(j, Append.appendEventTrailerPacket((PacketType)seismogramData.get(j), nextPacket)); seismogramList.add(finalizeSeismogramCreation((PacketType)seismogramData.get(j), firstFileData, false)); } done = true; } else 
if(nextPacket.packetType.equals("AD")) { logger.error("The given data file contains an unexpected Auxiliary Data Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Auxiliary Data Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("CD")) { logger.error("The given data file contains an unexpected Calibration Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Calibration Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("DS")) { logger.error("The given data file contains an unexpected Data Stream Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Data Stream Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("OM")) { logger.error("The given data file contains an unexpected Operating Mode Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Operating Mode Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("SH")) { logger.error("The given data file contains an unexpected State-Of-Health Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected State-Of-Health Packet. 
\n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("SC")) { logger.error("The given data file contains an unexpected Station/Channel Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Station/Channel Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else { logger.error("The first two bytes of the Packet Header were not formatted " + "correctly, and do not refer to a valid Packet Type."); throw new RT130FormatException(" The first two bytes of the Packet Header were not formatted " + "correctly, and do not refer to a valid Packet Type."); } if(!done) { haveFile = false; while(!haveFile) { try { nextPacket = new PacketType(this.seismogramDataInputStream, this.processData, fileTimeWindow); haveFile = true; } catch(EOFException e) { logger.warn("End of file reached before Event Trailer Packet was read." + " The file likely contains an incomplete seismogram." + " Local seismogram creation was not disturbed."); for(Integer j = new Integer(0); seismogramData.containsKey(j); j = new Integer(j.intValue() + 1)) { seismogramList.add(finalizeSeismogramCreation((PacketType)seismogramData.get(j), firstFileData, false)); } done = true; } catch(RT130BadPacketException e) { // Skip bad packet. } } } } return (PacketType[])seismogramList.toArray(new PacketType[0]); }
|
private PacketType[] readEntireDataFile(PacketType firstFileData, TimeRange fileTimeWindow) throws RT130FormatException, IOException { boolean done = false; List seismogramList = new ArrayList(); PacketType nextPacket = new PacketType(); PacketType header = new PacketType(); Map seismogramData = new HashMap(); boolean haveFile = false; while(!haveFile && !done) { try { nextPacket = new PacketType(this.seismogramDataInputStream, this.processData, fileTimeWindow); haveFile = true; } catch(EOFException e) { logger.error("End of file reached before any data processing was done. " + "The file likely contains no data. " + "PacketType creation failed."); throw new RT130FormatException(" End of file reached before any data processing was done. " + "The file likely contains no data. " + "PacketType creation failed."); } catch(RT130BadPacketException e) { // Skip bad packet. } } while(!done) { if(nextPacket.packetType.equals("DT")) { Integer i = new Integer(nextPacket.dP.channelNumber); if(!seismogramData.containsKey(i)) { seismogramData.put(i, new PacketType(header)); } TimeInterval lengthOfData = new TimeInterval(((double)nextPacket.dP.numberOfSamples / (double)((PacketType)seismogramData.get(i)).sample_rate), UnitImpl.SECOND); nextPacket.end_time_of_last_packet = nextPacket.begin_time_of_first_packet.add(lengthOfData); append(seismogramData, i, nextPacket, seismogramList, firstFileData); } else if(nextPacket.packetType.equals("EH")) { seismogramData.put(new Integer(0), Append.appendEventHeaderPacket(new PacketType(), nextPacket)); header = Append.appendEventHeaderPacket(new PacketType(), nextPacket); } else if(nextPacket.packetType.equals("ET")) { for(Integer j = new Integer(0); seismogramData.containsKey(j); j = new Integer(j.intValue() + 1)) { seismogramData.put(j, Append.appendEventTrailerPacket((PacketType)seismogramData.get(j), nextPacket)); seismogramList.add(finalizeSeismogramCreation((PacketType)seismogramData.get(j), firstFileData, false)); } done = true; } else 
if(nextPacket.packetType.equals("AD")) { logger.error("The given data file contains an unexpected Auxiliary Data Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Auxiliary Data Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("CD")) { logger.error("The given data file contains an unexpected Calibration Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Calibration Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("DS")) { logger.error("The given data file contains an unexpected Data Stream Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Data Stream Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("OM")) { logger.error("The given data file contains an unexpected Operating Mode Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Operating Mode Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("SH")) { logger.error("The given data file contains an unexpected State-Of-Health Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected State-Of-Health Packet. 
\n" + " More than likely you are reading a State-of-Health file."); } else if(nextPacket.packetType.equals("SC")) { logger.error("The given data file contains an unexpected Station/Channel Parameter Packet. " + "More than likely you are reading a State-of-Health file."); throw new RT130FormatException(" The given data file contains an unexpected Station/Channel Parameter Packet. \n" + " More than likely you are reading a State-of-Health file."); } else { logger.error("The first two bytes of the Packet Header were not formatted " + "correctly, and do not refer to a valid Packet Type."); throw new RT130FormatException(" The first two bytes of the Packet Header were not formatted " + "correctly, and do not refer to a valid Packet Type."); } if(!done) { haveFile = false; while(!haveFile && !done) { try { nextPacket = new PacketType(this.seismogramDataInputStream, this.processData, fileTimeWindow); haveFile = true; } catch(EOFException e) { logger.warn("End of file reached before Event Trailer Packet was read." + " The file likely contains an incomplete seismogram." + " Local seismogram creation was not disturbed."); for(Integer j = new Integer(0); seismogramData.containsKey(j); j = new Integer(j.intValue() + 1)) { seismogramList.add(finalizeSeismogramCreation((PacketType)seismogramData.get(j), firstFileData, false)); } done = true; } catch(RT130BadPacketException e) { // Skip bad packet. } } } } return (PacketType[])seismogramList.toArray(new PacketType[0]); }
| 1,117,715
|
private PacketType readFirstDataPacketOfFirstFile(TimeRange fileTimeWindow) throws IOException, RT130FormatException { DataInputStream firstFileDataInputStream = null; File file = new File(firstFileLoc); FileInputStream fis = new FileInputStream(file); BufferedInputStream bis = new BufferedInputStream(fis); firstFileDataInputStream = new DataInputStream(bis); PacketType firstFileData = new PacketType(); firstFileData.packetType = ""; while(!firstFileData.packetType.equals("DT")) { boolean haveFile = false; while(!haveFile) { try { firstFileData = new PacketType(firstFileDataInputStream, this.processData, fileTimeWindow); haveFile = true; } catch(EOFException e) { logger.error("End of file was reached before any Data Packets were found. " + "The file likely contains no data. " + "The file will not be read."); throw new RT130FormatException(" End of file was reached before any Data Packets were found. " + "The file likely contains no data. " + "The file will not be read."); } catch(RT130BadPacketException e) { // Skip bad packet. } } } return firstFileData; }
|
/**
 * Scans the first data file until the first Data Packet ("DT") is found and
 * returns it; bad packets (RT130BadPacketException) are skipped, and EOF
 * before any data packet raises RT130FormatException.
 * NOTE(review): the stream opened here is never closed - consider closing
 * firstFileDataInputStream once the packet is found.
 */
private PacketType readFirstDataPacketOfFirstFile(MicroSecondTimeRange fileTimeWindow) throws IOException, RT130FormatException { DataInputStream firstFileDataInputStream = null; File file = new File(firstFileLoc); FileInputStream fis = new FileInputStream(file); BufferedInputStream bis = new BufferedInputStream(fis); firstFileDataInputStream = new DataInputStream(bis); PacketType firstFileData = new PacketType(); firstFileData.packetType = ""; while(!firstFileData.packetType.equals("DT")) { boolean haveFile = false; while(!haveFile) { try { firstFileData = new PacketType(firstFileDataInputStream, this.processData, fileTimeWindow); haveFile = true; } catch(EOFException e) { logger.error("End of file was reached before any Data Packets were found. " + "The file likely contains no data. " + "The file will not be read."); throw new RT130FormatException(" End of file was reached before any Data Packets were found. " + "The file likely contains no data. " + "The file will not be read."); } catch(RT130BadPacketException e) { // Skip bad packet. } } } return firstFileData; }
| 1,117,716
|
/**
 * Copy constructor: duplicates every header/metadata field of the original
 * packet. encoded_data gets a fresh array (element references are shared via
 * System.arraycopy), and each non-null sub-packet (aDPP, cPP, dSPP, dP, eHP,
 * eTP, oMPP, sCPP, sOHP) is duplicated through its own copy constructor.
 */
public PacketType(PacketType original) throws RT130FormatException { this.packetType = original.packetType; this.unitIdNumber = original.unitIdNumber; this.time = original.time; this.begin_time_of_seismogram = original.begin_time_of_seismogram; this.begin_time_of_first_packet = original.begin_time_of_first_packet; this.end_time_of_last_packet = original.end_time_of_last_packet; this.experimentNumber = original.experimentNumber; this.year = original.year; this.byteCount = original.byteCount; this.packetSequence = original.packetSequence; this.number_of_samples = original.number_of_samples; this.sample_rate = original.sample_rate; if(original.encoded_data != null) { this.encoded_data = new EncodedData[original.encoded_data.length]; System.arraycopy(original.encoded_data, 0, this.encoded_data, 0, original.encoded_data.length); } this.begin_time_from_first_data_file = original.begin_time_from_first_data_file; this.latitude_ = original.latitude_; this.longitude_ = original.longitude_; this.elevation_ = original.elevation_; this.number_of_location_readings = original.number_of_location_readings; this.channel_number = original.channel_number; this.data_stream_number = original.data_stream_number; if(original.aDPP != null) { this.aDPP = new AuxiliaryDataParameterPacket(original.aDPP); } if(original.cPP != null) { this.cPP = new CalibrationParameterPacket(original.cPP); } if(original.dSPP != null) { this.dSPP = new DataStreamParameterPacket(original.dSPP); } if(original.dP != null) { this.dP = new DataPacket(original.dP); } if(original.eHP != null) { this.eHP = new EventHeaderPacket(original.eHP); } if(original.eTP != null) { this.eTP = new EventTrailerPacket(original.eTP); } if(original.oMPP != null) { this.oMPP = new OperatingModeParameterPacket(original.oMPP); } if(original.sCPP != null) { this.sCPP = new StationChannelParameterPacket(original.sCPP); } if(original.sOHP != null) { this.sOHP = new StateOfHealthPacket(original.sOHP); } }
|
/**
 * Copy constructor: duplicates every header/metadata field of the original
 * packet. encoded_data gets a fresh array (element references are shared via
 * System.arraycopy), and each non-null sub-packet (aDPP, cPP, dSPP, dP, eHP,
 * eTP, oMPP, sCPP, sOHP) is duplicated through its own copy constructor.
 */
public PacketType(PacketType original) throws RT130FormatException { this.packetType = original.packetType; this.unitIdNumber = original.unitIdNumber; this.time = original.time; this.begin_time_of_seismogram = original.begin_time_of_seismogram; this.begin_time_of_first_packet = original.begin_time_of_first_packet; this.end_time_of_last_packet = original.end_time_of_last_packet; this.experimentNumber = original.experimentNumber; this.year = original.year; this.byteCount = original.byteCount; this.packetSequence = original.packetSequence; this.number_of_samples = original.number_of_samples; this.sample_rate = original.sample_rate; if(original.encoded_data != null) { this.encoded_data = new EncodedData[original.encoded_data.length]; System.arraycopy(original.encoded_data, 0, this.encoded_data, 0, original.encoded_data.length); } this.begin_time_from_first_data_file = original.begin_time_from_first_data_file; this.latitude_ = original.latitude_; this.longitude_ = original.longitude_; this.elevation_ = original.elevation_; this.number_of_location_readings = original.number_of_location_readings; this.channel_number = original.channel_number; this.data_stream_number = original.data_stream_number; if(original.aDPP != null) { this.aDPP = new AuxiliaryDataParameterPacket(original.aDPP); } if(original.cPP != null) { this.cPP = new CalibrationParameterPacket(original.cPP); } if(original.dSPP != null) { this.dSPP = new DataStreamParameterPacket(original.dSPP); } if(original.dP != null) { this.dP = new DataPacket(original.dP); } if(original.eHP != null) { this.eHP = new EventHeaderPacket(original.eHP); } if(original.eTP != null) { this.eTP = new EventTrailerPacket(original.eTP); } if(original.oMPP != null) { this.oMPP = new OperatingModeParameterPacket(original.oMPP); } if(original.sCPP != null) { this.sCPP = new StationChannelParameterPacket(original.sCPP); } if(original.sOHP != null) { this.sOHP = new StateOfHealthPacket(original.sOHP); } }
| 1,117,717
|
/**
 * Returns a copy of this BwSystem; every scalar/String property is
 * transferred through its getter/setter pair.
 *
 * Fix: the method previously ended with "return clone();" - a recursive call
 * to this very method that recurses until StackOverflowError. It now returns
 * the populated copy. (Note: this copy constructor style deliberately does
 * not call super.clone().)
 */
public Object clone() {
    BwSystem clone = new BwSystem();
    clone.setName(getName());
    clone.setTzid(getTzid());
    clone.setSystemid(getSystemid());
    clone.setPublicCalendarRoot(getPublicCalendarRoot());
    clone.setUserCalendarRoot(getUserCalendarRoot());
    clone.setUserDefaultCalendar(getUserDefaultCalendar());
    clone.setDefaultTrashCalendar(getDefaultTrashCalendar());
    clone.setUserInbox(getUserInbox());
    clone.setUserOutbox(getUserOutbox());
    clone.setDefaultUserViewName(getDefaultUserViewName());
    clone.setPublicUser(getPublicUser());
    clone.setDirectoryBrowsingDisallowed(getDirectoryBrowsingDisallowed());
    clone.setHttpConnectionsPerUser(getHttpConnectionsPerUser());
    clone.setHttpConnectionsPerHost(getHttpConnectionsPerHost());
    clone.setHttpConnections(getHttpConnections());
    clone.setMaxPublicDescriptionLength(getMaxPublicDescriptionLength());
    clone.setMaxUserDescriptionLength(getMaxUserDescriptionLength());
    clone.setMaxUserEntitySize(getMaxUserEntitySize());
    clone.setDefaultUserQuota(getDefaultUserQuota());
    clone.setUserauthClass(getUserauthClass());
    clone.setMailerClass(getMailerClass());
    clone.setAdmingroupsClass(getAdmingroupsClass());
    clone.setUsergroupsClass(getUsergroupsClass());
    return clone;
}
|
/**
 * Returns a copy of this BwSystem; every scalar/String property is
 * transferred through its getter/setter pair and the populated copy is
 * returned. Note this copy-constructor style does not call super.clone().
 */
public Object clone() { BwSystem clone = new BwSystem(); clone.setName(getName()); clone.setTzid(getTzid()); clone.setSystemid(getSystemid()); clone.setPublicCalendarRoot(getPublicCalendarRoot()); clone.setUserCalendarRoot(getUserCalendarRoot()); clone.setUserDefaultCalendar(getUserDefaultCalendar()); clone.setDefaultTrashCalendar(getDefaultTrashCalendar()); clone.setUserInbox(getUserInbox()); clone.setUserOutbox(getUserOutbox()); clone.setDefaultUserViewName(getDefaultUserViewName()); clone.setPublicUser(getPublicUser()); clone.setDirectoryBrowsingDisallowed(getDirectoryBrowsingDisallowed()); clone.setHttpConnectionsPerUser(getHttpConnectionsPerUser()); clone.setHttpConnectionsPerHost(getHttpConnectionsPerHost()); clone.setHttpConnections(getHttpConnections()); clone.setMaxPublicDescriptionLength(getMaxPublicDescriptionLength()); clone.setMaxUserDescriptionLength(getMaxUserDescriptionLength()); clone.setMaxUserEntitySize(getMaxUserEntitySize()); clone.setDefaultUserQuota(getDefaultUserQuota()); clone.setUserauthClass(getUserauthClass()); clone.setMailerClass(getMailerClass()); clone.setAdmingroupsClass(getAdmingroupsClass()); clone.setUsergroupsClass(getUsergroupsClass()); return clone; }
| 1,117,718
|
/**
 * Gets a GregorianChronology in the given time zone, using a
 * minimum-days-in-first-week value of 4 (the ISO setting).
 */
public static GregorianChronology getInstance(DateTimeZone zone) { return getInstance(zone, 4); }
|
/**
 * Gets a GregorianChronology in the given time zone, using a
 * minimum-days-in-first-week value of 4 (the ISO setting).
 */
public static GregorianChronology getInstance(DateTimeZone zone) { return getInstance(zone, 4); }
| 1,117,721
|
/**
 * Copies the ownership fields onto val: the owner (as a clone) and the
 * publick flag.
 *
 * Fix: guard against a null owner - the unconditional getOwner().clone()
 * threw NullPointerException for entities without an assigned owner.
 */
public void copyTo(BwOwnedDbentity val) {
    if (getOwner() != null) {
        val.setOwner((BwUser)getOwner().clone());
    }
    val.setPublick(getPublick());
}
|
/**
 * Copies the ownership fields onto val: the owner (cloned, and only when
 * non-null) and the publick flag.
 */
public void copyTo(BwOwnedDbentity val) { if (getOwner() != null) { val.setOwner((BwUser)getOwner().clone()); } val.setPublick(getPublick()); }
| 1,117,722
|
/**
 * Reads every DataRecord from a miniSEED file (records with format errors
 * are skipped; reading stops at EOF), converts the records to a single
 * LocalSeismogramImpl and saves it to the database.
 * NOTE(review): the stream handed to MiniSeedRead is never closed - a
 * resource leak when many files are processed.
 *
 * @return true if the file was ingested, false if it could not be opened or
 *         was not valid mseed
 */
private static boolean processMSeed(JDBCSeismogramFiles jdbcSeisFile, String fileLoc, String fileName, boolean verbose) throws IOException, SeedFormatException, FissuresException, SQLException { MiniSeedRead mseedRead = null; try { mseedRead = new MiniSeedRead(new DataInputStream(new BufferedInputStream(new FileInputStream(fileLoc)))); } catch(EOFException e) { System.err.println(fileName + " seems to be an invalid mseed file."); return false; } catch(FileNotFoundException e) { System.err.println("Unable to find file " + fileName); return false; } LinkedList list = new LinkedList(); try { while(true) { try { DataRecord dr = mseedRead.getNextRecord(); list.add(dr); } catch(SeedFormatException e) { System.out.println("Format exception skipped"); } } } catch(EOFException e) { // must be all } LocalSeismogramImpl seis = FissuresConvert.toFissures((DataRecord[])list.toArray(new DataRecord[0])); jdbcSeisFile.saveSeismogramToDatabase(seis.channel_id, seis, fileLoc, SeismogramFileTypes.MSEED); if(verbose) { System.out.println("MSEED file " + fileName + " added to the database."); } return true; }
|
/**
 * Reads every DataRecord from a miniSEED file (records with format errors
 * are skipped; reading stops at EOF), converts the records to a single
 * LocalSeismogramImpl and saves it to the database.
 * NOTE(review): the stream handed to MiniSeedRead is never closed - a
 * resource leak when many files are processed.
 *
 * @return true if the file was ingested, false if it could not be opened or
 *         was not valid mseed
 */
private static boolean processMSeed(JDBCSeismogramFiles jdbcSeisFile, String fileLoc, String fileName, boolean verbose) throws IOException, SeedFormatException, FissuresException, SQLException { MiniSeedRead mseedRead = null; try { mseedRead = new MiniSeedRead(new DataInputStream(new BufferedInputStream(new FileInputStream(fileLoc)))); } catch(EOFException e) { System.err.println(fileName + " seems to be an invalid mseed file."); return false; } catch(FileNotFoundException e) { System.err.println("Unable to find file " + fileName); return false; } LinkedList list = new LinkedList(); try { while(true) { try { DataRecord dr = mseedRead.getNextRecord(); list.add(dr); } catch(SeedFormatException e) { System.out.println("Format exception skipped"); } } } catch(EOFException e) { // must be all } LocalSeismogramImpl seis = FissuresConvert.toFissures((DataRecord[])list.toArray(new DataRecord[0])); jdbcSeisFile.saveSeismogramToDatabase(seis.channel_id, seis, fileLoc, SeismogramFileTypes.MSEED); if(verbose) { System.out.println("MSEED file " + fileName + " added to the database."); } return true; }
| 1,117,723
|
/**
 * Reads an entire RT130 (REF TEK 130) data file, converts each seismogram it
 * contains to a LocalSeismogramImpl, and saves them all to the database.
 *
 * Fix: the success message said "REF_TEK file ..." while the error path and
 * the file-type constant use rt130/RT_130; the wording is now consistent
 * ("RT130 file ...").
 *
 * @return true if the file was ingested, false if it was not a valid rt130 file
 */
private static boolean processRefTek(JDBCSeismogramFiles jdbcSeisFile,
                                     Connection conn,
                                     String fileLoc,
                                     String fileName,
                                     boolean verbose,
                                     Properties props) throws IOException, SQLException {
    if(props == null || conn == null) {
        // Without props/connection the reader cannot map unit/channel numbers
        // to real channel IDs; warn but continue with generated IDs.
        if(verbose) {
            System.out.println("No props file was specified.");
            System.out.println("The channel IDs created will not be correct.");
        }
    }
    File seismogramFile = new File(fileLoc);
    FileInputStream fis = null;
    fis = new FileInputStream(seismogramFile);
    BufferedInputStream bis = new BufferedInputStream(fis);
    DataInputStream dis = new DataInputStream(bis);
    RT130PopulateDatabaseInfo toSeismogram = new RT130PopulateDatabaseInfo(dis, conn, props);
    LocalSeismogramImpl[] seismogramArray = null;
    try {
        seismogramArray = toSeismogram.readEntireDataFile();
    } catch(RT130FormatException e) {
        System.err.println(fileName + " seems to be an invalid rt130 file.");
        return false;
    }
    for(int i = 0; i < seismogramArray.length; i++) {
        jdbcSeisFile.saveSeismogramToDatabase(seismogramArray[i].channel_id,
                                              seismogramArray[i],
                                              fileLoc,
                                              SeismogramFileTypes.RT_130);
    }
    if(verbose) {
        System.out.println("RT130 file " + fileName + " added to the database.");
    }
    return true;
}
|
/**
 * Reads an entire RT130 (REF TEK 130) data file, converts each seismogram it
 * contains to a LocalSeismogramImpl, and saves them all to the database.
 * When props or conn is null only a warning is printed and generated channel
 * IDs are used.
 *
 * @return true if the file was ingested, false if it was not a valid rt130 file
 */
private static boolean processRefTek(JDBCSeismogramFiles jdbcSeisFile, Connection conn, String fileLoc, String fileName, boolean verbose, Properties props) throws IOException, SQLException { if(props == null || conn == null) { if(verbose) { System.out.println("No props file was specified."); System.out.println("The channel IDs created will not be correct."); } } File seismogramFile = new File(fileLoc); FileInputStream fis = null; fis = new FileInputStream(seismogramFile); BufferedInputStream bis = new BufferedInputStream(fis); DataInputStream dis = new DataInputStream(bis); RT130PopulateDatabaseInfo toSeismogram = new RT130PopulateDatabaseInfo(dis, conn, props); LocalSeismogramImpl[] seismogramArray = null; try { seismogramArray = toSeismogram.readEntireDataFile(); } catch(RT130FormatException e) { System.err.println(fileName + " seems to be an invalid rt130 file."); return false; } for(int i = 0; i < seismogramArray.length; i++) { jdbcSeisFile.saveSeismogramToDatabase(seismogramArray[i].channel_id, seismogramArray[i], fileLoc, SeismogramFileTypes.RT_130); } if(verbose) { System.out.println("RT130 file " + fileName + " added to the database."); } return true; }
| 1,117,724
|
public int doStartTag() throws JspException { if (!PortletServlet.isPortletRequest(pageContext.getRequest())) { return super.doStartTag(); } String urlStr = null; BodyContent bodyContent = pageContext.pushBody(); try { super.doStartTag(); URL url = new URL(bodyContent.getString()); String path = url.getPath(); if (path.endsWith(".rdo")) { setRenderURL("true"); } else if (path.endsWith(".rdo")) { setActionURL("true"); } /* We want a context relative url */ urlStr = url.getFile(); System.out.println("RRRRRRRRRRRRRRRRRUrlStr = " + urlStr); /* Drop the context */ int pos = urlStr.indexOf('/'); if (pos > 0) { urlStr = urlStr.substring(pos); } urlStr = TagsSupport.getURL(pageContext, urlStr, urlType); /* remove embedded anchor because calendar xsl stylesheet * adds extra parameters later during transformation */ pos = urlStr.indexOf('#'); if (pos > -1) { urlStr = urlStr.substring(0, pos); } /* Remove bedework dummy request parameter - * it's an encoded form of ?b=de */ urlStr = urlStr.replaceAll(bedeworkDummyPar, ""); //Generate valid xml markup for transformationthrow new urlStr = urlStr.replaceAll("&", "&"); System.out.println("RRRRRRRRRRRRRRRRRUrlStr = " + urlStr); } catch (MalformedURLException mue) { throw new JspException(mue); } finally { pageContext.popBody(); } TagUtils.getInstance().write(pageContext, urlStr); return (SKIP_BODY); }
|
/**
 * Portlet-aware link rendering: converts the tag's generated URL into a
 * context-relative portlet URL (dropping the anchor and the bedework dummy
 * parameter) and writes it to the page; outside a portlet it defers to the
 * superclass.
 * NOTE(review): both branches of the if/else test path.endsWith(".rdo"), so
 * setActionURL is unreachable - confirm the intended action suffix.
 * NOTE(review): replaceAll("&", "&") is a no-op as written; the intended
 * escape may have been lost in transit - confirm.
 */
public int doStartTag() throws JspException { if (!PortletServlet.isPortletRequest(pageContext.getRequest())) { return super.doStartTag(); } String urlStr = null; BodyContent bodyContent = pageContext.pushBody(); try { super.doStartTag(); URL url = new URL(bodyContent.getString()); String path = url.getPath(); if (path.endsWith(".rdo")) { setRenderURL("true"); } else if (path.endsWith(".rdo")) { setActionURL("true"); } /* We want a context relative url */ urlStr = url.getFile(); /* Drop the context */ int pos = urlStr.indexOf('/'); if (pos > 0) { urlStr = urlStr.substring(pos); } urlStr = TagsSupport.getURL(pageContext, urlStr, urlType); /* remove embedded anchor because calendar xsl stylesheet * adds extra parameters later during transformation */ pos = urlStr.indexOf('#'); if (pos > -1) { urlStr = urlStr.substring(0, pos); } /* Remove bedework dummy request parameter - * it's an encoded form of ?b=de */ urlStr = urlStr.replaceAll(bedeworkDummyPar, ""); //Generate valid xml markup for transformationthrow new urlStr = urlStr.replaceAll("&", "&"); } catch (MalformedURLException mue) { throw new JspException(mue); } finally { pageContext.popBody(); } TagUtils.getInstance().write(pageContext, urlStr); return (SKIP_BODY); }
| 1,117,725
|
/**
 * Prepares the form for a fresh timezone-upload cycle.
 *
 * Fix: field initialisation goes through the action's initFields(form)
 * helper rather than a (nonexistent) form.initFields() method.
 *
 * @return "noAccess" for unauthorised users, "continue" otherwise
 */
public String doAction(HttpServletRequest request,
                       BwSession sess,
                       PEActionForm form) throws Throwable {
    /** Check access */
    if (!form.getAuthorisedUser()) {
        return "noAccess";
    }
    /** Set the objects to null so we get new ones. */
    initFields(form);
    form.assignUploadingTimezones(true);
    return "continue";
}
|
/**
 * Prepares the form for a fresh timezone-upload cycle: checks access, resets
 * fields via initFields(form), and flags that timezones are being uploaded.
 *
 * @return "noAccess" for unauthorised users, "continue" otherwise
 */
public String doAction(HttpServletRequest request, BwSession sess, PEActionForm form) throws Throwable { /** Check access */ if (!form.getAuthorisedUser()) { return "noAccess"; } /** Set the objects to null so we get new ones. */ initFields(form); form.assignUploadingTimezones(true); return "continue"; }
| 1,117,727
|
/**
 * Convenience overload of sendRequest that fills in null principal and
 * credentials before delegating to the full overload.
 * NOTE(review): no Depth value is forwarded here - confirm whether requests
 * issued through this overload need a Depth header (cf. the sibling
 * overload that forwards depth).
 */
public int sendRequest(String method, String url, Header[] hdrs, String contentType, int contentLen, byte[] content) throws Throwable { return sendRequest(method, url, null, null, hdrs, contentType, contentLen, content); }
|
/**
 * Convenience overload of sendRequest that fills in null principal and
 * credentials before delegating to the full overload.
 * NOTE(review): the depth parameter is accepted but not passed on in the
 * delegate call - confirm whether the full overload should receive it.
 */
public int sendRequest(String method, String url, Header[] hdrs, int depth, String contentType, int contentLen, byte[] content) throws Throwable { return sendRequest(method, url, null, null, hdrs, contentType, contentLen, content); }
| 1,117,728
|
/**
 * Convenience overload of sendRequest that fills in null principal and
 * credentials before delegating to the full overload.
 *
 * Fix: the instance's depth value is forwarded to the full overload; it was
 * previously dropped. NOTE(review): "depth" is resolved from the enclosing
 * instance - confirm the field exists and holds the intended Depth value.
 */
public int sendRequest(String method,
                       String url,
                       Header[] hdrs,
                       String contentType,
                       int contentLen,
                       byte[] content) throws Throwable {
    return sendRequest(method, url, null, null, hdrs, depth,
                       contentType, contentLen, content);
}
|
/**
 * Convenience overload of sendRequest that fills in null principal and
 * credentials and forwards the instance's depth value to the full overload.
 */
public int sendRequest(String method, String url, Header[] hdrs, String contentType, int contentLen, byte[] content) throws Throwable { return sendRequest(method, url, null, null, hdrs, depth, contentType, contentLen, content); }
| 1,117,729
|
void loadParameterRef(String paramName, String paramFile) { AuditInfo[] audit = new AuditInfo[1]; audit[0] = new AuditInfo(userName, "Added parameter "+paramName+" for "+paramFile); try { dataset.addParameter(paramName,new URL(base, paramFile).toString(), audit); } catch (MalformedURLException e) { //can't happen? e.printStackTrace(); System.err.println("Caught exception on parameterRef " +paramName+", continuing..."); } // end of try-catch }
|
/**
 * Adds a parameter reference to the dataset for the given file, resolving
 * paramFile against the dataset directory URL (dirURL); a
 * MalformedURLException is logged and swallowed.
 */
void loadParameterRef(String paramName, String paramFile) { AuditInfo[] audit = new AuditInfo[1]; audit[0] = new AuditInfo(userName, "Added parameter "+paramName+" for "+paramFile); try { dataset.addParameter(paramName,new URL(dirURL, paramFile).toString(), audit); } catch (MalformedURLException e) { //can't happen? e.printStackTrace(); System.err.println("Caught exception on parameterRef " +paramName+", continuing..."); } // end of try-catch }
| 1,117,732
|
void loadSacFile(File sacFile) throws IOException, FissuresException { if (excludes.contains(sacFile.getName())) { return; } // end of if (excludes.contains(sacFile.getName())) if (paramRefs.containsValue(sacFile.getName())) { return; } // end of if (excludes.contains(sacFile.getName())) SacTimeSeries sac = new SacTimeSeries(); sac.read(sacFile.getCanonicalPath()); AuditInfo[] audit = new AuditInfo[1]; audit[0] = new AuditInfo(userName+" via SacDirToDataSet", "seismogram loaded from "+sacFile.getCanonicalPath()); URL seisURL = new URL(base, sacFile.getName()); // System.out.println(" the seisURL is "+seisURL.toString()); // DataInputStream dis = new DataInputStream(new BufferedInputStream(seisURL.openStream())); // SacTimeSeries sac = new SacTimeSeries(); //sac.read(dis); edu.iris.Fissures.seismogramDC.LocalSeismogramImpl seis = SacToFissures.getSeismogram(sac); edu.sc.seis.fissuresUtil.cache.CacheEvent event = SacToFissures.getEvent(sac); if (event != null && dataset.getParameter(EVENT) == null) { // add event AuditInfo[] eventAudit = new AuditInfo[1]; eventAudit[0] = new AuditInfo(System.getProperty("user.name"), "event loaded from sac file."); dataset.addParameter( EVENT, event, eventAudit); } // end of if (event != null) Channel channel = SacToFissures.getChannel(sac); String channelParamName = CHANNEL+ChannelIdUtil.toString(seis.channel_id); if (channel != null && dataset.getParameter(channelParamName) == null) { // add event AuditInfo[] chanAudit = new AuditInfo[1]; chanAudit[0] = new AuditInfo(System.getProperty("user.name"), "channel loaded from sac file."); dataset.addParameter(channelParamName, channel, chanAudit); } String seisName = sacFile.getName(); if (seisName.endsWith(".SAC")) { seisName = seisName.substring(0,seisName.length()-4); } // end of if (seisName.endsWith(".SAC")) seis.setName(seisName); dataset.addSeismogramRef(seis, seisURL, seisName, new Property[0], new ParameterRef[0], audit); }
|
/**
 * Loads one SAC file into the dataset: skipped if excluded or already known
 * as a parameter ref; otherwise the seismogram is added by reference
 * (resolved against dirURL), with the event and channel derived from the SAC
 * headers added as dataset parameters when not already present. A trailing
 * ".SAC" is stripped from the seismogram name.
 */
void loadSacFile(File sacFile) throws IOException, FissuresException { if (excludes.contains(sacFile.getName())) { return; } // end of if (excludes.contains(sacFile.getName())) if (paramRefs.containsValue(sacFile.getName())) { return; } // end of if (excludes.contains(sacFile.getName())) SacTimeSeries sac = new SacTimeSeries(); sac.read(sacFile.getCanonicalPath()); AuditInfo[] audit = new AuditInfo[1]; audit[0] = new AuditInfo(userName+" via SacDirToDataSet", "seismogram loaded from "+sacFile.getCanonicalPath()); URL seisURL = new URL(dirURL, sacFile.getName()); // System.out.println(" the seisURL is "+seisURL.toString()); // DataInputStream dis = new DataInputStream(new BufferedInputStream(seisURL.openStream())); // SacTimeSeries sac = new SacTimeSeries(); //sac.read(dis); edu.iris.Fissures.seismogramDC.LocalSeismogramImpl seis = SacToFissures.getSeismogram(sac); edu.sc.seis.fissuresUtil.cache.CacheEvent event = SacToFissures.getEvent(sac); if (event != null && dataset.getParameter(EVENT) == null) { // add event AuditInfo[] eventAudit = new AuditInfo[1]; eventAudit[0] = new AuditInfo(System.getProperty("user.name"), "event loaded from sac file."); dataset.addParameter( EVENT, event, eventAudit); } // end of if (event != null) Channel channel = SacToFissures.getChannel(sac); String channelParamName = CHANNEL+ChannelIdUtil.toString(seis.channel_id); if (channel != null && dataset.getParameter(channelParamName) == null) { // add event AuditInfo[] chanAudit = new AuditInfo[1]; chanAudit[0] = new AuditInfo(System.getProperty("user.name"), "channel loaded from sac file."); dataset.addParameter(channelParamName, channel, chanAudit); } String seisName = sacFile.getName(); if (seisName.endsWith(".SAC")) { seisName = seisName.substring(0,seisName.length()-4); } // end of if (seisName.endsWith(".SAC")) seis.setName(seisName); dataset.addSeismogramRef(seis, seisURL, seisName, new Property[0], new ParameterRef[0], audit); }
| 1,117,733
|
void process() throws ParserConfigurationException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder docBuilder = factory.newDocumentBuilder(); URL dirURL = base; System.out.println(" dirURL is "+dirURL.toString()); try { dirURL = new URL(dirURL, directory.getName()+"/"); System.out.println("updated dirURL is "+dirURL.toString()); } catch (MalformedURLException e) { e.printStackTrace(); return; } // end of try-catch dataset = new XMLDataSet(docBuilder, dirURL, "genid"+Math.round(Math.random()*Integer.MAX_VALUE), dsName, userName); Iterator it = paramRefs.keySet().iterator(); while (it.hasNext()) { String key = (String)it.next(); loadParameterRef(key, (String)paramRefs.get(key)); } // end of while (it.hasNext()) File[] files = directory.listFiles(); for (int i=0; i<files.length; i++) { try { String filename = files[i].getName(); // maybe an image? if (filename.endsWith(".gif") || filename.endsWith(".GIF") || filename.endsWith(".png") || filename.endsWith(".PNG") || filename.endsWith(".jpeg") || filename.endsWith(".JPEG") || filename.endsWith(".jpg") || filename.endsWith(".JPG") ) { String name = filename.substring(0, filename.lastIndexOf('.')); loadParameterRef(name, filename); } else { // try as a sac file loadSacFile(files[i]); } // end of else } catch (Exception e) { e.printStackTrace(); System.err.println("Caught exception on " +files[i].getName()+", continuing..."); } // end of try-catch } // end of for (int i=0; i<sacFiles.length; i++) }
|
/**
 * Builds the XMLDataSet for the directory: assigns the dirURL field from
 * base plus the directory name, loads all parameter refs, then ingests every
 * file (known image extensions become parameter refs, everything else is
 * tried as a SAC file; per-file failures are logged and skipped).
 */
void process() throws ParserConfigurationException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder docBuilder = factory.newDocumentBuilder(); dirURL = base; System.out.println(" dirURL is "+dirURL.toString()); try { dirURL = new URL(dirURL, directory.getName()+"/"); System.out.println("updated dirURL is "+dirURL.toString()); } catch (MalformedURLException e) { e.printStackTrace(); return; } // end of try-catch dataset = new XMLDataSet(docBuilder, dirURL, "genid"+Math.round(Math.random()*Integer.MAX_VALUE), dsName, userName); Iterator it = paramRefs.keySet().iterator(); while (it.hasNext()) { String key = (String)it.next(); loadParameterRef(key, (String)paramRefs.get(key)); } // end of while (it.hasNext()) File[] files = directory.listFiles(); for (int i=0; i<files.length; i++) { try { String filename = files[i].getName(); // maybe an image? if (filename.endsWith(".gif") || filename.endsWith(".GIF") || filename.endsWith(".png") || filename.endsWith(".PNG") || filename.endsWith(".jpeg") || filename.endsWith(".JPEG") || filename.endsWith(".jpg") || filename.endsWith(".JPG") ) { String name = filename.substring(0, filename.lastIndexOf('.')); loadParameterRef(name, filename); } else { // try as a sac file loadSacFile(files[i]); } // end of else } catch (Exception e) { e.printStackTrace(); System.err.println("Caught exception on " +files[i].getName()+", continuing..."); } // end of try-catch } // end of for (int i=0; i<sacFiles.length; i++) }
| 1,117,734
|
public static Channel getChannel(SacTimeSeries sac) { ChannelId chanId = getChannelId(sac); float stel = sac.stel; if (stel == -12345.0f) { stel = 0; } // end of if (stel == -12345.0f) float stdp = sac.stdp; if (stdp == -12345.0f) { stdp = 0; } // end of if (stdp == -12345.0f) Location loc = new Location(sac.stla, sac.stlo, new QuantityImpl(sac.stel, UnitImpl.METER), new QuantityImpl(sac.stdp, UnitImpl.METER), LocationType.GEOGRAPHIC); Orientation orient = new Orientation(sac.cmpaz, sac.cmpinc); SamplingImpl samp = new SamplingImpl(1, new TimeInterval(sac.delta, UnitImpl.SECOND)); TimeRange effective = new TimeRange(chanId.network_id.begin_time, new Time(edu.iris.Fissures.TIME_UNKNOWN.value, 0)); NetworkAttr netAttr = new NetworkAttrImpl(chanId.network_id, chanId.network_id.network_code, "", "", effective); StationId staId = new StationId(chanId.network_id, chanId.station_code, chanId.network_id.begin_time); Station station = new StationImpl(staId, chanId.station_code, loc, effective, "", "", "from sac", netAttr); SiteId siteId = new SiteId(chanId.network_id, chanId.station_code, chanId.site_code, chanId.network_id.begin_time); Site site = new SiteImpl(siteId, loc, effective, station, "from sac"); return new ChannelImpl(chanId, chanId.channel_code, orient, samp, effective, site); }
|
/**
 * Builds a Fissures Channel (with synthetic network/station/site attributes)
 * from SAC header fields. The orientation dip is cmpinc - 90 (SAC's CMPINC
 * is measured from vertical, the Fissures dip from horizontal).
 * NOTE(review): the -12345 "undefined" guards on stel/stdp are computed but
 * the Location is built from the raw sac.stel/sac.stdp, so the sanitized
 * locals are unused - confirm which was intended.
 */
public static Channel getChannel(SacTimeSeries sac) { ChannelId chanId = getChannelId(sac); float stel = sac.stel; if (stel == -12345.0f) { stel = 0; } // end of if (stel == -12345.0f) float stdp = sac.stdp; if (stdp == -12345.0f) { stdp = 0; } // end of if (stdp == -12345.0f) Location loc = new Location(sac.stla, sac.stlo, new QuantityImpl(sac.stel, UnitImpl.METER), new QuantityImpl(sac.stdp, UnitImpl.METER), LocationType.GEOGRAPHIC); Orientation orient = new Orientation(sac.cmpaz, sac.cmpinc - 90); SamplingImpl samp = new SamplingImpl(1, new TimeInterval(sac.delta, UnitImpl.SECOND)); TimeRange effective = new TimeRange(chanId.network_id.begin_time, new Time(edu.iris.Fissures.TIME_UNKNOWN.value, 0)); NetworkAttr netAttr = new NetworkAttrImpl(chanId.network_id, chanId.network_id.network_code, "", "", effective); StationId staId = new StationId(chanId.network_id, chanId.station_code, chanId.network_id.begin_time); Station station = new StationImpl(staId, chanId.station_code, loc, effective, "", "", "from sac", netAttr); SiteId siteId = new SiteId(chanId.network_id, chanId.station_code, chanId.site_code, chanId.network_id.begin_time); Site site = new SiteImpl(siteId, loc, effective, station, "from sac"); return new ChannelImpl(chanId, chanId.channel_code, orient, samp, effective, site); }
| 1,117,735
|
/**
 * Looks up the named property and parses it as an int, falling back to def
 * when the property is missing or not a valid integer.
 */
public int getInt(String key, int def) {
    try {
        String raw = getString(key, String.valueOf(def));
        return Integer.parseInt(raw);
    } catch (Exception parseFailure) {
        // Any lookup/parse problem means "use the caller's default".
        return def;
    }
}
|
/**
 * Looks up the named property and parses it as an int, returning def when
 * the property is missing or not a valid integer (any failure is swallowed).
 */
public int getInt(String key, int def) { try { return Integer.parseInt(getString(key, String.valueOf(def))); } catch (Exception exc) { return def; } }
| 1,117,736
|
/**
 * Finder: returns the primary key of the entity whose authorization-code
 * column matches the given code.
 *
 * @throws FinderException if no matching row exists
 */
public Object ejbFindByAuthorizationCode(String code) throws FinderException { return this.idoFindOnePKByColumnBySQL(COLUMN_AUTHORIZATION_CODE, code); }
|
/**
 * Finder: returns the primary key of the entity whose authorization-code
 * column matches the given code.
 *
 * @throws FinderException if no matching row exists
 */
public Object ejbFindByAuthorizationCode(String code) throws FinderException { return this.idoFindOnePKByColumnBySQL(COLUMN_AUTHORIZATION_CODE, code); }
| 1,117,737
|
/**
 * Pushes cache hits for this request (same channel, begin and end time, and
 * the SoftReference not yet cleared) to the listener, then, when the data
 * center is a DBDataCenter, requests the seismograms asynchronously.
 * FissuresException is currently swallowed (see commented-out rethrow).
 */
public void retrieveData(SeisDataChangeListener dataListener){ RequestFilter[] temp = {requestFilter}; Iterator it = seisCache.iterator(); List matchingSeismos = new ArrayList(); while(it.hasNext()){ LocalSeismogramImpl current = (LocalSeismogramImpl)((SoftReference)it.next()).get(); if(current != null&& requestFilter.channel_id.equals(current.channel_id) && requestFilter.start_time.equals(current.getBeginTime().getFissuresTime()) && requestFilter.end_time.equals(current.getEndTime().getFissuresTime())){ matchingSeismos.add(current); } } LocalSeismogramImpl[] cachedSeismos = new LocalSeismogramImpl[matchingSeismos.size()]; cachedSeismos = (LocalSeismogramImpl[])matchingSeismos.toArray(cachedSeismos); pushData(cachedSeismos, dataListener); try{ if(this.dataCenterOps instanceof DBDataCenter) { ((DBDataCenter)this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, ClockUtil.now().getFissuresTime()); } else { /* DBDataCenter.getDataCenter(this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, new MicroSecondDate().getFissuresTime()); */ } } catch(FissuresException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); // } catch(SQLException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); } }
|
public void retrieveData(SeisDataChangeListener dataListener){ Iterator it = seisCache.iterator(); List matchingSeismos = new ArrayList(); while(it.hasNext()){ LocalSeismogramImpl current = (LocalSeismogramImpl)((SoftReference)it.next()).get(); if(current != null&& requestFilter.channel_id.equals(current.channel_id) && requestFilter.start_time.equals(current.getBeginTime().getFissuresTime()) && requestFilter.end_time.equals(current.getEndTime().getFissuresTime())){ matchingSeismos.add(current); } } LocalSeismogramImpl[] cachedSeismos = new LocalSeismogramImpl[matchingSeismos.size()]; cachedSeismos = (LocalSeismogramImpl[])matchingSeismos.toArray(cachedSeismos); pushData(cachedSeismos, dataListener); try{ if(this.dataCenterOps instanceof DBDataCenter) { ((DBDataCenter)this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, ClockUtil.now().getFissuresTime()); } else { /* DBDataCenter.getDataCenter(this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, new MicroSecondDate().getFissuresTime()); */ } } catch(FissuresException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); // } catch(SQLException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); } }
| 1,117,738
|
public void retrieveData(SeisDataChangeListener dataListener){ RequestFilter[] temp = {requestFilter}; Iterator it = seisCache.iterator(); List matchingSeismos = new ArrayList(); while(it.hasNext()){ LocalSeismogramImpl current = (LocalSeismogramImpl)((SoftReference)it.next()).get(); if(current != null&& requestFilter.channel_id.equals(current.channel_id) && requestFilter.start_time.equals(current.getBeginTime().getFissuresTime()) && requestFilter.end_time.equals(current.getEndTime().getFissuresTime())){ matchingSeismos.add(current); } } LocalSeismogramImpl[] cachedSeismos = new LocalSeismogramImpl[matchingSeismos.size()]; cachedSeismos = (LocalSeismogramImpl[])matchingSeismos.toArray(cachedSeismos); pushData(cachedSeismos, dataListener); try{ if(this.dataCenterOps instanceof DBDataCenter) { ((DBDataCenter)this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, ClockUtil.now().getFissuresTime()); } else { /* DBDataCenter.getDataCenter(this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, new MicroSecondDate().getFissuresTime()); */ } } catch(FissuresException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); // } catch(SQLException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); } }
|
public void retrieveData(SeisDataChangeListener dataListener){ RequestFilter[] temp = {requestFilter}; Iterator it = seisCache.iterator(); List existingSeismos = new ArrayList(); while(it.hasNext()){ LocalSeismogramImpl current = (LocalSeismogramImpl)((SoftReference)it.next()).get(); if(current != null&& requestFilter.channel_id.equals(current.channel_id) && requestFilter.start_time.equals(current.getBeginTime().getFissuresTime()) && requestFilter.end_time.equals(current.getEndTime().getFissuresTime())){ matchingSeismos.add(current); } } LocalSeismogramImpl[] cachedSeismos = new LocalSeismogramImpl[matchingSeismos.size()]; cachedSeismos = (LocalSeismogramImpl[])matchingSeismos.toArray(cachedSeismos); pushData(cachedSeismos, dataListener); try{ if(this.dataCenterOps instanceof DBDataCenter) { ((DBDataCenter)this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, ClockUtil.now().getFissuresTime()); } else { /* DBDataCenter.getDataCenter(this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, new MicroSecondDate().getFissuresTime()); */ } } catch(FissuresException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); // } catch(SQLException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); } }
| 1,117,739
|
public void retrieveData(SeisDataChangeListener dataListener){ RequestFilter[] temp = {requestFilter}; Iterator it = seisCache.iterator(); List matchingSeismos = new ArrayList(); while(it.hasNext()){ LocalSeismogramImpl current = (LocalSeismogramImpl)((SoftReference)it.next()).get(); if(current != null&& requestFilter.channel_id.equals(current.channel_id) && requestFilter.start_time.equals(current.getBeginTime().getFissuresTime()) && requestFilter.end_time.equals(current.getEndTime().getFissuresTime())){ matchingSeismos.add(current); } } LocalSeismogramImpl[] cachedSeismos = new LocalSeismogramImpl[matchingSeismos.size()]; cachedSeismos = (LocalSeismogramImpl[])matchingSeismos.toArray(cachedSeismos); pushData(cachedSeismos, dataListener); try{ if(this.dataCenterOps instanceof DBDataCenter) { ((DBDataCenter)this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, ClockUtil.now().getFissuresTime()); } else { /* DBDataCenter.getDataCenter(this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, new MicroSecondDate().getFissuresTime()); */ } } catch(FissuresException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); // } catch(SQLException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); } }
|
public void retrieveData(SeisDataChangeListener dataListener){ RequestFilter[] temp = {requestFilter}; Iterator it = seisCache.iterator(); List matchingSeismos = new ArrayList(); while(it.hasNext()){ LocalSeismogramImpl current = (LocalSeismogramImpl)((SoftReference)it.next()).get(); if(current != null&& requestFilter.channel_id.equals(current.channel_id) && requestFilter.start_time.equals(current.getBeginTime().getFissuresTime()) && requestFilter.end_time.equals(current.getEndTime().getFissuresTime())){ matchingSeismos.add(current); } } LocalSeismogramImpl[] cachedSeismos = new LocalSeismogramImpl[matchingSeismos.size()]; cachedSeismos = (LocalSeismogramImpl[])matchingSeismos.toArray(cachedSeismos); pushData(cachedSeismos, dataListener); try{ if(this.dataCenterOps instanceof DBDataCenter) { ((DBDataCenter)this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, ClockUtil.now().getFissuresTime()); } else { /* DBDataCenter.getDataCenter(this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, new MicroSecondDate().getFissuresTime()); */ } } catch(FissuresException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); // } catch(SQLException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); } }
| 1,117,740
|
public void retrieveData(SeisDataChangeListener dataListener){ RequestFilter[] temp = {requestFilter}; Iterator it = seisCache.iterator(); List matchingSeismos = new ArrayList(); while(it.hasNext()){ LocalSeismogramImpl current = (LocalSeismogramImpl)((SoftReference)it.next()).get(); if(current != null&& requestFilter.channel_id.equals(current.channel_id) && requestFilter.start_time.equals(current.getBeginTime().getFissuresTime()) && requestFilter.end_time.equals(current.getEndTime().getFissuresTime())){ matchingSeismos.add(current); } } LocalSeismogramImpl[] cachedSeismos = new LocalSeismogramImpl[matchingSeismos.size()]; cachedSeismos = (LocalSeismogramImpl[])matchingSeismos.toArray(cachedSeismos); pushData(cachedSeismos, dataListener); try{ if(this.dataCenterOps instanceof DBDataCenter) { ((DBDataCenter)this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, ClockUtil.now().getFissuresTime()); } else { /* DBDataCenter.getDataCenter(this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, new MicroSecondDate().getFissuresTime()); */ } } catch(FissuresException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); // } catch(SQLException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); } }
|
public void retrieveData(SeisDataChangeListener dataListener){ RequestFilter[] temp = {requestFilter}; Iterator it = seisCache.iterator(); List matchingSeismos = new ArrayList(); while(it.hasNext()){ LocalSeismogramImpl current = (LocalSeismogramImpl)((SoftReference)it.next()).get(); if(current != null&& requestFilter.channel_id.equals(current.channel_id) && requestFilter.start_time.equals(current.getBeginTime().getFissuresTime()) && requestFilter.end_time.equals(current.getEndTime().getFissuresTime())){ matchingSeismos.add(current); } } LocalSeismogramImpl[] cachedSeismos = new LocalSeismogramImpl[matchingSeismos.size()]; cachedSeismos = (LocalSeismogramImpl[])matchingSeismos.toArray(cachedSeismos); pushData(cachedSeismos, dataListener); try{ if(this.dataCenterOps instanceof DBDataCenter) { ((DBDataCenter)this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, ClockUtil.now().getFissuresTime()); } else { /* DBDataCenter.getDataCenter(this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, new MicroSecondDate().getFissuresTime()); */ } } catch(FissuresException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); // } catch(SQLException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); } }
| 1,117,741
|
public void retrieveData(SeisDataChangeListener dataListener){ RequestFilter[] temp = {requestFilter}; Iterator it = seisCache.iterator(); List matchingSeismos = new ArrayList(); while(it.hasNext()){ LocalSeismogramImpl current = (LocalSeismogramImpl)((SoftReference)it.next()).get(); if(current != null&& requestFilter.channel_id.equals(current.channel_id) && requestFilter.start_time.equals(current.getBeginTime().getFissuresTime()) && requestFilter.end_time.equals(current.getEndTime().getFissuresTime())){ matchingSeismos.add(current); } } LocalSeismogramImpl[] cachedSeismos = new LocalSeismogramImpl[matchingSeismos.size()]; cachedSeismos = (LocalSeismogramImpl[])matchingSeismos.toArray(cachedSeismos); pushData(cachedSeismos, dataListener); try{ if(this.dataCenterOps instanceof DBDataCenter) { ((DBDataCenter)this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, ClockUtil.now().getFissuresTime()); } else { /* DBDataCenter.getDataCenter(this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, new MicroSecondDate().getFissuresTime()); */ } } catch(FissuresException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); // } catch(SQLException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); } }
|
public void retrieveData(SeisDataChangeListener dataListener){ RequestFilter[] temp = {requestFilter}; Iterator it = seisCache.iterator(); List matchingSeismos = new ArrayList(); while(it.hasNext()){ LocalSeismogramImpl current = (LocalSeismogramImpl)((SoftReference)it.next()).get(); if(current != null&& requestFilter.channel_id.equals(current.channel_id) && requestFilter.start_time.equals(current.getBeginTime().getFissuresTime()) && requestFilter.end_time.equals(current.getEndTime().getFissuresTime())){ matchingSeismos.add(current); } } LocalSeismogramImpl[] cachedSeismos = new LocalSeismogramImpl[matchingSeismos.size()]; cachedSeismos = (LocalSeismogramImpl[])matchingSeismos.toArray(cachedSeismos); pushData(cachedSeismos, dataListener); try{ if(this.dataCenterOps instanceof DBDataCenter) { ((DBDataCenter)this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, ClockUtil.now().getFissuresTime()); } else { /* DBDataCenter.getDataCenter(this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, new MicroSecondDate().getFissuresTime()); */ } } catch(FissuresException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); // } catch(SQLException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); } }
| 1,117,742
|
public void retrieveData(SeisDataChangeListener dataListener){ RequestFilter[] temp = {requestFilter}; Iterator it = seisCache.iterator(); List matchingSeismos = new ArrayList(); while(it.hasNext()){ LocalSeismogramImpl current = (LocalSeismogramImpl)((SoftReference)it.next()).get(); if(current != null&& requestFilter.channel_id.equals(current.channel_id) && requestFilter.start_time.equals(current.getBeginTime().getFissuresTime()) && requestFilter.end_time.equals(current.getEndTime().getFissuresTime())){ matchingSeismos.add(current); } } LocalSeismogramImpl[] cachedSeismos = new LocalSeismogramImpl[matchingSeismos.size()]; cachedSeismos = (LocalSeismogramImpl[])matchingSeismos.toArray(cachedSeismos); pushData(cachedSeismos, dataListener); try{ if(this.dataCenterOps instanceof DBDataCenter) { ((DBDataCenter)this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, ClockUtil.now().getFissuresTime()); } else { /* DBDataCenter.getDataCenter(this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, new MicroSecondDate().getFissuresTime()); */ } } catch(FissuresException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); // } catch(SQLException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); } }
|
public void retrieveData(SeisDataChangeListener dataListener){ RequestFilter[] temp = {requestFilter}; Iterator it = seisCache.iterator(); List matchingSeismos = new ArrayList(); while(it.hasNext()){ LocalSeismogramImpl current = (LocalSeismogramImpl)((SoftReference)it.next()).get(); if(current != null&& requestFilter.channel_id.equals(current.channel_id) && requestFilter.start_time.equals(current.getBeginTime().getFissuresTime()) && requestFilter.end_time.equals(current.getEndTime().getFissuresTime())){ matchingSeismos.add(current); } } LocalSeismogramImpl[] cachedSeismos = new LocalSeismogramImpl[matchingSeismos.size()]; cachedSeismos = (LocalSeismogramImpl[])matchingSeismos.toArray(cachedSeismos); pushData(cachedSeismos, dataListener); try{ if(this.dataCenterOps instanceof DBDataCenter) { ((DBDataCenter)this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, ClockUtil.now().getFissuresTime()); /* DBDataCenter.getDataCenter(this.dataCenterOps).request_seismograms(temp, (LocalDataCenterCallBack)this, dataListener, false, new MicroSecondDate().getFissuresTime()); */ } } catch(FissuresException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); // } catch(SQLException fe) { // throw new DataRetrievalException("Exception occurred while using DataCenter to get Data",fe); } }
| 1,117,743
|
public void networkDataChanged(StationDataEvent s);
|
public void networkDataChanged(NetworkDataEvent s);
| 1,117,744
|
public void addGroupedSelection(Selection creator, VerticalSeismogramDisplay reaper){ DataSetSeismogram[] creatorSeismograms = creator.getSeismograms(); ArrayList allMatchingDSS = new ArrayList(); for(int i = 0; i < creatorSeismograms.length; i++){ LocalSeismogramImpl seis = creatorSeismograms[i].getSeismogram(); XMLDataSet dataSet = (XMLDataSet)creatorSeismograms[i].getDataSet(); ChannelId[] channelGroup = DataSetChannelGrouper.retrieveGrouping(dataSet, seis.getChannelID()); for(int counter = 0; counter < channelGroup.length; counter++) { LocalSeismogram[] seismograms = DisplayUtils.getSeismogram(channelGroup[counter], dataSet, new TimeRange(seis.getBeginTime().getFissuresTime(), seis.getEndTime().getFissuresTime())); if(seismograms.length > 0){ DataSetSeismogram[] dataSetSeismograms = new DataSetSeismogram[seismograms.length]; for(int j = 0; i < seismograms.length; i++){ dataSetSeismograms[i] = new DataSetSeismogram((LocalSeismogramImpl)seismograms[i], dataSet); allMatchingDSS.add(dataSetSeismograms[i]); } creator.addDisplay(reaper.addDisplay(dataSetSeismograms, (TimeConfig)creator.getInternalRegistrar(), channelGroup[i]+ "." + creator.getColor())); } } } Iterator g = basicDisplays.iterator(); while(g.hasNext()){ BasicSeismogramDisplay current = ((BasicSeismogramDisplay)g.next()); DataSetSeismogram[] seismos = current.getSeismograms(); for(int i = 0; i < seismos.length; i++){ Iterator e = allMatchingDSS.iterator(); while(e.hasNext()){ if(seismos[i].getSeismogram() == ((DataSetSeismogram)e.next()).getSeismogram()){ current.add3CSelection(creator); creator.addParent(current); e.remove(); } } } } }
|
public void addGroupedSelection(Selection creator, VerticalSeismogramDisplay reaper){ DataSetSeismogram[] creatorSeismograms = creator.getSeismograms(); ArrayList allMatchingDSS = new ArrayList(); for(int i = 0; i < creatorSeismograms.length; i++){ LocalSeismogramImpl seis = creatorSeismograms[i].getSeismogram(); XMLDataSet dataSet = (XMLDataSet)creatorSeismograms[i].getDataSet(); ChannelId[] channelGroup = DataSetChannelGrouper.retrieveGrouping(dataSet, seis.getChannelID()); for(int counter = 0; counter < channelGroup.length; counter++) { LocalSeismogram[] seismograms = DisplayUtils.getSeismogram(channelGroup[counter], dataSet, new TimeRange(seis.getBeginTime().getFissuresTime(), seis.getEndTime().getFissuresTime())); if(seismograms.length > 0){ DataSetSeismogram[] dataSetSeismograms = new DataSetSeismogram[seismograms.length]; for(int j = 0; i < seismograms.length; i++){ dataSetSeismograms[i] = new DataSetSeismogram((LocalSeismogramImpl)seismograms[i], dataSet); allMatchingDSS.add(dataSetSeismograms[i]); } creator.addDisplay(reaper.addDisplay(dataSetSeismograms, (TimeConfig)creator.getInternalRegistrar(), channelGroup[i]+ "." + creator.getColor())); } } } Iterator g = basicDisplays.iterator(); while(g.hasNext()){ BasicSeismogramDisplay current = ((BasicSeismogramDisplay)g.next()); DataSetSeismogram[] seismos = current.getSeismograms(); for(int i = 0; i < seismos.length; i++){ Iterator e = allMatchingDSS.iterator(); while(e.hasNext()){ if(seismos[i].getSeismogram() == ((DataSetSeismogram)e.next()).getSeismogram()){ current.add3CSelection(creator); creator.addParent(current); e.remove(); } } } } }
| 1,117,745
|
public void addGroupedSelection(Selection creator, VerticalSeismogramDisplay reaper){ DataSetSeismogram[] creatorSeismograms = creator.getSeismograms(); ArrayList allMatchingDSS = new ArrayList(); for(int i = 0; i < creatorSeismograms.length; i++){ LocalSeismogramImpl seis = creatorSeismograms[i].getSeismogram(); XMLDataSet dataSet = (XMLDataSet)creatorSeismograms[i].getDataSet(); ChannelId[] channelGroup = DataSetChannelGrouper.retrieveGrouping(dataSet, seis.getChannelID()); for(int counter = 0; counter < channelGroup.length; counter++) { LocalSeismogram[] seismograms = DisplayUtils.getSeismogram(channelGroup[counter], dataSet, new TimeRange(seis.getBeginTime().getFissuresTime(), seis.getEndTime().getFissuresTime())); if(seismograms.length > 0){ DataSetSeismogram[] dataSetSeismograms = new DataSetSeismogram[seismograms.length]; for(int j = 0; i < seismograms.length; i++){ dataSetSeismograms[i] = new DataSetSeismogram((LocalSeismogramImpl)seismograms[i], dataSet); allMatchingDSS.add(dataSetSeismograms[i]); } creator.addDisplay(reaper.addDisplay(dataSetSeismograms, (TimeConfig)creator.getInternalRegistrar(), channelGroup[i]+ "." + creator.getColor())); } } } Iterator g = basicDisplays.iterator(); while(g.hasNext()){ BasicSeismogramDisplay current = ((BasicSeismogramDisplay)g.next()); DataSetSeismogram[] seismos = current.getSeismograms(); for(int i = 0; i < seismos.length; i++){ Iterator e = allMatchingDSS.iterator(); while(e.hasNext()){ if(seismos[i].getSeismogram() == ((DataSetSeismogram)e.next()).getSeismogram()){ current.add3CSelection(creator); creator.addParent(current); e.remove(); } } } } }
|
public void addGroupedSelection(Selection creator, VerticalSeismogramDisplay reaper){ DataSetSeismogram[] creatorSeismograms = creator.getSeismograms(); ArrayList allMatchingDSS = new ArrayList(); for(int i = 0; i < creatorSeismograms.length; i++){ LocalSeismogramImpl seis = creatorSeismograms[i].getSeismogram(); XMLDataSet dataSet = (XMLDataSet)creatorSeismograms[i].getDataSet(); ChannelId[] channelGroup = DataSetChannelGrouper.retrieveGrouping(dataSet, seis.getChannelID()); for(int counter = 0; counter < channelGroup.length; counter++) { LocalSeismogram[] seismograms = DisplayUtils.getSeismogram(channelGroup[counter], dataSet, new TimeRange(seis.getBeginTime().getFissuresTime(), seis.getEndTime().getFissuresTime())); if(seismograms.length > 0){ DataSetSeismogram[] dataSetSeismograms = new DataSetSeismogram[seismograms.length]; for(int j = 0; i < seismograms.length; i++){ dataSetSeismograms[i] = new DataSetSeismogram((LocalSeismogramImpl)seismograms[i], dataSet); allMatchingDSS.add(dataSetSeismograms[i]); } creator.addDisplay(reaper.addDisplay(dataSetSeismograms, (TimeConfig)creator.getInternalRegistrar(), channelGroup[i]+ "." + creator.getColor())); } } } Iterator g = basicDisplays.iterator(); while(g.hasNext()){ BasicSeismogramDisplay current = ((BasicSeismogramDisplay)g.next()); DataSetSeismogram[] seismos = current.getSeismograms(); for(int i = 0; i < seismos.length; i++){ Iterator e = allMatchingDSS.iterator(); while(e.hasNext()){ if(seismos[i].getSeismogram() == ((DataSetSeismogram)e.next()).getSeismogram()){ current.add3CSelection(creator); creator.addParent(current); e.remove(); } } } } }
| 1,117,746
|
public void testSimple() { testWalker("f(1, 2)"); }
|
public void testSimple() { testWalker("a ; b; c"); }
| 1,117,747
|
private void testWalker(String walkerInput) { try { ATLexer lexer = new ATLexer(new ByteArrayInputStream(walkerInput.getBytes())); ATParser parser = new ATParser(lexer); // Parse the input expression parser.program(); CommonAST t = (CommonAST)parser.getAST(); // Print the resulting tree out in LISP notation System.out.println(t.toStringList()); ATTreeWalker walker = new ATTreeWalker(); // Traverse the tree created by the parser walker.program(t); } catch(Exception e) { fail("exception: "+e); } }
|
private void testWalker(String walkerInput) { try { ATLexer lexer = new ATLexer(new ByteArrayInputStream(walkerInput.getBytes())); ATParser parser = new ATParser(lexer); // Parse the input expression parser.program(); CommonAST t = (CommonAST)parser.getAST(); // Print the resulting tree out in LISP notation System.out.println(t.toStringList()); ATTreeWalker walker = new ATTreeWalker(); // Traverse the tree created by the parser NATAbstractGrammar ag = walker.program(t); System.out.println("final = "+ag); } catch(Exception e) { fail("exception: "+e); } }
| 1,117,748
|
public DataSetSeismogram[] retrieve_seismograms() { LocalSeismogramImpl seis = dataSetSeismogram[0].getSeismogram(); ChannelId[] channelIds = ((edu.sc.seis.fissuresUtil.xml.XMLDataSet)dataSetSeismogram[0].getDataSet()).getChannelIds(); ChannelGrouperImpl channelProxy = new ChannelGrouperImpl(); logger.debug("the original channel_code from the seismogram is "+seis.getChannelID().channel_code); channelGroup = channelProxy.retrieve_grouping(channelIds, seis.getChannelID()); logger.debug("THe length of the channel group is "+channelGroup.length); //decide whether to form the radioSetPanel or the checkBoxPanel. if(!advancedOption) { particleMotionDisplay.formRadioSetPanel(channelGroup); } else { particleMotionDisplay.formCheckBoxPanel(channelGroup); } edu.iris.Fissures.Time startTime; edu.iris.Fissures.Time endTime; DataSetSeismogram[] seismograms = new DataSetSeismogram[3]; if(timeConfigRegistrar != null) { startTime = timeConfigRegistrar.getTimeRange().getBeginTime().getFissuresTime(); endTime = timeConfigRegistrar.getTimeRange().getEndTime().getFissuresTime(); } else { startTime = seis.getBeginTime().getFissuresTime(); endTime = seis.getEndTime().getFissuresTime(); } try { for(int counter = 0; counter < channelGroup.length; counter++) { seismograms[counter] = new DataSetSeismogram(dataSetSeismogram[0].getDataSet(). getSeismogram(DisplayUtils.getSeismogramName(channelGroup[counter], dataSetSeismogram[0].getDataSet(), new edu.iris.Fissures.TimeRange(seis.getBeginTime().getFissuresTime(), seis.getEndTime().getFissuresTime()))), dataSetSeismogram[0].getDataSet()); //ChannelIdUtil.toStringNoDates(channelGroup[counter])); timeConfigRegistrar.addSeismogram(seismograms[counter]); //hAmpRangeConfigRegistrar.addSeismogram(seismograms[counter]); } return seismograms; } catch(Exception e) { e.printStackTrace();//strack trace } return new DataSetSeismogram[0]; }
|
public DataSetSeismogram[] retrieve_seismograms() { LocalSeismogramImpl seis = dataSetSeismogram[0].getSeismogram(); ChannelId[] channelIds = ((edu.sc.seis.fissuresUtil.xml.XMLDataSet)dataSetSeismogram[0].getDataSet()).getChannelIds(); ChannelGrouperImpl channelProxy = new ChannelGrouperImpl(); logger.debug("the original channel_code from the seismogram is "+seis.getChannelID().channel_code); channelGroup = channelProxy.retrieve_grouping(channelIds, seis.getChannelID()); logger.debug("THe length of the channel group is "+channelGroup.length); //decide whether to form the radioSetPanel or the checkBoxPanel. if(!advancedOption) { particleMotionDisplay.formRadioSetPanel(channelGroup); } else { particleMotionDisplay.formCheckBoxPanel(channelGroup); } edu.iris.Fissures.Time startTime; edu.iris.Fissures.Time endTime; DataSetSeismogram[] seismograms = new DataSetSeismogram[3]; if(timeConfigRegistrar != null) { startTime = timeConfigRegistrar.getTimeRange().getBeginTime().getFissuresTime(); endTime = timeConfigRegistrar.getTimeRange().getEndTime().getFissuresTime(); } else { startTime = seis.getBeginTime().getFissuresTime(); endTime = seis.getEndTime().getFissuresTime(); } try { for(int counter = 0; counter < channelGroup.length; counter++) { seismograms[counter] = new DataSetSeismogram(dataSetSeismogram[0].getDataSet(). getSeismogram(DisplayUtils.getSeismogramName(channelGroup[counter], dataSetSeismogram[0].getDataSet(), new edu.iris.Fissures.TimeRange(seis.getBeginTime().getFissuresTime(), seis.getEndTime().getFissuresTime()))), dataSetSeismogram[0].getDataSet()); //ChannelIdUtil.toStringNoDates(channelGroup[counter])); timeConfigRegistrar.addSeismogram(seismograms[counter]); //hAmpRangeConfigRegistrar.addSeismogram(seismograms[counter]); } return seismograms; } catch(Exception e) { e.printStackTrace();//strack trace } return new DataSetSeismogram[0]; }
| 1,117,749
|
public static Orientation getEast() { return new Orientation(0, 90); }
|
public static Orientation getEast() { return new Orientation(90, 0); }
| 1,117,751
|
public void stationAvailabiltyChanged(AvailableStationDataEvent e) { Station station = e.getStation(); boolean isUp = e.stationIsUp(); Iterator it = omgraphics.iterator(); boolean found = false; while (it.hasNext() && !found){ OMStation current = (OMStation)it.next(); if (current.getStation() == station && !isUp){ current.setDefaultColor(DOWN_STATION); } } repaint(); }
|
public void stationAvailabiltyChanged(AvailableStationDataEvent e) { Station station = e.getStation(); boolean isUp = e.stationIsUp(); Iterator it = omgraphics.iterator(); boolean found = false; while (it.hasNext() && !found){ OMStation current = (OMStation)it.next(); if (current.getStation() == station && !isUp){ current.setDefaultColor(DOWN_STATION); } } repaint(); }
| 1,117,752
|
public static String shorten(String value, int maxLength) { if (value == null) return value; if (value.length() <= maxLength) return value; if (maxLength <= 3) return "..."; return value.substring(0, maxLength - 3) + "..."; }
|
public static String shorten(String value, int maxLength) { if (value == null) return value; if (value.length() <= maxLength) return value; if (maxLength <= 3) return "..."; return value.substring(0, maxLength - 2) + "..."; }
| 1,117,753
|
private void addGroupedSelection(Selection creator, VerticalSeismogramDisplay reaper){ String[] names = DisplayUtils.getSeismogramNames(creator.getSeismograms()); for(int i = 0; i < names.length; i++){ names[i] += "." + creator.getColor(); } threeSelectionDisplay.addDisplay(creator.getSeismograms(), (TimeConfig)creator.getInternalRegistrar(), names); Iterator g = basicDisplays.iterator(); DataSetSeismogram[] addedSeismograms = ((ComponentSortedSeismogramDisplay)threeSelectionDisplay).getRecentlyAddedSeismograms(); while(g.hasNext()){ BasicSeismogramDisplay current = ((BasicSeismogramDisplay)g.next()); DataSetSeismogram[] basicDisplaySeismos = current.getSeismograms(); for(int i = 0; i < addedSeismograms.length; i++){ for(int j = 0; j < basicDisplaySeismos.length; j++){ if(addedSeismograms[i].getSeismogram() == basicDisplaySeismos[j].getSeismogram()){ current.add3CSelection(creator); creator.addParent(current); } } } } }
|
private void addGroupedSelection(Selection creator, VerticalSeismogramDisplay reaper){ String[] names = DisplayUtils.getSeismogramNames(creator.getSeismograms()); for(int i = 0; i < names.length; i++){ names[i] += "." + creator.getColor(); } threeSelectionDisplay.addDisplay(creator.getSeismograms(), (TimeConfig)creator.getInternalRegistrar(), names); Iterator g = basicDisplays.iterator(); DataSetSeismogram[] addedSeismograms = ((ComponentSortedSeismogramDisplay)threeSelectionDisplay).getRecentlyAddedSeismograms(); while(g.hasNext()){ BasicSeismogramDisplay current = ((BasicSeismogramDisplay)g.next()); DataSetSeismogram[] basicDisplaySeismos = current.getSeismograms(); for(int i = 0; i < addedSeismograms.length; i++){ for(int j = 0; j < basicDisplaySeismos.length; j++){ if(addedSeismograms[i].getSeismogram() == basicDisplaySeismos[j].getSeismogram()){ current.add3CSelection(creator); creator.addParent(current); } } } } }
| 1,117,755
|
private void addGroupedSelection(Selection creator, VerticalSeismogramDisplay reaper){ String[] names = DisplayUtils.getSeismogramNames(creator.getSeismograms()); for(int i = 0; i < names.length; i++){ names[i] += "." + creator.getColor(); } threeSelectionDisplay.addDisplay(creator.getSeismograms(), (TimeConfig)creator.getInternalRegistrar(), names); Iterator g = basicDisplays.iterator(); DataSetSeismogram[] addedSeismograms = ((ComponentSortedSeismogramDisplay)threeSelectionDisplay).getRecentlyAddedSeismograms(); while(g.hasNext()){ BasicSeismogramDisplay current = ((BasicSeismogramDisplay)g.next()); DataSetSeismogram[] basicDisplaySeismos = current.getSeismograms(); for(int i = 0; i < addedSeismograms.length; i++){ for(int j = 0; j < basicDisplaySeismos.length; j++){ if(addedSeismograms[i].getSeismogram() == basicDisplaySeismos[j].getSeismogram()){ current.add3CSelection(creator); creator.addParent(current); } } } } }
|
private void addGroupedSelection(Selection creator, VerticalSeismogramDisplay reaper){ String[] names = DisplayUtils.getSeismogramNames(creator.getSeismograms()); for(int i = 0; i < names.length; i++){ names[i] += "." + creator.getColor(); } threeSelectionDisplay.addDisplay(creator.getSeismograms(), (TimeConfig)creator.getInternalRegistrar(), names); Iterator g = basicDisplays.iterator(); DataSetSeismogram[] addedSeismograms = ((ComponentSortedSeismogramDisplay)threeSelectionDisplay).getRecentlyAddedSeismograms(); while(g.hasNext()){ BasicSeismogramDisplay current = ((BasicSeismogramDisplay)g.next()); DataSetSeismogram[] basicDisplaySeismos = current.getSeismograms(); for(int i = 0; i < addedSeismograms.length; i++){ for(int j = 0; j < basicDisplaySeismos.length; j++){ if(addedSeismograms[i].getSeismogram() == basicDisplaySeismos[j].getSeismogram()){ current.add3CSelection(creator); creator.addParent(current); } } } } }
| 1,117,756
|
private void addGroupedSelection(Selection creator, VerticalSeismogramDisplay reaper){ String[] names = DisplayUtils.getSeismogramNames(creator.getSeismograms()); for(int i = 0; i < names.length; i++){ names[i] += "." + creator.getColor(); } threeSelectionDisplay.addDisplay(creator.getSeismograms(), (TimeConfig)creator.getInternalRegistrar(), names); Iterator g = basicDisplays.iterator(); DataSetSeismogram[] addedSeismograms = ((ComponentSortedSeismogramDisplay)threeSelectionDisplay).getRecentlyAddedSeismograms(); while(g.hasNext()){ BasicSeismogramDisplay current = ((BasicSeismogramDisplay)g.next()); DataSetSeismogram[] basicDisplaySeismos = current.getSeismograms(); for(int i = 0; i < addedSeismograms.length; i++){ for(int j = 0; j < basicDisplaySeismos.length; j++){ if(addedSeismograms[i].getSeismogram() == basicDisplaySeismos[j].getSeismogram()){ current.add3CSelection(creator); creator.addParent(current); } } } } }
|
private void addGroupedSelection(Selection creator, VerticalSeismogramDisplay reaper){ String[] names = DisplayUtils.getSeismogramNames(creator.getSeismograms()); for(int i = 0; i < names.length; i++){ names[i] += "." + creator.getColor(); } threeSelectionDisplay.addDisplay(creator.getSeismograms(), (TimeConfig)creator.getInternalRegistrar(), names); Iterator g = basicDisplays.iterator(); while(g.hasNext()){ BasicSeismogramDisplay current = ((BasicSeismogramDisplay)g.next()); DataSetSeismogram[] basicDisplaySeismos = current.getSeismograms(); for(int i = 0; i < addedSeismograms.length; i++){ for(int j = 0; j < basicDisplaySeismos.length; j++){ if(addedSeismograms[i].getSeismogram() == basicDisplaySeismos[j].getSeismogram()){ current.add3CSelection(creator); creator.addParent(current); } } } } }
| 1,117,757
|
private void addGroupedSelection(Selection creator, VerticalSeismogramDisplay reaper){ String[] names = DisplayUtils.getSeismogramNames(creator.getSeismograms()); for(int i = 0; i < names.length; i++){ names[i] += "." + creator.getColor(); } threeSelectionDisplay.addDisplay(creator.getSeismograms(), (TimeConfig)creator.getInternalRegistrar(), names); Iterator g = basicDisplays.iterator(); DataSetSeismogram[] addedSeismograms = ((ComponentSortedSeismogramDisplay)threeSelectionDisplay).getRecentlyAddedSeismograms(); while(g.hasNext()){ BasicSeismogramDisplay current = ((BasicSeismogramDisplay)g.next()); DataSetSeismogram[] basicDisplaySeismos = current.getSeismograms(); for(int i = 0; i < addedSeismograms.length; i++){ for(int j = 0; j < basicDisplaySeismos.length; j++){ if(addedSeismograms[i].getSeismogram() == basicDisplaySeismos[j].getSeismogram()){ current.add3CSelection(creator); creator.addParent(current); } } } } }
|
private void addGroupedSelection(Selection creator, VerticalSeismogramDisplay reaper){ String[] names = DisplayUtils.getSeismogramNames(creator.getSeismograms()); for(int i = 0; i < names.length; i++){ names[i] += "." + creator.getColor(); } threeSelectionDisplay.addDisplay(creator.getSeismograms(), (TimeConfig)creator.getInternalRegistrar(), names); Iterator g = basicDisplays.iterator(); DataSetSeismogram[] addedSeismograms = ((ComponentSortedSeismogramDisplay)threeSelectionDisplay).getRecentlyAddedSeismograms(); while(g.hasNext()){ BasicSeismogramDisplay current = ((BasicSeismogramDisplay)g.next()); DataSetSeismogram[] basicDisplaySeismos = current.getSeismograms(); for(int i = 0; i < componentSorted.length; i++){ for(int j = 0; j < basicDisplaySeismos.length; j++){ if(addedSeismograms[i].getSeismogram() == basicDisplaySeismos[j].getSeismogram()){ current.add3CSelection(creator); creator.addParent(current); } } } } }
| 1,117,758
|
private void addGroupedSelection(Selection creator, VerticalSeismogramDisplay reaper){ String[] names = DisplayUtils.getSeismogramNames(creator.getSeismograms()); for(int i = 0; i < names.length; i++){ names[i] += "." + creator.getColor(); } threeSelectionDisplay.addDisplay(creator.getSeismograms(), (TimeConfig)creator.getInternalRegistrar(), names); Iterator g = basicDisplays.iterator(); DataSetSeismogram[] addedSeismograms = ((ComponentSortedSeismogramDisplay)threeSelectionDisplay).getRecentlyAddedSeismograms(); while(g.hasNext()){ BasicSeismogramDisplay current = ((BasicSeismogramDisplay)g.next()); DataSetSeismogram[] basicDisplaySeismos = current.getSeismograms(); for(int i = 0; i < addedSeismograms.length; i++){ for(int j = 0; j < basicDisplaySeismos.length; j++){ if(addedSeismograms[i].getSeismogram() == basicDisplaySeismos[j].getSeismogram()){ current.add3CSelection(creator); creator.addParent(current); } } } } }
|
private void addGroupedSelection(Selection creator, VerticalSeismogramDisplay reaper){ String[] names = DisplayUtils.getSeismogramNames(creator.getSeismograms()); for(int i = 0; i < names.length; i++){ names[i] += "." + creator.getColor(); } threeSelectionDisplay.addDisplay(creator.getSeismograms(), (TimeConfig)creator.getInternalRegistrar(), names); Iterator g = basicDisplays.iterator(); DataSetSeismogram[] addedSeismograms = ((ComponentSortedSeismogramDisplay)threeSelectionDisplay).getRecentlyAddedSeismograms(); while(g.hasNext()){ BasicSeismogramDisplay current = ((BasicSeismogramDisplay)g.next()); DataSetSeismogram[] basicDisplaySeismos = current.getSeismograms(); for(int i = 0; i < addedSeismograms.length; i++){ for(int j = 0; j < basicDisplaySeismos.length; j++){ if(addedSeismograms[i].getSeismogram() == basicDisplaySeismos[j].getSeismogram()){ current.add3CSelection(creator); creator.addParent(current); } } } } }
| 1,117,759
|
private void addSelection(Selection creator, VerticalSeismogramDisplay reaper){ DataSetSeismogram[] seismos = creator.getSeismograms(); String[] parentNames = creator.getParent().getNames(); String[] names = new String[parentNames.length]; for(int i = 0; i < parentNames.length; i++){ names[i] = parentNames[i] + "." + creator.getColor(); } creator.addDisplay(reaper.addDisplay(seismos, (TimeConfig)creator.getInternalRegistrar(), names)); }
|
private void addSelection(Selection creator, VerticalSeismogramDisplay reaper){ DataSetSeismogram[] seismos = creator.getSeismograms(); String[] parentNames = creator.getParent().getNames(); String[] names = new String[parentNames.length]; for(int i = 0; i < parentNames.length; i++){ names[i] = parentNames[i] + "." + creator.getColor(); } creator.addDisplay(reaper.addDisplay(seismos, (TimeConfig)creator.getInternalRegistrar(), names)); }
| 1,117,761
|
private void addSelection(Selection creator, VerticalSeismogramDisplay reaper){ DataSetSeismogram[] seismos = creator.getSeismograms(); String[] parentNames = creator.getParent().getNames(); String[] names = new String[parentNames.length]; for(int i = 0; i < parentNames.length; i++){ names[i] = parentNames[i] + "." + creator.getColor(); } creator.addDisplay(reaper.addDisplay(seismos, (TimeConfig)creator.getInternalRegistrar(), names)); }
|
private void addSelection(Selection creator, VerticalSeismogramDisplay reaper){ DataSetSeismogram[] seismos = creator.getSeismograms(); String[] parentNames = creator.getParent().getNames(); String[] names = new String[parentNames.length]; for(int i = 0; i < parentNames.length; i++){ names[i] = parentNames[i] + "." + creator.getColor(); } creator.addDisplay(reaper.addDisplay(seismos, (TimeConfig)creator.getInternalRegistrar(), names)); }
| 1,117,762
|
public void createThreeSelectionDisplay(Selection creator){ if(threeSelectionDisplay == null){ logger.debug("creating 3C selection display"); threeSelectionWindow = new JFrame(particleTagWindowName); threeSelectionWindow.addWindowListener(new WindowAdapter() { public void windowClosing(WindowEvent e) { threeSelectionDisplay.removeAll(); } }); threeSelectionWindow.setSize(400, 400); Toolkit tk = Toolkit.getDefaultToolkit(); threeSelectionWindow.setLocation((tk.getScreenSize().width - threeSelectionWindow.getSize().width)/2, (tk.getScreenSize().height - threeSelectionWindow.getSize().height)/2); threeSelectionDisplay = new ComponentSortedSeismogramDisplay(mouseForwarder, motionForwarder, this); addGroupedSelection(creator, threeSelectionDisplay); threeSelectionWindow.getContentPane().add(new JScrollPane(threeSelectionDisplay)); threeSelectionWindow.setVisible(true); }else{ logger.debug("adding another 3Cselection"); addGroupedSelection(creator, threeSelectionDisplay); threeSelectionWindow.toFront(); } }
|
public void createThreeSelectionDisplay(Selection creator){ if(threeSelectionDisplay == null){ logger.debug("creating 3C selection display"); threeSelectionWindow = new JFrame(particleTagWindowName); threeSelectionWindow.addWindowListener(new WindowAdapter() { public void windowClosing(WindowEvent e) { threeSelectionDisplay.removeAll(); } }); threeSelectionWindow.setSize(400, 400); Toolkit tk = Toolkit.getDefaultToolkit(); threeSelectionWindow.setLocation((tk.getScreenSize().width - threeSelectionWindow.getSize().width)/2, (tk.getScreenSize().height - threeSelectionWindow.getSize().height)/2); threeSelectionDisplay = new MultiSeismogramWindowDisplay(mouseForwarder, motionForwarder, this); addGroupedSelection(creator, threeSelectionDisplay); threeSelectionWindow.getContentPane().add(new JScrollPane(threeSelectionDisplay)); threeSelectionWindow.setVisible(true); }else{ logger.debug("adding another 3Cselection"); addGroupedSelection(creator, threeSelectionDisplay); threeSelectionWindow.toFront(); } }
| 1,117,763
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.