id
stringlengths
7
14
text
stringlengths
1
106k
1595052_6
@Override public boolean isShouldRemove(Throwable throwable) { Throwable current = throwable; while (current != null) { if (current.getClass().getName().equals(JDBC_COMMUNICATIONS_EXCEPTION_NAME)) { return true; } if (current.getClass().getName().equals(JDBC_4_COMMUNICATIONS_EXCEPTION_NAME)) { return true; } current = current.getCause(); } return false; }
1597325_0
@Override @SuppressWarnings("unchecked") public <T> T convert(Object source, Type type) { return (T) DataConversion.convert(source, erase(type)); }
1597325_1
@Override @SuppressWarnings("unchecked") public <T> T convert(final Object source, Type type) { / special case for handling a null source values if (source == null) { return (T) nullValue(type); } // check we already have the exact type if (source.getClass() == type) { return (T) source; } // check if we already have a sub type if (Generics.isSuperType(type, source.getClass())) { return (T) source; } // special case for handling empty string if ("".equals(source) && type != String.class && isEmptyStringNull()) { return null; } Type sourceType = source.getClass(); Class<?> sourceClass = Generics.erase(sourceType); // conversion of all array types to collections if (sourceClass.isArray() && Generics.isSuperType(Collection.class, type)) { return (T) Arrays.asList(source); } // conversion of all collections to arrays Class<?> targetClass = Generics.erase(type); if (Collection.class.isAssignableFrom(sourceClass) && targetClass.isArray()) { // TODO: convert collections to arrays throw new UnsupportedOperationException("Not implemented yet"); } // use primitive wrapper types if (type instanceof Class<?> && ((Class<?>) type).isPrimitive()) { type = Primitives.wrap((Class<?>) type); } // look for converters for exact types or super types Object result = null; do { // first try to find a converter in the forward direction SourceAndTarget key = new SourceAndTarget(sourceType, type); Collection<Converter<?, ?>> forwards = convertersBySourceAndTarget.get(key); // stop at the first converter that returns non-null for (Converter<?, ?> forward : forwards) if ((result = typeSafeTo(forward, source)) != null) break; if (result == null) { // try the reverse direction (target to source) Collection<Converter<?,?>> reverses = convertersBySourceAndTarget.get(key.reverse()); // stop at the first converter that returns non-null for (Converter<?, ?> reverse : reverses) if ((result = typeSafeFrom(reverse, source)) != null) break; } // we have no more super classes to try if (sourceType == 
Object.class) break; // try every super type of the source Class<?> superClass = erase(sourceType).getSuperclass(); sourceType = getExactSuperType(sourceType, superClass); } while (result == null); if (result == null) throw new IllegalStateException("Cannot convert " + source.getClass() + " to " + type); return (T) result; }
1597325_2
@Override @SuppressWarnings("unchecked") public <T> T convert(final Object source, Type type) { / special case for handling a null source values if (source == null) { return (T) nullValue(type); } // check we already have the exact type if (source.getClass() == type) { return (T) source; } // check if we already have a sub type if (Generics.isSuperType(type, source.getClass())) { return (T) source; } // special case for handling empty string if ("".equals(source) && type != String.class && isEmptyStringNull()) { return null; } Type sourceType = source.getClass(); Class<?> sourceClass = Generics.erase(sourceType); // conversion of all array types to collections if (sourceClass.isArray() && Generics.isSuperType(Collection.class, type)) { return (T) Arrays.asList(source); } // conversion of all collections to arrays Class<?> targetClass = Generics.erase(type); if (Collection.class.isAssignableFrom(sourceClass) && targetClass.isArray()) { // TODO: convert collections to arrays throw new UnsupportedOperationException("Not implemented yet"); } // use primitive wrapper types if (type instanceof Class<?> && ((Class<?>) type).isPrimitive()) { type = Primitives.wrap((Class<?>) type); } // look for converters for exact types or super types Object result = null; do { // first try to find a converter in the forward direction SourceAndTarget key = new SourceAndTarget(sourceType, type); Collection<Converter<?, ?>> forwards = convertersBySourceAndTarget.get(key); // stop at the first converter that returns non-null for (Converter<?, ?> forward : forwards) if ((result = typeSafeTo(forward, source)) != null) break; if (result == null) { // try the reverse direction (target to source) Collection<Converter<?,?>> reverses = convertersBySourceAndTarget.get(key.reverse()); // stop at the first converter that returns non-null for (Converter<?, ?> reverse : reverses) if ((result = typeSafeFrom(reverse, source)) != null) break; } // we have no more super classes to try if (sourceType == 
Object.class) break; // try every super type of the source Class<?> superClass = erase(sourceType).getSuperclass(); sourceType = getExactSuperType(sourceType, superClass); } while (result == null); if (result == null) throw new IllegalStateException("Cannot convert " + source.getClass() + " to " + type); return (T) result; }
1597325_3
@Override @SuppressWarnings("unchecked") public <T> T convert(final Object source, Type type) { / special case for handling a null source values if (source == null) { return (T) nullValue(type); } // check we already have the exact type if (source.getClass() == type) { return (T) source; } // check if we already have a sub type if (Generics.isSuperType(type, source.getClass())) { return (T) source; } // special case for handling empty string if ("".equals(source) && type != String.class && isEmptyStringNull()) { return null; } Type sourceType = source.getClass(); Class<?> sourceClass = Generics.erase(sourceType); // conversion of all array types to collections if (sourceClass.isArray() && Generics.isSuperType(Collection.class, type)) { return (T) Arrays.asList(source); } // conversion of all collections to arrays Class<?> targetClass = Generics.erase(type); if (Collection.class.isAssignableFrom(sourceClass) && targetClass.isArray()) { // TODO: convert collections to arrays throw new UnsupportedOperationException("Not implemented yet"); } // use primitive wrapper types if (type instanceof Class<?> && ((Class<?>) type).isPrimitive()) { type = Primitives.wrap((Class<?>) type); } // look for converters for exact types or super types Object result = null; do { // first try to find a converter in the forward direction SourceAndTarget key = new SourceAndTarget(sourceType, type); Collection<Converter<?, ?>> forwards = convertersBySourceAndTarget.get(key); // stop at the first converter that returns non-null for (Converter<?, ?> forward : forwards) if ((result = typeSafeTo(forward, source)) != null) break; if (result == null) { // try the reverse direction (target to source) Collection<Converter<?,?>> reverses = convertersBySourceAndTarget.get(key.reverse()); // stop at the first converter that returns non-null for (Converter<?, ?> reverse : reverses) if ((result = typeSafeFrom(reverse, source)) != null) break; } // we have no more super classes to try if (sourceType == 
Object.class) break; // try every super type of the source Class<?> superClass = erase(sourceType).getSuperclass(); sourceType = getExactSuperType(sourceType, superClass); } while (result == null); if (result == null) throw new IllegalStateException("Cannot convert " + source.getClass() + " to " + type); return (T) result; }
1597325_4
@Override @SuppressWarnings("unchecked") public <T> T convert(final Object source, Type type) { / special case for handling a null source values if (source == null) { return (T) nullValue(type); } // check we already have the exact type if (source.getClass() == type) { return (T) source; } // check if we already have a sub type if (Generics.isSuperType(type, source.getClass())) { return (T) source; } // special case for handling empty string if ("".equals(source) && type != String.class && isEmptyStringNull()) { return null; } Type sourceType = source.getClass(); Class<?> sourceClass = Generics.erase(sourceType); // conversion of all array types to collections if (sourceClass.isArray() && Generics.isSuperType(Collection.class, type)) { return (T) Arrays.asList(source); } // conversion of all collections to arrays Class<?> targetClass = Generics.erase(type); if (Collection.class.isAssignableFrom(sourceClass) && targetClass.isArray()) { // TODO: convert collections to arrays throw new UnsupportedOperationException("Not implemented yet"); } // use primitive wrapper types if (type instanceof Class<?> && ((Class<?>) type).isPrimitive()) { type = Primitives.wrap((Class<?>) type); } // look for converters for exact types or super types Object result = null; do { // first try to find a converter in the forward direction SourceAndTarget key = new SourceAndTarget(sourceType, type); Collection<Converter<?, ?>> forwards = convertersBySourceAndTarget.get(key); // stop at the first converter that returns non-null for (Converter<?, ?> forward : forwards) if ((result = typeSafeTo(forward, source)) != null) break; if (result == null) { // try the reverse direction (target to source) Collection<Converter<?,?>> reverses = convertersBySourceAndTarget.get(key.reverse()); // stop at the first converter that returns non-null for (Converter<?, ?> reverse : reverses) if ((result = typeSafeFrom(reverse, source)) != null) break; } // we have no more super classes to try if (sourceType == 
Object.class) break; // try every super type of the source Class<?> superClass = erase(sourceType).getSuperclass(); sourceType = getExactSuperType(sourceType, superClass); } while (result == null); if (result == null) throw new IllegalStateException("Cannot convert " + source.getClass() + " to " + type); return (T) result; }
1597325_5
@Override @SuppressWarnings("unchecked") public <T> T convert(final Object source, Type type) { / special case for handling a null source values if (source == null) { return (T) nullValue(type); } // check we already have the exact type if (source.getClass() == type) { return (T) source; } // check if we already have a sub type if (Generics.isSuperType(type, source.getClass())) { return (T) source; } // special case for handling empty string if ("".equals(source) && type != String.class && isEmptyStringNull()) { return null; } Type sourceType = source.getClass(); Class<?> sourceClass = Generics.erase(sourceType); // conversion of all array types to collections if (sourceClass.isArray() && Generics.isSuperType(Collection.class, type)) { return (T) Arrays.asList(source); } // conversion of all collections to arrays Class<?> targetClass = Generics.erase(type); if (Collection.class.isAssignableFrom(sourceClass) && targetClass.isArray()) { // TODO: convert collections to arrays throw new UnsupportedOperationException("Not implemented yet"); } // use primitive wrapper types if (type instanceof Class<?> && ((Class<?>) type).isPrimitive()) { type = Primitives.wrap((Class<?>) type); } // look for converters for exact types or super types Object result = null; do { // first try to find a converter in the forward direction SourceAndTarget key = new SourceAndTarget(sourceType, type); Collection<Converter<?, ?>> forwards = convertersBySourceAndTarget.get(key); // stop at the first converter that returns non-null for (Converter<?, ?> forward : forwards) if ((result = typeSafeTo(forward, source)) != null) break; if (result == null) { // try the reverse direction (target to source) Collection<Converter<?,?>> reverses = convertersBySourceAndTarget.get(key.reverse()); // stop at the first converter that returns non-null for (Converter<?, ?> reverse : reverses) if ((result = typeSafeFrom(reverse, source)) != null) break; } // we have no more super classes to try if (sourceType == 
Object.class) break; // try every super type of the source Class<?> superClass = erase(sourceType).getSuperclass(); sourceType = getExactSuperType(sourceType, superClass); } while (result == null); if (result == null) throw new IllegalStateException("Cannot convert " + source.getClass() + " to " + type); return (T) result; }
1597325_6
@Override @SuppressWarnings("unchecked") public <T> T convert(final Object source, Type type) { / special case for handling a null source values if (source == null) { return (T) nullValue(type); } // check we already have the exact type if (source.getClass() == type) { return (T) source; } // check if we already have a sub type if (Generics.isSuperType(type, source.getClass())) { return (T) source; } // special case for handling empty string if ("".equals(source) && type != String.class && isEmptyStringNull()) { return null; } Type sourceType = source.getClass(); Class<?> sourceClass = Generics.erase(sourceType); // conversion of all array types to collections if (sourceClass.isArray() && Generics.isSuperType(Collection.class, type)) { return (T) Arrays.asList(source); } // conversion of all collections to arrays Class<?> targetClass = Generics.erase(type); if (Collection.class.isAssignableFrom(sourceClass) && targetClass.isArray()) { // TODO: convert collections to arrays throw new UnsupportedOperationException("Not implemented yet"); } // use primitive wrapper types if (type instanceof Class<?> && ((Class<?>) type).isPrimitive()) { type = Primitives.wrap((Class<?>) type); } // look for converters for exact types or super types Object result = null; do { // first try to find a converter in the forward direction SourceAndTarget key = new SourceAndTarget(sourceType, type); Collection<Converter<?, ?>> forwards = convertersBySourceAndTarget.get(key); // stop at the first converter that returns non-null for (Converter<?, ?> forward : forwards) if ((result = typeSafeTo(forward, source)) != null) break; if (result == null) { // try the reverse direction (target to source) Collection<Converter<?,?>> reverses = convertersBySourceAndTarget.get(key.reverse()); // stop at the first converter that returns non-null for (Converter<?, ?> reverse : reverses) if ((result = typeSafeFrom(reverse, source)) != null) break; } // we have no more super classes to try if (sourceType == 
Object.class) break; // try every super type of the source Class<?> superClass = erase(sourceType).getSuperclass(); sourceType = getExactSuperType(sourceType, superClass); } while (result == null); if (result == null) throw new IllegalStateException("Cannot convert " + source.getClass() + " to " + type); return (T) result; }
1597325_7
private void processMessage(String message) throws Exception { Boolean toStdOut = logAllMessagesForUsers.get(config.getUsername()); if (toStdOut != null) { if (toStdOut) System.out.println("IMAPrcv[" + config.getUsername() + "]: " + message); else log.info("IMAPrcv[{}]: {}", config.getUsername(), message); } if (ENABLE_WIRE_TRACE) { wireTrace.add(message); log.trace(message); } if (SYSTEM_ERROR_REGEX.matcher(message).matches() || ". NO [ALERT] Account exceeded command or bandwidth limits. (Failure)".equalsIgnoreCase( message.trim())) { log.warn("{} disconnected by IMAP Server due to system error: {}", config.getUsername(), message); disconnectAbnormally(message); return; } try { if (halt) { log.error("Mail client for {} is halted but continues to receive messages, ignoring!", config.getUsername()); return; } if (loginSuccess.getCount() > 0) { if (message.startsWith(CAPABILITY_PREFIX)) { this.capabilities = Arrays.asList( message.substring(CAPABILITY_PREFIX.length() + 1).split("[ ]+")); return; } else if (GMAIL_AUTH_SUCCESS_REGEX.matcher(message).matches() || IMAP_AUTH_SUCCESS_REGEX.matcher(message).matches()) { log.info("Authentication success for user {}", config.getUsername()); loginSuccess.countDown(); } else { Matcher matcher = COMMAND_FAILED_REGEX.matcher(message); if (matcher.find()) { // WARNING: DO NOT COUNTDOWN THE LOGIN LATCH ON FAILURE!!! log.warn("Authentication failed for {} due to: {}", config.getUsername(), message); errorStack.push(new Error(null /* logins have no completion */, extractError(matcher), wireTrace.list())); disconnectAbnormally(message); } } return; } // Copy to local var as the value can change underneath us. FolderObserver observer = this.observer; if (idleRequested.get() || idleAcknowledged.get()) { synchronized (idleMutex) { if (IDLE_ENDED_REGEX.matcher(message).matches()) { idleRequested.compareAndSet(true, false); idleAcknowledged.set(false); // Now fire the events. 
PushedData data = pushedData; pushedData = null; idler.idleEnd(); observer.changed(data.pushAdds.isEmpty() ? null : data.pushAdds, data.pushRemoves.isEmpty() ? null : data.pushRemoves); return; } // Queue up any push notifications to publish to the client in a second. Matcher existsMatcher = IDLE_EXISTS_REGEX.matcher(message); boolean matched = false; if (existsMatcher.matches()) { int number = Integer.parseInt(existsMatcher.group(1)); pushedData.pushAdds.add(number); matched = true; } else { Matcher expungeMatcher = IDLE_EXPUNGE_REGEX.matcher(message); if (expungeMatcher.matches()) { int number = Integer.parseInt(expungeMatcher.group(1)); pushedData.pushRemoves.add(number); matched = true; } } // Stop idling, when we get the idle ended message (next cycle) we can publish what's been gathered. if (matched) { if(!pushedData.idleExitSent) { idler.done(); pushedData.idleExitSent = true; } return; } } } complete(message); } catch (Exception ex) { CommandCompletion completion = completions.poll(); if (completion != null) completion.error(message, ex); else { log.error("Strange exception during mail processing (no completions available!): {}", message, ex); errorStack.push(new Error(null, "No completions available!", wireTrace.list())); } throw ex; } }
1597325_8
static String normalizeDateToken(String token) { return token.replaceAll(" \\(.+\\)$", "").replaceAll("[ ]+", " ").trim(); }
1597325_9
@Override public List<Integer> extract(List<String> messages) { final List<Integer> uids = Lists.newArrayList(); for (int i = 0, messagesSize = messages.size(); i < messagesSize; i++) { String message = messages.get(i); if (null == message || message.isEmpty()) continue; // Discard the success token and any EXISTS or EXPUNGE tokens. try { if (Command.OK_SUCCESS.matcher(message).matches() || Command.isEndOfSequence(message) || MessageStatusExtractor.HELPFUL_NOTIFICATION_PATTERN.matcher(message).matches()) continue; } catch (ExtractionException ee) { log.error("Warning: error parsing search results! {}", messages, ee); continue; } message = message.substring("* search".length()); if (message.trim().isEmpty()) continue; for (String piece : message.split("[ ]+")) { if (piece.isEmpty()) continue; try { uids.add(Integer.valueOf(piece)); } catch (NumberFormatException nfe) { log.error("Unable to parse search result {}", message, nfe); } } } return uids; }
1603876_0
public String getHtmlForFlag(FeatureFlags flag, String userName) { ingBuilder content = new StringBuilder(); userName == null) { content.append(getHtmlForUserFlag(flag, userName,null)); ing[] userNames = flagManager.getUsersForFlag(flag); (userNames.length > 0) { for (String user : userNames) { (userName == null || user.equals(userName)) { content.append(getHtmlForUserFlag(flag, userName, user)); } ing inputString = ""; userName == null) { inputString = getInput(servletUri + "/" + flag); urn getDiv(content.toString(), inputString); }
1603876_1
public String getHtmlForFlag(FeatureFlags flag, String userName) { ingBuilder content = new StringBuilder(); userName == null) { content.append(getHtmlForUserFlag(flag, userName,null)); ing[] userNames = flagManager.getUsersForFlag(flag); (userNames.length > 0) { for (String user : userNames) { (userName == null || user.equals(userName)) { content.append(getHtmlForUserFlag(flag, userName, user)); } ing inputString = ""; userName == null) { inputString = getInput(servletUri + "/" + flag); urn getDiv(content.toString(), inputString); }
1603876_2
public FeatureFlags getFlag(String flagName) { urn (FeatureFlags) invokeStaticMethod("valueOf", new Object[] { flagName }, String.class); }
1603876_3
public Result flipFlag(String flagName) { tureFlags flag = getFlag(flagName); (flag == null) { return Result.NOT_FOUND; urn flipFlag(flag); }
1603876_4
public Result flipFlagForUser(String userName, String flagName) { ThreadUserName(userName); <FeatureFlags, FlagState> userFlagsState = getOrCreateUser(userName); tureFlags flag = getFlag(flagName); gState newFlagState = flag.isUp() ? FlagState.DOWN : FlagState.UP; etThreadUserName(); urn setFlagStateToAndPersist(userFlagsState, userName, flag, newFlagState); }
1604565_0
public Iterable<Vertex> getVertices() { Collection<Iterable<Vertex>> base = new LinkedList<Iterable<Vertex>>(); for (int pos = 0; pos < bases.length; pos++) { base.add(new MultiVertexIterable(pos)); } return new MultiIterable<Vertex>(base); }
1604565_1
public Vertex getVertex(Object id) { Collection<Vertex> baseVertices = new LinkedList<Vertex>(); // TODO: allow bases to be refreshed for (Graph g : bases) { Vertex v = g.getVertex(id); if (null != v) { baseVertices.add(v); } } if (baseVertices.size() > 0) { return new MultiVertex(this, id, baseVertices); } else { return null; } }
1604565_2
public Edge getEdge(Object id) { Collection<Edge> baseEdges = new LinkedList<Edge>(); // TODO: allow bases to be refreshed for (Graph g : bases) { Edge e = g.getEdge(id); if (null != e) { baseEdges.add(e); } } if (baseEdges.size() > 0) { return new MultiEdge(this, id, baseEdges); } else { return null; } }
1604565_3
public void clear() { throw new UnsupportedOperationException(READONLY_MSG); }
1604565_4
public Vertex addVertex(Object id) { throw new UnsupportedOperationException(READONLY_MSG); }
1604565_5
public Edge addEdge(Object id, Vertex outVertex, Vertex inVertex, String label) { throw new UnsupportedOperationException(READONLY_MSG); }
1604565_6
public void removeEdge(Edge edge) { throw new UnsupportedOperationException(READONLY_MSG); }
1604565_7
public void removeVertex(Vertex vertex) { throw new UnsupportedOperationException(READONLY_MSG); }
1604565_8
public Vertex getVertex(Object id) { Collection<Vertex> baseVertices = new LinkedList<Vertex>(); // TODO: allow bases to be refreshed for (Graph g : bases) { Vertex v = g.getVertex(id); if (null != v) { baseVertices.add(v); } } if (baseVertices.size() > 0) { return new MultiVertex(this, id, baseVertices); } else { return null; } }
1604565_9
public Edge getEdge(Object id) { Collection<Edge> baseEdges = new LinkedList<Edge>(); // TODO: allow bases to be refreshed for (Graph g : bases) { Edge e = g.getEdge(id); if (null != e) { baseEdges.add(e); } } if (baseEdges.size() > 0) { return new MultiEdge(this, id, baseEdges); } else { return null; } }
1605107_0
@Override public void defineFilter() throws Exception { /// local variables /// /// define parameters /// int notFiltered = 0; final long outputLines = nRows - nlMean + 1; final long firstLine = ((nlMean - 1) / 2); // indices in matrix system final long lastLine = firstLine + outputLines - 1; final boolean doHamming = (alphaHamming < 0.9999); /// shift parameters //// final double deltaF = RSR / nCols; DoubleMatrix freqAxis = defineFrequencyAxis(nCols, RSR); DoubleMatrix inverseHamming = null; if (doHamming) { inverseHamming = WeightWindows.inverseHamming(freqAxis, RBW, RSR, alphaHamming); } //// Use weighted correlation due to bias in normal definition // Note: Actually better de-weight with autoconvoluted hamming. if (doWeightCorrelFlag) { doWeightCorrel(RSR, RBW, fftLength, power); } DoubleMatrix nlMeanPower = computeNlMeanPower(nlMean, fftLength, power); long shift; // returned by max double meanSNR = 0.; double meanShift = 0.; // Start actual filtering for (long outLine = firstLine; outLine <= lastLine; ++outLine) { double totalPower = nlMeanPower.sum(); double maxValue = nlMeanPower.max(); shift = nlMeanPower.argmax(); long lastShift = shift; double SNR = fftLength * (maxValue / (totalPower - maxValue)); meanSNR += SNR; //// Check for negative shift boolean negShift = false; if (shift > (fftLength / 2)) { shift = fftLength - shift; lastShift = shift; // use this if current shift not OK. negShift = true; } // ______ Do actual filtering ______ if (SNR < SNRthreshold) { notFiltered++; // update notFiltered counter shift = lastShift; logger.warn("using last shift for filter"); } meanShift += shift; DoubleMatrix filterVector = defineFilterVector(deltaF, freqAxis, inverseHamming, shift); //// Use freq. 
as returned by fft //// SpectralUtils.ifftshift_inplace(filterVector); if (!negShift) { filter.putRow((int) outLine, filterVector); } else { fliplr_inplace(filterVector); filter.putRow((int) outLine, filterVector); } /// Update 'walking' mean if (outLine != lastLine) { DoubleMatrix line1 = power.getRow((int) (outLine - firstLine)); DoubleMatrix lineN = power.getRow((int) (outLine - firstLine + nlMean)); nlMeanPower.addi(lineN.sub(line1)); } } // loop over outLines }
1605107_1
@Deprecated public static void filterBlock(ComplexDoubleMatrix masterDataBlock, // updated ComplexDoubleMatrix slaveDataBlock, // updated int nlMean, double SNRthreshold, double RSR, // in MHz double RBW, // in MHz double alphaHamming, int ovsFactor, boolean doWeightCorrelFlag) throws Exception { // returned double meanSNR; double percentNotFiltered; /// define parameters /// final long numLines = masterDataBlock.rows; final long numPixs = masterDataBlock.columns; final long outputLines = numLines - nlMean + 1; final long firstLine = ((nlMean - 1) / 2); // indices in matrix system final long lastLine = firstLine + outputLines - 1; final boolean doHammingFlag = (alphaHamming < 0.9999); // use oversampling before int. gen. final boolean doOversampleFlag = (ovsFactor != 1); int notFiltered = 0; // method counter /// sanity check on input paramaters /// if (!MathUtils.isOdd(nlMean)) { logger.error("nlMean has to be odd."); throw new IllegalArgumentException("nlMean has to be odd."); } if (!MathUtils.isPower2(numPixs)) { logger.error("numPixels (FFT) has to be power of 2."); throw new IllegalArgumentException("numPixels (FFT) has to be power of 2."); } if (!MathUtils.isPower2(ovsFactor)) { logger.error("oversample factor (FFT) has to be power of 2."); throw new IllegalArgumentException("oversample factor (FFT) has to be power of 2."); } if (slaveDataBlock.rows != numLines) { logger.error("slave not same size as master."); throw new IllegalArgumentException("slave not same size as master."); } if (slaveDataBlock.columns != numPixs) { logger.error("slave not same size as master."); throw new IllegalArgumentException("slave not same size as master."); } if (outputLines < 1) { logger.warn("no outputLines, continuing...."); } /// local variables /// DoubleMatrix inverseHamming = null; /// shift parameters //// final double deltaF = RSR / numPixs; final double freq = -RSR / 2.; // defined in defineFrequencyAxis DoubleMatrix freqAxis = defineFrequencyAxis(numPixs, RSR); if 
(doHammingFlag) { inverseHamming = WeightWindows.inverseHamming(freqAxis, RBW, RSR, alphaHamming); } //// COMPUTE CPLX IFG ON THE FLY -> power //// ComplexDoubleMatrix cplxIfg; if (doOversampleFlag) { cplxIfg = SarUtils.computeIfg(masterDataBlock, slaveDataBlock, 1, ovsFactor); } else { cplxIfg = SarUtils.computeIfg(masterDataBlock, slaveDataBlock); } long fftLength = cplxIfg.columns; logger.debug("is real4 accurate enough? it seems so!"); SpectralUtils.fft_inplace(cplxIfg, 2); // cplxIfg = fft over rows DoubleMatrix power = SarUtils.intensity(cplxIfg); // power = cplxIfg.*conj(cplxIfg); //// Use weighted correlation due to bias in normal definition // Note: Actually better de-weight with autoconvoluted hamming. if (doWeightCorrelFlag) { doWeightCorrel(RSR, RBW, fftLength, power); doWeightCorrel(RSR, RBW, numLines, numPixs, fftLength, power); } /// Average power to reduce noise : fft.ing in-place over data rows /// SpectralUtils.fft_inplace(masterDataBlock, 2); SpectralUtils.fft_inplace(slaveDataBlock, 2); logger.trace("Took FFT over rows of master, slave."); DoubleMatrix nlMeanPower = computeNlMeanPower(nlMean, fftLength, power); long shift; // returned by max meanSNR = 0.; double meanShift = 0.; // Start actual filtering for (long outLine = firstLine; outLine <= lastLine; ++outLine) { double totalPower = nlMeanPower.sum(); double maxValue = nlMeanPower.max(); shift = nlMeanPower.argmax(); long lastShift = shift; double SNR = fftLength * (maxValue / (totalPower - maxValue)); meanSNR += SNR; //// Check for negative shift boolean negShift = false; if (shift > (int) (fftLength / 2)) { shift = (int) fftLength - shift; lastShift = shift; // use this if current shift not OK. 
negShift = true; } // ______ Do actual filtering ______ if (SNR < SNRthreshold) { notFiltered++; // update notFiltered counter shift = lastShift; logger.warn("using last shift for filter"); } // interim variables meanShift += shift; DoubleMatrix filter; if (doHammingFlag) { // Newhamming is scaled and centered around new mean // filter is fftshifted filter = WeightWindows.hamming( freqAxis.sub(0.5 * shift * deltaF), RBW - (shift * deltaF), RSR, alphaHamming); filter.muli(inverseHamming); } else { // no weighting of spectra // filter is fftshifted filter = WeightWindows.rect((freqAxis.sub(.5 * shift * deltaF)).div((RBW - shift * deltaF))); } //// Use freq. as returned by fft //// // Note that filter_slave = fliplr(filter_m) // and that this is also valid after ifftshift SpectralUtils.ifftshift_inplace(filter); //// Actual spectral filtering //// if (!negShift) { masterDataBlock.putRow((int) outLine, dotmult(masterDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter))); fliplr_inplace(filter); slaveDataBlock.putRow((int) outLine, dotmult(slaveDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter))); } else { slaveDataBlock.putRow((int) outLine, dotmult(slaveDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter))); fliplr_inplace(filter); masterDataBlock.putRow((int) outLine, dotmult(masterDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter))); } /// Update 'walking' mean if (outLine != lastLine) { DoubleMatrix line1 = power.getRow((int) (outLine - firstLine)); DoubleMatrix lineN = power.getRow((int) (outLine - firstLine + nlMean)); nlMeanPower.addi(lineN.sub(line1)); } } // loop over outLines // IFFT of spectrally filtered data, and return these SpectralUtils.invfft_inplace(masterDataBlock, 2); SpectralUtils.invfft_inplace(slaveDataBlock, 2); // return these main filter call meanShift /= (outputLines - notFiltered); meanSNR /= outputLines; percentNotFiltered = 100. 
* (float) (notFiltered) / (float) outputLines; // Some info for this data block final double meanFrFreq = meanShift * deltaF; // Hz? logger.debug("mean SHIFT for block" + ": " + meanShift + " = " + meanFrFreq / 1e6 + " MHz (fringe freq.)."); logger.debug("mean SNR for block: " + meanSNR); logger.debug("filtered for block" + ": " + (100.00 - percentNotFiltered) + "%"); if (percentNotFiltered > 60.0) { logger.warn("more then 60% of signal filtered?!?"); } }
1605107_2
/**
 * Adaptive range filtering of a master/slave SLC data-block pair, in place.
 * Per output line, the peak of the spectrally averaged interferogram power
 * determines a fringe-frequency shift; a (optionally Hamming-weighted) band
 * filter centred on that shift is applied to the master row and its mirrored
 * counterpart to the slave row.
 *
 * @param masterDataBlock master block; filtered in place (rows x power-of-2 columns)
 * @param slaveDataBlock  slave block; filtered in place, must match master size
 * @param nlMean          number of lines for spectral averaging; must be odd
 * @param SNRthreshold    peaks below this SNR are not used for filtering
 * @param RSR             radar sampling rate (MHz, per original comment)
 * @param RBW             range bandwidth (MHz, per original comment)
 * @param alphaHamming    Hamming alpha; values &lt; 0.9999 enable de/re-weighting
 * @param ovsFactor       oversampling factor for ifg generation; power of 2
 * @param doWeightCorrelFlag de-weight power spectrum before peak estimation
 * @throws IllegalArgumentException on invalid sizes/parameters (see checks below)
 *
 * NOTE(review): doWeightCorrel is invoked twice with two different overloads --
 * looks like a merge leftover; confirm which call is intended.
 * NOTE(review): lastShift is re-initialized on every loop iteration, so the
 * "using last shift" branch effectively reuses the current (possibly mirrored)
 * shift rather than the previous line's shift -- confirm against DORIS original.
 */
@Deprecated
public static void filterBlock(ComplexDoubleMatrix masterDataBlock, // updated
                               ComplexDoubleMatrix slaveDataBlock,  // updated
                               int nlMean,
                               double SNRthreshold,
                               double RSR, // in MHz
                               double RBW, // in MHz
                               double alphaHamming,
                               int ovsFactor,
                               boolean doWeightCorrelFlag) throws Exception {

    // returned
    double meanSNR;
    double percentNotFiltered;

    /// define parameters ///
    final long numLines = masterDataBlock.rows;
    final long numPixs = masterDataBlock.columns;
    final long outputLines = numLines - nlMean + 1;
    final long firstLine = ((nlMean - 1) / 2); // indices in matrix system
    final long lastLine = firstLine + outputLines - 1;
    final boolean doHammingFlag = (alphaHamming < 0.9999);
    // use oversampling before int. gen.
    final boolean doOversampleFlag = (ovsFactor != 1);
    int notFiltered = 0; // method counter

    /// sanity check on input paramaters ///
    if (!MathUtils.isOdd(nlMean)) {
        logger.error("nlMean has to be odd.");
        throw new IllegalArgumentException("nlMean has to be odd.");
    }
    if (!MathUtils.isPower2(numPixs)) {
        logger.error("numPixels (FFT) has to be power of 2.");
        throw new IllegalArgumentException("numPixels (FFT) has to be power of 2.");
    }
    if (!MathUtils.isPower2(ovsFactor)) {
        logger.error("oversample factor (FFT) has to be power of 2.");
        throw new IllegalArgumentException("oversample factor (FFT) has to be power of 2.");
    }
    if (slaveDataBlock.rows != numLines) {
        logger.error("slave not same size as master.");
        throw new IllegalArgumentException("slave not same size as master.");
    }
    if (slaveDataBlock.columns != numPixs) {
        logger.error("slave not same size as master.");
        throw new IllegalArgumentException("slave not same size as master.");
    }
    if (outputLines < 1) {
        logger.warn("no outputLines, continuing....");
    }

    /// local variables ///
    DoubleMatrix inverseHamming = null;

    /// shift parameters ////
    final double deltaF = RSR / numPixs;
    // NOTE(review): freq is never read below -- defined in defineFrequencyAxis
    final double freq = -RSR / 2.; // defined in defineFrequencyAxis
    DoubleMatrix freqAxis = defineFrequencyAxis(numPixs, RSR);
    if (doHammingFlag) {
        inverseHamming = WeightWindows.inverseHamming(freqAxis, RBW, RSR, alphaHamming);
    }

    //// COMPUTE CPLX IFG ON THE FLY -> power ////
    ComplexDoubleMatrix cplxIfg;
    if (doOversampleFlag) {
        cplxIfg = SarUtils.computeIfg(masterDataBlock, slaveDataBlock, 1, ovsFactor);
    } else {
        cplxIfg = SarUtils.computeIfg(masterDataBlock, slaveDataBlock);
    }

    long fftLength = cplxIfg.columns;
    logger.debug("is real4 accurate enough? it seems so!");
    SpectralUtils.fft_inplace(cplxIfg, 2); // cplxIfg = fft over rows
    DoubleMatrix power = SarUtils.intensity(cplxIfg); // power = cplxIfg.*conj(cplxIfg);

    //// Use weighted correlation due to bias in normal definition
    // Note: Actually better de-weight with autoconvoluted hamming.
    // NOTE(review): two different overloads called back to back -- confirm intent.
    if (doWeightCorrelFlag) {
        doWeightCorrel(RSR, RBW, fftLength, power);
        doWeightCorrel(RSR, RBW, numLines, numPixs, fftLength, power);
    }

    /// Average power to reduce noise : fft.ing in-place over data rows ///
    SpectralUtils.fft_inplace(masterDataBlock, 2);
    SpectralUtils.fft_inplace(slaveDataBlock, 2);
    logger.trace("Took FFT over rows of master, slave.");
    DoubleMatrix nlMeanPower = computeNlMeanPower(nlMean, fftLength, power);

    long shift; // returned by max
    meanSNR = 0.;
    double meanShift = 0.;

    // Start actual filtering
    for (long outLine = firstLine; outLine <= lastLine; ++outLine) {
        double totalPower = nlMeanPower.sum();
        double maxValue = nlMeanPower.max();
        shift = nlMeanPower.argmax();
        long lastShift = shift;
        double SNR = fftLength * (maxValue / (totalPower - maxValue));
        meanSNR += SNR;

        //// Check for negative shift
        boolean negShift = false;
        if (shift > (int) (fftLength / 2)) {
            shift = (int) fftLength - shift;
            lastShift = shift; // use this if current shift not OK.
            negShift = true;
        }

        // ______ Do actual filtering ______
        if (SNR < SNRthreshold) {
            notFiltered++; // update notFiltered counter
            shift = lastShift;
            logger.warn("using last shift for filter");
        }

        // interim variables
        meanShift += shift;
        DoubleMatrix filter;

        if (doHammingFlag) {
            // Newhamming is scaled and centered around new mean
            // filter is fftshifted
            filter = WeightWindows.hamming(
                    freqAxis.sub(0.5 * shift * deltaF),
                    RBW - (shift * deltaF),
                    RSR, alphaHamming);
            filter.muli(inverseHamming);
        } else {
            // no weighting of spectra
            // filter is fftshifted
            filter = WeightWindows.rect((freqAxis.sub(.5 * shift * deltaF)).div((RBW - shift * deltaF)));
        }

        //// Use freq. as returned by fft ////
        // Note that filter_slave = fliplr(filter_m)
        // and that this is also valid after ifftshift
        SpectralUtils.ifftshift_inplace(filter);

        //// Actual spectral filtering ////
        if (!negShift) {
            masterDataBlock.putRow((int) outLine, dotmult(masterDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter)));
            fliplr_inplace(filter);
            slaveDataBlock.putRow((int) outLine, dotmult(slaveDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter)));
        } else {
            slaveDataBlock.putRow((int) outLine, dotmult(slaveDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter)));
            fliplr_inplace(filter);
            masterDataBlock.putRow((int) outLine, dotmult(masterDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter)));
        }

        /// Update 'walking' mean
        if (outLine != lastLine) {
            DoubleMatrix line1 = power.getRow((int) (outLine - firstLine));
            DoubleMatrix lineN = power.getRow((int) (outLine - firstLine + nlMean));
            nlMeanPower.addi(lineN.sub(line1));
        }
    } // loop over outLines

    // IFFT of spectrally filtered data, and return these
    SpectralUtils.invfft_inplace(masterDataBlock, 2);
    SpectralUtils.invfft_inplace(slaveDataBlock, 2);

    // return these main filter call
    meanShift /= (outputLines - notFiltered);
    meanSNR /= outputLines;
    percentNotFiltered = 100. * (float) (notFiltered) / (float) outputLines;

    // Some info for this data block
    final double meanFrFreq = meanShift * deltaF; // Hz?
    logger.debug("mean SHIFT for block" + ": " + meanShift + " = " + meanFrFreq / 1e6 + " MHz (fringe freq.).");
    logger.debug("mean SNR for block: " + meanSNR);
    logger.debug("filtered for block" + ": " + (100.00 - percentNotFiltered) + "%");
    if (percentNotFiltered > 60.0) {
        logger.warn("more then 60% of signal filtered?!?");
    }
}
1605107_3
/**
 * Adaptive range filtering of a master/slave SLC data-block pair, in place.
 * Per output line, the peak of the spectrally averaged interferogram power
 * determines a fringe-frequency shift; a (optionally Hamming-weighted) band
 * filter centred on that shift is applied to the master row and its mirrored
 * counterpart to the slave row.
 *
 * @param masterDataBlock master block; filtered in place (rows x power-of-2 columns)
 * @param slaveDataBlock  slave block; filtered in place, must match master size
 * @param nlMean          number of lines for spectral averaging; must be odd
 * @param SNRthreshold    peaks below this SNR are not used for filtering
 * @param RSR             radar sampling rate (MHz, per original comment)
 * @param RBW             range bandwidth (MHz, per original comment)
 * @param alphaHamming    Hamming alpha; values &lt; 0.9999 enable de/re-weighting
 * @param ovsFactor       oversampling factor for ifg generation; power of 2
 * @param doWeightCorrelFlag de-weight power spectrum before peak estimation
 * @throws IllegalArgumentException on invalid sizes/parameters (see checks below)
 *
 * NOTE(review): doWeightCorrel is invoked twice with two different overloads --
 * looks like a merge leftover; confirm which call is intended.
 * NOTE(review): lastShift is re-initialized on every loop iteration, so the
 * "using last shift" branch effectively reuses the current (possibly mirrored)
 * shift rather than the previous line's shift -- confirm against DORIS original.
 */
@Deprecated
public static void filterBlock(ComplexDoubleMatrix masterDataBlock, // updated
                               ComplexDoubleMatrix slaveDataBlock,  // updated
                               int nlMean,
                               double SNRthreshold,
                               double RSR, // in MHz
                               double RBW, // in MHz
                               double alphaHamming,
                               int ovsFactor,
                               boolean doWeightCorrelFlag) throws Exception {

    // returned
    double meanSNR;
    double percentNotFiltered;

    /// define parameters ///
    final long numLines = masterDataBlock.rows;
    final long numPixs = masterDataBlock.columns;
    final long outputLines = numLines - nlMean + 1;
    final long firstLine = ((nlMean - 1) / 2); // indices in matrix system
    final long lastLine = firstLine + outputLines - 1;
    final boolean doHammingFlag = (alphaHamming < 0.9999);
    // use oversampling before int. gen.
    final boolean doOversampleFlag = (ovsFactor != 1);
    int notFiltered = 0; // method counter

    /// sanity check on input paramaters ///
    if (!MathUtils.isOdd(nlMean)) {
        logger.error("nlMean has to be odd.");
        throw new IllegalArgumentException("nlMean has to be odd.");
    }
    if (!MathUtils.isPower2(numPixs)) {
        logger.error("numPixels (FFT) has to be power of 2.");
        throw new IllegalArgumentException("numPixels (FFT) has to be power of 2.");
    }
    if (!MathUtils.isPower2(ovsFactor)) {
        logger.error("oversample factor (FFT) has to be power of 2.");
        throw new IllegalArgumentException("oversample factor (FFT) has to be power of 2.");
    }
    if (slaveDataBlock.rows != numLines) {
        logger.error("slave not same size as master.");
        throw new IllegalArgumentException("slave not same size as master.");
    }
    if (slaveDataBlock.columns != numPixs) {
        logger.error("slave not same size as master.");
        throw new IllegalArgumentException("slave not same size as master.");
    }
    if (outputLines < 1) {
        logger.warn("no outputLines, continuing....");
    }

    /// local variables ///
    DoubleMatrix inverseHamming = null;

    /// shift parameters ////
    final double deltaF = RSR / numPixs;
    // NOTE(review): freq is never read below -- defined in defineFrequencyAxis
    final double freq = -RSR / 2.; // defined in defineFrequencyAxis
    DoubleMatrix freqAxis = defineFrequencyAxis(numPixs, RSR);
    if (doHammingFlag) {
        inverseHamming = WeightWindows.inverseHamming(freqAxis, RBW, RSR, alphaHamming);
    }

    //// COMPUTE CPLX IFG ON THE FLY -> power ////
    ComplexDoubleMatrix cplxIfg;
    if (doOversampleFlag) {
        cplxIfg = SarUtils.computeIfg(masterDataBlock, slaveDataBlock, 1, ovsFactor);
    } else {
        cplxIfg = SarUtils.computeIfg(masterDataBlock, slaveDataBlock);
    }

    long fftLength = cplxIfg.columns;
    logger.debug("is real4 accurate enough? it seems so!");
    SpectralUtils.fft_inplace(cplxIfg, 2); // cplxIfg = fft over rows
    DoubleMatrix power = SarUtils.intensity(cplxIfg); // power = cplxIfg.*conj(cplxIfg);

    //// Use weighted correlation due to bias in normal definition
    // Note: Actually better de-weight with autoconvoluted hamming.
    // NOTE(review): two different overloads called back to back -- confirm intent.
    if (doWeightCorrelFlag) {
        doWeightCorrel(RSR, RBW, fftLength, power);
        doWeightCorrel(RSR, RBW, numLines, numPixs, fftLength, power);
    }

    /// Average power to reduce noise : fft.ing in-place over data rows ///
    SpectralUtils.fft_inplace(masterDataBlock, 2);
    SpectralUtils.fft_inplace(slaveDataBlock, 2);
    logger.trace("Took FFT over rows of master, slave.");
    DoubleMatrix nlMeanPower = computeNlMeanPower(nlMean, fftLength, power);

    long shift; // returned by max
    meanSNR = 0.;
    double meanShift = 0.;

    // Start actual filtering
    for (long outLine = firstLine; outLine <= lastLine; ++outLine) {
        double totalPower = nlMeanPower.sum();
        double maxValue = nlMeanPower.max();
        shift = nlMeanPower.argmax();
        long lastShift = shift;
        double SNR = fftLength * (maxValue / (totalPower - maxValue));
        meanSNR += SNR;

        //// Check for negative shift
        boolean negShift = false;
        if (shift > (int) (fftLength / 2)) {
            shift = (int) fftLength - shift;
            lastShift = shift; // use this if current shift not OK.
            negShift = true;
        }

        // ______ Do actual filtering ______
        if (SNR < SNRthreshold) {
            notFiltered++; // update notFiltered counter
            shift = lastShift;
            logger.warn("using last shift for filter");
        }

        // interim variables
        meanShift += shift;
        DoubleMatrix filter;

        if (doHammingFlag) {
            // Newhamming is scaled and centered around new mean
            // filter is fftshifted
            filter = WeightWindows.hamming(
                    freqAxis.sub(0.5 * shift * deltaF),
                    RBW - (shift * deltaF),
                    RSR, alphaHamming);
            filter.muli(inverseHamming);
        } else {
            // no weighting of spectra
            // filter is fftshifted
            filter = WeightWindows.rect((freqAxis.sub(.5 * shift * deltaF)).div((RBW - shift * deltaF)));
        }

        //// Use freq. as returned by fft ////
        // Note that filter_slave = fliplr(filter_m)
        // and that this is also valid after ifftshift
        SpectralUtils.ifftshift_inplace(filter);

        //// Actual spectral filtering ////
        if (!negShift) {
            masterDataBlock.putRow((int) outLine, dotmult(masterDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter)));
            fliplr_inplace(filter);
            slaveDataBlock.putRow((int) outLine, dotmult(slaveDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter)));
        } else {
            slaveDataBlock.putRow((int) outLine, dotmult(slaveDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter)));
            fliplr_inplace(filter);
            masterDataBlock.putRow((int) outLine, dotmult(masterDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter)));
        }

        /// Update 'walking' mean
        if (outLine != lastLine) {
            DoubleMatrix line1 = power.getRow((int) (outLine - firstLine));
            DoubleMatrix lineN = power.getRow((int) (outLine - firstLine + nlMean));
            nlMeanPower.addi(lineN.sub(line1));
        }
    } // loop over outLines

    // IFFT of spectrally filtered data, and return these
    SpectralUtils.invfft_inplace(masterDataBlock, 2);
    SpectralUtils.invfft_inplace(slaveDataBlock, 2);

    // return these main filter call
    meanShift /= (outputLines - notFiltered);
    meanSNR /= outputLines;
    percentNotFiltered = 100. * (float) (notFiltered) / (float) outputLines;

    // Some info for this data block
    final double meanFrFreq = meanShift * deltaF; // Hz?
    logger.debug("mean SHIFT for block" + ": " + meanShift + " = " + meanFrFreq / 1e6 + " MHz (fringe freq.).");
    logger.debug("mean SNR for block: " + meanSNR);
    logger.debug("filtered for block" + ": " + (100.00 - percentNotFiltered) + "%");
    if (percentNotFiltered > 60.0) {
        logger.warn("more then 60% of signal filtered?!?");
    }
}
1605107_4
/**
 * Adaptive range filtering of a master/slave SLC data-block pair, in place.
 * Per output line, the peak of the spectrally averaged interferogram power
 * determines a fringe-frequency shift; a (optionally Hamming-weighted) band
 * filter centred on that shift is applied to the master row and its mirrored
 * counterpart to the slave row.
 *
 * @param masterDataBlock master block; filtered in place (rows x power-of-2 columns)
 * @param slaveDataBlock  slave block; filtered in place, must match master size
 * @param nlMean          number of lines for spectral averaging; must be odd
 * @param SNRthreshold    peaks below this SNR are not used for filtering
 * @param RSR             radar sampling rate (MHz, per original comment)
 * @param RBW             range bandwidth (MHz, per original comment)
 * @param alphaHamming    Hamming alpha; values &lt; 0.9999 enable de/re-weighting
 * @param ovsFactor       oversampling factor for ifg generation; power of 2
 * @param doWeightCorrelFlag de-weight power spectrum before peak estimation
 * @throws IllegalArgumentException on invalid sizes/parameters (see checks below)
 *
 * NOTE(review): doWeightCorrel is invoked twice with two different overloads --
 * looks like a merge leftover; confirm which call is intended.
 * NOTE(review): lastShift is re-initialized on every loop iteration, so the
 * "using last shift" branch effectively reuses the current (possibly mirrored)
 * shift rather than the previous line's shift -- confirm against DORIS original.
 */
@Deprecated
public static void filterBlock(ComplexDoubleMatrix masterDataBlock, // updated
                               ComplexDoubleMatrix slaveDataBlock,  // updated
                               int nlMean,
                               double SNRthreshold,
                               double RSR, // in MHz
                               double RBW, // in MHz
                               double alphaHamming,
                               int ovsFactor,
                               boolean doWeightCorrelFlag) throws Exception {

    // returned
    double meanSNR;
    double percentNotFiltered;

    /// define parameters ///
    final long numLines = masterDataBlock.rows;
    final long numPixs = masterDataBlock.columns;
    final long outputLines = numLines - nlMean + 1;
    final long firstLine = ((nlMean - 1) / 2); // indices in matrix system
    final long lastLine = firstLine + outputLines - 1;
    final boolean doHammingFlag = (alphaHamming < 0.9999);
    // use oversampling before int. gen.
    final boolean doOversampleFlag = (ovsFactor != 1);
    int notFiltered = 0; // method counter

    /// sanity check on input paramaters ///
    if (!MathUtils.isOdd(nlMean)) {
        logger.error("nlMean has to be odd.");
        throw new IllegalArgumentException("nlMean has to be odd.");
    }
    if (!MathUtils.isPower2(numPixs)) {
        logger.error("numPixels (FFT) has to be power of 2.");
        throw new IllegalArgumentException("numPixels (FFT) has to be power of 2.");
    }
    if (!MathUtils.isPower2(ovsFactor)) {
        logger.error("oversample factor (FFT) has to be power of 2.");
        throw new IllegalArgumentException("oversample factor (FFT) has to be power of 2.");
    }
    if (slaveDataBlock.rows != numLines) {
        logger.error("slave not same size as master.");
        throw new IllegalArgumentException("slave not same size as master.");
    }
    if (slaveDataBlock.columns != numPixs) {
        logger.error("slave not same size as master.");
        throw new IllegalArgumentException("slave not same size as master.");
    }
    if (outputLines < 1) {
        logger.warn("no outputLines, continuing....");
    }

    /// local variables ///
    DoubleMatrix inverseHamming = null;

    /// shift parameters ////
    final double deltaF = RSR / numPixs;
    // NOTE(review): freq is never read below -- defined in defineFrequencyAxis
    final double freq = -RSR / 2.; // defined in defineFrequencyAxis
    DoubleMatrix freqAxis = defineFrequencyAxis(numPixs, RSR);
    if (doHammingFlag) {
        inverseHamming = WeightWindows.inverseHamming(freqAxis, RBW, RSR, alphaHamming);
    }

    //// COMPUTE CPLX IFG ON THE FLY -> power ////
    ComplexDoubleMatrix cplxIfg;
    if (doOversampleFlag) {
        cplxIfg = SarUtils.computeIfg(masterDataBlock, slaveDataBlock, 1, ovsFactor);
    } else {
        cplxIfg = SarUtils.computeIfg(masterDataBlock, slaveDataBlock);
    }

    long fftLength = cplxIfg.columns;
    logger.debug("is real4 accurate enough? it seems so!");
    SpectralUtils.fft_inplace(cplxIfg, 2); // cplxIfg = fft over rows
    DoubleMatrix power = SarUtils.intensity(cplxIfg); // power = cplxIfg.*conj(cplxIfg);

    //// Use weighted correlation due to bias in normal definition
    // Note: Actually better de-weight with autoconvoluted hamming.
    // NOTE(review): two different overloads called back to back -- confirm intent.
    if (doWeightCorrelFlag) {
        doWeightCorrel(RSR, RBW, fftLength, power);
        doWeightCorrel(RSR, RBW, numLines, numPixs, fftLength, power);
    }

    /// Average power to reduce noise : fft.ing in-place over data rows ///
    SpectralUtils.fft_inplace(masterDataBlock, 2);
    SpectralUtils.fft_inplace(slaveDataBlock, 2);
    logger.trace("Took FFT over rows of master, slave.");
    DoubleMatrix nlMeanPower = computeNlMeanPower(nlMean, fftLength, power);

    long shift; // returned by max
    meanSNR = 0.;
    double meanShift = 0.;

    // Start actual filtering
    for (long outLine = firstLine; outLine <= lastLine; ++outLine) {
        double totalPower = nlMeanPower.sum();
        double maxValue = nlMeanPower.max();
        shift = nlMeanPower.argmax();
        long lastShift = shift;
        double SNR = fftLength * (maxValue / (totalPower - maxValue));
        meanSNR += SNR;

        //// Check for negative shift
        boolean negShift = false;
        if (shift > (int) (fftLength / 2)) {
            shift = (int) fftLength - shift;
            lastShift = shift; // use this if current shift not OK.
            negShift = true;
        }

        // ______ Do actual filtering ______
        if (SNR < SNRthreshold) {
            notFiltered++; // update notFiltered counter
            shift = lastShift;
            logger.warn("using last shift for filter");
        }

        // interim variables
        meanShift += shift;
        DoubleMatrix filter;

        if (doHammingFlag) {
            // Newhamming is scaled and centered around new mean
            // filter is fftshifted
            filter = WeightWindows.hamming(
                    freqAxis.sub(0.5 * shift * deltaF),
                    RBW - (shift * deltaF),
                    RSR, alphaHamming);
            filter.muli(inverseHamming);
        } else {
            // no weighting of spectra
            // filter is fftshifted
            filter = WeightWindows.rect((freqAxis.sub(.5 * shift * deltaF)).div((RBW - shift * deltaF)));
        }

        //// Use freq. as returned by fft ////
        // Note that filter_slave = fliplr(filter_m)
        // and that this is also valid after ifftshift
        SpectralUtils.ifftshift_inplace(filter);

        //// Actual spectral filtering ////
        if (!negShift) {
            masterDataBlock.putRow((int) outLine, dotmult(masterDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter)));
            fliplr_inplace(filter);
            slaveDataBlock.putRow((int) outLine, dotmult(slaveDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter)));
        } else {
            slaveDataBlock.putRow((int) outLine, dotmult(slaveDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter)));
            fliplr_inplace(filter);
            masterDataBlock.putRow((int) outLine, dotmult(masterDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter)));
        }

        /// Update 'walking' mean
        if (outLine != lastLine) {
            DoubleMatrix line1 = power.getRow((int) (outLine - firstLine));
            DoubleMatrix lineN = power.getRow((int) (outLine - firstLine + nlMean));
            nlMeanPower.addi(lineN.sub(line1));
        }
    } // loop over outLines

    // IFFT of spectrally filtered data, and return these
    SpectralUtils.invfft_inplace(masterDataBlock, 2);
    SpectralUtils.invfft_inplace(slaveDataBlock, 2);

    // return these main filter call
    meanShift /= (outputLines - notFiltered);
    meanSNR /= outputLines;
    percentNotFiltered = 100. * (float) (notFiltered) / (float) outputLines;

    // Some info for this data block
    final double meanFrFreq = meanShift * deltaF; // Hz?
    logger.debug("mean SHIFT for block" + ": " + meanShift + " = " + meanFrFreq / 1e6 + " MHz (fringe freq.).");
    logger.debug("mean SNR for block: " + meanSNR);
    logger.debug("filtered for block" + ": " + (100.00 - percentNotFiltered) + "%");
    if (percentNotFiltered > 60.0) {
        logger.warn("more then 60% of signal filtered?!?");
    }
}
1605107_5
/**
 * Adaptive range filtering of a master/slave SLC data-block pair, in place.
 * Per output line, the peak of the spectrally averaged interferogram power
 * determines a fringe-frequency shift; a (optionally Hamming-weighted) band
 * filter centred on that shift is applied to the master row and its mirrored
 * counterpart to the slave row.
 *
 * @param masterDataBlock master block; filtered in place (rows x power-of-2 columns)
 * @param slaveDataBlock  slave block; filtered in place, must match master size
 * @param nlMean          number of lines for spectral averaging; must be odd
 * @param SNRthreshold    peaks below this SNR are not used for filtering
 * @param RSR             radar sampling rate (MHz, per original comment)
 * @param RBW             range bandwidth (MHz, per original comment)
 * @param alphaHamming    Hamming alpha; values &lt; 0.9999 enable de/re-weighting
 * @param ovsFactor       oversampling factor for ifg generation; power of 2
 * @param doWeightCorrelFlag de-weight power spectrum before peak estimation
 * @throws IllegalArgumentException on invalid sizes/parameters (see checks below)
 *
 * NOTE(review): doWeightCorrel is invoked twice with two different overloads --
 * looks like a merge leftover; confirm which call is intended.
 * NOTE(review): lastShift is re-initialized on every loop iteration, so the
 * "using last shift" branch effectively reuses the current (possibly mirrored)
 * shift rather than the previous line's shift -- confirm against DORIS original.
 */
@Deprecated
public static void filterBlock(ComplexDoubleMatrix masterDataBlock, // updated
                               ComplexDoubleMatrix slaveDataBlock,  // updated
                               int nlMean,
                               double SNRthreshold,
                               double RSR, // in MHz
                               double RBW, // in MHz
                               double alphaHamming,
                               int ovsFactor,
                               boolean doWeightCorrelFlag) throws Exception {

    // returned
    double meanSNR;
    double percentNotFiltered;

    /// define parameters ///
    final long numLines = masterDataBlock.rows;
    final long numPixs = masterDataBlock.columns;
    final long outputLines = numLines - nlMean + 1;
    final long firstLine = ((nlMean - 1) / 2); // indices in matrix system
    final long lastLine = firstLine + outputLines - 1;
    final boolean doHammingFlag = (alphaHamming < 0.9999);
    // use oversampling before int. gen.
    final boolean doOversampleFlag = (ovsFactor != 1);
    int notFiltered = 0; // method counter

    /// sanity check on input paramaters ///
    if (!MathUtils.isOdd(nlMean)) {
        logger.error("nlMean has to be odd.");
        throw new IllegalArgumentException("nlMean has to be odd.");
    }
    if (!MathUtils.isPower2(numPixs)) {
        logger.error("numPixels (FFT) has to be power of 2.");
        throw new IllegalArgumentException("numPixels (FFT) has to be power of 2.");
    }
    if (!MathUtils.isPower2(ovsFactor)) {
        logger.error("oversample factor (FFT) has to be power of 2.");
        throw new IllegalArgumentException("oversample factor (FFT) has to be power of 2.");
    }
    if (slaveDataBlock.rows != numLines) {
        logger.error("slave not same size as master.");
        throw new IllegalArgumentException("slave not same size as master.");
    }
    if (slaveDataBlock.columns != numPixs) {
        logger.error("slave not same size as master.");
        throw new IllegalArgumentException("slave not same size as master.");
    }
    if (outputLines < 1) {
        logger.warn("no outputLines, continuing....");
    }

    /// local variables ///
    DoubleMatrix inverseHamming = null;

    /// shift parameters ////
    final double deltaF = RSR / numPixs;
    // NOTE(review): freq is never read below -- defined in defineFrequencyAxis
    final double freq = -RSR / 2.; // defined in defineFrequencyAxis
    DoubleMatrix freqAxis = defineFrequencyAxis(numPixs, RSR);
    if (doHammingFlag) {
        inverseHamming = WeightWindows.inverseHamming(freqAxis, RBW, RSR, alphaHamming);
    }

    //// COMPUTE CPLX IFG ON THE FLY -> power ////
    ComplexDoubleMatrix cplxIfg;
    if (doOversampleFlag) {
        cplxIfg = SarUtils.computeIfg(masterDataBlock, slaveDataBlock, 1, ovsFactor);
    } else {
        cplxIfg = SarUtils.computeIfg(masterDataBlock, slaveDataBlock);
    }

    long fftLength = cplxIfg.columns;
    logger.debug("is real4 accurate enough? it seems so!");
    SpectralUtils.fft_inplace(cplxIfg, 2); // cplxIfg = fft over rows
    DoubleMatrix power = SarUtils.intensity(cplxIfg); // power = cplxIfg.*conj(cplxIfg);

    //// Use weighted correlation due to bias in normal definition
    // Note: Actually better de-weight with autoconvoluted hamming.
    // NOTE(review): two different overloads called back to back -- confirm intent.
    if (doWeightCorrelFlag) {
        doWeightCorrel(RSR, RBW, fftLength, power);
        doWeightCorrel(RSR, RBW, numLines, numPixs, fftLength, power);
    }

    /// Average power to reduce noise : fft.ing in-place over data rows ///
    SpectralUtils.fft_inplace(masterDataBlock, 2);
    SpectralUtils.fft_inplace(slaveDataBlock, 2);
    logger.trace("Took FFT over rows of master, slave.");
    DoubleMatrix nlMeanPower = computeNlMeanPower(nlMean, fftLength, power);

    long shift; // returned by max
    meanSNR = 0.;
    double meanShift = 0.;

    // Start actual filtering
    for (long outLine = firstLine; outLine <= lastLine; ++outLine) {
        double totalPower = nlMeanPower.sum();
        double maxValue = nlMeanPower.max();
        shift = nlMeanPower.argmax();
        long lastShift = shift;
        double SNR = fftLength * (maxValue / (totalPower - maxValue));
        meanSNR += SNR;

        //// Check for negative shift
        boolean negShift = false;
        if (shift > (int) (fftLength / 2)) {
            shift = (int) fftLength - shift;
            lastShift = shift; // use this if current shift not OK.
            negShift = true;
        }

        // ______ Do actual filtering ______
        if (SNR < SNRthreshold) {
            notFiltered++; // update notFiltered counter
            shift = lastShift;
            logger.warn("using last shift for filter");
        }

        // interim variables
        meanShift += shift;
        DoubleMatrix filter;

        if (doHammingFlag) {
            // Newhamming is scaled and centered around new mean
            // filter is fftshifted
            filter = WeightWindows.hamming(
                    freqAxis.sub(0.5 * shift * deltaF),
                    RBW - (shift * deltaF),
                    RSR, alphaHamming);
            filter.muli(inverseHamming);
        } else {
            // no weighting of spectra
            // filter is fftshifted
            filter = WeightWindows.rect((freqAxis.sub(.5 * shift * deltaF)).div((RBW - shift * deltaF)));
        }

        //// Use freq. as returned by fft ////
        // Note that filter_slave = fliplr(filter_m)
        // and that this is also valid after ifftshift
        SpectralUtils.ifftshift_inplace(filter);

        //// Actual spectral filtering ////
        if (!negShift) {
            masterDataBlock.putRow((int) outLine, dotmult(masterDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter)));
            fliplr_inplace(filter);
            slaveDataBlock.putRow((int) outLine, dotmult(slaveDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter)));
        } else {
            slaveDataBlock.putRow((int) outLine, dotmult(slaveDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter)));
            fliplr_inplace(filter);
            masterDataBlock.putRow((int) outLine, dotmult(masterDataBlock.getRow((int) outLine), new ComplexDoubleMatrix(filter)));
        }

        /// Update 'walking' mean
        if (outLine != lastLine) {
            DoubleMatrix line1 = power.getRow((int) (outLine - firstLine));
            DoubleMatrix lineN = power.getRow((int) (outLine - firstLine + nlMean));
            nlMeanPower.addi(lineN.sub(line1));
        }
    } // loop over outLines

    // IFFT of spectrally filtered data, and return these
    SpectralUtils.invfft_inplace(masterDataBlock, 2);
    SpectralUtils.invfft_inplace(slaveDataBlock, 2);

    // return these main filter call
    meanShift /= (outputLines - notFiltered);
    meanSNR /= outputLines;
    percentNotFiltered = 100. * (float) (notFiltered) / (float) outputLines;

    // Some info for this data block
    final double meanFrFreq = meanShift * deltaF; // Hz?
    logger.debug("mean SHIFT for block" + ": " + meanShift + " = " + meanFrFreq / 1e6 + " MHz (fringe freq.).");
    logger.debug("mean SNR for block: " + meanSNR);
    logger.debug("filtered for block" + ": " + (100.00 - percentNotFiltered) + "%");
    if (percentNotFiltered > 60.0) {
        logger.warn("more then 60% of signal filtered?!?");
    }
}
1605107_6
public static DoubleMatrix arrangeKernel2d(DoubleMatrix kernel2dIn, final double scaleFactor) { final int kernelLines = kernel2dIn.rows; final int kernelPixels = kernel2dIn.columns; final int size = kernelLines; final int hbsL = (kernelLines / 2); final int hbsP = (kernelPixels / 2); final int extraL = isEven(kernelLines) ? 1 : 0; // 1 less to fill final int extraP = isEven(kernelPixels) ? 1 : 0; // 1 less to fill DoubleMatrix kernel2dOut = new DoubleMatrix(size, size); // allocate THE matrix int rowCnt = 0; int colCnt; for (int ii = -hbsL + extraL; ii <= hbsL; ++ii) { colCnt = 0; final int indexii = (ii + size) % size; for (int jj = -hbsP + extraP; jj <= hbsP; ++jj) { final int indexjj = (jj + size) % size; kernel2dOut.put(indexii, indexjj, kernel2dIn.get(rowCnt, colCnt)); colCnt++; } rowCnt++; } if (scaleFactor != 1) { kernel2dOut.muli(scaleFactor); } return kernel2dOut; }
1605107_7
private void constructRectKernel() { // 1d kernel final DoubleMatrix kernel1d = new DoubleMatrix(1, blockSize); // init to zeros final int overlapLines = (int) Math.floor(kernelArray.length / 2.); // 1d kernel function for (int ii = -overlapLines; ii <= overlapLines; ++ii) { kernel1d.put(0, (ii + blockSize) % blockSize, kernelArray[ii + overlapLines]); } kernel2d = new ComplexDoubleMatrix(LinearAlgebraUtils.matTxmat(kernel1d, kernel1d)); SpectralUtils.fft2D_inplace(kernel2d); kernel2d.conji(); }
1605107_8
private void constructRectKernel() { // 1d kernel final DoubleMatrix kernel1d = new DoubleMatrix(1, blockSize); // init to zeros final int overlapLines = (int) Math.floor(kernelArray.length / 2.); // 1d kernel function for (int ii = -overlapLines; ii <= overlapLines; ++ii) { kernel1d.put(0, (ii + blockSize) % blockSize, kernelArray[ii + overlapLines]); } kernel2d = new ComplexDoubleMatrix(LinearAlgebraUtils.matTxmat(kernel1d, kernel1d)); SpectralUtils.fft2D_inplace(kernel2d); kernel2d.conji(); }
1605107_9
/**
 * Writes a SNAPHU configuration file through {@code formattedConfig}.
 * Geometry and baseline parameters are evaluated at the mid point of the
 * data window; the remainder of the file is fixed-key SNAPHU configuration.
 */
public void buildConfFile() throws Exception {

    // Mid point of the data window; all geometry below is evaluated there.
    final double lineMid = 0.5d * dataWindow.lines() + 0.5;
    final double pixelMid = 0.5d * dataWindow.pixels() + 0.5;
    Point pointSAR = new Point(pixelMid, lineMid, 0);

    final double earthRadius = masterOrbit.computeEarthRadius(pointSAR, masterSLC);
    final double orbitRadius = masterOrbit.computeOrbitRadius(pointSAR, masterSLC);
    final double rangeNear = masterSLC.pix2range(dataWindow.pixlo);
    final double rangeDelta = masterSLC.computeDeltaRange(pointSAR);
    final double rangeResolution = masterSLC.computeRangeResolution(pointSAR);
    final double azimuthDelta = masterOrbit.computeAzimuthDelta(pointSAR, masterSLC);
    final double azimuthResolution = masterOrbit.computeAzimuthResolution(pointSAR, masterSLC);

    //// baseline parametrization
    final Baseline baseline = new Baseline();
    baseline.model(masterSLC, slaveSLC, masterOrbit, slaveOrbit);
    final double baselineTotal = baseline.getB(pointSAR);
    final double baselineAlpha = baseline.getAlpha(pointSAR);

    String DIMENSIONS = Long.toString(dataWindow.pixels() - 1); // account for zeros
    String IN_FILE_NAME = parameters.phaseFileName;

    formattedConfig.format("# CONFIG FOR SNAPHU\n");
    formattedConfig.format("# ---------------------------------------------------------------- \n");
    formattedConfig.format("# Created by NEST software on: " + printCurrentTimeDate() + "\n");
    formattedConfig.format("#\n");
    formattedConfig.format("# Command to call snaphu:\n");
    formattedConfig.format("# \n");
    formattedConfig.format("# snaphu -f snaphu.conf " + IN_FILE_NAME + " " + DIMENSIONS + "\n");
    formattedConfig.format("\n");
    formattedConfig.format("#########################\n");
    formattedConfig.format("# Unwrapping parameters #\n");
    formattedConfig.format("#########################\n");
    formattedConfig.format("\n");
    formattedConfig.format("STATCOSTMODE \t %s %n", parameters.unwrapMode.toUpperCase());
    formattedConfig.format("INITMETHOD \t %s %n", parameters.snaphuInit.toUpperCase());
    formattedConfig.format("VERBOSE \t %s %n", parameters.verbosityFlag.toUpperCase());
    formattedConfig.format("\n");
    formattedConfig.format("###############\n");
    formattedConfig.format("# Input files #\n");
    formattedConfig.format("###############\n");
    formattedConfig.format("\n");
    formattedConfig.format("CORRFILE \t\t" + parameters.coherenceFileName + "\n");
    formattedConfig.format("\n");
    formattedConfig.format("################\n");
    formattedConfig.format("# Output files #\n");
    formattedConfig.format("################\n");
    formattedConfig.format("\n");
    formattedConfig.format("OUTFILE \t\t" + parameters.outFileName + "\n");
    formattedConfig.format("LOGFILE \t\t" + parameters.logFileName + "\n");
    formattedConfig.format("\n");
    formattedConfig.format("################\n");
    formattedConfig.format("# File formats #\n");
    formattedConfig.format("################\n");
    formattedConfig.format("\n");
    // NOTE(review): INFILEFORMAT is written twice; the FLOAT_DATA line below
    // presumably overrides this COMPLEX_DATA one -- confirm snaphu keeps the last.
    formattedConfig.format("INFILEFORMAT \t" + "COMPLEX_DATA\n");
    // Eventually converged to export/work with FLOAT
    formattedConfig.format("INFILEFORMAT \t" + "FLOAT_DATA\n");
    formattedConfig.format("CORRFILEFORMAT \t" + "FLOAT_DATA\n");
    formattedConfig.format("OUTFILEFORMAT \t" + "FLOAT_DATA\n");
    formattedConfig.format("\n");
    formattedConfig.format("###############################\n");
    formattedConfig.format("# SAR and geometry parameters #\n");
    formattedConfig.format("###############################\n");
    formattedConfig.format("\n");
    formattedConfig.format("TRANSMITMODE \t" + "REPEATPASS\n");
    formattedConfig.format("\n");
    formattedConfig.format("ORBITRADIUS \t" + doubleToString(orbitRadius, format3) + "\n");
    formattedConfig.format("EARTHRADIUS \t" + doubleToString(earthRadius, format3) + "\n");
    formattedConfig.format("\n");
    formattedConfig.format("LAMBDA \t\t\t" + doubleToString(masterSLC.getRadarWavelength(), format7) + "\n");
    formattedConfig.format("\n");
    formattedConfig.format("BASELINE \t\t" + doubleToString(baselineTotal, format3) + "\n");
    formattedConfig.format("BASELINEANGLE_RAD \t" + doubleToString(baselineAlpha, format3) + "\n");
    formattedConfig.format("\n");
    formattedConfig.format("NEARRANGE \t\t" + doubleToString(rangeNear, format7) + "\n");
    formattedConfig.format("\n");
    formattedConfig.format("# Slant range and azimuth pixel spacings\n");
    formattedConfig.format("DR \t\t\t\t" + doubleToString(rangeDelta, format7) + "\n");
    formattedConfig.format("DA \t\t\t\t" + doubleToString(azimuthDelta, format7) + "\n");
    formattedConfig.format("\n");
    formattedConfig.format("# Single-look slant range and azimuth resolutions.\n");
    formattedConfig.format("RANGERES \t\t" + doubleToString(rangeResolution, format7) + "\n");
    formattedConfig.format("AZRES \t\t\t" + doubleToString(azimuthResolution, format7) + "\n");
    formattedConfig.format("\n");
    formattedConfig.format("# The number of independent looks: approximately equal to the\n"
            + "# real number of looks divided by the product of range and\n"
            + "# azimuth resolutions, and multiplied by the product of the\n"
            + "# single-look range and azimuth spacings. It is about 0.53\n"
            + "# times the number of real looks for ERS data processed\n"
            + "# without windowing.\n");
    formattedConfig.format("NCORRLOOKS \t\t" + Float.toString(N_CORR_LOOKS) + "\n");

    tileControlFlags();

    formattedConfig.format("# End of snaphu configuration file");
}
1608936_0
/**
 * Converts an arbitrary result object into a list of Tuples.
 *
 * <p>An {@code Iterable} is mapped element-by-element through
 * {@code TupleExtractor.extractFromObject}, one Tuple per element. Any other
 * object is wrapped in a single Tuple under the key {@code "result"}.
 *
 * @param obj the object to convert; may be an Iterable or a scalar value.
 * @return one Tuple per element for an Iterable, otherwise a singleton list.
 */
public static List<Tuple> extractTuplesFromObject(Object obj) {
	if ( obj instanceof Iterable ) {
		// Wildcard-parameterized cast instead of the raw Iterable type;
		// avoids the raw-type warning without changing behavior.
		Iterable<?> it = (Iterable<?>) obj;
		return StreamSupport.stream( it.spliterator(), false )
				.map( TupleExtractor::extractFromObject )
				.collect( Collectors.toList() );
	}
	Tuple tuple = new Tuple();
	tuple.put( "result", obj );
	return Collections.singletonList( tuple );
}
1608936_1
/**
 * Converts an arbitrary result object into a list of Tuples.
 *
 * <p>An {@code Iterable} is mapped element-by-element through
 * {@code TupleExtractor.extractFromObject}, one Tuple per element. Any other
 * object is wrapped in a single Tuple under the key {@code "result"}.
 *
 * @param obj the object to convert; may be an Iterable or a scalar value.
 * @return one Tuple per element for an Iterable, otherwise a singleton list.
 */
public static List<Tuple> extractTuplesFromObject(Object obj) {
	if ( obj instanceof Iterable ) {
		// Wildcard-parameterized cast instead of the raw Iterable type;
		// avoids the raw-type warning without changing behavior.
		Iterable<?> it = (Iterable<?>) obj;
		return StreamSupport.stream( it.spliterator(), false )
				.map( TupleExtractor::extractFromObject )
				.collect( Collectors.toList() );
	}
	Tuple tuple = new Tuple();
	tuple.put( "result", obj );
	return Collections.singletonList( tuple );
}
1608936_2
/**
 * Converts an arbitrary result object into a list of Tuples.
 *
 * <p>An {@code Iterable} is mapped element-by-element through
 * {@code TupleExtractor.extractFromObject}, one Tuple per element. Any other
 * object is wrapped in a single Tuple under the key {@code "result"}.
 *
 * @param obj the object to convert; may be an Iterable or a scalar value.
 * @return one Tuple per element for an Iterable, otherwise a singleton list.
 */
public static List<Tuple> extractTuplesFromObject(Object obj) {
	if ( obj instanceof Iterable ) {
		// Wildcard-parameterized cast instead of the raw Iterable type;
		// avoids the raw-type warning without changing behavior.
		Iterable<?> it = (Iterable<?>) obj;
		return StreamSupport.stream( it.spliterator(), false )
				.map( TupleExtractor::extractFromObject )
				.collect( Collectors.toList() );
	}
	Tuple tuple = new Tuple();
	tuple.put( "result", obj );
	return Collections.singletonList( tuple );
}
1608936_3
/**
 * Copies a collection of strings into a freshly allocated {@code String[]}.
 *
 * @param coll the (raw-typed) collection; its elements are assumed to be
 *             Strings — a non-String element surfaces as an ArrayStoreException
 *             or ClassCastException, as before.
 * @return a new array containing the collection's elements in iteration order.
 */
public static String[] toStringArray(Collection coll) {
	// Pass a zero-length array: the collection allocates the correctly sized
	// result itself. Unlike sizing with coll.size(), this cannot leave
	// trailing nulls if the collection shrinks between the size query and
	// the copy.
	return (String[]) coll.toArray( new String[0] );
}
1608936_4
/**
 * Copies a collection of {@code String[]} rows into a freshly allocated
 * two-dimensional array.
 *
 * @param coll the (raw-typed) collection; its elements are assumed to be
 *             {@code String[]} rows.
 * @return a new {@code String[][]} holding the rows in iteration order.
 */
public static String[][] to2DStringArray(Collection coll) {
	// Zero-length seed array: the collection allocates the right-sized
	// result, which is safe even if it is concurrently shrunk between a
	// size() call and the copy.
	return (String[][]) coll.toArray( new String[0][] );
}
1608936_5
/**
 * Returns a new array holding {@code length} consecutive elements of
 * {@code strings}, starting at index {@code begin}.
 *
 * @param strings the source array.
 * @param begin   index of the first element to copy.
 * @param length  number of elements to copy.
 * @return a fresh array of exactly {@code length} elements.
 * @throws IndexOutOfBoundsException if the requested window does not fit
 *         inside {@code strings} (from System.arraycopy).
 */
public static String[] slice(String[] strings, int begin, int length) {
	final String[] window = new String[length];
	System.arraycopy( strings, begin, window, 0, length );
	return window;
}
1608936_6
/**
 * Returns the index of the first element equal to {@code element}, or -1 if
 * it is absent.
 *
 * <p>Null-safe: a null search key matches a null slot. (The previous version
 * called {@code array[i].equals(...)} directly and threw a
 * NullPointerException whenever the array contained a null element or the
 * caller searched for null.)
 *
 * @param array   the array to scan; must not itself be null.
 * @param element the value to look for; may be null.
 * @return the first matching index, or -1 when not found.
 */
public static <T> int indexOf(T[] array, T element) {
	for ( int i = 0; i < array.length; i++ ) {
		T candidate = array[i];
		// null-tolerant equality: both null, or candidate.equals(element)
		if ( candidate == null ? element == null : candidate.equals( element ) ) {
			return i;
		}
	}
	return -1;
}
1608936_7
/**
 * Reports whether {@code element} occurs anywhere in {@code array}, using the
 * same equality rules as {@code indexOf}.
 *
 * @param array   the array to scan.
 * @param element the value to look for.
 * @return true if the element is present.
 */
public static boolean contains(Object[] array, Object element) {
	// indexOf reports -1 exactly when the element is absent.
	return indexOf( array, element ) >= 0;
}
1608936_8
/**
 * Concatenates two arrays into a new array whose runtime component type is
 * taken from {@code first}.
 *
 * @param first  the leading elements.
 * @param second the trailing elements (varargs).
 * @return a fresh array of length {@code first.length + second.length}.
 */
public static <T> T[] concat(T[] first, T... second) {
	// Reflectively allocate with first's component type so the runtime
	// array type is preserved (a plain Object[] would fail later casts).
	@SuppressWarnings("unchecked")
	T[] joined = (T[]) Array.newInstance(
			first.getClass().getComponentType(), first.length + second.length );
	System.arraycopy( first, 0, joined, 0, first.length );
	System.arraycopy( second, 0, joined, first.length, second.length );
	return joined;
}
160996_10
/**
 * Schedules (or reschedules) a wakeup for {@code listener}, {@code delay}
 * milliseconds from now. If the listener already has a pending alarm it is
 * pulled out of the queue, given the new due time and state, and re-inserted;
 * otherwise a fresh Alarm is created and registered. The worker is notified
 * either way so it can re-evaluate the earliest due time.
 *
 * @param listener the callback to fire; must not be null.
 * @param state    opaque caller state carried on the alarm.
 * @param delay    milliseconds until the alarm is due; must be > 0.
 * @throws NullPointerException     if listener is null.
 * @throws IllegalArgumentException if delay <= 0.
 */
public synchronized void add( AlarmListener listener, Object state, int delay )
{
	if (listener == null)
		throw new NullPointerException( "listener == null" );
	
	if (delay <= 0)
		throw new IllegalArgumentException( "delay <= 0" );
	
	// refuses to schedule unless the manager has been started.
	checkIsStarted();
	
	// NOTE(review): if Timer.NANOS_PER_MILLI is declared as an int, the
	// product delay * NANOS_PER_MILLI is computed in 32-bit arithmetic and
	// can overflow for large delays before widening to long — confirm the
	// constant's declared type.
	long due = Timer.getNanos() + delay * Timer.NANOS_PER_MILLI;
	
	Alarm alarm = getAlarm( listener );
	if (alarm != null)
	{
		// schedule is being adjusted
		dequeue( alarm );
		alarm.setDue( due );
		alarm.setState( state );
		enqueue( alarm );
	}
	else
	{
		alarm = new Alarm( listener, state, due );
		addAlarm( listener, alarm );
		enqueue( alarm );
	}
	
	notifyWorker( "add" );
}
160996_11
/**
 * Schedules (or reschedules) a wakeup for {@code listener}, {@code delay}
 * milliseconds from now. If the listener already has a pending alarm it is
 * pulled out of the queue, given the new due time and state, and re-inserted;
 * otherwise a fresh Alarm is created and registered. The worker is notified
 * either way so it can re-evaluate the earliest due time.
 *
 * @param listener the callback to fire; must not be null.
 * @param state    opaque caller state carried on the alarm.
 * @param delay    milliseconds until the alarm is due; must be > 0.
 * @throws NullPointerException     if listener is null.
 * @throws IllegalArgumentException if delay <= 0.
 */
public synchronized void add( AlarmListener listener, Object state, int delay )
{
	if (listener == null)
		throw new NullPointerException( "listener == null" );
	
	if (delay <= 0)
		throw new IllegalArgumentException( "delay <= 0" );
	
	// refuses to schedule unless the manager has been started.
	checkIsStarted();
	
	// NOTE(review): if Timer.NANOS_PER_MILLI is declared as an int, the
	// product delay * NANOS_PER_MILLI is computed in 32-bit arithmetic and
	// can overflow for large delays before widening to long — confirm the
	// constant's declared type.
	long due = Timer.getNanos() + delay * Timer.NANOS_PER_MILLI;
	
	Alarm alarm = getAlarm( listener );
	if (alarm != null)
	{
		// schedule is being adjusted
		dequeue( alarm );
		alarm.setDue( due );
		alarm.setState( state );
		enqueue( alarm );
	}
	else
	{
		alarm = new Alarm( listener, state, due );
		addAlarm( listener, alarm );
		enqueue( alarm );
	}
	
	notifyWorker( "add" );
}
160996_12
/**
 * Schedules (or reschedules) a wakeup for {@code listener}, {@code delay}
 * milliseconds from now. If the listener already has a pending alarm it is
 * pulled out of the queue, given the new due time and state, and re-inserted;
 * otherwise a fresh Alarm is created and registered. The worker is notified
 * either way so it can re-evaluate the earliest due time.
 *
 * @param listener the callback to fire; must not be null.
 * @param state    opaque caller state carried on the alarm.
 * @param delay    milliseconds until the alarm is due; must be > 0.
 * @throws NullPointerException     if listener is null.
 * @throws IllegalArgumentException if delay <= 0.
 */
public synchronized void add( AlarmListener listener, Object state, int delay )
{
	if (listener == null)
		throw new NullPointerException( "listener == null" );
	
	if (delay <= 0)
		throw new IllegalArgumentException( "delay <= 0" );
	
	// refuses to schedule unless the manager has been started.
	checkIsStarted();
	
	// NOTE(review): if Timer.NANOS_PER_MILLI is declared as an int, the
	// product delay * NANOS_PER_MILLI is computed in 32-bit arithmetic and
	// can overflow for large delays before widening to long — confirm the
	// constant's declared type.
	long due = Timer.getNanos() + delay * Timer.NANOS_PER_MILLI;
	
	Alarm alarm = getAlarm( listener );
	if (alarm != null)
	{
		// schedule is being adjusted
		dequeue( alarm );
		alarm.setDue( due );
		alarm.setState( state );
		enqueue( alarm );
	}
	else
	{
		alarm = new Alarm( listener, state, due );
		addAlarm( listener, alarm );
		enqueue( alarm );
	}
	
	notifyWorker( "add" );
}
160996_13
/**
 * Renders this monitor as {@code "Monitor <description>: <value>"} for
 * diagnostic output.
 */
@Override
public String toString()
{
	// %s applies String.valueOf, matching plain concatenation (nulls render
	// as "null" either way).
	return String.format( "Monitor %s: %s", description, value );
}
160996_14
/**
 * Replaces the held value under this object's monitor and wakes every thread
 * currently waiting on it so each can re-check its condition.
 *
 * @param newValue the value to install.
 * @return the value that was previously held.
 */
public synchronized T set( T newValue )
{
	T oldValue = value;
	value = newValue;
	// wake all waiters on this monitor; each re-tests against the new value.
	notifyAll();
	return oldValue;
}
160996_15
/**
 * Replaces the held value under this object's monitor and wakes every thread
 * currently waiting on it so each can re-check its condition.
 *
 * @param newValue the value to install.
 * @return the value that was previously held.
 */
public synchronized T set( T newValue )
{
	T oldValue = value;
	value = newValue;
	// wake all waiters on this monitor; each re-tests against the new value.
	notifyAll();
	return oldValue;
}
160996_16
/**
 * Replaces the held value under this object's monitor and wakes every thread
 * currently waiting on it so each can re-check its condition.
 *
 * @param newValue the value to install.
 * @return the value that was previously held.
 */
public synchronized T set( T newValue )
{
	T oldValue = value;
	value = newValue;
	// wake all waiters on this monitor; each re-tests against the new value.
	notifyAll();
	return oldValue;
}
160996_17
/**
 * Blocks until the held value equals {@code desiredValue}. Delegates to the
 * timed overload with a timeout of 0 (presumably "wait forever" — confirm
 * against that overload's contract).
 *
 * @param desiredValue the value to wait for.
 * @throws InterruptedException if the waiting thread is interrupted.
 * @throws TimeoutException     propagated from the timed overload.
 */
public void waitUntilEq( T desiredValue ) throws InterruptedException, TimeoutException
{
	final int waitForever = 0;
	waitUntilEq( desiredValue, waitForever );
}
160996_18
/**
 * Blocks until the held value equals {@code desiredValue}. Delegates to the
 * timed overload with a timeout of 0 (presumably "wait forever" — confirm
 * against that overload's contract).
 *
 * @param desiredValue the value to wait for.
 * @throws InterruptedException if the waiting thread is interrupted.
 * @throws TimeoutException     propagated from the timed overload.
 */
public void waitUntilEq( T desiredValue ) throws InterruptedException, TimeoutException
{
	final int waitForever = 0;
	waitUntilEq( desiredValue, waitForever );
}
160996_19
/**
 * Blocks until the held value equals {@code desiredValue}. Delegates to the
 * timed overload with a timeout of 0 (presumably "wait forever" — confirm
 * against that overload's contract).
 *
 * @param desiredValue the value to wait for.
 * @throws InterruptedException if the waiting thread is interrupted.
 * @throws TimeoutException     propagated from the timed overload.
 */
public void waitUntilEq( T desiredValue ) throws InterruptedException, TimeoutException
{
	final int waitForever = 0;
	waitUntilEq( desiredValue, waitForever );
}
160996_20
/**
 * Blocks until the held value differs from {@code undesiredValue}. Delegates
 * to the timed overload with a timeout of 0 (presumably "wait forever" —
 * confirm against that overload's contract).
 *
 * @param undesiredValue the value to wait to change away from.
 * @return the first value observed that differs from {@code undesiredValue}.
 * @throws InterruptedException if the waiting thread is interrupted.
 * @throws TimeoutException     propagated from the timed overload.
 */
public T waitUntilNotEq( T undesiredValue ) throws InterruptedException, TimeoutException
{
	final int waitForever = 0;
	return waitUntilNotEq( undesiredValue, waitForever );
}
160996_21
/**
 * Blocks until the held value differs from {@code undesiredValue}. Delegates
 * to the timed overload with a timeout of 0 (presumably "wait forever" —
 * confirm against that overload's contract).
 *
 * @param undesiredValue the value to wait to change away from.
 * @return the first value observed that differs from {@code undesiredValue}.
 * @throws InterruptedException if the waiting thread is interrupted.
 * @throws TimeoutException     propagated from the timed overload.
 */
public T waitUntilNotEq( T undesiredValue ) throws InterruptedException, TimeoutException
{
	final int waitForever = 0;
	return waitUntilNotEq( undesiredValue, waitForever );
}
160996_22
/**
 * Blocks until the held value equals {@code desiredValue}, then installs
 * {@code newValue}. Delegates to the timed overload with a timeout of 0
 * (presumably "wait forever" — confirm against that overload's contract).
 *
 * @param desiredValue the value to wait for.
 * @param newValue     the replacement to install once matched.
 * @return the value that was replaced.
 * @throws InterruptedException if the waiting thread is interrupted.
 * @throws TimeoutException     propagated from the timed overload.
 */
public T waitUntilEqAndSet( T desiredValue, T newValue ) throws InterruptedException, TimeoutException
{
	final int waitForever = 0;
	return waitUntilEqAndSet( desiredValue, waitForever, newValue );
}
160996_23
/**
 * Blocks until the held value differs from {@code undesiredValue}, then
 * installs {@code newValue}. Delegates to the timed overload with a timeout
 * of 0 (presumably "wait forever" — confirm against that overload's
 * contract).
 *
 * @param undesiredValue the value to wait to change away from.
 * @param newValue       the replacement to install once it differs.
 * @return the value that was replaced.
 * @throws InterruptedException if the waiting thread is interrupted.
 * @throws TimeoutException     propagated from the timed overload.
 */
public T waitUntilNotEqAndSet( T undesiredValue, T newValue ) throws InterruptedException, TimeoutException
{
	final int waitForever = 0;
	return waitUntilNotEqAndSet( undesiredValue, waitForever, newValue );
}
160996_24
/**
 * Sends the available bytes of {@code buf} as a single datagram. The
 * destination defaults to {@code remoteAddress}; if the recipient is an
 * {@code InetWho}, its address/port override the default. The packet is
 * handed to {@code listener.Send} when a listener is installed, otherwise
 * sent directly on the socket.
 *
 * @param recipient optional addressee; only an InetWho changes the target.
 * @param buf       bytes to send, from buf.index() for buf.avail() bytes.
 * @throws IOException if the socket send fails.
 */
public void transportPacket( Who recipient, FlexBuffer buf ) throws IOException
{
	SocketAddress recipientAddress = remoteAddress;
	// instanceof is already false for null, so the former explicit
	// "recipient != null &&" check was redundant and has been dropped.
	if (recipient instanceof InetWho)
	{
		InetWho inetWho = (InetWho) recipient;
		recipientAddress = new InetSocketAddress( inetWho.getInetAddress(), inetWho.getPort() );
	}
	
	sendDatagramPacket.setData( buf.getBuf(), buf.index(), buf.avail() );
	sendDatagramPacket.setSocketAddress( recipientAddress );
	
	if (listener != null)
		listener.Send( sendDatagramPacket );
	else
		socket.send( sendDatagramPacket );
}
160996_25
/**
 * Closes the listening server socket if one is open; a second call finds the
 * field already null and does nothing. The field is cleared before closing so
 * a concurrent observer sees the socket as gone.
 *
 * @param reset not consulted by this implementation.
 * @throws Exception if closing the socket fails.
 */
@Override
public void close( boolean reset ) throws Exception
{
	final ServerSocket accepting = serverSocket;
	if (accepting == null)
		return;
	
	serverSocket = null;
	accepting.close();
}
160996_26
/**
 * Accepts a chunk of stream data and re-frames it into packets: a fixed-size
 * header (HEADER_SIZE bytes, parsed by processHeader) followed by a body of
 * the length the header announced. Complete bodies are handed to
 * session.sessionPacket; partial headers/bodies are accumulated in savedBuf
 * across calls until enough bytes arrive. Framing state lives in the fields
 * wantHeader, bodyLen and savedBuf.
 *
 * @param sender passed through to session.sessionPacket untouched.
 * @param buf    incoming bytes; fully consumed before this method returns.
 * @throws Exception whatever processHeader or session.sessionPacket throws.
 */
public void sessionData( Who sender, FlexBuffer buf ) throws Exception
{
	// there are two options here. one is that we have no buffered data
	// and the entire packet is contained within the buf. in that case
	// we could optimize the daylights out of the process and directly
	// drop the packet on the handler.
	
	while (buf.avail() > 0)
	{
		if (wantHeader)
		{
			// do we have enough to make a header?
			if (savedBuf.length() + buf.avail() >= HEADER_SIZE)
			{
				int pktSize;
				if (savedBuf.length() == 0)
				{
					// savedBuf is empty, entire header in buf.
					pktSize = processHeader( buf, false );
				}
				else // header split across savedBuf and buf
				{
					// move just enough data from buf to savedBuf to have a header.
					int needFromBuf = HEADER_SIZE - savedBuf.length();
					savedBuf.put( buf, needFromBuf );
					savedBuf.setIndex( 0 );
					pktSize = processHeader( savedBuf, true );
				}
				
				// a zero-size packet has no body; go straight back to
				// scanning for the next header.
				if (pktSize == 0)
					continue;
				
				bodyLen = pktSize;
				wantHeader = false;
			}
			else // want header, but there's not enough to make it.
			{
				// save buf in savedBuf.
				savedBuf.setIndex( savedBuf.length() );
				savedBuf.put( buf );
			}
		}
		else if (savedBuf.length() + buf.avail() >= bodyLen)
		{
			// want body, and there's enough to make it.
			// three possible cases: the body is entirely in savedBuf,
			// the body is split, or the body is entirely in buf. assert
			// that the body cannot entirely be in savedBuf, or else
			// we'd have processed it last time.
			assert savedBuf.length() < bodyLen;
			
			if (savedBuf.length() == 0)
			{
				// savedBuf is empty, entire body in buf.
				// temporarily clamp buf's length to the body's end so the
				// handler only sees this one packet, then restore it and
				// advance the index past the consumed body.
				int length = buf.length();
				int index = buf.index();
				buf.setLength( index+bodyLen );
				
				session.sessionPacket( sender, buf );
				
				buf.setLength( length );
				buf.setIndex( index+bodyLen );
				
				wantHeader = true;
			}
			else // body split across savedBuf and buf
			{
				// move just enough data from buf to savedBuf to have a body.
				int needFromBuf = bodyLen - savedBuf.length();
				savedBuf.put( buf, needFromBuf );
				savedBuf.setIndex( 0 );
				
				session.sessionPacket( sender, savedBuf );
				
				savedBuf.reset();
				
				wantHeader = true;
			}
		}
		else // want body, but there's not enough to make it.
		{
			// save buf in savedBuf.
			savedBuf.put( buf );
		}
	}
	
	// buf is now empty, and there's nothing else to do.
	assert buf.avail() == 0;
}
160996_27
/**
 * Accepts a chunk of stream data and re-frames it into packets: a fixed-size
 * header (HEADER_SIZE bytes, parsed by processHeader) followed by a body of
 * the length the header announced. Complete bodies are handed to
 * session.sessionPacket; partial headers/bodies are accumulated in savedBuf
 * across calls until enough bytes arrive. Framing state lives in the fields
 * wantHeader, bodyLen and savedBuf.
 *
 * @param sender passed through to session.sessionPacket untouched.
 * @param buf    incoming bytes; fully consumed before this method returns.
 * @throws Exception whatever processHeader or session.sessionPacket throws.
 */
public void sessionData( Who sender, FlexBuffer buf ) throws Exception
{
	// there are two options here. one is that we have no buffered data
	// and the entire packet is contained within the buf. in that case
	// we could optimize the daylights out of the process and directly
	// drop the packet on the handler.
	
	while (buf.avail() > 0)
	{
		if (wantHeader)
		{
			// do we have enough to make a header?
			if (savedBuf.length() + buf.avail() >= HEADER_SIZE)
			{
				int pktSize;
				if (savedBuf.length() == 0)
				{
					// savedBuf is empty, entire header in buf.
					pktSize = processHeader( buf, false );
				}
				else // header split across savedBuf and buf
				{
					// move just enough data from buf to savedBuf to have a header.
					int needFromBuf = HEADER_SIZE - savedBuf.length();
					savedBuf.put( buf, needFromBuf );
					savedBuf.setIndex( 0 );
					pktSize = processHeader( savedBuf, true );
				}
				
				// a zero-size packet has no body; go straight back to
				// scanning for the next header.
				if (pktSize == 0)
					continue;
				
				bodyLen = pktSize;
				wantHeader = false;
			}
			else // want header, but there's not enough to make it.
			{
				// save buf in savedBuf.
				savedBuf.setIndex( savedBuf.length() );
				savedBuf.put( buf );
			}
		}
		else if (savedBuf.length() + buf.avail() >= bodyLen)
		{
			// want body, and there's enough to make it.
			// three possible cases: the body is entirely in savedBuf,
			// the body is split, or the body is entirely in buf. assert
			// that the body cannot entirely be in savedBuf, or else
			// we'd have processed it last time.
			assert savedBuf.length() < bodyLen;
			
			if (savedBuf.length() == 0)
			{
				// savedBuf is empty, entire body in buf.
				// temporarily clamp buf's length to the body's end so the
				// handler only sees this one packet, then restore it and
				// advance the index past the consumed body.
				int length = buf.length();
				int index = buf.index();
				buf.setLength( index+bodyLen );
				
				session.sessionPacket( sender, buf );
				
				buf.setLength( length );
				buf.setIndex( index+bodyLen );
				
				wantHeader = true;
			}
			else // body split across savedBuf and buf
			{
				// move just enough data from buf to savedBuf to have a body.
				int needFromBuf = bodyLen - savedBuf.length();
				savedBuf.put( buf, needFromBuf );
				savedBuf.setIndex( 0 );
				
				session.sessionPacket( sender, savedBuf );
				
				savedBuf.reset();
				
				wantHeader = true;
			}
		}
		else // want body, but there's not enough to make it.
		{
			// save buf in savedBuf.
			savedBuf.put( buf );
		}
	}
	
	// buf is now empty, and there's nothing else to do.
	assert buf.avail() == 0;
}
160996_28
/**
 * Accepts a chunk of stream data and re-frames it into packets: a fixed-size
 * header (HEADER_SIZE bytes, parsed by processHeader) followed by a body of
 * the length the header announced. Complete bodies are handed to
 * session.sessionPacket; partial headers/bodies are accumulated in savedBuf
 * across calls until enough bytes arrive. Framing state lives in the fields
 * wantHeader, bodyLen and savedBuf.
 *
 * @param sender passed through to session.sessionPacket untouched.
 * @param buf    incoming bytes; fully consumed before this method returns.
 * @throws Exception whatever processHeader or session.sessionPacket throws.
 */
public void sessionData( Who sender, FlexBuffer buf ) throws Exception
{
	// there are two options here. one is that we have no buffered data
	// and the entire packet is contained within the buf. in that case
	// we could optimize the daylights out of the process and directly
	// drop the packet on the handler.
	
	while (buf.avail() > 0)
	{
		if (wantHeader)
		{
			// do we have enough to make a header?
			if (savedBuf.length() + buf.avail() >= HEADER_SIZE)
			{
				int pktSize;
				if (savedBuf.length() == 0)
				{
					// savedBuf is empty, entire header in buf.
					pktSize = processHeader( buf, false );
				}
				else // header split across savedBuf and buf
				{
					// move just enough data from buf to savedBuf to have a header.
					int needFromBuf = HEADER_SIZE - savedBuf.length();
					savedBuf.put( buf, needFromBuf );
					savedBuf.setIndex( 0 );
					pktSize = processHeader( savedBuf, true );
				}
				
				// a zero-size packet has no body; go straight back to
				// scanning for the next header.
				if (pktSize == 0)
					continue;
				
				bodyLen = pktSize;
				wantHeader = false;
			}
			else // want header, but there's not enough to make it.
			{
				// save buf in savedBuf.
				savedBuf.setIndex( savedBuf.length() );
				savedBuf.put( buf );
			}
		}
		else if (savedBuf.length() + buf.avail() >= bodyLen)
		{
			// want body, and there's enough to make it.
			// three possible cases: the body is entirely in savedBuf,
			// the body is split, or the body is entirely in buf. assert
			// that the body cannot entirely be in savedBuf, or else
			// we'd have processed it last time.
			assert savedBuf.length() < bodyLen;
			
			if (savedBuf.length() == 0)
			{
				// savedBuf is empty, entire body in buf.
				// temporarily clamp buf's length to the body's end so the
				// handler only sees this one packet, then restore it and
				// advance the index past the consumed body.
				int length = buf.length();
				int index = buf.index();
				buf.setLength( index+bodyLen );
				
				session.sessionPacket( sender, buf );
				
				buf.setLength( length );
				buf.setIndex( index+bodyLen );
				
				wantHeader = true;
			}
			else // body split across savedBuf and buf
			{
				// move just enough data from buf to savedBuf to have a body.
				int needFromBuf = bodyLen - savedBuf.length();
				savedBuf.put( buf, needFromBuf );
				savedBuf.setIndex( 0 );
				
				session.sessionPacket( sender, savedBuf );
				
				savedBuf.reset();
				
				wantHeader = true;
			}
		}
		else // want body, but there's not enough to make it.
		{
			// save buf in savedBuf.
			savedBuf.put( buf );
		}
	}
	
	// buf is now empty, and there's nothing else to do.
	assert buf.avail() == 0;
}
160996_29
/**
 * Accepts a chunk of stream data and re-frames it into packets: a fixed-size
 * header (HEADER_SIZE bytes, parsed by processHeader) followed by a body of
 * the length the header announced. Complete bodies are handed to
 * session.sessionPacket; partial headers/bodies are accumulated in savedBuf
 * across calls until enough bytes arrive. Framing state lives in the fields
 * wantHeader, bodyLen and savedBuf.
 *
 * @param sender passed through to session.sessionPacket untouched.
 * @param buf    incoming bytes; fully consumed before this method returns.
 * @throws Exception whatever processHeader or session.sessionPacket throws.
 */
public void sessionData( Who sender, FlexBuffer buf ) throws Exception
{
	// there are two options here. one is that we have no buffered data
	// and the entire packet is contained within the buf. in that case
	// we could optimize the daylights out of the process and directly
	// drop the packet on the handler.
	
	while (buf.avail() > 0)
	{
		if (wantHeader)
		{
			// do we have enough to make a header?
			if (savedBuf.length() + buf.avail() >= HEADER_SIZE)
			{
				int pktSize;
				if (savedBuf.length() == 0)
				{
					// savedBuf is empty, entire header in buf.
					pktSize = processHeader( buf, false );
				}
				else // header split across savedBuf and buf
				{
					// move just enough data from buf to savedBuf to have a header.
					int needFromBuf = HEADER_SIZE - savedBuf.length();
					savedBuf.put( buf, needFromBuf );
					savedBuf.setIndex( 0 );
					pktSize = processHeader( savedBuf, true );
				}
				
				// a zero-size packet has no body; go straight back to
				// scanning for the next header.
				if (pktSize == 0)
					continue;
				
				bodyLen = pktSize;
				wantHeader = false;
			}
			else // want header, but there's not enough to make it.
			{
				// save buf in savedBuf.
				savedBuf.setIndex( savedBuf.length() );
				savedBuf.put( buf );
			}
		}
		else if (savedBuf.length() + buf.avail() >= bodyLen)
		{
			// want body, and there's enough to make it.
			// three possible cases: the body is entirely in savedBuf,
			// the body is split, or the body is entirely in buf. assert
			// that the body cannot entirely be in savedBuf, or else
			// we'd have processed it last time.
			assert savedBuf.length() < bodyLen;
			
			if (savedBuf.length() == 0)
			{
				// savedBuf is empty, entire body in buf.
				// temporarily clamp buf's length to the body's end so the
				// handler only sees this one packet, then restore it and
				// advance the index past the consumed body.
				int length = buf.length();
				int index = buf.index();
				buf.setLength( index+bodyLen );
				
				session.sessionPacket( sender, buf );
				
				buf.setLength( length );
				buf.setIndex( index+bodyLen );
				
				wantHeader = true;
			}
			else // body split across savedBuf and buf
			{
				// move just enough data from buf to savedBuf to have a body.
				int needFromBuf = bodyLen - savedBuf.length();
				savedBuf.put( buf, needFromBuf );
				savedBuf.setIndex( 0 );
				
				session.sessionPacket( sender, savedBuf );
				
				savedBuf.reset();
				
				wantHeader = true;
			}
		}
		else // want body, but there's not enough to make it.
		{
			// save buf in savedBuf.
			savedBuf.put( buf );
		}
	}
	
	// buf is now empty, and there's nothing else to do.
	assert buf.avail() == 0;
}
160996_30
/**
 * Accepts a chunk of stream data and re-frames it into packets: a fixed-size
 * header (HEADER_SIZE bytes, parsed by processHeader) followed by a body of
 * the length the header announced. Complete bodies are handed to
 * session.sessionPacket; partial headers/bodies are accumulated in savedBuf
 * across calls until enough bytes arrive. Framing state lives in the fields
 * wantHeader, bodyLen and savedBuf.
 *
 * @param sender passed through to session.sessionPacket untouched.
 * @param buf    incoming bytes; fully consumed before this method returns.
 * @throws Exception whatever processHeader or session.sessionPacket throws.
 */
public void sessionData( Who sender, FlexBuffer buf ) throws Exception
{
	// there are two options here. one is that we have no buffered data
	// and the entire packet is contained within the buf. in that case
	// we could optimize the daylights out of the process and directly
	// drop the packet on the handler.
	
	while (buf.avail() > 0)
	{
		if (wantHeader)
		{
			// do we have enough to make a header?
			if (savedBuf.length() + buf.avail() >= HEADER_SIZE)
			{
				int pktSize;
				if (savedBuf.length() == 0)
				{
					// savedBuf is empty, entire header in buf.
					pktSize = processHeader( buf, false );
				}
				else // header split across savedBuf and buf
				{
					// move just enough data from buf to savedBuf to have a header.
					int needFromBuf = HEADER_SIZE - savedBuf.length();
					savedBuf.put( buf, needFromBuf );
					savedBuf.setIndex( 0 );
					pktSize = processHeader( savedBuf, true );
				}
				
				// a zero-size packet has no body; go straight back to
				// scanning for the next header.
				if (pktSize == 0)
					continue;
				
				bodyLen = pktSize;
				wantHeader = false;
			}
			else // want header, but there's not enough to make it.
			{
				// save buf in savedBuf.
				savedBuf.setIndex( savedBuf.length() );
				savedBuf.put( buf );
			}
		}
		else if (savedBuf.length() + buf.avail() >= bodyLen)
		{
			// want body, and there's enough to make it.
			// three possible cases: the body is entirely in savedBuf,
			// the body is split, or the body is entirely in buf. assert
			// that the body cannot entirely be in savedBuf, or else
			// we'd have processed it last time.
			assert savedBuf.length() < bodyLen;
			
			if (savedBuf.length() == 0)
			{
				// savedBuf is empty, entire body in buf.
				// temporarily clamp buf's length to the body's end so the
				// handler only sees this one packet, then restore it and
				// advance the index past the consumed body.
				int length = buf.length();
				int index = buf.index();
				buf.setLength( index+bodyLen );
				
				session.sessionPacket( sender, buf );
				
				buf.setLength( length );
				buf.setIndex( index+bodyLen );
				
				wantHeader = true;
			}
			else // body split across savedBuf and buf
			{
				// move just enough data from buf to savedBuf to have a body.
				int needFromBuf = bodyLen - savedBuf.length();
				savedBuf.put( buf, needFromBuf );
				savedBuf.setIndex( 0 );
				
				session.sessionPacket( sender, savedBuf );
				
				savedBuf.reset();
				
				wantHeader = true;
			}
		}
		else // want body, but there's not enough to make it.
		{
			// save buf in savedBuf.
			savedBuf.put( buf );
		}
	}
	
	// buf is now empty, and there's nothing else to do.
	assert buf.avail() == 0;
}
160996_31
/**
 * Accepts a chunk of stream data and re-frames it into packets: a fixed-size
 * header (HEADER_SIZE bytes, parsed by processHeader) followed by a body of
 * the length the header announced. Complete bodies are handed to
 * session.sessionPacket; partial headers/bodies are accumulated in savedBuf
 * across calls until enough bytes arrive. Framing state lives in the fields
 * wantHeader, bodyLen and savedBuf.
 *
 * @param sender passed through to session.sessionPacket untouched.
 * @param buf    incoming bytes; fully consumed before this method returns.
 * @throws Exception whatever processHeader or session.sessionPacket throws.
 */
public void sessionData( Who sender, FlexBuffer buf ) throws Exception
{
	// there are two options here. one is that we have no buffered data
	// and the entire packet is contained within the buf. in that case
	// we could optimize the daylights out of the process and directly
	// drop the packet on the handler.
	
	while (buf.avail() > 0)
	{
		if (wantHeader)
		{
			// do we have enough to make a header?
			if (savedBuf.length() + buf.avail() >= HEADER_SIZE)
			{
				int pktSize;
				if (savedBuf.length() == 0)
				{
					// savedBuf is empty, entire header in buf.
					pktSize = processHeader( buf, false );
				}
				else // header split across savedBuf and buf
				{
					// move just enough data from buf to savedBuf to have a header.
					int needFromBuf = HEADER_SIZE - savedBuf.length();
					savedBuf.put( buf, needFromBuf );
					savedBuf.setIndex( 0 );
					pktSize = processHeader( savedBuf, true );
				}
				
				// a zero-size packet has no body; go straight back to
				// scanning for the next header.
				if (pktSize == 0)
					continue;
				
				bodyLen = pktSize;
				wantHeader = false;
			}
			else // want header, but there's not enough to make it.
			{
				// save buf in savedBuf.
				savedBuf.setIndex( savedBuf.length() );
				savedBuf.put( buf );
			}
		}
		else if (savedBuf.length() + buf.avail() >= bodyLen)
		{
			// want body, and there's enough to make it.
			// three possible cases: the body is entirely in savedBuf,
			// the body is split, or the body is entirely in buf. assert
			// that the body cannot entirely be in savedBuf, or else
			// we'd have processed it last time.
			assert savedBuf.length() < bodyLen;
			
			if (savedBuf.length() == 0)
			{
				// savedBuf is empty, entire body in buf.
				// temporarily clamp buf's length to the body's end so the
				// handler only sees this one packet, then restore it and
				// advance the index past the consumed body.
				int length = buf.length();
				int index = buf.index();
				buf.setLength( index+bodyLen );
				
				session.sessionPacket( sender, buf );
				
				buf.setLength( length );
				buf.setIndex( index+bodyLen );
				
				wantHeader = true;
			}
			else // body split across savedBuf and buf
			{
				// move just enough data from buf to savedBuf to have a body.
				int needFromBuf = bodyLen - savedBuf.length();
				savedBuf.put( buf, needFromBuf );
				savedBuf.setIndex( 0 );
				
				session.sessionPacket( sender, savedBuf );
				
				savedBuf.reset();
				
				wantHeader = true;
			}
		}
		else // want body, but there's not enough to make it.
		{
			// save buf in savedBuf.
			savedBuf.put( buf );
		}
	}
	
	// buf is now empty, and there's nothing else to do.
	assert buf.avail() == 0;
}
160996_32
/**
 * Accepts a chunk of stream data and re-frames it into packets: a fixed-size
 * header (HEADER_SIZE bytes, parsed by processHeader) followed by a body of
 * the length the header announced. Complete bodies are handed to
 * session.sessionPacket; partial headers/bodies are accumulated in savedBuf
 * across calls until enough bytes arrive. Framing state lives in the fields
 * wantHeader, bodyLen and savedBuf.
 *
 * @param sender passed through to session.sessionPacket untouched.
 * @param buf    incoming bytes; fully consumed before this method returns.
 * @throws Exception whatever processHeader or session.sessionPacket throws.
 */
public void sessionData( Who sender, FlexBuffer buf ) throws Exception
{
	// there are two options here. one is that we have no buffered data
	// and the entire packet is contained within the buf. in that case
	// we could optimize the daylights out of the process and directly
	// drop the packet on the handler.
	
	while (buf.avail() > 0)
	{
		if (wantHeader)
		{
			// do we have enough to make a header?
			if (savedBuf.length() + buf.avail() >= HEADER_SIZE)
			{
				int pktSize;
				if (savedBuf.length() == 0)
				{
					// savedBuf is empty, entire header in buf.
					pktSize = processHeader( buf, false );
				}
				else // header split across savedBuf and buf
				{
					// move just enough data from buf to savedBuf to have a header.
					int needFromBuf = HEADER_SIZE - savedBuf.length();
					savedBuf.put( buf, needFromBuf );
					savedBuf.setIndex( 0 );
					pktSize = processHeader( savedBuf, true );
				}
				
				// a zero-size packet has no body; go straight back to
				// scanning for the next header.
				if (pktSize == 0)
					continue;
				
				bodyLen = pktSize;
				wantHeader = false;
			}
			else // want header, but there's not enough to make it.
			{
				// save buf in savedBuf.
				savedBuf.setIndex( savedBuf.length() );
				savedBuf.put( buf );
			}
		}
		else if (savedBuf.length() + buf.avail() >= bodyLen)
		{
			// want body, and there's enough to make it.
			// three possible cases: the body is entirely in savedBuf,
			// the body is split, or the body is entirely in buf. assert
			// that the body cannot entirely be in savedBuf, or else
			// we'd have processed it last time.
			assert savedBuf.length() < bodyLen;
			
			if (savedBuf.length() == 0)
			{
				// savedBuf is empty, entire body in buf.
				// temporarily clamp buf's length to the body's end so the
				// handler only sees this one packet, then restore it and
				// advance the index past the consumed body.
				int length = buf.length();
				int index = buf.index();
				buf.setLength( index+bodyLen );
				
				session.sessionPacket( sender, buf );
				
				buf.setLength( length );
				buf.setIndex( index+bodyLen );
				
				wantHeader = true;
			}
			else // body split across savedBuf and buf
			{
				// move just enough data from buf to savedBuf to have a body.
				int needFromBuf = bodyLen - savedBuf.length();
				savedBuf.put( buf, needFromBuf );
				savedBuf.setIndex( 0 );
				
				session.sessionPacket( sender, savedBuf );
				
				savedBuf.reset();
				
				wantHeader = true;
			}
		}
		else // want body, but there's not enough to make it.
		{
			// save buf in savedBuf.
			savedBuf.put( buf );
		}
	}
	
	// buf is now empty, and there's nothing else to do.
	assert buf.avail() == 0;
}
160996_33
/**
 * Accepts a chunk of stream data and re-frames it into packets: a fixed-size
 * header (HEADER_SIZE bytes, parsed by processHeader) followed by a body of
 * the length the header announced. Complete bodies are handed to
 * session.sessionPacket; partial headers/bodies are accumulated in savedBuf
 * across calls until enough bytes arrive. Framing state lives in the fields
 * wantHeader, bodyLen and savedBuf.
 *
 * @param sender passed through to session.sessionPacket untouched.
 * @param buf    incoming bytes; fully consumed before this method returns.
 * @throws Exception whatever processHeader or session.sessionPacket throws.
 */
public void sessionData( Who sender, FlexBuffer buf ) throws Exception
{
	// there are two options here. one is that we have no buffered data
	// and the entire packet is contained within the buf. in that case
	// we could optimize the daylights out of the process and directly
	// drop the packet on the handler.
	
	while (buf.avail() > 0)
	{
		if (wantHeader)
		{
			// do we have enough to make a header?
			if (savedBuf.length() + buf.avail() >= HEADER_SIZE)
			{
				int pktSize;
				if (savedBuf.length() == 0)
				{
					// savedBuf is empty, entire header in buf.
					pktSize = processHeader( buf, false );
				}
				else // header split across savedBuf and buf
				{
					// move just enough data from buf to savedBuf to have a header.
					int needFromBuf = HEADER_SIZE - savedBuf.length();
					savedBuf.put( buf, needFromBuf );
					savedBuf.setIndex( 0 );
					pktSize = processHeader( savedBuf, true );
				}
				
				// a zero-size packet has no body; go straight back to
				// scanning for the next header.
				if (pktSize == 0)
					continue;
				
				bodyLen = pktSize;
				wantHeader = false;
			}
			else // want header, but there's not enough to make it.
			{
				// save buf in savedBuf.
				savedBuf.setIndex( savedBuf.length() );
				savedBuf.put( buf );
			}
		}
		else if (savedBuf.length() + buf.avail() >= bodyLen)
		{
			// want body, and there's enough to make it.
			// three possible cases: the body is entirely in savedBuf,
			// the body is split, or the body is entirely in buf. assert
			// that the body cannot entirely be in savedBuf, or else
			// we'd have processed it last time.
			assert savedBuf.length() < bodyLen;
			
			if (savedBuf.length() == 0)
			{
				// savedBuf is empty, entire body in buf.
				// temporarily clamp buf's length to the body's end so the
				// handler only sees this one packet, then restore it and
				// advance the index past the consumed body.
				int length = buf.length();
				int index = buf.index();
				buf.setLength( index+bodyLen );
				
				session.sessionPacket( sender, buf );
				
				buf.setLength( length );
				buf.setIndex( index+bodyLen );
				
				wantHeader = true;
			}
			else // body split across savedBuf and buf
			{
				// move just enough data from buf to savedBuf to have a body.
				int needFromBuf = bodyLen - savedBuf.length();
				savedBuf.put( buf, needFromBuf );
				savedBuf.setIndex( 0 );
				
				session.sessionPacket( sender, savedBuf );
				
				savedBuf.reset();
				
				wantHeader = true;
			}
		}
		else // want body, but there's not enough to make it.
		{
			// save buf in savedBuf.
			savedBuf.put( buf );
		}
	}
	
	// buf is now empty, and there's nothing else to do.
	assert buf.avail() == 0;
}
160996_34
/**
 * Accepts a chunk of stream data and re-frames it into packets: a fixed-size
 * header (HEADER_SIZE bytes, parsed by processHeader) followed by a body of
 * the length the header announced. Complete bodies are handed to
 * session.sessionPacket; partial headers/bodies are accumulated in savedBuf
 * across calls until enough bytes arrive. Framing state lives in the fields
 * wantHeader, bodyLen and savedBuf.
 *
 * @param sender passed through to session.sessionPacket untouched.
 * @param buf    incoming bytes; fully consumed before this method returns.
 * @throws Exception whatever processHeader or session.sessionPacket throws.
 */
public void sessionData( Who sender, FlexBuffer buf ) throws Exception
{
	// there are two options here. one is that we have no buffered data
	// and the entire packet is contained within the buf. in that case
	// we could optimize the daylights out of the process and directly
	// drop the packet on the handler.
	
	while (buf.avail() > 0)
	{
		if (wantHeader)
		{
			// do we have enough to make a header?
			if (savedBuf.length() + buf.avail() >= HEADER_SIZE)
			{
				int pktSize;
				if (savedBuf.length() == 0)
				{
					// savedBuf is empty, entire header in buf.
					pktSize = processHeader( buf, false );
				}
				else // header split across savedBuf and buf
				{
					// move just enough data from buf to savedBuf to have a header.
					int needFromBuf = HEADER_SIZE - savedBuf.length();
					savedBuf.put( buf, needFromBuf );
					savedBuf.setIndex( 0 );
					pktSize = processHeader( savedBuf, true );
				}
				
				// a zero-size packet has no body; go straight back to
				// scanning for the next header.
				if (pktSize == 0)
					continue;
				
				bodyLen = pktSize;
				wantHeader = false;
			}
			else // want header, but there's not enough to make it.
			{
				// save buf in savedBuf.
				savedBuf.setIndex( savedBuf.length() );
				savedBuf.put( buf );
			}
		}
		else if (savedBuf.length() + buf.avail() >= bodyLen)
		{
			// want body, and there's enough to make it.
			// three possible cases: the body is entirely in savedBuf,
			// the body is split, or the body is entirely in buf. assert
			// that the body cannot entirely be in savedBuf, or else
			// we'd have processed it last time.
			assert savedBuf.length() < bodyLen;
			
			if (savedBuf.length() == 0)
			{
				// savedBuf is empty, entire body in buf.
				// temporarily clamp buf's length to the body's end so the
				// handler only sees this one packet, then restore it and
				// advance the index past the consumed body.
				int length = buf.length();
				int index = buf.index();
				buf.setLength( index+bodyLen );
				
				session.sessionPacket( sender, buf );
				
				buf.setLength( length );
				buf.setIndex( index+bodyLen );
				
				wantHeader = true;
			}
			else // body split across savedBuf and buf
			{
				// move just enough data from buf to savedBuf to have a body.
				int needFromBuf = bodyLen - savedBuf.length();
				savedBuf.put( buf, needFromBuf );
				savedBuf.setIndex( 0 );
				
				session.sessionPacket( sender, savedBuf );
				
				savedBuf.reset();
				
				wantHeader = true;
			}
		}
		else // want body, but there's not enough to make it.
		{
			// save buf in savedBuf.
			savedBuf.put( buf );
		}
	}
	
	// buf is now empty, and there's nothing else to do.
	assert buf.avail() == 0;
}
160996_35
/**
 * Accepts a chunk of stream data and re-frames it into packets: a fixed-size
 * header (HEADER_SIZE bytes, parsed by processHeader) followed by a body of
 * the length the header announced. Complete bodies are handed to
 * session.sessionPacket; partial headers/bodies are accumulated in savedBuf
 * across calls until enough bytes arrive. Framing state lives in the fields
 * wantHeader, bodyLen and savedBuf.
 *
 * @param sender passed through to session.sessionPacket untouched.
 * @param buf    incoming bytes; fully consumed before this method returns.
 * @throws Exception whatever processHeader or session.sessionPacket throws.
 */
public void sessionData( Who sender, FlexBuffer buf ) throws Exception
{
	// there are two options here. one is that we have no buffered data
	// and the entire packet is contained within the buf. in that case
	// we could optimize the daylights out of the process and directly
	// drop the packet on the handler.
	
	while (buf.avail() > 0)
	{
		if (wantHeader)
		{
			// do we have enough to make a header?
			if (savedBuf.length() + buf.avail() >= HEADER_SIZE)
			{
				int pktSize;
				if (savedBuf.length() == 0)
				{
					// savedBuf is empty, entire header in buf.
					pktSize = processHeader( buf, false );
				}
				else // header split across savedBuf and buf
				{
					// move just enough data from buf to savedBuf to have a header.
					int needFromBuf = HEADER_SIZE - savedBuf.length();
					savedBuf.put( buf, needFromBuf );
					savedBuf.setIndex( 0 );
					pktSize = processHeader( savedBuf, true );
				}
				
				// a zero-size packet has no body; go straight back to
				// scanning for the next header.
				if (pktSize == 0)
					continue;
				
				bodyLen = pktSize;
				wantHeader = false;
			}
			else // want header, but there's not enough to make it.
			{
				// save buf in savedBuf.
				savedBuf.setIndex( savedBuf.length() );
				savedBuf.put( buf );
			}
		}
		else if (savedBuf.length() + buf.avail() >= bodyLen)
		{
			// want body, and there's enough to make it.
			// three possible cases: the body is entirely in savedBuf,
			// the body is split, or the body is entirely in buf. assert
			// that the body cannot entirely be in savedBuf, or else
			// we'd have processed it last time.
			assert savedBuf.length() < bodyLen;
			
			if (savedBuf.length() == 0)
			{
				// savedBuf is empty, entire body in buf.
				// temporarily clamp buf's length to the body's end so the
				// handler only sees this one packet, then restore it and
				// advance the index past the consumed body.
				int length = buf.length();
				int index = buf.index();
				buf.setLength( index+bodyLen );
				
				session.sessionPacket( sender, buf );
				
				buf.setLength( length );
				buf.setIndex( index+bodyLen );
				
				wantHeader = true;
			}
			else // body split across savedBuf and buf
			{
				// move just enough data from buf to savedBuf to have a body.
				int needFromBuf = bodyLen - savedBuf.length();
				savedBuf.put( buf, needFromBuf );
				savedBuf.setIndex( 0 );
				
				session.sessionPacket( sender, savedBuf );
				
				savedBuf.reset();
				
				wantHeader = true;
			}
		}
		else // want body, but there's not enough to make it.
		{
			// save buf in savedBuf.
			savedBuf.put( buf );
		}
	}
	
	// buf is now empty, and there's nothing else to do.
	assert buf.avail() == 0;
}
160996_36
/**
 * Accepts a chunk of stream data and re-frames it into packets: a fixed-size
 * header (HEADER_SIZE bytes, parsed by processHeader) followed by a body of
 * the length the header announced. Complete bodies are handed to
 * session.sessionPacket; partial headers/bodies are accumulated in savedBuf
 * across calls until enough bytes arrive. Framing state lives in the fields
 * wantHeader, bodyLen and savedBuf.
 *
 * @param sender passed through to session.sessionPacket untouched.
 * @param buf    incoming bytes; fully consumed before this method returns.
 * @throws Exception whatever processHeader or session.sessionPacket throws.
 */
public void sessionData( Who sender, FlexBuffer buf ) throws Exception
{
	// there are two options here. one is that we have no buffered data
	// and the entire packet is contained within the buf. in that case
	// we could optimize the daylights out of the process and directly
	// drop the packet on the handler.
	
	while (buf.avail() > 0)
	{
		if (wantHeader)
		{
			// do we have enough to make a header?
			if (savedBuf.length() + buf.avail() >= HEADER_SIZE)
			{
				int pktSize;
				if (savedBuf.length() == 0)
				{
					// savedBuf is empty, entire header in buf.
					pktSize = processHeader( buf, false );
				}
				else // header split across savedBuf and buf
				{
					// move just enough data from buf to savedBuf to have a header.
					int needFromBuf = HEADER_SIZE - savedBuf.length();
					savedBuf.put( buf, needFromBuf );
					savedBuf.setIndex( 0 );
					pktSize = processHeader( savedBuf, true );
				}
				
				// a zero-size packet has no body; go straight back to
				// scanning for the next header.
				if (pktSize == 0)
					continue;
				
				bodyLen = pktSize;
				wantHeader = false;
			}
			else // want header, but there's not enough to make it.
			{
				// save buf in savedBuf.
				savedBuf.setIndex( savedBuf.length() );
				savedBuf.put( buf );
			}
		}
		else if (savedBuf.length() + buf.avail() >= bodyLen)
		{
			// want body, and there's enough to make it.
			// three possible cases: the body is entirely in savedBuf,
			// the body is split, or the body is entirely in buf. assert
			// that the body cannot entirely be in savedBuf, or else
			// we'd have processed it last time.
			assert savedBuf.length() < bodyLen;
			
			if (savedBuf.length() == 0)
			{
				// savedBuf is empty, entire body in buf.
				// temporarily clamp buf's length to the body's end so the
				// handler only sees this one packet, then restore it and
				// advance the index past the consumed body.
				int length = buf.length();
				int index = buf.index();
				buf.setLength( index+bodyLen );
				
				session.sessionPacket( sender, buf );
				
				buf.setLength( length );
				buf.setIndex( index+bodyLen );
				
				wantHeader = true;
			}
			else // body split across savedBuf and buf
			{
				// move just enough data from buf to savedBuf to have a body.
				int needFromBuf = bodyLen - savedBuf.length();
				savedBuf.put( buf, needFromBuf );
				savedBuf.setIndex( 0 );
				
				session.sessionPacket( sender, savedBuf );
				
				savedBuf.reset();
				
				wantHeader = true;
			}
		}
		else // want body, but there's not enough to make it.
		{
			// save buf in savedBuf.
			savedBuf.put( buf );
		}
	}
	
	// buf is now empty, and there's nothing else to do.
	assert buf.avail() == 0;
}
160996_37
/**
 * Accepts a chunk of stream data and re-frames it into packets: a fixed-size
 * header (HEADER_SIZE bytes, parsed by processHeader) followed by a body of
 * the length the header announced. Complete bodies are handed to
 * session.sessionPacket; partial headers/bodies are accumulated in savedBuf
 * across calls until enough bytes arrive. Framing state lives in the fields
 * wantHeader, bodyLen and savedBuf.
 *
 * @param sender passed through to session.sessionPacket untouched.
 * @param buf    incoming bytes; fully consumed before this method returns.
 * @throws Exception whatever processHeader or session.sessionPacket throws.
 */
public void sessionData( Who sender, FlexBuffer buf ) throws Exception
{
	// there are two options here. one is that we have no buffered data
	// and the entire packet is contained within the buf. in that case
	// we could optimize the daylights out of the process and directly
	// drop the packet on the handler.
	
	while (buf.avail() > 0)
	{
		if (wantHeader)
		{
			// do we have enough to make a header?
			if (savedBuf.length() + buf.avail() >= HEADER_SIZE)
			{
				int pktSize;
				if (savedBuf.length() == 0)
				{
					// savedBuf is empty, entire header in buf.
					pktSize = processHeader( buf, false );
				}
				else // header split across savedBuf and buf
				{
					// move just enough data from buf to savedBuf to have a header.
					int needFromBuf = HEADER_SIZE - savedBuf.length();
					savedBuf.put( buf, needFromBuf );
					savedBuf.setIndex( 0 );
					pktSize = processHeader( savedBuf, true );
				}
				
				// a zero-size packet has no body; go straight back to
				// scanning for the next header.
				if (pktSize == 0)
					continue;
				
				bodyLen = pktSize;
				wantHeader = false;
			}
			else // want header, but there's not enough to make it.
			{
				// save buf in savedBuf.
				savedBuf.setIndex( savedBuf.length() );
				savedBuf.put( buf );
			}
		}
		else if (savedBuf.length() + buf.avail() >= bodyLen)
		{
			// want body, and there's enough to make it.
			// three possible cases: the body is entirely in savedBuf,
			// the body is split, or the body is entirely in buf. assert
			// that the body cannot entirely be in savedBuf, or else
			// we'd have processed it last time.
			assert savedBuf.length() < bodyLen;
			
			if (savedBuf.length() == 0)
			{
				// savedBuf is empty, entire body in buf.
				// temporarily clamp buf's length to the body's end so the
				// handler only sees this one packet, then restore it and
				// advance the index past the consumed body.
				int length = buf.length();
				int index = buf.index();
				buf.setLength( index+bodyLen );
				
				session.sessionPacket( sender, buf );
				
				buf.setLength( length );
				buf.setIndex( index+bodyLen );
				
				wantHeader = true;
			}
			else // body split across savedBuf and buf
			{
				// move just enough data from buf to savedBuf to have a body.
				int needFromBuf = bodyLen - savedBuf.length();
				savedBuf.put( buf, needFromBuf );
				savedBuf.setIndex( 0 );
				
				session.sessionPacket( sender, savedBuf );
				
				savedBuf.reset();
				
				wantHeader = true;
			}
		}
		else // want body, but there's not enough to make it.
		{
			// save buf in savedBuf.
			savedBuf.put( buf );
		}
	}
	
	// buf is now empty, and there's nothing else to do.
	assert buf.avail() == 0;
}
160996_38
public void sessionData( Who sender, FlexBuffer buf ) throws Exception { // there are two options here. one is that we have no buffered data // and the entire packet is contained within the buf. in that case // we could optimize the daylights out of the process and directly // drop the packet on the handler. while (buf.avail() > 0) { if (wantHeader) { // do we have enough to make a header? if (savedBuf.length() + buf.avail() >= HEADER_SIZE) { int pktSize; if (savedBuf.length() == 0) { // savedBuf is empty, entire header in buf. pktSize = processHeader( buf, false ); } else // header split across savedBuf and buf { // move just enough data from buf to savedBuf to have a header. int needFromBuf = HEADER_SIZE - savedBuf.length(); savedBuf.put( buf, needFromBuf ); savedBuf.setIndex( 0 ); pktSize = processHeader( savedBuf, true ); } if (pktSize == 0) continue; bodyLen = pktSize; wantHeader = false; } else // want header, but there's not enough to make it. { // save buf in savedBuf. savedBuf.setIndex( savedBuf.length() ); savedBuf.put( buf ); } } else if (savedBuf.length() + buf.avail() >= bodyLen) { // want body, and there's enough to make it. // three possible cases: the body is entirely in savedBuf, // the body is split, or the body is entirely in buf. assert // that the body cannot entirely be in savedBuf, or else // we'd have processed it last time. assert savedBuf.length() < bodyLen; if (savedBuf.length() == 0) { // savedBuf is empty, entire body in buf. int length = buf.length(); int index = buf.index(); buf.setLength( index+bodyLen ); session.sessionPacket( sender, buf ); buf.setLength( length ); buf.setIndex( index+bodyLen ); wantHeader = true; } else // body split across savedBuf and buf { // move just enough data from buf to savedBuf to have a body. 
int needFromBuf = bodyLen - savedBuf.length(); savedBuf.put( buf, needFromBuf ); savedBuf.setIndex( 0 ); session.sessionPacket( sender, savedBuf ); savedBuf.reset(); wantHeader = true; } } else // want body, but there's not enough to make it. { // save buf in savedBuf. savedBuf.put( buf ); } } // buf is now empty, and there's nothing else to do. assert buf.avail() == 0; }
160996_39
public void sessionData( Who sender, FlexBuffer buf ) throws Exception { // there are two options here. one is that we have no buffered data // and the entire packet is contained within the buf. in that case // we could optimize the daylights out of the process and directly // drop the packet on the handler. while (buf.avail() > 0) { if (wantHeader) { // do we have enough to make a header? if (savedBuf.length() + buf.avail() >= HEADER_SIZE) { int pktSize; if (savedBuf.length() == 0) { // savedBuf is empty, entire header in buf. pktSize = processHeader( buf, false ); } else // header split across savedBuf and buf { // move just enough data from buf to savedBuf to have a header. int needFromBuf = HEADER_SIZE - savedBuf.length(); savedBuf.put( buf, needFromBuf ); savedBuf.setIndex( 0 ); pktSize = processHeader( savedBuf, true ); } if (pktSize == 0) continue; bodyLen = pktSize; wantHeader = false; } else // want header, but there's not enough to make it. { // save buf in savedBuf. savedBuf.setIndex( savedBuf.length() ); savedBuf.put( buf ); } } else if (savedBuf.length() + buf.avail() >= bodyLen) { // want body, and there's enough to make it. // three possible cases: the body is entirely in savedBuf, // the body is split, or the body is entirely in buf. assert // that the body cannot entirely be in savedBuf, or else // we'd have processed it last time. assert savedBuf.length() < bodyLen; if (savedBuf.length() == 0) { // savedBuf is empty, entire body in buf. int length = buf.length(); int index = buf.index(); buf.setLength( index+bodyLen ); session.sessionPacket( sender, buf ); buf.setLength( length ); buf.setIndex( index+bodyLen ); wantHeader = true; } else // body split across savedBuf and buf { // move just enough data from buf to savedBuf to have a body. 
int needFromBuf = bodyLen - savedBuf.length(); savedBuf.put( buf, needFromBuf ); savedBuf.setIndex( 0 ); session.sessionPacket( sender, savedBuf ); savedBuf.reset(); wantHeader = true; } } else // want body, but there's not enough to make it. { // save buf in savedBuf. savedBuf.put( buf ); } } // buf is now empty, and there's nothing else to do. assert buf.avail() == 0; }
160996_40
public void sessionData( Who sender, FlexBuffer buf ) throws Exception { // there are two options here. one is that we have no buffered data // and the entire packet is contained within the buf. in that case // we could optimize the daylights out of the process and directly // drop the packet on the handler. while (buf.avail() > 0) { if (wantHeader) { // do we have enough to make a header? if (savedBuf.length() + buf.avail() >= HEADER_SIZE) { int pktSize; if (savedBuf.length() == 0) { // savedBuf is empty, entire header in buf. pktSize = processHeader( buf, false ); } else // header split across savedBuf and buf { // move just enough data from buf to savedBuf to have a header. int needFromBuf = HEADER_SIZE - savedBuf.length(); savedBuf.put( buf, needFromBuf ); savedBuf.setIndex( 0 ); pktSize = processHeader( savedBuf, true ); } if (pktSize == 0) continue; bodyLen = pktSize; wantHeader = false; } else // want header, but there's not enough to make it. { // save buf in savedBuf. savedBuf.setIndex( savedBuf.length() ); savedBuf.put( buf ); } } else if (savedBuf.length() + buf.avail() >= bodyLen) { // want body, and there's enough to make it. // three possible cases: the body is entirely in savedBuf, // the body is split, or the body is entirely in buf. assert // that the body cannot entirely be in savedBuf, or else // we'd have processed it last time. assert savedBuf.length() < bodyLen; if (savedBuf.length() == 0) { // savedBuf is empty, entire body in buf. int length = buf.length(); int index = buf.index(); buf.setLength( index+bodyLen ); session.sessionPacket( sender, buf ); buf.setLength( length ); buf.setIndex( index+bodyLen ); wantHeader = true; } else // body split across savedBuf and buf { // move just enough data from buf to savedBuf to have a body. 
int needFromBuf = bodyLen - savedBuf.length(); savedBuf.put( buf, needFromBuf ); savedBuf.setIndex( 0 ); session.sessionPacket( sender, savedBuf ); savedBuf.reset(); wantHeader = true; } } else // want body, but there's not enough to make it. { // save buf in savedBuf. savedBuf.put( buf ); } } // buf is now empty, and there's nothing else to do. assert buf.avail() == 0; }
160996_41
public void sessionData( Who sender, FlexBuffer buf ) throws Exception { // there are two options here. one is that we have no buffered data // and the entire packet is contained within the buf. in that case // we could optimize the daylights out of the process and directly // drop the packet on the handler. while (buf.avail() > 0) { if (wantHeader) { // do we have enough to make a header? if (savedBuf.length() + buf.avail() >= HEADER_SIZE) { int pktSize; if (savedBuf.length() == 0) { // savedBuf is empty, entire header in buf. pktSize = processHeader( buf, false ); } else // header split across savedBuf and buf { // move just enough data from buf to savedBuf to have a header. int needFromBuf = HEADER_SIZE - savedBuf.length(); savedBuf.put( buf, needFromBuf ); savedBuf.setIndex( 0 ); pktSize = processHeader( savedBuf, true ); } if (pktSize == 0) continue; bodyLen = pktSize; wantHeader = false; } else // want header, but there's not enough to make it. { // save buf in savedBuf. savedBuf.setIndex( savedBuf.length() ); savedBuf.put( buf ); } } else if (savedBuf.length() + buf.avail() >= bodyLen) { // want body, and there's enough to make it. // three possible cases: the body is entirely in savedBuf, // the body is split, or the body is entirely in buf. assert // that the body cannot entirely be in savedBuf, or else // we'd have processed it last time. assert savedBuf.length() < bodyLen; if (savedBuf.length() == 0) { // savedBuf is empty, entire body in buf. int length = buf.length(); int index = buf.index(); buf.setLength( index+bodyLen ); session.sessionPacket( sender, buf ); buf.setLength( length ); buf.setIndex( index+bodyLen ); wantHeader = true; } else // body split across savedBuf and buf { // move just enough data from buf to savedBuf to have a body. 
int needFromBuf = bodyLen - savedBuf.length(); savedBuf.put( buf, needFromBuf ); savedBuf.setIndex( 0 ); session.sessionPacket( sender, savedBuf ); savedBuf.reset(); wantHeader = true; } } else // want body, but there's not enough to make it. { // save buf in savedBuf. savedBuf.put( buf ); } } // buf is now empty, and there's nothing else to do. assert buf.avail() == 0; }
160996_42
public void sessionData( Who sender, FlexBuffer buf ) throws Exception { // there are two options here. one is that we have no buffered data // and the entire packet is contained within the buf. in that case // we could optimize the daylights out of the process and directly // drop the packet on the handler. while (buf.avail() > 0) { if (wantHeader) { // do we have enough to make a header? if (savedBuf.length() + buf.avail() >= HEADER_SIZE) { int pktSize; if (savedBuf.length() == 0) { // savedBuf is empty, entire header in buf. pktSize = processHeader( buf, false ); } else // header split across savedBuf and buf { // move just enough data from buf to savedBuf to have a header. int needFromBuf = HEADER_SIZE - savedBuf.length(); savedBuf.put( buf, needFromBuf ); savedBuf.setIndex( 0 ); pktSize = processHeader( savedBuf, true ); } if (pktSize == 0) continue; bodyLen = pktSize; wantHeader = false; } else // want header, but there's not enough to make it. { // save buf in savedBuf. savedBuf.setIndex( savedBuf.length() ); savedBuf.put( buf ); } } else if (savedBuf.length() + buf.avail() >= bodyLen) { // want body, and there's enough to make it. // three possible cases: the body is entirely in savedBuf, // the body is split, or the body is entirely in buf. assert // that the body cannot entirely be in savedBuf, or else // we'd have processed it last time. assert savedBuf.length() < bodyLen; if (savedBuf.length() == 0) { // savedBuf is empty, entire body in buf. int length = buf.length(); int index = buf.index(); buf.setLength( index+bodyLen ); session.sessionPacket( sender, buf ); buf.setLength( length ); buf.setIndex( index+bodyLen ); wantHeader = true; } else // body split across savedBuf and buf { // move just enough data from buf to savedBuf to have a body. 
int needFromBuf = bodyLen - savedBuf.length(); savedBuf.put( buf, needFromBuf ); savedBuf.setIndex( 0 ); session.sessionPacket( sender, savedBuf ); savedBuf.reset(); wantHeader = true; } } else // want body, but there's not enough to make it. { // save buf in savedBuf. savedBuf.put( buf ); } } // buf is now empty, and there's nothing else to do. assert buf.avail() == 0; }
160996_43
public Object sessionQuery( Object query ) throws Exception { return session.sessionQuery( query ); }
160996_44
public void sessionControl( Object control, Object value ) throws Exception { session.sessionControl( control, value ); }
160996_45
public void sessionNotify( Object event ) throws Exception { session.sessionNotify( event ); }
160996_46
public Object transportQuery( Object query ) throws Exception { return transport.transportQuery( query ); }
160996_47
public void transportControl( Object control, Object value ) throws Exception { transport.transportControl( control, value ); }
160996_48
public void transportNotify( Object event ) throws Exception { transport.transportNotify( event ); }
160996_49
public ByteBuffer alloc( Notify notify ) throws IOException { synchronized (saved) { if (!history.used( 1 )) { if (notify == null) throw new IOException( "out of buffers" ); // notify != null register( notify ); return null; } try { return allocBuf(); } catch ( Error e ) { history.used( -1 ); throw e; } catch ( RuntimeException e ) { history.used( -1 ); throw e; } } }
160996_50
public boolean used( int k ) { if (k == 0) throw new IllegalArgumentException( "k == 0" ); int newUsed = used + k; if (newUsed > limit) return false; // overflow if (newUsed < 0) throw new IllegalStateException( "newUsed < 0" ); used = newUsed; bump( used ); return true; }
160996_51
public boolean used( int k ) { if (k == 0) throw new IllegalArgumentException( "k == 0" ); int newUsed = used + k; if (newUsed > limit) return false; // overflow if (newUsed < 0) throw new IllegalStateException( "newUsed < 0" ); used = newUsed; bump( used ); return true; }
160996_52
public boolean used( int k ) { if (k == 0) throw new IllegalArgumentException( "k == 0" ); int newUsed = used + k; if (newUsed > limit) return false; // overflow if (newUsed < 0) throw new IllegalStateException( "newUsed < 0" ); used = newUsed; bump( used ); return true; }
160996_53
public boolean used( int k ) { if (k == 0) throw new IllegalArgumentException( "k == 0" ); int newUsed = used + k; if (newUsed > limit) return false; // overflow if (newUsed < 0) throw new IllegalStateException( "newUsed < 0" ); used = newUsed; bump( used ); return true; }
160996_54
public boolean used( int k ) { if (k == 0) throw new IllegalArgumentException( "k == 0" ); int newUsed = used + k; if (newUsed > limit) return false; // overflow if (newUsed < 0) throw new IllegalStateException( "newUsed < 0" ); used = newUsed; bump( used ); return true; }
160996_55
public boolean used( int k ) { if (k == 0) throw new IllegalArgumentException( "k == 0" ); int newUsed = used + k; if (newUsed > limit) return false; // overflow if (newUsed < 0) throw new IllegalStateException( "newUsed < 0" ); used = newUsed; bump( used ); return true; }
160996_56
public boolean used( int k ) { if (k == 0) throw new IllegalArgumentException( "k == 0" ); int newUsed = used + k; if (newUsed > limit) return false; // overflow if (newUsed < 0) throw new IllegalStateException( "newUsed < 0" ); used = newUsed; bump( used ); return true; }
160996_57
public boolean used( int k ) { if (k == 0) throw new IllegalArgumentException( "k == 0" ); int newUsed = used + k; if (newUsed > limit) return false; // overflow if (newUsed < 0) throw new IllegalStateException( "newUsed < 0" ); used = newUsed; bump( used ); return true; }
160996_58
public boolean used( int k ) { if (k == 0) throw new IllegalArgumentException( "k == 0" ); int newUsed = used + k; if (newUsed > limit) return false; // overflow if (newUsed < 0) throw new IllegalStateException( "newUsed < 0" ); used = newUsed; bump( used ); return true; }
160996_59
public void alloc( int k ) { if (k == 0) throw new IllegalArgumentException( "k == 0" ); int newAlloc = alloc + k; if (newAlloc > limit) throw new IllegalStateException( "newAlloc > limit" ); if (newAlloc < 0) throw new IllegalStateException( "newAlloc < 0" ); alloc = newAlloc; }
160996_60
public void alloc( int k ) { if (k == 0) throw new IllegalArgumentException( "k == 0" ); int newAlloc = alloc + k; if (newAlloc > limit) throw new IllegalStateException( "newAlloc > limit" ); if (newAlloc < 0) throw new IllegalStateException( "newAlloc < 0" ); alloc = newAlloc; }
160996_61
public void alloc( int k ) { if (k == 0) throw new IllegalArgumentException( "k == 0" ); int newAlloc = alloc + k; if (newAlloc > limit) throw new IllegalStateException( "newAlloc > limit" ); if (newAlloc < 0) throw new IllegalStateException( "newAlloc < 0" ); alloc = newAlloc; }
160996_62
public void alloc( int k ) { if (k == 0) throw new IllegalArgumentException( "k == 0" ); int newAlloc = alloc + k; if (newAlloc > limit) throw new IllegalStateException( "newAlloc > limit" ); if (newAlloc < 0) throw new IllegalStateException( "newAlloc < 0" ); alloc = newAlloc; }
160996_63
public void alloc( int k ) { if (k == 0) throw new IllegalArgumentException( "k == 0" ); int newAlloc = alloc + k; if (newAlloc > limit) throw new IllegalStateException( "newAlloc > limit" ); if (newAlloc < 0) throw new IllegalStateException( "newAlloc < 0" ); alloc = newAlloc; }
160996_64
public void alloc( int k ) { if (k == 0) throw new IllegalArgumentException( "k == 0" ); int newAlloc = alloc + k; if (newAlloc > limit) throw new IllegalStateException( "newAlloc > limit" ); if (newAlloc < 0) throw new IllegalStateException( "newAlloc < 0" ); alloc = newAlloc; }