code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name { public void process(CAS tcas) { String text = tcas.getDocumentText(); Document document = new DocumentImpl(text); cogroo.analyze(document); for (Sentence sentence : document.getSentences()) { // create sentence annotations AnnotationFS sentenceAnn = tcas.createAnnotation(mSentenceType, sentence.getStart(), sentence.getEnd()); tcas.getIndexRepository().addFS(sentenceAnn); int sentenceOffset = sentence.getStart(); AnnotationFS[] tokenAnnotationArr = new AnnotationFS[sentence .getTokens().size()]; int i = 0; for (Token token : sentence.getTokens()) { // create token annotations tokenAnnotationArr[i] = tcas.createAnnotation(mTokenType, sentenceOffset + token.getStart(), sentenceOffset + token.getEnd()); // add POSTag annotations tokenAnnotationArr[i].setStringValue(this.mPosFeature, token.getPOSTag()); // add lexeme annotations tokenAnnotationArr[i].setStringValue(this.mLexemeFeature, token.getLexeme()); // add lemma annotations StringArrayFS lemmas = tcas.createStringArrayFS(token .getLemmas().length); lemmas.copyFromArray(token.getLemmas(), 0, 0, token.getLemmas().length); tokenAnnotationArr[i].setFeatureValue(this.mLemmaFeature, lemmas); tokenAnnotationArr[i].setStringValue(this.mFeaturesFeature, token.getFeatures()); tcas.getIndexRepository().addFS(tokenAnnotationArr[i]); i++; } // chunks for (Chunk chunk : sentence.getChunks()) { int start = sentence.getTokens().get(chunk.getStart()) .getStart() + sentenceOffset; int end = sentence.getTokens().get(chunk.getEnd() - 1).getEnd() + sentenceOffset; AnnotationFS chunkAnn = tcas.createAnnotation(mChunkType, start, end); chunkAnn.setStringValue(mChunkFeature, chunk.getTag()); if(chunk.getHeadIndex() >= 0) { chunkAnn.setFeatureValue(mChunkHead, tokenAnnotationArr[chunk.getHeadIndex()]); } tcas.getIndexRepository().addFS(chunkAnn); } // syntactic chunk for (SyntacticChunk sc : sentence.getSyntacticChunks()) { int start = sentence.getTokens().get(sc.getStart()).getStart() + sentenceOffset; int end = 
sentence.getTokens().get(sc.getEnd() - 1).getEnd() + sentenceOffset; AnnotationFS syntChunkAnn = tcas.createAnnotation( mSyntacticChunkType, start, end); syntChunkAnn.setStringValue(mSyntacticChunkFeature, sc.getTag()); tcas.getIndexRepository().addFS(syntChunkAnn); } } } }
public class class_name { public void process(CAS tcas) { String text = tcas.getDocumentText(); Document document = new DocumentImpl(text); cogroo.analyze(document); for (Sentence sentence : document.getSentences()) { // create sentence annotations AnnotationFS sentenceAnn = tcas.createAnnotation(mSentenceType, sentence.getStart(), sentence.getEnd()); tcas.getIndexRepository().addFS(sentenceAnn); // depends on control dependency: [for], data = [sentence] int sentenceOffset = sentence.getStart(); AnnotationFS[] tokenAnnotationArr = new AnnotationFS[sentence .getTokens().size()]; int i = 0; for (Token token : sentence.getTokens()) { // create token annotations tokenAnnotationArr[i] = tcas.createAnnotation(mTokenType, sentenceOffset + token.getStart(), sentenceOffset + token.getEnd()); // depends on control dependency: [for], data = [token] // add POSTag annotations tokenAnnotationArr[i].setStringValue(this.mPosFeature, token.getPOSTag()); // depends on control dependency: [for], data = [token] // add lexeme annotations tokenAnnotationArr[i].setStringValue(this.mLexemeFeature, token.getLexeme()); // depends on control dependency: [for], data = [token] // add lemma annotations StringArrayFS lemmas = tcas.createStringArrayFS(token .getLemmas().length); lemmas.copyFromArray(token.getLemmas(), 0, 0, token.getLemmas().length); // depends on control dependency: [for], data = [token] tokenAnnotationArr[i].setFeatureValue(this.mLemmaFeature, lemmas); // depends on control dependency: [for], data = [token] tokenAnnotationArr[i].setStringValue(this.mFeaturesFeature, token.getFeatures()); // depends on control dependency: [for], data = [token] tcas.getIndexRepository().addFS(tokenAnnotationArr[i]); // depends on control dependency: [for], data = [token] i++; // depends on control dependency: [for], data = [none] } // chunks for (Chunk chunk : sentence.getChunks()) { int start = sentence.getTokens().get(chunk.getStart()) .getStart() + sentenceOffset; int end = 
sentence.getTokens().get(chunk.getEnd() - 1).getEnd() + sentenceOffset; AnnotationFS chunkAnn = tcas.createAnnotation(mChunkType, start, end); chunkAnn.setStringValue(mChunkFeature, chunk.getTag()); // depends on control dependency: [for], data = [chunk] if(chunk.getHeadIndex() >= 0) { chunkAnn.setFeatureValue(mChunkHead, tokenAnnotationArr[chunk.getHeadIndex()]); // depends on control dependency: [if], data = [none] } tcas.getIndexRepository().addFS(chunkAnn); // depends on control dependency: [for], data = [chunk] } // syntactic chunk for (SyntacticChunk sc : sentence.getSyntacticChunks()) { int start = sentence.getTokens().get(sc.getStart()).getStart() + sentenceOffset; int end = sentence.getTokens().get(sc.getEnd() - 1).getEnd() + sentenceOffset; AnnotationFS syntChunkAnn = tcas.createAnnotation( mSyntacticChunkType, start, end); syntChunkAnn.setStringValue(mSyntacticChunkFeature, sc.getTag()); // depends on control dependency: [for], data = [sc] tcas.getIndexRepository().addFS(syntChunkAnn); // depends on control dependency: [for], data = [none] } } } }
public class class_name { public static final double hypot(final double a, final double b) { double r; if (Math.abs(a) > Math.abs(b)) { r = b / a; r = Math.abs(a) * Math.sqrt(1.0 + r * r); } else if (b != 0) { r = a / b; r = Math.abs(b) * Math.sqrt(1.0 + r * r); } else { r = 0.0; } return r; } }
public class class_name { public static final double hypot(final double a, final double b) { double r; if (Math.abs(a) > Math.abs(b)) { r = b / a; // depends on control dependency: [if], data = [none] r = Math.abs(a) * Math.sqrt(1.0 + r * r); // depends on control dependency: [if], data = [none] } else if (b != 0) { r = a / b; // depends on control dependency: [if], data = [none] r = Math.abs(b) * Math.sqrt(1.0 + r * r); // depends on control dependency: [if], data = [(b] } else { r = 0.0; // depends on control dependency: [if], data = [none] } return r; } }
public class class_name { public OqlBuilder<T> groupBy(final String what) { if (Strings.isNotEmpty(what)) { groups.add(what); } return this; } }
public class class_name { public OqlBuilder<T> groupBy(final String what) { if (Strings.isNotEmpty(what)) { groups.add(what); // depends on control dependency: [if], data = [none] } return this; } }
public class class_name { public Comment removeComment(final long commentId) { Comment removedComment = null; if (comments != null) { for (int index = comments.size() - 1; index >= 0; --index) { Comment currentComment = comments.get(index); if (currentComment.getId() == commentId) { removedComment = comments.remove(index); break; } } } return removedComment; } }
public class class_name { public Comment removeComment(final long commentId) { Comment removedComment = null; if (comments != null) { for (int index = comments.size() - 1; index >= 0; --index) { Comment currentComment = comments.get(index); if (currentComment.getId() == commentId) { removedComment = comments.remove(index); // depends on control dependency: [if], data = [none] break; } } } return removedComment; } }
public class class_name { public static JsonUtil getInstance() { if (instance == null) { synchronized (JsonUtilImpl.class) { if (instance == null) { JsonUtilImpl impl = new JsonUtilImpl(); impl.initialize(); } } } return instance; } }
public class class_name { public static JsonUtil getInstance() { if (instance == null) { synchronized (JsonUtilImpl.class) { // depends on control dependency: [if], data = [none] if (instance == null) { JsonUtilImpl impl = new JsonUtilImpl(); impl.initialize(); // depends on control dependency: [if], data = [none] } } } return instance; } }
public class class_name { private void drawCircle(Canvas canvas, float x, float y, int color, float circleScale) { dayPaint.setColor(color); if (animationStatus == ANIMATE_INDICATORS) { float maxRadius = circleScale * bigCircleIndicatorRadius * 1.4f; drawCircle(canvas, growfactorIndicator > maxRadius ? maxRadius: growfactorIndicator, x, y - (textHeight / 6)); } else { drawCircle(canvas, circleScale * bigCircleIndicatorRadius, x, y - (textHeight / 6)); } } }
public class class_name { private void drawCircle(Canvas canvas, float x, float y, int color, float circleScale) { dayPaint.setColor(color); if (animationStatus == ANIMATE_INDICATORS) { float maxRadius = circleScale * bigCircleIndicatorRadius * 1.4f; drawCircle(canvas, growfactorIndicator > maxRadius ? maxRadius: growfactorIndicator, x, y - (textHeight / 6)); // depends on control dependency: [if], data = [none] } else { drawCircle(canvas, circleScale * bigCircleIndicatorRadius, x, y - (textHeight / 6)); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static String toCamelCase(String name) { String[] parts = name.split("_"); String result = parts[0]; for (int i = 1; i < parts.length; i++) { if (parts[i].length() > 0) { result += upperFirstChar(parts[i]); } } return result; } }
public class class_name { public static String toCamelCase(String name) { String[] parts = name.split("_"); String result = parts[0]; for (int i = 1; i < parts.length; i++) { if (parts[i].length() > 0) { result += upperFirstChar(parts[i]); // depends on control dependency: [if], data = [none] } } return result; } }
public class class_name { private boolean paramAppend(StringBuilder sb, String name, String value, ParameterParser parser) { boolean isEdited = false; if (name != null) { sb.append(name); isEdited = true; } if (value != null) { sb.append(parser.getDefaultKeyValueSeparator()); sb.append(value); isEdited = true; } return isEdited; } }
public class class_name { private boolean paramAppend(StringBuilder sb, String name, String value, ParameterParser parser) { boolean isEdited = false; if (name != null) { sb.append(name); // depends on control dependency: [if], data = [(name] isEdited = true; // depends on control dependency: [if], data = [none] } if (value != null) { sb.append(parser.getDefaultKeyValueSeparator()); // depends on control dependency: [if], data = [none] sb.append(value); // depends on control dependency: [if], data = [(value] isEdited = true; // depends on control dependency: [if], data = [none] } return isEdited; } }
public class class_name { public T set(int index, T element) { if (index == 0) { T previousElement = this.element1; this.element1 = element; return previousElement; } throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + this.size()); } }
public class class_name { public T set(int index, T element) { if (index == 0) { T previousElement = this.element1; this.element1 = element; // depends on control dependency: [if], data = [none] return previousElement; // depends on control dependency: [if], data = [none] } throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + this.size()); } }
public class class_name { private static Interval parseEndDateTime(Instant start, ZoneOffset offset, CharSequence endStr) { try { TemporalAccessor temporal = DateTimeFormatter.ISO_DATE_TIME.parseBest(endStr, OffsetDateTime::from, LocalDateTime::from); if (temporal instanceof OffsetDateTime) { OffsetDateTime odt = (OffsetDateTime) temporal; return Interval.of(start, odt.toInstant()); } else { // infer offset from start if not specified by end LocalDateTime ldt = (LocalDateTime) temporal; return Interval.of(start, ldt.toInstant(offset)); } } catch (DateTimeParseException ex) { Instant end = Instant.parse(endStr); return Interval.of(start, end); } } }
public class class_name { private static Interval parseEndDateTime(Instant start, ZoneOffset offset, CharSequence endStr) { try { TemporalAccessor temporal = DateTimeFormatter.ISO_DATE_TIME.parseBest(endStr, OffsetDateTime::from, LocalDateTime::from); if (temporal instanceof OffsetDateTime) { OffsetDateTime odt = (OffsetDateTime) temporal; return Interval.of(start, odt.toInstant()); // depends on control dependency: [if], data = [none] } else { // infer offset from start if not specified by end LocalDateTime ldt = (LocalDateTime) temporal; return Interval.of(start, ldt.toInstant(offset)); // depends on control dependency: [if], data = [none] } } catch (DateTimeParseException ex) { Instant end = Instant.parse(endStr); return Interval.of(start, end); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private void killTask(TaskAttemptID tid, String msg, boolean wasFailure) { // Kill the task and mark it as killed. taskTracker.cleanUpOverMemoryTask(tid, wasFailure, msg); // Now destroy the ProcessTree, remove it from monitoring map. CGroupProcessTreeInfo ptInfo = processTreeInfoMap.get(tid); try { LinuxSystemCall.killProcessGroup(Integer.parseInt(ptInfo.getPID())); } catch (java.io.IOException e) { LOG.error("Could not kill process group " + ptInfo.getPID(), e); } processTreeInfoMap.remove(tid); LOG.info("Removed ProcessTree with root " + ptInfo.getPID()); } }
public class class_name { private void killTask(TaskAttemptID tid, String msg, boolean wasFailure) { // Kill the task and mark it as killed. taskTracker.cleanUpOverMemoryTask(tid, wasFailure, msg); // Now destroy the ProcessTree, remove it from monitoring map. CGroupProcessTreeInfo ptInfo = processTreeInfoMap.get(tid); try { LinuxSystemCall.killProcessGroup(Integer.parseInt(ptInfo.getPID())); // depends on control dependency: [try], data = [none] } catch (java.io.IOException e) { LOG.error("Could not kill process group " + ptInfo.getPID(), e); } // depends on control dependency: [catch], data = [none] processTreeInfoMap.remove(tid); LOG.info("Removed ProcessTree with root " + ptInfo.getPID()); } }
public class class_name { public synchronized void unsynchronizeWith(UpdateSynchronizer oldsync) { if(synchronizer == null) { LoggingUtil.warning("Warning: was not synchronized."); return; } if(synchronizer != oldsync) { LoggingUtil.warning("Warning: was synchronized differently!"); return; } // LoggingUtil.warning("Unsynchronizing: " + sync + " " + oldsync); synchronizer = null; runQueue(); } }
public class class_name { public synchronized void unsynchronizeWith(UpdateSynchronizer oldsync) { if(synchronizer == null) { LoggingUtil.warning("Warning: was not synchronized."); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } if(synchronizer != oldsync) { LoggingUtil.warning("Warning: was synchronized differently!"); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } // LoggingUtil.warning("Unsynchronizing: " + sync + " " + oldsync); synchronizer = null; runQueue(); } }
public class class_name { @Override public Map<String, VariantSet> getAvailableVariants(String mapping) { Map<String, VariantSet> availableVariants = new HashMap<>(); String skinRootDir = getSkinRootDir(mapping, skinMapping.keySet()); if (skinRootDir != null) { Map<String, VariantSet> variantSets = skinMapping.get(skinRootDir); for (VariantSet variantSet : variantSets.values()) { availableVariants.put(variantSet.getType(), variantSet); } } return availableVariants; } }
public class class_name { @Override public Map<String, VariantSet> getAvailableVariants(String mapping) { Map<String, VariantSet> availableVariants = new HashMap<>(); String skinRootDir = getSkinRootDir(mapping, skinMapping.keySet()); if (skinRootDir != null) { Map<String, VariantSet> variantSets = skinMapping.get(skinRootDir); for (VariantSet variantSet : variantSets.values()) { availableVariants.put(variantSet.getType(), variantSet); // depends on control dependency: [for], data = [variantSet] } } return availableVariants; } }
public class class_name { public void marshall(S3ApplicationCodeLocationDescription s3ApplicationCodeLocationDescription, ProtocolMarshaller protocolMarshaller) { if (s3ApplicationCodeLocationDescription == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(s3ApplicationCodeLocationDescription.getBucketARN(), BUCKETARN_BINDING); protocolMarshaller.marshall(s3ApplicationCodeLocationDescription.getFileKey(), FILEKEY_BINDING); protocolMarshaller.marshall(s3ApplicationCodeLocationDescription.getObjectVersion(), OBJECTVERSION_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(S3ApplicationCodeLocationDescription s3ApplicationCodeLocationDescription, ProtocolMarshaller protocolMarshaller) { if (s3ApplicationCodeLocationDescription == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(s3ApplicationCodeLocationDescription.getBucketARN(), BUCKETARN_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(s3ApplicationCodeLocationDescription.getFileKey(), FILEKEY_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(s3ApplicationCodeLocationDescription.getObjectVersion(), OBJECTVERSION_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void and(CClassNode other, ScanEnvironment env) { boolean not1 = isNot(); BitSet bsr1 = bs; CodeRangeBuffer buf1 = mbuf; boolean not2 = other.isNot(); BitSet bsr2 = other.bs; CodeRangeBuffer buf2 = other.mbuf; if (not1) { BitSet bs1 = new BitSet(); bsr1.invertTo(bs1); bsr1 = bs1; } if (not2) { BitSet bs2 = new BitSet(); bsr2.invertTo(bs2); bsr2 = bs2; } bsr1.and(bsr2); if (bsr1 != bs) { bs.copy(bsr1); bsr1 = bs; } if (not1) { bs.invert(); } CodeRangeBuffer pbuf = null; if (!env.enc.isSingleByte()) { if (not1 && not2) { pbuf = CodeRangeBuffer.orCodeRangeBuff(env, buf1, false, buf2, false); } else { pbuf = CodeRangeBuffer.andCodeRangeBuff(buf1, not1, buf2, not2, env); if (not1) { pbuf = CodeRangeBuffer.notCodeRangeBuff(env, pbuf); } } mbuf = pbuf; } } }
public class class_name { public void and(CClassNode other, ScanEnvironment env) { boolean not1 = isNot(); BitSet bsr1 = bs; CodeRangeBuffer buf1 = mbuf; boolean not2 = other.isNot(); BitSet bsr2 = other.bs; CodeRangeBuffer buf2 = other.mbuf; if (not1) { BitSet bs1 = new BitSet(); bsr1.invertTo(bs1); // depends on control dependency: [if], data = [none] bsr1 = bs1; // depends on control dependency: [if], data = [none] } if (not2) { BitSet bs2 = new BitSet(); bsr2.invertTo(bs2); // depends on control dependency: [if], data = [none] bsr2 = bs2; // depends on control dependency: [if], data = [none] } bsr1.and(bsr2); if (bsr1 != bs) { bs.copy(bsr1); // depends on control dependency: [if], data = [(bsr1] bsr1 = bs; // depends on control dependency: [if], data = [none] } if (not1) { bs.invert(); // depends on control dependency: [if], data = [none] } CodeRangeBuffer pbuf = null; if (!env.enc.isSingleByte()) { if (not1 && not2) { pbuf = CodeRangeBuffer.orCodeRangeBuff(env, buf1, false, buf2, false); // depends on control dependency: [if], data = [none] } else { pbuf = CodeRangeBuffer.andCodeRangeBuff(buf1, not1, buf2, not2, env); // depends on control dependency: [if], data = [none] if (not1) { pbuf = CodeRangeBuffer.notCodeRangeBuff(env, pbuf); // depends on control dependency: [if], data = [none] } } mbuf = pbuf; // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public boolean moveToPosition(int position) { try { // For SQLite forward only, best we can do is assume the result set // is at the beginning for (int i = 0; i < position; i++) { if (!resultSet.next()) { return false; } } } catch (SQLException e) { throw new GeoPackageException( "Failed to move ResultSet cursor to first", e); } return true; } }
public class class_name { @Override public boolean moveToPosition(int position) { try { // For SQLite forward only, best we can do is assume the result set // is at the beginning for (int i = 0; i < position; i++) { if (!resultSet.next()) { return false; // depends on control dependency: [if], data = [none] } } } catch (SQLException e) { throw new GeoPackageException( "Failed to move ResultSet cursor to first", e); } // depends on control dependency: [catch], data = [none] return true; } }
public class class_name { public static <T> T pickRandom (T[] values, T skip) { if (values == null || values.length < 2) { return null; } int index = getInt(values.length-1); for (int ii = 0; ii <= index; ii++) { if (values[ii] == skip) { index++; } } return (index >= values.length) ? null : values[index]; } }
public class class_name { public static <T> T pickRandom (T[] values, T skip) { if (values == null || values.length < 2) { return null; // depends on control dependency: [if], data = [none] } int index = getInt(values.length-1); for (int ii = 0; ii <= index; ii++) { if (values[ii] == skip) { index++; // depends on control dependency: [if], data = [none] } } return (index >= values.length) ? null : values[index]; } }
public class class_name { public static <T> List<List<T>> partition(List<T> list, final int partitionSize) { List<List<T>> parts = new ArrayList<List<T>>(); final int listSize = list.size(); for (int i = 0; i < listSize; i += partitionSize) { parts.add(new ArrayList<T>(list.subList(i, Math.min(listSize, i + partitionSize)))); } return parts; } }
public class class_name { public static <T> List<List<T>> partition(List<T> list, final int partitionSize) { List<List<T>> parts = new ArrayList<List<T>>(); final int listSize = list.size(); for (int i = 0; i < listSize; i += partitionSize) { parts.add(new ArrayList<T>(list.subList(i, Math.min(listSize, i + partitionSize)))); // depends on control dependency: [for], data = [i] } return parts; } }
public class class_name { @SuppressWarnings("unchecked") public static <T> T asType(Number self, Class<T> c) { if (c == BigDecimal.class) { return (T) toBigDecimal(self); } else if (c == BigInteger.class) { return (T) toBigInteger(self); } else if (c == Double.class) { return (T) toDouble(self); } else if (c == Float.class) { return (T) toFloat(self); } return asType((Object) self, c); } }
public class class_name { @SuppressWarnings("unchecked") public static <T> T asType(Number self, Class<T> c) { if (c == BigDecimal.class) { return (T) toBigDecimal(self); // depends on control dependency: [if], data = [none] } else if (c == BigInteger.class) { return (T) toBigInteger(self); // depends on control dependency: [if], data = [none] } else if (c == Double.class) { return (T) toDouble(self); // depends on control dependency: [if], data = [none] } else if (c == Float.class) { return (T) toFloat(self); // depends on control dependency: [if], data = [none] } return asType((Object) self, c); } }
public class class_name { public final boolean isEmpty() { if (mDatas.isEmpty()) { return true; } for (int i = 0; i < mDatas.size(); i++) { if (!mDatas.get(i).isEmpty()) { return false; } } return true; } }
public class class_name { public final boolean isEmpty() { if (mDatas.isEmpty()) { return true; // depends on control dependency: [if], data = [none] } for (int i = 0; i < mDatas.size(); i++) { if (!mDatas.get(i).isEmpty()) { return false; // depends on control dependency: [if], data = [none] } } return true; } }
public class class_name { public NmeaMessage parse(String line) { LinkedHashMap<String, String> tags = Maps.newLinkedHashMap(); String remaining; if (line.startsWith("\\")) { int tagFinish = line.lastIndexOf('\\', line.length() - 1); if (tagFinish == -1) throw new NmeaMessageParseException( "no matching \\ symbol to finish tag block: " + line); if (tagFinish == 0) throw new NmeaMessageParseException("tag block is empty or not terminated"); tags = extractTags(line.substring(1, tagFinish)); remaining = line.substring(tagFinish + 1); } else remaining = line; String[] items; String checksum; if (remaining.length() > 0) { if (!remaining.contains("*")) throw new NmeaMessageParseException("checksum delimiter * not found"); items = getNmeaItems(remaining); // TODO validate message using checksum checksum = line.substring(line.indexOf('*') + 1); } else { items = new String[] {}; // TODO decide what value to put here checksum = ""; } return new NmeaMessage(tags, Arrays.asList(items), checksum); } }
public class class_name { public NmeaMessage parse(String line) { LinkedHashMap<String, String> tags = Maps.newLinkedHashMap(); String remaining; if (line.startsWith("\\")) { int tagFinish = line.lastIndexOf('\\', line.length() - 1); if (tagFinish == -1) throw new NmeaMessageParseException( "no matching \\ symbol to finish tag block: " + line); if (tagFinish == 0) throw new NmeaMessageParseException("tag block is empty or not terminated"); tags = extractTags(line.substring(1, tagFinish)); // depends on control dependency: [if], data = [none] remaining = line.substring(tagFinish + 1); // depends on control dependency: [if], data = [none] } else remaining = line; String[] items; String checksum; if (remaining.length() > 0) { if (!remaining.contains("*")) throw new NmeaMessageParseException("checksum delimiter * not found"); items = getNmeaItems(remaining); // depends on control dependency: [if], data = [none] // TODO validate message using checksum checksum = line.substring(line.indexOf('*') + 1); // depends on control dependency: [if], data = [none] } else { items = new String[] {}; // depends on control dependency: [if], data = [none] // TODO decide what value to put here checksum = ""; // depends on control dependency: [if], data = [none] } return new NmeaMessage(tags, Arrays.asList(items), checksum); } }
public class class_name { @Programmatic public void refreshServices() { final Collection<ObjectSpecification> specifications = Lists.newArrayList(specificationLoader.allSpecifications()); for (final ObjectSpecification objectSpec : specifications) { if(objectSpec.isService()){ specificationLoader.invalidateCache(objectSpec.getCorrespondingClass()); } } } }
public class class_name { @Programmatic public void refreshServices() { final Collection<ObjectSpecification> specifications = Lists.newArrayList(specificationLoader.allSpecifications()); for (final ObjectSpecification objectSpec : specifications) { if(objectSpec.isService()){ specificationLoader.invalidateCache(objectSpec.getCorrespondingClass()); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public String ipToString(long ip) { // if ip is bigger than 255.255.255.255 or smaller than 0.0.0.0 if (ip > 4294967295l || ip < 0) { throw new IllegalArgumentException("invalid ip"); } val ipAddress = new StringBuilder(); for (int i = 3; i >= 0; i--) { int shift = i * 8; ipAddress.append((ip & (0xff << shift)) >> shift); if (i > 0) { ipAddress.append("."); } } return ipAddress.toString(); } }
public class class_name { public String ipToString(long ip) { // if ip is bigger than 255.255.255.255 or smaller than 0.0.0.0 if (ip > 4294967295l || ip < 0) { throw new IllegalArgumentException("invalid ip"); } val ipAddress = new StringBuilder(); for (int i = 3; i >= 0; i--) { int shift = i * 8; ipAddress.append((ip & (0xff << shift)) >> shift); // depends on control dependency: [for], data = [none] if (i > 0) { ipAddress.append("."); // depends on control dependency: [if], data = [none] } } return ipAddress.toString(); } }
public class class_name { @SuppressWarnings("unchecked") public void onLoad(ItemGroup<? extends Item> parent, String name) throws IOException { RunMap<RunT> _builds = createBuildRunMap(); int max = _builds.maxNumberOnDisk(); int next = asJob().getNextBuildNumber(); if (next <= max) { LOGGER.log(Level.WARNING, "JENKINS-27530: improper nextBuildNumber {0} detected in {1} with highest build number {2}; adjusting", new Object[] {next, asJob(), max}); asJob().updateNextBuildNumber(max + 1); } RunMap<RunT> currentBuilds = this.builds; if (parent != null) { // are we overwriting what currently exist? // this is primarily when Jenkins is getting reloaded Item current; try { current = parent.getItem(name); } catch (RuntimeException x) { LOGGER.log(Level.WARNING, "failed to look up " + name + " in " + parent, x); current = null; } if (current != null && current.getClass() == asJob().getClass()) { currentBuilds = (RunMap<RunT>) ((LazyLoadingJob) current).getLazyBuildMixIn().builds; } } if (currentBuilds != null) { // if we are reloading, keep all those that are still building intact for (RunT r : currentBuilds.getLoadedBuilds().values()) { if (r.isBuilding()) { // Do not use RunMap.put(Run): _builds.put(r.getNumber(), r); LOGGER.log(Level.FINE, "keeping reloaded {0}", r); } } } this.builds = _builds; } }
public class class_name { @SuppressWarnings("unchecked") public void onLoad(ItemGroup<? extends Item> parent, String name) throws IOException { RunMap<RunT> _builds = createBuildRunMap(); int max = _builds.maxNumberOnDisk(); int next = asJob().getNextBuildNumber(); if (next <= max) { LOGGER.log(Level.WARNING, "JENKINS-27530: improper nextBuildNumber {0} detected in {1} with highest build number {2}; adjusting", new Object[] {next, asJob(), max}); asJob().updateNextBuildNumber(max + 1); } RunMap<RunT> currentBuilds = this.builds; if (parent != null) { // are we overwriting what currently exist? // this is primarily when Jenkins is getting reloaded Item current; try { current = parent.getItem(name); // depends on control dependency: [try], data = [none] } catch (RuntimeException x) { LOGGER.log(Level.WARNING, "failed to look up " + name + " in " + parent, x); current = null; } // depends on control dependency: [catch], data = [none] if (current != null && current.getClass() == asJob().getClass()) { currentBuilds = (RunMap<RunT>) ((LazyLoadingJob) current).getLazyBuildMixIn().builds; // depends on control dependency: [if], data = [none] } } if (currentBuilds != null) { // if we are reloading, keep all those that are still building intact for (RunT r : currentBuilds.getLoadedBuilds().values()) { if (r.isBuilding()) { // Do not use RunMap.put(Run): _builds.put(r.getNumber(), r); // depends on control dependency: [if], data = [none] LOGGER.log(Level.FINE, "keeping reloaded {0}", r); // depends on control dependency: [if], data = [none] } } } this.builds = _builds; } }
public class class_name { public Set<AbstractPlugin> getPlugins(final String viewName) { if (pluginCache.isEmpty()) { LOGGER.info("Plugin cache miss, reload"); load(); } final Set<AbstractPlugin> ret = pluginCache.get(viewName); if (null == ret) { return Collections.emptySet(); } return ret; } }
public class class_name { public Set<AbstractPlugin> getPlugins(final String viewName) { if (pluginCache.isEmpty()) { LOGGER.info("Plugin cache miss, reload"); // depends on control dependency: [if], data = [none] load(); // depends on control dependency: [if], data = [none] } final Set<AbstractPlugin> ret = pluginCache.get(viewName); if (null == ret) { return Collections.emptySet(); // depends on control dependency: [if], data = [none] } return ret; } }
public class class_name { void rfftf(final double a[], final int offa) { if (n == 1) return; int l1, l2, na, kh, nf, ip, iw, ido, idl1; final int twon = 2 * n; nf = (int) wtable_r[1 + twon]; na = 1; l2 = n; iw = twon - 1; for (int k1 = 1; k1 <= nf; ++k1) { kh = nf - k1; ip = (int) wtable_r[kh + 2 + twon]; l1 = l2 / ip; ido = n / l2; idl1 = ido * l1; iw -= (ip - 1) * ido; na = 1 - na; switch (ip) { case 2: if (na == 0) { radf2(ido, l1, a, offa, ch, 0, iw); } else { radf2(ido, l1, ch, 0, a, offa, iw); } break; case 3: if (na == 0) { radf3(ido, l1, a, offa, ch, 0, iw); } else { radf3(ido, l1, ch, 0, a, offa, iw); } break; case 4: if (na == 0) { radf4(ido, l1, a, offa, ch, 0, iw); } else { radf4(ido, l1, ch, 0, a, offa, iw); } break; case 5: if (na == 0) { radf5(ido, l1, a, offa, ch, 0, iw); } else { radf5(ido, l1, ch, 0, a, offa, iw); } break; default: if (ido == 1) na = 1 - na; if (na == 0) { radfg(ido, ip, l1, idl1, a, offa, ch, 0, iw); na = 1; } else { radfg(ido, ip, l1, idl1, ch, 0, a, offa, iw); na = 0; } break; } l2 = l1; } if (na == 1) return; System.arraycopy(ch, 0, a, offa, n); } }
public class class_name {
    /**
     * Forward transform of a real sequence (FFTPACK-style rfftf), in place on
     * a[offa .. offa+n). Mixed-radix passes (2/3/4/5/general) ping-pong data
     * between {@code a} and the work array {@code ch}; {@code na} tracks which
     * buffer holds the partial result. Factorization and twiddle factors come
     * from {@code wtable_r} — assumes it was initialized for this n (TODO confirm).
     *
     * @param a    array holding the real input sequence; overwritten with the transform
     * @param offa starting offset of the sequence within {@code a}
     */
    void rfftf(final double a[], final int offa) {
        if (n == 1)
            return;
        int l1, l2, na, kh, nf, ip, iw, ido, idl1;
        final int twon = 2 * n;
        nf = (int) wtable_r[1 + twon]; // number of factors in the factorization of n
        na = 1;
        l2 = n;
        iw = twon - 1;
        for (int k1 = 1; k1 <= nf; ++k1) {
            kh = nf - k1; // depends on control dependency: [for], data = [k1]
            ip = (int) wtable_r[kh + 2 + twon]; // depends on control dependency: [for], data = [none]
            l1 = l2 / ip; // depends on control dependency: [for], data = [none]
            ido = n / l2; // depends on control dependency: [for], data = [none]
            idl1 = ido * l1; // depends on control dependency: [for], data = [none]
            iw -= (ip - 1) * ido; // depends on control dependency: [for], data = [none]
            na = 1 - na; // depends on control dependency: [for], data = [none]
            switch (ip) {
                case 2:
                    if (na == 0) {
                        radf2(ido, l1, a, offa, ch, 0, iw); // depends on control dependency: [if], data = [none]
                    } else {
                        radf2(ido, l1, ch, 0, a, offa, iw); // depends on control dependency: [if], data = [none]
                    }
                    break;
                case 3:
                    if (na == 0) {
                        radf3(ido, l1, a, offa, ch, 0, iw); // depends on control dependency: [if], data = [none]
                    } else {
                        radf3(ido, l1, ch, 0, a, offa, iw); // depends on control dependency: [if], data = [none]
                    }
                    break;
                case 4:
                    if (na == 0) {
                        radf4(ido, l1, a, offa, ch, 0, iw); // depends on control dependency: [if], data = [none]
                    } else {
                        radf4(ido, l1, ch, 0, a, offa, iw); // depends on control dependency: [if], data = [none]
                    }
                    break;
                case 5:
                    if (na == 0) {
                        radf5(ido, l1, a, offa, ch, 0, iw); // depends on control dependency: [if], data = [none]
                    } else {
                        radf5(ido, l1, ch, 0, a, offa, iw); // depends on control dependency: [if], data = [none]
                    }
                    break;
                default:
                    // General radix: radfg flips buffers itself when ido == 1.
                    if (ido == 1)
                        na = 1 - na;
                    if (na == 0) {
                        radfg(ido, ip, l1, idl1, a, offa, ch, 0, iw); // depends on control dependency: [if], data = [none]
                        na = 1; // depends on control dependency: [if], data = [none]
                    } else {
                        radfg(ido, ip, l1, idl1, ch, 0, a, offa, iw); // depends on control dependency: [if], data = [none]
                        na = 0; // depends on control dependency: [if], data = [none]
                    }
                    break;
            }
            l2 = l1; // depends on control dependency: [for], data = [none]
        }
        if (na == 1)
            return;
        // Result ended up in the work array; copy back into a.
        System.arraycopy(ch, 0, a, offa, n);
    }
}
public class class_name {
    /**
     * Binary-searches the index range [low, high] for the LAST index at which
     * {@code compare(index)} returns 0.
     *
     * @param low  inclusive lower bound of the search range
     * @param high inclusive upper bound of the search range
     * @return the last matching index, or {@code -(insertionPoint + 1)} when no
     *         index matches (same convention as Arrays.binarySearch)
     */
    public int findLast(int low, int high) {
        int lastMatch = -1;
        int lo = low;
        int hi = high;
        while (lo <= hi) {
            final int mid = (lo + hi) >>> 1; // unsigned shift: overflow-safe midpoint
            final int cmp = compare(mid);
            if (cmp > 0) {
                hi = mid - 1;
            } else {
                if (cmp == 0) {
                    lastMatch = mid; // remember the match, but keep scanning rightwards
                }
                lo = mid + 1;
            }
        }
        return lastMatch == -1 ? -(lo + 1) : lastMatch;
    }
}
public class class_name {
    /**
     * Binary-searches [low, high] for the last index at which compare(index)
     * returns 0; returns that index, or -(insertionPoint + 1) when none match.
     *
     * @param low  inclusive lower bound
     * @param high inclusive upper bound
     * @return last matching index, or the negative encoded insertion point
     */
    public int findLast(int low, int high) {
        int ndx = -1;
        while (low <= high) {
            int mid = (low + high) >>> 1; // unsigned shift: overflow-safe midpoint
            int delta = compare(mid);
            if (delta > 0) {
                high = mid - 1; // depends on control dependency: [if], data = [none]
            } else {
                if (delta == 0) {
                    ndx = mid; // depends on control dependency: [if], data = [none]
                }
                low = mid + 1; // depends on control dependency: [if], data = [none]
            }
        }
        if (ndx == -1) {
            return -(low + 1); // depends on control dependency: [if], data = [none]
        }
        return ndx;
    }
}
public class class_name {
    /**
     * Lazily resolves and caches the origin weight for this provider.
     * When no provider info is available, the configured default weight is
     * used; otherwise the provider's static weight attribute is parsed, again
     * falling back to the configured default when it cannot be parsed.
     *
     * @return the resolved origin weight
     */
    public int getOriginWeight() {
        if (originWeight == null) {
            // First access: resolve once and cache in the field.
            originWeight = providerInfo == null
                    ? RpcConfigs.getIntValue(RpcOptions.PROVIDER_WEIGHT)
                    : CommonUtils.parseInt(providerInfo.getStaticAttr(ProviderInfoAttrs.ATTR_WEIGHT),
                            RpcConfigs.getIntValue(RpcOptions.PROVIDER_WEIGHT));
        }
        return originWeight;
    }
}
public class class_name {
    /**
     * Lazily resolves and caches the origin weight: the configured default
     * when no provider info exists, otherwise the provider's static weight
     * attribute with the configured default as parse fallback.
     *
     * @return the resolved origin weight
     */
    public int getOriginWeight() {
        if (originWeight == null) {
            if (providerInfo == null) {
                originWeight = RpcConfigs.getIntValue(RpcOptions.PROVIDER_WEIGHT); // depends on control dependency: [if], data = [none]
            } else {
                originWeight = CommonUtils.parseInt(providerInfo.getStaticAttr(ProviderInfoAttrs.ATTR_WEIGHT),
                        RpcConfigs.getIntValue(RpcOptions.PROVIDER_WEIGHT)); // depends on control dependency: [if], data = [(providerInfo]
            }
        }
        return originWeight;
    }
}
public class class_name {
    /**
     * Runs the Android zipalign tool over the project's APK.
     * Skips entirely for unsupported packaging types or when configured to
     * skip; otherwise invokes zipalign with forced overwrite and 4-byte
     * alignment. When input and output are the same file, aligns into a
     * temporary file and renames it over the input; otherwise attaches the
     * aligned APK to the project under the configured classifier.
     *
     * @throws MojoExecutionException when the zipalign command execution fails
     * @throws MojoFailureException   declared by the mojo contract
     */
    public void execute() throws MojoExecutionException, MojoFailureException {
        // If we're not on a supported packaging with just skip (Issue 87)
        // http://code.google.com/p/maven-android-plugin/issues/detail?id=87
        if ( ! SUPPORTED_PACKAGING_TYPES.contains( project.getPackaging() ) )
        {
            getLog().info( "Skipping zipalign on " + project.getPackaging() );
            return;
        }
        ConfigHandler configHandler = new ConfigHandler( this, this.session, this.execution );
        configHandler.parseConfiguration();
        // Normalize path separators for the current platform.
        parsedInputApk = FilenameUtils.separatorsToSystem( parsedInputApk );
        parsedOutputApk = FilenameUtils.separatorsToSystem( parsedOutputApk );
        getLog().debug( "skip:" + parsedSkip );
        getLog().debug( "verbose:" + parsedVerbose );
        getLog().debug( "inputApk:" + parsedInputApk );
        getLog().debug( "outputApk:" + parsedOutputApk );
        getLog().debug( "classifier:" + parsedClassifier );
        if ( parsedSkip )
        {
            getLog().info( "Skipping zipalign" );
        }
        else
        {
            boolean outputToSameFile = sameOutputAsInput();
            CommandExecutor executor = CommandExecutor.Factory.createDefaultCommmandExecutor();
            executor.setLogger( this.getLog() );
            String command = getAndroidSdk().getZipalignPath();
            List<String> parameters = new ArrayList<String>();
            if ( parsedVerbose )
            {
                parameters.add( "-v" );
            }
            parameters.add( "-f" ); // force overwriting existing output file
            parameters.add( "4" ); // byte alignment has to be 4!
            parameters.add( parsedInputApk );
            // Aligning in place requires a temporary output, renamed back afterwards.
            String outputApk = outputToSameFile ? getTemporaryOutputApkFilename() : parsedOutputApk;
            parameters.add( outputApk );
            try
            {
                getLog().info( "Running command: " + command );
                getLog().info( "with parameters: " + parameters );
                executor.setCaptureStdOut( true );
                executor.executeCommand( command, parameters );
                if ( FileUtils.fileExists( outputApk ) )
                {
                    if ( outputToSameFile )
                    {
                        // No needs to attach zipaligned apk to artifacts
                        try
                        {
                            FileUtils.rename( new File( outputApk ), new File( parsedInputApk ) );
                        }
                        catch ( IOException e )
                        {
                            // Rename failure is logged but not fatal to the build.
                            getLog().error( "Failed to replace original apk with aligned "
                                    + getFullPathWithName( outputApk ), e );
                        }
                    }
                    else
                    {
                        // Attach the resulting artifact (Issue 88)
                        // http://code.google.com/p/maven-android-plugin/issues/detail?id=88
                        projectHelper.attachArtifact( project, APK, parsedClassifier, new File( outputApk ) );
                        getLog().info( "Attach " + getFullPathWithName( outputApk ) + " as '" + parsedClassifier
                                + "' to the project" );
                    }
                }
                else
                {
                    getLog().error( "Cannot attach " + getFullPathWithName( outputApk ) + " to the project"
                            + " - The file does not exist" );
                }
            }
            catch ( ExecutionException e )
            {
                throw new MojoExecutionException( "", e );
            }
        }
    }
}
public class class_name {
    /**
     * Runs the Android zipalign tool over the project's APK.
     * Skips for unsupported packaging or when configured to skip; otherwise
     * aligns with forced overwrite and 4-byte alignment. In-place alignment
     * uses a temporary file renamed over the input; otherwise the aligned APK
     * is attached to the project under the configured classifier.
     *
     * @throws MojoExecutionException when the zipalign command execution fails
     * @throws MojoFailureException   declared by the mojo contract
     */
    public void execute() throws MojoExecutionException, MojoFailureException {
        // If we're not on a supported packaging with just skip (Issue 87)
        // http://code.google.com/p/maven-android-plugin/issues/detail?id=87
        if ( ! SUPPORTED_PACKAGING_TYPES.contains( project.getPackaging() ) )
        {
            getLog().info( "Skipping zipalign on " + project.getPackaging() );
            return;
        }
        ConfigHandler configHandler = new ConfigHandler( this, this.session, this.execution );
        configHandler.parseConfiguration();
        // Normalize path separators for the current platform.
        parsedInputApk = FilenameUtils.separatorsToSystem( parsedInputApk );
        parsedOutputApk = FilenameUtils.separatorsToSystem( parsedOutputApk );
        getLog().debug( "skip:" + parsedSkip );
        getLog().debug( "verbose:" + parsedVerbose );
        getLog().debug( "inputApk:" + parsedInputApk );
        getLog().debug( "outputApk:" + parsedOutputApk );
        getLog().debug( "classifier:" + parsedClassifier );
        if ( parsedSkip )
        {
            getLog().info( "Skipping zipalign" );
        }
        else
        {
            boolean outputToSameFile = sameOutputAsInput();
            CommandExecutor executor = CommandExecutor.Factory.createDefaultCommmandExecutor();
            executor.setLogger( this.getLog() );
            String command = getAndroidSdk().getZipalignPath();
            List<String> parameters = new ArrayList<String>();
            if ( parsedVerbose )
            {
                parameters.add( "-v" ); // depends on control dependency: [if], data = [none]
            }
            parameters.add( "-f" ); // force overwriting existing output file
            parameters.add( "4" ); // byte alignment has to be 4!
            parameters.add( parsedInputApk );
            // Aligning in place requires a temporary output, renamed back afterwards.
            String outputApk = outputToSameFile ? getTemporaryOutputApkFilename() : parsedOutputApk;
            parameters.add( outputApk );
            try
            {
                getLog().info( "Running command: " + command );
                getLog().info( "with parameters: " + parameters );
                executor.setCaptureStdOut( true );
                executor.executeCommand( command, parameters );
                if ( FileUtils.fileExists( outputApk ) )
                {
                    if ( outputToSameFile )
                    {
                        // No needs to attach zipaligned apk to artifacts
                        try
                        {
                            FileUtils.rename( new File( outputApk ), new File( parsedInputApk ) );
                        }
                        catch ( IOException e )
                        {
                            // Rename failure is logged but not fatal to the build.
                            getLog().error( "Failed to replace original apk with aligned "
                                    + getFullPathWithName( outputApk ), e );
                        }
                    }
                    else
                    {
                        // Attach the resulting artifact (Issue 88)
                        // http://code.google.com/p/maven-android-plugin/issues/detail?id=88
                        projectHelper.attachArtifact( project, APK, parsedClassifier, new File( outputApk ) );
                        getLog().info( "Attach " + getFullPathWithName( outputApk ) + " as '" + parsedClassifier
                                + "' to the project" );
                    }
                }
                else
                {
                    getLog().error( "Cannot attach " + getFullPathWithName( outputApk ) + " to the project"
                            + " - The file does not exist" );
                }
            }
            catch ( ExecutionException e )
            {
                throw new MojoExecutionException( "", e );
            }
        }
    }
}
public class class_name {
    /**
     * Handles a security violation raised before servlet invocation and
     * returns the associated web security context object.
     * For 403 (FORBIDDEN): a user-defined error page wins; otherwise the
     * security collaborator renders the response. For 401 (UNAUTHORIZED): the
     * collaborator runs first (it adds the required challenge headers), then a
     * user-defined error page is sent if one exists. Any other status code is
     * delegated entirely to the collaborator.
     *
     * @param sve              the security violation that was raised
     * @param requestProcessor used for error-report construction when non-null
     * @param request          the current request
     * @param response         the current response
     * @param dispatchContext  dispatcher context (unused here)
     * @param context          the web app, used for error-page lookup/dispatch
     * @param name             fallback identifier for error-report construction
     * @return the web security context taken from the violation
     * @throws ServletErrorReport when the collaborator's handling itself fails
     */
    public Object processSecurityPreInvokeException(SecurityViolationException sve, RequestProcessor requestProcessor, HttpServletRequest request,
            HttpServletResponse response, WebAppDispatcherContext dispatchContext, WebApp context, String name) throws ServletErrorReport {
        Object secObject = null;
        secObject = sve.getWebSecurityContext();
        int sc = sve.getStatusCode();
        Throwable cause = sve.getCause();
        if (sc == HttpServletResponse.SC_FORBIDDEN) {
            // If the user has defined a custom error page for
            // SC_FORBIDDEN (HTTP status code 403) then send
            // it to the client ...
            if (context.isErrorPageDefined(sc) == true) {
                WebAppErrorReport wErrorReport = new WebAppErrorReport(cause);
                wErrorReport.setErrorCode(sc);
                context.sendError(request, response, wErrorReport);
            } else {
                // ... otherwise, use the one provided by the
                // SecurityCollaborator
                try {
                    securityCollaborator.handleException(request, response, cause);
                } catch (Exception ex) {
                    if (requestProcessor != null) {
                        throw WebAppErrorReport.constructErrorReport(ex, requestProcessor);
                    } else {
                        throw WebAppErrorReport.constructErrorReport(ex, name);
                    }
                }
                // reply.sendError(wResp);
            } // end if-else
        } else if (sc == HttpServletResponse.SC_UNAUTHORIZED) {
            // Invoking handleException will add the necessary headers
            // to the response ...
            try {
                securityCollaborator.handleException(request, response, cause);
            } catch (Exception ex) {
                if (requestProcessor != null) {
                    throw WebAppErrorReport.constructErrorReport(ex, requestProcessor);
                } else {
                    throw WebAppErrorReport.constructErrorReport(ex, name);
                }
            }
            // ... if the user has defined a custom error page for
            // SC_UNAUTHORIZED (HTTP status code 401) then
            // send it to the client
            if (context.isErrorPageDefined(sc) == true) {
                WebAppErrorReport wErrorReport = new WebAppErrorReport(cause);
                wErrorReport.setErrorCode(sc);
                context.sendError(request, response, wErrorReport);
            } else {
                // reply.sendError(wResp); comment-out 140967
            }
        } else {
            // Unexpected status code ... not SC_UNAUTHORIZED or SC_FORBIDDEN
            try {
                securityCollaborator.handleException(request, response, cause);
            } catch (Exception ex) {
                if (requestProcessor != null) {
                    throw WebAppErrorReport.constructErrorReport(ex, requestProcessor);
                } else {
                    throw WebAppErrorReport.constructErrorReport(ex, name);
                }
            }
        }
        return secObject;
    }
}
public class class_name {
    /**
     * Handles a security violation raised before servlet invocation and
     * returns the associated web security context object. For 403 a
     * user-defined error page wins, else the security collaborator renders the
     * response; for 401 the collaborator runs first (adds challenge headers),
     * then any user-defined error page is sent; any other status is delegated
     * entirely to the collaborator.
     *
     * @param sve              the security violation that was raised
     * @param requestProcessor used for error-report construction when non-null
     * @param request          the current request
     * @param response         the current response
     * @param dispatchContext  dispatcher context (unused here)
     * @param context          the web app, used for error-page lookup/dispatch
     * @param name             fallback identifier for error-report construction
     * @return the web security context taken from the violation
     * @throws ServletErrorReport when the collaborator's handling itself fails
     */
    public Object processSecurityPreInvokeException(SecurityViolationException sve, RequestProcessor requestProcessor, HttpServletRequest request,
            HttpServletResponse response, WebAppDispatcherContext dispatchContext, WebApp context, String name) throws ServletErrorReport {
        Object secObject = null;
        secObject = sve.getWebSecurityContext();
        int sc = sve.getStatusCode();
        Throwable cause = sve.getCause();
        if (sc == HttpServletResponse.SC_FORBIDDEN) {
            // If the user has defined a custom error page for
            // SC_FORBIDDEN (HTTP status code 403) then send
            // it to the client ...
            if (context.isErrorPageDefined(sc) == true) {
                WebAppErrorReport wErrorReport = new WebAppErrorReport(cause);
                wErrorReport.setErrorCode(sc); // depends on control dependency: [if], data = [none]
                context.sendError(request, response, wErrorReport); // depends on control dependency: [if], data = [none]
            } else {
                // ... otherwise, use the one provided by the
                // SecurityCollaborator
                try {
                    securityCollaborator.handleException(request, response, cause); // depends on control dependency: [try], data = [none]
                } catch (Exception ex) {
                    if (requestProcessor != null) {
                        throw WebAppErrorReport.constructErrorReport(ex, requestProcessor);
                    } else {
                        throw WebAppErrorReport.constructErrorReport(ex, name);
                    }
                } // depends on control dependency: [catch], data = [none]
                // reply.sendError(wResp);
            } // end if-else
        } else if (sc == HttpServletResponse.SC_UNAUTHORIZED) {
            // Invoking handleException will add the necessary headers
            // to the response ...
            try {
                securityCollaborator.handleException(request, response, cause);
            } catch (Exception ex) {
                if (requestProcessor != null) {
                    throw WebAppErrorReport.constructErrorReport(ex, requestProcessor);
                } else {
                    throw WebAppErrorReport.constructErrorReport(ex, name);
                }
            }
            // ... if the user has defined a custom error page for
            // SC_UNAUTHORIZED (HTTP status code 401) then
            // send it to the client
            if (context.isErrorPageDefined(sc) == true) {
                WebAppErrorReport wErrorReport = new WebAppErrorReport(cause);
                wErrorReport.setErrorCode(sc);
                context.sendError(request, response, wErrorReport);
            } else {
                // reply.sendError(wResp); comment-out 140967
            }
        } else {
            // Unexpected status code ... not SC_UNAUTHORIZED or SC_FORBIDDEN
            try {
                securityCollaborator.handleException(request, response, cause);
            } catch (Exception ex) {
                if (requestProcessor != null) {
                    throw WebAppErrorReport.constructErrorReport(ex, requestProcessor);
                } else {
                    throw WebAppErrorReport.constructErrorReport(ex, name);
                }
            }
        }
        return secObject;
    }
}
public class class_name { public HttpRequestInitializer getHttpRequestInitializer( final ServiceOptions<?, ?> serviceOptions) { Credentials scopedCredentials = serviceOptions.getScopedCredentials(); final HttpRequestInitializer delegate = scopedCredentials != null && scopedCredentials != NoCredentials.getInstance() ? new HttpCredentialsAdapter(scopedCredentials) : null; HeaderProvider internalHeaderProvider = getInternalHeaderProviderBuilder(serviceOptions).build(); final HeaderProvider headerProvider = serviceOptions.getMergedHeaderProvider(internalHeaderProvider); return new HttpRequestInitializer() { @Override public void initialize(HttpRequest httpRequest) throws IOException { if (delegate != null) { delegate.initialize(httpRequest); } if (connectTimeout >= 0) { httpRequest.setConnectTimeout(connectTimeout); } if (readTimeout >= 0) { httpRequest.setReadTimeout(readTimeout); } HttpHeadersUtils.setHeaders(httpRequest.getHeaders(), headerProvider.getHeaders()); } }; } }
public class class_name {
    /**
     * Builds an HttpRequestInitializer that applies credentials (when real
     * ones are configured), the configured non-negative connect/read
     * timeouts, and the merged internal + user-supplied headers to every
     * outgoing request.
     *
     * @param serviceOptions options supplying credentials and header providers
     * @return an initializer to be attached to an HTTP transport
     */
    public HttpRequestInitializer getHttpRequestInitializer(final ServiceOptions<?, ?> serviceOptions) {
        Credentials scopedCredentials = serviceOptions.getScopedCredentials();
        final HttpRequestInitializer delegate = scopedCredentials != null && scopedCredentials != NoCredentials.getInstance()
                ? new HttpCredentialsAdapter(scopedCredentials) : null;
        HeaderProvider internalHeaderProvider = getInternalHeaderProviderBuilder(serviceOptions).build();
        final HeaderProvider headerProvider = serviceOptions.getMergedHeaderProvider(internalHeaderProvider);
        return new HttpRequestInitializer() {
            @Override
            public void initialize(HttpRequest httpRequest) throws IOException {
                if (delegate != null) {
                    delegate.initialize(httpRequest);
                }
                if (connectTimeout >= 0) {
                    httpRequest.setConnectTimeout(connectTimeout); // depends on control dependency: [if], data = [(connectTimeout]
                }
                if (readTimeout >= 0) {
                    httpRequest.setReadTimeout(readTimeout); // depends on control dependency: [if], data = [(readTimeout]
                }
                HttpHeadersUtils.setHeaders(httpRequest.getHeaders(), headerProvider.getHeaders());
            }
        };
    }
}
public class class_name {
    /**
     * Persists the current policy model through the adapter and notifies the
     * watcher, if one is registered. Refuses to save while a filter is active,
     * since that would persist only the filtered subset.
     */
    public void savePolicy() {
        if (isFiltered()) {
            throw new Error("cannot save a filtered policy");
        }
        adapter.savePolicy(model);
        if (watcher == null) {
            return; // nobody to notify
        }
        watcher.update();
    }
}
public class class_name {
    /**
     * Persists the current policy model through the adapter and notifies the
     * watcher, if one is registered. Refuses to save while a filter is active,
     * since that would persist only the filtered subset.
     */
    public void savePolicy() {
        if (isFiltered()) {
            throw new Error("cannot save a filtered policy");
        }
        adapter.savePolicy(model);
        if (watcher != null) {
            watcher.update(); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Returns the complete argument list for this call: the syntactic
     * arguments, prefixed by the implicit first argument when one is present
     * (presumably the receiver expression — confirm against the caller).
     *
     * @return the combined argument list, or just the syntactic arguments
     */
    @Override
    protected List<XExpression> getArguments() {
        final List<XExpression> explicitArgs = getSyntacticArguments();
        final XExpression implicitFirst = getFirstArgument();
        return implicitFirst == null
                ? explicitArgs
                : createArgumentList(implicitFirst, explicitArgs);
    }
}
public class class_name {
    /**
     * Returns the complete argument list for this call: the syntactic
     * arguments, prefixed by the implicit first argument when one is present
     * (presumably the receiver expression — confirm against the caller).
     *
     * @return the combined argument list, or just the syntactic arguments
     */
    @Override
    protected List<XExpression> getArguments() {
        List<XExpression> syntacticArguments = getSyntacticArguments();
        XExpression firstArgument = getFirstArgument();
        if (firstArgument != null) {
            return createArgumentList(firstArgument, syntacticArguments); // depends on control dependency: [if], data = [(firstArgument]
        }
        return syntacticArguments;
    }
}
public class class_name {
    /**
     * Wraps a region of the given int array as a Slice without copying.
     * A zero-length request returns the shared EMPTY_SLICE singleton.
     *
     * @param array  the backing array (shared, not copied)
     * @param offset starting position of the region within the array
     * @param length number of elements in the region
     * @return a slice over the requested region, or EMPTY_SLICE when length is 0
     */
    public static Slice wrappedIntArray(int[] array, int offset, int length) {
        return length == 0 ? EMPTY_SLICE : new Slice(array, offset, length);
    }
}
public class class_name {
    /**
     * Wraps a region of the given int array as a Slice without copying;
     * a zero-length request returns the shared EMPTY_SLICE singleton.
     *
     * @param array  the backing array (shared, not copied)
     * @param offset starting position of the region within the array
     * @param length number of elements in the region
     * @return a slice over the requested region, or EMPTY_SLICE when length is 0
     */
    public static Slice wrappedIntArray(int[] array, int offset, int length) {
        if (length == 0) {
            return EMPTY_SLICE; // depends on control dependency: [if], data = [none]
        }
        return new Slice(array, offset, length);
    }
}
public class class_name {
    /**
     * Blocks until the number of open files is back within the configured
     * limit. Exactly one thread wins the CAS on {@code openLatch} and evicts
     * buffers (closing evicted open files) until under the limit; losing
     * threads wait on the winner's latch and then re-check the count.
     *
     * NOTE(review): the winner calls countDown() before clearing openLatch —
     * assumes openLatch is an atomic reference whose set(null) re-opens the
     * CAS for the next over-limit episode; confirm against the field's type.
     *
     * @throws InterruptedException if interrupted while awaiting another thread's eviction
     */
    private void checkOpenFilesLimit() throws InterruptedException {
        // If an eviction is already in progress, wait for it to finish first.
        CountDownLatch ol = openLatch.get();
        if (ol != null)
            ol.await();
        while (openFiles.get() > openLimit) {
            final CountDownLatch latch = new CountDownLatch(1);
            //make other threads to wait till we evict entries and close evicted open files
            if (openLatch.compareAndSet(null, latch)) {
                // This thread won the CAS: evict until under the limit.
                while (openFiles.get() > openLimit) {
                    emptyBuffers();
                }
                latch.countDown();
                openLatch.set(null);
            } else {
                // Another thread is evicting; wait on its latch, then the outer
                // loop re-checks the open-file count.
                ol = openLatch.get();
                if (ol != null)
                    ol.await();
            }
        }
    }
}
public class class_name {
    /**
     * Blocks until the number of open files is back within the configured
     * limit. One thread wins the CAS on {@code openLatch} and evicts buffers
     * until under the limit; losing threads wait on that latch and re-check.
     *
     * NOTE(review): assumes openLatch is an atomic reference — confirm.
     *
     * @throws InterruptedException if interrupted while awaiting another thread's eviction
     */
    private void checkOpenFilesLimit() throws InterruptedException {
        CountDownLatch ol = openLatch.get();
        if (ol != null)
            ol.await();
        while (openFiles.get() > openLimit) {
            final CountDownLatch latch = new CountDownLatch(1);
            //make other threads to wait till we evict entries and close evicted open files
            if (openLatch.compareAndSet(null, latch)) {
                while (openFiles.get() > openLimit) {
                    emptyBuffers(); // depends on control dependency: [while], data = [none]
                }
                latch.countDown(); // depends on control dependency: [if], data = [none]
                openLatch.set(null); // depends on control dependency: [if], data = [none]
            } else {
                ol = openLatch.get(); // depends on control dependency: [if], data = [none]
                if (ol != null)
                    ol.await();
            }
        }
    }
}
public class class_name { private void addSlab(int minimumSize) { int nextSlabSize; if (bytesUsed == 0) { nextSlabSize = initialSlabSize; } else if (bytesUsed > maxCapacityHint / 5) { // to avoid an overhead of up to twice the needed size, we get linear when approaching target page size nextSlabSize = maxCapacityHint / 5; } else { // double the size every time nextSlabSize = bytesUsed; } if (nextSlabSize < minimumSize) { LOG.debug("slab size {} too small for value of size {}. Bumping up slab size", nextSlabSize, minimumSize); nextSlabSize = minimumSize; } LOG.debug("used {} slabs, adding new slab of size {}", slabs.size(), nextSlabSize); this.currentSlab = allocator.allocate(nextSlabSize); this.slabs.add(currentSlab); this.bytesAllocated += nextSlabSize; this.currentSlabIndex = 0; } }
public class class_name {
    /**
     * Allocates a new slab and makes it the current write target. Sizing:
     * first slab uses the initial size; afterwards doubling (next == bytes
     * used so far) until one fifth of the capacity hint, then linear growth;
     * always bumped up to fit the pending value.
     *
     * @param minimumSize smallest slab size that can hold the value about to be written
     */
    private void addSlab(int minimumSize) {
        int nextSlabSize;
        if (bytesUsed == 0) {
            nextSlabSize = initialSlabSize; // depends on control dependency: [if], data = [none]
        } else if (bytesUsed > maxCapacityHint / 5) {
            // to avoid an overhead of up to twice the needed size, we get linear when approaching target page size
            nextSlabSize = maxCapacityHint / 5; // depends on control dependency: [if], data = [none]
        } else {
            // double the size every time
            nextSlabSize = bytesUsed; // depends on control dependency: [if], data = [none]
        }
        if (nextSlabSize < minimumSize) {
            LOG.debug("slab size {} too small for value of size {}. Bumping up slab size", nextSlabSize, minimumSize); // depends on control dependency: [if], data = [minimumSize)]
            nextSlabSize = minimumSize; // depends on control dependency: [if], data = [none]
        }
        LOG.debug("used {} slabs, adding new slab of size {}", slabs.size(), nextSlabSize);
        this.currentSlab = allocator.allocate(nextSlabSize);
        this.slabs.add(currentSlab);
        this.bytesAllocated += nextSlabSize;
        this.currentSlabIndex = 0;
    }
}
public class class_name {
    /**
     * Emits the Java source for one schema property declaration into the given
     * source file: a {@code public static final Property<...>} field built as
     * a single fluent constructor-plus-modifier chain. Which modifier calls
     * (.contentType, .defaultValue, .unique(), …) are appended depends on the
     * corresponding attributes of {@code source}; read/write functions are
     * only emitted when no source UUID is set, and {@code .dynamic()} is
     * always appended. String attributes are Java-escaped before quoting.
     *
     * @param src the source file receiving the generated property line
     */
    protected void getPropertySource(final SourceFile src) {
        final String sourceUuid = source.getUuid();
        // Field header: public static final Property<ValueType> nameProperty = new PropertyType("name"
        final SourceLine line = src.line(source, "public static final");
        line.append(" Property<");
        line.append(getValueType());
        line.append("> ");
        line.append(SchemaHelper.cleanPropertyName(source.getPropertyName()));
        line.append("Property");
        line.append(" = new ");
        line.append(getPropertyType());
        line.append("(");
        line.quoted(source.getPropertyName());
        if (StringUtils.isNotBlank(source.getDbName())) {
            line.append(", ");
            line.quoted(source.getDbName());
        }
        if (getPropertyParameters() != null) {
            line.append(getPropertyParameters());
        }
        line.append(")");
        // Optional fluent modifiers, appended only when the source attribute is set.
        if (StringUtils.isNotBlank(source.getContentType())) {
            line.append(".contentType(").quoted(source.getContentType()).append(")");
        }
        if (StringUtils.isNotBlank(source.getDefaultValue())) {
            line.append(".defaultValue(").append(getDefaultValue()).append(")");
        }
        if (StringUtils.isNotBlank(source.getFormat())) {
            line.append(".format(").quoted(StringEscapeUtils.escapeJava(source.getFormat())).append(")");
        }
        if (StringUtils.isNotBlank(sourceUuid)) {
            line.append(".setSourceUuid(").quoted(sourceUuid).append(")");
        } else {
            // Read/write functions are only emitted when no source UUID is present.
            if (StringUtils.isNotBlank(source.getReadFunction())) {
                line.append(".readFunction(").quoted(StringEscapeUtils.escapeJava(source.getReadFunction())).append(")");
            }
            if (StringUtils.isNotBlank(source.getWriteFunction())) {
                line.append(".writeFunction(").quoted(StringEscapeUtils.escapeJava(source.getWriteFunction())).append(")");
            }
        }
        if (StringUtils.isNotBlank(source.getTypeHint())) {
            line.append(".typeHint(").quoted(StringEscapeUtils.escapeJava(source.getTypeHint())).append(")");
        }
        if (source.isUnique()) {
            line.append(".unique()");
        }
        if (source.isCompound()) {
            line.append(".compound()");
        }
        if (source.isNotNull()) {
            line.append(".notNull()");
        }
        if (source.isCachingEnabled()) {
            line.append(".cachingEnabled(true)");
        }
        if (source.isIndexed()) {
            // Properties with a default value use the empty-aware index variant.
            if (StringUtils.isNotBlank(source.getDefaultValue())) {
                line.append(".indexedWhenEmpty()");
            } else {
                line.append(".indexed()");
            }
        }
        if (source.isReadOnly()) {
            line.append(".readOnly()");
        }
        final String[] transformators = source.getTransformators();
        if (transformators != null && transformators.length > 0) {
            // Emitted as a quoted, comma-separated vararg list.
            line.append(".transformators(");
            line.quoted(StringUtils.join(transformators, "\", \""));
            line.append(")");
        }
        if (source.isPartOfBuiltInSchema()) {
            line.append(".partOfBuiltInSchema()");
        }
        line.append(".dynamic()");
        if (StringUtils.isNotBlank(source.getHint())) {
            line.append(".hint(").quoted(StringEscapeUtils.escapeJava(source.getHint())).append(")");
        }
        if (StringUtils.isNotBlank(source.getCategory())) {
            line.append(".category(").quoted(StringEscapeUtils.escapeJava(source.getCategory())).append(")");
        }
        line.append(";");
    }
}
public class class_name {
    /**
     * Emits the Java source for one schema property declaration into the given
     * source file: a {@code public static final Property<...>} field built as
     * a fluent constructor-plus-modifier chain. Optional modifiers are only
     * appended when the corresponding {@code source} attribute is set;
     * read/write functions are only emitted when no source UUID is present,
     * and {@code .dynamic()} is always appended.
     *
     * @param src the source file receiving the generated property line
     */
    protected void getPropertySource(final SourceFile src) {
        final String sourceUuid = source.getUuid();
        final SourceLine line = src.line(source, "public static final");
        line.append(" Property<");
        line.append(getValueType());
        line.append("> ");
        line.append(SchemaHelper.cleanPropertyName(source.getPropertyName()));
        line.append("Property");
        line.append(" = new ");
        line.append(getPropertyType());
        line.append("(");
        line.quoted(source.getPropertyName());
        if (StringUtils.isNotBlank(source.getDbName())) {
            line.append(", "); // depends on control dependency: [if], data = [none]
            line.quoted(source.getDbName()); // depends on control dependency: [if], data = [none]
        }
        if (getPropertyParameters() != null) {
            line.append(getPropertyParameters()); // depends on control dependency: [if], data = [(getPropertyParameters()]
        }
        line.append(")");
        if (StringUtils.isNotBlank(source.getContentType())) {
            line.append(".contentType(").quoted(source.getContentType()).append(")"); // depends on control dependency: [if], data = [none]
        }
        if (StringUtils.isNotBlank(source.getDefaultValue())) {
            line.append(".defaultValue(").append(getDefaultValue()).append(")"); // depends on control dependency: [if], data = [none]
        }
        if (StringUtils.isNotBlank(source.getFormat())) {
            line.append(".format(").quoted(StringEscapeUtils.escapeJava(source.getFormat())).append(")"); // depends on control dependency: [if], data = [none]
        }
        if (StringUtils.isNotBlank(sourceUuid)) {
            line.append(".setSourceUuid(").quoted(sourceUuid).append(")"); // depends on control dependency: [if], data = [none]
        } else {
            // Read/write functions are only emitted when no source UUID is present.
            if (StringUtils.isNotBlank(source.getReadFunction())) {
                line.append(".readFunction(").quoted(StringEscapeUtils.escapeJava(source.getReadFunction())).append(")"); // depends on control dependency: [if], data = [none]
            }
            if (StringUtils.isNotBlank(source.getWriteFunction())) {
                line.append(".writeFunction(").quoted(StringEscapeUtils.escapeJava(source.getWriteFunction())).append(")"); // depends on control dependency: [if], data = [none]
            }
        }
        if (StringUtils.isNotBlank(source.getTypeHint())) {
            line.append(".typeHint(").quoted(StringEscapeUtils.escapeJava(source.getTypeHint())).append(")"); // depends on control dependency: [if], data = [none]
        }
        if (source.isUnique()) {
            line.append(".unique()"); // depends on control dependency: [if], data = [none]
        }
        if (source.isCompound()) {
            line.append(".compound()"); // depends on control dependency: [if], data = [none]
        }
        if (source.isNotNull()) {
            line.append(".notNull()"); // depends on control dependency: [if], data = [none]
        }
        if (source.isCachingEnabled()) {
            line.append(".cachingEnabled(true)"); // depends on control dependency: [if], data = [none]
        }
        if (source.isIndexed()) {
            // Properties with a default value use the empty-aware index variant.
            if (StringUtils.isNotBlank(source.getDefaultValue())) {
                line.append(".indexedWhenEmpty()"); // depends on control dependency: [if], data = [none]
            } else {
                line.append(".indexed()"); // depends on control dependency: [if], data = [none]
            }
        }
        if (source.isReadOnly()) {
            line.append(".readOnly()"); // depends on control dependency: [if], data = [none]
        }
        final String[] transformators = source.getTransformators();
        if (transformators != null && transformators.length > 0) {
            line.append(".transformators("); // depends on control dependency: [if], data = [none]
            line.quoted(StringUtils.join(transformators, "\", \"")); // depends on control dependency: [if], data = [(transformators]
            line.append(")"); // depends on control dependency: [if], data = [none]
        }
        if (source.isPartOfBuiltInSchema()) {
            line.append(".partOfBuiltInSchema()"); // depends on control dependency: [if], data = [none]
        }
        line.append(".dynamic()");
        if (StringUtils.isNotBlank(source.getHint())) {
            line.append(".hint(").quoted(StringEscapeUtils.escapeJava(source.getHint())).append(")"); // depends on control dependency: [if], data = [none]
        }
        if (StringUtils.isNotBlank(source.getCategory())) {
            line.append(".category(").quoted(StringEscapeUtils.escapeJava(source.getCategory())).append(")"); // depends on control dependency: [if], data = [none]
        }
        line.append(";");
    }
}
public class class_name {
    /**
     * Compiles a single Prolog clause into WAM instructions and adds the
     * result to the given compiled predicate. Emits, in order: choice-point
     * instructions (try/retry/trust) when the predicate has multiple clauses,
     * stack-frame allocation for non-fact, non-chain rules, cut-level capture
     * when a deep cut needs it, the compiled head, each compiled body goal
     * with its call (with environment trimming via the remaining permanent
     * variable count), and finally a proceed for facts.
     *
     * @param clause            the clause to compile
     * @param compiledPredicate the predicate receiving the compiled clause
     * @param isFirst           whether this is the first clause of the predicate
     * @param isLast            whether this is the last clause of the predicate
     * @param multipleClauses   whether the predicate has more than one clause
     * @param clauseNumber      index of this clause, used to build entry/retry labels
     * @throws SourceCodeException if compilation of the head or body fails
     */
    private void compileClause(Clause clause, WAMCompiledPredicate compiledPredicate, boolean isFirst, boolean isLast,
            boolean multipleClauses, int clauseNumber) throws SourceCodeException {
        // Used to build up the compiled clause in.
        WAMCompiledClause result = new WAMCompiledClause(compiledPredicate);
        // Check if the clause to compile is a fact (no body).
        boolean isFact = clause.getBody() == null;
        // Check if the clause to compile is a chain rule, (one called body).
        boolean isChainRule = (clause.getBody() != null) && (clause.getBody().length == 1);
        // Used to keep track of registers as they are seen during compilation. The first time a variable is seen,
        // a variable is written onto the heap, subsequent times its value. The first time a functor is seen,
        // its structure is written onto the heap, subsequent times it is compared with.
        seenRegisters = new TreeSet<Integer>();
        // This is used to keep track of the next temporary register available to allocate.
        lastAllocatedTempReg = findMaxArgumentsInClause(clause);
        // This is used to keep track of the number of permanent variables.
        numPermanentVars = 0;
        // This is used to keep track of the allocation slot for the cut level variable, when needed. -1 means it is
        // not needed, so it is initialized to this.
        cutLevelVarSlot = -1;
        // These are used to generate pre and post instructions for the clause, for example, for the creation and
        // clean-up of stack frames.
        SizeableList<WAMInstruction> preFixInstructions = new SizeableLinkedList<WAMInstruction>();
        SizeableList<WAMInstruction> postFixInstructions = new SizeableLinkedList<WAMInstruction>();
        // Find all the free non-anonymous variables in the clause.
        Set<Variable> freeVars = TermUtils.findFreeNonAnonymousVariables(clause);
        Collection<Integer> freeVarNames = new TreeSet<Integer>();
        for (Variable var : freeVars) {
            freeVarNames.add(var.getName());
        }
        // Allocate permanent variables for a program clause. Program clauses only use permanent variables when really
        // needed to preserve variables across calls.
        allocatePermanentProgramRegisters(clause);
        // Gather information about the counts and positions of occurrence of variables and constants within the clause.
        gatherPositionAndOccurrenceInfo(clause);
        // Labels the entry point to each choice point.
        FunctorName fn = interner.getFunctorFunctorName(clause.getHead());
        WAMLabel entryLabel = new WAMLabel(fn, clauseNumber);
        // Label for the entry point to the next choice point, to backtrack to.
        WAMLabel retryLabel = new WAMLabel(fn, clauseNumber + 1);
        // Create choice point instructions for the clause, depending on its position within the containing predicate.
        // The choice point instructions are only created when a predicate is built from multiple clauses, as otherwise
        // there are no choices to be made.
        if (isFirst && !isLast && multipleClauses) {
            // try me else.
            preFixInstructions.add(new WAMInstruction(entryLabel, WAMInstruction.WAMInstructionSet.TryMeElse, retryLabel));
        } else if (!isFirst && !isLast && multipleClauses) {
            // retry me else.
            preFixInstructions.add(new WAMInstruction(entryLabel, WAMInstruction.WAMInstructionSet.RetryMeElse, retryLabel));
        } else if (isLast && multipleClauses) {
            // trust me.
            preFixInstructions.add(new WAMInstruction(entryLabel, WAMInstruction.WAMInstructionSet.TrustMe));
        }
        // Generate the prefix code for the clause.
        // Rules may chain multiple, so require stack frames to preserve registers across calls.
        // Facts are always leafs so can use the global continuation point register to return from calls.
        // Chain rules only make one call, so also do not need a stack frame.
        if (!(isFact || isChainRule)) {
            // Allocate a stack frame at the start of the clause.
            /*log.fine("ALLOCATE " + numPermanentVars);*/
            preFixInstructions.add(new WAMInstruction(WAMInstruction.WAMInstructionSet.Allocate));
        }
        // Deep cuts require the current choice point to be kept in a permanent variable, so that it can be recovered
        // once deeper choice points or environments have been reached.
        if (cutLevelVarSlot >= 0) {
            /*log.fine("GET_LEVEL "+ cutLevelVarSlot);*/
            preFixInstructions.add(new WAMInstruction(WAMInstruction.WAMInstructionSet.GetLevel, (byte) cutLevelVarSlot));
        }
        result.addInstructions(preFixInstructions);
        // Compile the clause head.
        Functor expression = clause.getHead();
        SizeableLinkedList<WAMInstruction> instructions = compileHead(expression);
        result.addInstructions(expression, instructions);
        // Compile all of the conjunctive parts of the body of the clause, if there are any.
        if (!isFact) {
            Functor[] expressions = clause.getBody();
            for (int i = 0; i < expressions.length; i++) {
                expression = expressions[i];
                boolean isLastBody = i == (expressions.length - 1);
                boolean isFirstBody = i == 0;
                // Permanent variables still live after this goal, used for environment trimming.
                Integer permVarsRemaining = (Integer) symbolTable.get(expression.getSymbolKey(), SymbolTableKeys.SYMKEY_PERM_VARS_REMAINING);
                // Select a non-default built-in implementation to compile the functor with, if it is a built-in.
                BuiltIn builtIn;
                if (expression instanceof BuiltIn) {
                    builtIn = (BuiltIn) expression;
                } else {
                    builtIn = this;
                }
                // The 'isFirstBody' parameter is only set to true, when this is the first functor of a rule.
                instructions = builtIn.compileBodyArguments(expression, i == 0, fn, i);
                result.addInstructions(expression, instructions);
                // Call the body. The number of permanent variables remaining is specified for environment trimming.
                instructions = builtIn.compileBodyCall(expression, isFirstBody, isLastBody, isChainRule, permVarsRemaining);
                result.addInstructions(expression, instructions);
            }
        }
        // Generate the postfix code for the clause. Rules may chain, so require stack frames.
        // Facts are always leafs so can use the global continuation point register to return from calls.
        if (isFact) {
            /*log.fine("PROCEED");*/
            postFixInstructions.add(new WAMInstruction(WAMInstruction.WAMInstructionSet.Proceed));
        }
        result.addInstructions(postFixInstructions);
    }
}
public class class_name {
    /**
     * Compiles one clause of a predicate into WAM instructions and appends them to the compiled
     * predicate being built up. Emits choice-point instructions (try/retry/trust) based on the
     * clause's position, a stack-frame Allocate for non-fact/non-chain rules, the head code, one
     * argument+call sequence per body functor, and a Proceed for facts.
     *
     * @param clause            The clause to compile.
     * @param compiledPredicate The compiled predicate that the clause is added to.
     * @param isFirst           <tt>true</tt> iff this is the first clause of the predicate.
     * @param isLast            <tt>true</tt> iff this is the last clause of the predicate.
     * @param multipleClauses   <tt>true</tt> iff the predicate is built from more than one clause.
     * @param clauseNumber      The position of this clause; used to label its choice point entry.
     * @throws SourceCodeException If the clause fails to compile.
     */
    private void compileClause(Clause clause, WAMCompiledPredicate compiledPredicate, boolean isFirst, boolean isLast,
            boolean multipleClauses, int clauseNumber) throws SourceCodeException {
        // Used to build up the compiled clause in.
        WAMCompiledClause result = new WAMCompiledClause(compiledPredicate);

        // Check if the clause to compile is a fact (no body).
        boolean isFact = clause.getBody() == null;

        // Check if the clause to compile is a chain rule, (one called body).
        boolean isChainRule = (clause.getBody() != null) && (clause.getBody().length == 1);

        // Used to keep track of registers as they are seen during compilation. The first time a variable is seen,
        // a variable is written onto the heap, subsequent times its value. The first time a functor is seen,
        // its structure is written onto the heap, subsequent times it is compared with.
        seenRegisters = new TreeSet<Integer>();

        // This is used to keep track of the next temporary register available to allocate.
        lastAllocatedTempReg = findMaxArgumentsInClause(clause);

        // This is used to keep track of the number of permanent variables.
        numPermanentVars = 0;

        // This is used to keep track of the allocation slot for the cut level variable, when needed. -1 means it is
        // not needed, so it is initialized to this.
        cutLevelVarSlot = -1;

        // These are used to generate pre and post instructions for the clause, for example, for the creation and
        // clean-up of stack frames.
        SizeableList<WAMInstruction> preFixInstructions = new SizeableLinkedList<WAMInstruction>();
        SizeableList<WAMInstruction> postFixInstructions = new SizeableLinkedList<WAMInstruction>();

        // Find all the free non-anonymous variables in the clause.
        Set<Variable> freeVars = TermUtils.findFreeNonAnonymousVariables(clause);
        Collection<Integer> freeVarNames = new TreeSet<Integer>();

        for (Variable var : freeVars) {
            freeVarNames.add(var.getName());
        }

        // Allocate permanent variables for a program clause. Program clauses only use permanent variables when really
        // needed to preserve variables across calls.
        allocatePermanentProgramRegisters(clause);

        // Gather information about the counts and positions of occurrence of variables and constants within the clause.
        gatherPositionAndOccurrenceInfo(clause);

        // Labels the entry point to each choice point.
        FunctorName fn = interner.getFunctorFunctorName(clause.getHead());
        WAMLabel entryLabel = new WAMLabel(fn, clauseNumber);

        // Label for the entry point to the next choice point, to backtrack to.
        WAMLabel retryLabel = new WAMLabel(fn, clauseNumber + 1);

        // Create choice point instructions for the clause, depending on its position within the containing predicate.
        // The choice point instructions are only created when a predicate is built from multiple clauses, as otherwise
        // there are no choices to be made.
        if (isFirst && !isLast && multipleClauses) {
            // try me else.
            preFixInstructions.add(new WAMInstruction(entryLabel, WAMInstruction.WAMInstructionSet.TryMeElse,
                retryLabel));
        } else if (!isFirst && !isLast && multipleClauses) {
            // retry me else.
            preFixInstructions.add(new WAMInstruction(entryLabel, WAMInstruction.WAMInstructionSet.RetryMeElse,
                retryLabel));
        } else if (isLast && multipleClauses) {
            // trust me.
            preFixInstructions.add(new WAMInstruction(entryLabel, WAMInstruction.WAMInstructionSet.TrustMe));
        }

        // Generate the prefix code for the clause.
        // Rules may chain multiple, so require stack frames to preserve registers across calls.
        // Facts are always leafs so can use the global continuation point register to return from calls.
        // Chain rules only make one call, so also do not need a stack frame.
        if (!(isFact || isChainRule)) {
            // Allocate a stack frame at the start of the clause.
            /*log.fine("ALLOCATE " + numPermanentVars);*/
            preFixInstructions.add(new WAMInstruction(WAMInstruction.WAMInstructionSet.Allocate));
        }

        // Deep cuts require the current choice point to be kept in a permanent variable, so that it can be recovered
        // once deeper choice points or environments have been reached.
        if (cutLevelVarSlot >= 0) {
            /*log.fine("GET_LEVEL "+ cutLevelVarSlot);*/
            preFixInstructions.add(new WAMInstruction(WAMInstruction.WAMInstructionSet.GetLevel,
                (byte) cutLevelVarSlot));
        }

        result.addInstructions(preFixInstructions);

        // Compile the clause head.
        Functor expression = clause.getHead();

        SizeableLinkedList<WAMInstruction> instructions = compileHead(expression);
        result.addInstructions(expression, instructions);

        // Compile all of the conjunctive parts of the body of the clause, if there are any.
        if (!isFact) {
            Functor[] expressions = clause.getBody();

            for (int i = 0; i < expressions.length; i++) {
                expression = expressions[i]; // depends on control dependency: [for], data = [i]

                boolean isLastBody = i == (expressions.length - 1);
                boolean isFirstBody = i == 0;

                Integer permVarsRemaining =
                    (Integer) symbolTable.get(expression.getSymbolKey(), SymbolTableKeys.SYMKEY_PERM_VARS_REMAINING);

                // Select a non-default built-in implementation to compile the functor with, if it is a built-in.
                BuiltIn builtIn;

                if (expression instanceof BuiltIn) {
                    builtIn = (BuiltIn) expression; // depends on control dependency: [if], data = [none]
                } else {
                    builtIn = this; // depends on control dependency: [if], data = [none]
                }

                // The 'isFirstBody' parameter is only set to true, when this is the first functor of a rule.
                instructions = builtIn.compileBodyArguments(expression, i == 0, fn, i); // depends on control dependency: [for], data = [i]
                result.addInstructions(expression, instructions); // depends on control dependency: [for], data = [none]

                // Call the body. The number of permanent variables remaining is specified for environment trimming.
                instructions = builtIn.compileBodyCall(expression, isFirstBody, isLastBody, isChainRule,
                    permVarsRemaining); // depends on control dependency: [for], data = [none]
                result.addInstructions(expression, instructions); // depends on control dependency: [for], data = [none]
            }
        }

        // Generate the postfix code for the clause. Rules may chain, so require stack frames.
        // Facts are always leafs so can use the global continuation point register to return from calls.
        if (isFact) {
            /*log.fine("PROCEED");*/
            postFixInstructions.add(new WAMInstruction(WAMInstruction.WAMInstructionSet.Proceed));
        }

        result.addInstructions(postFixInstructions);
    }
}
public class class_name { private Object getFieldValueName(@NonNull Map<String, SparseArray<String>> valueArrays, @NonNull Configuration conf, @NonNull Field f) throws IllegalAccessException { final String fieldName = f.getName(); switch (fieldName) { case FIELD_MCC: case FIELD_MNC: return f.getInt(conf); case FIELD_UIMODE: return activeFlags(valueArrays.get(PREFIX_UI_MODE), f.getInt(conf)); case FIELD_SCREENLAYOUT: return activeFlags(valueArrays.get(PREFIX_SCREENLAYOUT), f.getInt(conf)); default: final SparseArray<String> values = valueArrays.get(fieldName.toUpperCase() + '_'); if (values == null) { // Unknown field, return the raw int as String return f.getInt(conf); } final String value = values.get(f.getInt(conf)); if (value == null) { // Unknown value, return the raw int as String return f.getInt(conf); } return value; } } }
public class class_name {
    /**
     * Resolves a human-readable name for the current value of a {@link Configuration} field,
     * read reflectively.
     *
     * @param valueArrays lookup tables of value-name by int value, keyed by "PREFIX_" strings
     * @param conf        the configuration whose field value is read
     * @param f           the Configuration field to read
     * @return a String name (or active-flag list) when known, otherwise the raw boxed int value
     * @throws IllegalAccessException if the field cannot be read reflectively
     */
    private Object getFieldValueName(@NonNull Map<String, SparseArray<String>> valueArrays,
            @NonNull Configuration conf, @NonNull Field f) throws IllegalAccessException {
        final String fieldName = f.getName();
        switch (fieldName) {
            case FIELD_MCC:
            case FIELD_MNC:
                // mcc/mnc are plain numeric codes with no symbolic names.
                return f.getInt(conf);
            case FIELD_UIMODE:
                // Bit-flag field: decode every active flag.
                return activeFlags(valueArrays.get(PREFIX_UI_MODE), f.getInt(conf));
            case FIELD_SCREENLAYOUT:
                return activeFlags(valueArrays.get(PREFIX_SCREENLAYOUT), f.getInt(conf));
            default:
                // NOTE(review): toUpperCase() uses the default locale; the "FIELDNAME_" lookup
                // key could break under locale-specific case rules (e.g. Turkish 'i') --
                // consider toUpperCase(Locale.ROOT).
                final SparseArray<String> values = valueArrays.get(fieldName.toUpperCase() + '_');
                if (values == null) {
                    // Unknown field: fall back to the raw int value (boxed).
                    return f.getInt(conf); // depends on control dependency: [if], data = [none]
                }
                final String value = values.get(f.getInt(conf));
                if (value == null) {
                    // Unknown value: fall back to the raw int value (boxed).
                    return f.getInt(conf); // depends on control dependency: [if], data = [none]
                }
                return value;
        }
    }
}
public class class_name { @SuppressWarnings("unchecked") public final Node<E_OUT> evaluateToArrayNode(IntFunction<E_OUT[]> generator) { if (linkedOrConsumed) throw new IllegalStateException(MSG_STREAM_LINKED); linkedOrConsumed = true; // If the last intermediate operation is stateful then // evaluate directly to avoid an extra collection step if (isParallel() && previousStage != null && opIsStateful()) { // Set the depth of this, last, pipeline stage to zero to slice the // pipeline such that this operation will not be included in the // upstream slice and upstream operations will not be included // in this slice depth = 0; return opEvaluateParallel(previousStage, previousStage.sourceSpliterator(0), generator); } else { return evaluate(sourceSpliterator(0), true, generator); } } }
public class class_name {
    /**
     * Evaluates this pipeline and collects its output into a {@code Node}, using the supplied
     * array generator for intermediate storage. The pipeline is consumed by this call.
     *
     * @param generator factory for arrays of the output element type
     * @return the node holding the pipeline's output
     * @throws IllegalStateException if the pipeline has already been linked or consumed
     */
    @SuppressWarnings("unchecked")
    public final Node<E_OUT> evaluateToArrayNode(IntFunction<E_OUT[]> generator) {
        if (linkedOrConsumed)
            throw new IllegalStateException(MSG_STREAM_LINKED);
        linkedOrConsumed = true;

        // If the last intermediate operation is stateful then
        // evaluate directly to avoid an extra collection step
        if (isParallel() && previousStage != null && opIsStateful()) {
            // Set the depth of this, last, pipeline stage to zero to slice the
            // pipeline such that this operation will not be included in the
            // upstream slice and upstream operations will not be included
            // in this slice
            depth = 0; // depends on control dependency: [if], data = [none]
            return opEvaluateParallel(previousStage, previousStage.sourceSpliterator(0), generator); // depends on control dependency: [if], data = [none]
        }
        else {
            return evaluate(sourceSpliterator(0), true, generator); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Serializes {@code elementValue} into a DOM element named {@code elementName} and appends
     * it to {@code parentElement}. Values that serialize to null are silently skipped.
     *
     * @param document      the owner document used to create the element
     * @param parentElement the element the serialized child is appended to
     * @param elementName   the tag name for the new element
     * @param elementValue  the value to serialize
     */
    private void addElement(Document document, Element parentElement, String elementName, Object elementValue) {
        Element child = CollectionExtensionPropertySerializer.toElement(elementValue, document, elementName,
                this.serializerByClass);
        if (child == null) {
            // Nothing to append when the value could not be serialized.
            return;
        }
        parentElement.appendChild(child);
    }
}
public class class_name {
    /**
     * Serializes {@code elementValue} into a DOM element named {@code elementName} and appends
     * it to {@code parentElement}. Values that serialize to null are silently skipped.
     *
     * @param document      the owner document used to create the element
     * @param parentElement the element the serialized child is appended to
     * @param elementName   the tag name for the new element
     * @param elementValue  the value to serialize
     */
    private void addElement(Document document, Element parentElement, String elementName, Object elementValue) {
        Element element = CollectionExtensionPropertySerializer.toElement(elementValue, document, elementName,
                this.serializerByClass);
        if (element != null) {
            parentElement.appendChild(element); // depends on control dependency: [if], data = [(element]
        }
    }
}
public class class_name {
    /**
     * Handles an ApplicationUpdate frame for this Z-Wave node. In the NodeInfo state a failed
     * info request is retried once; after that the node is started anyway — flagged unavailable
     * when it should be a listening node, or assumed asleep (and left available) otherwise. A
     * successful update registers any newly reported command classes and advances the node to
     * version retrieval (when it supports the Version command class) or state retrieval. In any
     * other state the update is unsolicited and simply triggers a refresh.
     *
     * @param context the controller context used to send frames and change state
     * @param update  the received application update
     */
    public void onApplicationUpdate(ZWaveControllerContext context, ApplicationUpdate update) {
        switch (nodeState) {
            case NodeInfo:
                // if the application update failed to send, re-send it
                if (update.didInfoRequestFail()) {
                    if (stateRetries < 1) {
                        logger.trace("Application update failed for node {}; will retry", getNodeId());
                        sendDataFrame(context, new RequestNodeInfo(getNodeId()));
                        stateRetries++;
                    } else {
                        if (isListeningNode()) {
                            logger.trace("Node {} provided no node info after {} retries; should be listening so flagging as unavailable and started", getNodeId(), stateRetries);
                            available = false;
                            setState(context, ZWaveNodeState.Started);
                        } else {
                            logger.trace("Node {} provided no node info after {} retries; not flagged as listening so assuming it's asleep", getNodeId(), stateRetries);
                            available = true;
                            setState(context, ZWaveNodeState.Started);
                        }
                    }
                } else {
                    // check if there are optional command classes
                    byte[] commandClasses = update.getNodeInfo().getCommandClasses();
                    for (byte commandClassId : commandClasses) {
                        if (!hasCommandClass(commandClassId)) {
                            CommandClass cc = CommandClassFactory.createCommandClass(commandClassId);
                            if (cc != null) {
                                addCommandClass(commandClassId, cc);
                            } else {
                                logger.trace("Ignoring optional command class: {}", ByteUtil.createString(commandClassId));
                            }
                        }
                    }
                    // if this node has the Version command class, then we should retrieve version
                    // information for all command classes it supports
                    if (getCommandClass(VersionCommandClass.ID) != null) {
                        setState(context, ZWaveNodeState.RetrieveVersionPending);
                    // otherwise, we assume all command classes are version 1 and move on
                    } else {
                        setState(context, ZWaveNodeState.RetrieveStatePending);
                    }
                }
                break;
            default:
                logger.trace("Unsolicited ApplicationUpdate received; refreshing node");
                refresh(false);
                break;
        }
    }
}
public class class_name {
    /**
     * Handles an ApplicationUpdate frame for this Z-Wave node. In the NodeInfo state a failed
     * info request is retried once; after that the node is started anyway — flagged unavailable
     * when it should be a listening node, or assumed asleep (and left available) otherwise. A
     * successful update registers any newly reported command classes and advances the node to
     * version retrieval (when it supports the Version command class) or state retrieval. In any
     * other state the update is unsolicited and simply triggers a refresh.
     *
     * @param context the controller context used to send frames and change state
     * @param update  the received application update
     */
    public void onApplicationUpdate(ZWaveControllerContext context, ApplicationUpdate update) {
        switch (nodeState) {
            case NodeInfo:
                // if the application update failed to send, re-send it
                if (update.didInfoRequestFail()) {
                    if (stateRetries < 1) {
                        logger.trace("Application update failed for node {}; will retry", getNodeId()); // depends on control dependency: [if], data = [none]
                        sendDataFrame(context, new RequestNodeInfo(getNodeId())); // depends on control dependency: [if], data = [none]
                        stateRetries++; // depends on control dependency: [if], data = [none]
                    } else {
                        if (isListeningNode()) {
                            logger.trace("Node {} provided no node info after {} retries; should be listening so flagging as unavailable and started", getNodeId(), stateRetries); // depends on control dependency: [if], data = [none]
                            available = false; // depends on control dependency: [if], data = [none]
                            setState(context, ZWaveNodeState.Started); // depends on control dependency: [if], data = [none]
                        } else {
                            logger.trace("Node {} provided no node info after {} retries; not flagged as listening so assuming it's asleep", getNodeId(), stateRetries); // depends on control dependency: [if], data = [none]
                            available = true; // depends on control dependency: [if], data = [none]
                            setState(context, ZWaveNodeState.Started); // depends on control dependency: [if], data = [none]
                        }
                    }
                } else {
                    // check if there are optional command classes
                    byte[] commandClasses = update.getNodeInfo().getCommandClasses();
                    for (byte commandClassId : commandClasses) {
                        if (!hasCommandClass(commandClassId)) {
                            CommandClass cc = CommandClassFactory.createCommandClass(commandClassId);
                            if (cc != null) {
                                addCommandClass(commandClassId, cc); // depends on control dependency: [if], data = [none]
                            } else {
                                logger.trace("Ignoring optional command class: {}", ByteUtil.createString(commandClassId)); // depends on control dependency: [if], data = [none]
                            }
                        }
                    }
                    // if this node has the Version command class, then we should retrieve version
                    // information for all command classes it supports
                    if (getCommandClass(VersionCommandClass.ID) != null) {
                        setState(context, ZWaveNodeState.RetrieveVersionPending); // depends on control dependency: [if], data = [none]
                    // otherwise, we assume all command classes are version 1 and move on
                    } else {
                        setState(context, ZWaveNodeState.RetrieveStatePending); // depends on control dependency: [if], data = [none]
                    }
                }
                break;
            default:
                logger.trace("Unsolicited ApplicationUpdate received; refreshing node");
                refresh(false);
                break;
        }
    }
}
public class class_name { private static <X extends Exception> X newWithCause( Class<X> exceptionClass, Throwable cause) { // getConstructors() guarantees this as long as we don't modify the array. @SuppressWarnings("unchecked") List<Constructor<X>> constructors = (List) Arrays.asList(exceptionClass.getConstructors()); for (Constructor<X> constructor : preferringStrings(constructors)) { @Nullable X instance = newFromConstructor(constructor, cause); if (instance != null) { if (instance.getCause() == null) { instance.initCause(cause); } return instance; } } throw new IllegalArgumentException( "No appropriate constructor for exception of type " + exceptionClass + " in response to chained exception", cause); } }
public class class_name {
    /**
     * Instantiates an exception of {@code exceptionClass} wrapping {@code cause}, trying each
     * public constructor (String-taking constructors preferred) until one yields an instance.
     *
     * @param exceptionClass the exception type to instantiate
     * @param cause          the exception to chain as the cause
     * @return the constructed exception with {@code cause} attached
     * @throws IllegalArgumentException if no constructor produces an instance
     */
    private static <X extends Exception> X newWithCause(
        Class<X> exceptionClass, Throwable cause) {
        // getConstructors() guarantees this as long as we don't modify the array.
        @SuppressWarnings("unchecked")
        List<Constructor<X>> constructors = (List) Arrays.asList(exceptionClass.getConstructors());
        for (Constructor<X> constructor : preferringStrings(constructors)) {
            @Nullable X instance = newFromConstructor(constructor, cause);
            if (instance != null) {
                if (instance.getCause() == null) {
                    // The constructor did not chain the cause itself; attach it now.
                    instance.initCause(cause); // depends on control dependency: [if], data = [none]
                }
                return instance; // depends on control dependency: [if], data = [none]
            }
        }
        throw new IllegalArgumentException(
            "No appropriate constructor for exception of type " + exceptionClass
                + " in response to chained exception", cause);
    }
}
public class class_name {
    /**
     * Accepts jar file names of the form "&lt;name&gt;_&lt;version&gt;.jar", optionally
     * restricted to the half-open version range [min, max).
     *
     * @param fName the file name to test
     * @param name  the expected bundle name prefix (before the underscore separator)
     * @param min   inclusive lower version bound, or null to accept any version
     * @param max   exclusive upper version bound, or null to accept any version
     * @return true when the name matches and (if both bounds are given) the parsed version lies
     *         in [min, max); versions that fail to parse are accepted
     */
    public static boolean accept(String fName, String name, Version min, Version max) {
        if (fName.startsWith(name + "_") && fName.toLowerCase().endsWith(".jar")) {
            // No range given: any version is acceptable.
            if (min == null || max == null) {
                return true;
            }
            // The version starts right after the "name_" prefix. Using indexOf("_") here would
            // truncate wrongly when 'name' itself contains an underscore, so index off the
            // known prefix length instead; the trailing 4 characters are ".jar".
            String versionStr = fName.substring(name.length() + 1, fName.length() - 4);
            Version v = Version.createVersion(versionStr);
            if (v == null) {
                return true;
            }
            return v.compareTo(min) >= 0 && v.compareTo(max) < 0;
        }
        return false;
    }
}
public class class_name {
    /**
     * Accepts jar file names of the form "&lt;name&gt;_&lt;version&gt;.jar", optionally
     * restricted to the half-open version range [min, max).
     *
     * @param fName the file name to test
     * @param name  the expected bundle name prefix (before the underscore separator)
     * @param min   inclusive lower version bound, or null to accept any version
     * @param max   exclusive upper version bound, or null to accept any version
     * @return true when the name matches and (if both bounds are given) the parsed version lies
     *         in [min, max); versions that fail to parse are accepted
     */
    public static boolean accept(String fName, String name, Version min, Version max) {
        if (fName.startsWith(name + "_") && fName.toLowerCase().endsWith(".jar")) {
            // No range given: any version is acceptable.
            if (min == null || max == null)
                return true;
            // NOTE(review): indexOf("_") finds the FIRST underscore, which extracts the wrong
            // version substring when 'name' itself contains an underscore; name.length() would
            // be the safe offset. The trailing 4 characters stripped are ".jar".
            int i = fName.indexOf("_");
            String versionStr = fName.substring(i + 1, fName.length() - 4);
            Version v = Version.createVersion(versionStr);
            if (v == null)
                return true;
            return v.compareTo(min) >= 0 && v.compareTo(max) < 0; // depends on control dependency: [if], data = [none]
        }
        return false;
    }
}
public class class_name {
    /**
     * Issues a bulk update for the given lbgroup resources. Only the updatable attributes are
     * copied onto fresh request objects before the bulk request is sent.
     *
     * @param client    the nitro service to send the request through
     * @param resources the resources to update; when null or empty no request is made
     * @return the bulk responses, or null when there was nothing to update
     * @throws Exception if the bulk update request fails
     */
    public static base_responses update(nitro_service client, lbgroup resources[]) throws Exception {
        base_responses result = null;
        if (resources != null && resources.length > 0) {
            lbgroup updateresources[] = new lbgroup[resources.length];
            for (int idx = 0; idx < resources.length; idx++) {
                // Copy only the updatable attributes onto a fresh request object.
                lbgroup source = resources[idx];
                lbgroup target = new lbgroup();
                target.name = source.name;
                target.persistencetype = source.persistencetype;
                target.persistencebackup = source.persistencebackup;
                target.backuppersistencetimeout = source.backuppersistencetimeout;
                target.persistmask = source.persistmask;
                target.cookiename = source.cookiename;
                target.v6persistmasklen = source.v6persistmasklen;
                target.cookiedomain = source.cookiedomain;
                target.timeout = source.timeout;
                target.rule = source.rule;
                updateresources[idx] = target;
            }
            result = update_bulk_request(client, updateresources);
        }
        return result;
    }
}
public class class_name {
    /**
     * Issues a bulk update for the given lbgroup resources. Only the updatable attributes are
     * copied onto fresh request objects before the bulk request is sent.
     *
     * @param client    the nitro service to send the request through
     * @param resources the resources to update; when null or empty no request is made
     * @return the bulk responses, or null when there was nothing to update
     * @throws Exception if the bulk update request fails
     */
    public static base_responses update(nitro_service client, lbgroup resources[]) throws Exception {
        base_responses result = null;
        if (resources != null && resources.length > 0) {
            lbgroup updateresources[] = new lbgroup[resources.length];
            for (int i = 0; i < resources.length; i++) {
                updateresources[i] = new lbgroup(); // depends on control dependency: [for], data = [i]
                updateresources[i].name = resources[i].name; // depends on control dependency: [for], data = [i]
                updateresources[i].persistencetype = resources[i].persistencetype; // depends on control dependency: [for], data = [i]
                updateresources[i].persistencebackup = resources[i].persistencebackup; // depends on control dependency: [for], data = [i]
                updateresources[i].backuppersistencetimeout = resources[i].backuppersistencetimeout; // depends on control dependency: [for], data = [i]
                updateresources[i].persistmask = resources[i].persistmask; // depends on control dependency: [for], data = [i]
                updateresources[i].cookiename = resources[i].cookiename; // depends on control dependency: [for], data = [i]
                updateresources[i].v6persistmasklen = resources[i].v6persistmasklen; // depends on control dependency: [for], data = [i]
                updateresources[i].cookiedomain = resources[i].cookiedomain; // depends on control dependency: [for], data = [i]
                updateresources[i].timeout = resources[i].timeout; // depends on control dependency: [for], data = [i]
                updateresources[i].rule = resources[i].rule; // depends on control dependency: [for], data = [i]
            }
            result = update_bulk_request(client, updateresources);
        }
        return result;
    }
}
public class class_name { public static TreeSet<Integer> supplyLackedNumberMappingColumn(final Class<?> beanType, final List<ColumnMapping> list, final Optional<CsvPartial> partialAnno, final String[] suppliedHeaders) { final TreeSet<Integer> checkedNumber = list.stream() .filter(col -> col.isDeterminedNumber()) .map(col -> col.getNumber()) .collect(Collectors.toCollection(TreeSet::new)); // 定義されている列番号の最大値 final int maxColumnNumber = checkedNumber.last(); // Beanに定義されていない欠けているカラム番号の取得 final TreeSet<Integer> lackedNumbers = new TreeSet<Integer>(); for(int i=1; i <= maxColumnNumber; i++) { if(!checkedNumber.contains(i)) { lackedNumbers.add(i); } } // 定義されているカラム番号より、大きなカラム番号を持つカラム情報の補足 if(partialAnno.isPresent()) { final int partialColumnSize = partialAnno.get().columnSize(); if(maxColumnNumber > partialColumnSize) { throw new SuperCsvInvalidAnnotationException(partialAnno.get(), MessageBuilder.create("anno.CsvPartial.columSizeMin") .var("property", beanType.getName()) .var("columnSize", partialColumnSize) .var("maxColumnNumber", maxColumnNumber) .format()); } if(maxColumnNumber < partialColumnSize) { for(int i= maxColumnNumber+1; i <= partialColumnSize; i++) { lackedNumbers.add(i); } } } // 不足分のカラムがある場合は、部分的な読み書き用カラムとして追加する if(lackedNumbers.size() > 0) { for(int number : lackedNumbers) { list.add(createPartialColumnMapping(number, partialAnno, getSuppliedHeaders(suppliedHeaders, number))); } list.sort(null); } return lackedNumbers; } }
public class class_name {
    /**
     * Supplies column mappings for column numbers not explicitly defined on the bean, so that
     * partial reads/writes line up with the physical CSV columns.
     *
     * @param beanType        the bean class (used in error messages)
     * @param list            the existing column mappings; missing columns are appended and the
     *                        list re-sorted in place
     * @param partialAnno     the optional @CsvPartial annotation declaring the total column count
     * @param suppliedHeaders externally supplied headers, if any
     * @return the set of column numbers that were supplied
     * @throws SuperCsvInvalidAnnotationException when @CsvPartial declares fewer columns than
     *         the bean actually maps
     */
    public static TreeSet<Integer> supplyLackedNumberMappingColumn(final Class<?> beanType,
            final List<ColumnMapping> list, final Optional<CsvPartial> partialAnno,
            final String[] suppliedHeaders) {
        final TreeSet<Integer> checkedNumber = list.stream()
                .filter(col -> col.isDeterminedNumber())
                .map(col -> col.getNumber())
                .collect(Collectors.toCollection(TreeSet::new));

        // Largest column number defined on the bean.
        // NOTE(review): last() throws NoSuchElementException when no column has a determined
        // number -- confirm callers guarantee at least one.
        final int maxColumnNumber = checkedNumber.last();

        // Collect the column numbers in [1, maxColumnNumber] missing from the bean definition.
        final TreeSet<Integer> lackedNumbers = new TreeSet<Integer>();
        for (int i = 1; i <= maxColumnNumber; i++) {
            if (!checkedNumber.contains(i)) {
                lackedNumbers.add(i); // depends on control dependency: [if], data = [none]
            }
        }

        // Supplement columns whose numbers are larger than any defined on the bean, as declared
        // by the @CsvPartial annotation.
        if (partialAnno.isPresent()) {
            final int partialColumnSize = partialAnno.get().columnSize();
            if (maxColumnNumber > partialColumnSize) {
                throw new SuperCsvInvalidAnnotationException(partialAnno.get(),
                        MessageBuilder.create("anno.CsvPartial.columSizeMin")
                                .var("property", beanType.getName())
                                .var("columnSize", partialColumnSize)
                                .var("maxColumnNumber", maxColumnNumber)
                                .format());
            }
            if (maxColumnNumber < partialColumnSize) {
                for (int i = maxColumnNumber + 1; i <= partialColumnSize; i++) {
                    lackedNumbers.add(i); // depends on control dependency: [for], data = [i]
                }
            }
        }

        // If any columns are missing, add them as partial read/write columns and re-sort.
        if (lackedNumbers.size() > 0) {
            for (int number : lackedNumbers) {
                list.add(createPartialColumnMapping(number, partialAnno,
                        getSuppliedHeaders(suppliedHeaders, number))); // depends on control dependency: [for], data = [number]
            }
            list.sort(null); // depends on control dependency: [if], data = [none]
        }

        return lackedNumbers;
    }
}
public class class_name {
    /**
     * Converts a flattened DataFrame back into sequence records. Rows are grouped by the
     * sequence UUID in column 0, combined per group into ordered sequences, and the two leading
     * bookkeeping columns (sequence UUID and time-step index) are stripped from each row.
     *
     * @param dataFrame the flattened sequence data
     * @return the derived schema paired with the RDD of sequences
     */
    public static Pair<Schema, JavaRDD<List<List<Writable>>>> toRecordsSequence(DataRowsFacade dataFrame) {
        //Need to convert from flattened to sequence data...
        //First: Group by the Sequence UUID (first column)
        JavaPairRDD<String, Iterable<Row>> grouped = dataFrame.get().javaRDD().groupBy(new Function<Row, String>() {
            @Override
            public String call(Row row) throws Exception {
                return row.getString(0);
            }
        });

        Schema schema = fromStructType(dataFrame.get().schema());

        //Group by sequence UUID, and sort each row within the sequences using the time step index
        Function<Iterable<Row>, List<List<Writable>>> createCombiner =
                new DataFrameToSequenceCreateCombiner(schema); //Function to create the initial combiner
        Function2<List<List<Writable>>, Iterable<Row>, List<List<Writable>>> mergeValue =
                new DataFrameToSequenceMergeValue(schema); //Function to add a row
        Function2<List<List<Writable>>, List<List<Writable>>, List<List<Writable>>> mergeCombiners =
                new DataFrameToSequenceMergeCombiner(); //Function to merge existing sequence writables

        JavaRDD<List<List<Writable>>> sequences =
                grouped.combineByKey(createCombiner, mergeValue, mergeCombiners).values();

        //We no longer want/need the sequence UUID and sequence time step columns - extract those out
        JavaRDD<List<List<Writable>>> out = sequences.map(new Function<List<List<Writable>>, List<List<Writable>>>() {
            @Override
            public List<List<Writable>> call(List<List<Writable>> v1) throws Exception {
                List<List<Writable>> out = new ArrayList<>(v1.size());
                for (List<Writable> l : v1) {
                    List<Writable> subset = new ArrayList<>();
                    // Columns 0 and 1 are the sequence UUID and the time-step index; keep the rest.
                    for (int i = 2; i < l.size(); i++) {
                        subset.add(l.get(i));
                    }
                    out.add(subset);
                }
                return out;
            }
        });

        return new Pair<>(schema, out);
    }
}
public class class_name {
    /**
     * Converts a flattened DataFrame back into sequence records. Rows are grouped by the
     * sequence UUID in column 0, combined per group into ordered sequences, and the two leading
     * bookkeeping columns (sequence UUID and time-step index) are stripped from each row.
     *
     * @param dataFrame the flattened sequence data
     * @return the derived schema paired with the RDD of sequences
     */
    public static Pair<Schema, JavaRDD<List<List<Writable>>>> toRecordsSequence(DataRowsFacade dataFrame) {
        //Need to convert from flattened to sequence data...
        //First: Group by the Sequence UUID (first column)
        JavaPairRDD<String, Iterable<Row>> grouped = dataFrame.get().javaRDD().groupBy(new Function<Row, String>() {
            @Override
            public String call(Row row) throws Exception {
                return row.getString(0);
            }
        });

        Schema schema = fromStructType(dataFrame.get().schema());

        //Group by sequence UUID, and sort each row within the sequences using the time step index
        Function<Iterable<Row>, List<List<Writable>>> createCombiner =
                new DataFrameToSequenceCreateCombiner(schema); //Function to create the initial combiner
        Function2<List<List<Writable>>, Iterable<Row>, List<List<Writable>>> mergeValue =
                new DataFrameToSequenceMergeValue(schema); //Function to add a row
        Function2<List<List<Writable>>, List<List<Writable>>, List<List<Writable>>> mergeCombiners =
                new DataFrameToSequenceMergeCombiner(); //Function to merge existing sequence writables

        JavaRDD<List<List<Writable>>> sequences =
                grouped.combineByKey(createCombiner, mergeValue, mergeCombiners).values();

        //We no longer want/need the sequence UUID and sequence time step columns - extract those out
        JavaRDD<List<List<Writable>>> out = sequences.map(new Function<List<List<Writable>>, List<List<Writable>>>() {
            @Override
            public List<List<Writable>> call(List<List<Writable>> v1) throws Exception {
                List<List<Writable>> out = new ArrayList<>(v1.size());
                for (List<Writable> l : v1) {
                    List<Writable> subset = new ArrayList<>();
                    // Columns 0 and 1 are the sequence UUID and the time-step index; keep the rest.
                    for (int i = 2; i < l.size(); i++) {
                        subset.add(l.get(i)); // depends on control dependency: [for], data = [i]
                    }
                    out.add(subset);
                }
                return out;
            }
        });

        return new Pair<>(schema, out);
    }
}
public class class_name {
    /**
     * Shuts this recorder down: notifies the lifecycle listener (when one is registered) that
     * the recorder has closed, then destroys the underlying data logger.
     *
     * @throws DelegatedRuntimeException when closing the data logger fails with an IOException
     */
    @Override
    public void destroy() {
        if (this.dataRecorderLifycycleListner != null) {
            this.dataRecorderLifycycleListner.recorderDataRecorderClosed(this);
        }
        try {
            this.dataLogger.destroy();
        } catch (IOException closeFailure) {
            // Wrap the checked IOException so destroy() need not declare it.
            throw new DelegatedRuntimeException(closeFailure);
        }
    }
}
public class class_name {
    /**
     * Shuts this recorder down: notifies the lifecycle listener (when one is registered) that
     * the recorder has closed, then destroys the underlying data logger.
     *
     * @throws DelegatedRuntimeException when closing the data logger fails with an IOException
     */
    @Override
    public void destroy() {
        if (dataRecorderLifycycleListner != null) {
            this.dataRecorderLifycycleListner.recorderDataRecorderClosed(this); // depends on control dependency: [if], data = [none]
        }
        try {
            this.dataLogger.destroy(); // depends on control dependency: [try], data = [none]
        } catch (IOException e) {
            // Wrap the checked IOException so destroy() need not declare it.
            throw new DelegatedRuntimeException(e);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    /**
     * Returns the names of all alignment parameters currently held in {@code param}.
     *
     * @return a new mutable set containing the enum constant names of the set parameters
     */
    @Override
    public Set<String> getAlignmentOptions() {
        final Set<String> optionNames = new HashSet<String>();
        for (final BlastAlignmentParameterEnum option : param.keySet()) {
            optionNames.add(option.name());
        }
        return optionNames;
    }
}
public class class_name {
    /**
     * Returns the names of all alignment parameters currently held in {@code param}.
     *
     * @return a new mutable set containing the enum constant names of the set parameters
     */
    @Override
    public Set<String> getAlignmentOptions() {
        Set<String> result = new HashSet<String>();
        for (BlastAlignmentParameterEnum parameter : param.keySet()) {
            result.add(parameter.name()); // depends on control dependency: [for], data = [parameter]
        }
        return result;
    }
}
public class class_name {
    /**
     * Builds a Consumer that applies a finished load-more result to its Card: clears the
     * loading flag, marks the card loaded, replaces (first page) or appends (later pages) the
     * new cells, advances the page counter, and records whether more data is available.
     *
     * @return a consumer applying a {@code LoadMoreOp} result to its target card
     */
    public Consumer<LoadMoreOp> asDoLoadMoreFinishConsumer() {
        return new Consumer<LoadMoreOp>() {
            @Override
            public void accept(LoadMoreOp result) throws Exception {
                Card target = result.getArg1();
                target.loading = false;
                target.loaded = true;
                List<BaseCell> newCells = result.getArg2();
                boolean more = result.getArg3();
                boolean receivedCells = newCells != null && !newCells.isEmpty();
                if (receivedCells) {
                    // The initial page replaces any existing cells; later pages append.
                    if (target.page == sInitialPage) {
                        target.setCells(newCells);
                    } else {
                        target.addCells(newCells);
                    }
                    target.page++;
                    target.hasMore = more;
                    target.notifyDataChange();
                } else {
                    // Nothing new was loaded; only record whether more data is available.
                    target.hasMore = more;
                }
            }
        };
    }
}
public class class_name {
    /**
     * Builds a Consumer that applies a finished load-more result to its Card: clears the
     * loading flag, marks the card loaded, replaces (first page) or appends (later pages) the
     * new cells, advances the page counter, and records whether more data is available.
     *
     * @return a consumer applying a {@code LoadMoreOp} result to its target card
     */
    public Consumer<LoadMoreOp> asDoLoadMoreFinishConsumer() {
        return new Consumer<LoadMoreOp>() {
            @Override
            public void accept(LoadMoreOp result) throws Exception {
                Card card = result.getArg1();
                card.loading = false;
                card.loaded = true;
                List<BaseCell> cells = result.getArg2();
                boolean hasMore = result.getArg3();
                if (cells != null && !cells.isEmpty()) {
                    // The initial page replaces any existing cells; later pages append.
                    if (card.page == sInitialPage) {
                        card.setCells(cells); // depends on control dependency: [if], data = [none]
                    } else {
                        card.addCells(cells); // depends on control dependency: [if], data = [none]
                    }
                    card.page++;
                    card.hasMore = hasMore;
                    card.notifyDataChange();
                } else {
                    // Nothing new was loaded; only record whether more data is available.
                    card.hasMore = hasMore;
                }
            }
        };
    }
}
public class class_name {
    /**
     * Returns {@code t} unchanged when it needs no enhancement; otherwise wraps it in the
     * generated enhanced subclass via its single-argument copy constructor.
     *
     * @param t the object to enhance
     * @return the enhanced instance, or {@code t} itself when no enhancement is needed
     * @throws RuntimeException when reflective construction of the enhanced instance fails
     */
    public T enhance(T t) {
        if (needsEnhancement(t)) {
            try {
                return getEnhancedClass().getConstructor(baseClass).newInstance(t);
            } catch (Exception e) {
                throw new RuntimeException(String.format("Could not enhance object %s (%s)", t, t.getClass()), e);
            }
        }
        return t;
    }
}
public class class_name {
    /**
     * Returns {@code t} unchanged when it needs no enhancement; otherwise wraps it in the
     * generated enhanced subclass via its single-argument copy constructor.
     *
     * @param t the object to enhance
     * @return the enhanced instance, or {@code t} itself when no enhancement is needed
     * @throws RuntimeException when reflective construction of the enhanced instance fails
     */
    public T enhance(T t) {
        if (!needsEnhancement(t)) {
            return t; // depends on control dependency: [if], data = [none]
        }
        try {
            return getEnhancedClass().getConstructor(baseClass).newInstance(t); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new RuntimeException(String.format("Could not enhance object %s (%s)", t, t.getClass()), e);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    /**
     * Returns a copy of this monetary amount with the given number of minor units subtracted;
     * the minor amount is scaled by the currency's decimal places. Subtracting zero returns
     * this instance unchanged.
     *
     * @param amountToSubtract the amount in minor units (e.g. cents) to subtract
     * @return the resulting monetary amount
     */
    public BigMoney minusMinor(long amountToSubtract) {
        if (amountToSubtract != 0) {
            BigDecimal delta = BigDecimal.valueOf(amountToSubtract, currency.getDecimalPlaces());
            return BigMoney.of(currency, amount.subtract(delta));
        }
        return this;
    }
}
public class class_name {
    /**
     * Returns a copy of this monetary amount with the given number of minor units subtracted;
     * the minor amount is scaled by the currency's decimal places. Subtracting zero returns
     * this instance unchanged.
     *
     * @param amountToSubtract the amount in minor units (e.g. cents) to subtract
     * @return the resulting monetary amount
     */
    public BigMoney minusMinor(long amountToSubtract) {
        if (amountToSubtract == 0) {
            return this; // depends on control dependency: [if], data = [none]
        }
        BigDecimal newAmount = amount.subtract(BigDecimal.valueOf(amountToSubtract, currency.getDecimalPlaces()));
        return BigMoney.of(currency, newAmount);
    }
}
public class class_name {
    /**
     * Finds a UNIQUE or PRIMARY KEY constraint covering exactly the given main-table columns.
     * When the constraint lists the same columns in a different order, {@code refTableCols} is
     * permuted IN PLACE to match the constraint's column order before the constraint is
     * returned.
     *
     * @param mainTableCols the main (referenced) table column indexes
     * @param refTableCols  the referencing table column indexes; may be reordered in place
     * @return the matching constraint, or null when none covers these columns
     */
    Constraint getUniqueConstraintForColumns(int[] mainTableCols, int[] refTableCols) {
        for (int i = 0, size = constraintList.length; i < size; i++) {
            Constraint c = constraintList[i];

            // A VoltDB extension -- Don't consider non-column expression indexes for this purpose
            if (c.hasExprs()) {
                continue;
            }
            // End of VoltDB extension

            int type = c.getConstraintType();
            if (type != Constraint.UNIQUE && type != Constraint.PRIMARY_KEY) {
                continue;
            }

            int[] constraintCols = c.getMainColumns();
            if (constraintCols.length != mainTableCols.length) {
                continue;
            }

            // Exact match in the same order: nothing to permute.
            if (ArrayUtil.areEqual(constraintCols, mainTableCols, mainTableCols.length, true)) {
                return c;
            }

            // Same column set in a different order: permute refTableCols into constraint order.
            if (ArrayUtil.areEqualSets(constraintCols, mainTableCols)) {
                int[] newRefTableCols = new int[mainTableCols.length];
                for (int j = 0; j < mainTableCols.length; j++) {
                    int pos = ArrayUtil.find(constraintCols, mainTableCols[j]);
                    newRefTableCols[pos] = refTableCols[j];
                }
                // Copy the permuted order back into the caller's array.
                for (int j = 0; j < mainTableCols.length; j++) {
                    refTableCols[j] = newRefTableCols[j];
                }
                return c;
            }
        }
        return null;
    }
}
public class class_name {
    /**
     * Finds a UNIQUE or PRIMARY KEY constraint covering exactly the given main-table columns.
     * When the constraint lists the same columns in a different order, {@code refTableCols} is
     * permuted IN PLACE to match the constraint's column order before the constraint is
     * returned.
     *
     * @param mainTableCols the main (referenced) table column indexes
     * @param refTableCols  the referencing table column indexes; may be reordered in place
     * @return the matching constraint, or null when none covers these columns
     */
    Constraint getUniqueConstraintForColumns(int[] mainTableCols, int[] refTableCols) {
        for (int i = 0, size = constraintList.length; i < size; i++) {
            Constraint c = constraintList[i];

            // A VoltDB extension -- Don't consider non-column expression indexes for this purpose
            if (c.hasExprs()) {
                continue;
            }
            // End of VoltDB extension

            int type = c.getConstraintType();
            if (type != Constraint.UNIQUE && type != Constraint.PRIMARY_KEY) {
                continue;
            }

            int[] constraintCols = c.getMainColumns();
            if (constraintCols.length != mainTableCols.length) {
                continue;
            }

            // Exact match in the same order: nothing to permute.
            if (ArrayUtil.areEqual(constraintCols, mainTableCols, mainTableCols.length, true)) {
                return c; // depends on control dependency: [if], data = [none]
            }

            // Same column set in a different order: permute refTableCols into constraint order.
            if (ArrayUtil.areEqualSets(constraintCols, mainTableCols)) {
                int[] newRefTableCols = new int[mainTableCols.length];
                for (int j = 0; j < mainTableCols.length; j++) {
                    int pos = ArrayUtil.find(constraintCols, mainTableCols[j]);
                    newRefTableCols[pos] = refTableCols[j]; // depends on control dependency: [for], data = [j]
                }
                // Copy the permuted order back into the caller's array.
                for (int j = 0; j < mainTableCols.length; j++) {
                    refTableCols[j] = newRefTableCols[j]; // depends on control dependency: [for], data = [j]
                }
                return c; // depends on control dependency: [if], data = [none]
            }
        }
        return null;
    }
}
public class class_name { protected void onValueSelect(String value, boolean fireEvents) { String oldValue = m_selectedValue; selectValue(value); if (fireEvents) { if ((oldValue == null) || !oldValue.equals(value)) { // fire value change only if the the value really changed ValueChangeEvent.<String> fire(this, value); } } } }
public class class_name { protected void onValueSelect(String value, boolean fireEvents) { String oldValue = m_selectedValue; selectValue(value); if (fireEvents) { if ((oldValue == null) || !oldValue.equals(value)) { // fire value change only if the the value really changed ValueChangeEvent.<String> fire(this, value); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public WiFiP2pService getServiceByDevice(WifiP2pDevice device) { if (device == null) { return null; } WfdLog.d(TAG, "groupownerdevice passed to getServiceByDevice: " + device.deviceName + ", " + device.deviceAddress); WfdLog.d(TAG, "servicelist size: " + serviceList.size()); for (WiFiP2pService element : serviceList) { WfdLog.d(TAG, "element in list: " + element.getDevice().deviceName + ", " + element.getDevice().deviceAddress); WfdLog.d(TAG, "element passed : " + device.deviceName + ", " + device.deviceAddress); if (element.getDevice().deviceAddress.equals(device.deviceAddress)) { WfdLog.d(TAG, "getServiceByDevice if satisfied : " + device.deviceAddress + ", " + element.getDevice().deviceAddress); return element; } } WfdLog.d(TAG, "servicelist size: " + serviceList.size()); return null; } }
public class class_name { public WiFiP2pService getServiceByDevice(WifiP2pDevice device) { if (device == null) { return null; // depends on control dependency: [if], data = [none] } WfdLog.d(TAG, "groupownerdevice passed to getServiceByDevice: " + device.deviceName + ", " + device.deviceAddress); WfdLog.d(TAG, "servicelist size: " + serviceList.size()); for (WiFiP2pService element : serviceList) { WfdLog.d(TAG, "element in list: " + element.getDevice().deviceName + ", " + element.getDevice().deviceAddress); // depends on control dependency: [for], data = [element] WfdLog.d(TAG, "element passed : " + device.deviceName + ", " + device.deviceAddress); // depends on control dependency: [for], data = [element] if (element.getDevice().deviceAddress.equals(device.deviceAddress)) { WfdLog.d(TAG, "getServiceByDevice if satisfied : " + device.deviceAddress + ", " + element.getDevice().deviceAddress); // depends on control dependency: [if], data = [none] return element; // depends on control dependency: [if], data = [none] } } WfdLog.d(TAG, "servicelist size: " + serviceList.size()); return null; } }
public class class_name { public ListReservationsResult withReservations(Reservation... reservations) { if (this.reservations == null) { setReservations(new java.util.ArrayList<Reservation>(reservations.length)); } for (Reservation ele : reservations) { this.reservations.add(ele); } return this; } }
public class class_name { public ListReservationsResult withReservations(Reservation... reservations) { if (this.reservations == null) { setReservations(new java.util.ArrayList<Reservation>(reservations.length)); // depends on control dependency: [if], data = [none] } for (Reservation ele : reservations) { this.reservations.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { public Field getFieldById(Long fieldIdParam) { Field field = new Field(fieldIdParam); //Set for Payara server... field.setFieldValue(new MultiChoice()); if(this.serviceTicket != null) { field.setServiceTicket(this.serviceTicket); } return new Field(this.postJson( field, Version1.getById())); } }
public class class_name { public Field getFieldById(Long fieldIdParam) { Field field = new Field(fieldIdParam); //Set for Payara server... field.setFieldValue(new MultiChoice()); if(this.serviceTicket != null) { field.setServiceTicket(this.serviceTicket); // depends on control dependency: [if], data = [(this.serviceTicket] } return new Field(this.postJson( field, Version1.getById())); } }
public class class_name { private Object extractAsRuntime(Field field, String valueStr) { if (field.getType() == Byte.TYPE || field.getType() == Byte.class || field.getType() == Short.TYPE || field.getType() == Short.class || field.getType() == Integer.TYPE || field.getType() == Integer.class || field.getType() == Long.TYPE || field.getType() == Long.class || field.getType() == Float.TYPE || field.getType() == Float.class || field.getType() == Double.TYPE || field.getType() == Double.class) { return extractAsNumber(field, valueStr); } else if (String.class.isAssignableFrom(field.getType())) { return valueStr; } throw new IllegalArgumentException(Utils.format("Invalid type for RUNTIME type: {}", field.getType())); } }
public class class_name { private Object extractAsRuntime(Field field, String valueStr) { if (field.getType() == Byte.TYPE || field.getType() == Byte.class || field.getType() == Short.TYPE || field.getType() == Short.class || field.getType() == Integer.TYPE || field.getType() == Integer.class || field.getType() == Long.TYPE || field.getType() == Long.class || field.getType() == Float.TYPE || field.getType() == Float.class || field.getType() == Double.TYPE || field.getType() == Double.class) { return extractAsNumber(field, valueStr); // depends on control dependency: [if], data = [none] } else if (String.class.isAssignableFrom(field.getType())) { return valueStr; // depends on control dependency: [if], data = [none] } throw new IllegalArgumentException(Utils.format("Invalid type for RUNTIME type: {}", field.getType())); } }
public class class_name { static void assertReflectionEquals(final Object expected, final Object actual) { final List<Comparator> comparators = getComparators(); final ReflectionComparator reflectionComparator = new ReflectionComparator(comparators); final Difference difference = reflectionComparator.getDifference(expected, actual); if (difference != null) { fail(getFailureMessage(difference)); } } }
public class class_name { static void assertReflectionEquals(final Object expected, final Object actual) { final List<Comparator> comparators = getComparators(); final ReflectionComparator reflectionComparator = new ReflectionComparator(comparators); final Difference difference = reflectionComparator.getDifference(expected, actual); if (difference != null) { fail(getFailureMessage(difference)); // depends on control dependency: [if], data = [(difference] } } }
public class class_name { @Override public final void writeStartArray() throws IOException, JsonGenerationException { _verifyValueWrite("start an array"); _writeContext = _writeContext.createChildArrayContext(); if (_cfgPrettyPrinter != null) { _cfgPrettyPrinter.writeStartArray(this); } else { if (_outputTail >= _outputEnd) { _flushBuffer(); } _outputBuffer[_outputTail++] = '['; } } }
public class class_name { @Override public final void writeStartArray() throws IOException, JsonGenerationException { _verifyValueWrite("start an array"); _writeContext = _writeContext.createChildArrayContext(); if (_cfgPrettyPrinter != null) { _cfgPrettyPrinter.writeStartArray(this); } else { if (_outputTail >= _outputEnd) { _flushBuffer(); // depends on control dependency: [if], data = [none] } _outputBuffer[_outputTail++] = '['; } } }
public class class_name { protected void processLinkQueue() { long timeout = this.configuration.getCheckTimeout(); List<Pattern> excludedReferencePatterns = this.configuration.getExcludedReferencePatterns(); // Unqueue till we find an item that needs to be processed. We process an item if: // - it isn't present in the state map // - it is present but not enough time has elapsed since its last check time LinkQueueItem queueItem = null; boolean shouldBeChecked = false; while (!this.linkQueue.isEmpty() && !shouldBeChecked) { queueItem = this.linkQueue.poll(); // Don't check excluded references shouldBeChecked = isExcluded(queueItem.getContentReference(), excludedReferencePatterns); if (!shouldBeChecked) { break; } Map<String, LinkState> contentReferences = this.linkStateManager.getLinkStates().get(queueItem.getLinkReference()); if (contentReferences != null) { LinkState state = contentReferences.get(queueItem.getContentReference()); if (state != null && (System.currentTimeMillis() - state.getLastCheckedTime() <= timeout)) { shouldBeChecked = false; } } } if (shouldBeChecked && queueItem != null) { checkLink(queueItem); } } }
public class class_name { protected void processLinkQueue() { long timeout = this.configuration.getCheckTimeout(); List<Pattern> excludedReferencePatterns = this.configuration.getExcludedReferencePatterns(); // Unqueue till we find an item that needs to be processed. We process an item if: // - it isn't present in the state map // - it is present but not enough time has elapsed since its last check time LinkQueueItem queueItem = null; boolean shouldBeChecked = false; while (!this.linkQueue.isEmpty() && !shouldBeChecked) { queueItem = this.linkQueue.poll(); // depends on control dependency: [while], data = [none] // Don't check excluded references shouldBeChecked = isExcluded(queueItem.getContentReference(), excludedReferencePatterns); // depends on control dependency: [while], data = [none] if (!shouldBeChecked) { break; } Map<String, LinkState> contentReferences = this.linkStateManager.getLinkStates().get(queueItem.getLinkReference()); if (contentReferences != null) { LinkState state = contentReferences.get(queueItem.getContentReference()); if (state != null && (System.currentTimeMillis() - state.getLastCheckedTime() <= timeout)) { shouldBeChecked = false; // depends on control dependency: [if], data = [none] } } } if (shouldBeChecked && queueItem != null) { checkLink(queueItem); // depends on control dependency: [if], data = [none] } } }
public class class_name { protected NodeCacheIterator nodes( String workspaceName, Path path ) { // Determine which filter we should use based upon the workspace name. For the system workspace, // all queryable nodes are included. For all other workspaces, all queryable nodes are included except // for those that are actually stored in the system workspace (e.g., the "/jcr:system" nodes). NodeFilter nodeFilterForWorkspace = nodeFilterForWorkspace(workspaceName); if (nodeFilterForWorkspace == null) return null; // always append a shared nodes filter to the end of the workspace filter, // JCR #14.16 -If a query matches a descendant node of a shared set, it appears in query results only once. NodeFilter compositeFilter = new CompositeNodeFilter(nodeFilterForWorkspace, sharedNodesFilter()); // Then create an iterator over that workspace ... NodeCache cache = repo.getWorkspaceCache(workspaceName); NodeKey startingNode = null; if (path != null) { CachedNode node = getNodeAtPath(path, cache); if (node != null) startingNode = node.getKey(); } else { startingNode = cache.getRootKey(); } if (startingNode != null) { return new NodeCacheIterator(cache, startingNode, compositeFilter); } return null; } }
public class class_name { protected NodeCacheIterator nodes( String workspaceName, Path path ) { // Determine which filter we should use based upon the workspace name. For the system workspace, // all queryable nodes are included. For all other workspaces, all queryable nodes are included except // for those that are actually stored in the system workspace (e.g., the "/jcr:system" nodes). NodeFilter nodeFilterForWorkspace = nodeFilterForWorkspace(workspaceName); if (nodeFilterForWorkspace == null) return null; // always append a shared nodes filter to the end of the workspace filter, // JCR #14.16 -If a query matches a descendant node of a shared set, it appears in query results only once. NodeFilter compositeFilter = new CompositeNodeFilter(nodeFilterForWorkspace, sharedNodesFilter()); // Then create an iterator over that workspace ... NodeCache cache = repo.getWorkspaceCache(workspaceName); NodeKey startingNode = null; if (path != null) { CachedNode node = getNodeAtPath(path, cache); if (node != null) startingNode = node.getKey(); } else { startingNode = cache.getRootKey(); // depends on control dependency: [if], data = [none] } if (startingNode != null) { return new NodeCacheIterator(cache, startingNode, compositeFilter); // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { private int getHeaderViewType(int position) { if (headerContent != null) { if (headerContent.getData() == getItem(position) && (position == 0)) { return headerContent.getViewtype(); } } return PeasyHeaderViewHolder.VIEWTYPE_NOTHING; } }
public class class_name { private int getHeaderViewType(int position) { if (headerContent != null) { if (headerContent.getData() == getItem(position) && (position == 0)) { return headerContent.getViewtype(); // depends on control dependency: [if], data = [none] } } return PeasyHeaderViewHolder.VIEWTYPE_NOTHING; } }
public class class_name { protected final <T> T readMessage(String message, Class<T> clazz) { if ((message == null) || message.isEmpty()) { ClientSocketAdapter.LOGGER.info("Got empty session data"); return null; } try { return this.mapper.readValue(message, clazz); } catch (IOException e1) { ClientSocketAdapter.LOGGER.info("Got invalid session data", e1); return null; } } }
public class class_name { protected final <T> T readMessage(String message, Class<T> clazz) { if ((message == null) || message.isEmpty()) { ClientSocketAdapter.LOGGER.info("Got empty session data"); // depends on control dependency: [if], data = [none] return null; // depends on control dependency: [if], data = [none] } try { return this.mapper.readValue(message, clazz); // depends on control dependency: [try], data = [none] } catch (IOException e1) { ClientSocketAdapter.LOGGER.info("Got invalid session data", e1); return null; } // depends on control dependency: [catch], data = [none] } }
public class class_name { private boolean updateTrgPic(int index) { SubPicture picSrc = subtitleStream.getSubPicture(index); SubPicture picTrg = subPictures[index]; double scaleX = (double) picTrg.getWidth() / picSrc.getWidth(); double scaleY = (double) picTrg.getHeight() / picSrc.getHeight(); double fx; double fy; if (configuration.getApplyFreeScale()) { fx = configuration.getFreeScaleFactorX(); fy = configuration.getFreeScaleFactorY(); } else { fx = 1.0; fy = 1.0; } int wOld = picTrg.getImageWidth(); int hOld = picTrg.getImageHeight(); int wNew = (int) (picSrc.getImageWidth() * scaleX * fx + 0.5); if (wNew < MIN_IMAGE_DIMENSION) { wNew = picSrc.getImageWidth(); } else if (wNew > picTrg.getWidth()) { wNew = picTrg.getWidth(); } int hNew = (int) (picSrc.getImageHeight() * scaleY * fy + 0.5); if (hNew < MIN_IMAGE_DIMENSION) { hNew = picSrc.getImageHeight(); } else if (hNew > picTrg.getHeight()) { hNew = picTrg.getHeight(); } picTrg.setImageWidth(wNew); picTrg.setImageHeight(hNew); if (wNew != wOld) { int xOfs = (int) (picSrc.getXOffset() * scaleX + 0.5); int spaceSrc = (int) ((picSrc.getWidth() - picSrc.getImageWidth()) * scaleX + 0.5); int spaceTrg = picTrg.getWidth() - wNew; xOfs += (spaceTrg - spaceSrc) / 2; if (xOfs < 0) { xOfs = 0; } else if (xOfs + wNew > picTrg.getWidth()) { xOfs = picTrg.getWidth() - wNew; } picTrg.setOfsX(xOfs); } if (hNew != hOld) { int yOfs = (int) (picSrc.getYOffset() * scaleY + 0.5); int spaceSrc = (int) ((picSrc.getHeight() - picSrc.getImageHeight()) * scaleY + 0.5); int spaceTrg = picTrg.getHeight() - hNew; yOfs += (spaceTrg - spaceSrc) / 2; if (yOfs + hNew > picTrg.getHeight()) { yOfs = picTrg.getHeight() - hNew; } picTrg.setOfsY(yOfs); } // was image cropped? return (wNew != wOld) || (hNew != hOld); } }
public class class_name { private boolean updateTrgPic(int index) { SubPicture picSrc = subtitleStream.getSubPicture(index); SubPicture picTrg = subPictures[index]; double scaleX = (double) picTrg.getWidth() / picSrc.getWidth(); double scaleY = (double) picTrg.getHeight() / picSrc.getHeight(); double fx; double fy; if (configuration.getApplyFreeScale()) { fx = configuration.getFreeScaleFactorX(); // depends on control dependency: [if], data = [none] fy = configuration.getFreeScaleFactorY(); // depends on control dependency: [if], data = [none] } else { fx = 1.0; // depends on control dependency: [if], data = [none] fy = 1.0; // depends on control dependency: [if], data = [none] } int wOld = picTrg.getImageWidth(); int hOld = picTrg.getImageHeight(); int wNew = (int) (picSrc.getImageWidth() * scaleX * fx + 0.5); if (wNew < MIN_IMAGE_DIMENSION) { wNew = picSrc.getImageWidth(); // depends on control dependency: [if], data = [none] } else if (wNew > picTrg.getWidth()) { wNew = picTrg.getWidth(); // depends on control dependency: [if], data = [none] } int hNew = (int) (picSrc.getImageHeight() * scaleY * fy + 0.5); if (hNew < MIN_IMAGE_DIMENSION) { hNew = picSrc.getImageHeight(); // depends on control dependency: [if], data = [none] } else if (hNew > picTrg.getHeight()) { hNew = picTrg.getHeight(); // depends on control dependency: [if], data = [none] } picTrg.setImageWidth(wNew); picTrg.setImageHeight(hNew); if (wNew != wOld) { int xOfs = (int) (picSrc.getXOffset() * scaleX + 0.5); int spaceSrc = (int) ((picSrc.getWidth() - picSrc.getImageWidth()) * scaleX + 0.5); int spaceTrg = picTrg.getWidth() - wNew; xOfs += (spaceTrg - spaceSrc) / 2; // depends on control dependency: [if], data = [none] if (xOfs < 0) { xOfs = 0; // depends on control dependency: [if], data = [none] } else if (xOfs + wNew > picTrg.getWidth()) { xOfs = picTrg.getWidth() - wNew; // depends on control dependency: [if], data = [none] } picTrg.setOfsX(xOfs); // depends on control dependency: [if], data = 
[none] } if (hNew != hOld) { int yOfs = (int) (picSrc.getYOffset() * scaleY + 0.5); int spaceSrc = (int) ((picSrc.getHeight() - picSrc.getImageHeight()) * scaleY + 0.5); int spaceTrg = picTrg.getHeight() - hNew; yOfs += (spaceTrg - spaceSrc) / 2; // depends on control dependency: [if], data = [none] if (yOfs + hNew > picTrg.getHeight()) { yOfs = picTrg.getHeight() - hNew; // depends on control dependency: [if], data = [none] } picTrg.setOfsY(yOfs); // depends on control dependency: [if], data = [none] } // was image cropped? return (wNew != wOld) || (hNew != hOld); } }
public class class_name { public String getIconPath(CmsListItem item) { boolean showingUsers = isShowingUsers(); try { CmsPrincipal principal; if (showingUsers) { principal = getCms().readUser((String)item.get(LIST_COLUMN_NAME)); } else { principal = getCms().readGroup((String)item.get(LIST_COLUMN_NAME)); } if (principal.getOuFqn().equals(getCms().getRequestContext().getCurrentUser().getOuFqn())) { if (showingUsers) { return PATH_BUTTONS + "user.png"; } else { return PATH_BUTTONS + "group.png"; } } else { if (showingUsers) { return PATH_BUTTONS + "user_other_ou.png"; } else { return PATH_BUTTONS + "group_other_ou.png"; } } } catch (CmsException e) { if (item.get(LIST_COLUMN_DISPLAY).equals(key(Messages.GUI_LABEL_OVERWRITEALL_0))) { return "commons/" + CmsAccessControlEntry.PRINCIPAL_OVERWRITE_ALL_NAME.toLowerCase() + ".png"; } else if (item.get(LIST_COLUMN_DISPLAY).equals(key(Messages.GUI_LABEL_ALLOTHERS_0))) { return "commons/" + CmsAccessControlEntry.PRINCIPAL_ALL_OTHERS_NAME.toLowerCase() + ".png"; } else if (showingUsers) { return PATH_BUTTONS + "user.png"; } else { return PATH_BUTTONS + "group.png"; } } } }
public class class_name { public String getIconPath(CmsListItem item) { boolean showingUsers = isShowingUsers(); try { CmsPrincipal principal; if (showingUsers) { principal = getCms().readUser((String)item.get(LIST_COLUMN_NAME)); // depends on control dependency: [if], data = [none] } else { principal = getCms().readGroup((String)item.get(LIST_COLUMN_NAME)); // depends on control dependency: [if], data = [none] } if (principal.getOuFqn().equals(getCms().getRequestContext().getCurrentUser().getOuFqn())) { if (showingUsers) { return PATH_BUTTONS + "user.png"; // depends on control dependency: [if], data = [none] } else { return PATH_BUTTONS + "group.png"; // depends on control dependency: [if], data = [none] } } else { if (showingUsers) { return PATH_BUTTONS + "user_other_ou.png"; // depends on control dependency: [if], data = [none] } else { return PATH_BUTTONS + "group_other_ou.png"; // depends on control dependency: [if], data = [none] } } } catch (CmsException e) { if (item.get(LIST_COLUMN_DISPLAY).equals(key(Messages.GUI_LABEL_OVERWRITEALL_0))) { return "commons/" + CmsAccessControlEntry.PRINCIPAL_OVERWRITE_ALL_NAME.toLowerCase() + ".png"; // depends on control dependency: [if], data = [none] } else if (item.get(LIST_COLUMN_DISPLAY).equals(key(Messages.GUI_LABEL_ALLOTHERS_0))) { return "commons/" + CmsAccessControlEntry.PRINCIPAL_ALL_OTHERS_NAME.toLowerCase() + ".png"; // depends on control dependency: [if], data = [none] } else if (showingUsers) { return PATH_BUTTONS + "user.png"; // depends on control dependency: [if], data = [none] } else { return PATH_BUTTONS + "group.png"; // depends on control dependency: [if], data = [none] } } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static <E, T> Page<T> createPage(final Page<? extends E> sourcePage, final PageEntryTransformer<T, E> transformer) { if (sourcePage == null) { return null; } if (transformer == null) { return cast(sourcePage); } return createPage(sourcePage.getEntries(), sourcePage.getPageRequest(), sourcePage.getTotalSize(), transformer); } }
public class class_name { public static <E, T> Page<T> createPage(final Page<? extends E> sourcePage, final PageEntryTransformer<T, E> transformer) { if (sourcePage == null) { return null; // depends on control dependency: [if], data = [none] } if (transformer == null) { return cast(sourcePage); // depends on control dependency: [if], data = [none] } return createPage(sourcePage.getEntries(), sourcePage.getPageRequest(), sourcePage.getTotalSize(), transformer); } }
public class class_name { public boolean addToSet(State<S, EP> state) { boolean first = list.isEmpty(); if (first || !setContents.get(state.getId())) { list.add(state); setContents.set(state.getId()); } return first; } }
public class class_name { public boolean addToSet(State<S, EP> state) { boolean first = list.isEmpty(); if (first || !setContents.get(state.getId())) { list.add(state); // depends on control dependency: [if], data = [none] setContents.set(state.getId()); // depends on control dependency: [if], data = [none] } return first; } }
public class class_name { public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) { String key = reader.getAttribute("key"); String type = reader.getAttribute("type"); String value = reader.getAttribute("value"); XExtension extension = null; String extensionString = reader.getAttribute("extension"); if (extensionString != null && extensionString.length() > 0) { URI uri = URI.create(extensionString); extension = XExtensionManager.instance().getByUri(uri); } XFactory factory = XFactoryRegistry.instance().currentDefault(); XAttribute attribute = XAttributeUtils.composeAttribute(factory, key, value, type, extension); XAttribute parent = (XAttribute) context.get(PARENT); if (parent != null && parent instanceof XAttributeCollection) { ((XAttributeCollection) parent).addToCollection(attribute); } System.err.println("8"); if (reader.hasMoreChildren()) { System.err.println("9"); reader.moveDown(); Object oldParent = context.get(PARENT); context.put(PARENT, attribute); XAttributeMap metaAttributes = (XAttributeMap) context .convertAnother(attribute, XAttributeMap.class, XesXStreamPersistency.attributeMapConverter); context.put(PARENT, oldParent); reader.moveUp(); attribute.setAttributes(metaAttributes); } System.err.println("done"); return attribute; } }
public class class_name { public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) { String key = reader.getAttribute("key"); String type = reader.getAttribute("type"); String value = reader.getAttribute("value"); XExtension extension = null; String extensionString = reader.getAttribute("extension"); if (extensionString != null && extensionString.length() > 0) { URI uri = URI.create(extensionString); extension = XExtensionManager.instance().getByUri(uri); // depends on control dependency: [if], data = [none] } XFactory factory = XFactoryRegistry.instance().currentDefault(); XAttribute attribute = XAttributeUtils.composeAttribute(factory, key, value, type, extension); XAttribute parent = (XAttribute) context.get(PARENT); if (parent != null && parent instanceof XAttributeCollection) { ((XAttributeCollection) parent).addToCollection(attribute); // depends on control dependency: [if], data = [none] } System.err.println("8"); if (reader.hasMoreChildren()) { System.err.println("9"); // depends on control dependency: [if], data = [none] reader.moveDown(); // depends on control dependency: [if], data = [none] Object oldParent = context.get(PARENT); context.put(PARENT, attribute); // depends on control dependency: [if], data = [none] XAttributeMap metaAttributes = (XAttributeMap) context .convertAnother(attribute, XAttributeMap.class, XesXStreamPersistency.attributeMapConverter); context.put(PARENT, oldParent); // depends on control dependency: [if], data = [none] reader.moveUp(); // depends on control dependency: [if], data = [none] attribute.setAttributes(metaAttributes); // depends on control dependency: [if], data = [none] } System.err.println("done"); return attribute; } }
public class class_name { public SIMPPubSubOutboundTransmitControllable getPubSubOutboundTransmitControllable() { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "getPubSubOutboundTransmitControllable"); SIMPPubSubOutboundTransmitControllable outbound = null; //there will only be a stream set if the PSOH is not null if(_outputHandler!=null) { //at the moment there should be just one stream set in the //iterator SIMPIterator iterator = getPubSubOutboundTransmitIterator(); if(iterator.hasNext()) { outbound = (SIMPPubSubOutboundTransmitControllable)iterator.next(); } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "getPubSubOutboundTransmitControllable", outbound); return outbound; } }
public class class_name { public SIMPPubSubOutboundTransmitControllable getPubSubOutboundTransmitControllable() { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "getPubSubOutboundTransmitControllable"); SIMPPubSubOutboundTransmitControllable outbound = null; //there will only be a stream set if the PSOH is not null if(_outputHandler!=null) { //at the moment there should be just one stream set in the //iterator SIMPIterator iterator = getPubSubOutboundTransmitIterator(); if(iterator.hasNext()) { outbound = (SIMPPubSubOutboundTransmitControllable)iterator.next(); // depends on control dependency: [if], data = [none] } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "getPubSubOutboundTransmitControllable", outbound); return outbound; } }
public class class_name { public JsonWriter values(Iterable<String> values) { for (String value : values) { value(value); } return this; } }
public class class_name { public JsonWriter values(Iterable<String> values) { for (String value : values) { value(value); // depends on control dependency: [for], data = [value] } return this; } }
public class class_name { private int buildLookUpTable() { int i = 0; int incDen = Math.round(8F * radiusMin); // increment denominator lut = new int[2][incDen][depth]; for( int radius = radiusMin; radius <= radiusMax; radius = radius + radiusInc ) { i = 0; for( int incNun = 0; incNun < incDen; incNun++ ) { double angle = (2 * Math.PI * (double) incNun) / (double) incDen; int indexR = (radius - radiusMin) / radiusInc; int rcos = (int) Math.round((double) radius * Math.cos(angle)); int rsin = (int) Math.round((double) radius * Math.sin(angle)); if ((i == 0) | (rcos != lut[0][i][indexR]) & (rsin != lut[1][i][indexR])) { lut[0][i][indexR] = rcos; lut[1][i][indexR] = rsin; i++; } } } return i; } }
public class class_name { private int buildLookUpTable() { int i = 0; int incDen = Math.round(8F * radiusMin); // increment denominator lut = new int[2][incDen][depth]; for( int radius = radiusMin; radius <= radiusMax; radius = radius + radiusInc ) { i = 0; // depends on control dependency: [for], data = [none] for( int incNun = 0; incNun < incDen; incNun++ ) { double angle = (2 * Math.PI * (double) incNun) / (double) incDen; int indexR = (radius - radiusMin) / radiusInc; int rcos = (int) Math.round((double) radius * Math.cos(angle)); int rsin = (int) Math.round((double) radius * Math.sin(angle)); if ((i == 0) | (rcos != lut[0][i][indexR]) & (rsin != lut[1][i][indexR])) { lut[0][i][indexR] = rcos; // depends on control dependency: [if], data = [none] lut[1][i][indexR] = rsin; // depends on control dependency: [if], data = [none] i++; // depends on control dependency: [if], data = [none] } } } return i; } }
public class class_name { private static int computeE1(int a, int b, int c, int d, int e, int f) { if (d == b && e != c || b == f && e != a) { return b; } return e; } }
public class class_name { private static int computeE1(int a, int b, int c, int d, int e, int f) { if (d == b && e != c || b == f && e != a) { return b; // depends on control dependency: [if], data = [none] } return e; } }
public class class_name { public void synchronize(List<QueryControllerEntity> queryEntities) { if (queryEntities.size() > 0) { for (QueryControllerEntity queryEntity : queryEntities) { if (queryEntity instanceof QueryControllerGroup) { QueryControllerGroup group = (QueryControllerGroup) queryEntity; QueryGroupTreeElement queryGroupEle = new QueryGroupTreeElement(group.getID()); Vector<QueryControllerQuery> queries = group.getQueries(); for (QueryControllerQuery query : queries) { QueryTreeElement queryTreeEle = new QueryTreeElement(query.getID(), query.getQuery()); insertNodeInto(queryTreeEle, queryGroupEle, queryGroupEle.getChildCount()); } insertNodeInto(queryGroupEle, (DefaultMutableTreeNode) root, root.getChildCount()); } else { QueryControllerQuery query = (QueryControllerQuery) queryEntity; QueryTreeElement queryTreeEle = new QueryTreeElement(query.getID(), query.getQuery()); insertNodeInto(queryTreeEle, (DefaultMutableTreeNode) root, root.getChildCount()); } } } } }
public class class_name { public void synchronize(List<QueryControllerEntity> queryEntities) { if (queryEntities.size() > 0) { for (QueryControllerEntity queryEntity : queryEntities) { if (queryEntity instanceof QueryControllerGroup) { QueryControllerGroup group = (QueryControllerGroup) queryEntity; QueryGroupTreeElement queryGroupEle = new QueryGroupTreeElement(group.getID()); Vector<QueryControllerQuery> queries = group.getQueries(); for (QueryControllerQuery query : queries) { QueryTreeElement queryTreeEle = new QueryTreeElement(query.getID(), query.getQuery()); insertNodeInto(queryTreeEle, queryGroupEle, queryGroupEle.getChildCount()); // depends on control dependency: [for], data = [query] } insertNodeInto(queryGroupEle, (DefaultMutableTreeNode) root, root.getChildCount()); // depends on control dependency: [if], data = [none] } else { QueryControllerQuery query = (QueryControllerQuery) queryEntity; QueryTreeElement queryTreeEle = new QueryTreeElement(query.getID(), query.getQuery()); insertNodeInto(queryTreeEle, (DefaultMutableTreeNode) root, root.getChildCount()); // depends on control dependency: [if], data = [none] } } } } }
public class class_name { private void setDockerHostAddressProperty(DockerAccessContext dockerAccessContext, String dockerUrl) throws MojoFailureException { Properties props = dockerAccessContext.getProjectProperties(); if (props.getProperty("docker.host.address") == null) { final String host; try { URI uri = new URI(dockerUrl); if (uri.getHost() == null && (uri.getScheme().equals("unix") || uri.getScheme().equals("npipe"))) { host = "localhost"; } else { host = uri.getHost(); } } catch (URISyntaxException e) { throw new MojoFailureException("Cannot parse " + dockerUrl + " as URI: " + e.getMessage(), e); } props.setProperty("docker.host.address", host == null ? "" : host); } } }
public class class_name { private void setDockerHostAddressProperty(DockerAccessContext dockerAccessContext, String dockerUrl) throws MojoFailureException { Properties props = dockerAccessContext.getProjectProperties(); if (props.getProperty("docker.host.address") == null) { final String host; try { URI uri = new URI(dockerUrl); if (uri.getHost() == null && (uri.getScheme().equals("unix") || uri.getScheme().equals("npipe"))) { host = "localhost"; // depends on control dependency: [if], data = [none] } else { host = uri.getHost(); // depends on control dependency: [if], data = [none] } } catch (URISyntaxException e) { throw new MojoFailureException("Cannot parse " + dockerUrl + " as URI: " + e.getMessage(), e); } props.setProperty("docker.host.address", host == null ? "" : host); } } }
public class class_name {
    /**
     * Replaces the value options with a defensive copy of the given collection,
     * or clears them when {@code null} is supplied.
     *
     * @param valueOptions the options to store, or {@code null} to unset
     */
    public void setValueOptions(java.util.Collection<String> valueOptions) {
        // Single assignment: null means "unset", otherwise copy into the SDK list.
        this.valueOptions = (valueOptions == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<String>(valueOptions);
    }
}
public class class_name { public void setValueOptions(java.util.Collection<String> valueOptions) { if (valueOptions == null) { this.valueOptions = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.valueOptions = new com.amazonaws.internal.SdkInternalList<String>(valueOptions); } }
public class class_name {
    /**
     * Inserts a node into this PATRICIA trie.
     *
     * <p>Descends from the root following the node's key bits while the bit
     * indices strictly increase and stay below the new node's bit index, then
     * splices the new node in. A child pointer that refers back to {@code node}
     * itself serves as the trie's customary self/back edge.
     *
     * @param node the pre-initialized node (key and bit index already assigned)
     */
    private void insertNode(PatriciaNode<V> node) {
        PatriciaNode<V> current = root.getLeft();
        PatriciaNode<V> parent = root;
        // Walk down: stop when bit indices no longer strictly increase
        // or when we reach the new node's bit position.
        while (parent.getBit() < current.getBit() && current.getBit() < node.getBit()) {
            parent = current;
            if (!keyMapper.isSet(current.getBit(), node.getKey())) {
                current = current.getLeft();   // key bit clear -> descend left
            } else {
                current = current.getRight();  // key bit set -> descend right
            }
        }
        // Wire the new node's children: one side points back at itself
        // (self edge), the other at the node located by the descent.
        if (!keyMapper.isSet(node.getBit(), node.getKey())) {
            node.setLeft(node);
            node.setRight(current);
        } else {
            node.setLeft(current);
            node.setRight(node);
        }
        // Finally attach the new node under its parent on the side
        // selected by the parent's bit of the new key.
        if (!keyMapper.isSet(parent.getBit(), node.getKey())) {
            parent.setLeft(node);
        } else {
            parent.setRight(node);
        }
    }
}
public class class_name {
    /**
     * Inserts a node into this PATRICIA trie.
     *
     * <p>Descends from the root following the node's key bits while the bit
     * indices strictly increase and stay below the new node's bit index, then
     * splices the new node in. A child pointer that refers back to {@code node}
     * itself serves as the trie's customary self/back edge.
     *
     * @param node the pre-initialized node (key and bit index already assigned)
     */
    private void insertNode(PatriciaNode<V> node) {
        PatriciaNode<V> current = root.getLeft();
        PatriciaNode<V> parent = root;
        // Walk down: stop when bit indices no longer strictly increase
        // or when we reach the new node's bit position.
        while (parent.getBit() < current.getBit() && current.getBit() < node.getBit()) {
            parent = current;
            if (!keyMapper.isSet(current.getBit(), node.getKey())) {
                current = current.getLeft();   // key bit clear -> descend left
            } else {
                current = current.getRight();  // key bit set -> descend right
            }
        }
        // Wire the new node's children: one side points back at itself
        // (self edge), the other at the node located by the descent.
        if (!keyMapper.isSet(node.getBit(), node.getKey())) {
            node.setLeft(node);
            node.setRight(current);
        } else {
            node.setLeft(current);
            node.setRight(node);
        }
        // Finally attach the new node under its parent on the side
        // selected by the parent's bit of the new key.
        if (!keyMapper.isSet(parent.getBit(), node.getKey())) {
            parent.setLeft(node);
        } else {
            parent.setRight(node);
        }
    }
}
public class class_name {
    /**
     * Obfuscates the given string character by character using the supplied rules.
     *
     * <p>For each character that has a rule: if the current position is listed in
     * the rule's indexes and the rule carries an operation, the operation result
     * is appended; otherwise the rule's replacement character is appended.
     * Characters without a rule are copied through unchanged.
     *
     * @param rules       per-character obfuscation rules
     * @param toObfuscate the string to obfuscate
     * @return the obfuscated string
     */
    public static String obfuscateWith(
        final BiMap<Character, ObfuscationOperationRule<Character, Character>> rules,
        final String toObfuscate)
    {
        final StringBuilder sb = new StringBuilder();
        for (int i = 0; i < toObfuscate.length(); i++) {
            final char currentCharacter = toObfuscate.charAt(i);
            final Character asCharacter = Character.valueOf(currentCharacter);
            if (rules.containsKey(asCharacter)) {
                final ObfuscationOperationRule<Character, Character> obfuscationOperationRule = rules
                    .get(asCharacter);
                final Set<Integer> indexes = obfuscationOperationRule.getIndexes();
                final Operation operation = obfuscationOperationRule.getOperation();
                if (indexes.contains(Integer.valueOf(i)) && operation != null) {
                    sb.append(Operation.operate(currentCharacter, operation));
                    continue;
                }
                // NOTE(review): replaceWith may be null here, which would append
                // the literal text "null" — confirm this is intended.
                final Character replaceWith = obfuscationOperationRule.getReplaceWith();
                sb.append(replaceWith);
            } else {
                // Fix: only build the String form of the character in the branch
                // that uses it, instead of allocating it on every iteration.
                sb.append(Character.toString(currentCharacter));
            }
        }
        return sb.toString();
    }
}
public class class_name { public static String obfuscateWith( final BiMap<Character, ObfuscationOperationRule<Character, Character>> rules, final String toObfuscate) { final StringBuilder sb = new StringBuilder(); for (int i = 0; i < toObfuscate.length(); i++) { final char currentCharacter = toObfuscate.charAt(i); final Character asCharacter = Character.valueOf(currentCharacter); final String charAsString = Character.toString(currentCharacter); if (rules.containsKey(asCharacter)) { final ObfuscationOperationRule<Character, Character> obfuscationOperationRule = rules .get(asCharacter); final Set<Integer> indexes = obfuscationOperationRule.getIndexes(); final Operation operation = obfuscationOperationRule.getOperation(); if (indexes.contains(Integer.valueOf(i)) && operation != null) { sb.append(Operation.operate(currentCharacter, operation)); // depends on control dependency: [if], data = [none] continue; } final Character replaceWith = obfuscationOperationRule.getReplaceWith(); sb.append(replaceWith); // depends on control dependency: [if], data = [none] } else { sb.append(charAsString); // depends on control dependency: [if], data = [none] } } return sb.toString(); } }
public class class_name {
    /**
     * Builds an HTTP Basic authentication header value for the given credentials.
     *
     * @param username the user name (must not be null)
     * @param password the password (must not be null)
     * @return {@code BASIC_AUTH_PREFIX} followed by base64("username:password")
     * @throws NullPointerException     if either argument is null
     * @throws IllegalArgumentException if encoding fails
     */
    public static String encodeBasicAuth(final String username, final String password) {
        requireNonNull(username, "username");
        requireNonNull(password, "password");
        final String auth = username + ':' + password;
        String encoded;
        try {
            // encodeToString replaces the manual encode + new String(byte[], UTF_8)
            // round trip; Base64 output is pure ASCII, so the result is identical.
            encoded = Base64.getEncoder().encodeToString(auth.getBytes(UTF_8));
        } catch (final IllegalArgumentException e) {
            throw new IllegalArgumentException("Failed to encode basic authentication token", e);
        }
        return BASIC_AUTH_PREFIX + encoded;
    }
}
public class class_name { public static String encodeBasicAuth(final String username, final String password) { requireNonNull(username, "username"); requireNonNull(password, "password"); final String auth = username + ':' + password; byte[] encoded; try { encoded = Base64.getEncoder().encode(auth.getBytes(UTF_8)); // depends on control dependency: [try], data = [none] } catch (final IllegalArgumentException e) { throw new IllegalArgumentException("Failed to encode basic authentication token", e); } // depends on control dependency: [catch], data = [none] return BASIC_AUTH_PREFIX + new String(encoded, UTF_8); } }
public class class_name { protected final Node getInternal(int index) { //Check preconditions for the index variable if (index >= size()) { throw new IndexOutOfBoundsException("Index out of bounds: " + index + " >= " + size()); } if (index < 0) { throw new IndexOutOfBoundsException("Index out of bounds: " + index + " < 0"); } if (index == 0) { return first.next; } else if (index == (size() - 1)) { return last; } else { Node pointer = first.next; for (int i = 0; i < index; i++) { pointer = pointer.next; } return pointer; } } }
public class class_name { protected final Node getInternal(int index) { //Check preconditions for the index variable if (index >= size()) { throw new IndexOutOfBoundsException("Index out of bounds: " + index + " >= " + size()); } if (index < 0) { throw new IndexOutOfBoundsException("Index out of bounds: " + index + " < 0"); } if (index == 0) { return first.next; // depends on control dependency: [if], data = [none] } else if (index == (size() - 1)) { return last; // depends on control dependency: [if], data = [none] } else { Node pointer = first.next; for (int i = 0; i < index; i++) { pointer = pointer.next; // depends on control dependency: [for], data = [none] } return pointer; // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Collects the tasks of the named component, enriched with runtime details
     * (host, port, status, uptime, errors) from the topology's task summaries,
     * ordered by task id.
     *
     * @param topologyInfo  the topology to inspect
     * @param componentName the component whose tasks are wanted
     * @return task entities sorted by task id
     */
    public static List<TaskEntity> getTaskEntities(TopologyInfo topologyInfo, String componentName) {
        TreeMap<Integer, TaskEntity> taskById = new TreeMap<>();
        // Seed the map with one entity per task id of the matching component.
        for (ComponentSummary component : topologyInfo.get_components()) {
            String name = component.get_name();
            String type = component.get_type();
            if (componentName.equals(name)) {
                for (int taskId : component.get_taskIds()) {
                    taskById.put(taskId, new TaskEntity(taskId, name, type));
                }
            }
        }
        // Fill in runtime details for the tasks we collected; a single get
        // with a null check replaces the containsKey + get pair.
        for (TaskSummary summary : topologyInfo.get_tasks()) {
            TaskEntity entity = taskById.get(summary.get_taskId());
            if (entity != null) {
                entity.setHost(summary.get_host());
                entity.setPort(summary.get_port());
                entity.setStatus(summary.get_status());
                entity.setUptime(summary.get_uptime());
                entity.setErrors(summary.get_errors());
            }
        }
        return new ArrayList<>(taskById.values());
    }
}
public class class_name { public static List<TaskEntity> getTaskEntities(TopologyInfo topologyInfo, String componentName) { TreeMap<Integer, TaskEntity> tasks = new TreeMap<>(); for (ComponentSummary cs : topologyInfo.get_components()) { String compName = cs.get_name(); String type = cs.get_type(); if (componentName.equals(compName)) { for (int id : cs.get_taskIds()) { tasks.put(id, new TaskEntity(id, compName, type)); // depends on control dependency: [for], data = [id] } } } for (TaskSummary ts : topologyInfo.get_tasks()) { if (tasks.containsKey(ts.get_taskId())) { TaskEntity te = tasks.get(ts.get_taskId()); te.setHost(ts.get_host()); // depends on control dependency: [if], data = [none] te.setPort(ts.get_port()); // depends on control dependency: [if], data = [none] te.setStatus(ts.get_status()); // depends on control dependency: [if], data = [none] te.setUptime(ts.get_uptime()); // depends on control dependency: [if], data = [none] te.setErrors(ts.get_errors()); // depends on control dependency: [if], data = [none] } } return new ArrayList<>(tasks.values()); } }
public class class_name {
    /**
     * Appends the given weight/capacity entries to this request, creating the
     * backing list on first use.
     *
     * @param desiredWeightsAndCapacities entries to append
     * @return this request, for call chaining
     */
    public UpdateEndpointWeightsAndCapacitiesRequest withDesiredWeightsAndCapacities(DesiredWeightAndCapacity... desiredWeightsAndCapacities) {
        if (this.desiredWeightsAndCapacities == null) {
            // Lazily create the list, sized to the incoming varargs.
            setDesiredWeightsAndCapacities(new java.util.ArrayList<DesiredWeightAndCapacity>(desiredWeightsAndCapacities.length));
        }
        for (int i = 0; i < desiredWeightsAndCapacities.length; i++) {
            this.desiredWeightsAndCapacities.add(desiredWeightsAndCapacities[i]);
        }
        return this;
    }
}
public class class_name { public UpdateEndpointWeightsAndCapacitiesRequest withDesiredWeightsAndCapacities(DesiredWeightAndCapacity... desiredWeightsAndCapacities) { if (this.desiredWeightsAndCapacities == null) { setDesiredWeightsAndCapacities(new java.util.ArrayList<DesiredWeightAndCapacity>(desiredWeightsAndCapacities.length)); // depends on control dependency: [if], data = [none] } for (DesiredWeightAndCapacity ele : desiredWeightsAndCapacities) { this.desiredWeightsAndCapacities.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name {
    /**
     * Looks up the first registered parser for the given group, falling back to
     * a {@code NullWarnigsParser} when the group is empty or unknown.
     *
     * @param group the parser group id, may be null or empty
     * @return a parser for the group, never null
     */
    public static AbstractWarningsParser getParser(@CheckForNull final String group) {
        // Empty/null group: return the null-object parser immediately.
        if (StringUtils.isEmpty(group)) {
            return new NullWarnigsParser("NULL");
        }
        List<AbstractWarningsParser> parsers = ParserRegistry.getParsers(group);
        // Guard clause instead of if/else: unknown group -> null-object parser.
        if (parsers.isEmpty()) {
            return new NullWarnigsParser(group);
        }
        return parsers.get(0);
    }
}
public class class_name { public static AbstractWarningsParser getParser(@CheckForNull final String group) { if (StringUtils.isEmpty(group)) { return new NullWarnigsParser("NULL"); // depends on control dependency: [if], data = [none] } List<AbstractWarningsParser> parsers = ParserRegistry.getParsers(group); if (parsers.isEmpty()) { return new NullWarnigsParser(group); // depends on control dependency: [if], data = [none] } else { return parsers.get(0); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Command handler that enables the Gig nature on every project in the
     * current selection: adds the nature id, the Gig classpath container, and
     * a ".gig_generated" source folder, reporting progress along the way.
     *
     * @param event the execution event carrying the current selection
     * @return always {@code null} (no handler result)
     * @throws ExecutionException per the handler contract (not thrown directly here)
     */
    @Override
    public Object execute(ExecutionEvent event) throws ExecutionException {
        IStructuredSelection selection = (IStructuredSelection) HandlerUtil.getCurrentSelection(event);
        for (Iterator<?> iterator = selection.iterator(); iterator.hasNext(); ) {
            Object object = iterator.next();
            IProject project;
            if (object instanceof IProject) {
                project = (IProject) object;
            } else if (object instanceof IJavaProject) {
                project = ((IJavaProject) object).getProject();
            } else {
                // Skip selection elements that are not (Java) projects.
                continue;
            }
            final IProject proj = project;
            WorkspaceModifyOperation operation = new WorkspaceModifyOperation() {
                @Override
                protected void execute(IProgressMonitor monitor) throws CoreException,
                        InvocationTargetException, InterruptedException {
                    // Enabling Gig nature.
                    monitor.beginTask("Enabling Gig", 3);
                    monitor.subTask("Enabling Gig nature");
                    IProjectDescription description = proj.getDescription();
                    String[] natures = description.getNatureIds();
                    // Append the Gig nature id to the existing nature list.
                    String[] newNatures = new String[natures.length + 1];
                    System.arraycopy(natures, 0, newNatures, 0, natures.length);
                    newNatures[natures.length] = GigNature.NATURE_ID;
                    description.setNatureIds(newNatures);
                    proj.setDescription(description, null);
                    monitor.worked(1);
                    // Adding Gig libraries to the classpath.
                    monitor.subTask("Adding Gig libraries to the classpath");
                    IJavaProject javaProject = JavaCore.create(proj);
                    IClasspathEntry[] classpath = javaProject.getRawClasspath();
                    IPath path = new Path("org.eiichiro.gig.eclipse.core.GIG_CONTAINER");
                    boolean found = false;
                    // Only add the container if it is not already on the classpath.
                    for (IClasspathEntry entry : classpath) {
                        if (entry.getPath().equals(path)) {
                            found = true;
                            break;
                        }
                    }
                    if (!found) {
                        IClasspathEntry container = JavaCore.newContainerEntry(path, false);
                        IClasspathEntry[] entries = new IClasspathEntry[classpath.length + 1];
                        System.arraycopy(classpath, 0, entries, 0, classpath.length);
                        entries[classpath.length] = container;
                        javaProject.setRawClasspath(entries, monitor);
                    }
                    // Creating Gig-generated source directory.
                    monitor.subTask("Creating Gig-generated source directory");
                    IFolder folder = proj.getFolder(".gig_generated");
                    if (!folder.exists()) {
                        folder.create(true, true, monitor);
                        // Re-read the raw classpath: it may have been replaced above.
                        IClasspathEntry[] c = javaProject.getRawClasspath();
                        IClasspathEntry[] entries = new IClasspathEntry[c.length + 1];
                        System.arraycopy(c, 0, entries, 0, c.length);
                        IClasspathEntry source = JavaCore.newSourceEntry(folder.getFullPath());
                        entries[c.length] = source;
                        javaProject.setRawClasspath(entries, monitor);
                    }
                    monitor.worked(1);
                    monitor.done();
                }
            };
            IProgressService service = PlatformUI.getWorkbench().getProgressService();
            try {
                service.run(true, true, operation);
            } catch (InvocationTargetException e) {
                // Log and surface the failure to the user via an error dialog.
                IStatus status = new Status(IStatus.ERROR, GigUIPlugin.PLUGIN_ID,
                        e.getTargetException().getMessage(), e.getTargetException());
                GigUIPlugin.getDefault().getLog().log(status);
                ErrorDialog.openError(HandlerUtil.getActiveShell(event), "Gig",
                        "Failed to enable Gig in " + project.getName() + " project", status);
            } catch (InterruptedException e) {}
            // NOTE(review): InterruptedException is silently ignored above —
            // presumably user cancellation of the progress dialog; confirm intended.
        }
        return null;
    }
}
public class class_name {
    /**
     * Command handler that enables the Gig nature on every project in the
     * current selection: adds the nature id, the Gig classpath container, and
     * a ".gig_generated" source folder, reporting progress along the way.
     *
     * @param event the execution event carrying the current selection
     * @return always {@code null} (no handler result)
     * @throws ExecutionException per the handler contract (not thrown directly here)
     */
    @Override
    public Object execute(ExecutionEvent event) throws ExecutionException {
        IStructuredSelection selection = (IStructuredSelection) HandlerUtil.getCurrentSelection(event);
        for (Iterator<?> iterator = selection.iterator(); iterator.hasNext(); ) {
            Object object = iterator.next();
            IProject project;
            if (object instanceof IProject) {
                project = (IProject) object;
            } else if (object instanceof IJavaProject) {
                project = ((IJavaProject) object).getProject();
            } else {
                // Skip selection elements that are not (Java) projects.
                continue;
            }
            final IProject proj = project;
            WorkspaceModifyOperation operation = new WorkspaceModifyOperation() {
                @Override
                protected void execute(IProgressMonitor monitor) throws CoreException,
                        InvocationTargetException, InterruptedException {
                    // Enabling Gig nature.
                    monitor.beginTask("Enabling Gig", 3);
                    monitor.subTask("Enabling Gig nature");
                    IProjectDescription description = proj.getDescription();
                    String[] natures = description.getNatureIds();
                    // Append the Gig nature id to the existing nature list.
                    String[] newNatures = new String[natures.length + 1];
                    System.arraycopy(natures, 0, newNatures, 0, natures.length);
                    newNatures[natures.length] = GigNature.NATURE_ID;
                    description.setNatureIds(newNatures);
                    proj.setDescription(description, null);
                    monitor.worked(1);
                    // Adding Gig libraries to the classpath.
                    monitor.subTask("Adding Gig libraries to the classpath");
                    IJavaProject javaProject = JavaCore.create(proj);
                    IClasspathEntry[] classpath = javaProject.getRawClasspath();
                    IPath path = new Path("org.eiichiro.gig.eclipse.core.GIG_CONTAINER");
                    boolean found = false;
                    // Only add the container if it is not already on the classpath.
                    for (IClasspathEntry entry : classpath) {
                        if (entry.getPath().equals(path)) {
                            found = true;
                            break;
                        }
                    }
                    if (!found) {
                        IClasspathEntry container = JavaCore.newContainerEntry(path, false);
                        IClasspathEntry[] entries = new IClasspathEntry[classpath.length + 1];
                        System.arraycopy(classpath, 0, entries, 0, classpath.length);
                        entries[classpath.length] = container;
                        javaProject.setRawClasspath(entries, monitor);
                    }
                    // Creating Gig-generated source directory.
                    monitor.subTask("Creating Gig-generated source directory");
                    IFolder folder = proj.getFolder(".gig_generated");
                    if (!folder.exists()) {
                        folder.create(true, true, monitor);
                        // Re-read the raw classpath: it may have been replaced above.
                        IClasspathEntry[] c = javaProject.getRawClasspath();
                        IClasspathEntry[] entries = new IClasspathEntry[c.length + 1];
                        System.arraycopy(c, 0, entries, 0, c.length);
                        IClasspathEntry source = JavaCore.newSourceEntry(folder.getFullPath());
                        entries[c.length] = source;
                        javaProject.setRawClasspath(entries, monitor);
                    }
                    monitor.worked(1);
                    monitor.done();
                }
            };
            IProgressService service = PlatformUI.getWorkbench().getProgressService();
            try {
                service.run(true, true, operation);
            } catch (InvocationTargetException e) {
                // Log and surface the failure to the user via an error dialog.
                IStatus status = new Status(IStatus.ERROR, GigUIPlugin.PLUGIN_ID,
                        e.getTargetException().getMessage(), e.getTargetException());
                GigUIPlugin.getDefault().getLog().log(status);
                ErrorDialog.openError(HandlerUtil.getActiveShell(event), "Gig",
                        "Failed to enable Gig in " + project.getName() + " project", status);
            } catch (InterruptedException e) {}
            // NOTE(review): InterruptedException is silently ignored above —
            // presumably user cancellation of the progress dialog; confirm intended.
        }
        return null;
    }
}
public class class_name { public Map<String, MethodInfoList> asMap() { // Note that MethodInfoList extends InfoList rather than MappableInfoList, because one // name can be shared by multiple MethodInfo objects (so asMap() needs to be of type // Map<String, MethodInfoList> rather than Map<String, MethodInfo>) final Map<String, MethodInfoList> methodNameToMethodInfoList = new HashMap<>(); for (final MethodInfo methodInfo : this) { final String name = methodInfo.getName(); MethodInfoList methodInfoList = methodNameToMethodInfoList.get(name); if (methodInfoList == null) { methodInfoList = new MethodInfoList(1); methodNameToMethodInfoList.put(name, methodInfoList); } methodInfoList.add(methodInfo); } return methodNameToMethodInfoList; } }
public class class_name { public Map<String, MethodInfoList> asMap() { // Note that MethodInfoList extends InfoList rather than MappableInfoList, because one // name can be shared by multiple MethodInfo objects (so asMap() needs to be of type // Map<String, MethodInfoList> rather than Map<String, MethodInfo>) final Map<String, MethodInfoList> methodNameToMethodInfoList = new HashMap<>(); for (final MethodInfo methodInfo : this) { final String name = methodInfo.getName(); MethodInfoList methodInfoList = methodNameToMethodInfoList.get(name); if (methodInfoList == null) { methodInfoList = new MethodInfoList(1); // depends on control dependency: [if], data = [none] methodNameToMethodInfoList.put(name, methodInfoList); // depends on control dependency: [if], data = [none] } methodInfoList.add(methodInfo); // depends on control dependency: [for], data = [methodInfo] } return methodNameToMethodInfoList; } }
public class class_name {
    /**
     * Returns the space registered under the given id, creating it with the
     * supplied specification and creation parameters if it does not exist yet.
     *
     * @param spaceID        the identifier of the space
     * @param spec           the specification used to create a missing space
     * @param creationParams parameters forwarded to space creation
     * @return the existing or newly created space, never null
     */
    @SuppressWarnings("unchecked")
    public <S extends io.sarl.lang.core.Space> S getOrCreateSpaceWithID(SpaceID spaceID,
            Class<? extends SpaceSpecification<S>> spec, Object... creationParams) {
        synchronized (getSpaceRepositoryMutex()) {
            Space existing = this.spaces.get(spaceID);
            if (existing == null) {
                // Not yet registered: create it now.
                existing = createSpaceInstance(spec, spaceID, true, creationParams);
            }
            assert existing != null;
            return (S) existing;
        }
    }
}
public class class_name { @SuppressWarnings("unchecked") public <S extends io.sarl.lang.core.Space> S getOrCreateSpaceWithID(SpaceID spaceID, Class<? extends SpaceSpecification<S>> spec, Object... creationParams) { synchronized (getSpaceRepositoryMutex()) { Space space = this.spaces.get(spaceID); if (space == null) { space = createSpaceInstance(spec, spaceID, true, creationParams); // depends on control dependency: [if], data = [none] } assert space != null; return (S) space; } } }
public class class_name {
    /**
     * Notifies all registered listeners that the given plugin has stopped.
     * The event object is created lazily, only when at least one listener exists.
     *
     * @param plugin the plugin that stopped
     */
    private void firePluginStopped(final Plugin plugin) {
        PluginEvent e = null;
        synchronized (listenerList) {
            // Enhanced for-loop replaces the raw Iterator + explicit iterator cast.
            for (Object listener : listenerList) {
                PluginListener l = (PluginListener) listener;
                if (e == null) {
                    // Lazy allocation: skip creating the event when no listeners are registered.
                    e = new PluginEvent(plugin);
                }
                l.pluginStopped(e);
            }
        }
    }
}
public class class_name { private void firePluginStopped(final Plugin plugin) { PluginEvent e = null; synchronized (listenerList) { for (Iterator iter = listenerList.iterator(); iter.hasNext();) { PluginListener l = (PluginListener) iter.next(); if (e == null) { e = new PluginEvent(plugin); // depends on control dependency: [if], data = [none] } l.pluginStopped(e); // depends on control dependency: [for], data = [none] } } } }
public class class_name {
    /**
     * Returns the string at {@code path} in {@code config}, or {@code def}
     * when the path is absent.
     */
    public static String getString(Config config, String path, String def) {
        // Single expression: present -> configured value, absent -> default.
        return config.hasPath(path) ? config.getString(path) : def;
    }
}
public class class_name { public static String getString(Config config, String path, String def) { if (config.hasPath(path)) { return config.getString(path); // depends on control dependency: [if], data = [none] } return def; } }
public class class_name {
    /**
     * Removes every entry held by this cache, honoring the configured key mode:
     * NAMESPACE is unsupported (throws); MONOPOLISTIC flushes every node of the
     * Redis cluster; HASH deletes the single hash that backs this cache.
     *
     * @throws CacheException if the underlying Redis operation fails
     * @throws IllegalStateException if the key mode is unrecognized
     */
    @Override
    public void deleteAll() {
        switch (keyMode) {
            case NAMESPACE:
                throw new CacheException.OperationNotSupportedException(
                    "Key mode[" + keyMode + "] does not support 'deleteAll' operation.");
            case MONOPOLISTIC:
                try {
                    // Flush every cluster node; each pooled connection is
                    // released by the try-with-resources block.
                    getJedis().getClusterNodes().forEach((name, pool) -> {
                        try (Jedis jedis = pool.getResource()) {
                            jedis.flushAll();
                        }
                    });
                } catch (Exception e) {
                    // Re-throw CacheExceptions untouched; wrap anything else.
                    throw e instanceof CacheException ? (CacheException) e : new CacheException(e);
                }
                break;
            case HASH:
                try {
                    // Deleting the backing hash removes all of this cache's entries.
                    getJedis().del(getName());
                } catch (Exception e) {
                    throw e instanceof CacheException ? (CacheException) e : new CacheException(e);
                }
                break;
            default:
                throw new IllegalStateException("Invalid key mode: " + keyMode);
        }
    }
}
public class class_name {
    /**
     * Removes every entry held by this cache, honoring the configured key mode:
     * NAMESPACE is unsupported (throws); MONOPOLISTIC flushes every node of the
     * Redis cluster; HASH deletes the single hash that backs this cache.
     *
     * @throws CacheException if the underlying Redis operation fails
     * @throws IllegalStateException if the key mode is unrecognized
     */
    @Override
    public void deleteAll() {
        switch (keyMode) {
            case NAMESPACE:
                throw new CacheException.OperationNotSupportedException(
                    "Key mode[" + keyMode + "] does not support 'deleteAll' operation.");
            case MONOPOLISTIC:
                try {
                    // Flush every cluster node; each pooled connection is
                    // released by the try-with-resources block.
                    getJedis().getClusterNodes().forEach((name, pool) -> {
                        try (Jedis jedis = pool.getResource()) {
                            jedis.flushAll();
                        }
                    });
                } catch (Exception e) {
                    // Re-throw CacheExceptions untouched; wrap anything else.
                    throw e instanceof CacheException ? (CacheException) e : new CacheException(e);
                }
                break;
            case HASH:
                try {
                    // Deleting the backing hash removes all of this cache's entries.
                    getJedis().del(getName());
                } catch (Exception e) {
                    throw e instanceof CacheException ? (CacheException) e : new CacheException(e);
                }
                break;
            default:
                throw new IllegalStateException("Invalid key mode: " + keyMode);
        }
    }
}
public class class_name {
    /**
     * Parses a partial version ("1" or "1.2") into the equivalent range:
     * [major, major+1) for "1", or [major.minor, major.minor+1) for "1.2".
     */
    private CompositeExpression parsePartialVersionRange() {
        int major = intOf(consumeNextToken(NUMERIC).lexeme);
        // A bare major version covers the whole major release line.
        if (!tokens.positiveLookahead(DOT)) {
            CompositeExpression lowerBound = gte(versionFor(major));
            return lowerBound.and(lt(versionFor(major + 1)));
        }
        // "major.minor" covers the whole minor release line.
        consumeNextToken(DOT);
        int minor = intOf(consumeNextToken(NUMERIC).lexeme);
        CompositeExpression lowerBound = gte(versionFor(major, minor));
        return lowerBound.and(lt(versionFor(major, minor + 1)));
    }
}
public class class_name { private CompositeExpression parsePartialVersionRange() { int major = intOf(consumeNextToken(NUMERIC).lexeme); if (!tokens.positiveLookahead(DOT)) { return gte(versionFor(major)).and(lt(versionFor(major + 1))); // depends on control dependency: [if], data = [none] } consumeNextToken(DOT); int minor = intOf(consumeNextToken(NUMERIC).lexeme); return gte(versionFor(major, minor)).and(lt(versionFor(major, minor + 1))); } }
public class class_name {
    /**
     * Creates the registry entry node "exo:registry/groupName/entryName" in
     * every configured repository, unless it already exists there.
     *
     * @param groupName the registry group
     * @param entryName the entry within the group
     * @throws RepositoryException              on JCR failures
     * @throws RepositoryConfigurationException on repository configuration failures
     */
    public void initRegistryEntry(String groupName, String entryName) throws RepositoryException,
            RepositoryConfigurationException {
        String relPath = EXO_REGISTRY + "/" + groupName + "/" + entryName;
        for (RepositoryEntry repConfiguration : repConfigurations()) {
            String repName = repConfiguration.getName();
            SessionProvider sysProvider = SessionProvider.createSystemProvider();
            try {
                Node root = session(sysProvider, repositoryService.getRepository(repName)).getRootNode();
                if (!root.hasNode(relPath)) {
                    root.addNode(relPath, EXO_REGISTRYENTRY_NT);
                    root.save();
                } else {
                    // Fixed: the original message concatenated relPath directly to
                    // "is already", producing e.g. "...entryis already...".
                    LOG.info("The RegistryEntry " + relPath + " is already initialized on repository " + repName);
                }
            } finally {
                // Always release the system provider, even when the JCR calls throw.
                sysProvider.close();
            }
        }
    }
}
public class class_name { public void initRegistryEntry(String groupName, String entryName) throws RepositoryException, RepositoryConfigurationException { String relPath = EXO_REGISTRY + "/" + groupName + "/" + entryName; for (RepositoryEntry repConfiguration : repConfigurations()) { String repName = repConfiguration.getName(); SessionProvider sysProvider = SessionProvider.createSystemProvider(); Node root = session(sysProvider, repositoryService.getRepository(repName)).getRootNode(); if (!root.hasNode(relPath)) { root.addNode(relPath, EXO_REGISTRYENTRY_NT); // depends on control dependency: [if], data = [none] root.save(); // depends on control dependency: [if], data = [none] } else { LOG.info("The RegistryEntry " + relPath + "is already initialized on repository " + repName); // depends on control dependency: [if], data = [none] } sysProvider.close(); } } }
public class class_name {
    /**
     * Parses the attribute's string value as a Boolean.
     *
     * @param attribute the DOM attribute node
     * @return the parsed value, or {@code null} when the attribute is absent
     */
    Boolean getBooleanAttribute(org.w3c.dom.Node attribute) {
        String s = getStringAttribute(attribute);
        // Absent attribute -> null; otherwise parse ("true" case-insensitive).
        return (s == null) ? null : Boolean.parseBoolean(s);
    }
}
public class class_name { Boolean getBooleanAttribute(org.w3c.dom.Node attribute) { String s = getStringAttribute(attribute); if (s == null) { return null; // depends on control dependency: [if], data = [none] } return Boolean.parseBoolean(s); } }
public class class_name {
    /**
     * Runs one attempt of a conditional index update.
     *
     * <p>If the operation fails with a {@code BadOffsetException} whose expected
     * offset disagrees with our cached index length, the in-memory index is
     * considered stale: it is re-initialized before the original failure is
     * propagated, so a retry by the caller sees fresh state. All failures
     * ultimately bubble up to the returned future.
     *
     * @param indexOperation the update to run, given the remaining timeout
     * @param timer          tracks the remaining time budget
     * @return a future completing with the operation's result, or failing
     *         with the original exception
     */
    private CompletableFuture<Long> executeConditionallyOnce(Function<Duration, CompletableFuture<Long>> indexOperation, TimeoutTimer timer) {
        return Futures.exceptionallyCompose(
                indexOperation.apply(timer.getRemaining()),
                ex -> {
                    if (Exceptions.unwrap(ex) instanceof BadOffsetException) {
                        BadOffsetException boe = (BadOffsetException) Exceptions.unwrap(ex);
                        if (boe.getExpectedOffset() != this.index.getIndexLength()) {
                            // Cached index length is stale: refresh it, then still
                            // fail with the original exception so the caller can retry.
                            log.warn("{}: Conditional Index Update failed (expected {}, given {}). Reinitializing index.",
                                    this.traceObjectId, boe.getExpectedOffset(), boe.getGivenOffset());
                            return this.index.initialize(timer.getRemaining())
                                    .thenCompose(v -> Futures.failedFuture(ex));
                        }
                    }
                    // Make sure the exception bubbles up.
                    return Futures.failedFuture(ex);
                });
    }
}
public class class_name {
    /**
     * Runs one attempt of a conditional index update.
     *
     * <p>If the operation fails with a {@code BadOffsetException} whose expected
     * offset disagrees with our cached index length, the in-memory index is
     * considered stale: it is re-initialized before the original failure is
     * propagated, so a retry by the caller sees fresh state. All failures
     * ultimately bubble up to the returned future.
     *
     * @param indexOperation the update to run, given the remaining timeout
     * @param timer          tracks the remaining time budget
     * @return a future completing with the operation's result, or failing
     *         with the original exception
     */
    private CompletableFuture<Long> executeConditionallyOnce(Function<Duration, CompletableFuture<Long>> indexOperation, TimeoutTimer timer) {
        return Futures.exceptionallyCompose(
                indexOperation.apply(timer.getRemaining()),
                ex -> {
                    if (Exceptions.unwrap(ex) instanceof BadOffsetException) {
                        BadOffsetException boe = (BadOffsetException) Exceptions.unwrap(ex);
                        if (boe.getExpectedOffset() != this.index.getIndexLength()) {
                            // Cached index length is stale: refresh it, then still
                            // fail with the original exception so the caller can retry.
                            log.warn("{}: Conditional Index Update failed (expected {}, given {}). Reinitializing index.",
                                    this.traceObjectId, boe.getExpectedOffset(), boe.getGivenOffset());
                            return this.index.initialize(timer.getRemaining())
                                    .thenCompose(v -> Futures.failedFuture(ex));
                        }
                    }
                    // Make sure the exception bubbles up.
                    return Futures.failedFuture(ex);
                });
    }
}
public class class_name {
    /**
     * Appends the given application version descriptions to this result,
     * creating the backing list on first use.
     *
     * @param applicationVersions descriptions to append
     * @return this result, for call chaining
     */
    public DescribeApplicationVersionsResult withApplicationVersions(ApplicationVersionDescription... applicationVersions) {
        if (this.applicationVersions == null) {
            // Lazily create the list, sized to the incoming varargs.
            setApplicationVersions(new com.amazonaws.internal.SdkInternalList<ApplicationVersionDescription>(applicationVersions.length));
        }
        for (int i = 0; i < applicationVersions.length; i++) {
            this.applicationVersions.add(applicationVersions[i]);
        }
        return this;
    }
}
public class class_name { public DescribeApplicationVersionsResult withApplicationVersions(ApplicationVersionDescription... applicationVersions) { if (this.applicationVersions == null) { setApplicationVersions(new com.amazonaws.internal.SdkInternalList<ApplicationVersionDescription>(applicationVersions.length)); // depends on control dependency: [if], data = [none] } for (ApplicationVersionDescription ele : applicationVersions) { this.applicationVersions.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { public Object fromStream(final String iStream) { if (iStream == null || iStream.length() == 0) // NULL VALUE return null; final ODocument instance = new ODocument(); try { ORecordSerializerSchemaAware2CSV.INSTANCE.fromStream(iStream.getBytes("UTF-8"), instance, null); } catch (UnsupportedEncodingException e) { throw OException.wrapException(new OSerializationException("Error decoding string"), e); } final String className = instance.field(ODocumentSerializable.CLASS_NAME); if (className == null) return instance; Class<?> clazz = null; try { clazz = Class.forName(className); } catch (ClassNotFoundException e) { OLogManager.instance().debug(this, "Class name provided in embedded document " + className + " does not exist.", e); } if (clazz == null) return instance; if (ODocumentSerializable.class.isAssignableFrom(clazz)) { try { final ODocumentSerializable documentSerializable = (ODocumentSerializable) clazz.newInstance(); final ODocument docClone = new ODocument(); instance.copyTo(docClone); docClone.removeField(ODocumentSerializable.CLASS_NAME); documentSerializable.fromDocument(docClone); return documentSerializable; } catch (InstantiationException e) { throw OException.wrapException(new OSerializationException("Cannot serialize the object"), e); } catch (IllegalAccessException e) { throw OException.wrapException(new OSerializationException("Cannot serialize the object"), e); } } return instance; } }
public class class_name { public Object fromStream(final String iStream) { if (iStream == null || iStream.length() == 0) // NULL VALUE return null; final ODocument instance = new ODocument(); try { ORecordSerializerSchemaAware2CSV.INSTANCE.fromStream(iStream.getBytes("UTF-8"), instance, null); // depends on control dependency: [try], data = [none] } catch (UnsupportedEncodingException e) { throw OException.wrapException(new OSerializationException("Error decoding string"), e); } // depends on control dependency: [catch], data = [none] final String className = instance.field(ODocumentSerializable.CLASS_NAME); if (className == null) return instance; Class<?> clazz = null; try { clazz = Class.forName(className); // depends on control dependency: [try], data = [none] } catch (ClassNotFoundException e) { OLogManager.instance().debug(this, "Class name provided in embedded document " + className + " does not exist.", e); } // depends on control dependency: [catch], data = [none] if (clazz == null) return instance; if (ODocumentSerializable.class.isAssignableFrom(clazz)) { try { final ODocumentSerializable documentSerializable = (ODocumentSerializable) clazz.newInstance(); final ODocument docClone = new ODocument(); instance.copyTo(docClone); // depends on control dependency: [try], data = [none] docClone.removeField(ODocumentSerializable.CLASS_NAME); // depends on control dependency: [try], data = [none] documentSerializable.fromDocument(docClone); // depends on control dependency: [try], data = [none] return documentSerializable; // depends on control dependency: [try], data = [none] } catch (InstantiationException e) { throw OException.wrapException(new OSerializationException("Cannot serialize the object"), e); } catch (IllegalAccessException e) { // depends on control dependency: [catch], data = [none] throw OException.wrapException(new OSerializationException("Cannot serialize the object"), e); } // depends on control dependency: [catch], data = [none] } return 
instance; } }
public class class_name {
    /**
     * Normalizes an element type: when the actual type already satisfies the
     * expectation, the expected reference is returned unchanged; otherwise the
     * actual type is run through function-type normalization.
     */
    protected LightweightTypeReference doNormalizeElementType(LightweightTypeReference actual, LightweightTypeReference expected) {
        return matchesExpectation(actual, expected)
                ? expected
                : normalizeFunctionTypeReference(actual);
    }
}
public class class_name {
    /**
     * Normalizes an element type against an expectation.
     *
     * @param actual   the type actually inferred for the element
     * @param expected the type the context expects
     * @return {@code expected} when the actual type matches the expectation,
     *         otherwise the result of function-type normalization of {@code actual}
     */
    protected LightweightTypeReference doNormalizeElementType(LightweightTypeReference actual, LightweightTypeReference expected) {
        // Prefer the expected reference when it already covers the actual type,
        // so callers receive the canonical expected form.
        if (matchesExpectation(actual, expected)) {
            return expected;
        }
        // No match: fall back to normalizing the actual type
        // (presumably to its function-type form — semantics live in the helper).
        return normalizeFunctionTypeReference(actual);
    }
}
public class class_name { @Override public boolean setNotificationOptions( long newMonitorIntervalMilliSec, boolean newMonitorIsExplicit) { synchronized ( listenersLock ) { // Notification for an non-exposed zip container are received // from the parent container. That parent container notifier // controls the notification options. Notification options // on this enclosed notifier are unused. if ( isExposedNotifier() ) { return false; // The options were not accepted. } if ( (newMonitorIntervalMilliSec == monitorIntervalMilliSec) && (newMonitorIsExplicit == monitorIsExplicit) ) { return true; // The options were accepted, but have no effect. } monitorIsExplicit = newMonitorIsExplicit; monitorIntervalMilliSec = newMonitorIntervalMilliSec; updateMonitorService(); return true; // The options were accepted, and had an effect. } } }
public class class_name {
    /**
     * Applies new monitoring options to this notifier.
     *
     * <p>All work happens under {@code listenersLock}, keeping the option
     * fields and the monitor service update consistent with listener changes.
     *
     * @param newMonitorIntervalMilliSec the requested monitor interval, in milliseconds
     * @param newMonitorIsExplicit       whether monitoring is explicit
     * @return {@code false} when the options were not accepted (exposed notifier);
     *         {@code true} when accepted, even if they matched the current values
     */
    @Override public boolean setNotificationOptions( long newMonitorIntervalMilliSec, boolean newMonitorIsExplicit) { synchronized ( listenersLock ) {
        // Notifications for a non-exposed zip container are received
        // from the parent container. That parent container notifier
        // controls the notification options. Notification options
        // on this enclosed notifier are unused.
        if ( isExposedNotifier() ) {
            return false; // The options were not accepted.
        }
        // No-op fast path: identical options are accepted without touching
        // the fields or the monitor service.
        if ( (newMonitorIntervalMilliSec == monitorIntervalMilliSec) && (newMonitorIsExplicit == monitorIsExplicit) ) {
            return true; // The options were accepted, but have no effect.
        }
        monitorIsExplicit = newMonitorIsExplicit;
        monitorIntervalMilliSec = newMonitorIntervalMilliSec;
        // Propagate the changed options to the monitoring service.
        updateMonitorService();
        return true; // The options were accepted, and had an effect.
    } }
}
public class class_name {
    /**
     * Generates the next unique "mini" id.
     *
     * <p>The id packs a millisecond timestamp (relative to
     * {@code TIMESTAMP_EPOCH}, masked by {@code MASK_TIMESTAMP_MINI} and
     * shifted by {@code SHIFT_TIMESTAMP_MINI}), the {@code templateMini}
     * bits, and a per-millisecond sequence (masked by
     * {@code MASK_SEQUENCE_MINI}) — a snowflake-style layout. The method is
     * {@code synchronized}, so only one id is minted at a time.
     *
     * @return the next id, strictly ordered by (timestamp, sequence)
     */
    synchronized public long generateIdMini() {
        long timestamp = System.currentTimeMillis();
        long sequence = 0;
        boolean done = false;
        while (!done) {
            done = true;
            // Wall clock behind the last issued id (clock skew/adjustment):
            // wait until we are at least at the last timestamp again.
            while (timestamp < lastTimestampMillisec.get()) {
                timestamp = waitTillNextMillisec(timestamp);
            }
            if (timestamp == lastTimestampMillisec.get()) {
                // Same millisecond as the previous id: increase sequence.
                sequence = sequenceMillisec.incrementAndGet();
                if (sequence > MAX_SEQUENCE_MINI) {
                    // Sequence space for this millisecond is exhausted:
                    // reset sequence, move to the next millisecond, and re-check.
                    sequenceMillisec.set(sequence = 0);
                    timestamp = waitTillNextMillisec(timestamp);
                    done = false;
                }
            }
        }
        // Publish the state used by the next invocation.
        sequenceMillisec.set(sequence);
        lastTimestampMillisec.set(timestamp);
        timestamp = (timestamp - TIMESTAMP_EPOCH) & MASK_TIMESTAMP_MINI;
        return timestamp << SHIFT_TIMESTAMP_MINI | templateMini | (sequence & MASK_SEQUENCE_MINI);
    }
}
public class class_name {
    /**
     * Generates the next unique "mini" id from the current wall-clock
     * millisecond, the {@code templateMini} bits, and a per-millisecond
     * sequence counter. {@code synchronized} serializes all minting.
     *
     * @return the next id; ids issued within one millisecond differ in their
     *         sequence bits, ids across milliseconds in their timestamp bits
     */
    synchronized public long generateIdMini() {
        long timestamp = System.currentTimeMillis();
        long sequence = 0;
        boolean done = false;
        while (!done) {
            done = true;
            // Clock moved backwards relative to the last issued id:
            // spin (via the helper) until we catch up.
            while (timestamp < lastTimestampMillisec.get()) {
                timestamp = waitTillNextMillisec(timestamp);
            }
            if (timestamp == lastTimestampMillisec.get()) {
                // increase sequence
                sequence = sequenceMillisec.incrementAndGet();
                if (sequence > MAX_SEQUENCE_MINI) {
                    // reset sequence and retry in a later millisecond; done=false
                    // forces the outer loop to re-validate the new timestamp.
                    sequenceMillisec.set(sequence = 0);
                    timestamp = waitTillNextMillisec(timestamp);
                    done = false;
                }
            }
        }
        // Record the state the next call compares against.
        sequenceMillisec.set(sequence);
        lastTimestampMillisec.set(timestamp);
        // Pack: [timestamp - epoch, masked] << shift | template | sequence.
        timestamp = (timestamp - TIMESTAMP_EPOCH) & MASK_TIMESTAMP_MINI;
        return timestamp << SHIFT_TIMESTAMP_MINI | templateMini | (sequence & MASK_SEQUENCE_MINI);
    }
}
public class class_name {
    /**
     * Sets the smoothing flag. Before the lazily-created property exists the
     * backing field is written and a redraw is fired; afterwards the value
     * goes through the property (which handles its own notifications).
     */
    public void setSmoothing(final boolean SMOOTHING) {
        if (smoothing != null) {
            smoothing.set(SMOOTHING);
        } else {
            _smoothing = SMOOTHING;
            fireUpdateEvent(REDRAW_EVENT);
        }
    }
}
public class class_name {
    /**
     * Sets the smoothing flag.
     *
     * @param SMOOTHING the new smoothing value
     */
    public void setSmoothing(final boolean SMOOTHING) {
        if (null == smoothing) {
            // Property not created yet: write the plain backing field and
            // trigger a redraw explicitly.
            _smoothing = SMOOTHING;
            fireUpdateEvent(REDRAW_EVENT);
        } else {
            // Property exists: route through it (no explicit redraw here —
            // presumably the property's listeners handle that; verify).
            smoothing.set(SMOOTHING);
        }
    }
}
public class class_name {
    /**
     * Returns the monomer database, optionally excluding newly added monomers.
     *
     * @param includeNewMonomers when {@code true} the internal database is
     *                           returned directly; when {@code false} a filtered
     *                           copy is built via {@code excludeNewMonomers}
     * @return the polymer-type to (monomer-id to monomer) map, case-insensitive
     *         on polymer type in the filtered copy
     */
    public synchronized Map<String, Map<String, Monomer>> getMonomerDB(boolean includeNewMonomers) {
        if (includeNewMonomers) {
            // NOTE(review): this hands out the internal mutable map; a defensive
            // copy would be safer but would change the existing contract.
            return monomerDB;
        }
        Map<String, Map<String, Monomer>> reducedMonomerDB =
                new TreeMap<String, Map<String, Monomer>>(String.CASE_INSENSITIVE_ORDER);
        // Iterate entries rather than keySet()+get(): one lookup per mapping
        // instead of two, same iteration order and results.
        for (Map.Entry<String, Map<String, Monomer>> entry : monomerDB.entrySet()) {
            reducedMonomerDB.put(entry.getKey(), excludeNewMonomers(entry.getValue()));
        }
        return reducedMonomerDB;
    }
}
public class class_name {
    /**
     * Returns the monomer database, optionally excluding newly added monomers.
     *
     * @param includeNewMonomers when {@code true} the internal map is returned
     *                           as-is; when {@code false} a filtered copy is built
     * @return the polymer-type to (monomer-id to monomer) map
     */
    public synchronized Map<String, Map<String, Monomer>> getMonomerDB(boolean includeNewMonomers) {
        if (includeNewMonomers) {
            // Fast path: hand back the internal map directly (callers receive
            // the live, mutable database — no defensive copy is made).
            return monomerDB;
        } else {
            // Build a filtered snapshot keyed case-insensitively by polymer type.
            Map<String, Map<String, Monomer>> reducedMonomerDB = new TreeMap<String, Map<String, Monomer>>( String.CASE_INSENSITIVE_ORDER);
            for (String polymerType : monomerDB.keySet()) {
                Map<String, Monomer> monomerMap = monomerDB.get(polymerType);
                // excludeNewMonomers presumably drops entries flagged as new —
                // semantics live in that helper.
                reducedMonomerDB.put(polymerType, excludeNewMonomers(monomerMap));
            }
            return reducedMonomerDB;
        }
    }
}