code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name {
    /**
     * Copies {@code cols} consecutive values from {@code array}, starting at
     * {@code offset}, into row {@code i} of this matrix via {@code set(row, col, v)}.
     */
    public void setRow(int i, double[] array, int offset) {
        final int columnCount = cols;
        int srcIndex = offset;
        for (int col = 0; col < columnCount; col++) {
            set(i, col, array[srcIndex++]);
        }
    }
}
// Dependency-annotated copy of setRow: the inline "// depends on control dependency"
// label is a dataset annotation (control dep [for], data dep [j]); on a single physical
// line it also comments out the trailing braces, so this row is data, not compilable code.
public class class_name { public void setRow(int i, double[] array, int offset) { int n = cols; for (int j=0;j<n;j++) { set(i, j, array[j+offset]); // depends on control dependency: [for], data = [j] } } }
public class class_name {
    /**
     * Fluent varargs setter: appends the given log events to the
     * {@code resourceLogEvents} collection, lazily creating the backing list
     * (pre-sized to the argument count) on first use, and returns {@code this}
     * for chaining.
     */
    public GetRelationalDatabaseLogEventsResult withResourceLogEvents(LogEvent... resourceLogEvents) {
        if (this.resourceLogEvents == null) {
            setResourceLogEvents(new java.util.ArrayList<LogEvent>(resourceLogEvents.length));
        }
        for (int idx = 0; idx < resourceLogEvents.length; idx++) {
            this.resourceLogEvents.add(resourceLogEvents[idx]);
        }
        return this;
    }
}
// Dependency-annotated copy of withResourceLogEvents: inline labels record each
// statement's control dependency ([if]/[for]) and data dependency. Dataset row, not
// compilable as a single line (the trailing code is swallowed by the last "//" label).
public class class_name { public GetRelationalDatabaseLogEventsResult withResourceLogEvents(LogEvent... resourceLogEvents) { if (this.resourceLogEvents == null) { setResourceLogEvents(new java.util.ArrayList<LogEvent>(resourceLogEvents.length)); // depends on control dependency: [if], data = [none] } for (LogEvent ele : resourceLogEvents) { this.resourceLogEvents.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name {
    /**
     * Resolves the logo URL for this service: the URL of the first entry returned
     * by {@code getLogoUrls()} when that collection is non-empty, otherwise the
     * registered service's configured logo. Any lookup failure is logged at debug
     * level and falls through to the fallback.
     */
    public String getLogoUrl() {
        try {
            val urls = getLogoUrls();
            if (!urls.isEmpty()) {
                val first = urls.iterator().next();
                return first.getUrl();
            }
        } catch (final Exception e) {
            LOGGER.debug(e.getMessage(), e);
        }
        return this.registeredService.getLogo();
    }
}
// Dependency-annotated copy of getLogoUrl: inline labels mark the [if]- and
// [catch]-controlled statements. Dataset row; trailing code after the last "//"
// label is comment-swallowed on this single line.
public class class_name { public String getLogoUrl() { try { val items = getLogoUrls(); if (!items.isEmpty()) { return items.iterator().next().getUrl(); // depends on control dependency: [if], data = [none] } } catch (final Exception e) { LOGGER.debug(e.getMessage(), e); } // depends on control dependency: [catch], data = [none] return this.registeredService.getLogo(); } }
public class class_name {
    /**
     * Returns the sub-formats of this message, indexed by numeric argument number.
     * Slots for argument numbers that have no cached formatter are {@code null}.
     *
     * @throws IllegalArgumentException if the pattern uses named (alphanumeric)
     *         arguments, which have no numeric index.
     */
    public Format[] getFormatsByArgumentIndex() {
        // Named arguments have no numeric index, so this by-index view is undefined.
        if (msgPattern.hasNamedArguments()) {
            throw new IllegalArgumentException(
                    "This method is not available in MessageFormat objects " +
                    "that use alphanumeric argument names.");
        }
        ArrayList<Format> list = new ArrayList<Format>();
        // Walk each top-level argument; partIndex is advanced inside the loop condition,
        // and nextTopLevelArgStart returns a negative value when no arguments remain.
        for (int partIndex = 0; (partIndex = nextTopLevelArgStart(partIndex)) >= 0;) {
            int argNumber = msgPattern.getPart(partIndex + 1).getValue();
            // Pad with null placeholders so list.set(argNumber, ...) below is in range.
            while (argNumber >= list.size()) {
                list.add(null);
            }
            list.set(argNumber, cachedFormatters == null ? null : cachedFormatters.get(partIndex));
        }
        return list.toArray(new Format[list.size()]);
    }
}
// Dependency-annotated copy of getFormatsByArgumentIndex: inline labels mark the
// [while]- and [for]-controlled statements and their data dependencies. Dataset row;
// not compilable as a single line (code after each "//" label is comment-swallowed).
public class class_name { public Format[] getFormatsByArgumentIndex() { if (msgPattern.hasNamedArguments()) { throw new IllegalArgumentException( "This method is not available in MessageFormat objects " + "that use alphanumeric argument names."); } ArrayList<Format> list = new ArrayList<Format>(); for (int partIndex = 0; (partIndex = nextTopLevelArgStart(partIndex)) >= 0;) { int argNumber = msgPattern.getPart(partIndex + 1).getValue(); while (argNumber >= list.size()) { list.add(null); // depends on control dependency: [while], data = [none] } list.set(argNumber, cachedFormatters == null ? null : cachedFormatters.get(partIndex)); // depends on control dependency: [for], data = [partIndex] } return list.toArray(new Format[list.size()]); } }
// IP address range iterator builder: composes per-segment iterators (tracked via the
// finalValue[] array) into a single address iterator from `lower` to `upper`.
// NOTE(review): this row was flattened onto a few long lines; several "//" comments now
// swallow the code that originally followed them on later source lines, so the text below
// is preserved verbatim as dataset content rather than reformatted — a restyle could not
// be verified to preserve the original statement order of this intricate logic.
public class class_name { protected static <T extends IPAddress, S extends IPAddressSegment> Iterator<T> iterator( T lower, T upper, AddressCreator<T, ?, ?, S> creator, SegFunction<T, S> segProducer, SegFunction<S, Iterator<S>> segmentIteratorProducer, SegValueComparator<T> segValueComparator, int networkSegmentIndex, int hostSegmentIndex, SegFunction<S, Iterator<S>> prefixedSegIteratorProducer) { int divCount = lower.getDivisionCount(); // at any given point in time, this list provides an iterator for the segment at each index ArrayList<Supplier<Iterator<S>>> segIteratorProducerList = new ArrayList<Supplier<Iterator<S>>>(divCount); // at any given point in time, finalValue[i] is true if and only if we have reached the very last value for segment i - 1 // when that happens, the next iterator for the segment at index i will be the last boolean finalValue[] = new boolean[divCount + 1]; // here is how the segment iterators will work: // the low and high values at each segment are low, high // the maximum possible valoues for any segment are min, max // we first find the first k >= 0 such that low != high for the segment at index k // the initial set of iterators at each index are as follows: // for i < k finalValue[i] is set to true right away. 
// we create an iterator from seg = new Seg(low) // for i == k we create a wrapped iterator from Seg(low, high), wrapper will set finalValue[i] once we reach the final value of the iterator // for i > k we create an iterator from Seg(low, max) // // after the initial iterator has been supplied, any further iterator supplied for the same segment is as follows: // for i <= k, there was only one iterator, there will be no further iterator // for i > k, // if i == 0 or of if flagged[i - 1] is true, we create a wrapped iterator from Seg(low, high), wrapper will set finalValue[i] once we reach the final value of the iterator // otherwise we create an iterator from Seg(min, max) // // By following these rules, we iterator through all possible addresses boolean notDiffering = true; finalValue[0] = true; S allSegShared = null; for(int i = 0; i < divCount; i++) { SegFunction<S, Iterator<S>> segIteratorProducer; if(prefixedSegIteratorProducer != null && i >= networkSegmentIndex) { segIteratorProducer = prefixedSegIteratorProducer; } else { segIteratorProducer = segmentIteratorProducer; } S lowerSeg = segProducer.apply(lower, i); int indexi = i; if(notDiffering) { notDiffering = segValueComparator.apply(lower, upper, i); if(notDiffering) { // there is only one iterator and it produces only one value finalValue[i + 1] = true; Iterator<S> iterator = segIteratorProducer.apply(lowerSeg, i); segIteratorProducerList.add(() -> iterator); } else { // in the first differing segment the only iterator will go from segment value of lower address to segment value of upper address Iterator<S> iterator = segIteratorProducer.apply( creator.createSegment(lowerSeg.getSegmentValue(), upper.getSegment(i).getSegmentValue(), null), i); // the wrapper iterator detects when the iterator has reached its final value Iterator<S> wrappedFinalIterator = new Iterator<S>() { @Override public boolean hasNext() { return iterator.hasNext(); } @Override public S next() { S next = iterator.next(); 
if(!iterator.hasNext()) { finalValue[indexi + 1] = true; } return next; } }; segIteratorProducerList.add(() -> wrappedFinalIterator); } } else { // in the second and all following differing segments, rather than go from segment value of lower address to segment value of upper address // we go from segment value of lower address to the max seg value the first time through // then we go from the min value of the seg to the max seg value each time until the final time, // the final time we go from the min value to the segment value of upper address // we know it is the final time through when the previous iterator has reached its final value, which we track // the first iterator goes from the segment value of lower address to the max value of the segment Iterator<S> firstIterator = segIteratorProducer.apply(creator.createSegment(lowerSeg.getSegmentValue(), lower.getMaxSegmentValue(), null), i); // the final iterator goes from 0 to the segment value of our upper address Iterator<S> finalIterator = segIteratorProducer.apply(creator.createSegment(0, upper.getSegment(i).getSegmentValue(), null), i); // the wrapper iterator detects when the final iterator has reached its final value Iterator<S> wrappedFinalIterator = new Iterator<S>() { @Override public boolean hasNext() { return finalIterator.hasNext(); } @Override public S next() { S next = finalIterator.next(); if(!finalIterator.hasNext()) { finalValue[indexi + 1] = true; } return next; } }; if(allSegShared == null) { allSegShared = creator.createSegment(0, lower.getMaxSegmentValue(), null); } // all iterators after the first iterator and before the final iterator go from 0 the max segment value, // and there will be many such iterators S allSeg = allSegShared; Supplier<Iterator<S>> finalIteratorProducer = () -> finalValue[indexi] ? 
wrappedFinalIterator : segIteratorProducer.apply(allSeg, indexi); segIteratorProducerList.add(() -> { //the first time through, we replace the iterator producer so the first iterator used only once segIteratorProducerList.set(indexi, finalIteratorProducer); return firstIterator; }); } } IntFunction<Iterator<S>> iteratorProducer = iteratorIndex -> segIteratorProducerList.get(iteratorIndex).get(); return IPAddressSection.iterator(null, creator, IPAddressSection.iterator( lower.getSegmentCount(), creator, iteratorProducer, networkSegmentIndex, hostSegmentIndex, iteratorProducer) ); } }
// Dependency-annotated copy of the IP address range iterator builder above: inline
// "// depends on control dependency" labels mark [if]-controlled statements. Dataset
// row preserved verbatim; the flattening onto long lines makes it non-compilable
// (code following each "//" label is comment-swallowed).
public class class_name { protected static <T extends IPAddress, S extends IPAddressSegment> Iterator<T> iterator( T lower, T upper, AddressCreator<T, ?, ?, S> creator, SegFunction<T, S> segProducer, SegFunction<S, Iterator<S>> segmentIteratorProducer, SegValueComparator<T> segValueComparator, int networkSegmentIndex, int hostSegmentIndex, SegFunction<S, Iterator<S>> prefixedSegIteratorProducer) { int divCount = lower.getDivisionCount(); // at any given point in time, this list provides an iterator for the segment at each index ArrayList<Supplier<Iterator<S>>> segIteratorProducerList = new ArrayList<Supplier<Iterator<S>>>(divCount); // at any given point in time, finalValue[i] is true if and only if we have reached the very last value for segment i - 1 // when that happens, the next iterator for the segment at index i will be the last boolean finalValue[] = new boolean[divCount + 1]; // here is how the segment iterators will work: // the low and high values at each segment are low, high // the maximum possible valoues for any segment are min, max // we first find the first k >= 0 such that low != high for the segment at index k // the initial set of iterators at each index are as follows: // for i < k finalValue[i] is set to true right away. 
// we create an iterator from seg = new Seg(low) // for i == k we create a wrapped iterator from Seg(low, high), wrapper will set finalValue[i] once we reach the final value of the iterator // for i > k we create an iterator from Seg(low, max) // // after the initial iterator has been supplied, any further iterator supplied for the same segment is as follows: // for i <= k, there was only one iterator, there will be no further iterator // for i > k, // if i == 0 or of if flagged[i - 1] is true, we create a wrapped iterator from Seg(low, high), wrapper will set finalValue[i] once we reach the final value of the iterator // otherwise we create an iterator from Seg(min, max) // // By following these rules, we iterator through all possible addresses boolean notDiffering = true; finalValue[0] = true; S allSegShared = null; for(int i = 0; i < divCount; i++) { SegFunction<S, Iterator<S>> segIteratorProducer; if(prefixedSegIteratorProducer != null && i >= networkSegmentIndex) { segIteratorProducer = prefixedSegIteratorProducer; // depends on control dependency: [if], data = [none] } else { segIteratorProducer = segmentIteratorProducer; // depends on control dependency: [if], data = [none] } S lowerSeg = segProducer.apply(lower, i); int indexi = i; if(notDiffering) { notDiffering = segValueComparator.apply(lower, upper, i); // depends on control dependency: [if], data = [none] if(notDiffering) { // there is only one iterator and it produces only one value finalValue[i + 1] = true; // depends on control dependency: [if], data = [none] Iterator<S> iterator = segIteratorProducer.apply(lowerSeg, i); segIteratorProducerList.add(() -> iterator); // depends on control dependency: [if], data = [none] } else { // in the first differing segment the only iterator will go from segment value of lower address to segment value of upper address Iterator<S> iterator = segIteratorProducer.apply( creator.createSegment(lowerSeg.getSegmentValue(), upper.getSegment(i).getSegmentValue(), null), 
i); // the wrapper iterator detects when the iterator has reached its final value Iterator<S> wrappedFinalIterator = new Iterator<S>() { @Override public boolean hasNext() { return iterator.hasNext(); } @Override public S next() { S next = iterator.next(); if(!iterator.hasNext()) { finalValue[indexi + 1] = true; // depends on control dependency: [if], data = [none] } return next; } }; segIteratorProducerList.add(() -> wrappedFinalIterator); // depends on control dependency: [if], data = [none] } } else { // in the second and all following differing segments, rather than go from segment value of lower address to segment value of upper address // we go from segment value of lower address to the max seg value the first time through // then we go from the min value of the seg to the max seg value each time until the final time, // the final time we go from the min value to the segment value of upper address // we know it is the final time through when the previous iterator has reached its final value, which we track // the first iterator goes from the segment value of lower address to the max value of the segment Iterator<S> firstIterator = segIteratorProducer.apply(creator.createSegment(lowerSeg.getSegmentValue(), lower.getMaxSegmentValue(), null), i); // the final iterator goes from 0 to the segment value of our upper address Iterator<S> finalIterator = segIteratorProducer.apply(creator.createSegment(0, upper.getSegment(i).getSegmentValue(), null), i); // the wrapper iterator detects when the final iterator has reached its final value Iterator<S> wrappedFinalIterator = new Iterator<S>() { @Override public boolean hasNext() { return finalIterator.hasNext(); } @Override public S next() { S next = finalIterator.next(); if(!finalIterator.hasNext()) { finalValue[indexi + 1] = true; // depends on control dependency: [if], data = [none] } return next; } }; if(allSegShared == null) { allSegShared = creator.createSegment(0, lower.getMaxSegmentValue(), null); // depends on 
control dependency: [if], data = [null)] } // all iterators after the first iterator and before the final iterator go from 0 the max segment value, // and there will be many such iterators S allSeg = allSegShared; Supplier<Iterator<S>> finalIteratorProducer = () -> finalValue[indexi] ? wrappedFinalIterator : segIteratorProducer.apply(allSeg, indexi); segIteratorProducerList.add(() -> { //the first time through, we replace the iterator producer so the first iterator used only once segIteratorProducerList.set(indexi, finalIteratorProducer); return firstIterator; // depends on control dependency: [if], data = [none] }); } } IntFunction<Iterator<S>> iteratorProducer = iteratorIndex -> segIteratorProducerList.get(iteratorIndex).get(); return IPAddressSection.iterator(null, creator, IPAddressSection.iterator( lower.getSegmentCount(), creator, iteratorProducer, networkSegmentIndex, hostSegmentIndex, iteratorProducer) ); } }
public class class_name {
    /**
     * Decodes an 8-byte fixed-point timestamp beginning at {@code pointer}: the
     * first four bytes carry weights 2^24..2^0 and the last four 2^-8..2^-32
     * (NTP-style seconds.fraction layout — presumably; confirm against the caller).
     * Bytes are accumulated in order, preserving the original summation sequence.
     */
    public static double decodeTimestamp(byte[] array, int pointer) {
        double seconds = 0.0;
        int byteIdx = 0;
        while (byteIdx < 8) {
            seconds += unsignedByteToShort(array[pointer + byteIdx]) * Math.pow(2, (3 - byteIdx) * 8);
            byteIdx++;
        }
        return seconds;
    }
}
// Dependency-annotated copy of decodeTimestamp: the inline label marks the
// [for]-controlled accumulation and its data dependency on i. Dataset row; trailing
// braces are comment-swallowed on this single line.
public class class_name { public static double decodeTimestamp(byte[] array, int pointer) { double r = 0.0; for (int i = 0; i < 8; i++) { r += unsignedByteToShort(array[pointer + i]) * Math.pow(2, (3 - i) * 8); // depends on control dependency: [for], data = [i] } return r; } }
// Builds the Cypher query that deletes one association row: matches the owner entity
// node, the association relationship (filtered by row-key index columns when present,
// otherwise by the associated entity's key columns on the target node), then DELETEs
// the relationship — and, for embedded collections, the embedded node as well.
// Code preserved byte-identical: every append is order-sensitive query text.
public class class_name { private static String initRemoveAssociationRowQuery(EntityKeyMetadata ownerEntityKeyMetadata, AssociationKeyMetadata associationKeyMetadata) { StringBuilder queryBuilder = new StringBuilder( "MATCH " ); queryBuilder.append( "(n:" ); queryBuilder.append( ENTITY ); queryBuilder.append( ":" ); appendLabel( ownerEntityKeyMetadata, queryBuilder ); appendProperties( ownerEntityKeyMetadata, queryBuilder ); queryBuilder.append( ") - " ); queryBuilder.append( "[r" ); queryBuilder.append( ":" ); appendRelationshipType( queryBuilder, associationKeyMetadata ); int offset = ownerEntityKeyMetadata.getColumnNames().length; boolean hasIndexColumns = associationKeyMetadata.getRowKeyIndexColumnNames().length > 0; if ( hasIndexColumns ) { appendProperties( queryBuilder, associationKeyMetadata.getRowKeyIndexColumnNames(), offset ); } queryBuilder.append( "] - (e" ); if ( associationKeyMetadata.getAssociationKind() == AssociationKind.EMBEDDED_COLLECTION ) { queryBuilder.append( ":" ); queryBuilder.append( EMBEDDED ); } if ( !hasIndexColumns ) { appendProperties( queryBuilder, associationKeyMetadata.getAssociatedEntityKeyMetadata().getEntityKeyMetadata().getColumnNames(), offset ); } queryBuilder.append( ")" ); queryBuilder.append( " DELETE r" ); if ( associationKeyMetadata.getAssociationKind() == AssociationKind.EMBEDDED_COLLECTION ) { queryBuilder.append( ", e" ); } return queryBuilder.toString(); } }
// Dependency-annotated copy of initRemoveAssociationRowQuery: the inline label marks
// the [if]-controlled appendProperties call. Dataset row preserved verbatim.
public class class_name { private static String initRemoveAssociationRowQuery(EntityKeyMetadata ownerEntityKeyMetadata, AssociationKeyMetadata associationKeyMetadata) { StringBuilder queryBuilder = new StringBuilder( "MATCH " ); queryBuilder.append( "(n:" ); queryBuilder.append( ENTITY ); queryBuilder.append( ":" ); appendLabel( ownerEntityKeyMetadata, queryBuilder ); appendProperties( ownerEntityKeyMetadata, queryBuilder ); queryBuilder.append( ") - " ); queryBuilder.append( "[r" ); queryBuilder.append( ":" ); appendRelationshipType( queryBuilder, associationKeyMetadata ); int offset = ownerEntityKeyMetadata.getColumnNames().length; boolean hasIndexColumns = associationKeyMetadata.getRowKeyIndexColumnNames().length > 0; if ( hasIndexColumns ) { appendProperties( queryBuilder, associationKeyMetadata.getRowKeyIndexColumnNames(), offset ); // depends on control dependency: [if], data = [none] } queryBuilder.append( "] - (e" ); if ( associationKeyMetadata.getAssociationKind() == AssociationKind.EMBEDDED_COLLECTION ) { queryBuilder.append( ":" ); queryBuilder.append( EMBEDDED ); } if ( !hasIndexColumns ) { appendProperties( queryBuilder, associationKeyMetadata.getAssociatedEntityKeyMetadata().getEntityKeyMetadata().getColumnNames(), offset ); } queryBuilder.append( ")" ); queryBuilder.append( " DELETE r" ); if ( associationKeyMetadata.getAssociationKind() == AssociationKind.EMBEDDED_COLLECTION ) { queryBuilder.append( ", e" ); } return queryBuilder.toString(); } }
public class class_name {
    /**
     * Registers a header name whose value should be masked (e.g. in logging).
     * The name is trimmed and stored lower-cased; null or blank names are ignored.
     */
    public void addMaskedHeaderName(String maskedHeaderName) {
        if (maskedHeaderName != null) {
            maskedHeaderName = maskedHeaderName.trim();
            if (maskedHeaderName.length() > 0) {
                // Lower-case with a fixed locale: the default-locale toLowerCase() maps
                // 'I' to dotless 'ı' under Turkish locales, which would break
                // case-insensitive header matching for ASCII header names.
                // NOTE(review): ensure the lookup side lower-cases with the same locale.
                maskedHeaderNames.add(maskedHeaderName.toLowerCase(java.util.Locale.ROOT));
            }
        }
    }
}
// Dependency-annotated copy of addMaskedHeaderName: inline labels mark the nested
// [if]-controlled statements. Dataset row; trailing braces are comment-swallowed.
public class class_name { public void addMaskedHeaderName(String maskedHeaderName) { if (maskedHeaderName != null) { maskedHeaderName = maskedHeaderName.trim(); // depends on control dependency: [if], data = [none] if (maskedHeaderName.length() > 0) { maskedHeaderNames.add(maskedHeaderName.toLowerCase()); // depends on control dependency: [if], data = [none] } } } }
// Enables or disables the MetricReportManager singleton per the enableMetricManager
// flag and records the outcome in `ret` (STATUS_PARAM on success, RESPONSE_ERROR when
// the manager is unavailable or an exception occurs). The guard also admits the
// enable path when the manager is merely instantiated but not yet available.
// NOTE(review): the `req` parameter is unused in this body — confirm it is required
// by the handler signature. Code preserved byte-identical.
public class class_name { private void handleChangeManagerStatusRequest(final HttpServletRequest req, final Map<String, Object> ret, final boolean enableMetricManager) { try { logger.info("Updating metric manager status"); if ((enableMetricManager && MetricReportManager.isInstantiated()) || MetricReportManager.isAvailable()) { final MetricReportManager metricManager = MetricReportManager.getInstance(); if (enableMetricManager) { metricManager.enableManager(); } else { metricManager.disableManager(); } ret.put(STATUS_PARAM, RESPONSE_SUCCESS); } else { ret.put(RESPONSE_ERROR, "MetricManager is not available"); } } catch (final Exception e) { logger.error(e); ret.put(RESPONSE_ERROR, e.getMessage()); } } }
// Dependency-annotated copy of handleChangeManagerStatusRequest: inline labels mark
// the [try]-, [if]- and [catch]-controlled statements. Dataset row preserved verbatim.
public class class_name { private void handleChangeManagerStatusRequest(final HttpServletRequest req, final Map<String, Object> ret, final boolean enableMetricManager) { try { logger.info("Updating metric manager status"); // depends on control dependency: [try], data = [none] if ((enableMetricManager && MetricReportManager.isInstantiated()) || MetricReportManager.isAvailable()) { final MetricReportManager metricManager = MetricReportManager.getInstance(); if (enableMetricManager) { metricManager.enableManager(); // depends on control dependency: [if], data = [none] } else { metricManager.disableManager(); // depends on control dependency: [if], data = [none] } ret.put(STATUS_PARAM, RESPONSE_SUCCESS); // depends on control dependency: [if], data = [none] } else { ret.put(RESPONSE_ERROR, "MetricManager is not available"); // depends on control dependency: [if], data = [none] } } catch (final Exception e) { logger.error(e); ret.put(RESPONSE_ERROR, e.getMessage()); } // depends on control dependency: [catch], data = [none] } }
// Copies an LDAP attribute's value(s) onto an entity property: multi-valued
// properties iterate attr.getAll(), single-valued use attr.get(); each non-null value
// is converted via processPropertyValue using the property's data type and the LDAP
// syntax (defaulting to LDAP_ATTR_SYNTAX_STRING when ldapAttr is null).
// NamingException is rethrown as WIMSystemException; ClassCastException and
// ArrayStoreException (type-mismatch on set) are logged as INVALID_PROPERTY_DATA_TYPE
// without rethrowing. Code preserved byte-identical — the per-exception handling
// differs deliberately and is too order-sensitive to restyle safely.
public class class_name { @SuppressWarnings("unchecked") private void setPropertyValue(Entity entity, Attribute attr, String propName, LdapAttribute ldapAttr) throws WIMException { String dataType = entity.getDataType(propName); boolean isMany = entity.isMultiValuedProperty(propName); String syntax = LDAP_ATTR_SYNTAX_STRING; if (ldapAttr != null) { syntax = ldapAttr.getSyntax(); if (tc.isEventEnabled()) { Tr.event(tc, "ldapAttr " + ldapAttr + " syntax is " + syntax); } } try { if (isMany) { for (NamingEnumeration<?> enu = attr.getAll(); enu.hasMoreElements();) { Object ldapValue = enu.nextElement(); if (ldapValue != null) { entity.set(propName, processPropertyValue(entity, propName, dataType, syntax, ldapValue)); } } } else { Object ldapValue = attr.get(); if (ldapValue != null) { entity.set(propName, processPropertyValue(entity, propName, dataType, syntax, ldapValue)); } } } catch (NamingException e) { if (tc.isEventEnabled()) { Tr.event(tc, "Unexpected on " + propName + " with dataType " + dataType, e); } String msg = Tr.formatMessage(tc, WIMMessageKey.NAMING_EXCEPTION, WIMMessageHelper.generateMsgParms(e.toString(true))); throw new WIMSystemException(WIMMessageKey.NAMING_EXCEPTION, msg, e); } catch (ClassCastException ce) { if (tc.isEventEnabled()) { Tr.event(tc, "Failed to cast property " + propName + " to " + dataType, ce); } if (tc.isErrorEnabled()) Tr.error(tc, WIMMessageKey.INVALID_PROPERTY_DATA_TYPE, WIMMessageHelper.generateMsgParms(propName)); } catch (ArrayStoreException ae) { if (tc.isEventEnabled()) { Tr.event(tc, "Unexpected on " + propName + " with dataType " + dataType, ae); } if (tc.isErrorEnabled()) Tr.error(tc, WIMMessageKey.INVALID_PROPERTY_DATA_TYPE, WIMMessageHelper.generateMsgParms(propName)); } } }
// Dependency-annotated copy of setPropertyValue: inline labels mark the
// [if]-controlled statements in each branch and catch block. Dataset row preserved
// verbatim (not compilable as flattened lines).
public class class_name { @SuppressWarnings("unchecked") private void setPropertyValue(Entity entity, Attribute attr, String propName, LdapAttribute ldapAttr) throws WIMException { String dataType = entity.getDataType(propName); boolean isMany = entity.isMultiValuedProperty(propName); String syntax = LDAP_ATTR_SYNTAX_STRING; if (ldapAttr != null) { syntax = ldapAttr.getSyntax(); if (tc.isEventEnabled()) { Tr.event(tc, "ldapAttr " + ldapAttr + " syntax is " + syntax); } } try { if (isMany) { for (NamingEnumeration<?> enu = attr.getAll(); enu.hasMoreElements();) { Object ldapValue = enu.nextElement(); if (ldapValue != null) { entity.set(propName, processPropertyValue(entity, propName, dataType, syntax, ldapValue)); // depends on control dependency: [if], data = [none] } } } else { Object ldapValue = attr.get(); if (ldapValue != null) { entity.set(propName, processPropertyValue(entity, propName, dataType, syntax, ldapValue)); // depends on control dependency: [if], data = [none] } } } catch (NamingException e) { if (tc.isEventEnabled()) { Tr.event(tc, "Unexpected on " + propName + " with dataType " + dataType, e); // depends on control dependency: [if], data = [none] } String msg = Tr.formatMessage(tc, WIMMessageKey.NAMING_EXCEPTION, WIMMessageHelper.generateMsgParms(e.toString(true))); throw new WIMSystemException(WIMMessageKey.NAMING_EXCEPTION, msg, e); } catch (ClassCastException ce) { if (tc.isEventEnabled()) { Tr.event(tc, "Failed to cast property " + propName + " to " + dataType, ce); // depends on control dependency: [if], data = [none] } if (tc.isErrorEnabled()) Tr.error(tc, WIMMessageKey.INVALID_PROPERTY_DATA_TYPE, WIMMessageHelper.generateMsgParms(propName)); } catch (ArrayStoreException ae) { if (tc.isEventEnabled()) { Tr.event(tc, "Unexpected on " + propName + " with dataType " + dataType, ae); // depends on control dependency: [if], data = [none] } if (tc.isErrorEnabled()) Tr.error(tc, WIMMessageKey.INVALID_PROPERTY_DATA_TYPE, 
WIMMessageHelper.generateMsgParms(propName)); } } }
public class class_name {
    /**
     * Resolves the converter name setting for the given class/field pair.
     * When no explicit per-field settings exist for {@code pCls} and the cached
     * name is one of the type-suffixed converters (CnvTfsEnum, CnvTfsObject, or a
     * CnvTfsHasId-prefixed name), the field's simple type name is appended once
     * and the specialized name is cached back into the lazy settings map.
     *
     * @param pCls   class that declares the field
     * @param pFldNm field name
     * @return converter name setting, possibly specialized; may be null
     */
    @Override
    public final String getFor(final Class<?> pCls, final String pFldNm) {
        String cnNm;
        // true when there are no explicit field settings for pCls (name still generic)
        boolean isCr = this.mngSettings.getFieldsSettings() == null
            || this.mngSettings.getFieldsSettings().get(pCls) == null;
        cnNm = this.mngSettings.lazFldSts(pCls, pFldNm).get(this.settingName);
        if (cnNm != null && isCr
            && (CnvTfsEnum.class.getSimpleName().equals(cnNm)
                || CnvTfsObject.class.getSimpleName().equals(cnNm)
                || cnNm.startsWith(CnvTfsHasId.class.getSimpleName()))) {
            Field field = this.fieldsRapiHolder.getFor(pCls, pFldNm);
            // Fixed: use the wildcard-parameterized Class<?> instead of the raw type.
            Class<?> fldCl = field.getType();
            cnNm += fldCl.getSimpleName();
            this.mngSettings.lazFldSts(pCls, pFldNm).put(this.settingName, cnNm);
        }
        return cnNm;
    }
}
// Dependency-annotated copy of getFor: the inline label marks the [if]-controlled
// cache write-back. Dataset row; trailing code is comment-swallowed on this line.
public class class_name { @Override public final String getFor(final Class<?> pCls, final String pFldNm) { String cnNm; boolean isCr = this.mngSettings.getFieldsSettings() == null || this.mngSettings.getFieldsSettings().get(pCls) == null; cnNm = this.mngSettings.lazFldSts(pCls, pFldNm).get(this.settingName); if (cnNm != null && isCr && (CnvTfsEnum.class.getSimpleName().equals(cnNm) || CnvTfsObject.class.getSimpleName().equals(cnNm) || cnNm.startsWith(CnvTfsHasId.class.getSimpleName()))) { Field field = this.fieldsRapiHolder.getFor(pCls, pFldNm); Class fldCl = field.getType(); cnNm += fldCl.getSimpleName(); this.mngSettings.lazFldSts(pCls, pFldNm).put(this.settingName, cnNm); // depends on control dependency: [if], data = [none] } return cnNm; } }
public class class_name { protected static long obtainLongConfigParameter(MessageStoreImpl msi, String parameterName, String defaultValue, long minValue, long maxValue) { long value = Long.parseLong(defaultValue); if (msi != null) { String strValue = msi.getProperty(parameterName, defaultValue); if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { SibTr.debug(tc, parameterName + "=" + strValue); }; // end if try { value = Long.parseLong(strValue); if ((value < minValue) || (value > maxValue)) { value = Long.parseLong(defaultValue); if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { SibTr.debug(tc, "OVERRIDE: " + parameterName + "=" + strValue); }; // end if }; // end if } catch (NumberFormatException nfexc) { //No FFDC Code Needed. } }; // end if return value; } }
// Dependency-annotated copy of obtainLongConfigParameter: inline labels mark the
// [if]-, [try]- and [catch]-controlled statements. Dataset row preserved verbatim.
public class class_name { protected static long obtainLongConfigParameter(MessageStoreImpl msi, String parameterName, String defaultValue, long minValue, long maxValue) { long value = Long.parseLong(defaultValue); if (msi != null) { String strValue = msi.getProperty(parameterName, defaultValue); if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { SibTr.debug(tc, parameterName + "=" + strValue); // depends on control dependency: [if], data = [none] }; // end if try { value = Long.parseLong(strValue); // depends on control dependency: [try], data = [none] if ((value < minValue) || (value > maxValue)) { value = Long.parseLong(defaultValue); // depends on control dependency: [if], data = [none] if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { SibTr.debug(tc, "OVERRIDE: " + parameterName + "=" + strValue); // depends on control dependency: [if], data = [none] }; // end if }; // end if } catch (NumberFormatException nfexc) { //No FFDC Code Needed. } // depends on control dependency: [catch], data = [none] }; // end if return value; } }
// Validates a scale request: the new key ranges must individually be well-formed per
// the noneMatch predicate, and the union of the sealed segments' old ranges (from the
// current epoch, unknown segment ids filtered out) must equal the union of the new
// ranges after reduce(). Returns false when either check fails.
// NOTE(review): the predicate rejects ranges only when key >= value AND value > 0 —
// i.e. a degenerate range with value <= 0 passes this check; confirm that is intended
// rather than `||`. Code preserved byte-identical.
public class class_name { public static boolean validateInputRange(final List<Long> segmentsToSeal, final List<Map.Entry<Double, Double>> newRanges, final EpochRecord currentEpoch) { boolean newRangesCheck = newRanges.stream().noneMatch(x -> x.getKey() >= x.getValue() && x.getValue() > 0); if (newRangesCheck) { List<Map.Entry<Double, Double>> oldRanges = segmentsToSeal.stream() .map(segmentId -> { StreamSegmentRecord segment = currentEpoch.getSegment(segmentId); if (segment != null) { return new AbstractMap.SimpleEntry<>(segment.getKeyStart(), segment.getKeyEnd()); } else { return null; } }).filter(Objects::nonNull) .collect(Collectors.toList()); return reduce(oldRanges).equals(reduce(newRanges)); } return false; } }
// Dependency-annotated copy of validateInputRange: inline labels mark the
// [if]-controlled returns inside the mapping lambda. Dataset row preserved verbatim.
public class class_name { public static boolean validateInputRange(final List<Long> segmentsToSeal, final List<Map.Entry<Double, Double>> newRanges, final EpochRecord currentEpoch) { boolean newRangesCheck = newRanges.stream().noneMatch(x -> x.getKey() >= x.getValue() && x.getValue() > 0); if (newRangesCheck) { List<Map.Entry<Double, Double>> oldRanges = segmentsToSeal.stream() .map(segmentId -> { StreamSegmentRecord segment = currentEpoch.getSegment(segmentId); if (segment != null) { return new AbstractMap.SimpleEntry<>(segment.getKeyStart(), segment.getKeyEnd()); // depends on control dependency: [if], data = [(segment] } else { return null; // depends on control dependency: [if], data = [none] } }).filter(Objects::nonNull) .collect(Collectors.toList()); return reduce(oldRanges).equals(reduce(newRanges)); } return false; } }
public class class_name {
    /**
     * Looks up the single PolicyLimit for the given user/counter pair via the
     * "PolicyLimit.findPolicyLimitByUserAndCounter" named query.
     *
     * @return the matching PolicyLimit, or null when no row matches
     */
    public static PolicyLimit findPolicyLimitByUserAndCounter(EntityManager em, PrincipalUser user, PolicyCounter counter) {
        TypedQuery<PolicyLimit> namedQuery =
            em.createNamedQuery("PolicyLimit.findPolicyLimitByUserAndCounter", PolicyLimit.class);
        namedQuery.setParameter("user", user);
        namedQuery.setParameter("counter", counter);
        try {
            return namedQuery.getSingleResult();
        } catch (NoResultException ex) {
            // Absence is an expected outcome, not an error.
            return null;
        }
    }
}
// Dependency-annotated copy of findPolicyLimitByUserAndCounter: inline labels mark
// the [try]- and [catch]-controlled statements. Dataset row preserved verbatim.
public class class_name { public static PolicyLimit findPolicyLimitByUserAndCounter(EntityManager em, PrincipalUser user, PolicyCounter counter) { TypedQuery<PolicyLimit> query = em.createNamedQuery("PolicyLimit.findPolicyLimitByUserAndCounter", PolicyLimit.class); try { query.setParameter("user", user); // depends on control dependency: [try], data = [none] query.setParameter("counter", counter); // depends on control dependency: [try], data = [none] return query.getSingleResult(); // depends on control dependency: [try], data = [none] } catch (NoResultException ex) { return null; } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Resolves a License for the given identifier. When no stored license matches,
     * a placeholder "to be identified" license flagged as unknown is returned; when
     * several match, a warning is emitted and the first match is used.
     */
    private License getLicense(final String licenseId) {
        final Set<DbLicense> matchingLicenses = licenseMatcher.getMatchingLicenses(licenseId);
        if (matchingLicenses.isEmpty()) {
            // No match: build the "to be identified" placeholder.
            final License placeholder = DataModelFactory.createLicense("#" + licenseId + "# (to be identified)",
                NOT_IDENTIFIED_YET, NOT_IDENTIFIED_YET, NOT_IDENTIFIED_YET, NOT_IDENTIFIED_YET);
            placeholder.setUnknown(true);
            return placeholder;
        }
        if (matchingLicenses.size() > 1 && LOG.isWarnEnabled()) {
            LOG.warn(String.format("%s matches multiple licenses %s. " +
                "Please run the report showing multiple matching on licenses", licenseId, matchingLicenses.toString()));
        }
        return mapper.getLicense(matchingLicenses.iterator().next());
    }
}
// Dependency-annotated copy of getLicense: inline labels mark the [if]-controlled
// statements in both branches. Dataset row preserved verbatim.
public class class_name { private License getLicense(final String licenseId) { License result = null; final Set<DbLicense> matchingLicenses = licenseMatcher.getMatchingLicenses(licenseId); if (matchingLicenses.isEmpty()) { result = DataModelFactory.createLicense("#" + licenseId + "# (to be identified)", NOT_IDENTIFIED_YET, NOT_IDENTIFIED_YET, NOT_IDENTIFIED_YET, NOT_IDENTIFIED_YET); // depends on control dependency: [if], data = [none] result.setUnknown(true); // depends on control dependency: [if], data = [none] } else { if (matchingLicenses.size() > 1 && LOG.isWarnEnabled()) { LOG.warn(String.format("%s matches multiple licenses %s. " + "Please run the report showing multiple matching on licenses", licenseId, matchingLicenses.toString())); // depends on control dependency: [if], data = [none] } result = mapper.getLicense(matchingLicenses.iterator().next()); // depends on control dependency: [if], data = [none] } return result; } }
public class class_name { protected <T> void buildCallbackMethods(final BeanMapping<T> beanMapping, final Class<T> beanType, final CsvBean beanAnno) { // コールバック用のメソッドの取得 for(Method method : beanType.getDeclaredMethods()) { if(method.getAnnotation(CsvPreRead.class) != null) { beanMapping.addPreReadMethod(new CallbackMethod(method)); } if(method.getAnnotation(CsvPostRead.class) != null) { beanMapping.addPostReadMethod(new CallbackMethod(method)); } if(method.getAnnotation(CsvPreWrite.class) != null) { beanMapping.addPreWriteMethod(new CallbackMethod(method)); } if(method.getAnnotation(CsvPostWrite.class) != null) { beanMapping.addPostWriteMethod(new CallbackMethod(method)); } } // リスナークラスの取得 final List<Object> listeners = Arrays.stream(beanAnno.listeners()) .map(l -> configuration.getBeanFactory().create(l)) .collect(Collectors.toList()); beanMapping.addAllListeners(listeners); for(Object listener : listeners) { for(Method method : listener.getClass().getDeclaredMethods()) { if(method.getAnnotation(CsvPreRead.class) != null) { beanMapping.addPreReadMethod(new ListenerCallbackMethod(listener, method)); } if(method.getAnnotation(CsvPostRead.class) != null) { beanMapping.addPostReadMethod(new ListenerCallbackMethod(listener, method)); } if(method.getAnnotation(CsvPreWrite.class) != null) { beanMapping.addPreWriteMethod(new ListenerCallbackMethod(listener, method)); } if(method.getAnnotation(CsvPostWrite.class) != null) { beanMapping.addPostWriteMethod(new ListenerCallbackMethod(listener, method)); } } } beanMapping.getPreReadMethods().sort(null); beanMapping.getPostReadMethods().sort(null); beanMapping.getPreWriteMethods().sort(null); beanMapping.getPostWriteMethods().sort(null); } }
public class class_name { protected <T> void buildCallbackMethods(final BeanMapping<T> beanMapping, final Class<T> beanType, final CsvBean beanAnno) { // コールバック用のメソッドの取得 for(Method method : beanType.getDeclaredMethods()) { if(method.getAnnotation(CsvPreRead.class) != null) { beanMapping.addPreReadMethod(new CallbackMethod(method)); // depends on control dependency: [if], data = [none] } if(method.getAnnotation(CsvPostRead.class) != null) { beanMapping.addPostReadMethod(new CallbackMethod(method)); // depends on control dependency: [if], data = [none] } if(method.getAnnotation(CsvPreWrite.class) != null) { beanMapping.addPreWriteMethod(new CallbackMethod(method)); // depends on control dependency: [if], data = [none] } if(method.getAnnotation(CsvPostWrite.class) != null) { beanMapping.addPostWriteMethod(new CallbackMethod(method)); // depends on control dependency: [if], data = [none] } } // リスナークラスの取得 final List<Object> listeners = Arrays.stream(beanAnno.listeners()) .map(l -> configuration.getBeanFactory().create(l)) .collect(Collectors.toList()); beanMapping.addAllListeners(listeners); for(Object listener : listeners) { for(Method method : listener.getClass().getDeclaredMethods()) { if(method.getAnnotation(CsvPreRead.class) != null) { beanMapping.addPreReadMethod(new ListenerCallbackMethod(listener, method)); // depends on control dependency: [if], data = [none] } if(method.getAnnotation(CsvPostRead.class) != null) { beanMapping.addPostReadMethod(new ListenerCallbackMethod(listener, method)); // depends on control dependency: [if], data = [none] } if(method.getAnnotation(CsvPreWrite.class) != null) { beanMapping.addPreWriteMethod(new ListenerCallbackMethod(listener, method)); // depends on control dependency: [if], data = [none] } if(method.getAnnotation(CsvPostWrite.class) != null) { beanMapping.addPostWriteMethod(new ListenerCallbackMethod(listener, method)); // depends on control dependency: [if], data = [none] } } } beanMapping.getPreReadMethods().sort(null); 
beanMapping.getPostReadMethods().sort(null); beanMapping.getPreWriteMethods().sort(null); beanMapping.getPostWriteMethods().sort(null); } }
public class class_name { @Override void closeTxEntityManager(EntityManager em, boolean allowPooling) { if (em != null && em.isOpen()) { if (allowPooling) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "closeTxEntityManager is pooling JTA em: " + em); ivEntityManagerPool.putEntityManager(em); } else { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "closeTxEntityManager is closing JTA em: " + em); em.close(); } } } }
public class class_name { @Override void closeTxEntityManager(EntityManager em, boolean allowPooling) { if (em != null && em.isOpen()) { if (allowPooling) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "closeTxEntityManager is pooling JTA em: " + em); ivEntityManagerPool.putEntityManager(em); // depends on control dependency: [if], data = [none] } else { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "closeTxEntityManager is closing JTA em: " + em); em.close(); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public boolean isMatchedByPreviousException(AnalyzedToken token) { if (exceptionValidPrevious) { for (PatternToken testException : previousExceptionList) { if (!testException.exceptionValidNext) { if (testException.isMatched(token)) { return true; } } } } return false; } }
public class class_name { public boolean isMatchedByPreviousException(AnalyzedToken token) { if (exceptionValidPrevious) { for (PatternToken testException : previousExceptionList) { if (!testException.exceptionValidNext) { if (testException.isMatched(token)) { return true; // depends on control dependency: [if], data = [none] } } } } return false; } }
public class class_name { @Override public void onMessageReceived(RemoteMessage remoteMessage) { Map<String, String> data = remoteMessage.getData(); if (null == data) { return; } LOGGER.d("received message from: " + remoteMessage.getFrom() + ", payload: " + data.toString()); try { JSONObject jsonObject = JSON.parseObject(data.get("payload")); if (null != jsonObject) { String channel = jsonObject.getString("_channel"); String action = jsonObject.getString("action"); AndroidNotificationManager androidNotificationManager = AndroidNotificationManager.getInstance(); androidNotificationManager.processGcmMessage(channel, action, jsonObject.toJSONString()); } } catch (Exception ex) { LOGGER.e("failed to parse push data.", ex); } } }
public class class_name { @Override public void onMessageReceived(RemoteMessage remoteMessage) { Map<String, String> data = remoteMessage.getData(); if (null == data) { return; // depends on control dependency: [if], data = [none] } LOGGER.d("received message from: " + remoteMessage.getFrom() + ", payload: " + data.toString()); try { JSONObject jsonObject = JSON.parseObject(data.get("payload")); if (null != jsonObject) { String channel = jsonObject.getString("_channel"); String action = jsonObject.getString("action"); AndroidNotificationManager androidNotificationManager = AndroidNotificationManager.getInstance(); androidNotificationManager.processGcmMessage(channel, action, jsonObject.toJSONString()); } } catch (Exception ex) { LOGGER.e("failed to parse push data.", ex); } } }
public class class_name { @Override public EntityManager getEntityManager (JPAPuId puId, J2EEName j2eeName, // d510184 String refName, // d510184 boolean isExtendedContextType, boolean isUnsynchronized, Map<?, ?> properties) { final boolean isTraceOn = TraceComponent.isAnyTracingEnabled(); if (isTraceOn && tc.isEntryEnabled()) Tr.entry(tc, "getEntityManager : " + puId + ", " + j2eeName); EntityManager em = null; JPAPUnitInfo puInfo = findPersistenceUnitInfo(puId); if (puInfo != null) { em = isExtendedContextType ? getJPARuntime().createJPAExEntityManager(puId, puInfo, j2eeName, refName, properties, isUnsynchronized, this) : getJPARuntime().createJPATxEntityManager(puId, puInfo, j2eeName, refName, properties, isUnsynchronized, this); } if (isTraceOn && tc.isEntryEnabled()) Tr.exit(tc, "getEntityManager : " + em); return em; } }
public class class_name { @Override public EntityManager getEntityManager (JPAPuId puId, J2EEName j2eeName, // d510184 String refName, // d510184 boolean isExtendedContextType, boolean isUnsynchronized, Map<?, ?> properties) { final boolean isTraceOn = TraceComponent.isAnyTracingEnabled(); if (isTraceOn && tc.isEntryEnabled()) Tr.entry(tc, "getEntityManager : " + puId + ", " + j2eeName); EntityManager em = null; JPAPUnitInfo puInfo = findPersistenceUnitInfo(puId); if (puInfo != null) { em = isExtendedContextType ? getJPARuntime().createJPAExEntityManager(puId, puInfo, j2eeName, refName, properties, isUnsynchronized, this) : getJPARuntime().createJPATxEntityManager(puId, puInfo, j2eeName, refName, properties, isUnsynchronized, this); // depends on control dependency: [if], data = [none] } if (isTraceOn && tc.isEntryEnabled()) Tr.exit(tc, "getEntityManager : " + em); return em; } }
public class class_name { public static void generateSQLForInsertDynamic(final SQLiteModelMethod method, MethodSpec.Builder methodBuilder) { methodBuilder.addComment("generate SQL for insert"); JQLChecker checker = JQLChecker.getInstance(); // replace the table name, other pieces will be removed String sql = checker.replace(method, method.jql, new JQLReplacerListenerImpl(method) { @Override public String onBindParameter(String bindParameterName, boolean inStatement) { return "?"; } }); final One<Integer> counter = new One<Integer>(0); sql = checker.replaceVariableStatements(method, sql, new JQLReplaceVariableStatementListenerImpl() { @Override public String onColumnNameSet(String statement) { counter.value0++; return "%s"; } @Override public String onColumnValueSet(String statement) { counter.value0++; return "%s"; } }); if (counter.value0 == 2) { methodBuilder.addStatement("String _sql=String.format($S, _contentValues.keyList(), _contentValues.keyValueList())", sql); } else { methodBuilder.addStatement("String _sql=String.format($S, _contentValues.keyList())", sql); } } }
public class class_name { public static void generateSQLForInsertDynamic(final SQLiteModelMethod method, MethodSpec.Builder methodBuilder) { methodBuilder.addComment("generate SQL for insert"); JQLChecker checker = JQLChecker.getInstance(); // replace the table name, other pieces will be removed String sql = checker.replace(method, method.jql, new JQLReplacerListenerImpl(method) { @Override public String onBindParameter(String bindParameterName, boolean inStatement) { return "?"; } }); final One<Integer> counter = new One<Integer>(0); sql = checker.replaceVariableStatements(method, sql, new JQLReplaceVariableStatementListenerImpl() { @Override public String onColumnNameSet(String statement) { counter.value0++; return "%s"; } @Override public String onColumnValueSet(String statement) { counter.value0++; return "%s"; } }); if (counter.value0 == 2) { methodBuilder.addStatement("String _sql=String.format($S, _contentValues.keyList(), _contentValues.keyValueList())", sql); // depends on control dependency: [if], data = [none] } else { methodBuilder.addStatement("String _sql=String.format($S, _contentValues.keyList())", sql); // depends on control dependency: [if], data = [none] } } }
public class class_name { public Collection<Subscription> getSubscriptionsByMessageType(Class messageType) { Set<Subscription> subscriptions = new TreeSet<Subscription>(Subscription.SubscriptionByPriorityDesc); ReadLock readLock = readWriteLock.readLock(); try { readLock.lock(); Subscription subscription; ArrayList<Subscription> subsPerMessage = subscriptionsPerMessage.get(messageType); if (subsPerMessage != null) { subscriptions.addAll(subsPerMessage); } Class[] types = ReflectionUtils.getSuperTypes(messageType); for (int i=0, n=types.length; i<n; i++) { Class eventSuperType = types[i]; ArrayList<Subscription> subs = subscriptionsPerMessage.get(eventSuperType); if (subs != null) { for (int j = 0,m=subs.size(); j<m; j++) { subscription = subs.get(j); if (subscription.handlesMessageType(messageType)) { subscriptions.add(subscription); } } } } }finally{ readLock.unlock(); } return subscriptions; } }
public class class_name { public Collection<Subscription> getSubscriptionsByMessageType(Class messageType) { Set<Subscription> subscriptions = new TreeSet<Subscription>(Subscription.SubscriptionByPriorityDesc); ReadLock readLock = readWriteLock.readLock(); try { readLock.lock(); // depends on control dependency: [try], data = [none] Subscription subscription; ArrayList<Subscription> subsPerMessage = subscriptionsPerMessage.get(messageType); if (subsPerMessage != null) { subscriptions.addAll(subsPerMessage); // depends on control dependency: [if], data = [(subsPerMessage] } Class[] types = ReflectionUtils.getSuperTypes(messageType); for (int i=0, n=types.length; i<n; i++) { Class eventSuperType = types[i]; ArrayList<Subscription> subs = subscriptionsPerMessage.get(eventSuperType); if (subs != null) { for (int j = 0,m=subs.size(); j<m; j++) { subscription = subs.get(j); // depends on control dependency: [for], data = [j] if (subscription.handlesMessageType(messageType)) { subscriptions.add(subscription); // depends on control dependency: [if], data = [none] } } } } }finally{ readLock.unlock(); } return subscriptions; } }
public class class_name { public List<Object> toList() { List<Object> copy = new ArrayList<Object>(content.size()); for (Object o : content) { if (o instanceof JsonObject) { copy.add(((JsonObject) o).toMap()); } else if (o instanceof JsonArray) { copy.add(((JsonArray) o).toList()); } else { copy.add(o); } } return copy; } }
public class class_name { public List<Object> toList() { List<Object> copy = new ArrayList<Object>(content.size()); for (Object o : content) { if (o instanceof JsonObject) { copy.add(((JsonObject) o).toMap()); // depends on control dependency: [if], data = [none] } else if (o instanceof JsonArray) { copy.add(((JsonArray) o).toList()); // depends on control dependency: [if], data = [none] } else { copy.add(o); // depends on control dependency: [if], data = [none] } } return copy; } }
public class class_name { public static <T> T getLast(Iterable<T> iterable) { // TODO(kevinb): Support a concurrently modified collection? if (iterable instanceof List) { List<T> list = (List<T>) iterable; if (list.isEmpty()) { throw new NoSuchElementException(); } return getLastInNonemptyList(list); } return Iterators.getLast(iterable.iterator()); } }
public class class_name { public static <T> T getLast(Iterable<T> iterable) { // TODO(kevinb): Support a concurrently modified collection? if (iterable instanceof List) { List<T> list = (List<T>) iterable; if (list.isEmpty()) { throw new NoSuchElementException(); } return getLastInNonemptyList(list); // depends on control dependency: [if], data = [none] } return Iterators.getLast(iterable.iterator()); } }
public class class_name { public Config setReliableTopicConfigs(Map<String, ReliableTopicConfig> reliableTopicConfigs) { this.reliableTopicConfigs.clear(); this.reliableTopicConfigs.putAll(reliableTopicConfigs); for (Entry<String, ReliableTopicConfig> entry : reliableTopicConfigs.entrySet()) { entry.getValue().setName(entry.getKey()); } return this; } }
public class class_name { public Config setReliableTopicConfigs(Map<String, ReliableTopicConfig> reliableTopicConfigs) { this.reliableTopicConfigs.clear(); this.reliableTopicConfigs.putAll(reliableTopicConfigs); for (Entry<String, ReliableTopicConfig> entry : reliableTopicConfigs.entrySet()) { entry.getValue().setName(entry.getKey()); // depends on control dependency: [for], data = [entry] } return this; } }
public class class_name { private static SimpleStatsProducer<MBeanStats> buildProducer(MBeanServer server, ObjectInstance mBean) { final ObjectName mBeanName = mBean.getObjectName(); final String canonicalName = mBeanName.getCanonicalName(); final String producerId = normalize(canonicalName); final String subsystem = normalize(mBeanName.getDomain()); try { MBeanStats stats = MBeanStatsFactory.createMBeanStats( server, mBeanName, conf.isUpdateAutomatically(), conf.getDelayBeforeFirstUpdate() ); if(stats != null) return new SimpleStatsProducer<>( producerId, "mbean", subsystem, Collections.singletonList(stats) ); log.info("Failed to create stats object from mbean named " + mBean + " because no one attribute can not be parsed from that been"); return null; } catch (JMException e) { log.warn("Failed to create stats object from mbean named " + mBean, e); return null; } } }
public class class_name { private static SimpleStatsProducer<MBeanStats> buildProducer(MBeanServer server, ObjectInstance mBean) { final ObjectName mBeanName = mBean.getObjectName(); final String canonicalName = mBeanName.getCanonicalName(); final String producerId = normalize(canonicalName); final String subsystem = normalize(mBeanName.getDomain()); try { MBeanStats stats = MBeanStatsFactory.createMBeanStats( server, mBeanName, conf.isUpdateAutomatically(), conf.getDelayBeforeFirstUpdate() ); if(stats != null) return new SimpleStatsProducer<>( producerId, "mbean", subsystem, Collections.singletonList(stats) ); log.info("Failed to create stats object from mbean named " + mBean + " because no one attribute can not be parsed from that been"); // depends on control dependency: [try], data = [none] return null; // depends on control dependency: [try], data = [none] } catch (JMException e) { log.warn("Failed to create stats object from mbean named " + mBean, e); return null; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void marshall(DescribeUsersRequest describeUsersRequest, ProtocolMarshaller protocolMarshaller) { if (describeUsersRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(describeUsersRequest.getAuthenticationType(), AUTHENTICATIONTYPE_BINDING); protocolMarshaller.marshall(describeUsersRequest.getMaxResults(), MAXRESULTS_BINDING); protocolMarshaller.marshall(describeUsersRequest.getNextToken(), NEXTTOKEN_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(DescribeUsersRequest describeUsersRequest, ProtocolMarshaller protocolMarshaller) { if (describeUsersRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(describeUsersRequest.getAuthenticationType(), AUTHENTICATIONTYPE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(describeUsersRequest.getMaxResults(), MAXRESULTS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(describeUsersRequest.getNextToken(), NEXTTOKEN_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void setReason(String msg) { if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) { //306998.15 logger.logp(Level.FINE, CLASS_NAME,"setReason", " message --> " + msg,"["+this+"]"); } _response.setReason(msg); } }
public class class_name { public void setReason(String msg) { if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) { //306998.15 logger.logp(Level.FINE, CLASS_NAME,"setReason", " message --> " + msg,"["+this+"]"); // depends on control dependency: [if], data = [none] } _response.setReason(msg); } }
public class class_name { @Override public boolean accept( final AuditLogEntry entry ) { if ( !acceptedTypes.containsKey( entry.getGenericType() ) ) { return false; } return acceptedTypes.get( entry.getGenericType() ); } }
public class class_name { @Override public boolean accept( final AuditLogEntry entry ) { if ( !acceptedTypes.containsKey( entry.getGenericType() ) ) { return false; // depends on control dependency: [if], data = [none] } return acceptedTypes.get( entry.getGenericType() ); } }
public class class_name { public <T> Callback<T> wrapCallback(final String operationDescription, final Callback<T> callback, final Logger logger) { /* Set the callback to delegate to this callback. */ Callback<T> reactiveCallback = callbackBuilder().withCallback(new Callback<T>() { @Override public void accept(T t) { if (logger.isDebugEnabled()) { logger.debug("{} returned {}", operationDescription, t); } callback.resolve(t); } /* Provide some boiler plate error handling. */ }).withErrorHandler(error -> { logger.error(String.format("ERROR calling %s", operationDescription), error); callback.onError(error); /* Provide some boiler timeout handling. */ }).withTimeoutHandler(() -> { logger.error("TIMEOUT calling {}", operationDescription); callback.onTimeout(); }) .build(); return reactiveCallback; } }
public class class_name { public <T> Callback<T> wrapCallback(final String operationDescription, final Callback<T> callback, final Logger logger) { /* Set the callback to delegate to this callback. */ Callback<T> reactiveCallback = callbackBuilder().withCallback(new Callback<T>() { @Override public void accept(T t) { if (logger.isDebugEnabled()) { logger.debug("{} returned {}", operationDescription, t); // depends on control dependency: [if], data = [none] } callback.resolve(t); } /* Provide some boiler plate error handling. */ }).withErrorHandler(error -> { logger.error(String.format("ERROR calling %s", operationDescription), error); callback.onError(error); /* Provide some boiler timeout handling. */ }).withTimeoutHandler(() -> { logger.error("TIMEOUT calling {}", operationDescription); callback.onTimeout(); }) .build(); return reactiveCallback; } }
public class class_name { public StatusData<ApacheMetrics> parse(String status) { if (StringUtils.isEmpty(status)) throw new IllegalArgumentException("Empty status to parse!"); final StatusData<ApacheMetrics> result = new StatusData<>(); String[] lines = StringUtils.tokenize(status, '\n'); //extract hostname separately result.put(ApacheMetrics.HOSTNAME, lines[0]); lines = Arrays.copyOfRange(lines, 1, lines.length); for (String line : lines) { line = line.trim(); final int delimIndex = line.indexOf(": "); if (delimIndex <= 0) { LOGGER.warn("ApacheStatusParser: failed to parse status line: '" + line + "'."); } else { final String metric = line.substring(0, delimIndex); final String value = line.substring(2+ delimIndex); switch (metric) { case "ServerVersion": break; case "ServerMPM": break; case "Server Built": break; case "CurrentTime": break; case "RestartTime": break; case "ParentServerConfigGeneration": break; case "ParentServerMPMGeneration": break; case "ServerUptimeSeconds": result.put(ApacheMetrics.SERVER_UPTIME, value); break; case "ServerUptime": result.put(ApacheMetrics.UPTIME, value); break; case "Load1": result.put(ApacheMetrics.LOAD_1M, value); break; case "Load5": result.put(ApacheMetrics.LOAD_5M, value); break; case "Load15": result.put(ApacheMetrics.LOAD_15M, value); break; case "Total Accesses": result.put(ApacheMetrics.TOTAL_ACCESSES, value); result.put(ApacheMetrics.REQUESTS_PER_SEC, value); break; case "Total kBytes": result.put(ApacheMetrics.TOTAL_KBYTES, value); result.put(ApacheMetrics.KBYTES_PER_SEC, value); break; case "CPUUser": result.put(ApacheMetrics.CPU_USER, value); break; case "CPUSystem": result.put(ApacheMetrics.CPU_SYSTEM, value); break; case "CPUChildrenUser": result.put(ApacheMetrics.CPU_CHILDREN_USER, value); break; case "CPUChildrenSystem": result.put(ApacheMetrics.CPU_CHILDREN_SYSTEM, value); break; case "CPULoad": result.put(ApacheMetrics.CPU_LOAD, value); break; case "Uptime": break; case "ReqPerSec": break; case 
"BytesPerSec": break; case "BytesPerReq": break; case "BusyWorkers": result.put(ApacheMetrics.WORKERS_BUSY, value); break; case "IdleWorkers": result.put(ApacheMetrics.WORKERS_IDLE, value); break; case "ConnsTotal": result.put(ApacheMetrics.CONNECTIONS_TOTAL, value); break; case "ConnsAsyncWriting": result.put(ApacheMetrics.CONNECTIONS_ASYNC_WRITING, value); break; case "ConnsAsyncKeepAlive": result.put(ApacheMetrics.CONNECTIONS_ASYNC_KEEPALIVE, value); break; case "ConnsAsyncClosing": result.put(ApacheMetrics.CONNECTIONS_ASYNC_CLOSING, value); break; case "Scoreboard": parseScoreboard(result, value); break; default: LOGGER.warn("ApacheStatusParser: found unhandled status line: '" + line + "'."); } } } return result; } }
public class class_name { public StatusData<ApacheMetrics> parse(String status) { if (StringUtils.isEmpty(status)) throw new IllegalArgumentException("Empty status to parse!"); final StatusData<ApacheMetrics> result = new StatusData<>(); String[] lines = StringUtils.tokenize(status, '\n'); //extract hostname separately result.put(ApacheMetrics.HOSTNAME, lines[0]); lines = Arrays.copyOfRange(lines, 1, lines.length); for (String line : lines) { line = line.trim(); // depends on control dependency: [for], data = [line] final int delimIndex = line.indexOf(": "); if (delimIndex <= 0) { LOGGER.warn("ApacheStatusParser: failed to parse status line: '" + line + "'."); // depends on control dependency: [if], data = [none] } else { final String metric = line.substring(0, delimIndex); final String value = line.substring(2+ delimIndex); switch (metric) { case "ServerVersion": break; case "ServerMPM": break; case "Server Built": break; case "CurrentTime": break; case "RestartTime": break; case "ParentServerConfigGeneration": break; case "ParentServerMPMGeneration": break; case "ServerUptimeSeconds": result.put(ApacheMetrics.SERVER_UPTIME, value); break; case "ServerUptime": result.put(ApacheMetrics.UPTIME, value); break; case "Load1": result.put(ApacheMetrics.LOAD_1M, value); break; case "Load5": result.put(ApacheMetrics.LOAD_5M, value); break; case "Load15": result.put(ApacheMetrics.LOAD_15M, value); break; case "Total Accesses": result.put(ApacheMetrics.TOTAL_ACCESSES, value); result.put(ApacheMetrics.REQUESTS_PER_SEC, value); break; case "Total kBytes": result.put(ApacheMetrics.TOTAL_KBYTES, value); result.put(ApacheMetrics.KBYTES_PER_SEC, value); break; case "CPUUser": result.put(ApacheMetrics.CPU_USER, value); break; case "CPUSystem": result.put(ApacheMetrics.CPU_SYSTEM, value); break; case "CPUChildrenUser": result.put(ApacheMetrics.CPU_CHILDREN_USER, value); break; case "CPUChildrenSystem": result.put(ApacheMetrics.CPU_CHILDREN_SYSTEM, value); break; case "CPULoad": 
result.put(ApacheMetrics.CPU_LOAD, value); break; case "Uptime": break; case "ReqPerSec": break; case "BytesPerSec": break; case "BytesPerReq": break; case "BusyWorkers": result.put(ApacheMetrics.WORKERS_BUSY, value); break; case "IdleWorkers": result.put(ApacheMetrics.WORKERS_IDLE, value); break; case "ConnsTotal": result.put(ApacheMetrics.CONNECTIONS_TOTAL, value); break; case "ConnsAsyncWriting": result.put(ApacheMetrics.CONNECTIONS_ASYNC_WRITING, value); break; case "ConnsAsyncKeepAlive": result.put(ApacheMetrics.CONNECTIONS_ASYNC_KEEPALIVE, value); break; case "ConnsAsyncClosing": result.put(ApacheMetrics.CONNECTIONS_ASYNC_CLOSING, value); break; case "Scoreboard": parseScoreboard(result, value); break; default: LOGGER.warn("ApacheStatusParser: found unhandled status line: '" + line + "'."); } } } return result; } }
public class class_name { private static String joinWhereAndGetValue(List<Field> fields, String logicOperate, List<Object> values, Object obj) { StringBuilder sb = new StringBuilder(); int fieldSize = fields.size(); for(int i = 0; i < fieldSize; i++) { Column column = fields.get(i).getAnnotation(Column.class); sb.append(getColumnName(column)).append("=?"); if(i < fieldSize - 1) { sb.append(" ").append(logicOperate).append(" "); } Object val = DOInfoReader.getValue(fields.get(i), obj); if(val != null && column.isJSON()) { val = JSON.toJson(val); } values.add(val); } return sb.toString(); } }
public class class_name { private static String joinWhereAndGetValue(List<Field> fields, String logicOperate, List<Object> values, Object obj) { StringBuilder sb = new StringBuilder(); int fieldSize = fields.size(); for(int i = 0; i < fieldSize; i++) { Column column = fields.get(i).getAnnotation(Column.class); sb.append(getColumnName(column)).append("=?"); // depends on control dependency: [for], data = [none] if(i < fieldSize - 1) { sb.append(" ").append(logicOperate).append(" "); // depends on control dependency: [if], data = [none] } Object val = DOInfoReader.getValue(fields.get(i), obj); if(val != null && column.isJSON()) { val = JSON.toJson(val); // depends on control dependency: [if], data = [(val] } values.add(val); // depends on control dependency: [for], data = [none] } return sb.toString(); } }
public class class_name { public void runBeforeApplicationCreateBootstrap( Instrumentation instrumentation, String[] bootstrapClasses) { if (!isWithExtension) { SelendroidLogger.error("Cannot run bootstrap. Must load an extension first."); return; } for (String bootstrapClassName : bootstrapClasses) { try { SelendroidLogger.info("Running beforeApplicationCreate bootstrap: " + bootstrapClassName); loadBootstrap(bootstrapClassName).runBeforeApplicationCreate(instrumentation); SelendroidLogger.info("\"Running beforeApplicationCreate bootstrap: " + bootstrapClassName); } catch (Exception e) { throw new SelendroidException("Cannot run bootstrap " + bootstrapClassName, e); } } } }
public class class_name { public void runBeforeApplicationCreateBootstrap( Instrumentation instrumentation, String[] bootstrapClasses) { if (!isWithExtension) { SelendroidLogger.error("Cannot run bootstrap. Must load an extension first."); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } for (String bootstrapClassName : bootstrapClasses) { try { SelendroidLogger.info("Running beforeApplicationCreate bootstrap: " + bootstrapClassName); // depends on control dependency: [try], data = [none] loadBootstrap(bootstrapClassName).runBeforeApplicationCreate(instrumentation); // depends on control dependency: [try], data = [none] SelendroidLogger.info("\"Running beforeApplicationCreate bootstrap: " + bootstrapClassName); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SelendroidException("Cannot run bootstrap " + bootstrapClassName, e); } // depends on control dependency: [catch], data = [none] } } }
public class class_name { public EEnum getGCBIMGFORMAT() { if (gcbimgformatEEnum == null) { gcbimgformatEEnum = (EEnum)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(136); } return gcbimgformatEEnum; } }
public class class_name { public EEnum getGCBIMGFORMAT() { if (gcbimgformatEEnum == null) { gcbimgformatEEnum = (EEnum)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(136); // depends on control dependency: [if], data = [none] } return gcbimgformatEEnum; } }
public class class_name { public static List<String> bitFieldNames(Message message) { int fieldCount = message.getFieldCount(); if (fieldCount == 0) { return Collections.emptyList(); } List<String> result = new ArrayList<>(); int n = (fieldCount - 1) / 32 + 1; for (int i = 0; i < n; i++) { result.add("__bitField" + i); } return result; } }
public class class_name { public static List<String> bitFieldNames(Message message) { int fieldCount = message.getFieldCount(); if (fieldCount == 0) { return Collections.emptyList(); // depends on control dependency: [if], data = [none] } List<String> result = new ArrayList<>(); int n = (fieldCount - 1) / 32 + 1; for (int i = 0; i < n; i++) { result.add("__bitField" + i); // depends on control dependency: [for], data = [i] } return result; } }
public class class_name { private static boolean detectLimitedDevice(String osName, String osArchitecture, boolean currentSystem) { String os = osName.toLowerCase(Locale.US).trim(); if (os.contains("windows ce")) { return true; } else if (os.contains("darvin")) { return true; } else if (os.contains("android")) { return true; } else if (os.contains("phone")) { return true; } else if (os.contains("firefox os")) { return true; } else if (os.contains("bada")) { return true; } else if (os.contains("sailfish")) { return true; } else if (os.contains("tvos")) { return true; } else if (os.startsWith("ios")) { return true; } else if (os.contains("iphone")) { return true; } else if (os.contains("nintendo")) { return true; } else if (os.contains("wii")) { return true; } else if (os.contains("xbox")) { return true; } else if (os.contains("playstation")) { return true; } else if (os.startsWith("rim ")) { return true; } String arch = osArchitecture.toLowerCase(Locale.US).trim(); if (arch.equals("arm")) { return true; } if (currentSystem) { String vmName = System.getProperty(SystemUtil.PROPERTY_JAVA_VM_NAME).toLowerCase(Locale.US); if (vmName.contains("dalvik")) { return true; } } return false; } }
public class class_name { private static boolean detectLimitedDevice(String osName, String osArchitecture, boolean currentSystem) { String os = osName.toLowerCase(Locale.US).trim(); if (os.contains("windows ce")) { return true; // depends on control dependency: [if], data = [none] } else if (os.contains("darvin")) { return true; // depends on control dependency: [if], data = [none] } else if (os.contains("android")) { return true; // depends on control dependency: [if], data = [none] } else if (os.contains("phone")) { return true; // depends on control dependency: [if], data = [none] } else if (os.contains("firefox os")) { return true; // depends on control dependency: [if], data = [none] } else if (os.contains("bada")) { return true; // depends on control dependency: [if], data = [none] } else if (os.contains("sailfish")) { return true; // depends on control dependency: [if], data = [none] } else if (os.contains("tvos")) { return true; // depends on control dependency: [if], data = [none] } else if (os.startsWith("ios")) { return true; // depends on control dependency: [if], data = [none] } else if (os.contains("iphone")) { return true; // depends on control dependency: [if], data = [none] } else if (os.contains("nintendo")) { return true; // depends on control dependency: [if], data = [none] } else if (os.contains("wii")) { return true; // depends on control dependency: [if], data = [none] } else if (os.contains("xbox")) { return true; // depends on control dependency: [if], data = [none] } else if (os.contains("playstation")) { return true; // depends on control dependency: [if], data = [none] } else if (os.startsWith("rim ")) { return true; // depends on control dependency: [if], data = [none] } String arch = osArchitecture.toLowerCase(Locale.US).trim(); if (arch.equals("arm")) { return true; // depends on control dependency: [if], data = [none] } if (currentSystem) { String vmName = System.getProperty(SystemUtil.PROPERTY_JAVA_VM_NAME).toLowerCase(Locale.US); 
if (vmName.contains("dalvik")) { return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name { public Observable<ServiceResponse<Page<ResourceMetricDefinitionInner>>> listMultiRoleMetricDefinitionsWithServiceResponseAsync(final String resourceGroupName, final String name) { return listMultiRoleMetricDefinitionsSinglePageAsync(resourceGroupName, name) .concatMap(new Func1<ServiceResponse<Page<ResourceMetricDefinitionInner>>, Observable<ServiceResponse<Page<ResourceMetricDefinitionInner>>>>() { @Override public Observable<ServiceResponse<Page<ResourceMetricDefinitionInner>>> call(ServiceResponse<Page<ResourceMetricDefinitionInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); } return Observable.just(page).concatWith(listMultiRoleMetricDefinitionsNextWithServiceResponseAsync(nextPageLink)); } }); } }
public class class_name { public Observable<ServiceResponse<Page<ResourceMetricDefinitionInner>>> listMultiRoleMetricDefinitionsWithServiceResponseAsync(final String resourceGroupName, final String name) { return listMultiRoleMetricDefinitionsSinglePageAsync(resourceGroupName, name) .concatMap(new Func1<ServiceResponse<Page<ResourceMetricDefinitionInner>>, Observable<ServiceResponse<Page<ResourceMetricDefinitionInner>>>>() { @Override public Observable<ServiceResponse<Page<ResourceMetricDefinitionInner>>> call(ServiceResponse<Page<ResourceMetricDefinitionInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); // depends on control dependency: [if], data = [none] } return Observable.just(page).concatWith(listMultiRoleMetricDefinitionsNextWithServiceResponseAsync(nextPageLink)); } }); } }
public class class_name { public void mulRow(int i, double[] arr, int offset) { int n = cols; for (int j=0;j<n;j++) { mul(i, j, arr[j+offset]); } } }
public class class_name { public void mulRow(int i, double[] arr, int offset) { int n = cols; for (int j=0;j<n;j++) { mul(i, j, arr[j+offset]); // depends on control dependency: [for], data = [j] } } }
public class class_name { @Override public Object invoke(String methodName, Argument[] paras, String returnType) throws RemoteException { log.trace("Invoking method: " + methodName); String soapMsg = marshall(methodName, paras); InputStream is = null; try { is = post(soapMsg); log.trace("Converting xml response from server to: " + returnType); return unMarshall(returnType, is); } catch (Exception e1) { log.error("Exception caught while invoking method.", e1); throw new RemoteException("VI SDK invoke exception:" + e1, e1); } finally { if (is != null) { try { is.close(); } catch (IOException ignored) { } } } } }
public class class_name { @Override public Object invoke(String methodName, Argument[] paras, String returnType) throws RemoteException { log.trace("Invoking method: " + methodName); String soapMsg = marshall(methodName, paras); InputStream is = null; try { is = post(soapMsg); log.trace("Converting xml response from server to: " + returnType); return unMarshall(returnType, is); } catch (Exception e1) { log.error("Exception caught while invoking method.", e1); throw new RemoteException("VI SDK invoke exception:" + e1, e1); } finally { if (is != null) { try { is.close(); // depends on control dependency: [try], data = [none] } catch (IOException ignored) { } // depends on control dependency: [catch], data = [none] } } } }
public class class_name { protected final SchedulesSkill getSchedulesSkill() { if (this.skillBufferSchedules == null || this.skillBufferSchedules.get() == null) { this.skillBufferSchedules = $getSkill(Schedules.class); } return $castSkill(SchedulesSkill.class, this.skillBufferSchedules); } }
public class class_name { protected final SchedulesSkill getSchedulesSkill() { if (this.skillBufferSchedules == null || this.skillBufferSchedules.get() == null) { this.skillBufferSchedules = $getSkill(Schedules.class); // depends on control dependency: [if], data = [none] } return $castSkill(SchedulesSkill.class, this.skillBufferSchedules); } }
public class class_name { public MapComposedElement invert() { if (this.pointCoordinates == null) { throw new IndexOutOfBoundsException(); } double[] tmp = new double[this.pointCoordinates.length]; for (int i = 0; i < this.pointCoordinates.length; i += 2) { tmp[i] = this.pointCoordinates[this.pointCoordinates.length - 1 - (i + 1)]; tmp[i + 1] = this.pointCoordinates[this.pointCoordinates.length - 1 - i]; } System.arraycopy(tmp, 0, this.pointCoordinates, 0, this.pointCoordinates.length); if (this.partIndexes != null) { int[] tmpint = new int[this.partIndexes.length]; //part 0 not inside the index array for (int i = 0; i < this.partIndexes.length; ++i) { tmpint[this.partIndexes.length - 1 - i] = this.pointCoordinates.length - this.partIndexes[i]; } System.arraycopy(tmpint, 0, this.partIndexes, 0, this.partIndexes.length); tmpint = null; } tmp = null; return this; } }
public class class_name { public MapComposedElement invert() { if (this.pointCoordinates == null) { throw new IndexOutOfBoundsException(); } double[] tmp = new double[this.pointCoordinates.length]; for (int i = 0; i < this.pointCoordinates.length; i += 2) { tmp[i] = this.pointCoordinates[this.pointCoordinates.length - 1 - (i + 1)]; // depends on control dependency: [for], data = [i] tmp[i + 1] = this.pointCoordinates[this.pointCoordinates.length - 1 - i]; // depends on control dependency: [for], data = [i] } System.arraycopy(tmp, 0, this.pointCoordinates, 0, this.pointCoordinates.length); if (this.partIndexes != null) { int[] tmpint = new int[this.partIndexes.length]; //part 0 not inside the index array for (int i = 0; i < this.partIndexes.length; ++i) { tmpint[this.partIndexes.length - 1 - i] = this.pointCoordinates.length - this.partIndexes[i]; // depends on control dependency: [for], data = [i] } System.arraycopy(tmpint, 0, this.partIndexes, 0, this.partIndexes.length); // depends on control dependency: [if], data = [none] tmpint = null; // depends on control dependency: [if], data = [none] } tmp = null; return this; } }
public class class_name { private Content processParamTags(Element e, boolean isParams, List<? extends DocTree> paramTags, Map<String, String> rankMap, TagletWriter writer, Set<String> alreadyDocumented) { Messages messages = writer.configuration().getMessages(); Content result = writer.getOutputInstance(); if (!paramTags.isEmpty()) { CommentHelper ch = writer.configuration().utils.getCommentHelper(e); for (DocTree dt : paramTags) { String paramName = isParams ? ch.getParameterName(dt) : "<" + ch.getParameterName(dt) + ">"; if (!rankMap.containsKey(ch.getParameterName(dt))) { messages.warning(ch.getDocTreePath(dt), isParams ? "doclet.Parameters_warn" : "doclet.Type_Parameters_warn", paramName); } String rank = rankMap.get(ch.getParameterName(dt)); if (rank != null && alreadyDocumented.contains(rank)) { messages.warning(ch.getDocTreePath(dt), isParams ? "doclet.Parameters_dup_warn" : "doclet.Type_Parameters_dup_warn", paramName); } result.addContent(processParamTag(e, isParams, writer, dt, ch.getParameterName(dt), alreadyDocumented.isEmpty())); alreadyDocumented.add(rank); } } return result; } }
public class class_name { private Content processParamTags(Element e, boolean isParams, List<? extends DocTree> paramTags, Map<String, String> rankMap, TagletWriter writer, Set<String> alreadyDocumented) { Messages messages = writer.configuration().getMessages(); Content result = writer.getOutputInstance(); if (!paramTags.isEmpty()) { CommentHelper ch = writer.configuration().utils.getCommentHelper(e); for (DocTree dt : paramTags) { String paramName = isParams ? ch.getParameterName(dt) : "<" + ch.getParameterName(dt) + ">"; if (!rankMap.containsKey(ch.getParameterName(dt))) { messages.warning(ch.getDocTreePath(dt), isParams ? "doclet.Parameters_warn" : "doclet.Type_Parameters_warn", paramName); // depends on control dependency: [if], data = [none] } String rank = rankMap.get(ch.getParameterName(dt)); if (rank != null && alreadyDocumented.contains(rank)) { messages.warning(ch.getDocTreePath(dt), isParams ? "doclet.Parameters_dup_warn" : "doclet.Type_Parameters_dup_warn", paramName); // depends on control dependency: [if], data = [none] } result.addContent(processParamTag(e, isParams, writer, dt, ch.getParameterName(dt), alreadyDocumented.isEmpty())); // depends on control dependency: [for], data = [dt] alreadyDocumented.add(rank); // depends on control dependency: [for], data = [none] } } return result; } }
public class class_name { private ArrayList<SameLengthMotifs> SeparateMotifsByClustering(ArrayList<String[]> clusterTSIdx, SameLengthMotifs sameLenMotifs) { ArrayList<SameLengthMotifs> newResult = new ArrayList<SameLengthMotifs>(); if (clusterTSIdx.size() > 1) { ArrayList<SAXMotif> subsequences = sameLenMotifs.getSameLenMotifs(); for (String[] idxesInCluster : clusterTSIdx) { SameLengthMotifs newIthSLM = new SameLengthMotifs(); ArrayList<SAXMotif> sameLenSS = new ArrayList<SAXMotif>(); int minL = sameLenMotifs.getMinMotifLen(); int maxL = sameLenMotifs.getMaxMotifLen(); for (String i : idxesInCluster) { SAXMotif ssI = subsequences.get(Integer.parseInt(i)); int len = ssI.getPos().getEnd() - ssI.getPos().getStart(); if (len < minL) { minL = len; } else if (len > maxL) { maxL = len; } sameLenSS.add(ssI); } newIthSLM.setSameLenMotifs(sameLenSS); newIthSLM.setMaxMotifLen(maxL); newIthSLM.setMinMotifLen(minL); newResult.add(newIthSLM); } } else { newResult.add(sameLenMotifs); } return newResult; } }
public class class_name { private ArrayList<SameLengthMotifs> SeparateMotifsByClustering(ArrayList<String[]> clusterTSIdx, SameLengthMotifs sameLenMotifs) { ArrayList<SameLengthMotifs> newResult = new ArrayList<SameLengthMotifs>(); if (clusterTSIdx.size() > 1) { ArrayList<SAXMotif> subsequences = sameLenMotifs.getSameLenMotifs(); for (String[] idxesInCluster : clusterTSIdx) { SameLengthMotifs newIthSLM = new SameLengthMotifs(); ArrayList<SAXMotif> sameLenSS = new ArrayList<SAXMotif>(); int minL = sameLenMotifs.getMinMotifLen(); int maxL = sameLenMotifs.getMaxMotifLen(); for (String i : idxesInCluster) { SAXMotif ssI = subsequences.get(Integer.parseInt(i)); int len = ssI.getPos().getEnd() - ssI.getPos().getStart(); if (len < minL) { minL = len; // depends on control dependency: [if], data = [none] } else if (len > maxL) { maxL = len; // depends on control dependency: [if], data = [none] } sameLenSS.add(ssI); // depends on control dependency: [for], data = [none] } newIthSLM.setSameLenMotifs(sameLenSS); // depends on control dependency: [for], data = [none] newIthSLM.setMaxMotifLen(maxL); // depends on control dependency: [for], data = [none] newIthSLM.setMinMotifLen(minL); // depends on control dependency: [for], data = [none] newResult.add(newIthSLM); // depends on control dependency: [for], data = [none] } } else { newResult.add(sameLenMotifs); // depends on control dependency: [if], data = [none] } return newResult; } }
public class class_name { public static SimpleFeatureBuilder getLasFeatureBuilder( CoordinateReferenceSystem crs ) { if (lasSimpleFeatureBuilder == null) { SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder(); b.setName("lasdata"); b.setCRS(crs); b.add(THE_GEOM, Point.class); b.add(ID, Integer.class); b.add(ELEVATION, Double.class); b.add(INTENSITY, Double.class); b.add(CLASSIFICATION, Integer.class); b.add(IMPULSE, Double.class); b.add(NUM_OF_IMPULSES, Double.class); final SimpleFeatureType featureType = b.buildFeatureType(); lasSimpleFeatureBuilder = new SimpleFeatureBuilder(featureType); } return lasSimpleFeatureBuilder; } }
public class class_name { public static SimpleFeatureBuilder getLasFeatureBuilder( CoordinateReferenceSystem crs ) { if (lasSimpleFeatureBuilder == null) { SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder(); b.setName("lasdata"); // depends on control dependency: [if], data = [none] b.setCRS(crs); // depends on control dependency: [if], data = [none] b.add(THE_GEOM, Point.class); // depends on control dependency: [if], data = [none] b.add(ID, Integer.class); // depends on control dependency: [if], data = [none] b.add(ELEVATION, Double.class); // depends on control dependency: [if], data = [none] b.add(INTENSITY, Double.class); // depends on control dependency: [if], data = [none] b.add(CLASSIFICATION, Integer.class); // depends on control dependency: [if], data = [none] b.add(IMPULSE, Double.class); // depends on control dependency: [if], data = [none] b.add(NUM_OF_IMPULSES, Double.class); // depends on control dependency: [if], data = [none] final SimpleFeatureType featureType = b.buildFeatureType(); lasSimpleFeatureBuilder = new SimpleFeatureBuilder(featureType); // depends on control dependency: [if], data = [none] } return lasSimpleFeatureBuilder; } }
public class class_name { public TableConfig getConfig(FeatureType wantFeatureType, NetcdfDataset ds, Formatter errlog) { Dimension obsDim = ds.getUnlimitedDimension(); if (obsDim == null) { CoordinateAxis axis = CoordSysEvaluator.findCoordByType(ds, AxisType.Time); if ((axis != null) && axis.isScalar()) obsDim = axis.getDimension(0); } if (obsDim == null) { errlog.format("Must have an Observation dimension: unlimited dimension, or from Time Coordinate"); return null; } boolean hasStruct = Evaluator.hasNetcdf3RecordStructure(ds); // wants a Point if ((wantFeatureType == FeatureType.POINT)) { TableConfig nt = new TableConfig(Table.Type.Structure, hasStruct ? "record" : obsDim.getShortName() ); nt.structName = "record"; nt.structureType = hasStruct ? TableConfig.StructureType.Structure : TableConfig.StructureType.PsuedoStructure; nt.featureType = FeatureType.POINT; CoordSysEvaluator.findCoords(nt, ds, null); return nt; } // otherwise, make it a Station TableConfig nt = new TableConfig(Table.Type.Top, "station"); nt.featureType = FeatureType.STATION; nt.lat = CoordSysEvaluator.findCoordNameByType(ds, AxisType.Lat); nt.lon = CoordSysEvaluator.findCoordNameByType(ds, AxisType.Lon); nt.stnId = ds.findAttValueIgnoreCase(null, "station", null); nt.stnDesc = ds.findAttValueIgnoreCase(null, "description", null); if (nt.stnDesc == null) nt.stnDesc = ds.findAttValueIgnoreCase(null, "comment", null); TableConfig obs = new TableConfig(Table.Type.Structure, hasStruct ? "record" : obsDim.getShortName()); obs.structName = "record"; obs.structureType = hasStruct ? TableConfig.StructureType.Structure : TableConfig.StructureType.PsuedoStructure; obs.dimName = obsDim.getShortName(); obs.time = CoordSysEvaluator.findCoordNameByType(ds, AxisType.Time); nt.addChild(obs); return nt; } }
public class class_name { public TableConfig getConfig(FeatureType wantFeatureType, NetcdfDataset ds, Formatter errlog) { Dimension obsDim = ds.getUnlimitedDimension(); if (obsDim == null) { CoordinateAxis axis = CoordSysEvaluator.findCoordByType(ds, AxisType.Time); if ((axis != null) && axis.isScalar()) obsDim = axis.getDimension(0); } if (obsDim == null) { errlog.format("Must have an Observation dimension: unlimited dimension, or from Time Coordinate"); // depends on control dependency: [if], data = [none] return null; // depends on control dependency: [if], data = [none] } boolean hasStruct = Evaluator.hasNetcdf3RecordStructure(ds); // wants a Point if ((wantFeatureType == FeatureType.POINT)) { TableConfig nt = new TableConfig(Table.Type.Structure, hasStruct ? "record" : obsDim.getShortName() ); nt.structName = "record"; // depends on control dependency: [if], data = [none] nt.structureType = hasStruct ? TableConfig.StructureType.Structure : TableConfig.StructureType.PsuedoStructure; // depends on control dependency: [if], data = [none] nt.featureType = FeatureType.POINT; // depends on control dependency: [if], data = [none] CoordSysEvaluator.findCoords(nt, ds, null); // depends on control dependency: [if], data = [none] return nt; // depends on control dependency: [if], data = [none] } // otherwise, make it a Station TableConfig nt = new TableConfig(Table.Type.Top, "station"); nt.featureType = FeatureType.STATION; nt.lat = CoordSysEvaluator.findCoordNameByType(ds, AxisType.Lat); nt.lon = CoordSysEvaluator.findCoordNameByType(ds, AxisType.Lon); nt.stnId = ds.findAttValueIgnoreCase(null, "station", null); nt.stnDesc = ds.findAttValueIgnoreCase(null, "description", null); if (nt.stnDesc == null) nt.stnDesc = ds.findAttValueIgnoreCase(null, "comment", null); TableConfig obs = new TableConfig(Table.Type.Structure, hasStruct ? "record" : obsDim.getShortName()); obs.structName = "record"; obs.structureType = hasStruct ? 
TableConfig.StructureType.Structure : TableConfig.StructureType.PsuedoStructure; obs.dimName = obsDim.getShortName(); obs.time = CoordSysEvaluator.findCoordNameByType(ds, AxisType.Time); nt.addChild(obs); return nt; } }
public class class_name { public void setOccurrenceDaySet(java.util.Collection<Integer> occurrenceDaySet) { if (occurrenceDaySet == null) { this.occurrenceDaySet = null; return; } this.occurrenceDaySet = new com.amazonaws.internal.SdkInternalList<Integer>(occurrenceDaySet); } }
public class class_name { public void setOccurrenceDaySet(java.util.Collection<Integer> occurrenceDaySet) { if (occurrenceDaySet == null) { this.occurrenceDaySet = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.occurrenceDaySet = new com.amazonaws.internal.SdkInternalList<Integer>(occurrenceDaySet); } }
public class class_name { @Nullable private AnalyzedTokenReadings getAnalyzedTokenReadingsFor(int startPos, int endPos, List<AnalyzedTokenReadings> tokenReadings) { int pos = 0; for (AnalyzedTokenReadings tokenReading : tokenReadings) { String token = tokenReading.getToken(); if (token.trim().isEmpty()) { continue; // the OpenNLP result has no whitespace, so we need to skip it } int tokenStart = pos; int tokenEnd = pos + token.length(); if (tokenStart == startPos && tokenEnd == endPos) { //System.out.println("!!!" + startPos + " " + endPos + " " + tokenReading); return tokenReading; } pos = tokenEnd; } return null; } }
public class class_name { @Nullable private AnalyzedTokenReadings getAnalyzedTokenReadingsFor(int startPos, int endPos, List<AnalyzedTokenReadings> tokenReadings) { int pos = 0; for (AnalyzedTokenReadings tokenReading : tokenReadings) { String token = tokenReading.getToken(); if (token.trim().isEmpty()) { continue; // the OpenNLP result has no whitespace, so we need to skip it } int tokenStart = pos; int tokenEnd = pos + token.length(); if (tokenStart == startPos && tokenEnd == endPos) { //System.out.println("!!!" + startPos + " " + endPos + " " + tokenReading); return tokenReading; // depends on control dependency: [if], data = [none] } pos = tokenEnd; // depends on control dependency: [for], data = [none] } return null; } }
public class class_name { @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Observable<T> wrap(ObservableSource<T> source) { ObjectHelper.requireNonNull(source, "source is null"); if (source instanceof Observable) { return RxJavaPlugins.onAssembly((Observable<T>)source); } return RxJavaPlugins.onAssembly(new ObservableFromUnsafeSource<T>(source)); } }
public class class_name { @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Observable<T> wrap(ObservableSource<T> source) { ObjectHelper.requireNonNull(source, "source is null"); if (source instanceof Observable) { return RxJavaPlugins.onAssembly((Observable<T>)source); // depends on control dependency: [if], data = [none] } return RxJavaPlugins.onAssembly(new ObservableFromUnsafeSource<T>(source)); } }
public class class_name { public boolean checkPassword(final String iPassword, final String iHash) { if (iHash.startsWith(HASH_ALGORITHM_PREFIX)) { final String s = iHash.substring(HASH_ALGORITHM_PREFIX.length()); return createSHA256(iPassword).equals(s); } else if (iHash.startsWith(PBKDF2_ALGORITHM_PREFIX)) { final String s = iHash.substring(PBKDF2_ALGORITHM_PREFIX.length()); return checkPasswordWithSalt(iPassword, s, PBKDF2_ALGORITHM); } else if (iHash.startsWith(PBKDF2_SHA256_ALGORITHM_PREFIX)) { final String s = iHash.substring(PBKDF2_SHA256_ALGORITHM_PREFIX.length()); return checkPasswordWithSalt(iPassword, s, PBKDF2_SHA256_ALGORITHM); } // Do not compare raw strings against each other, to avoid timing attacks. // Instead, hash them both with a cryptographic hash function and // compare their hashes with a constant-time comparison method. return MessageDigest.isEqual(digestSHA256(iPassword), digestSHA256(iHash)); } }
public class class_name { public boolean checkPassword(final String iPassword, final String iHash) { if (iHash.startsWith(HASH_ALGORITHM_PREFIX)) { final String s = iHash.substring(HASH_ALGORITHM_PREFIX.length()); return createSHA256(iPassword).equals(s); // depends on control dependency: [if], data = [none] } else if (iHash.startsWith(PBKDF2_ALGORITHM_PREFIX)) { final String s = iHash.substring(PBKDF2_ALGORITHM_PREFIX.length()); return checkPasswordWithSalt(iPassword, s, PBKDF2_ALGORITHM); // depends on control dependency: [if], data = [none] } else if (iHash.startsWith(PBKDF2_SHA256_ALGORITHM_PREFIX)) { final String s = iHash.substring(PBKDF2_SHA256_ALGORITHM_PREFIX.length()); return checkPasswordWithSalt(iPassword, s, PBKDF2_SHA256_ALGORITHM); // depends on control dependency: [if], data = [none] } // Do not compare raw strings against each other, to avoid timing attacks. // Instead, hash them both with a cryptographic hash function and // compare their hashes with a constant-time comparison method. return MessageDigest.isEqual(digestSHA256(iPassword), digestSHA256(iHash)); } }
public class class_name { public void initializeAllProcessors(UimaContext aContext) { for(Priority prio : processorNames.keySet()) { for(String pn : processorNames.get(prio)) { try { Class<?> c = Class.forName(pn); GenericProcessor p = (GenericProcessor) c.newInstance(); p.initialize(aContext); processors.get(prio).add(p); } catch (Exception exception) { exception.printStackTrace(); Logger.printError(component, "Unable to initialize registered Processor " + pn + ", got: " + exception.toString()); System.exit(-1); } } } this.initialized = true; } }
public class class_name { public void initializeAllProcessors(UimaContext aContext) { for(Priority prio : processorNames.keySet()) { for(String pn : processorNames.get(prio)) { try { Class<?> c = Class.forName(pn); GenericProcessor p = (GenericProcessor) c.newInstance(); p.initialize(aContext); processors.get(prio).add(p); } catch (Exception exception) { exception.printStackTrace(); Logger.printError(component, "Unable to initialize registered Processor " + pn + ", got: " + exception.toString()); System.exit(-1); } // depends on control dependency: [catch], data = [none] } } this.initialized = true; } }
public class class_name { public _Private_IonManagedBinaryWriterBuilder withFlatImports(final SymbolTable... tables) { if (tables != null) { return withFlatImports(Arrays.asList(tables)); } return this; } }
public class class_name { public _Private_IonManagedBinaryWriterBuilder withFlatImports(final SymbolTable... tables) { if (tables != null) { return withFlatImports(Arrays.asList(tables)); // depends on control dependency: [if], data = [(tables] } return this; } }
public class class_name { protected final void cacheClear() { int i = sysTables.length; while (i-- > 0) { Table t = sysTables[i]; if (t != null) { t.clearAllData(session); } sysTableSessions[i] = null; } isDirty = false; } }
public class class_name { protected final void cacheClear() { int i = sysTables.length; while (i-- > 0) { Table t = sysTables[i]; if (t != null) { t.clearAllData(session); // depends on control dependency: [if], data = [none] } sysTableSessions[i] = null; // depends on control dependency: [while], data = [none] } isDirty = false; } }
public class class_name { @SuppressWarnings("TryFinallyCanBeTryWithResources") public static List<SasFieldInfo> getFields(String recordType, InputStream is, File dictionary) { Map<String, AtomicInteger> counters = new HashMap<>(); List<SasFieldInfo> result = readCsvDictionary(recordType, is, counters); if (dictionary != null) { InputStream dictIs = null; try { dictIs = new FileInputStream(dictionary); result.addAll(readCsvDictionary(recordType, dictIs, counters)); } catch (IOException e) { throw new RuntimeException(e); } finally { if (dictIs != null) { try { dictIs.close(); } catch (IOException e) { // ignored } } } } return result; } }
public class class_name { @SuppressWarnings("TryFinallyCanBeTryWithResources") public static List<SasFieldInfo> getFields(String recordType, InputStream is, File dictionary) { Map<String, AtomicInteger> counters = new HashMap<>(); List<SasFieldInfo> result = readCsvDictionary(recordType, is, counters); if (dictionary != null) { InputStream dictIs = null; try { dictIs = new FileInputStream(dictionary); // depends on control dependency: [try], data = [none] result.addAll(readCsvDictionary(recordType, dictIs, counters)); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw new RuntimeException(e); } // depends on control dependency: [catch], data = [none] finally { if (dictIs != null) { try { dictIs.close(); // depends on control dependency: [try], data = [none] } catch (IOException e) { // ignored } // depends on control dependency: [catch], data = [none] } } } return result; } }
public class class_name { public void monitorCaches(CacheManager cacheManager) { if (cacheManager == null) { logger.warn("EhCache can't be monitored; CacheManager is null"); return; } for (String cacheName : cacheManager.getCacheNames()) { monitorCache(cacheManager.getCache(cacheName)); } } }
public class class_name { public void monitorCaches(CacheManager cacheManager) { if (cacheManager == null) { logger.warn("EhCache can't be monitored; CacheManager is null"); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } for (String cacheName : cacheManager.getCacheNames()) { monitorCache(cacheManager.getCache(cacheName)); // depends on control dependency: [for], data = [cacheName] } } }
public class class_name { public ServiceFuture<List<CloudJob>> listAsync(final JobListOptions jobListOptions, final ListOperationCallback<CloudJob> serviceCallback) { return AzureServiceFuture.fromHeaderPageResponse( listSinglePageAsync(jobListOptions), new Func1<String, Observable<ServiceResponseWithHeaders<Page<CloudJob>, JobListHeaders>>>() { @Override public Observable<ServiceResponseWithHeaders<Page<CloudJob>, JobListHeaders>> call(String nextPageLink) { JobListNextOptions jobListNextOptions = null; if (jobListOptions != null) { jobListNextOptions = new JobListNextOptions(); jobListNextOptions.withClientRequestId(jobListOptions.clientRequestId()); jobListNextOptions.withReturnClientRequestId(jobListOptions.returnClientRequestId()); jobListNextOptions.withOcpDate(jobListOptions.ocpDate()); } return listNextSinglePageAsync(nextPageLink, jobListNextOptions); } }, serviceCallback); } }
public class class_name { public ServiceFuture<List<CloudJob>> listAsync(final JobListOptions jobListOptions, final ListOperationCallback<CloudJob> serviceCallback) { return AzureServiceFuture.fromHeaderPageResponse( listSinglePageAsync(jobListOptions), new Func1<String, Observable<ServiceResponseWithHeaders<Page<CloudJob>, JobListHeaders>>>() { @Override public Observable<ServiceResponseWithHeaders<Page<CloudJob>, JobListHeaders>> call(String nextPageLink) { JobListNextOptions jobListNextOptions = null; if (jobListOptions != null) { jobListNextOptions = new JobListNextOptions(); // depends on control dependency: [if], data = [none] jobListNextOptions.withClientRequestId(jobListOptions.clientRequestId()); // depends on control dependency: [if], data = [(jobListOptions] jobListNextOptions.withReturnClientRequestId(jobListOptions.returnClientRequestId()); // depends on control dependency: [if], data = [(jobListOptions] jobListNextOptions.withOcpDate(jobListOptions.ocpDate()); // depends on control dependency: [if], data = [(jobListOptions] } return listNextSinglePageAsync(nextPageLink, jobListNextOptions); } }, serviceCallback); } }
public class class_name { @SuppressWarnings("null") public @NotNull List<Resource> getResources(@Nullable Predicate<Resource> filter, @Nullable Resource baseResource) { // resolve base path or fallback to current page's content if not specified Resource baseResourceToUse = baseResource; if (baseResourceToUse == null) { PageManager pageManager = request.getResourceResolver().adaptTo(PageManager.class); Page currentPage = pageManager.getContainingPage(request.getResource()); if (currentPage != null) { baseResourceToUse = currentPage.getContentResource(); } else { baseResourceToUse = request.getResource(); } } return getResourcesWithBaseResource(filter, baseResourceToUse); } }
public class class_name { @SuppressWarnings("null") public @NotNull List<Resource> getResources(@Nullable Predicate<Resource> filter, @Nullable Resource baseResource) { // resolve base path or fallback to current page's content if not specified Resource baseResourceToUse = baseResource; if (baseResourceToUse == null) { PageManager pageManager = request.getResourceResolver().adaptTo(PageManager.class); Page currentPage = pageManager.getContainingPage(request.getResource()); if (currentPage != null) { baseResourceToUse = currentPage.getContentResource(); // depends on control dependency: [if], data = [none] } else { baseResourceToUse = request.getResource(); // depends on control dependency: [if], data = [none] } } return getResourcesWithBaseResource(filter, baseResourceToUse); } }
public class class_name { @Override public void close() throws ParseException { if (!defined()) { super.close(); if (!tmpParserLongs.isEmpty()) { parserLongs = new MtasFunctionParserFunction[tmpParserLongs.size()]; parserLongs = tmpParserLongs.toArray(parserLongs); } if (!tmpParserDoubles.isEmpty()) { parserDoubles = new MtasFunctionParserFunction[tmpParserDoubles.size()]; parserDoubles = tmpParserDoubles.toArray(parserDoubles); } if (!tmpConstantLongs.isEmpty()) { constantLongs = new long[tmpConstantLongs.size()]; for (int i = 0; i < tmpConstantLongs.size(); i++) { constantLongs[i] = tmpConstantLongs.get(i); } } if (!tmpConstantDoubles.isEmpty()) { constantDoubles = new Double[tmpConstantDoubles.size()]; for (int i = 0; i < tmpConstantDoubles.size(); i++) { constantDoubles[i] = tmpConstantDoubles.get(i); } } if (firstType == null) { throw new ParseException("incorrect definition: no firstType"); } if (!tmpOperatorList.isEmpty()) { number = tmpOperatorList.size(); if ((tmpTypeList.size() != number) || (tmpIdList.size() != number)) { throw new ParseException("incorrect definition additional items"); } else { operatorList = new String[number]; operatorList = tmpOperatorList.toArray(operatorList); typeList = new String[number]; typeList = tmpTypeList.toArray(typeList); idList = new int[number]; for (int i = 0; i < number; i++) { idList[i] = tmpIdList.get(i).intValue(); } } } else { number = 0; operatorList = null; typeList = null; idList = null; } } } }
public class class_name { @Override public void close() throws ParseException { if (!defined()) { super.close(); if (!tmpParserLongs.isEmpty()) { parserLongs = new MtasFunctionParserFunction[tmpParserLongs.size()]; // depends on control dependency: [if], data = [none] parserLongs = tmpParserLongs.toArray(parserLongs); // depends on control dependency: [if], data = [none] } if (!tmpParserDoubles.isEmpty()) { parserDoubles = new MtasFunctionParserFunction[tmpParserDoubles.size()]; // depends on control dependency: [if], data = [none] parserDoubles = tmpParserDoubles.toArray(parserDoubles); // depends on control dependency: [if], data = [none] } if (!tmpConstantLongs.isEmpty()) { constantLongs = new long[tmpConstantLongs.size()]; // depends on control dependency: [if], data = [none] for (int i = 0; i < tmpConstantLongs.size(); i++) { constantLongs[i] = tmpConstantLongs.get(i); // depends on control dependency: [for], data = [i] } } if (!tmpConstantDoubles.isEmpty()) { constantDoubles = new Double[tmpConstantDoubles.size()]; // depends on control dependency: [if], data = [none] for (int i = 0; i < tmpConstantDoubles.size(); i++) { constantDoubles[i] = tmpConstantDoubles.get(i); // depends on control dependency: [for], data = [i] } } if (firstType == null) { throw new ParseException("incorrect definition: no firstType"); } if (!tmpOperatorList.isEmpty()) { number = tmpOperatorList.size(); // depends on control dependency: [if], data = [none] if ((tmpTypeList.size() != number) || (tmpIdList.size() != number)) { throw new ParseException("incorrect definition additional items"); } else { operatorList = new String[number]; // depends on control dependency: [if], data = [none] operatorList = tmpOperatorList.toArray(operatorList); // depends on control dependency: [if], data = [none] typeList = new String[number]; // depends on control dependency: [if], data = [none] typeList = tmpTypeList.toArray(typeList); // depends on control dependency: [if], data = [none] idList = new 
int[number]; // depends on control dependency: [if], data = [none] for (int i = 0; i < number; i++) { idList[i] = tmpIdList.get(i).intValue(); // depends on control dependency: [for], data = [i] } } } else { number = 0; // depends on control dependency: [if], data = [none] operatorList = null; // depends on control dependency: [if], data = [none] typeList = null; // depends on control dependency: [if], data = [none] idList = null; // depends on control dependency: [if], data = [none] } } } }
public class class_name {
    /**
     * Hashes the given key into a lowercase/uppercase hex string (per
     * {@code bytesToHexString}) using the configured digester, or falls back to
     * the hex form of {@link String#hashCode()} when no digester is configured.
     *
     * @param key the key to hash; must not be null
     * @return hex-encoded hash of the key
     */
    public String hash(String key) {
        if (digester != null) {
            // FIX: the original used key.getBytes() with the platform-default
            // charset, so the same non-ASCII key could hash differently on
            // different machines. UTF-8 makes the digest deterministic.
            byte[] bytes = digester.digest(key.getBytes(java.nio.charset.StandardCharsets.UTF_8));
            return bytesToHexString(bytes);
        } else {
            return Integer.toHexString(key.hashCode());
        }
    }
}
public class class_name { public String hash(String key) { if (digester != null) { byte[] bytes = digester.digest(key.getBytes()); return bytesToHexString(bytes); // depends on control dependency: [if], data = [none] } else { return Integer.toHexString(key.hashCode()); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Visits each child of {@code node} in order and collects the per-child
     * visit results into a list of the same length.
     *
     * @param node the parent whose children are visited
     * @return results of visiting each child, in child order
     */
    protected List<R> visitChildren(ParentNode<? extends N> node) {
        // Pre-size to the child count so the backing array never has to grow.
        final List<R> collected = new ArrayList<>(node.numChildren());
        for (final N current : node.getChildren()) {
            collected.add(visit(current));
        }
        return collected;
    }
}
public class class_name { protected List<R> visitChildren(ParentNode<? extends N> node) { List<R> results = new ArrayList<>(node.numChildren()); for (N child : node.getChildren()) { results.add(visit(child)); // depends on control dependency: [for], data = [child] } return results; } }
public class class_name { private void addNtCredentials(NtAuthInfo authInfo, Map<AuthScope, Credentials> credentialsMap) { logger.info("NT authentication for: {}", authInfo.getLoginTarget()); try { Credentials credentials = new NTCredentials(authInfo.getUsername(), authInfo.getPassword(), InetAddress.getLocalHost().getHostName(), authInfo.getDomain()); credentialsMap.put(new AuthScope(authInfo.getHost(), authInfo.getPort()), credentials); } catch (UnknownHostException e) { logger.error("Error creating NT credentials", e); } } }
public class class_name { private void addNtCredentials(NtAuthInfo authInfo, Map<AuthScope, Credentials> credentialsMap) { logger.info("NT authentication for: {}", authInfo.getLoginTarget()); try { Credentials credentials = new NTCredentials(authInfo.getUsername(), authInfo.getPassword(), InetAddress.getLocalHost().getHostName(), authInfo.getDomain()); credentialsMap.put(new AuthScope(authInfo.getHost(), authInfo.getPort()), credentials); // depends on control dependency: [try], data = [none] } catch (UnknownHostException e) { logger.error("Error creating NT credentials", e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { protected void discard() { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "discard"); // Discard any old cursor if(getCursor != null) { getCursor.finished(); getCursor = null; } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "discard"); } }
public class class_name { protected void discard() { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "discard"); // Discard any old cursor if(getCursor != null) { getCursor.finished(); // depends on control dependency: [if], data = [none] getCursor = null; // depends on control dependency: [if], data = [none] } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "discard"); } }
public class class_name {
    /**
     * Returns the inner rings (holes) of this polygon as {@link LineString}s.
     * The first coordinate ring is the outer boundary and is excluded.
     *
     * <p>NOTE(review): annotated {@code @Nullable} but every path returns a
     * non-null (possibly empty) list — confirm whether the annotation can be
     * dropped at the interface level.
     *
     * @return a mutable list of inner rings; empty when the polygon has no holes
     */
    @Nullable
    public List<LineString> inner() {
        List<List<Point>> coordinates = coordinates();
        if (coordinates.size() <= 1) {
            // FIX: the original used raw-typed `new ArrayList(0)`, which produced
            // an unchecked-conversion warning; the diamond keeps behavior identical.
            return new ArrayList<>(0);
        }
        List<LineString> inner = new ArrayList<>(coordinates.size() - 1);
        // Skip index 0 (the outer ring); every remaining ring is a hole.
        for (List<Point> points : coordinates.subList(1, coordinates.size())) {
            inner.add(LineString.fromLngLats(points));
        }
        return inner;
    }
}
public class class_name { @Nullable public List<LineString> inner() { List<List<Point>> coordinates = coordinates(); if (coordinates.size() <= 1) { return new ArrayList(0); // depends on control dependency: [if], data = [none] } List<LineString> inner = new ArrayList<>(coordinates.size() - 1); for (List<Point> points : coordinates.subList(1, coordinates.size())) { inner.add(LineString.fromLngLats(points)); // depends on control dependency: [for], data = [points] } return inner; } }
public class class_name {
    /**
     * Enables or disables the translucent navigation-bar flag on the activity's
     * window. Does nothing on devices below KitKat, where the flag does not exist.
     *
     * @param activity the activity whose window flag is toggled
     * @param on       true to set the flag, false to clear it
     */
    public static void setTranslucentNavigationFlag(Activity activity, boolean on) {
        // FLAG_TRANSLUCENT_NAVIGATION was introduced in API 19; the named
        // constant replaces the magic number 19 (KITKAT == 19, so behavior is
        // unchanged).
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
            setFlag(activity, WindowManager.LayoutParams.FLAG_TRANSLUCENT_NAVIGATION, on);
        }
    }
}
public class class_name { public static void setTranslucentNavigationFlag(Activity activity, boolean on) { if (Build.VERSION.SDK_INT >= 19) { setFlag(activity, WindowManager.LayoutParams.FLAG_TRANSLUCENT_NAVIGATION, on); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Adds the event to the ring buffer if one is currently available.
     * Dropping the event is acceptable when the buffer is (being) torn down.
     */
    @FFDCIgnore(NullPointerException.class)
    private void addEventToRingBuffer(Object event) {
        // Check again to see if the ringBuffer is null
        // (presumably another thread can null the field concurrently during
        // deactivation — TODO confirm against the component lifecycle).
        if (ringBuffer != null) {
            try {
                ringBuffer.add(event);
            } catch (NullPointerException npe) {
                // Deliberately swallowed: the buffer vanished between the null
                // check and the add; losing one event here is acceptable, and
                // @FFDCIgnore suppresses FFDC reporting for this expected NPE.
                // Nothing to do! Perhaps a Trace?
            }
        }
    }
}
public class class_name { @FFDCIgnore(NullPointerException.class) private void addEventToRingBuffer(Object event) { // Check again to see if the ringBuffer is null if (ringBuffer != null) { try { ringBuffer.add(event); // depends on control dependency: [try], data = [none] } catch (NullPointerException npe) { // Nothing to do! Perhaps a Trace? } // depends on control dependency: [catch], data = [none] } } }
public class class_name { @Override public Color[] generateColors(int numColors) { Color[] colors = new Color[numColors]; // fix the seed to always get the same colors for the same numColors parameter and ranges for hue, saturation and brightness Random rand = new Random(0); for(int i = 0; i < numColors; ++i) { float hueRatio = i/(float)numColors; float saturationRatio = rand.nextFloat(); float brightnessRatio = rand.nextFloat(); float hue = lerp(hueMin, hueMax, hueRatio); float saturation = lerp(saturationMin, saturationMax, saturationRatio); float brightness = lerp(brightnessMin, brightnessMax, brightnessRatio); colors[i] = Color.getHSBColor(hue, saturation, brightness); } return colors; } }
public class class_name { @Override public Color[] generateColors(int numColors) { Color[] colors = new Color[numColors]; // fix the seed to always get the same colors for the same numColors parameter and ranges for hue, saturation and brightness Random rand = new Random(0); for(int i = 0; i < numColors; ++i) { float hueRatio = i/(float)numColors; float saturationRatio = rand.nextFloat(); float brightnessRatio = rand.nextFloat(); float hue = lerp(hueMin, hueMax, hueRatio); float saturation = lerp(saturationMin, saturationMax, saturationRatio); float brightness = lerp(brightnessMin, brightnessMax, brightnessRatio); colors[i] = Color.getHSBColor(hue, saturation, brightness); // depends on control dependency: [for], data = [i] } return colors; } }
public class class_name {
    /**
     * Adapts an arbitrary item into an {@link Action}.
     * <ul>
     *   <li>{@code Action} instances pass through unchanged;</li>
     *   <li>{@code String}s are treated as action definitions and resolved to a
     *       data function plus middleware;</li>
     *   <li>any other object is wrapped, using a reflectively invoked
     *       {@code getName()} when available, otherwise its class name.</li>
     * </ul>
     * Generated action ids are suffixed with a random int to keep them unique.
     */
    protected Action toAction(Object item) {
        if (item instanceof Action) {
            return (Action) item;
        } else if (item instanceof String) {
            final String definition = (String) item;
            return new DefaultAction(
                definition + "@" + RANDOM.nextInt(),
                resolveDataFunction((String) item),
                getMiddleware(definition)
            );
        } else {
            String name;
            List<Middleware> middleware;
            try {
                // Reflectively ask the item for its name; anything that goes
                // wrong (missing method, access failure, user code throwing)
                // falls through to the class-name fallback.
                name = (String) ClassApi
                    .findMethod(item.getClass(), "getName")
                    .invoke(item);
                middleware = getMiddleware(name);
            } catch (Throwable t) {
                // NOTE(review): Throwable is intentionally broad here so the
                // fallback always applies, but it also swallows Errors —
                // confirm that is desired.
                name = item.getClass().getName();
                middleware = Collections.emptyList();
            }
            return new DefaultAction(
                name + "@" + RANDOM.nextInt(),
                getDataFunctionExtractor().apply(item, DEFAULT_FUNCTION),
                middleware
            );
        }
    }
}
public class class_name { protected Action toAction(Object item) { if (item instanceof Action) { return (Action) item; // depends on control dependency: [if], data = [none] } else if (item instanceof String) { final String definition = (String) item; return new DefaultAction( definition + "@" + RANDOM.nextInt(), resolveDataFunction((String) item), getMiddleware(definition) ); // depends on control dependency: [if], data = [none] } else { String name; List<Middleware> middleware; try { name = (String) ClassApi .findMethod(item.getClass(), "getName") .invoke(item); // depends on control dependency: [try], data = [none] middleware = getMiddleware(name); // depends on control dependency: [try], data = [none] } catch (Throwable t) { name = item.getClass().getName(); middleware = Collections.emptyList(); } // depends on control dependency: [catch], data = [none] return new DefaultAction( name + "@" + RANDOM.nextInt(), getDataFunctionExtractor().apply(item, DEFAULT_FUNCTION), middleware ); // depends on control dependency: [if], data = [none] } } }
public class class_name { void startScanSFeaturesAt(List seq, int pos) { sFeatures.clear(); sFeatureIdx = 0; Observation obsr = (Observation)seq.get(pos); // scan over all context predicates for (int i = 0; i < obsr.cps.length; i++) { Element elem = (Element)dict.dict.get(new Integer(obsr.cps[i])); if (elem == null) { continue; } if (!(elem.isScanned)) { // scan all labels for state feature Iterator it = elem.lbCntFidxes.keySet().iterator(); while (it.hasNext()) { Integer label = (Integer)it.next(); CountFeatureIdx cntFidx = (CountFeatureIdx)elem.lbCntFidxes.get(label); if (cntFidx.fidx >= 0) { Feature sF = new Feature(); sF.sFeature1Init(label.intValue(), obsr.cps[i]); sF.idx = cntFidx.fidx; elem.cpFeatures.add(sF); } } elem.isScanned = true; } for (int j = 0; j < elem.cpFeatures.size(); j++) { sFeatures.add(elem.cpFeatures.get(j)); } } } }
public class class_name { void startScanSFeaturesAt(List seq, int pos) { sFeatures.clear(); sFeatureIdx = 0; Observation obsr = (Observation)seq.get(pos); // scan over all context predicates for (int i = 0; i < obsr.cps.length; i++) { Element elem = (Element)dict.dict.get(new Integer(obsr.cps[i])); if (elem == null) { continue; } if (!(elem.isScanned)) { // scan all labels for state feature Iterator it = elem.lbCntFidxes.keySet().iterator(); while (it.hasNext()) { Integer label = (Integer)it.next(); CountFeatureIdx cntFidx = (CountFeatureIdx)elem.lbCntFidxes.get(label); if (cntFidx.fidx >= 0) { Feature sF = new Feature(); sF.sFeature1Init(label.intValue(), obsr.cps[i]); // depends on control dependency: [if], data = [none] sF.idx = cntFidx.fidx; // depends on control dependency: [if], data = [none] elem.cpFeatures.add(sF); // depends on control dependency: [if], data = [none] } } elem.isScanned = true; // depends on control dependency: [if], data = [none] } for (int j = 0; j < elem.cpFeatures.size(); j++) { sFeatures.add(elem.cpFeatures.get(j)); // depends on control dependency: [for], data = [j] } } } }
public class class_name {
    /**
     * Iteratively prunes "leaf" nodes (nodes with exactly one connection) from
     * the cluster. Removing a leaf may turn its parent into a new leaf, so the
     * scan repeats with a double-buffered worklist until no candidates remain.
     *
     * @param cluster list of nodes, modified in place
     */
    static void removeSingleConnections( List<Node> cluster ) {
        List<Node> open = new ArrayList<>();
        List<Node> future = new ArrayList<>();
        open.addAll(cluster);

        while( !open.isEmpty() ) {
            // Iterate backwards so removals from `cluster` don't disturb the scan.
            for (int i = open.size()-1; i >= 0; i--) {
                Node n = open.get(i);

                if( n.connections.size == 1 ) {
                    // clear it's connections and remove it from the cluster
                    int index = findNode(n.which,cluster);
                    cluster.remove(index);

                    // Remove the reference to this node from its one connection.
                    // Parent must be looked up BEFORE reset() clears the edge list.
                    int parent = findNode(n.connections.get(0),cluster);
                    n.connections.reset();
                    if( parent == -1 )
                        throw new RuntimeException("BUG!");
                    Node p = cluster.get(parent);
                    int edge = p.connections.indexOf(n.which);
                    if( edge == -1 )
                        throw new RuntimeException("BUG!");
                    p.connections.remove(edge);

                    // if the parent now only has one connection, it becomes a
                    // candidate for the next pruning pass
                    if( p.connections.size == 1) {
                        future.add(p);
                    }
                }
            }
            // Swap worklists: next pass processes the freshly created leaves.
            open.clear();
            List<Node> tmp = open;
            open = future;
            future = tmp;
        }
    }
}
public class class_name { static void removeSingleConnections( List<Node> cluster ) { List<Node> open = new ArrayList<>(); List<Node> future = new ArrayList<>(); open.addAll(cluster); while( !open.isEmpty() ) { for (int i = open.size()-1; i >= 0; i--) { Node n = open.get(i); if( n.connections.size == 1 ) { // clear it's connections and remove it from the cluster int index = findNode(n.which,cluster); cluster.remove(index); // depends on control dependency: [if], data = [none] // Remove the reference to this node from its one connection int parent = findNode(n.connections.get(0),cluster); n.connections.reset(); // depends on control dependency: [if], data = [none] if( parent == -1 ) throw new RuntimeException("BUG!"); Node p = cluster.get(parent); int edge = p.connections.indexOf(n.which); if( edge == -1 ) throw new RuntimeException("BUG!"); p.connections.remove(edge); // depends on control dependency: [if], data = [none] // if the parent now only has one connection if( p.connections.size == 1) { future.add(p); // depends on control dependency: [if], data = [none] } } } open.clear(); // depends on control dependency: [while], data = [none] List<Node> tmp = open; open = future; // depends on control dependency: [while], data = [none] future = tmp; // depends on control dependency: [while], data = [none] } } }
public class class_name {
    /**
     * Builds a {@link ProviderClassLoader} over the given class source: adds
     * each source's URL to the loader and eagerly loads every listed class into
     * the loader's class set.
     *
     * @param source the class source describing URLs and class names
     *               (FIX: parameter renamed from the typo "soruce")
     * @return the populated class loader
     * @throws Exception if a listed class cannot be loaded
     */
    private ProviderClassLoader loadClasses(ClassSource source) throws Exception {
        ProviderClassLoader loader = new ProviderClassLoader(parentClassLoader);
        Set<Class<?>> allClass = loader.getAllClass();
        List<ClassSourceInfo> sources = source.getClassSources();
        if (sources.isEmpty()) {
            return loader;
        }
        for (ClassSourceInfo cs : sources) {
            loader.addURL(cs.getUrl());
            for (String className : cs.getClassNames()) {
                Class<?> clazz = loader.loadClass(className);
                if (clazz != null) {
                    allClass.add(clazz);
                }
            }
        }
        _log.debug("classloader init complete,find Class:" + allClass.size());
        return loader;
    }
}
public class class_name { private ProviderClassLoader loadClasses(ClassSource soruce) throws Exception { ProviderClassLoader loader=new ProviderClassLoader(parentClassLoader); Set<Class<?>> allClass = loader.getAllClass(); List<ClassSourceInfo> sources=soruce.getClassSources(); if(sources.isEmpty()) { return loader; } for(ClassSourceInfo cs:sources) { loader.addURL(cs.getUrl()); for(String className:cs.getClassNames()) { Class<?> clazz=loader.loadClass(className); if(clazz!=null) { allClass.add(clazz); // depends on control dependency: [if], data = [(clazz] } } } _log.debug("classloader init complete,find Class:"+allClass.size()); return loader; } }
public class class_name {
    /**
     * Incrementally parses the binary-encoded HTTP request first line
     * (version marker, method, URI, HTTP version) from the buffer, driving a
     * state machine so parsing can resume when more bytes arrive.
     *
     * @param buff buffer holding (part of) the binary first line
     * @return true when the first line is fully parsed; false if more data is
     *         needed (state is preserved for the next call)
     * @throws MalformedMessageException on an unsupported binary version or an
     *         unexpected parser state
     */
    @Override
    public boolean parseBinaryFirstLine(WsByteBuffer buff) throws MalformedMessageException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "parseBinaryFirstLine for " + this);
            Tr.debug(tc, "Buffer: " + buff);
        }
        // First byte of the message is the binary transport version marker.
        if (getBinaryParseState() == HttpInternalConstants.PARSING_BINARY_VERSION) {
            if (!buff.hasRemaining()) {
                return false;
            }
            byte version = buff.get();
            if (version != HttpInternalConstants.BINARY_TRANSPORT_V1) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Unsupported binary version in message: " + version);
                }
                throw new MalformedMessageException("Invalid binary message");
            }
            setBinaryParseState(HttpInternalConstants.PARSING_METHOD_ID_OR_LEN);
            resetCacheToken(4);
        }
        boolean complete = false;
        int value;
        while (!complete) {
            // at this point, the parsed token array is always set up, so we
            // can try to fill it now; an incomplete fill means "need more data".
            if (!fillCacheToken(buff)) {
                return false;
            }
            switch (getBinaryParseState()) {
                case HttpInternalConstants.PARSING_METHOD_ID_OR_LEN:
                    // Token is either a known-method ordinal or (KNOWN_MASK set)
                    // the byte length of an unknown method name.
                    value = GenericUtils.asInt(getParsedToken());
                    if (0 == (value & GenericConstants.KNOWN_MASK)) {
                        setMethod(MethodValues.getByOrdinal(value));
                        setBinaryParseState(HttpInternalConstants.PARSING_URI_LEN);
                        resetCacheToken(4);
                    } else {
                        setBinaryParseState(HttpInternalConstants.PARSING_UNKNOWN_METHOD);
                        resetCacheToken(value & GenericConstants.UNKNOWN_MASK);
                    }
                    break;
                case HttpInternalConstants.PARSING_UNKNOWN_METHOD:
                    setMethod(MethodValues.find(getParsedToken()));
                    setBinaryParseState(HttpInternalConstants.PARSING_URI_LEN);
                    createCacheToken(4);
                    break;
                case HttpInternalConstants.PARSING_URI_LEN:
                    // Next token is the URI itself, sized by this length field.
                    setBinaryParseState(HttpInternalConstants.PARSING_URI);
                    resetCacheToken(GenericUtils.asInt(getParsedToken()));
                    break;
                case HttpInternalConstants.PARSING_URI:
                    setRequestURL(getParsedToken());
                    setBinaryParseState(HttpInternalConstants.PARSING_VERSION_ID_OR_LEN);
                    createCacheToken(4);
                    break;
                case HttpInternalConstants.PARSING_VERSION_ID_OR_LEN:
                    // Same known/unknown encoding as the method field above.
                    value = GenericUtils.asInt(getParsedToken());
                    if (0 == (value & GenericConstants.KNOWN_MASK)) {
                        setVersion(VersionValues.getByOrdinal(value));
                        setBinaryParseState(GenericConstants.PARSING_HDR_FLAG);
                        resetCacheToken(4);
                        complete = true;
                    } else {
                        setBinaryParseState(HttpInternalConstants.PARSING_UNKNOWN_VERSION);
                        resetCacheToken(value & GenericConstants.UNKNOWN_MASK);
                    }
                    break;
                case HttpInternalConstants.PARSING_UNKNOWN_VERSION:
                    setVersion(VersionValues.find(getParsedToken()));
                    setBinaryParseState(GenericConstants.PARSING_HDR_FLAG);
                    createCacheToken(4);
                    complete = true;
                    break;
                default:
                    throw new MalformedMessageException("Invalid state in first line: " + getBinaryParseState());
            } // end of switch block
        } // end of while loop
        setFirstLineComplete(true);
        return true;
    }
}
public class class_name { @Override public boolean parseBinaryFirstLine(WsByteBuffer buff) throws MalformedMessageException { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "parseBinaryFirstLine for " + this); Tr.debug(tc, "Buffer: " + buff); } if (getBinaryParseState() == HttpInternalConstants.PARSING_BINARY_VERSION) { if (!buff.hasRemaining()) { return false; // depends on control dependency: [if], data = [none] } byte version = buff.get(); if (version != HttpInternalConstants.BINARY_TRANSPORT_V1) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "Unsupported binary version in message: " + version); // depends on control dependency: [if], data = [none] } throw new MalformedMessageException("Invalid binary message"); } setBinaryParseState(HttpInternalConstants.PARSING_METHOD_ID_OR_LEN); resetCacheToken(4); } boolean complete = false; int value; while (!complete) { // at this point, the parsed token array is always set up, so we // can try to fill it now if (!fillCacheToken(buff)) { return false; // depends on control dependency: [if], data = [none] } switch (getBinaryParseState()) { case HttpInternalConstants.PARSING_METHOD_ID_OR_LEN: value = GenericUtils.asInt(getParsedToken()); if (0 == (value & GenericConstants.KNOWN_MASK)) { setMethod(MethodValues.getByOrdinal(value)); // depends on control dependency: [if], data = [none] setBinaryParseState(HttpInternalConstants.PARSING_URI_LEN); // depends on control dependency: [if], data = [none] resetCacheToken(4); // depends on control dependency: [if], data = [none] } else { setBinaryParseState(HttpInternalConstants.PARSING_UNKNOWN_METHOD); // depends on control dependency: [if], data = [none] resetCacheToken(value & GenericConstants.UNKNOWN_MASK); // depends on control dependency: [if], data = [none] } break; case HttpInternalConstants.PARSING_UNKNOWN_METHOD: setMethod(MethodValues.find(getParsedToken())); 
setBinaryParseState(HttpInternalConstants.PARSING_URI_LEN); createCacheToken(4); break; case HttpInternalConstants.PARSING_URI_LEN: setBinaryParseState(HttpInternalConstants.PARSING_URI); resetCacheToken(GenericUtils.asInt(getParsedToken())); break; case HttpInternalConstants.PARSING_URI: setRequestURL(getParsedToken()); setBinaryParseState(HttpInternalConstants.PARSING_VERSION_ID_OR_LEN); createCacheToken(4); break; case HttpInternalConstants.PARSING_VERSION_ID_OR_LEN: value = GenericUtils.asInt(getParsedToken()); if (0 == (value & GenericConstants.KNOWN_MASK)) { setVersion(VersionValues.getByOrdinal(value)); // depends on control dependency: [if], data = [none] setBinaryParseState(GenericConstants.PARSING_HDR_FLAG); // depends on control dependency: [if], data = [none] resetCacheToken(4); // depends on control dependency: [if], data = [none] complete = true; // depends on control dependency: [if], data = [none] } else { setBinaryParseState(HttpInternalConstants.PARSING_UNKNOWN_VERSION); // depends on control dependency: [if], data = [none] resetCacheToken(value & GenericConstants.UNKNOWN_MASK); // depends on control dependency: [if], data = [none] } break; case HttpInternalConstants.PARSING_UNKNOWN_VERSION: setVersion(VersionValues.find(getParsedToken())); setBinaryParseState(GenericConstants.PARSING_HDR_FLAG); createCacheToken(4); complete = true; break; default: throw new MalformedMessageException("Invalid state in first line: " + getBinaryParseState()); } // end of switch block } // end of while loop setFirstLineComplete(true); return true; } }
public class class_name { @NonNull protected String getDescription() { Context context = getContext(); if (context == null) { return ""; } try { @SuppressLint("InlinedApi") ProviderInfo info = context.getPackageManager().getProviderInfo( new ComponentName(context, getClass()), PackageManager.MATCH_DISABLED_COMPONENTS); return info.descriptionRes != 0 ? context.getString(info.descriptionRes) : ""; } catch (PackageManager.NameNotFoundException e) { // Wtf? return ""; } } }
public class class_name { @NonNull protected String getDescription() { Context context = getContext(); if (context == null) { return ""; // depends on control dependency: [if], data = [none] } try { @SuppressLint("InlinedApi") ProviderInfo info = context.getPackageManager().getProviderInfo( new ComponentName(context, getClass()), PackageManager.MATCH_DISABLED_COMPONENTS); return info.descriptionRes != 0 ? context.getString(info.descriptionRes) : ""; // depends on control dependency: [try], data = [none] } catch (PackageManager.NameNotFoundException e) { // Wtf? return ""; } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Reads the value at {@code path} as a heterogeneous list, returning null
     * when the path is missing or holds a value of the wrong type. Wrong-type
     * values are logged as warnings; missing paths are silently tolerated.
     */
    public static List<? extends Object> getObjectList(Config config, String path) {
        try {
            return config.getAnyRefList(path);
        } catch (ConfigException.WrongType e) {
            // A value exists but has the wrong shape — worth surfacing.
            LOGGER.warn(e.getMessage(), e);
            return null;
        } catch (ConfigException.Missing e) {
            // An absent path is an expected condition; stay quiet.
            return null;
        }
    }
}
public class class_name { public static List<? extends Object> getObjectList(Config config, String path) { try { return config.getAnyRefList(path); // depends on control dependency: [try], data = [none] } catch (ConfigException.Missing | ConfigException.WrongType e) { if (e instanceof ConfigException.WrongType) { LOGGER.warn(e.getMessage(), e); // depends on control dependency: [if], data = [none] } return null; } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Sets the event ARNs, replacing any previous value. A null input clears
     * the field; otherwise the collection is defensively copied.
     *
     * @param eventArns the ARNs to store, or null to clear
     */
    public void setEventArns(java.util.Collection<String> eventArns) {
        this.eventArns = (eventArns == null) ? null : new java.util.ArrayList<String>(eventArns);
    }
}
public class class_name { public void setEventArns(java.util.Collection<String> eventArns) { if (eventArns == null) { this.eventArns = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.eventArns = new java.util.ArrayList<String>(eventArns); } }
public class class_name {
    /**
     * Deserializes a Java-serialized byte array back into an object.
     *
     * <p>SECURITY: Java native deserialization of untrusted bytes can execute
     * attacker-controlled gadget chains — only feed this trusted data.
     *
     * @param bytes the serialized form; must not be null
     * @param <T>   expected type of the deserialized object (unchecked cast)
     * @return the deserialized object
     * @throws UtilException wrapping any I/O, class-resolution or cast failure
     */
    @SuppressWarnings("unchecked")
    public static <T> T unserialize(byte[] bytes) {
        // FIX: the original declared `ois` outside the try and never closed it
        // (resource leak); try-with-resources closes it on every path.
        try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes))) {
            return (T) ois.readObject();
        } catch (Exception e) {
            throw new UtilException(e);
        }
    }
}
public class class_name { @SuppressWarnings("unchecked") public static <T> T unserialize(byte[] bytes) { ObjectInputStream ois = null; try { ByteArrayInputStream bais = new ByteArrayInputStream(bytes); ois = new ObjectInputStream(bais); // depends on control dependency: [try], data = [none] return (T) ois.readObject(); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new UtilException(e); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Transitions to {@code newState}, logging the change and notifying all
     * registered change listeners. A no-op when the state is unchanged.
     */
    private void setState(State newState) {
        if (this.state == newState) {
            return; // No transition, no notification.
        }
        this.state = newState;
        LOGGER.debug("State changed: {}", newState);
        changeListeners.forEach(listener -> listener.accept(newState));
    }
}
public class class_name { private void setState(State state) { if (this.state != state) { this.state = state; // depends on control dependency: [if], data = [none] LOGGER.debug("State changed: {}", state); // depends on control dependency: [if], data = [state)] changeListeners.forEach(l -> l.accept(state)); // depends on control dependency: [if], data = [state)] } } }
public class class_name {
    /**
     * Loads and displays a page off the dispatch thread, synchronizing with the
     * paint cycle so the user actually sees the previous page before the next
     * one replaces it. Paint completion is signalled through {@code m_syncPage}.
     */
    public void run() {
        // Note: To make sure the user gets a chance to see the
        // html text, we wait for a paint before returing.
        // Since these threads are stacked in a private thread queue, the next
        // thread is not executed until this one is finished.
        if (! EventQueue.isDispatchThread() )// && Runtime.getRuntime().availableProcessors() == 1)
        {
            if (!m_syncPage.isPaintCalled()) {
                // Wait for the previous call to finish
                synchronized (m_syncPage) {
                    while (!m_syncPage.isPaintCalled()) {
                        try {
                            // Woken by a notify from the paint handler.
                            // NOTE(review): InterruptedException is swallowed
                            // without re-interrupting — confirm that is intended.
                            m_syncPage.wait();
                        } catch (InterruptedException e) {}
                    }
                }
            }
            synchronized (m_syncPage) {
                // Arm the flag, kick off the page load, then block until the
                // paint handler reports the new page has been drawn.
                m_syncPage.setPaintCalled(false);
                this.runPageLoader();
                while (!m_syncPage.isPaintCalled()) {
                    try {
                        m_syncPage.wait();
                    } catch (InterruptedException e) {}
                }
                this.afterPageDisplay();
            }
        }
    }
}
public class class_name { public void run() { // Note: To make sure the user gets a chance to see the // html text, we wait for a paint before returing. // Since these threads are stacked in a private thread queue, the next // thread is not executed until this one is finished. if (! EventQueue.isDispatchThread() )// && Runtime.getRuntime().availableProcessors() == 1) { if (!m_syncPage.isPaintCalled()) { // Wait for the previous call to finish synchronized (m_syncPage) { // depends on control dependency: [if], data = [none] while (!m_syncPage.isPaintCalled()) { try { m_syncPage.wait(); } catch (InterruptedException e) {} // depends on control dependency: [try], data = [none] // depends on control dependency: [catch], data = [none] } } } synchronized (m_syncPage) { // depends on control dependency: [if], data = [] m_syncPage.setPaintCalled(false); this.runPageLoader(); while (!m_syncPage.isPaintCalled()) { try { m_syncPage.wait(); } catch (InterruptedException e) {} // depends on control dependency: [try], data = [none] // depends on control dependency: [catch], data = [none] } this.afterPageDisplay(); } } } }
public class class_name { public static String getReportsOutPath(boolean appendFileSeprator) { String userLocalPath = getUserFolderPath(true); String reportPath = userLocalPath + REPORTS_OUT_PATH; checkFolderPath(reportPath, true);// to create the folder if not exist if (appendFileSeprator) { reportPath += FILE_SEPARATOR; } return reportPath; } }
public class class_name { public static String getReportsOutPath(boolean appendFileSeprator) { String userLocalPath = getUserFolderPath(true); String reportPath = userLocalPath + REPORTS_OUT_PATH; checkFolderPath(reportPath, true);// to create the folder if not exist if (appendFileSeprator) { reportPath += FILE_SEPARATOR; // depends on control dependency: [if], data = [none] } return reportPath; } }
public class class_name { private void checkShowTitle(Editable s, boolean skipChange) { // in this we can check width if (isShowTitle() && getWidth() > 0) { boolean have = s != null && s.length() > 0; if (have != isHaveText || (have && skipChange)) { isHaveText = have; animateShowTitle(isHaveText); } } } }
public class class_name { private void checkShowTitle(Editable s, boolean skipChange) { // in this we can check width if (isShowTitle() && getWidth() > 0) { boolean have = s != null && s.length() > 0; if (have != isHaveText || (have && skipChange)) { isHaveText = have; // depends on control dependency: [if], data = [none] animateShowTitle(isHaveText); // depends on control dependency: [if], data = [none] } } } }
public class class_name {
    /**
     * Records the id-to-value mapping, normalizing the URI and trimming the
     * value first. Null id or value is ignored.
     */
    public void addId(final URI id, final String value) {
        if (id == null || value == null) {
            return; // Nothing to record for partial input.
        }
        idMap.put(id.normalize(), value.trim());
    }
}
public class class_name { public void addId(final URI id, final String value) { if (id != null && value != null) { final URI localId = id.normalize(); final String localValue = value.trim(); idMap.put(localId, localValue); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Removes map entries whose {@link SoftReference} values have been cleared
     * by the garbage collector.
     */
    @Override
    public void cleanUpNullReferences() {
        for (Map.Entry<K, V> entry : map.entrySet()) {
            Object entryVal = entry.getValue();
            if (entryVal instanceof SoftReference && ((SoftReference) entryVal).get() == null) {
                // Two-arg remove only deletes if the value is still this exact
                // cleared reference, so a concurrent overwrite is not lost.
                // NOTE(review): removing while iterating entrySet() is only safe
                // if `map` is a ConcurrentMap; a plain HashMap would throw
                // ConcurrentModificationException — confirm the field's type.
                map.remove(entry.getKey(), entryVal);
            }
        }
    }
}
public class class_name { @Override public void cleanUpNullReferences() { for (Map.Entry<K, V> entry : map.entrySet()) { Object entryVal = entry.getValue(); if (entryVal instanceof SoftReference && ((SoftReference) entryVal).get() == null) { map.remove(entry.getKey(), entryVal); // depends on control dependency: [if], data = [none] } } } }
public class class_name {
    /**
     * Returns the circuit breaker registered under {@code name}, creating,
     * configuring, JMX-registering (when an exporter is present) and caching a
     * new one on first use. Synchronized so concurrent callers never create
     * duplicate breakers for the same name.
     *
     * @param name   unique breaker name; also used in the MBean ObjectName
     * @param config configuration applied only when a new breaker is created
     * @return the existing or newly created breaker
     * @throws IllegalArgumentException if the name yields an invalid ObjectName
     */
    public synchronized CircuitBreaker createCircuitBreaker(String name,
                                                            CircuitBreakerConfig config) {
        CircuitBreaker circuitBreaker = findCircuitBreaker(name);
        if (circuitBreaker == null) {
            circuitBreaker = new CircuitBreakerBean(name);
            configureCircuitBreaker(name, circuitBreaker, config);
            if (mBeanExportOperations != null) {
                ObjectName objectName;
                try {
                    objectName = new ObjectName("org.fishwife.jrugged.spring:type=CircuitBreakerBean," +
                                                "name=" + name);
                } catch (MalformedObjectNameException e) {
                    // Preserve the cause so the offending name is diagnosable.
                    throw new IllegalArgumentException("Invalid MBean Name " + name, e);
                }
                mBeanExportOperations.registerManagedResource(circuitBreaker, objectName);
            }
            addCircuitBreakerToMap(name, circuitBreaker);
        }
        return circuitBreaker;
    }
}
public class class_name { public synchronized CircuitBreaker createCircuitBreaker(String name, CircuitBreakerConfig config) { CircuitBreaker circuitBreaker = findCircuitBreaker(name); if (circuitBreaker == null) { circuitBreaker = new CircuitBreakerBean(name); // depends on control dependency: [if], data = [none] configureCircuitBreaker(name, circuitBreaker, config); // depends on control dependency: [if], data = [none] if (mBeanExportOperations != null) { ObjectName objectName; try { objectName = new ObjectName("org.fishwife.jrugged.spring:type=CircuitBreakerBean," + "name=" + name); // depends on control dependency: [try], data = [none] } catch (MalformedObjectNameException e) { throw new IllegalArgumentException("Invalid MBean Name " + name, e); } // depends on control dependency: [catch], data = [none] mBeanExportOperations.registerManagedResource(circuitBreaker, objectName); // depends on control dependency: [if], data = [none] } addCircuitBreakerToMap(name, circuitBreaker); // depends on control dependency: [if], data = [none] } return circuitBreaker; } }
public class class_name { private void run(String[] args) { if (args.length != 2) { usage(); } System.out.println("Opening Doradus server: " + args[0] + ":" + args[1]); try (DoradusClient client = new DoradusClient(args[0], Integer.parseInt(args[1]))) { deleteApplication(client); createApplication(client); addData(client); queryData(client); deleteData(client); } } }
public class class_name { private void run(String[] args) { if (args.length != 2) { usage(); // depends on control dependency: [if], data = [none] } System.out.println("Opening Doradus server: " + args[0] + ":" + args[1]); try (DoradusClient client = new DoradusClient(args[0], Integer.parseInt(args[1]))) { deleteApplication(client); createApplication(client); addData(client); queryData(client); deleteData(client); } } }
public class class_name { public Class getValueClass() { if (valueClass == null) { if (valueClassName != null) { try { valueClass = Class.forName(valueClassName); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } } return valueClass; } }
public class class_name { public Class getValueClass() { if (valueClass == null) { if (valueClassName != null) { try { valueClass = Class.forName(valueClassName); // depends on control dependency: [try], data = [none] } catch (ClassNotFoundException e) { throw new RuntimeException(e); } // depends on control dependency: [catch], data = [none] } } return valueClass; } }
public class class_name { public void marshall(ListProvisioningArtifactsForServiceActionRequest listProvisioningArtifactsForServiceActionRequest, ProtocolMarshaller protocolMarshaller) { if (listProvisioningArtifactsForServiceActionRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(listProvisioningArtifactsForServiceActionRequest.getServiceActionId(), SERVICEACTIONID_BINDING); protocolMarshaller.marshall(listProvisioningArtifactsForServiceActionRequest.getPageSize(), PAGESIZE_BINDING); protocolMarshaller.marshall(listProvisioningArtifactsForServiceActionRequest.getPageToken(), PAGETOKEN_BINDING); protocolMarshaller.marshall(listProvisioningArtifactsForServiceActionRequest.getAcceptLanguage(), ACCEPTLANGUAGE_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(ListProvisioningArtifactsForServiceActionRequest listProvisioningArtifactsForServiceActionRequest, ProtocolMarshaller protocolMarshaller) { if (listProvisioningArtifactsForServiceActionRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(listProvisioningArtifactsForServiceActionRequest.getServiceActionId(), SERVICEACTIONID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(listProvisioningArtifactsForServiceActionRequest.getPageSize(), PAGESIZE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(listProvisioningArtifactsForServiceActionRequest.getPageToken(), PAGETOKEN_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(listProvisioningArtifactsForServiceActionRequest.getAcceptLanguage(), ACCEPTLANGUAGE_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private boolean pathologicalCase(String text) { String tag = toRegex( CollectionUtil.first( tags )); String regex = String.format( "(?is)(<\\s*(%s)\\s*.*?>)", tag ); Matcher match = compile(regex).matcher(text); if (match.find()) { regex = String.format( "(?is)(.*?)(<\\s*/\\s*(%s)\\s*.*?>)", tag ); return !compile(regex).matcher(text).find(match.end()); } return true; } }
public class class_name { private boolean pathologicalCase(String text) { String tag = toRegex( CollectionUtil.first( tags )); String regex = String.format( "(?is)(<\\s*(%s)\\s*.*?>)", tag ); Matcher match = compile(regex).matcher(text); if (match.find()) { regex = String.format( "(?is)(.*?)(<\\s*/\\s*(%s)\\s*.*?>)", tag ); return !compile(regex).matcher(text).find(match.end()); } return true; // depends on control dependency: [if], data = [none] } }
public class class_name { public void measureBefore() { if (!CTRLINST.isMonitoringEnabled()) { return; } hostname = VMNAME; sessionId = SESSIONREGISTRY.recallThreadLocalSessionId(); traceId = CFREGISTRY.recallThreadLocalTraceId(); // entry point if (traceId == -1) { entrypoint = true; traceId = CFREGISTRY.getAndStoreUniqueThreadLocalTraceId(); CFREGISTRY.storeThreadLocalEOI(0); CFREGISTRY.storeThreadLocalESS(1); // next operation is ess + 1 eoi = 0; ess = 0; } else { entrypoint = false; eoi = CFREGISTRY.incrementAndRecallThreadLocalEOI(); // ess > 1 ess = CFREGISTRY.recallAndIncrementThreadLocalESS(); // ess >= 0 if ((eoi == -1) || (ess == -1)) { LOG.error("eoi and/or ess have invalid values:" + " eoi == " + eoi + " ess == " + ess); CTRLINST.terminateMonitoring(); } } tin = TIME.getTime(); } }
public class class_name { public void measureBefore() { if (!CTRLINST.isMonitoringEnabled()) { return; // depends on control dependency: [if], data = [none] } hostname = VMNAME; sessionId = SESSIONREGISTRY.recallThreadLocalSessionId(); traceId = CFREGISTRY.recallThreadLocalTraceId(); // entry point if (traceId == -1) { entrypoint = true; // depends on control dependency: [if], data = [none] traceId = CFREGISTRY.getAndStoreUniqueThreadLocalTraceId(); // depends on control dependency: [if], data = [none] CFREGISTRY.storeThreadLocalEOI(0); // depends on control dependency: [if], data = [none] CFREGISTRY.storeThreadLocalESS(1); // next operation is ess + 1 // depends on control dependency: [if], data = [none] eoi = 0; // depends on control dependency: [if], data = [none] ess = 0; // depends on control dependency: [if], data = [none] } else { entrypoint = false; // depends on control dependency: [if], data = [none] eoi = CFREGISTRY.incrementAndRecallThreadLocalEOI(); // ess > 1 // depends on control dependency: [if], data = [none] ess = CFREGISTRY.recallAndIncrementThreadLocalESS(); // ess >= 0 // depends on control dependency: [if], data = [none] if ((eoi == -1) || (ess == -1)) { LOG.error("eoi and/or ess have invalid values:" + " eoi == " + eoi + " ess == " + ess); // depends on control dependency: [if], data = [none] CTRLINST.terminateMonitoring(); // depends on control dependency: [if], data = [none] } } tin = TIME.getTime(); } }
public class class_name { public static YamlBuilder translate(String prefix, Properties properties, Properties map) throws IOException, ReflectiveOperationException { // map translators to ruleProps Map<YamlPropertyTranslator,Map<String,Object>> translators = new LinkedHashMap<>(); YamlPropertyTranslator defaultTranslator = new DefaultYamlTranslator(); for (String name : properties.stringPropertyNames()) { if ((prefix == null || name.startsWith(prefix + ".")) || (prefix.equals("mdw") && (name.startsWith("MDWFramework") || name.startsWith("LDAP")))) { YamlPropertyTranslator translator = null; String rule = map.getProperty(name); if (rule != null && rule.isEmpty()) { // blank means remove this prop (no translator) System.out.println("Info: Obsolete property: '" + name + "' -- removed"); } else if (!"mdw".equals(prefix) && (name.startsWith("mdw") || name.startsWith("MDWFramework") || name.startsWith("LDAP"))) { String msg = "Warning: '" + name + "' belongs in mdw.yaml "; if (rule != null) msg += "(as " + rule + ") "; System.err.println(msg + "-- removed"); } else if (rule == null) { // fully structured rule = prefix == null ? 
name.replace('.', '/') : name.substring(prefix.length() + 1).replace('.', '/'); translator = defaultTranslator; } else if (rule.startsWith("[")) { // custom translator -- reuse existing instance if found int endBracket = rule.lastIndexOf(']'); String className = rule.substring(1, endBracket); for (YamlPropertyTranslator instance : translators.keySet()) { if (instance.getClass().getName().equals(className)) { translator = instance; break; } } if (translator == null) { translator = Class.forName(className).asSubclass(YamlPropertyTranslator.class).newInstance(); } } else { translator = defaultTranslator; } if (translator != null) { Map<String,Object> ruleProps = translators.get(translator); if (ruleProps == null) { ruleProps = new LinkedHashMap<>(); translators.put(translator, ruleProps); } Object propValue = properties.getProperty(name); try { // integers should be treated as such propValue = Integer.parseInt(propValue.toString()); } catch (NumberFormatException ex) { } ruleProps.put(rule, propValue); } } } // perform translations YamlBuilder yamlBuilder = new YamlBuilder(); for (YamlPropertyTranslator translator : translators.keySet()) { yamlBuilder.append(translator.translate(translators.get(translator))).newLine(); } return yamlBuilder; } }
public class class_name { public static YamlBuilder translate(String prefix, Properties properties, Properties map) throws IOException, ReflectiveOperationException { // map translators to ruleProps Map<YamlPropertyTranslator,Map<String,Object>> translators = new LinkedHashMap<>(); YamlPropertyTranslator defaultTranslator = new DefaultYamlTranslator(); for (String name : properties.stringPropertyNames()) { if ((prefix == null || name.startsWith(prefix + ".")) || (prefix.equals("mdw") && (name.startsWith("MDWFramework") || name.startsWith("LDAP")))) { YamlPropertyTranslator translator = null; String rule = map.getProperty(name); if (rule != null && rule.isEmpty()) { // blank means remove this prop (no translator) System.out.println("Info: Obsolete property: '" + name + "' -- removed"); // depends on control dependency: [if], data = [none] } else if (!"mdw".equals(prefix) && (name.startsWith("mdw") || name.startsWith("MDWFramework") || name.startsWith("LDAP"))) { String msg = "Warning: '" + name + "' belongs in mdw.yaml "; if (rule != null) msg += "(as " + rule + ") "; System.err.println(msg + "-- removed"); } else if (rule == null) { // fully structured rule = prefix == null ? 
name.replace('.', '/') : name.substring(prefix.length() + 1).replace('.', '/'); // depends on control dependency: [if], data = [none] translator = defaultTranslator; // depends on control dependency: [if], data = [none] } else if (rule.startsWith("[")) { // custom translator -- reuse existing instance if found int endBracket = rule.lastIndexOf(']'); String className = rule.substring(1, endBracket); for (YamlPropertyTranslator instance : translators.keySet()) { if (instance.getClass().getName().equals(className)) { translator = instance; // depends on control dependency: [if], data = [none] break; } } if (translator == null) { translator = Class.forName(className).asSubclass(YamlPropertyTranslator.class).newInstance(); // depends on control dependency: [if], data = [none] } } else { translator = defaultTranslator; // depends on control dependency: [if], data = [none] } if (translator != null) { Map<String,Object> ruleProps = translators.get(translator); if (ruleProps == null) { ruleProps = new LinkedHashMap<>(); // depends on control dependency: [if], data = [none] translators.put(translator, ruleProps); // depends on control dependency: [if], data = [none] } Object propValue = properties.getProperty(name); try { // integers should be treated as such propValue = Integer.parseInt(propValue.toString()); // depends on control dependency: [try], data = [none] } catch (NumberFormatException ex) { } // depends on control dependency: [catch], data = [none] ruleProps.put(rule, propValue); // depends on control dependency: [if], data = [none] } } } // perform translations YamlBuilder yamlBuilder = new YamlBuilder(); for (YamlPropertyTranslator translator : translators.keySet()) { yamlBuilder.append(translator.translate(translators.get(translator))).newLine(); } return yamlBuilder; } }
public class class_name { private boolean startsWithAFilteredPAttern(String string) { Iterator<String> iterator = filteredFrames.iterator(); while (iterator.hasNext()) { if (string.trim().startsWith(iterator.next())) { return true; } } return false; } }
public class class_name { private boolean startsWithAFilteredPAttern(String string) { Iterator<String> iterator = filteredFrames.iterator(); while (iterator.hasNext()) { if (string.trim().startsWith(iterator.next())) { return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name { public void putBytes(int pos, byte... bs) { adaptSize(pos + bs.length); ByteBuffer bb = null; int i = _bufferSize; for (byte b : bs) { if (i == _bufferSize) { bb = getBuffer(pos); i = pos % _bufferSize; } bb.put(i, b); ++i; ++pos; } } }
public class class_name { public void putBytes(int pos, byte... bs) { adaptSize(pos + bs.length); ByteBuffer bb = null; int i = _bufferSize; for (byte b : bs) { if (i == _bufferSize) { bb = getBuffer(pos); // depends on control dependency: [if], data = [none] i = pos % _bufferSize; // depends on control dependency: [if], data = [none] } bb.put(i, b); // depends on control dependency: [for], data = [b] ++i; // depends on control dependency: [for], data = [none] ++pos; // depends on control dependency: [for], data = [none] } } }
public class class_name { public Key withAttribute(String attributeName, AttributeValue value) { if (this.attributes == null) { this.attributes = new HashMap<String, AttributeValue>(); } attributes.put(attributeName, value); return this; } }
public class class_name { public Key withAttribute(String attributeName, AttributeValue value) { if (this.attributes == null) { this.attributes = new HashMap<String, AttributeValue>(); // depends on control dependency: [if], data = [none] } attributes.put(attributeName, value); return this; } }
public class class_name { private Section parseSection(boolean mediaQuery) { Section section = new Section(); parseSectionSelector(mediaQuery, section); tokenizer.consumeExpectedSymbol("{"); while (tokenizer.more()) { if (tokenizer.current().isSymbol("}")) { tokenizer.consumeExpectedSymbol("}"); return section; } // Parse "normal" attributes like "font-weight: bold;" if (isAtAttribute()) { Attribute attr = parseAttribute(); section.addAttribute(attr); } else if (tokenizer.current().isKeyword(KEYWORD_MEDIA)) { // Take care of @media sub sections section.addSubSection(parseSection(true)); } else if (tokenizer.current().isKeyword(KEYWORD_INCLUDE)) { parseInclude(section); } else if (tokenizer.current().isKeyword(KEYWORD_EXTEND)) { parseExtend(section); } else { // If it is neither an attribute, nor a media query or instruction - it is probably a sub section... section.addSubSection(parseSection(false)); } } tokenizer.consumeExpectedSymbol("}"); return section; } }
public class class_name { private Section parseSection(boolean mediaQuery) { Section section = new Section(); parseSectionSelector(mediaQuery, section); tokenizer.consumeExpectedSymbol("{"); while (tokenizer.more()) { if (tokenizer.current().isSymbol("}")) { tokenizer.consumeExpectedSymbol("}"); // depends on control dependency: [if], data = [none] return section; // depends on control dependency: [if], data = [none] } // Parse "normal" attributes like "font-weight: bold;" if (isAtAttribute()) { Attribute attr = parseAttribute(); section.addAttribute(attr); // depends on control dependency: [if], data = [none] } else if (tokenizer.current().isKeyword(KEYWORD_MEDIA)) { // Take care of @media sub sections section.addSubSection(parseSection(true)); // depends on control dependency: [if], data = [none] } else if (tokenizer.current().isKeyword(KEYWORD_INCLUDE)) { parseInclude(section); // depends on control dependency: [if], data = [none] } else if (tokenizer.current().isKeyword(KEYWORD_EXTEND)) { parseExtend(section); // depends on control dependency: [if], data = [none] } else { // If it is neither an attribute, nor a media query or instruction - it is probably a sub section... section.addSubSection(parseSection(false)); // depends on control dependency: [if], data = [none] } } tokenizer.consumeExpectedSymbol("}"); return section; } }
public class class_name { public void setBoardPivotX(float x) { if (mListeners != null) { for (StateListener l : mListeners) { l.onBoardPivotX(this, x); } } mContentView.setPivotX(x); } }
public class class_name { public void setBoardPivotX(float x) { if (mListeners != null) { for (StateListener l : mListeners) { l.onBoardPivotX(this, x); // depends on control dependency: [for], data = [l] } } mContentView.setPivotX(x); } }
public class class_name { public boolean editMap(ConfigurationHandler configurationHandler) { String method = configurationHandler.getMethod().toUpperCase(); if (configurationHandlerMap.containsKey(Method.valueOf(method))) { configurationHandlerMap.put(Method.valueOf(method), configurationHandler); return true; } return false; } }
public class class_name { public boolean editMap(ConfigurationHandler configurationHandler) { String method = configurationHandler.getMethod().toUpperCase(); if (configurationHandlerMap.containsKey(Method.valueOf(method))) { configurationHandlerMap.put(Method.valueOf(method), configurationHandler); // depends on control dependency: [if], data = [none] return true; // depends on control dependency: [if], data = [none] } return false; } }
public class class_name { public static void changeUser(IUser user) { try { getUserContext().requestContextChange(user); } catch (Exception e) { log.error("Error during user context change.", e); } } }
public class class_name { public static void changeUser(IUser user) { try { getUserContext().requestContextChange(user); // depends on control dependency: [try], data = [none] } catch (Exception e) { log.error("Error during user context change.", e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public EventCategoriesMap withEvents(EventInfoMap... events) { if (this.events == null) { setEvents(new com.amazonaws.internal.SdkInternalList<EventInfoMap>(events.length)); } for (EventInfoMap ele : events) { this.events.add(ele); } return this; } }
public class class_name { public EventCategoriesMap withEvents(EventInfoMap... events) { if (this.events == null) { setEvents(new com.amazonaws.internal.SdkInternalList<EventInfoMap>(events.length)); // depends on control dependency: [if], data = [none] } for (EventInfoMap ele : events) { this.events.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { void splitEdge_(int edge1, int edge2, int intersectionCluster, SegmentIntersector intersector) { disconnectEdge_(edge1);// disconnects the edge from the clusters. The // edge still remembers the clusters. if (edge2 != -1) disconnectEdge_(edge2);// disconnects the edge from the clusters. // The edge still remembers the clusters. // Collect all edges that are affected when the clusters change position // due to snapping // The edges are collected in m_edges_to_insert_in_sweep_structure. // Collect the modified clusters in m_modified_clusters. processSplitHelper1_(0, edge1, intersector); if (edge2 != -1) processSplitHelper1_(1, edge2, intersector); if (intersectionCluster != -1) { intersector.getResultPoint().getXY(pt_1); getClusterXY(intersectionCluster, pt_2); if (!pt_2.isEqual(pt_1)) m_modified_clusters.add(intersectionCluster); } // remove modified clusters from the event queue. We'll reincert them // later for (int i = 0, n = m_modified_clusters.size(); i < n; i++) { int cluster = m_modified_clusters.get(i); int eventQnode = getClusterEventQNode(cluster); if (eventQnode != -1) { m_event_q.deleteNode(eventQnode, -1); setClusterEventQNode_(cluster, -1); } } int edgeOrigins1 = getEdgeOriginVertices(edge1); int edgeOrigins2 = (edge2 != -1) ? getEdgeOriginVertices(edge2) : -1; // Adjust the vertex coordinates and split the segments in the the edit // shape. applyIntersectorToEditShape_(edgeOrigins1, intersector, 0); if (edge2 != -1) applyIntersectorToEditShape_(edgeOrigins2, intersector, 1); // Produce clusters, and new edges. The new edges are added to // m_edges_to_insert_in_sweep_structure. 
createEdgesAndClustersFromSplitEdge_(edge1, intersector, 0); if (edge2 != -1) createEdgesAndClustersFromSplitEdge_(edge2, intersector, 1); m_edge_vertices.deleteList(edgeOrigins1); deleteEdge_(edge1); if (edge2 != -1) { m_edge_vertices.deleteList(edgeOrigins2); deleteEdge_(edge2); } // insert clusters into the event queue and the edges into the sweep // structure. for (int i = 0, n = m_modified_clusters.size(); i < n; i++) { int cluster = m_modified_clusters.get(i); if (cluster == m_sweep_point_cluster) m_b_sweep_point_cluster_was_modified = true; int eventQnode = getClusterEventQNode(cluster); if (eventQnode == -1) { int vertex = getClusterFirstVertex(cluster); assert (getClusterFromVertex(vertex) == cluster); eventQnode = m_event_q.addUniqueElement(vertex, -1);// O(logN) // operation if (eventQnode == -1) {// the cluster is coinciding with another // one. merge. int existingNode = m_event_q.getDuplicateElement(-1); int v = m_event_q.getElement(existingNode); assert (m_shape.isEqualXY(vertex, v)); int existingCluster = getClusterFromVertex(v); mergeClusters_(existingCluster, cluster); } else { setClusterEventQNode_(cluster, eventQnode); } } else { // if already inserted (probably impossible) case } } m_modified_clusters.clear(false); } }
public class class_name { void splitEdge_(int edge1, int edge2, int intersectionCluster, SegmentIntersector intersector) { disconnectEdge_(edge1);// disconnects the edge from the clusters. The // edge still remembers the clusters. if (edge2 != -1) disconnectEdge_(edge2);// disconnects the edge from the clusters. // The edge still remembers the clusters. // Collect all edges that are affected when the clusters change position // due to snapping // The edges are collected in m_edges_to_insert_in_sweep_structure. // Collect the modified clusters in m_modified_clusters. processSplitHelper1_(0, edge1, intersector); if (edge2 != -1) processSplitHelper1_(1, edge2, intersector); if (intersectionCluster != -1) { intersector.getResultPoint().getXY(pt_1); // depends on control dependency: [if], data = [none] getClusterXY(intersectionCluster, pt_2); // depends on control dependency: [if], data = [(intersectionCluster] if (!pt_2.isEqual(pt_1)) m_modified_clusters.add(intersectionCluster); } // remove modified clusters from the event queue. We'll reincert them // later for (int i = 0, n = m_modified_clusters.size(); i < n; i++) { int cluster = m_modified_clusters.get(i); int eventQnode = getClusterEventQNode(cluster); if (eventQnode != -1) { m_event_q.deleteNode(eventQnode, -1); // depends on control dependency: [if], data = [(eventQnode] setClusterEventQNode_(cluster, -1); // depends on control dependency: [if], data = [-1)] } } int edgeOrigins1 = getEdgeOriginVertices(edge1); int edgeOrigins2 = (edge2 != -1) ? getEdgeOriginVertices(edge2) : -1; // Adjust the vertex coordinates and split the segments in the the edit // shape. applyIntersectorToEditShape_(edgeOrigins1, intersector, 0); if (edge2 != -1) applyIntersectorToEditShape_(edgeOrigins2, intersector, 1); // Produce clusters, and new edges. The new edges are added to // m_edges_to_insert_in_sweep_structure. 
createEdgesAndClustersFromSplitEdge_(edge1, intersector, 0); if (edge2 != -1) createEdgesAndClustersFromSplitEdge_(edge2, intersector, 1); m_edge_vertices.deleteList(edgeOrigins1); deleteEdge_(edge1); if (edge2 != -1) { m_edge_vertices.deleteList(edgeOrigins2); // depends on control dependency: [if], data = [none] deleteEdge_(edge2); // depends on control dependency: [if], data = [(edge2] } // insert clusters into the event queue and the edges into the sweep // structure. for (int i = 0, n = m_modified_clusters.size(); i < n; i++) { int cluster = m_modified_clusters.get(i); if (cluster == m_sweep_point_cluster) m_b_sweep_point_cluster_was_modified = true; int eventQnode = getClusterEventQNode(cluster); if (eventQnode == -1) { int vertex = getClusterFirstVertex(cluster); assert (getClusterFromVertex(vertex) == cluster); // depends on control dependency: [if], data = [none] eventQnode = m_event_q.addUniqueElement(vertex, -1);// O(logN) // depends on control dependency: [if], data = [-1)] // operation if (eventQnode == -1) {// the cluster is coinciding with another // one. merge. int existingNode = m_event_q.getDuplicateElement(-1); int v = m_event_q.getElement(existingNode); assert (m_shape.isEqualXY(vertex, v)); // depends on control dependency: [if], data = [none] int existingCluster = getClusterFromVertex(v); mergeClusters_(existingCluster, cluster); // depends on control dependency: [if], data = [none] } else { setClusterEventQNode_(cluster, eventQnode); // depends on control dependency: [if], data = [none] } } else { // if already inserted (probably impossible) case } } m_modified_clusters.clear(false); } }
public class class_name { public Alphabet getAlphabetForChar(final char theChar) { Alphabet result = null; for (final Alphabet alphabet : this.alphabets) { if (alphabet.isContain(theChar)) { result = alphabet; break; } } return result; } }
public class class_name { public Alphabet getAlphabetForChar(final char theChar) { Alphabet result = null; for (final Alphabet alphabet : this.alphabets) { if (alphabet.isContain(theChar)) { result = alphabet; // depends on control dependency: [if], data = [none] break; } } return result; } }
public class class_name { public Rate getRate(int field) throws MPXJException { Rate result; if ((field < m_fields.length) && (m_fields[field].length() != 0)) { try { String rate = m_fields[field]; int index = rate.indexOf('/'); double amount; TimeUnit units; if (index == -1) { amount = m_formats.getCurrencyFormat().parse(rate).doubleValue(); units = TimeUnit.HOURS; } else { amount = m_formats.getCurrencyFormat().parse(rate.substring(0, index)).doubleValue(); units = TimeUnitUtility.getInstance(rate.substring(index + 1), m_locale); } result = new Rate(amount, units); } catch (ParseException ex) { throw new MPXJException("Failed to parse rate", ex); } } else { result = null; } return (result); } }
public class class_name { public Rate getRate(int field) throws MPXJException { Rate result; if ((field < m_fields.length) && (m_fields[field].length() != 0)) { try { String rate = m_fields[field]; int index = rate.indexOf('/'); double amount; TimeUnit units; if (index == -1) { amount = m_formats.getCurrencyFormat().parse(rate).doubleValue(); // depends on control dependency: [if], data = [none] units = TimeUnit.HOURS; // depends on control dependency: [if], data = [none] } else { amount = m_formats.getCurrencyFormat().parse(rate.substring(0, index)).doubleValue(); // depends on control dependency: [if], data = [none] units = TimeUnitUtility.getInstance(rate.substring(index + 1), m_locale); // depends on control dependency: [if], data = [(index] } result = new Rate(amount, units); // depends on control dependency: [try], data = [none] } catch (ParseException ex) { throw new MPXJException("Failed to parse rate", ex); } // depends on control dependency: [catch], data = [none] } else { result = null; } return (result); } }
public class class_name { @Override protected final ParseWriter parseChunk(int chunkId, ParseReader din, ParseWriter dout) { _cidx = chunkId; // only do something if within file size and the orc file is not empty List<StripeInformation> stripesInfo = ((OrcParseSetup) this._setup).getStripes(); if(stripesInfo.size() == 0) { dout.addError(new ParseWriter.ParseErr("Orc Parser: Empty file.", chunkId, 0L, -2L)); return dout; // empty file } OrcParseSetup setup = (OrcParseSetup) this._setup; StripeInformation thisStripe = stripesInfo.get(chunkId); // get one stripe // write one stripe of data to H2O frame String [] orcTypes = setup.getColumnTypesString(); boolean[] toInclude = setup.getToInclude(); try { RecordReader perStripe = orcFileReader.rows(thisStripe.getOffset(), thisStripe.getDataLength(), setup.getToInclude(), null, setup.getColumnNames()); VectorizedRowBatch batch = null; long rows = 0; long rowCount = thisStripe.getNumberOfRows(); while (rows != rowCount) { batch = perStripe.nextBatch(batch); // read orc file stripes in vectorizedRowBatch long currentBatchRow = batch.count(); int nrows = (int)currentBatchRow; if(currentBatchRow != nrows) throw new IllegalArgumentException("got batch with too many records, does not fit in int"); ColumnVector[] dataVectors = batch.cols; int colIndex = 0; for (int col = 0; col < batch.numCols; ++col) { // read one column at a time; if (toInclude[col + 1]) { // only write a column if we actually want it if(_setup.getColumnTypes()[colIndex] != Vec.T_BAD) write1column(dataVectors[col], orcTypes[colIndex], colIndex, nrows, dout); else dout.addNAs(col,nrows); colIndex++; } } rows += currentBatchRow; // record number of rows of data actually read } byte [] col_types = _setup.getColumnTypes(); for(int i = 0; i < col_types.length; ++i){ if(col_types[i] == Vec.T_BAD) dout.addNAs(i,(int)rowCount); } perStripe.close(); } catch(IOException ioe) { throw new RuntimeException(ioe); } return dout; } }
public class class_name { @Override protected final ParseWriter parseChunk(int chunkId, ParseReader din, ParseWriter dout) { _cidx = chunkId; // only do something if within file size and the orc file is not empty List<StripeInformation> stripesInfo = ((OrcParseSetup) this._setup).getStripes(); if(stripesInfo.size() == 0) { dout.addError(new ParseWriter.ParseErr("Orc Parser: Empty file.", chunkId, 0L, -2L)); // depends on control dependency: [if], data = [none] return dout; // empty file // depends on control dependency: [if], data = [none] } OrcParseSetup setup = (OrcParseSetup) this._setup; StripeInformation thisStripe = stripesInfo.get(chunkId); // get one stripe // write one stripe of data to H2O frame String [] orcTypes = setup.getColumnTypesString(); boolean[] toInclude = setup.getToInclude(); try { RecordReader perStripe = orcFileReader.rows(thisStripe.getOffset(), thisStripe.getDataLength(), setup.getToInclude(), null, setup.getColumnNames()); VectorizedRowBatch batch = null; long rows = 0; long rowCount = thisStripe.getNumberOfRows(); while (rows != rowCount) { batch = perStripe.nextBatch(batch); // read orc file stripes in vectorizedRowBatch // depends on control dependency: [while], data = [none] long currentBatchRow = batch.count(); int nrows = (int)currentBatchRow; if(currentBatchRow != nrows) throw new IllegalArgumentException("got batch with too many records, does not fit in int"); ColumnVector[] dataVectors = batch.cols; int colIndex = 0; for (int col = 0; col < batch.numCols; ++col) { // read one column at a time; if (toInclude[col + 1]) { // only write a column if we actually want it if(_setup.getColumnTypes()[colIndex] != Vec.T_BAD) write1column(dataVectors[col], orcTypes[colIndex], colIndex, nrows, dout); else dout.addNAs(col,nrows); colIndex++; // depends on control dependency: [if], data = [none] } } rows += currentBatchRow; // record number of rows of data actually read // depends on control dependency: [while], data = [none] } byte [] 
col_types = _setup.getColumnTypes(); for(int i = 0; i < col_types.length; ++i){ if(col_types[i] == Vec.T_BAD) dout.addNAs(i,(int)rowCount); } perStripe.close(); // depends on control dependency: [try], data = [none] } catch(IOException ioe) { throw new RuntimeException(ioe); } // depends on control dependency: [catch], data = [none] return dout; } }
public class class_name { public static String getQualifiedName(HasMetadata entity) { if (entity != null) { return "" + getNamespace(entity) + "/" + getName(entity); } else { return null; } } }
public class class_name { public static String getQualifiedName(HasMetadata entity) { if (entity != null) { return "" + getNamespace(entity) + "/" + getName(entity); // depends on control dependency: [if], data = [(entity] } else { return null; // depends on control dependency: [if], data = [none] } } }