code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name {
    /**
     * Sets the text on this component's body. The text is applied when the
     * body is one of the known text-bearing component types; when no body
     * exists yet, a new WText is created and installed to hold the text.
     *
     * @param text the text to set
     * @param args optional serializable arguments for the text
     */
    public void setText(final String text, final Serializable... args) {
        WComponent content = getBody();
        if (content instanceof WText) {
            ((WText) content).setText(text, args);
        } else if (content instanceof WLabel) {
            ((WLabel) content).setText(text, args);
        } else if (content instanceof WButton) {
            ((WButton) content).setText(text, args);
        } else if (content instanceof WLink) {
            ((WLink) content).setText(text, args);
        } else if (content == null) {
            // No body yet: wrap the text in a new WText and install it.
            setBody(new WText(text, args));
        }
        // A body of any other type is left untouched (same as before).
    }
}
// Dependency-annotated copy of setText: code identical to the un-annotated
// form, with inline "// depends on control dependency" notes per statement.
// NOTE(review): collapsed onto one line, each trailing // comment swallows the
// rest of the line, so this row is not compilable as-is.
public class class_name { public void setText(final String text, final Serializable... args) { WComponent body = getBody(); if (body instanceof WText) { ((WText) body).setText(text, args); // depends on control dependency: [if], data = [none] } else if (body instanceof WLabel) { ((WLabel) body).setText(text, args); // depends on control dependency: [if], data = [none] } else if (body instanceof WButton) { ((WButton) body).setText(text, args); // depends on control dependency: [if], data = [none] } else if (body instanceof WLink) { ((WLink) body).setText(text, args); // depends on control dependency: [if], data = [none] } else if (body == null) { setBody(new WText(text, args)); // depends on control dependency: [if], data = [none] } } }
// Builds the POST form-field map for a HelloSign template draft: optional
// title/subject/message, indexed signer roles (with order when it matters),
// cc roles, file and file-url entries, serialized merge fields, and boolean
// flags. Any failure is wrapped in a HelloSignException with its cause.
public class class_name { public Map<String, Serializable> getPostFields() throws HelloSignException { Map<String, Serializable> fields = super.getPostFields(); try { if (hasTitle()) { fields.put(REQUEST_TITLE, getTitle()); } if (hasSubject()) { fields.put(REQUEST_SUBJECT, getSubject()); } if (hasMessage()) { fields.put(REQUEST_MESSAGE, getMessage()); } List<String> signerRoles = getSignerRoles(); for (int i = 0; i < signerRoles.size(); i++) { String s = signerRoles.get(i); fields.put("signer_roles[" + i + "][name]", s); if (getOrderMatters()) { fields.put("signer_roles[" + i + "][order]", i); } } List<String> ccRoles = getCCRoles(); for (int i = 0; i < ccRoles.size(); i++) { String cc = ccRoles.get(i); fields.put("cc_roles[" + i + "]", cc); } List<Document> docs = getDocuments(); for (int i = 0; i < docs.size(); i++) { Document d = docs.get(i); fields.put("file[" + i + "]", d.getFile()); } List<String> fileUrls = getFileUrls(); for (int i = 0; i < fileUrls.size(); i++) { fields.put("file_url[" + i + "]", fileUrls.get(i)); } String mergeFieldStr = TemplateDraft.serializeMergeFields(getMergeFields()); if (mergeFieldStr != null) { fields.put("merge_fields", mergeFieldStr); } if (hasUsePreexistingFields()) { fields.put(REQUEST_USE_PREEXISTING_FIELDS, true); } if (isTestMode()) { fields.put(REQUEST_TEST_MODE, true); } } catch (Exception ex) { throw new HelloSignException("Could not extract form fields from TemplateDraft.", ex); } return fields; } }
// Dependency-annotated copy of getPostFields above; code identical with
// per-statement control/data dependency notes. NOTE(review): the exception
// message string literal is split across two physical lines in this row —
// preserved verbatim, so the row is not compilable as-is.
public class class_name { public Map<String, Serializable> getPostFields() throws HelloSignException { Map<String, Serializable> fields = super.getPostFields(); try { if (hasTitle()) { fields.put(REQUEST_TITLE, getTitle()); // depends on control dependency: [if], data = [none] } if (hasSubject()) { fields.put(REQUEST_SUBJECT, getSubject()); // depends on control dependency: [if], data = [none] } if (hasMessage()) { fields.put(REQUEST_MESSAGE, getMessage()); // depends on control dependency: [if], data = [none] } List<String> signerRoles = getSignerRoles(); for (int i = 0; i < signerRoles.size(); i++) { String s = signerRoles.get(i); fields.put("signer_roles[" + i + "][name]", s); // depends on control dependency: [for], data = [i] if (getOrderMatters()) { fields.put("signer_roles[" + i + "][order]", i); // depends on control dependency: [if], data = [none] } } List<String> ccRoles = getCCRoles(); for (int i = 0; i < ccRoles.size(); i++) { String cc = ccRoles.get(i); fields.put("cc_roles[" + i + "]", cc); // depends on control dependency: [for], data = [i] } List<Document> docs = getDocuments(); for (int i = 0; i < docs.size(); i++) { Document d = docs.get(i); fields.put("file[" + i + "]", d.getFile()); // depends on control dependency: [for], data = [i] } List<String> fileUrls = getFileUrls(); for (int i = 0; i < fileUrls.size(); i++) { fields.put("file_url[" + i + "]", fileUrls.get(i)); // depends on control dependency: [for], data = [i] } String mergeFieldStr = TemplateDraft.serializeMergeFields(getMergeFields()); if (mergeFieldStr != null) { fields.put("merge_fields", mergeFieldStr); // depends on control dependency: [if], data = [none] } if (hasUsePreexistingFields()) { fields.put(REQUEST_USE_PREEXISTING_FIELDS, true); // depends on control dependency: [if], data = [none] } if (isTestMode()) { fields.put(REQUEST_TEST_MODE, true); // depends on control dependency: [if], data = [none] } } catch (Exception ex) { throw new HelloSignException("Could not extract form 
fields from TemplateDraft.", ex); } return fields; } }
public class class_name {
    /**
     * Extracts the resource names encoded in the request parameters.
     * Defaults to "/" when the resources parameter is absent; an explicitly
     * empty parameter yields an empty list.
     *
     * @param params the request parameters
     * @return the list of resource names (split on '#'), possibly empty
     */
    protected List<String> getResourceNamesFromParam(Map<String, String> params) {
        String encoded = "/";
        if (params.containsKey(I_CmsListResourceCollector.PARAM_RESOURCES)) {
            encoded = params.get(I_CmsListResourceCollector.PARAM_RESOURCES);
        }
        return encoded.isEmpty()
            ? Collections.<String> emptyList()
            : CmsStringUtil.splitAsList(encoded, "#");
    }
}
// Dependency-annotated copy of getResourceNamesFromParam above; code identical
// with per-statement control-dependency notes. NOTE(review): collapsed to one
// line, the trailing // comments swallow the rest of the line.
public class class_name { protected List<String> getResourceNamesFromParam(Map<String, String> params) { String resourcesParam = "/"; if (params.containsKey(I_CmsListResourceCollector.PARAM_RESOURCES)) { resourcesParam = params.get(I_CmsListResourceCollector.PARAM_RESOURCES); // depends on control dependency: [if], data = [none] } if (resourcesParam.length() == 0) { return Collections.emptyList(); // depends on control dependency: [if], data = [none] } return CmsStringUtil.splitAsList(resourcesParam, "#"); } }
public class class_name {
    /**
     * Returns the maximal range enclosed by both this range and
     * {@code connectedRange}, reusing an existing instance when one of the two
     * ranges already is the intersection.
     *
     * @param connectedRange a range connected to this one
     * @return the intersection of the two ranges
     */
    public Range<C> intersection(Range<C> connectedRange) {
        int lowerDelta = lowerBound.compareTo(connectedRange.lowerBound);
        int upperDelta = upperBound.compareTo(connectedRange.upperBound);
        // This range lies entirely within the other: it is the intersection.
        if (lowerDelta >= 0 && upperDelta <= 0) {
            return this;
        }
        // Symmetrically, the other range may lie entirely within this one.
        if (lowerDelta <= 0 && upperDelta >= 0) {
            return connectedRange;
        }
        // Otherwise combine the tighter bound from each side.
        Cut<C> tighterLower = (lowerDelta >= 0) ? lowerBound : connectedRange.lowerBound;
        Cut<C> tighterUpper = (upperDelta <= 0) ? upperBound : connectedRange.upperBound;
        return create(tighterLower, tighterUpper);
    }
}
// Dependency-annotated copy of intersection above; code identical with
// per-statement control-dependency notes (not compilable as a single line).
public class class_name { public Range<C> intersection(Range<C> connectedRange) { int lowerCmp = lowerBound.compareTo(connectedRange.lowerBound); int upperCmp = upperBound.compareTo(connectedRange.upperBound); if (lowerCmp >= 0 && upperCmp <= 0) { return this; // depends on control dependency: [if], data = [none] } else if (lowerCmp <= 0 && upperCmp >= 0) { return connectedRange; // depends on control dependency: [if], data = [none] } else { Cut<C> newLower = (lowerCmp >= 0) ? lowerBound : connectedRange.lowerBound; Cut<C> newUpper = (upperCmp <= 0) ? upperBound : connectedRange.upperBound; return create(newLower, newUpper); // depends on control dependency: [if], data = [none] } } }
// Builds a dependency edge between two concrete 1:1-related vertices based on
// their modification states: an insert target must precede its referrer, a
// delete referrer must precede its target; returns null when no ordering
// constraint applies. Edge weight is higher when a foreign key backs the
// reference.
public class class_name { protected Edge buildConcrete11Edge(Vertex vertex1, Vertex vertex2, boolean fkToRef) { ModificationState state1 = vertex1.getEnvelope().getModificationState(); ModificationState state2 = vertex2.getEnvelope().getModificationState(); if (state1.needsUpdate() || state1.needsInsert()) { if (state2.needsInsert()) { // (2) must be inserted before (1) can point to it return new Edge(vertex2, vertex1, fkToRef ? CONCRETE_EDGE_WEIGHT_WITH_FK : CONCRETE_EDGE_WEIGHT); } } else if (state1.needsDelete()) { if (state2.needsDelete()) { // (1) points to (2) and must be deleted first return new Edge(vertex1, vertex2, fkToRef ? CONCRETE_EDGE_WEIGHT_WITH_FK : CONCRETE_EDGE_WEIGHT); } } return null; } }
// Dependency-annotated copy of buildConcrete11Edge above; code identical with
// per-statement control-dependency notes (not compilable as a single line).
public class class_name { protected Edge buildConcrete11Edge(Vertex vertex1, Vertex vertex2, boolean fkToRef) { ModificationState state1 = vertex1.getEnvelope().getModificationState(); ModificationState state2 = vertex2.getEnvelope().getModificationState(); if (state1.needsUpdate() || state1.needsInsert()) { if (state2.needsInsert()) { // (2) must be inserted before (1) can point to it return new Edge(vertex2, vertex1, fkToRef ? CONCRETE_EDGE_WEIGHT_WITH_FK : CONCRETE_EDGE_WEIGHT); // depends on control dependency: [if], data = [none] } } else if (state1.needsDelete()) { if (state2.needsDelete()) { // (1) points to (2) and must be deleted first return new Edge(vertex1, vertex2, fkToRef ? CONCRETE_EDGE_WEIGHT_WITH_FK : CONCRETE_EDGE_WEIGHT); // depends on control dependency: [if], data = [none] } } return null; } }
public class class_name {
    /**
     * Checks whether the given friend is already present, comparing the
     * name part of the user ids (as parsed by StringUtils.parseName).
     *
     * @param friend the friend to look for
     * @return true if a friend with the same parsed name exists
     */
    public boolean contains(Friend friend) {
        boolean found = false;
        for (final Friend candidate : getFriends()) {
            if (StringUtils.parseName(candidate.getUserId()).equals(
                    StringUtils.parseName(friend.getUserId()))) {
                found = true;
                break;
            }
        }
        return found;
    }
}
// Dependency-annotated copy of contains above; code identical with a
// per-statement control-dependency note (not compilable as a single line).
public class class_name { public boolean contains(Friend friend) { for (final Friend f : getFriends()) { if (StringUtils.parseName(f.getUserId()).equals( StringUtils.parseName(friend.getUserId()))) { return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name {
    /**
     * Sets the unprocessed trace segments, taking a defensive copy of the
     * supplied collection; a null argument clears the field.
     *
     * @param unprocessedTraceSegments the segments to store, or null
     */
    public void setUnprocessedTraceSegments(java.util.Collection<UnprocessedTraceSegment> unprocessedTraceSegments) {
        this.unprocessedTraceSegments = (unprocessedTraceSegments == null)
            ? null
            : new java.util.ArrayList<UnprocessedTraceSegment>(unprocessedTraceSegments);
    }
}
// Dependency-annotated copy of setUnprocessedTraceSegments above; code
// identical with per-statement control-dependency notes.
public class class_name { public void setUnprocessedTraceSegments(java.util.Collection<UnprocessedTraceSegment> unprocessedTraceSegments) { if (unprocessedTraceSegments == null) { this.unprocessedTraceSegments = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.unprocessedTraceSegments = new java.util.ArrayList<UnprocessedTraceSegment>(unprocessedTraceSegments); } }
// Reads the next dBase (DBF) record alongside the shape element and copies the
// values of every user-selected column onto the element as attributes. The
// record is always consumed — even when the element was not inserted — to keep
// shape and dBase entries in step.
public class class_name { private void readAttributesFromDBaseFile(E created_element) throws IOException { // Read the DBF entry if (this.dbfReader != null) { final List<DBaseFileField> dbfColumns = this.dbfReader.getDBFFields(); // Read the record even if the shape element was not inserted into // the database. It is necessary to not have inconsistancy between // the shape entries and the dbase entries. final DBaseFileRecord record = this.dbfReader.readNextDBFRecord(); if (record != null) { // Add the dBase values for (final DBaseFileField dbfColumn : dbfColumns) { // Test if the column was marked as selected. // A column was selected if the user want to import the column // values into the database. if (this.dbfReader.isColumnSelectable(dbfColumn)) { final Object fieldValue = record.getFieldValue(dbfColumn.getColumnIndex()); final AttributeValueImpl attr = new AttributeValueImpl(); attr.castAndSet(dbfColumn.getAttributeType(), fieldValue); putAttributeIn(created_element, dbfColumn.getName(), attr); } } } } } }
// Dependency-annotated copy of readAttributesFromDBaseFile above; code
// identical with per-statement control-dependency notes (not compilable as a
// single line).
public class class_name { private void readAttributesFromDBaseFile(E created_element) throws IOException { // Read the DBF entry if (this.dbfReader != null) { final List<DBaseFileField> dbfColumns = this.dbfReader.getDBFFields(); // Read the record even if the shape element was not inserted into // the database. It is necessary to not have inconsistancy between // the shape entries and the dbase entries. final DBaseFileRecord record = this.dbfReader.readNextDBFRecord(); if (record != null) { // Add the dBase values for (final DBaseFileField dbfColumn : dbfColumns) { // Test if the column was marked as selected. // A column was selected if the user want to import the column // values into the database. if (this.dbfReader.isColumnSelectable(dbfColumn)) { final Object fieldValue = record.getFieldValue(dbfColumn.getColumnIndex()); final AttributeValueImpl attr = new AttributeValueImpl(); attr.castAndSet(dbfColumn.getAttributeType(), fieldValue); // depends on control dependency: [if], data = [none] putAttributeIn(created_element, dbfColumn.getName(), attr); // depends on control dependency: [if], data = [none] } } } } } }
public class class_name {
    /**
     * Converts a byte-range value into a BitSet holding the value's
     * two's-complement bit pattern (bit 0 = least significant bit).
     *
     * <p>Bug fix: the previous implementation shifted the value arithmetically
     * ({@code (byte) (value >> 1)}), so any negative input never reached zero
     * (the sign bit is preserved) and the loop spun forever. The value is now
     * masked to its low 8 bits and shifted unsigned, which terminates for the
     * whole accepted range while leaving results for non-negative inputs
     * unchanged.
     *
     * @param value the value to convert; must fit in a signed byte
     * @return a BitSet with the bits of {@code value} set
     * @throws IllegalArgumentException if the value is outside byte range
     */
    public static BitSet toBitSet(int value) {
        if (value > Byte.MAX_VALUE || value < Byte.MIN_VALUE) {
            throw new IllegalArgumentException(
                "The value " + value + " is out of byte range, should be limited between ["
                    + Byte.MIN_VALUE + "] to [" + Byte.MAX_VALUE + "]");
        }
        BitSet bs = new BitSet();
        // Two's-complement byte pattern; always non-negative, so the loop terminates.
        int bits = value & 0xFF;
        int index = 0;
        while (bits != 0) {
            if ((bits & 1) != 0) {
                bs.set(index);
            }
            ++index;
            bits >>>= 1; // logical shift: no sign extension
        }
        return bs;
    }
}
// Dependency-annotated copy of toBitSet above; code identical with
// per-statement control/data dependency notes. NOTE(review): for negative
// inputs (allowed by the range check) (byte)(value >> 1) preserves the sign
// bit, so value never reaches 0 and the while loop never terminates.
public class class_name { public static BitSet toBitSet(int value) { if (value > Byte.MAX_VALUE || value < Byte.MIN_VALUE) { throw new IllegalArgumentException( "The value " + value + " is out of byte range, should be limited between [" + Byte.MIN_VALUE + "] to [" + Byte.MAX_VALUE + "]"); } BitSet bs = new BitSet(); int index = 0; while (value != 0) { if (value % 2 != 0) { bs.set(index); // depends on control dependency: [if], data = [none] } ++index; // depends on control dependency: [while], data = [none] value = (byte) (value >> 1); // depends on control dependency: [while], data = [(value] } return bs; } }
// Collects encoded atom-pair path descriptors for every included atom pair
// whose shortest-path distance is within MAX_DISTANCE; each pair is encoded in
// both directions, with extra halogen-specific encodings when either end is a
// halogen.
public class class_name { private void calculate(List<String> paths, IAtomContainer mol) { AllPairsShortestPaths apsp = new AllPairsShortestPaths(mol); int numAtoms = mol.getAtomCount(); for (int i = 0; i < numAtoms; i++) { if (!include(mol.getAtom(i))) continue; for (int j = i + 1; j < numAtoms; j++) { if (!include(mol.getAtom(j))) continue; final int dist = apsp.from(i).distanceTo(j); if (dist > MAX_DISTANCE) continue; final IAtom beg = mol.getAtom(i); final IAtom end = mol.getAtom(j); paths.add(encodePath(dist, beg, end)); paths.add(encodePath(dist, end, beg)); if (isHalogen(mol.getAtom(i)) || isHalogen(mol.getAtom(j))) { paths.add(encodeHalPath(dist, beg, end)); paths.add(encodeHalPath(dist, end, beg)); } } } } }
// Dependency-annotated copy of calculate above; code identical with
// per-statement control-dependency notes (not compilable as a single line).
public class class_name { private void calculate(List<String> paths, IAtomContainer mol) { AllPairsShortestPaths apsp = new AllPairsShortestPaths(mol); int numAtoms = mol.getAtomCount(); for (int i = 0; i < numAtoms; i++) { if (!include(mol.getAtom(i))) continue; for (int j = i + 1; j < numAtoms; j++) { if (!include(mol.getAtom(j))) continue; final int dist = apsp.from(i).distanceTo(j); if (dist > MAX_DISTANCE) continue; final IAtom beg = mol.getAtom(i); final IAtom end = mol.getAtom(j); paths.add(encodePath(dist, beg, end)); // depends on control dependency: [for], data = [none] paths.add(encodePath(dist, end, beg)); // depends on control dependency: [for], data = [none] if (isHalogen(mol.getAtom(i)) || isHalogen(mol.getAtom(j))) { paths.add(encodeHalPath(dist, beg, end)); // depends on control dependency: [if], data = [none] paths.add(encodeHalPath(dist, end, beg)); // depends on control dependency: [if], data = [none] } } } } }
// Creates a JCA managed connection by acquiring an OTM connection for the
// PBKey carried in the request info. NOTE(review): the rethrow passes only
// e.getMessage(), dropping the original ResourceException as cause — if
// OTMConnectionRuntimeException has a (String, Throwable) constructor, the
// cause should be preserved; confirm against its declaration.
public class class_name { public ManagedConnection createManagedConnection(Subject subject, ConnectionRequestInfo info) { Util.log("In OTMJCAManagedConnectionFactory.createManagedConnection"); try { Kit kit = getKit(); PBKey key = ((OTMConnectionRequestInfo) info).getPbKey(); OTMConnection connection = kit.acquireConnection(key); return new OTMJCAManagedConnection(this, connection, key); } catch (ResourceException e) { throw new OTMConnectionRuntimeException(e.getMessage()); } } }
// Dependency-annotated copy of createManagedConnection above; code identical
// with per-statement control-dependency notes.
public class class_name { public ManagedConnection createManagedConnection(Subject subject, ConnectionRequestInfo info) { Util.log("In OTMJCAManagedConnectionFactory.createManagedConnection"); try { Kit kit = getKit(); PBKey key = ((OTMConnectionRequestInfo) info).getPbKey(); OTMConnection connection = kit.acquireConnection(key); return new OTMJCAManagedConnection(this, connection, key); // depends on control dependency: [try], data = [none] } catch (ResourceException e) { throw new OTMConnectionRuntimeException(e.getMessage()); } // depends on control dependency: [catch], data = [none] } }
// Returns the Infinispan session cache, first (re)defining its configuration
// with UserSession registered as an indexed entity if the cache does not
// exist yet — the indexing entity must be added before first use.
public class class_name { private static Cache<String, UserSession> getCache() { // before the first use the Entity used for indexing must be added if (!((EmbeddedCacheManager) InfinispanCache.get().getContainer()).cacheExists(RegistryManager.SESSIONCACHE)) { final Configuration config = ((EmbeddedCacheManager) InfinispanCache.get().getContainer()) .getCacheConfiguration(RegistryManager.SESSIONCACHE); final ConfigurationBuilder bldr = new ConfigurationBuilder().read(config); bldr.indexing().addIndexedEntity(UserSession.class); ((EmbeddedCacheManager) InfinispanCache.get().getContainer()).undefineConfiguration( RegistryManager.SESSIONCACHE); ((EmbeddedCacheManager) InfinispanCache.get().getContainer()).defineConfiguration( RegistryManager.SESSIONCACHE, bldr.build()); } return InfinispanCache.get().getIgnReCache(RegistryManager.SESSIONCACHE); } }
// Dependency-annotated copy of getCache above; code identical with
// per-statement control-dependency notes (not compilable as a single line).
public class class_name { private static Cache<String, UserSession> getCache() { // before the first use the Entity used for indexing must be added if (!((EmbeddedCacheManager) InfinispanCache.get().getContainer()).cacheExists(RegistryManager.SESSIONCACHE)) { final Configuration config = ((EmbeddedCacheManager) InfinispanCache.get().getContainer()) .getCacheConfiguration(RegistryManager.SESSIONCACHE); final ConfigurationBuilder bldr = new ConfigurationBuilder().read(config); bldr.indexing().addIndexedEntity(UserSession.class); // depends on control dependency: [if], data = [none] ((EmbeddedCacheManager) InfinispanCache.get().getContainer()).undefineConfiguration( RegistryManager.SESSIONCACHE); // depends on control dependency: [if], data = [none] ((EmbeddedCacheManager) InfinispanCache.get().getContainer()).defineConfiguration( RegistryManager.SESSIONCACHE, bldr.build()); // depends on control dependency: [if], data = [none] } return InfinispanCache.get().getIgnReCache(RegistryManager.SESSIONCACHE); } }
public class class_name { public void setRepositoryNames(java.util.Collection<String> repositoryNames) { if (repositoryNames == null) { this.repositoryNames = null; return; } this.repositoryNames = new java.util.ArrayList<String>(repositoryNames); } }
// Dependency-annotated copy of setRepositoryNames above; code identical with
// per-statement control-dependency notes.
public class class_name { public void setRepositoryNames(java.util.Collection<String> repositoryNames) { if (repositoryNames == null) { this.repositoryNames = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.repositoryNames = new java.util.ArrayList<String>(repositoryNames); } }
public class class_name {
    /**
     * Reads an optional JSON array from the given object.
     *
     * @param json the JSON object to read from
     * @param key  the key of the optional array
     * @return the array, or null when the key is missing or not an array
     */
    private JSONArray readOptionalArray(JSONObject json, String key) {
        JSONArray result = null;
        try {
            result = json.getJSONArray(key);
        } catch (JSONException e) {
            LOG.debug("Reading optional JSON array failed. Default to provided default value.", e);
        }
        return result;
    }
}
// Dependency-annotated copy of readOptionalArray above; code identical with
// per-statement control-dependency notes.
public class class_name { private JSONArray readOptionalArray(JSONObject json, String key) { try { return json.getJSONArray(key); // depends on control dependency: [try], data = [none] } catch (JSONException e) { LOG.debug("Reading optional JSON array failed. Default to provided default value.", e); } // depends on control dependency: [catch], data = [none] return null; } }
// Builds the domain (factor labels) of the most frequent unique interaction
// factors from a frequency map: sorts the map, emits up to _max_factors labels
// meeting _min_occurrence, and when truncated appends an "other" bucket and
// shrinks _sortedMap to the kept entries.
public class class_name { public String[] makeDomain(Map<IcedLong, IcedLong> unsortedMap, String[] dA, String[] dB) { String[] _domain; // Log.info("Collected hash table"); // Log.info(java.util.Arrays.deepToString(unsortedMap.entrySet().toArray())); // Log.info("Interaction between " + dA.length + " and " + dB.length + " factor levels => " + // ((long)dA.length * dB.length) + " possible factors."); _sortedMap = mySort(unsortedMap); // create domain of the most frequent unique factors long factorCount = 0; // Log.info("Found " + _sortedMap.size() + " unique interaction factors (out of " + ((long)dA.length * (long)dB.length) + ")."); _domain = new String[_sortedMap.size()]; //TODO: use ArrayList here, then convert to array Iterator it2 = _sortedMap.entrySet().iterator(); int d = 0; while (it2.hasNext()) { Map.Entry kv = (Map.Entry)it2.next(); final long ab = (Long)kv.getKey(); final long count = (Long)kv.getValue(); if (factorCount < _ci._max_factors && count >= _ci._min_occurrence) { factorCount++; // extract the two original factor categoricals String feature = ""; if (dA != dB) { int a = (int)(ab >> 32); final String fA = a != _missing ? dA[a] : "NA"; feature = fA + "_"; } int b = (int) ab; String fB = b != _missing ? dB[b] : "NA"; feature += fB; // Log.info("Adding interaction feature " + feature + ", occurrence count: " + count); // Log.info("Total number of interaction factors so far: " + factorCount); _domain[d++] = feature; } else break; } if (d < _sortedMap.size()) { // Log.info("Truncated map to " + _sortedMap.size() + " elements."); String[] copy = new String[d+1]; System.arraycopy(_domain, 0, copy, 0, d); copy[d] = _other; _domain = copy; Map tm = new LinkedHashMap<>(); it2 = _sortedMap.entrySet().iterator(); while (--d >= 0) { Map.Entry kv = (Map.Entry) it2.next(); tm.put(kv.getKey(), kv.getValue()); } _sortedMap = tm; } // Log.info("Created domain: " + Arrays.deepToString(_domain)); return _domain; } }
// Dependency-annotated copy of makeDomain above; code identical with
// per-statement control/data dependency notes. NOTE(review): this row is
// split mid-expression across two physical lines — preserved verbatim, so it
// is not compilable as-is.
public class class_name { public String[] makeDomain(Map<IcedLong, IcedLong> unsortedMap, String[] dA, String[] dB) { String[] _domain; // Log.info("Collected hash table"); // Log.info(java.util.Arrays.deepToString(unsortedMap.entrySet().toArray())); // Log.info("Interaction between " + dA.length + " and " + dB.length + " factor levels => " + // ((long)dA.length * dB.length) + " possible factors."); _sortedMap = mySort(unsortedMap); // create domain of the most frequent unique factors long factorCount = 0; // Log.info("Found " + _sortedMap.size() + " unique interaction factors (out of " + ((long)dA.length * (long)dB.length) + ")."); _domain = new String[_sortedMap.size()]; //TODO: use ArrayList here, then convert to array Iterator it2 = _sortedMap.entrySet().iterator(); int d = 0; while (it2.hasNext()) { Map.Entry kv = (Map.Entry)it2.next(); final long ab = (Long)kv.getKey(); final long count = (Long)kv.getValue(); if (factorCount < _ci._max_factors && count >= _ci._min_occurrence) { factorCount++; // depends on control dependency: [if], data = [none] // extract the two original factor categoricals String feature = ""; if (dA != dB) { int a = (int)(ab >> 32); final String fA = a != _missing ? dA[a] : "NA"; feature = fA + "_"; // depends on control dependency: [if], data = [none] } int b = (int) ab; String fB = b != _missing ? 
dB[b] : "NA"; feature += fB; // depends on control dependency: [if], data = [none] // Log.info("Adding interaction feature " + feature + ", occurrence count: " + count); // Log.info("Total number of interaction factors so far: " + factorCount); _domain[d++] = feature; // depends on control dependency: [if], data = [none] } else break; } if (d < _sortedMap.size()) { // Log.info("Truncated map to " + _sortedMap.size() + " elements."); String[] copy = new String[d+1]; System.arraycopy(_domain, 0, copy, 0, d); // depends on control dependency: [if], data = [none] copy[d] = _other; // depends on control dependency: [if], data = [none] _domain = copy; // depends on control dependency: [if], data = [none] Map tm = new LinkedHashMap<>(); it2 = _sortedMap.entrySet().iterator(); // depends on control dependency: [if], data = [none] while (--d >= 0) { Map.Entry kv = (Map.Entry) it2.next(); tm.put(kv.getKey(), kv.getValue()); // depends on control dependency: [while], data = [none] } _sortedMap = tm; // depends on control dependency: [if], data = [none] } // Log.info("Created domain: " + Arrays.deepToString(_domain)); return _domain; } }
public class class_name {
    /**
     * Configures the set of invalid values for this validator. When matching
     * is case-insensitive, a lower-cased copy of the values is stored instead
     * of the caller's set.
     * NOTE(review): toLowerCase() uses the default locale — presumably the
     * comparison side does the same; confirm if locale-sensitive input is possible.
     *
     * @param invalidValues            the values considered invalid
     * @param isCaseSensitive          whether matching is case sensitive
     * @param invalidValueErrorMessage the error message for invalid values
     */
    public void setInvalidValues(final Set<String> invalidValues, final boolean isCaseSensitive, final String invalidValueErrorMessage) {
        if (!isCaseSensitive) {
            final Set<String> lowered = new HashSet<String>();
            for (final String entry : invalidValues) {
                lowered.add(entry.toLowerCase());
            }
            this.invalidValues = lowered;
        } else {
            this.invalidValues = invalidValues;
        }
        this.isCaseSensitive = isCaseSensitive;
        this.invalidValueErrorMessage = invalidValueErrorMessage;
    }
}
// Dependency-annotated copy of setInvalidValues above; code identical with
// per-statement control/data dependency notes.
public class class_name { public void setInvalidValues(final Set<String> invalidValues, final boolean isCaseSensitive, final String invalidValueErrorMessage) { if (isCaseSensitive) { this.invalidValues = invalidValues; // depends on control dependency: [if], data = [none] } else { this.invalidValues = new HashSet<String>(); // depends on control dependency: [if], data = [none] for (String value : invalidValues) { this.invalidValues.add(value.toLowerCase()); // depends on control dependency: [for], data = [value] } } this.isCaseSensitive = isCaseSensitive; this.invalidValueErrorMessage = invalidValueErrorMessage; } }
public class class_name {
    /**
     * Builds the REST view of all JCR repositories known to the repository
     * manager.
     *
     * @param request the current request, forwarded when describing each repository
     * @return the populated repositories container
     */
    @Override
    public RestRepositories getRepositories(Request request) {
        final RestRepositories result = new RestRepositories();
        for (final String name : getRepositoryManager().getJcrRepositoryNames()) {
            addRepository(request, result, name);
        }
        return result;
    }
}
// Dependency-annotated copy of getRepositories above; code identical with a
// per-statement control/data dependency note.
public class class_name { @Override public RestRepositories getRepositories(Request request) { RestRepositories repositories = new RestRepositories(); for (String repositoryName : getRepositoryManager().getJcrRepositoryNames()) { addRepository(request, repositories, repositoryName); // depends on control dependency: [for], data = [repositoryName] } return repositories; } }
public class class_name {
    /**
     * Freezes the proxy target source once the owning application context has
     * finished refreshing, so the configuration can no longer change.
     *
     * @param event the context-refreshed event being observed
     */
    public void onApplicationEvent(ContextRefreshedEvent event) {
        final boolean ownContext = event.getApplicationContext().equals(context);
        if (ownContext && targetSource != null) {
            targetSource.freeze();
        }
    }
}
// Dependency-annotated copy of onApplicationEvent above; code identical with a
// per-statement control-dependency note.
public class class_name { public void onApplicationEvent(ContextRefreshedEvent event) { if (event.getApplicationContext().equals(context) && targetSource != null) { targetSource.freeze(); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Sets whether the server subject should be used, tracing the new value
     * first when debug trace is enabled.
     *
     * @param useServerSubject the new value to store
     */
    public void setUseServerSubject(Boolean useServerSubject) {
        final boolean debugEnabled = TraceComponent.isAnyTracingEnabled() && TRACE.isDebugEnabled();
        if (debugEnabled) {
            SibTr.debug(this, TRACE, "setUseServerSubject", useServerSubject);
        }
        _useServerSubject = useServerSubject;
    }
}
// Dependency-annotated copy of setUseServerSubject above; code identical with
// a per-statement control-dependency note.
public class class_name { public void setUseServerSubject(Boolean useServerSubject) { if (TraceComponent.isAnyTracingEnabled() && TRACE.isDebugEnabled()) { SibTr.debug(this, TRACE, "setUseServerSubject", useServerSubject); // depends on control dependency: [if], data = [none] } _useServerSubject = useServerSubject; } }
public class class_name {
    /**
     * Returns the records of this collection as an Object array, in the
     * iteration order of {@code values()}.
     *
     * @return a freshly allocated array of the records
     */
    @Override
    public Object[] toArray() {
        final Object[] snapshot = new Object[size()];
        int slot = 0;
        for (final Record record : values()) {
            snapshot[slot] = record;
            slot++;
        }
        return snapshot;
    }
}
// Dependency-annotated copy of toArray above; code identical with a
// per-statement control/data dependency note.
public class class_name { @Override public Object[] toArray() { Object[] array = new Object[size()]; int i = 0; for(Record r : values()) { array[i++] = r; // depends on control dependency: [for], data = [r] } return array; } }
// Collects the methods to proxy by walking the class hierarchy first (so
// concrete/final methods are seen before abstract interface methods), then the
// implemented interfaces, then any extra interfaces; the traversal order is
// significant and must be preserved.
public class class_name { private Method[] getMethodsToProxyRecursive() { Set<MethodSetEntry> methodsToProxy = new HashSet<>(); Set<MethodSetEntry> seenFinalMethods = new HashSet<>(); // Traverse the class hierarchy to ensure that all concrete methods (which could be marked // as final) are visited before any abstract methods from interfaces. for (Class<?> c = baseClass; c != null; c = c.getSuperclass()) { getMethodsToProxy(methodsToProxy, seenFinalMethods, c); } // Now traverse the interface hierarchy, starting with the ones implemented by the class, // followed by any extra interfaces. for (Class<?> c = baseClass; c != null; c = c.getSuperclass()) { for (Class<?> i : c.getInterfaces()) { getMethodsToProxy(methodsToProxy, seenFinalMethods, i); } } for (Class<?> c : interfaces) { getMethodsToProxy(methodsToProxy, seenFinalMethods, c); } Method[] results = new Method[methodsToProxy.size()]; int i = 0; for (MethodSetEntry entry : methodsToProxy) { results[i++] = entry.originalMethod; } return results; } }
// Dependency-annotated copy of getMethodsToProxyRecursive above; code
// identical with per-statement control/data dependency notes (not compilable
// as a single line).
public class class_name { private Method[] getMethodsToProxyRecursive() { Set<MethodSetEntry> methodsToProxy = new HashSet<>(); Set<MethodSetEntry> seenFinalMethods = new HashSet<>(); // Traverse the class hierarchy to ensure that all concrete methods (which could be marked // as final) are visited before any abstract methods from interfaces. for (Class<?> c = baseClass; c != null; c = c.getSuperclass()) { getMethodsToProxy(methodsToProxy, seenFinalMethods, c); // depends on control dependency: [for], data = [c] } // Now traverse the interface hierarchy, starting with the ones implemented by the class, // followed by any extra interfaces. for (Class<?> c = baseClass; c != null; c = c.getSuperclass()) { for (Class<?> i : c.getInterfaces()) { getMethodsToProxy(methodsToProxy, seenFinalMethods, i); // depends on control dependency: [for], data = [i] } } for (Class<?> c : interfaces) { getMethodsToProxy(methodsToProxy, seenFinalMethods, c); // depends on control dependency: [for], data = [c] } Method[] results = new Method[methodsToProxy.size()]; int i = 0; for (MethodSetEntry entry : methodsToProxy) { results[i++] = entry.originalMethod; // depends on control dependency: [for], data = [entry] } return results; } }
public class class_name {
    /**
     * Creates the AIService implementation matching the configured recognition
     * engine. Google and System both map to the Google recognition service;
     * Speaktoit maps to its own service.
     *
     * @param context the Android context for the service
     * @param config  the AI configuration holding the engine choice
     * @return the recognition service for the configured engine
     * @throws UnsupportedOperationException for engines without an implementation
     */
    public static AIService getService(final Context context, final AIConfiguration config) {
        final AIConfiguration.RecognitionEngine engine = config.getRecognitionEngine();
        if (engine == AIConfiguration.RecognitionEngine.Google
                || engine == AIConfiguration.RecognitionEngine.System) {
            return new GoogleRecognitionServiceImpl(context, config);
        }
        if (engine == AIConfiguration.RecognitionEngine.Speaktoit) {
            return new SpeaktoitRecognitionServiceImpl(context, config);
        }
        throw new UnsupportedOperationException("This engine still not supported");
    }
}
// Dependency-annotated copy of getService above; code identical with
// per-statement control-dependency notes (not compilable as a single line).
public class class_name { public static AIService getService(final Context context, final AIConfiguration config) { if (config.getRecognitionEngine() == AIConfiguration.RecognitionEngine.Google) { return new GoogleRecognitionServiceImpl(context, config); // depends on control dependency: [if], data = [none] } if (config.getRecognitionEngine() == AIConfiguration.RecognitionEngine.System) { return new GoogleRecognitionServiceImpl(context, config); // depends on control dependency: [if], data = [none] } else if (config.getRecognitionEngine() == AIConfiguration.RecognitionEngine.Speaktoit) { return new SpeaktoitRecognitionServiceImpl(context, config); // depends on control dependency: [if], data = [none] } else { throw new UnsupportedOperationException("This engine still not supported"); } } }
// Identity-checks (==) an object against well-known shared JDK singletons
// (null, booleans, Locale constants, empty collections, BigInteger ZERO/ONE,
// System streams, shared comparators/constants) so size accounting can skip
// flyweights. Identity comparison — not equals() — is intentional here.
public class class_name { private static boolean isSharedFlyweight(Object obj) { if (obj == null) { return true; } if (obj == Boolean.TRUE || obj == Boolean.FALSE) { return true; } if (/* obj == Locale.ROOT || *//* Java 6 */ obj == Locale.ENGLISH || obj == Locale.FRENCH || obj == Locale.GERMAN || obj == Locale.ITALIAN || obj == Locale.JAPANESE || obj == Locale.KOREAN || obj == Locale.CHINESE || obj == Locale.SIMPLIFIED_CHINESE || obj == Locale.TRADITIONAL_CHINESE || obj == Locale.FRANCE || obj == Locale.GERMANY || obj == Locale.ITALY || obj == Locale.JAPAN || obj == Locale.KOREA || obj == Locale.CHINA || obj == Locale.PRC || obj == Locale.TAIWAN || obj == Locale.UK || obj == Locale.US || obj == Locale.CANADA || obj == Locale.CANADA_FRENCH) { return true; } if (obj == Collections.EMPTY_SET || obj == Collections.EMPTY_LIST || obj == Collections.EMPTY_MAP) { return true; } if (obj == BigInteger.ZERO || obj == BigInteger.ONE) { return true; } if (obj == System.in || obj == System.out || obj == System.err) { return true; } if (obj == String.CASE_INSENSITIVE_ORDER) { return true; } if (obj == JarFile.MANIFEST_NAME) { return true; } return false; } }
// Dependency-annotated copy of isSharedFlyweight above; code identical with
// per-statement control-dependency notes.
public class class_name { private static boolean isSharedFlyweight(Object obj) { if (obj == null) { return true; // depends on control dependency: [if], data = [none] } if (obj == Boolean.TRUE || obj == Boolean.FALSE) { return true; // depends on control dependency: [if], data = [none] } if (/* obj == Locale.ROOT || *//* Java 6 */ obj == Locale.ENGLISH || obj == Locale.FRENCH || obj == Locale.GERMAN || obj == Locale.ITALIAN || obj == Locale.JAPANESE || obj == Locale.KOREAN || obj == Locale.CHINESE || obj == Locale.SIMPLIFIED_CHINESE || obj == Locale.TRADITIONAL_CHINESE || obj == Locale.FRANCE || obj == Locale.GERMANY || obj == Locale.ITALY || obj == Locale.JAPAN || obj == Locale.KOREA || obj == Locale.CHINA || obj == Locale.PRC || obj == Locale.TAIWAN || obj == Locale.UK || obj == Locale.US || obj == Locale.CANADA || obj == Locale.CANADA_FRENCH) { return true; // depends on control dependency: [if], data = [none] } if (obj == Collections.EMPTY_SET || obj == Collections.EMPTY_LIST || obj == Collections.EMPTY_MAP) { return true; // depends on control dependency: [if], data = [none] } if (obj == BigInteger.ZERO || obj == BigInteger.ONE) { return true; // depends on control dependency: [if], data = [none] } if (obj == System.in || obj == System.out || obj == System.err) { return true; // depends on control dependency: [if], data = [none] } if (obj == String.CASE_INSENSITIVE_ORDER) { return true; // depends on control dependency: [if], data = [none] } if (obj == JarFile.MANIFEST_NAME) { return true; // depends on control dependency: [if], data = [none] } return false; } }
public class class_name { public static ExecutionPath fromList(List<?> objects) { assertNotNull(objects); ExecutionPath path = ExecutionPath.rootPath(); for (Object object : objects) { if (object instanceof Number) { path = path.segment(((Number) object).intValue()); } else { path = path.segment(String.valueOf(object)); } } return path; } }
public class class_name { public static ExecutionPath fromList(List<?> objects) { assertNotNull(objects); ExecutionPath path = ExecutionPath.rootPath(); for (Object object : objects) { if (object instanceof Number) { path = path.segment(((Number) object).intValue()); // depends on control dependency: [if], data = [none] } else { path = path.segment(String.valueOf(object)); // depends on control dependency: [if], data = [none] } } return path; } }
public class class_name { protected void updateLocaleNodeSorting() { // check if the locale nodes require sorting List<Locale> locales = new ArrayList<Locale>(m_locales); Collections.sort(locales, new Comparator<Locale>() { public int compare(Locale o1, Locale o2) { return o1.toString().compareTo(o2.toString()); } }); List<Element> localeNodes = new ArrayList<Element>(m_document.getRootElement().elements()); boolean sortRequired = false; if (localeNodes.size() != locales.size()) { sortRequired = true; } else { int i = 0; for (Element el : localeNodes) { if (!locales.get(i).toString().equals(el.attributeValue("language"))) { sortRequired = true; break; } i++; } } if (sortRequired) { // do the actual node sorting, by removing the nodes first for (Element el : localeNodes) { m_document.getRootElement().remove(el); } Collections.sort(localeNodes, new Comparator<Object>() { public int compare(Object o1, Object o2) { String locale1 = ((Element)o1).attributeValue("language"); String locale2 = ((Element)o2).attributeValue("language"); return locale1.compareTo(locale2); } }); // re-adding the nodes in alphabetical order for (Element el : localeNodes) { m_document.getRootElement().add(el); } } } }
public class class_name { protected void updateLocaleNodeSorting() { // check if the locale nodes require sorting List<Locale> locales = new ArrayList<Locale>(m_locales); Collections.sort(locales, new Comparator<Locale>() { public int compare(Locale o1, Locale o2) { return o1.toString().compareTo(o2.toString()); } }); List<Element> localeNodes = new ArrayList<Element>(m_document.getRootElement().elements()); boolean sortRequired = false; if (localeNodes.size() != locales.size()) { sortRequired = true; // depends on control dependency: [if], data = [none] } else { int i = 0; for (Element el : localeNodes) { if (!locales.get(i).toString().equals(el.attributeValue("language"))) { sortRequired = true; // depends on control dependency: [if], data = [none] break; } i++; // depends on control dependency: [for], data = [none] } } if (sortRequired) { // do the actual node sorting, by removing the nodes first for (Element el : localeNodes) { m_document.getRootElement().remove(el); // depends on control dependency: [for], data = [el] } Collections.sort(localeNodes, new Comparator<Object>() { public int compare(Object o1, Object o2) { String locale1 = ((Element)o1).attributeValue("language"); String locale2 = ((Element)o2).attributeValue("language"); return locale1.compareTo(locale2); } }); // depends on control dependency: [if], data = [none] // re-adding the nodes in alphabetical order for (Element el : localeNodes) { m_document.getRootElement().add(el); // depends on control dependency: [for], data = [el] } } } }
public class class_name { public ParseResult parseSources(String sources[]) throws IOException { final CourierParseResult result = new CourierParseResult(); try { for (String source : sources) { final File sourceFile = new File(source); if (sourceFile.exists()) { if (sourceFile.isDirectory()) { final List<File> sourceFilesInDirectory = FileUtil.listFiles(sourceFile, _fileExtensionFilter); for (File f : sourceFilesInDirectory) { parseFile(f, result); } } else { parseFile(sourceFile, result); } } else { final StringBuilder errorMessage = new StringBuilder(); final DataSchema schema = _schemaResolver.findDataSchema(source, errorMessage); if (schema == null) { result.addMessage("File cannot be opened or schema name cannot be resolved: ").addMessage(source).addMessage("\n"); } if (errorMessage.length() > 0) { result.addMessage(errorMessage.toString()); } } } for (Map.Entry<String, DataSchemaLocation> entry : _schemaResolver.nameToDataSchemaLocations().entrySet()) { final DataSchema schema = _schemaResolver.bindings().get(entry.getKey()); result.getSchemaAndLocations().put(schema, entry.getValue()); } if (result.getMessage().length() > 0) { throw new IOException(result.getMessage()); } appendSourceFilesFromSchemaResolver(result); return result; } catch (RuntimeException e) { if (result.getMessage().length() > 0) { e = new RuntimeException("Unexpected " + e.getClass().getSimpleName() + " encountered.\n" + "This may be caused by the following parsing or processing errors:\n" + result.getMessage(), e); } throw e; } } }
public class class_name { public ParseResult parseSources(String sources[]) throws IOException { final CourierParseResult result = new CourierParseResult(); try { for (String source : sources) { final File sourceFile = new File(source); if (sourceFile.exists()) { if (sourceFile.isDirectory()) { final List<File> sourceFilesInDirectory = FileUtil.listFiles(sourceFile, _fileExtensionFilter); for (File f : sourceFilesInDirectory) { parseFile(f, result); // depends on control dependency: [for], data = [f] } } else { parseFile(sourceFile, result); // depends on control dependency: [if], data = [none] } } else { final StringBuilder errorMessage = new StringBuilder(); final DataSchema schema = _schemaResolver.findDataSchema(source, errorMessage); if (schema == null) { result.addMessage("File cannot be opened or schema name cannot be resolved: ").addMessage(source).addMessage("\n"); // depends on control dependency: [if], data = [none] } if (errorMessage.length() > 0) { result.addMessage(errorMessage.toString()); // depends on control dependency: [if], data = [none] } } } for (Map.Entry<String, DataSchemaLocation> entry : _schemaResolver.nameToDataSchemaLocations().entrySet()) { final DataSchema schema = _schemaResolver.bindings().get(entry.getKey()); result.getSchemaAndLocations().put(schema, entry.getValue()); // depends on control dependency: [for], data = [entry] } if (result.getMessage().length() > 0) { throw new IOException(result.getMessage()); } appendSourceFilesFromSchemaResolver(result); return result; } catch (RuntimeException e) { if (result.getMessage().length() > 0) { e = new RuntimeException("Unexpected " + e.getClass().getSimpleName() + " encountered.\n" + "This may be caused by the following parsing or processing errors:\n" + result.getMessage(), e); // depends on control dependency: [if], data = [none] } throw e; } } }
public class class_name { public void updateRepeatNumber(int overrideId, int pathId, Integer ordinal, Integer repeatNumber, String clientUUID) { if (ordinal == null) { ordinal = 1; } try { // get ID of the ordinal int enabledId = getEnabledEndpoint(pathId, overrideId, ordinal, clientUUID).getId(); updateRepeatNumber(enabledId, repeatNumber); } catch (Exception e) { e.printStackTrace(); } } }
public class class_name { public void updateRepeatNumber(int overrideId, int pathId, Integer ordinal, Integer repeatNumber, String clientUUID) { if (ordinal == null) { ordinal = 1; // depends on control dependency: [if], data = [none] } try { // get ID of the ordinal int enabledId = getEnabledEndpoint(pathId, overrideId, ordinal, clientUUID).getId(); updateRepeatNumber(enabledId, repeatNumber); // depends on control dependency: [try], data = [none] } catch (Exception e) { e.printStackTrace(); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public <D> D oneToManyWithoutControl(Class<D> destinationClass, final T source) { try{ return this.<D,T>getJMapper(relationalOneToManyMapper,destinationClass).getDestinationWithoutControl(source); } catch (Exception e) { return (D) this.destinationClassControl(e,destinationClass); } } }
public class class_name { public <D> D oneToManyWithoutControl(Class<D> destinationClass, final T source) { try{ return this.<D,T>getJMapper(relationalOneToManyMapper,destinationClass).getDestinationWithoutControl(source); } // depends on control dependency: [try], data = [none] catch (Exception e) { return (D) this.destinationClassControl(e,destinationClass); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static void main(String[] args) { try { Main main = new Main(); main.start(); Runtime.getRuntime().addShutdownHook(main.getShutdownHook()); main.awaitTermination(1, TimeUnit.DAYS); } catch (InterruptedException e) { e = null; Thread.currentThread().interrupt(); } } }
public class class_name { public static void main(String[] args) { try { Main main = new Main(); main.start(); // depends on control dependency: [try], data = [none] Runtime.getRuntime().addShutdownHook(main.getShutdownHook()); // depends on control dependency: [try], data = [none] main.awaitTermination(1, TimeUnit.DAYS); // depends on control dependency: [try], data = [none] } catch (InterruptedException e) { e = null; Thread.currentThread().interrupt(); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static String getLocalFileName(String recordName){ if ( protectedIDs.contains(recordName)){ recordName = "_" + recordName; } File f = new File(getPath(), CHEM_COMP_CACHE_DIRECTORY); if (! f.exists()){ logger.info("Creating directory " + f); boolean success = f.mkdir(); // we've checked in initPath that path is writable, so there's no need to check if it succeeds // in the unlikely case that in the meantime it isn't writable at least we log an error if (!success) logger.error("Directory {} could not be created",f); } File theFile = new File(f,recordName + ".cif.gz"); return theFile.toString(); } }
public class class_name { public static String getLocalFileName(String recordName){ if ( protectedIDs.contains(recordName)){ recordName = "_" + recordName; // depends on control dependency: [if], data = [none] } File f = new File(getPath(), CHEM_COMP_CACHE_DIRECTORY); if (! f.exists()){ logger.info("Creating directory " + f); // depends on control dependency: [if], data = [none] boolean success = f.mkdir(); // we've checked in initPath that path is writable, so there's no need to check if it succeeds // in the unlikely case that in the meantime it isn't writable at least we log an error if (!success) logger.error("Directory {} could not be created",f); } File theFile = new File(f,recordName + ".cif.gz"); return theFile.toString(); } }
public class class_name { public static Map<String, String> toMap(List<CmsProperty> list) { if ((list == null) || (list.size() == 0)) { return Collections.emptyMap(); } String name = null; String value = null; CmsProperty property = null; Map<String, String> result = new HashMap<String, String>(list.size()); // choose the fastest method to traverse the list if (list instanceof RandomAccess) { for (int i = 0, n = list.size(); i < n; i++) { property = list.get(i); name = property.m_name; value = property.getValue(); result.put(name, value); } } else { Iterator<CmsProperty> i = list.iterator(); while (i.hasNext()) { property = i.next(); name = property.m_name; value = property.getValue(); result.put(name, value); } } return result; } }
public class class_name { public static Map<String, String> toMap(List<CmsProperty> list) { if ((list == null) || (list.size() == 0)) { return Collections.emptyMap(); // depends on control dependency: [if], data = [none] } String name = null; String value = null; CmsProperty property = null; Map<String, String> result = new HashMap<String, String>(list.size()); // choose the fastest method to traverse the list if (list instanceof RandomAccess) { for (int i = 0, n = list.size(); i < n; i++) { property = list.get(i); // depends on control dependency: [for], data = [i] name = property.m_name; // depends on control dependency: [for], data = [none] value = property.getValue(); // depends on control dependency: [for], data = [none] result.put(name, value); // depends on control dependency: [for], data = [none] } } else { Iterator<CmsProperty> i = list.iterator(); while (i.hasNext()) { property = i.next(); // depends on control dependency: [while], data = [none] name = property.m_name; // depends on control dependency: [while], data = [none] value = property.getValue(); // depends on control dependency: [while], data = [none] result.put(name, value); // depends on control dependency: [while], data = [none] } } return result; } }
public class class_name { Node createThisAliasReferenceForFunction(String aliasName, Node functionNode) { final Node result = IR.name(aliasName); if (isAddingTypes()) { result.setJSType(getTypeOfThisForFunctionNode(functionNode)); } return result; } }
public class class_name { Node createThisAliasReferenceForFunction(String aliasName, Node functionNode) { final Node result = IR.name(aliasName); if (isAddingTypes()) { result.setJSType(getTypeOfThisForFunctionNode(functionNode)); // depends on control dependency: [if], data = [none] } return result; } }
public class class_name { public static void commitTransaction() { Connection connection = tl_conn.get(); if (connection == null) { throw new RuntimeException("You do not start a Transaction so you can not commit a transaction!"); } try { connection.commit(); connection.close(); tl_conn.remove(); tl_sp.remove(); } catch (SQLException e) { JDBCUtils.rollbackTransaction(); throw new RuntimeException(e); } } }
public class class_name { public static void commitTransaction() { Connection connection = tl_conn.get(); if (connection == null) { throw new RuntimeException("You do not start a Transaction so you can not commit a transaction!"); } try { connection.commit(); // depends on control dependency: [try], data = [none] connection.close(); // depends on control dependency: [try], data = [none] tl_conn.remove(); // depends on control dependency: [try], data = [none] tl_sp.remove(); // depends on control dependency: [try], data = [none] } catch (SQLException e) { JDBCUtils.rollbackTransaction(); throw new RuntimeException(e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void marshall(VpcConfig vpcConfig, ProtocolMarshaller protocolMarshaller) { if (vpcConfig == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(vpcConfig.getSubnetIds(), SUBNETIDS_BINDING); protocolMarshaller.marshall(vpcConfig.getSecurityGroupIds(), SECURITYGROUPIDS_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(VpcConfig vpcConfig, ProtocolMarshaller protocolMarshaller) { if (vpcConfig == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(vpcConfig.getSubnetIds(), SUBNETIDS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(vpcConfig.getSecurityGroupIds(), SECURITYGROUPIDS_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void write(Collection<?> objects, boolean allOrNothing, OutputStream out) { try { JsonGenerator jg = objectMapper.getFactory().createGenerator(out, JsonEncoding.UTF8); jg.writeStartObject(); if (allOrNothing) { jg.writeBooleanField("all_or_nothing", true); } jg.writeArrayFieldStart("docs"); for (Object o : objects) { jg.writeObject(o); } jg.writeEndArray(); jg.writeEndObject(); jg.flush(); jg.close(); } catch (Exception e) { throw Exceptions.propagate(e); } finally { IOUtils.closeQuietly(out); } } }
public class class_name { public void write(Collection<?> objects, boolean allOrNothing, OutputStream out) { try { JsonGenerator jg = objectMapper.getFactory().createGenerator(out, JsonEncoding.UTF8); jg.writeStartObject(); // depends on control dependency: [try], data = [none] if (allOrNothing) { jg.writeBooleanField("all_or_nothing", true); // depends on control dependency: [if], data = [none] } jg.writeArrayFieldStart("docs"); // depends on control dependency: [try], data = [none] for (Object o : objects) { jg.writeObject(o); // depends on control dependency: [for], data = [o] } jg.writeEndArray(); // depends on control dependency: [try], data = [none] jg.writeEndObject(); // depends on control dependency: [try], data = [none] jg.flush(); // depends on control dependency: [try], data = [none] jg.close(); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw Exceptions.propagate(e); } finally { // depends on control dependency: [catch], data = [none] IOUtils.closeQuietly(out); } } }
public class class_name { @Nonnull private DAType findFixedResolution(Element element) { Name qualifiedName = element.accept(QualifiedNameExtractor.QUALIFIED_NAME_EXTRACTOR, null); // qualified reference to Type in code if (qualifiedName != null && !qualifiedName.contentEquals(element.getSimpleName())) { return ensureNonnull(scanResult.findFixedByQualifiedName(qualifiedName.toString()), element); } Optional<String> importQualifiedName = scanResult.getImports().findBySimpleName(element.getSimpleName()); if (!importQualifiedName.isPresent()) { throw new IllegalStateException("Type for Element " + element + " is neither imported nor explicitly qualified"); } return ensureNonnull(scanResult.findFixedByQualifiedName(importQualifiedName.get()), element); } }
public class class_name { @Nonnull private DAType findFixedResolution(Element element) { Name qualifiedName = element.accept(QualifiedNameExtractor.QUALIFIED_NAME_EXTRACTOR, null); // qualified reference to Type in code if (qualifiedName != null && !qualifiedName.contentEquals(element.getSimpleName())) { return ensureNonnull(scanResult.findFixedByQualifiedName(qualifiedName.toString()), element); // depends on control dependency: [if], data = [(qualifiedName] } Optional<String> importQualifiedName = scanResult.getImports().findBySimpleName(element.getSimpleName()); if (!importQualifiedName.isPresent()) { throw new IllegalStateException("Type for Element " + element + " is neither imported nor explicitly qualified"); } return ensureNonnull(scanResult.findFixedByQualifiedName(importQualifiedName.get()), element); } }
public class class_name { @Override public ISource locate(String path) { // need some path to work with if(path == null || path.isEmpty()) { return new UnfoundSource(path); } // get a local version of the path for comparing String localPath = path.toLowerCase().trim(); // has resource marker if(localPath.startsWith("resource:")) { path = StringUtils.removeStart(path, MultiLocator.RESOURCE); return this.rLocator.locate(path); } // otherwise use file system lookup return this.fLocator.locate(path); } }
public class class_name { @Override public ISource locate(String path) { // need some path to work with if(path == null || path.isEmpty()) { return new UnfoundSource(path); // depends on control dependency: [if], data = [(path] } // get a local version of the path for comparing String localPath = path.toLowerCase().trim(); // has resource marker if(localPath.startsWith("resource:")) { path = StringUtils.removeStart(path, MultiLocator.RESOURCE); // depends on control dependency: [if], data = [none] return this.rLocator.locate(path); // depends on control dependency: [if], data = [none] } // otherwise use file system lookup return this.fLocator.locate(path); } }
public class class_name { public java.util.List<DBClusterSnapshotAttribute> getDBClusterSnapshotAttributes() { if (dBClusterSnapshotAttributes == null) { dBClusterSnapshotAttributes = new com.amazonaws.internal.SdkInternalList<DBClusterSnapshotAttribute>(); } return dBClusterSnapshotAttributes; } }
public class class_name { public java.util.List<DBClusterSnapshotAttribute> getDBClusterSnapshotAttributes() { if (dBClusterSnapshotAttributes == null) { dBClusterSnapshotAttributes = new com.amazonaws.internal.SdkInternalList<DBClusterSnapshotAttribute>(); // depends on control dependency: [if], data = [none] } return dBClusterSnapshotAttributes; } }
public class class_name { @Override public void endVisit(ParenthesizedExpression node) { if (getReplaceableValue(node.getExpression()) != null) { node.replaceWith(node.getExpression().copy()); } } }
public class class_name { @Override public void endVisit(ParenthesizedExpression node) { if (getReplaceableValue(node.getExpression()) != null) { node.replaceWith(node.getExpression().copy()); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static String fromClassPath() { Set<String> versions = new HashSet<>(); try { ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); Enumeration<URL> manifests = classLoader.getResources("META-INF/MANIFEST.MF"); while (manifests.hasMoreElements()) { URL manifestURL = manifests.nextElement(); try (InputStream is = manifestURL.openStream()) { Manifest manifest = new Manifest(); manifest.read(is); Attributes buildInfo = manifest.getAttributes("Build-Info"); if (buildInfo != null) { if (buildInfo.getValue("Selenium-Version") != null) { versions.add(buildInfo.getValue("Selenium-Version")); } else { // might be in build-info part if (manifest.getEntries() != null) { if (manifest.getEntries().containsKey("Build-Info")) { final Attributes attributes = manifest.getEntries().get("Build-Info"); if (attributes.getValue("Selenium-Version") != null) { versions.add(attributes.getValue("Selenium-Version")); } } } } } } } } catch (Exception e) { logger.log(Level.WARNING, "Exception {0} occurred while resolving selenium version and latest image is going to be used.", e.getMessage()); return SELENIUM_VERSION; } if (versions.isEmpty()) { logger.log(Level.INFO, "No version of Selenium found in classpath. Using latest image."); return SELENIUM_VERSION; } String foundVersion = versions.iterator().next(); if (versions.size() > 1) { logger.log(Level.WARNING, "Multiple versions of Selenium found in classpath. Using the first one found {0}.", foundVersion); } return foundVersion; } }
public class class_name { public static String fromClassPath() { Set<String> versions = new HashSet<>(); try { ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); Enumeration<URL> manifests = classLoader.getResources("META-INF/MANIFEST.MF"); while (manifests.hasMoreElements()) { URL manifestURL = manifests.nextElement(); try (InputStream is = manifestURL.openStream()) { Manifest manifest = new Manifest(); manifest.read(is); // depends on control dependency: [try], data = [none] Attributes buildInfo = manifest.getAttributes("Build-Info"); if (buildInfo != null) { if (buildInfo.getValue("Selenium-Version") != null) { versions.add(buildInfo.getValue("Selenium-Version")); // depends on control dependency: [if], data = [(buildInfo.getValue("Selenium-Version")] } else { // might be in build-info part if (manifest.getEntries() != null) { if (manifest.getEntries().containsKey("Build-Info")) { final Attributes attributes = manifest.getEntries().get("Build-Info"); if (attributes.getValue("Selenium-Version") != null) { versions.add(attributes.getValue("Selenium-Version")); // depends on control dependency: [if], data = [(attributes.getValue("Selenium-Version")] } } } } } } } } catch (Exception e) { logger.log(Level.WARNING, "Exception {0} occurred while resolving selenium version and latest image is going to be used.", e.getMessage()); return SELENIUM_VERSION; } // depends on control dependency: [catch], data = [none] if (versions.isEmpty()) { logger.log(Level.INFO, "No version of Selenium found in classpath. Using latest image."); // depends on control dependency: [if], data = [none] return SELENIUM_VERSION; // depends on control dependency: [if], data = [none] } String foundVersion = versions.iterator().next(); if (versions.size() > 1) { logger.log(Level.WARNING, "Multiple versions of Selenium found in classpath. Using the first one found {0}.", foundVersion); // depends on control dependency: [if], data = [none] } return foundVersion; } }
public class class_name { private HeaderField remove() { if (size == 0) { return null; } HeaderEntry eldest = head.after; int h = eldest.hash; int i = index(h); HeaderEntry prev = headerFields[i]; HeaderEntry e = prev; while (e != null) { HeaderEntry next = e.next; if (e == eldest) { if (prev == eldest) { headerFields[i] = next; } else { prev.next = next; } eldest.remove(); size -= eldest.size(); return eldest; } prev = e; e = next; } return null; } }
public class class_name { private HeaderField remove() { if (size == 0) { return null; // depends on control dependency: [if], data = [none] } HeaderEntry eldest = head.after; int h = eldest.hash; int i = index(h); HeaderEntry prev = headerFields[i]; HeaderEntry e = prev; while (e != null) { HeaderEntry next = e.next; if (e == eldest) { if (prev == eldest) { headerFields[i] = next; // depends on control dependency: [if], data = [none] } else { prev.next = next; // depends on control dependency: [if], data = [none] } eldest.remove(); // depends on control dependency: [if], data = [none] size -= eldest.size(); // depends on control dependency: [if], data = [none] return eldest; // depends on control dependency: [if], data = [none] } prev = e; // depends on control dependency: [while], data = [none] e = next; // depends on control dependency: [while], data = [none] } return null; } }
public class class_name { @Override public void copyFrom(Frame<IsNullValue> other) { super.copyFrom(other); decision = ((IsNullValueFrame) other).decision; if (trackValueNumbers) { knownValueMap = Util.makeSmallHashMap(((IsNullValueFrame) other).knownValueMap); } } }
public class class_name { @Override public void copyFrom(Frame<IsNullValue> other) { super.copyFrom(other); decision = ((IsNullValueFrame) other).decision; if (trackValueNumbers) { knownValueMap = Util.makeSmallHashMap(((IsNullValueFrame) other).knownValueMap); // depends on control dependency: [if], data = [none] } } }
public class class_name { public SendTemplatedEmailRequest withTags(MessageTag... tags) { if (this.tags == null) { setTags(new com.amazonaws.internal.SdkInternalList<MessageTag>(tags.length)); } for (MessageTag ele : tags) { this.tags.add(ele); } return this; } }
public class class_name { public SendTemplatedEmailRequest withTags(MessageTag... tags) { if (this.tags == null) { setTags(new com.amazonaws.internal.SdkInternalList<MessageTag>(tags.length)); // depends on control dependency: [if], data = [none] } for (MessageTag ele : tags) { this.tags.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { private static BigDecimal multiplyAndRound(long x, long y, int scale, MathContext mc) { long product = multiply(x, y); if(product!=INFLATED) { return doRound(product, scale, mc); } // attempt to do it in 128 bits int rsign = 1; if(x < 0) { x = -x; rsign = -1; } if(y < 0) { y = -y; rsign *= -1; } // multiply dividend0 * dividend1 long m0_hi = x >>> 32; long m0_lo = x & LONG_MASK; long m1_hi = y >>> 32; long m1_lo = y & LONG_MASK; product = m0_lo * m1_lo; long m0 = product & LONG_MASK; long m1 = product >>> 32; product = m0_hi * m1_lo + m1; m1 = product & LONG_MASK; long m2 = product >>> 32; product = m0_lo * m1_hi + m1; m1 = product & LONG_MASK; m2 += product >>> 32; long m3 = m2>>>32; m2 &= LONG_MASK; product = m0_hi*m1_hi + m2; m2 = product & LONG_MASK; m3 = ((product>>>32) + m3) & LONG_MASK; final long mHi = make64(m3,m2); final long mLo = make64(m1,m0); BigDecimal res = doRound128(mHi, mLo, rsign, scale, mc); if(res!=null) { return res; } res = new BigDecimal(BigInteger.valueOf(x).multiply(y*rsign), INFLATED, scale, 0); return doRound(res,mc); } }
public class class_name { private static BigDecimal multiplyAndRound(long x, long y, int scale, MathContext mc) { long product = multiply(x, y); if(product!=INFLATED) { return doRound(product, scale, mc); // depends on control dependency: [if], data = [(product] } // attempt to do it in 128 bits int rsign = 1; if(x < 0) { x = -x; // depends on control dependency: [if], data = [none] rsign = -1; // depends on control dependency: [if], data = [none] } if(y < 0) { y = -y; // depends on control dependency: [if], data = [none] rsign *= -1; // depends on control dependency: [if], data = [none] } // multiply dividend0 * dividend1 long m0_hi = x >>> 32; long m0_lo = x & LONG_MASK; long m1_hi = y >>> 32; long m1_lo = y & LONG_MASK; product = m0_lo * m1_lo; long m0 = product & LONG_MASK; long m1 = product >>> 32; product = m0_hi * m1_lo + m1; m1 = product & LONG_MASK; long m2 = product >>> 32; product = m0_lo * m1_hi + m1; m1 = product & LONG_MASK; m2 += product >>> 32; long m3 = m2>>>32; m2 &= LONG_MASK; product = m0_hi*m1_hi + m2; m2 = product & LONG_MASK; m3 = ((product>>>32) + m3) & LONG_MASK; final long mHi = make64(m3,m2); final long mLo = make64(m1,m0); BigDecimal res = doRound128(mHi, mLo, rsign, scale, mc); if(res!=null) { return res; // depends on control dependency: [if], data = [none] } res = new BigDecimal(BigInteger.valueOf(x).multiply(y*rsign), INFLATED, scale, 0); return doRound(res,mc); } }
public class class_name { public static SparseDoubleVector multiplyUnmodified(SparseDoubleVector a, SparseDoubleVector b) { SparseDoubleVector result = new CompactSparseVector(); int[] nonZerosA = a.getNonZeroIndices(); int[] nonZerosB = b.getNonZeroIndices(); if (nonZerosA.length == 0 || nonZerosB.length == 0) return result; if (nonZerosA[nonZerosA.length-1] > nonZerosB[nonZerosB.length-1]) { SparseDoubleVector t = b; b = a; a = t; } nonZerosA = a.getNonZeroIndices(); for (int index : nonZerosA) { double v = a.get(index); double w = b.get(index); if (w != 0d) result.set(index, v*w); } return result; } }
public class class_name { public static SparseDoubleVector multiplyUnmodified(SparseDoubleVector a, SparseDoubleVector b) { SparseDoubleVector result = new CompactSparseVector(); int[] nonZerosA = a.getNonZeroIndices(); int[] nonZerosB = b.getNonZeroIndices(); if (nonZerosA.length == 0 || nonZerosB.length == 0) return result; if (nonZerosA[nonZerosA.length-1] > nonZerosB[nonZerosB.length-1]) { SparseDoubleVector t = b; b = a; // depends on control dependency: [if], data = [none] a = t; // depends on control dependency: [if], data = [none] } nonZerosA = a.getNonZeroIndices(); for (int index : nonZerosA) { double v = a.get(index); double w = b.get(index); if (w != 0d) result.set(index, v*w); } return result; } }
public class class_name { private Map<String, String> maybeReplaceFormatterInSettings(Map<String, String> individualSettings) { if (individualSettings == null) { return null; } else if (m_customReplacements == null) { return individualSettings; } else { LinkedHashMap<String, String> result = new LinkedHashMap<String, String>(); for (Map.Entry<String, String> entry : individualSettings.entrySet()) { String value = entry.getValue(); if (CmsUUID.isValidUUID(value)) { CmsUUID valueId = new CmsUUID(value); if (m_customReplacements.containsKey(valueId)) { value = "" + m_customReplacements.get(valueId); } } result.put(entry.getKey(), value); } return result; } } }
public class class_name { private Map<String, String> maybeReplaceFormatterInSettings(Map<String, String> individualSettings) { if (individualSettings == null) { return null; // depends on control dependency: [if], data = [none] } else if (m_customReplacements == null) { return individualSettings; // depends on control dependency: [if], data = [none] } else { LinkedHashMap<String, String> result = new LinkedHashMap<String, String>(); for (Map.Entry<String, String> entry : individualSettings.entrySet()) { String value = entry.getValue(); if (CmsUUID.isValidUUID(value)) { CmsUUID valueId = new CmsUUID(value); if (m_customReplacements.containsKey(valueId)) { value = "" + m_customReplacements.get(valueId); // depends on control dependency: [if], data = [none] } } result.put(entry.getKey(), value); // depends on control dependency: [for], data = [entry] } return result; // depends on control dependency: [if], data = [none] } } }
public class class_name { protected void allocateBuffers(int arraySize) { assert Integer.bitCount(arraySize) == 1; // Compute new hash mixer candidate before expanding. final int newKeyMixer = this.orderMixer.newKeyMixer(arraySize); // Ensure no change is done if we hit an OOM. KType[] prevKeys = Intrinsics.<KType[]> cast(this.keys); try { int emptyElementSlot = 1; this.keys = Intrinsics.<KType> newArray(arraySize + emptyElementSlot); } catch (OutOfMemoryError e) { this.keys = prevKeys; throw new BufferAllocationException( "Not enough memory to allocate buffers for rehashing: %,d -> %,d", e, this.keys == null ? 0 : size(), arraySize); } this.resizeAt = expandAtCount(arraySize, loadFactor); this.keyMixer = newKeyMixer; this.mask = arraySize - 1; } }
public class class_name { protected void allocateBuffers(int arraySize) { assert Integer.bitCount(arraySize) == 1; // Compute new hash mixer candidate before expanding. final int newKeyMixer = this.orderMixer.newKeyMixer(arraySize); // Ensure no change is done if we hit an OOM. KType[] prevKeys = Intrinsics.<KType[]> cast(this.keys); try { int emptyElementSlot = 1; this.keys = Intrinsics.<KType> newArray(arraySize + emptyElementSlot); // depends on control dependency: [try], data = [none] } catch (OutOfMemoryError e) { this.keys = prevKeys; throw new BufferAllocationException( "Not enough memory to allocate buffers for rehashing: %,d -> %,d", e, this.keys == null ? 0 : size(), arraySize); } // depends on control dependency: [catch], data = [none] this.resizeAt = expandAtCount(arraySize, loadFactor); this.keyMixer = newKeyMixer; this.mask = arraySize - 1; } }
public class class_name { public void warn( Marker marker, String msg ) { if( m_delegate.isWarnEnabled() ) { setMDCMarker( marker ); m_delegate.warn( msg, null ); resetMDCMarker(); } } }
public class class_name { public void warn( Marker marker, String msg ) { if( m_delegate.isWarnEnabled() ) { setMDCMarker( marker ); // depends on control dependency: [if], data = [none] m_delegate.warn( msg, null ); // depends on control dependency: [if], data = [none] resetMDCMarker(); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public void run() { try { byte[] bytes = new byte[8192]; int pos = 0; // Read the first part of the header while (true) { int read = input.read(); if (read == -1) { break; } bytes[pos] = (byte) read; if (pos >= 4) { // Find \r\n\r\n if (bytes[pos - 3] == '\r' && bytes[pos - 2] == '\n' && bytes[pos - 1] == '\r' && bytes[pos] == '\n') { break; } } pos++; } // Read from the header data BufferedReader reader = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(bytes, 0, pos))); // Read the first line, defined as the status line String l = reader.readLine(); // Sanity check, after not returning data the client MIGHT attempt // to send something back and it will end up being something we // cannot read. if (l == null) { sendError(HttpStatus.BAD_REQUEST, HttpStatus.BAD_REQUEST.toString()); return; } // Otherwise continue on int idx = l.indexOf(' '); // Split out the method and path String methodString = l.substring(0, idx); HttpMethod method = null; for (HttpMethod m : HttpMethod.values()) { if (m.name().equals(methodString)) { method = m; } } // If it's an unknown method it won't be defined in the enum if (method == null) { sendError(HttpStatus.METHOD_NOT_ALLOWED, "This server currently does not support this method."); return; } // The URI String path = l.substring(idx + 1, l.lastIndexOf(' ')); // Parse the headers Map<String, String> headers = new HashMap<String, String>(); while ((l = reader.readLine()) != null) { // End header. if (l.equals("")) break; // Headers are usually Key: Value String key = l.substring(0, l.indexOf(':')); String value = l.substring(l.indexOf(':') + 1).trim(); // Put the header in the map, correcting the header key if // needed. 
headers.put(HttpUtil.capitalizeHeader(key), value); } // Close the reader used for the header reader.close(); HttpRequest request = new HttpRequest(this, method, path, headers); int questionIdx = path.indexOf('?'); if (questionIdx != -1) { String queryString = path.substring(questionIdx + 1); request.setQueryString(queryString); request.setGetData(HttpUtil.parseData(queryString)); path = path.substring(0, questionIdx); request.setUri(path); } // Parse cookies, only if the server has the capability enabled (to save time, processing power, and memory if it isn't used) if (headers.containsKey(HttpHeader.COOKIE) && server.hasCapability(HttpCapability.COOKIES)) { List<HttpCookie> cookies = new LinkedList<HttpCookie>(); StringTokenizer tok = new StringTokenizer(headers.get(HttpHeader.COOKIE), ";"); while (tok.hasMoreTokens()) { String token = tok.nextToken(); int eqIdx = token.indexOf('='); if (eqIdx == -1) { // Invalid cookie continue; } String key = token.substring(0, eqIdx); String value = token.substring(eqIdx + 1); cookies.add(new HttpCookie(key, value)); } request.setCookies(cookies); } // Read the request data if (method == HttpMethod.POST) { boolean acceptsStandard = server.hasCapability(HttpCapability.STANDARD_POST), acceptsMultipart = server.hasCapability(HttpCapability.MULTIPART_POST); // Make sure the server will accept POST or Multipart POST // before we start checking the content if (acceptsStandard || acceptsMultipart) { // Validate that there's a length header if (!headers.containsKey(HttpHeader.CONTENT_LENGTH)) { // If there isn't, send the correct response sendError(HttpStatus.LENGTH_REQUIRED, HttpStatus.LENGTH_REQUIRED.toString()); } else { // Otherwise, continue on int contentLength = Integer.parseInt(headers.get(HttpHeader.CONTENT_LENGTH)); String contentTypeHeader = headers.get(HttpHeader.CONTENT_TYPE); // Copy it to trim to what we need, keeping the original // to parse the boundary String contentType = contentTypeHeader; if 
(contentTypeHeader.indexOf(';') != -1) { contentType = contentTypeHeader.substring(0, contentTypeHeader.indexOf(';')); } // Check the content type if (contentType.equalsIgnoreCase("multipart/form-data")) { if (acceptsMultipart) { // The server will accept post requests with // multipart data String boundary = contentTypeHeader.substring(contentTypeHeader.indexOf(';')).trim(); boundary = boundary.substring(boundary.indexOf('=') + 1); // Parse file uploads etc. request.setPostData(readMultipartData(boundary)); } else { // The server has the multipart post // capabilities disabled sendError(HttpStatus.BAD_REQUEST, "This server does not support multipart/form-data requests."); } } else { if (acceptsStandard) { // Read the reported content length, TODO some // kind of check/timeout to make sure it won't // hang the thread? byte[] b = new byte[contentLength]; int read, totalRead = 0; while (contentLength - totalRead > 0 && (read = input.read(b, totalRead, contentLength - totalRead)) > -1) { totalRead += read; } // We either read all of the data, or the // connection closed. if (totalRead < contentLength) { sendError(HttpStatus.BAD_REQUEST, "Unable to read correct amount of data!"); } else { String data = new String(b); if (contentType.equalsIgnoreCase("application/x-www-form-urlencoded")) { // It is FOR SURE regular data. request.setPostData(HttpUtil.parseData(data)); } else { // Could be JSON or XML etc request.setData(data); } } } else { // The server has the Standard post capabilities // disabled sendError(HttpStatus.BAD_REQUEST, "This server does not support POST requests."); } } } } else { // The server has the Standard and Multipart capabilities // disabled sendError(HttpStatus.METHOD_NOT_ALLOWED, "This server does not support POST requests."); } } this.request = request; server.dispatchRequest(request); } catch (SocketException e) { //Socket was closed probably } catch (IOException e) { e.printStackTrace(); } } }
public class class_name { @Override public void run() { try { byte[] bytes = new byte[8192]; int pos = 0; // Read the first part of the header while (true) { int read = input.read(); if (read == -1) { break; } bytes[pos] = (byte) read; // depends on control dependency: [while], data = [none] if (pos >= 4) { // Find \r\n\r\n if (bytes[pos - 3] == '\r' && bytes[pos - 2] == '\n' && bytes[pos - 1] == '\r' && bytes[pos] == '\n') { break; } } pos++; // depends on control dependency: [while], data = [none] } // Read from the header data BufferedReader reader = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(bytes, 0, pos))); // Read the first line, defined as the status line String l = reader.readLine(); // Sanity check, after not returning data the client MIGHT attempt // to send something back and it will end up being something we // cannot read. if (l == null) { sendError(HttpStatus.BAD_REQUEST, HttpStatus.BAD_REQUEST.toString()); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } // Otherwise continue on int idx = l.indexOf(' '); // Split out the method and path String methodString = l.substring(0, idx); HttpMethod method = null; for (HttpMethod m : HttpMethod.values()) { if (m.name().equals(methodString)) { method = m; // depends on control dependency: [if], data = [none] } } // If it's an unknown method it won't be defined in the enum if (method == null) { sendError(HttpStatus.METHOD_NOT_ALLOWED, "This server currently does not support this method."); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } // The URI String path = l.substring(idx + 1, l.lastIndexOf(' ')); // Parse the headers Map<String, String> headers = new HashMap<String, String>(); while ((l = reader.readLine()) != null) { // End header. 
if (l.equals("")) break; // Headers are usually Key: Value String key = l.substring(0, l.indexOf(':')); String value = l.substring(l.indexOf(':') + 1).trim(); // Put the header in the map, correcting the header key if // needed. headers.put(HttpUtil.capitalizeHeader(key), value); // depends on control dependency: [while], data = [none] } // Close the reader used for the header reader.close(); // depends on control dependency: [try], data = [none] HttpRequest request = new HttpRequest(this, method, path, headers); int questionIdx = path.indexOf('?'); if (questionIdx != -1) { String queryString = path.substring(questionIdx + 1); request.setQueryString(queryString); // depends on control dependency: [if], data = [none] request.setGetData(HttpUtil.parseData(queryString)); // depends on control dependency: [if], data = [none] path = path.substring(0, questionIdx); // depends on control dependency: [if], data = [none] request.setUri(path); // depends on control dependency: [if], data = [none] } // Parse cookies, only if the server has the capability enabled (to save time, processing power, and memory if it isn't used) if (headers.containsKey(HttpHeader.COOKIE) && server.hasCapability(HttpCapability.COOKIES)) { List<HttpCookie> cookies = new LinkedList<HttpCookie>(); StringTokenizer tok = new StringTokenizer(headers.get(HttpHeader.COOKIE), ";"); while (tok.hasMoreTokens()) { String token = tok.nextToken(); int eqIdx = token.indexOf('='); if (eqIdx == -1) { // Invalid cookie continue; } String key = token.substring(0, eqIdx); String value = token.substring(eqIdx + 1); cookies.add(new HttpCookie(key, value)); // depends on control dependency: [while], data = [none] } request.setCookies(cookies); // depends on control dependency: [if], data = [none] } // Read the request data if (method == HttpMethod.POST) { boolean acceptsStandard = server.hasCapability(HttpCapability.STANDARD_POST), acceptsMultipart = server.hasCapability(HttpCapability.MULTIPART_POST); // Make sure the 
server will accept POST or Multipart POST // before we start checking the content if (acceptsStandard || acceptsMultipart) { // Validate that there's a length header if (!headers.containsKey(HttpHeader.CONTENT_LENGTH)) { // If there isn't, send the correct response sendError(HttpStatus.LENGTH_REQUIRED, HttpStatus.LENGTH_REQUIRED.toString()); // depends on control dependency: [if], data = [none] } else { // Otherwise, continue on int contentLength = Integer.parseInt(headers.get(HttpHeader.CONTENT_LENGTH)); String contentTypeHeader = headers.get(HttpHeader.CONTENT_TYPE); // Copy it to trim to what we need, keeping the original // to parse the boundary String contentType = contentTypeHeader; if (contentTypeHeader.indexOf(';') != -1) { contentType = contentTypeHeader.substring(0, contentTypeHeader.indexOf(';')); // depends on control dependency: [if], data = [none] } // Check the content type if (contentType.equalsIgnoreCase("multipart/form-data")) { if (acceptsMultipart) { // The server will accept post requests with // multipart data String boundary = contentTypeHeader.substring(contentTypeHeader.indexOf(';')).trim(); boundary = boundary.substring(boundary.indexOf('=') + 1); // depends on control dependency: [if], data = [none] // Parse file uploads etc. request.setPostData(readMultipartData(boundary)); // depends on control dependency: [if], data = [none] } else { // The server has the multipart post // capabilities disabled sendError(HttpStatus.BAD_REQUEST, "This server does not support multipart/form-data requests."); // depends on control dependency: [if], data = [none] } } else { if (acceptsStandard) { // Read the reported content length, TODO some // kind of check/timeout to make sure it won't // hang the thread? 
byte[] b = new byte[contentLength]; int read, totalRead = 0; while (contentLength - totalRead > 0 && (read = input.read(b, totalRead, contentLength - totalRead)) > -1) { totalRead += read; // depends on control dependency: [while], data = [none] } // We either read all of the data, or the // connection closed. if (totalRead < contentLength) { sendError(HttpStatus.BAD_REQUEST, "Unable to read correct amount of data!"); // depends on control dependency: [if], data = [none] } else { String data = new String(b); if (contentType.equalsIgnoreCase("application/x-www-form-urlencoded")) { // It is FOR SURE regular data. request.setPostData(HttpUtil.parseData(data)); // depends on control dependency: [if], data = [none] } else { // Could be JSON or XML etc request.setData(data); // depends on control dependency: [if], data = [none] } } } else { // The server has the Standard post capabilities // disabled sendError(HttpStatus.BAD_REQUEST, "This server does not support POST requests."); // depends on control dependency: [if], data = [none] } } } } else { // The server has the Standard and Multipart capabilities // disabled sendError(HttpStatus.METHOD_NOT_ALLOWED, "This server does not support POST requests."); // depends on control dependency: [if], data = [none] } } this.request = request; // depends on control dependency: [try], data = [none] server.dispatchRequest(request); // depends on control dependency: [try], data = [none] } catch (SocketException e) { //Socket was closed probably } catch (IOException e) { // depends on control dependency: [catch], data = [none] e.printStackTrace(); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public ValidateFieldResult<String> validateAndConvert (String string) { ValidateFieldResult<String> result = cleanString(string); try { new URL(result.clean); // TODO call this to validate, but we can't default to zero return result; } catch (Exception ex) { result.errors.add(NewGTFSError.forFeed(NewGTFSErrorType.URL_FORMAT, string)); return result; } } }
public class class_name { public ValidateFieldResult<String> validateAndConvert (String string) { ValidateFieldResult<String> result = cleanString(string); try { new URL(result.clean); // TODO call this to validate, but we can't default to zero // depends on control dependency: [try], data = [none] return result; // depends on control dependency: [try], data = [none] } catch (Exception ex) { result.errors.add(NewGTFSError.forFeed(NewGTFSErrorType.URL_FORMAT, string)); return result; } // depends on control dependency: [catch], data = [none] } }
public class class_name { private void runWithPermit(final Task<?> task, final String planClass) { ArgumentUtil.requireNotNull(task, "task"); ArgumentUtil.requireNotNull(planClass, "planClass"); State currState, newState; do { currState = _stateRef.get(); if (currState._stateName != StateName.RUN) { task.cancel(new EngineShutdownException("Task submitted after engine shutdown")); return; } newState = new State(StateName.RUN, currState._pendingCount + 1); } while (!_stateRef.compareAndSet(currState, newState)); PlanContext planContext = new PlanContext(this, _taskExecutor, _timerExecutor, _loggerFactory, _allLogger, _rootLogger, planClass, task, _maxRelationshipsPerTrace, _planDeactivationListener, _planCompletionListener, _taskQueueFactory.newTaskQueue(), _drainSerialExecutorQueue, _executionMonitor); new ContextImpl(planContext, task).runTask(); } }
public class class_name { private void runWithPermit(final Task<?> task, final String planClass) { ArgumentUtil.requireNotNull(task, "task"); ArgumentUtil.requireNotNull(planClass, "planClass"); State currState, newState; do { currState = _stateRef.get(); if (currState._stateName != StateName.RUN) { task.cancel(new EngineShutdownException("Task submitted after engine shutdown")); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } newState = new State(StateName.RUN, currState._pendingCount + 1); } while (!_stateRef.compareAndSet(currState, newState)); PlanContext planContext = new PlanContext(this, _taskExecutor, _timerExecutor, _loggerFactory, _allLogger, _rootLogger, planClass, task, _maxRelationshipsPerTrace, _planDeactivationListener, _planCompletionListener, _taskQueueFactory.newTaskQueue(), _drainSerialExecutorQueue, _executionMonitor); new ContextImpl(planContext, task).runTask(); } }
public class class_name { public void writeLanguage(String appid, String langCode, Map<String, String> lang, boolean writeToDatabase) { if (lang == null || lang.isEmpty() || StringUtils.isBlank(langCode) || !ALL_LOCALES.containsKey(langCode)) { return; } writeLanguageToFile(appid, langCode, lang); if (writeToDatabase) { // this will overwrite a saved language map! Sysprop s = new Sysprop(keyPrefix.concat(langCode)); Map<String, String> dlang = getDefaultLanguage(appid); for (Map.Entry<String, String> entry : dlang.entrySet()) { String key = entry.getKey(); if (lang.containsKey(key)) { s.addProperty(key, lang.get(key)); } else { s.addProperty(key, entry.getValue()); } } dao.create(appid, s); } } }
public class class_name { public void writeLanguage(String appid, String langCode, Map<String, String> lang, boolean writeToDatabase) { if (lang == null || lang.isEmpty() || StringUtils.isBlank(langCode) || !ALL_LOCALES.containsKey(langCode)) { return; // depends on control dependency: [if], data = [none] } writeLanguageToFile(appid, langCode, lang); if (writeToDatabase) { // this will overwrite a saved language map! Sysprop s = new Sysprop(keyPrefix.concat(langCode)); Map<String, String> dlang = getDefaultLanguage(appid); for (Map.Entry<String, String> entry : dlang.entrySet()) { String key = entry.getKey(); if (lang.containsKey(key)) { s.addProperty(key, lang.get(key)); // depends on control dependency: [if], data = [none] } else { s.addProperty(key, entry.getValue()); // depends on control dependency: [if], data = [none] } } dao.create(appid, s); // depends on control dependency: [if], data = [none] } } }
public class class_name { HpackHeaderField getHeaderField(int index) { HeaderEntry entry = head; while (index-- >= 0) { entry = entry.before; } return entry; } }
public class class_name { HpackHeaderField getHeaderField(int index) { HeaderEntry entry = head; while (index-- >= 0) { entry = entry.before; // depends on control dependency: [while], data = [none] } return entry; } }
public class class_name { private static char[] correctPassword(char[] password) { if (password != null && new String(password).indexOf('%') == -1) { String escaped = escapeSpecialAsciiAndNonAscii(new String(password)); StringBuilder totalEscaped = new StringBuilder(); for (int i = 0; i < escaped.length(); i++) { char ch = escaped.charAt(i); if (ch == '@' || ch == '/' || ch == ':') { totalEscaped.append('%') .append(Integer.toHexString(ch).toUpperCase()); } else { totalEscaped.append(ch); } } password = totalEscaped.toString().toCharArray(); } return password; } }
public class class_name { private static char[] correctPassword(char[] password) { if (password != null && new String(password).indexOf('%') == -1) { String escaped = escapeSpecialAsciiAndNonAscii(new String(password)); StringBuilder totalEscaped = new StringBuilder(); for (int i = 0; i < escaped.length(); i++) { char ch = escaped.charAt(i); if (ch == '@' || ch == '/' || ch == ':') { totalEscaped.append('%') .append(Integer.toHexString(ch).toUpperCase()); // depends on control dependency: [if], data = [none] } else { totalEscaped.append(ch); // depends on control dependency: [if], data = [(ch] } } password = totalEscaped.toString().toCharArray(); // depends on control dependency: [if], data = [none] } return password; } }
public class class_name { @Override public List<AnnotationType> getAnnotationTypes(KamInfo ki) { if (ki == null) throw new InvalidArgument(DEFAULT_MSG); if (!exists(ki)) return null; try { return kamStoreDao(ki).getAnnotationTypes(); } catch (SQLException e) { final String fmt = "error getting annotation types for %s"; final String msg = format(fmt, ki.getName()); throw new KAMStoreException(msg, e); } } }
public class class_name { @Override public List<AnnotationType> getAnnotationTypes(KamInfo ki) { if (ki == null) throw new InvalidArgument(DEFAULT_MSG); if (!exists(ki)) return null; try { return kamStoreDao(ki).getAnnotationTypes(); // depends on control dependency: [try], data = [none] } catch (SQLException e) { final String fmt = "error getting annotation types for %s"; final String msg = format(fmt, ki.getName()); throw new KAMStoreException(msg, e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private MessageType getMessageType(Message response) { String messageTypeString = (String) response.getHeader(MessageHeaders.MESSAGE_TYPE); if (MessageType.knows(messageTypeString)){ return MessageType.valueOf(messageTypeString.toUpperCase()); } return null; } }
public class class_name { private MessageType getMessageType(Message response) { String messageTypeString = (String) response.getHeader(MessageHeaders.MESSAGE_TYPE); if (MessageType.knows(messageTypeString)){ return MessageType.valueOf(messageTypeString.toUpperCase()); // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { private static String multipleExtractor(String formula) { String recentCompoundCount = "0"; String recentCompound = ""; boolean found = false; for (int f = 0; f < formula.length(); f++) { char thisChar = formula.charAt(f); if (thisChar >= '0' && thisChar <= '9') { if (!found) recentCompoundCount += thisChar; else recentCompound += thisChar; } else { found = true; recentCompound += thisChar; } } return muliplier(recentCompound, Integer.valueOf(recentCompoundCount)); } }
public class class_name { private static String multipleExtractor(String formula) { String recentCompoundCount = "0"; String recentCompound = ""; boolean found = false; for (int f = 0; f < formula.length(); f++) { char thisChar = formula.charAt(f); if (thisChar >= '0' && thisChar <= '9') { if (!found) recentCompoundCount += thisChar; else recentCompound += thisChar; } else { found = true; // depends on control dependency: [if], data = [none] recentCompound += thisChar; // depends on control dependency: [if], data = [none] } } return muliplier(recentCompound, Integer.valueOf(recentCompoundCount)); } }
public class class_name { public void marshall(GetRandomPasswordRequest getRandomPasswordRequest, ProtocolMarshaller protocolMarshaller) { if (getRandomPasswordRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getRandomPasswordRequest.getPasswordLength(), PASSWORDLENGTH_BINDING); protocolMarshaller.marshall(getRandomPasswordRequest.getExcludeCharacters(), EXCLUDECHARACTERS_BINDING); protocolMarshaller.marshall(getRandomPasswordRequest.getExcludeNumbers(), EXCLUDENUMBERS_BINDING); protocolMarshaller.marshall(getRandomPasswordRequest.getExcludePunctuation(), EXCLUDEPUNCTUATION_BINDING); protocolMarshaller.marshall(getRandomPasswordRequest.getExcludeUppercase(), EXCLUDEUPPERCASE_BINDING); protocolMarshaller.marshall(getRandomPasswordRequest.getExcludeLowercase(), EXCLUDELOWERCASE_BINDING); protocolMarshaller.marshall(getRandomPasswordRequest.getIncludeSpace(), INCLUDESPACE_BINDING); protocolMarshaller.marshall(getRandomPasswordRequest.getRequireEachIncludedType(), REQUIREEACHINCLUDEDTYPE_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(GetRandomPasswordRequest getRandomPasswordRequest, ProtocolMarshaller protocolMarshaller) { if (getRandomPasswordRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getRandomPasswordRequest.getPasswordLength(), PASSWORDLENGTH_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(getRandomPasswordRequest.getExcludeCharacters(), EXCLUDECHARACTERS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(getRandomPasswordRequest.getExcludeNumbers(), EXCLUDENUMBERS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(getRandomPasswordRequest.getExcludePunctuation(), EXCLUDEPUNCTUATION_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(getRandomPasswordRequest.getExcludeUppercase(), EXCLUDEUPPERCASE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(getRandomPasswordRequest.getExcludeLowercase(), EXCLUDELOWERCASE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(getRandomPasswordRequest.getIncludeSpace(), INCLUDESPACE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(getRandomPasswordRequest.getRequireEachIncludedType(), REQUIREEACHINCLUDEDTYPE_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void changeMonitor(ComponentMonitor monitor) { // will also change monitor in lifecycleStrategyForInstanceRegistrations if (componentAdapterFactory instanceof ComponentMonitorStrategy) { ((ComponentMonitorStrategy) componentAdapterFactory).changeMonitor(monitor); } for (Iterator i = compAdapters.iterator(); i.hasNext();) { Object adapter = i.next(); if (adapter instanceof ComponentMonitorStrategy) { ((ComponentMonitorStrategy) adapter).changeMonitor(monitor); } } for (Iterator i = children.iterator(); i.hasNext();) { Object child = i.next(); if (child instanceof ComponentMonitorStrategy) { ((ComponentMonitorStrategy) child).changeMonitor(monitor); } } } }
public class class_name { public void changeMonitor(ComponentMonitor monitor) { // will also change monitor in lifecycleStrategyForInstanceRegistrations if (componentAdapterFactory instanceof ComponentMonitorStrategy) { ((ComponentMonitorStrategy) componentAdapterFactory).changeMonitor(monitor); // depends on control dependency: [if], data = [none] } for (Iterator i = compAdapters.iterator(); i.hasNext();) { Object adapter = i.next(); if (adapter instanceof ComponentMonitorStrategy) { ((ComponentMonitorStrategy) adapter).changeMonitor(monitor); // depends on control dependency: [if], data = [none] } } for (Iterator i = children.iterator(); i.hasNext();) { Object child = i.next(); if (child instanceof ComponentMonitorStrategy) { ((ComponentMonitorStrategy) child).changeMonitor(monitor); // depends on control dependency: [if], data = [none] } } } }
public class class_name { protected static IPackageFragmentRoot[] findRootsForClasspath(IPath entry, IJavaProject[] searchOrder) { for (int i= 0; i < searchOrder.length; i++) { IPackageFragmentRoot[] elements= findRootsInProject(entry, searchOrder[i]); if (elements.length != 0) { return elements; } } return null; } }
public class class_name { protected static IPackageFragmentRoot[] findRootsForClasspath(IPath entry, IJavaProject[] searchOrder) { for (int i= 0; i < searchOrder.length; i++) { IPackageFragmentRoot[] elements= findRootsInProject(entry, searchOrder[i]); if (elements.length != 0) { return elements; // depends on control dependency: [if], data = [none] } } return null; } }
public class class_name { public void addNestedObject(ParaObject obj) { if (obj != null) { getNstd().add(ParaObjectUtils.getAnnotatedFields(obj, false)); } } }
public class class_name { public void addNestedObject(ParaObject obj) { if (obj != null) { getNstd().add(ParaObjectUtils.getAnnotatedFields(obj, false)); // depends on control dependency: [if], data = [(obj] } } }
public class class_name { private static boolean hasConflictingClassName(DescriptorProto messageDesc, String name) { if (name.equals(messageDesc.getName())) { return true; } for (EnumDescriptorProto enumDesc : messageDesc.getEnumTypeList()) { if (name.equals(enumDesc.getName())) { return true; } } for (DescriptorProto nestedMessageDesc : messageDesc.getNestedTypeList()) { if (hasConflictingClassName(nestedMessageDesc, name)) { return true; } } return false; } }
public class class_name { private static boolean hasConflictingClassName(DescriptorProto messageDesc, String name) { if (name.equals(messageDesc.getName())) { return true; // depends on control dependency: [if], data = [none] } for (EnumDescriptorProto enumDesc : messageDesc.getEnumTypeList()) { if (name.equals(enumDesc.getName())) { return true; // depends on control dependency: [if], data = [none] } } for (DescriptorProto nestedMessageDesc : messageDesc.getNestedTypeList()) { if (hasConflictingClassName(nestedMessageDesc, name)) { return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name { protected void addModulesList(Collection<ModuleElement> modules, String text, String tableSummary, Content body) { Content heading = HtmlTree.HEADING(HtmlConstants.MODULE_HEADING, true, contents.modulesLabel); HtmlTree htmlTree = (configuration.allowTag(HtmlTag.MAIN)) ? HtmlTree.MAIN(HtmlStyle.indexContainer, heading) : HtmlTree.DIV(HtmlStyle.indexContainer, heading); HtmlTree ul = new HtmlTree(HtmlTag.UL); ul.setTitle(contents.modulesLabel); for (ModuleElement mdle: modules) { ul.addContent(getModuleLink(mdle)); } htmlTree.addContent(ul); body.addContent(htmlTree); } }
public class class_name { protected void addModulesList(Collection<ModuleElement> modules, String text, String tableSummary, Content body) { Content heading = HtmlTree.HEADING(HtmlConstants.MODULE_HEADING, true, contents.modulesLabel); HtmlTree htmlTree = (configuration.allowTag(HtmlTag.MAIN)) ? HtmlTree.MAIN(HtmlStyle.indexContainer, heading) : HtmlTree.DIV(HtmlStyle.indexContainer, heading); HtmlTree ul = new HtmlTree(HtmlTag.UL); ul.setTitle(contents.modulesLabel); for (ModuleElement mdle: modules) { ul.addContent(getModuleLink(mdle)); // depends on control dependency: [for], data = [mdle] } htmlTree.addContent(ul); body.addContent(htmlTree); } }
public class class_name { public boolean encodeTmpParityFile(Configuration jobConf, StripeReader sReader, FileSystem parityFs, Path partialTmpParity, Path parityFile, short tmpRepl, long blockSize, long expectedPartialParityBlocks, long expectedPartialParityFileSize, Progressable reporter) throws IOException, InterruptedException { // Create a tmp file to which we will write first. String jobID = RaidNode.getJobID(jobConf); Path tmpDir = new Path(codec.tmpParityDirectory, jobID); if (!parityFs.mkdirs(tmpDir)) { throw new IOException("Could not create tmp dir " + tmpDir); } Path parityTmp = new Path(tmpDir, parityFile.getName() + rand.nextLong()); FSDataOutputStream out = parityFs.create( parityTmp, true, conf.getInt("io.file.buffer.size", 64 * 1024), tmpRepl, blockSize); try { CRC32[] crcOuts = null; if (checksumStore != null) { crcOuts = new CRC32[(int)expectedPartialParityBlocks]; } encodeFileToStream(sReader, blockSize, out, crcOuts, reporter); out.close(); out = null; LOG.info("Wrote temp parity file " + parityTmp); FileStatus tmpStat = parityFs.getFileStatus(parityTmp); if (tmpStat.getLen() != expectedPartialParityFileSize) { InjectionHandler.processEventIO( InjectionEvent.RAID_ENCODING_FAILURE_PARTIAL_PARITY_SIZE_MISMATCH); throw new IOException("Expected partial parity size " + expectedPartialParityFileSize + " does not match actual " + tmpStat.getLen() + " in path " + tmpStat.getPath()); } InjectionHandler.processEventIO( InjectionEvent.RAID_ENCODING_FAILURE_PUT_CHECKSUM); if (checksumStore != null) { this.writeToChecksumStore((DistributedFileSystem)parityFs, crcOuts, parityTmp, expectedPartialParityFileSize, reporter); } if (!parityFs.rename(parityTmp, partialTmpParity)) { LOG.warn("Fail to rename file " + parityTmp + " to " + partialTmpParity); return false; } LOG.info("renamed " + parityTmp + " to " + partialTmpParity); return true; } finally { try { if (out != null) { out.close(); } } finally { parityFs.delete(parityTmp, false); } } } }
// Annotated twin of the record above: same parity-encoding logic (write temp file, verify size,
// store CRCs, rename into place, cleanup in finally). NOTE(review): the inline "depends on control
// dependency" markers are machine-generated dataset labels; the label split across these two lines
// is part of the record's payload and is preserved verbatim.
public class class_name { public boolean encodeTmpParityFile(Configuration jobConf, StripeReader sReader, FileSystem parityFs, Path partialTmpParity, Path parityFile, short tmpRepl, long blockSize, long expectedPartialParityBlocks, long expectedPartialParityFileSize, Progressable reporter) throws IOException, InterruptedException { // Create a tmp file to which we will write first. String jobID = RaidNode.getJobID(jobConf); Path tmpDir = new Path(codec.tmpParityDirectory, jobID); if (!parityFs.mkdirs(tmpDir)) { throw new IOException("Could not create tmp dir " + tmpDir); } Path parityTmp = new Path(tmpDir, parityFile.getName() + rand.nextLong()); FSDataOutputStream out = parityFs.create( parityTmp, true, conf.getInt("io.file.buffer.size", 64 * 1024), tmpRepl, blockSize); try { CRC32[] crcOuts = null; if (checksumStore != null) { crcOuts = new CRC32[(int)expectedPartialParityBlocks]; // depends on control dependency: [if], data = [none] } encodeFileToStream(sReader, blockSize, out, crcOuts, reporter); out.close(); out = null; LOG.info("Wrote temp parity file " + parityTmp); FileStatus tmpStat = parityFs.getFileStatus(parityTmp); if (tmpStat.getLen() != expectedPartialParityFileSize) { InjectionHandler.processEventIO( InjectionEvent.RAID_ENCODING_FAILURE_PARTIAL_PARITY_SIZE_MISMATCH); // depends on control dependency: [if], data = [none] throw new IOException("Expected partial parity size " + expectedPartialParityFileSize + " does not match actual " + tmpStat.getLen() + " in path " + tmpStat.getPath()); } InjectionHandler.processEventIO( InjectionEvent.RAID_ENCODING_FAILURE_PUT_CHECKSUM); if (checksumStore != null) { this.writeToChecksumStore((DistributedFileSystem)parityFs, crcOuts, parityTmp, expectedPartialParityFileSize, reporter); // depends on control dependency: [if], data = [none] } if (!parityFs.rename(parityTmp, partialTmpParity)) { LOG.warn("Fail to rename file " + parityTmp + " to " + partialTmpParity); // depends on control dependency: [if], data = 
[none] return false; // depends on control dependency: [if], data = [none] } LOG.info("renamed " + parityTmp + " to " + partialTmpParity); return true; } finally { try { if (out != null) { out.close(); // depends on control dependency: [if], data = [none] } } finally { parityFs.delete(parityTmp, false); } } } }
// Recursively searches thisType's supertype hierarchy (or, for type parameters, the upper-bound
// constraints) for a type reference whose raw type's identifier equals typeName. Returns null for
// void/primitives, for an interface when a non-interface supertype is requested, when the guard
// detects a cycle, when thisType itself matches the name, and when nothing is found (arrays fall
// through to null as well).
public class class_name { private JvmTypeReference getSuperTypeByName(String typeName, boolean interfaceType, JvmType thisType, RecursionGuard<JvmType> guard) { EClass thisTypeEClass = thisType.eClass(); if (!interfaceType && thisTypeEClass == TypesPackage.Literals.JVM_GENERIC_TYPE && ((JvmGenericType) thisType).isInterface()) { return null; } if (thisTypeEClass == TypesPackage.Literals.JVM_VOID || thisTypeEClass == TypesPackage.Literals.JVM_PRIMITIVE_TYPE) { return null; } if (typeName.equals(thisType.getIdentifier()) || !guard.tryNext(thisType)) { return null; } if (thisTypeEClass != TypesPackage.Literals.JVM_TYPE_PARAMETER) { if (thisTypeEClass != TypesPackage.Literals.JVM_ARRAY_TYPE) { List<JvmTypeReference> superTypes = ((JvmDeclaredType) thisType).getSuperTypes(); for(int i = 0, size = superTypes.size(); i < size; i++) { JvmTypeReference superType = superTypes.get(i); JvmType rawSuperType = superType.getType(); if (rawSuperType != null) { if (typeName.equals(rawSuperType.getIdentifier())) return superType; JvmTypeReference result = getSuperTypeByName(typeName, interfaceType, rawSuperType, guard); if (result != null) return result; } } } } else { List<JvmTypeConstraint> constraints = ((JvmTypeParameter) thisType).getConstraints(); for(int i = 0, size = constraints.size(); i < size; i++) { JvmTypeConstraint constraint = constraints.get(i); if (constraint.eClass() == TypesPackage.Literals.JVM_UPPER_BOUND && constraint.getTypeReference() != null) { JvmTypeReference superType = constraint.getTypeReference(); JvmType rawSuperType = superType.getType(); if (rawSuperType != null) { if (rawSuperType.eClass() != TypesPackage.Literals.JVM_TYPE_PARAMETER && typeName.equals(rawSuperType.getIdentifier())) { return superType; } JvmTypeReference result = getSuperTypeByName(typeName, interfaceType, rawSuperType, guard); if (result != null) return result; } } } } return null; } }
// Annotated twin of the record above: same recursive supertype-by-name lookup. NOTE(review): the
// inline "depends on control dependency" markers are machine-generated dataset labels; the record
// spans two physical lines and is preserved verbatim.
public class class_name { private JvmTypeReference getSuperTypeByName(String typeName, boolean interfaceType, JvmType thisType, RecursionGuard<JvmType> guard) { EClass thisTypeEClass = thisType.eClass(); if (!interfaceType && thisTypeEClass == TypesPackage.Literals.JVM_GENERIC_TYPE && ((JvmGenericType) thisType).isInterface()) { return null; // depends on control dependency: [if], data = [none] } if (thisTypeEClass == TypesPackage.Literals.JVM_VOID || thisTypeEClass == TypesPackage.Literals.JVM_PRIMITIVE_TYPE) { return null; // depends on control dependency: [if], data = [none] } if (typeName.equals(thisType.getIdentifier()) || !guard.tryNext(thisType)) { return null; // depends on control dependency: [if], data = [none] } if (thisTypeEClass != TypesPackage.Literals.JVM_TYPE_PARAMETER) { if (thisTypeEClass != TypesPackage.Literals.JVM_ARRAY_TYPE) { List<JvmTypeReference> superTypes = ((JvmDeclaredType) thisType).getSuperTypes(); for(int i = 0, size = superTypes.size(); i < size; i++) { JvmTypeReference superType = superTypes.get(i); JvmType rawSuperType = superType.getType(); if (rawSuperType != null) { if (typeName.equals(rawSuperType.getIdentifier())) return superType; JvmTypeReference result = getSuperTypeByName(typeName, interfaceType, rawSuperType, guard); if (result != null) return result; } } } } else { List<JvmTypeConstraint> constraints = ((JvmTypeParameter) thisType).getConstraints(); for(int i = 0, size = constraints.size(); i < size; i++) { JvmTypeConstraint constraint = constraints.get(i); if (constraint.eClass() == TypesPackage.Literals.JVM_UPPER_BOUND && constraint.getTypeReference() != null) { JvmTypeReference superType = constraint.getTypeReference(); JvmType rawSuperType = superType.getType(); if (rawSuperType != null) { if (rawSuperType.eClass() != TypesPackage.Literals.JVM_TYPE_PARAMETER && typeName.equals(rawSuperType.getIdentifier())) { return superType; // depends on control dependency: [if], data = [none] } JvmTypeReference result = 
getSuperTypeByName(typeName, interfaceType, rawSuperType, guard); if (result != null) return result; } } } } return null; } }
// Maps an offset port in [3600, 3699] back to its guessed base port: splits the two-digit suffix
// into a tens part ("image") and a units part ("offset"); an odd tens digit is folded down
// (image - 10, offset + 10) so offsets 10-19 land on the even image below. Returns 3600 + image
// when the offset is 0 or a recognized port offset, otherwise null.
public class class_name { public static Integer guessBasePortFromOffsetPort(final int port) { if ((port >= 3600) && (port <= 3699)) { final int suffix = port % 100; int image = suffix / 10 * 10; int offset = suffix % 10; if (suffix / 10 % 2 == 1) { image = image - 10; offset = offset + 10; } if ((offset == 0) || isAPortOffset(offset)) { return 3600 + image; } } return null; } }
// Annotated twin of the record above: same base-port guessing logic. NOTE(review): the inline
// "depends on control dependency" markers are machine-generated dataset labels.
public class class_name { public static Integer guessBasePortFromOffsetPort(final int port) { if ((port >= 3600) && (port <= 3699)) { final int suffix = port % 100; int image = suffix / 10 * 10; int offset = suffix % 10; if (suffix / 10 % 2 == 1) { image = image - 10; // depends on control dependency: [if], data = [none] offset = offset + 10; // depends on control dependency: [if], data = [none] } if ((offset == 0) || isAPortOffset(offset)) { return 3600 + image; // depends on control dependency: [if], data = [none] } } return null; } }
// Returns the index of the entry in 'numbers' whose value is within Hamming distance 1 of 'found'
// (bestHamming starts at 2, so only distances 0 or 1 match), or -1 when none is close enough.
// NOTE(review): both operands are truncated to int before the XOR, so the upper 32 bits of 'found'
// are ignored — confirm the encoded numbers are guaranteed to fit in 32 bits.
public class class_name { private int isExpected( long found ) { int bestHamming = 2; int bestNumber = -1; for (int i = 0; i < numbers.length; i++) { int hamming = DescriptorDistance.hamming((int)found^(int)numbers[i]); if( hamming < bestHamming ) { bestHamming = hamming; bestNumber = i; } } return bestNumber; } }
// Annotated twin of the record above: nearest-by-Hamming-distance lookup with a cutoff of 1.
// NOTE(review): the inline "depends on control dependency" markers are machine-generated labels.
public class class_name { private int isExpected( long found ) { int bestHamming = 2; int bestNumber = -1; for (int i = 0; i < numbers.length; i++) { int hamming = DescriptorDistance.hamming((int)found^(int)numbers[i]); if( hamming < bestHamming ) { bestHamming = hamming; // depends on control dependency: [if], data = [none] bestNumber = i; // depends on control dependency: [if], data = [none] } } return bestNumber; } }
// Returns the class loader of the system Java compiler implementation. Any failure — including
// Errors, since Throwable is caught — is logged via trace(WARNING, e) and trace's return value
// (presumably null — TODO confirm) is returned instead of propagating.
public class class_name { public static ClassLoader getSystemToolClassLoader() { try { Class<? extends JavaCompiler> c = instance().getSystemToolClass(JavaCompiler.class, defaultJavaCompilerName); return c.getClassLoader(); } catch (Throwable e) { return trace(WARNING, e); } } }
// Annotated twin of the record above: system-compiler class-loader lookup with catch-all fallback.
// NOTE(review): the inline "depends on control dependency" marker is a machine-generated label.
public class class_name { public static ClassLoader getSystemToolClassLoader() { try { Class<? extends JavaCompiler> c = instance().getSystemToolClass(JavaCompiler.class, defaultJavaCompilerName); return c.getClassLoader(); } catch (Throwable e) { return trace(WARNING, e); } // depends on control dependency: [catch], data = [none] } }
// Snapshots current JVM memory usage — heap, non-heap, then every memory-pool MXBean — as
// MemoryMonitor entries, returning an unmodifiable view of the freshly built list.
public class class_name { public static List<MemoryMonitor> getMemoryStats(){ ArrayList<MemoryMonitor> memoryPoolInformation = new ArrayList<>(); MemoryUsage heapMem = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage(); memoryPoolInformation.add(new MemoryMonitor(HEAP_MEMORY,heapMem)); MemoryUsage nonHeapMen = ManagementFactory.getMemoryMXBean().getNonHeapMemoryUsage(); memoryPoolInformation.add(new MemoryMonitor(NON_HEAP_MEMORY,nonHeapMen)); for(MemoryPoolMXBean memMXBean :ManagementFactory.getMemoryPoolMXBeans()){ memoryPoolInformation.add(new MemoryMonitor(memMXBean.getName(), memMXBean.getUsage())); } return Collections.unmodifiableList(memoryPoolInformation); } }
// Annotated twin of the record above: heap, non-heap, and per-pool memory snapshot.
// NOTE(review): the inline "depends on control dependency" marker is a machine-generated label.
public class class_name { public static List<MemoryMonitor> getMemoryStats(){ ArrayList<MemoryMonitor> memoryPoolInformation = new ArrayList<>(); MemoryUsage heapMem = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage(); memoryPoolInformation.add(new MemoryMonitor(HEAP_MEMORY,heapMem)); MemoryUsage nonHeapMen = ManagementFactory.getMemoryMXBean().getNonHeapMemoryUsage(); memoryPoolInformation.add(new MemoryMonitor(NON_HEAP_MEMORY,nonHeapMen)); for(MemoryPoolMXBean memMXBean :ManagementFactory.getMemoryPoolMXBeans()){ memoryPoolInformation.add(new MemoryMonitor(memMXBean.getName(), memMXBean.getUsage())); // depends on control dependency: [for], data = [memMXBean] } return Collections.unmodifiableList(memoryPoolInformation); } }
// Validates each given method: it must be declared by obj's class and be reflected-executable for
// the given annotation. Returns a PerfidixMethodCheckException describing the first failure, or
// null when every method passes.
public class class_name { public static PerfidixMethodCheckException checkMethod(final Object obj, final Class<? extends Annotation> anno, final Method... meths) { for (Method meth : meths) { // check if the class of the object to be executed has the given // method boolean classMethodCorr = false; for (final Method methodOfClass : obj.getClass().getDeclaredMethods()) { if (methodOfClass.equals(meth)) { classMethodCorr = true; } } if (!classMethodCorr) { return new PerfidixMethodCheckException(new IllegalStateException("Object to execute " + obj + " is not having a Method named " + meth + "."), meth, anno); } // check if the method is reflected executable if (!BenchmarkMethod.isReflectedExecutable(meth, anno)) { return new PerfidixMethodCheckException(new IllegalAccessException("Method to execute " + meth + " is not reflected executable."), meth, anno); } } return null; } }
// Annotated twin of the record above: per-method declaration and executability validation.
// NOTE(review): the inline "depends on control dependency" markers are machine-generated labels.
public class class_name { public static PerfidixMethodCheckException checkMethod(final Object obj, final Class<? extends Annotation> anno, final Method... meths) { for (Method meth : meths) { // check if the class of the object to be executed has the given // method boolean classMethodCorr = false; for (final Method methodOfClass : obj.getClass().getDeclaredMethods()) { if (methodOfClass.equals(meth)) { classMethodCorr = true; // depends on control dependency: [if], data = [none] } } if (!classMethodCorr) { return new PerfidixMethodCheckException(new IllegalStateException("Object to execute " + obj + " is not having a Method named " + meth + "."), meth, anno); // depends on control dependency: [if], data = [none] } // check if the method is reflected executable if (!BenchmarkMethod.isReflectedExecutable(meth, anno)) { return new PerfidixMethodCheckException(new IllegalAccessException("Method to execute " + meth + " is not reflected executable."), meth, anno); // depends on control dependency: [if], data = [none] } } return null; } }
// Returns the active part of the active page of the active workbench window, or null if any link
// in that chain is absent.
public class class_name { IWorkbenchPart getActivePart() { final IWorkbenchWindow activeWindow = getWorkbench().getActiveWorkbenchWindow(); if (activeWindow != null) { final IWorkbenchPage activePage = activeWindow.getActivePage(); if (activePage != null) { return activePage.getActivePart(); } } return null; } }
// Annotated twin of the record above: null-safe window -> page -> part navigation.
// NOTE(review): the inline "depends on control dependency" marker is a machine-generated label.
public class class_name { IWorkbenchPart getActivePart() { final IWorkbenchWindow activeWindow = getWorkbench().getActiveWorkbenchWindow(); if (activeWindow != null) { final IWorkbenchPage activePage = activeWindow.getActivePage(); if (activePage != null) { return activePage.getActivePart(); // depends on control dependency: [if], data = [none] } } return null; } }
// Extracts the trailing date-partition substring (sized by datePartitionPattern) from the path
// relative to the dataset root — so both "daily/2016/03/02" and "2016/03/02" yield "2016/03/02" —
// and parses it into a timestamped version. A parse failure is logged as a warning and the
// candidate is ignored by returning null.
public class class_name { @Override public TimestampedDatasetVersion getDatasetVersion(Path pathRelativeToDatasetRoot, FileStatus versionFileStatus) { String dateTimeString = null; try { // pathRelativeToDatasetRoot can be daily/2016/03/02 or 2016/03/02. In either case we need to pick 2016/03/02 as version dateTimeString = StringUtils.substring(pathRelativeToDatasetRoot.toString(), pathRelativeToDatasetRoot.toString().length() - this.datePartitionPattern.length()); return new FileStatusTimestampedDatasetVersion(this.formatter.parseDateTime(dateTimeString), versionFileStatus); } catch (IllegalArgumentException exception) { LOGGER.warn(String.format( "Candidate dataset version with pathRelativeToDatasetRoot: %s has inferred dataTimeString:%s. " + "It does not match expected datetime pattern %s. Ignoring.", pathRelativeToDatasetRoot, dateTimeString, this.datePartitionPattern)); return null; } } }
// Annotated twin of the record above: date-partition extraction with parse-failure fallback.
// NOTE(review): the inline "depends on control dependency" markers are machine-generated labels.
public class class_name { @Override public TimestampedDatasetVersion getDatasetVersion(Path pathRelativeToDatasetRoot, FileStatus versionFileStatus) { String dateTimeString = null; try { // pathRelativeToDatasetRoot can be daily/2016/03/02 or 2016/03/02. In either case we need to pick 2016/03/02 as version dateTimeString = StringUtils.substring(pathRelativeToDatasetRoot.toString(), pathRelativeToDatasetRoot.toString().length() - this.datePartitionPattern.length()); // depends on control dependency: [try], data = [none] return new FileStatusTimestampedDatasetVersion(this.formatter.parseDateTime(dateTimeString), versionFileStatus); // depends on control dependency: [try], data = [none] } catch (IllegalArgumentException exception) { LOGGER.warn(String.format( "Candidate dataset version with pathRelativeToDatasetRoot: %s has inferred dataTimeString:%s. " + "It does not match expected datetime pattern %s. Ignoring.", pathRelativeToDatasetRoot, dateTimeString, this.datePartitionPattern)); return null; } // depends on control dependency: [catch], data = [none] } }
// Starts all download tasks bound to the listener, serially or in parallel per isSerial; a null
// listener is logged as a warning and rejected with false.
public class class_name { public boolean start(final FileDownloadListener listener, final boolean isSerial) { if (listener == null) { FileDownloadLog.w(this, "Tasks with the listener can't start, because the listener " + "provided is null: [null, %B]", isSerial); return false; } return isSerial ? getQueuesHandler().startQueueSerial(listener) : getQueuesHandler().startQueueParallel(listener); } }
// Annotated twin of the record above: null-listener guard then serial/parallel queue start.
// NOTE(review): the inline "depends on control dependency" markers are machine-generated labels.
public class class_name { public boolean start(final FileDownloadListener listener, final boolean isSerial) { if (listener == null) { FileDownloadLog.w(this, "Tasks with the listener can't start, because the listener " + "provided is null: [null, %B]", isSerial); // depends on control dependency: [if], data = [none] return false; // depends on control dependency: [if], data = [none] } return isSerial ? getQueuesHandler().startQueueSerial(listener) : getQueuesHandler().startQueueParallel(listener); } }
// Factory for StorageProvider implementations keyed on the credentials' provider type
// (S3, Glacier with options; Chronopolis without); unsupported types throw RuntimeException.
// NOTE(review): a switch over the enum would be clearer than this equals() chain.
public class class_name { public StorageProvider create(StorageProviderCredentials credentials) { StorageProviderType storageProviderType = credentials.getProviderType(); if (storageProviderType.equals(StorageProviderType.AMAZON_S3)) { return new S3StorageProvider(credentials.getAccessKey(), credentials.getSecretKey(), credentials.getOptions()); } else if (storageProviderType.equals(StorageProviderType.AMAZON_GLACIER)) { return new GlacierStorageProvider(credentials.getAccessKey(), credentials.getSecretKey(), credentials.getOptions()); } else if (storageProviderType.equals(StorageProviderType.CHRONOPOLIS)) { return new ChronopolisStorageProvider(credentials.getAccessKey(), credentials.getSecretKey()); } throw new RuntimeException(storageProviderType + " is not a supported storage provider type"); } }
// Annotated twin of the record above: provider-type dispatch to concrete StorageProvider.
// NOTE(review): the inline "depends on control dependency" markers are machine-generated labels.
public class class_name { public StorageProvider create(StorageProviderCredentials credentials) { StorageProviderType storageProviderType = credentials.getProviderType(); if (storageProviderType.equals(StorageProviderType.AMAZON_S3)) { return new S3StorageProvider(credentials.getAccessKey(), credentials.getSecretKey(), credentials.getOptions()); // depends on control dependency: [if], data = [none] } else if (storageProviderType.equals(StorageProviderType.AMAZON_GLACIER)) { return new GlacierStorageProvider(credentials.getAccessKey(), credentials.getSecretKey(), credentials.getOptions()); // depends on control dependency: [if], data = [none] } else if (storageProviderType.equals(StorageProviderType.CHRONOPOLIS)) { return new ChronopolisStorageProvider(credentials.getAccessKey(), credentials.getSecretKey()); // depends on control dependency: [if], data = [none] } throw new RuntimeException(storageProviderType + " is not a supported storage provider type"); } }
// Shallow-copies every entry from the source flash scope into the destination; destination keys
// with the same name are overwritten.
public class class_name { protected void copyFlashScope(FlashScope source, FlashScope destination) { for (Map.Entry<String,Object> entry: source.entrySet()) { destination.put(entry.getKey(), entry.getValue()); } } }
// Annotated twin of the record above: entry-by-entry flash-scope copy.
// NOTE(review): the inline "depends on control dependency" marker is a machine-generated label.
public class class_name { protected void copyFlashScope(FlashScope source, FlashScope destination) { for (Map.Entry<String,Object> entry: source.entrySet()) { destination.put(entry.getKey(), entry.getValue()); // depends on control dependency: [for], data = [entry] } } }
// Appends the given options to this composite (a null varargs array is ignored) and returns this
// for fluent chaining.
public class class_name { public DefaultCompositeOption add(final Option... _options) { if (_options != null) { this.options.addAll(Arrays.asList(_options)); } return this; } }
// Annotated twin of the record above: null-guarded varargs append with fluent return.
// NOTE(review): the inline "depends on control dependency" marker is a machine-generated label.
public class class_name { public DefaultCompositeOption add(final Option... _options) { if (_options != null) { this.options.addAll(Arrays.asList(_options)); // depends on control dependency: [if], data = [(_options] } return this; } }
// Indexes annotations found on the class file's methods: method-level visible/invisible
// annotations when scanMethodAnnotations is set, and per-parameter annotations when
// scanParameterAnnotations is set, all recorded via populate(annotations, className).
// NOTE(review): the list is declared List<ClassFile> yet its elements are cast to MethodInfo —
// getMethods() presumably returns a raw List of MethodInfo; confirm and fix the generic type.
public class class_name { protected void scanMethods(ClassFile cf) { List<ClassFile> methods = cf.getMethods(); if (methods == null) return; for (Object obj : methods) { MethodInfo method = (MethodInfo) obj; if (scanMethodAnnotations) { AnnotationsAttribute visible = (AnnotationsAttribute) method .getAttribute(AnnotationsAttribute.visibleTag); AnnotationsAttribute invisible = (AnnotationsAttribute) method .getAttribute(AnnotationsAttribute.invisibleTag); if (visible != null) populate(visible.getAnnotations(), cf.getName()); if (invisible != null) populate(invisible.getAnnotations(), cf.getName()); } if (scanParameterAnnotations) { ParameterAnnotationsAttribute paramsVisible = (ParameterAnnotationsAttribute) method .getAttribute(ParameterAnnotationsAttribute.visibleTag); ParameterAnnotationsAttribute paramsInvisible = (ParameterAnnotationsAttribute) method .getAttribute(ParameterAnnotationsAttribute.invisibleTag); if (paramsVisible != null && paramsVisible.getAnnotations() != null) { for (Annotation[] anns : paramsVisible.getAnnotations()) { populate(anns, cf.getName()); } } if (paramsInvisible != null && paramsInvisible.getAnnotations() != null) { for (Annotation[] anns : paramsInvisible.getAnnotations()) { populate(anns, cf.getName()); } } } } } }
// Annotated twin of the record above: method- and parameter-level annotation indexing.
// NOTE(review): the inline "depends on control dependency" markers are machine-generated labels.
public class class_name { protected void scanMethods(ClassFile cf) { List<ClassFile> methods = cf.getMethods(); if (methods == null) return; for (Object obj : methods) { MethodInfo method = (MethodInfo) obj; if (scanMethodAnnotations) { AnnotationsAttribute visible = (AnnotationsAttribute) method .getAttribute(AnnotationsAttribute.visibleTag); AnnotationsAttribute invisible = (AnnotationsAttribute) method .getAttribute(AnnotationsAttribute.invisibleTag); if (visible != null) populate(visible.getAnnotations(), cf.getName()); if (invisible != null) populate(invisible.getAnnotations(), cf.getName()); } if (scanParameterAnnotations) { ParameterAnnotationsAttribute paramsVisible = (ParameterAnnotationsAttribute) method .getAttribute(ParameterAnnotationsAttribute.visibleTag); ParameterAnnotationsAttribute paramsInvisible = (ParameterAnnotationsAttribute) method .getAttribute(ParameterAnnotationsAttribute.invisibleTag); if (paramsVisible != null && paramsVisible.getAnnotations() != null) { for (Annotation[] anns : paramsVisible.getAnnotations()) { populate(anns, cf.getName()); // depends on control dependency: [for], data = [anns] } } if (paramsInvisible != null && paramsInvisible.getAnnotations() != null) { for (Annotation[] anns : paramsInvisible.getAnnotations()) { populate(anns, cf.getName()); // depends on control dependency: [for], data = [anns] } } } } } }
// Builds a random MIME multipart boundary of 30-40 bytes (nextInt(11) yields 0-10) drawn from the
// allowed boundary character set. Uniqueness, not cryptographic strength, is what matters here.
public class class_name { private static byte[] generateMultipartBoundary() { final Random rand = new Random(); final int c11 = 11, c30 = 30; final byte[] bytes = new byte[rand.nextInt(c11) + c30]; // a random size from 30 to 40 final byte[] chars = getMultipartChars(); for (int i = 0; i < bytes.length; i++) { bytes[i] = chars[rand.nextInt(chars.length)]; } return bytes; } }
// Annotated twin of the record above: random 30-40 byte multipart boundary generation.
// NOTE(review): the inline "depends on control dependency" marker is a machine-generated label.
public class class_name { private static byte[] generateMultipartBoundary() { final Random rand = new Random(); final int c11 = 11, c30 = 30; final byte[] bytes = new byte[rand.nextInt(c11) + c30]; // a random size from 30 to 40 final byte[] chars = getMultipartChars(); for (int i = 0; i < bytes.length; i++) { bytes[i] = chars[rand.nextInt(chars.length)]; // depends on control dependency: [for], data = [i] } return bytes; } }
// Starts the directory file-alteration monitor. An already-started monitor (signalled by
// IllegalStateException) is logged and tolerated; any other failure is rethrown as an unchecked
// RuntimeException that preserves the original cause.
public class class_name { public void startMonitor() { logger.info("Starting Directory Update Monitor"); try { monitor.start(); } catch (IllegalStateException e) { logger.info("File alteration monitor is already started: " + e.getMessage()); } catch (Exception e) { throw new RuntimeException(e.getMessage(), e); } } }
// Annotated twin of the record above: monitor start with tolerated already-started case.
// NOTE(review): the inline "depends on control dependency" markers are machine-generated labels.
public class class_name { public void startMonitor() { logger.info("Starting Directory Update Monitor"); try { monitor.start(); // depends on control dependency: [try], data = [none] } catch (IllegalStateException e) { logger.info("File alteration monitor is already started: " + e.getMessage()); } catch (Exception e) { // depends on control dependency: [catch], data = [none] throw new RuntimeException(e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
// Returns the feature scope for the given anchor. For RECEIVER anchors the receiver scope is
// memoized per feature call: a repeat request for the same call returns the cached scope,
// otherwise a fresh scope is built and cached. Non-RECEIVER anchors delegate to
// getFeatureScope(anchor).
public class class_name { public IScope getFeatureScope(/* @Nullable */ XAbstractFeatureCall currentFeatureCall, Anchor anchor) { if (anchor == Anchor.RECEIVER) { if (currentFeatureCall == requestedFeatureCall && cachedReceiverFeatureScope != null) { return cachedReceiverFeatureScope; } IScope result = createFeatureCallScopeForReceiver(currentFeatureCall); this.requestedFeatureCall = currentFeatureCall; return cachedReceiverFeatureScope = result; } return getFeatureScope(anchor); } }
// Annotated twin of the record above: receiver-scope memoization keyed on the feature call.
// NOTE(review): the inline "depends on control dependency" markers are machine-generated labels.
public class class_name { public IScope getFeatureScope(/* @Nullable */ XAbstractFeatureCall currentFeatureCall, Anchor anchor) { if (anchor == Anchor.RECEIVER) { if (currentFeatureCall == requestedFeatureCall && cachedReceiverFeatureScope != null) { return cachedReceiverFeatureScope; // depends on control dependency: [if], data = [none] } IScope result = createFeatureCallScopeForReceiver(currentFeatureCall); this.requestedFeatureCall = currentFeatureCall; // depends on control dependency: [if], data = [none] return cachedReceiverFeatureScope = result; // depends on control dependency: [if], data = [none] } return getFeatureScope(anchor); } }
// Canonicalizes the path for use as a lookup key; on any failure the raw path is returned as a
// deliberate best-effort fallback (do not "fix" the broad catch — callers rely on never throwing).
public class class_name { private static String filePathToKey(String path) { try { return FileUtil.getDefaultInstance().canonicalPath(path); } catch (Exception e) { return path; } } }
// Annotated twin of the record above: best-effort path canonicalization with raw-path fallback.
// NOTE(review): the inline "depends on control dependency" markers are machine-generated labels.
public class class_name { private static String filePathToKey(String path) { try { return FileUtil.getDefaultInstance().canonicalPath(path); // depends on control dependency: [try], data = [none] } catch (Exception e) { return path; } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Starts the background high-resolution timer hack thread if it is not
     * already running. Synchronized so concurrent callers cannot race the
     * null check.
     *
     * <p>BUG FIX: the guard was {@code if (null != thread)}, which is
     * inverted — starting from the initial {@code null} state the worker was
     * never created, so {@code enable()} could never actually enable the
     * timer (and had the field ever been non-null, the old thread would have
     * been leaked by the overwrite). The thread is now created only when none
     * exists yet.
     */
    public static synchronized void enable() {
        if (null == thread) {
            thread = new Thread(HighResolutionTimer::run);
            thread.setDaemon(true); // must not keep the JVM alive on its own
            thread.setName("high-resolution-timer-hack");
            thread.start();
        }
    }
}
public class class_name { public static synchronized void enable() { if (null != thread) { thread = new Thread(HighResolutionTimer::run); // depends on control dependency: [if], data = [none] thread.setDaemon(true); // depends on control dependency: [if], data = [none] thread.setName("high-resolution-timer-hack"); // depends on control dependency: [if], data = [none] thread.start(); // depends on control dependency: [if], data = [none] } } }
// True iff the role set is non-null and contains the given role; a missing role collection means
// the user has no roles at all.
public class class_name { @Override public boolean isUserInRole(String role) { if (userRoles == null) { return false; } return userRoles.contains(role); } }
// Annotated twin of the record above: null-safe role membership check.
// NOTE(review): the inline "depends on control dependency" marker is a machine-generated label.
public class class_name { @Override public boolean isUserInRole(String role) { if (userRoles == null) { return false; // depends on control dependency: [if], data = [none] } return userRoles.contains(role); } }
// Returns a new ScoredValue whose score is transformed by the (non-null) mapper when a value is
// present; an empty ScoredValue is returned unchanged (this).
public class class_name { @SuppressWarnings("unchecked") public ScoredValue<V> mapScore(Function<? super Number, ? extends Number> mapper) { LettuceAssert.notNull(mapper, "Mapper function must not be null"); if (hasValue()) { return new ScoredValue<V>(mapper.apply(score).doubleValue(), getValue()); } return this; } }
// Annotated twin of the record above: score transformation guarded by hasValue().
// NOTE(review): the inline "depends on control dependency" marker is a machine-generated label.
public class class_name { @SuppressWarnings("unchecked") public ScoredValue<V> mapScore(Function<? super Number, ? extends Number> mapper) { LettuceAssert.notNull(mapper, "Mapper function must not be null"); if (hasValue()) { return new ScoredValue<V>(mapper.apply(score).doubleValue(), getValue()); // depends on control dependency: [if], data = [none] } return this; } }
// Walks the distinct query stack (skipping the first element via count > 1); for each non-edge
// table tree flagged emit, prints the parent's edge id columns — edge tables are skipped because
// their edge ids were already printed.
public class class_name { private static void constructEmitFromClause(LinkedList<SchemaTableTree> distinctQueryStack, ColumnList cols) { int count = 1; for (SchemaTableTree schemaTableTree : distinctQueryStack) { if (count > 1) { if (!schemaTableTree.getSchemaTable().isEdgeTable() && schemaTableTree.isEmit()) { //if the VertexStep is for an edge table there is no need to print edge ids as its already printed. printEdgeId(schemaTableTree.parent, cols); } } count++; } } }
// Annotated twin of the record above: emit-clause edge-id printing for non-edge tables.
// NOTE(review): the inline "depends on control dependency" markers are machine-generated labels.
public class class_name { private static void constructEmitFromClause(LinkedList<SchemaTableTree> distinctQueryStack, ColumnList cols) { int count = 1; for (SchemaTableTree schemaTableTree : distinctQueryStack) { if (count > 1) { if (!schemaTableTree.getSchemaTable().isEdgeTable() && schemaTableTree.isEmit()) { //if the VertexStep is for an edge table there is no need to print edge ids as its already printed. printEdgeId(schemaTableTree.parent, cols); // depends on control dependency: [if], data = [none] } } count++; // depends on control dependency: [for], data = [none] } } }
// Lazily creates a default ZooKeeperServer rooted at <java.io.tmpdir>/zookeeper (same dir for data
// and log) with a 2000ms tick time; IOException is wrapped in CitrusRuntimeException.
// NOTE(review): the lazy init is not thread-safe — concurrent callers can race the null check and
// construct two servers; confirm this accessor is only used single-threaded.
public class class_name { public ZooKeeperServer getZooKeeperServer() { if (zooKeeperServer == null) { String dataDirectory = System.getProperty("java.io.tmpdir"); File dir = new File(dataDirectory, "zookeeper").getAbsoluteFile(); try { zooKeeperServer = new ZooKeeperServer(dir, dir, 2000); } catch (IOException e) { throw new CitrusRuntimeException("Failed to create default zookeeper server", e); } } return zooKeeperServer; } }
// Annotated twin of the record above: lazy default ZooKeeperServer construction under tmpdir.
// NOTE(review): the inline "depends on control dependency" markers are machine-generated labels.
public class class_name { public ZooKeeperServer getZooKeeperServer() { if (zooKeeperServer == null) { String dataDirectory = System.getProperty("java.io.tmpdir"); File dir = new File(dataDirectory, "zookeeper").getAbsoluteFile(); try { zooKeeperServer = new ZooKeeperServer(dir, dir, 2000); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw new CitrusRuntimeException("Failed to create default zookeeper server", e); } // depends on control dependency: [catch], data = [none] } return zooKeeperServer; } }
// Computes (and caches) the absolute day number on which the given Hebrew year begins: derives the
// molad from the month count (months * MONTH_FRACT + BAHARAD), then applies the classical
// postponement rules spelled out in the inline comments (Sun/Wed/Fri pushed one day; the Tuesday
// 15h204p non-leap rule avoiding 356-day years; the Monday 21h589p after-leap rule avoiding
// 382-day years). Results are memoized in 'cache' keyed by year.
public class class_name { private static long startOfYear(int year) { long day = cache.get(year); if (day == CalendarCache.EMPTY) { int months = (235 * year - 234) / 19; // # of months before year long frac = months * MONTH_FRACT + BAHARAD; // Fractional part of day # day = months * 29 + (frac / DAY_PARTS); // Whole # part of calculation frac = frac % DAY_PARTS; // Time of day int wd = (int)(day % 7); // Day of week (0 == Monday) if (wd == 2 || wd == 4 || wd == 6) { // If the 1st is on Sun, Wed, or Fri, postpone to the next day day += 1; wd = (int)(day % 7); } if (wd == 1 && frac > 15*HOUR_PARTS+204 && !isLeapYear(year) ) { // If the new moon falls after 3:11:20am (15h204p from the previous noon) // on a Tuesday and it is not a leap year, postpone by 2 days. // This prevents 356-day years. day += 2; } else if (wd == 0 && frac > 21*HOUR_PARTS+589 && isLeapYear(year-1) ) { // If the new moon falls after 9:32:43 1/3am (21h589p from yesterday noon) // on a Monday and *last* year was a leap year, postpone by 1 day. // Prevents 382-day years. day += 1; } cache.put(year, day); } return day; } }
// Annotated twin of the record above: cached Hebrew-calendar year-start with postponement rules.
// NOTE(review): the inline "depends on control dependency" markers are machine-generated labels.
public class class_name { private static long startOfYear(int year) { long day = cache.get(year); if (day == CalendarCache.EMPTY) { int months = (235 * year - 234) / 19; // # of months before year long frac = months * MONTH_FRACT + BAHARAD; // Fractional part of day # day = months * 29 + (frac / DAY_PARTS); // Whole # part of calculation // depends on control dependency: [if], data = [none] frac = frac % DAY_PARTS; // Time of day // depends on control dependency: [if], data = [none] int wd = (int)(day % 7); // Day of week (0 == Monday) if (wd == 2 || wd == 4 || wd == 6) { // If the 1st is on Sun, Wed, or Fri, postpone to the next day day += 1; // depends on control dependency: [if], data = [none] wd = (int)(day % 7); // depends on control dependency: [if], data = [none] } if (wd == 1 && frac > 15*HOUR_PARTS+204 && !isLeapYear(year) ) { // If the new moon falls after 3:11:20am (15h204p from the previous noon) // on a Tuesday and it is not a leap year, postpone by 2 days. // This prevents 356-day years. day += 2; // depends on control dependency: [if], data = [none] } else if (wd == 0 && frac > 21*HOUR_PARTS+589 && isLeapYear(year-1) ) { // If the new moon falls after 9:32:43 1/3am (21h589p from yesterday noon) // on a Monday and *last* year was a leap year, postpone by 1 day. // Prevents 382-day years. day += 1; // depends on control dependency: [if], data = [none] } cache.put(year, day); // depends on control dependency: [if], data = [none] } return day; } }
// When cld has a super-class descriptor, creates a fresh alias for the super class and a 1:1 join
// from the current alias's super-reference FK fields to the super class's PK fields, registers the
// join on the base alias for 'name', and recurses up the inheritance hierarchy.
public class class_name { protected void buildSuperJoinTree(TableAlias left, ClassDescriptor cld, String name, boolean useOuterJoin) { ClassDescriptor superCld = cld.getSuperClassDescriptor(); if (superCld != null) { SuperReferenceDescriptor superRef = cld.getSuperReference(); FieldDescriptor[] leftFields = superRef.getForeignKeyFieldDescriptors(cld); TableAlias base_alias = getTableAliasForPath(name, null, null); String aliasName = String.valueOf(getAliasChar()) + m_aliasCount++; TableAlias right = new TableAlias(superCld, aliasName, useOuterJoin, null); Join join1to1 = new Join(left, leftFields, right, superCld.getPkFields(), useOuterJoin, "superClass"); base_alias.addJoin(join1to1); buildSuperJoinTree(right, superCld, name, useOuterJoin); } } }
// Annotated twin of the record above: recursive super-class join-tree construction.
// NOTE(review): the inline "depends on control dependency" markers are machine-generated labels.
public class class_name { protected void buildSuperJoinTree(TableAlias left, ClassDescriptor cld, String name, boolean useOuterJoin) { ClassDescriptor superCld = cld.getSuperClassDescriptor(); if (superCld != null) { SuperReferenceDescriptor superRef = cld.getSuperReference(); FieldDescriptor[] leftFields = superRef.getForeignKeyFieldDescriptors(cld); TableAlias base_alias = getTableAliasForPath(name, null, null); String aliasName = String.valueOf(getAliasChar()) + m_aliasCount++; TableAlias right = new TableAlias(superCld, aliasName, useOuterJoin, null); Join join1to1 = new Join(left, leftFields, right, superCld.getPkFields(), useOuterJoin, "superClass"); base_alias.addJoin(join1to1); // depends on control dependency: [if], data = [none] buildSuperJoinTree(right, superCld, name, useOuterJoin); // depends on control dependency: [if], data = [none] } } }
public class class_name { public int putRawFieldData(Convert field) { String strKey = this.getFullKey(field.getFieldName()); Class<?> classData = String.class; if (field.getField() != null) classData = this.getMessage().getNativeClassType(field.getField().getDataClass()); Object objValue = field.getData(); try { objValue = DataConverters.convertObjectToDatatype(objValue, classData, null); // I do this just to be careful. } catch (Exception ex) { objValue = null; } this.getMessage().putNative(strKey, objValue); return Constant.NORMAL_RETURN; } }
public class class_name {
    /**
     * Converts a field's data to its native type and stores it in the
     * message under the field's full key; stores null if conversion fails.
     *
     * @param field the field whose value should be stored
     * @return {@code Constant.NORMAL_RETURN} always
     */
    public int putRawFieldData(Convert field) {
        String strKey = this.getFullKey(field.getFieldName());
        // Default to String when the field carries no type information.
        Class<?> classData = String.class;
        if (field.getField() != null)
            classData = this.getMessage().getNativeClassType(field.getField().getDataClass());
        Object objValue = field.getData();
        try {
            objValue = DataConverters.convertObjectToDatatype(objValue, classData, null); // I do this just to be careful. // depends on control dependency: [try], data = [none]
        } catch (Exception ex) {
            // Best effort: unconvertible values are stored as null.
            objValue = null;
        } // depends on control dependency: [catch], data = [none]
        this.getMessage().putNative(strKey, objValue);
        return Constant.NORMAL_RETURN;
    }
}
public class class_name {
    /**
     * Wraps the request's binary content, if present, in a request entity
     * with content type "binary/octet-stream".
     *
     * @param httpRequest the request whose binary body should be wrapped
     * @return the entity, or null when the request has no binary content
     */
    protected RequestEntity createBinaryRequestContent(HTTPRequest httpRequest) {
        byte[] body = httpRequest.getContentAsBinary();
        if (body == null) {
            // No binary payload on this request.
            return null;
        }
        return new ByteArrayRequestEntity(body, "binary/octet-stream");
    }
}
public class class_name {
    /**
     * Wraps the request's binary content, if present, in a request entity
     * with content type "binary/octet-stream".
     *
     * @param httpRequest the request whose binary body should be wrapped
     * @return the entity, or null when the request has no binary content
     */
    protected RequestEntity createBinaryRequestContent(HTTPRequest httpRequest) {
        RequestEntity requestEntity=null;
        byte[] contentBinary=httpRequest.getContentAsBinary();
        if(contentBinary!=null) {
            requestEntity=new ByteArrayRequestEntity(contentBinary,"binary/octet-stream"); // depends on control dependency: [if], data = [(contentBinary]
        }
        return requestEntity;
    }
}
public class class_name { @SuppressWarnings("unchecked") @Override public List getPermissions() { // TODO Auto-generated method stub if (this.permissions != null) { return this.permissions.getList(); } else return Collections.emptyList(); } }
public class class_name {
    /**
     * Returns the contained permission list, or an empty list when no
     * permissions container has been set.
     */
    @SuppressWarnings("unchecked")
    @Override
    public List getPermissions() {
        // TODO Auto-generated method stub
        if (this.permissions != null) {
            return this.permissions.getList(); // depends on control dependency: [if], data = [none]
        } else
            return Collections.emptyList();
    }
}
public class class_name {
    /**
     * Paints every visible component of one layer of {@code pane} that
     * intersects the dirty region into the supplied graphics context.
     *
     * @param g the graphics to paint into; translated per component and
     *        restored afterwards
     * @param bounds the dirty region, in the root component's coordinates
     * @param pane the layered pane whose layer is being rendered
     * @param clipped one-element flag recording whether the clip has already
     *        been installed on {@code g} during this render pass
     * @param layer the layer whose components should be rendered
     */
    protected void renderLayer (Graphics2D g, Rectangle bounds, JLayeredPane pane, boolean[] clipped, Integer layer)
    {
        // stop now if there are no components in that layer
        int ccount = pane.getComponentCountInLayer(layer.intValue());
        if (ccount == 0) {
            return;
        }

        // render them up
        Component[] comps = pane.getComponentsInLayer(layer.intValue());
        for (int ii = 0; ii < ccount; ii++) {
            Component comp = comps[ii];
            // NOTE(review): SafeLayerComponents are skipped here — presumably
            // rendered through another path; confirm against the caller.
            if (!comp.isVisible() || comp instanceof SafeLayerComponent) {
                continue;
            }

            // if this overlay does not intersect the component we just rendered, we don't need to
            // repaint it
            Rectangle compBounds = new Rectangle(0, 0, comp.getWidth(), comp.getHeight());
            getRoot(comp, compBounds);
            if (!compBounds.intersects(bounds)) {
                continue;
            }

            // if the clipping region has not yet been set during this render pass, the time has
            // come to do so
            if (!clipped[0]) {
                g.setClip(bounds);
                clipped[0] = true;
            }

            // translate into the components coordinate system and render
            g.translate(compBounds.x, compBounds.y);
            try {
                comp.paint(g);
            } catch (Exception e) {
                // A broken component must not abort rendering of the rest of the layer.
                log.warning("Component choked while rendering.", e);
            }
            // Undo the translation so the next component starts from root coordinates.
            g.translate(-compBounds.x, -compBounds.y);
        }
    }
}
public class class_name {
    /**
     * Paints every visible component of one layer of {@code pane} that
     * intersects the dirty region into the supplied graphics context.
     *
     * @param g the graphics to paint into; translated per component and restored
     * @param bounds the dirty region, in the root component's coordinates
     * @param pane the layered pane whose layer is being rendered
     * @param clipped one-element flag: true once the clip is installed on {@code g}
     * @param layer the layer whose components should be rendered
     */
    protected void renderLayer (Graphics2D g, Rectangle bounds, JLayeredPane pane, boolean[] clipped, Integer layer)
    {
        // stop now if there are no components in that layer
        int ccount = pane.getComponentCountInLayer(layer.intValue());
        if (ccount == 0) {
            return; // depends on control dependency: [if], data = [none]
        }

        // render them up
        Component[] comps = pane.getComponentsInLayer(layer.intValue());
        for (int ii = 0; ii < ccount; ii++) {
            Component comp = comps[ii];
            if (!comp.isVisible() || comp instanceof SafeLayerComponent) {
                continue;
            }

            // if this overlay does not intersect the component we just rendered, we don't need to
            // repaint it
            Rectangle compBounds = new Rectangle(0, 0, comp.getWidth(), comp.getHeight());
            getRoot(comp, compBounds); // depends on control dependency: [for], data = [none]
            if (!compBounds.intersects(bounds)) {
                continue;
            }

            // if the clipping region has not yet been set during this render pass, the time has
            // come to do so
            if (!clipped[0]) {
                g.setClip(bounds); // depends on control dependency: [if], data = [none]
                clipped[0] = true; // depends on control dependency: [if], data = [none]
            }

            // translate into the components coordinate system and render
            g.translate(compBounds.x, compBounds.y); // depends on control dependency: [for], data = [none]
            try {
                comp.paint(g); // depends on control dependency: [try], data = [none]
            } catch (Exception e) {
                // A broken component must not abort rendering of the rest of the layer.
                log.warning("Component choked while rendering.", e);
            } // depends on control dependency: [catch], data = [none]
            g.translate(-compBounds.x, -compBounds.y); // depends on control dependency: [for], data = [none]
        }
    }
}
public class class_name {
    /**
     * Fluent setter: appends the given readers, lazily creating the backing
     * list sized to the incoming values, and returns this object for chaining.
     *
     * @param readers reader endpoints to append
     * @return this member, for call chaining
     */
    public GlobalClusterMember withReaders(String... readers) {
        if (this.readers == null) {
            setReaders(new com.amazonaws.internal.SdkInternalList<String>(readers.length));
        }
        for (int idx = 0; idx < readers.length; idx++) {
            this.readers.add(readers[idx]);
        }
        return this;
    }
}
public class class_name {
    /**
     * Fluent setter: appends the given readers, lazily creating the backing
     * list on first use, and returns this object for chaining.
     *
     * @param readers reader endpoints to append
     * @return this member, for call chaining
     */
    public GlobalClusterMember withReaders(String... readers) {
        if (this.readers == null) {
            setReaders(new com.amazonaws.internal.SdkInternalList<String>(readers.length)); // depends on control dependency: [if], data = [none]
        }
        for (String ele : readers) {
            this.readers.add(ele); // depends on control dependency: [for], data = [ele]
        }
        return this;
    }
}
public class class_name {
    /**
     * Parses one basic JsDoc type expression beginning at {@code token}:
     * '*' (the all type), '{' (a record type), '(' (a union type), or a
     * string token (function / null / undefined / typeof / a type name).
     * Any other token is pushed back and reported as a syntax warning.
     */
    private Node parseBasicTypeExpression(JsDocToken token) {
        if (token == JsDocToken.STAR) {
            return newNode(Token.STAR);
        }
        if (token == JsDocToken.LEFT_CURLY) {
            skipEOLs();
            return parseRecordType(next());
        }
        if (token == JsDocToken.LEFT_PAREN) {
            skipEOLs();
            return parseUnionType(next());
        }
        if (token == JsDocToken.STRING) {
            String name = stream.getString();
            if ("function".equals(name)) {
                skipEOLs();
                return parseFunctionType(next());
            }
            if ("null".equals(name) || "undefined".equals(name)) {
                return newStringNode(name);
            }
            if ("typeof".equals(name)) {
                skipEOLs();
                return parseTypeofType(next());
            }
            return parseTypeName(token);
        }
        // Unrecognized token: push it back and emit a generic warning node.
        restoreLookAhead(token);
        return reportGenericTypeSyntaxWarning();
    }
}
public class class_name {
    /**
     * Parses one basic JsDoc type expression beginning at {@code token}:
     * '*' (the all type), '{' (a record type), '(' (a union type), or a
     * string token (function / null / undefined / typeof / a type name).
     * Any other token is pushed back and reported as a syntax warning.
     */
    private Node parseBasicTypeExpression(JsDocToken token) {
        if (token == JsDocToken.STAR) {
            return newNode(Token.STAR); // depends on control dependency: [if], data = [none]
        } else if (token == JsDocToken.LEFT_CURLY) {
            skipEOLs(); // depends on control dependency: [if], data = [none]
            return parseRecordType(next()); // depends on control dependency: [if], data = [none]
        } else if (token == JsDocToken.LEFT_PAREN) {
            skipEOLs(); // depends on control dependency: [if], data = [none]
            return parseUnionType(next()); // depends on control dependency: [if], data = [none]
        } else if (token == JsDocToken.STRING) {
            String string = stream.getString();
            switch (string) {
                case "function":
                    skipEOLs();
                    return parseFunctionType(next());
                case "null":
                case "undefined":
                    return newStringNode(string);
                case "typeof":
                    skipEOLs();
                    return parseTypeofType(next());
                default:
                    return parseTypeName(token);
            }
        }
        // Unrecognized token: push it back and emit a generic warning node.
        restoreLookAhead(token);
        return reportGenericTypeSyntaxWarning();
    }
}
public class class_name {
    /**
     * Returns the keys of the given JSONObject as a String array.
     *
     * @param jo the object whose keys are collected
     * @return the key names, or null when the object has no keys
     *         (the null-for-empty contract is preserved for callers)
     */
    public static String[] getNames(JSONObject jo) {
        int length = jo.length();
        if (length == 0) {
            return null;
        }
        String[] names = new String[length];
        int idx = 0;
        for (Iterator<String> it = jo.keys(); it.hasNext(); ) {
            names[idx++] = it.next();
        }
        return names;
    }
}
public class class_name {
    /**
     * Returns the keys of the given JSONObject as a String array.
     *
     * @param jo the object whose keys are collected
     * @return the key names, or null when the object has no keys
     */
    public static String[] getNames(JSONObject jo) {
        int length = jo.length();
        if (length == 0) {
            return null; // depends on control dependency: [if], data = [none]
        }
        Iterator<String> i = jo.keys();
        String[] names = new String[length];
        int j = 0;
        while (i.hasNext()) {
            names[j] = i.next(); // depends on control dependency: [while], data = [none]
            j += 1; // depends on control dependency: [while], data = [none]
        }
        return names;
    }
}
public class class_name {
    /**
     * Recomputes the symmetry transformations of a multiple alignment.
     * When {@code axes} is provided, each symmetry level's axis is refit by
     * superposing the atoms related by that level's repeat relations, and the
     * per-repeat transformations are written back into the first block set;
     * otherwise a plain core superposition is performed. Scores are updated
     * in both cases.
     *
     * @param axes the symmetry axes to refit, or null to fall back to a
     *        core superposition
     * @param msa the alignment to update in place
     * @throws StructureException on superposition failure
     */
    public static void updateSymmetryTransformation(SymmetryAxes axes, MultipleAlignment msa) throws StructureException {
        List<List<Integer>> block = msa.getBlocks().get(0).getAlignRes();
        int length = block.get(0).size();
        if (axes != null) {
            for (int level = 0; level < axes.getNumLevels(); level++) {
                // Calculate the aligned atom arrays to superimpose
                List<Atom> list1 = new ArrayList<Atom>();
                List<Atom> list2 = new ArrayList<Atom>();
                for (int firstRepeat : axes.getFirstRepeats(level)) {
                    Matrix4d transform = axes.getRepeatTransform(firstRepeat);
                    List<List<Integer>> relation = axes.getRepeatRelation(
                            level, firstRepeat);
                    for (int index = 0; index < relation.get(0).size(); index++) {
                        int p1 = relation.get(0).get(index);
                        int p2 = relation.get(1).get(index);
                        for (int k = 0; k < length; k++) {
                            Integer pos1 = block.get(p1).get(k);
                            Integer pos2 = block.get(p2).get(k);
                            // Only positions aligned in both repeats contribute.
                            if (pos1 != null && pos2 != null) {
                                // Clone before transforming so the originals stay intact.
                                Atom a = (Atom) msa.getAtomArrays().get(p1)[pos1]
                                        .clone();
                                Atom b = (Atom) msa.getAtomArrays().get(p2)[pos2]
                                        .clone();
                                Calc.transform(a, transform);
                                Calc.transform(b, transform);
                                list1.add(a);
                                list2.add(b);
                            }
                        }
                    }
                }
                Atom[] arr1 = list1.toArray(new Atom[list1.size()]);
                Atom[] arr2 = list2.toArray(new Atom[list2.size()]);
                // Calculate the new transformation information
                if (arr1.length > 0 && arr2.length > 0) {
                    Matrix4d axis = SuperPositions.superpose(
                            Calc.atomsToPoints(arr1), Calc.atomsToPoints(arr2));
                    axes.updateAxis(level, axis);
                }
                // Get the transformations from the SymmetryAxes
                List<Matrix4d> transformations = new ArrayList<Matrix4d>();
                for (int su = 0; su < msa.size(); su++) {
                    transformations.add(axes.getRepeatTransform(su));
                }
                msa.getBlockSet(0).setTransformations(transformations);
            }
        } else {
            // No axes given: fall back to an unconstrained core superposition.
            MultipleSuperimposer imposer = new CoreSuperimposer();
            imposer.superimpose(msa);
        }
        updateSymmetryScores(msa);
    }
}
public class class_name {
    /**
     * Recomputes the symmetry transformations of a multiple alignment.
     * When {@code axes} is provided, each symmetry level's axis is refit by
     * superposing the atoms related by that level's repeat relations, and the
     * per-repeat transformations are written back into the first block set;
     * otherwise a plain core superposition is performed.
     *
     * @param axes the symmetry axes to refit, or null for core superposition
     * @param msa the alignment to update in place
     * @throws StructureException on superposition failure
     */
    public static void updateSymmetryTransformation(SymmetryAxes axes, MultipleAlignment msa) throws StructureException {
        List<List<Integer>> block = msa.getBlocks().get(0).getAlignRes();
        int length = block.get(0).size();
        if (axes != null) {
            for (int level = 0; level < axes.getNumLevels(); level++) {
                // Calculate the aligned atom arrays to superimpose
                List<Atom> list1 = new ArrayList<Atom>();
                List<Atom> list2 = new ArrayList<Atom>();
                for (int firstRepeat : axes.getFirstRepeats(level)) {
                    Matrix4d transform = axes.getRepeatTransform(firstRepeat);
                    List<List<Integer>> relation = axes.getRepeatRelation(
                            level, firstRepeat);
                    for (int index = 0; index < relation.get(0).size(); index++) {
                        int p1 = relation.get(0).get(index);
                        int p2 = relation.get(1).get(index);
                        for (int k = 0; k < length; k++) {
                            Integer pos1 = block.get(p1).get(k);
                            Integer pos2 = block.get(p2).get(k);
                            // Only positions aligned in both repeats contribute.
                            if (pos1 != null && pos2 != null) {
                                Atom a = (Atom) msa.getAtomArrays().get(p1)[pos1]
                                        .clone();
                                Atom b = (Atom) msa.getAtomArrays().get(p2)[pos2]
                                        .clone();
                                Calc.transform(a, transform); // depends on control dependency: [if], data = [none]
                                Calc.transform(b, transform); // depends on control dependency: [if], data = [none]
                                list1.add(a); // depends on control dependency: [if], data = [none]
                                list2.add(b); // depends on control dependency: [if], data = [none]
                            }
                        }
                    }
                }
                Atom[] arr1 = list1.toArray(new Atom[list1.size()]);
                Atom[] arr2 = list2.toArray(new Atom[list2.size()]);
                // Calculate the new transformation information
                if (arr1.length > 0 && arr2.length > 0) {
                    Matrix4d axis = SuperPositions.superpose(
                            Calc.atomsToPoints(arr1), Calc.atomsToPoints(arr2));
                    axes.updateAxis(level, axis); // depends on control dependency: [if], data = [none]
                }
                // Get the transformations from the SymmetryAxes
                List<Matrix4d> transformations = new ArrayList<Matrix4d>();
                for (int su = 0; su < msa.size(); su++) {
                    transformations.add(axes.getRepeatTransform(su)); // depends on control dependency: [for], data = [su]
                }
                msa.getBlockSet(0).setTransformations(transformations);
            }
        } else {
            // No axes given: fall back to an unconstrained core superposition.
            MultipleSuperimposer imposer = new CoreSuperimposer();
            imposer.superimpose(msa);
        }
        updateSymmetryScores(msa);
    }
}
public class class_name {
    /**
     * Builds a synthetic data set: 10000 uniformly random points inside
     * {@code view}, plus deliberate stacks of duplicate points at the origin
     * and at +/-1.1-cell offsets (presumably to exercise clustering of
     * coincident markers — confirm with caller).
     *
     * The previous implementation spelled out all 35 duplicate adds by hand;
     * they are now generated by a small helper. The resulting list contents
     * and order are identical.
     *
     * @param view bounding box from which random points are drawn
     * @return the generated point list
     */
    private List<IGeoPoint> loadPoints(BoundingBox view) {
        // Presize for the random points plus the 35 fixed duplicates.
        List<IGeoPoint> pts = new ArrayList<IGeoPoint>(10035);
        for (int i = 0; i < 10000; i++) {
            pts.add(new GeoPoint(
                    (Math.random() * view.getLatitudeSpan()) + view.getLatSouth(),
                    (Math.random() * view.getLongitudeSpan()) + view.getLonWest()));
        }
        double off = 1.1d * cellSizeInDp;
        addCopies(pts, 14, 0d, 0d);      // stacked at the origin
        addCopies(pts, 7, off, off);     // (+lat, +lon) offset
        addCopies(pts, 7, -off, -off);   // (-lat, -lon) offset
        addCopies(pts, 4, -off, off);    // (-lat, +lon) offset
        addCopies(pts, 3, off, -off);    // (+lat, -lon) offset
        return pts;
    }

    /** Appends {@code count} identical GeoPoints at (lat, lon) to {@code pts}. */
    private static void addCopies(List<IGeoPoint> pts, int count, double lat, double lon) {
        for (int i = 0; i < count; i++) {
            pts.add(new GeoPoint(lat, lon));
        }
    }
}
public class class_name {
    /**
     * Builds a synthetic data set: 10000 uniformly random points inside
     * {@code view}, plus deliberate stacks of duplicate points at the origin
     * and at +/-1.1-cell offsets (presumably to exercise clustering of
     * coincident markers — confirm with caller).
     *
     * @param view bounding box from which random points are drawn
     * @return the generated point list
     */
    private List<IGeoPoint> loadPoints(BoundingBox view) {
        List<IGeoPoint> pts = new ArrayList<IGeoPoint>();
        for (int i = 0; i < 10000; i++) {
            pts.add(new GeoPoint((Math.random() * view.getLatitudeSpan()) + view.getLatSouth(),
                    (Math.random() * view.getLongitudeSpan()) + view.getLonWest())); // depends on control dependency: [for], data = [none]
        }
        // 14 duplicate points stacked at the origin
        pts.add(new GeoPoint(0d, 0d));
        pts.add(new GeoPoint(0d, 0d));
        pts.add(new GeoPoint(0d, 0d));
        pts.add(new GeoPoint(0d, 0d));
        pts.add(new GeoPoint(0d, 0d));
        pts.add(new GeoPoint(0d, 0d));
        pts.add(new GeoPoint(0d, 0d));
        pts.add(new GeoPoint(0d, 0d));
        pts.add(new GeoPoint(0d, 0d));
        pts.add(new GeoPoint(0d, 0d));
        pts.add(new GeoPoint(0d, 0d));
        pts.add(new GeoPoint(0d, 0d));
        pts.add(new GeoPoint(0d, 0d));
        pts.add(new GeoPoint(0d, 0d));
        // 7 duplicates at a (+lat, +lon) 1.1-cell offset
        pts.add(new GeoPoint(1.1d * cellSizeInDp, 1.1d * cellSizeInDp));
        pts.add(new GeoPoint(1.1d * cellSizeInDp, 1.1d * cellSizeInDp));
        pts.add(new GeoPoint(1.1d * cellSizeInDp, 1.1d * cellSizeInDp));
        pts.add(new GeoPoint(1.1d * cellSizeInDp, 1.1d * cellSizeInDp));
        pts.add(new GeoPoint(1.1d * cellSizeInDp, 1.1d * cellSizeInDp));
        pts.add(new GeoPoint(1.1d * cellSizeInDp, 1.1d * cellSizeInDp));
        pts.add(new GeoPoint(1.1d * cellSizeInDp, 1.1d * cellSizeInDp));
        // 7 duplicates at a (-lat, -lon) 1.1-cell offset
        pts.add(new GeoPoint(-1.1d * cellSizeInDp, -1.1d * cellSizeInDp));
        pts.add(new GeoPoint(-1.1d * cellSizeInDp, -1.1d * cellSizeInDp));
        pts.add(new GeoPoint(-1.1d * cellSizeInDp, -1.1d * cellSizeInDp));
        pts.add(new GeoPoint(-1.1d * cellSizeInDp, -1.1d * cellSizeInDp));
        pts.add(new GeoPoint(-1.1d * cellSizeInDp, -1.1d * cellSizeInDp));
        pts.add(new GeoPoint(-1.1d * cellSizeInDp, -1.1d * cellSizeInDp));
        pts.add(new GeoPoint(-1.1d * cellSizeInDp, -1.1d * cellSizeInDp));
        // 4 duplicates at a (-lat, +lon) 1.1-cell offset
        pts.add(new GeoPoint(-1.1d * cellSizeInDp, 1.1d * cellSizeInDp));
        pts.add(new GeoPoint(-1.1d * cellSizeInDp, 1.1d * cellSizeInDp));
        pts.add(new GeoPoint(-1.1d * cellSizeInDp, 1.1d * cellSizeInDp));
        pts.add(new GeoPoint(-1.1d * cellSizeInDp, 1.1d * cellSizeInDp));
        // 3 duplicates at a (+lat, -lon) 1.1-cell offset
        pts.add(new GeoPoint(1.1d * cellSizeInDp, -1.1d * cellSizeInDp));
        pts.add(new GeoPoint(1.1d * cellSizeInDp, -1.1d * cellSizeInDp));
        pts.add(new GeoPoint(1.1d * cellSizeInDp, -1.1d * cellSizeInDp));
        return pts;
    }
}
public class class_name { private ScanStaticModel scanBasicInfo(List<String> packNameList) { ScanStaticModel scanModel = new ScanStaticModel(); // // 扫描对象 // Reflections reflections = getReflection(packNameList); scanModel.setReflections(reflections); // // 获取DisconfFile class // Set<Class<?>> classdata = reflections.getTypesAnnotatedWith(DisconfFile.class); scanModel.setDisconfFileClassSet(classdata); // // 获取DisconfFileItem method // Set<Method> af1 = reflections.getMethodsAnnotatedWith(DisconfFileItem.class); scanModel.setDisconfFileItemMethodSet(af1); // // 获取DisconfItem method // af1 = reflections.getMethodsAnnotatedWith(DisconfItem.class); scanModel.setDisconfItemMethodSet(af1); // // 获取DisconfActiveBackupService // classdata = reflections.getTypesAnnotatedWith(DisconfActiveBackupService.class); scanModel.setDisconfActiveBackupServiceClassSet(classdata); // // 获取DisconfUpdateService // classdata = reflections.getTypesAnnotatedWith(DisconfUpdateService.class); scanModel.setDisconfUpdateService(classdata); // update pipeline Set<Class<? extends IDisconfUpdatePipeline>> iDisconfUpdatePipeline = reflections.getSubTypesOf (IDisconfUpdatePipeline .class); if (iDisconfUpdatePipeline != null && iDisconfUpdatePipeline.size() != 0) { scanModel.setiDisconfUpdatePipeline((Class<IDisconfUpdatePipeline>) iDisconfUpdatePipeline .toArray()[0]); } return scanModel; } }
public class class_name {
    /**
     * Scans the given packages and collects all disconf-related static
     * metadata (annotated classes and methods) into a {@link ScanStaticModel}.
     *
     * @param packNameList packages to scan
     * @return the populated scan model
     */
    private ScanStaticModel scanBasicInfo(List<String> packNameList) {
        ScanStaticModel scanModel = new ScanStaticModel();
        //
        // reflection scan context
        //
        Reflections reflections = getReflection(packNameList);
        scanModel.setReflections(reflections);
        //
        // collect classes annotated with @DisconfFile
        //
        Set<Class<?>> classdata = reflections.getTypesAnnotatedWith(DisconfFile.class);
        scanModel.setDisconfFileClassSet(classdata);
        //
        // collect methods annotated with @DisconfFileItem
        //
        Set<Method> af1 = reflections.getMethodsAnnotatedWith(DisconfFileItem.class);
        scanModel.setDisconfFileItemMethodSet(af1);
        //
        // collect methods annotated with @DisconfItem
        //
        af1 = reflections.getMethodsAnnotatedWith(DisconfItem.class);
        scanModel.setDisconfItemMethodSet(af1);
        //
        // collect classes annotated with @DisconfActiveBackupService
        //
        classdata = reflections.getTypesAnnotatedWith(DisconfActiveBackupService.class);
        scanModel.setDisconfActiveBackupServiceClassSet(classdata);
        //
        // collect classes annotated with @DisconfUpdateService
        //
        classdata = reflections.getTypesAnnotatedWith(DisconfUpdateService.class);
        scanModel.setDisconfUpdateService(classdata);
        // update pipeline: register an arbitrary implementation if any exists
        Set<Class<? extends IDisconfUpdatePipeline>> iDisconfUpdatePipeline = reflections.getSubTypesOf(IDisconfUpdatePipeline.class);
        if (iDisconfUpdatePipeline != null && iDisconfUpdatePipeline.size() != 0) {
            scanModel.setiDisconfUpdatePipeline((Class<IDisconfUpdatePipeline>) iDisconfUpdatePipeline.toArray()[0]); // depends on control dependency: [if], data = [none]
        }
        return scanModel;
    }
}
public class class_name {
    /**
     * Collects all methods on {@code type} (and, recursively, on its
     * interfaces) annotated with {@link Event}, mapping each method to the
     * {@link EventType} named by the annotation value or, when the value is
     * empty, by the method name.
     *
     * @param type the class whose declared methods and interfaces are scanned
     * @return map of annotated methods to their event types
     */
    private static Map<Method, EventType> findMethods(Class<?> type) {
        Map<Method, EventType> events = new HashMap<>();
        for (Method method : type.getDeclaredMethods()) {
            Event event = method.getAnnotation(Event.class);
            if (event != null) {
                // An empty annotation value means "use the method name".
                String name = event.value().isEmpty() ? method.getName() : event.value();
                events.put(method, EventType.from(name));
            }
        }
        // Interfaces may also declare annotated methods; merge them in.
        for (Class<?> iface : type.getInterfaces()) {
            events.putAll(findMethods(iface));
        }
        return events;
    }
}
public class class_name {
    /**
     * Collects all methods on {@code type} (and, recursively, on its
     * interfaces) annotated with {@link Event}, mapping each method to the
     * {@link EventType} named by the annotation value or, when empty, by the
     * method name.
     *
     * @param type the class whose declared methods and interfaces are scanned
     * @return map of annotated methods to their event types
     */
    private static Map<Method, EventType> findMethods(Class<?> type) {
        Map<Method, EventType> events = new HashMap<>();
        for (Method method : type.getDeclaredMethods()) {
            Event event = method.getAnnotation(Event.class);
            if (event != null) {
                // An empty annotation value means "use the method name".
                String name = event.value().equals("") ? method.getName() : event.value();
                events.put(method, EventType.from(name)); // depends on control dependency: [if], data = [none]
            }
        }
        // Interfaces may also declare annotated methods; merge them in.
        for (Class<?> iface : type.getInterfaces()) {
            events.putAll(findMethods(iface)); // depends on control dependency: [for], data = [iface]
        }
        return events;
    }
}
public class class_name {
    /**
     * Looks up a value by group and key under the read lock.
     *
     * @param group the group name; null is treated as the empty group
     * @param key the key within the group
     * @return the value, or null when the group is absent/empty or the key
     *         is missing
     */
    public String get(String group, String key) {
        readLock.lock();
        try {
            LinkedHashMap<String, String> groupMap = this.get(StrUtil.nullToEmpty(group));
            return MapUtil.isNotEmpty(groupMap) ? groupMap.get(key) : null;
        } finally {
            readLock.unlock();
        }
    }
}
public class class_name {
    /**
     * Looks up a value by group and key under the read lock.
     *
     * @param group the group name; null is treated as the empty group
     * @param key the key within the group
     * @return the value, or null when the group is absent/empty or the key
     *         is missing
     */
    public String get(String group, String key) {
        readLock.lock();
        try {
            LinkedHashMap<String, String> map = this.get(StrUtil.nullToEmpty(group));
            if (MapUtil.isNotEmpty(map)) {
                return map.get(key); // depends on control dependency: [if], data = [none]
            }
        } finally {
            readLock.unlock();
        }
        return null;
    }
}