code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name {
    /**
     * Marshals the fields of the given {@link UpdateResourceDefinitionRequest} (name and
     * resource definition id) into the supplied {@link ProtocolMarshaller}.
     *
     * @throws SdkClientException if the request is null or marshalling fails for any reason
     */
    public void marshall(UpdateResourceDefinitionRequest updateResourceDefinitionRequest, ProtocolMarshaller protocolMarshaller) {
        final UpdateResourceDefinitionRequest request = updateResourceDefinitionRequest;
        if (request == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(request.getName(), NAME_BINDING);
            protocolMarshaller.marshall(request.getResourceDefinitionId(), RESOURCEDEFINITIONID_BINDING);
        } catch (Exception e) {
            // Wrap anything thrown by the protocol layer, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class class_name {
    /**
     * Marshals the request's name and resource-definition id via the given protocol
     * marshaller. Throws SdkClientException on a null request or any marshalling failure.
     */
    public void marshall(UpdateResourceDefinitionRequest updateResourceDefinitionRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateResourceDefinitionRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(updateResourceDefinitionRequest.getName(), NAME_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(updateResourceDefinitionRequest.getResourceDefinitionId(), RESOURCEDEFINITIONID_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    /**
     * Loads a new version of this reloadable type from the given bytecode.
     * When {@code GlobalConfiguration.verifyReloads} is on, a {@code TypeDelta} is computed
     * against the original bytes and unsupported changes (type modifiers, supertype,
     * interfaces) veto the reload. On a successful reload the new {@code CurrentLiveVersion}
     * is installed, caches are invalidated and listeners/subtypes are notified.
     *
     * @param versionsuffix identifying suffix for this version (used in logs and events)
     * @param newbytedata the new class bytes (will be passed through retransform first)
     * @return true if the new version was loaded, false if the reload was vetoed
     */
    public boolean loadNewVersion(String versionsuffix, byte[] newbytedata) {
        javaMethodCache = null;
        if (GlobalConfiguration.verboseMode && log.isLoggable(Level.INFO)) {
            log.info("Loading new version of " + slashedtypename + ", identifying suffix " + versionsuffix
                    + ", new data length is " + newbytedata.length + "bytes");
        }
        // If we find our parent classloader has a weavingTransformer
        newbytedata = retransform(newbytedata);
        // TODO how slow is this? something to worry about? make it conditional on a setting?
        boolean reload = true;
        TypeDelta td = null;
        if (GlobalConfiguration.verifyReloads) {
            td = TypeDiffComputer.computeDifferences(bytesInitial, newbytedata);
            if (td.hasAnythingChanged()) {
                // need to check it isn't anything we do not yet support
                boolean cantReload = false;
                StringBuilder s = null;
                if (td.hasTypeDeclarationChanged()) {
                    // Not allowed to change the type
                    reload = false;
                    s = new StringBuilder("Spring Loaded: Cannot reload new version of ").append(
                            this.dottedtypename).append("\n");
                    if (td.hasTypeAccessChanged()) {
                        s.append(" Reason: Type modifiers changed from=0x" + Integer.toHexString(td.oAccess)
                                + " to=0x" + Integer.toHexString(td.nAccess) + "\n");
                        cantReload = true;
                    }
                    if (td.hasTypeSupertypeChanged()) {
                        s.append(" Reason: Supertype changed from ").append(td.oSuperName).append(" to ").append(
                                td.nSuperName).append("\n");
                        cantReload = true;
                    }
                    if (td.hasTypeInterfacesChanged()) {
                        // This next bit of code is to deal with the situation
                        // Peter saw where on a full build some type implements
                        // GroovyObject
                        // but on an incremental build of just that one file, it
                        // no longer implements it (presumably - and we could go
                        // checking
                        // for this - a supertype already implements the
                        // interface but the full build wasn't smart enough to
                        // work that out)
                        boolean justGroovyObjectMoved = false;
                        if (!cantReload && getTypeDescriptor().isGroovyType()) {
                            // Is it just GroovyObject that has been lost?
                            List<String> interfaceChanges = new ArrayList<String>();
                            interfaceChanges.addAll(td.oInterfaces);
                            interfaceChanges.removeAll(td.nInterfaces);
                            // If ifaces is just GroovyObject now then that
                            // means it has been removed from the interface list
                            // - which can unfortunately happen on an
                            // incremental compile
                            if (this.getTypeDescriptor().isGroovyType() && interfaceChanges.size() == 1
                                    && interfaceChanges.get(0).equals("groovy/lang/GroovyObject")) {
                                // just let it go... needs fixing in Groovy
                                // really
                                justGroovyObjectMoved = true;
                                s = null;
                                reload = true;
                            }
                        }
                        if (!justGroovyObjectMoved) {
                            s.append(" Reason: Interfaces changed from ").append(td.oInterfaces).append(" to ").append(
                                    td.nInterfaces).append("\n");
                            cantReload = true;
                        }
                    }
                }
                // if (td.haveFieldsChangedOrBeenAddedOrRemoved()) {
                // reload = false;
                // if (s == null) {
                // s = new StringBuilder("Spring-Loaded: Cannot reload new version of ").append(this.dottedtypename).append(
                // "\n");
                // }
                // if (td.hasNewFields()) {
                // s.append(" Reason: New version has new fields:\n" + Utils.fieldNodeFormat(td.getNewFields().values()));
                // }
                // if (td.hasLostFields()) {
                // s.append(" Reason: New version has removed some fields: \n"
                // + Utils.fieldNodeFormat(td.getLostFields().values()));
                // }
                // }
                boolean somethingCalled = false;
                if (cantReload && td.hasAnythingChanged()) {
                    somethingCalled = typeRegistry.fireUnableToReloadEvent(this, td, versionsuffix);
                }
                if (cantReload && s == null && td.hasAnythingChanged()) {
                    if (!somethingCalled) {
                        System.out.println("Something has changed preventing reload");
                    }
                }
                if (!somethingCalled && s != null) {
                    System.out.println(s);
                }
            }
        }
        if (reload) {
            TypeRegistry.nothingReloaded = false;
            invokersCache_getDeclaredMethods = null; // will no longer use this cache
            if (GlobalConfiguration.reloadMessages) {
                // Only put out the message when running in limit mode (under tc Server)
                System.out.println("Reloading: Loading new version of " + this.dottedtypename + " [" + versionsuffix + "]");
            }
            if (GlobalConfiguration.isRuntimeLogging && log.isLoggable(Level.INFO)) {
                log.info("Reloading: Loading new version of " + this.dottedtypename + " [" + versionsuffix + "]");
            }
            liveVersion = new CurrentLiveVersion(this, versionsuffix, newbytedata);
            liveVersion.setTypeDelta(td);
            typeRegistry.reloadableTypeDescriptorCache.put(this.slashedtypename, liveVersion.typeDescriptor);
            if (typedescriptor.isGroovyType()) {
                fixupGroovyType();
            }
            if (typedescriptor.isEnum()) {
                resetEnumRelatedState();
            }
            if (typeRegistry.shouldRerunStaticInitializer(this, versionsuffix) || typedescriptor.isEnum()) {
                liveVersion.staticInitializedNeedsRerunningOnDefine = true;
                liveVersion.runStaticInitializer();
            } else {
                liveVersion.staticInitializedNeedsRerunningOnDefine = false;
            }
            // For performance:
            // - tag the relevant types that may have been affected by this being reloaded, i.e. this type and any reloadable types in the same hierachy
            tagAsAffectedByReload();
            tagSupertypesAsAffectedByReload();
            tagSubtypesAsAffectedByReload();
            typeRegistry.fireReloadEvent(this, versionsuffix);
            reloadProxiesIfNecessary(versionsuffix);
        }
        // dump(newbytedata);
        return reload;
    }
}
public class class_name {
    /**
     * Loads a new version of this reloadable type from the given bytecode; see the
     * unannotated variant for full behavior. Dependency annotations are preserved.
     *
     * @return true if the new version was loaded, false if the reload was vetoed
     */
    public boolean loadNewVersion(String versionsuffix, byte[] newbytedata) {
        javaMethodCache = null;
        if (GlobalConfiguration.verboseMode && log.isLoggable(Level.INFO)) {
            log.info("Loading new version of " + slashedtypename + ", identifying suffix " + versionsuffix
                    + ", new data length is " + newbytedata.length + "bytes"); // depends on control dependency: [if], data = [none]
        }
        // If we find our parent classloader has a weavingTransformer
        newbytedata = retransform(newbytedata);
        // TODO how slow is this? something to worry about? make it conditional on a setting?
        boolean reload = true;
        TypeDelta td = null;
        if (GlobalConfiguration.verifyReloads) {
            td = TypeDiffComputer.computeDifferences(bytesInitial, newbytedata); // depends on control dependency: [if], data = [none]
            if (td.hasAnythingChanged()) {
                // need to check it isn't anything we do not yet support
                boolean cantReload = false;
                StringBuilder s = null;
                if (td.hasTypeDeclarationChanged()) {
                    // Not allowed to change the type
                    reload = false; // depends on control dependency: [if], data = [none]
                    s = new StringBuilder("Spring Loaded: Cannot reload new version of ").append(
                            this.dottedtypename).append("\n"); // depends on control dependency: [if], data = [none]
                    if (td.hasTypeAccessChanged()) {
                        s.append(" Reason: Type modifiers changed from=0x" + Integer.toHexString(td.oAccess)
                                + " to=0x" + Integer.toHexString(td.nAccess) + "\n"); // depends on control dependency: [if], data = [none]
                        cantReload = true; // depends on control dependency: [if], data = [none]
                    }
                    if (td.hasTypeSupertypeChanged()) {
                        s.append(" Reason: Supertype changed from ").append(td.oSuperName).append(" to ").append(
                                td.nSuperName).append("\n"); // depends on control dependency: [if], data = [none]
                        cantReload = true; // depends on control dependency: [if], data = [none]
                    }
                    if (td.hasTypeInterfacesChanged()) {
                        // This next bit of code is to deal with the situation
                        // Peter saw where on a full build some type implements
                        // GroovyObject
                        // but on an incremental build of just that one file, it
                        // no longer implements it (presumably - and we could go
                        // checking
                        // for this - a supertype already implements the
                        // interface but the full build wasn't smart enough to
                        // work that out)
                        boolean justGroovyObjectMoved = false;
                        if (!cantReload && getTypeDescriptor().isGroovyType()) {
                            // Is it just GroovyObject that has been lost?
                            List<String> interfaceChanges = new ArrayList<String>();
                            interfaceChanges.addAll(td.oInterfaces); // depends on control dependency: [if], data = [none]
                            interfaceChanges.removeAll(td.nInterfaces); // depends on control dependency: [if], data = [none]
                            // If ifaces is just GroovyObject now then that
                            // means it has been removed from the interface list
                            // - which can unfortunately happen on an
                            // incremental compile
                            if (this.getTypeDescriptor().isGroovyType() && interfaceChanges.size() == 1
                                    && interfaceChanges.get(0).equals("groovy/lang/GroovyObject")) {
                                // just let it go... needs fixing in Groovy
                                // really
                                justGroovyObjectMoved = true; // depends on control dependency: [if], data = [none]
                                s = null; // depends on control dependency: [if], data = [none]
                                reload = true; // depends on control dependency: [if], data = [none]
                            }
                        }
                        if (!justGroovyObjectMoved) {
                            s.append(" Reason: Interfaces changed from ").append(td.oInterfaces).append(" to ").append(
                                    td.nInterfaces).append("\n"); // depends on control dependency: [if], data = [none]
                            cantReload = true; // depends on control dependency: [if], data = [none]
                        }
                    }
                }
                // if (td.haveFieldsChangedOrBeenAddedOrRemoved()) {
                // reload = false;
                // if (s == null) {
                // s = new StringBuilder("Spring-Loaded: Cannot reload new version of ").append(this.dottedtypename).append(
                // "\n");
                // }
                // if (td.hasNewFields()) {
                // s.append(" Reason: New version has new fields:\n" + Utils.fieldNodeFormat(td.getNewFields().values()));
                // }
                // if (td.hasLostFields()) {
                // s.append(" Reason: New version has removed some fields: \n"
                // + Utils.fieldNodeFormat(td.getLostFields().values()));
                // }
                // }
                boolean somethingCalled = false;
                if (cantReload && td.hasAnythingChanged()) {
                    somethingCalled = typeRegistry.fireUnableToReloadEvent(this, td, versionsuffix); // depends on control dependency: [if], data = [none]
                }
                if (cantReload && s == null && td.hasAnythingChanged()) {
                    if (!somethingCalled) {
                        System.out.println("Something has changed preventing reload"); // depends on control dependency: [if], data = [none]
                    }
                }
                if (!somethingCalled && s != null) {
                    System.out.println(s); // depends on control dependency: [if], data = [none]
                }
            }
        }
        if (reload) {
            TypeRegistry.nothingReloaded = false; // depends on control dependency: [if], data = [none]
            invokersCache_getDeclaredMethods = null; // will no longer use this cache // depends on control dependency: [if], data = [none]
            if (GlobalConfiguration.reloadMessages) {
                // Only put out the message when running in limit mode (under tc Server)
                System.out.println("Reloading: Loading new version of " + this.dottedtypename + " [" + versionsuffix + "]"); // depends on control dependency: [if], data = [none]
            }
            if (GlobalConfiguration.isRuntimeLogging && log.isLoggable(Level.INFO)) {
                log.info("Reloading: Loading new version of " + this.dottedtypename + " [" + versionsuffix + "]"); // depends on control dependency: [if], data = [none]
            }
            liveVersion = new CurrentLiveVersion(this, versionsuffix, newbytedata); // depends on control dependency: [if], data = [none]
            liveVersion.setTypeDelta(td); // depends on control dependency: [if], data = [none]
            typeRegistry.reloadableTypeDescriptorCache.put(this.slashedtypename, liveVersion.typeDescriptor); // depends on control dependency: [if], data = [none]
            if (typedescriptor.isGroovyType()) {
                fixupGroovyType(); // depends on control dependency: [if], data = [none]
            }
            if (typedescriptor.isEnum()) {
                resetEnumRelatedState(); // depends on control dependency: [if], data = [none]
            }
            if (typeRegistry.shouldRerunStaticInitializer(this, versionsuffix) || typedescriptor.isEnum()) {
                liveVersion.staticInitializedNeedsRerunningOnDefine = true; // depends on control dependency: [if], data = [none]
                liveVersion.runStaticInitializer(); // depends on control dependency: [if], data = [none]
            } else {
                liveVersion.staticInitializedNeedsRerunningOnDefine = false; // depends on control dependency: [if], data = [none]
            }
            // For performance:
            // - tag the relevant types that may have been affected by this being reloaded, i.e. this type and any reloadable types in the same hierachy
            tagAsAffectedByReload(); // depends on control dependency: [if], data = [none]
            tagSupertypesAsAffectedByReload(); // depends on control dependency: [if], data = [none]
            tagSubtypesAsAffectedByReload(); // depends on control dependency: [if], data = [none]
            typeRegistry.fireReloadEvent(this, versionsuffix); // depends on control dependency: [if], data = [none]
            reloadProxiesIfNecessary(versionsuffix); // depends on control dependency: [if], data = [none]
        }
        // dump(newbytedata);
        return reload;
    }
}
public class class_name {
    /**
     * Appends the given write attributes to this client's list, lazily creating the
     * backing list on first use.
     *
     * @return this object, to allow method chaining
     */
    public UserPoolClientType withWriteAttributes(String... writeAttributes) {
        if (this.writeAttributes == null) {
            // Size the new list for exactly the elements about to be added.
            setWriteAttributes(new java.util.ArrayList<String>(writeAttributes.length));
        }
        for (int i = 0; i < writeAttributes.length; i++) {
            this.writeAttributes.add(writeAttributes[i]);
        }
        return this;
    }
}
public class class_name {
    /**
     * Varargs convenience: appends the given write attributes, creating the backing list
     * on first use; returns this object for chaining.
     */
    public UserPoolClientType withWriteAttributes(String... writeAttributes) {
        if (this.writeAttributes == null) {
            setWriteAttributes(new java.util.ArrayList<String>(writeAttributes.length)); // depends on control dependency: [if], data = [none]
        }
        for (String ele : writeAttributes) {
            this.writeAttributes.add(ele); // depends on control dependency: [for], data = [ele]
        }
        return this;
    }
}
public class class_name {
    /**
     * Drops the (list, value) reference and triggers a GC check — but only when GC
     * support is enabled in the configuration; otherwise this is a no-op.
     */
    public synchronized void onRemovedFromList(ObservableList list, Object value) {
        if (configuration.isUseGc()) {
            removeReferenceAndCheckForGC(list, value);
        }
    }
}
public class class_name {
    /**
     * Drops the (list, value) reference and triggers a GC check, unless GC support is
     * disabled in the configuration (in which case nothing happens).
     */
    public synchronized void onRemovedFromList(ObservableList list, Object value) {
        if (!configuration.isUseGc()) {
            return; // depends on control dependency: [if], data = [none]
        }
        removeReferenceAndCheckForGC(list, value);
    }
}
public class class_name {
    /**
     * Returns the explicitly configured currency, or, when none is set, the currency of
     * the formatter's valid locale (falling back to the default FORMAT locale).
     */
    @Deprecated
    protected Currency getEffectiveCurrency() {
        final Currency configured = getCurrency();
        if (configured != null) {
            return configured;
        }
        ULocale locale = getLocale(ULocale.VALID_LOCALE);
        if (locale == null) {
            locale = ULocale.getDefault(Category.FORMAT);
        }
        return Currency.getInstance(locale);
    }
}
public class class_name {
    /**
     * Returns the configured currency, falling back to the currency of the formatter's
     * valid locale (or the default FORMAT locale when that is null) when none is set.
     */
    @Deprecated
    protected Currency getEffectiveCurrency() {
        Currency c = getCurrency();
        if (c == null) {
            ULocale uloc = getLocale(ULocale.VALID_LOCALE);
            if (uloc == null) {
                uloc = ULocale.getDefault(Category.FORMAT); // depends on control dependency: [if], data = [none]
            }
            c = Currency.getInstance(uloc); // depends on control dependency: [if], data = [none]
        }
        return c;
    }
}
public class class_name {
    /**
     * Forwards a job-deletion notification to the delegate, but only when the filter
     * accepts a stand-in JobSpec built from the deleted job's URI and version.
     */
    @Override
    public void onDeleteJob(URI deletedJobURI, String deletedJobVersion) {
        // Build a minimal JobSpec so the filter can be evaluated for the deleted job.
        final JobSpec probe = JobSpec.builder(deletedJobURI).withVersion(deletedJobVersion).build();
        if (!this.filter.apply(probe)) {
            return;
        }
        this.delegate.onDeleteJob(deletedJobURI, deletedJobVersion);
    }
}
public class class_name {
    /**
     * Forwards the delete notification to the delegate only when the filter accepts a
     * stand-in JobSpec built from the deleted job's URI and version.
     */
    @Override
    public void onDeleteJob(URI deletedJobURI, String deletedJobVersion) {
        JobSpec fakeJobSpec = JobSpec.builder(deletedJobURI).withVersion(deletedJobVersion).build();
        if (this.filter.apply(fakeJobSpec)) {
            this.delegate.onDeleteJob(deletedJobURI, deletedJobVersion); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Appends {@code string} to {@code buff} with every occurrence of {@code from}
     * replaced by {@code to}, and returns the resulting buffer contents.
     *
     * @param buff   buffer the result is accumulated into (any existing content is kept)
     * @param from   token to search for; an empty token means "replace nothing"
     * @param to     replacement text
     * @param string input to scan
     * @return the contents of {@code buff} after appending
     */
    public static String subst(final StringBuffer buff, final String from, final String to, final String string) {
        // FIX: an empty search token previously caused an infinite loop, because
        // String.indexOf("", end) returns end (never -1). Treat it as nothing to replace.
        if (from.isEmpty()) {
            buff.append(string);
            return buff.toString();
        }
        int begin = 0, end = 0;
        while ((end = string.indexOf(from, end)) != -1) {
            // append the part of the string before this match
            buff.append(string.substring(begin, end));
            // append the replacement
            buff.append(to);
            // advance past the matched token
            begin = end + from.length();
            end = begin;
        }
        // append the tail after the last match
        buff.append(string.substring(begin));
        return buff.toString();
    }
}
public class class_name {
    /**
     * Appends {@code string} to {@code buff} replacing every occurrence of {@code from}
     * with {@code to}; returns the buffer contents.
     * NOTE(review): an empty {@code from} makes this loop forever, because
     * String.indexOf("", end) returns end and never -1.
     */
    public static String subst(final StringBuffer buff, final String from, final String to, final String string) {
        int begin = 0, end = 0;
        while ((end = string.indexOf(from, end)) != -1) {
            // append the first part of the string
            buff.append(string.substring(begin, end)); // depends on control dependency: [while], data = [none]
            // append the replaced string
            buff.append(to); // depends on control dependency: [while], data = [none]
            // update positions
            begin = end + from.length(); // depends on control dependency: [while], data = [none]
            end = begin; // depends on control dependency: [while], data = [none]
        }
        // append the rest of the string
        buff.append(string.substring(begin, string.length()));
        return buff.toString();
    }
}
public class class_name {
    /**
     * Renews the session ticket keys while holding the context's read lock, so the
     * native context cannot be released concurrently.
     */
    public long ticketKeyRenew() {
        final Lock lock = context.ctxLock.readLock();
        lock.lock();
        try {
            return SSLContext.sessionTicketKeyRenew(context.ctx);
        } finally {
            lock.unlock();
        }
    }
}
public class class_name {
    /** Renews the session ticket keys while holding the context's read lock. */
    public long ticketKeyRenew() {
        Lock readerLock = context.ctxLock.readLock();
        readerLock.lock();
        try {
            return SSLContext.sessionTicketKeyRenew(context.ctx); // depends on control dependency: [try], data = [none]
        } finally {
            readerLock.unlock();
        }
    }
}
public class class_name {
    /**
     * Merges the translated additional XML of a topic into {@code topicDoc} (author-group
     * or revision-history merge depending on the topic type). Any parse or merge failure
     * is recorded in the build error database and the topic's XML is replaced with the
     * error template.
     *
     * @return the merged document, or the error-template document on failure
     */
    private Document mergeAdditionalTranslatedXML(BuildData buildData, final Document topicDoc,
            final TranslatedTopicWrapper translatedTopic, final TopicType topicType) throws BuildProcessingException {
        Document retValue = topicDoc;
        if (!isNullOrEmpty(translatedTopic.getTranslatedAdditionalXML())) {
            Document additionalXMLDoc = null;
            try {
                additionalXMLDoc = XMLUtilities.convertStringToDocument(translatedTopic.getTranslatedAdditionalXML());
            } catch (Exception ex) {
                // The additional XML could not be parsed: record the error and substitute
                // the error template.
                buildData.getErrorDatabase().addError(translatedTopic, ErrorType.INVALID_CONTENT,
                        BuilderConstants.ERROR_INVALID_TOPIC_XML + " " + StringUtilities.escapeForXML(ex.getMessage()));
                retValue = DocBookBuildUtilities.setTopicXMLForError(buildData, translatedTopic,
                        getErrorInvalidValidationTopicTemplate().getValue());
            }
            if (additionalXMLDoc != null) {
                // Merge the two together
                try {
                    if (TopicType.AUTHOR_GROUP.equals(topicType)) {
                        DocBookBuildUtilities.mergeAuthorGroups(topicDoc, additionalXMLDoc);
                    } else if (TopicType.REVISION_HISTORY.equals(topicType)) {
                        DocBookBuildUtilities.mergeRevisionHistories(topicDoc, additionalXMLDoc);
                    }
                } catch (BuildProcessingException ex) {
                    // Merge failed: record the failure with the offending XML attached.
                    final String xmlStringInCDATA = XMLUtilities.wrapStringInCDATA(translatedTopic.getTranslatedAdditionalXML());
                    buildData.getErrorDatabase().addError(translatedTopic, ErrorType.INVALID_CONTENT,
                            BuilderConstants.ERROR_BAD_XML_STRUCTURE + " " + StringUtilities.escapeForXML(
                                    ex.getMessage()) + " The processed XML is <programlisting>" + xmlStringInCDATA + "</programlisting>");
                    retValue = DocBookBuildUtilities.setTopicXMLForError(buildData, translatedTopic,
                            getErrorInvalidValidationTopicTemplate().getValue());
                }
            } else {
                // Parsing produced no document: record invalid content.
                final String xmlStringInCDATA = XMLUtilities.wrapStringInCDATA(translatedTopic.getTranslatedAdditionalXML());
                buildData.getErrorDatabase().addError(translatedTopic, ErrorType.INVALID_CONTENT,
                        BuilderConstants.ERROR_INVALID_XML_CONTENT + " The processed XML is <programlisting>" + xmlStringInCDATA + "</programlisting>");
                retValue = DocBookBuildUtilities.setTopicXMLForError(buildData, translatedTopic,
                        getErrorInvalidValidationTopicTemplate().getValue());
            }
        }
        return retValue;
    }
}
public class class_name {
    /**
     * Merges the translated additional XML of a topic into {@code topicDoc}; on any parse
     * or merge failure records an error and substitutes the error template. Dependency
     * annotations are preserved.
     */
    private Document mergeAdditionalTranslatedXML(BuildData buildData, final Document topicDoc,
            final TranslatedTopicWrapper translatedTopic, final TopicType topicType) throws BuildProcessingException {
        Document retValue = topicDoc;
        if (!isNullOrEmpty(translatedTopic.getTranslatedAdditionalXML())) {
            Document additionalXMLDoc = null;
            try {
                additionalXMLDoc = XMLUtilities.convertStringToDocument(translatedTopic.getTranslatedAdditionalXML()); // depends on control dependency: [try], data = [none]
            } catch (Exception ex) {
                buildData.getErrorDatabase().addError(translatedTopic, ErrorType.INVALID_CONTENT,
                        BuilderConstants.ERROR_INVALID_TOPIC_XML + " " + StringUtilities.escapeForXML(ex.getMessage()));
                retValue = DocBookBuildUtilities.setTopicXMLForError(buildData, translatedTopic,
                        getErrorInvalidValidationTopicTemplate().getValue());
            } // depends on control dependency: [catch], data = [none]
            if (additionalXMLDoc != null) {
                // Merge the two together
                try {
                    if (TopicType.AUTHOR_GROUP.equals(topicType)) {
                        DocBookBuildUtilities.mergeAuthorGroups(topicDoc, additionalXMLDoc); // depends on control dependency: [if], data = [none]
                    } else if (TopicType.REVISION_HISTORY.equals(topicType)) {
                        DocBookBuildUtilities.mergeRevisionHistories(topicDoc, additionalXMLDoc); // depends on control dependency: [if], data = [none]
                    }
                } catch (BuildProcessingException ex) {
                    final String xmlStringInCDATA = XMLUtilities.wrapStringInCDATA(translatedTopic.getTranslatedAdditionalXML());
                    buildData.getErrorDatabase().addError(translatedTopic, ErrorType.INVALID_CONTENT,
                            BuilderConstants.ERROR_BAD_XML_STRUCTURE + " " + StringUtilities.escapeForXML(
                                    ex.getMessage()) + " The processed XML is <programlisting>" + xmlStringInCDATA + "</programlisting>");
                    retValue = DocBookBuildUtilities.setTopicXMLForError(buildData, translatedTopic,
                            getErrorInvalidValidationTopicTemplate().getValue());
                } // depends on control dependency: [catch], data = [none]
            } else {
                final String xmlStringInCDATA = XMLUtilities.wrapStringInCDATA(translatedTopic.getTranslatedAdditionalXML());
                buildData.getErrorDatabase().addError(translatedTopic, ErrorType.INVALID_CONTENT,
                        BuilderConstants.ERROR_INVALID_XML_CONTENT + " The processed XML is <programlisting>" + xmlStringInCDATA + "</programlisting>"); // depends on control dependency: [if], data = [none]
                retValue = DocBookBuildUtilities.setTopicXMLForError(buildData, translatedTopic,
                        getErrorInvalidValidationTopicTemplate().getValue()); // depends on control dependency: [if], data = [none]
            }
        }
        return retValue;
    }
}
public class class_name {
    /**
     * Sets the target-group states. A null argument clears the current value; otherwise
     * a defensive copy of the given collection is stored.
     */
    public void setLoadBalancerTargetGroups(java.util.Collection<LoadBalancerTargetGroupState> loadBalancerTargetGroups) {
        this.loadBalancerTargetGroups = (loadBalancerTargetGroups == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<LoadBalancerTargetGroupState>(loadBalancerTargetGroups);
    }
}
public class class_name {
    /**
     * Sets the target-group states; null clears the value, otherwise the given collection
     * is defensively copied into an SdkInternalList.
     */
    public void setLoadBalancerTargetGroups(java.util.Collection<LoadBalancerTargetGroupState> loadBalancerTargetGroups) {
        if (loadBalancerTargetGroups == null) {
            this.loadBalancerTargetGroups = null; // depends on control dependency: [if], data = [none]
            return; // depends on control dependency: [if], data = [none]
        }
        this.loadBalancerTargetGroups = new com.amazonaws.internal.SdkInternalList<LoadBalancerTargetGroupState>(loadBalancerTargetGroups);
    }
}
public class class_name { private void initCapitalizationContextInfo(ULocale theLocale) { ICUResourceBundle rb = (ICUResourceBundle) UResourceBundle.getBundleInstance(ICUData.ICU_BASE_NAME, theLocale); try { ICUResourceBundle rdb = rb.getWithFallback("contextTransforms/number-spellout"); int[] intVector = rdb.getIntVector(); if (intVector.length >= 2) { capitalizationForListOrMenu = (intVector[0] != 0); capitalizationForStandAlone = (intVector[1] != 0); } } catch (MissingResourceException e) { // use default } } }
public class class_name {
    /**
     * Reads the locale's contextTransforms/number-spellout int vector and caches the two
     * capitalization flags; a missing resource leaves the defaults untouched.
     */
    private void initCapitalizationContextInfo(ULocale theLocale) {
        ICUResourceBundle rb = (ICUResourceBundle) UResourceBundle.getBundleInstance(ICUData.ICU_BASE_NAME, theLocale);
        try {
            ICUResourceBundle rdb = rb.getWithFallback("contextTransforms/number-spellout");
            int[] intVector = rdb.getIntVector();
            if (intVector.length >= 2) {
                capitalizationForListOrMenu = (intVector[0] != 0); // depends on control dependency: [if], data = [none]
                capitalizationForStandAlone = (intVector[1] != 0); // depends on control dependency: [if], data = [none]
            }
        } catch (MissingResourceException e) {
            // use default
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    /**
     * Returns the first source DataMedia in the pipeline matching the given namespace and
     * name. When none matches, returns null if {@code notExistReturnNull} is true,
     * otherwise throws a ConfigException.
     */
    public static DataMedia<? extends DataMediaSource> findSourceDataMedia(Pipeline pipeline, String namespace, String name, boolean notExistReturnNull) {
        for (DataMediaPair pair : pipeline.getPairs()) {
            if (isMatch(pair.getSource(), namespace, name)) {
                return pair.getSource();
            }
        }
        if (!notExistReturnNull) {
            throw new ConfigException("no such DataMedia , the namespace = " + namespace + " name = " + name);
        }
        return null;
    }
}
public class class_name {
    /**
     * Returns the first source DataMedia in the pipeline matching namespace/name;
     * otherwise returns null or throws ConfigException, per notExistReturnNull.
     */
    public static DataMedia<? extends DataMediaSource> findSourceDataMedia(Pipeline pipeline, String namespace, String name, boolean notExistReturnNull) {
        for (DataMediaPair pair : pipeline.getPairs()) {
            if (isMatch(pair.getSource(), namespace, name)) {
                return pair.getSource(); // depends on control dependency: [if], data = [none]
            }
        }
        if (notExistReturnNull) {
            return null; // depends on control dependency: [if], data = [none]
        } else {
            throw new ConfigException("no such DataMedia , the namespace = " + namespace + " name = " + name);
        }
    }
}
public class class_name {
    /**
     * Replaces the registered resource pre-processors with the given collection.
     * A null argument simply clears the current set.
     */
    public void setResourcePreProcessors(final Collection<ResourcePreProcessor> processors) {
        preProcessors.clear();
        if (processors == null) {
            return;
        }
        preProcessors.addAll(processors);
    }
}
public class class_name {
    /** Replaces the registered pre-processors; a null argument simply clears them. */
    public void setResourcePreProcessors(final Collection<ResourcePreProcessor> processors) {
        preProcessors.clear();
        if (processors != null) {
            preProcessors.addAll(processors); // depends on control dependency: [if], data = [(processors]
        }
    }
}
public class class_name {
    /**
     * Resolves the host clients should use to connect to the given service: the
     * configured connect host when usable, else the configured bind host when usable,
     * else the local host name. A host is "usable" when set, non-empty, and not the
     * wildcard address.
     */
    public static String getConnectHost(ServiceType service, AlluxioConfiguration conf) {
        final String connectHost = usableHost(conf, service.mHostNameKey);
        if (connectHost != null) {
            return connectHost;
        }
        final String bindHost = usableHost(conf, service.mBindHostKey);
        if (bindHost != null) {
            return bindHost;
        }
        return getLocalHostName((int) conf.getMs(PropertyKey.NETWORK_HOST_RESOLUTION_TIMEOUT_MS));
    }

    /**
     * Returns the host configured under {@code key}, or null when the key is unset, the
     * value is empty, or it equals the wildcard address.
     */
    private static String usableHost(AlluxioConfiguration conf, PropertyKey key) {
        if (conf.isSet(key)) {
            final String host = conf.get(key);
            if (!host.isEmpty() && !host.equals(WILDCARD_ADDRESS)) {
                return host;
            }
        }
        return null;
    }
}
public class class_name {
    /**
     * Resolves the host clients should connect to for the given service: the configured
     * connect host if usable, else the bind host if usable, else the local host name.
     * A host is "usable" when set, non-empty and not the wildcard address.
     */
    public static String getConnectHost(ServiceType service, AlluxioConfiguration conf) {
        if (conf.isSet(service.mHostNameKey)) {
            String connectHost = conf.get(service.mHostNameKey);
            if (!connectHost.isEmpty() && !connectHost.equals(WILDCARD_ADDRESS)) {
                return connectHost; // depends on control dependency: [if], data = [none]
            }
        }
        if (conf.isSet(service.mBindHostKey)) {
            String bindHost = conf.get(service.mBindHostKey);
            if (!bindHost.isEmpty() && !bindHost.equals(WILDCARD_ADDRESS)) {
                return bindHost; // depends on control dependency: [if], data = [none]
            }
        }
        return getLocalHostName((int) conf.getMs(PropertyKey.NETWORK_HOST_RESOLUTION_TIMEOUT_MS));
    }
}
public class class_name { private boolean isMathExpression(String line) { String cleanLine = cleanLine(line).trim(); boolean mathExpression = false; if (cleanLine.startsWith(DEFINE)) { return true; } else if (cleanLine.matches(CODE_LINE_SUFFIX)) { return true; } // go forward int index = cleanLine.indexOf(COPYRIGHT_SYMBOL); for (int i = index + 1; i < cleanLine.length(); i++) { char c = cleanLine.charAt(i); if (c == Constants.OPEN_BRACKET || c == Constants.CLOSE_BRACKET || c == Constants.WHITESPACE_CHAR) { continue; } else if (MATH_SYMBOLS.contains(c)) { mathExpression = true; break; } else { break; } } // go backwards if (mathExpression) { for (int i = index - 1; i >= 0; i--) { char c = cleanLine.charAt(i); if (c == Constants.OPEN_BRACKET || c == Constants.CLOSE_BRACKET || c == Constants.WHITESPACE_CHAR) { continue; } else if (MATH_SYMBOLS.contains(c)) { mathExpression = true; break; } else { break; } } } return mathExpression; } }
public class class_name {
    /**
     * Heuristic: decides whether the cleaned line is a math expression by scanning around
     * the copyright symbol for math symbols (skipping brackets and whitespace). Lines
     * starting with DEFINE or matching CODE_LINE_SUFFIX always count as math.
     * NOTE(review): the backward scan only runs when mathExpression is already true and
     * can only set it to true again, so it cannot change the result.
     */
    private boolean isMathExpression(String line) {
        String cleanLine = cleanLine(line).trim();
        boolean mathExpression = false;
        if (cleanLine.startsWith(DEFINE)) {
            return true; // depends on control dependency: [if], data = [none]
        } else if (cleanLine.matches(CODE_LINE_SUFFIX)) {
            return true; // depends on control dependency: [if], data = [none]
        }
        // go forward
        int index = cleanLine.indexOf(COPYRIGHT_SYMBOL);
        for (int i = index + 1; i < cleanLine.length(); i++) {
            char c = cleanLine.charAt(i);
            if (c == Constants.OPEN_BRACKET || c == Constants.CLOSE_BRACKET || c == Constants.WHITESPACE_CHAR) {
                continue;
            } else if (MATH_SYMBOLS.contains(c)) {
                mathExpression = true; // depends on control dependency: [if], data = [none]
                break;
            } else {
                break;
            }
        }
        // go backwards
        if (mathExpression) {
            for (int i = index - 1; i >= 0; i--) {
                char c = cleanLine.charAt(i);
                if (c == Constants.OPEN_BRACKET || c == Constants.CLOSE_BRACKET || c == Constants.WHITESPACE_CHAR) {
                    continue;
                } else if (MATH_SYMBOLS.contains(c)) {
                    mathExpression = true; // depends on control dependency: [if], data = [none]
                    break;
                } else {
                    break;
                }
            }
        }
        return mathExpression;
    }
}
public class class_name {
    /**
     * Folds the queued partial results pairwise (head-first, preserving order) until a
     * single combined result remains, and returns it. Assumes a non-empty queue.
     */
    private QueryResultImpl combineResults(Queue<QueryResultImpl> results, boolean distinct) {
        while (results.size() > 1) {
            // Java evaluates arguments left-to-right, so the two removals happen in the
            // same order as before.
            results.add(combineResults(results.remove(), results.remove(), distinct));
        }
        return results.remove();
    }
}
public class class_name {
    /**
     * Folds the queued partial results pairwise until one combined result remains and
     * returns it. Assumes a non-empty queue.
     */
    private QueryResultImpl combineResults(Queue<QueryResultImpl> results, boolean distinct) {
        while (results.size() > 1) {
            QueryResultImpl a = results.remove();
            QueryResultImpl b = results.remove();
            results.add(combineResults(a, b, distinct)); // depends on control dependency: [while], data = [none]
        }
        return results.remove();
    }
}
public class class_name { private static void checkBeanFactory(long waitTime, String bean) { if(beanFactory != null) { return; } // Jeśli czas oczekiwania jest określony, to wstrzymujemy bieżący wątek // do momentu ustawienia fabryki bean'ów. Dla ujemnej wartości czasu // oczekiwania - do skutku. Jeśli wartość jest dodatnia - aż upłynie // zdana ilość sekund: if(waitTime != 0) { long endTime = currentTimeMillis() + (waitTime * 1000); while(beanFactory == null && (waitTime < 0 || currentTimeMillis() < endTime)) { try { Thread.sleep(1000); } catch(InterruptedException e) { } } } if(beanFactory == null) { throw new BeanRetrievalException(bean, "Bean utils not initialized (bean factory not set)"); } } }
public class class_name {
    /**
     * Waits (up to waitTime seconds; negative = indefinitely; 0 = no wait) for the static
     * bean factory to be set, then throws BeanRetrievalException if it is still null.
     */
    private static void checkBeanFactory(long waitTime, String bean) {
        if(beanFactory != null) {
            return; // depends on control dependency: [if], data = [none]
        }
        // If a wait time is given, suspend the current thread until the bean factory is
        // set. For a negative wait time - wait until it happens. For a positive value -
        // until the given number of seconds has elapsed. (translated from Polish)
        if(waitTime != 0) {
            long endTime = currentTimeMillis() + (waitTime * 1000);
            while(beanFactory == null && (waitTime < 0 || currentTimeMillis() < endTime)) {
                try {
                    Thread.sleep(1000); // depends on control dependency: [try], data = [none]
                } catch(InterruptedException e) {
                    // NOTE(review): interrupt is swallowed here without re-interrupting the thread
                } // depends on control dependency: [catch], data = [none]
            }
        }
        if(beanFactory == null) {
            throw new BeanRetrievalException(bean, "Bean utils not initialized (bean factory not set)");
        }
    }
}
public class class_name {
    /**
     * Removes each of the listed elements, delegating to remove_i for every entry while
     * holding this object's monitor.
     */
    public void remove(final String[] pl, final boolean fwd) {
        synchronized (this) {
            for (int i = 0; i < pl.length; i++) {
                remove_i(pl[i], fwd);
            }
        }
    }
}
public class class_name {
    /** Removes each listed element under the monitor lock, delegating to remove_i. */
    public void remove(final String[] pl, final boolean fwd) {
        synchronized (this) {
            for (final String element : pl) {
                remove_i(element, fwd); // depends on control dependency: [for], data = [element]
            }
        }
    }
}
public class class_name { public CmsPropertyDefinition createPropertyDefinition(CmsDbContext dbc, String name) throws CmsException { CmsPropertyDefinition propertyDefinition = null; name = name.trim(); // validate the property name CmsPropertyDefinition.checkPropertyName(name); // TODO: make the type a parameter try { try { propertyDefinition = getVfsDriver(dbc).readPropertyDefinition( dbc, name, dbc.currentProject().getUuid()); } catch (CmsException e) { propertyDefinition = getVfsDriver(dbc).createPropertyDefinition( dbc, dbc.currentProject().getUuid(), name, CmsPropertyDefinition.TYPE_NORMAL); } try { getVfsDriver(dbc).readPropertyDefinition(dbc, name, CmsProject.ONLINE_PROJECT_ID); } catch (CmsException e) { getVfsDriver(dbc).createPropertyDefinition( dbc, CmsProject.ONLINE_PROJECT_ID, name, CmsPropertyDefinition.TYPE_NORMAL); } try { getHistoryDriver(dbc).readPropertyDefinition(dbc, name); } catch (CmsException e) { getHistoryDriver(dbc).createPropertyDefinition(dbc, name, CmsPropertyDefinition.TYPE_NORMAL); } } finally { // fire an event that a property of a resource has been deleted OpenCms.fireCmsEvent( new CmsEvent( I_CmsEventListener.EVENT_PROPERTY_DEFINITION_CREATED, Collections.<String, Object> singletonMap("propertyDefinition", propertyDefinition))); } return propertyDefinition; } }
public class class_name { public CmsPropertyDefinition createPropertyDefinition(CmsDbContext dbc, String name) throws CmsException { CmsPropertyDefinition propertyDefinition = null; name = name.trim(); // validate the property name CmsPropertyDefinition.checkPropertyName(name); // TODO: make the type a parameter try { try { propertyDefinition = getVfsDriver(dbc).readPropertyDefinition( dbc, name, dbc.currentProject().getUuid()); // depends on control dependency: [try], data = [none] } catch (CmsException e) { propertyDefinition = getVfsDriver(dbc).createPropertyDefinition( dbc, dbc.currentProject().getUuid(), name, CmsPropertyDefinition.TYPE_NORMAL); } // depends on control dependency: [catch], data = [none] try { getVfsDriver(dbc).readPropertyDefinition(dbc, name, CmsProject.ONLINE_PROJECT_ID); // depends on control dependency: [try], data = [none] } catch (CmsException e) { getVfsDriver(dbc).createPropertyDefinition( dbc, CmsProject.ONLINE_PROJECT_ID, name, CmsPropertyDefinition.TYPE_NORMAL); } // depends on control dependency: [catch], data = [none] try { getHistoryDriver(dbc).readPropertyDefinition(dbc, name); // depends on control dependency: [try], data = [none] } catch (CmsException e) { getHistoryDriver(dbc).createPropertyDefinition(dbc, name, CmsPropertyDefinition.TYPE_NORMAL); } // depends on control dependency: [catch], data = [none] } finally { // fire an event that a property of a resource has been deleted OpenCms.fireCmsEvent( new CmsEvent( I_CmsEventListener.EVENT_PROPERTY_DEFINITION_CREATED, Collections.<String, Object> singletonMap("propertyDefinition", propertyDefinition))); } return propertyDefinition; } }
public class class_name { public AnalysisResultFuture runJob(AnalysisJob job, String slaveJobId, AnalysisListener... analysisListeners) { final AnalysisRunner runner = new SlaveAnalysisRunner(_configuration, analysisListeners); final AnalysisResultFuture resultFuture = runner.run(job); if (slaveJobId != null) { _runningJobs.put(slaveJobId, resultFuture); } return resultFuture; } }
public class class_name { public AnalysisResultFuture runJob(AnalysisJob job, String slaveJobId, AnalysisListener... analysisListeners) { final AnalysisRunner runner = new SlaveAnalysisRunner(_configuration, analysisListeners); final AnalysisResultFuture resultFuture = runner.run(job); if (slaveJobId != null) { _runningJobs.put(slaveJobId, resultFuture); // depends on control dependency: [if], data = [(slaveJobId] } return resultFuture; } }
public class class_name { public static synchronized Stack getStack(String className) throws InternalException { println(new StringBuilder().append("StackManager.getStack(\"").append(className).append("\")").toString()); if (!initialized) { initialize(); } // Gets the classloader of the code that called this method, may be null. ClassLoader callerCL = ClassLoader.getSystemClassLoader(); // Walk through the loaded stacks attempting to locate someone who understands the given URL. for (StackInfo di : stacks) { // If the caller does not have permission to load the stack then skip it. if (getCallerClass(callerCL, di.stackClassName) != di.stackClass) { println(new StringBuilder().append(" skipping: ").append(di).toString()); continue; } println(new StringBuilder().append(" trying ").append(di).toString()); if (di.stackClassName.equals(className)) { // Success! println("geStack returning " + di); return (di.stack); } } println("getStack: no suitable stack"); throw new InternalException("No suitable stack"); } }
public class class_name { public static synchronized Stack getStack(String className) throws InternalException { println(new StringBuilder().append("StackManager.getStack(\"").append(className).append("\")").toString()); if (!initialized) { initialize(); } // Gets the classloader of the code that called this method, may be null. ClassLoader callerCL = ClassLoader.getSystemClassLoader(); // Walk through the loaded stacks attempting to locate someone who understands the given URL. for (StackInfo di : stacks) { // If the caller does not have permission to load the stack then skip it. if (getCallerClass(callerCL, di.stackClassName) != di.stackClass) { println(new StringBuilder().append(" skipping: ").append(di).toString()); // depends on control dependency: [if], data = [none] continue; } println(new StringBuilder().append(" trying ").append(di).toString()); if (di.stackClassName.equals(className)) { // Success! println("geStack returning " + di); // depends on control dependency: [if], data = [none] return (di.stack); // depends on control dependency: [if], data = [none] } } println("getStack: no suitable stack"); throw new InternalException("No suitable stack"); } }
public class class_name { public static boolean isSystemKey(String key) { for (String prefix : Config13Constants.SYSTEM_PREFIXES) { if (key.startsWith(prefix)) { return true; } } return false; } }
public class class_name { public static boolean isSystemKey(String key) { for (String prefix : Config13Constants.SYSTEM_PREFIXES) { if (key.startsWith(prefix)) { return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name { public String getTypeDescriptor() { StringBuilder b = new StringBuilder(); int count = getPropertyCount(); for (int i=0; i<count; i++) { StorableProperty property = getProperty(i); if (property.isNullable()) { b.append('N'); } b.append(TypeDesc.forClass(property.getType()).getDescriptor()); } return b.toString(); } }
public class class_name { public String getTypeDescriptor() { StringBuilder b = new StringBuilder(); int count = getPropertyCount(); for (int i=0; i<count; i++) { StorableProperty property = getProperty(i); if (property.isNullable()) { b.append('N'); // depends on control dependency: [if], data = [none] } b.append(TypeDesc.forClass(property.getType()).getDescriptor()); // depends on control dependency: [for], data = [none] } return b.toString(); } }
public class class_name { @Override public boolean onShutdown(long timeout, TimeUnit unit) { if (nodeEngine.getLocalMember().isLiteMember()) { return true; } long timeoutNanos = unit.toNanos(timeout); for (CRDTReplicationAwareService service : getReplicationServices()) { service.prepareToSafeShutdown(); final CRDTReplicationContainer replicationOperation = service.prepareReplicationOperation( replicationVectorClocks.getLatestReplicatedVectorClock(service.getName()), 0); if (replicationOperation == null) { logger.fine("Skipping replication since all CRDTs are replicated"); continue; } long start = System.nanoTime(); if (!tryProcessOnOtherMembers(replicationOperation.getOperation(), service.getName(), timeoutNanos)) { logger.warning("Failed replication of CRDTs for " + service.getName() + ". CRDT state may be lost."); } timeoutNanos -= (System.nanoTime() - start); if (timeoutNanos < 0) { return false; } } return true; } }
public class class_name { @Override public boolean onShutdown(long timeout, TimeUnit unit) { if (nodeEngine.getLocalMember().isLiteMember()) { return true; // depends on control dependency: [if], data = [none] } long timeoutNanos = unit.toNanos(timeout); for (CRDTReplicationAwareService service : getReplicationServices()) { service.prepareToSafeShutdown(); // depends on control dependency: [for], data = [service] final CRDTReplicationContainer replicationOperation = service.prepareReplicationOperation( replicationVectorClocks.getLatestReplicatedVectorClock(service.getName()), 0); if (replicationOperation == null) { logger.fine("Skipping replication since all CRDTs are replicated"); // depends on control dependency: [if], data = [none] continue; } long start = System.nanoTime(); if (!tryProcessOnOtherMembers(replicationOperation.getOperation(), service.getName(), timeoutNanos)) { logger.warning("Failed replication of CRDTs for " + service.getName() + ". CRDT state may be lost."); // depends on control dependency: [if], data = [none] } timeoutNanos -= (System.nanoTime() - start); // depends on control dependency: [for], data = [none] if (timeoutNanos < 0) { return false; // depends on control dependency: [if], data = [none] } } return true; } }
public class class_name { public static StringIdentifierGenerator getInstance(Collection generators) { if (generators == null) { throw new IllegalArgumentException( "Generator collection must not be null"); } if (generators.size() == 0) { throw new IllegalArgumentException( "Generator collection must not be empty"); } StringIdentifierGenerator[] generatorsCopy = new StringIdentifierGenerator[generators.size()]; int i = 0; Iterator it = generators.iterator(); while (it.hasNext()) { generatorsCopy[i] = (StringIdentifierGenerator) it.next(); if (generatorsCopy[i] == null) { throw new IllegalArgumentException( "Generators must not be null"); } i++; } return new CompositeIdentifierGenerator(generatorsCopy); } }
public class class_name { public static StringIdentifierGenerator getInstance(Collection generators) { if (generators == null) { throw new IllegalArgumentException( "Generator collection must not be null"); } if (generators.size() == 0) { throw new IllegalArgumentException( "Generator collection must not be empty"); } StringIdentifierGenerator[] generatorsCopy = new StringIdentifierGenerator[generators.size()]; int i = 0; Iterator it = generators.iterator(); while (it.hasNext()) { generatorsCopy[i] = (StringIdentifierGenerator) it.next(); // depends on control dependency: [while], data = [none] if (generatorsCopy[i] == null) { throw new IllegalArgumentException( "Generators must not be null"); } i++; // depends on control dependency: [while], data = [none] } return new CompositeIdentifierGenerator(generatorsCopy); } }
public class class_name { @Override public final CTNumDataSource getCTNumDataSourceFromCTSer( final Object ctObjSer) { if (ctObjSer instanceof CTAreaSer) { return ((CTAreaSer) ctObjSer).getVal(); } return null; } }
public class class_name { @Override public final CTNumDataSource getCTNumDataSourceFromCTSer( final Object ctObjSer) { if (ctObjSer instanceof CTAreaSer) { return ((CTAreaSer) ctObjSer).getVal(); // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { public static User userInfo(String access_token,String openid,int emoji){ HttpUriRequest httpUriRequest = RequestBuilder.get() .setUri(BASE_URI+"/cgi-bin/user/info") .addParameter(PARAM_ACCESS_TOKEN, API.accessToken(access_token)) .addParameter("openid",openid) .addParameter("lang","zh_CN") .build(); User user = LocalHttpClient.executeJsonResult(httpUriRequest,User.class); if(emoji != 0 && user != null && user.getNickname() != null){ user.setNickname_emoji(EmojiUtil.parse(user.getNickname(), emoji)); } return user; } }
public class class_name { public static User userInfo(String access_token,String openid,int emoji){ HttpUriRequest httpUriRequest = RequestBuilder.get() .setUri(BASE_URI+"/cgi-bin/user/info") .addParameter(PARAM_ACCESS_TOKEN, API.accessToken(access_token)) .addParameter("openid",openid) .addParameter("lang","zh_CN") .build(); User user = LocalHttpClient.executeJsonResult(httpUriRequest,User.class); if(emoji != 0 && user != null && user.getNickname() != null){ user.setNickname_emoji(EmojiUtil.parse(user.getNickname(), emoji)); // depends on control dependency: [if], data = [none] } return user; } }
public class class_name { public static WeldContainer current() { List<String> ids = WeldContainer.getRunningContainerIds(); if (ids.size() == 1) { return WeldContainer.instance(ids.get(0)); } else { // if there is either no container or multiple containers we want to throw exception // in this case Weld cannot determine which container is "current" throw WeldSELogger.LOG.zeroOrMoreThanOneContainerRunning(); } } }
public class class_name { public static WeldContainer current() { List<String> ids = WeldContainer.getRunningContainerIds(); if (ids.size() == 1) { return WeldContainer.instance(ids.get(0)); // depends on control dependency: [if], data = [none] } else { // if there is either no container or multiple containers we want to throw exception // in this case Weld cannot determine which container is "current" throw WeldSELogger.LOG.zeroOrMoreThanOneContainerRunning(); } } }
public class class_name { public static void cutSuffix(String suffix, StringBuilder stringBuilder) { if (stringBuilder.substring(stringBuilder.length() - suffix.length()).equals(suffix)) { stringBuilder.delete(stringBuilder.length() - suffix.length(), stringBuilder.length()); } } }
public class class_name { public static void cutSuffix(String suffix, StringBuilder stringBuilder) { if (stringBuilder.substring(stringBuilder.length() - suffix.length()).equals(suffix)) { stringBuilder.delete(stringBuilder.length() - suffix.length(), stringBuilder.length()); // depends on control dependency: [if], data = [none] } } }
public class class_name { protected void start() { if (mNumInstances == 0) { if (mListeners != null && mListeners.size() > 0) { ArrayList<TransitionListener> tmpListeners = (ArrayList<TransitionListener>) mListeners.clone(); int numListeners = tmpListeners.size(); for (int i = 0; i < numListeners; ++i) { tmpListeners.get(i).onTransitionStart(this); } } mEnded = false; } mNumInstances++; } }
public class class_name { protected void start() { if (mNumInstances == 0) { if (mListeners != null && mListeners.size() > 0) { ArrayList<TransitionListener> tmpListeners = (ArrayList<TransitionListener>) mListeners.clone(); int numListeners = tmpListeners.size(); for (int i = 0; i < numListeners; ++i) { tmpListeners.get(i).onTransitionStart(this); // depends on control dependency: [for], data = [i] } } mEnded = false; // depends on control dependency: [if], data = [none] } mNumInstances++; } }
public class class_name { public EEnum getGSMPPREC() { if (gsmpprecEEnum == null) { gsmpprecEEnum = (EEnum)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(146); } return gsmpprecEEnum; } }
public class class_name { public EEnum getGSMPPREC() { if (gsmpprecEEnum == null) { gsmpprecEEnum = (EEnum)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(146); // depends on control dependency: [if], data = [none] } return gsmpprecEEnum; } }
public class class_name { void put(String uuid, CachingIndexReader reader, int n) { LRUMap cacheSegment = docNumbers[getSegmentIndex(uuid.charAt(0))]; //UUID key = UUID.fromString(uuid); String key = uuid; synchronized (cacheSegment) { Entry e = (Entry)cacheSegment.get(key); if (e != null) { // existing entry // ignore if reader is older than the one in entry if (reader.getCreationTick() <= e.creationTick) { if (log.isDebugEnabled()) { log.debug("Ignoring put(). New entry is not from a newer reader. " + "existing: " + e.creationTick + ", new: " + reader.getCreationTick()); } e = null; } } else { // entry did not exist e = new Entry(reader.getCreationTick(), n); } if (e != null) { cacheSegment.put(key, e); } } } }
public class class_name { void put(String uuid, CachingIndexReader reader, int n) { LRUMap cacheSegment = docNumbers[getSegmentIndex(uuid.charAt(0))]; //UUID key = UUID.fromString(uuid); String key = uuid; synchronized (cacheSegment) { Entry e = (Entry)cacheSegment.get(key); if (e != null) { // existing entry // ignore if reader is older than the one in entry if (reader.getCreationTick() <= e.creationTick) { if (log.isDebugEnabled()) { log.debug("Ignoring put(). New entry is not from a newer reader. " + "existing: " + e.creationTick + ", new: " + reader.getCreationTick()); // depends on control dependency: [if], data = [none] } e = null; // depends on control dependency: [if], data = [none] } } else { // entry did not exist e = new Entry(reader.getCreationTick(), n); // depends on control dependency: [if], data = [none] } if (e != null) { cacheSegment.put(key, e); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public final void ruleXClosure() throws RecognitionException { int stackSize = keepStackSize(); try { // InternalXbase.g:796:2: ( ( ( rule__XClosure__Group__0 ) ) ) // InternalXbase.g:797:2: ( ( rule__XClosure__Group__0 ) ) { // InternalXbase.g:797:2: ( ( rule__XClosure__Group__0 ) ) // InternalXbase.g:798:3: ( rule__XClosure__Group__0 ) { if ( state.backtracking==0 ) { before(grammarAccess.getXClosureAccess().getGroup()); } // InternalXbase.g:799:3: ( rule__XClosure__Group__0 ) // InternalXbase.g:799:4: rule__XClosure__Group__0 { pushFollow(FOLLOW_2); rule__XClosure__Group__0(); state._fsp--; if (state.failed) return ; } if ( state.backtracking==0 ) { after(grammarAccess.getXClosureAccess().getGroup()); } } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } }
public class class_name { public final void ruleXClosure() throws RecognitionException { int stackSize = keepStackSize(); try { // InternalXbase.g:796:2: ( ( ( rule__XClosure__Group__0 ) ) ) // InternalXbase.g:797:2: ( ( rule__XClosure__Group__0 ) ) { // InternalXbase.g:797:2: ( ( rule__XClosure__Group__0 ) ) // InternalXbase.g:798:3: ( rule__XClosure__Group__0 ) { if ( state.backtracking==0 ) { before(grammarAccess.getXClosureAccess().getGroup()); // depends on control dependency: [if], data = [none] } // InternalXbase.g:799:3: ( rule__XClosure__Group__0 ) // InternalXbase.g:799:4: rule__XClosure__Group__0 { pushFollow(FOLLOW_2); rule__XClosure__Group__0(); state._fsp--; if (state.failed) return ; } if ( state.backtracking==0 ) { after(grammarAccess.getXClosureAccess().getGroup()); // depends on control dependency: [if], data = [none] } } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } }
public class class_name { public synchronized void rollback() throws HeuristicCommitException, HeuristicMixedException, HeuristicHazardException, SystemException { if (tc.isEntryEnabled()) Tr.entry(tc, "rollback", this); // Ensure timeout cannot rollback the underlying transaction ((DistributableTransaction) _transaction).addAssociation(); boolean sysException = false; // Cancel any outstanding (in-doubt or transaction) timer EmbeddableTimeoutManager.setTimeout(_transaction, EmbeddableTimeoutManager.CANCEL_TIMEOUT, 0); final int state = _transaction.getTransactionState().getState(); switch (state) { case TransactionState.STATE_ACTIVE: case TransactionState.STATE_PREPARED: try { _transaction.getTransactionState().setState(TransactionState.STATE_ROLLING_BACK); } catch (javax.transaction.SystemException se) { FFDCFilter.processException(se, "com.ibm.tx.remote.TransactionWrapper.rollback", "586", this); if (tc.isDebugEnabled()) Tr.debug(tc, "Exception caught setting state to ROLLING_BACK!", se); sysException = true; } try { // Resume the transaction created from the incoming // request so that it is installed on the thread. ((EmbeddableTranManagerSet) TransactionManagerFactory.getTransactionManager()).resume(_transaction); _transaction.internalRollback(); _transaction.notifyCompletion(); } catch (HeuristicMixedException hme) { // No FFDC code needed. _heuristic = StatefulResource.HEURISTIC_MIXED; } catch (HeuristicHazardException hhe) { // No FFDC code needed. _heuristic = StatefulResource.HEURISTIC_HAZARD; } catch (HeuristicCommitException hce) { // No FFDC code needed. 
_heuristic = StatefulResource.HEURISTIC_COMMIT; } catch (Throwable exc) // javax.transaction.SystemException { FFDCFilter.processException(exc, "com.ibm.tx.remote.TransactionWrapper.rollback", "610", this); Tr.error(tc, "WTRN0071_ROLLBACK_FAILED", exc); _transaction.notifyCompletion(); sysException = true; } break; // If the transaction that this object represents has already been completed, // raise a heuristic exception if necessary. This object must wait for a // forget before destroying itself if it returns a heuristic exception. case TransactionState.STATE_HEURISTIC_ON_ROLLBACK: case TransactionState.STATE_ROLLED_BACK: // Return last heuristic value and allow for recovery _heuristic = _transaction.getResources().getHeuristicOutcome(); break; case TransactionState.STATE_ROLLING_BACK: // We should only get in this state if we are in recovery and this // inbound rollback arrives. In other cases, if we are rolling back // we will hold out using the association counts and if we are // locally retrying we will be in a heuristic state as we returned // heuristic hazard to the superior. ((DistributableTransaction) _transaction).removeAssociation(); final TRANSIENT tre = new TRANSIENT(); if (tc.isEntryEnabled()) Tr.exit(tc, "rollback", tre); throw tre; case TransactionState.STATE_COMMITTING: case TransactionState.STATE_HEURISTIC_ON_COMMIT: case TransactionState.STATE_COMMITTED: // Admin heuristic commit ... // again retry ... respond with heurcom _heuristic = StatefulResource.HEURISTIC_COMMIT; break; case TransactionState.STATE_NONE: // Transaction has completed and is now finished // Normally the remoteable object would be disconnected from the orb, // but ... 
timing may mean get got here while it was happenning // We could just return ok, but it is more true to return exception ((DistributableTransaction) _transaction).removeAssociation(); final OBJECT_NOT_EXIST one = new OBJECT_NOT_EXIST(); if (tc.isEntryEnabled()) Tr.exit(tc, "rollback", one); throw one; default: Tr.error(tc, "WTRN0072_ROLLBACK_BAD_STATE", TransactionState.stateToString(state)); sysException = true; _transaction.notifyCompletion(); break; } // end switch ((DistributableTransaction) _transaction).removeAssociation(); switch (_heuristic) { case StatefulResource.NONE: break; case StatefulResource.HEURISTIC_HAZARD: // _transaction.addHeuristic(); final HeuristicHazardException hh = new HeuristicHazardException(); if (tc.isEntryEnabled()) Tr.exit(tc, "rollback", hh); throw hh; case StatefulResource.HEURISTIC_COMMIT: // _transaction.addHeuristic(); final HeuristicCommitException hc = new HeuristicCommitException(); if (tc.isEntryEnabled()) Tr.exit(tc, "rollback", hc); throw hc; default: // _transaction.addHeuristic(); final HeuristicMixedException hm = new HeuristicMixedException(); if (tc.isEntryEnabled()) Tr.exit(tc, "rollback", hm); throw hm; } if (sysException) { // destroy(); final INTERNAL ie = new INTERNAL(MinorCode.LOGIC_ERROR, null); if (tc.isEntryEnabled()) Tr.exit(tc, "rollback", ie); throw ie; } if (tc.isEntryEnabled()) Tr.exit(tc, "rollback"); } }
public class class_name { public synchronized void rollback() throws HeuristicCommitException, HeuristicMixedException, HeuristicHazardException, SystemException { if (tc.isEntryEnabled()) Tr.entry(tc, "rollback", this); // Ensure timeout cannot rollback the underlying transaction ((DistributableTransaction) _transaction).addAssociation(); boolean sysException = false; // Cancel any outstanding (in-doubt or transaction) timer EmbeddableTimeoutManager.setTimeout(_transaction, EmbeddableTimeoutManager.CANCEL_TIMEOUT, 0); final int state = _transaction.getTransactionState().getState(); switch (state) { case TransactionState.STATE_ACTIVE: case TransactionState.STATE_PREPARED: try { _transaction.getTransactionState().setState(TransactionState.STATE_ROLLING_BACK); // depends on control dependency: [try], data = [none] } catch (javax.transaction.SystemException se) { FFDCFilter.processException(se, "com.ibm.tx.remote.TransactionWrapper.rollback", "586", this); if (tc.isDebugEnabled()) Tr.debug(tc, "Exception caught setting state to ROLLING_BACK!", se); sysException = true; } // depends on control dependency: [catch], data = [none] try { // Resume the transaction created from the incoming // request so that it is installed on the thread. ((EmbeddableTranManagerSet) TransactionManagerFactory.getTransactionManager()).resume(_transaction); // depends on control dependency: [try], data = [none] _transaction.internalRollback(); // depends on control dependency: [try], data = [none] _transaction.notifyCompletion(); // depends on control dependency: [try], data = [none] } catch (HeuristicMixedException hme) { // No FFDC code needed. _heuristic = StatefulResource.HEURISTIC_MIXED; } catch (HeuristicHazardException hhe) // depends on control dependency: [catch], data = [none] { // No FFDC code needed. _heuristic = StatefulResource.HEURISTIC_HAZARD; } catch (HeuristicCommitException hce) // depends on control dependency: [catch], data = [none] { // No FFDC code needed. 
_heuristic = StatefulResource.HEURISTIC_COMMIT; } catch (Throwable exc) // javax.transaction.SystemException // depends on control dependency: [catch], data = [none] { FFDCFilter.processException(exc, "com.ibm.tx.remote.TransactionWrapper.rollback", "610", this); Tr.error(tc, "WTRN0071_ROLLBACK_FAILED", exc); _transaction.notifyCompletion(); sysException = true; } // depends on control dependency: [catch], data = [none] break; // If the transaction that this object represents has already been completed, // raise a heuristic exception if necessary. This object must wait for a // forget before destroying itself if it returns a heuristic exception. case TransactionState.STATE_HEURISTIC_ON_ROLLBACK: case TransactionState.STATE_ROLLED_BACK: // Return last heuristic value and allow for recovery _heuristic = _transaction.getResources().getHeuristicOutcome(); break; case TransactionState.STATE_ROLLING_BACK: // We should only get in this state if we are in recovery and this // inbound rollback arrives. In other cases, if we are rolling back // we will hold out using the association counts and if we are // locally retrying we will be in a heuristic state as we returned // heuristic hazard to the superior. ((DistributableTransaction) _transaction).removeAssociation(); final TRANSIENT tre = new TRANSIENT(); if (tc.isEntryEnabled()) Tr.exit(tc, "rollback", tre); throw tre; case TransactionState.STATE_COMMITTING: case TransactionState.STATE_HEURISTIC_ON_COMMIT: case TransactionState.STATE_COMMITTED: // Admin heuristic commit ... // again retry ... respond with heurcom _heuristic = StatefulResource.HEURISTIC_COMMIT; break; case TransactionState.STATE_NONE: // Transaction has completed and is now finished // Normally the remoteable object would be disconnected from the orb, // but ... 
timing may mean get got here while it was happenning // We could just return ok, but it is more true to return exception ((DistributableTransaction) _transaction).removeAssociation(); final OBJECT_NOT_EXIST one = new OBJECT_NOT_EXIST(); if (tc.isEntryEnabled()) Tr.exit(tc, "rollback", one); throw one; default: Tr.error(tc, "WTRN0072_ROLLBACK_BAD_STATE", TransactionState.stateToString(state)); sysException = true; _transaction.notifyCompletion(); break; } // end switch ((DistributableTransaction) _transaction).removeAssociation(); switch (_heuristic) { case StatefulResource.NONE: break; case StatefulResource.HEURISTIC_HAZARD: // _transaction.addHeuristic(); final HeuristicHazardException hh = new HeuristicHazardException(); if (tc.isEntryEnabled()) Tr.exit(tc, "rollback", hh); throw hh; case StatefulResource.HEURISTIC_COMMIT: // _transaction.addHeuristic(); final HeuristicCommitException hc = new HeuristicCommitException(); if (tc.isEntryEnabled()) Tr.exit(tc, "rollback", hc); throw hc; default: // _transaction.addHeuristic(); final HeuristicMixedException hm = new HeuristicMixedException(); if (tc.isEntryEnabled()) Tr.exit(tc, "rollback", hm); throw hm; } if (sysException) { // destroy(); final INTERNAL ie = new INTERNAL(MinorCode.LOGIC_ERROR, null); if (tc.isEntryEnabled()) Tr.exit(tc, "rollback", ie); throw ie; } if (tc.isEntryEnabled()) Tr.exit(tc, "rollback"); } }
public class class_name { public static AzimuthZenithAngle calculateSolarPosition(final GregorianCalendar date, final double latitude, final double longitude) { final Calendar utcTime = new GregorianCalendar(TimeZone.getTimeZone("GMT")); utcTime.setTimeInMillis(date.getTimeInMillis()); // Main variables double dElapsedJulianDays; double dDecimalHours; double dEclipticLongitude; double dEclipticObliquity; double dRightAscension; double dDeclination; // Auxiliary variables double dY; double dX; // Calculate difference in days between the current Julian Day // and JD 2451545.0, which is noon 1 January 2000 Universal Time { long liAux1; long liAux2; double dJulianDate; // Calculate time of the day in UT decimal hours dDecimalHours = utcTime.get(Calendar.HOUR_OF_DAY) + (utcTime.get(Calendar.MINUTE) + utcTime.get(Calendar.SECOND) / 60.0) / 60.0; // Calculate current Julian Day liAux1 = (utcTime.get(Calendar.MONTH) + 1 - 14) / 12; liAux2 = (1461 * (utcTime.get(Calendar.YEAR) + 4800 + liAux1)) / 4 + (367 * (utcTime.get(Calendar.MONTH) + 1 - 2 - 12 * liAux1)) / 12 - (3 * ((utcTime.get(Calendar.YEAR) + 4900 + liAux1) / 100)) / 4 + utcTime.get(Calendar.DAY_OF_MONTH) - 32075; dJulianDate = (liAux2) - 0.5 + dDecimalHours / 24.0; // Calculate difference between current Julian Day and JD 2451545.0 dElapsedJulianDays = dJulianDate - 2451545.0; } // Calculate ecliptic coordinates (ecliptic longitude and obliquity of the // ecliptic in radians but without limiting the angle to be less than 2*Pi // (i.e., the result may be greater than 2*Pi) { double dMeanLongitude; double dMeanAnomaly; double dOmega; dOmega = 2.1429 - 0.0010394594 * dElapsedJulianDays; dMeanLongitude = 4.8950630 + 0.017202791698 * dElapsedJulianDays; // Radians dMeanAnomaly = 6.2400600 + 0.0172019699 * dElapsedJulianDays; dEclipticLongitude = dMeanLongitude + 0.03341607 * Math.sin(dMeanAnomaly) + 0.00034894 * Math.sin(2 * dMeanAnomaly) - 0.0001134 - 0.0000203 * Math.sin(dOmega); dEclipticObliquity = 0.4090928 - 
6.2140e-9 * dElapsedJulianDays + 0.0000396 * Math.cos(dOmega); } // Calculate celestial coordinates ( right ascension and declination ) in radians // but without limiting the angle to be less than 2*Pi (i.e., the result // may be greater than 2*Pi) { double dSinEclipticLongitude; dSinEclipticLongitude = Math.sin(dEclipticLongitude); dY = Math.cos(dEclipticObliquity) * dSinEclipticLongitude; dX = Math.cos(dEclipticLongitude); dRightAscension = Math.atan2(dY, dX); if (dRightAscension < 0.0) { dRightAscension = dRightAscension + 2 * Math.PI; } dDeclination = Math.asin(Math.sin(dEclipticObliquity) * dSinEclipticLongitude); } // Calculate local coordinates ( azimuth and zenith angle ) in degrees { double dGreenwichMeanSiderealTime; double dLocalMeanSiderealTime; double dLatitudeInRadians; double dHourAngle; double dCosLatitude; double dSinLatitude; double dCosHourAngle; double dParallax; dGreenwichMeanSiderealTime = 6.6974243242 + 0.0657098283 * dElapsedJulianDays + dDecimalHours; dLocalMeanSiderealTime = (dGreenwichMeanSiderealTime * 15 + longitude) * RAD; dHourAngle = dLocalMeanSiderealTime - dRightAscension; dLatitudeInRadians = latitude * RAD; dCosLatitude = Math.cos(dLatitudeInRadians); dSinLatitude = Math.sin(dLatitudeInRadians); dCosHourAngle = Math.cos(dHourAngle); double zenithAngle = (Math.acos(dCosLatitude * dCosHourAngle * Math.cos(dDeclination) + Math.sin(dDeclination) * dSinLatitude)); dY = -Math.sin(dHourAngle); dX = Math.tan(dDeclination) * dCosLatitude - dSinLatitude * dCosHourAngle; double azimuth = Math.atan2(dY, dX); if (azimuth < 0.0) { azimuth = azimuth + TWOPI; } azimuth = azimuth / RAD; // Parallax Correction dParallax = (D_EARTH_MEAN_RADIUS / D_ASTRONOMICAL_UNIT) * Math.sin(zenithAngle); zenithAngle = (zenithAngle + dParallax) / RAD; return new AzimuthZenithAngle(azimuth, zenithAngle); } } }
public class class_name { public static AzimuthZenithAngle calculateSolarPosition(final GregorianCalendar date, final double latitude, final double longitude) { final Calendar utcTime = new GregorianCalendar(TimeZone.getTimeZone("GMT")); utcTime.setTimeInMillis(date.getTimeInMillis()); // Main variables double dElapsedJulianDays; double dDecimalHours; double dEclipticLongitude; double dEclipticObliquity; double dRightAscension; double dDeclination; // Auxiliary variables double dY; double dX; // Calculate difference in days between the current Julian Day // and JD 2451545.0, which is noon 1 January 2000 Universal Time { long liAux1; long liAux2; double dJulianDate; // Calculate time of the day in UT decimal hours dDecimalHours = utcTime.get(Calendar.HOUR_OF_DAY) + (utcTime.get(Calendar.MINUTE) + utcTime.get(Calendar.SECOND) / 60.0) / 60.0; // Calculate current Julian Day liAux1 = (utcTime.get(Calendar.MONTH) + 1 - 14) / 12; liAux2 = (1461 * (utcTime.get(Calendar.YEAR) + 4800 + liAux1)) / 4 + (367 * (utcTime.get(Calendar.MONTH) + 1 - 2 - 12 * liAux1)) / 12 - (3 * ((utcTime.get(Calendar.YEAR) + 4900 + liAux1) / 100)) / 4 + utcTime.get(Calendar.DAY_OF_MONTH) - 32075; dJulianDate = (liAux2) - 0.5 + dDecimalHours / 24.0; // Calculate difference between current Julian Day and JD 2451545.0 dElapsedJulianDays = dJulianDate - 2451545.0; } // Calculate ecliptic coordinates (ecliptic longitude and obliquity of the // ecliptic in radians but without limiting the angle to be less than 2*Pi // (i.e., the result may be greater than 2*Pi) { double dMeanLongitude; double dMeanAnomaly; double dOmega; dOmega = 2.1429 - 0.0010394594 * dElapsedJulianDays; dMeanLongitude = 4.8950630 + 0.017202791698 * dElapsedJulianDays; // Radians dMeanAnomaly = 6.2400600 + 0.0172019699 * dElapsedJulianDays; dEclipticLongitude = dMeanLongitude + 0.03341607 * Math.sin(dMeanAnomaly) + 0.00034894 * Math.sin(2 * dMeanAnomaly) - 0.0001134 - 0.0000203 * Math.sin(dOmega); dEclipticObliquity = 0.4090928 - 
6.2140e-9 * dElapsedJulianDays + 0.0000396 * Math.cos(dOmega); } // Calculate celestial coordinates ( right ascension and declination ) in radians // but without limiting the angle to be less than 2*Pi (i.e., the result // may be greater than 2*Pi) { double dSinEclipticLongitude; dSinEclipticLongitude = Math.sin(dEclipticLongitude); dY = Math.cos(dEclipticObliquity) * dSinEclipticLongitude; dX = Math.cos(dEclipticLongitude); dRightAscension = Math.atan2(dY, dX); if (dRightAscension < 0.0) { dRightAscension = dRightAscension + 2 * Math.PI; // depends on control dependency: [if], data = [none] } dDeclination = Math.asin(Math.sin(dEclipticObliquity) * dSinEclipticLongitude); } // Calculate local coordinates ( azimuth and zenith angle ) in degrees { double dGreenwichMeanSiderealTime; double dLocalMeanSiderealTime; double dLatitudeInRadians; double dHourAngle; double dCosLatitude; double dSinLatitude; double dCosHourAngle; double dParallax; dGreenwichMeanSiderealTime = 6.6974243242 + 0.0657098283 * dElapsedJulianDays + dDecimalHours; dLocalMeanSiderealTime = (dGreenwichMeanSiderealTime * 15 + longitude) * RAD; dHourAngle = dLocalMeanSiderealTime - dRightAscension; dLatitudeInRadians = latitude * RAD; dCosLatitude = Math.cos(dLatitudeInRadians); dSinLatitude = Math.sin(dLatitudeInRadians); dCosHourAngle = Math.cos(dHourAngle); double zenithAngle = (Math.acos(dCosLatitude * dCosHourAngle * Math.cos(dDeclination) + Math.sin(dDeclination) * dSinLatitude)); dY = -Math.sin(dHourAngle); dX = Math.tan(dDeclination) * dCosLatitude - dSinLatitude * dCosHourAngle; double azimuth = Math.atan2(dY, dX); if (azimuth < 0.0) { azimuth = azimuth + TWOPI; // depends on control dependency: [if], data = [none] } azimuth = azimuth / RAD; // Parallax Correction dParallax = (D_EARTH_MEAN_RADIUS / D_ASTRONOMICAL_UNIT) * Math.sin(zenithAngle); zenithAngle = (zenithAngle + dParallax) / RAD; return new AzimuthZenithAngle(azimuth, zenithAngle); } } }
public class class_name { public static String getTokenText(INode node) { if (node instanceof ILeafNode) return ((ILeafNode) node).getText(); else { StringBuilder builder = new StringBuilder(Math.max(node.getTotalLength(), 1)); boolean hiddenSeen = false; for (ILeafNode leaf : node.getLeafNodes()) { if (!leaf.isHidden()) { if (hiddenSeen && builder.length() > 0) builder.append(' '); builder.append(leaf.getText()); hiddenSeen = false; } else { hiddenSeen = true; } } return builder.toString(); } } }
public class class_name { public static String getTokenText(INode node) { if (node instanceof ILeafNode) return ((ILeafNode) node).getText(); else { StringBuilder builder = new StringBuilder(Math.max(node.getTotalLength(), 1)); boolean hiddenSeen = false; for (ILeafNode leaf : node.getLeafNodes()) { if (!leaf.isHidden()) { if (hiddenSeen && builder.length() > 0) builder.append(' '); builder.append(leaf.getText()); // depends on control dependency: [if], data = [none] hiddenSeen = false; // depends on control dependency: [if], data = [none] } else { hiddenSeen = true; // depends on control dependency: [if], data = [none] } } return builder.toString(); // depends on control dependency: [if], data = [none] } } }
public class class_name { private int getByteLength(final Object val) { if (val == null) { return 4; } String str = val.toString(); try { int len = str.getBytes(ENCODING_SHIFT_JIS).length; return len <= 200 ? len : 200; } catch (UnsupportedEncodingException ex) { return 1; } } }
public class class_name { private int getByteLength(final Object val) { if (val == null) { return 4; // depends on control dependency: [if], data = [none] } String str = val.toString(); try { int len = str.getBytes(ENCODING_SHIFT_JIS).length; return len <= 200 ? len : 200; // depends on control dependency: [try], data = [none] } catch (UnsupportedEncodingException ex) { return 1; } // depends on control dependency: [catch], data = [none] } }
public class class_name { private void scanHexExponentAndSuffix(int pos) { if (reader.ch == 'p' || reader.ch == 'P') { reader.putChar(true); skipIllegalUnderscores(); if (reader.ch == '+' || reader.ch == '-') { reader.putChar(true); } skipIllegalUnderscores(); if ('0' <= reader.ch && reader.ch <= '9') { scanDigits(pos, 10); if (!allowHexFloats) { lexError(pos, "unsupported.fp.lit", source.name); allowHexFloats = true; } else if (!hexFloatsWork) lexError(pos, "unsupported.cross.fp.lit"); } else lexError(pos, "malformed.fp.lit"); } else { lexError(pos, "malformed.fp.lit"); } if (reader.ch == 'f' || reader.ch == 'F') { reader.putChar(true); tk = TokenKind.FLOATLITERAL; radix = 16; } else { if (reader.ch == 'd' || reader.ch == 'D') { reader.putChar(true); } tk = TokenKind.DOUBLELITERAL; radix = 16; } } }
public class class_name { private void scanHexExponentAndSuffix(int pos) { if (reader.ch == 'p' || reader.ch == 'P') { reader.putChar(true); // depends on control dependency: [if], data = [none] skipIllegalUnderscores(); // depends on control dependency: [if], data = [none] if (reader.ch == '+' || reader.ch == '-') { reader.putChar(true); // depends on control dependency: [if], data = [none] } skipIllegalUnderscores(); // depends on control dependency: [if], data = [none] if ('0' <= reader.ch && reader.ch <= '9') { scanDigits(pos, 10); // depends on control dependency: [if], data = [none] if (!allowHexFloats) { lexError(pos, "unsupported.fp.lit", source.name); // depends on control dependency: [if], data = [none] allowHexFloats = true; // depends on control dependency: [if], data = [none] } else if (!hexFloatsWork) lexError(pos, "unsupported.cross.fp.lit"); } else lexError(pos, "malformed.fp.lit"); } else { lexError(pos, "malformed.fp.lit"); // depends on control dependency: [if], data = [none] } if (reader.ch == 'f' || reader.ch == 'F') { reader.putChar(true); // depends on control dependency: [if], data = [none] tk = TokenKind.FLOATLITERAL; // depends on control dependency: [if], data = [none] radix = 16; // depends on control dependency: [if], data = [none] } else { if (reader.ch == 'd' || reader.ch == 'D') { reader.putChar(true); // depends on control dependency: [if], data = [none] } tk = TokenKind.DOUBLELITERAL; // depends on control dependency: [if], data = [none] radix = 16; // depends on control dependency: [if], data = [none] } } }
public class class_name { public Interactions getInteractions() { try { if (!isNull(Interactions.KEY_NAME)) { Object obj = get(Interactions.KEY_NAME); if (obj instanceof JSONArray) { Interactions interactions = new Interactions(); JSONArray interactionsJSONArray = (JSONArray) obj; for (int i = 0; i < interactionsJSONArray.length(); i++) { Interaction interaction = Interaction.Factory.parseInteraction(interactionsJSONArray.getString(i)); if (interaction != null) { interactions.put(interaction.getId(), interaction); } else { // This is an unknown Interaction type. Probably for a future SDK version. } } return interactions; } } } catch (JSONException e) { ApptentiveLog.w(INTERACTIONS, e, "Unable to load Interactions from InteractionManifest."); logException(e); } return null; } }
public class class_name { public Interactions getInteractions() { try { if (!isNull(Interactions.KEY_NAME)) { Object obj = get(Interactions.KEY_NAME); if (obj instanceof JSONArray) { Interactions interactions = new Interactions(); JSONArray interactionsJSONArray = (JSONArray) obj; for (int i = 0; i < interactionsJSONArray.length(); i++) { Interaction interaction = Interaction.Factory.parseInteraction(interactionsJSONArray.getString(i)); if (interaction != null) { interactions.put(interaction.getId(), interaction); // depends on control dependency: [if], data = [(interaction] } else { // This is an unknown Interaction type. Probably for a future SDK version. } } return interactions; // depends on control dependency: [if], data = [none] } } } catch (JSONException e) { ApptentiveLog.w(INTERACTIONS, e, "Unable to load Interactions from InteractionManifest."); logException(e); } // depends on control dependency: [catch], data = [none] return null; } }
public class class_name { public boolean isAccessDenied(final String viewName) { final List<DashboardMenuItem> accessibleViews = getAccessibleViews(); boolean accessDeined = Boolean.TRUE.booleanValue(); for (final DashboardMenuItem dashboardViewType : accessibleViews) { if (dashboardViewType.getViewName().equals(viewName)) { accessDeined = Boolean.FALSE.booleanValue(); } } return accessDeined; } }
public class class_name { public boolean isAccessDenied(final String viewName) { final List<DashboardMenuItem> accessibleViews = getAccessibleViews(); boolean accessDeined = Boolean.TRUE.booleanValue(); for (final DashboardMenuItem dashboardViewType : accessibleViews) { if (dashboardViewType.getViewName().equals(viewName)) { accessDeined = Boolean.FALSE.booleanValue(); // depends on control dependency: [if], data = [none] } } return accessDeined; } }
public class class_name { public static JsonValue readFrom( String text ) { try { return new JsonParser( text ).parse(); } catch( IOException exception ) { // JsonParser does not throw IOException for String throw new RuntimeException( exception ); } } }
public class class_name { public static JsonValue readFrom( String text ) { try { return new JsonParser( text ).parse(); // depends on control dependency: [try], data = [none] } catch( IOException exception ) { // JsonParser does not throw IOException for String throw new RuntimeException( exception ); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static double orientationAngle(Point3d[] fixed, Point3d[] moved, boolean centered) { if (!centered) { fixed = CalcPoint.clonePoint3dArray(fixed); moved = CalcPoint.clonePoint3dArray(moved); CalcPoint.center(fixed); CalcPoint.center(moved); } return orientationAngle(fixed, moved); } }
public class class_name { public static double orientationAngle(Point3d[] fixed, Point3d[] moved, boolean centered) { if (!centered) { fixed = CalcPoint.clonePoint3dArray(fixed); // depends on control dependency: [if], data = [none] moved = CalcPoint.clonePoint3dArray(moved); // depends on control dependency: [if], data = [none] CalcPoint.center(fixed); // depends on control dependency: [if], data = [none] CalcPoint.center(moved); // depends on control dependency: [if], data = [none] } return orientationAngle(fixed, moved); } }
public class class_name { public void marshall(BasePathMapping basePathMapping, ProtocolMarshaller protocolMarshaller) { if (basePathMapping == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(basePathMapping.getBasePath(), BASEPATH_BINDING); protocolMarshaller.marshall(basePathMapping.getRestApiId(), RESTAPIID_BINDING); protocolMarshaller.marshall(basePathMapping.getStage(), STAGE_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(BasePathMapping basePathMapping, ProtocolMarshaller protocolMarshaller) { if (basePathMapping == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(basePathMapping.getBasePath(), BASEPATH_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(basePathMapping.getRestApiId(), RESTAPIID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(basePathMapping.getStage(), STAGE_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override public Map<String, DataSource> getDataSources() { HashMap<String, DataSource> map = new HashMap<>(); for (Map.Entry<String, HikariDataSource> entry : sources.entrySet()) { if (entry.getValue() != null) { map.put(entry.getKey(), entry.getValue()); } } return map; } }
public class class_name { @Override public Map<String, DataSource> getDataSources() { HashMap<String, DataSource> map = new HashMap<>(); for (Map.Entry<String, HikariDataSource> entry : sources.entrySet()) { if (entry.getValue() != null) { map.put(entry.getKey(), entry.getValue()); // depends on control dependency: [if], data = [none] } } return map; } }
public class class_name { public static String encodePath(Object source) { Assert.notNull(source, "Path value must not be null!"); try { return UriUtils.encodePath(source.toString(), ENCODING); } catch (Throwable e) { throw new IllegalStateException(e); } } }
public class class_name { public static String encodePath(Object source) { Assert.notNull(source, "Path value must not be null!"); try { return UriUtils.encodePath(source.toString(), ENCODING); // depends on control dependency: [try], data = [none] } catch (Throwable e) { throw new IllegalStateException(e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static void setDataTableScanFilter( final Scanner scanner, final List<byte[]> group_bys, final ByteMap<byte[][]> row_key_literals, final boolean explicit_tags, final boolean enable_fuzzy_filter, final int end_time) { // no-op if ((group_bys == null || group_bys.isEmpty()) && (row_key_literals == null || row_key_literals.isEmpty())) { return; } final int prefix_width = Const.SALT_WIDTH() + TSDB.metrics_width() + Const.TIMESTAMP_BYTES; final short name_width = TSDB.tagk_width(); final short value_width = TSDB.tagv_width(); final byte[] fuzzy_key; final byte[] fuzzy_mask; if (explicit_tags && enable_fuzzy_filter) { fuzzy_key = new byte[prefix_width + (row_key_literals.size() * (name_width + value_width))]; fuzzy_mask = new byte[prefix_width + (row_key_literals.size() * (name_width + value_width))]; System.arraycopy(scanner.getCurrentKey(), 0, fuzzy_key, 0, scanner.getCurrentKey().length); } else { fuzzy_key = fuzzy_mask = null; } final String regex = getRowKeyUIDRegex(group_bys, row_key_literals, explicit_tags, fuzzy_key, fuzzy_mask); final KeyRegexpFilter regex_filter = new KeyRegexpFilter( regex.toString(), Const.ASCII_CHARSET); if (LOG.isDebugEnabled()) { LOG.debug("Regex for scanner: " + scanner + ": " + byteRegexToString(regex)); } if (!(explicit_tags && enable_fuzzy_filter)) { scanner.setFilter(regex_filter); return; } scanner.setStartKey(fuzzy_key); final byte[] stop_key = Arrays.copyOf(fuzzy_key, fuzzy_key.length); Internal.setBaseTime(stop_key, end_time); int idx = Const.SALT_WIDTH() + TSDB.metrics_width() + Const.TIMESTAMP_BYTES + TSDB.tagk_width(); // max out the tag values while (idx < stop_key.length) { for (int i = 0; i < TSDB.tagv_width(); i++) { stop_key[idx++] = (byte) 0xFF; } idx += TSDB.tagk_width(); } scanner.setStopKey(stop_key); final List<ScanFilter> filters = new ArrayList<ScanFilter>(2); filters.add( new FuzzyRowFilter( new FuzzyRowFilter.FuzzyFilterPair(fuzzy_key, fuzzy_mask))); filters.add(regex_filter); 
scanner.setFilter(new FilterList(filters)); } }
public class class_name { public static void setDataTableScanFilter( final Scanner scanner, final List<byte[]> group_bys, final ByteMap<byte[][]> row_key_literals, final boolean explicit_tags, final boolean enable_fuzzy_filter, final int end_time) { // no-op if ((group_bys == null || group_bys.isEmpty()) && (row_key_literals == null || row_key_literals.isEmpty())) { return; // depends on control dependency: [if], data = [none] } final int prefix_width = Const.SALT_WIDTH() + TSDB.metrics_width() + Const.TIMESTAMP_BYTES; final short name_width = TSDB.tagk_width(); final short value_width = TSDB.tagv_width(); final byte[] fuzzy_key; final byte[] fuzzy_mask; if (explicit_tags && enable_fuzzy_filter) { fuzzy_key = new byte[prefix_width + (row_key_literals.size() * (name_width + value_width))]; // depends on control dependency: [if], data = [none] fuzzy_mask = new byte[prefix_width + (row_key_literals.size() * (name_width + value_width))]; // depends on control dependency: [if], data = [none] System.arraycopy(scanner.getCurrentKey(), 0, fuzzy_key, 0, scanner.getCurrentKey().length); // depends on control dependency: [if], data = [none] } else { fuzzy_key = fuzzy_mask = null; // depends on control dependency: [if], data = [none] } final String regex = getRowKeyUIDRegex(group_bys, row_key_literals, explicit_tags, fuzzy_key, fuzzy_mask); final KeyRegexpFilter regex_filter = new KeyRegexpFilter( regex.toString(), Const.ASCII_CHARSET); if (LOG.isDebugEnabled()) { LOG.debug("Regex for scanner: " + scanner + ": " + byteRegexToString(regex)); // depends on control dependency: [if], data = [none] } if (!(explicit_tags && enable_fuzzy_filter)) { scanner.setFilter(regex_filter); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } scanner.setStartKey(fuzzy_key); final byte[] stop_key = Arrays.copyOf(fuzzy_key, fuzzy_key.length); Internal.setBaseTime(stop_key, end_time); int idx = Const.SALT_WIDTH() + 
TSDB.metrics_width() + Const.TIMESTAMP_BYTES + TSDB.tagk_width(); // max out the tag values while (idx < stop_key.length) { for (int i = 0; i < TSDB.tagv_width(); i++) { stop_key[idx++] = (byte) 0xFF; // depends on control dependency: [for], data = [none] } idx += TSDB.tagk_width(); // depends on control dependency: [while], data = [none] } scanner.setStopKey(stop_key); final List<ScanFilter> filters = new ArrayList<ScanFilter>(2); filters.add( new FuzzyRowFilter( new FuzzyRowFilter.FuzzyFilterPair(fuzzy_key, fuzzy_mask))); filters.add(regex_filter); scanner.setFilter(new FilterList(filters)); } }
public class class_name { public void marshall(AuditNotificationTarget auditNotificationTarget, ProtocolMarshaller protocolMarshaller) { if (auditNotificationTarget == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(auditNotificationTarget.getTargetArn(), TARGETARN_BINDING); protocolMarshaller.marshall(auditNotificationTarget.getRoleArn(), ROLEARN_BINDING); protocolMarshaller.marshall(auditNotificationTarget.getEnabled(), ENABLED_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(AuditNotificationTarget auditNotificationTarget, ProtocolMarshaller protocolMarshaller) { if (auditNotificationTarget == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(auditNotificationTarget.getTargetArn(), TARGETARN_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(auditNotificationTarget.getRoleArn(), ROLEARN_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(auditNotificationTarget.getEnabled(), ENABLED_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void populateImage(final View view, final String url) { final Object tag = view.getTag(); ImageConsumer consumer = null; if (tag == null) { consumer = createImageConsumer(view); view.setTag(consumer); } else { if (!(tag instanceof ImageConsumer)) { throw new IllegalStateException("View already has a tag " + tag + ". Cannot store consumer"); } consumer = (ImageConsumer)tag; } populateImage(consumer, url); } }
public class class_name { public void populateImage(final View view, final String url) { final Object tag = view.getTag(); ImageConsumer consumer = null; if (tag == null) { consumer = createImageConsumer(view); // depends on control dependency: [if], data = [none] view.setTag(consumer); // depends on control dependency: [if], data = [none] } else { if (!(tag instanceof ImageConsumer)) { throw new IllegalStateException("View already has a tag " + tag + ". Cannot store consumer"); } consumer = (ImageConsumer)tag; // depends on control dependency: [if], data = [none] } populateImage(consumer, url); } }
public class class_name { @Override public void run() { boolean lastAcquireFailed = false; while (!halted.get()) { try { // check if we're supposed to pause... synchronized (sigLock) { while (paused && !halted.get()) { try { // wait until togglePause(false) is called... sigLock.wait(1000L); } catch (InterruptedException ignore) { } } if (halted.get()) { break; } } int availThreadCount = quartzSchedulerResources.getThreadPool().blockForAvailableThreads(); if (availThreadCount > 0) { // will always be true, due to semantics of blockForAvailableThreads... List<OperableTrigger> triggers = null; long now = System.currentTimeMillis(); clearSignaledSchedulingChange(); try { triggers = quartzSchedulerResources .getJobStore() .acquireNextTriggers( now + idleWaitTime, Math.min(availThreadCount, quartzSchedulerResources.getMaxBatchSize()), quartzSchedulerResources.getBatchTimeWindow()); lastAcquireFailed = false; logger.debug( "batch acquisition of " + (triggers == null ? 0 : triggers.size()) + " triggers"); } catch (JobPersistenceException jpe) { lastAcquireFailed = true; } catch (RuntimeException e) { if (!lastAcquireFailed) { logger.error("quartzSchedulerThreadLoop: RuntimeException " + e.getMessage(), e); } lastAcquireFailed = true; } if (triggers != null && !triggers.isEmpty()) { now = System.currentTimeMillis(); long triggerTime = triggers.get(0).getNextFireTime().getTime(); long timeUntilTrigger = triggerTime - now; while (timeUntilTrigger > 2) { synchronized (sigLock) { if (halted.get()) { break; } if (!isCandidateNewTimeEarlierWithinReason(triggerTime, false)) { try { // we could have blocked a long while // on 'synchronize', so we must recompute now = System.currentTimeMillis(); timeUntilTrigger = triggerTime - now; if (timeUntilTrigger >= 1) { sigLock.wait(timeUntilTrigger); } } catch (InterruptedException ignore) { } } } if (releaseIfScheduleChangedSignificantly(triggers, triggerTime)) { break; } now = System.currentTimeMillis(); timeUntilTrigger = triggerTime - 
now; } // this happens if releaseIfScheduleChangedSignificantly decided to release triggers if (triggers.isEmpty()) { continue; } // set triggers to 'executing' List<TriggerFiredResult> bndles = new ArrayList<TriggerFiredResult>(); boolean goAhead = true; synchronized (sigLock) { goAhead = !halted.get(); } if (goAhead) { try { List<TriggerFiredResult> res = quartzSchedulerResources.getJobStore().triggersFired(triggers); if (res != null) { bndles = res; } } catch (SchedulerException se) { quartzScheduler.notifySchedulerListenersError( "An error occurred while firing triggers '" + triggers + "'", se); } } for (int i = 0; i < bndles.size(); i++) { TriggerFiredResult result = bndles.get(i); TriggerFiredBundle bndle = result.getTriggerFiredBundle(); Exception exception = result.getException(); if (exception instanceof RuntimeException) { logger.error("RuntimeException while firing trigger " + triggers.get(i), exception); continue; } // it's possible to get 'null' if the triggers was paused, // blocked, or other similar occurrences that prevent it being // fired at this time... or if the scheduler was shutdown (halted) if (bndle == null) { try { quartzSchedulerResources.getJobStore().releaseAcquiredTrigger(triggers.get(i)); } catch (SchedulerException se) { quartzScheduler.notifySchedulerListenersError( "An error occurred while releasing triggers '" + triggers.get(i).getName() + "'", se); } continue; } // TODO: improvements: // // 2- make sure we can get a job runshell before firing triggers, or // don't let that throw an exception (right now it never does, // but the signature says it can). // 3- acquire more triggers at a time (based on num threads available?) 
JobRunShell shell = null; try { shell = quartzSchedulerResources.getJobRunShellFactory().createJobRunShell(bndle); shell.initialize(quartzScheduler); } catch (SchedulerException se) { try { quartzSchedulerResources .getJobStore() .triggeredJobComplete( triggers.get(i), bndle.getJobDetail(), CompletedExecutionInstruction.SET_ALL_JOB_TRIGGERS_ERROR); } catch (SchedulerException se2) { quartzScheduler.notifySchedulerListenersError( "An error occurred while placing job's triggers in error state '" + triggers.get(i).getName() + "'", se2); } continue; } if (quartzSchedulerResources.getThreadPool().runInThread(shell) == false) { try { // this case should never happen, as it is indicative of the // scheduler being shutdown or a bug in the thread pool or // a thread pool being used concurrently - which the docs // say not to do... logger.error("ThreadPool.runInThread() return false!"); quartzSchedulerResources .getJobStore() .triggeredJobComplete( triggers.get(i), bndle.getJobDetail(), CompletedExecutionInstruction.SET_ALL_JOB_TRIGGERS_ERROR); } catch (SchedulerException se2) { quartzScheduler.notifySchedulerListenersError( "An error occurred while placing job's triggers in error state '" + triggers.get(i).getName() + "'", se2); } } } continue; // while (!halted) } } else { // if(availThreadCount > 0) // should never happen, if threadPool.blockForAvailableThreads() follows contract continue; // while (!halted) } long now = System.currentTimeMillis(); long waitTime = now + getRandomizedIdleWaitTime(); long timeUntilContinue = waitTime - now; synchronized (sigLock) { try { sigLock.wait(timeUntilContinue); } catch (InterruptedException ignore) { } } } catch (RuntimeException re) { logger.error("Runtime error occurred in main trigger firing loop.", re); } } // while (!halted) // drop references to scheduler stuff to aid garbage collection... quartzScheduler = null; quartzSchedulerResources = null; } }
public class class_name { @Override public void run() { boolean lastAcquireFailed = false; while (!halted.get()) { try { // check if we're supposed to pause... synchronized (sigLock) { // depends on control dependency: [try], data = [none] while (paused && !halted.get()) { try { // wait until togglePause(false) is called... sigLock.wait(1000L); // depends on control dependency: [try], data = [none] } catch (InterruptedException ignore) { } // depends on control dependency: [catch], data = [none] } if (halted.get()) { break; } } int availThreadCount = quartzSchedulerResources.getThreadPool().blockForAvailableThreads(); if (availThreadCount > 0) { // will always be true, due to semantics of blockForAvailableThreads... List<OperableTrigger> triggers = null; long now = System.currentTimeMillis(); clearSignaledSchedulingChange(); // depends on control dependency: [if], data = [none] try { triggers = quartzSchedulerResources .getJobStore() .acquireNextTriggers( now + idleWaitTime, Math.min(availThreadCount, quartzSchedulerResources.getMaxBatchSize()), quartzSchedulerResources.getBatchTimeWindow()); // depends on control dependency: [try], data = [none] lastAcquireFailed = false; // depends on control dependency: [try], data = [none] logger.debug( "batch acquisition of " + (triggers == null ? 
0 : triggers.size()) + " triggers"); // depends on control dependency: [try], data = [none] } catch (JobPersistenceException jpe) { lastAcquireFailed = true; } catch (RuntimeException e) { // depends on control dependency: [catch], data = [none] if (!lastAcquireFailed) { logger.error("quartzSchedulerThreadLoop: RuntimeException " + e.getMessage(), e); // depends on control dependency: [if], data = [none] } lastAcquireFailed = true; } // depends on control dependency: [catch], data = [none] if (triggers != null && !triggers.isEmpty()) { now = System.currentTimeMillis(); // depends on control dependency: [if], data = [none] long triggerTime = triggers.get(0).getNextFireTime().getTime(); long timeUntilTrigger = triggerTime - now; while (timeUntilTrigger > 2) { synchronized (sigLock) { // depends on control dependency: [while], data = [none] if (halted.get()) { break; } if (!isCandidateNewTimeEarlierWithinReason(triggerTime, false)) { try { // we could have blocked a long while // on 'synchronize', so we must recompute now = System.currentTimeMillis(); // depends on control dependency: [try], data = [none] timeUntilTrigger = triggerTime - now; // depends on control dependency: [try], data = [none] if (timeUntilTrigger >= 1) { sigLock.wait(timeUntilTrigger); // depends on control dependency: [if], data = [(timeUntilTrigger] } } catch (InterruptedException ignore) { } // depends on control dependency: [catch], data = [none] } } if (releaseIfScheduleChangedSignificantly(triggers, triggerTime)) { break; } now = System.currentTimeMillis(); // depends on control dependency: [while], data = [none] timeUntilTrigger = triggerTime - now; // depends on control dependency: [while], data = [none] } // this happens if releaseIfScheduleChangedSignificantly decided to release triggers if (triggers.isEmpty()) { continue; } // set triggers to 'executing' List<TriggerFiredResult> bndles = new ArrayList<TriggerFiredResult>(); boolean goAhead = true; synchronized (sigLock) { // depends on 
control dependency: [if], data = [none] goAhead = !halted.get(); } if (goAhead) { try { List<TriggerFiredResult> res = quartzSchedulerResources.getJobStore().triggersFired(triggers); if (res != null) { bndles = res; // depends on control dependency: [if], data = [none] } } catch (SchedulerException se) { quartzScheduler.notifySchedulerListenersError( "An error occurred while firing triggers '" + triggers + "'", se); } // depends on control dependency: [catch], data = [none] } for (int i = 0; i < bndles.size(); i++) { TriggerFiredResult result = bndles.get(i); TriggerFiredBundle bndle = result.getTriggerFiredBundle(); Exception exception = result.getException(); if (exception instanceof RuntimeException) { logger.error("RuntimeException while firing trigger " + triggers.get(i), exception); // depends on control dependency: [if], data = [none] continue; } // it's possible to get 'null' if the triggers was paused, // blocked, or other similar occurrences that prevent it being // fired at this time... or if the scheduler was shutdown (halted) if (bndle == null) { try { quartzSchedulerResources.getJobStore().releaseAcquiredTrigger(triggers.get(i)); // depends on control dependency: [try], data = [none] } catch (SchedulerException se) { quartzScheduler.notifySchedulerListenersError( "An error occurred while releasing triggers '" + triggers.get(i).getName() + "'", se); } // depends on control dependency: [catch], data = [none] continue; } // TODO: improvements: // // 2- make sure we can get a job runshell before firing triggers, or // don't let that throw an exception (right now it never does, // but the signature says it can). // 3- acquire more triggers at a time (based on num threads available?) 
JobRunShell shell = null; try { shell = quartzSchedulerResources.getJobRunShellFactory().createJobRunShell(bndle); // depends on control dependency: [try], data = [none] shell.initialize(quartzScheduler); // depends on control dependency: [try], data = [none] } catch (SchedulerException se) { try { quartzSchedulerResources .getJobStore() .triggeredJobComplete( triggers.get(i), bndle.getJobDetail(), CompletedExecutionInstruction.SET_ALL_JOB_TRIGGERS_ERROR); // depends on control dependency: [try], data = [none] } catch (SchedulerException se2) { quartzScheduler.notifySchedulerListenersError( "An error occurred while placing job's triggers in error state '" + triggers.get(i).getName() + "'", se2); } // depends on control dependency: [catch], data = [none] continue; } // depends on control dependency: [catch], data = [none] if (quartzSchedulerResources.getThreadPool().runInThread(shell) == false) { try { // this case should never happen, as it is indicative of the // scheduler being shutdown or a bug in the thread pool or // a thread pool being used concurrently - which the docs // say not to do... 
logger.error("ThreadPool.runInThread() return false!"); // depends on control dependency: [try], data = [none] quartzSchedulerResources .getJobStore() .triggeredJobComplete( triggers.get(i), bndle.getJobDetail(), CompletedExecutionInstruction.SET_ALL_JOB_TRIGGERS_ERROR); // depends on control dependency: [try], data = [none] } catch (SchedulerException se2) { quartzScheduler.notifySchedulerListenersError( "An error occurred while placing job's triggers in error state '" + triggers.get(i).getName() + "'", se2); } // depends on control dependency: [catch], data = [none] } } continue; // while (!halted) } } else { // if(availThreadCount > 0) // should never happen, if threadPool.blockForAvailableThreads() follows contract continue; // while (!halted) } long now = System.currentTimeMillis(); long waitTime = now + getRandomizedIdleWaitTime(); long timeUntilContinue = waitTime - now; synchronized (sigLock) { // depends on control dependency: [try], data = [none] try { sigLock.wait(timeUntilContinue); // depends on control dependency: [try], data = [none] } catch (InterruptedException ignore) { } // depends on control dependency: [catch], data = [none] } } catch (RuntimeException re) { logger.error("Runtime error occurred in main trigger firing loop.", re); } // depends on control dependency: [catch], data = [none] } // while (!halted) // drop references to scheduler stuff to aid garbage collection... quartzScheduler = null; quartzSchedulerResources = null; } }
// Type-checker pass for `return` statements: validates that the returned value is
// assignable to the enclosing function's declared (or defaulted) return type.
// Generator / async-generator / async declared types are first unwrapped or
// transformed (element type, IThenable resolution) so the comparison is made
// against the effective type; constructors may use bare `return;` for flow control.
// NOTE(review): the flattened one-line formatting is preserved from the source;
// text following each inline // comment is part of the original line.
public class class_name { private void visitReturn(NodeTraversal t, Node n) { Node enclosingFunction = t.getEnclosingFunction(); if (enclosingFunction.isGeneratorFunction() && !n.hasChildren()) { // Allow "return;" in a generator function, even if it's not the declared return type. // e.g. Don't warn for a generator function with JSDoc "@return {!Generator<number>}" and // a "return;" in the fn body, even though "undefined" does not match "number". return; } JSType jsType = getJSType(enclosingFunction); if (jsType.isFunctionType()) { FunctionType functionType = jsType.toMaybeFunctionType(); JSType returnType = functionType.getReturnType(); // if no return type is specified, undefined must be returned // (it's a void function) if (returnType == null) { returnType = getNativeType(VOID_TYPE); } else if (enclosingFunction.isGeneratorFunction()) { // Unwrap the template variable from a generator function's declared return type. // e.g. if returnType is "Generator<string>", make it just "string". returnType = JsIterables.getElementType(returnType, typeRegistry); if (enclosingFunction.isAsyncGeneratorFunction()) { // Can return x|IThenable<x> in an AsyncGenerator<x>, no await needed. Note that we must // first wrap the type in IThenable as createAsyncReturnableType will map a non-IThenable // to `?`. returnType = Promises.createAsyncReturnableType( typeRegistry, Promises.wrapInIThenable(typeRegistry, returnType)); } } else if (enclosingFunction.isAsyncFunction()) { // e.g. `!Promise<string>` => `string|!IThenable<string>` // We transform the expected return type rather than the actual return type so that the // actual return type is always reported to the user. This was felt to be clearer. returnType = Promises.createAsyncReturnableType(typeRegistry, returnType); } else if (returnType.isVoidType() && functionType.isConstructor()) { // Allow constructors to use empty returns for flow control.
if (!n.hasChildren()) { return; } // Allow constructors to return its own instance type returnType = functionType.getInstanceType(); } // fetching the returned value's type Node valueNode = n.getFirstChild(); JSType actualReturnType; if (valueNode == null) { actualReturnType = getNativeType(VOID_TYPE); valueNode = n; } else { actualReturnType = getJSType(valueNode); } // verifying validator.expectCanAssignTo( valueNode, actualReturnType, returnType, "inconsistent return type"); } } }
// Machine-annotated variant of visitReturn: each statement carries a generated
// "// depends on control dependency: [...]" marker describing its control/data
// dependencies. Treat those trailing markers as dataset annotations, not as
// documentation to be edited.
public class class_name { private void visitReturn(NodeTraversal t, Node n) { Node enclosingFunction = t.getEnclosingFunction(); if (enclosingFunction.isGeneratorFunction() && !n.hasChildren()) { // Allow "return;" in a generator function, even if it's not the declared return type. // e.g. Don't warn for a generator function with JSDoc "@return {!Generator<number>}" and // a "return;" in the fn body, even though "undefined" does not match "number". return; // depends on control dependency: [if], data = [none] } JSType jsType = getJSType(enclosingFunction); if (jsType.isFunctionType()) { FunctionType functionType = jsType.toMaybeFunctionType(); JSType returnType = functionType.getReturnType(); // if no return type is specified, undefined must be returned // (it's a void function) if (returnType == null) { returnType = getNativeType(VOID_TYPE); // depends on control dependency: [if], data = [none] } else if (enclosingFunction.isGeneratorFunction()) { // Unwrap the template variable from a generator function's declared return type. // e.g. if returnType is "Generator<string>", make it just "string". returnType = JsIterables.getElementType(returnType, typeRegistry); // depends on control dependency: [if], data = [none] if (enclosingFunction.isAsyncGeneratorFunction()) { // Can return x|IThenable<x> in an AsyncGenerator<x>, no await needed. Note that we must // first wrap the type in IThenable as createAsyncReturnableType will map a non-IThenable // to `?`. returnType = Promises.createAsyncReturnableType( typeRegistry, Promises.wrapInIThenable(typeRegistry, returnType)); // depends on control dependency: [if], data = [none] } } else if (enclosingFunction.isAsyncFunction()) { // e.g. `!Promise<string>` => `string|!IThenable<string>` // We transform the expected return type rather than the actual return type so that the // extual return type is always reported to the user. This was felt to be clearer.
returnType = Promises.createAsyncReturnableType(typeRegistry, returnType); // depends on control dependency: [if], data = [none] } else if (returnType.isVoidType() && functionType.isConstructor()) { // Allow constructors to use empty returns for flow control. if (!n.hasChildren()) { return; // depends on control dependency: [if], data = [none] } // Allow constructors to return its own instance type returnType = functionType.getInstanceType(); // depends on control dependency: [if], data = [none] } // fetching the returned value's type Node valueNode = n.getFirstChild(); JSType actualReturnType; if (valueNode == null) { actualReturnType = getNativeType(VOID_TYPE); // depends on control dependency: [if], data = [none] valueNode = n; // depends on control dependency: [if], data = [none] } else { actualReturnType = getJSType(valueNode); // depends on control dependency: [if], data = [(valueNode] } // verifying validator.expectCanAssignTo( valueNode, actualReturnType, returnType, "inconsistent return type"); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Fetches the registered service document identified by a numeric id.
     * The id is converted to its decimal string form and delegated to the
     * string-keyed overload of {@code get}.
     *
     * @param id numeric identifier of the service document
     * @return the matching document, or {@code null} when the backing store
     *         throws a {@code DocumentNotFoundException} (logged at debug level)
     */
    public RegisteredServiceDocument get(final long id) {
        final String documentKey = String.valueOf(id);
        try {
            return this.get(documentKey);
        } catch (final DocumentNotFoundException e) {
            LOGGER.debug("Service [{}] not found. [{}]", id, e.getMessage());
            return null;
        }
    }
}
// Machine-annotated variant of get(long): looks up a service document by numeric
// id, returning null on DocumentNotFoundException. The trailing
// "// depends on control dependency: [...]" markers are dataset annotations.
public class class_name { public RegisteredServiceDocument get(final long id) { try { return this.get(String.valueOf(id)); // depends on control dependency: [try], data = [none] } catch (final DocumentNotFoundException e) { LOGGER.debug("Service [{}] not found. [{}]", id, e.getMessage()); return null; } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Replaces this object's port mappings with a defensive SDK-list copy of
     * the supplied collection; passing {@code null} clears the field entirely.
     *
     * @param portMappings the new mappings, or {@code null} to unset
     */
    public void setPortMappings(java.util.Collection<PortMapping> portMappings) {
        this.portMappings = (portMappings == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<PortMapping>(portMappings);
    }
}
// Machine-annotated variant of setPortMappings: copies the collection into an
// SdkInternalList, or clears the field when given null. The trailing
// "// depends on control dependency: [...]" markers are dataset annotations.
public class class_name { public void setPortMappings(java.util.Collection<PortMapping> portMappings) { if (portMappings == null) { this.portMappings = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.portMappings = new com.amazonaws.internal.SdkInternalList<PortMapping>(portMappings); } }
public class class_name {
    /**
     * Aborts this component: logs the reason at fatal level (with the cause
     * when one is supplied), marks the instance as aborted, then closes the
     * underlying connection.
     *
     * @param msg human-readable abort reason
     * @param t   optional cause; may be {@code null}
     * @throws RuntimeException if closing the connection fails
     */
    @Override
    public void abort(final String msg, Throwable t) {
        if (t == null) {
            LOG.fatal(msg);
        } else {
            LOG.fatal(msg, t);
        }
        this.aborted = true;
        try {
            close();
        } catch (IOException e) {
            throw new RuntimeException("Could not close the connection", e);
        }
    }
}
// Machine-annotated variant of abort(String, Throwable): fatal-logs, sets the
// aborted flag, and closes the connection (IOException wrapped as
// RuntimeException). The trailing "// depends on ..." markers are dataset annotations.
public class class_name { @Override public void abort(final String msg, Throwable t) { if (t != null) { LOG.fatal(msg, t); // depends on control dependency: [if], data = [none] } else { LOG.fatal(msg); // depends on control dependency: [if], data = [none] } this.aborted = true; try { close(); // depends on control dependency: [try], data = [none] } catch(IOException e) { throw new RuntimeException("Could not close the connection", e); } // depends on control dependency: [catch], data = [none] } }
// Populates foreign-key metadata on the Column produced by add(...).
// A 3-part key ("schema"/"table"/"column") resolves the property type from
// filteredAllTables, choosing Direction.IN or OUT from the column-name suffix;
// a 2-part key is treated as a LONG id column pointing at the referenced table.
// NOTE(review): the 2-part branch always strips Topology.IN_VERTEX_COLUMN_END's
// length from the table name even when the column ends with the OUT suffix —
// this is only correct if both suffix constants have the same length; confirm
// against Topology before changing.
public class class_name { private void addForeignKey(String schema, String table, String column, int stepDepth, String alias, String[] foreignKeyParts) { Column c = add(schema, table, column, stepDepth, alias); c.isForeignKey = true; if (foreignKeyParts.length == 3) { Map<String, PropertyType> properties = this.filteredAllTables.get(foreignKeyParts[0] + "." + Topology.VERTEX_PREFIX + foreignKeyParts[1]); if (foreignKeyParts[2].endsWith(Topology.IN_VERTEX_COLUMN_END)) { c.propertyType = properties.get(foreignKeyParts[2].substring(0, foreignKeyParts[2].length() - Topology.IN_VERTEX_COLUMN_END.length())); c.foreignKeyDirection = Direction.IN; c.foreignSchemaTable = SchemaTable.of(foreignKeyParts[0], foreignKeyParts[1]); c.foreignKeyProperty = foreignKeyParts[2]; } else { c.propertyType = properties.get(foreignKeyParts[2].substring(0, foreignKeyParts[2].length() - Topology.OUT_VERTEX_COLUMN_END.length())); c.foreignKeyDirection = Direction.OUT; c.foreignSchemaTable = SchemaTable.of(foreignKeyParts[0], foreignKeyParts[1]); c.foreignKeyProperty = foreignKeyParts[2]; } } else { c.propertyType = PropertyType.LONG; c.foreignKeyDirection = (column.endsWith(Topology.IN_VERTEX_COLUMN_END) ? Direction.IN : Direction.OUT); c.foreignSchemaTable = SchemaTable.of(foreignKeyParts[0], foreignKeyParts[1].substring(0, foreignKeyParts[1].length() - Topology.IN_VERTEX_COLUMN_END.length())); c.foreignKeyProperty = null; } } }
// Machine-annotated variant of addForeignKey: same logic with generated
// "// depends on control dependency: [...]" markers appended to statements;
// treat those markers as dataset annotations, not documentation.
public class class_name { private void addForeignKey(String schema, String table, String column, int stepDepth, String alias, String[] foreignKeyParts) { Column c = add(schema, table, column, stepDepth, alias); c.isForeignKey = true; if (foreignKeyParts.length == 3) { Map<String, PropertyType> properties = this.filteredAllTables.get(foreignKeyParts[0] + "." + Topology.VERTEX_PREFIX + foreignKeyParts[1]); if (foreignKeyParts[2].endsWith(Topology.IN_VERTEX_COLUMN_END)) { c.propertyType = properties.get(foreignKeyParts[2].substring(0, foreignKeyParts[2].length() - Topology.IN_VERTEX_COLUMN_END.length())); // depends on control dependency: [if], data = [none] c.foreignKeyDirection = Direction.IN; // depends on control dependency: [if], data = [none] c.foreignSchemaTable = SchemaTable.of(foreignKeyParts[0], foreignKeyParts[1]); // depends on control dependency: [if], data = [none] c.foreignKeyProperty = foreignKeyParts[2]; // depends on control dependency: [if], data = [none] } else { c.propertyType = properties.get(foreignKeyParts[2].substring(0, foreignKeyParts[2].length() - Topology.OUT_VERTEX_COLUMN_END.length())); // depends on control dependency: [if], data = [none] c.foreignKeyDirection = Direction.OUT; // depends on control dependency: [if], data = [none] c.foreignSchemaTable = SchemaTable.of(foreignKeyParts[0], foreignKeyParts[1]); // depends on control dependency: [if], data = [none] c.foreignKeyProperty = foreignKeyParts[2]; // depends on control dependency: [if], data = [none] } } else { c.propertyType = PropertyType.LONG; // depends on control dependency: [if], data = [none] c.foreignKeyDirection = (column.endsWith(Topology.IN_VERTEX_COLUMN_END) ?
Direction.IN : Direction.OUT); // depends on control dependency: [if], data = [none] c.foreignSchemaTable = SchemaTable.of(foreignKeyParts[0], foreignKeyParts[1].substring(0, foreignKeyParts[1].length() - Topology.IN_VERTEX_COLUMN_END.length())); // depends on control dependency: [if], data = [none] c.foreignKeyProperty = null; // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Rebuilds the in-memory authorization tables from the configured security roles.
     * For every role, the users, groups and special subjects assigned to it are
     * collected into temporary name-to-role-name maps, which are then frozen into
     * RoleSets; access ids are recorded directly in the long-lived
     * {@code explicitAccessIdToRoles} map.
     */
    protected void populate() {
        clearAuthorizationTable();

        Map<String, Set<String>> userToRoleName = new HashMap<String, Set<String>>();
        Map<String, Set<String>> groupToRoleName = new HashMap<String, Set<String>>();
        Map<String, Set<String>> specialSubjectToRoleName = new HashMap<String, Set<String>>();

        Iterator<SecurityRole> itr = getRoles();
        while (itr.hasNext()) {
            SecurityRole role = itr.next();
            String roleName = role.getRoleName();

            for (String user : role.getUsers()) {
                addRoleName(userToRoleName, user, roleName);
            }
            for (String group : role.getGroups()) {
                addRoleName(groupToRoleName, group, roleName);
            }
            for (String specialSubject : role.getSpecialSubjects()) {
                addRoleName(specialSubjectToRoleName, specialSubject, roleName);
            }
            // Access ids go straight into the persistent explicit map; note the
            // lookup is delegated to getRoles(map, accessId) rather than map.get().
            for (String accessId : role.getAccessIds()) {
                Set<String> assignedRoles = getRoles(explicitAccessIdToRoles, accessId);
                if (assignedRoles == null) {
                    assignedRoles = new HashSet<String>();
                    explicitAccessIdToRoles.put(accessId, assignedRoles);
                }
                assignedRoles.add(roleName);
            }
        }

        // Freeze the accumulated name sets into immutable RoleSet lookups.
        for (Map.Entry<String, Set<String>> entry : userToRoleName.entrySet()) {
            userToRoles.put(entry.getKey(), new RoleSet(entry.getValue()));
        }
        for (Map.Entry<String, Set<String>> entry : groupToRoleName.entrySet()) {
            groupToRoles.put(entry.getKey(), new RoleSet(entry.getValue()));
        }
        for (Map.Entry<String, Set<String>> entry : specialSubjectToRoleName.entrySet()) {
            specialSubjectToRoles.put(entry.getKey(), new RoleSet(entry.getValue()));
        }

        populated = true;
    }

    /** Adds roleName to the role-name set for key, creating the set on first use. */
    private static void addRoleName(Map<String, Set<String>> map, String key, String roleName) {
        Set<String> assignedRoles = map.get(key);
        if (assignedRoles == null) {
            assignedRoles = new HashSet<String>();
            map.put(key, assignedRoles);
        }
        assignedRoles.add(roleName);
    }
}
// Machine-annotated variant of populate(): builds user/group/special-subject/
// access-id to role mappings from the configured roles, with generated
// "// depends on control dependency: [...]" markers (dataset annotations).
public class class_name { protected void populate() { clearAuthorizationTable(); Map<String, Set<String>> userToRoleName = new HashMap<String, Set<String>>(); Map<String, Set<String>> groupToRoleName = new HashMap<String, Set<String>>(); Map<String, Set<String>> specialSubjectToRoleName = new HashMap<String, Set<String>>(); Iterator<SecurityRole> itr = getRoles(); while (itr.hasNext()) { SecurityRole role = itr.next(); String roleName = role.getRoleName(); for (String user : role.getUsers()) { Set<String> assignedRoles = userToRoleName.get(user); if (assignedRoles == null) { assignedRoles = new HashSet<String>(); // depends on control dependency: [if], data = [none] userToRoleName.put(user, assignedRoles); // depends on control dependency: [if], data = [none] } assignedRoles.add(roleName); // depends on control dependency: [for], data = [none] } for (String group : role.getGroups()) { Set<String> assignedRoles = groupToRoleName.get(group); if (assignedRoles == null) { assignedRoles = new HashSet<String>(); // depends on control dependency: [if], data = [none] groupToRoleName.put(group, assignedRoles); // depends on control dependency: [if], data = [none] } assignedRoles.add(roleName); // depends on control dependency: [for], data = [none] } for (String specialSubject : role.getSpecialSubjects()) { Set<String> assignedRoles = specialSubjectToRoleName.get(specialSubject); if (assignedRoles == null) { assignedRoles = new HashSet<String>(); // depends on control dependency: [if], data = [none] specialSubjectToRoleName.put(specialSubject, assignedRoles); // depends on control dependency: [if], data = [none] } assignedRoles.add(roleName); // depends on control dependency: [for], data = [none] } for (String accessId : role.getAccessIds()) { Set<String> assignedRoles = getRoles(explicitAccessIdToRoles, accessId); if (assignedRoles == null) { assignedRoles = new HashSet<String>(); // depends on control dependency: [if], data = [none] explicitAccessIdToRoles.put(accessId,
assignedRoles); // depends on control dependency: [if], data = [none] } assignedRoles.add(roleName); // depends on control dependency: [for], data = [none] } } for (Map.Entry<String, Set<String>> entry : userToRoleName.entrySet()) { userToRoles.put(entry.getKey(), new RoleSet(entry.getValue())); // depends on control dependency: [for], data = [entry] } for (Map.Entry<String, Set<String>> entry : groupToRoleName.entrySet()) { groupToRoles.put(entry.getKey(), new RoleSet(entry.getValue())); // depends on control dependency: [for], data = [entry] } for (Map.Entry<String, Set<String>> entry : specialSubjectToRoleName.entrySet()) { specialSubjectToRoles.put(entry.getKey(), new RoleSet(entry.getValue())); // depends on control dependency: [for], data = [entry] } populated = true; } }
public class class_name {
    /**
     * Updates the value-visibility flag. While the observable holder has not
     * been created yet, the plain boolean backing field is written and a
     * visibility update event is fired; once the holder exists, it is set directly.
     *
     * @param VISIBLE whether the value should be shown
     */
    public void setValueVisible(final boolean VISIBLE) {
        if (valueVisible != null) {
            valueVisible.set(VISIBLE);
        } else {
            _valueVisible = VISIBLE;
            fireUpdateEvent(VISIBILITY_EVENT);
        }
    }
}
// Machine-annotated variant of setValueVisible: writes the backing field and
// fires an event until the observable holder exists, then sets the holder.
// The trailing "// depends on ..." markers are dataset annotations.
public class class_name { public void setValueVisible(final boolean VISIBLE) { if (null == valueVisible) { _valueVisible = VISIBLE; // depends on control dependency: [if], data = [none] fireUpdateEvent(VISIBILITY_EVENT); // depends on control dependency: [if], data = [none] } else { valueVisible.set(VISIBLE); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Flattens this filter chain into an array by walking the {@code getAnd()}
     * links starting from this filter. The array length is taken from
     * {@code chainLength()}.
     *
     * @return the chain as an array, first element being this filter
     */
    @Override
    public Filter[] filterChainToArray() {
        final Filter[] chain = new Filter[chainLength()];
        Filter current = this;
        int index = 0;
        while (index < chain.length) {
            chain[index] = current;
            current = current.getAnd();
            index++;
        }
        return chain;
    }
}
// Machine-annotated variant of filterChainToArray: walks getAnd() links into an
// array sized by chainLength(). The trailing "// depends on ..." markers are
// dataset annotations.
public class class_name { @Override public Filter[] filterChainToArray() { int length = chainLength(); Filter[] array = new Filter[length]; Filter thisFilter = this; for (int i = 0; i < length; i++) { array[i] = thisFilter; // depends on control dependency: [for], data = [i] thisFilter = thisFilter.getAnd(); // depends on control dependency: [for], data = [none] } return array; } }
public class class_name {
    /**
     * Extracts the value portion of a {@code name=value} argument.
     * <p>
     * Returns {@code null} when the argument contains no '=' separator (note
     * that a trailing '=' with an empty value also lands here, because
     * {@code String.split} drops trailing empty tokens). When the value itself
     * contains '=' characters (e.g. an LDAP DN), everything after the first
     * separator is returned with the embedded separators preserved.
     *
     * @param arg the raw argument, e.g. {@code "cn=foo=bar"}
     * @return the value after the first '=', or {@code null} if there is none
     */
    protected String getValue(String arg) {
        String[] split = arg.split("=");
        if (split.length == 1) {
            return null;
        }
        if (split.length == 2) {
            return split[1];
        }
        // DN case with multiple '='s: rejoin everything after the first token.
        // StringBuilder (unsynchronized) replaces the legacy StringBuffer —
        // the buffer is method-local, so no synchronization is needed.
        StringBuilder value = new StringBuilder(split[1]);
        for (int i = 2; i < split.length; i++) {
            value.append('=').append(split[i]);
        }
        return value.toString();
    }
}
// Machine-annotated variant of getValue: splits on '=' and rejoins multi-part
// values (DN case). The trailing "// depends on ..." markers are dataset annotations.
public class class_name { protected String getValue(String arg) { String[] split = arg.split("="); if (split.length == 1) { return null; // depends on control dependency: [if], data = [none] } else if (split.length == 2) { return split[1]; // depends on control dependency: [if], data = [none] } else { // Handle DN case with multiple =s StringBuffer value = new StringBuffer(); for (int i = 1; i < split.length; i++) { value.append(split[i]); // depends on control dependency: [for], data = [i] if (i < (split.length - 1)) { value.append("="); // depends on control dependency: [if], data = [none] } } return value.toString(); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Returns the list of import tables that have not started yet, lazily
     * creating an empty SDK list on first access so callers never see null.
     *
     * @return the (possibly empty) list of table names
     */
    public java.util.List<String> getImportTablesNotStarted() {
        if (importTablesNotStarted != null) {
            return importTablesNotStarted;
        }
        importTablesNotStarted = new com.amazonaws.internal.SdkInternalList<String>();
        return importTablesNotStarted;
    }
}
// Machine-annotated variant of getImportTablesNotStarted: lazily initializes the
// backing SdkInternalList. The trailing "// depends on ..." marker is a dataset annotation.
public class class_name { public java.util.List<String> getImportTablesNotStarted() { if (importTablesNotStarted == null) { importTablesNotStarted = new com.amazonaws.internal.SdkInternalList<String>(); // depends on control dependency: [if], data = [none] } return importTablesNotStarted; } }
// Selection-by-location handler: issues an asynchronous SearchByLocationRequest
// at the clicked world coordinate and toggles selection of the returned features.
// When priorityToSelectedLayer is set and the selected layer is a showing
// VectorLayer, only that layer (with its filter) is searched; otherwise all
// visible layers are. Because clearSelection happens before the async response
// arrives, the previously selected feature id is captured up front for the
// single-selection case.
public class class_name { private void toggle(Coordinate coordinate, final boolean clearSelection, final boolean singleSelection) { if (null == coordinate) { return; } // we can clear here (but remember the selected feature for the special case of single selection) ! final String singleSelectionId = mapWidget.getMapModel().getSelectedFeature(); if (clearSelection) { mapWidget.getMapModel().clearSelectedFeatures(); } MapModel mapModel = mapWidget.getMapModel(); Coordinate worldPosition = mapModel.getMapView().getWorldViewTransformer().viewToWorld(coordinate); GwtCommand commandRequest = new GwtCommand(SearchByLocationRequest.COMMAND); SearchByLocationRequest request = new SearchByLocationRequest(); Layer<?> layer = mapModel.getSelectedLayer(); if (priorityToSelectedLayer && layer != null && layer instanceof VectorLayer) { if (!layer.isShowing()) { return; } request.addLayerWithFilter(layer.getId(), layer.getServerLayerId(), ((VectorLayer) layer).getFilter()); } else { addVisibleLayers(request, mapModel); } Point point = mapModel.getGeometryFactory().createPoint(worldPosition); request.setLocation(GeometryConverter.toDto(point)); request.setCrs(mapWidget.getMapModel().getCrs()); request.setQueryType(SearchByLocationRequest.QUERY_INTERSECTS); request.setSearchType(SearchByLocationRequest.SEARCH_ALL_LAYERS); request.setBuffer(calculateBufferFromPixelTolerance()); request.setFeatureIncludes(GwtCommandDispatcher.getInstance().getLazyFeatureIncludesSelect()); commandRequest.setCommandRequest(request); GwtCommandDispatcher.getInstance().execute(commandRequest, new AbstractCommandCallback<SearchByLocationResponse>() { public void execute(SearchByLocationResponse response) { Map<String, List<Feature>> featureMap = response.getFeatureMap(); for (String layerId : featureMap.keySet()) { selectFeatures(layerId, featureMap.get(layerId), singleSelectionId, singleSelection); if (singleSelection) { break; } } } }); } }
// Machine-annotated variant of toggle(Coordinate, boolean, boolean): same
// search-and-select logic with generated "// depends on control dependency: [...]"
// markers appended to statements (dataset annotations, not documentation).
public class class_name { private void toggle(Coordinate coordinate, final boolean clearSelection, final boolean singleSelection) { if (null == coordinate) { return; // depends on control dependency: [if], data = [none] } // we can clear here (but remember the selected feature for the special case of single selection) ! final String singleSelectionId = mapWidget.getMapModel().getSelectedFeature(); if (clearSelection) { mapWidget.getMapModel().clearSelectedFeatures(); // depends on control dependency: [if], data = [none] } MapModel mapModel = mapWidget.getMapModel(); Coordinate worldPosition = mapModel.getMapView().getWorldViewTransformer().viewToWorld(coordinate); GwtCommand commandRequest = new GwtCommand(SearchByLocationRequest.COMMAND); SearchByLocationRequest request = new SearchByLocationRequest(); Layer<?> layer = mapModel.getSelectedLayer(); if (priorityToSelectedLayer && layer != null && layer instanceof VectorLayer) { if (!layer.isShowing()) { return; // depends on control dependency: [if], data = [none] } request.addLayerWithFilter(layer.getId(), layer.getServerLayerId(), ((VectorLayer) layer).getFilter()); // depends on control dependency: [if], data = [none] } else { addVisibleLayers(request, mapModel); // depends on control dependency: [if], data = [none] } Point point = mapModel.getGeometryFactory().createPoint(worldPosition); request.setLocation(GeometryConverter.toDto(point)); request.setCrs(mapWidget.getMapModel().getCrs()); request.setQueryType(SearchByLocationRequest.QUERY_INTERSECTS); request.setSearchType(SearchByLocationRequest.SEARCH_ALL_LAYERS); request.setBuffer(calculateBufferFromPixelTolerance()); request.setFeatureIncludes(GwtCommandDispatcher.getInstance().getLazyFeatureIncludesSelect()); commandRequest.setCommandRequest(request); GwtCommandDispatcher.getInstance().execute(commandRequest, new AbstractCommandCallback<SearchByLocationResponse>() { public void execute(SearchByLocationResponse response) { Map<String, List<Feature>>
featureMap = response.getFeatureMap(); for (String layerId : featureMap.keySet()) { selectFeatures(layerId, featureMap.get(layerId), singleSelectionId, singleSelection); // depends on control dependency: [for], data = [layerId] if (singleSelection) { break; } } } }); } }
public class class_name { public void broadcastEmit(T record) throws IOException, InterruptedException { checkErroneous(); serializer.serializeRecord(record); boolean pruneAfterCopying = false; for (int channel : broadcastChannels) { if (copyFromSerializerToTargetChannel(channel)) { pruneAfterCopying = true; } } // Make sure we don't hold onto the large intermediate serialization buffer for too long if (pruneAfterCopying) { serializer.prune(); } } }
// Machine-annotated variant of broadcastEmit: serializes once, copies to all
// broadcast channels, prunes the serializer if any copy succeeded. The trailing
// "// depends on ..." markers are dataset annotations.
public class class_name { public void broadcastEmit(T record) throws IOException, InterruptedException { checkErroneous(); serializer.serializeRecord(record); boolean pruneAfterCopying = false; for (int channel : broadcastChannels) { if (copyFromSerializerToTargetChannel(channel)) { pruneAfterCopying = true; // depends on control dependency: [if], data = [none] } } // Make sure we don't hold onto the large intermediate serialization buffer for too long if (pruneAfterCopying) { serializer.prune(); } } }
public class class_name {
    /**
     * Migrates the legacy {@code skipInjectInitScript} flag into the newer
     * {@code useArtifactoryGradlePlugin} field via reflection, but only for
     * configurators named like {@code ArtifactoryGradleConfigurator} and only
     * when the new field has not been set yet. Reflection failures are
     * recorded in {@code converterErrors} rather than thrown.
     *
     * @param overrider      the configurator instance being converted
     * @param overriderClass its class, used for field lookup
     */
    private void overrideUseArtifactoryGradlePlugin(T overrider, Class overriderClass) {
        if (!overriderClass.getSimpleName().equals(ArtifactoryGradleConfigurator.class.getSimpleName())) {
            return;
        }
        try {
            Field useArtifactoryGradlePluginField = overriderClass.getDeclaredField("useArtifactoryGradlePlugin");
            useArtifactoryGradlePluginField.setAccessible(true);
            if (useArtifactoryGradlePluginField.get(overrider) != null) {
                return; // already set; nothing to migrate
            }
            Field skipInjectInitScriptField = overriderClass.getDeclaredField("skipInjectInitScript");
            skipInjectInitScriptField.setAccessible(true);
            Object skipInjectInitScript = skipInjectInitScriptField.get(overrider);
            // Fix: the original also checked `!= null` after instanceof, which is
            // dead code — `instanceof Boolean` is already false for null.
            if (skipInjectInitScript instanceof Boolean) {
                useArtifactoryGradlePluginField.set(overrider, skipInjectInitScript);
            }
        } catch (NoSuchFieldException | IllegalAccessException e) {
            converterErrors.add(getConversionErrorMessage(overrider, e));
        }
    }
}
// Machine-annotated variant of overrideUseArtifactoryGradlePlugin: reflective
// migration of skipInjectInitScript into useArtifactoryGradlePlugin. The
// trailing "// depends on ..." markers are dataset annotations.
public class class_name { private void overrideUseArtifactoryGradlePlugin(T overrider, Class overriderClass) { if (overriderClass.getSimpleName().equals(ArtifactoryGradleConfigurator.class.getSimpleName())) { try { Field useArtifactoryGradlePluginField = overriderClass.getDeclaredField("useArtifactoryGradlePlugin"); useArtifactoryGradlePluginField.setAccessible(true); // depends on control dependency: [try], data = [none] Object useArtifactoryGradlePlugin = useArtifactoryGradlePluginField.get(overrider); if (useArtifactoryGradlePlugin == null) { Field skipInjectInitScriptField = overriderClass.getDeclaredField("skipInjectInitScript"); skipInjectInitScriptField.setAccessible(true); // depends on control dependency: [if], data = [none] Object skipInjectInitScript = skipInjectInitScriptField.get(overrider); if (skipInjectInitScript instanceof Boolean && skipInjectInitScript != null) { useArtifactoryGradlePluginField.set(overrider, skipInjectInitScript); // depends on control dependency: [if], data = [none] } } } catch (NoSuchFieldException | IllegalAccessException e) { converterErrors.add(getConversionErrorMessage(overrider, e)); } // depends on control dependency: [catch], data = [none] } } }
// Event-bus handler: checks whether the named shared queue is empty. Replies
// with status "error" when no "name" field is supplied or the async check
// fails, otherwise with status "ok" and the boolean result. The emptiness
// check itself runs off the event loop via context.execute.
public class class_name { private void doQueueIsEmpty(final Message<JsonObject> message) { final String name = message.body().getString("name"); if (name == null) { message.reply(new JsonObject().putString("status", "error").putString("message", "No name specified.")); return; } context.execute(new Action<Boolean>() { @Override public Boolean perform() { return data.getQueue(formatKey(name)).isEmpty(); } }, new Handler<AsyncResult<Boolean>>() { @Override public void handle(AsyncResult<Boolean> result) { if (result.failed()) { message.reply(new JsonObject().putString("status", "error").putString("message", result.cause().getMessage())); } else { message.reply(new JsonObject().putString("status", "ok").putBoolean("result", result.result())); } } }); } }
// Machine-annotated variant of doQueueIsEmpty: same queue-emptiness reply logic
// with generated "// depends on ..." markers (dataset annotations).
public class class_name { private void doQueueIsEmpty(final Message<JsonObject> message) { final String name = message.body().getString("name"); if (name == null) { message.reply(new JsonObject().putString("status", "error").putString("message", "No name specified.")); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } context.execute(new Action<Boolean>() { @Override public Boolean perform() { return data.getQueue(formatKey(name)).isEmpty(); } }, new Handler<AsyncResult<Boolean>>() { @Override public void handle(AsyncResult<Boolean> result) { if (result.failed()) { message.reply(new JsonObject().putString("status", "error").putString("message", result.cause().getMessage())); // depends on control dependency: [if], data = [none] } else { message.reply(new JsonObject().putString("status", "ok").putBoolean("result", result.result())); // depends on control dependency: [if], data = [none] } } }); } }
// Azure SDK paging helper: validates required parameters, then fetches a single
// page of BackupEngineBaseResource items as an Observable, converting the raw
// Retrofit response into a ServiceResponse<Page<...>> and funneling any
// delegate failure into Observable.error.
public class class_name { public Observable<ServiceResponse<Page<BackupEngineBaseResourceInner>>> getSinglePageAsync(final String vaultName, final String resourceGroupName, final String filter, final String skipToken) { if (vaultName == null) { throw new IllegalArgumentException("Parameter vaultName is required and cannot be null."); } if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } return service.get(vaultName, resourceGroupName, this.client.subscriptionId(), this.client.apiVersion(), filter, skipToken, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<BackupEngineBaseResourceInner>>>>() { @Override public Observable<ServiceResponse<Page<BackupEngineBaseResourceInner>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl1<BackupEngineBaseResourceInner>> result = getDelegate(response); return Observable.just(new ServiceResponse<Page<BackupEngineBaseResourceInner>>(result.body(), result.response())); } catch (Throwable t) { return Observable.error(t); } } }); } }
// Machine-annotated variant of getSinglePageAsync: same validation and paging
// logic with generated "// depends on ..." markers (dataset annotations).
public class class_name { public Observable<ServiceResponse<Page<BackupEngineBaseResourceInner>>> getSinglePageAsync(final String vaultName, final String resourceGroupName, final String filter, final String skipToken) { if (vaultName == null) { throw new IllegalArgumentException("Parameter vaultName is required and cannot be null."); } if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } return service.get(vaultName, resourceGroupName, this.client.subscriptionId(), this.client.apiVersion(), filter, skipToken, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<BackupEngineBaseResourceInner>>>>() { @Override public Observable<ServiceResponse<Page<BackupEngineBaseResourceInner>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl1<BackupEngineBaseResourceInner>> result = getDelegate(response); return Observable.just(new ServiceResponse<Page<BackupEngineBaseResourceInner>>(result.body(), result.response())); // depends on control dependency: [try], data = [none] } catch (Throwable t) { return Observable.error(t); } // depends on control dependency: [catch], data = [none] } }); } }
public class class_name {
    /**
     * Applies the given ignore-case setting to every item in the comparator
     * chain, following the {@code getNextComparatorItem()} links.
     *
     * @param comparatorItem head of the chain; may be {@code null} (no-op)
     * @param ignoreCase     the flag to propagate to each chain item
     */
    public static void doSetIgnoreCaseRecursively(ComparatorItem comparatorItem, boolean ignoreCase) {
        for (ComparatorItem item = comparatorItem; item != null; item = item.getNextComparatorItem()) {
            item.setIgnoreCase(ignoreCase);
        }
    }
}
// Machine-annotated variant of doSetIgnoreCaseRecursively: walks the comparator
// chain setting the ignore-case flag. The trailing "// depends on ..." markers
// are dataset annotations.
public class class_name { public static void doSetIgnoreCaseRecursively(ComparatorItem comparatorItem, boolean ignoreCase) { ComparatorItem tmp = comparatorItem; while (tmp != null) { tmp.setIgnoreCase(ignoreCase); // depends on control dependency: [while], data = [none] tmp = tmp.getNextComparatorItem(); // depends on control dependency: [while], data = [none] } } }
// Lazily resolves and caches the physical Resource backing this mapping.
// Returns null when the mapping has no physical root. On first resolution,
// ResourceUtil.toExactResource is used to get the filesystem's exact path; when
// that differs from the requested path (a case mismatch), relPath is realigned
// via extractRealpath — guarded by an equalsIgnoreCase check — and the
// class/package names are regenerated.
// NOTE(review): the method name "getPhyscalFile" (sic) is part of the public
// interface and must not be renamed here.
public class class_name { @Override public Resource getPhyscalFile() { if (physcalSource == null) { if (!mapping.hasPhysical()) { return null; } Resource tmp = mapping.getPhysical().getRealResource(relPath); physcalSource = ResourceUtil.toExactResource(tmp); // fix if the case not match if (!tmp.getAbsolutePath().equals(physcalSource.getAbsolutePath())) { String relpath = extractRealpath(relPath, physcalSource.getAbsolutePath()); // just a security! if (relPath.equalsIgnoreCase(relpath)) { this.relPath = relpath; createClassAndPackage(); } } } return physcalSource; } }
// Machine-annotated variant of getPhyscalFile: same lazy resolution and
// case-mismatch correction, with generated "// depends on ..." markers
// (dataset annotations).
public class class_name { @Override public Resource getPhyscalFile() { if (physcalSource == null) { if (!mapping.hasPhysical()) { return null; // depends on control dependency: [if], data = [none] } Resource tmp = mapping.getPhysical().getRealResource(relPath); physcalSource = ResourceUtil.toExactResource(tmp); // depends on control dependency: [if], data = [none] // fix if the case not match if (!tmp.getAbsolutePath().equals(physcalSource.getAbsolutePath())) { String relpath = extractRealpath(relPath, physcalSource.getAbsolutePath()); // just a security! if (relPath.equalsIgnoreCase(relpath)) { this.relPath = relpath; // depends on control dependency: [if], data = [none] createClassAndPackage(); // depends on control dependency: [if], data = [none] } } } return physcalSource; } }
// Liquid-style CASE/WHEN renderer: node[0] is the CASE condition; each WHEN group is a run of term
// nodes followed by a BlockNode, and a trailing BlockNode (last child) is the optional ELSE. The scan
// consumes terms until the group's block, marks 'hit' when any term equals the condition, and renders
// that block on a hit; with no match and no else-block it returns null. Note the loop index i is
// advanced inside the inner while as terms are consumed.
public class class_name { @Override public Object render(Map<String, Object> context, LNode... nodes) { // ^(CASE condition var // ^(WHEN term+ block) 1,2,3 b1 // ^(ELSE block?)) b2 Object condition = nodes[0].render(context); for (int i = 1; i < nodes.length; i++) { LNode node = nodes[i]; if(i == nodes.length - 1 && node instanceof BlockNode) { // this must be the trailing (optional) else-block return node.render(context); } else { boolean hit = false; // Iterate through the list of terms (of which we do not know the size): // - term (',' term)* // - term ('or' term)* // and stop when we encounter a BlockNode while(!(node instanceof BlockNode)) { Object whenExpressionValue = node.render(context); if (LValue.areEqual(condition, whenExpressionValue)) { hit = true; } i++; node = nodes[i]; } if(hit) { return node.render(context); } } } return null; } }
// Annotated variant of the CASE/WHEN renderer (inline control-dependency markers): consumes term nodes
// per WHEN group, renders the group's block when a term equals the CASE condition, and treats a
// trailing BlockNode as the optional ELSE.
public class class_name { @Override public Object render(Map<String, Object> context, LNode... nodes) { // ^(CASE condition var // ^(WHEN term+ block) 1,2,3 b1 // ^(ELSE block?)) b2 Object condition = nodes[0].render(context); for (int i = 1; i < nodes.length; i++) { LNode node = nodes[i]; if(i == nodes.length - 1 && node instanceof BlockNode) { // this must be the trailing (optional) else-block return node.render(context); // depends on control dependency: [if], data = [none] } else { boolean hit = false; // Iterate through the list of terms (of which we do not know the size): // - term (',' term)* // - term ('or' term)* // and stop when we encounter a BlockNode while(!(node instanceof BlockNode)) { Object whenExpressionValue = node.render(context); if (LValue.areEqual(condition, whenExpressionValue)) { hit = true; // depends on control dependency: [if], data = [none] } i++; // depends on control dependency: [while], data = [none] node = nodes[i]; // depends on control dependency: [while], data = [none] } if(hit) { return node.render(context); // depends on control dependency: [if], data = [none] } } } return null; } }
// Marks the wrapped transaction rollback-only and drives a subordinate rollback. Any failure is
// recorded through FFDC and event tracing rather than rethrown (best-effort by design). Entry/exit
// trace points are guarded by tc.isEntryEnabled().
public class class_name { public void rollback_only() { if (tc.isEntryEnabled()) Tr.entry(tc, "rollback_only"); try { _transaction.setRollbackOnly(); _transaction.subRollback(); } catch (Throwable exc) { FFDCFilter.processException(exc, "com.ibm.tx.remote.TransactionWrapper.rollback_only", "813", this); if (tc.isEventEnabled()) Tr.event(tc, "rollback_only caught exception setting coordinator rollback_only", exc); } if (tc.isEntryEnabled()) Tr.exit(tc, "rollback_only"); } }
// Annotated variant of rollback_only (inline control-dependency markers): sets rollback-only on the
// wrapped transaction, performs subordinate rollback, and swallows failures after FFDC/trace logging.
public class class_name { public void rollback_only() { if (tc.isEntryEnabled()) Tr.entry(tc, "rollback_only"); try { _transaction.setRollbackOnly(); // depends on control dependency: [try], data = [none] _transaction.subRollback(); // depends on control dependency: [try], data = [none] } catch (Throwable exc) { FFDCFilter.processException(exc, "com.ibm.tx.remote.TransactionWrapper.rollback_only", "813", this); if (tc.isEventEnabled()) Tr.event(tc, "rollback_only caught exception setting coordinator rollback_only", exc); } // depends on control dependency: [catch], data = [none] if (tc.isEntryEnabled()) Tr.exit(tc, "rollback_only"); } }
public class class_name {
    /**
     * Tells whether this path shares a terminal connection with the given path.
     * Both paths' first/last points are unwrapped before comparison; the paths
     * are connectable when any end of this path equals any end of the other.
     *
     * @param path the candidate path; must not be null
     * @return true when an end point of this path equals an end point of path;
     *         false for an empty path
     */
    @Pure
    public boolean isConnectableTo(RoadPath path) {
        assert path != null;
        if (path.isEmpty()) {
            return false;
        }
        // Unwrap the terminal connections of both paths before comparing.
        final RoadConnection myFirst = getFirstPoint().getWrappedRoadConnection();
        final RoadConnection myLast = getLastPoint().getWrappedRoadConnection();
        final RoadConnection otherFirst = path.getFirstPoint().getWrappedRoadConnection();
        final RoadConnection otherLast = path.getLastPoint().getWrappedRoadConnection();
        if (myFirst.equals(otherFirst) || myFirst.equals(otherLast)) {
            return true;
        }
        return myLast.equals(otherFirst) || myLast.equals(otherLast);
    }
}
// Annotated variant of isConnectableTo (inline control-dependency markers): unwraps the terminal
// connections of both paths and reports true when any end of this path equals any end of the other.
public class class_name { @Pure public boolean isConnectableTo(RoadPath path) { assert path != null; if (path.isEmpty()) { return false; // depends on control dependency: [if], data = [none] } RoadConnection first1 = getFirstPoint(); RoadConnection last1 = getLastPoint(); first1 = first1.getWrappedRoadConnection(); last1 = last1.getWrappedRoadConnection(); RoadConnection first2 = path.getFirstPoint(); RoadConnection last2 = path.getLastPoint(); first2 = first2.getWrappedRoadConnection(); last2 = last2.getWrappedRoadConnection(); return first1.equals(first2) || first1.equals(last2) || last1.equals(first2) || last1.equals(last2); } }
public class class_name {
    /**
     * Registers a card under the given key and makes it the current one,
     * hiding whichever card was visible before.
     *
     * @param key lookup key the card is stored under
     * @param card the canvas to add and display
     */
    @Api
    public void addCard(KEY_TYPE key, Canvas card) {
        final Canvas previous = currentCard;
        if (previous != null) {
            previous.hide();
        }
        addMember(card);
        currentCard = card;
        cards.put(key, card);
    }
}
// Annotated variant of addCard (inline control-dependency markers): hides the currently visible card,
// adds the new card as a member, records it as current, and stores it in the cards map under key.
public class class_name { @Api public void addCard(KEY_TYPE key, Canvas card) { if (currentCard != null) { currentCard.hide(); // depends on control dependency: [if], data = [none] } addMember(card); currentCard = card; cards.put(key, card); } }
public class class_name {
    /**
     * Returns the project finish date, preferring the cached value and
     * falling back to the parent file's finish date when none is cached.
     *
     * @return the finish date, possibly taken from the parent file
     */
    public Date getFinishDate() {
        final Date cached = (Date) getCachedValue(ProjectField.FINISH_DATE);
        return cached != null ? cached : getParentFile().getFinishDate();
    }
}
// Annotated variant of getFinishDate (inline control-dependency marker): returns the cached finish
// date, or the parent file's finish date when no cached value exists.
public class class_name { public Date getFinishDate() { Date result = (Date) getCachedValue(ProjectField.FINISH_DATE); if (result == null) { result = getParentFile().getFinishDate(); // depends on control dependency: [if], data = [none] } return (result); } }
// Factory for a dense KLT optical-flow algorithm: defaults the KLT config when null, derives the
// image-derivative type from the input type when not given, builds two discrete Gaussian pyramids from
// the config's scaling, a pyramidal KLT tracker and a Sobel gradient, and wires them together in a
// FlowKlt_to_DenseOpticalFlow adapter.
public class class_name { public static <I extends ImageGray<I>, D extends ImageGray<D>> DenseOpticalFlow<I> flowKlt(@Nullable PkltConfig configKlt, int radius , Class<I> inputType , Class<D> derivType ) { if( configKlt == null ) configKlt = new PkltConfig(); if( derivType == null ) { derivType = GImageDerivativeOps.getDerivativeType(inputType); } int numLayers = configKlt.pyramidScaling.length; ImageType<I> imagetype = ImageType.single(inputType); PyramidDiscrete<I> pyramidA = FactoryPyramid.discreteGaussian(configKlt.pyramidScaling, -1, 2, true, imagetype); PyramidDiscrete<I> pyramidB = FactoryPyramid.discreteGaussian(configKlt.pyramidScaling, -1, 2, true, imagetype); PyramidKltTracker<I, D> tracker = FactoryTrackerAlg.kltPyramid(configKlt.config, inputType, derivType); DenseOpticalFlowKlt<I, D> flowKlt = new DenseOpticalFlowKlt<>(tracker, numLayers, radius); ImageGradient<I, D> gradient = FactoryDerivative.sobel(inputType,derivType); return new FlowKlt_to_DenseOpticalFlow<>(flowKlt, gradient, pyramidA, pyramidB, inputType, derivType); } }
// Annotated variant of the flowKlt factory (inline control-dependency marker on the derivType default):
// same construction of pyramids, pyramidal KLT tracker, Sobel gradient and dense-flow adapter.
public class class_name { public static <I extends ImageGray<I>, D extends ImageGray<D>> DenseOpticalFlow<I> flowKlt(@Nullable PkltConfig configKlt, int radius , Class<I> inputType , Class<D> derivType ) { if( configKlt == null ) configKlt = new PkltConfig(); if( derivType == null ) { derivType = GImageDerivativeOps.getDerivativeType(inputType); // depends on control dependency: [if], data = [none] } int numLayers = configKlt.pyramidScaling.length; ImageType<I> imagetype = ImageType.single(inputType); PyramidDiscrete<I> pyramidA = FactoryPyramid.discreteGaussian(configKlt.pyramidScaling, -1, 2, true, imagetype); PyramidDiscrete<I> pyramidB = FactoryPyramid.discreteGaussian(configKlt.pyramidScaling, -1, 2, true, imagetype); PyramidKltTracker<I, D> tracker = FactoryTrackerAlg.kltPyramid(configKlt.config, inputType, derivType); DenseOpticalFlowKlt<I, D> flowKlt = new DenseOpticalFlowKlt<>(tracker, numLayers, radius); ImageGradient<I, D> gradient = FactoryDerivative.sobel(inputType,derivType); return new FlowKlt_to_DenseOpticalFlow<>(flowKlt, gradient, pyramidA, pyramidB, inputType, derivType); } }
// Caching factory for Interval instances keyed by all six arguments. The lookup/insert is performed
// under the cache's monitor; a DefaultInterval is created when any of the four bounds is null,
// otherwise a SimpleInterval. The same cached instance is returned for identical argument lists.
public class class_name { public Interval getInstance(Long minStart, Long maxStart, Granularity startGran, Long minFinish, Long maxFinish, Granularity finishGran) { List<Object> key = Arrays.asList(new Object[]{minStart, maxStart, startGran, minFinish, maxFinish, finishGran}); Interval result; synchronized (cache) { result = cache.get(key); if (result == null) { if (minStart == null || maxStart == null || minFinish == null || maxFinish == null) { result = new DefaultInterval(minStart, maxStart, startGran, minFinish, maxFinish, finishGran, null, null, null); } else { result = new SimpleInterval(minStart, maxStart, startGran, minFinish, maxFinish, finishGran); } cache.put(key, result); } } return result; } }
// Annotated variant of the Interval caching factory (inline control-dependency markers): synchronized
// get-or-create on the cache, choosing DefaultInterval when any bound is null, else SimpleInterval.
public class class_name { public Interval getInstance(Long minStart, Long maxStart, Granularity startGran, Long minFinish, Long maxFinish, Granularity finishGran) { List<Object> key = Arrays.asList(new Object[]{minStart, maxStart, startGran, minFinish, maxFinish, finishGran}); Interval result; synchronized (cache) { result = cache.get(key); if (result == null) { if (minStart == null || maxStart == null || minFinish == null || maxFinish == null) { result = new DefaultInterval(minStart, maxStart, startGran, minFinish, maxFinish, finishGran, null, null, null); // depends on control dependency: [if], data = [(minStart] } else { result = new SimpleInterval(minStart, maxStart, startGran, minFinish, maxFinish, finishGran); // depends on control dependency: [if], data = [(minStart] } cache.put(key, result); // depends on control dependency: [if], data = [none] } } return result; } }
// Marshals a GetConnectorDefinitionRequest for the AWS SDK protocol layer: rejects a null request,
// emits the connector-definition id under its marshalling binding, and wraps any failure in
// SdkClientException with the original cause preserved.
public class class_name { public void marshall(GetConnectorDefinitionRequest getConnectorDefinitionRequest, ProtocolMarshaller protocolMarshaller) { if (getConnectorDefinitionRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getConnectorDefinitionRequest.getConnectorDefinitionId(), CONNECTORDEFINITIONID_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
// Annotated variant of the GetConnectorDefinitionRequest marshaller (inline control-dependency
// markers): null-check, single binding emit, and SdkClientException wrapping on failure.
public class class_name { public void marshall(GetConnectorDefinitionRequest getConnectorDefinitionRequest, ProtocolMarshaller protocolMarshaller) { if (getConnectorDefinitionRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getConnectorDefinitionRequest.getConnectorDefinitionId(), CONNECTORDEFINITIONID_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Builds the list of "removed" changes for every day of this weekly
     * schedule after normalization.
     *
     * @return the concatenated removed-changes of each day, in iteration order
     */
    public List<Change> asRemovedChanges() {
        final List<Change> collected = new ArrayList<>();
        // Normalize first so each day contributes a canonical change set.
        for (final DayOpeningHours day : this.normalize()) {
            collected.addAll(day.asRemovedChanges());
        }
        return collected;
    }
}
// Annotated variant of asRemovedChanges (inline control-dependency marker): normalizes the weekly
// hours and concatenates each day's removed-changes into one list.
public class class_name { public List<Change> asRemovedChanges() { final WeeklyOpeningHours normalized = this.normalize(); final List<Change> changes = new ArrayList<>(); for (final DayOpeningHours doh : normalized) { changes.addAll(doh.asRemovedChanges()); // depends on control dependency: [for], data = [doh] } return changes; } }
public class class_name {
    /**
     * Checks that every character of the given string is upper case.
     *
     * @param cs the string to inspect; may be null
     * @return false for null/empty input or when any character is not upper
     *         case; true otherwise
     */
    public static boolean isAllUpperCase(final String cs) {
        // Null and empty strings are rejected outright.
        if (Strings.isNullOrEmpty(cs)) {
            return false;
        }
        for (final char ch : cs.toCharArray()) {
            if (!Character.isUpperCase(ch)) {
                return false;
            }
        }
        return true;
    }
}
// Annotated variant of isAllUpperCase (inline control-dependency markers): rejects null/empty input,
// then returns false on the first non-upper-case character, true otherwise.
public class class_name { public static boolean isAllUpperCase(final String cs) { if (Strings.isNullOrEmpty(cs)) { return false; // depends on control dependency: [if], data = [none] } final int sz = cs.length(); for (int i = 0; i < sz; i++) { if (!Character.isUpperCase(cs.charAt(i))) { return false; // depends on control dependency: [if], data = [none] } } return true; } }
public class class_name {
    /**
     * Sums the remaining byte counts of the buffers in the list, stopping at
     * the first null entry (a null slot marks the logical end of the list).
     *
     * @param list buffer array; may be null
     * @return total remaining bytes, or 0 for a null list
     */
    public static final int lengthOf(WsByteBuffer[] list) {
        if (null == list) {
            return 0;
        }
        int total = 0;
        for (final WsByteBuffer buffer : list) {
            if (null == buffer) {
                break;
            }
            total += buffer.remaining();
        }
        return total;
    }
}
// Annotated variant of lengthOf (inline control-dependency markers): returns 0 for a null array,
// otherwise sums remaining() of each buffer until the first null slot.
public class class_name { public static final int lengthOf(WsByteBuffer[] list) { if (null == list) { return 0; // depends on control dependency: [if], data = [none] } int length = 0; for (int i = 0; i < list.length && null != list[i]; i++) { length += list[i].remaining(); // depends on control dependency: [for], data = [i] } return length; } }
public class class_name {
    /**
     * Tests a string against a simple wildcard pattern where '*' may appear at
     * the start and/or end of the pattern: "*", "*x", "x*", "*x*", or a plain
     * literal. Pattern fragments are lower-cased before comparison; the
     * string to match is compared as given (the original code only lower-cased
     * the pattern side), and a plain literal falls back to equalsIgnoreCase.
     *
     * @param pattern non-empty wildcard pattern
     * @param stringToMatch candidate string
     * @return true when the string matches the pattern
     */
    private boolean wildcardMatches(String pattern, String stringToMatch) {
        boolean match = false;
        int length = pattern.length();
        if (pattern.charAt(0) == '*') {
            if (length == 1) {
                match = true; // *
            } else if (pattern.charAt(length - 1) == '*' && length > 2
                    && stringToMatch.contains(pattern.substring(1, length - 1).toLowerCase())) {
                // FIX: was substring(1, length - 3), which dropped the last two
                // characters of the inner text instead of just the trailing '*'
                // (and threw StringIndexOutOfBoundsException for "*a*").
                match = true; // *match*
            } else if (length > 1 && stringToMatch.endsWith(pattern.substring(1).toLowerCase())) {
                match = true; // *match
            }
        } else if (pattern.charAt(length - 1) == '*'
                && stringToMatch.startsWith(pattern.substring(0, length - 1).toLowerCase())) {
            // FIX: was substring(0, length - 2), which dropped the final
            // literal character before the trailing '*' as well.
            match = true; // match*
        } else if (pattern.equalsIgnoreCase(stringToMatch)) { // match
            match = true;
        }
        return match;
    }
}
// Annotated variant of wildcardMatches (inline control-dependency markers).
// NOTE(review): substring(1, length-3) for the "*match*" case and substring(0, length-2) for the
// "match*" case each cut off extra characters (and substring(1, length-3) throws for a pattern like
// "*a*"); the end indices likely should both be length-1 — confirm before relying on these branches.
public class class_name { private boolean wildcardMatches(String pattern, String stringToMatch) { boolean match = false; int length = pattern.length(); if(pattern.charAt(0) == '*') { if(length == 1) { match = true; // * // depends on control dependency: [if], data = [none] } else if(pattern.charAt(length-1) == '*' && length > 2 && stringToMatch.contains(pattern.substring(1, length-3).toLowerCase())) { match = true; // *match* // depends on control dependency: [if], data = [none] } else if(length > 1 && stringToMatch.endsWith(pattern.substring(1).toLowerCase())) { match = true; // *match // depends on control dependency: [if], data = [none] } } else if(pattern.charAt(length-1) == '*' && stringToMatch.startsWith(pattern.substring(0, length-2).toLowerCase())) { match = true; // match* // depends on control dependency: [if], data = [none] } else if(pattern.equalsIgnoreCase(stringToMatch)) { // match match = true; // depends on control dependency: [if], data = [none] } return match; } }
public class class_name {
    /**
     * Checks whether the given string is a recognised boolean literal,
     * i.e. case-insensitively equal to BooleanValues.TRUE or BooleanValues.FALSE.
     *
     * @param booleanStr candidate text; may be null or blank
     * @return true only for non-blank strings matching TRUE or FALSE
     */
    public static boolean isValid(@Nullable final String booleanStr) {
        if (StringUtils.isBlank(booleanStr)) {
            return false;
        }
        final String normalized = getLowerCaseString(booleanStr);
        if (normalized.equals(BooleanValues.TRUE)) {
            return true;
        }
        return normalized.equals(BooleanValues.FALSE);
    }
}
// Annotated variant of isValid (inline control-dependency marker): blank strings are invalid; other
// input is lower-cased and compared against the TRUE/FALSE literals.
public class class_name { public static boolean isValid(@Nullable final String booleanStr) { if (StringUtils.isBlank(booleanStr)) { return false; // depends on control dependency: [if], data = [none] } final String lowerCaseBoolean = getLowerCaseString(booleanStr); return lowerCaseBoolean.equals(BooleanValues.TRUE) || lowerCaseBoolean.equals(BooleanValues.FALSE); } }
// Converts this relational plan into a Druid GroupByQuery. Returns null when there is no grouping.
// The filter is optimized against the source query signature; a having-filter (when present) becomes a
// DimFilterHavingSpec; post-aggregators from the sort projection are appended to the grouping's; the
// final query uses ALL granularity and copies the planner's query context.
public class class_name { @Nullable public GroupByQuery toGroupByQuery() { if (grouping == null) { return null; } final Filtration filtration = Filtration.create(filter).optimize(sourceQuerySignature); final DimFilterHavingSpec havingSpec; if (grouping.getHavingFilter() != null) { havingSpec = new DimFilterHavingSpec( Filtration.create(grouping.getHavingFilter()).optimizeFilterOnly(sourceQuerySignature).getDimFilter(), true ); } else { havingSpec = null; } final List<PostAggregator> postAggregators = new ArrayList<>(grouping.getPostAggregators()); if (sortProject != null) { postAggregators.addAll(sortProject.getPostAggregators()); } return new GroupByQuery( dataSource, filtration.getQuerySegmentSpec(), getVirtualColumns(true), filtration.getDimFilter(), Granularities.ALL, grouping.getDimensionSpecs(), grouping.getAggregatorFactories(), postAggregators, havingSpec, limitSpec, null, ImmutableSortedMap.copyOf(plannerContext.getQueryContext()) ); } }
// Annotated variant of toGroupByQuery (inline control-dependency markers): same null-grouping guard,
// filter optimization, optional having-spec conversion and post-aggregator merge before assembly.
public class class_name { @Nullable public GroupByQuery toGroupByQuery() { if (grouping == null) { return null; // depends on control dependency: [if], data = [none] } final Filtration filtration = Filtration.create(filter).optimize(sourceQuerySignature); final DimFilterHavingSpec havingSpec; if (grouping.getHavingFilter() != null) { havingSpec = new DimFilterHavingSpec( Filtration.create(grouping.getHavingFilter()).optimizeFilterOnly(sourceQuerySignature).getDimFilter(), true ); // depends on control dependency: [if], data = [none] } else { havingSpec = null; // depends on control dependency: [if], data = [none] } final List<PostAggregator> postAggregators = new ArrayList<>(grouping.getPostAggregators()); if (sortProject != null) { postAggregators.addAll(sortProject.getPostAggregators()); // depends on control dependency: [if], data = [(sortProject] } return new GroupByQuery( dataSource, filtration.getQuerySegmentSpec(), getVirtualColumns(true), filtration.getDimFilter(), Granularities.ALL, grouping.getDimensionSpecs(), grouping.getAggregatorFactories(), postAggregators, havingSpec, limitSpec, null, ImmutableSortedMap.copyOf(plannerContext.getQueryContext()) ); } }
public class class_name {
    /**
     * Searches the current web page for an element matching the locator and
     * returns the requested match from the accumulated element list.
     *
     * @param by locator for the element
     * @param minimumNumberOfMatches requested match count; values below 1 are
     *        clamped to 1
     * @return the matching WebElement from the accumulated list
     */
    public WebElement searchForWebElement(final By by, int minimumNumberOfMatches) {
        // Clamp the requested match count to at least one.
        minimumNumberOfMatches = Math.max(1, minimumNumberOfMatches);
        final List<WebElement> viewsFromScreen = webUtils.getWebElements(by, true);
        addViewsToList(webElements, viewsFromScreen);
        return getViewFromList(webElements, minimumNumberOfMatches);
    }
}
// Annotated variant of searchForWebElement (inline control-dependency marker): clamps the requested
// match count to 1, collects on-screen web elements, merges them into the list, and returns the match.
public class class_name { public WebElement searchForWebElement(final By by, int minimumNumberOfMatches){ if(minimumNumberOfMatches < 1){ minimumNumberOfMatches = 1; // depends on control dependency: [if], data = [none] } List<WebElement> viewsFromScreen = webUtils.getWebElements(by, true); addViewsToList (webElements, viewsFromScreen); return getViewFromList(webElements, minimumNumberOfMatches); } }
public class class_name {
    /**
     * Renders the formal type parameters of a type as source text, e.g.
     * {@code <K, V extends Comparable<V>>}, or the empty string when the type
     * declares no type parameters.
     *
     * @param type the type whose parameters are printed
     * @return the angle-bracketed, comma-separated parameter list, or ""
     */
    static String formalTypeParametersString(TypeElement type) {
        List<? extends TypeParameterElement> typeParameters = type.getTypeParameters();
        if (typeParameters.isEmpty()) {
            return "";
        }
        StringBuilder sb = new StringBuilder("<");
        for (int i = 0; i < typeParameters.size(); i++) {
            // Separator goes before every parameter except the first.
            if (i > 0) {
                sb.append(", ");
            }
            appendTypeParameterWithBounds(typeParameters.get(i), sb);
        }
        return sb.append(">").toString();
    }
}
// Annotated variant of formalTypeParametersString (inline control-dependency markers): returns "" for
// no parameters, otherwise joins each parameter (with bounds) between angle brackets.
public class class_name { static String formalTypeParametersString(TypeElement type) { List<? extends TypeParameterElement> typeParameters = type.getTypeParameters(); if (typeParameters.isEmpty()) { return ""; // depends on control dependency: [if], data = [none] } else { StringBuilder sb = new StringBuilder("<"); String sep = ""; for (TypeParameterElement typeParameter : typeParameters) { sb.append(sep); // depends on control dependency: [for], data = [none] sep = ", "; // depends on control dependency: [for], data = [none] appendTypeParameterWithBounds(typeParameter, sb); // depends on control dependency: [for], data = [typeParameter] } return sb.append(">").toString(); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Builds the SQL statement that re-applies this table's text-source
     * header: {@code SET TABLE <name> SOURCE HEADER <header>}.
     *
     * @return the statement text, or null when the table has no header
     */
    String getDataSourceHeader() {
        String header = getHeader();
        if (header == null) {
            return null;
        }
        // StringBuilder replaces the legacy StringBuffer: this buffer is
        // local and single-threaded, so the synchronized variant buys nothing.
        StringBuilder sb = new StringBuilder(128);
        sb.append(Tokens.T_SET).append(' ').append(Tokens.T_TABLE).append(' ');
        sb.append(getName().getSchemaQualifiedStatementName());
        sb.append(' ').append(Tokens.T_SOURCE).append(' ');
        sb.append(Tokens.T_HEADER).append(' ');
        sb.append(header);
        return sb.toString();
    }
}
// Annotated variant of getDataSourceHeader (inline control-dependency marker): returns null without a
// header, otherwise assembles "SET TABLE <name> SOURCE HEADER <header>" into a StringBuffer.
public class class_name { String getDataSourceHeader() { String header = getHeader(); if (header == null) { return null; // depends on control dependency: [if], data = [none] } StringBuffer sb = new StringBuffer(128); sb.append(Tokens.T_SET).append(' ').append(Tokens.T_TABLE).append(' '); sb.append(getName().getSchemaQualifiedStatementName()); sb.append(' ').append(Tokens.T_SOURCE).append(' '); sb.append(Tokens.T_HEADER).append(' '); sb.append(header); return sb.toString(); } }
public class class_name {
    /**
     * Null-safe deep equality: arrays of any primitive or reference type are
     * compared element-wise, everything else via {@code equals}. Implemented
     * by boxing both values into single-element object arrays and delegating
     * to {@link java.util.Arrays#deepEquals}, whose per-element dispatch
     * performs exactly the same instanceof chain as a hand-written one.
     */
    static boolean deepEquals(final Object a, final Object b) {
        if (a == null || b == null) {
            return a == b;
        }
        return Arrays.deepEquals(new Object[] {a}, new Object[] {b});
    }

    /** Null-safe equivalent of {@code a.equals(b)}. */
    static boolean equals(final Object a, final Object b) {
        if (a == null) {
            return b == null;
        }
        return a.equals(b);
    }

    /**
     * Convenience wrapper for {@link Arrays#hashCode}, adding varargs.
     * This can be used to compute a hash code for an object's fields as
     * follows: {@code Objects.hash(a, b, c)}.
     */
    static int hash(final Object... values) {
        return Arrays.hashCode(values);
    }

    /** Returns "null" for null or {@code o.toString()}. */
    static String toString(final Object o) {
        if (o == null) {
            return "null";
        }
        return o.toString();
    }
}
// Annotated variant of the null-safe equality utility class (inline control-dependency markers on each
// branch of the deepEquals instanceof chain); also carries the equals/hash/toString helpers. The class
// is split across two source rows of this dataset dump.
public class class_name { static boolean deepEquals(final Object a, final Object b) { if (a == null || b == null) { return a == b; // depends on control dependency: [if], data = [none] } else if (a instanceof Object[] && b instanceof Object[]) { return Arrays.deepEquals((Object[]) a, (Object[]) b); // depends on control dependency: [if], data = [none] } else if (a instanceof boolean[] && b instanceof boolean[]) { return Arrays.equals((boolean[]) a, (boolean[]) b); // depends on control dependency: [if], data = [none] } else if (a instanceof byte[] && b instanceof byte[]) { return Arrays.equals((byte[]) a, (byte[]) b); // depends on control dependency: [if], data = [none] } else if (a instanceof char[] && b instanceof char[]) { return Arrays.equals((char[]) a, (char[]) b); // depends on control dependency: [if], data = [none] } else if (a instanceof double[] && b instanceof double[]) { return Arrays.equals((double[]) a, (double[]) b); // depends on control dependency: [if], data = [none] } else if (a instanceof float[] && b instanceof float[]) { return Arrays.equals((float[]) a, (float[]) b); // depends on control dependency: [if], data = [none] } else if (a instanceof int[] && b instanceof int[]) { return Arrays.equals((int[]) a, (int[]) b); // depends on control dependency: [if], data = [none] } else if (a instanceof long[] && b instanceof long[]) { return Arrays.equals((long[]) a, (long[]) b); // depends on control dependency: [if], data = [none] } else if (a instanceof short[] && b instanceof short[]) { return Arrays.equals((short[]) a, (short[]) b); // depends on control dependency: [if], data = [none] } return a.equals(b); } /** * Null-safe equivalent of {@code a.equals(b)}. */ static boolean equals(final Object a, final Object b) { return (a == null) ? (b == null) : a.equals(b); } /** * Convenience wrapper for {@link Arrays#hashCode}, adding varargs. * This can be used to compute a hash code for an object's fields as follows: * {@code Objects.hash(a, b, c)}. 
*/ static int hash(final Object... values) { return Arrays.hashCode(values); } /** * Returns "null" for null or {@code o.toString()}. */ static String toString(final Object o) { return (o == null) ? "null" : o.toString(); } }
// Chooses the edge out of node 'a' that best closes a triangle with node 'b': both nodes must reach a
// common unmarked neighbor; candidate pairs must pass the optional side check against the a->b edge,
// have a sufficient angle between them, and lie at approximately equal axis-adjusted distances. The
// score combines normalized distance and angle; the lowest-scoring edge of 'a' wins. Returns null when
// 'a' has no direct edge to 'b' (already flagged "TODO BUG! FIX!" by the original author).
public class class_name { static Edge selectClosest( NodeInfo a , NodeInfo b , boolean checkSide ) { double bestScore = Double.MAX_VALUE; Edge bestEdgeA = null; Edge edgeAB = a.findEdge(b); double distAB = a.distance(b); if( edgeAB == null ) { return null;// TODO BUG! FIX! } for (int i = 0; i < a.edges.size; i++) { Edge edgeA = a.edges.get(i); NodeInfo aa = a.edges.get(i).target; if( aa.marked ) continue; for (int j = 0; j < b.edges.size; j++) { Edge edgeB = b.edges.get(j); NodeInfo bb = b.edges.get(j).target; if( bb.marked ) continue; if( aa == bb ) { // System.out.println("center "+aa.ellipse.center); if( checkSide && UtilAngle.distanceCW(edgeAB.angle,edgeA.angle) > Math.PI*0.75 ) continue; double angle = UtilAngle.dist(edgeA.angle,edgeB.angle); if( angle < 0.3 ) continue; double da = EllipsesIntoClusters.axisAdjustedDistanceSq(a.ellipse,aa.ellipse); double db = EllipsesIntoClusters.axisAdjustedDistanceSq(b.ellipse,aa.ellipse); da = Math.sqrt(da); db = Math.sqrt(db); // see if they are approximately the same distance double diffRatio = Math.abs(da-db)/Math.max(da,db); if( diffRatio > 0.3 ) continue; // TODO reject if too far double d = (da+db)/distAB + 0.1*angle; if( d < bestScore ) { bestScore = d; bestEdgeA = a.edges.get(i); } break; } } } return bestEdgeA; } }
// Annotated variant of selectClosest (inline control-dependency markers): same triangle-closing search
// over the edges of 'a' and 'b' with side/angle/distance-ratio filters and lowest-score selection.
public class class_name { static Edge selectClosest( NodeInfo a , NodeInfo b , boolean checkSide ) { double bestScore = Double.MAX_VALUE; Edge bestEdgeA = null; Edge edgeAB = a.findEdge(b); double distAB = a.distance(b); if( edgeAB == null ) { return null;// TODO BUG! FIX! // depends on control dependency: [if], data = [none] } for (int i = 0; i < a.edges.size; i++) { Edge edgeA = a.edges.get(i); NodeInfo aa = a.edges.get(i).target; if( aa.marked ) continue; for (int j = 0; j < b.edges.size; j++) { Edge edgeB = b.edges.get(j); NodeInfo bb = b.edges.get(j).target; if( bb.marked ) continue; if( aa == bb ) { // System.out.println("center "+aa.ellipse.center); if( checkSide && UtilAngle.distanceCW(edgeAB.angle,edgeA.angle) > Math.PI*0.75 ) continue; double angle = UtilAngle.dist(edgeA.angle,edgeB.angle); if( angle < 0.3 ) continue; double da = EllipsesIntoClusters.axisAdjustedDistanceSq(a.ellipse,aa.ellipse); double db = EllipsesIntoClusters.axisAdjustedDistanceSq(b.ellipse,aa.ellipse); da = Math.sqrt(da); // depends on control dependency: [if], data = [none] db = Math.sqrt(db); // depends on control dependency: [if], data = [none] // see if they are approximately the same distance double diffRatio = Math.abs(da-db)/Math.max(da,db); if( diffRatio > 0.3 ) continue; // TODO reject if too far double d = (da+db)/distAB + 0.1*angle; if( d < bestScore ) { bestScore = d; // depends on control dependency: [if], data = [none] bestEdgeA = a.edges.get(i); // depends on control dependency: [if], data = [none] } break; } } } return bestEdgeA; } }
// Recursively collects the static members named 'identifier' that are visible from 'start': first
// gathers matches from the supertype and all interfaces of typeSym, then filters typeSym's own members
// — non-static and private members are skipped, package-private/protected require the same package,
// and the member must pass types.isMemberOf(start, ...). Uses a labeled continue to reject a member
// from inside the access-modifier switch.
public class class_name { private static ImmutableSet<Symbol> lookup( Symbol.TypeSymbol typeSym, Symbol.TypeSymbol start, Name identifier, Types types, Symbol.PackageSymbol pkg) { if (typeSym == null) { return ImmutableSet.of(); } ImmutableSet.Builder<Symbol> members = ImmutableSet.builder(); members.addAll(lookup(types.supertype(typeSym.type).tsym, start, identifier, types, pkg)); for (Type i : types.interfaces(typeSym.type)) { members.addAll(lookup(i.tsym, start, identifier, types, pkg)); } OUTER: for (Symbol member : typeSym.members().getSymbolsByName(identifier)) { if (!member.isStatic()) { continue; } switch ((int) (member.flags() & Flags.AccessFlags)) { case Flags.PRIVATE: continue OUTER; case 0: case Flags.PROTECTED: if (member.packge() != pkg) { continue OUTER; } break; case Flags.PUBLIC: default: break; } if (member.isMemberOf(start, types)) { members.add(member); } } return members.build(); } }
// Annotated variant of lookup (inline control-dependency markers): same recursive collection of
// visible static members over supertype/interfaces with access-flag filtering and isMemberOf check.
public class class_name { private static ImmutableSet<Symbol> lookup( Symbol.TypeSymbol typeSym, Symbol.TypeSymbol start, Name identifier, Types types, Symbol.PackageSymbol pkg) { if (typeSym == null) { return ImmutableSet.of(); // depends on control dependency: [if], data = [none] } ImmutableSet.Builder<Symbol> members = ImmutableSet.builder(); members.addAll(lookup(types.supertype(typeSym.type).tsym, start, identifier, types, pkg)); for (Type i : types.interfaces(typeSym.type)) { members.addAll(lookup(i.tsym, start, identifier, types, pkg)); // depends on control dependency: [for], data = [i] } OUTER: for (Symbol member : typeSym.members().getSymbolsByName(identifier)) { if (!member.isStatic()) { continue; } switch ((int) (member.flags() & Flags.AccessFlags)) { case Flags.PRIVATE: continue OUTER; case 0: case Flags.PROTECTED: if (member.packge() != pkg) { continue OUTER; } break; case Flags.PUBLIC: default: break; } if (member.isMemberOf(start, types)) { members.add(member); // depends on control dependency: [if], data = [none] } } return members.build(); } }
public class class_name { @SuppressWarnings({ "unchecked", "rawtypes" }) @Override public T reduce(T value1, T value2) throws Exception { for (int position : fields) { // Save position of compared key // Get both values - both implement comparable Comparable comparable1 = value1.getFieldNotNull(position); Comparable comparable2 = value2.getFieldNotNull(position); // Compare values int comp = comparable1.compareTo(comparable2); // If comp is smaller than 0 comparable 1 is smaller. // Return the smaller value. if (comp < 0) { return value1; } else if (comp > 0) { return value2; } } return value1; } }
// Annotated variant of reduce (inline control-dependency markers): walks the key-field positions and
// returns whichever value has the smaller first-differing field; ties keep value1.
public class class_name { @SuppressWarnings({ "unchecked", "rawtypes" }) @Override public T reduce(T value1, T value2) throws Exception { for (int position : fields) { // Save position of compared key // Get both values - both implement comparable Comparable comparable1 = value1.getFieldNotNull(position); Comparable comparable2 = value2.getFieldNotNull(position); // Compare values int comp = comparable1.compareTo(comparable2); // If comp is smaller than 0 comparable 1 is smaller. // Return the smaller value. if (comp < 0) { return value1; // depends on control dependency: [if], data = [none] } else if (comp > 0) { return value2; // depends on control dependency: [if], data = [none] } } return value1; } }
// Serializes this bit vector into a long[]: takes an aligned copy, reverses the order of the words in
// place (swapping from both ends toward the middle), then masks out of the first word any bits above
// 'finish' that a clone-produced copy might still carry. Returns the reversed, masked array.
public class class_name { long[] toLongArray() { // create array through an aligned copy BitVector copy = alignedCopy(); long[] longs = copy.bits; int length = longs.length; if (length == 0) return longs; // reverse the array for (int i = 0, mid = length >> 1, j = length - 1; i < mid; i++, j--) { long t = longs[i]; longs[i] = longs[j]; longs[j] = t; } // mask off top bits in case copy was produced via clone final long mask = -1L >>> (ADDRESS_SIZE - copy.finish & ADDRESS_MASK); longs[0] &= mask; // return the result return longs; } }
// Annotated variant of toLongArray (inline control-dependency markers): aligned copy, in-place word
// reversal, and a final mask of the first word's bits above 'finish'.
public class class_name { long[] toLongArray() { // create array through an aligned copy BitVector copy = alignedCopy(); long[] longs = copy.bits; int length = longs.length; if (length == 0) return longs; // reverse the array for (int i = 0, mid = length >> 1, j = length - 1; i < mid; i++, j--) { long t = longs[i]; longs[i] = longs[j]; // depends on control dependency: [for], data = [i] longs[j] = t; // depends on control dependency: [for], data = [none] } // mask off top bits in case copy was produced via clone final long mask = -1L >>> (ADDRESS_SIZE - copy.finish & ADDRESS_MASK); longs[0] &= mask; // return the result return longs; } }
// javac-style parser routine for a braced block: consumes LBRACE, parses the contained statements, and
// builds the Block node. Any stray CASE/DEFAULT tokens after the statements are reported as "orphaned"
// and their groups skipped. Records endpos (first char of the last token) before consuming RBRACE.
public class class_name { JCBlock block(int pos, long flags) { accept(LBRACE); List<JCStatement> stats = blockStatements(); JCBlock t = F.at(pos).Block(flags, stats); while (token.kind == CASE || token.kind == DEFAULT) { syntaxError("orphaned", token.kind); switchBlockStatementGroups(); } // the Block node has a field "endpos" for first char of last token, which is // usually but not necessarily the last char of the last token. t.endpos = token.pos; accept(RBRACE); return toP(t); } }
// Annotated variant of the block parser (inline control-dependency markers): same LBRACE/statements/
// orphaned-case recovery/endpos/RBRACE sequence.
public class class_name { JCBlock block(int pos, long flags) { accept(LBRACE); List<JCStatement> stats = blockStatements(); JCBlock t = F.at(pos).Block(flags, stats); while (token.kind == CASE || token.kind == DEFAULT) { syntaxError("orphaned", token.kind); // depends on control dependency: [while], data = [none] switchBlockStatementGroups(); // depends on control dependency: [while], data = [none] } // the Block node has a field "endpos" for first char of last token, which is // usually but not necessarily the last char of the last token. t.endpos = token.pos; accept(RBRACE); return toP(t); } }
public class class_name { public static boolean read(InputStream ios, BufrTables.Tables tables) throws IOException { if (ios == null) return false; if (tables.b == null) tables.b = new TableB("fake", "fake"); if (tables.d == null) tables.d = new TableD("fake", "fake"); HashMap<String, String> number = new HashMap<>(); // key = mnemonic value = fxy HashMap<String, String> desc = new HashMap<>(); // key = mnemonic value = description HashMap<String, String> mnseq = new HashMap<>(); try { BufferedReader dataIS = new BufferedReader(new InputStreamReader(ios, CDM.utf8Charset)); // read mnemonic table Matcher m; // read header info and disregard while (true) { String line = dataIS.readLine(); if (line == null) throw new RuntimeException("Bad NCEP mnemonic BUFR table "); if (line.contains("MNEMONIC")) break; } // read mnemonic, number, and description //| HEADR | 362001 | TABLE D ENTRY - PROFILE COORDINATES | while (true) { String line = dataIS.readLine(); if (line == null) break; if (line.contains("MNEMONIC")) break; if (line.contains("----")) continue; if (line.startsWith("*")) continue; if (line.startsWith("| ")) continue; m = fields3.matcher(line); if (m.find()) { String mnu = m.group(1).trim(); String fxy = m.group(2).trim(); if (fxy.startsWith("3")) { number.put(mnu, fxy); desc.put(mnu, m.group(3).replace("TABLE D ENTRY - ", "").trim()); } else if (fxy.startsWith("0")) { number.put(mnu, fxy); desc.put(mnu, m.group(3).replace("TABLE B ENTRY - ", "").trim()); } else if (fxy.startsWith("A")) { number.put(mnu, fxy); desc.put(mnu, m.group(3).replace("TABLE A ENTRY - ", "").trim()); } } else if (debugTable) { System.out.println("bad mnemonic, number, and description: " + line); } } // read in sequences using mnemonics //| ETACLS1 | HEADR {PROFILE} SURF FLUX HYDR D10M {SLYR} XTRA | while (true) { String line = dataIS.readLine(); if (line == null) break; if (line.contains("MNEMONIC")) break; if (line.contains("----")) continue; if (line.startsWith("| ")) continue; if 
(line.startsWith("*")) continue; m = fields2.matcher(line); if (m.find()) { String mnu = m.group(1).trim(); if (mnseq.containsKey(mnu)) { // concat lines with same mnu String value = mnseq.get(mnu); value = value + " " + m.group(2); mnseq.put(mnu, value); } else { mnseq.put(mnu, m.group(2)); } } else if (debugTable) { System.out.println("bad sequence mnemonic: " + line); } } // create sequences, replacing mnemonics with numbers for (Map.Entry<String, String> ent : mnseq.entrySet()) { String seq = ent.getValue(); seq = seq.replaceAll("\\<", "1-1-0 0-31-0 "); seq = seq.replaceAll("\\>", ""); seq = seq.replaceAll("\\{", "1-1-0 0-31-1 "); seq = seq.replaceAll("\\}", ""); seq = seq.replaceAll("\\(", "1-1-0 0-31-2 "); seq = seq.replaceAll("\\)", ""); StringTokenizer stoke = new StringTokenizer(seq, " "); List<Short> list = new ArrayList<>(); while (stoke.hasMoreTokens()) { String mn = stoke.nextToken(); if (mn.charAt(1) == '-') { list.add(Descriptor.getFxy(mn)); continue; } // element descriptor needs hyphens m = ints6.matcher(mn); if (m.find()) { String F = mn.substring(0, 1); String X = removeLeading0(mn.substring(1, 3)); String Y = removeLeading0(mn.substring(3)); list.add(Descriptor.getFxy(F + "-" + X + "-" + Y)); continue; } if (mn.startsWith("\"")) { int idx = mn.lastIndexOf('"'); String count = mn.substring(idx + 1); list.add(Descriptor.getFxy("1-1-" + count)); mn = mn.substring(1, idx); } if (mn.startsWith(".")) { String des = mn.substring(mn.length() - 4); mn = mn.replace(des, "...."); } String fxy = number.get(mn); String F = fxy.substring(0, 1); String X = removeLeading0(fxy.substring(1, 3)); String Y = removeLeading0(fxy.substring(3)); list.add(Descriptor.getFxy(F + "-" + X + "-" + Y)); } String fxy = number.get(ent.getKey()); String X = removeLeading0(fxy.substring(1, 3)); String Y = removeLeading0(fxy.substring(3)); // these are in latest tables if (XlocalCutoff > Integer.parseInt(X) && YlocalCutoff > Integer.parseInt(Y)) continue; //key = F + "-" + X + "-" 
+ Y; short seqX = Short.parseShort(X.trim()); short seqY = Short.parseShort(Y.trim()); tables.d.addDescriptor(seqX, seqY, ent.getKey(), list); //short id = Descriptor.getFxy(key); //sequences.put(Short.valueOf(id), tableD); } // add some static repetition sequences // LOOK why? List<Short> list = new ArrayList<>(); // 16 bit delayed repetition list.add(Descriptor.getFxy("1-1-0")); list.add(Descriptor.getFxy("0-31-2")); tables.d.addDescriptor((short) 60, (short) 1, "", list); //tableD = new DescriptorTableD("", "3-60-1", list, false); //tableD.put( "3-60-1", d); //short id = Descriptor.getFxy("3-60-1"); //sequences.put(Short.valueOf(id), tableD); list = new ArrayList<>(); // 8 bit delayed repetition list.add(Descriptor.getFxy("1-1-0")); list.add(Descriptor.getFxy("0-31-1")); tables.d.addDescriptor((short) 60, (short) 2, "", list); //tableD = new DescriptorTableD("", "3-60-2", list, false); //tableD.put( "3-60-2", d); //id = Descriptor.getFxy("3-60-2"); //sequences.put(Short.valueOf(id), tableD); list = new ArrayList<>(); // 8 bit delayed repetition list.add(Descriptor.getFxy("1-1-0")); list.add(Descriptor.getFxy("0-31-1")); tables.d.addDescriptor((short) 60, (short) 3, "", list); //tableD = new DescriptorTableD("", "3-60-3", list, false); //tableD.put( "3-60-3", d); //id = Descriptor.getFxy("3-60-3"); //sequences.put(Short.valueOf(id), tableD); list = new ArrayList<>(); // 1 bit delayed repetition list.add(Descriptor.getFxy("1-1-0")); list.add(Descriptor.getFxy("0-31-0")); tables.d.addDescriptor((short) 60, (short) 4, "", list); //tableD = new DescriptorTableD("", "3-60-4", list, false); //tableD.put( "3-60-4", d); //id = Descriptor.getFxy("3-60-4"); //sequences.put(Short.valueOf(id), tableD); // add in element descriptors // MNEMONIC | SCAL | REFERENCE | BIT | UNITS //| FTIM | 0 | 0 | 24 | SECONDS |-------------| //tableB = new TableB(tablename, tablename); while (true) { String line = dataIS.readLine(); if (line == null) break; if (line.contains("MNEMONIC")) 
break; if (line.startsWith("| ")) continue; if (line.startsWith("*")) continue; m = fields5.matcher(line); if (m.find()) { if (m.group(1).equals("")) { continue; } else if (number.containsKey(m.group(1).trim())) { // add descriptor to tableB String fxy = number.get(m.group(1).trim()); String X = fxy.substring(1, 3); String Y = fxy.substring(3); String mnu = m.group(1).trim(); String descr = desc.get(mnu); short x = Short.parseShort(X.trim()); short y = Short.parseShort(Y.trim()); // these are in latest tables so skip LOOK WHY if (XlocalCutoff > x && YlocalCutoff > y) continue; int scale = Integer.parseInt(m.group(2).trim()); int refVal = Integer.parseInt(m.group(3).trim()); int width = Integer.parseInt(m.group(4).trim()); String units = m.group(5).trim(); tables.b.addDescriptor(x, y, scale, refVal, width, mnu, units, descr); } else if (debugTable) { System.out.println("bad element descriptors: " + line); } } } } finally { ios.close(); } // LOOK why ? // default for NCEP // 0; 63; 0; 0; 0; 16; Numeric; Byte count tables.b.addDescriptor((short) 63, (short) 0, 0, 0, 16, "Byte count", "Numeric", null); return true; } }
public class class_name { public static boolean read(InputStream ios, BufrTables.Tables tables) throws IOException { if (ios == null) return false; if (tables.b == null) tables.b = new TableB("fake", "fake"); if (tables.d == null) tables.d = new TableD("fake", "fake"); HashMap<String, String> number = new HashMap<>(); // key = mnemonic value = fxy HashMap<String, String> desc = new HashMap<>(); // key = mnemonic value = description HashMap<String, String> mnseq = new HashMap<>(); try { BufferedReader dataIS = new BufferedReader(new InputStreamReader(ios, CDM.utf8Charset)); // read mnemonic table Matcher m; // read header info and disregard while (true) { String line = dataIS.readLine(); if (line == null) throw new RuntimeException("Bad NCEP mnemonic BUFR table "); if (line.contains("MNEMONIC")) break; } // read mnemonic, number, and description //| HEADR | 362001 | TABLE D ENTRY - PROFILE COORDINATES | while (true) { String line = dataIS.readLine(); if (line == null) break; if (line.contains("MNEMONIC")) break; if (line.contains("----")) continue; if (line.startsWith("*")) continue; if (line.startsWith("| ")) continue; m = fields3.matcher(line); // depends on control dependency: [while], data = [none] if (m.find()) { String mnu = m.group(1).trim(); String fxy = m.group(2).trim(); if (fxy.startsWith("3")) { number.put(mnu, fxy); // depends on control dependency: [if], data = [none] desc.put(mnu, m.group(3).replace("TABLE D ENTRY - ", "").trim()); // depends on control dependency: [if], data = [none] } else if (fxy.startsWith("0")) { number.put(mnu, fxy); // depends on control dependency: [if], data = [none] desc.put(mnu, m.group(3).replace("TABLE B ENTRY - ", "").trim()); // depends on control dependency: [if], data = [none] } else if (fxy.startsWith("A")) { number.put(mnu, fxy); // depends on control dependency: [if], data = [none] desc.put(mnu, m.group(3).replace("TABLE A ENTRY - ", "").trim()); // depends on control dependency: [if], data = [none] } } else if 
(debugTable) { System.out.println("bad mnemonic, number, and description: " + line); // depends on control dependency: [if], data = [none] } } // read in sequences using mnemonics //| ETACLS1 | HEADR {PROFILE} SURF FLUX HYDR D10M {SLYR} XTRA | while (true) { String line = dataIS.readLine(); if (line == null) break; if (line.contains("MNEMONIC")) break; if (line.contains("----")) continue; if (line.startsWith("| ")) continue; if (line.startsWith("*")) continue; m = fields2.matcher(line); // depends on control dependency: [while], data = [none] if (m.find()) { String mnu = m.group(1).trim(); if (mnseq.containsKey(mnu)) { // concat lines with same mnu String value = mnseq.get(mnu); value = value + " " + m.group(2); // depends on control dependency: [if], data = [none] mnseq.put(mnu, value); // depends on control dependency: [if], data = [none] } else { mnseq.put(mnu, m.group(2)); // depends on control dependency: [if], data = [none] } } else if (debugTable) { System.out.println("bad sequence mnemonic: " + line); // depends on control dependency: [if], data = [none] } } // create sequences, replacing mnemonics with numbers for (Map.Entry<String, String> ent : mnseq.entrySet()) { String seq = ent.getValue(); seq = seq.replaceAll("\\<", "1-1-0 0-31-0 "); // depends on control dependency: [for], data = [none] seq = seq.replaceAll("\\>", ""); // depends on control dependency: [for], data = [none] seq = seq.replaceAll("\\{", "1-1-0 0-31-1 "); // depends on control dependency: [for], data = [none] seq = seq.replaceAll("\\}", ""); // depends on control dependency: [for], data = [none] seq = seq.replaceAll("\\(", "1-1-0 0-31-2 "); seq = seq.replaceAll("\\)", ""); // depends on control dependency: [for], data = [none] StringTokenizer stoke = new StringTokenizer(seq, " "); List<Short> list = new ArrayList<>(); while (stoke.hasMoreTokens()) { String mn = stoke.nextToken(); if (mn.charAt(1) == '-') { list.add(Descriptor.getFxy(mn)); // depends on control dependency: [if], data = 
[none] continue; } // element descriptor needs hyphens m = ints6.matcher(mn); // depends on control dependency: [while], data = [none] if (m.find()) { String F = mn.substring(0, 1); String X = removeLeading0(mn.substring(1, 3)); String Y = removeLeading0(mn.substring(3)); list.add(Descriptor.getFxy(F + "-" + X + "-" + Y)); // depends on control dependency: [if], data = [none] continue; } if (mn.startsWith("\"")) { int idx = mn.lastIndexOf('"'); String count = mn.substring(idx + 1); list.add(Descriptor.getFxy("1-1-" + count)); // depends on control dependency: [if], data = [none] mn = mn.substring(1, idx); // depends on control dependency: [if], data = [none] } if (mn.startsWith(".")) { String des = mn.substring(mn.length() - 4); mn = mn.replace(des, "...."); // depends on control dependency: [if], data = [none] } String fxy = number.get(mn); String F = fxy.substring(0, 1); String X = removeLeading0(fxy.substring(1, 3)); String Y = removeLeading0(fxy.substring(3)); list.add(Descriptor.getFxy(F + "-" + X + "-" + Y)); // depends on control dependency: [while], data = [none] } String fxy = number.get(ent.getKey()); String X = removeLeading0(fxy.substring(1, 3)); String Y = removeLeading0(fxy.substring(3)); // these are in latest tables if (XlocalCutoff > Integer.parseInt(X) && YlocalCutoff > Integer.parseInt(Y)) continue; //key = F + "-" + X + "-" + Y; short seqX = Short.parseShort(X.trim()); short seqY = Short.parseShort(Y.trim()); tables.d.addDescriptor(seqX, seqY, ent.getKey(), list); // depends on control dependency: [for], data = [ent] //short id = Descriptor.getFxy(key); //sequences.put(Short.valueOf(id), tableD); } // add some static repetition sequences // LOOK why? 
List<Short> list = new ArrayList<>(); // 16 bit delayed repetition list.add(Descriptor.getFxy("1-1-0")); list.add(Descriptor.getFxy("0-31-2")); tables.d.addDescriptor((short) 60, (short) 1, "", list); //tableD = new DescriptorTableD("", "3-60-1", list, false); //tableD.put( "3-60-1", d); //short id = Descriptor.getFxy("3-60-1"); //sequences.put(Short.valueOf(id), tableD); list = new ArrayList<>(); // 8 bit delayed repetition list.add(Descriptor.getFxy("1-1-0")); list.add(Descriptor.getFxy("0-31-1")); tables.d.addDescriptor((short) 60, (short) 2, "", list); //tableD = new DescriptorTableD("", "3-60-2", list, false); //tableD.put( "3-60-2", d); //id = Descriptor.getFxy("3-60-2"); //sequences.put(Short.valueOf(id), tableD); list = new ArrayList<>(); // 8 bit delayed repetition list.add(Descriptor.getFxy("1-1-0")); list.add(Descriptor.getFxy("0-31-1")); tables.d.addDescriptor((short) 60, (short) 3, "", list); //tableD = new DescriptorTableD("", "3-60-3", list, false); //tableD.put( "3-60-3", d); //id = Descriptor.getFxy("3-60-3"); //sequences.put(Short.valueOf(id), tableD); list = new ArrayList<>(); // 1 bit delayed repetition list.add(Descriptor.getFxy("1-1-0")); list.add(Descriptor.getFxy("0-31-0")); tables.d.addDescriptor((short) 60, (short) 4, "", list); //tableD = new DescriptorTableD("", "3-60-4", list, false); //tableD.put( "3-60-4", d); //id = Descriptor.getFxy("3-60-4"); //sequences.put(Short.valueOf(id), tableD); // add in element descriptors // MNEMONIC | SCAL | REFERENCE | BIT | UNITS //| FTIM | 0 | 0 | 24 | SECONDS |-------------| //tableB = new TableB(tablename, tablename); while (true) { String line = dataIS.readLine(); if (line == null) break; if (line.contains("MNEMONIC")) break; if (line.startsWith("| ")) continue; if (line.startsWith("*")) continue; m = fields5.matcher(line); // depends on control dependency: [while], data = [none] if (m.find()) { if (m.group(1).equals("")) { continue; } else if (number.containsKey(m.group(1).trim())) { // add 
descriptor to tableB String fxy = number.get(m.group(1).trim()); String X = fxy.substring(1, 3); String Y = fxy.substring(3); String mnu = m.group(1).trim(); String descr = desc.get(mnu); short x = Short.parseShort(X.trim()); short y = Short.parseShort(Y.trim()); // these are in latest tables so skip LOOK WHY if (XlocalCutoff > x && YlocalCutoff > y) continue; int scale = Integer.parseInt(m.group(2).trim()); int refVal = Integer.parseInt(m.group(3).trim()); int width = Integer.parseInt(m.group(4).trim()); String units = m.group(5).trim(); tables.b.addDescriptor(x, y, scale, refVal, width, mnu, units, descr); // depends on control dependency: [if], data = [none] } else if (debugTable) { System.out.println("bad element descriptors: " + line); // depends on control dependency: [if], data = [none] } } } } finally { ios.close(); } // LOOK why ? // default for NCEP // 0; 63; 0; 0; 0; 16; Numeric; Byte count tables.b.addDescriptor((short) 63, (short) 0, 0, 0, 16, "Byte count", "Numeric", null); return true; } }
public class class_name { public static UnicodeUtil getInstance() { if (instance == null) { synchronized (UnicodeUtilImpl.class) { if (instance == null) { new UnicodeUtilImpl().initialize(); } } } return instance; } }
public class class_name { public static UnicodeUtil getInstance() { if (instance == null) { synchronized (UnicodeUtilImpl.class) { // depends on control dependency: [if], data = [none] if (instance == null) { new UnicodeUtilImpl().initialize(); // depends on control dependency: [if], data = [none] } } } return instance; } }
public class class_name { public static void quoteOnly(Appendable buffer, String input) { if (input == null) return; try { buffer.append('"'); for (int i = 0; i < input.length(); ++i) { char c = input.charAt(i); if (c == '"' || c == '\\') buffer.append('\\'); buffer.append(c); } buffer.append('"'); } catch (IOException x) { throw new RuntimeException(x); } } }
public class class_name { public static void quoteOnly(Appendable buffer, String input) { if (input == null) return; try { buffer.append('"'); // depends on control dependency: [try], data = [none] for (int i = 0; i < input.length(); ++i) { char c = input.charAt(i); if (c == '"' || c == '\\') buffer.append('\\'); buffer.append(c); // depends on control dependency: [for], data = [none] } buffer.append('"'); // depends on control dependency: [try], data = [none] } catch (IOException x) { throw new RuntimeException(x); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public List<Entity> run(String parentKeyName) { Datastore datastore = transaction.getDatastore(); // [START run] KeyFactory keyFactory = datastore.newKeyFactory().setKind("ParentKind"); Key parentKey = keyFactory.newKey(parentKeyName); // Build a query Query<Entity> query = Query.newEntityQueryBuilder() .setKind("MyKind") .setFilter(PropertyFilter.hasAncestor(parentKey)) .build(); QueryResults<Entity> results = transaction.run(query); List<Entity> entities = Lists.newArrayList(); while (results.hasNext()) { Entity result = results.next(); // do something with result entities.add(result); } transaction.commit(); // [END run] return entities; } }
public class class_name { public List<Entity> run(String parentKeyName) { Datastore datastore = transaction.getDatastore(); // [START run] KeyFactory keyFactory = datastore.newKeyFactory().setKind("ParentKind"); Key parentKey = keyFactory.newKey(parentKeyName); // Build a query Query<Entity> query = Query.newEntityQueryBuilder() .setKind("MyKind") .setFilter(PropertyFilter.hasAncestor(parentKey)) .build(); QueryResults<Entity> results = transaction.run(query); List<Entity> entities = Lists.newArrayList(); while (results.hasNext()) { Entity result = results.next(); // do something with result entities.add(result); // depends on control dependency: [while], data = [none] } transaction.commit(); // [END run] return entities; } }
public class class_name { @SafeVarargs public final TreeNode<T> attach(final T... children) { for (T child : children) { attach(TreeNode.of(child)); } return this; } }
public class class_name { @SafeVarargs public final TreeNode<T> attach(final T... children) { for (T child : children) { attach(TreeNode.of(child)); // depends on control dependency: [for], data = [child] } return this; } }
public class class_name { public synchronized I_CmsSearchDocument getDocument(String fieldname, String term, String[] fls) { try { SolrQuery query = new SolrQuery(); if (CmsSearchField.FIELD_PATH.equals(fieldname)) { query.setQuery(fieldname + ":\"" + term + "\""); } else { query.setQuery(fieldname + ":" + term); } query.addFilterQuery("{!collapse field=" + fieldname + "}"); if (null != fls) { query.setFields(fls); } QueryResponse res = m_solr.query(query); if (res != null) { SolrDocumentList sdl = m_solr.query(query).getResults(); if ((sdl.getNumFound() > 0L) && (sdl.get(0) != null)) { return new CmsSolrDocument(sdl.get(0)); } } } catch (Exception e) { // ignore and assume that the document could not be found LOG.error(e.getMessage(), e); } return null; } }
public class class_name { public synchronized I_CmsSearchDocument getDocument(String fieldname, String term, String[] fls) { try { SolrQuery query = new SolrQuery(); if (CmsSearchField.FIELD_PATH.equals(fieldname)) { query.setQuery(fieldname + ":\"" + term + "\""); // depends on control dependency: [if], data = [none] } else { query.setQuery(fieldname + ":" + term); // depends on control dependency: [if], data = [none] } query.addFilterQuery("{!collapse field=" + fieldname + "}"); // depends on control dependency: [try], data = [none] if (null != fls) { query.setFields(fls); // depends on control dependency: [if], data = [fls)] } QueryResponse res = m_solr.query(query); if (res != null) { SolrDocumentList sdl = m_solr.query(query).getResults(); if ((sdl.getNumFound() > 0L) && (sdl.get(0) != null)) { return new CmsSolrDocument(sdl.get(0)); // depends on control dependency: [if], data = [none] } } } catch (Exception e) { // ignore and assume that the document could not be found LOG.error(e.getMessage(), e); } // depends on control dependency: [catch], data = [none] return null; } }
public class class_name { @Override public StatusResult get(@Nonnull Run<?, ?> run, @Nonnull TaskListener listener) throws IOException, InterruptedException { for (ConditionalResult conditionalResult : getResults()) { if (conditionalResult.matches(run)) { return new StatusResult( defaultIfNull(EnumUtils.getEnum(GHCommitState.class, conditionalResult.getState()), ERROR), new ExpandableMessage(conditionalResult.getMessage()).expandAll(run, listener) ); } } return new StatusResult( PENDING, new ExpandableMessage("Can't define which status to set").expandAll(run, listener) ); } }
public class class_name { @Override public StatusResult get(@Nonnull Run<?, ?> run, @Nonnull TaskListener listener) throws IOException, InterruptedException { for (ConditionalResult conditionalResult : getResults()) { if (conditionalResult.matches(run)) { return new StatusResult( defaultIfNull(EnumUtils.getEnum(GHCommitState.class, conditionalResult.getState()), ERROR), new ExpandableMessage(conditionalResult.getMessage()).expandAll(run, listener) ); // depends on control dependency: [if], data = [none] } } return new StatusResult( PENDING, new ExpandableMessage("Can't define which status to set").expandAll(run, listener) ); } }
public class class_name { public GridCoverage2D buildRaster() { if (makeNew) { GridCoverage2D coverage = buildCoverage("raster", newWR, regionMap, crs); return coverage; } else { throw new RuntimeException("The raster is readonly, so no new raster can be built."); } } }
public class class_name { public GridCoverage2D buildRaster() { if (makeNew) { GridCoverage2D coverage = buildCoverage("raster", newWR, regionMap, crs); return coverage; // depends on control dependency: [if], data = [none] } else { throw new RuntimeException("The raster is readonly, so no new raster can be built."); } } }
public class class_name { public void marshall(CreateFleetRequest createFleetRequest, ProtocolMarshaller protocolMarshaller) { if (createFleetRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(createFleetRequest.getName(), NAME_BINDING); protocolMarshaller.marshall(createFleetRequest.getDescription(), DESCRIPTION_BINDING); protocolMarshaller.marshall(createFleetRequest.getBuildId(), BUILDID_BINDING); protocolMarshaller.marshall(createFleetRequest.getScriptId(), SCRIPTID_BINDING); protocolMarshaller.marshall(createFleetRequest.getServerLaunchPath(), SERVERLAUNCHPATH_BINDING); protocolMarshaller.marshall(createFleetRequest.getServerLaunchParameters(), SERVERLAUNCHPARAMETERS_BINDING); protocolMarshaller.marshall(createFleetRequest.getLogPaths(), LOGPATHS_BINDING); protocolMarshaller.marshall(createFleetRequest.getEC2InstanceType(), EC2INSTANCETYPE_BINDING); protocolMarshaller.marshall(createFleetRequest.getEC2InboundPermissions(), EC2INBOUNDPERMISSIONS_BINDING); protocolMarshaller.marshall(createFleetRequest.getNewGameSessionProtectionPolicy(), NEWGAMESESSIONPROTECTIONPOLICY_BINDING); protocolMarshaller.marshall(createFleetRequest.getRuntimeConfiguration(), RUNTIMECONFIGURATION_BINDING); protocolMarshaller.marshall(createFleetRequest.getResourceCreationLimitPolicy(), RESOURCECREATIONLIMITPOLICY_BINDING); protocolMarshaller.marshall(createFleetRequest.getMetricGroups(), METRICGROUPS_BINDING); protocolMarshaller.marshall(createFleetRequest.getPeerVpcAwsAccountId(), PEERVPCAWSACCOUNTID_BINDING); protocolMarshaller.marshall(createFleetRequest.getPeerVpcId(), PEERVPCID_BINDING); protocolMarshaller.marshall(createFleetRequest.getFleetType(), FLEETTYPE_BINDING); protocolMarshaller.marshall(createFleetRequest.getInstanceRoleArn(), INSTANCEROLEARN_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(CreateFleetRequest createFleetRequest, ProtocolMarshaller protocolMarshaller) { if (createFleetRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(createFleetRequest.getName(), NAME_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createFleetRequest.getDescription(), DESCRIPTION_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createFleetRequest.getBuildId(), BUILDID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createFleetRequest.getScriptId(), SCRIPTID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createFleetRequest.getServerLaunchPath(), SERVERLAUNCHPATH_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createFleetRequest.getServerLaunchParameters(), SERVERLAUNCHPARAMETERS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createFleetRequest.getLogPaths(), LOGPATHS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createFleetRequest.getEC2InstanceType(), EC2INSTANCETYPE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createFleetRequest.getEC2InboundPermissions(), EC2INBOUNDPERMISSIONS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createFleetRequest.getNewGameSessionProtectionPolicy(), NEWGAMESESSIONPROTECTIONPOLICY_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createFleetRequest.getRuntimeConfiguration(), RUNTIMECONFIGURATION_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createFleetRequest.getResourceCreationLimitPolicy(), 
RESOURCECREATIONLIMITPOLICY_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createFleetRequest.getMetricGroups(), METRICGROUPS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createFleetRequest.getPeerVpcAwsAccountId(), PEERVPCAWSACCOUNTID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createFleetRequest.getPeerVpcId(), PEERVPCID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createFleetRequest.getFleetType(), FLEETTYPE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createFleetRequest.getInstanceRoleArn(), INSTANCEROLEARN_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void execute( EnforcerRuleHelper theHelper ) throws EnforcerRuleException { List<String> missingProfiles = new ArrayList<String>(); try { MavenProject project = (MavenProject) theHelper.evaluate( "${project}" ); if ( StringUtils.isNotEmpty( profiles ) ) { String[] profs = profiles.split( "," ); for ( String profile : profs ) { if ( !isProfileActive( project, profile ) ) { missingProfiles.add( profile ); } } boolean fail = false; if ( !missingProfiles.isEmpty() ) { fail = true; // if (all && missingProfiles.size() != profs.length) // { // fail = true; // } // else // { // if (!all && missingProfiles.size() >= (profs.length -1)) // { // fail = true; // } // } } if ( fail ) { String message = getMessage(); StringBuilder buf = new StringBuilder(); if ( message != null ) { buf.append( message + "\n" ); } for ( String profile : missingProfiles ) { buf.append( "Profile \"" + profile + "\" is not activated.\n" ); } throw new EnforcerRuleException( buf.toString() ); } } } catch ( ExpressionEvaluationException e ) { throw new EnforcerRuleException( "Unable to retrieve the project.", e ); } } }
public class class_name { public void execute( EnforcerRuleHelper theHelper ) throws EnforcerRuleException { List<String> missingProfiles = new ArrayList<String>(); try { MavenProject project = (MavenProject) theHelper.evaluate( "${project}" ); if ( StringUtils.isNotEmpty( profiles ) ) { String[] profs = profiles.split( "," ); for ( String profile : profs ) { if ( !isProfileActive( project, profile ) ) { missingProfiles.add( profile ); // depends on control dependency: [if], data = [none] } } boolean fail = false; if ( !missingProfiles.isEmpty() ) { fail = true; // depends on control dependency: [if], data = [none] // if (all && missingProfiles.size() != profs.length) // { // fail = true; // } // else // { // if (!all && missingProfiles.size() >= (profs.length -1)) // { // fail = true; // } // } } if ( fail ) { String message = getMessage(); StringBuilder buf = new StringBuilder(); if ( message != null ) { buf.append( message + "\n" ); // depends on control dependency: [if], data = [( message] } for ( String profile : missingProfiles ) { buf.append( "Profile \"" + profile + "\" is not activated.\n" ); // depends on control dependency: [for], data = [profile] } throw new EnforcerRuleException( buf.toString() ); } } } catch ( ExpressionEvaluationException e ) { throw new EnforcerRuleException( "Unable to retrieve the project.", e ); } } }
public class class_name { public void run() { while( running ) { long interval_end = System.currentTimeMillis() + interval; while( interval_end > System.currentTimeMillis() ) { Thread.yield(); } fireIntervalElapsed(); } running = false; } }
public class class_name { public void run() { while( running ) { long interval_end = System.currentTimeMillis() + interval; while( interval_end > System.currentTimeMillis() ) { Thread.yield(); // depends on control dependency: [while], data = [none] } fireIntervalElapsed(); // depends on control dependency: [while], data = [none] } running = false; } }