code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name { public static boolean createBundleJar ( File target, TileSetBundle bundle, ImageProvider improv, String imageBase, boolean keepOriginalPngs, boolean uncompressed) throws IOException { // now we have to create the actual bundle file FileOutputStream fout = new FileOutputStream(target); Manifest manifest = new Manifest(); JarOutputStream jar = new JarOutputStream(fout, manifest); jar.setLevel(uncompressed ? Deflater.NO_COMPRESSION : Deflater.BEST_COMPRESSION); try { // write all of the image files to the bundle, converting the // tilesets to trimmed tilesets in the process Iterator<Integer> iditer = bundle.enumerateTileSetIds(); // Store off the updated TileSets in a separate Map so we can wait to change the // bundle till we're done iterating. HashIntMap<TileSet> toUpdate = new HashIntMap<TileSet>(); while (iditer.hasNext()) { int tileSetId = iditer.next().intValue(); TileSet set = bundle.getTileSet(tileSetId); String imagePath = set.getImagePath(); // sanity checks if (imagePath == null) { log.warning("Tileset contains no image path " + "[set=" + set + "]. It ain't gonna work."); continue; } // if this is an object tileset, trim it if (!keepOriginalPngs && (set instanceof ObjectTileSet)) { // set the tileset up with an image provider; we // need to do this so that we can trim it! 
set.setImageProvider(improv); // we're going to trim it, so adjust the path imagePath = adjustImagePath(imagePath); jar.putNextEntry(new JarEntry(imagePath)); try { // create a trimmed object tileset, which will // write the trimmed tileset image to the jar // output stream TrimmedObjectTileSet tset = TrimmedObjectTileSet.trimObjectTileSet( (ObjectTileSet)set, jar); tset.setImagePath(imagePath); // replace the original set with the trimmed // tileset in the tileset bundle toUpdate.put(tileSetId, tset); } catch (Exception e) { e.printStackTrace(System.err); String msg = "Error adding tileset to bundle " + imagePath + ", " + set.getName() + ": " + e; throw (IOException) new IOException(msg).initCause(e); } } else { // read the image file and convert it to our custom // format in the bundle File ifile = new File(imageBase, imagePath); try { BufferedImage image = ImageIO.read(ifile); if (!keepOriginalPngs && FastImageIO.canWrite(image)) { imagePath = adjustImagePath(imagePath); jar.putNextEntry(new JarEntry(imagePath)); set.setImagePath(imagePath); FastImageIO.write(image, jar); } else { jar.putNextEntry(new JarEntry(imagePath)); FileInputStream imgin = new FileInputStream(ifile); StreamUtil.copy(imgin, jar); } } catch (Exception e) { String msg = "Failure bundling image " + ifile + ": " + e; throw (IOException) new IOException(msg).initCause(e); } } } bundle.putAll(toUpdate); // now write a serialized representation of the tileset bundle // object to the bundle jar file JarEntry entry = new JarEntry(BundleUtil.METADATA_PATH); jar.putNextEntry(entry); ObjectOutputStream oout = new ObjectOutputStream(jar); oout.writeObject(bundle); oout.flush(); // finally close up the jar file and call ourself done jar.close(); return true; } catch (Exception e) { // remove the incomplete jar file and rethrow the exception jar.close(); if (!target.delete()) { log.warning("Failed to close botched bundle '" + target + "'."); } String errmsg = "Failed to create bundle " + target + ": " + 
e; throw (IOException) new IOException(errmsg).initCause(e); } } }
public class class_name { public static boolean createBundleJar ( File target, TileSetBundle bundle, ImageProvider improv, String imageBase, boolean keepOriginalPngs, boolean uncompressed) throws IOException { // now we have to create the actual bundle file FileOutputStream fout = new FileOutputStream(target); Manifest manifest = new Manifest(); JarOutputStream jar = new JarOutputStream(fout, manifest); jar.setLevel(uncompressed ? Deflater.NO_COMPRESSION : Deflater.BEST_COMPRESSION); try { // write all of the image files to the bundle, converting the // tilesets to trimmed tilesets in the process Iterator<Integer> iditer = bundle.enumerateTileSetIds(); // Store off the updated TileSets in a separate Map so we can wait to change the // bundle till we're done iterating. HashIntMap<TileSet> toUpdate = new HashIntMap<TileSet>(); while (iditer.hasNext()) { int tileSetId = iditer.next().intValue(); TileSet set = bundle.getTileSet(tileSetId); String imagePath = set.getImagePath(); // sanity checks if (imagePath == null) { log.warning("Tileset contains no image path " + "[set=" + set + "]. It ain't gonna work."); continue; } // if this is an object tileset, trim it if (!keepOriginalPngs && (set instanceof ObjectTileSet)) { // set the tileset up with an image provider; we // need to do this so that we can trim it! 
set.setImageProvider(improv); // we're going to trim it, so adjust the path imagePath = adjustImagePath(imagePath); // depends on control dependency: [if], data = [none] jar.putNextEntry(new JarEntry(imagePath)); // depends on control dependency: [if], data = [(imagePath] try { // create a trimmed object tileset, which will // write the trimmed tileset image to the jar // output stream TrimmedObjectTileSet tset = TrimmedObjectTileSet.trimObjectTileSet( (ObjectTileSet)set, jar); tset.setImagePath(imagePath); // depends on control dependency: [try], data = [none] // replace the original set with the trimmed // tileset in the tileset bundle toUpdate.put(tileSetId, tset); // depends on control dependency: [try], data = [none] } catch (Exception e) { e.printStackTrace(System.err); String msg = "Error adding tileset to bundle " + imagePath + ", " + set.getName() + ": " + e; throw (IOException) new IOException(msg).initCause(e); } // depends on control dependency: [catch], data = [none] } else { // read the image file and convert it to our custom // format in the bundle File ifile = new File(imageBase, imagePath); try { BufferedImage image = ImageIO.read(ifile); if (!keepOriginalPngs && FastImageIO.canWrite(image)) { imagePath = adjustImagePath(imagePath); // depends on control dependency: [if], data = [none] jar.putNextEntry(new JarEntry(imagePath)); // depends on control dependency: [if], data = [none] set.setImagePath(imagePath); // depends on control dependency: [if], data = [none] FastImageIO.write(image, jar); // depends on control dependency: [if], data = [none] } else { jar.putNextEntry(new JarEntry(imagePath)); // depends on control dependency: [if], data = [none] FileInputStream imgin = new FileInputStream(ifile); StreamUtil.copy(imgin, jar); // depends on control dependency: [if], data = [none] } } catch (Exception e) { String msg = "Failure bundling image " + ifile + ": " + e; throw (IOException) new IOException(msg).initCause(e); } // depends on control 
dependency: [catch], data = [none] } } bundle.putAll(toUpdate); // now write a serialized representation of the tileset bundle // object to the bundle jar file JarEntry entry = new JarEntry(BundleUtil.METADATA_PATH); jar.putNextEntry(entry); ObjectOutputStream oout = new ObjectOutputStream(jar); oout.writeObject(bundle); oout.flush(); // finally close up the jar file and call ourself done jar.close(); return true; } catch (Exception e) { // remove the incomplete jar file and rethrow the exception jar.close(); if (!target.delete()) { log.warning("Failed to close botched bundle '" + target + "'."); // depends on control dependency: [if], data = [none] } String errmsg = "Failed to create bundle " + target + ": " + e; throw (IOException) new IOException(errmsg).initCause(e); } } }
public class class_name {
    /**
     * Renders this compound expression as a parenthesized infix string:
     * the leading sub-expression followed by each operator/operand pair,
     * e.g. {@code (a + b - c)}.
     *
     * @return the parenthesized expression string.
     */
    public String getExpressionString() {
        // StringBuilder instead of StringBuffer: the buffer is method-local,
        // so the synchronization StringBuffer provides is pure overhead.
        StringBuilder buf = new StringBuilder();
        buf.append("(");
        buf.append(mExpression.getExpressionString());
        // mOperators and mExpressions are parallel lists: operator i joins
        // the running expression to operand i.
        for (int i = 0; i < mOperators.size(); i++) {
            BinaryOperator operator = (BinaryOperator) mOperators.get(i);
            Expression expression = (Expression) mExpressions.get(i);
            buf.append(" ");
            buf.append(operator.getOperatorSymbol());
            buf.append(" ");
            buf.append(expression.getExpressionString());
        }
        buf.append(")");
        return buf.toString();
    }
}
public class class_name { public String getExpressionString() { StringBuffer buf = new StringBuffer(); buf.append("("); buf.append(mExpression.getExpressionString()); for (int i = 0; i < mOperators.size(); i++) { BinaryOperator operator = (BinaryOperator) mOperators.get(i); Expression expression = (Expression) mExpressions.get(i); buf.append(" "); // depends on control dependency: [for], data = [none] buf.append(operator.getOperatorSymbol()); // depends on control dependency: [for], data = [none] buf.append(" "); // depends on control dependency: [for], data = [none] buf.append(expression.getExpressionString()); // depends on control dependency: [for], data = [none] } buf.append(")"); return buf.toString(); } }
public class class_name {
    /**
     * Returns the component tree for this node, creating it lazily on first
     * access. NOTE(review): not synchronized — presumably callers guarantee
     * single-threaded access during tree construction; confirm.
     *
     * @return the (possibly just created) component tree.
     */
    ComponentTree tree() {
        // Fast path: tree already built.
        if (tree != null) {
            return tree;
        }
        // Build complete tree before assigning it, so the field never exposes
        // a half-initialized tree.
        ComponentTree newTree = new ComponentTree(this);
        newTree.setEventPipeline(new BufferingEventPipeline(newTree));
        tree = newTree;
        // Announce attachment only after the field is set, so handlers that
        // call back into tree() see the new tree.
        fire(new Attached(component(), null), channel());
        return tree;
    }
}
public class class_name { ComponentTree tree() { if (tree != null) { return tree; // depends on control dependency: [if], data = [none] } // Build complete tree before assigning it. ComponentTree newTree = new ComponentTree(this); newTree.setEventPipeline(new BufferingEventPipeline(newTree)); tree = newTree; fire(new Attached(component(), null), channel()); return tree; } }
public class class_name {
    /**
     * Looks up the consumer session with the given id, enforcing that a
     * bifurcated consumer may only attach when bus security permits it.
     * When the session belongs to a different connection and bus security is
     * enabled, the security subjects of both connections are compared; on any
     * mismatch the failure is audited and SINotAuthorizedException is thrown.
     *
     * @param id the internal consumer session id to locate.
     * @return the matching session, or the value returned by the message
     *         processor (possibly null) when no id match is found.
     * @throws SINotAuthorizedException if the session's connection subject
     *         does not match this connection's subject.
     */
    private ConsumerSessionImpl findConsumerSession(long id) throws SINotAuthorizedException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "findConsumerSession", Long.valueOf(id));

        ConsumerSessionImpl sessionInternal = _messageProcessor.getConsumer(id);

        if (sessionInternal != null) {
            // Check if this is the right Consumer.
            if (sessionInternal.getIdInternal() == id) {
                if (sessionInternal.getConnectionInternal() != this) {
                    // Defect 346001. Check that the subjects match, but only if
                    // bus security is enabled.
                    if (_isBusSecure) {
                        // get the bus security interface in order to make audit calls
                        AuthUtils sibAuthUtils = _messageProcessor.getAuthorisationUtils();
                        // get the name of the destination the consumer is
                        // connected to (used for auditing)
                        String destinationName = sessionInternal.getDestinationAddress().getDestinationName();

                        // Check that the subjects match.
                        Subject connsSubject = ((ConnectionImpl) sessionInternal.getConnectionInternal()).getSecuritySubject();
                        if (connsSubject != null) {
                            boolean subjectsDiffer = false;

                            // Case 1: other connection has a subject but ours is null.
                            if (_subject == null) {
                                subjectsDiffer = true;
                                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                    SibTr.debug(tc, "Subjects differ - base subject is null");
                            }

                            // Case 2: we are the privileged SIBServerSubject —
                            // the other connection must be equally privileged.
                            if (!subjectsDiffer && isSIBServerSubject()) {
                                if (!_messageProcessor.getAuthorisationUtils().isSIBServerSubject(connsSubject)) {
                                    subjectsDiffer = true;
                                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                        SibTr.debug(tc, "Subjects differ - base subject is privileged");
                                }
                            }

                            // Case 3: compare the resolved string userids.
                            if (!subjectsDiffer) {
                                String resolvedConnUserid = _messageProcessor.getAuthorisationUtils().getUserName(connsSubject);
                                String resolvedUserid = _messageProcessor.getAuthorisationUtils().getUserName(_subject);
                                // NB Sib.security ensures that resolved userids are non-null.
                                if (!resolvedConnUserid.equals(resolvedUserid)) {
                                    subjectsDiffer = true;
                                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                                        SibTr.debug(tc, "Subjects differ by userid - base " + resolvedUserid +
                                                        ", connected " + resolvedConnUserid);
                                }
                            }

                            // Any mismatch: audit, trace, and reject.
                            if (subjectsDiffer) {
                                // audit the authorization failure
                                sibAuthUtils.createBifurcatedConsumerSessionAuthorizationFailed(connsSubject, destinationName, id);
                                if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                                    SibTr.exit(tc, "findConsumerSession", "SINotAuthorizedException - not same");
                                throw new SINotAuthorizedException(
                                    nls.getFormattedMessage(
                                        "CREATE_BIFURCATED_CONSUMER_ERROR_CWSIP0094",
                                        new Object[] { Long.valueOf(id), _messageProcessor.getMessagingEngineName() },
                                        null));
                            }
                        } else if (_subject != null) {
                            // Other connection has no subject while ours does:
                            // also a mismatch — audit and reject.
                            sibAuthUtils.createBifurcatedConsumerSessionAuthorizationFailed(connsSubject, destinationName, id);
                            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                                SibTr.exit(tc, "findConsumerSession", "SINotAuthorizedException - null");
                            throw new SINotAuthorizedException(
                                nls.getFormattedMessage(
                                    "CREATE_BIFURCATED_CONSUMER_ERROR_CWSIP0094",
                                    new Object[] { Long.valueOf(id), _messageProcessor.getMessagingEngineName() },
                                    null));
                        }
                    }
                }
            }
        }

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "findConsumerSession", sessionInternal);
        return sessionInternal;
    }
}
public class class_name { private ConsumerSessionImpl findConsumerSession(long id) throws SINotAuthorizedException { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "findConsumerSession", Long.valueOf(id)); ConsumerSessionImpl sessionInternal = _messageProcessor.getConsumer(id); if (sessionInternal != null) { // Check if this is the right Consumer. if (sessionInternal.getIdInternal() == id) { if (sessionInternal.getConnectionInternal() != this) { // Defect 346001. Check that the subjects match, but only if bus security is enabled if (_isBusSecure) { // get the bus security interface in order to make audit calls AuthUtils sibAuthUtils = _messageProcessor.getAuthorisationUtils(); // get the name of the destination the consumer is connected to (used for auditing) String destinationName = sessionInternal.getDestinationAddress().getDestinationName(); // Check that the subjects match. Subject connsSubject = ((ConnectionImpl) sessionInternal.getConnectionInternal()).getSecuritySubject(); if (connsSubject != null) { boolean subjectsDiffer = false; // Check null subject if (_subject == null) { subjectsDiffer = true; // depends on control dependency: [if], data = [none] if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(tc, "Subjects differ - base subject is null"); } // Check for privileged SIBServerSubject if (!subjectsDiffer && isSIBServerSubject()) { // Connected user is privileged SIBServerSubject, check that the // bifurcated session has same degree of privilege if (!_messageProcessor.getAuthorisationUtils().isSIBServerSubject(connsSubject)) { subjectsDiffer = true; // depends on control dependency: [if], data = [none] if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(tc, "Subjects differ - base subject is privileged"); } } if (!subjectsDiffer) { // Compare the resolved string userids String resolvedConnUserid = _messageProcessor.getAuthorisationUtils().getUserName(connsSubject); 
String resolvedUserid = _messageProcessor.getAuthorisationUtils().getUserName(_subject); // NB Sib.security, ensures that resolved Userids are non-null if (!resolvedConnUserid.equals(resolvedUserid)) { subjectsDiffer = true; // depends on control dependency: [if], data = [none] if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(tc, "Subjects differ by userid - base " + resolvedUserid + ", connected " + resolvedConnUserid); } } // Throw exception if (subjectsDiffer) { // audit the authorization failure sibAuthUtils.createBifurcatedConsumerSessionAuthorizationFailed(connsSubject, destinationName, id); // depends on control dependency: [if], data = [none] if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "findConsumerSession", "SINotAuthorizedException - not same"); throw new SINotAuthorizedException( nls.getFormattedMessage( "CREATE_BIFURCATED_CONSUMER_ERROR_CWSIP0094", new Object[] { Long.valueOf(id), _messageProcessor.getMessagingEngineName() }, null)); } } else if (_subject != null) { // audit the authorization failure sibAuthUtils.createBifurcatedConsumerSessionAuthorizationFailed(connsSubject, destinationName, id); // depends on control dependency: [if], data = [none] if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "findConsumerSession", "SINotAuthorizedException - null"); throw new SINotAuthorizedException( nls.getFormattedMessage( "CREATE_BIFURCATED_CONSUMER_ERROR_CWSIP0094", new Object[] { Long.valueOf(id), _messageProcessor.getMessagingEngineName() }, null)); } } } } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "findConsumerSession", sessionInternal); return sessionInternal; } }
public class class_name {
    /**
     * Resolves the description for an MBean operation. The value from a
     * {@code @MBeanInfo} annotation on the backing method, when present,
     * overrides the description carried by the operation info itself.
     *
     * @param op the operation whose description is wanted.
     * @return the annotation-supplied description, or the operation's own.
     */
    @Override
    protected String getDescription(final MBeanOperationInfo op) {
        // Start from the operation's own description (queried first, matching
        // the original call order), then let the annotation override it.
        String result = op.getDescription();
        final Method method = getMethod(op);
        final MBeanInfo note = (method == null) ? null : method.getAnnotation(MBeanInfo.class);
        if (note != null) {
            result = note.value();
        }
        return result;
    }
}
public class class_name { @Override protected String getDescription(final MBeanOperationInfo op) { String descr = op.getDescription(); Method m = getMethod(op); if (m != null) { MBeanInfo d = m.getAnnotation(MBeanInfo.class); if (d != null) { descr = d.value(); // depends on control dependency: [if], data = [none] } } return descr; } }
public class class_name {
    /**
     * Loads the class with the given fully-qualified name.
     *
     * @param className the fully-qualified class name to load.
     * @return the loaded class, or {@code null} when the class cannot be
     *         found or fails bytecode verification.
     */
    @SuppressWarnings("unchecked")
    public static <T> Class<T> classForName(String className) {
        Class<T> result;
        try {
            result = (Class<T>) Class.forName(className);
        } catch (ClassNotFoundException | VerifyError e) {
            // Catching VerifyError fixes issue #147. I don't know how to unit test it.
            result = null;
        }
        return result;
    }
}
public class class_name { @SuppressWarnings("unchecked") public static <T> Class<T> classForName(String className) { try { return (Class<T>)Class.forName(className); // depends on control dependency: [try], data = [none] } catch (ClassNotFoundException | VerifyError e) { // Catching VerifyError fixes issue #147. I don't know how to unit test it. return null; } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Tells whether the given string contains at least one whitespace
     * character, as judged by {@link Character#isWhitespace(char)}.
     *
     * @param string the string to scan (must be non-null).
     * @return true if any character is whitespace, false otherwise
     *         (including for the empty string).
     */
    public static boolean containsWhitespace(final String string) {
        final int length = string.length();
        int index = 0;
        while (index < length) {
            if (Character.isWhitespace(string.charAt(index))) {
                return true;
            }
            index++;
        }
        return false;
    }
}
public class class_name { public static boolean containsWhitespace(final String string) { for (int i = 0; i < string.length(); i++) { if (Character.isWhitespace(string.charAt(i))) { return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name { public List<UrlCss3Value> getUrlCss3Values() { // for security improvements for urlCss3Values without // compromising performance. // TODO the drawback of this the returning of new object each time even // if the urlCss3Values array has not been modified, as a minor // performance // improvement, return a new object only if the urlCss3Values is // modified. if (urlCss3Values == null) { return null; } return Collections.unmodifiableList(Arrays.asList(urlCss3Values)); } }
public class class_name { public List<UrlCss3Value> getUrlCss3Values() { // for security improvements for urlCss3Values without // compromising performance. // TODO the drawback of this the returning of new object each time even // if the urlCss3Values array has not been modified, as a minor // performance // improvement, return a new object only if the urlCss3Values is // modified. if (urlCss3Values == null) { return null; // depends on control dependency: [if], data = [none] } return Collections.unmodifiableList(Arrays.asList(urlCss3Values)); } }
public class class_name {
    /**
     * Determines whether clause {@code c} is satisfied under the current
     * assignment: either it is already flagged satisfied, or some literal in
     * it evaluates to true.
     *
     * @param c the clause to test.
     * @return true if the clause is satisfied, false otherwise.
     */
    private boolean satisfied(final CLClause c) {
        // Fast path: the clause already carries the satisfied flag.
        if (c.satisfied()) {
            return true;
        }
        for (int i = 0; i < c.lits().size(); i++) {
            if (val(c.lits().get(i)) == VALUE_TRUE) {
                // Cache the result only at decision level 0, where the
                // assignment is permanent and the flag can never go stale.
                if (this.level == 0) {
                    c.setSatisfied(true);
                }
                return true;
            }
        }
        return false;
    }
}
public class class_name { private boolean satisfied(final CLClause c) { if (c.satisfied()) { return true; } // depends on control dependency: [if], data = [none] for (int i = 0; i < c.lits().size(); i++) { if (val(c.lits().get(i)) == VALUE_TRUE) { if (this.level == 0) { c.setSatisfied(true); } // depends on control dependency: [if], data = [none] return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name {
    /**
     * Mojo entry point: writes a .jad application descriptor for every
     * generated {@code NodeConfiguration} and attaches it to the project as a
     * classifier artifact. Other configuration kinds are skipped.
     *
     * @throws MojoExecutionException if writing a descriptor fails.
     */
    public void execute() throws MojoExecutionException {
        getLog().debug("starting packaging");
        // Configurations are produced earlier by ConfiguratorMojo and shared
        // through the plugin context. NOTE(review): assumes the key is always
        // populated — a missing entry would NPE in the for loop; confirm.
        AbstractConfiguration[] configurations= (AbstractConfiguration[]) getPluginContext().get(ConfiguratorMojo.GENERATED_CONFIGURATIONS_KEY);
        try {
            for(AbstractConfiguration configuration : configurations) {
                // Only node configurations get a descriptor.
                if(!(configuration instanceof NodeConfiguration)) {
                    continue;
                }
                // Classifier is the simple (unqualified) class name.
                String classifier= configuration.className.substring(configuration.className.lastIndexOf('.') + 1);
                File jarFile= checkJarFile(classifier);
                JavaApplicationDescriptor descriptor= getDescriptor();
                descriptor.setNodeConfiguration(configuration.className);
                descriptor.setJarFile(jarFile);
                File jadFile= getJadFile(classifier);
                getDescriptor().writeDescriptor(jadFile);
                // Attach the descriptor so it is installed/deployed alongside
                // the main artifact under the "jad" type.
                getProjectHelper().attachArtifact(getProject(), "jad", classifier, jadFile);
            }
        } catch (IOException ioe) {
            throw new MojoExecutionException("could not create .jad file", ioe);
        } catch (RuntimeException e) {
            throw new MojoExecutionException("could not create .jad file", e);
        }
        getLog().debug("finished packaging");
    }
}
public class class_name { public void execute() throws MojoExecutionException { getLog().debug("starting packaging"); AbstractConfiguration[] configurations= (AbstractConfiguration[]) getPluginContext().get(ConfiguratorMojo.GENERATED_CONFIGURATIONS_KEY); try { for(AbstractConfiguration configuration : configurations) { if(!(configuration instanceof NodeConfiguration)) { continue; } String classifier= configuration.className.substring(configuration.className.lastIndexOf('.') + 1); File jarFile= checkJarFile(classifier); JavaApplicationDescriptor descriptor= getDescriptor(); descriptor.setNodeConfiguration(configuration.className); // depends on control dependency: [for], data = [configuration] descriptor.setJarFile(jarFile); // depends on control dependency: [for], data = [none] File jadFile= getJadFile(classifier); getDescriptor().writeDescriptor(jadFile); // depends on control dependency: [for], data = [none] getProjectHelper().attachArtifact(getProject(), "jad", classifier, jadFile); // depends on control dependency: [for], data = [none] } } catch (IOException ioe) { throw new MojoExecutionException("could not create .jad file", ioe); } catch (RuntimeException e) { throw new MojoExecutionException("could not create .jad file", e); } getLog().debug("finished packaging"); } }
public class class_name {
    /**
     * Registers each named option as allowed, defaulting its value to false.
     * Options that are already present keep their current value
     * (putIfAbsent never overwrites).
     *
     * @param opts the option names to register.
     */
    void allowedOptions(String... opts) {
        for (int i = 0; i < opts.length; i++) {
            options.putIfAbsent(opts[i], false);
        }
    }
}
public class class_name { void allowedOptions(String... opts) { for (String opt : opts) { options.putIfAbsent(opt, false); // depends on control dependency: [for], data = [opt] } } }
public class class_name {
    /**
     * Sets the API key prefix on the first configured API-key authentication
     * scheme found among the registered authentications.
     *
     * @param apiKeyPrefix the prefix to apply (e.g. "Token").
     * @throws RuntimeException if no ApiKeyAuth scheme is configured.
     */
    public void setApiKeyPrefix(String apiKeyPrefix) {
        for (Authentication candidate : authentications.values()) {
            if (!(candidate instanceof ApiKeyAuth)) {
                continue;
            }
            ((ApiKeyAuth) candidate).setApiKeyPrefix(apiKeyPrefix);
            return;
        }
        throw new RuntimeException("No API key authentication configured!");
    }
}
public class class_name { public void setApiKeyPrefix(String apiKeyPrefix) { for (Authentication auth : authentications.values()) { if (auth instanceof ApiKeyAuth) { ((ApiKeyAuth) auth).setApiKeyPrefix(apiKeyPrefix); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } } throw new RuntimeException("No API key authentication configured!"); } }
public class class_name {
    /**
     * Copies headers from our internal request onto the HttpClient request,
     * setting Host explicitly and defaulting Content-Type when absent.
     *
     * @param httpRequest the HttpClient request being populated.
     * @param request the source request whose headers are copied.
     */
    private void addHeadersToRequest(HttpRequestBase httpRequest, Request<?> request) {
        httpRequest.addHeader(HttpHeaders.HOST, getHostHeaderValue(request.getEndpoint()));

        // Copy over any other headers already in our request
        for (Entry<String, String> entry : request.getHeaders().entrySet()) {
            /*
             * HttpClient4 fills in the Content-Length header and complains if
             * it's already present, so we skip it here. We also skip the Host
             * header to avoid sending it twice, which will interfere with some
             * signing schemes.
             */
            if (!(ignoreHeaders.contains(entry.getKey()))) {
                httpRequest.addHeader(entry.getKey(), entry.getValue());
            }
        }

        /* Set content type and encoding when the caller supplied none. */
        if (httpRequest.getHeaders(HttpHeaders.CONTENT_TYPE) == null || httpRequest
            .getHeaders
            (HttpHeaders.CONTENT_TYPE).length == 0) {
            httpRequest.addHeader(HttpHeaders.CONTENT_TYPE,
                "application/x-www-form-urlencoded; " +
                "charset=" + DEFAULT_ENCODING.toLowerCase());
        }
    }
}
public class class_name { protected void filterTable(String search) { m_container.removeAllContainerFilters(); if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(search)) { m_container.addContainerFilter( new Or( new SimpleStringFilter(PROP_CAPTION, search, true, false), new SimpleStringFilter(PROP_DESCRIPTION, search, true, false))); } } }
public class class_name { protected void filterTable(String search) { m_container.removeAllContainerFilters(); if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(search)) { m_container.addContainerFilter( new Or( new SimpleStringFilter(PROP_CAPTION, search, true, false), new SimpleStringFilter(PROP_DESCRIPTION, search, true, false))); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Parses an external-entity text declaration: an optional
     * {@code version="1.0"} followed by a mandatory {@code encoding="..."},
     * terminated by {@code ?>}. Only XML 1.0 is accepted.
     *
     * @param encoding the externally declared encoding to cross-check against,
     *        when a reader already exists.
     * @return the encoding name read from the declaration.
     * @throws SAXException on an illegal version, encoding, or syntax.
     * @throws IOException on read failure.
     */
    private String parseTextDecl(String encoding) throws SAXException, IOException {
        String encodingName = null;
        // Literal flags: no character refs, PE refs, or entity refs allowed
        // inside declaration literals.
        int flags = LIT_DISABLE_CREF | LIT_DISABLE_PE | LIT_DISABLE_EREF;

        // Read an optional version.
        if (tryRead("version")) {
            String version;
            parseEq();
            checkLegalVersion(version = readLiteral(flags));
            if (!version.equals("1.0")) {
                if (version.equals("1.1")) {
                    // XML 1.1 is deliberately rejected. (2006-04-24 hsivonen)
                    fatal("XML 1.1 not supported.");
                } else {
                    // Any other version is simply illegal; 1.1 was removed
                    // from the suggestion. (2006-04-24 hsivonen)
                    fatal("illegal XML version", version, "1.0");
                }
            }
            requireWhitespace();
        }

        // Read the encoding — mandatory in a text declaration.
        require("encoding");
        parseEq();
        encodingName = readLiteral(flags);
        checkEncodingLiteral(encodingName); // 2006-04-28 hsivonen

        if (reader == null) {
            // No reader yet: build one, strictly honoring the declared encoding.
            draconianInputStreamReader(encodingName, is, true);
        } else {
            // Reader exists: the declared encoding must agree with it.
            checkEncodingMatch(encoding, encodingName);
        }
        skipWhitespace();
        require("?>");
        return encodingName;
    }
}
public class class_name {
    /**
     * Wraps the string as a bag of tokens whose per-token weights have been
     * replaced by smoothed probabilities (raw frequency normalized by total
     * weight, smoothed by corpus statistics).
     *
     * @param s the string to tokenize and weight.
     * @return the weighted bag, typed as a StringWrapper.
     */
    final public StringWrapper prepare(String s) {
        BagOfTokens bag = new BagOfTokens(s, tokenizer.tokenize(s));
        double totalWeight = bag.getTotalWeight();
        // Re-weight every token in place with its smoothed probability.
        for (Iterator i=bag.tokenIterator(); i.hasNext(); ) {
            Token tok = (Token)i.next();
            double freq = bag.getWeight(tok);
            bag.setWeight( tok, smoothedProbability(tok, freq, totalWeight) );
        }
        return bag;
    }
}
public class class_name { private String getNamespaceValue(final String value) { if (value == null || value.contains(":") || value.equals("text")) { return value; } else { return getCurrentNamespace() + ":" + value; } } }
public class class_name { private String getNamespaceValue(final String value) { if (value == null || value.contains(":") || value.equals("text")) { return value; // depends on control dependency: [if], data = [none] } else { return getCurrentNamespace() + ":" + value; // depends on control dependency: [if], data = [none] } } }
public class class_name { private static boolean checkInitialize(Method method, Object obj) { boolean isInitialized = true; try { isInitialized = (Boolean) method.invoke(null, new Object[] {obj}); } catch (IllegalArgumentException e) { // do nothing } catch (IllegalAccessException e) { // do nothing } catch (InvocationTargetException e) { // do nothing } return isInitialized; } }
public class class_name { private static boolean checkInitialize(Method method, Object obj) { boolean isInitialized = true; try { isInitialized = (Boolean) method.invoke(null, new Object[] {obj}); // depends on control dependency: [try], data = [none] } catch (IllegalArgumentException e) { // do nothing } catch (IllegalAccessException e) { // depends on control dependency: [catch], data = [none] // do nothing } catch (InvocationTargetException e) { // depends on control dependency: [catch], data = [none] // do nothing } // depends on control dependency: [catch], data = [none] return isInitialized; } }
public class class_name {
    /**
     * Returns a byte-array output stream to the static pool, or silently
     * discards it when the pool is already full.
     *
     * @param baos the stream to return to the pool.
     */
    private final void freeByteArrayStream(ByteArrayOutputStream baos) {
        // If the pool is not full, then add to the pool. Note that this must
        // be synchronized, as multiple threads may access the pool. d175235
        synchronized (svBAOSs) {
            if (svBAOSsSize < svBAOSs.length) {
                // svBAOSsSize is the index of the next free slot; both the
                // store and the increment happen under the lock.
                svBAOSs[svBAOSsSize] = baos;
                ++svBAOSsSize;
            }
        }
    }
}
public class class_name {
    /**
     * Handles a resource request by submitting an Azure Batch task for the
     * container and recording the request as outstanding, then refreshing the
     * runtime status.
     *
     * @param resourceRequestEvent the request being fulfilled.
     * @param containerId the id of the container to create a task for.
     * @param jarFileUri location of the jar the task will run.
     * @throws RuntimeException wrapping any IOException from task creation;
     *         in that case no outstanding-request bookkeeping is performed.
     */
    public void onResourceRequested(final ResourceRequestEvent resourceRequestEvent,
        final String containerId, final URI jarFileUri) {
        try {
            createAzureBatchTask(containerId, jarFileUri);
            // Bookkeeping happens only after the task was created successfully.
            this.outstandingResourceRequests.put(containerId, resourceRequestEvent);
            this.outstandingResourceRequestCount.incrementAndGet();
            this.updateRuntimeStatus();
        } catch (IOException e) {
            LOG.log(Level.SEVERE, "Failed to create Azure Batch task with the following exception: {0}", e);
            throw new RuntimeException(e);
        }
    }
}
public class class_name { public final ProjectionJavaScriptBuilder types(final List<TypeName> eventTypes) { for (final TypeName type : eventTypes) { type(type.asBaseType()); } return this; } }
public class class_name { public final ProjectionJavaScriptBuilder types(final List<TypeName> eventTypes) { for (final TypeName type : eventTypes) { type(type.asBaseType()); // depends on control dependency: [for], data = [type] } return this; } }
public class class_name { public List<GoogleMapShape> toShapes( GeometryCollection<Geometry> geometryCollection) { List<GoogleMapShape> shapes = new ArrayList<GoogleMapShape>(); for (Geometry geometry : geometryCollection.getGeometries()) { GoogleMapShape shape = toShape(geometry); shapes.add(shape); } return shapes; } }
public class class_name { public List<GoogleMapShape> toShapes( GeometryCollection<Geometry> geometryCollection) { List<GoogleMapShape> shapes = new ArrayList<GoogleMapShape>(); for (Geometry geometry : geometryCollection.getGeometries()) { GoogleMapShape shape = toShape(geometry); shapes.add(shape); // depends on control dependency: [for], data = [none] } return shapes; } }
public class class_name { public static String concatPathAndFilename(final String path, final String filename, final String separator) { checkNotNull("filename", filename); checkNotNull("separator", separator); checkNotEmpty("separator", separator); if (path == null) { return filename; } final String trimmedPath = path.trim(); if (trimmedPath.length() == 0) { return filename; } final String trimmedFilename = filename.trim(); if (trimmedPath.endsWith(separator)) { return trimmedPath + trimmedFilename; } return trimmedPath + separator + trimmedFilename; } }
public class class_name { public static String concatPathAndFilename(final String path, final String filename, final String separator) { checkNotNull("filename", filename); checkNotNull("separator", separator); checkNotEmpty("separator", separator); if (path == null) { return filename; // depends on control dependency: [if], data = [none] } final String trimmedPath = path.trim(); if (trimmedPath.length() == 0) { return filename; // depends on control dependency: [if], data = [none] } final String trimmedFilename = filename.trim(); if (trimmedPath.endsWith(separator)) { return trimmedPath + trimmedFilename; // depends on control dependency: [if], data = [none] } return trimmedPath + separator + trimmedFilename; } }
public class class_name { public RunJobFlowRequest withNewSupportedProducts(SupportedProductConfig... newSupportedProducts) { if (this.newSupportedProducts == null) { setNewSupportedProducts(new com.amazonaws.internal.SdkInternalList<SupportedProductConfig>(newSupportedProducts.length)); } for (SupportedProductConfig ele : newSupportedProducts) { this.newSupportedProducts.add(ele); } return this; } }
public class class_name { public RunJobFlowRequest withNewSupportedProducts(SupportedProductConfig... newSupportedProducts) { if (this.newSupportedProducts == null) { setNewSupportedProducts(new com.amazonaws.internal.SdkInternalList<SupportedProductConfig>(newSupportedProducts.length)); // depends on control dependency: [if], data = [none] } for (SupportedProductConfig ele : newSupportedProducts) { this.newSupportedProducts.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { public void cleanupProjectArtifacts(final int projectId) { try { this.storageCleaner.cleanupProjectArtifacts(projectId); } catch (final Exception e) { log.error("Error occured during cleanup. Ignoring and continuing...", e); } } }
public class class_name { public void cleanupProjectArtifacts(final int projectId) { try { this.storageCleaner.cleanupProjectArtifacts(projectId); // depends on control dependency: [try], data = [none] } catch (final Exception e) { log.error("Error occured during cleanup. Ignoring and continuing...", e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { protected static AbatisService getInstance(Context context, String dbName, int version) { if (instance == null) { instance = new AbatisService(context, dbName, version); } return instance; } }
public class class_name { protected static AbatisService getInstance(Context context, String dbName, int version) { if (instance == null) { instance = new AbatisService(context, dbName, version); // depends on control dependency: [if], data = [none] } return instance; } }
public class class_name { public static Collection<DatasetDescriptor> findPotentialDatasets( FileSystem fs, Path path) throws IOException { List<DatasetDescriptor> descriptors = Lists.newArrayList(); Result result = visit(new FindDatasets(), fs, path); if (result instanceof Result.Table) { descriptors.add(descriptor(fs, (Result.Table) result)); } else if (result instanceof Result.Group) { for (Result.Table table : ((Result.Group) result).tables) { descriptors.add(descriptor(fs, table)); } } return descriptors; } }
public class class_name { public static Collection<DatasetDescriptor> findPotentialDatasets( FileSystem fs, Path path) throws IOException { List<DatasetDescriptor> descriptors = Lists.newArrayList(); Result result = visit(new FindDatasets(), fs, path); if (result instanceof Result.Table) { descriptors.add(descriptor(fs, (Result.Table) result)); } else if (result instanceof Result.Group) { for (Result.Table table : ((Result.Group) result).tables) { descriptors.add(descriptor(fs, table)); // depends on control dependency: [for], data = [table] } } return descriptors; } }
public class class_name { private void processSnapshotResponse(ClientResponse response) { setState(State.WAITING); final long now = System.currentTimeMillis(); m_nextSnapshotTime += m_frequencyInMillis; if (m_nextSnapshotTime < now) { m_nextSnapshotTime = now - 1; } if (response.getStatus() != ClientResponse.SUCCESS){ logFailureResponse("Snapshot failed", response); return; } final VoltTable results[] = response.getResults(); final VoltTable result = results[0]; final String err = SnapshotUtil.didSnapshotRequestFailWithErr(results); if (err != null) { SNAP_LOG.warn("Snapshot failed with failure response: " + err); m_snapshots.removeLast(); return; } //assert(result.getColumnName(1).equals("TABLE")); boolean success = true; while (result.advanceRow()) { if (!result.getString("RESULT").equals("SUCCESS")) { success = false; SNAP_LOG.warn("Snapshot save feasibility test failed for host " + result.getLong("HOST_ID") + " table " + result.getString("TABLE") + " with error message " + result.getString("ERR_MSG")); } } if (!success) { m_snapshots.removeLast(); } } }
public class class_name { private void processSnapshotResponse(ClientResponse response) { setState(State.WAITING); final long now = System.currentTimeMillis(); m_nextSnapshotTime += m_frequencyInMillis; if (m_nextSnapshotTime < now) { m_nextSnapshotTime = now - 1; // depends on control dependency: [if], data = [none] } if (response.getStatus() != ClientResponse.SUCCESS){ logFailureResponse("Snapshot failed", response); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } final VoltTable results[] = response.getResults(); final VoltTable result = results[0]; final String err = SnapshotUtil.didSnapshotRequestFailWithErr(results); if (err != null) { SNAP_LOG.warn("Snapshot failed with failure response: " + err); m_snapshots.removeLast(); return; } //assert(result.getColumnName(1).equals("TABLE")); boolean success = true; while (result.advanceRow()) { if (!result.getString("RESULT").equals("SUCCESS")) { success = false; SNAP_LOG.warn("Snapshot save feasibility test failed for host " + result.getLong("HOST_ID") + " table " + result.getString("TABLE") + " with error message " + result.getString("ERR_MSG")); } } if (!success) { m_snapshots.removeLast(); } } }
public class class_name { private Map<String, String> buildApplyParams(RefundApplyRequest request) { Map<String, String> refundParams = new TreeMap<>(); // 配置参数 buildConfigParams(refundParams); // 业务参数 putIfNotEmpty(refundParams, WepayField.TRANSACTION_ID, request.getTransactionId()); putIfNotEmpty(refundParams, WepayField.OUT_TRADE_NO, request.getOutTradeNo()); put(refundParams, WepayField.OUT_REFUND_NO, request.getOutRefundNo()); put(refundParams, WepayField.TOTAL_FEE, request.getTotalFee() + ""); put(refundParams, WepayField.REFUND_FEE, request.getRefundFee() + ""); put(refundParams, WepayField.NONCE_STR, RandomStrs.generate(16)); put(refundParams, WepayField.OP_USER_ID, request.getOpUserId()); putIfNotEmpty(refundParams, WepayField.DEVICE_INFO, request.getDeviceInfo()); if (request.getRefundFeeType() != null){ put(refundParams, WepayField.REFUND_FEE_TYPE, request.getRefundFeeType().type()); } // 签名参数 buildSignParams(refundParams); return refundParams; } }
public class class_name { private Map<String, String> buildApplyParams(RefundApplyRequest request) { Map<String, String> refundParams = new TreeMap<>(); // 配置参数 buildConfigParams(refundParams); // 业务参数 putIfNotEmpty(refundParams, WepayField.TRANSACTION_ID, request.getTransactionId()); putIfNotEmpty(refundParams, WepayField.OUT_TRADE_NO, request.getOutTradeNo()); put(refundParams, WepayField.OUT_REFUND_NO, request.getOutRefundNo()); put(refundParams, WepayField.TOTAL_FEE, request.getTotalFee() + ""); put(refundParams, WepayField.REFUND_FEE, request.getRefundFee() + ""); put(refundParams, WepayField.NONCE_STR, RandomStrs.generate(16)); put(refundParams, WepayField.OP_USER_ID, request.getOpUserId()); putIfNotEmpty(refundParams, WepayField.DEVICE_INFO, request.getDeviceInfo()); if (request.getRefundFeeType() != null){ put(refundParams, WepayField.REFUND_FEE_TYPE, request.getRefundFeeType().type()); // depends on control dependency: [if], data = [none] } // 签名参数 buildSignParams(refundParams); return refundParams; } }
public class class_name { private List<SimulatorEvent> handleKillTaskAction(KillTaskAction action, long now) { TaskAttemptID taskId = action.getTaskID(); // we don't have a nice(r) toString() in Hadoop's TaskActions if (LOG.isDebugEnabled()) { LOG.debug("Handling kill task action, taskId=" + taskId + ", now=" + now); } SimulatorTaskInProgress tip = tasks.get(taskId); // Safety check: We might get a KillTaskAction even for completed reduces if (tip == null) { return SimulatorEngine.EMPTY_EVENTS; } progressTaskStatus(tip, now); // make progress up to date TaskStatus finalStatus = (TaskStatus)tip.getTaskStatus().clone(); finalStatus.setFinishTime(now); finalStatus.setRunState(State.KILLED); finishRunningTask(finalStatus, now); if (finalStatus.getIsMap() || finalStatus.getPhase() == Phase.REDUCE) { // if we have already created a task attempt completion event we remember // the task id, so that we can safely ignore the event when its delivered orphanTaskCompletions.add(taskId); } return SimulatorEngine.EMPTY_EVENTS; } }
public class class_name { private List<SimulatorEvent> handleKillTaskAction(KillTaskAction action, long now) { TaskAttemptID taskId = action.getTaskID(); // we don't have a nice(r) toString() in Hadoop's TaskActions if (LOG.isDebugEnabled()) { LOG.debug("Handling kill task action, taskId=" + taskId + ", now=" + now); // depends on control dependency: [if], data = [none] } SimulatorTaskInProgress tip = tasks.get(taskId); // Safety check: We might get a KillTaskAction even for completed reduces if (tip == null) { return SimulatorEngine.EMPTY_EVENTS; // depends on control dependency: [if], data = [none] } progressTaskStatus(tip, now); // make progress up to date TaskStatus finalStatus = (TaskStatus)tip.getTaskStatus().clone(); finalStatus.setFinishTime(now); finalStatus.setRunState(State.KILLED); finishRunningTask(finalStatus, now); if (finalStatus.getIsMap() || finalStatus.getPhase() == Phase.REDUCE) { // if we have already created a task attempt completion event we remember // the task id, so that we can safely ignore the event when its delivered orphanTaskCompletions.add(taskId); // depends on control dependency: [if], data = [none] } return SimulatorEngine.EMPTY_EVENTS; } }
public class class_name { private Set<ConstraintViolation<?>> validateJsr303(V value) { @SuppressWarnings("rawtypes") Set set; if (this.property == null) { if (value == null) { return null; } set = this.validator.validate(value, this.groups); } else { set = this.validator.validateValue(this.pojoType, this.property, value, this.groups); } @SuppressWarnings("unchecked") Set<ConstraintViolation<?>> violationSet = set; return violationSet; } }
public class class_name { private Set<ConstraintViolation<?>> validateJsr303(V value) { @SuppressWarnings("rawtypes") Set set; if (this.property == null) { if (value == null) { return null; // depends on control dependency: [if], data = [none] } set = this.validator.validate(value, this.groups); // depends on control dependency: [if], data = [none] } else { set = this.validator.validateValue(this.pojoType, this.property, value, this.groups); // depends on control dependency: [if], data = [none] } @SuppressWarnings("unchecked") Set<ConstraintViolation<?>> violationSet = set; return violationSet; } }
public class class_name { @VisibleForTesting List<Long> prepareRowBaseTimesRollup() { final RollupInterval interval = rollup_query.getRollupInterval(); // standard TSDB table format, i.e. we're using the default table and schema if (interval.getUnits() == 'h') { return prepareRowBaseTimes(); } else { final List<Long> row_base_times = new ArrayList<Long>( (int) ((end_row_time - start_row_time) / interval.getIntervals())); long ts = RollupUtils.getRollupBasetime(start_row_time, interval); while (ts <= end_row_time) { row_base_times.add(ts); // TODO - possible this could overshoot in some cases. It shouldn't // if the rollups are properly configured, but... you know. Check it. ts = RollupUtils.getRollupBasetime(ts + (interval.getIntervalSeconds() * interval.getIntervals()), interval); } return row_base_times; } } }
public class class_name { @VisibleForTesting List<Long> prepareRowBaseTimesRollup() { final RollupInterval interval = rollup_query.getRollupInterval(); // standard TSDB table format, i.e. we're using the default table and schema if (interval.getUnits() == 'h') { return prepareRowBaseTimes(); // depends on control dependency: [if], data = [none] } else { final List<Long> row_base_times = new ArrayList<Long>( (int) ((end_row_time - start_row_time) / interval.getIntervals())); long ts = RollupUtils.getRollupBasetime(start_row_time, interval); while (ts <= end_row_time) { row_base_times.add(ts); // depends on control dependency: [while], data = [(ts] // TODO - possible this could overshoot in some cases. It shouldn't // if the rollups are properly configured, but... you know. Check it. ts = RollupUtils.getRollupBasetime(ts + (interval.getIntervalSeconds() * interval.getIntervals()), interval); // depends on control dependency: [while], data = [(ts] } return row_base_times; // depends on control dependency: [if], data = [none] } } }
public class class_name { public void marshall(GetSnapshotLimitsRequest getSnapshotLimitsRequest, ProtocolMarshaller protocolMarshaller) { if (getSnapshotLimitsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getSnapshotLimitsRequest.getDirectoryId(), DIRECTORYID_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(GetSnapshotLimitsRequest getSnapshotLimitsRequest, ProtocolMarshaller protocolMarshaller) { if (getSnapshotLimitsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getSnapshotLimitsRequest.getDirectoryId(), DIRECTORYID_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static File getLogsRoot() { String tagsLogsPath = SystemProperties.getString(LOGS_ROOT_PATH_PROPERTY); if (tagsLogsPath == null) { return new File(Jenkins.get().getRootDir(), "logs"); } else { Level logLevel = Level.INFO; if (ALREADY_LOGGED) { logLevel = Level.FINE; } LOGGER.log(logLevel, "Using non default root path for tasks logging: {0}. (Beware: no automated migration if you change or remove it again)", LOGS_ROOT_PATH_PROPERTY); ALREADY_LOGGED = true; return new File(tagsLogsPath); } } }
public class class_name { public static File getLogsRoot() { String tagsLogsPath = SystemProperties.getString(LOGS_ROOT_PATH_PROPERTY); if (tagsLogsPath == null) { return new File(Jenkins.get().getRootDir(), "logs"); // depends on control dependency: [if], data = [none] } else { Level logLevel = Level.INFO; if (ALREADY_LOGGED) { logLevel = Level.FINE; // depends on control dependency: [if], data = [none] } LOGGER.log(logLevel, "Using non default root path for tasks logging: {0}. (Beware: no automated migration if you change or remove it again)", LOGS_ROOT_PATH_PROPERTY); // depends on control dependency: [if], data = [none] ALREADY_LOGGED = true; // depends on control dependency: [if], data = [none] return new File(tagsLogsPath); // depends on control dependency: [if], data = [(tagsLogsPath] } } }
public class class_name { public Color getColorFor( double value ) { if (value <= min) { return colors[0]; } else if (value >= max) { return colors[colors.length - 1]; } else { for( int i = 1; i < colors.length; i++ ) { double v1 = values[i - 1]; double v2 = values[i]; if (value < v2) { double v = (value - v1) / (v2 - v1); Color interpolateColor = interpolateColor(colors[i - 1], colors[i], (float) v); return interpolateColor; } } return colors[colors.length - 1]; } } }
public class class_name { public Color getColorFor( double value ) { if (value <= min) { return colors[0]; // depends on control dependency: [if], data = [none] } else if (value >= max) { return colors[colors.length - 1]; // depends on control dependency: [if], data = [none] } else { for( int i = 1; i < colors.length; i++ ) { double v1 = values[i - 1]; double v2 = values[i]; if (value < v2) { double v = (value - v1) / (v2 - v1); Color interpolateColor = interpolateColor(colors[i - 1], colors[i], (float) v); return interpolateColor; // depends on control dependency: [if], data = [none] } } return colors[colors.length - 1]; // depends on control dependency: [if], data = [none] } } }
public class class_name { public boolean exclusiveTryLockNanos(long nanosTimeout) throws InterruptedException { final long lastTime = System.nanoTime(); // Try to acquire the lock in write-mode if (stampedLock.tryWriteLock(nanosTimeout, TimeUnit.NANOSECONDS) == 0) { return false; } // We can only do this after writerOwner has been set to the current thread AtomicInteger[] localReadersStateArray = readersStateArrayRef.get(); if (localReadersStateArray == null) { // Set to dummyArray before scanning the readersStateList to impose // a linearizability condition readersStateArrayRef.set(dummyArray); // Copy readersStateList to an array localReadersStateArray = readersStateList.toArray(new AtomicInteger[readersStateList.size()]); readersStateArrayRef.compareAndSet(dummyArray, localReadersStateArray); } // Scan the array of Reader states for (AtomicInteger readerState : localReadersStateArray) { while (readerState != null && readerState.get() == SRWL_STATE_READING) { if (System.nanoTime() - lastTime < nanosTimeout) { Thread.yield(); } else { // Time has expired and there is at least one ongoing Reader so give up stampedLock.asWriteLock().unlock(); return false; } } } return true; } }
public class class_name { public boolean exclusiveTryLockNanos(long nanosTimeout) throws InterruptedException { final long lastTime = System.nanoTime(); // Try to acquire the lock in write-mode if (stampedLock.tryWriteLock(nanosTimeout, TimeUnit.NANOSECONDS) == 0) { return false; } // We can only do this after writerOwner has been set to the current thread AtomicInteger[] localReadersStateArray = readersStateArrayRef.get(); if (localReadersStateArray == null) { // Set to dummyArray before scanning the readersStateList to impose // a linearizability condition readersStateArrayRef.set(dummyArray); // Copy readersStateList to an array localReadersStateArray = readersStateList.toArray(new AtomicInteger[readersStateList.size()]); readersStateArrayRef.compareAndSet(dummyArray, localReadersStateArray); } // Scan the array of Reader states for (AtomicInteger readerState : localReadersStateArray) { while (readerState != null && readerState.get() == SRWL_STATE_READING) { if (System.nanoTime() - lastTime < nanosTimeout) { Thread.yield(); // depends on control dependency: [if], data = [none] } else { // Time has expired and there is at least one ongoing Reader so give up stampedLock.asWriteLock().unlock(); // depends on control dependency: [if], data = [none] return false; // depends on control dependency: [if], data = [none] } } } return true; } }
public class class_name { private static Throwable getWorkflowCause( Exception exception ){ Throwable e = exception; while( e instanceof WorkflowException ){ e = e.getCause(); } return e; } }
public class class_name { private static Throwable getWorkflowCause( Exception exception ){ Throwable e = exception; while( e instanceof WorkflowException ){ e = e.getCause(); // depends on control dependency: [while], data = [none] } return e; } }
public class class_name { public Collection<String> getDirectoriesToMonitor() { Collection<String> files = new HashSet<String>(); if (configDropinDefaults != null) { files.add(configDropinDefaults.toRepositoryPath()); } if (configDropinOverrides != null) { files.add(configDropinOverrides.toRepositoryPath()); } return files; } }
public class class_name { public Collection<String> getDirectoriesToMonitor() { Collection<String> files = new HashSet<String>(); if (configDropinDefaults != null) { files.add(configDropinDefaults.toRepositoryPath()); // depends on control dependency: [if], data = [(configDropinDefaults] } if (configDropinOverrides != null) { files.add(configDropinOverrides.toRepositoryPath()); // depends on control dependency: [if], data = [(configDropinOverrides] } return files; } }
public class class_name { public synchronized void setPlaybackState(int player, long position, boolean playing) { if (getMonitoredPlayer() != 0 && player != getMonitoredPlayer()) { throw new IllegalStateException("Cannot setPlaybackState for another player when monitoring player " + getMonitoredPlayer()); } if (player < 1) { throw new IllegalArgumentException("player must be positive"); } PlaybackState oldFurthestState = getFurthestPlaybackState(); PlaybackState newState = new PlaybackState(player, position, playing); PlaybackState oldState = playbackStateMap.put(player, newState); if (oldState == null || oldState.position != newState.position) { repaintDueToPlaybackStateChange(oldState, newState, oldFurthestState); } } }
public class class_name { public synchronized void setPlaybackState(int player, long position, boolean playing) { if (getMonitoredPlayer() != 0 && player != getMonitoredPlayer()) { throw new IllegalStateException("Cannot setPlaybackState for another player when monitoring player " + getMonitoredPlayer()); } if (player < 1) { throw new IllegalArgumentException("player must be positive"); } PlaybackState oldFurthestState = getFurthestPlaybackState(); PlaybackState newState = new PlaybackState(player, position, playing); PlaybackState oldState = playbackStateMap.put(player, newState); if (oldState == null || oldState.position != newState.position) { repaintDueToPlaybackStateChange(oldState, newState, oldFurthestState); // depends on control dependency: [if], data = [(oldState] } } }
public class class_name { private boolean applySnapshotToDB(EntryPosition position, boolean init) { // 获取一份快照 Map<String, String> schemaDdls = null; lock.readLock().lock(); try { if (!init && !hasNewDdl) { // 如果是持续构建,则识别一下是否有DDL变更过,如果没有就忽略了 return false; } this.hasNewDdl = false; schemaDdls = memoryTableMeta.snapshot(); } finally { lock.readLock().unlock(); } MemoryTableMeta tmpMemoryTableMeta = new MemoryTableMeta(); for (Map.Entry<String, String> entry : schemaDdls.entrySet()) { tmpMemoryTableMeta.apply(position, entry.getKey(), entry.getValue(), null); } // 基于临时内存对象进行对比 boolean compareAll = true; for (Schema schema : tmpMemoryTableMeta.getRepository().getSchemas()) { for (String table : schema.showTables()) { String fullName = schema + "." + table; if (blackFilter == null || !blackFilter.filter(fullName)) { if (filter == null || filter.filter(fullName)) { // issue : https://github.com/alibaba/canal/issues/1168 // 在生成snapshot时重新过滤一遍 if (!compareTableMetaDbAndMemory(connection, tmpMemoryTableMeta, schema.getName(), table)) { compareAll = false; } } } } } if (compareAll) { Map<String, String> content = new HashMap<String, String>(); content.put("destination", destination); content.put("binlogFile", position.getJournalName()); content.put("binlogOffest", String.valueOf(position.getPosition())); content.put("binlogMasterId", String.valueOf(position.getServerId())); content.put("binlogTimestamp", String.valueOf(position.getTimestamp())); content.put("data", JSON.toJSONString(schemaDdls)); if (content.isEmpty()) { throw new RuntimeException("apply failed caused by content is empty in applySnapshotToDB"); } MetaSnapshotDO snapshotDO = new MetaSnapshotDO(); try { BeanUtils.populate(snapshotDO, content); metaSnapshotDAO.insert(snapshotDO); } catch (Throwable e) { if (isUkDuplicateException(e)) { // 忽略掉重复的位点 logger.info("dup apply snapshot use position : " + position + " , just ignore"); } else { throw new CanalParseException("apply failed caused by : " + e.getMessage(), 
e); } } return true; } else { logger.error("compare failed , check log"); } return false; } }
public class class_name { private boolean applySnapshotToDB(EntryPosition position, boolean init) { // 获取一份快照 Map<String, String> schemaDdls = null; lock.readLock().lock(); try { if (!init && !hasNewDdl) { // 如果是持续构建,则识别一下是否有DDL变更过,如果没有就忽略了 return false; // depends on control dependency: [if], data = [none] } this.hasNewDdl = false; // depends on control dependency: [try], data = [none] schemaDdls = memoryTableMeta.snapshot(); // depends on control dependency: [try], data = [none] } finally { lock.readLock().unlock(); } MemoryTableMeta tmpMemoryTableMeta = new MemoryTableMeta(); for (Map.Entry<String, String> entry : schemaDdls.entrySet()) { tmpMemoryTableMeta.apply(position, entry.getKey(), entry.getValue(), null); // depends on control dependency: [for], data = [entry] } // 基于临时内存对象进行对比 boolean compareAll = true; for (Schema schema : tmpMemoryTableMeta.getRepository().getSchemas()) { for (String table : schema.showTables()) { String fullName = schema + "." + table; if (blackFilter == null || !blackFilter.filter(fullName)) { if (filter == null || filter.filter(fullName)) { // issue : https://github.com/alibaba/canal/issues/1168 // 在生成snapshot时重新过滤一遍 if (!compareTableMetaDbAndMemory(connection, tmpMemoryTableMeta, schema.getName(), table)) { compareAll = false; // depends on control dependency: [if], data = [none] } } } } } if (compareAll) { Map<String, String> content = new HashMap<String, String>(); content.put("destination", destination); // depends on control dependency: [if], data = [none] content.put("binlogFile", position.getJournalName()); // depends on control dependency: [if], data = [none] content.put("binlogOffest", String.valueOf(position.getPosition())); // depends on control dependency: [if], data = [none] content.put("binlogMasterId", String.valueOf(position.getServerId())); // depends on control dependency: [if], data = [none] content.put("binlogTimestamp", String.valueOf(position.getTimestamp())); // depends on control dependency: [if], data = 
[none] content.put("data", JSON.toJSONString(schemaDdls)); // depends on control dependency: [if], data = [none] if (content.isEmpty()) { throw new RuntimeException("apply failed caused by content is empty in applySnapshotToDB"); } MetaSnapshotDO snapshotDO = new MetaSnapshotDO(); try { BeanUtils.populate(snapshotDO, content); // depends on control dependency: [try], data = [none] metaSnapshotDAO.insert(snapshotDO); // depends on control dependency: [try], data = [none] } catch (Throwable e) { if (isUkDuplicateException(e)) { // 忽略掉重复的位点 logger.info("dup apply snapshot use position : " + position + " , just ignore"); // depends on control dependency: [if], data = [none] } else { throw new CanalParseException("apply failed caused by : " + e.getMessage(), e); } } // depends on control dependency: [catch], data = [none] return true; // depends on control dependency: [if], data = [none] } else { logger.error("compare failed , check log"); // depends on control dependency: [if], data = [none] } return false; } }
public class class_name { @Override public void setContentType(final String type) { super.setContentType(type); contentTypeResolver = new ContentTypeHeaderResolver(type); if (bufferContentType(type, contentTypeResolver.getMimeType(), contentTypeResolver.getEncoding())) { enableBuffering(); } else { disableBuffering(); } } }
public class class_name { @Override public void setContentType(final String type) { super.setContentType(type); contentTypeResolver = new ContentTypeHeaderResolver(type); if (bufferContentType(type, contentTypeResolver.getMimeType(), contentTypeResolver.getEncoding())) { enableBuffering(); // depends on control dependency: [if], data = [none] } else { disableBuffering(); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public List<Object> getServices(Class<?> root,Class<?> transactionType){ Map<Class<?>, List<Object>> map = mapBusinessProvider.get(root); if(map != null){ List<Object> list = map.get(transactionType); if(list != null){ return Collections.unmodifiableList(list); } } return null; } }
public class class_name { @Override public List<Object> getServices(Class<?> root,Class<?> transactionType){ Map<Class<?>, List<Object>> map = mapBusinessProvider.get(root); if(map != null){ List<Object> list = map.get(transactionType); if(list != null){ return Collections.unmodifiableList(list); // depends on control dependency: [if], data = [(list] } } return null; } }
public class class_name { public static String toUnicodeHex(int value) { final StringBuilder builder = new StringBuilder(6); builder.append("\\u"); String hex = toHex(value); int len = hex.length(); if (len < 4) { builder.append("0000", 0, 4 - len);// 不足4位补0 } builder.append(hex); return builder.toString(); } }
public class class_name { public static String toUnicodeHex(int value) { final StringBuilder builder = new StringBuilder(6); builder.append("\\u"); String hex = toHex(value); int len = hex.length(); if (len < 4) { builder.append("0000", 0, 4 - len);// 不足4位补0 // depends on control dependency: [if], data = [none] } builder.append(hex); return builder.toString(); } }
public class class_name { @Override public FSDataOutputStream create(Path file, FsPermission permission, boolean overwrite, int bufferSize, short replication, long blockSize, Progressable progress) throws IOException { final FTPClient client = connect(); Path workDir = new Path(client.printWorkingDirectory()); Path absolute = makeAbsolute(workDir, file); if (exists(client, file)) { if (overwrite) { delete(client, file); } else { disconnect(client); throw new IOException("File already exists: " + file); } } Path parent = absolute.getParent(); if (parent == null || !mkdirs(client, parent, FsPermission.getDefault())) { parent = (parent == null) ? new Path("/") : parent; disconnect(client); throw new IOException("create(): Mkdirs failed to create: " + parent); } client.allocate(bufferSize); // Change to parent directory on the server. Only then can we write to the // file on the server by opening up an OutputStream. As a side effect the // working directory on the server is changed to the parent directory of the // file. The FTP client connection is closed when close() is called on the // FSDataOutputStream. client.changeWorkingDirectory(parent.toUri().getPath()); FSDataOutputStream fos = new FSDataOutputStream(client.storeFileStream(file .getName()), statistics) { @Override public void close() throws IOException { super.close(); if (!client.isConnected()) { throw new FTPException("Client not connected"); } boolean cmdCompleted = client.completePendingCommand(); disconnect(client); if (!cmdCompleted) { throw new FTPException("Could not complete transfer, Reply Code - " + client.getReplyCode()); } } }; if (!FTPReply.isPositivePreliminary(client.getReplyCode())) { // The ftpClient is an inconsistent state. Must close the stream // which in turn will logout and disconnect from FTP server fos.close(); throw new IOException("Unable to create file: " + file + ", Aborting"); } return fos; } }
public class class_name {
    // Creates a file at the given path on the FTP server and returns a stream for
    // writing its contents.  An existing file is deleted when overwrite is set,
    // otherwise an IOException is raised.  Parent directories are created as
    // needed.  The FTP connection stays open until the returned stream is closed.
    // NOTE(review): permission, replication, blockSize and progress are unused in
    // this body - presumably FTP cannot honour them; confirm.
    @Override
    public FSDataOutputStream create(Path file, FsPermission permission, boolean overwrite,
            int bufferSize, short replication, long blockSize, Progressable progress)
            throws IOException {
        final FTPClient client = connect();
        Path workDir = new Path(client.printWorkingDirectory());
        Path absolute = makeAbsolute(workDir, file);
        if (exists(client, file)) {
            if (overwrite) {
                delete(client, file);
            } else {
                disconnect(client);
                throw new IOException("File already exists: " + file);
            }
        }
        Path parent = absolute.getParent();
        if (parent == null || !mkdirs(client, parent, FsPermission.getDefault())) {
            parent = (parent == null) ? new Path("/") : parent;
            disconnect(client);
            throw new IOException("create(): Mkdirs failed to create: " + parent);
        }
        client.allocate(bufferSize);
        // Change to parent directory on the server. Only then can we write to the
        // file on the server by opening up an OutputStream. As a side effect the
        // working directory on the server is changed to the parent directory of the
        // file. The FTP client connection is closed when close() is called on the
        // FSDataOutputStream.
        client.changeWorkingDirectory(parent.toUri().getPath());
        FSDataOutputStream fos = new FSDataOutputStream(client.storeFileStream(file.getName()), statistics) {
            @Override
            public void close() throws IOException {
                super.close();
                if (!client.isConnected()) {
                    throw new FTPException("Client not connected");
                }
                // Finalize the pending STOR on the server before disconnecting.
                boolean cmdCompleted = client.completePendingCommand();
                disconnect(client);
                if (!cmdCompleted) {
                    throw new FTPException("Could not complete transfer, Reply Code - " + client.getReplyCode());
                }
            }
        };
        if (!FTPReply.isPositivePreliminary(client.getReplyCode())) {
            // The ftpClient is an inconsistent state. Must close the stream
            // which in turn will logout and disconnect from FTP server
            fos.close();
            throw new IOException("Unable to create file: " + file + ", Aborting");
        }
        return fos;
    }
}
public class class_name { public static FullyQualifiedJavaType getModelTypeWithBLOBs(IntrospectedTable introspectedTable) { FullyQualifiedJavaType type; if (introspectedTable.getRules().generateRecordWithBLOBsClass()) { type = new FullyQualifiedJavaType(introspectedTable.getRecordWithBLOBsType()); } else { // the blob fields must be rolled up into the base class type = new FullyQualifiedJavaType(introspectedTable.getBaseRecordType()); } return type; } }
public class class_name { public static FullyQualifiedJavaType getModelTypeWithBLOBs(IntrospectedTable introspectedTable) { FullyQualifiedJavaType type; if (introspectedTable.getRules().generateRecordWithBLOBsClass()) { type = new FullyQualifiedJavaType(introspectedTable.getRecordWithBLOBsType()); // depends on control dependency: [if], data = [none] } else { // the blob fields must be rolled up into the base class type = new FullyQualifiedJavaType(introspectedTable.getBaseRecordType()); // depends on control dependency: [if], data = [none] } return type; } }
public class class_name {
    /**
     * Removes the first occurrence of {@code value} from every value collection
     * held by this structure.
     *
     * @throws IllegalArgumentException if {@code value} is null
     */
    public void removeValue(V value) {
        if (value == null) {
            throw new IllegalArgumentException("Method argument value must not be null.");
        }
        for (Collection<V> bucket : values()) {
            bucket.remove(value);
        }
    }
}
public class class_name { public void removeValue(V value) { if (value == null) { throw new IllegalArgumentException("Method argument value must not be null."); } for (Collection<V> values : values()) { values.remove(value); // depends on control dependency: [for], data = [values] } } }
public class class_name {
    // Finds the index best covering the given ordered column set.  An index all
    // of whose columns are matched wins immediately; otherwise the index with
    // the most matched leading columns is chosen.  When nothing matches, a new
    // index is created on the fly - but only for non-persistent table types
    // (system/subquery/view/temp).  Returns null for an empty column set or
    // when no index is found and none may be created.
    Index getIndexForColumns(OrderedIntHashSet set) {
        int maxMatchCount = 0;
        Index selected = null;
        if (set.isEmpty()) {
            return null;
        }
        for (int i = 0, count = indexList.length; i < count; i++) {
            Index currentindex = getIndex(i);
            int[] indexcols = currentindex.getColumns();
            int matchCount = set.getOrderedMatchCount(indexcols);
            if (matchCount == 0) {
                continue;
            }
            if (matchCount == indexcols.length) {
                // Every column of this index is matched - cannot do better.
                return currentindex;
            }
            if (matchCount > maxMatchCount) {
                maxMatchCount = matchCount;
                selected = currentindex;
            }
        }
        if (selected != null) {
            return selected;
        }
        // No match at all: only these table types may create an index on the fly
        // (the cases deliberately fall through to the shared creation code).
        switch (tableType) {
            case TableBase.SYSTEM_SUBQUERY :
            case TableBase.SYSTEM_TABLE :
            case TableBase.VIEW_TABLE :
            case TableBase.TEMP_TABLE : {
                selected = createIndexForColumns(set.toArray());
            }
        }
        return selected;
    }
}
public class class_name {
    // Finds the index best covering the given ordered column set.  An index all
    // of whose columns are matched wins immediately; otherwise the index with
    // the most matched leading columns is chosen.  When nothing matches, a new
    // index is created on the fly - but only for non-persistent table types
    // (system/subquery/view/temp).  Returns null for an empty column set or
    // when no index is found and none may be created.
    Index getIndexForColumns(OrderedIntHashSet set) {
        int maxMatchCount = 0;
        Index selected = null;
        if (set.isEmpty()) {
            return null;
        }
        for (int i = 0, count = indexList.length; i < count; i++) {
            Index currentindex = getIndex(i);
            int[] indexcols = currentindex.getColumns();
            int matchCount = set.getOrderedMatchCount(indexcols);
            if (matchCount == 0) {
                continue;
            }
            if (matchCount == indexcols.length) {
                // Every column of this index is matched - cannot do better.
                return currentindex;
            }
            if (matchCount > maxMatchCount) {
                maxMatchCount = matchCount;
                selected = currentindex;
            }
        }
        if (selected != null) {
            return selected;
        }
        // No match at all: only these table types may create an index on the fly
        // (the cases deliberately fall through to the shared creation code).
        switch (tableType) {
            case TableBase.SYSTEM_SUBQUERY :
            case TableBase.SYSTEM_TABLE :
            case TableBase.VIEW_TABLE :
            case TableBase.TEMP_TABLE : {
                selected = createIndexForColumns(set.toArray());
            }
        }
        return selected;
    }
}
public class class_name {
    /**
     * Returns a defensive, mutable copy of the filter mappings registered under
     * {@code filterName}; an empty list when the filter is unknown.
     */
    public List<WebAppFilterMapping> getFilterMappings(final String filterName) {
        final Set<WebAppFilterMapping> mappings = filterMappings.get(filterName);
        return mappings == null ? new ArrayList<>() : new ArrayList<>(mappings);
    }
}
public class class_name { public List<WebAppFilterMapping> getFilterMappings(final String filterName) { final Set<WebAppFilterMapping> webAppFilterMappings = filterMappings .get(filterName); if (webAppFilterMappings == null) { return new ArrayList<>(); // depends on control dependency: [if], data = [none] } return new ArrayList<>(webAppFilterMappings); } }
public class class_name {
    /**
     * Tells whether a bean is passivation capable: RI-internal beans answer for
     * themselves, any other bean qualifies by implementing PassivationCapable.
     */
    public static boolean isPassivationCapableBean(Bean<?> bean) {
        return bean instanceof RIBean<?>
                ? ((RIBean<?>) bean).isPassivationCapableBean()
                : bean instanceof PassivationCapable;
    }
}
public class class_name { public static boolean isPassivationCapableBean(Bean<?> bean) { if (bean instanceof RIBean<?>) { return ((RIBean<?>) bean).isPassivationCapableBean(); // depends on control dependency: [if], data = [)] } else { return bean instanceof PassivationCapable; // depends on control dependency: [if], data = [none] } } }
public class class_name {
    // Returns the parameter types of the underlying member, resolving them
    // lazily on first access and caching the result in the parameterTypes field.
    // NOTE(review): the lazy initialization is not synchronized; presumably
    // concurrent first calls would just compute the same value twice - confirm
    // this is only used single-threaded or that the recomputation is benign.
    public List<TypeLiteral<?>> getParameterTypes() {
        if (parameterTypes == null) {
            parameterTypes = getDeclaringType().getParameterTypes(getMember());
        }
        return parameterTypes;
    }
}
public class class_name { public List<TypeLiteral<?>> getParameterTypes() { if (parameterTypes == null) { parameterTypes = getDeclaringType().getParameterTypes(getMember()); // depends on control dependency: [if], data = [none] } return parameterTypes; } }
public class class_name {
    /**
     * Returns the existing {@code contract-mapping} child element, wrapping the
     * first one found; creates a new mapping element when none exists yet.
     */
    public FacesConfigApplicationResourceLibraryContractsContractMappingType<FacesConfigApplicationResourceLibraryContractsType<T>> getOrCreateContractMapping() {
        final List<Node> existing = childNode.get("contract-mapping");
        if (existing == null || existing.isEmpty()) {
            return createContractMapping();
        }
        return new FacesConfigApplicationResourceLibraryContractsContractMappingTypeImpl<FacesConfigApplicationResourceLibraryContractsType<T>>(this, "contract-mapping", childNode, existing.get(0));
    }
}
public class class_name { public FacesConfigApplicationResourceLibraryContractsContractMappingType<FacesConfigApplicationResourceLibraryContractsType<T>> getOrCreateContractMapping() { List<Node> nodeList = childNode.get("contract-mapping"); if (nodeList != null && nodeList.size() > 0) { return new FacesConfigApplicationResourceLibraryContractsContractMappingTypeImpl<FacesConfigApplicationResourceLibraryContractsType<T>>(this, "contract-mapping", childNode, nodeList.get(0)); // depends on control dependency: [if], data = [none] } return createContractMapping(); } }
public class class_name {
    /**
     * Looks up the value stored under {@code key} and unwraps it, or returns
     * null when no entry exists.  The cast to T happens inside ValueObject.value().
     */
    @Override
    public <T> T getValue(String key) {
        final ValueObject holder = get(key);
        return holder == null ? null : holder.value();
    }
}
public class class_name { @Override public <T> T getValue(String key) { ValueObject vo = get(key); if (null == vo) { return null; // depends on control dependency: [if], data = [none] } return vo.value(); } }
public class class_name {
    // Registers a directory scan on this collection.  Files are included when
    // they match regexpPatternString (takes precedence) or, failing that, the
    // given filename suffix; olderS optionally restricts the scan to files at
    // least that old.  Subdirectories are scanned unless subdirsS is the literal
    // string "false" (case-insensitive).
    // Side effect: collectionName is rebuilt from the scan parameters.
    public void addDirectoryScan(String dirName, String suffix, String regexpPatternString,
            String subdirsS, String olderS, Object auxInfo) {
        CompositeMFileFilter filters = new CompositeMFileFilter();
        // Regex wins over suffix when both are supplied.
        if (null != regexpPatternString)
            filters.addIncludeFilter(new RegExpMatchOnName(regexpPatternString));
        else if (suffix != null)
            filters.addIncludeFilter(new WildcardMatchOnPath("*" + suffix + "$"));
        if (olderS != null) {
            try {
                TimeDuration tu = new TimeDuration(olderS);
                // TimeDuration yields seconds; LastModifiedLimit wants milliseconds.
                filters.addAndFilter(new LastModifiedLimit((long) (1000 * tu.getValueInSeconds())));
            } catch (Exception e) {
                // Bad duration string: log and scan without the age restriction.
                logger.error(collectionName + ": Invalid time unit for olderThan = {}", olderS);
            }
        }
        boolean wantSubdirs = true;
        if ((subdirsS != null) && subdirsS.equalsIgnoreCase("false"))
            wantSubdirs = false;
        CollectionConfig mc = new CollectionConfig(dirName, dirName, wantSubdirs, filters, auxInfo);
        // create name
        StringBuilder sb = new StringBuilder(dirName);
        if (wantSubdirs)
            sb.append("**/");
        if (null != regexpPatternString)
            sb.append(regexpPatternString);
        else if (suffix != null)
            sb.append(suffix);
        else
            sb.append("noFilter");
        collectionName = sb.toString();
        scanList.add(mc);
    }
}
public class class_name {
    // Registers a directory scan on this collection.  Files are included when
    // they match regexpPatternString (takes precedence) or, failing that, the
    // given filename suffix; olderS optionally restricts the scan to files at
    // least that old.  Subdirectories are scanned unless subdirsS is the literal
    // string "false" (case-insensitive).
    // Side effect: collectionName is rebuilt from the scan parameters.
    public void addDirectoryScan(String dirName, String suffix, String regexpPatternString,
            String subdirsS, String olderS, Object auxInfo) {
        CompositeMFileFilter filters = new CompositeMFileFilter();
        // Regex wins over suffix when both are supplied.
        if (null != regexpPatternString)
            filters.addIncludeFilter(new RegExpMatchOnName(regexpPatternString));
        else if (suffix != null)
            filters.addIncludeFilter(new WildcardMatchOnPath("*" + suffix + "$"));
        if (olderS != null) {
            try {
                TimeDuration tu = new TimeDuration(olderS);
                // TimeDuration yields seconds; LastModifiedLimit wants milliseconds.
                filters.addAndFilter(new LastModifiedLimit((long) (1000 * tu.getValueInSeconds())));
            } catch (Exception e) {
                // Bad duration string: log and scan without the age restriction.
                logger.error(collectionName + ": Invalid time unit for olderThan = {}", olderS);
            }
        }
        boolean wantSubdirs = true;
        if ((subdirsS != null) && subdirsS.equalsIgnoreCase("false"))
            wantSubdirs = false;
        CollectionConfig mc = new CollectionConfig(dirName, dirName, wantSubdirs, filters, auxInfo);
        // create name
        StringBuilder sb = new StringBuilder(dirName);
        if (wantSubdirs)
            sb.append("**/");
        if (null != regexpPatternString)
            sb.append(regexpPatternString);
        else if (suffix != null)
            sb.append(suffix);
        else
            sb.append("noFilter");
        collectionName = sb.toString();
        scanList.add(mc);
    }
}
public class class_name {
    // Builds an IRing from the given list of atoms (assumed to already be in
    // ring order), copying the connecting bonds from mol, including the closing
    // bond between the last and the first atom.
    // NOTE(review): the parameter uses the raw List type - presumably
    // List<IAtom>; confirm before tightening.  A missing bond is only logged,
    // so the returned ring may be incomplete.
    private IRing prepareRing(List vec, IAtomContainer mol) {
        // add the atoms in vec to the new ring
        int atomCount = vec.size();
        IRing ring = mol.getBuilder().newInstance(IRing.class, atomCount);
        IAtom[] atoms = new IAtom[atomCount];
        vec.toArray(atoms);
        ring.setAtoms(atoms);
        // add the bonds in mol to the new ring
        try {
            IBond b;
            for (int i = 0; i < atomCount - 1; i++) {
                b = mol.getBond(atoms[i], atoms[i + 1]);
                if (b != null) {
                    ring.addBond(b);
                } else {
                    logger.error("This should not happen.");
                }
            }
            // Closing bond of the cycle: last atom back to the first.
            b = mol.getBond(atoms[0], atoms[atomCount - 1]);
            if (b != null) {
                ring.addBond(b);
            } else {
                logger.error("This should not happen either.");
            }
        } catch (Exception exc) {
            logger.debug(exc);
        }
        logger.debug("found Ring ", ring);
        return ring;
    }
}
public class class_name {
    // Builds an IRing from the given list of atoms (assumed to already be in
    // ring order), copying the connecting bonds from mol, including the closing
    // bond between the last and the first atom.
    // NOTE(review): the parameter uses the raw List type - presumably
    // List<IAtom>; confirm before tightening.  A missing bond is only logged,
    // so the returned ring may be incomplete.
    private IRing prepareRing(List vec, IAtomContainer mol) {
        // add the atoms in vec to the new ring
        int atomCount = vec.size();
        IRing ring = mol.getBuilder().newInstance(IRing.class, atomCount);
        IAtom[] atoms = new IAtom[atomCount];
        vec.toArray(atoms);
        ring.setAtoms(atoms);
        // add the bonds in mol to the new ring
        try {
            IBond b;
            for (int i = 0; i < atomCount - 1; i++) {
                b = mol.getBond(atoms[i], atoms[i + 1]);
                if (b != null) {
                    ring.addBond(b);
                } else {
                    logger.error("This should not happen.");
                }
            }
            // Closing bond of the cycle: last atom back to the first.
            b = mol.getBond(atoms[0], atoms[atomCount - 1]);
            if (b != null) {
                ring.addBond(b);
            } else {
                logger.error("This should not happen either.");
            }
        } catch (Exception exc) {
            logger.debug(exc);
        }
        logger.debug("found Ring ", ring);
        return ring;
    }
}
public class class_name {
    /**
     * Recursively copies the contents of {@code appConfigDir} into
     * {@code appBuildDir}, creating the target directory tree as needed.
     * Does nothing when the source directory cannot be listed.
     */
    private void copyAppConfigFiles(File appBuildDir, File appConfigDir) throws IOException {
        final File[] entries = appConfigDir.listFiles();
        if (entries == null) {
            // Source missing, not a directory, or unreadable: nothing to copy.
            return;
        }
        appBuildDir.mkdirs();
        for (final File entry : entries) {
            final File target = new File(appBuildDir, entry.getName());
            if (entry.isDirectory()) {
                copyAppConfigFiles(target, entry);
            } else {
                Files.copy(entry, target);
            }
        }
    }
}
public class class_name { private void copyAppConfigFiles(File appBuildDir, File appConfigDir) throws IOException { File[] files = appConfigDir.listFiles(); if (files != null) { appBuildDir.mkdirs(); for (File file : files) { File outFile = new File(appBuildDir, file.getName()); if (file.isDirectory()) { copyAppConfigFiles(outFile, file); // depends on control dependency: [if], data = [none] } else { Files.copy(file, outFile); // depends on control dependency: [if], data = [none] } } } } }
public class class_name {
    // Converts this address to a java.net.Inet6Address.  Zoned (scoped)
    // addresses take a separate path that converts from the raw bytes and
    // memoizes the result in the value cache; unscoped addresses defer to the
    // superclass conversion.
    // NOTE(review): the cache check-then-set is unsynchronized; presumably a
    // racy duplicate conversion is benign - confirm.
    @Override
    public Inet6Address toInetAddress() {
        if(hasZone()) {
            Inet6Address result;
            if(hasNoValueCache() || (result = valueCache.inetAddress) == null) {
                valueCache.inetAddress = result = (Inet6Address) toInetAddressImpl(getBytes());
            }
            return result;
        }
        return (Inet6Address) super.toInetAddress();
    }
}
public class class_name {
    // Converts this address to a java.net.Inet6Address.  Zoned (scoped)
    // addresses take a separate path that converts from the raw bytes and
    // memoizes the result in the value cache; unscoped addresses defer to the
    // superclass conversion.
    // NOTE(review): the cache check-then-set is unsynchronized; presumably a
    // racy duplicate conversion is benign - confirm.
    @Override
    public Inet6Address toInetAddress() {
        if(hasZone()) {
            Inet6Address result;
            if(hasNoValueCache() || (result = valueCache.inetAddress) == null) {
                valueCache.inetAddress = result = (Inet6Address) toInetAddressImpl(getBytes());
            }
            return result;
        }
        return (Inet6Address) super.toInetAddress();
    }
}
public class class_name {
    /**
     * Collects the value of every parsed cookie whose name equals {@code name}
     * into {@code list}.
     *
     * @param name cookie name to match; no-op when null
     * @param list receives the matching cookie values, in parse order
     * @return the number of values added to {@code list}
     */
    public int getAllCookieValues(String name, List<String> list) {
        int added = 0;
        if (0 < this.parsedList.size() && null != name) {
            for (HttpCookie cookie : this.parsedList) {
                if (name.equals(cookie.getName())) {
                    list.add(cookie.getValue());
                    // Bug fix: the counter was never incremented, so the method
                    // always returned 0 even after appending values.
                    added++;
                }
            }
        }
        return added;
    }
}
public class class_name { public int getAllCookieValues(String name, List<String> list) { int added = 0; if (0 < this.parsedList.size() && null != name) { for (HttpCookie cookie : this.parsedList) { if (name.equals(cookie.getName())) { list.add(cookie.getValue()); // depends on control dependency: [if], data = [none] } } } return added; } }
public class class_name {
    /**
     * Adds every non-null field of this updater as a POST parameter on the
     * given request.  Null fields are skipped so the server keeps its current
     * values for them.  (The 16 copy-pasted null-check blocks were collapsed
     * into one helper; String fields are unchanged since String.toString()
     * returns the string itself.)
     */
    private void addPostParams(final Request request) {
        addIfSet(request, "UniqueName", uniqueName);
        addIfSet(request, "CallbackMethod", callbackMethod);
        addIfSet(request, "CallbackUrl", callbackUrl);
        addIfSet(request, "FriendlyName", friendlyName);
        addIfSet(request, "RatePlan", ratePlan);
        addIfSet(request, "Status", status);
        addIfSet(request, "CommandsCallbackMethod", commandsCallbackMethod);
        addIfSet(request, "CommandsCallbackUrl", commandsCallbackUrl);
        addIfSet(request, "SmsFallbackMethod", smsFallbackMethod);
        addIfSet(request, "SmsFallbackUrl", smsFallbackUrl);
        addIfSet(request, "SmsMethod", smsMethod);
        addIfSet(request, "SmsUrl", smsUrl);
        addIfSet(request, "VoiceFallbackMethod", voiceFallbackMethod);
        addIfSet(request, "VoiceFallbackUrl", voiceFallbackUrl);
        addIfSet(request, "VoiceMethod", voiceMethod);
        addIfSet(request, "VoiceUrl", voiceUrl);
    }

    /** Adds {@code name=value.toString()} to the request when value is non-null. */
    private static void addIfSet(final Request request, final String name, final Object value) {
        if (value != null) {
            request.addPostParam(name, value.toString());
        }
    }
}
public class class_name { private void addPostParams(final Request request) { if (uniqueName != null) { request.addPostParam("UniqueName", uniqueName); // depends on control dependency: [if], data = [none] } if (callbackMethod != null) { request.addPostParam("CallbackMethod", callbackMethod); // depends on control dependency: [if], data = [none] } if (callbackUrl != null) { request.addPostParam("CallbackUrl", callbackUrl.toString()); // depends on control dependency: [if], data = [none] } if (friendlyName != null) { request.addPostParam("FriendlyName", friendlyName); // depends on control dependency: [if], data = [none] } if (ratePlan != null) { request.addPostParam("RatePlan", ratePlan.toString()); // depends on control dependency: [if], data = [none] } if (status != null) { request.addPostParam("Status", status); // depends on control dependency: [if], data = [none] } if (commandsCallbackMethod != null) { request.addPostParam("CommandsCallbackMethod", commandsCallbackMethod.toString()); // depends on control dependency: [if], data = [none] } if (commandsCallbackUrl != null) { request.addPostParam("CommandsCallbackUrl", commandsCallbackUrl.toString()); // depends on control dependency: [if], data = [none] } if (smsFallbackMethod != null) { request.addPostParam("SmsFallbackMethod", smsFallbackMethod.toString()); // depends on control dependency: [if], data = [none] } if (smsFallbackUrl != null) { request.addPostParam("SmsFallbackUrl", smsFallbackUrl.toString()); // depends on control dependency: [if], data = [none] } if (smsMethod != null) { request.addPostParam("SmsMethod", smsMethod.toString()); // depends on control dependency: [if], data = [none] } if (smsUrl != null) { request.addPostParam("SmsUrl", smsUrl.toString()); // depends on control dependency: [if], data = [none] } if (voiceFallbackMethod != null) { request.addPostParam("VoiceFallbackMethod", voiceFallbackMethod.toString()); // depends on control dependency: [if], data = [none] } if (voiceFallbackUrl 
!= null) { request.addPostParam("VoiceFallbackUrl", voiceFallbackUrl.toString()); // depends on control dependency: [if], data = [none] } if (voiceMethod != null) { request.addPostParam("VoiceMethod", voiceMethod.toString()); // depends on control dependency: [if], data = [none] } if (voiceUrl != null) { request.addPostParam("VoiceUrl", voiceUrl.toString()); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    // Replaces a container-page element widget with the element identified by
    // elementId via an async RPC call.  On success the returned element data is
    // cached, the DOM widget is swapped in place, the edit buttons are reset,
    // the element is added to the recent list, and the page is marked changed.
    // Outside of group-container editing the page lock is acquired first and
    // the RPC only runs when the lock is granted.
    public void replaceElement(final CmsContainerPageElementPanel elementWidget, final String elementId) {
        final CmsRpcAction<CmsContainerElementData> action = new CmsRpcAction<CmsContainerElementData>() {
            @Override
            public void execute() {
                // Show a wait indicator if the call takes longer than 500ms.
                start(500, true);
                getContainerpageService().replaceElement(
                    getData().getRpcContext(),
                    getData().getDetailId(),
                    getRequestParams(),
                    elementWidget.getId(),
                    elementId,
                    getPageState(),
                    getLocale(),
                    this);
            }

            @Override
            protected void onResponse(CmsContainerElementData result) {
                stop(false);
                // A null result means the server could not supply the element;
                // nothing is changed in that case.
                if (result != null) {
                    // cache the loaded element
                    m_elements.put(result.getClientId(), result);
                    try {
                        replaceContainerElement(elementWidget, result);
                        resetEditButtons();
                        addToRecentList(result.getClientId(), null);
                        setPageChanged(new Runnable() {
                            public void run() {
                                // nothing to do
                            }
                        });
                    } catch (Exception e) {
                        // should never happen
                        CmsDebugLog.getInstance().printLine(e.getLocalizedMessage());
                    }
                }
            }
        };
        if (!isGroupcontainerEditing()) {
            // Acquire the page lock first; run the replacement only on success.
            lockContainerpage(new I_CmsSimpleCallback<Boolean>() {
                public void execute(Boolean arg) {
                    if (arg.booleanValue()) {
                        action.execute();
                    }
                }
            });
        } else {
            action.execute();
        }
    }
}
public class class_name {
    // Replaces a container-page element widget with the element identified by
    // elementId via an async RPC call.  On success the returned element data is
    // cached, the DOM widget is swapped in place, the edit buttons are reset,
    // the element is added to the recent list, and the page is marked changed.
    // Outside of group-container editing the page lock is acquired first and
    // the RPC only runs when the lock is granted.
    public void replaceElement(final CmsContainerPageElementPanel elementWidget, final String elementId) {
        final CmsRpcAction<CmsContainerElementData> action = new CmsRpcAction<CmsContainerElementData>() {
            @Override
            public void execute() {
                // Show a wait indicator if the call takes longer than 500ms.
                start(500, true);
                getContainerpageService().replaceElement(
                    getData().getRpcContext(),
                    getData().getDetailId(),
                    getRequestParams(),
                    elementWidget.getId(),
                    elementId,
                    getPageState(),
                    getLocale(),
                    this);
            }

            @Override
            protected void onResponse(CmsContainerElementData result) {
                stop(false);
                // A null result means the server could not supply the element;
                // nothing is changed in that case.
                if (result != null) {
                    // cache the loaded element
                    m_elements.put(result.getClientId(), result);
                    try {
                        replaceContainerElement(elementWidget, result);
                        resetEditButtons();
                        addToRecentList(result.getClientId(), null);
                        setPageChanged(new Runnable() {
                            public void run() {
                                // nothing to do
                            }
                        });
                    } catch (Exception e) {
                        // should never happen
                        CmsDebugLog.getInstance().printLine(e.getLocalizedMessage());
                    }
                }
            }
        };
        if (!isGroupcontainerEditing()) {
            // Acquire the page lock first; run the replacement only on success.
            lockContainerpage(new I_CmsSimpleCallback<Boolean>() {
                public void execute(Boolean arg) {
                    if (arg.booleanValue()) {
                        action.execute();
                    }
                }
            });
        } else {
            action.execute();
        }
    }
}
public class class_name {
    /**
     * Searches the persist directory for a previously persisted copy of
     * {@code file}, identified by a GUID derived from the state and copy
     * entity, and returns the first candidate accepted by {@code filter}.
     * Absent when no persist dir exists, the GUID path is missing, or no
     * candidate passes the filter.
     */
    public Optional<FileStatus> findPersistedFile(State state, CopyEntity file, Predicate<FileStatus> filter) throws IOException {
        if (!this.persistDir.isPresent() || !this.fs.exists(this.persistDir.get())) {
            return Optional.absent();
        }
        final Path guidPath = new Path(this.persistDir.get(), computeGuid(state, file));
        final FileStatus[] candidates;
        try {
            candidates = this.fs.listStatus(guidPath);
        } catch (FileNotFoundException e) {
            // Nothing was ever persisted under this GUID.
            return Optional.absent();
        }
        for (final FileStatus candidate : candidates) {
            if (filter.apply(candidate)) {
                return Optional.of(candidate);
            }
        }
        return Optional.absent();
    }
}
public class class_name { public Optional<FileStatus> findPersistedFile(State state, CopyEntity file, Predicate<FileStatus> filter) throws IOException { if (!this.persistDir.isPresent() || !this.fs.exists(this.persistDir.get())) { return Optional.absent(); } Path guidPath = new Path(this.persistDir.get(), computeGuid(state, file)); FileStatus[] statuses; try { statuses = this.fs.listStatus(guidPath); } catch (FileNotFoundException e) { return Optional.absent(); } for (FileStatus fileStatus : statuses) { if (filter.apply(fileStatus)) { return Optional.of(fileStatus); // depends on control dependency: [if], data = [none] } } return Optional.absent(); } }
public class class_name {
    /**
     * Notifies every registered listener that a peer joined the given space.
     * The listener list is snapshotted under its lock and the callbacks are
     * invoked outside it, so listeners may (de)register during the callback.
     */
    protected void firePeerConnected(URI peerURI, SpaceID space) {
        final NetworkServiceListener[] snapshot;
        synchronized (this.listeners) {
            snapshot = this.listeners.toArray(new NetworkServiceListener[this.listeners.size()]);
        }
        for (final NetworkServiceListener listener : snapshot) {
            listener.peerConnected(peerURI, space);
        }
    }
}
public class class_name { protected void firePeerConnected(URI peerURI, SpaceID space) { final NetworkServiceListener[] ilisteners; synchronized (this.listeners) { ilisteners = new NetworkServiceListener[this.listeners.size()]; this.listeners.toArray(ilisteners); } for (final NetworkServiceListener listener : ilisteners) { listener.peerConnected(peerURI, space); // depends on control dependency: [for], data = [listener] } } }
public class class_name {
    /**
     * Lists the files directly inside {@code file} whose names match
     * {@code stemRegex}.  Never returns null: a null/missing/non-directory
     * input, or an I/O failure while listing, yields an empty array.
     */
    public static File[] filesInFolderMatchingStemRegex(File file, final String stemRegex) {
        if (file == null || !file.exists() || !file.isDirectory()) {
            return new File[0];
        }
        File[] matches = file.listFiles(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.matches(stemRegex);
            }
        });
        // Bug fix: File.listFiles() returns null on an I/O error even for an
        // existing directory; keep the never-null contract of the other branches.
        return matches != null ? matches : new File[0];
    }
}
public class class_name {
    /**
     * Lists the files directly inside {@code file} whose names match
     * {@code stemRegex}.  Never returns null: a null/missing/non-directory
     * input, or an I/O failure while listing, yields an empty array.
     */
    public static File[] filesInFolderMatchingStemRegex(File file, final String stemRegex) {
        if (file == null || !file.exists() || !file.isDirectory()) {
            return new File[0];
        }
        File[] matches = file.listFiles(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.matches(stemRegex);
            }
        });
        // Bug fix: File.listFiles() returns null on an I/O error even for an
        // existing directory; keep the never-null contract of the other branches.
        return matches != null ? matches : new File[0];
    }
}
public class class_name {
    // Re-syncs menu item visibility with the current group before the menu is
    // shown: items in the current group (or in group 0, i.e. ungrouped) stay
    // visible, all others are hidden.  Skips the work when neither the group
    // nor the menu data changed.
    // NOTE(review): mMenuDataChanged is cleared here but mGroupChanged is not -
    // presumably it is reset elsewhere (e.g. by the global-layout listener);
    // confirm, otherwise this re-runs on every call after a group change.
    public void onPrepareMenu(){
        if(mGroupChanged || mMenuDataChanged){
            // Re-register the layout listener so post-layout work runs after
            // the visibility changes take effect.
            mToolbar.getViewTreeObserver().addOnGlobalLayoutListener(mOnGlobalLayoutListener);
            Menu menu = mToolbar.getMenu();
            for(int i = 0, count = menu.size(); i < count; i++){
                MenuItem item = menu.getItem(i);
                item.setVisible(item.getGroupId() == mCurrentGroup || item.getGroupId() == 0);
            }
            mMenuDataChanged = false;
        }
    }
}
public class class_name {
    // Re-syncs menu item visibility with the current group before the menu is
    // shown: items in the current group (or in group 0, i.e. ungrouped) stay
    // visible, all others are hidden.  Skips the work when neither the group
    // nor the menu data changed.
    // NOTE(review): mMenuDataChanged is cleared here but mGroupChanged is not -
    // presumably it is reset elsewhere (e.g. by the global-layout listener);
    // confirm, otherwise this re-runs on every call after a group change.
    public void onPrepareMenu(){
        if(mGroupChanged || mMenuDataChanged){
            // Re-register the layout listener so post-layout work runs after
            // the visibility changes take effect.
            mToolbar.getViewTreeObserver().addOnGlobalLayoutListener(mOnGlobalLayoutListener);
            Menu menu = mToolbar.getMenu();
            for(int i = 0, count = menu.size(); i < count; i++){
                MenuItem item = menu.getItem(i);
                item.setVisible(item.getGroupId() == mCurrentGroup || item.getGroupId() == 0);
            }
            mMenuDataChanged = false;
        }
    }
}
public class class_name {
    /**
     * Forwards a member heartbeat to the quorum function, but only when the
     * configured function was detected to implement HeartbeatAware.
     */
    void onHeartbeat(Member member, long timestamp) {
        if (heartbeatAwareQuorumFunction) {
            ((HeartbeatAware) quorumFunction).onHeartbeat(member, timestamp);
        }
    }
}
public class class_name { void onHeartbeat(Member member, long timestamp) { if (!heartbeatAwareQuorumFunction) { return; // depends on control dependency: [if], data = [none] } ((HeartbeatAware) quorumFunction).onHeartbeat(member, timestamp); } }
public class class_name {
    /**
     * Lazily resolves the ContentPermissionManager from the container on first
     * use and caches it for subsequent calls.
     */
    public ContentPermissionManager getContentPermissionManager() {
        if (contentPermissionManager == null) {
            contentPermissionManager =
                    (ContentPermissionManager) ContainerManager.getComponent("contentPermissionManager");
        }
        return contentPermissionManager;
    }
}
public class class_name { public ContentPermissionManager getContentPermissionManager() { if (contentPermissionManager != null) { return contentPermissionManager; // depends on control dependency: [if], data = [none] } contentPermissionManager = (ContentPermissionManager) ContainerManager.getComponent("contentPermissionManager"); return contentPermissionManager; } }
public class class_name { private boolean isColumnType(List<String> columnTypes, String columnName, List<String> tableNames, boolean debugPrint) { if (debugPrint) { System.out.println(" In NonVoltDBBackend.isColumnType:"); System.out.println(" columnTypes: " + columnTypes); System.out.println(" columnName : " + columnName); System.out.println(" tableNames : " + tableNames); } if (tableNames == null || tableNames.size() == 0) { tableNames = Arrays.asList((String)null); if (debugPrint) { System.out.println(" tableNames2: " + tableNames); } } for (String tn : tableNames) { // Lower-case table and column names are required for PostgreSQL; // we might need to alter this if we use another comparison // database (besides HSQL) someday String tableName = (tn == null) ? tn : tn.trim().toLowerCase(); if (debugPrint) { System.out.println(" tableName : " + tableName); } try { ResultSet rs = dbconn.getMetaData().getColumns(null, null, tableName, columnName.trim().toLowerCase()); while (rs.next()) { String columnType = getVoltColumnTypeName(rs.getString(6)); if (debugPrint) { System.out.println(" tableName : " + rs.getString(3)); System.out.println(" columnName : " + rs.getString(4)); System.out.println(" columnType : " + columnType); } if (columnTypes.contains(columnType)) { if (debugPrint) { System.out.println(" returning : true"); } return true; } } } catch (SQLException e) { printCaughtException("In NonVoltDBBackend.isColumnType, with tableName "+tableName+", columnName " + columnName+", columnTypes "+columnTypes+", caught SQLException:\n " + e); } } if (debugPrint) { System.out.println(" returning : false"); } return false; } }
public class class_name { private boolean isColumnType(List<String> columnTypes, String columnName, List<String> tableNames, boolean debugPrint) { if (debugPrint) { System.out.println(" In NonVoltDBBackend.isColumnType:"); // depends on control dependency: [if], data = [none] System.out.println(" columnTypes: " + columnTypes); // depends on control dependency: [if], data = [none] System.out.println(" columnName : " + columnName); // depends on control dependency: [if], data = [none] System.out.println(" tableNames : " + tableNames); // depends on control dependency: [if], data = [none] } if (tableNames == null || tableNames.size() == 0) { tableNames = Arrays.asList((String)null); // depends on control dependency: [if], data = [none] if (debugPrint) { System.out.println(" tableNames2: " + tableNames); // depends on control dependency: [if], data = [none] } } for (String tn : tableNames) { // Lower-case table and column names are required for PostgreSQL; // we might need to alter this if we use another comparison // database (besides HSQL) someday String tableName = (tn == null) ? 
tn : tn.trim().toLowerCase(); if (debugPrint) { System.out.println(" tableName : " + tableName); // depends on control dependency: [if], data = [none] } try { ResultSet rs = dbconn.getMetaData().getColumns(null, null, tableName, columnName.trim().toLowerCase()); while (rs.next()) { String columnType = getVoltColumnTypeName(rs.getString(6)); if (debugPrint) { System.out.println(" tableName : " + rs.getString(3)); // depends on control dependency: [if], data = [none] System.out.println(" columnName : " + rs.getString(4)); // depends on control dependency: [if], data = [none] System.out.println(" columnType : " + columnType); // depends on control dependency: [if], data = [none] } if (columnTypes.contains(columnType)) { if (debugPrint) { System.out.println(" returning : true"); // depends on control dependency: [if], data = [none] } return true; // depends on control dependency: [if], data = [none] } } } catch (SQLException e) { printCaughtException("In NonVoltDBBackend.isColumnType, with tableName "+tableName+", columnName " + columnName+", columnTypes "+columnTypes+", caught SQLException:\n " + e); } // depends on control dependency: [catch], data = [none] } if (debugPrint) { System.out.println(" returning : false"); // depends on control dependency: [if], data = [none] } return false; } }
public class class_name {
    /**
     * Produces a random lower-case hex identifier from nextBytes(), optionally
     * inserting a hyphen before every 4th byte (after the first group).
     */
    public static String next(boolean withHyphen) {
        final byte[] bytes = nextBytes();
        final StringBuilder id = new StringBuilder(withHyphen ? 26 : 24);
        for (int i = 0; i < bytes.length; i++) {
            if (withHyphen && i % 4 == 0 && i != 0) {
                id.append("-");
            }
            final int unsigned = bytes[i] & 0xff;
            // Keep every byte two hex digits wide.
            if (unsigned < 16) {
                id.append('0');
            }
            id.append(Integer.toHexString(unsigned));
        }
        return id.toString();
    }
}
public class class_name { public static String next(boolean withHyphen) { byte[] array = nextBytes(); final StringBuilder buf = new StringBuilder(withHyphen ? 26 : 24); int t; for (int i = 0; i < array.length; i++) { if (withHyphen && i % 4 == 0 && i != 0) { buf.append("-"); // depends on control dependency: [if], data = [none] } t = array[i] & 0xff; // depends on control dependency: [for], data = [i] if (t < 16) { buf.append('0'); // depends on control dependency: [if], data = [none] } buf.append(Integer.toHexString(t)); // depends on control dependency: [for], data = [none] } return buf.toString(); } }
public class class_name {
    /**
     * Parses the body of {@code res} as JSON and, when {@code expectedNodeType}
     * is non-null, validates the root node's type against it.
     *
     * @param res the aggregated HTTP response to parse
     * @param expectedNodeType expected root node type, or {@code null} to skip the check
     * @return the parsed JSON tree
     * @throws CentralDogmaException if the body is not valid JSON or the root
     *         node type does not match
     */
    private static JsonNode toJson(AggregatedHttpMessage res, @Nullable JsonNodeType expectedNodeType) {
        final String body = toString(res);
        JsonNode parsed;
        try {
            parsed = Jackson.readTree(body);
        } catch (JsonParseException e) {
            throw new CentralDogmaException("failed to parse the response JSON", e);
        }
        // Guard-style: accept immediately when no type check was requested or it matches.
        if (expectedNodeType == null || parsed.getNodeType() == expectedNodeType) {
            return parsed;
        }
        throw new CentralDogmaException(
                "invalid server response; expected: " + expectedNodeType +
                ", actual: " + parsed.getNodeType() + ", content: " + body);
    }
}
/**
 * Parses an HTTP response body as JSON, optionally validating the root node
 * type. The "depends on control dependency" markers are generated dataflow
 * annotations; left intact.
 */
public class class_name {
    private static JsonNode toJson(AggregatedHttpMessage res, @Nullable JsonNodeType expectedNodeType) {
        final String content = toString(res);
        final JsonNode node;
        try {
            node = Jackson.readTree(content); // depends on control dependency: [try], data = [none]
        } catch (JsonParseException e) {
            throw new CentralDogmaException("failed to parse the response JSON", e);
        } // depends on control dependency: [catch], data = [none]
        // Type check is skipped entirely when expectedNodeType is null.
        if (expectedNodeType != null && node.getNodeType() != expectedNodeType) {
            throw new CentralDogmaException(
                    "invalid server response; expected: " + expectedNodeType +
                    ", actual: " + node.getNodeType() + ", content: " + content);
        }
        return node;
    }
}
public class class_name {
    /**
     * Builds a predicate that accepts a {@code CtClass} when its name,
     * interpreted as a path, matches any of the given matchers.
     *
     * @param pathMatchers the matchers to test against
     * @return predicate returning {@code true} on the first matching matcher
     */
    static Predicate<CtClass> createMatchingPredicate(final Set<PathMatcher> pathMatchers) {
        return ctClass -> pathMatchers.stream()
                .anyMatch(matcher -> matcher.matches(Paths.get(ctClass.getName())));
    }
}
/**
 * Builds a predicate matching a {@code CtClass} name (as a path) against a
 * set of {@code PathMatcher}s. The "depends on control dependency" marker is
 * a generated dataflow annotation; left intact.
 */
public class class_name {
    static Predicate<CtClass> createMatchingPredicate(final Set<PathMatcher> pathMatchers) {
        return ctClass -> {
            for (final PathMatcher pathMatcher : pathMatchers) {
                // Short-circuits on the first matcher that accepts the class name.
                if (pathMatcher.matches(Paths.get(ctClass.getName()))) {
                    return true; // depends on control dependency: [if], data = [none]
                }
            }
            return false;
        };
    }
}
public class class_name {
    /**
     * Evaluates this log-prior penalty at the weight vector {@code x} and
     * accumulates the penalty's gradient into {@code grad}. Note that
     * {@code grad} is updated in place with {@code +=}, not overwritten, so
     * contributions from other terms are preserved.
     *
     * @param x    current weight vector
     * @param grad gradient accumulator; this prior's gradient is added to it
     * @return the penalty value for {@code x} (0.0 for the NULL prior)
     * @throws RuntimeException if {@code type} is not a handled prior type
     */
    public double compute(double[] x, double[] grad) {
        double val = 0.0;
        switch (type) {
            case NULL:
                // No regularization: zero penalty, gradient untouched.
                return val;
            case QUADRATIC:
                // Gaussian prior: sum_i x_i^2 / (2 sigma^2).
                for (int i = 0; i < x.length; i++) {
                    val += x[i] * x[i] / 2.0 / sigmaSq;
                    grad[i] += x[i] / sigmaSq;
                }
                return val;
            case HUBER:
                // P.J. Huber. 1973. Robust regression: Asymptotics, conjectures and
                // Monte Carlo. The Annals of Statistics 1: 799-821.
                // See also:
                // P. J. Huber. Robust Statistics. John Wiley & Sons, New York, 1981.
                // Quadratic within [-epsilon, epsilon), linear outside.
                for (int i = 0; i < x.length; i++) {
                    if (x[i] < -epsilon) {
                        val += (-x[i] - epsilon / 2.0) / sigmaSq;
                        grad[i] += -1.0 / sigmaSq;
                    } else if (x[i] < epsilon) {
                        val += x[i] * x[i] / 2.0 / epsilon / sigmaSq;
                        grad[i] += x[i] / epsilon / sigmaSq;
                    } else {
                        val += (x[i] - epsilon / 2.0) / sigmaSq;
                        grad[i] += 1.0 / sigmaSq;
                    }
                }
                return val;
            case QUARTIC:
                // Fourth-power penalty: sum_i x_i^4 / (2 sigmaQu).
                for (int i = 0; i < x.length; i++) {
                    val += (x[i] * x[i]) * (x[i] * x[i]) / 2.0 / sigmaQu;
                    grad[i] += x[i] / sigmaQu;
                }
                return val;
            case ADAPT:
                // Delegates to otherPrior evaluated at (presumably) x - means elementwise.
                double[] newX = ArrayMath.pairwiseSubtract(x, means);
                val += otherPrior.compute(newX, grad);
                return val;
            case COSH:
                double norm = ArrayMath.norm_1(x) / sigmaSq;
                double d;
                if (norm > 30.0) {
                    // For large norm, log(cosh(norm)) ~= norm - log 2;
                    // this branch avoids overflow in Math.cosh.
                    val = norm - Math.log(2);
                    d = 1.0 / sigmaSq;
                } else {
                    val = Math.log(Math.cosh(norm));
                    d = (2 * (1 / (Math.exp(-2.0 * norm) + 1)) - 1.0) / sigmaSq;
                }
                for (int i = 0; i < x.length; i++) {
                    grad[i] += Math.signum(x[i]) * d;
                }
                return val;
            case MULTIPLE_QUADRATIC:
                // for (int i = 0; i < x.length; i++) {
                //   val += x[i] * x[i]* 1/2 * regularizationHyperparameters[i];
                //   grad[i] += x[i] * regularizationHyperparameters[i];
                // }
                // Per-dimension Gaussian prior with its own sigma^2 per weight.
                for (int i = 0; i < x.length; i++) {
                    val += x[i] * x[i] / 2.0 / sigmaSqM[i];
                    grad[i] += x[i] / sigmaSqM[i];
                }
                return val;
            default:
                throw new RuntimeException("LogPrior.valueAt is undefined for prior of type " + this);
        }
    }
}
/**
 * Log-prior penalty and in-place gradient accumulation, switched on the
 * prior {@code type}. Machine-annotated twin of the plain version; the
 * "depends on control dependency" markers are generated dataflow
 * annotations and are left intact.
 */
public class class_name {
    public double compute(double[] x, double[] grad) {
        double val = 0.0;
        switch (type) {
            case NULL:
                return val;
            case QUADRATIC:
                // Gaussian prior: sum_i x_i^2 / (2 sigma^2).
                for (int i = 0; i < x.length; i++) {
                    val += x[i] * x[i] / 2.0 / sigmaSq; // depends on control dependency: [for], data = [i]
                    grad[i] += x[i] / sigmaSq; // depends on control dependency: [for], data = [i]
                }
                return val;
            case HUBER:
                // P.J. Huber. 1973. Robust regression: Asymptotics, conjectures and
                // Monte Carlo. The Annals of Statistics 1: 799-821.
                // See also:
                // P. J. Huber. Robust Statistics. John Wiley & Sons, New York, 1981.
                for (int i = 0; i < x.length; i++) {
                    if (x[i] < -epsilon) {
                        val += (-x[i] - epsilon / 2.0) / sigmaSq; // depends on control dependency: [if], data = [none]
                        grad[i] += -1.0 / sigmaSq; // depends on control dependency: [if], data = [none]
                    } else if (x[i] < epsilon) {
                        val += x[i] * x[i] / 2.0 / epsilon / sigmaSq; // depends on control dependency: [if], data = [none]
                        grad[i] += x[i] / epsilon / sigmaSq; // depends on control dependency: [if], data = [none]
                    } else {
                        val += (x[i] - epsilon / 2.0) / sigmaSq; // depends on control dependency: [if], data = [(x[i]]
                        grad[i] += 1.0 / sigmaSq; // depends on control dependency: [if], data = [none]
                    }
                }
                return val;
            case QUARTIC:
                for (int i = 0; i < x.length; i++) {
                    val += (x[i] * x[i]) * (x[i] * x[i]) / 2.0 / sigmaQu; // depends on control dependency: [for], data = [i]
                    grad[i] += x[i] / sigmaQu; // depends on control dependency: [for], data = [i]
                }
                return val;
            case ADAPT:
                // Delegates to otherPrior evaluated at (presumably) x - means elementwise.
                double[] newX = ArrayMath.pairwiseSubtract(x, means);
                val += otherPrior.compute(newX, grad);
                return val;
            case COSH:
                double norm = ArrayMath.norm_1(x) / sigmaSq;
                double d;
                if (norm > 30.0) {
                    // For large norm, log(cosh(norm)) ~= norm - log 2; avoids cosh overflow.
                    val = norm - Math.log(2); // depends on control dependency: [if], data = [none]
                    d = 1.0 / sigmaSq; // depends on control dependency: [if], data = [none]
                } else {
                    val = Math.log(Math.cosh(norm)); // depends on control dependency: [if], data = [(norm]
                    d = (2 * (1 / (Math.exp(-2.0 * norm) + 1)) - 1.0) / sigmaSq; // depends on control dependency: [if], data = [none]
                }
                for (int i = 0; i < x.length; i++) {
                    grad[i] += Math.signum(x[i]) * d; // depends on control dependency: [for], data = [i]
                }
                return val;
            case MULTIPLE_QUADRATIC:
                // for (int i = 0; i < x.length; i++) {
                //   val += x[i] * x[i]* 1/2 * regularizationHyperparameters[i];
                //   grad[i] += x[i] * regularizationHyperparameters[i];
                // }
                // Per-dimension Gaussian prior with its own sigma^2 per weight.
                for (int i = 0; i < x.length; i++) {
                    val += x[i] * x[i] / 2.0 / sigmaSqM[i]; // depends on control dependency: [for], data = [i]
                    grad[i] += x[i] / sigmaSqM[i]; // depends on control dependency: [for], data = [i]
                }
                return val;
            default:
                throw new RuntimeException("LogPrior.valueAt is undefined for prior of type " + this);
        }
    }
}
public class class_name {
    /**
     * Concatenates {@code pattern} with itself {@code length} times.
     *
     * @param pattern the fragment to repeat; may be {@code null}
     * @param length  the number of repetitions; 0 yields the empty string
     * @return the repeated string, or {@code null} when {@code pattern} is null
     */
    public String createPatternString(String pattern, int length) {
        if (pattern == null) {
            return null;
        }
        // StringBuilder instead of the legacy synchronized StringBuffer;
        // presized to the exact result length to avoid intermediate growth.
        StringBuilder sb = new StringBuilder(pattern.length() * length);
        for (int i = 0; i < length; i++) {
            sb.append(pattern);
        }
        return sb.toString();
    }
}
/**
 * Repeats {@code pattern} {@code length} times (null pattern yields null).
 * The "depends on control dependency" markers are generated dataflow
 * annotations; left intact.
 */
public class class_name {
    public String createPatternString(String pattern, int length) {
        if (pattern == null) {
            return null; // depends on control dependency: [if], data = [none]
        }
        // Presize the buffer to the exact result length.
        int totalLength = pattern.length() * length;
        StringBuffer sb = new StringBuffer(totalLength);
        for (int i = 0; i < length; i++) {
            sb.append(pattern); // depends on control dependency: [for], data = [none]
        }
        return sb.toString();
    }
}
public class class_name { public Observable<ServiceResponse<KeyVerifyResult>> verifyWithServiceResponseAsync(String vaultBaseUrl, String keyName, String keyVersion, JsonWebKeySignatureAlgorithm algorithm, byte[] digest, byte[] signature) { if (vaultBaseUrl == null) { throw new IllegalArgumentException("Parameter vaultBaseUrl is required and cannot be null."); } if (keyName == null) { throw new IllegalArgumentException("Parameter keyName is required and cannot be null."); } if (keyVersion == null) { throw new IllegalArgumentException("Parameter keyVersion is required and cannot be null."); } if (this.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.apiVersion() is required and cannot be null."); } if (algorithm == null) { throw new IllegalArgumentException("Parameter algorithm is required and cannot be null."); } if (digest == null) { throw new IllegalArgumentException("Parameter digest is required and cannot be null."); } if (signature == null) { throw new IllegalArgumentException("Parameter signature is required and cannot be null."); } KeyVerifyParameters parameters = new KeyVerifyParameters(); parameters.withAlgorithm(algorithm); parameters.withDigest(digest); parameters.withSignature(signature); String parameterizedHost = Joiner.on(", ").join("{vaultBaseUrl}", vaultBaseUrl); return service.verify(keyName, keyVersion, this.apiVersion(), this.acceptLanguage(), parameters, parameterizedHost, this.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<KeyVerifyResult>>>() { @Override public Observable<ServiceResponse<KeyVerifyResult>> call(Response<ResponseBody> response) { try { ServiceResponse<KeyVerifyResult> clientResponse = verifyDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); } }
/**
 * Async signature verification against a vault key; validates all required
 * arguments before issuing the service call. The "depends on control
 * dependency" markers are generated dataflow annotations; left intact.
 */
public class class_name {
    public Observable<ServiceResponse<KeyVerifyResult>> verifyWithServiceResponseAsync(String vaultBaseUrl, String keyName, String keyVersion, JsonWebKeySignatureAlgorithm algorithm, byte[] digest, byte[] signature) {
        if (vaultBaseUrl == null) {
            throw new IllegalArgumentException("Parameter vaultBaseUrl is required and cannot be null.");
        }
        if (keyName == null) {
            throw new IllegalArgumentException("Parameter keyName is required and cannot be null.");
        }
        if (keyVersion == null) {
            throw new IllegalArgumentException("Parameter keyVersion is required and cannot be null.");
        }
        if (this.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.apiVersion() is required and cannot be null.");
        }
        if (algorithm == null) {
            throw new IllegalArgumentException("Parameter algorithm is required and cannot be null.");
        }
        if (digest == null) {
            throw new IllegalArgumentException("Parameter digest is required and cannot be null.");
        }
        if (signature == null) {
            throw new IllegalArgumentException("Parameter signature is required and cannot be null.");
        }
        KeyVerifyParameters parameters = new KeyVerifyParameters();
        parameters.withAlgorithm(algorithm);
        parameters.withDigest(digest);
        parameters.withSignature(signature);
        String parameterizedHost = Joiner.on(", ").join("{vaultBaseUrl}", vaultBaseUrl);
        return service.verify(keyName, keyVersion, this.apiVersion(), this.acceptLanguage(), parameters, parameterizedHost, this.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<KeyVerifyResult>>>() {
                @Override
                public Observable<ServiceResponse<KeyVerifyResult>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<KeyVerifyResult> clientResponse = verifyDelegate(response);
                        return Observable.just(clientResponse); // depends on control dependency: [try], data = [none]
                    } catch (Throwable t) {
                        return Observable.error(t);
                    } // depends on control dependency: [catch], data = [none]
                }
            });
    }
}
public class class_name { private void stopServices() { // Stop services in reverse order of starting. m_logger.debug("Stopping all services"); ListIterator<Service> iter = m_startedServices.listIterator(m_startedServices.size()); while (iter.hasPrevious()) { Service service = iter.previous(); m_logger.debug("Stopping service: " + service.getClass().getSimpleName()); service.stop(); iter.remove(); } m_initializedServices.clear(); m_storageServices.clear(); } }
/**
 * Stops all started services in reverse start order and clears the service
 * registries. The "depends on control dependency" markers are generated
 * dataflow annotations; left intact.
 */
public class class_name {
    private void stopServices() {
        // Stop services in reverse order of starting.
        m_logger.debug("Stopping all services");
        // Iterate backwards from the end of the started-services list.
        ListIterator<Service> iter = m_startedServices.listIterator(m_startedServices.size());
        while (iter.hasPrevious()) {
            Service service = iter.previous();
            m_logger.debug("Stopping service: " + service.getClass().getSimpleName()); // depends on control dependency: [while], data = [none]
            service.stop(); // depends on control dependency: [while], data = [none]
            iter.remove(); // depends on control dependency: [while], data = [none]
        }
        m_initializedServices.clear();
        m_storageServices.clear();
    }
}
public class class_name {
    /**
     * Resolves the {@code customizeOptions} classes declared on the component's
     * {@code @Component} annotation into type mirrors.
     *
     * @param elementsUtil annotation-processing element utilities
     * @param component    the annotated component element
     * @return type mirrors for each declared customize-option class
     */
    public static List<TypeMirror> getComponentCustomizeOptions(Elements elementsUtil, TypeElement component) {
        Component componentAnnotation = component.getAnnotation(Component.class);
        try {
            List<TypeMirror> options = new LinkedList<>();
            for (Class<?> optionClass : componentAnnotation.customizeOptions()) {
                options.add(elementsUtil.getTypeElement(optionClass.getCanonicalName()).asType());
            }
            return options;
        } catch (MirroredTypesException mte) {
            // During annotation processing, reading Class values throws this
            // exception carrying the mirrors directly.
            return new LinkedList<>(mte.getTypeMirrors());
        }
    }
}
/**
 * Resolves the customizeOptions classes of a {@code @Component} annotation
 * into type mirrors. The "depends on control dependency" markers are
 * generated dataflow annotations; left intact.
 */
public class class_name {
    public static List<TypeMirror> getComponentCustomizeOptions(Elements elementsUtil, TypeElement component) {
        Component componentAnnotation = component.getAnnotation(Component.class);
        try {
            Class<?>[] componentsClass = componentAnnotation.customizeOptions();
            return Stream
                .of(componentsClass)
                .map(Class::getCanonicalName)
                .map(elementsUtil::getTypeElement)
                .map(TypeElement::asType)
                .collect(Collectors.toList()); // depends on control dependency: [try], data = [none]
        } catch (MirroredTypesException mte) {
            // Reading Class values during annotation processing throws this
            // exception carrying the type mirrors directly.
            return new LinkedList<>(mte.getTypeMirrors());
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name { public Query getMiniSelectQuery(Collection<String> columns, Collection<Match> matches) { int matchSize = matches == null ? 0 : matches.size(); int maxSqlLength = matches == null ? 50 : (matchSize + 3) * 10; StringBuilder sb = new StringBuilder(maxSqlLength); // SQL语句和参数列表的构造 sb.append("select "); if (columns == null || columns.size() == 0) { sb.append("*,"); } else { for (String column : columns) { sb.append(column).append(','); } } sb.deleteCharAt(sb.length() - 1); sb.append(" from ").append(dao.getTableName()); if (matchSize == 0) { return new Query(sb.toString(), new ArrayList<Object>(0)); } // 如果有条件 List<Object> params = new ArrayList<Object>(matchSize + 1); appendQuerytoSQL(matches, sb, params); return new Query(sb.toString(), params); } }
/**
 * Builds a SELECT query (SQL text plus parameter list) for the given columns
 * and match conditions. The "depends on control dependency" markers are
 * generated dataflow annotations; left intact.
 */
public class class_name {
    public Query getMiniSelectQuery(Collection<String> columns, Collection<Match> matches) {
        int matchSize = matches == null ? 0 : matches.size();
        int maxSqlLength = matches == null ? 50 : (matchSize + 3) * 10;
        StringBuilder sb = new StringBuilder(maxSqlLength);
        // Build the SQL statement and parameter list.
        sb.append("select ");
        if (columns == null || columns.size() == 0) {
            sb.append("*,"); // depends on control dependency: [if], data = [none]
        } else {
            for (String column : columns) {
                sb.append(column).append(','); // depends on control dependency: [for], data = [column]
            }
        }
        // Drop the trailing comma left by the column list.
        sb.deleteCharAt(sb.length() - 1);
        sb.append(" from ").append(dao.getTableName());
        if (matchSize == 0) {
            return new Query(sb.toString(), new ArrayList<Object>(0)); // depends on control dependency: [if], data = [0)]
        }
        // There are conditions: append the WHERE clause and collect parameters.
        List<Object> params = new ArrayList<Object>(matchSize + 1);
        appendQuerytoSQL(matches, sb, params);
        return new Query(sb.toString(), params);
    }
}
public class class_name {
    /**
     * Loads the dictionary from a tab-separated UTF-8 stream. Each well-formed
     * line has exactly four tab-separated fields; the lowercased first field
     * (with pattern-matched spans replaced by spaces) maps to the third field.
     * Malformed lines are reported to stderr and skipped.
     *
     * NOTE(review): the reader (and therefore the caller's stream) is
     * intentionally left open here, matching the original behavior --
     * the caller appears to own the stream's lifecycle; confirm before
     * converting to try-with-resources.
     *
     * @param dictInputStream UTF-8 encoded dictionary data; read to EOF
     * @throws IOException if reading the stream fails
     */
    private void loadDictionary(final InputStream dictInputStream) throws IOException {
        dictionary = new HashMap<String, String>();
        // StandardCharsets.UTF_8 replaces Charset.forName("UTF-8"): no runtime
        // name lookup and no UnsupportedCharsetException path.
        final BufferedReader breader = new BufferedReader(new InputStreamReader(
                dictInputStream, java.nio.charset.StandardCharsets.UTF_8));
        String line;
        while ((line = breader.readLine()) != null) {
            final String[] lineArray = tabPattern.split(line);
            if (lineArray.length == 4) {
                final Matcher lineMatcher = linePattern
                        .matcher(lineArray[0].toLowerCase());
                dictionary.put(lineMatcher.replaceAll(" "), lineArray[2]);
            } else {
                System.err.println("WARNING: line starting with " + lineArray[0]
                        + " is not well-formed; skipping!!");
            }
        }
    }
}
/**
 * Loads a tab-separated UTF-8 dictionary stream into the {@code dictionary}
 * map, skipping lines that do not have exactly four fields. The "depends on
 * control dependency" markers are generated dataflow annotations; left intact.
 */
public class class_name {
    private void loadDictionary(final InputStream dictInputStream) throws IOException {
        dictionary = new HashMap<String, String>();
        final BufferedReader breader = new BufferedReader(
                new InputStreamReader(dictInputStream, Charset.forName("UTF-8")));
        String line;
        while ((line = breader.readLine()) != null) {
            final String[] lineArray = tabPattern.split(line);
            if (lineArray.length == 4) {
                // Key: lowercased first field with linePattern matches replaced by spaces.
                final Matcher lineMatcher = linePattern
                        .matcher(lineArray[0].toLowerCase());
                dictionary.put(lineMatcher.replaceAll(" "), lineArray[2]); // depends on control dependency: [if], data = [none]
            } else {
                System.err.println("WARNING: line starting with " + lineArray[0]
                        + " is not well-formed; skipping!!"); // depends on control dependency: [if], data = [none]
            }
        }
    }
}
public class class_name {
    /**
     * Compiles an expression into a rule, converting infix input to postfix
     * first when necessary.
     *
     * @param expression the expression text
     * @param isPostFix  whether {@code expression} is already in postfix form
     * @return the compiled rule
     */
    public static Rule getRule(final String expression, final boolean isPostFix) {
        final String postFix = isPostFix ? expression : CONVERTER.convert(expression);
        return new ExpressionRule(COMPILER.compileExpression(postFix));
    }
}
/**
 * Compiles an expression (converting infix to postfix when needed) into an
 * {@code ExpressionRule}. The "depends on control dependency" marker is a
 * generated dataflow annotation; left intact.
 */
public class class_name {
    public static Rule getRule(final String expression, final boolean isPostFix) {
        String postFix = expression;
        if (!isPostFix) {
            // Infix input: convert to postfix before compiling.
            postFix = CONVERTER.convert(expression); // depends on control dependency: [if], data = [none]
        }
        return new ExpressionRule(COMPILER.compileExpression(postFix));
    }
}
public class class_name { public HostName toCanonicalHostName() { HostName host = canonicalHost; if(host == null) { if(isMultiple()) { throw new IncompatibleAddressException(this, "ipaddress.error.unavailable.numeric"); } InetAddress inetAddress = toInetAddress(); String hostStr = inetAddress.getCanonicalHostName();//note: this does not return ipv6 addresses enclosed in brackets [] if(hostStr.equals(inetAddress.getHostAddress())) { //we got back the address, so the host is me host = new HostName(hostStr, new ParsedHost(hostStr, getProvider())); host.resolvedAddress = this; } else { //the reverse lookup succeeded in finding a host string //we might not be the default resolved address for the host, so we don't set that field host = new HostName(hostStr); } } return host; } }
/**
 * Resolves this address to a canonical host name via reverse lookup, using
 * the cached {@code canonicalHost} when available. The "depends on control
 * dependency" markers are generated dataflow annotations; left intact.
 */
public class class_name {
    public HostName toCanonicalHostName() {
        HostName host = canonicalHost;
        if(host == null) {
            if(isMultiple()) {
                throw new IncompatibleAddressException(this, "ipaddress.error.unavailable.numeric");
            }
            InetAddress inetAddress = toInetAddress();
            String hostStr = inetAddress.getCanonicalHostName();//note: this does not return ipv6 addresses enclosed in brackets []
            if(hostStr.equals(inetAddress.getHostAddress())) {
                //we got back the address, so the host is me
                host = new HostName(hostStr, new ParsedHost(hostStr, getProvider())); // depends on control dependency: [if], data = [none]
                host.resolvedAddress = this; // depends on control dependency: [if], data = [none]
            } else {
                //the reverse lookup succeeded in finding a host string
                //we might not be the default resolved address for the host, so we don't set that field
                host = new HostName(hostStr); // depends on control dependency: [if], data = [none]
            }
        }
        return host;
    }
}
public class class_name {
    /**
     * Registers every JRE package reported by {@link JavaEnvUtils} as a system
     * package root. Replaces the legacy raw {@code Vector}/{@code Enumeration}
     * iteration with an enhanced-for loop; the per-element String cast is kept
     * because {@code getJrePackages()} exposes a raw collection.
     */
    public void addJavaLibraries() {
        for (Object packageName : JavaEnvUtils.getJrePackages()) {
            addSystemPackageRoot((String) packageName);
        }
    }
}
/**
 * Registers every JRE package reported by {@code JavaEnvUtils} as a system
 * package root. The "depends on control dependency" marker is a generated
 * dataflow annotation; left intact.
 */
public class class_name {
    public void addJavaLibraries() {
        Vector packages = JavaEnvUtils.getJrePackages();
        Enumeration e = packages.elements();
        while (e.hasMoreElements()) {
            String packageName = (String) e.nextElement();
            addSystemPackageRoot(packageName); // depends on control dependency: [while], data = [none]
        }
    }
}
public class class_name {
    /**
     * Percent-decodes {@code s}, treating escaped bytes with the high bit set
     * as 2- or 3-byte "modified UTF-8" sequences (each continuation byte must
     * itself be percent-escaped).
     *
     * @param s the encoded string, or {@code null}
     * @return the decoded string, {@code null} for null input, or {@code s}
     *         itself when it contains no {@code '%'}
     * @throws IllegalArgumentException if a multi-byte sequence is malformed
     */
    public static String decode(String s) {
        if (s == null) {
            return null;
        }
        int pct = s.indexOf('%');
        if (pct < 0) {
            // Fast path: nothing to decode.
            return s;
        }
        StringBuilder out = new StringBuilder(s.length());
        int cursor = 0;
        while (pct != -1) {
            out.append(s, cursor, pct);
            cursor = pct + 3;
            char first = (char) Integer.parseInt(s.substring(pct + 1, cursor), 16);
            if ((first & 0x80) == 0) {
                // Plain ASCII escape.
                out.append(first);
            } else {
                // Multi-byte "modified UTF-8": the next escape is required.
                if (s.charAt(cursor++) != '%') {
                    throw new IllegalArgumentException();
                }
                char second = (char) Integer.parseInt(s.substring(cursor, cursor + 2), 16);
                cursor += 2;
                if ((first & 0xe0) == 0xc0) {
                    // Two-byte sequence: 110xxxxx 10xxxxxx.
                    out.append((char) (((first & 0x1f) << 6) | (second & 0x3f)));
                } else if ((first & 0xf0) == 0xe0) {
                    // Three-byte sequence: 1110xxxx 10xxxxxx 10xxxxxx.
                    if (s.charAt(cursor++) != '%') {
                        throw new IllegalArgumentException();
                    }
                    char third = (char) Integer.parseInt(s.substring(cursor, cursor + 2), 16);
                    cursor += 2;
                    out.append((char) (((first & 0x0f) << 12) | ((second & 0x3f) << 6) | (third & 0x3f)));
                } else {
                    throw new IllegalArgumentException();
                }
            }
            pct = s.indexOf('%', cursor);
        }
        out.append(s, cursor, s.length());
        return out.toString();
    }
}
/**
 * Percent-decodes a string, interpreting high-bit escaped bytes as 2/3-byte
 * "modified UTF-8" sequences. The "depends on control dependency" markers are
 * generated dataflow annotations; left intact.
 */
public class class_name {
    public static String decode(String s) {
        if (s == null) {
            return null; // depends on control dependency: [if], data = [none]
        }
        int i = s.indexOf('%');
        if (i == -1) {
            // Fast path: nothing to decode.
            return s; // depends on control dependency: [if], data = [none]
        }
        StringBuilder builder = new StringBuilder();
        int begin = 0;
        do {
            builder.append(s, begin, i);
            begin = i + 3;
            char ch = (char) Integer.parseInt(s.substring(i + 1, begin), 16);
            if ((ch & 0x80) != 0) {
                // Decode "modified UTF-8".
                if (s.charAt(begin++) != '%') {
                    throw new IllegalArgumentException();
                }
                char ch2 = (char) Integer.parseInt(s.substring(begin, begin + 2), 16);
                begin += 2; // depends on control dependency: [if], data = [none]
                if ((ch & 0xe0) == 0xc0) {
                    // Two-byte sequence: 110xxxxx 10xxxxxx.
                    ch = (char) (((ch & 0x1f) << 6) | (ch2 & 0x3f)); // depends on control dependency: [if], data = [none]
                } else if ((ch & 0xf0) == 0xe0) {
                    // Three-byte sequence: 1110xxxx 10xxxxxx 10xxxxxx.
                    if (s.charAt(begin++) != '%') {
                        throw new IllegalArgumentException();
                    }
                    char ch3 = (char) Integer.parseInt(s.substring(begin, begin + 2), 16);
                    begin += 2; // depends on control dependency: [if], data = [none]
                    ch = (char) (((ch & 0x0f) << 12) | ((ch2 & 0x3f) << 6) | (ch3 & 0x3f)); // depends on control dependency: [if], data = [none]
                } else {
                    throw new IllegalArgumentException();
                }
            }
            builder.append(ch);
        } while ((i = s.indexOf('%', begin)) != -1);
        builder.append(s, begin, s.length());
        return builder.toString();
    }
}
public class class_name {
    /**
     * Checks whether every character of {@code p_uric} is a legal URI
     * character: a valid {@code %XX} hex escape, a reserved character, or an
     * unreserved character.
     *
     * @param p_uric candidate URI text; {@code null} is rejected
     * @return {@code true} if the whole string is URI-legal
     */
    private static boolean isURIString(String p_uric) {
        if (p_uric == null) {
            return false;
        }
        final int len = p_uric.length();
        int pos = 0;
        while (pos < len) {
            final char c = p_uric.charAt(pos);
            if (c == '%') {
                // A percent escape must be followed by exactly two hex digits.
                if (pos + 2 >= len || !isHex(p_uric.charAt(pos + 1)) || !isHex(p_uric.charAt(pos + 2))) {
                    return false;
                }
                pos += 3;
            } else if (isReservedCharacter(c) || isUnreservedCharacter(c)) {
                pos++;
            } else {
                return false;
            }
        }
        return true;
    }
}
/**
 * Validates that every character of the input is URI-legal: a well-formed
 * %XX escape, a reserved character, or an unreserved character. The
 * "depends on control dependency" markers are generated dataflow
 * annotations; left intact.
 */
public class class_name {
    private static boolean isURIString(String p_uric) {
        if (p_uric == null) {
            return false; // depends on control dependency: [if], data = [none]
        }
        int end = p_uric.length();
        char testChar = '\0';
        for (int i = 0; i < end; i++) {
            testChar = p_uric.charAt(i); // depends on control dependency: [for], data = [i]
            if (testChar == '%') {
                // A percent escape must be followed by exactly two hex digits.
                if (i + 2 >= end ||!isHex(p_uric.charAt(i + 1)) ||!isHex(p_uric.charAt(i + 2))) {
                    return false; // depends on control dependency: [if], data = [none]
                } else {
                    i += 2; // depends on control dependency: [if], data = [none]
                    continue;
                }
            }
            if (isReservedCharacter(testChar) || isUnreservedCharacter(testChar)) {
                continue;
            } else {
                return false; // depends on control dependency: [if], data = [none]
            }
        }
        return true;
    }
}
public class class_name {
    /**
     * Writes a long into the backing byte array at {@code index} via Unsafe.
     * When CHECKED mode is on, rejects any index whose 8-byte write would fall
     * outside the array; otherwise writes unconditionally.
     *
     * @param index byte offset into the backing array
     * @param value the long value to store
     * @throws IndexOutOfBoundsException in CHECKED mode for out-of-range indexes
     */
    @SuppressWarnings("restriction")
    public final void putLong(int index, long value) {
        if (CHECKED && (index < 0 || index > this.memory.length - 8)) {
            throw new IndexOutOfBoundsException();
        }
        UNSAFE.putLong(this.memory, BASE_OFFSET + index, value);
    }
}
/**
 * Unsafe-based long write into the backing byte array, with optional bounds
 * checking in CHECKED mode. The "depends on control dependency" markers are
 * generated dataflow annotations; left intact.
 */
public class class_name {
    @SuppressWarnings("restriction")
    public final void putLong(int index, long value) {
        if (CHECKED) {
            // Reject any index whose 8-byte write would fall outside the array.
            if (index >= 0 && index <= this.memory.length - 8) {
                UNSAFE.putLong(this.memory, BASE_OFFSET + index, value); // depends on control dependency: [if], data = [none]
            } else {
                throw new IndexOutOfBoundsException();
            }
        } else {
            UNSAFE.putLong(this.memory, BASE_OFFSET + index, value); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Registers (or re-verifies) a user-defined function token and returns its
     * function id. Synchronized because it reads and advances the shared
     * {@code m_udfSeqId} counter and mutates the defined-function map.
     *
     * @param functionName       name of the function being registered
     * @param functionId         catalog-supplied id, or a non-positive value to
     *                           request allocation of a new id
     * @param voltReturnType     Volt return type (converted to an HSQL type)
     * @param voltParameterTypes Volt parameter types (converted to HSQL types)
     * @return the id under which the function is registered
     */
    public static synchronized int registerTokenForUDF(String functionName,
                                                       int functionId,
                                                       VoltType voltReturnType,
                                                       VoltType[] voltParameterTypes) {
        int retFunctionId;
        Type hsqlReturnType = hsqlTypeFromVoltType(voltReturnType);
        Type[] hsqlParameterTypes = hsqlTypeFromVoltType(voltParameterTypes);
        // If the token is already registered in the map, do not bother again.
        FunctionDescriptor oldFd = findFunction(functionName, hsqlReturnType, hsqlParameterTypes);
        if (oldFd != null) {
            // This may replace functionName with itself. This will not be an error.
            FunctionDescriptor.addDefinedFunction(functionName, oldFd);
            retFunctionId = oldFd.getId();
            // If we were given a non-negative function id, it
            // was defined in the catalog. Our re-verification here
            // should have a value which we put into the catalog sometime
            // earlier. So, this earlier value should match the one we
            // were told to return.
            assert((functionId < 0) || (functionId == retFunctionId));
        } else {
            // if the function was not already defined, then
            // if functionId is a valid UDF id or pre-defined SQL function id, then use it
            // otherwise, we want a new number.
            //
            if (functionId > 0) {
                retFunctionId = functionId;
            } else {
                retFunctionId = getNextFunctionId();
            }
            FunctionDescriptor fd = makeFunctionDescriptorFromParts(functionName, retFunctionId, hsqlReturnType, hsqlParameterTypes);
            // if the function id belongs to UDF, put it into the defined_function map
            if (isUserDefinedFunctionId(retFunctionId)) {
                FunctionDescriptor.addDefinedFunction(functionName, fd);
            }
            m_logger.debug(String.format("Added UDF \"%s\"(%d) with %d parameters",
                    functionName, retFunctionId, voltParameterTypes.length));
        }
        // Ensure that m_udfSeqId is larger than all the
        // ones we've seen so far.
        if (m_udfSeqId <= retFunctionId) {
            m_udfSeqId = retFunctionId + 1;
        }
        return retFunctionId;
    }
}
/**
 * Registers (or re-verifies) a user-defined function token and returns its
 * id; synchronized around the shared {@code m_udfSeqId} counter and the
 * defined-function map. Machine-annotated twin of the plain version; the
 * "depends on control dependency" markers are generated dataflow
 * annotations and are left intact.
 */
public class class_name {
    public static synchronized int registerTokenForUDF(String functionName,
                                                       int functionId,
                                                       VoltType voltReturnType,
                                                       VoltType[] voltParameterTypes) {
        int retFunctionId;
        Type hsqlReturnType = hsqlTypeFromVoltType(voltReturnType);
        Type[] hsqlParameterTypes = hsqlTypeFromVoltType(voltParameterTypes);
        // If the token is already registered in the map, do not bother again.
        FunctionDescriptor oldFd = findFunction(functionName, hsqlReturnType, hsqlParameterTypes);
        if (oldFd != null) {
            // This may replace functionName with itself. This will not be an error.
            FunctionDescriptor.addDefinedFunction(functionName, oldFd); // depends on control dependency: [if], data = [none]
            retFunctionId = oldFd.getId(); // depends on control dependency: [if], data = [none]
            // If we were given a non-negative function id, it
            // was defined in the catalog. Our re-verification here
            // should have a value which we put into the catalog sometime
            // earlier. So, this earlier value should match the one we
            // were told to return.
            assert((functionId < 0) || (functionId == retFunctionId)); // depends on control dependency: [if], data = [none]
        } else {
            // if the function was not already defined, then
            // if functionId is a valid UDF id or pre-defined SQL function id, then use it
            // otherwise, we want a new number.
            //
            if (functionId > 0) {
                retFunctionId = functionId; // depends on control dependency: [if], data = [none]
            } else {
                retFunctionId = getNextFunctionId(); // depends on control dependency: [if], data = [none]
            }
            FunctionDescriptor fd = makeFunctionDescriptorFromParts(functionName, retFunctionId, hsqlReturnType, hsqlParameterTypes);
            // if the function id belongs to UDF, put it into the defined_function map
            if (isUserDefinedFunctionId(retFunctionId)) {
                FunctionDescriptor.addDefinedFunction(functionName, fd); // depends on control dependency: [if], data = [none]
            }
            m_logger.debug(String.format("Added UDF \"%s\"(%d) with %d parameters",
                    functionName, retFunctionId, voltParameterTypes.length)); // depends on control dependency: [if], data = [none]
        }
        // Ensure that m_udfSeqId is larger than all the
        // ones we've seen so far.
        if (m_udfSeqId <= retFunctionId) {
            m_udfSeqId = retFunctionId + 1; // depends on control dependency: [if], data = [none]
        }
        return retFunctionId;
    }
}
public class class_name { private void writeIfChanged(byte[] b, FileObject file) throws IOException { boolean mustWrite = false; String event = "[No need to update file "; if (force) { mustWrite = true; event = "[Forcefully writing file "; } else { InputStream in; byte[] a; try { // regrettably, there's no API to get the length in bytes // for a FileObject, so we can't short-circuit reading the // file here in = file.openInputStream(); a = readBytes(in); if (!Arrays.equals(a, b)) { mustWrite = true; event = "[Overwriting file "; } } catch (FileNotFoundException e) { mustWrite = true; event = "[Creating file "; } } if (util.verbose) util.log(event + file + "]"); if (mustWrite) { OutputStream out = file.openOutputStream(); out.write(b); /* No buffering, just one big write! */ out.close(); } } }
/**
 * Writes the byte array to the file only when contents differ (a missing
 * file counts as changed; {@code force} always writes). The "depends on
 * control dependency" markers are generated dataflow annotations; left
 * intact. NOTE(review): neither stream is closed here — potential resource
 * leak worth confirming against the surrounding code.
 */
public class class_name {
    private void writeIfChanged(byte[] b, FileObject file) throws IOException {
        boolean mustWrite = false;
        String event = "[No need to update file ";
        if (force) {
            mustWrite = true;
            event = "[Forcefully writing file ";
        } else {
            InputStream in;
            byte[] a;
            try {
                // regrettably, there's no API to get the length in bytes
                // for a FileObject, so we can't short-circuit reading the
                // file here
                in = file.openInputStream(); // depends on control dependency: [try], data = [none]
                a = readBytes(in); // depends on control dependency: [try], data = [none]
                if (!Arrays.equals(a, b)) {
                    mustWrite = true; // depends on control dependency: [if], data = [none]
                    event = "[Overwriting file "; // depends on control dependency: [if], data = [none]
                }
            } catch (FileNotFoundException e) {
                mustWrite = true;
                event = "[Creating file ";
            } // depends on control dependency: [catch], data = [none]
        }
        if (util.verbose) util.log(event + file + "]");
        if (mustWrite) {
            OutputStream out = file.openOutputStream();
            out.write(b); /* No buffering, just one big write! */
            out.close();
        }
    }
}
public class class_name { @Override public List<PhoneNumber> searchForNumbers(String country, PhoneNumberSearchFilters listFilters) { List<PhoneNumber> availableNumbers = new ArrayList<PhoneNumber>(); if (logger.isDebugEnabled()) { logger.debug("searchPattern: " + listFilters.getFilterPattern()); } try { String uri = buildSearchUri(listFilters); HttpGet httpGet = new HttpGet(uri); if (telestaxProxyEnabled) addTelestaxProxyHeaders(httpGet, ProvisionProvider.REQUEST_TYPE.GETDIDS.name()); String response = executeRequest(httpGet); availableNumbers = toPhoneNumbers((SearchResult) XmlUtils.fromXml(response, SearchResult.class)); return availableNumbers; } catch (Exception e) { logger.error("Could not execute search request: " + uri, e); } return availableNumbers; } }
public class class_name { @Override public List<PhoneNumber> searchForNumbers(String country, PhoneNumberSearchFilters listFilters) { List<PhoneNumber> availableNumbers = new ArrayList<PhoneNumber>(); if (logger.isDebugEnabled()) { logger.debug("searchPattern: " + listFilters.getFilterPattern()); // depends on control dependency: [if], data = [none] } try { String uri = buildSearchUri(listFilters); HttpGet httpGet = new HttpGet(uri); if (telestaxProxyEnabled) addTelestaxProxyHeaders(httpGet, ProvisionProvider.REQUEST_TYPE.GETDIDS.name()); String response = executeRequest(httpGet); availableNumbers = toPhoneNumbers((SearchResult) XmlUtils.fromXml(response, SearchResult.class)); return availableNumbers; } catch (Exception e) { logger.error("Could not execute search request: " + uri, e); } return availableNumbers; } }
public class class_name {
    /**
     * Adds each entity in {@code entities} as a sub-entity of this builder.
     *
     * @param entities the entities to add; must not be {@code null}
     * @return this builder, for chaining
     * @throws IllegalArgumentException if {@code entities} is null
     */
    public EntityBuilder addSubEntities(List<Entity> entities) {
        if (entities == null) {
            throw new IllegalArgumentException("entities cannot be null.");
        }
        for (Entity subEntity : entities) {
            addSubEntity(subEntity);
        }
        return this;
    }
}
/**
 * Adds each entity in the list as a sub-entity of this builder and returns
 * the builder for chaining. The "depends on control dependency" marker is a
 * generated dataflow annotation; left intact.
 */
public class class_name {
    public EntityBuilder addSubEntities(List<Entity> entities) {
        if(entities == null) {
            throw new IllegalArgumentException("entities cannot be null.");
        }
        for (Entity entity : entities) {
            addSubEntity(entity); // depends on control dependency: [for], data = [entity]
        }
        return this;
    }
}
public class class_name { private void copy(ByteBuf src, ByteBuffer dst) { // This branch is necessary, because an Exception is thrown if the // destination buffer has more remaining (writable) bytes than // currently readable from the Netty ByteBuf source. if (src.isReadable()) { if (src.readableBytes() < dst.remaining()) { int oldLimit = dst.limit(); dst.limit(dst.position() + src.readableBytes()); src.readBytes(dst); dst.limit(oldLimit); } else { src.readBytes(dst); } } } }
// Annotated duplicate: same executable tokens as the unannotated copy of this
// class in this file, with machine-generated dependency notes kept as trailing
// comments.
public class class_name {
    // Copies readable bytes from the Netty ByteBuf into the NIO ByteBuffer,
    // clamping the destination limit when it could out-size the source.
    private void copy(ByteBuf src, ByteBuffer dst) {
        // This branch is necessary, because an Exception is thrown if the
        // destination buffer has more remaining (writable) bytes than
        // currently readable from the Netty ByteBuf source.
        if (src.isReadable()) {
            if (src.readableBytes() < dst.remaining()) {
                int oldLimit = dst.limit();
                dst.limit(dst.position() + src.readableBytes()); // depends on control dependency: [if], data = [none]
                src.readBytes(dst); // depends on control dependency: [if], data = [none]
                dst.limit(oldLimit); // depends on control dependency: [if], data = [none]
            } else {
                src.readBytes(dst); // depends on control dependency: [if], data = [none]
            }
        }
    }
}
public class class_name {
    /**
     * Resolves a locale identifier such as "en", "en_US" or "en_US_WIN" to the
     * language name rendered in that locale itself (e.g. "English").
     *
     * @param locale underscore-separated locale identifier, or null
     * @return the display language, or the raw identifier ("null" for a null
     *         input) when the identifier has more than three components
     */
    public static String getLocalDisplayName(String locale) {
        String display = "" + locale;
        if (locale == null) {
            return display;
        }
        String[] parts = locale.split("_");
        Locale parsed;
        switch (parts.length) {
            case 1:
                parsed = new Locale(parts[0]);
                break;
            case 2:
                parsed = new Locale(parts[0], parts[1]);
                break;
            case 3:
                parsed = new Locale(parts[0], parts[1], parts[2]);
                break;
            default:
                // Malformed identifier: fall back to the raw string.
                parsed = null;
                break;
        }
        if (parsed != null) {
            display = parsed.getDisplayLanguage(parsed);
        }
        return display;
    }
}
// Annotated duplicate: same executable tokens as the unannotated copy of this
// class in this file, with machine-generated dependency notes kept as trailing
// comments.
public class class_name {
    // Resolves "lang[_country[_variant]]" to the language's own display name;
    // falls back to the raw identifier (or "null") when unparseable.
    public static String getLocalDisplayName(String locale) {
        String desc = "" + locale;
        if (locale != null) {
            String[] langArray = locale.split("_");
            Locale loc = null;
            if (langArray.length == 1) {
                loc = new Locale(langArray[0]); // depends on control dependency: [if], data = [none]
            } else if (langArray.length == 2) {
                loc = new Locale(langArray[0], langArray[1]); // depends on control dependency: [if], data = [none]
            } else if (langArray.length == 3) {
                loc = new Locale(langArray[0], langArray[1], langArray[2]); // depends on control dependency: [if], data = [none]
            }
            if (loc != null) {
                desc = loc.getDisplayLanguage(loc); // depends on control dependency: [if], data = [(loc]
            }
        }
        return desc;
    }
}
public class class_name {
    /**
     * Marshalls each field of the given ResourceMetadata into the wire format
     * via the supplied ProtocolMarshaller, one field binding at a time.
     *
     * <p>The emission order of the bindings below defines the serialized
     * payload, so the call sequence must not be reordered.
     *
     * @throws SdkClientException if {@code resourceMetadata} is null, or
     *         wrapping any marshalling failure
     */
    public void marshall(ResourceMetadata resourceMetadata, ProtocolMarshaller protocolMarshaller) {
        if (resourceMetadata == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(resourceMetadata.getArn(), ARN_BINDING);
            protocolMarshaller.marshall(resourceMetadata.getCreatedAt(), CREATEDAT_BINDING);
            protocolMarshaller.marshall(resourceMetadata.getLastUpdatedAt(), LASTUPDATEDAT_BINDING);
            protocolMarshaller.marshall(resourceMetadata.getUid(), UID_BINDING);
            protocolMarshaller.marshall(resourceMetadata.getVersion(), VERSION_BINDING);
        } catch (Exception e) {
            // Re-wrap so callers see a single client-exception type with cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
// Annotated duplicate: same executable tokens as the unannotated copy of this
// class in this file, with machine-generated dependency notes kept as trailing
// comments.
public class class_name {
    // Marshalls each ResourceMetadata field through the protocol marshaller;
    // wraps any failure in SdkClientException.
    public void marshall(ResourceMetadata resourceMetadata, ProtocolMarshaller protocolMarshaller) {
        if (resourceMetadata == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(resourceMetadata.getArn(), ARN_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(resourceMetadata.getCreatedAt(), CREATEDAT_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(resourceMetadata.getLastUpdatedAt(), LASTUPDATEDAT_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(resourceMetadata.getUid(), UID_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(resourceMetadata.getVersion(), VERSION_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    /**
     * Computes the version immediately preceding the head version, borrowing
     * from the next-higher component when a component is zero. A borrowed
     * component is set to 999 (the scheme's assumed per-component maximum —
     * TODO confirm). Returns 0.0.0 when head is already 0.0.0.
     */
    public SemanticVersion getPrevVersion() {
        final int cap = 999; // filler value when borrowing from a higher component
        final int major = head.getMajorVersion();
        final int minor = head.getMinorVersion();
        final int patch = head.getPatchVersion();
        if (patch > 0) return new SemanticVersion(major, minor, patch - 1);
        if (minor > 0) return new SemanticVersion(major, minor - 1, cap);
        if (major > 0) return new SemanticVersion(major - 1, cap, cap);
        return new SemanticVersion(0, 0, 0);
    }
}
// Annotated duplicate: same executable tokens as the unannotated copy of this
// class in this file, with machine-generated dependency notes kept as trailing
// comments.
public class class_name {
    // Previous version of head, borrowing with 999 when a component is zero;
    // 0.0.0 maps to itself.
    public SemanticVersion getPrevVersion() {
        int major = head.getMajorVersion();
        int minor = head.getMinorVersion();
        int patch = head.getPatchVersion();
        if (patch > 0) {
            return new SemanticVersion(major, minor, patch - 1); // depends on control dependency: [if], data = [none]
        }
        if (minor > 0) {
            return new SemanticVersion(major, minor - 1, 999); // depends on control dependency: [if], data = [none]
        }
        if (major > 0) {
            return new SemanticVersion(major - 1, 999, 999); // depends on control dependency: [if], data = [(major]
        }
        return new SemanticVersion(0, 0, 0);
    }
}
public class class_name {
    /**
     * Pushes the given string back onto the reader so it will be re-read, and
     * rewinds the running character index accordingly.
     *
     * <p>Characters are unread last-to-first: each unread character becomes
     * the next one returned, so pushing in reverse restores original order.
     */
    protected void pushBack(String str) {
        final int len = str.length();
        for (int i = len - 1; i >= 0; i--) {
            reader.unread(str.charAt(i));
        }
        idx -= len;
    }
}
// Annotated duplicate: same executable tokens as the unannotated copy of this
// class in this file, with machine-generated dependency notes kept as trailing
// comments.
public class class_name {
    // Unreads the string onto the reader in reverse (so it re-reads in order)
    // and rewinds the character index.
    protected void pushBack(String str) {
        char[] chars = str.toCharArray();
        for ( int j = chars.length - 1; j >= 0; j-- ) {
            reader.unread(chars[j]); // depends on control dependency: [for], data = [j]
        }
        idx -= chars.length;
    }
}
public class class_name {
    /**
     * Builds the Dependency-Check engine, then either runs database updates
     * only (when updateOnly is set — the engine is closed afterwards) or
     * analyzes the configured dependencies.
     *
     * @return the engine; in update-only mode it has already been closed
     * @throws ExceptionCollection on engine-creation or update failure
     */
    private Engine executeDependencyCheck() throws ExceptionCollection {
        populateSettings();
        final Engine engine;
        try {
            engine = new Engine(settings);
        } catch (DatabaseException ex) {
            // 'true' marks the collection fatal: no engine means nothing else can run.
            throw new ExceptionCollection(ex, true);
        }
        if (!this.updateOnly) {
            engine.setDependencies(this.dependencies);
            engine.analyzeDependencies();
            return engine;
        }
        try {
            engine.doUpdates();
        } catch (UpdateException ex) {
            throw new ExceptionCollection(ex);
        } finally {
            // Update-only mode has no further use for the engine.
            engine.close();
        }
        return engine;
    }
}
// Annotated duplicate: same executable tokens as the unannotated copy of this
// class in this file, with machine-generated dependency notes kept as trailing
// comments.
public class class_name {
    // Creates the engine; in update-only mode runs doUpdates and closes the
    // engine, otherwise analyzes the configured dependencies.
    private Engine executeDependencyCheck() throws ExceptionCollection {
        populateSettings();
        final Engine engine;
        try {
            engine = new Engine(settings);
        } catch (DatabaseException ex) {
            throw new ExceptionCollection(ex, true);
        }
        if (this.updateOnly) {
            try {
                engine.doUpdates(); // depends on control dependency: [try], data = [none]
            } catch (UpdateException ex) {
                throw new ExceptionCollection(ex);
            } finally { // depends on control dependency: [catch], data = [none]
                engine.close();
            }
        } else {
            engine.setDependencies(this.dependencies);
            engine.analyzeDependencies();
        }
        return engine;
    }
}
public class class_name {
    /**
     * Adds the given deployment node (with its container instances and child
     * deployment nodes) to the view; when it is actually added, every ancestor
     * node is added too so the node is not orphaned in the diagram.
     *
     * @throws IllegalArgumentException if {@code deploymentNode} is null
     */
    public void add(@Nonnull DeploymentNode deploymentNode, boolean addRelationships) {
        if (deploymentNode == null) {
            throw new IllegalArgumentException("A deployment node must be specified.");
        }
        if (!addContainerInstancesAndDeploymentNodes(deploymentNode, addRelationships)) {
            return;
        }
        // Walk up the parent chain so the whole ancestry appears in the view.
        for (Element ancestor = deploymentNode.getParent(); ancestor != null; ancestor = ancestor.getParent()) {
            addElement(ancestor, addRelationships);
        }
    }
}
// Annotated duplicate: same executable tokens as the unannotated copy of this
// class in this file, with machine-generated dependency notes kept as trailing
// comments.
public class class_name {
    // Adds the deployment node to the view and, when added, all its ancestors.
    public void add(@Nonnull DeploymentNode deploymentNode, boolean addRelationships) {
        if (deploymentNode == null) {
            throw new IllegalArgumentException("A deployment node must be specified.");
        }
        if (addContainerInstancesAndDeploymentNodes(deploymentNode, addRelationships)) {
            Element parent = deploymentNode.getParent();
            while (parent != null) {
                addElement(parent, addRelationships); // depends on control dependency: [while], data = [(parent]
                parent = parent.getParent(); // depends on control dependency: [while], data = [none]
            }
        }
    }
}
public class class_name {
    /**
     * Rewrites the plugin id on the given policy definition from the imported
     * id to the id of the locally stored plugin, when a mapping exists in
     * pluginBeanIdMap. Storage failures are reported via error() and leave the
     * id unchanged.
     *
     * @return the same policy definition instance, possibly with updated id
     */
    private PolicyDefinitionBean updatePluginIdInPolicyDefinition(PolicyDefinitionBean policyDef) {
        if (!pluginBeanIdMap.containsKey(policyDef.getPluginId())) {
            return policyDef;
        }
        try {
            Map.Entry<String, String> coords = pluginBeanIdMap.get(policyDef.getPluginId());
            PluginBean storedPlugin = storage.getPlugin(coords.getKey(), coords.getValue());
            policyDef.setPluginId(storedPlugin.getId());
        } catch (StorageException e) {
            // Best-effort: record the failure and keep the original id.
            error(e);
        }
        return policyDef;
    }
}
// Annotated duplicate: same executable tokens as the unannotated copy of this
// class in this file, with machine-generated dependency notes kept as trailing
// comments.
public class class_name {
    // Remaps the policy definition's plugin id to the locally stored plugin's
    // id when a coordinate mapping exists; storage errors are reported only.
    private PolicyDefinitionBean updatePluginIdInPolicyDefinition(PolicyDefinitionBean policyDef) {
        if (pluginBeanIdMap.containsKey(policyDef.getPluginId())){
            try {
                Map.Entry<String, String> pluginCoordinates = pluginBeanIdMap.get(policyDef.getPluginId());
                PluginBean plugin = storage.getPlugin(pluginCoordinates.getKey(), pluginCoordinates.getValue());
                policyDef.setPluginId(plugin.getId()); // depends on control dependency: [try], data = [none]
            } catch (StorageException e) {
                error(e);
            } // depends on control dependency: [catch], data = [none]
        }
        return policyDef;
    }
}
public class class_name {
    /**
     * Records a rule violation, keeping at most one entry per rule name: an
     * existing entry is replaced only when the new status is more severe
     * (has a higher ordinal) than the recorded one.
     */
    public final void addViolation(final Status status, final String ruleName, final String ruleGroup,
                                   final String ruleDescription, final String positive) {
        final RuleViolation existing = ruleViolationMap.get(ruleName);
        final boolean moreSevere =
                existing == null || status.ordinal() > existing.getStatus().ordinal();
        if (moreSevere) {
            ruleViolationMap.put(ruleName,
                    new RuleViolation(getId(), getName(), resourceType, ruleName,
                            ruleDescription, ruleGroup, status, positive));
        }
    }
}
// Annotated duplicate: same executable tokens as the unannotated copy of this
// class in this file, with machine-generated dependency notes kept as trailing
// comments.
public class class_name {
    // Records a violation per rule name, replacing only when the new status
    // ordinal is higher (more severe).
    public final void addViolation(final Status status,final String ruleName,final String ruleGroup,final String ruleDescription,final String positive) {
        final RuleViolation currentRuleViolation = ruleViolationMap.get(ruleName);
        if (currentRuleViolation == null || status.ordinal() > currentRuleViolation.getStatus().ordinal()) {
            ruleViolationMap.put(ruleName, new RuleViolation(getId(),getName(),resourceType,ruleName,ruleDescription,ruleGroup,status,positive)); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Issues a bulk add request for the given tmsessionaction resources. Only
     * the writable configuration fields are copied onto fresh request objects
     * before the bulk call.
     *
     * @return the bulk responses, or null when there is nothing to add
     */
    public static base_responses add(nitro_service client, tmsessionaction resources[]) throws Exception {
        if (resources == null || resources.length == 0) {
            return null;
        }
        tmsessionaction[] requests = new tmsessionaction[resources.length];
        for (int idx = 0; idx < resources.length; idx++) {
            tmsessionaction source = resources[idx];
            tmsessionaction request = new tmsessionaction();
            request.name = source.name;
            request.sesstimeout = source.sesstimeout;
            request.defaultauthorizationaction = source.defaultauthorizationaction;
            request.sso = source.sso;
            request.ssocredential = source.ssocredential;
            request.ssodomain = source.ssodomain;
            request.httponlycookie = source.httponlycookie;
            request.kcdaccount = source.kcdaccount;
            request.persistentcookie = source.persistentcookie;
            request.persistentcookievalidity = source.persistentcookievalidity;
            request.homepage = source.homepage;
            requests[idx] = request;
        }
        return add_bulk_request(client, requests);
    }
}
// Annotated duplicate: same executable tokens as the unannotated copy of this
// class in this file, with machine-generated dependency notes kept as trailing
// comments.
public class class_name {
    // Copies writable fields onto fresh request objects and issues a bulk add;
    // returns null when there is nothing to add.
    public static base_responses add(nitro_service client, tmsessionaction resources[]) throws Exception {
        base_responses result = null;
        if (resources != null && resources.length > 0) {
            tmsessionaction addresources[] = new tmsessionaction[resources.length];
            for (int i=0;i<resources.length;i++){
                addresources[i] = new tmsessionaction(); // depends on control dependency: [for], data = [i]
                addresources[i].name = resources[i].name; // depends on control dependency: [for], data = [i]
                addresources[i].sesstimeout = resources[i].sesstimeout; // depends on control dependency: [for], data = [i]
                addresources[i].defaultauthorizationaction = resources[i].defaultauthorizationaction; // depends on control dependency: [for], data = [i]
                addresources[i].sso = resources[i].sso; // depends on control dependency: [for], data = [i]
                addresources[i].ssocredential = resources[i].ssocredential; // depends on control dependency: [for], data = [i]
                addresources[i].ssodomain = resources[i].ssodomain; // depends on control dependency: [for], data = [i]
                addresources[i].httponlycookie = resources[i].httponlycookie; // depends on control dependency: [for], data = [i]
                addresources[i].kcdaccount = resources[i].kcdaccount; // depends on control dependency: [for], data = [i]
                addresources[i].persistentcookie = resources[i].persistentcookie; // depends on control dependency: [for], data = [i]
                addresources[i].persistentcookievalidity = resources[i].persistentcookievalidity; // depends on control dependency: [for], data = [i]
                addresources[i].homepage = resources[i].homepage; // depends on control dependency: [for], data = [i]
            }
            result = add_bulk_request(client, addresources);
        }
        return result;
    }
}
public class class_name {
    /**
     * Adapts a WritableByteChannel to an OutputStream. Single-byte writes are
     * funneled through a reusable one-byte array; array writes reuse the
     * previous ByteBuffer wrapper when the caller passes the same array
     * instance again (avoids re-wrapping on repeated writes from one buffer).
     */
    public static OutputStream newOutputStream(final WritableByteChannel ch) {
        checkNotNull(ch, "ch");
        return new OutputStream() {
            // Cached wrapper for the most recent caller array (see 'bs').
            private ByteBuffer bb = null;
            private byte[] bs = null; // Invoker's previous array
            // Lazily allocated one-byte buffer for write(int).
            private byte[] b1 = null;

            public synchronized void write(int b) throws IOException {
                if (b1 == null) b1 = new byte[1];
                b1[0] = (byte)b;
                this.write(b1);
            }

            public synchronized void write(byte[] bs, int off, int len) throws IOException {
                // Standard OutputStream bounds contract; (off + len) < 0 guards
                // against int overflow of the sum.
                if ((off < 0) || (off > bs.length) || (len < 0) || ((off + len) > bs.length) || ((off + len) < 0)) {
                    throw new IndexOutOfBoundsException();
                } else if (len == 0) {
                    return;
                }
                // Reuse the cached ByteBuffer only when the caller passed the
                // exact same array instance as last time; otherwise wrap anew.
                ByteBuffer bb = ((this.bs == bs) ? this.bb : ByteBuffer.wrap(bs));
                bb.limit(Math.min(off + len, bb.capacity()));
                bb.position(off);
                this.bb = bb;
                this.bs = bs;
                // NOTE(review): Channels.writeFully is not the JDK
                // java.nio.channels.Channels API (which has no public
                // writeFully) — presumably a local helper that loops until the
                // buffer is drained; confirm.
                Channels.writeFully(ch, bb);
            }

            public void close() throws IOException {
                ch.close();
            }
        };
    }
}
// Annotated duplicate: same executable tokens as the unannotated copy of this
// class in this file, with machine-generated dependency notes kept as trailing
// comments.
public class class_name {
    // Adapts a WritableByteChannel to an OutputStream, caching the ByteBuffer
    // wrapper for the caller's most recent array.
    public static OutputStream newOutputStream(final WritableByteChannel ch) {
        checkNotNull(ch, "ch");
        return new OutputStream() {
            private ByteBuffer bb = null;
            private byte[] bs = null; // Invoker's previous array
            private byte[] b1 = null;

            public synchronized void write(int b) throws IOException {
                if (b1 == null) b1 = new byte[1];
                b1[0] = (byte)b;
                this.write(b1);
            }

            public synchronized void write(byte[] bs, int off, int len) throws IOException {
                if ((off < 0) || (off > bs.length) || (len < 0) || ((off + len) > bs.length) || ((off + len) < 0)) {
                    throw new IndexOutOfBoundsException();
                } else if (len == 0) {
                    return; // depends on control dependency: [if], data = [none]
                }
                ByteBuffer bb = ((this.bs == bs) ? this.bb : ByteBuffer.wrap(bs));
                bb.limit(Math.min(off + len, bb.capacity()));
                bb.position(off);
                this.bb = bb;
                this.bs = bs;
                Channels.writeFully(ch, bb);
            }

            public void close() throws IOException {
                ch.close();
            }
        };
    }
}
public class class_name {
    /**
     * Renders this path entry as a multi-line debug string: the root directory
     * on one line, followed by each source directory on its own line.
     */
    public String toDebugString() {
        StringBuilder buf = new StringBuilder("GosuPathEntry:\n");
        buf.append(" root: ").append(_root.toJavaFile().getAbsolutePath()).append("\n");
        for (IDirectory sourceDir : _srcs) {
            buf.append(" src: ").append(sourceDir.toJavaFile().getAbsolutePath()).append("\n");
        }
        return buf.toString();
    }
}
// Annotated duplicate: same executable tokens as the unannotated copy of this
// class in this file, with machine-generated dependency notes kept as trailing
// comments.
public class class_name {
    // Multi-line debug rendering: root directory, then each source directory.
    public String toDebugString() {
        StringBuilder sb = new StringBuilder();
        sb.append("GosuPathEntry:\n");
        sb.append(" root: ").append(_root.toJavaFile().getAbsolutePath()).append("\n");
        for (IDirectory src : _srcs) {
            sb.append(" src: ").append(src.toJavaFile().getAbsolutePath()).append("\n"); // depends on control dependency: [for], data = [src]
        }
        return sb.toString();
    }
}
public class class_name { protected void lookup(String lemma, String posTag, List<String> results) { synchronized (this) { // the stemmer is not thread-safe List<WordData> wordForms = stemmer.lookup(lemma + "|" + posTag); for (WordData wd : wordForms) { results.add(wd.getStem().toString()); } } } }
// Annotated duplicate: same executable tokens as the unannotated copy of this
// class in this file, with machine-generated dependency notes kept as trailing
// comments.
public class class_name {
    // Appends each stem for "lemma|posTag" to results, under the instance lock
    // because the stemmer is not thread-safe.
    protected void lookup(String lemma, String posTag, List<String> results) {
        synchronized (this) {
            // the stemmer is not thread-safe
            List<WordData> wordForms = stemmer.lookup(lemma + "|" + posTag);
            for (WordData wd : wordForms) {
                results.add(wd.getStem().toString()); // depends on control dependency: [for], data = [wd]
            }
        }
    }
}
public class class_name {
    /**
     * Delegates to the wrapped request's getAttribute, bracketing the call
     * with the collaborator's pre/post-invoke hooks so the component metadata
     * context is active for the duration of the lookup.
     *
     * <p>preInvoke sits inside the try, so postInvoke runs (via finally) even
     * if preInvoke itself throws — NOTE(review): confirm postInvoke tolerates
     * a failed preInvoke.
     */
    @Override
    public Object getAttribute(String arg0) {
        try {
            collaborator.preInvoke(componentMetaData);
            return request.getAttribute(arg0);
        } finally {
            collaborator.postInvoke();
        }
    }
}
// Annotated duplicate: same executable tokens as the unannotated copy of this
// class in this file, with machine-generated dependency notes kept as trailing
// comments.
public class class_name {
    // Wraps the delegated attribute lookup in collaborator pre/post-invoke;
    // postInvoke always runs via finally.
    @Override
    public Object getAttribute(String arg0) {
        try {
            collaborator.preInvoke(componentMetaData); // depends on control dependency: [try], data = [none]
            return request.getAttribute(arg0); // depends on control dependency: [try], data = [none]
        } finally {
            collaborator.postInvoke();
        }
    }
}