code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name { public static void zipDirectory(Logger log, File directory, ZipOutputStream zos, String path, FileFilter filter) throws IOException { // get a listing of the directory content File[] dirList = directory.listFiles(); byte[] readBuffer = new byte[8192]; int bytesIn = 0; // loop through dirList, and zip the files if (dirList != null) { for (File f : dirList) { if (f.isDirectory()) { String prefix = path + f.getName() + "/"; if (matches(filter, f)) { zos.putNextEntry(new ZipEntry(prefix)); zipDirectory(log, f, zos, prefix, filter); } } else { String entry = path + f.getName(); if (matches(filter, f)) { FileInputStream fis = new FileInputStream(f); try { ZipEntry anEntry = new ZipEntry(entry); zos.putNextEntry(anEntry); bytesIn = fis.read(readBuffer); while (bytesIn != -1) { zos.write(readBuffer, 0, bytesIn); bytesIn = fis.read(readBuffer); } } finally { fis.close(); } if (log.isDebugEnabled()) { log.debug("zipping file " + entry); } } } zos.closeEntry(); } } } }
public class class_name { public static void zipDirectory(Logger log, File directory, ZipOutputStream zos, String path, FileFilter filter) throws IOException { // get a listing of the directory content File[] dirList = directory.listFiles(); byte[] readBuffer = new byte[8192]; int bytesIn = 0; // loop through dirList, and zip the files if (dirList != null) { for (File f : dirList) { if (f.isDirectory()) { String prefix = path + f.getName() + "/"; if (matches(filter, f)) { zos.putNextEntry(new ZipEntry(prefix)); // depends on control dependency: [if], data = [none] zipDirectory(log, f, zos, prefix, filter); // depends on control dependency: [if], data = [none] } } else { String entry = path + f.getName(); if (matches(filter, f)) { FileInputStream fis = new FileInputStream(f); try { ZipEntry anEntry = new ZipEntry(entry); zos.putNextEntry(anEntry); // depends on control dependency: [try], data = [none] bytesIn = fis.read(readBuffer); // depends on control dependency: [try], data = [none] while (bytesIn != -1) { zos.write(readBuffer, 0, bytesIn); // depends on control dependency: [while], data = [none] bytesIn = fis.read(readBuffer); // depends on control dependency: [while], data = [none] } } finally { fis.close(); } if (log.isDebugEnabled()) { log.debug("zipping file " + entry); // depends on control dependency: [if], data = [none] } } } zos.closeEntry(); } } } }
public class class_name { @VisibleForTesting Map<QName, List<String>> getGroups(final String value) { final Map<QName, List<String>> res = new HashMap<>(); final StringBuilder buf = new StringBuilder(); int previousEnd = 0; final Matcher m = groupPattern.matcher(value); while(m.find()) { buf.append(value.subSequence(previousEnd, m.start())); final String v = m.group(2); if (!v.trim().isEmpty()) { final QName k = QName.valueOf(m.group(1)); if (res.containsKey(k)) { final List<String> l = new ArrayList<>(res.get(k)); l.addAll(Arrays.asList(v.trim().split("\\s+"))); res.put(k, l); } else { res.put(k, Arrays.asList(v.trim().split("\\s+"))); } } previousEnd = m.end(); } buf.append(value.substring(previousEnd)); if (!buf.toString().trim().isEmpty()) { res.put(null, Arrays.asList(buf.toString().trim().split("\\s+"))); } return res; } }
public class class_name { @VisibleForTesting Map<QName, List<String>> getGroups(final String value) { final Map<QName, List<String>> res = new HashMap<>(); final StringBuilder buf = new StringBuilder(); int previousEnd = 0; final Matcher m = groupPattern.matcher(value); while(m.find()) { buf.append(value.subSequence(previousEnd, m.start())); // depends on control dependency: [while], data = [none] final String v = m.group(2); if (!v.trim().isEmpty()) { final QName k = QName.valueOf(m.group(1)); if (res.containsKey(k)) { final List<String> l = new ArrayList<>(res.get(k)); l.addAll(Arrays.asList(v.trim().split("\\s+"))); // depends on control dependency: [if], data = [none] res.put(k, l); // depends on control dependency: [if], data = [none] } else { res.put(k, Arrays.asList(v.trim().split("\\s+"))); // depends on control dependency: [if], data = [none] } } previousEnd = m.end(); // depends on control dependency: [while], data = [none] } buf.append(value.substring(previousEnd)); if (!buf.toString().trim().isEmpty()) { res.put(null, Arrays.asList(buf.toString().trim().split("\\s+"))); // depends on control dependency: [if], data = [none] } return res; } }
public class class_name { static List<Vector2d> newUnitVectors(final IAtom fromAtom, final List<IAtom> toAtoms) { final List<Vector2d> unitVectors = new ArrayList<Vector2d>(toAtoms.size()); for (final IAtom toAtom : toAtoms) { unitVectors.add(newUnitVector(fromAtom.getPoint2d(), toAtom.getPoint2d())); } return unitVectors; } }
public class class_name { static List<Vector2d> newUnitVectors(final IAtom fromAtom, final List<IAtom> toAtoms) { final List<Vector2d> unitVectors = new ArrayList<Vector2d>(toAtoms.size()); for (final IAtom toAtom : toAtoms) { unitVectors.add(newUnitVector(fromAtom.getPoint2d(), toAtom.getPoint2d())); // depends on control dependency: [for], data = [toAtom] } return unitVectors; } }
public class class_name { protected StringBuilder addPart(List<String> commandList,StringBuilder buffer,int endIndex,boolean typeSpace) { int delta=1; if(typeSpace) { delta=0; } String part=buffer.substring(delta,endIndex); commandList.add(part); buffer.delete(0,endIndex+delta); return buffer; } }
public class class_name { protected StringBuilder addPart(List<String> commandList,StringBuilder buffer,int endIndex,boolean typeSpace) { int delta=1; if(typeSpace) { delta=0; // depends on control dependency: [if], data = [none] } String part=buffer.substring(delta,endIndex); commandList.add(part); buffer.delete(0,endIndex+delta); return buffer; } }
public class class_name { public Variable[] promote() { Scope parent = getParent(); if (parent == null) { return new Variable[0]; } Collection<Variable> promotion = new ArrayList<Variable>(); // A set of variable names that have been moved into the promotion. Set<String> matchedNames = new HashSet<String>(7); intersectFrom(this, parent, matchedNames, promotion); Variable[] vars = new Variable[promotion.size()]; return promotion.toArray(vars); } }
public class class_name { public Variable[] promote() { Scope parent = getParent(); if (parent == null) { return new Variable[0]; // depends on control dependency: [if], data = [none] } Collection<Variable> promotion = new ArrayList<Variable>(); // A set of variable names that have been moved into the promotion. Set<String> matchedNames = new HashSet<String>(7); intersectFrom(this, parent, matchedNames, promotion); Variable[] vars = new Variable[promotion.size()]; return promotion.toArray(vars); } }
public class class_name { private boolean isPaintLineSeperators(JComponent c) { boolean paintLines = c instanceof JTextArea; // Global settings String globalOverride = System.getProperty("SeaGlass.JTextArea.drawLineSeparator"); if (globalOverride != null && globalOverride.length() > 0) { paintLines = Boolean.valueOf(globalOverride); } // Settings per component Boolean overrideProperty = (Boolean) c.getClientProperty("SeaGlass.JTextArea.drawLineSeparator"); if (overrideProperty != null) { paintLines = overrideProperty; } return paintLines; } }
public class class_name { private boolean isPaintLineSeperators(JComponent c) { boolean paintLines = c instanceof JTextArea; // Global settings String globalOverride = System.getProperty("SeaGlass.JTextArea.drawLineSeparator"); if (globalOverride != null && globalOverride.length() > 0) { paintLines = Boolean.valueOf(globalOverride); // depends on control dependency: [if], data = [(globalOverride] } // Settings per component Boolean overrideProperty = (Boolean) c.getClientProperty("SeaGlass.JTextArea.drawLineSeparator"); if (overrideProperty != null) { paintLines = overrideProperty; // depends on control dependency: [if], data = [none] } return paintLines; } }
public class class_name { public void setInstancesToTerminate(java.util.Collection<String> instancesToTerminate) { if (instancesToTerminate == null) { this.instancesToTerminate = null; return; } this.instancesToTerminate = new com.amazonaws.internal.SdkInternalList<String>(instancesToTerminate); } }
public class class_name { public void setInstancesToTerminate(java.util.Collection<String> instancesToTerminate) { if (instancesToTerminate == null) { this.instancesToTerminate = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.instancesToTerminate = new com.amazonaws.internal.SdkInternalList<String>(instancesToTerminate); } }
public class class_name { public URL buildWithQuery(String base, String queryString, Object... values) { String urlString = String.format(base + this.template, values) + queryString; URL url = null; try { url = new URL(urlString); } catch (MalformedURLException e) { assert false : "An invalid URL template indicates a bug in the SDK."; } return url; } }
public class class_name { public URL buildWithQuery(String base, String queryString, Object... values) { String urlString = String.format(base + this.template, values) + queryString; URL url = null; try { url = new URL(urlString); // depends on control dependency: [try], data = [none] } catch (MalformedURLException e) { assert false : "An invalid URL template indicates a bug in the SDK."; } // depends on control dependency: [catch], data = [none] return url; } }
public class class_name { public static void copyProperties(Object dest, Object orig){ try { if (orig != null && dest != null){ BeanUtils.copyProperties(dest, orig); PropertyUtils putils = new PropertyUtils(); PropertyDescriptor origDescriptors[] = putils.getPropertyDescriptors(orig); for (PropertyDescriptor origDescriptor : origDescriptors) { String name = origDescriptor.getName(); if ("class".equals(name)) { continue; // No point in trying to set an object's class } Class propertyType = origDescriptor.getPropertyType(); if (!Boolean.class.equals(propertyType) && !(Boolean.class.equals(propertyType))) continue; if (!putils.isReadable(orig, name)) { //because of bad convention Method m = orig.getClass().getMethod("is" + name.substring(0, 1).toUpperCase() + name.substring(1), (Class<?>[]) null); Object value = m.invoke(orig, (Object[]) null); if (putils.isWriteable(dest, name)) { BeanUtilsBean.getInstance().copyProperty(dest, name, value); } } } } } catch (Exception e) { throw new DJException("Could not copy properties for shared object: " + orig +", message: " + e.getMessage(),e); } } }
public class class_name { public static void copyProperties(Object dest, Object orig){ try { if (orig != null && dest != null){ BeanUtils.copyProperties(dest, orig); // depends on control dependency: [if], data = [none] PropertyUtils putils = new PropertyUtils(); PropertyDescriptor origDescriptors[] = putils.getPropertyDescriptors(orig); for (PropertyDescriptor origDescriptor : origDescriptors) { String name = origDescriptor.getName(); if ("class".equals(name)) { continue; // No point in trying to set an object's class } Class propertyType = origDescriptor.getPropertyType(); if (!Boolean.class.equals(propertyType) && !(Boolean.class.equals(propertyType))) continue; if (!putils.isReadable(orig, name)) { //because of bad convention Method m = orig.getClass().getMethod("is" + name.substring(0, 1).toUpperCase() + name.substring(1), (Class<?>[]) null); Object value = m.invoke(orig, (Object[]) null); if (putils.isWriteable(dest, name)) { BeanUtilsBean.getInstance().copyProperty(dest, name, value); // depends on control dependency: [if], data = [none] } } } } } catch (Exception e) { throw new DJException("Could not copy properties for shared object: " + orig +", message: " + e.getMessage(),e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public ListAuditFindingsResult withFindings(AuditFinding... findings) { if (this.findings == null) { setFindings(new java.util.ArrayList<AuditFinding>(findings.length)); } for (AuditFinding ele : findings) { this.findings.add(ele); } return this; } }
public class class_name { public ListAuditFindingsResult withFindings(AuditFinding... findings) { if (this.findings == null) { setFindings(new java.util.ArrayList<AuditFinding>(findings.length)); // depends on control dependency: [if], data = [none] } for (AuditFinding ele : findings) { this.findings.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { @Override public void unsetBufferManager(String sourceId, BufferManager bufferMgr) { if (auditService.isAuditRequired(AuditConstants.SECURITY_AUDIT_MGMT, AuditConstants.SUCCESS)) { AuditMgmtEvent av = new AuditMgmtEvent(thisConfiguration, "AuditHandler:" + auditService.AUDIT_FILE_HANDLER_NAME, "stop"); auditService.sendEvent(av); av = new AuditMgmtEvent(thisConfiguration, "AuditService", "stop"); auditService.sendEvent(av); } } }
public class class_name { @Override public void unsetBufferManager(String sourceId, BufferManager bufferMgr) { if (auditService.isAuditRequired(AuditConstants.SECURITY_AUDIT_MGMT, AuditConstants.SUCCESS)) { AuditMgmtEvent av = new AuditMgmtEvent(thisConfiguration, "AuditHandler:" + auditService.AUDIT_FILE_HANDLER_NAME, "stop"); auditService.sendEvent(av); // depends on control dependency: [if], data = [none] av = new AuditMgmtEvent(thisConfiguration, "AuditService", "stop"); // depends on control dependency: [if], data = [none] auditService.sendEvent(av); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static String toLog(XmlProperty data) { if (data.channels == null) { return data.getName() + "(" + data.getOwner() + ")"; } else { return data.getName() + "(" + data.getOwner() + ")" + (data.channels); } } }
public class class_name { public static String toLog(XmlProperty data) { if (data.channels == null) { return data.getName() + "(" + data.getOwner() + ")"; // depends on control dependency: [if], data = [none] } else { return data.getName() + "(" + data.getOwner() + ")" + (data.channels); } } }
public class class_name { public List<Rulebases.Rulebase> getRulebase() { if (rulebase == null) { rulebase = new ArrayList<Rulebases.Rulebase>(); } return this.rulebase; } }
public class class_name { public List<Rulebases.Rulebase> getRulebase() { if (rulebase == null) { rulebase = new ArrayList<Rulebases.Rulebase>(); // depends on control dependency: [if], data = [none] } return this.rulebase; } }
public class class_name { public static String getUtcTimeAsIso8601(Calendar cal) { try { if (cal == null) { return DatatypeFactory.newInstance().newXMLGregorianCalendar( new GregorianCalendar(TimeZone.getTimeZone("UTC"))) .toXMLFormat().replaceAll("\\.[0-9]{3}", ""); } GregorianCalendar suppliedDateCalendar = new GregorianCalendar(TimeZone.getTimeZone("UTC")); suppliedDateCalendar.setTimeInMillis(cal.getTimeInMillis()); return DatatypeFactory.newInstance().newXMLGregorianCalendar( suppliedDateCalendar).toXMLFormat().replaceAll("\\.[0-9]{3}", ""); } catch (DatatypeConfigurationException e) { SimpleDateFormat xmlDateUtc = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); xmlDateUtc.setTimeZone(TimeZone.getTimeZone("UTC")); return xmlDateUtc.format(Calendar.getInstance()); } } }
public class class_name { public static String getUtcTimeAsIso8601(Calendar cal) { try { if (cal == null) { return DatatypeFactory.newInstance().newXMLGregorianCalendar( new GregorianCalendar(TimeZone.getTimeZone("UTC"))) .toXMLFormat().replaceAll("\\.[0-9]{3}", ""); // depends on control dependency: [if], data = [none] } GregorianCalendar suppliedDateCalendar = new GregorianCalendar(TimeZone.getTimeZone("UTC")); suppliedDateCalendar.setTimeInMillis(cal.getTimeInMillis()); // depends on control dependency: [try], data = [none] return DatatypeFactory.newInstance().newXMLGregorianCalendar( suppliedDateCalendar).toXMLFormat().replaceAll("\\.[0-9]{3}", ""); // depends on control dependency: [try], data = [none] } catch (DatatypeConfigurationException e) { SimpleDateFormat xmlDateUtc = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); xmlDateUtc.setTimeZone(TimeZone.getTimeZone("UTC")); return xmlDateUtc.format(Calendar.getInstance()); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @RequestMapping(value = "/**/consoleout.json", method = RequestMethod.GET) public ModelAndView consoleout(@RequestParam("pipelineName") String pipelineName, @RequestParam("pipelineCounter") String pipelineCounter, @RequestParam("stageName") String stageName, @RequestParam("buildName") String buildName, @RequestParam(value = "stageCounter", required = false) String stageCounter, @RequestParam(value = "startLineNumber", required = false) Long start ) { start = start == null ? 0L : start; try { JobIdentifier identifier = restfulService.findJob(pipelineName, pipelineCounter, stageName, stageCounter, buildName); if (jobInstanceDao.isJobCompleted(identifier) && !consoleService.doesLogExist(identifier)) { return logsNotFound(identifier); } ConsoleConsumer streamer = consoleService.getStreamer(start, identifier); return new ModelAndView(new ConsoleOutView(streamer, consoleLogCharset)); } catch (Exception e) { return buildNotFound(pipelineName, pipelineCounter, stageName, stageCounter, buildName); } } }
public class class_name { @RequestMapping(value = "/**/consoleout.json", method = RequestMethod.GET) public ModelAndView consoleout(@RequestParam("pipelineName") String pipelineName, @RequestParam("pipelineCounter") String pipelineCounter, @RequestParam("stageName") String stageName, @RequestParam("buildName") String buildName, @RequestParam(value = "stageCounter", required = false) String stageCounter, @RequestParam(value = "startLineNumber", required = false) Long start ) { start = start == null ? 0L : start; try { JobIdentifier identifier = restfulService.findJob(pipelineName, pipelineCounter, stageName, stageCounter, buildName); if (jobInstanceDao.isJobCompleted(identifier) && !consoleService.doesLogExist(identifier)) { return logsNotFound(identifier); // depends on control dependency: [if], data = [none] } ConsoleConsumer streamer = consoleService.getStreamer(start, identifier); return new ModelAndView(new ConsoleOutView(streamer, consoleLogCharset)); // depends on control dependency: [try], data = [none] } catch (Exception e) { return buildNotFound(pipelineName, pipelineCounter, stageName, stageCounter, buildName); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static void goBack(final String returnCode) { if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(returnCode)) { CmsRpcAction<CmsReturnLinkInfo> goBackAction = new CmsRpcAction<CmsReturnLinkInfo>() { @Override public void execute() { start(300, false); CmsCoreProvider.getService().getLinkForReturnCode(returnCode, this); } @Override protected void onResponse(CmsReturnLinkInfo result) { stop(false); if (result.getStatus() == CmsReturnLinkInfo.Status.ok) { Window.Location.assign(result.getLink()); } else if (result.getStatus() == CmsReturnLinkInfo.Status.notfound) { CmsMessages msg = org.opencms.ade.sitemap.client.Messages.get(); String title = msg.key( org.opencms.ade.sitemap.client.Messages.GUI_RETURN_PAGE_NOT_FOUND_TITLE_0); String content = msg.key( org.opencms.ade.sitemap.client.Messages.GUI_RETURN_PAGE_NOT_FOUND_TEXT_0); CmsAlertDialog alert = new CmsAlertDialog(title, content); alert.center(); } } }; goBackAction.execute(); } else { CmsSitemapController controller = CmsSitemapView.getInstance().getController(); CmsClientSitemapEntry root = controller.getData().getRoot(); String newPath = root.getSitePath(); controller.leaveEditor(newPath); } } }
public class class_name { public static void goBack(final String returnCode) { if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(returnCode)) { CmsRpcAction<CmsReturnLinkInfo> goBackAction = new CmsRpcAction<CmsReturnLinkInfo>() { @Override public void execute() { start(300, false); CmsCoreProvider.getService().getLinkForReturnCode(returnCode, this); } @Override protected void onResponse(CmsReturnLinkInfo result) { stop(false); if (result.getStatus() == CmsReturnLinkInfo.Status.ok) { Window.Location.assign(result.getLink()); // depends on control dependency: [if], data = [none] } else if (result.getStatus() == CmsReturnLinkInfo.Status.notfound) { CmsMessages msg = org.opencms.ade.sitemap.client.Messages.get(); String title = msg.key( org.opencms.ade.sitemap.client.Messages.GUI_RETURN_PAGE_NOT_FOUND_TITLE_0); String content = msg.key( org.opencms.ade.sitemap.client.Messages.GUI_RETURN_PAGE_NOT_FOUND_TEXT_0); CmsAlertDialog alert = new CmsAlertDialog(title, content); alert.center(); // depends on control dependency: [if], data = [none] } } }; goBackAction.execute(); // depends on control dependency: [if], data = [none] } else { CmsSitemapController controller = CmsSitemapView.getInstance().getController(); CmsClientSitemapEntry root = controller.getData().getRoot(); String newPath = root.getSitePath(); controller.leaveEditor(newPath); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static void write(byte[] data, OutputStream os, boolean closeSink) { try { os.write(data); } catch (IOException e) { throw E.ioException(e); } finally { if (closeSink) { close(os); } } } }
public class class_name { public static void write(byte[] data, OutputStream os, boolean closeSink) { try { os.write(data); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw E.ioException(e); } finally { // depends on control dependency: [catch], data = [none] if (closeSink) { close(os); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public static synchronized <T> IExpectationSetters<T> expectPrivate(Object instance, String methodName, Class<?> where, Class<?>[] parameterTypes, Object... arguments) throws Exception { if (instance == null) { throw new IllegalArgumentException("Instance or class to expect cannot be null."); } Method[] methods = null; if (methodName != null) { if (parameterTypes == null) { methods = Whitebox.getMethods(where, methodName); } else { methods = new Method[]{Whitebox.getMethod(where, methodName, parameterTypes)}; } } Method methodToExpect; if (methods != null && methods.length == 1) { methodToExpect = methods[0]; } else { methodToExpect = WhiteboxImpl.findMethodOrThrowException(instance, null, methodName, arguments); } return doExpectPrivate(instance, methodToExpect, arguments); } }
public class class_name { public static synchronized <T> IExpectationSetters<T> expectPrivate(Object instance, String methodName, Class<?> where, Class<?>[] parameterTypes, Object... arguments) throws Exception { if (instance == null) { throw new IllegalArgumentException("Instance or class to expect cannot be null."); } Method[] methods = null; if (methodName != null) { if (parameterTypes == null) { methods = Whitebox.getMethods(where, methodName); // depends on control dependency: [if], data = [none] } else { methods = new Method[]{Whitebox.getMethod(where, methodName, parameterTypes)}; // depends on control dependency: [if], data = [none] } } Method methodToExpect; if (methods != null && methods.length == 1) { methodToExpect = methods[0]; // depends on control dependency: [if], data = [none] } else { methodToExpect = WhiteboxImpl.findMethodOrThrowException(instance, null, methodName, arguments); // depends on control dependency: [if], data = [none] } return doExpectPrivate(instance, methodToExpect, arguments); } }
public class class_name { private static void startAll() { try { if (_configs!=null) { synchronized(_configs) { for(int i=0;i<_servers.size();i++) { HttpServer server = (HttpServer)_servers.get(i); if (!server.isStarted()) server.start(); } } } } catch(Exception e) { log.warn(LogSupport.EXCEPTION,e); } } }
public class class_name { private static void startAll() { try { if (_configs!=null) { synchronized(_configs) // depends on control dependency: [if], data = [(_configs] { for(int i=0;i<_servers.size();i++) { HttpServer server = (HttpServer)_servers.get(i); if (!server.isStarted()) server.start(); } } } } catch(Exception e) { log.warn(LogSupport.EXCEPTION,e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private void putPropertyStrings(Service s) { String type = s.getType(); String algorithm = s.getAlgorithm(); // use super() to avoid permission check and other processing super.put(type + "." + algorithm, s.getClassName()); for (String alias : s.getAliases()) { super.put(ALIAS_PREFIX + type + "." + alias, algorithm); } for (Map.Entry<UString,String> entry : s.attributes.entrySet()) { String key = type + "." + algorithm + " " + entry.getKey(); super.put(key, entry.getValue()); } if (registered) { Security.increaseVersion(); } } }
public class class_name { private void putPropertyStrings(Service s) { String type = s.getType(); String algorithm = s.getAlgorithm(); // use super() to avoid permission check and other processing super.put(type + "." + algorithm, s.getClassName()); for (String alias : s.getAliases()) { super.put(ALIAS_PREFIX + type + "." + alias, algorithm); // depends on control dependency: [for], data = [alias] } for (Map.Entry<UString,String> entry : s.attributes.entrySet()) { String key = type + "." + algorithm + " " + entry.getKey(); super.put(key, entry.getValue()); // depends on control dependency: [for], data = [entry] } if (registered) { Security.increaseVersion(); // depends on control dependency: [if], data = [none] } } }
public class class_name { private void createMinistrySummary(final AbstractOrderedLayout content, final String field, final String label) { final DataSeries dataSeries = new DataSeries(); final Series series = new Series(); final Map<String, List<GovernmentBodyAnnualOutcomeSummary>> reportByMinistry = esvApi.getGovernmentBodyReportByMinistry(); for (final Entry<String, List<GovernmentBodyAnnualOutcomeSummary>> entry : reportByMinistry.entrySet()) { series.addSeries(new XYseries().setLabel(entry.getKey())); dataSeries.newSeries(); final Map<Integer, Double> annualSummaryMap = entry.getValue().stream().filter(t -> t.getDescriptionFields().get(field) != null).collect(Collectors.groupingBy(GovernmentBodyAnnualOutcomeSummary::getYear,Collectors.summingDouble(GovernmentBodyAnnualOutcomeSummary::getYearTotal))); for (final Entry<Integer, Double> entryData : annualSummaryMap.entrySet()) { if (entryData.getValue() != null && entryData.getValue().intValue() > 0) { dataSeries.add(entryData.getKey() +1 +"-01-01" , entryData.getValue()); } } } addChart(content, label, new DCharts().setDataSeries(dataSeries) .setOptions(getChartOptions().createOptionsXYDateFloatLogYAxisLegendOutside(series)).show(), true); } }
public class class_name { private void createMinistrySummary(final AbstractOrderedLayout content, final String field, final String label) { final DataSeries dataSeries = new DataSeries(); final Series series = new Series(); final Map<String, List<GovernmentBodyAnnualOutcomeSummary>> reportByMinistry = esvApi.getGovernmentBodyReportByMinistry(); for (final Entry<String, List<GovernmentBodyAnnualOutcomeSummary>> entry : reportByMinistry.entrySet()) { series.addSeries(new XYseries().setLabel(entry.getKey())); // depends on control dependency: [for], data = [entry] dataSeries.newSeries(); // depends on control dependency: [for], data = [none] final Map<Integer, Double> annualSummaryMap = entry.getValue().stream().filter(t -> t.getDescriptionFields().get(field) != null).collect(Collectors.groupingBy(GovernmentBodyAnnualOutcomeSummary::getYear,Collectors.summingDouble(GovernmentBodyAnnualOutcomeSummary::getYearTotal))); for (final Entry<Integer, Double> entryData : annualSummaryMap.entrySet()) { if (entryData.getValue() != null && entryData.getValue().intValue() > 0) { dataSeries.add(entryData.getKey() +1 +"-01-01" , entryData.getValue()); // depends on control dependency: [if], data = [none] } } } addChart(content, label, new DCharts().setDataSeries(dataSeries) .setOptions(getChartOptions().createOptionsXYDateFloatLogYAxisLegendOutside(series)).show(), true); } }
public class class_name { public static <T> ObjectName getObjectName(final Class<T> clazz) { final String domain = clazz.getPackage().getName(); final String className = clazz.getSimpleName(); final String objectName = domain+":type="+className; LOG.debug("Returning object name: {}", objectName); try { return new ObjectName(objectName); } catch (final MalformedObjectNameException e) { // This should never, ever happen. LOG.error("Invalid ObjectName: "+objectName, e); throw new RuntimeException("Could not create ObjectName?", e); } } }
public class class_name { public static <T> ObjectName getObjectName(final Class<T> clazz) { final String domain = clazz.getPackage().getName(); final String className = clazz.getSimpleName(); final String objectName = domain+":type="+className; LOG.debug("Returning object name: {}", objectName); try { return new ObjectName(objectName); // depends on control dependency: [try], data = [none] } catch (final MalformedObjectNameException e) { // This should never, ever happen. LOG.error("Invalid ObjectName: "+objectName, e); throw new RuntimeException("Could not create ObjectName?", e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @SuppressWarnings("unchecked") private <T extends BaseTopicWrapper<T>> void doSpecTopicSecondPass(final BuildData buildData, final Map<SpecTopic, Set<String>> usedIdAttributes) throws BuildProcessingException { log.info("Doing " + buildData.getBuildLocale() + " Spec Topic Pass"); final List<ITopicNode> topicNodes = buildData.getBuildDatabase().getAllTopicNodes(); log.info("\tProcessing " + topicNodes.size() + " Spec Topics"); final int showPercent = 10; final float total = topicNodes.size(); float current = 0; int lastPercent = 0; final DocBookXMLPreProcessor xmlPreProcessor = buildData.getXMLPreProcessor(); for (final ITopicNode topicNode : topicNodes) { // Check if the app should be shutdown if (isShuttingDown.get()) { return; } if (log.isDebugEnabled()) log.debug("\tProcessing SpecTopic " + topicNode.getId() + (topicNode.getRevision() != null ? (", " + "Revision " + topicNode.getRevision()) : "")); ++current; final int percent = Math.round(current / total * 100); if (percent - lastPercent >= showPercent) { lastPercent = percent; log.info("\tProcessing Pass " + percent + "% Done"); } final BaseTopicWrapper<?> topic = topicNode.getTopic(); final Document doc = topicNode.getXMLDocument(); assert doc != null; assert topic != null; if (doc != null) { final boolean valid = processSpecTopicInjections(buildData, topicNode, xmlPreProcessor); // Check if the app should be shutdown if (isShuttingDown.get()) { return; } if (!valid) { final String xmlStringInCDATA = DocBookBuildUtilities.convertDocumentToCDATAFormattedString(doc, getXMLFormatProperties()); buildData.getErrorDatabase().addError(topic, BuilderConstants.ERROR_INVALID_INJECTIONS + " The processed XML is <programlisting>" + xmlStringInCDATA + "</programlisting>"); DocBookBuildUtilities.setTopicNodeXMLForError(buildData, topicNode, getErrorInvalidInjectionTopicTemplate().getValue()); } else { // Check for any possible invalid injection references final List<InjectionError> injectionErrors 
= XMLUtilities.checkForInvalidInjections(doc); if (!injectionErrors.isEmpty()) { for (final InjectionError injectionError : injectionErrors) { final List<String> injectionErrorMsgs = new ArrayList<String>(); for (final String msg : injectionError.getMessages()) { injectionErrorMsgs.add(DocBookUtilities.buildListItem(msg)); } final String errorMsg = "\"" + injectionError.getInjection().trim() + "\" " + BuilderConstants .WARNING_POSSIBLE_INVALID_INJECTIONS + DocBookUtilities.wrapListItems( injectionErrorMsgs); buildData.getErrorDatabase().addWarning(topic, ErrorType.POSSIBLE_INVALID_INJECTION, errorMsg); } } } // Ensure that all of the id attributes are valid by setting any duplicates with a post fixed number. DocBookBuildUtilities.setUniqueIds(buildData, topicNode, topicNode.getXMLDocument().getDocumentElement(), topicNode.getXMLDocument(), usedIdAttributes); // Make sure the XML is valid docbook after the standard processing has been done if (validateTopicXML(buildData, topicNode, doc) && topicNode instanceof SpecTopic) { // Add the editor/report a bug links (these should always be valid) xmlPreProcessor.processTopicAdditionalInfo(buildData, (SpecTopic) topicNode, doc); } else { // Re-run the unique id pass, as the topic would have been replaced by an error template DocBookBuildUtilities.setUniqueIds(buildData, topicNode, topicNode.getXMLDocument().getDocumentElement(), topicNode.getXMLDocument(), usedIdAttributes); } } } } }
public class class_name { @SuppressWarnings("unchecked") private <T extends BaseTopicWrapper<T>> void doSpecTopicSecondPass(final BuildData buildData, final Map<SpecTopic, Set<String>> usedIdAttributes) throws BuildProcessingException { log.info("Doing " + buildData.getBuildLocale() + " Spec Topic Pass"); final List<ITopicNode> topicNodes = buildData.getBuildDatabase().getAllTopicNodes(); log.info("\tProcessing " + topicNodes.size() + " Spec Topics"); final int showPercent = 10; final float total = topicNodes.size(); float current = 0; int lastPercent = 0; final DocBookXMLPreProcessor xmlPreProcessor = buildData.getXMLPreProcessor(); for (final ITopicNode topicNode : topicNodes) { // Check if the app should be shutdown if (isShuttingDown.get()) { return; // depends on control dependency: [if], data = [none] } if (log.isDebugEnabled()) log.debug("\tProcessing SpecTopic " + topicNode.getId() + (topicNode.getRevision() != null ? (", " + "Revision " + topicNode.getRevision()) : "")); ++current; final int percent = Math.round(current / total * 100); if (percent - lastPercent >= showPercent) { lastPercent = percent; // depends on control dependency: [if], data = [none] log.info("\tProcessing Pass " + percent + "% Done"); // depends on control dependency: [if], data = [none] } final BaseTopicWrapper<?> topic = topicNode.getTopic(); final Document doc = topicNode.getXMLDocument(); assert doc != null; assert topic != null; if (doc != null) { final boolean valid = processSpecTopicInjections(buildData, topicNode, xmlPreProcessor); // Check if the app should be shutdown if (isShuttingDown.get()) { return; // depends on control dependency: [if], data = [none] } if (!valid) { final String xmlStringInCDATA = DocBookBuildUtilities.convertDocumentToCDATAFormattedString(doc, getXMLFormatProperties()); buildData.getErrorDatabase().addError(topic, BuilderConstants.ERROR_INVALID_INJECTIONS + " The processed XML is <programlisting>" + xmlStringInCDATA + "</programlisting>"); // depends 
on control dependency: [if], data = [none] DocBookBuildUtilities.setTopicNodeXMLForError(buildData, topicNode, getErrorInvalidInjectionTopicTemplate().getValue()); // depends on control dependency: [if], data = [none] } else { // Check for any possible invalid injection references final List<InjectionError> injectionErrors = XMLUtilities.checkForInvalidInjections(doc); if (!injectionErrors.isEmpty()) { for (final InjectionError injectionError : injectionErrors) { final List<String> injectionErrorMsgs = new ArrayList<String>(); for (final String msg : injectionError.getMessages()) { injectionErrorMsgs.add(DocBookUtilities.buildListItem(msg)); // depends on control dependency: [for], data = [msg] } final String errorMsg = "\"" + injectionError.getInjection().trim() + "\" " + BuilderConstants .WARNING_POSSIBLE_INVALID_INJECTIONS + DocBookUtilities.wrapListItems( injectionErrorMsgs); buildData.getErrorDatabase().addWarning(topic, ErrorType.POSSIBLE_INVALID_INJECTION, errorMsg); // depends on control dependency: [for], data = [none] } } } // Ensure that all of the id attributes are valid by setting any duplicates with a post fixed number. 
DocBookBuildUtilities.setUniqueIds(buildData, topicNode, topicNode.getXMLDocument().getDocumentElement(), topicNode.getXMLDocument(), usedIdAttributes); // depends on control dependency: [if], data = [none] // Make sure the XML is valid docbook after the standard processing has been done if (validateTopicXML(buildData, topicNode, doc) && topicNode instanceof SpecTopic) { // Add the editor/report a bug links (these should always be valid) xmlPreProcessor.processTopicAdditionalInfo(buildData, (SpecTopic) topicNode, doc); // depends on control dependency: [if], data = [none] } else { // Re-run the unique id pass, as the topic would have been replaced by an error template DocBookBuildUtilities.setUniqueIds(buildData, topicNode, topicNode.getXMLDocument().getDocumentElement(), topicNode.getXMLDocument(), usedIdAttributes); // depends on control dependency: [if], data = [none] } } } } }
public class class_name { public void publish(RoutedMessage routedMessage, Object id) { //Publish the trace if it is not coming from a handler thread if (!ThreadLocalHandler.get()) { if (routedMessage.getLogRecord() != null && bufferMgr != null) { bufferMgr.add(parse(routedMessage, id)); } } } }
public class class_name { public void publish(RoutedMessage routedMessage, Object id) { //Publish the trace if it is not coming from a handler thread if (!ThreadLocalHandler.get()) { if (routedMessage.getLogRecord() != null && bufferMgr != null) { bufferMgr.add(parse(routedMessage, id)); // depends on control dependency: [if], data = [none] } } } }
public class class_name { protected void setForeground(Color newColor) { if (foreground == null || !foreground.equals(newColor)) { foreground = new Color(newColor.getRGB()); invalidateCache(); } } }
public class class_name { protected void setForeground(Color newColor) { if (foreground == null || !foreground.equals(newColor)) { foreground = new Color(newColor.getRGB()); // depends on control dependency: [if], data = [none] invalidateCache(); // depends on control dependency: [if], data = [none] } } }
public class class_name { protected Double getBilinearInterpolationValue(float offsetX, float offsetY, float minX, float maxX, float minY, float maxY, Double topLeft, Double topRight, Double bottomLeft, Double bottomRight) { Double value = null; if (topLeft != null && (topRight != null || minX == maxX) && (bottomLeft != null || minY == maxY) && (bottomRight != null || (minX == maxX && minY == maxY))) { float diffX = maxX - minX; double topRow; Double bottomRow; if (diffX == 0) { topRow = topLeft; bottomRow = bottomLeft; } else { float diffLeft = offsetX; float diffRight = diffX - offsetX; topRow = ((diffRight / diffX) * topLeft) + ((diffLeft / diffX) * topRight); bottomRow = ((diffRight / diffX) * bottomLeft) + ((diffLeft / diffX) * bottomRight); } float diffY = maxY - minY; double result; if (diffY == 0) { result = topRow; } else { float diffTop = offsetY; float diffBottom = diffY - offsetY; result = ((diffBottom / diffY) * topRow) + ((diffTop / diffY) * bottomRow); } value = result; } return value; } }
public class class_name { protected Double getBilinearInterpolationValue(float offsetX, float offsetY, float minX, float maxX, float minY, float maxY, Double topLeft, Double topRight, Double bottomLeft, Double bottomRight) { Double value = null; if (topLeft != null && (topRight != null || minX == maxX) && (bottomLeft != null || minY == maxY) && (bottomRight != null || (minX == maxX && minY == maxY))) { float diffX = maxX - minX; double topRow; Double bottomRow; if (diffX == 0) { topRow = topLeft; // depends on control dependency: [if], data = [none] bottomRow = bottomLeft; // depends on control dependency: [if], data = [none] } else { float diffLeft = offsetX; float diffRight = diffX - offsetX; topRow = ((diffRight / diffX) * topLeft) + ((diffLeft / diffX) * topRight); // depends on control dependency: [if], data = [none] bottomRow = ((diffRight / diffX) * bottomLeft) + ((diffLeft / diffX) * bottomRight); // depends on control dependency: [if], data = [none] } float diffY = maxY - minY; double result; if (diffY == 0) { result = topRow; // depends on control dependency: [if], data = [none] } else { float diffTop = offsetY; float diffBottom = diffY - offsetY; result = ((diffBottom / diffY) * topRow) + ((diffTop / diffY) * bottomRow); // depends on control dependency: [if], data = [none] } value = result; // depends on control dependency: [if], data = [none] } return value; } }
public class class_name { private Source register(Source source) { /** * As promised, the registrar simply abstracts away internal resolvers * by iterating over them during the registration process. */ for (Resolver resolver : resolvers) { resolver.register(source); } return source; } }
public class class_name { private Source register(Source source) { /** * As promised, the registrar simply abstracts away internal resolvers * by iterating over them during the registration process. */ for (Resolver resolver : resolvers) { resolver.register(source); // depends on control dependency: [for], data = [resolver] } return source; } }
public class class_name { public static float[] toPrimitive(Float... a) { if (a != null) { float[] p = new float[a.length]; for (int i = 0; i < a.length; i++) { p[i] = a[i]; } return p; } return null; } }
public class class_name { public static float[] toPrimitive(Float... a) { if (a != null) { float[] p = new float[a.length]; for (int i = 0; i < a.length; i++) { p[i] = a[i]; // depends on control dependency: [for], data = [i] } return p; // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { public static String buildClusterWorkerCommand(String clusterName, String nfsParentDir, String sinkLogRootDir, String awsConfDir, String appWorkDir, String masterPublicIp, String workerS3ConfUri, String workerS3ConfFiles, String workerS3JarsUri, String workerS3JarsFiles, String workerJarsDir, String workerJvmMemory, Optional<String> workerJvmArgs, Optional<String> gobblinVersion) { final StringBuilder cloudInitCmds = new StringBuilder().append(BASH).append("\n"); final String clusterWorkerClassName = GobblinAWSTaskRunner.class.getSimpleName(); // Connect to NFS server // TODO: Replace with EFS (it went into GA on 6/30/2016) final String nfsDir = nfsParentDir + clusterName; final String nfsMountCmd = String.format("mount -t %s %s:%s %s", NFS_TYPE_4, masterPublicIp, nfsDir, nfsDir); cloudInitCmds.append("mkdir -p ").append(nfsDir).append("\n"); cloudInitCmds.append(nfsMountCmd).append("\n"); // Create various other directories cloudInitCmds.append("mkdir -p ").append(sinkLogRootDir).append("\n"); cloudInitCmds.append("chown -R ec2-user:ec2-user /home/ec2-user/*").append("\n"); // Setup short variables to save cloud-init script space if (gobblinVersion.isPresent()) { cloudInitCmds.append("vr=").append(gobblinVersion.get()).append("\n"); } cloudInitCmds.append("cg0=").append(workerS3ConfUri).append("\n"); cloudInitCmds.append("cg=").append(awsConfDir).append("\n"); cloudInitCmds.append("jr0=").append(workerS3JarsUri).append("\n"); cloudInitCmds.append("jr=").append(workerJarsDir).append("\n"); // Download configurations from S3 final StringBuilder classpath = new StringBuilder(); final List<String> awsConfs = SPLITTER.splitToList(workerS3ConfFiles); for (String awsConf : awsConfs) { cloudInitCmds.append(String.format("wget -P \"${cg}\" \"${cg0}\"%s", awsConf)).append("\n"); } classpath.append(awsConfDir); // Download jars from S3 // TODO: Limit only custom user jars to pulled from S3, load rest from AMI final List<String> awsJars = 
SPLITTER.splitToList(workerS3JarsFiles); for (String awsJar : awsJars) { cloudInitCmds.append(String.format("wget -P \"${jr}\" \"${jr0}\"%s", awsJar)).append("\n"); } classpath.append(":").append(workerJarsDir).append("*"); // Get a random Helix instance name cloudInitCmds.append("pi=`curl http://169.254.169.254/latest/meta-data/local-ipv4`").append("\n"); // TODO: Add cron that brings back worker if it dies // Launch Gobblin Worker final StringBuilder launchGobblinClusterWorkerCmd = new StringBuilder() .append("java") .append(" -cp ").append(classpath) .append(" -Xmx").append(workerJvmMemory) .append(" ").append(JvmUtils.formatJvmArguments(workerJvmArgs)) .append(" ").append(GobblinAWSTaskRunner.class.getName()) .append(" --").append(GobblinClusterConfigurationKeys.APPLICATION_NAME_OPTION_NAME) .append(" ").append(clusterName) .append(" --").append(GobblinClusterConfigurationKeys.HELIX_INSTANCE_NAME_OPTION_NAME) .append(" ").append("$pi") .append(" --").append(GobblinAWSConfigurationKeys.APP_WORK_DIR) .append(" ").append(appWorkDir) .append(" 1>").append(sinkLogRootDir) .append(clusterWorkerClassName).append(".") .append("$pi").append(".") .append(CloudInitScriptBuilder.STDOUT) .append(" 2>").append(sinkLogRootDir) .append(clusterWorkerClassName).append(".") .append("$pi").append(".") .append(CloudInitScriptBuilder.STDERR); cloudInitCmds.append(launchGobblinClusterWorkerCmd).append("\n"); final String cloudInitScript = cloudInitCmds.toString(); LOGGER.info("Cloud-init script for worker node: " + cloudInitScript); return encodeBase64(cloudInitScript); } }
public class class_name { public static String buildClusterWorkerCommand(String clusterName, String nfsParentDir, String sinkLogRootDir, String awsConfDir, String appWorkDir, String masterPublicIp, String workerS3ConfUri, String workerS3ConfFiles, String workerS3JarsUri, String workerS3JarsFiles, String workerJarsDir, String workerJvmMemory, Optional<String> workerJvmArgs, Optional<String> gobblinVersion) { final StringBuilder cloudInitCmds = new StringBuilder().append(BASH).append("\n"); final String clusterWorkerClassName = GobblinAWSTaskRunner.class.getSimpleName(); // Connect to NFS server // TODO: Replace with EFS (it went into GA on 6/30/2016) final String nfsDir = nfsParentDir + clusterName; final String nfsMountCmd = String.format("mount -t %s %s:%s %s", NFS_TYPE_4, masterPublicIp, nfsDir, nfsDir); cloudInitCmds.append("mkdir -p ").append(nfsDir).append("\n"); cloudInitCmds.append(nfsMountCmd).append("\n"); // Create various other directories cloudInitCmds.append("mkdir -p ").append(sinkLogRootDir).append("\n"); cloudInitCmds.append("chown -R ec2-user:ec2-user /home/ec2-user/*").append("\n"); // Setup short variables to save cloud-init script space if (gobblinVersion.isPresent()) { cloudInitCmds.append("vr=").append(gobblinVersion.get()).append("\n"); // depends on control dependency: [if], data = [none] } cloudInitCmds.append("cg0=").append(workerS3ConfUri).append("\n"); cloudInitCmds.append("cg=").append(awsConfDir).append("\n"); cloudInitCmds.append("jr0=").append(workerS3JarsUri).append("\n"); cloudInitCmds.append("jr=").append(workerJarsDir).append("\n"); // Download configurations from S3 final StringBuilder classpath = new StringBuilder(); final List<String> awsConfs = SPLITTER.splitToList(workerS3ConfFiles); for (String awsConf : awsConfs) { cloudInitCmds.append(String.format("wget -P \"${cg}\" \"${cg0}\"%s", awsConf)).append("\n"); // depends on control dependency: [for], data = [awsConf] } classpath.append(awsConfDir); // Download jars from S3 // 
TODO: Limit only custom user jars to pulled from S3, load rest from AMI final List<String> awsJars = SPLITTER.splitToList(workerS3JarsFiles); for (String awsJar : awsJars) { cloudInitCmds.append(String.format("wget -P \"${jr}\" \"${jr0}\"%s", awsJar)).append("\n"); // depends on control dependency: [for], data = [awsJar] } classpath.append(":").append(workerJarsDir).append("*"); // Get a random Helix instance name cloudInitCmds.append("pi=`curl http://169.254.169.254/latest/meta-data/local-ipv4`").append("\n"); // TODO: Add cron that brings back worker if it dies // Launch Gobblin Worker final StringBuilder launchGobblinClusterWorkerCmd = new StringBuilder() .append("java") .append(" -cp ").append(classpath) .append(" -Xmx").append(workerJvmMemory) .append(" ").append(JvmUtils.formatJvmArguments(workerJvmArgs)) .append(" ").append(GobblinAWSTaskRunner.class.getName()) .append(" --").append(GobblinClusterConfigurationKeys.APPLICATION_NAME_OPTION_NAME) .append(" ").append(clusterName) .append(" --").append(GobblinClusterConfigurationKeys.HELIX_INSTANCE_NAME_OPTION_NAME) .append(" ").append("$pi") .append(" --").append(GobblinAWSConfigurationKeys.APP_WORK_DIR) .append(" ").append(appWorkDir) .append(" 1>").append(sinkLogRootDir) .append(clusterWorkerClassName).append(".") .append("$pi").append(".") .append(CloudInitScriptBuilder.STDOUT) .append(" 2>").append(sinkLogRootDir) .append(clusterWorkerClassName).append(".") .append("$pi").append(".") .append(CloudInitScriptBuilder.STDERR); cloudInitCmds.append(launchGobblinClusterWorkerCmd).append("\n"); final String cloudInitScript = cloudInitCmds.toString(); LOGGER.info("Cloud-init script for worker node: " + cloudInitScript); return encodeBase64(cloudInitScript); } }
public class class_name { protected String interpolateString(String value) { String interpolatedValue = value; for (Map.Entry<String, String> entry : interpolationValues.entrySet()) { interpolatedValue = interpolatedValue.replace(entry.getKey(), entry.getValue()); } return interpolatedValue; } }
public class class_name { protected String interpolateString(String value) { String interpolatedValue = value; for (Map.Entry<String, String> entry : interpolationValues.entrySet()) { interpolatedValue = interpolatedValue.replace(entry.getKey(), entry.getValue()); // depends on control dependency: [for], data = [entry] } return interpolatedValue; } }
public class class_name { public static MetricValue valueOf(BigDecimal val, final String format) { if (val instanceof MetricValue) { // TODO: check that val.format == format return (MetricValue) val; } return new MetricValue(val, new DecimalFormat(format)); } }
public class class_name { public static MetricValue valueOf(BigDecimal val, final String format) { if (val instanceof MetricValue) { // TODO: check that val.format == format return (MetricValue) val; // depends on control dependency: [if], data = [none] } return new MetricValue(val, new DecimalFormat(format)); } }
public class class_name { private static void appendSystemPropertiesToCommandLine(AbstractWisdomMojo mojo, CommandLine cmd) { Properties userProperties = mojo.session.getUserProperties(); if (userProperties != null) { //noinspection unchecked Enumeration<String> names = (Enumeration<String>) userProperties.propertyNames(); while (names.hasMoreElements()) { String name = names.nextElement(); cmd.addArgument("-D" + name + "=" + userProperties.getProperty(name)); } } } }
public class class_name { private static void appendSystemPropertiesToCommandLine(AbstractWisdomMojo mojo, CommandLine cmd) { Properties userProperties = mojo.session.getUserProperties(); if (userProperties != null) { //noinspection unchecked Enumeration<String> names = (Enumeration<String>) userProperties.propertyNames(); while (names.hasMoreElements()) { String name = names.nextElement(); cmd.addArgument("-D" + name + "=" + userProperties.getProperty(name)); // depends on control dependency: [while], data = [none] } } } }
public class class_name { private Report getReport(AbstractBlock part) { Object primaryReportColum = part.getAttributes().get(PRIMARY_REPORT_COLUM); Object reportType = part.getAttributes().get(REPORT_TYPE); Properties reportProperties = parseProperties(part, REPORT_PROPERTIES); Report.ReportBuilder reportBuilder = Report.builder(); if (reportType != null) { reportBuilder.selectedTypes(Report.selectTypes(reportType.toString())); } if (primaryReportColum != null) { reportBuilder.primaryColumn(primaryReportColum.toString()); } return reportBuilder.properties(reportProperties).build(); } }
public class class_name { private Report getReport(AbstractBlock part) { Object primaryReportColum = part.getAttributes().get(PRIMARY_REPORT_COLUM); Object reportType = part.getAttributes().get(REPORT_TYPE); Properties reportProperties = parseProperties(part, REPORT_PROPERTIES); Report.ReportBuilder reportBuilder = Report.builder(); if (reportType != null) { reportBuilder.selectedTypes(Report.selectTypes(reportType.toString())); // depends on control dependency: [if], data = [(reportType] } if (primaryReportColum != null) { reportBuilder.primaryColumn(primaryReportColum.toString()); // depends on control dependency: [if], data = [(primaryReportColum] } return reportBuilder.properties(reportProperties).build(); } }
public class class_name { public void addMoreContext(CacheContext context) { Object cached = context.get(CacheContext.SECURITY_CONTEXT_KEY); if (cached == null) { SavedAuthorization sa = securityContext.getSavedAuthorization(); log.debug("Storing SavedAuthorization {}", sa); context.put(CacheContext.SECURITY_CONTEXT_KEY, sa); } } }
public class class_name { public void addMoreContext(CacheContext context) { Object cached = context.get(CacheContext.SECURITY_CONTEXT_KEY); if (cached == null) { SavedAuthorization sa = securityContext.getSavedAuthorization(); log.debug("Storing SavedAuthorization {}", sa); // depends on control dependency: [if], data = [none] context.put(CacheContext.SECURITY_CONTEXT_KEY, sa); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public void sawOpcode(int seen) { try { stack.precomputation(this); int pc = getPC(); CodeException ex = catchHandlerPCs.get(Integer.valueOf(pc)); if (ex != null) { int endPC; if ((seen == Const.GOTO) || (seen == Const.GOTO_W)) { endPC = this.getBranchTarget(); } else { endPC = Integer.MAX_VALUE; } ConstantPool pool = getConstantPool(); ConstantClass ccls = (ConstantClass) pool.getConstant(ex.getCatchType()); String catchSig = ccls.getBytes(pool); CatchInfo ci = new CatchInfo(ex.getHandlerPC(), endPC, catchSig); catchInfos.add(ci); } updateEndPCsOnCatchRegScope(catchInfos, pc, seen); removeFinishedCatchBlocks(catchInfos, pc); if (seen == Const.ATHROW) { processThrow(); } else if ((seen == Const.IRETURN) && isBooleanMethod && !hasValidFalseReturn && (stack.getStackDepth() > 0)) { processBooleanReturn(); } } finally { stack.sawOpcode(this, seen); } } }
public class class_name { @Override public void sawOpcode(int seen) { try { stack.precomputation(this); // depends on control dependency: [try], data = [none] int pc = getPC(); CodeException ex = catchHandlerPCs.get(Integer.valueOf(pc)); if (ex != null) { int endPC; if ((seen == Const.GOTO) || (seen == Const.GOTO_W)) { endPC = this.getBranchTarget(); // depends on control dependency: [if], data = [none] } else { endPC = Integer.MAX_VALUE; // depends on control dependency: [if], data = [none] } ConstantPool pool = getConstantPool(); ConstantClass ccls = (ConstantClass) pool.getConstant(ex.getCatchType()); String catchSig = ccls.getBytes(pool); CatchInfo ci = new CatchInfo(ex.getHandlerPC(), endPC, catchSig); catchInfos.add(ci); // depends on control dependency: [if], data = [none] } updateEndPCsOnCatchRegScope(catchInfos, pc, seen); // depends on control dependency: [try], data = [none] removeFinishedCatchBlocks(catchInfos, pc); // depends on control dependency: [try], data = [none] if (seen == Const.ATHROW) { processThrow(); // depends on control dependency: [if], data = [none] } else if ((seen == Const.IRETURN) && isBooleanMethod && !hasValidFalseReturn && (stack.getStackDepth() > 0)) { processBooleanReturn(); // depends on control dependency: [if], data = [none] } } finally { stack.sawOpcode(this, seen); } } }
public class class_name { @Nonnull public static FileIOError renameFile (@Nonnull final File aSourceFile, @Nonnull final File aTargetFile) { ValueEnforcer.notNull (aSourceFile, "SourceFile"); ValueEnforcer.notNull (aTargetFile, "TargetFile"); // Does the source file exist? if (!FileHelper.existsFile (aSourceFile)) return EFileIOErrorCode.SOURCE_DOES_NOT_EXIST.getAsIOError (EFileIOOperation.RENAME_FILE, aSourceFile); // Are source and target different? if (EqualsHelper.equals (aSourceFile, aTargetFile)) return EFileIOErrorCode.SOURCE_EQUALS_TARGET.getAsIOError (EFileIOOperation.RENAME_FILE, aSourceFile); // Does the target file already exist? if (aTargetFile.exists ()) return EFileIOErrorCode.TARGET_ALREADY_EXISTS.getAsIOError (EFileIOOperation.RENAME_FILE, aTargetFile); // Is the source parent directory writable? final File aSourceParentDir = aSourceFile.getParentFile (); if (aSourceParentDir != null && !aSourceParentDir.canWrite ()) return EFileIOErrorCode.SOURCE_PARENT_NOT_WRITABLE.getAsIOError (EFileIOOperation.RENAME_FILE, aSourceFile); // Is the target parent directory writable? final File aTargetParentDir = aTargetFile.getParentFile (); if (aTargetParentDir != null && aTargetParentDir.exists () && !aTargetParentDir.canWrite ()) return EFileIOErrorCode.TARGET_PARENT_NOT_WRITABLE.getAsIOError (EFileIOOperation.RENAME_FILE, aTargetFile); // Ensure parent of target directory is present FileHelper.ensureParentDirectoryIsPresent (aTargetFile); try { final EFileIOErrorCode eError = aSourceFile.renameTo (aTargetFile) ? EFileIOErrorCode.NO_ERROR : EFileIOErrorCode.OPERATION_FAILED; return eError.getAsIOError (EFileIOOperation.RENAME_FILE, aSourceFile, aTargetFile); } catch (final SecurityException ex) { return EFileIOErrorCode.getSecurityAsIOError (EFileIOOperation.RENAME_FILE, ex); } } }
public class class_name { @Nonnull public static FileIOError renameFile (@Nonnull final File aSourceFile, @Nonnull final File aTargetFile) { ValueEnforcer.notNull (aSourceFile, "SourceFile"); ValueEnforcer.notNull (aTargetFile, "TargetFile"); // Does the source file exist? if (!FileHelper.existsFile (aSourceFile)) return EFileIOErrorCode.SOURCE_DOES_NOT_EXIST.getAsIOError (EFileIOOperation.RENAME_FILE, aSourceFile); // Are source and target different? if (EqualsHelper.equals (aSourceFile, aTargetFile)) return EFileIOErrorCode.SOURCE_EQUALS_TARGET.getAsIOError (EFileIOOperation.RENAME_FILE, aSourceFile); // Does the target file already exist? if (aTargetFile.exists ()) return EFileIOErrorCode.TARGET_ALREADY_EXISTS.getAsIOError (EFileIOOperation.RENAME_FILE, aTargetFile); // Is the source parent directory writable? final File aSourceParentDir = aSourceFile.getParentFile (); if (aSourceParentDir != null && !aSourceParentDir.canWrite ()) return EFileIOErrorCode.SOURCE_PARENT_NOT_WRITABLE.getAsIOError (EFileIOOperation.RENAME_FILE, aSourceFile); // Is the target parent directory writable? final File aTargetParentDir = aTargetFile.getParentFile (); if (aTargetParentDir != null && aTargetParentDir.exists () && !aTargetParentDir.canWrite ()) return EFileIOErrorCode.TARGET_PARENT_NOT_WRITABLE.getAsIOError (EFileIOOperation.RENAME_FILE, aTargetFile); // Ensure parent of target directory is present FileHelper.ensureParentDirectoryIsPresent (aTargetFile); try { final EFileIOErrorCode eError = aSourceFile.renameTo (aTargetFile) ? EFileIOErrorCode.NO_ERROR : EFileIOErrorCode.OPERATION_FAILED; return eError.getAsIOError (EFileIOOperation.RENAME_FILE, aSourceFile, aTargetFile); // depends on control dependency: [try], data = [none] } catch (final SecurityException ex) { return EFileIOErrorCode.getSecurityAsIOError (EFileIOOperation.RENAME_FILE, ex); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public Date toDate(double x) { if (Double.isNaN(x)) { return null; } return new Date(Double.doubleToRawLongBits(x)); } }
public class class_name { public Date toDate(double x) { if (Double.isNaN(x)) { return null; // depends on control dependency: [if], data = [none] } return new Date(Double.doubleToRawLongBits(x)); } }
public class class_name { public static List<Point> computeDialogPositions(final int dialogWidth, final int dialogHeight) { List<Point> dialogPosition = null; final int windowBesides = ScreenSizeExtensions.getScreenWidth() / dialogWidth; final int windowBelow = ScreenSizeExtensions.getScreenHeight() / dialogHeight; final int listSize = windowBesides * windowBelow; dialogPosition = new ArrayList<>(listSize); int dotWidth = 0; int dotHeight = 0; for (int y = 0; y < windowBelow; y++) { dotWidth = 0; for (int x = 0; x < windowBesides; x++) { final Point p = new Point(dotWidth, dotHeight); dialogPosition.add(p); dotWidth = dotWidth + dialogWidth; } dotHeight = dotHeight + dialogHeight; } return dialogPosition; } }
public class class_name { public static List<Point> computeDialogPositions(final int dialogWidth, final int dialogHeight) { List<Point> dialogPosition = null; final int windowBesides = ScreenSizeExtensions.getScreenWidth() / dialogWidth; final int windowBelow = ScreenSizeExtensions.getScreenHeight() / dialogHeight; final int listSize = windowBesides * windowBelow; dialogPosition = new ArrayList<>(listSize); int dotWidth = 0; int dotHeight = 0; for (int y = 0; y < windowBelow; y++) { dotWidth = 0; // depends on control dependency: [for], data = [none] for (int x = 0; x < windowBesides; x++) { final Point p = new Point(dotWidth, dotHeight); dialogPosition.add(p); // depends on control dependency: [for], data = [none] dotWidth = dotWidth + dialogWidth; // depends on control dependency: [for], data = [none] } dotHeight = dotHeight + dialogHeight; // depends on control dependency: [for], data = [none] } return dialogPosition; } }
public class class_name { public static Object[] readObjects(ChannelBuffer buffer, int numOfObjects) { Object[] objects = new String[numOfObjects]; for (int i = 0; i < numOfObjects; i++) { Object theObject = readObject(buffer); if (null == theObject) break; objects[i] = theObject; } return objects; } }
public class class_name { public static Object[] readObjects(ChannelBuffer buffer, int numOfObjects) { Object[] objects = new String[numOfObjects]; for (int i = 0; i < numOfObjects; i++) { Object theObject = readObject(buffer); if (null == theObject) break; objects[i] = theObject; // depends on control dependency: [for], data = [i] } return objects; } }
public class class_name { private String getRelatedIDs(SmallMoleculeReference smr) { String ids = smr.getUri(); for (Object o : new PathAccessor( "SmallMoleculeReference/entityReferenceOf").getValueFromBean(smr)) { SimplePhysicalEntity spe = (SimplePhysicalEntity) o; ids += "\n" + spe.getUri(); } return ids; } }
public class class_name { private String getRelatedIDs(SmallMoleculeReference smr) { String ids = smr.getUri(); for (Object o : new PathAccessor( "SmallMoleculeReference/entityReferenceOf").getValueFromBean(smr)) { SimplePhysicalEntity spe = (SimplePhysicalEntity) o; ids += "\n" + spe.getUri(); // depends on control dependency: [for], data = [none] } return ids; } }
public class class_name { public static int[] getDeconvolutionOutputSize(INDArray inputData, int[] kernel, int[] strides, int[] padding, ConvolutionMode convolutionMode, int[] dilation) { // FIXME: int cast int hIn = (int) inputData.size(2); int wIn = (int) inputData.size(3); int[] eKernel = effectiveKernelSize(kernel, dilation); if (convolutionMode == ConvolutionMode.Same) { int hOut = strides[0] * hIn; int wOut = strides[1] * wIn; return new int[]{hOut, wOut}; } int hOut = strides[0] * (hIn - 1) + eKernel[0] - 2 * padding[0]; int wOut = strides[1] * (wIn - 1) + eKernel[1] - 2 * padding[1]; return new int[]{hOut, wOut}; } }
public class class_name { public static int[] getDeconvolutionOutputSize(INDArray inputData, int[] kernel, int[] strides, int[] padding, ConvolutionMode convolutionMode, int[] dilation) { // FIXME: int cast int hIn = (int) inputData.size(2); int wIn = (int) inputData.size(3); int[] eKernel = effectiveKernelSize(kernel, dilation); if (convolutionMode == ConvolutionMode.Same) { int hOut = strides[0] * hIn; int wOut = strides[1] * wIn; return new int[]{hOut, wOut}; // depends on control dependency: [if], data = [none] } int hOut = strides[0] * (hIn - 1) + eKernel[0] - 2 * padding[0]; int wOut = strides[1] * (wIn - 1) + eKernel[1] - 2 * padding[1]; return new int[]{hOut, wOut}; } }
public class class_name { @Override public void init(Context context) { try { // get the cookie that contains session information: Cookie cookie = context.request().cookie(applicationCookiePrefix + SESSION_SUFFIX); // check that the cookie is not empty: if (cookie != null && cookie.value() != null && !"".equals(cookie.value().trim()) && cookie.value().contains("-")) { String value = cookie.value(); // the first substring until "-" is the sign String sign = value.substring(0, value.indexOf('-')); // rest from "-" until the end is the payload of the cookie String payload = value.substring(value.indexOf('-') + 1); if (CookieDataCodec.safeEquals(sign, crypto.sign(payload))) { CookieDataCodec.decode(data, payload); } else { LOGGER.warn("Invalid session cookie - signature check failed"); } // Make sure session contains valid timestamp if (!data.containsKey(TIMESTAMP_KEY)) { data.clear(); } else { if (Long.parseLong(data.get(TIMESTAMP_KEY)) + sessionExpireTimeInMs < System .currentTimeMillis()) { // Session expired sessionDataHasBeenChanged = true; data.clear(); } } // Everything's alright => prolong session data.put(TIMESTAMP_KEY, Long.toString(System.currentTimeMillis())); } } catch (UnsupportedEncodingException unsupportedEncodingException) { LOGGER.error("Encoding exception - this must not happen", unsupportedEncodingException); } } }
public class class_name { @Override public void init(Context context) { try { // get the cookie that contains session information: Cookie cookie = context.request().cookie(applicationCookiePrefix + SESSION_SUFFIX); // check that the cookie is not empty: if (cookie != null && cookie.value() != null && !"".equals(cookie.value().trim()) && cookie.value().contains("-")) { String value = cookie.value(); // the first substring until "-" is the sign String sign = value.substring(0, value.indexOf('-')); // rest from "-" until the end is the payload of the cookie String payload = value.substring(value.indexOf('-') + 1); if (CookieDataCodec.safeEquals(sign, crypto.sign(payload))) { CookieDataCodec.decode(data, payload); // depends on control dependency: [if], data = [none] } else { LOGGER.warn("Invalid session cookie - signature check failed"); // depends on control dependency: [if], data = [none] } // Make sure session contains valid timestamp if (!data.containsKey(TIMESTAMP_KEY)) { data.clear(); // depends on control dependency: [if], data = [none] } else { if (Long.parseLong(data.get(TIMESTAMP_KEY)) + sessionExpireTimeInMs < System .currentTimeMillis()) { // Session expired sessionDataHasBeenChanged = true; // depends on control dependency: [if], data = [none] data.clear(); // depends on control dependency: [if], data = [none] } } // Everything's alright => prolong session data.put(TIMESTAMP_KEY, Long.toString(System.currentTimeMillis())); // depends on control dependency: [if], data = [none] } } catch (UnsupportedEncodingException unsupportedEncodingException) { LOGGER.error("Encoding exception - this must not happen", unsupportedEncodingException); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void officeCheck() { // move to receptionist for dynamic text //if (from == null) { // throwMailFromAddressNotFoundException(); //} if (toList == null || toList.isEmpty()) { throwMailToAddressNotFoundException(); } if (bodyFile == null && plainBody == null) { String msg = "Not found body file or plain body: bodyFile=" + bodyFile + " plainBody=" + plainBody; throw new SMailPostcardIllegalStateException(msg); } if (!forcedlyDirect && (bodyFile != null && plainBody != null)) { String msg = "Set either body file or plain body: bodyFile=" + bodyFile + " plainBody=" + plainBody; throw new SMailPostcardIllegalStateException(msg); } if (plainBody == null && htmlBody != null) { String msg = "Cannot set html body only (without plain body): htmlBody=" + htmlBody; throw new SMailPostcardIllegalStateException(msg); } if (!forcedlyDirect && (!wholeFixedTextUsed && templateVariableMap == null)) { String msg = "Not found template variable map:"; msg = msg + " wholeFixedTextUsed=" + wholeFixedTextUsed + " variableMap=" + templateVariableMap; throw new SMailPostcardIllegalStateException(msg); } if (!forcedlyDirect && (wholeFixedTextUsed && templateVariableMap != null)) { String msg = "Unneeded template variable map:"; msg = msg + " wholeFixedTextUsed=" + wholeFixedTextUsed + " variableMap=" + templateVariableMap; throw new SMailPostcardIllegalStateException(msg); } // no check for no variable mail (fixed message) //if (templateVariableMap != null && templateVariableMap.isEmpty()) { // String msg = "Empty variable map for template text: variableMap=" + templateVariableMap; // throw new SMailPostcardIllegalStateException(msg); //} } }
public class class_name { public void officeCheck() { // move to receptionist for dynamic text //if (from == null) { // throwMailFromAddressNotFoundException(); //} if (toList == null || toList.isEmpty()) { throwMailToAddressNotFoundException(); // depends on control dependency: [if], data = [none] } if (bodyFile == null && plainBody == null) { String msg = "Not found body file or plain body: bodyFile=" + bodyFile + " plainBody=" + plainBody; throw new SMailPostcardIllegalStateException(msg); } if (!forcedlyDirect && (bodyFile != null && plainBody != null)) { String msg = "Set either body file or plain body: bodyFile=" + bodyFile + " plainBody=" + plainBody; throw new SMailPostcardIllegalStateException(msg); } if (plainBody == null && htmlBody != null) { String msg = "Cannot set html body only (without plain body): htmlBody=" + htmlBody; // depends on control dependency: [if], data = [none] throw new SMailPostcardIllegalStateException(msg); } if (!forcedlyDirect && (!wholeFixedTextUsed && templateVariableMap == null)) { String msg = "Not found template variable map:"; msg = msg + " wholeFixedTextUsed=" + wholeFixedTextUsed + " variableMap=" + templateVariableMap; // depends on control dependency: [if], data = [none] throw new SMailPostcardIllegalStateException(msg); } if (!forcedlyDirect && (wholeFixedTextUsed && templateVariableMap != null)) { String msg = "Unneeded template variable map:"; msg = msg + " wholeFixedTextUsed=" + wholeFixedTextUsed + " variableMap=" + templateVariableMap; // depends on control dependency: [if], data = [none] throw new SMailPostcardIllegalStateException(msg); } // no check for no variable mail (fixed message) //if (templateVariableMap != null && templateVariableMap.isEmpty()) { // String msg = "Empty variable map for template text: variableMap=" + templateVariableMap; // throw new SMailPostcardIllegalStateException(msg); //} } }
public class class_name { public void pushNDArrayMessage(NDArrayMessage message) { //start a subscriber that can send us ndarrays if (subscriber == null) { running = new AtomicBoolean(true); subscriber = AeronNDArraySubscriber.startSubscriber(aeron, subscriberHost, subscriberPort, this, subscriberStream, running); log.debug("Started parameter server client on " + subscriber.connectionUrl()); } String[] split = ndarraySendUrl.split(":"); int port = Integer.parseInt(split[1]); int streamToPublish = Integer.parseInt(split[2]); String channel = AeronUtil.aeronChannel(split[0], port); log.debug("Parameter server client publishing to " + ndarraySendUrl); try (AeronNDArrayPublisher publisher = AeronNDArrayPublisher.builder().streamId(streamToPublish) .compress(isCompressArray()).aeron(aeron).channel(channel).build()) { publisher.publish(message); } catch (Exception e) { throw new RuntimeException(e); } } }
public class class_name { public void pushNDArrayMessage(NDArrayMessage message) { //start a subscriber that can send us ndarrays if (subscriber == null) { running = new AtomicBoolean(true); // depends on control dependency: [if], data = [none] subscriber = AeronNDArraySubscriber.startSubscriber(aeron, subscriberHost, subscriberPort, this, subscriberStream, running); // depends on control dependency: [if], data = [none] log.debug("Started parameter server client on " + subscriber.connectionUrl()); // depends on control dependency: [if], data = [none] } String[] split = ndarraySendUrl.split(":"); int port = Integer.parseInt(split[1]); int streamToPublish = Integer.parseInt(split[2]); String channel = AeronUtil.aeronChannel(split[0], port); log.debug("Parameter server client publishing to " + ndarraySendUrl); try (AeronNDArrayPublisher publisher = AeronNDArrayPublisher.builder().streamId(streamToPublish) .compress(isCompressArray()).aeron(aeron).channel(channel).build()) { publisher.publish(message); } catch (Exception e) { throw new RuntimeException(e); } } }
public class class_name { protected static void decorateCORSProperties( final HttpServletRequest request, final CORSRequestType corsRequestType) { if (request == null) { throw new IllegalArgumentException("HttpServletRequest object is null"); } if (corsRequestType == null) { throw new IllegalArgumentException("CORSRequestType object is null"); } switch (corsRequestType) { case SIMPLE: request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_IS_CORS_REQUEST, Boolean.TRUE); request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_ORIGIN, request.getHeader(CorsFilter.REQUEST_HEADER_ORIGIN)); request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_REQUEST_TYPE, corsRequestType.name().toLowerCase(Locale.ENGLISH)); break; case ACTUAL: request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_IS_CORS_REQUEST, Boolean.TRUE); request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_ORIGIN, request.getHeader(CorsFilter.REQUEST_HEADER_ORIGIN)); request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_REQUEST_TYPE, corsRequestType.name().toLowerCase(Locale.ENGLISH)); break; case PRE_FLIGHT: request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_IS_CORS_REQUEST, Boolean.TRUE); request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_ORIGIN, request.getHeader(CorsFilter.REQUEST_HEADER_ORIGIN)); request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_REQUEST_TYPE, corsRequestType.name().toLowerCase(Locale.ENGLISH)); String headers = request.getHeader(REQUEST_HEADER_ACCESS_CONTROL_REQUEST_HEADERS); if (headers == null) { headers = ""; } request.setAttribute(CorsFilter.HTTP_REQUEST_ATTRIBUTE_REQUEST_HEADERS, headers); break; case NOT_CORS: request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_IS_CORS_REQUEST, Boolean.FALSE); break; default: // Don't set any attributes break; } } }
public class class_name { protected static void decorateCORSProperties( final HttpServletRequest request, final CORSRequestType corsRequestType) { if (request == null) { throw new IllegalArgumentException("HttpServletRequest object is null"); } if (corsRequestType == null) { throw new IllegalArgumentException("CORSRequestType object is null"); } switch (corsRequestType) { case SIMPLE: request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_IS_CORS_REQUEST, Boolean.TRUE); request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_ORIGIN, request.getHeader(CorsFilter.REQUEST_HEADER_ORIGIN)); request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_REQUEST_TYPE, corsRequestType.name().toLowerCase(Locale.ENGLISH)); break; case ACTUAL: request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_IS_CORS_REQUEST, Boolean.TRUE); request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_ORIGIN, request.getHeader(CorsFilter.REQUEST_HEADER_ORIGIN)); request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_REQUEST_TYPE, corsRequestType.name().toLowerCase(Locale.ENGLISH)); break; case PRE_FLIGHT: request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_IS_CORS_REQUEST, Boolean.TRUE); request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_ORIGIN, request.getHeader(CorsFilter.REQUEST_HEADER_ORIGIN)); request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_REQUEST_TYPE, corsRequestType.name().toLowerCase(Locale.ENGLISH)); String headers = request.getHeader(REQUEST_HEADER_ACCESS_CONTROL_REQUEST_HEADERS); if (headers == null) { headers = ""; // depends on control dependency: [if], data = [none] } request.setAttribute(CorsFilter.HTTP_REQUEST_ATTRIBUTE_REQUEST_HEADERS, headers); break; case NOT_CORS: request.setAttribute( CorsFilter.HTTP_REQUEST_ATTRIBUTE_IS_CORS_REQUEST, Boolean.FALSE); break; default: // Don't set any attributes break; } } }
public class class_name { private boolean checkCommonPropExistance(Path rootPath, String noExtFileName) throws IOException { Configuration conf = new Configuration(); FileStatus[] children = rootPath.getFileSystem(conf).listStatus(rootPath); for (FileStatus aChild : children) { if (aChild.getPath().getName().contains(noExtFileName)) { return false; } } return true; } }
public class class_name { private boolean checkCommonPropExistance(Path rootPath, String noExtFileName) throws IOException { Configuration conf = new Configuration(); FileStatus[] children = rootPath.getFileSystem(conf).listStatus(rootPath); for (FileStatus aChild : children) { if (aChild.getPath().getName().contains(noExtFileName)) { return false; // depends on control dependency: [if], data = [none] } } return true; } }
public class class_name { @SuppressWarnings("unchecked") private static <MK, K, V> Map<K, V> getMap(Cache<MK, Object> cache, MK key, boolean createIfAbsent, boolean fineGrained) { if (fineGrained) { // With fine grained maps the cache will store both the keyset under master key and the entries under group keys return FineGrainedAtomicMapProxyImpl.newInstance((Cache<Object, Object>) cache, key, createIfAbsent); } else { return AtomicMapProxyImpl.newInstance(cache, key, createIfAbsent); } } }
public class class_name { @SuppressWarnings("unchecked") private static <MK, K, V> Map<K, V> getMap(Cache<MK, Object> cache, MK key, boolean createIfAbsent, boolean fineGrained) { if (fineGrained) { // With fine grained maps the cache will store both the keyset under master key and the entries under group keys return FineGrainedAtomicMapProxyImpl.newInstance((Cache<Object, Object>) cache, key, createIfAbsent); // depends on control dependency: [if], data = [none] } else { return AtomicMapProxyImpl.newInstance(cache, key, createIfAbsent); // depends on control dependency: [if], data = [none] } } }
public class class_name { public DeploymentInfo addLastAuthenticationMechanism(final String name, final AuthenticationMechanism mechanism) { authenticationMechanisms.put(name, new ImmediateAuthenticationMechanismFactory(mechanism)); if(loginConfig == null) { loginConfig = new LoginConfig(null); } loginConfig.addLastAuthMethod(new AuthMethodConfig(name)); return this; } }
public class class_name { public DeploymentInfo addLastAuthenticationMechanism(final String name, final AuthenticationMechanism mechanism) { authenticationMechanisms.put(name, new ImmediateAuthenticationMechanismFactory(mechanism)); if(loginConfig == null) { loginConfig = new LoginConfig(null); // depends on control dependency: [if], data = [null)] } loginConfig.addLastAuthMethod(new AuthMethodConfig(name)); return this; } }
public class class_name { public int layerInputSize(String layerName) { Layer l = getLayer(layerName); if(l == null){ throw new IllegalArgumentException("No layer with name \"" + layerName + "\" exists"); } org.deeplearning4j.nn.conf.layers.Layer conf = l.conf().getLayer(); if (conf == null || !(conf instanceof FeedForwardLayer)) { return 0; } FeedForwardLayer ffl = (FeedForwardLayer) conf; // FIXME: int cast return (int) ffl.getNIn(); } }
public class class_name { public int layerInputSize(String layerName) { Layer l = getLayer(layerName); if(l == null){ throw new IllegalArgumentException("No layer with name \"" + layerName + "\" exists"); } org.deeplearning4j.nn.conf.layers.Layer conf = l.conf().getLayer(); if (conf == null || !(conf instanceof FeedForwardLayer)) { return 0; // depends on control dependency: [if], data = [none] } FeedForwardLayer ffl = (FeedForwardLayer) conf; // FIXME: int cast return (int) ffl.getNIn(); } }
public class class_name { private LeftJoinNode liftSubstitution(LeftJoinNode normalizedLeftJoin, ImmutableSubstitution<ImmutableTerm> remainingRightSubstitution, IntermediateQuery query) { SubstitutionPropagationProposal<LeftJoinNode> proposal = new SubstitutionPropagationProposalImpl<>( normalizedLeftJoin, remainingRightSubstitution); try { NodeCentricOptimizationResults<LeftJoinNode> results = query.applyProposal(proposal, true); return results .getNewNodeOrReplacingChild() // The LJ is expected to be the child of a construction node .flatMap(query::getFirstChild) .filter(n -> n instanceof LeftJoinNode) .map(n -> (LeftJoinNode) n) .orElseThrow(() -> new MinorOntopInternalBugException("Was expected to insert a construction node " + "followed by a LJ")); } catch (EmptyQueryException e) { throw new MinorOntopInternalBugException("This substitution propagation was not expected " + "to make the query be empty"); } } }
public class class_name { private LeftJoinNode liftSubstitution(LeftJoinNode normalizedLeftJoin, ImmutableSubstitution<ImmutableTerm> remainingRightSubstitution, IntermediateQuery query) { SubstitutionPropagationProposal<LeftJoinNode> proposal = new SubstitutionPropagationProposalImpl<>( normalizedLeftJoin, remainingRightSubstitution); try { NodeCentricOptimizationResults<LeftJoinNode> results = query.applyProposal(proposal, true); return results .getNewNodeOrReplacingChild() // The LJ is expected to be the child of a construction node .flatMap(query::getFirstChild) .filter(n -> n instanceof LeftJoinNode) .map(n -> (LeftJoinNode) n) .orElseThrow(() -> new MinorOntopInternalBugException("Was expected to insert a construction node " + "followed by a LJ")); // depends on control dependency: [try], data = [none] } catch (EmptyQueryException e) { throw new MinorOntopInternalBugException("This substitution propagation was not expected " + "to make the query be empty"); } // depends on control dependency: [catch], data = [none] } }
public class class_name { protected void broadcastTo ( DObject object, Name from, byte levelOrMode, String bundle, String msg) { if (from == null) { SpeakUtil.sendSystem(object, bundle, msg, levelOrMode /* level */); } else { SpeakUtil.sendSpeak(object, from, bundle, msg, levelOrMode /* mode */); } } }
public class class_name { protected void broadcastTo ( DObject object, Name from, byte levelOrMode, String bundle, String msg) { if (from == null) { SpeakUtil.sendSystem(object, bundle, msg, levelOrMode /* level */); // depends on control dependency: [if], data = [none] } else { SpeakUtil.sendSpeak(object, from, bundle, msg, levelOrMode /* mode */); // depends on control dependency: [if], data = [none] } } }
public class class_name { public <T extends EventListener> void subscribe(EventPublisher source, T listener) { if (source == null || listener == null) { throw new IllegalArgumentException("Parameters cannot be null"); } log.debug("[subscribe] Adding {} --> {}", source.getClass().getName(), listener.getClass().getName()); GenericEventDispatcher<T> dispatcher = (GenericEventDispatcher<T>) dispatchers.get(source); if (dispatcher == null) { log.warn("[subscribe] Registering with a disconnected source"); dispatcher = createDispatcher(source); } dispatcher.addListener(listener); } }
public class class_name { public <T extends EventListener> void subscribe(EventPublisher source, T listener) { if (source == null || listener == null) { throw new IllegalArgumentException("Parameters cannot be null"); } log.debug("[subscribe] Adding {} --> {}", source.getClass().getName(), listener.getClass().getName()); GenericEventDispatcher<T> dispatcher = (GenericEventDispatcher<T>) dispatchers.get(source); if (dispatcher == null) { log.warn("[subscribe] Registering with a disconnected source"); // depends on control dependency: [if], data = [none] dispatcher = createDispatcher(source); // depends on control dependency: [if], data = [none] } dispatcher.addListener(listener); } }
public class class_name { private void prioritizeFromEntryNode(DiGraphNode<Node, Branch> entry) { PriorityQueue<DiGraphNode<Node, Branch>> worklist = new PriorityQueue<>(10, priorityComparator); worklist.add(entry); while (!worklist.isEmpty()) { DiGraphNode<Node, Branch> current = worklist.remove(); if (nodePriorities.containsKey(current)) { continue; } nodePriorities.put(current, ++priorityCounter); List<DiGraphNode<Node, Branch>> successors = cfg.getDirectedSuccNodes(current); worklist.addAll(successors); } } }
public class class_name { private void prioritizeFromEntryNode(DiGraphNode<Node, Branch> entry) { PriorityQueue<DiGraphNode<Node, Branch>> worklist = new PriorityQueue<>(10, priorityComparator); worklist.add(entry); while (!worklist.isEmpty()) { DiGraphNode<Node, Branch> current = worklist.remove(); if (nodePriorities.containsKey(current)) { continue; } nodePriorities.put(current, ++priorityCounter); // depends on control dependency: [while], data = [none] List<DiGraphNode<Node, Branch>> successors = cfg.getDirectedSuccNodes(current); worklist.addAll(successors); // depends on control dependency: [while], data = [none] } } }
public class class_name { @Override public byte[] binaryDiff(final RevisionCodecData codecData, final Diff diff) throws UnsupportedEncodingException, EncodingException { byte[] bData = encode(codecData, diff); if (MODE_ZIP_COMPRESSION) { Deflater compresser = new Deflater(); compresser.setInput(bData); compresser.finish(); byte[] output = new byte[1000]; ByteArrayOutputStream stream = new ByteArrayOutputStream(); int cLength; do { cLength = compresser.deflate(output); stream.write(output, 0, cLength); } while (cLength == 1000); output = stream.toByteArray(); if (bData.length + 1 < output.length) { return bData; } else { stream = new ByteArrayOutputStream(); stream.write(new byte[] { -128 }, 0, 1); stream.write(output, 0, output.length); return stream.toByteArray(); } } return bData; } }
public class class_name { @Override public byte[] binaryDiff(final RevisionCodecData codecData, final Diff diff) throws UnsupportedEncodingException, EncodingException { byte[] bData = encode(codecData, diff); if (MODE_ZIP_COMPRESSION) { Deflater compresser = new Deflater(); compresser.setInput(bData); compresser.finish(); byte[] output = new byte[1000]; ByteArrayOutputStream stream = new ByteArrayOutputStream(); int cLength; do { cLength = compresser.deflate(output); stream.write(output, 0, cLength); } while (cLength == 1000); output = stream.toByteArray(); if (bData.length + 1 < output.length) { return bData; // depends on control dependency: [if], data = [none] } else { stream = new ByteArrayOutputStream(); // depends on control dependency: [if], data = [none] stream.write(new byte[] { -128 }, 0, 1); // depends on control dependency: [if], data = [none] stream.write(output, 0, output.length); // depends on control dependency: [if], data = [output.length)] return stream.toByteArray(); // depends on control dependency: [if], data = [none] } } return bData; } }
public class class_name { public byte[] encrypt(byte[] plainData) { checkArgument(plainData.length >= OVERHEAD_SIZE, "Invalid plainData, %s bytes", plainData.length); // workBytes := initVector || payload || zeros:4 byte[] workBytes = plainData.clone(); ByteBuffer workBuffer = ByteBuffer.wrap(workBytes); boolean success = false; try { // workBytes := initVector || payload || I(signature) int signature = hmacSignature(workBytes); workBuffer.putInt(workBytes.length - SIGNATURE_SIZE, signature); // workBytes := initVector || E(payload) || I(signature) xorPayloadToHmacPad(workBytes); if (logger.isDebugEnabled()) { logger.debug(dump("Encrypted", plainData, workBytes)); } success = true; return workBytes; } finally { if (!success && logger.isDebugEnabled()) { logger.debug(dump("Encrypted (failed)", plainData, workBytes)); } } } }
public class class_name { public byte[] encrypt(byte[] plainData) { checkArgument(plainData.length >= OVERHEAD_SIZE, "Invalid plainData, %s bytes", plainData.length); // workBytes := initVector || payload || zeros:4 byte[] workBytes = plainData.clone(); ByteBuffer workBuffer = ByteBuffer.wrap(workBytes); boolean success = false; try { // workBytes := initVector || payload || I(signature) int signature = hmacSignature(workBytes); workBuffer.putInt(workBytes.length - SIGNATURE_SIZE, signature); // depends on control dependency: [try], data = [none] // workBytes := initVector || E(payload) || I(signature) xorPayloadToHmacPad(workBytes); // depends on control dependency: [try], data = [none] if (logger.isDebugEnabled()) { logger.debug(dump("Encrypted", plainData, workBytes)); // depends on control dependency: [if], data = [none] } success = true; // depends on control dependency: [try], data = [none] return workBytes; // depends on control dependency: [try], data = [none] } finally { if (!success && logger.isDebugEnabled()) { logger.debug(dump("Encrypted (failed)", plainData, workBytes)); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public int getFlags() { int flags = 32; // Use Adobe Standard charset if (italicAngle != 0) { flags = flags | 64; } if (isFixedPitch != 0) { flags = flags | 2; } if (hasSerifs) { flags = flags | 1; } return flags; } }
public class class_name { public int getFlags() { int flags = 32; // Use Adobe Standard charset if (italicAngle != 0) { flags = flags | 64; // depends on control dependency: [if], data = [none] } if (isFixedPitch != 0) { flags = flags | 2; // depends on control dependency: [if], data = [none] } if (hasSerifs) { flags = flags | 1; // depends on control dependency: [if], data = [none] } return flags; } }
public class class_name { public void makeTouchable(GVRSceneObject sceneObject, OnTouch handler) { if (handler != null) { if (sceneObject.getRenderData() != null) { if (!touchHandlers.containsKey(sceneObject)) { makePickable(sceneObject); touchHandlers.put(sceneObject, new WeakReference<>(handler)); } } else if (sceneObject.getChildrenCount() > 0) { for (GVRSceneObject child : sceneObject.getChildren()) { makeTouchable(child, handler); } } } } }
public class class_name { public void makeTouchable(GVRSceneObject sceneObject, OnTouch handler) { if (handler != null) { if (sceneObject.getRenderData() != null) { if (!touchHandlers.containsKey(sceneObject)) { makePickable(sceneObject); // depends on control dependency: [if], data = [none] touchHandlers.put(sceneObject, new WeakReference<>(handler)); // depends on control dependency: [if], data = [none] } } else if (sceneObject.getChildrenCount() > 0) { for (GVRSceneObject child : sceneObject.getChildren()) { makeTouchable(child, handler); // depends on control dependency: [for], data = [child] } } } } }
public class class_name { protected Object createBeanProperty(final BeanProperty bp) { Setter setter = bp.getSetter(true); if (setter == null) { return null; } Class type = setter.getSetterRawType(); Object newInstance; try { newInstance = ClassUtil.newInstance(type); } catch (Exception ex) { if (isSilent) { return null; } throw new BeanException("Invalid property: " + bp.name, bp, ex); } newInstance = invokeSetter(setter, bp, newInstance); return newInstance; } }
public class class_name { protected Object createBeanProperty(final BeanProperty bp) { Setter setter = bp.getSetter(true); if (setter == null) { return null; // depends on control dependency: [if], data = [none] } Class type = setter.getSetterRawType(); Object newInstance; try { newInstance = ClassUtil.newInstance(type); // depends on control dependency: [try], data = [none] } catch (Exception ex) { if (isSilent) { return null; // depends on control dependency: [if], data = [none] } throw new BeanException("Invalid property: " + bp.name, bp, ex); } // depends on control dependency: [catch], data = [none] newInstance = invokeSetter(setter, bp, newInstance); return newInstance; } }
public class class_name { public static URL codeLocationFromPath(String filePath) { try { return new File(filePath).toURI().toURL(); } catch (Exception e) { throw new InvalidCodeLocation(filePath); } } }
public class class_name { public static URL codeLocationFromPath(String filePath) { try { return new File(filePath).toURI().toURL(); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new InvalidCodeLocation(filePath); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public long[] keys() { long[] values = new long[size]; int idx = 0; for (Entry entry : table) { while (entry != null) { values[idx++] = entry.key; entry = entry.next; } } return values; } }
public class class_name { public long[] keys() { long[] values = new long[size]; int idx = 0; for (Entry entry : table) { while (entry != null) { values[idx++] = entry.key; // depends on control dependency: [while], data = [none] entry = entry.next; // depends on control dependency: [while], data = [none] } } return values; } }
public class class_name { public synchronized static void write(int fd, byte[] ... data) throws IOException { for(byte[] single : data) { // write the data contents to the serial port via JNI native method write(fd, single, single.length); } } }
public class class_name { public synchronized static void write(int fd, byte[] ... data) throws IOException { for(byte[] single : data) { // write the data contents to the serial port via JNI native method write(fd, single, single.length); // depends on control dependency: [for], data = [single] } } }
public class class_name { protected void updateFoldingThreshhold() { int left = estimateRequiredWidth(m_itemsLeft) + estimateRequiredWidth(m_leftButtons); int right = estimateRequiredWidth(m_itemsRight) + estimateRequiredWidth(m_rightButtons); int requiredWidth = left > right ? left : right; if (requiredWidth < 350) { // folding not required at any width m_foldingThreshhold = 0; } else if (requiredWidth < 400) { m_foldingThreshhold = 984; } else if (requiredWidth <= 520) { m_foldingThreshhold = 1240; } else { // always fold m_foldingThreshhold = 10000; } updateButtonVisibility(Page.getCurrent().getBrowserWindowWidth()); } }
public class class_name { protected void updateFoldingThreshhold() { int left = estimateRequiredWidth(m_itemsLeft) + estimateRequiredWidth(m_leftButtons); int right = estimateRequiredWidth(m_itemsRight) + estimateRequiredWidth(m_rightButtons); int requiredWidth = left > right ? left : right; if (requiredWidth < 350) { // folding not required at any width m_foldingThreshhold = 0; // depends on control dependency: [if], data = [none] } else if (requiredWidth < 400) { m_foldingThreshhold = 984; // depends on control dependency: [if], data = [none] } else if (requiredWidth <= 520) { m_foldingThreshhold = 1240; // depends on control dependency: [if], data = [none] } else { // always fold m_foldingThreshhold = 10000; // depends on control dependency: [if], data = [none] } updateButtonVisibility(Page.getCurrent().getBrowserWindowWidth()); } }
public class class_name { public AnnotatedTypeBuilder<X> addToField(Field field, Annotation annotation) { if (fields.get(field) == null) { fields.put(field, new AnnotationBuilder()); } fields.get(field).add(annotation); return this; } }
public class class_name { public AnnotatedTypeBuilder<X> addToField(Field field, Annotation annotation) { if (fields.get(field) == null) { fields.put(field, new AnnotationBuilder()); // depends on control dependency: [if], data = [none] } fields.get(field).add(annotation); return this; } }
public class class_name { public static boolean startsWithAny(final CharSequence sequence, final CharSequence... searchStrings) { if (isEmpty(sequence) || ArrayUtils.isEmpty(searchStrings)) { return false; } for (final CharSequence searchString : searchStrings) { if (startsWith(sequence, searchString)) { return true; } } return false; } }
public class class_name { public static boolean startsWithAny(final CharSequence sequence, final CharSequence... searchStrings) { if (isEmpty(sequence) || ArrayUtils.isEmpty(searchStrings)) { return false; // depends on control dependency: [if], data = [none] } for (final CharSequence searchString : searchStrings) { if (startsWith(sequence, searchString)) { return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name { public static void process(GrayF32 orig, GrayF32 derivX, GrayF32 derivY, @Nullable ImageBorder_F32 border) { InputSanityCheck.reshapeOneIn(orig, derivX, derivY); if( BoofConcurrency.USE_CONCURRENT ) { GradientTwo0_Standard_MT.process(orig, derivX, derivY); } else { GradientTwo0_Standard.process(orig, derivX, derivY); } if( border != null ) { DerivativeHelperFunctions.processBorderHorizontal(orig, derivX , kernelDeriv_F32, border); DerivativeHelperFunctions.processBorderVertical(orig, derivY , kernelDeriv_F32, border); } } }
public class class_name { public static void process(GrayF32 orig, GrayF32 derivX, GrayF32 derivY, @Nullable ImageBorder_F32 border) { InputSanityCheck.reshapeOneIn(orig, derivX, derivY); if( BoofConcurrency.USE_CONCURRENT ) { GradientTwo0_Standard_MT.process(orig, derivX, derivY); // depends on control dependency: [if], data = [none] } else { GradientTwo0_Standard.process(orig, derivX, derivY); // depends on control dependency: [if], data = [none] } if( border != null ) { DerivativeHelperFunctions.processBorderHorizontal(orig, derivX , kernelDeriv_F32, border); // depends on control dependency: [if], data = [none] DerivativeHelperFunctions.processBorderVertical(orig, derivY , kernelDeriv_F32, border); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Returns the value registered in {@code map} for the first interface
     * directly implemented by {@code clazz} that has a mapping.
     *
     * @param clazz class whose directly implemented interfaces are probed
     * @return the first matching mapped value, or {@code null} when none of the
     *         interfaces has an entry (or the class implements no interfaces)
     */
    public T getByInterfaces(Class<?> clazz) {
        for (Class<?> iface : clazz.getInterfaces()) {
            T candidate = map.get(iface);
            if (candidate != null) {
                return candidate;
            }
        }
        // No direct interface had a registration.
        return null;
    }
}
// NOTE(review): machine-annotated "code_dependency" variant of getByInterfaces; kept byte-identical.
public class class_name { public T getByInterfaces(Class<?> clazz) { T object = null; Class<?>[] interfaces = clazz.getInterfaces(); for (Class<?> interfaceClass : interfaces) { object = map.get(interfaceClass); // depends on control dependency: [for], data = [interfaceClass] if(object != null) { break; } } return object; } }
public class class_name { private void updateCustomerAndSetDefaultSourceIfNecessary(@NonNull Customer customer) { // An inverted early return - we don't need to talk to the CustomerSession if there is // already a default source selected or we have no or more than one customer sources in our // list. if (!TextUtils.isEmpty(customer.getDefaultSource()) || customer.getSources().size() != 1) { updateAdapterWithCustomer(customer); return; } // We only activate this if there is a single source in the list final CustomerSource customerSource = customer.getSources().get(0); if (customerSource == null || customerSource.getId() == null) { // If the source ID is null for the only source we have, then there is nothing // we can do but update the display. This should not happen. It is only possible // for a CustomerSource to have null ID because a Card is a customer source, and // before those are sent to Stripe, they haven't yet been assigned an ID. updateAdapterWithCustomer(customer); return; } mCustomerSession.setCustomerDefaultSource(customerSource.getId(), customerSource.getSourceType(), new PostUpdateCustomerRetrievalListener(this)); } }
// NOTE(review): machine-annotated "code_dependency" variant of updateCustomerAndSetDefaultSourceIfNecessary; kept byte-identical.
public class class_name { private void updateCustomerAndSetDefaultSourceIfNecessary(@NonNull Customer customer) { // An inverted early return - we don't need to talk to the CustomerSession if there is // already a default source selected or we have no or more than one customer sources in our // list. if (!TextUtils.isEmpty(customer.getDefaultSource()) || customer.getSources().size() != 1) { updateAdapterWithCustomer(customer); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } // We only activate this if there is a single source in the list final CustomerSource customerSource = customer.getSources().get(0); if (customerSource == null || customerSource.getId() == null) { // If the source ID is null for the only source we have, then there is nothing // we can do but update the display. This should not happen. It is only possible // for a CustomerSource to have null ID because a Card is a customer source, and // before those are sent to Stripe, they haven't yet been assigned an ID. updateAdapterWithCustomer(customer); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } mCustomerSession.setCustomerDefaultSource(customerSource.getId(), customerSource.getSourceType(), new PostUpdateCustomerRetrievalListener(this)); } }
public class class_name {
    /**
     * Registers schema mappings from alternating (name, resource) pairs and
     * returns this resolver for chaining.
     *
     * @param resources alternating pairs; must contain an even number of entries
     * @return this resolver (fluent style)
     * @throws IllegalArgumentException if an odd number of arguments is given
     */
    public UriMappingResolver addResources(String... resources) {
        // Pairs only: an odd count means a dangling, unmatched entry.
        if ((resources.length & 1) != 0) {
            throw new IllegalArgumentException("Expected even number of arguments");
        }
        for (int i = 0; i + 1 < resources.length; i += 2) {
            addSchema(resources[i], resources[i + 1]);
        }
        return this;
    }
}
// NOTE(review): machine-annotated "code_dependency" variant of addResources; kept byte-identical.
public class class_name { public UriMappingResolver addResources(String... resources) { if (resources.length % 2 == 1) { throw new IllegalArgumentException ("Expected even number of arguments"); } for (int i = 0; i < resources.length; i += 2) { addSchema(resources[i], resources[i+1]); // depends on control dependency: [for], data = [i] } return this; } }
public class class_name { public static final void setAutoCommit(final Connection connection, boolean autocommit) { try { connection.setAutoCommit(autocommit); } catch (Exception e) { if (LOG.isWarnEnabled()) LOG.warn("Error committing Connection: " + connection + " to: " + autocommit, e); } } }
// NOTE(review): machine-annotated "code_dependency" variant of setAutoCommit; kept byte-identical.
public class class_name { public static final void setAutoCommit(final Connection connection, boolean autocommit) { try { connection.setAutoCommit(autocommit); // depends on control dependency: [try], data = [none] } catch (Exception e) { if (LOG.isWarnEnabled()) LOG.warn("Error committing Connection: " + connection + " to: " + autocommit, e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private void setParameterObject(final PreparedStatement preparedStatement, final int parameterIndex, final Object param, final BindParameterMapperManager parameterMapperManager) throws SQLException { //JDBCの受け付ける型に変換 Object jdbcParam = parameterMapperManager.toJdbc(param, preparedStatement.getConnection()); if (Objects.equals(sqlType, SQL_TYPE_NOT_SET)) { if (jdbcParam instanceof java.sql.Array) { preparedStatement.setArray(parameterIndex, (java.sql.Array) jdbcParam); } else { preparedStatement.setObject(parameterIndex, jdbcParam); } } else { int targetSqlType = sqlType.getVendorTypeNumber();//各JDBCの対応状況が怪しいのでintで扱う if (jdbcParam != null) { if (jdbcParam instanceof java.sql.Array) { preparedStatement.setArray(parameterIndex, (java.sql.Array) jdbcParam); } else { preparedStatement.setObject(parameterIndex, jdbcParam, targetSqlType); } } else { preparedStatement.setNull(parameterIndex, targetSqlType); } } } }
// NOTE(review): machine-annotated "code_dependency" variant of setParameterObject; kept byte-identical. Japanese comments within: "JDBCの受け付ける型に変換" = "convert to a type accepted by JDBC"; "各JDBCの対応状況が怪しいのでintで扱う" = "handled as int because per-driver support (for SQLType) is unreliable".
public class class_name { private void setParameterObject(final PreparedStatement preparedStatement, final int parameterIndex, final Object param, final BindParameterMapperManager parameterMapperManager) throws SQLException { //JDBCの受け付ける型に変換 Object jdbcParam = parameterMapperManager.toJdbc(param, preparedStatement.getConnection()); if (Objects.equals(sqlType, SQL_TYPE_NOT_SET)) { if (jdbcParam instanceof java.sql.Array) { preparedStatement.setArray(parameterIndex, (java.sql.Array) jdbcParam); // depends on control dependency: [if], data = [none] } else { preparedStatement.setObject(parameterIndex, jdbcParam); // depends on control dependency: [if], data = [none] } } else { int targetSqlType = sqlType.getVendorTypeNumber();//各JDBCの対応状況が怪しいのでintで扱う if (jdbcParam != null) { if (jdbcParam instanceof java.sql.Array) { preparedStatement.setArray(parameterIndex, (java.sql.Array) jdbcParam); // depends on control dependency: [if], data = [none] } else { preparedStatement.setObject(parameterIndex, jdbcParam, targetSqlType); // depends on control dependency: [if], data = [none] } } else { preparedStatement.setNull(parameterIndex, targetSqlType); // depends on control dependency: [if], data = [none] } } } }
public class class_name {
    /**
     * Promotes a group-by query with no fetch dimensions to a time-series
     * query; all other queries pass through unchanged.
     *
     * @param qMeta query metadata to inspect
     * @return the promoted time-series metadata, or {@code qMeta} unchanged
     */
    public static QueryMeta checkAndPromoteToTimeSeries(QueryMeta qMeta) {
        // A group-by without dimensions is semantically a time series.
        if (qMeta instanceof GroupByQueryMeta
                && ((GroupByQueryMeta) qMeta).fetchDimensions == null) {
            return TimeSeriesQueryMeta.promote(qMeta);
        }
        return qMeta;
    }
}
// NOTE(review): machine-annotated "code_dependency" variant of checkAndPromoteToTimeSeries; kept byte-identical.
public class class_name { public static QueryMeta checkAndPromoteToTimeSeries(QueryMeta qMeta) { if (qMeta instanceof GroupByQueryMeta) { if (((GroupByQueryMeta)qMeta).fetchDimensions == null) { return TimeSeriesQueryMeta.promote(qMeta); // depends on control dependency: [if], data = [none] } } return qMeta; } }
public class class_name {
    /**
     * Appends {@code item=value} to the builder, or {@code item=?} when the
     * value is flagged as not valid.
     *
     * @param sb    destination builder (mutated in place)
     * @param item  label written before the '=' sign
     * @param valid whether {@code value} holds a meaningful measurement
     * @param value the numeric value; ignored when {@code valid} is false
     */
    private static void appendValue(StringBuilder sb, String item, boolean valid, long value) {
        // '?' is the placeholder for an unavailable value.
        sb.append(item).append('=').append(valid ? Long.toString(value) : "?");
    }
}
// NOTE(review): machine-annotated "code_dependency" variant of appendValue; kept byte-identical.
public class class_name { private static void appendValue(StringBuilder sb, String item, boolean valid, long value) { sb.append(item).append('='); if (valid) { sb.append(value); // depends on control dependency: [if], data = [none] } else { sb.append('?'); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public long length() { // If the summary is already in the cache, return it. // It'll have been added by a listDatasets() call on the parent directory. S3ObjectSummary objectSummary = objectSummaryCache.getIfPresent(s3uri); if (objectSummary != null) { return objectSummary.getSize(); } /* Get the metadata directly from S3. This will be expensive. * We get punished hard if length() and/or lastModified() is called on a bunch of datasets without * listDatasets() first being called on their parent directory. * * So, is the right thing to do here "getParentDataset().listDatasets()" and then query the cache again? * Perhaps, but listDatasets() throws an IOException, and length() and lastModified() do not. * We would have to change their signatures and the upstream client code to make it work. */ ObjectMetadata metadata = threddsS3Client.getObjectMetadata(s3uri); if (metadata != null) { return metadata.getContentLength(); } else { // "this" may be a collection or non-existent. In both cases, we return 0. return 0; } } }
// NOTE(review): machine-annotated "code_dependency" variant of length(); kept byte-identical.
public class class_name { @Override public long length() { // If the summary is already in the cache, return it. // It'll have been added by a listDatasets() call on the parent directory. S3ObjectSummary objectSummary = objectSummaryCache.getIfPresent(s3uri); if (objectSummary != null) { return objectSummary.getSize(); // depends on control dependency: [if], data = [none] } /* Get the metadata directly from S3. This will be expensive. * We get punished hard if length() and/or lastModified() is called on a bunch of datasets without * listDatasets() first being called on their parent directory. * * So, is the right thing to do here "getParentDataset().listDatasets()" and then query the cache again? * Perhaps, but listDatasets() throws an IOException, and length() and lastModified() do not. * We would have to change their signatures and the upstream client code to make it work. */ ObjectMetadata metadata = threddsS3Client.getObjectMetadata(s3uri); if (metadata != null) { return metadata.getContentLength(); // depends on control dependency: [if], data = [none] } else { // "this" may be a collection or non-existent. In both cases, we return 0. return 0; // depends on control dependency: [if], data = [none] } } }
public class class_name {
  /**
   * Runs Affinity Propagation clustering on the given relation: responsibility
   * and availability messages are exchanged for up to {@code maxiter}
   * iterations, with damping factor {@code lambda}, stopping early after
   * {@code convergence} consecutive iterations without an assignment change.
   *
   * @param db       database, used to obtain the similarity matrix
   * @param relation relation of objects to cluster
   * @return clustering with one medoid-model cluster per exemplar; singleton
   *         exemplars are collected into a noise cluster
   */
  public Clustering<MedoidModel> run(Database db, Relation<O> relation) {
    ArrayDBIDs ids = DBIDUtil.ensureArray(relation.getDBIDs());
    final int size = ids.size();
    // assignment[i] = index of the exemplar object i currently points to.
    int[] assignment = new int[size];
    // s = similarities, r = responsibilities, a = availabilities.
    double[][] s = initialization.getSimilarityMatrix(db, relation, ids);
    double[][] r = new double[size][size];
    double[][] a = new double[size][size];
    IndefiniteProgress prog = LOG.isVerbose() ? new IndefiniteProgress("Affinity Propagation Iteration", LOG) : null;
    MutableProgress aprog = LOG.isVerbose() ? new MutableProgress("Stable assignments", size + 1, LOG) : null;
    // Number of consecutive iterations without any assignment change.
    int inactive = 0;
    for(int iteration = 0; iteration < maxiter && inactive < convergence; iteration++) {
      // Update responsibility matrix:
      for(int i = 0; i < size; i++) {
        double[] ai = a[i], ri = r[i], si = s[i];
        // Find the two largest values (as initially maxk == i)
        double max1 = Double.NEGATIVE_INFINITY, max2 = Double.NEGATIVE_INFINITY;
        int maxk = -1;
        for(int k = 0; k < size; k++) {
          double val = ai[k] + si[k];
          if(val > max1) {
            max2 = max1;
            max1 = val;
            maxk = k;
          }
          else if(val > max2) {
            max2 = val;
          }
        }
        // With the maximum value known, update r:
        // Damped update: keep lambda of the old value, blend in (1 - lambda) new.
        for(int k = 0; k < size; k++) {
          double val = si[k] - ((k != maxk) ? max1 : max2);
          ri[k] = ri[k] * lambda + val * (1. - lambda);
        }
      }
      // Update availability matrix
      for(int k = 0; k < size; k++) {
        // Compute sum of max(0, r_ik) for all i.
        // For r_kk, don't apply the max.
        double colposum = 0.;
        for(int i = 0; i < size; i++) {
          if(i == k || r[i][k] > 0.) {
            colposum += r[i][k];
          }
        }
        for(int i = 0; i < size; i++) {
          double val = colposum;
          // Adjust column sum by the one extra term.
          if(i == k || r[i][k] > 0.) {
            val -= r[i][k];
          }
          // Off-diagonal entries are clamped to min(0, val).
          if(i != k && val > 0.) { // min
            val = 0.;
          }
          a[i][k] = a[i][k] * lambda + val * (1 - lambda);
        }
      }
      // Recompute assignments: each object picks argmax_j (a[i][j] + r[i][j]),
      // with a tie-break that prefers the object itself (self-exemplar).
      int changed = 0;
      for(int i = 0; i < size; i++) {
        double[] ai = a[i], ri = r[i];
        double max = Double.NEGATIVE_INFINITY;
        int maxj = -1;
        for(int j = 0; j < size; j++) {
          double v = ai[j] + ri[j];
          if(v > max || (i == j && v >= max)) {
            max = v;
            maxj = j;
          }
        }
        if(assignment[i] != maxj) {
          changed += 1;
          assignment[i] = maxj;
        }
      }
      inactive = (changed > 0) ? 0 : (inactive + 1);
      LOG.incrementProcessed(prog);
      if(aprog != null) {
        aprog.setProcessed(size - changed, LOG);
      }
    }
    if(aprog != null) {
      aprog.setProcessed(aprog.getTotal(), LOG);
    }
    LOG.setCompleted(prog);
    // Cluster map, by lead object
    Int2ObjectOpenHashMap<ModifiableDBIDs> map = new Int2ObjectOpenHashMap<>();
    DBIDArrayIter i1 = ids.iter();
    for(int i = 0; i1.valid(); i1.advance(), i++) {
      int c = assignment[i];
      // Add to cluster members:
      ModifiableDBIDs cids = map.get(c);
      if(cids == null) {
        cids = DBIDUtil.newArray();
        map.put(c, cids);
      }
      cids.add(i1);
    }
    // If we stopped early, the cluster lead might be in a different cluster.
    for(ObjectIterator<Int2ObjectOpenHashMap.Entry<ModifiableDBIDs>> iter = map.int2ObjectEntrySet().fastIterator(); iter.hasNext();) {
      Int2ObjectOpenHashMap.Entry<ModifiableDBIDs> entry = iter.next();
      final int key = entry.getIntKey();
      int targetkey = key;
      ModifiableDBIDs tids = null;
      // Chase arrows:
      while(tids == null && assignment[targetkey] != targetkey) {
        targetkey = assignment[targetkey];
        tids = map.get(targetkey);
      }
      // Merge this group into the cluster its lead actually belongs to.
      if(tids != null && targetkey != key) {
        tids.addDBIDs(entry.getValue());
        iter.remove();
      }
    }
    Clustering<MedoidModel> clustering = new Clustering<>("Affinity Propagation Clustering", "ap-clustering");
    ModifiableDBIDs noise = DBIDUtil.newArray();
    for(ObjectIterator<Int2ObjectOpenHashMap.Entry<ModifiableDBIDs>> iter = map.int2ObjectEntrySet().fastIterator(); iter.hasNext();) {
      Int2ObjectOpenHashMap.Entry<ModifiableDBIDs> entry = iter.next();
      i1.seek(entry.getIntKey());
      if(entry.getValue().size() > 1) {
        MedoidModel mod = new MedoidModel(DBIDUtil.deref(i1));
        clustering.addToplevelCluster(new Cluster<>(entry.getValue(), mod));
      }
      else {
        // Singleton exemplars are treated as noise.
        noise.add(i1);
      }
    }
    if(noise.size() > 0) {
      MedoidModel mod = new MedoidModel(DBIDUtil.deref(noise.iter()));
      clustering.addToplevelCluster(new Cluster<>(noise, true, mod));
    }
    return clustering;
  }
}
// NOTE(review): machine-annotated "code_dependency" variant of the Affinity Propagation run() method; control-dependency tags kept verbatim, code byte-identical.
public class class_name { public Clustering<MedoidModel> run(Database db, Relation<O> relation) { ArrayDBIDs ids = DBIDUtil.ensureArray(relation.getDBIDs()); final int size = ids.size(); int[] assignment = new int[size]; double[][] s = initialization.getSimilarityMatrix(db, relation, ids); double[][] r = new double[size][size]; double[][] a = new double[size][size]; IndefiniteProgress prog = LOG.isVerbose() ? new IndefiniteProgress("Affinity Propagation Iteration", LOG) : null; MutableProgress aprog = LOG.isVerbose() ? new MutableProgress("Stable assignments", size + 1, LOG) : null; int inactive = 0; for(int iteration = 0; iteration < maxiter && inactive < convergence; iteration++) { // Update responsibility matrix: for(int i = 0; i < size; i++) { double[] ai = a[i], ri = r[i], si = s[i]; // Find the two largest values (as initially maxk == i) double max1 = Double.NEGATIVE_INFINITY, max2 = Double.NEGATIVE_INFINITY; int maxk = -1; for(int k = 0; k < size; k++) { double val = ai[k] + si[k]; if(val > max1) { max2 = max1; // depends on control dependency: [if], data = [none] max1 = val; // depends on control dependency: [if], data = [none] maxk = k; // depends on control dependency: [if], data = [none] } else if(val > max2) { max2 = val; // depends on control dependency: [if], data = [none] } } // With the maximum value known, update r: for(int k = 0; k < size; k++) { double val = si[k] - ((k != maxk) ? max1 : max2); ri[k] = ri[k] * lambda + val * (1. - lambda); // depends on control dependency: [for], data = [k] } } // Update availability matrix for(int k = 0; k < size; k++) { // Compute sum of max(0, r_ik) for all i. // For r_kk, don't apply the max. double colposum = 0.; for(int i = 0; i < size; i++) { if(i == k || r[i][k] > 0.) { colposum += r[i][k]; // depends on control dependency: [if], data = [none] } } for(int i = 0; i < size; i++) { double val = colposum; // Adjust column sum by the one extra term. if(i == k || r[i][k] > 0.) 
{ val -= r[i][k]; // depends on control dependency: [if], data = [none] } if(i != k && val > 0.) { // min val = 0.; // depends on control dependency: [if], data = [none] } a[i][k] = a[i][k] * lambda + val * (1 - lambda); // depends on control dependency: [for], data = [i] } } int changed = 0; for(int i = 0; i < size; i++) { double[] ai = a[i], ri = r[i]; double max = Double.NEGATIVE_INFINITY; int maxj = -1; for(int j = 0; j < size; j++) { double v = ai[j] + ri[j]; if(v > max || (i == j && v >= max)) { max = v; // depends on control dependency: [if], data = [none] maxj = j; // depends on control dependency: [if], data = [none] } } if(assignment[i] != maxj) { changed += 1; // depends on control dependency: [if], data = [none] assignment[i] = maxj; // depends on control dependency: [if], data = [none] } } inactive = (changed > 0) ? 0 : (inactive + 1); // depends on control dependency: [for], data = [none] LOG.incrementProcessed(prog); // depends on control dependency: [for], data = [none] if(aprog != null) { aprog.setProcessed(size - changed, LOG); // depends on control dependency: [if], data = [none] } } if(aprog != null) { aprog.setProcessed(aprog.getTotal(), LOG); // depends on control dependency: [if], data = [(aprog] } LOG.setCompleted(prog); // Cluster map, by lead object Int2ObjectOpenHashMap<ModifiableDBIDs> map = new Int2ObjectOpenHashMap<>(); DBIDArrayIter i1 = ids.iter(); for(int i = 0; i1.valid(); i1.advance(), i++) { int c = assignment[i]; // Add to cluster members: ModifiableDBIDs cids = map.get(c); if(cids == null) { cids = DBIDUtil.newArray(); // depends on control dependency: [if], data = [none] map.put(c, cids); // depends on control dependency: [if], data = [none] } cids.add(i1); // depends on control dependency: [for], data = [none] } // If we stopped early, the cluster lead might be in a different cluster. 
for(ObjectIterator<Int2ObjectOpenHashMap.Entry<ModifiableDBIDs>> iter = map.int2ObjectEntrySet().fastIterator(); iter.hasNext();) { Int2ObjectOpenHashMap.Entry<ModifiableDBIDs> entry = iter.next(); final int key = entry.getIntKey(); int targetkey = key; ModifiableDBIDs tids = null; // Chase arrows: while(tids == null && assignment[targetkey] != targetkey) { targetkey = assignment[targetkey]; // depends on control dependency: [while], data = [none] tids = map.get(targetkey); // depends on control dependency: [while], data = [none] } if(tids != null && targetkey != key) { tids.addDBIDs(entry.getValue()); // depends on control dependency: [if], data = [none] iter.remove(); // depends on control dependency: [if], data = [none] } } Clustering<MedoidModel> clustering = new Clustering<>("Affinity Propagation Clustering", "ap-clustering"); ModifiableDBIDs noise = DBIDUtil.newArray(); for(ObjectIterator<Int2ObjectOpenHashMap.Entry<ModifiableDBIDs>> iter = map.int2ObjectEntrySet().fastIterator(); iter.hasNext();) { Int2ObjectOpenHashMap.Entry<ModifiableDBIDs> entry = iter.next(); i1.seek(entry.getIntKey()); // depends on control dependency: [for], data = [none] if(entry.getValue().size() > 1) { MedoidModel mod = new MedoidModel(DBIDUtil.deref(i1)); clustering.addToplevelCluster(new Cluster<>(entry.getValue(), mod)); // depends on control dependency: [if], data = [none] } else { noise.add(i1); // depends on control dependency: [if], data = [1)] } } if(noise.size() > 0) { MedoidModel mod = new MedoidModel(DBIDUtil.deref(noise.iter())); clustering.addToplevelCluster(new Cluster<>(noise, true, mod)); // depends on control dependency: [if], data = [none] } return clustering; } }
public class class_name {
    /**
     * Marshalls the given request's fields into the protocol marshaller.
     *
     * @param listResourceDefinitionsRequest request to marshall; must not be null
     * @param protocolMarshaller             destination marshaller
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(ListResourceDefinitionsRequest listResourceDefinitionsRequest, ProtocolMarshaller protocolMarshaller) {
        if (listResourceDefinitionsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            // One binding per request field.
            protocolMarshaller.marshall(listResourceDefinitionsRequest.getMaxResults(), MAXRESULTS_BINDING);
            protocolMarshaller.marshall(listResourceDefinitionsRequest.getNextToken(), NEXTTOKEN_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
// NOTE(review): machine-annotated "code_dependency" variant of marshall; kept byte-identical.
public class class_name { public void marshall(ListResourceDefinitionsRequest listResourceDefinitionsRequest, ProtocolMarshaller protocolMarshaller) { if (listResourceDefinitionsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(listResourceDefinitionsRequest.getMaxResults(), MAXRESULTS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(listResourceDefinitionsRequest.getNextToken(), NEXTTOKEN_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Converts a primitive array (possibly nested) into the equivalent array
     * of wrapper types; non-primitive arrays are returned unchanged.
     *
     * NOTE(review): relies on sibling overloads toWrapper(Class)/
     * toWrapperArray(Class) returning a Class, and isPrimitiveArray(Class) —
     * none of which are visible here; confirm against the full utility class.
     *
     * @param array an array object (primitive, nested, or reference)
     * @return a wrapper-typed copy, or {@code array} itself if already non-primitive
     * @throws IllegalArgumentException if {@code array} is not an array
     */
    public static Object toWrapperArray(Object array) {
        if (!array.getClass().isArray()) {
            throw new IllegalArgumentException("Must give array object.");
        }
        if (isPrimitiveArray(array.getClass())) {
            // e.g. int[] -> Integer[]: box every element.
            final int length = Array.getLength(array);
            final Object boxed = Array.newInstance(toWrapper(array.getClass().getComponentType()), length);
            for (int i = 0; i < length; i++) {
                Array.set(boxed, i, toWrapper(Array.get(array, i)));
            }
            return boxed;
        }
        if (array.getClass().getComponentType().isArray()) {
            // Nested array: recurse per element, e.g. int[][] -> Integer[][].
            final int length = Array.getLength(array);
            final Object boxed = Array.newInstance(toWrapperArray(array.getClass().getComponentType()), length);
            for (int i = 0; i < length; i++) {
                Array.set(boxed, i, toWrapperArray(Array.get(array, i)));
            }
            return boxed;
        }
        // Already an array of reference types.
        return array;
    }
}
// NOTE(review): machine-annotated "code_dependency" variant of toWrapperArray; kept byte-identical.
public class class_name { public static Object toWrapperArray(Object array) { if (!array.getClass().isArray()) { throw new IllegalArgumentException("Must give array object."); } if (isPrimitiveArray(array.getClass())) { int length = Array.getLength(array); Object newArray = Array.newInstance(toWrapper(array.getClass().getComponentType()), length); for (int i = 0; i < length; i++) { Array.set(newArray, i, toWrapper(Array.get(array, i))); // depends on control dependency: [for], data = [i] } return newArray; // depends on control dependency: [if], data = [none] } else if (array.getClass().getComponentType().isArray()) { int length = Array.getLength(array); Object newArray = Array.newInstance(toWrapperArray(array.getClass().getComponentType()), length); for (int i = 0; i < length; i++) { Array.set(newArray, i, toWrapperArray(Array.get(array, i))); // depends on control dependency: [for], data = [i] } return newArray; // depends on control dependency: [if], data = [none] } return array; } }
public class class_name {
    /**
     * Creates an ongoing legal hold policy via the Box API.
     *
     * @param api         authenticated API connection
     * @param name        policy name (required)
     * @param description optional description; omitted from the payload when null
     * @return info object for the newly created policy
     */
    public static BoxLegalHoldPolicy.Info createOngoing(BoxAPIConnection api, String name, String description) {
        // Build the JSON payload first; "description" is optional.
        JsonObject payload = new JsonObject()
                .add("policy_name", name)
                .add("is_ongoing", true);
        if (description != null) {
            payload.add("description", description);
        }

        URL url = ALL_LEGAL_HOLD_URL_TEMPLATE.build(api.getBaseURL());
        BoxJSONRequest request = new BoxJSONRequest(api, url, "POST");
        request.setBody(payload.toString());

        BoxJSONResponse response = (BoxJSONResponse) request.send();
        JsonObject responseJSON = JsonObject.readFrom(response.getJSON());
        BoxLegalHoldPolicy createdPolicy = new BoxLegalHoldPolicy(api, responseJSON.get("id").asString());
        return createdPolicy.new Info(responseJSON);
    }
}
// NOTE(review): machine-annotated "code_dependency" variant of createOngoing; kept byte-identical.
public class class_name { public static BoxLegalHoldPolicy.Info createOngoing(BoxAPIConnection api, String name, String description) { URL url = ALL_LEGAL_HOLD_URL_TEMPLATE.build(api.getBaseURL()); BoxJSONRequest request = new BoxJSONRequest(api, url, "POST"); JsonObject requestJSON = new JsonObject() .add("policy_name", name) .add("is_ongoing", true); if (description != null) { requestJSON.add("description", description); // depends on control dependency: [if], data = [none] } request.setBody(requestJSON.toString()); BoxJSONResponse response = (BoxJSONResponse) request.send(); JsonObject responseJSON = JsonObject.readFrom(response.getJSON()); BoxLegalHoldPolicy createdPolicy = new BoxLegalHoldPolicy(api, responseJSON.get("id").asString()); return createdPolicy.new Info(responseJSON); } }
public class class_name {
  /**
   * Possibly rewrites a reference to a top-level name inside a goog.module:
   * aliases for imported namespaces are inlined to the exported namespace,
   * while other top-level names are prefixed with the module's contents
   * prefix. References that are not module-top-level, not from the top scope,
   * or part of a goog.require destructuring import are left untouched.
   *
   * @param t        current traversal (provides the scope)
   * @param nameNode the NAME node being considered for rewriting
   */
  private void maybeUpdateTopLevelName(NodeTraversal t, Node nameNode) {
    String name = nameNode.getString();
    // Only names declared at the top level of the current module are eligible.
    if (!currentScript.isModule || !currentScript.topLevelNames.contains(name)) {
      return;
    }
    Var var = t.getScope().getVar(name);
    // If the name refers to a var that is not from the top level scope.
    if (var == null || var.getScope().getRootNode() != currentScript.rootNode) {
      // Then it shouldn't be renamed.
      return;
    }
    // If the name is part of a destructuring import, the import rewriting will take care of it
    if (var.getNameNode() == nameNode
        && nameNode.getParent().isStringKey()
        && nameNode.getGrandparent().isObjectPattern()) {
      Node destructuringLhsNode = nameNode.getGrandparent().getParent();
      if (isCallTo(destructuringLhsNode.getLastChild(), GOOG_REQUIRE)
          || isCallTo(destructuringLhsNode.getLastChild(), GOOG_REQUIRETYPE)) {
        return;
      }
    }
    // If the name is an alias for an imported namespace rewrite from
    // "new Foo;" to "new module$exports$Foo;"
    boolean nameIsAnAlias = currentScript.namesToInlineByAlias.containsKey(name);
    // Skip the alias's own declaration node; only rewrite its uses.
    if (nameIsAnAlias && var.getNode() != nameNode) {
      maybeAddAliasToSymbolTable(nameNode, currentScript.legacyNamespace);
      String namespaceToInline = currentScript.namesToInlineByAlias.get(name);
      if (namespaceToInline.equals(currentScript.getBinaryNamespace())) {
        currentScript.hasCreatedExportObject = true;
      }
      safeSetMaybeQualifiedString(nameNode, namespaceToInline);
      // Make sure this action won't shadow a local variable.
      if (namespaceToInline.indexOf('.') != -1) {
        String firstQualifiedName = namespaceToInline.substring(0, namespaceToInline.indexOf('.'));
        Var shadowedVar = t.getScope().getVar(firstQualifiedName);
        // Global/module-scope vars are fine; only a genuinely local var is a problem.
        if (shadowedVar == null || shadowedVar.isGlobal() || shadowedVar.getScope().isModuleScope()) {
          return;
        }
        t.report(
            shadowedVar.getNode(),
            IMPORT_INLINING_SHADOWS_VAR,
            shadowedVar.getName(),
            namespaceToInline);
      }
      return;
    }
    // For non-import alias names rewrite from
    // "var foo; console.log(foo);" to
    // "var module$contents$Foo_foo; console.log(module$contents$Foo_foo);"
    safeSetString(nameNode, currentScript.contentsPrefix + name);
  }
}
// NOTE(review): machine-annotated "code_dependency" variant of maybeUpdateTopLevelName; control-dependency tags kept verbatim, code byte-identical.
public class class_name { private void maybeUpdateTopLevelName(NodeTraversal t, Node nameNode) { String name = nameNode.getString(); if (!currentScript.isModule || !currentScript.topLevelNames.contains(name)) { return; // depends on control dependency: [if], data = [none] } Var var = t.getScope().getVar(name); // If the name refers to a var that is not from the top level scope. if (var == null || var.getScope().getRootNode() != currentScript.rootNode) { // Then it shouldn't be renamed. return; // depends on control dependency: [if], data = [none] } // If the name is part of a destructuring import, the import rewriting will take care of it if (var.getNameNode() == nameNode && nameNode.getParent().isStringKey() && nameNode.getGrandparent().isObjectPattern()) { Node destructuringLhsNode = nameNode.getGrandparent().getParent(); if (isCallTo(destructuringLhsNode.getLastChild(), GOOG_REQUIRE) || isCallTo(destructuringLhsNode.getLastChild(), GOOG_REQUIRETYPE)) { return; // depends on control dependency: [if], data = [none] } } // If the name is an alias for an imported namespace rewrite from // "new Foo;" to "new module$exports$Foo;" boolean nameIsAnAlias = currentScript.namesToInlineByAlias.containsKey(name); if (nameIsAnAlias && var.getNode() != nameNode) { maybeAddAliasToSymbolTable(nameNode, currentScript.legacyNamespace); // depends on control dependency: [if], data = [none] String namespaceToInline = currentScript.namesToInlineByAlias.get(name); if (namespaceToInline.equals(currentScript.getBinaryNamespace())) { currentScript.hasCreatedExportObject = true; // depends on control dependency: [if], data = [none] } safeSetMaybeQualifiedString(nameNode, namespaceToInline); // depends on control dependency: [if], data = [none] // Make sure this action won't shadow a local variable. 
if (namespaceToInline.indexOf('.') != -1) { String firstQualifiedName = namespaceToInline.substring(0, namespaceToInline.indexOf('.')); Var shadowedVar = t.getScope().getVar(firstQualifiedName); if (shadowedVar == null || shadowedVar.isGlobal() || shadowedVar.getScope().isModuleScope()) { return; // depends on control dependency: [if], data = [none] } t.report( shadowedVar.getNode(), IMPORT_INLINING_SHADOWS_VAR, shadowedVar.getName(), namespaceToInline); // depends on control dependency: [if], data = [none] } return; // depends on control dependency: [if], data = [none] } // For non-import alias names rewrite from // "var foo; console.log(foo);" to // "var module$contents$Foo_foo; console.log(module$contents$Foo_foo);" safeSetString(nameNode, currentScript.contentsPrefix + name); } }
public class class_name { public Object get(Object obj, String propertyName) { try { if (obj != null && propertyName != null) { return PropertyUtils.getNestedProperty(obj, propertyName); } } catch (NullPointerException e) { } catch (NestedNullException e) { } catch (Exception e) { // Now I don't like it. error(e); return "-undefined-"; } return null; } }
// NOTE(review): machine-annotated "code_dependency" variant of get(Object, String); kept byte-identical.
public class class_name { public Object get(Object obj, String propertyName) { try { if (obj != null && propertyName != null) { return PropertyUtils.getNestedProperty(obj, propertyName); // depends on control dependency: [if], data = [(obj] } } catch (NullPointerException e) { } catch (NestedNullException e) { // depends on control dependency: [catch], data = [none] } catch (Exception e) { // depends on control dependency: [catch], data = [none] // Now I don't like it. error(e); return "-undefined-"; } // depends on control dependency: [catch], data = [none] return null; } }
public class class_name {
    /**
     * Returns the list of hits, lazily creating the backing list so callers
     * never observe null. The returned list is live: mutations are reflected
     * in this object.
     *
     * @return the (possibly freshly created) list of hits, never null
     */
    public java.util.List<Hit> getHit() {
        if (hit == null) {
            // Lazy init with the SDK's internal list implementation.
            hit = new com.amazonaws.internal.SdkInternalList<Hit>();
        }
        return hit;
    }
}
// NOTE(review): machine-annotated "code_dependency" variant of getHit; kept byte-identical.
public class class_name { public java.util.List<Hit> getHit() { if (hit == null) { hit = new com.amazonaws.internal.SdkInternalList<Hit>(); // depends on control dependency: [if], data = [none] } return hit; } }
public class class_name {
    /**
     * Appends the given tag conditions to this selection (fluent builder
     * style), lazily creating the backing list on first use.
     *
     * @param listOfTags conditions to append, in order
     * @return this selection, for call chaining
     */
    public BackupSelection withListOfTags(Condition... listOfTags) {
        if (this.listOfTags == null) {
            // Presize to the incoming count; setter keeps field/state in sync.
            setListOfTags(new java.util.ArrayList<Condition>(listOfTags.length));
        }
        // Bulk-append preserves argument order, same as an element-wise loop.
        java.util.Collections.addAll(this.listOfTags, listOfTags);
        return this;
    }
}
// NOTE(review): machine-annotated "code_dependency" variant of withListOfTags; kept byte-identical.
public class class_name { public BackupSelection withListOfTags(Condition... listOfTags) { if (this.listOfTags == null) { setListOfTags(new java.util.ArrayList<Condition>(listOfTags.length)); // depends on control dependency: [if], data = [none] } for (Condition ele : listOfTags) { this.listOfTags.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { public static void renderSelectOptions(FacesContext context, UIComponent component, Converter converter, Set lookupSet, List selectItemList) throws IOException { ResponseWriter writer = context.getResponseWriter(); // check for the hideNoSelectionOption attribute boolean hideNoSelectionOption = isHideNoSelectionOption(component); boolean componentDisabled = isTrue(component.getAttributes() .get("disabled")); for (Iterator it = selectItemList.iterator(); it.hasNext();) { SelectItem selectItem = (SelectItem) it.next(); if (selectItem instanceof SelectItemGroup) { writer.startElement(HTML.OPTGROUP_ELEM, null); // component); writer.writeAttribute(HTML.LABEL_ATTR, selectItem.getLabel(), null); SelectItem[] selectItems = ((SelectItemGroup) selectItem) .getSelectItems(); renderSelectOptions(context, component, converter, lookupSet, Arrays.asList(selectItems)); writer.endElement(HTML.OPTGROUP_ELEM); } else { String itemStrValue = org.apache.myfaces.shared.renderkit.RendererUtils .getConvertedStringValue(context, component, converter, selectItem); boolean selected = lookupSet.contains(itemStrValue); //TODO/FIX: we always compare the String vales, better fill lookupSet with Strings //only when useSubmittedValue==true, else use the real item value Objects // IF the hideNoSelectionOption attribute of the component is true // AND this selectItem is the "no selection option" // AND there are currently selected items // AND this item (the "no selection option") is not selected // (if there is currently no value on UISelectOne, lookupSet contains "") if (hideNoSelectionOption && selectItem.isNoSelectionOption() && lookupSet.size() != 0 && !(lookupSet.size() == 1 && lookupSet.contains("")) && !selected) { // do not render this selectItem continue; } writer.write(TABULATOR); writer.startElement(HTML.OPTION_ELEM, null); // component); if (itemStrValue != null) { writer.writeAttribute(HTML.VALUE_ATTR, itemStrValue, null); } else { 
writer.writeAttribute(HTML.VALUE_ATTR, "", null); } if (selected) { writer.writeAttribute(HTML.SELECTED_ATTR, HTML.SELECTED_ATTR, null); } boolean disabled = selectItem.isDisabled(); if (disabled) { writer.writeAttribute(HTML.DISABLED_ATTR, HTML.DISABLED_ATTR, null); } String labelClass = null; if (componentDisabled || disabled) { labelClass = (String) component.getAttributes().get( JSFAttr.DISABLED_CLASS_ATTR); } else { labelClass = (String) component.getAttributes().get( JSFAttr.ENABLED_CLASS_ATTR); } if (labelClass != null) { writer.writeAttribute("class", labelClass, "labelClass"); } boolean escape; if (component instanceof EscapeCapable) { escape = ((EscapeCapable) component).isEscape(); // Preserve tomahawk semantic. If escape=false // all items should be non escaped. If escape // is true check if selectItem.isEscape() is // true and do it. // This is done for remain compatibility. if (escape && selectItem.isEscape()) { writer.writeText(selectItem.getLabel(), null); } else { writer.write(selectItem.getLabel()); } } else { escape = RendererUtils.getBooleanAttribute(component, JSFAttr.ESCAPE_ATTR, false); //default is to escape //In JSF 1.2, when a SelectItem is created by default //selectItem.isEscape() returns true (this property //is not available on JSF 1.1). //so, if we found a escape property on the component //set to true, escape every item, but if not //check if isEscape() = true first. if (escape || selectItem.isEscape()) { writer.writeText(selectItem.getLabel(), null); } else { writer.write(selectItem.getLabel()); } } writer.endElement(HTML.OPTION_ELEM); } } } }
public class class_name { public static void renderSelectOptions(FacesContext context, UIComponent component, Converter converter, Set lookupSet, List selectItemList) throws IOException { ResponseWriter writer = context.getResponseWriter(); // check for the hideNoSelectionOption attribute boolean hideNoSelectionOption = isHideNoSelectionOption(component); boolean componentDisabled = isTrue(component.getAttributes() .get("disabled")); for (Iterator it = selectItemList.iterator(); it.hasNext();) { SelectItem selectItem = (SelectItem) it.next(); if (selectItem instanceof SelectItemGroup) { writer.startElement(HTML.OPTGROUP_ELEM, null); // component); // depends on control dependency: [if], data = [none] writer.writeAttribute(HTML.LABEL_ATTR, selectItem.getLabel(), null); // depends on control dependency: [if], data = [none] SelectItem[] selectItems = ((SelectItemGroup) selectItem) .getSelectItems(); renderSelectOptions(context, component, converter, lookupSet, Arrays.asList(selectItems)); // depends on control dependency: [if], data = [none] writer.endElement(HTML.OPTGROUP_ELEM); // depends on control dependency: [if], data = [none] } else { String itemStrValue = org.apache.myfaces.shared.renderkit.RendererUtils .getConvertedStringValue(context, component, converter, selectItem); boolean selected = lookupSet.contains(itemStrValue); //TODO/FIX: we always compare the String vales, better fill lookupSet with Strings //only when useSubmittedValue==true, else use the real item value Objects // IF the hideNoSelectionOption attribute of the component is true // AND this selectItem is the "no selection option" // AND there are currently selected items // AND this item (the "no selection option") is not selected // (if there is currently no value on UISelectOne, lookupSet contains "") if (hideNoSelectionOption && selectItem.isNoSelectionOption() && lookupSet.size() != 0 && !(lookupSet.size() == 1 && lookupSet.contains("")) && !selected) { // do not render this selectItem 
continue; } writer.write(TABULATOR); // depends on control dependency: [if], data = [none] writer.startElement(HTML.OPTION_ELEM, null); // component); // depends on control dependency: [if], data = [none] if (itemStrValue != null) { writer.writeAttribute(HTML.VALUE_ATTR, itemStrValue, null); // depends on control dependency: [if], data = [null)] } else { writer.writeAttribute(HTML.VALUE_ATTR, "", null); // depends on control dependency: [if], data = [null)] } if (selected) { writer.writeAttribute(HTML.SELECTED_ATTR, HTML.SELECTED_ATTR, null); // depends on control dependency: [if], data = [none] } boolean disabled = selectItem.isDisabled(); if (disabled) { writer.writeAttribute(HTML.DISABLED_ATTR, HTML.DISABLED_ATTR, null); // depends on control dependency: [if], data = [none] } String labelClass = null; if (componentDisabled || disabled) { labelClass = (String) component.getAttributes().get( JSFAttr.DISABLED_CLASS_ATTR); // depends on control dependency: [if], data = [none] } else { labelClass = (String) component.getAttributes().get( JSFAttr.ENABLED_CLASS_ATTR); // depends on control dependency: [if], data = [none] } if (labelClass != null) { writer.writeAttribute("class", labelClass, "labelClass"); // depends on control dependency: [if], data = [none] } boolean escape; if (component instanceof EscapeCapable) { escape = ((EscapeCapable) component).isEscape(); // depends on control dependency: [if], data = [none] // Preserve tomahawk semantic. If escape=false // all items should be non escaped. If escape // is true check if selectItem.isEscape() is // true and do it. // This is done for remain compatibility. 
if (escape && selectItem.isEscape()) { writer.writeText(selectItem.getLabel(), null); // depends on control dependency: [if], data = [none] } else { writer.write(selectItem.getLabel()); // depends on control dependency: [if], data = [none] } } else { escape = RendererUtils.getBooleanAttribute(component, JSFAttr.ESCAPE_ATTR, false); // depends on control dependency: [if], data = [none] //default is to escape //In JSF 1.2, when a SelectItem is created by default //selectItem.isEscape() returns true (this property //is not available on JSF 1.1). //so, if we found a escape property on the component //set to true, escape every item, but if not //check if isEscape() = true first. if (escape || selectItem.isEscape()) { writer.writeText(selectItem.getLabel(), null); // depends on control dependency: [if], data = [none] } else { writer.write(selectItem.getLabel()); // depends on control dependency: [if], data = [none] } } writer.endElement(HTML.OPTION_ELEM); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public static PublicKey deserializePublicKey(byte[] keyData, String algorithm) { LOGGER.trace("deserialize public key from data using algorithm \"{}\"", algorithm); X509EncodedKeySpec pubSpec = new X509EncodedKeySpec(keyData); try { KeyFactory keyFactory = KeyFactory.getInstance(algorithm); return keyFactory.generatePublic(pubSpec); } catch (GeneralSecurityException e) { throw new IllegalArgumentException("provided data could not be converted to a PublicKey for algorithm " + algorithm, e); } } }
public class class_name { public static PublicKey deserializePublicKey(byte[] keyData, String algorithm) { LOGGER.trace("deserialize public key from data using algorithm \"{}\"", algorithm); X509EncodedKeySpec pubSpec = new X509EncodedKeySpec(keyData); try { KeyFactory keyFactory = KeyFactory.getInstance(algorithm); return keyFactory.generatePublic(pubSpec); // depends on control dependency: [try], data = [none] } catch (GeneralSecurityException e) { throw new IllegalArgumentException("provided data could not be converted to a PublicKey for algorithm " + algorithm, e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public byte[] readBytesNullEnd() { int initialPosition = position; int cnt = 0; while (remaining() > 0 && (buf[position++] != 0)) { cnt++; } final byte[] tmpArr = new byte[cnt]; System.arraycopy(buf, initialPosition, tmpArr, 0, cnt); return tmpArr; } }
public class class_name { public byte[] readBytesNullEnd() { int initialPosition = position; int cnt = 0; while (remaining() > 0 && (buf[position++] != 0)) { cnt++; // depends on control dependency: [while], data = [none] } final byte[] tmpArr = new byte[cnt]; System.arraycopy(buf, initialPosition, tmpArr, 0, cnt); return tmpArr; } }
public class class_name { @SuppressWarnings("unchecked") private static void updateCache(HashMap cache, Object key, Object value) { if (value == null) { return; } if (cache.size() > 1000) { cache.clear(); } cache.put(key, value); } }
public class class_name { @SuppressWarnings("unchecked") private static void updateCache(HashMap cache, Object key, Object value) { if (value == null) { return; // depends on control dependency: [if], data = [none] } if (cache.size() > 1000) { cache.clear(); // depends on control dependency: [if], data = [none] } cache.put(key, value); } }
public class class_name { @Override public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) { // Save the supplied value to the date picker. setCellEditorValue(value); // Draw the appropriate background colors to indicate a selected or unselected state. if (isSelected) { if (matchTableSelectionBackgroundColor) { datePicker.getComponentDateTextField().setBackground(table.getSelectionBackground()); datePicker.setBackground(table.getSelectionBackground()); } else { datePicker.zDrawTextFieldIndicators(); } } if (!isSelected) { if (matchTableBackgroundColor) { datePicker.getComponentDateTextField().setBackground(table.getBackground()); datePicker.setBackground(table.getBackground()); } else { datePicker.zDrawTextFieldIndicators(); } } // Draw the appropriate borders to indicate a focused or unfocused state. if (hasFocus) { datePicker.setBorder(borderFocusedCell); } else { datePicker.setBorder(borderUnfocusedCell); } // If needed, adjust the minimum row height for the table. zAdjustTableRowHeightIfNeeded(table); // This fixes a bug where the date text could "move around" during a table resize event. datePicker.getComponentDateTextField().setScrollOffset(0); // Return the date picker component. return datePicker; } }
public class class_name { @Override public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) { // Save the supplied value to the date picker. setCellEditorValue(value); // Draw the appropriate background colors to indicate a selected or unselected state. if (isSelected) { if (matchTableSelectionBackgroundColor) { datePicker.getComponentDateTextField().setBackground(table.getSelectionBackground()); // depends on control dependency: [if], data = [none] datePicker.setBackground(table.getSelectionBackground()); // depends on control dependency: [if], data = [none] } else { datePicker.zDrawTextFieldIndicators(); // depends on control dependency: [if], data = [none] } } if (!isSelected) { if (matchTableBackgroundColor) { datePicker.getComponentDateTextField().setBackground(table.getBackground()); // depends on control dependency: [if], data = [none] datePicker.setBackground(table.getBackground()); // depends on control dependency: [if], data = [none] } else { datePicker.zDrawTextFieldIndicators(); // depends on control dependency: [if], data = [none] } } // Draw the appropriate borders to indicate a focused or unfocused state. if (hasFocus) { datePicker.setBorder(borderFocusedCell); // depends on control dependency: [if], data = [none] } else { datePicker.setBorder(borderUnfocusedCell); // depends on control dependency: [if], data = [none] } // If needed, adjust the minimum row height for the table. zAdjustTableRowHeightIfNeeded(table); // This fixes a bug where the date text could "move around" during a table resize event. datePicker.getComponentDateTextField().setScrollOffset(0); // Return the date picker component. return datePicker; } }
public class class_name { public synchronized void resume() { closed = false; for(PauseListener p : pauseListeners) { p.resumed(); } pauseListeners.clear(); } }
public class class_name { public synchronized void resume() { closed = false; for(PauseListener p : pauseListeners) { p.resumed(); // depends on control dependency: [for], data = [p] } pauseListeners.clear(); } }
public class class_name { private void removeHandlerIfActive(ChannelHandlerContext ctx, String name) { if (ctx.channel().isActive()) { ChannelPipeline pipeline = ctx.pipeline(); ChannelHandler handler = pipeline.get(name); if (handler != null) { pipeline.remove(name); } } } }
public class class_name { private void removeHandlerIfActive(ChannelHandlerContext ctx, String name) { if (ctx.channel().isActive()) { ChannelPipeline pipeline = ctx.pipeline(); ChannelHandler handler = pipeline.get(name); if (handler != null) { pipeline.remove(name); // depends on control dependency: [if], data = [none] } } } }
public class class_name { protected void handleAppendResponse(RaftMemberContext member, AppendRequest request, AppendResponse response, long timestamp) { if (response.status() == RaftResponse.Status.OK) { handleAppendResponseOk(member, request, response); } else { handleAppendResponseError(member, request, response); } } }
public class class_name { protected void handleAppendResponse(RaftMemberContext member, AppendRequest request, AppendResponse response, long timestamp) { if (response.status() == RaftResponse.Status.OK) { handleAppendResponseOk(member, request, response); // depends on control dependency: [if], data = [none] } else { handleAppendResponseError(member, request, response); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static double[] pForDistribution(int[] counts) { double[] probabilities = new double[counts.length]; int total = 0; // Loop over the counts for all symbols adding up the total number. for (int c : counts) { total += c; } // Loop over the counts for all symbols dividing by the total number to provide a probability estimate. for (int i = 0; i < probabilities.length; i++) { if (total > 0) { probabilities[i] = ((double) counts[i]) / total; } else { probabilities[i] = 0.0d; } } return probabilities; } }
public class class_name { public static double[] pForDistribution(int[] counts) { double[] probabilities = new double[counts.length]; int total = 0; // Loop over the counts for all symbols adding up the total number. for (int c : counts) { total += c; // depends on control dependency: [for], data = [c] } // Loop over the counts for all symbols dividing by the total number to provide a probability estimate. for (int i = 0; i < probabilities.length; i++) { if (total > 0) { probabilities[i] = ((double) counts[i]) / total; // depends on control dependency: [if], data = [none] } else { probabilities[i] = 0.0d; // depends on control dependency: [if], data = [none] } } return probabilities; } }
public class class_name { private String makePropertyName(String name) { if (name.startsWith(beanParser.getFieldPrefix())) { return name.substring(beanParser.getFieldPrefix().length()); } return name; } }
public class class_name { private String makePropertyName(String name) { if (name.startsWith(beanParser.getFieldPrefix())) { return name.substring(beanParser.getFieldPrefix().length()); // depends on control dependency: [if], data = [none] } return name; } }
public class class_name { protected void setValue(int newValue, boolean updateTextField, boolean firePropertyChange) { int oldValue = value; if (newValue < min) { value = min; } else if (newValue > max) { value = max; } else { value = newValue; } if (updateTextField) { textField.setText(Integer.toString(value)); textField.setForeground(Color.black); } if (firePropertyChange) { firePropertyChange("value", oldValue, value); } } }
public class class_name { protected void setValue(int newValue, boolean updateTextField, boolean firePropertyChange) { int oldValue = value; if (newValue < min) { value = min; // depends on control dependency: [if], data = [none] } else if (newValue > max) { value = max; // depends on control dependency: [if], data = [none] } else { value = newValue; // depends on control dependency: [if], data = [none] } if (updateTextField) { textField.setText(Integer.toString(value)); // depends on control dependency: [if], data = [none] textField.setForeground(Color.black); // depends on control dependency: [if], data = [none] } if (firePropertyChange) { firePropertyChange("value", oldValue, value); // depends on control dependency: [if], data = [none] } } }
public class class_name { boolean executeInternal(String sql, Map<String, ParameterBindingDTO> parameterBindings) throws SQLException { raiseSQLExceptionIfStatementIsClosed(); connection.injectedDelay(); logger.debug("execute: {}", sql); String trimmedSql = sql.trim(); if (trimmedSql.length() >= 20 && trimmedSql.toLowerCase().startsWith("set-sf-property")) { // deprecated: sfsql executeSetProperty(sql); return false; } SFBaseResultSet sfResultSet; try { sfResultSet = sfStatement.execute(sql, parameterBindings, SFStatement.CallingMethod.EXECUTE); sfResultSet.setSession(this.connection.getSfSession()); if (resultSet != null) { openResultSets.add(resultSet); } resultSet = new SnowflakeResultSetV1(sfResultSet, this); queryID = sfResultSet.getQueryId(); // Legacy behavior treats update counts as result sets for single- // statement execute, so we only treat update counts as update counts // if JDBC_EXECUTE_RETURN_COUNT_FOR_DML is set, or if a statement // is multi-statement if (!sfResultSet.getStatementType().isGenerateResultSet() && (connection.getSfSession().isExecuteReturnCountForDML() || sfStatement.hasChildren())) { updateCount = ResultUtil.calculateUpdateCount(sfResultSet); if (resultSet != null) { openResultSets.add(resultSet); } resultSet = null; return false; } updateCount = NO_UPDATES; return true; } catch (SFException ex) { throw new SnowflakeSQLException(ex.getCause(), ex.getSqlState(), ex.getVendorCode(), ex.getParams()); } } }
public class class_name { boolean executeInternal(String sql, Map<String, ParameterBindingDTO> parameterBindings) throws SQLException { raiseSQLExceptionIfStatementIsClosed(); connection.injectedDelay(); logger.debug("execute: {}", sql); String trimmedSql = sql.trim(); if (trimmedSql.length() >= 20 && trimmedSql.toLowerCase().startsWith("set-sf-property")) { // deprecated: sfsql executeSetProperty(sql); return false; } SFBaseResultSet sfResultSet; try { sfResultSet = sfStatement.execute(sql, parameterBindings, SFStatement.CallingMethod.EXECUTE); sfResultSet.setSession(this.connection.getSfSession()); if (resultSet != null) { openResultSets.add(resultSet); // depends on control dependency: [if], data = [(resultSet] } resultSet = new SnowflakeResultSetV1(sfResultSet, this); queryID = sfResultSet.getQueryId(); // Legacy behavior treats update counts as result sets for single- // statement execute, so we only treat update counts as update counts // if JDBC_EXECUTE_RETURN_COUNT_FOR_DML is set, or if a statement // is multi-statement if (!sfResultSet.getStatementType().isGenerateResultSet() && (connection.getSfSession().isExecuteReturnCountForDML() || sfStatement.hasChildren())) { updateCount = ResultUtil.calculateUpdateCount(sfResultSet); // depends on control dependency: [if], data = [none] if (resultSet != null) { openResultSets.add(resultSet); // depends on control dependency: [if], data = [(resultSet] } resultSet = null; // depends on control dependency: [if], data = [none] return false; // depends on control dependency: [if], data = [none] } updateCount = NO_UPDATES; return true; } catch (SFException ex) { throw new SnowflakeSQLException(ex.getCause(), ex.getSqlState(), ex.getVendorCode(), ex.getParams()); } } }
public class class_name { private static void formatBytesToSB (StringBuilder sb, byte[] data, int start, int countRequested, boolean displayCharRepresentations, int max) { if (max > MAX_TO_FORMAT) max = MAX_TO_FORMAT; // Ensure we can't be asked to format a completely bonkers amount of data int count = (countRequested <= max + 16) ? countRequested : max; // Determine how many bytes to format (giving 16 bytes leeway) if (data != null) { int len = data.length; sb.append("Array length = 0x"+Integer.toHexString(len)+" ("+len+"), displaying bytes from "+start+" for "+count); if (count < countRequested) { sb.append(" ("+countRequested+" bytes requested)"); } sb.append(ls+ls); if (displayCharRepresentations) sb.append(" offset : 0 1 2 3 4 5 6 7 8 9 A B C D E F 0 2 4 6 8 A C E " + ls); else sb.append(" offset : 0 1 2 3 4 5 6 7 8 9 A B C D E F" + ls); int t; boolean skip; int suppress = 0; int end = start + count; String c[] = new String[16]; // Current line bytes String p[] = new String[16]; // Previous line bytes String str[] = new String[16];// The string representation for (int j=0; j < 16; j++) { c[j] = null; str[j] = null; } for (int i=0; i < len; i = i+16) { skip = true; for (int j=0; j < 16; j++) { t = i + j; if ((t >= start) && (t < end) && (t < len)) { c[j] = pad(Integer.toHexString(data[t]),2); // Strip out some known 'bad-guys' (these are consistent across ASCII / EBCIDIC) // and replace them with the dead character if (c[j].equalsIgnoreCase("00") || // Null c[j].equalsIgnoreCase("09") || // Tab c[j].equalsIgnoreCase("0a") || // LF c[j].equalsIgnoreCase("0b") || // VertTab c[j].equalsIgnoreCase("0c") || // FF c[j].equalsIgnoreCase("0d") || // CR c[j].equalsIgnoreCase("07")) // Bell { str[j] = DEAD_CHAR; } else { str[j] = new String(data, t, 1); // Conversion is done here using the default // character set of the platform } skip = false; } else { c[j] = " "; str[j] = DEAD_CHAR; } } if (skip) { if (suppress > 0) sb.append(dup(suppress)); suppress = 0; c[0] = null; 
// Force a line difference } else { if (c[0].equals(p[0]) && c[1].equals(p[1]) && c[2].equals(p[2]) && c[3].equals(p[3]) && c[4].equals(p[4]) && c[5].equals(p[5]) && c[6].equals(p[6]) && c[7].equals(p[7]) && c[8].equals(p[8]) && c[9].equals(p[9]) && c[10].equals(p[10]) && c[11].equals(p[11]) && c[12].equals(p[12]) && c[13].equals(p[13]) && c[14].equals(p[14]) && c[15].equals(p[15])) { suppress++; } else { if (suppress > 0) sb.append(dup(suppress)); sb.append("0x"+pad(Integer.toHexString(i),8)+" ("+pad(Integer.valueOf(i).toString(),8," ")+") : "); sb.append(c[0]+c[1]+c[2]+c[3]+" "+c[4]+c[5]+c[6]+c[7]+" "+c[8]+c[9]+c[10]+c[11]+" "+c[12]+c[13]+c[14]+c[15]); if (displayCharRepresentations) { sb.append(" | "); sb.append(str[0]+str[1]+str[2]+str[3]+str[4]+str[5]+str[6]+str[7]+str[8]+str[9]+str[10]+str[11]+str[12]+str[13]+str[14]+str[15]); } sb.append(ls); for (int j=0; j < 16; j++) p[j] = c[j]; suppress = 0; } } } if (suppress > 0) sb.append(dup(suppress)); } // If the number of bytes formatted was fewer than requested, say so. if (count < countRequested) { sb.append("Suppressed remaining " + (countRequested-count) + " bytes." + ls); } } }
public class class_name { private static void formatBytesToSB (StringBuilder sb, byte[] data, int start, int countRequested, boolean displayCharRepresentations, int max) { if (max > MAX_TO_FORMAT) max = MAX_TO_FORMAT; // Ensure we can't be asked to format a completely bonkers amount of data int count = (countRequested <= max + 16) ? countRequested : max; // Determine how many bytes to format (giving 16 bytes leeway) if (data != null) { int len = data.length; sb.append("Array length = 0x"+Integer.toHexString(len)+" ("+len+"), displaying bytes from "+start+" for "+count); // depends on control dependency: [if], data = [none] if (count < countRequested) { sb.append(" ("+countRequested+" bytes requested)"); // depends on control dependency: [if], data = [none] } sb.append(ls+ls); // depends on control dependency: [if], data = [none] if (displayCharRepresentations) sb.append(" offset : 0 1 2 3 4 5 6 7 8 9 A B C D E F 0 2 4 6 8 A C E " + ls); else sb.append(" offset : 0 1 2 3 4 5 6 7 8 9 A B C D E F" + ls); int t; boolean skip; int suppress = 0; int end = start + count; String c[] = new String[16]; // Current line bytes String p[] = new String[16]; // Previous line bytes String str[] = new String[16];// The string representation for (int j=0; j < 16; j++) { c[j] = null; // depends on control dependency: [for], data = [j] str[j] = null; // depends on control dependency: [for], data = [j] } for (int i=0; i < len; i = i+16) { skip = true; // depends on control dependency: [for], data = [none] for (int j=0; j < 16; j++) { t = i + j; // depends on control dependency: [for], data = [j] if ((t >= start) && (t < end) && (t < len)) { c[j] = pad(Integer.toHexString(data[t]),2); // depends on control dependency: [if], data = [none] // Strip out some known 'bad-guys' (these are consistent across ASCII / EBCIDIC) // and replace them with the dead character if (c[j].equalsIgnoreCase("00") || // Null c[j].equalsIgnoreCase("09") || // Tab c[j].equalsIgnoreCase("0a") || // LF 
c[j].equalsIgnoreCase("0b") || // VertTab c[j].equalsIgnoreCase("0c") || // FF c[j].equalsIgnoreCase("0d") || // CR c[j].equalsIgnoreCase("07")) // Bell { str[j] = DEAD_CHAR; // depends on control dependency: [if], data = [none] } else { str[j] = new String(data, t, 1); // Conversion is done here using the default // depends on control dependency: [if], data = [none] // character set of the platform } skip = false; // depends on control dependency: [if], data = [none] } else { c[j] = " "; str[j] = DEAD_CHAR; // depends on control dependency: [if], data = [none] } } if (skip) { if (suppress > 0) sb.append(dup(suppress)); suppress = 0; // depends on control dependency: [if], data = [none] c[0] = null; // Force a line difference // depends on control dependency: [if], data = [none] } else { if (c[0].equals(p[0]) && c[1].equals(p[1]) && c[2].equals(p[2]) && c[3].equals(p[3]) && c[4].equals(p[4]) && c[5].equals(p[5]) && c[6].equals(p[6]) && c[7].equals(p[7]) && c[8].equals(p[8]) && c[9].equals(p[9]) && c[10].equals(p[10]) && c[11].equals(p[11]) && c[12].equals(p[12]) && c[13].equals(p[13]) && c[14].equals(p[14]) && c[15].equals(p[15])) { suppress++; // depends on control dependency: [if], data = [none] } else { if (suppress > 0) sb.append(dup(suppress)); sb.append("0x"+pad(Integer.toHexString(i),8)+" ("+pad(Integer.valueOf(i).toString(),8," ")+") : "); sb.append(c[0]+c[1]+c[2]+c[3]+" "+c[4]+c[5]+c[6]+c[7]+" "+c[8]+c[9]+c[10]+c[11]+" "+c[12]+c[13]+c[14]+c[15]); if (displayCharRepresentations) { sb.append(" | "); sb.append(str[0]+str[1]+str[2]+str[3]+str[4]+str[5]+str[6]+str[7]+str[8]+str[9]+str[10]+str[11]+str[12]+str[13]+str[14]+str[15]); // depends on control dependency: [if], data = [none] } sb.append(ls); // depends on control dependency: [if], data = [none] for (int j=0; j < 16; j++) p[j] = c[j]; suppress = 0; // depends on control dependency: [if], data = [none] } } } if (suppress > 0) sb.append(dup(suppress)); } // If the number of bytes formatted was fewer than 
requested, say so. if (count < countRequested) { sb.append("Suppressed remaining " + (countRequested-count) + " bytes." + ls); } } }
public class class_name { @Override public IResourceDescriptions getResourceDescriptions(ResourceSet resourceSet) { IResourceDescriptions result = super.getResourceDescriptions(resourceSet); if (compilerPhases.isIndexing(resourceSet)) { // during indexing we don't want to see any local files String projectName = getProjectName(resourceSet); if(projectName != null) { final String encodedProjectName = URI.encodeSegment(projectName, true); Predicate<URI> predicate = new Predicate<URI>() { @Override public boolean apply(URI uri) { return isProjectLocal(uri, encodedProjectName); } }; if (result instanceof IShadowedResourceDescriptions) { return new ShadowedFilteringResourceDescriptions(result, predicate); } else { return new FilteringResourceDescriptions(result, predicate); } } } return result; } }
public class class_name { @Override public IResourceDescriptions getResourceDescriptions(ResourceSet resourceSet) { IResourceDescriptions result = super.getResourceDescriptions(resourceSet); if (compilerPhases.isIndexing(resourceSet)) { // during indexing we don't want to see any local files String projectName = getProjectName(resourceSet); if(projectName != null) { final String encodedProjectName = URI.encodeSegment(projectName, true); Predicate<URI> predicate = new Predicate<URI>() { @Override public boolean apply(URI uri) { return isProjectLocal(uri, encodedProjectName); } }; if (result instanceof IShadowedResourceDescriptions) { return new ShadowedFilteringResourceDescriptions(result, predicate); // depends on control dependency: [if], data = [none] } else { return new FilteringResourceDescriptions(result, predicate); // depends on control dependency: [if], data = [none] } } } return result; } }
public class class_name { public static <T extends Extendable> List<T> group(final List<T> values, final Group[] groups) { final SortableList<T> list = getSortableList(values); final GroupStrategy strategy = new GroupStrategy(); for (int i = groups.length - 1; i >= 0; i--) { list.sortOnProperty(groups[i], true, strategy); } return list; } }
public class class_name { public static <T extends Extendable> List<T> group(final List<T> values, final Group[] groups) { final SortableList<T> list = getSortableList(values); final GroupStrategy strategy = new GroupStrategy(); for (int i = groups.length - 1; i >= 0; i--) { list.sortOnProperty(groups[i], true, strategy); // depends on control dependency: [for], data = [i] } return list; } }
public class class_name { public static String serialize(Object object, boolean addType) { if (object == null) { return "null"; } else if (object instanceof CharSequence || object instanceof Character) { //TODO 去除特殊字符 String tmp = object.toString(); return '\"' + tmp.replace("\"", "\\\"").replace("\b", "\\b") .replace("\t", "\\t").replace("\r", "\\r") .replace("\f", "\\f").replace("\n", "\\n") + '\"'; } else if (object instanceof Number || object instanceof Boolean) { return object.toString(); } else if (object instanceof Map) { StringBuilder sb = new StringBuilder(); sb.append('{'); Map map = (Map) object; Iterator itr = map.entrySet().iterator(); while (itr.hasNext()) { Map.Entry entry = (Map.Entry) itr.next(); sb.append(serialize(entry.getKey(), addType)).append(':').append(serialize(entry.getValue(), addType)) .append(','); } int last = sb.length() - 1; if (sb.charAt(last) == ',') { sb.deleteCharAt(last); } sb.append('}'); return sb.toString(); } else if (object instanceof Collection) { return serialize(((Collection) object).toArray(), addType); } else if (object.getClass().isArray()) { StringBuilder sb = new StringBuilder(); sb.append('['); int last = Array.getLength(object) - 1; for (int i = 0; i <= last; ++i) { Object value = Array.get(object, i); sb.append(serialize(value, addType)).append(','); } last = sb.length() - 1; if (sb.charAt(last) == ',') { sb.deleteCharAt(last); } sb.append(']'); return sb.toString(); } else { //throw new IllegalArgumentException("Unsupported type " + object.getClass().getName() + ":" + object.toString()); // 自定义对象,先转成map等 return serialize(BeanSerializer.serialize(object, addType), addType); } } }
public class class_name { public static String serialize(Object object, boolean addType) { if (object == null) { return "null"; // depends on control dependency: [if], data = [none] } else if (object instanceof CharSequence || object instanceof Character) { //TODO 去除特殊字符 String tmp = object.toString(); return '\"' + tmp.replace("\"", "\\\"").replace("\b", "\\b") .replace("\t", "\\t").replace("\r", "\\r") .replace("\f", "\\f").replace("\n", "\\n") + '\"'; // depends on control dependency: [if], data = [none] } else if (object instanceof Number || object instanceof Boolean) { return object.toString(); // depends on control dependency: [if], data = [none] } else if (object instanceof Map) { StringBuilder sb = new StringBuilder(); sb.append('{'); // depends on control dependency: [if], data = [none] Map map = (Map) object; Iterator itr = map.entrySet().iterator(); while (itr.hasNext()) { Map.Entry entry = (Map.Entry) itr.next(); sb.append(serialize(entry.getKey(), addType)).append(':').append(serialize(entry.getValue(), addType)) .append(','); // depends on control dependency: [while], data = [none] } int last = sb.length() - 1; if (sb.charAt(last) == ',') { sb.deleteCharAt(last); // depends on control dependency: [if], data = [none] } sb.append('}'); // depends on control dependency: [if], data = [none] return sb.toString(); // depends on control dependency: [if], data = [none] } else if (object instanceof Collection) { return serialize(((Collection) object).toArray(), addType); // depends on control dependency: [if], data = [none] } else if (object.getClass().isArray()) { StringBuilder sb = new StringBuilder(); sb.append('['); // depends on control dependency: [if], data = [none] int last = Array.getLength(object) - 1; for (int i = 0; i <= last; ++i) { Object value = Array.get(object, i); sb.append(serialize(value, addType)).append(','); // depends on control dependency: [for], data = [none] } last = sb.length() - 1; // depends on control dependency: [if], data = 
[none] if (sb.charAt(last) == ',') { sb.deleteCharAt(last); // depends on control dependency: [if], data = [none] } sb.append(']'); // depends on control dependency: [if], data = [none] return sb.toString(); // depends on control dependency: [if], data = [none] } else { //throw new IllegalArgumentException("Unsupported type " + object.getClass().getName() + ":" + object.toString()); // 自定义对象,先转成map等 return serialize(BeanSerializer.serialize(object, addType), addType); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Renders every MDC entry as a single comma-separated string in the
     * map's iteration order, e.g. {@code key0=value0, key1=value1}.
     *
     * @param mdcPropertyMap MDC key/value pairs to format
     * @return the joined representation; empty string for an empty map
     */
    private String outputMDCForAllKeys(Map<String, String> mdcPropertyMap) {
        StringBuilder out = new StringBuilder();
        // Separator-prefix technique: empty before the first entry, ", " after.
        String separator = "";
        for (Map.Entry<String, String> entry : mdcPropertyMap.entrySet()) {
            out.append(separator).append(entry.getKey()).append('=').append(entry.getValue());
            separator = ", ";
        }
        return out.toString();
    }
}
public class class_name {
    /**
     * Renders every MDC entry as a single comma-separated string in the
     * map's iteration order, e.g. {@code key0=value0, key1=value1}.
     *
     * @param mdcPropertyMap MDC key/value pairs to format
     * @return the joined representation; empty string for an empty map
     */
    private String outputMDCForAllKeys(Map<String, String> mdcPropertyMap) {
        StringBuilder buf = new StringBuilder();
        // Tracks whether we are on the first entry, so no leading ", " is emitted.
        boolean first = true;
        for (Map.Entry<String, String> entry : mdcPropertyMap.entrySet()) {
            if (first) {
                first = false; // depends on control dependency: [if], data = [none]
            } else {
                buf.append(", "); // depends on control dependency: [if], data = [none]
            }
            //format: key0=value0, key1=value1
            buf.append(entry.getKey()).append('=').append(entry.getValue()); // depends on control dependency: [for], data = [entry]
        }
        return buf.toString();
    }
}
public class class_name {
    /**
     * Looks up the positional index stored for {@code key}.
     *
     * @param key lookup key into {@code cset}
     * @return the second element of the {@code int[]} record held for the
     *         key, or {@code -1} when the key is unknown
     */
    public int getIndex(Object key) {
        // cset presumably maps keys to at-least-two-element int[] records — TODO confirm.
        final int[] record = (int[]) cset.get(key);
        return (record == null) ? -1 : record[1];
    }
}
public class class_name {
    /**
     * Looks up the positional index stored for {@code key}.
     *
     * @param key lookup key into {@code cset}
     * @return the second element of the {@code int[]} record held for the
     *         key, or {@code -1} when the key is unknown
     */
    public int getIndex(Object key) {
        // cset presumably maps keys to at-least-two-element int[] records — TODO confirm.
        int[] nums = (int[]) cset.get(key);
        if (nums == null) {
            return -1; // depends on control dependency: [if], data = [none]
        } else {
            return nums[1]; // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Replaces the preset list with a defensive copy of the given collection.
     * A {@code null} argument clears the field to {@code null} (distinct from
     * an empty list).
     *
     * @param presets source collection, or {@code null} to unset
     */
    public void setPresets(java.util.Collection<Preset> presets) {
        this.presets = (presets == null)
                ? null
                : new java.util.ArrayList<Preset>(presets);
    }
}
public class class_name {
    /**
     * Replaces the preset list with a copy of the supplied collection.
     * Passing {@code null} clears the field (distinct from an empty list).
     *
     * @param presets source collection, or {@code null} to unset
     */
    public void setPresets(java.util.Collection<Preset> presets) {
        if (presets == null) {
            this.presets = null; // depends on control dependency: [if], data = [none]
            return; // depends on control dependency: [if], data = [none]
        }
        // Defensive copy: later mutation of the caller's collection has no effect here.
        this.presets = new java.util.ArrayList<Preset>(presets);
    }
}