code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name { @Deprecated public static double[][] timesTranspose(final double[] v1, final double[][] m2) { assert m2[0].length == 1 : ERR_MATRIX_INNERDIM; final double[][] re = new double[v1.length][m2.length]; for(int j = 0; j < m2.length; j++) { for(int i = 0; i < v1.length; i++) { re[i][j] = v1[i] * m2[j][0]; } } return re; } }
public class class_name { @Deprecated public static double[][] timesTranspose(final double[] v1, final double[][] m2) { assert m2[0].length == 1 : ERR_MATRIX_INNERDIM; final double[][] re = new double[v1.length][m2.length]; for(int j = 0; j < m2.length; j++) { for(int i = 0; i < v1.length; i++) { re[i][j] = v1[i] * m2[j][0]; // depends on control dependency: [for], data = [i] } } return re; } }
public class class_name { private Set<Function> chaseAtoms(Collection<Function> atoms) { Set<Function> derivedAtoms = new HashSet<>(); for (Function fact : atoms) { derivedAtoms.add(fact); for (LinearInclusionDependency d : dependencies.get(fact.getFunctionSymbol())) { CQIE rule = datalogFactory.getFreshCQIECopy(datalogFactory.getCQIE(d.getHead(), d.getBody())); Function ruleBody = rule.getBody().get(0); Substitution theta = unifierUtilities.getMGU(ruleBody, fact); if (theta != null && !theta.isEmpty()) { Function ruleHead = rule.getHead(); Function newFact = (Function)ruleHead.clone(); // unify to get fact is needed because the dependencies are not necessarily full // (in other words, they may contain existentials in the head) substitutionUtilities.applySubstitution(newFact, theta); derivedAtoms.add(newFact); } } } return derivedAtoms; } }
public class class_name { private Set<Function> chaseAtoms(Collection<Function> atoms) { Set<Function> derivedAtoms = new HashSet<>(); for (Function fact : atoms) { derivedAtoms.add(fact); // depends on control dependency: [for], data = [fact] for (LinearInclusionDependency d : dependencies.get(fact.getFunctionSymbol())) { CQIE rule = datalogFactory.getFreshCQIECopy(datalogFactory.getCQIE(d.getHead(), d.getBody())); Function ruleBody = rule.getBody().get(0); Substitution theta = unifierUtilities.getMGU(ruleBody, fact); if (theta != null && !theta.isEmpty()) { Function ruleHead = rule.getHead(); Function newFact = (Function)ruleHead.clone(); // unify to get fact is needed because the dependencies are not necessarily full // (in other words, they may contain existentials in the head) substitutionUtilities.applySubstitution(newFact, theta); // depends on control dependency: [if], data = [none] derivedAtoms.add(newFact); // depends on control dependency: [if], data = [none] } } } return derivedAtoms; } }
public class class_name { protected void addToPlayQueue (SoundKey skey) { boolean queued = enqueue(skey, true); if (queued) { if (_verbose.getValue()) { log.info("Sound request [key=" + skey.key + "]."); } } else /* if (_verbose.getValue()) */ { log.warning("SoundManager not playing sound because too many sounds in queue " + "[key=" + skey + "]."); } } }
public class class_name { protected void addToPlayQueue (SoundKey skey) { boolean queued = enqueue(skey, true); if (queued) { if (_verbose.getValue()) { log.info("Sound request [key=" + skey.key + "]."); // depends on control dependency: [if], data = [none] } } else /* if (_verbose.getValue()) */ { log.warning("SoundManager not playing sound because too many sounds in queue " + "[key=" + skey + "]."); // depends on control dependency: [if], data = [none] } } }
public class class_name { private static Map<String, String> split(String pAgentArgs) { Map<String,String> ret = new HashMap<String, String>(); if (pAgentArgs != null && pAgentArgs.length() > 0) { for (String arg : EscapeUtil.splitAsArray(pAgentArgs, EscapeUtil.CSV_ESCAPE, ",")) { String[] prop = arg.split("=",2); if (prop == null || prop.length != 2) { throw new IllegalArgumentException("jolokia: Invalid option '" + arg + "'"); } else { ret.put(prop[0],prop[1]); } } } return ret; } }
public class class_name { private static Map<String, String> split(String pAgentArgs) { Map<String,String> ret = new HashMap<String, String>(); if (pAgentArgs != null && pAgentArgs.length() > 0) { for (String arg : EscapeUtil.splitAsArray(pAgentArgs, EscapeUtil.CSV_ESCAPE, ",")) { String[] prop = arg.split("=",2); if (prop == null || prop.length != 2) { throw new IllegalArgumentException("jolokia: Invalid option '" + arg + "'"); } else { ret.put(prop[0],prop[1]); // depends on control dependency: [if], data = [(prop] } } } return ret; } }
public class class_name { public static List<CmsUser> filterCoreUsers(List<CmsUser> users) { Iterator<CmsUser> it = users.iterator(); while (it.hasNext()) { I_CmsPrincipal p = it.next(); if (p.getFlags() > I_CmsPrincipal.FLAG_CORE_LIMIT) { it.remove(); } } return users; } }
public class class_name { public static List<CmsUser> filterCoreUsers(List<CmsUser> users) { Iterator<CmsUser> it = users.iterator(); while (it.hasNext()) { I_CmsPrincipal p = it.next(); if (p.getFlags() > I_CmsPrincipal.FLAG_CORE_LIMIT) { it.remove(); // depends on control dependency: [if], data = [none] } } return users; } }
public class class_name { public void dispose () { // make sure the stream is stopped if (_state != AL10.AL_STOPPED) { stop(); } // delete the source and buffers _source.delete(); for (Buffer buffer : _buffers) { buffer.delete(); } // remove from manager _soundmgr.removeStream(this); } }
public class class_name { public void dispose () { // make sure the stream is stopped if (_state != AL10.AL_STOPPED) { stop(); // depends on control dependency: [if], data = [none] } // delete the source and buffers _source.delete(); for (Buffer buffer : _buffers) { buffer.delete(); // depends on control dependency: [for], data = [buffer] } // remove from manager _soundmgr.removeStream(this); } }
public class class_name { static CloudResourceBundle loadBundle(ServiceAccount serviceAccount, String bundleId, Locale locale) { CloudResourceBundle crb = null; ServiceClient client = ServiceClient.getInstance(serviceAccount); try { Map<String, String> resStrings = client.getResourceStrings(bundleId, locale.toLanguageTag(), false); crb = new CloudResourceBundle(resStrings); } catch (ServiceException e) { logger.info("Could not fetch resource data for " + locale + " from the translation bundle " + bundleId + ": " + e.getMessage()); } return crb; } }
public class class_name { static CloudResourceBundle loadBundle(ServiceAccount serviceAccount, String bundleId, Locale locale) { CloudResourceBundle crb = null; ServiceClient client = ServiceClient.getInstance(serviceAccount); try { Map<String, String> resStrings = client.getResourceStrings(bundleId, locale.toLanguageTag(), false); crb = new CloudResourceBundle(resStrings); // depends on control dependency: [try], data = [none] } catch (ServiceException e) { logger.info("Could not fetch resource data for " + locale + " from the translation bundle " + bundleId + ": " + e.getMessage()); } // depends on control dependency: [catch], data = [none] return crb; } }
public class class_name { public boolean compileEmptyCatalog(final String jarOutputPath) { // Use a special DDL reader to provide the contents. List<VoltCompilerReader> ddlReaderList = new ArrayList<>(1); ddlReaderList.add(new VoltCompilerStringReader("ddl.sql", m_emptyDDLComment)); // Seed it with the DDL so that a version upgrade hack in compileInternalToFile() // doesn't try to get the DDL file from the path. InMemoryJarfile jarFile = new InMemoryJarfile(); try { ddlReaderList.get(0).putInJar(jarFile, "ddl.sql"); } catch (IOException e) { compilerLog.error("Failed to add DDL file to empty in-memory jar."); return false; } return compileInternalToFile(jarOutputPath, null, null, ddlReaderList, jarFile); } }
public class class_name { public boolean compileEmptyCatalog(final String jarOutputPath) { // Use a special DDL reader to provide the contents. List<VoltCompilerReader> ddlReaderList = new ArrayList<>(1); ddlReaderList.add(new VoltCompilerStringReader("ddl.sql", m_emptyDDLComment)); // Seed it with the DDL so that a version upgrade hack in compileInternalToFile() // doesn't try to get the DDL file from the path. InMemoryJarfile jarFile = new InMemoryJarfile(); try { ddlReaderList.get(0).putInJar(jarFile, "ddl.sql"); // depends on control dependency: [try], data = [none] } catch (IOException e) { compilerLog.error("Failed to add DDL file to empty in-memory jar."); return false; } // depends on control dependency: [catch], data = [none] return compileInternalToFile(jarOutputPath, null, null, ddlReaderList, jarFile); } }
public class class_name { public void include(ConfigurationMetadataRepository repository) { for (ConfigurationMetadataGroup group : repository.getAllGroups().values()) { ConfigurationMetadataGroup existingGroup = this.allGroups.get(group.getId()); if (existingGroup == null) { this.allGroups.put(group.getId(), group); } else { // Merge properties group.getProperties().forEach((name, value) -> putIfAbsent( existingGroup.getProperties(), name, value)); // Merge sources group.getSources().forEach((name, value) -> putIfAbsent(existingGroup.getSources(), name, value)); } } } }
public class class_name { public void include(ConfigurationMetadataRepository repository) { for (ConfigurationMetadataGroup group : repository.getAllGroups().values()) { ConfigurationMetadataGroup existingGroup = this.allGroups.get(group.getId()); if (existingGroup == null) { this.allGroups.put(group.getId(), group); // depends on control dependency: [if], data = [none] } else { // Merge properties group.getProperties().forEach((name, value) -> putIfAbsent( existingGroup.getProperties(), name, value)); // depends on control dependency: [if], data = [none] // Merge sources group.getSources().forEach((name, value) -> putIfAbsent(existingGroup.getSources(), name, value)); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public Method getRestrictAtViewMethod(Annotation annotation) { Method restrictAtViewMethod; try { restrictAtViewMethod = annotation.annotationType().getDeclaredMethod("restrictAtPhase"); } catch (NoSuchMethodException ex) { restrictAtViewMethod = null; } catch (SecurityException ex) { throw new IllegalArgumentException("restrictAtView method must be accessible", ex); } return restrictAtViewMethod; } }
public class class_name { public Method getRestrictAtViewMethod(Annotation annotation) { Method restrictAtViewMethod; try { restrictAtViewMethod = annotation.annotationType().getDeclaredMethod("restrictAtPhase"); // depends on control dependency: [try], data = [none] } catch (NoSuchMethodException ex) { restrictAtViewMethod = null; } catch (SecurityException ex) { // depends on control dependency: [catch], data = [none] throw new IllegalArgumentException("restrictAtView method must be accessible", ex); } // depends on control dependency: [catch], data = [none] return restrictAtViewMethod; } }
public class class_name { protected void addCompileSourceArtifacts(JavaCommand cmd) throws MojoExecutionException { if ( compileSourcesArtifacts == null ) { return; } for ( String include : compileSourcesArtifacts ) { List<String> parts = new ArrayList<String>(); parts.addAll( Arrays.asList(include.split(":")) ); if ( parts.size() == 2 ) { // type is optional as it will mostly be "jar" parts.add( "jar" ); } String dependencyId = StringUtils.join( parts.iterator(), ":" ); boolean found = false; for ( Artifact artifact : getProjectArtifacts() ) { getLog().debug( "compare " + dependencyId + " with " + artifact.getDependencyConflictId() ); if ( artifact.getDependencyConflictId().equals( dependencyId ) ) { getLog().debug( "Add " + dependencyId + " sources.jar artifact to compile classpath" ); Artifact sources = resolve( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(), "jar", "sources" ); cmd.addToClasspath( sources.getFile() ); found = true; break; } } if ( !found ) getLog().warn( "Declared compileSourcesArtifact was not found in project dependencies " + dependencyId ); } } }
public class class_name { protected void addCompileSourceArtifacts(JavaCommand cmd) throws MojoExecutionException { if ( compileSourcesArtifacts == null ) { return; } for ( String include : compileSourcesArtifacts ) { List<String> parts = new ArrayList<String>(); parts.addAll( Arrays.asList(include.split(":")) ); if ( parts.size() == 2 ) { // type is optional as it will mostly be "jar" parts.add( "jar" ); // depends on control dependency: [if], data = [none] } String dependencyId = StringUtils.join( parts.iterator(), ":" ); boolean found = false; for ( Artifact artifact : getProjectArtifacts() ) { getLog().debug( "compare " + dependencyId + " with " + artifact.getDependencyConflictId() ); // depends on control dependency: [for], data = [artifact] if ( artifact.getDependencyConflictId().equals( dependencyId ) ) { getLog().debug( "Add " + dependencyId + " sources.jar artifact to compile classpath" ); // depends on control dependency: [if], data = [none] Artifact sources = resolve( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(), "jar", "sources" ); cmd.addToClasspath( sources.getFile() ); // depends on control dependency: [if], data = [none] found = true; // depends on control dependency: [if], data = [none] break; } } if ( !found ) getLog().warn( "Declared compileSourcesArtifact was not found in project dependencies " + dependencyId ); } } }
public class class_name { public double getZeroRate(double maturity) { if(maturity == 0) { return this.getZeroRate(1.0E-14); } return -Math.log(getDiscountFactor(null, maturity))/maturity; } }
public class class_name { public double getZeroRate(double maturity) { if(maturity == 0) { return this.getZeroRate(1.0E-14); // depends on control dependency: [if], data = [none] } return -Math.log(getDiscountFactor(null, maturity))/maturity; } }
public class class_name { private void fromEntryPoint(PatternDescrBuilder<?> pattern) throws RecognitionException { String ep = ""; match(input, DRL6Lexer.ID, DroolsSoftKeywords.ENTRY, null, DroolsEditorType.KEYWORD); if (state.failed) return; match(input, DRL6Lexer.MINUS, null, null, DroolsEditorType.KEYWORD); if (state.failed) return; match(input, DRL6Lexer.ID, DroolsSoftKeywords.POINT, null, DroolsEditorType.KEYWORD); if (state.failed) return; ep = stringId(); if (state.backtracking == 0) { pattern.from().entryPoint(ep); if (input.LA(1) != DRL6Lexer.EOF) { helper.emit(Location.LOCATION_LHS_BEGIN_OF_CONDITION); } } } }
public class class_name { private void fromEntryPoint(PatternDescrBuilder<?> pattern) throws RecognitionException { String ep = ""; match(input, DRL6Lexer.ID, DroolsSoftKeywords.ENTRY, null, DroolsEditorType.KEYWORD); if (state.failed) return; match(input, DRL6Lexer.MINUS, null, null, DroolsEditorType.KEYWORD); if (state.failed) return; match(input, DRL6Lexer.ID, DroolsSoftKeywords.POINT, null, DroolsEditorType.KEYWORD); if (state.failed) return; ep = stringId(); if (state.backtracking == 0) { pattern.from().entryPoint(ep); if (input.LA(1) != DRL6Lexer.EOF) { helper.emit(Location.LOCATION_LHS_BEGIN_OF_CONDITION); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public void update(double timestamp, String versionName, Integer versionCode) { last = timestamp; total++; Long countForVersionName = versionNames.get(versionName); if (countForVersionName == null) { countForVersionName = 0L; } Long countForVersionCode = versionCodes.get(versionCode); if (countForVersionCode == null) { countForVersionCode = 0L; } versionNames.put(versionName, countForVersionName + 1); versionCodes.put(versionCode, countForVersionCode + 1); } }
public class class_name { public void update(double timestamp, String versionName, Integer versionCode) { last = timestamp; total++; Long countForVersionName = versionNames.get(versionName); if (countForVersionName == null) { countForVersionName = 0L; // depends on control dependency: [if], data = [none] } Long countForVersionCode = versionCodes.get(versionCode); if (countForVersionCode == null) { countForVersionCode = 0L; // depends on control dependency: [if], data = [none] } versionNames.put(versionName, countForVersionName + 1); versionCodes.put(versionCode, countForVersionCode + 1); } }
public class class_name { public void actionPerformed(ActionEvent e) { int x = calendarButton.getWidth() - (int) popup.getPreferredSize().getWidth(); int y = calendarButton.getY() + calendarButton.getHeight(); Calendar calendar = Calendar.getInstance(); Date date = dateEditor.getDate(); if (date != null) { calendar.setTime(date); } jcalendar.setCalendar(calendar); popup.show(calendarButton, x, y); dateSelected = false; } }
public class class_name { public void actionPerformed(ActionEvent e) { int x = calendarButton.getWidth() - (int) popup.getPreferredSize().getWidth(); int y = calendarButton.getY() + calendarButton.getHeight(); Calendar calendar = Calendar.getInstance(); Date date = dateEditor.getDate(); if (date != null) { calendar.setTime(date); // depends on control dependency: [if], data = [(date] } jcalendar.setCalendar(calendar); popup.show(calendarButton, x, y); dateSelected = false; } }
public class class_name { public Actions dragAndDropBy(WebElement source, int xOffset, int yOffset) { if (isBuildingActions()) { action.addAction(new ClickAndHoldAction(jsonMouse, (Locatable) source)); action.addAction(new MoveToOffsetAction(jsonMouse, null, xOffset, yOffset)); action.addAction(new ButtonReleaseAction(jsonMouse, null)); } return moveInTicks(source, 0, 0) .tick(defaultMouse.createPointerDown(LEFT.asArg())) .tick(defaultMouse.createPointerMove(Duration.ofMillis(250), Origin.pointer(), xOffset, yOffset)) .tick(defaultMouse.createPointerUp(LEFT.asArg())); } }
public class class_name { public Actions dragAndDropBy(WebElement source, int xOffset, int yOffset) { if (isBuildingActions()) { action.addAction(new ClickAndHoldAction(jsonMouse, (Locatable) source)); // depends on control dependency: [if], data = [none] action.addAction(new MoveToOffsetAction(jsonMouse, null, xOffset, yOffset)); // depends on control dependency: [if], data = [none] action.addAction(new ButtonReleaseAction(jsonMouse, null)); // depends on control dependency: [if], data = [none] } return moveInTicks(source, 0, 0) .tick(defaultMouse.createPointerDown(LEFT.asArg())) .tick(defaultMouse.createPointerMove(Duration.ofMillis(250), Origin.pointer(), xOffset, yOffset)) .tick(defaultMouse.createPointerUp(LEFT.asArg())); } }
public class class_name { public static <K, V> OMVRBTreeEntry<K, V> predecessor(final OMVRBTreeEntry<K, V> t) { if (t == null) return null; else if (t.getLeft() != null) { OMVRBTreeEntry<K, V> p = t.getLeft(); while (p.getRight() != null) p = p.getRight(); return p; } else { OMVRBTreeEntry<K, V> p = t.getParent(); Entry<K, V> ch = t; while (p != null && ch == p.getLeft()) { ch = p; p = p.getParent(); } return p; } } }
public class class_name { public static <K, V> OMVRBTreeEntry<K, V> predecessor(final OMVRBTreeEntry<K, V> t) { if (t == null) return null; else if (t.getLeft() != null) { OMVRBTreeEntry<K, V> p = t.getLeft(); while (p.getRight() != null) p = p.getRight(); return p; // depends on control dependency: [if], data = [none] } else { OMVRBTreeEntry<K, V> p = t.getParent(); Entry<K, V> ch = t; while (p != null && ch == p.getLeft()) { ch = p; // depends on control dependency: [while], data = [none] p = p.getParent(); // depends on control dependency: [while], data = [none] } return p; // depends on control dependency: [if], data = [none] } } }
public class class_name { private static boolean isClinitMethod(Node fnNode) { if (!fnNode.isFunction()) { return false; } String fnName = NodeUtil.getName(fnNode); return fnName != null && isClinitMethodName(fnName); } }
public class class_name { private static boolean isClinitMethod(Node fnNode) { if (!fnNode.isFunction()) { return false; // depends on control dependency: [if], data = [none] } String fnName = NodeUtil.getName(fnNode); return fnName != null && isClinitMethodName(fnName); } }
public class class_name { public InputStream fetchExternalContent() { final URI uri = link.getUri(); final String scheme = uri.getScheme(); LOGGER.debug("scheme is {}", scheme); if (scheme != null) { try { if (scheme.equals("file")) { return new FileInputStream(uri.getPath()); } else if (scheme.equals("http") || scheme.equals("https")) { return uri.toURL().openStream(); } } catch (final IOException e) { throw new ExternalContentAccessException("Failed to read external content from " + uri, e); } } return null; } }
public class class_name { public InputStream fetchExternalContent() { final URI uri = link.getUri(); final String scheme = uri.getScheme(); LOGGER.debug("scheme is {}", scheme); if (scheme != null) { try { if (scheme.equals("file")) { return new FileInputStream(uri.getPath()); // depends on control dependency: [if], data = [none] } else if (scheme.equals("http") || scheme.equals("https")) { return uri.toURL().openStream(); // depends on control dependency: [if], data = [none] } } catch (final IOException e) { throw new ExternalContentAccessException("Failed to read external content from " + uri, e); } // depends on control dependency: [catch], data = [none] } return null; } }
public class class_name { public static Map<String, Object> computePatch(Object patched, Object original) { FieldsMask fieldsMask = new FieldsMask(); ArrayMap<String, Object> result = computePatchInternal(fieldsMask, patched, original); if (fieldsMask.numDifferences != 0) { result.put("@gd:fields", fieldsMask.buf.toString()); } return result; } }
public class class_name { public static Map<String, Object> computePatch(Object patched, Object original) { FieldsMask fieldsMask = new FieldsMask(); ArrayMap<String, Object> result = computePatchInternal(fieldsMask, patched, original); if (fieldsMask.numDifferences != 0) { result.put("@gd:fields", fieldsMask.buf.toString()); // depends on control dependency: [if], data = [none] } return result; } }
public class class_name { public static StringTemplateGroup readStringTemplateGroup(InputStream stream) { try { return new StringTemplateGroup( new InputStreamReader(stream, "UTF-8"), DefaultTemplateLexer.class, new StringTemplateErrorListener() { @SuppressWarnings("synthetic-access") public void error(String arg0, Throwable arg1) { LOG.error(arg0 + ": " + arg1.getMessage(), arg1); } @SuppressWarnings("synthetic-access") public void warning(String arg0) { LOG.warn(arg0); } }); } catch (Exception e) { LOG.error(e.getLocalizedMessage(), e); return new StringTemplateGroup("dummy"); } } }
public class class_name { public static StringTemplateGroup readStringTemplateGroup(InputStream stream) { try { return new StringTemplateGroup( new InputStreamReader(stream, "UTF-8"), DefaultTemplateLexer.class, new StringTemplateErrorListener() { @SuppressWarnings("synthetic-access") public void error(String arg0, Throwable arg1) { LOG.error(arg0 + ": " + arg1.getMessage(), arg1); } @SuppressWarnings("synthetic-access") public void warning(String arg0) { LOG.warn(arg0); } }); // depends on control dependency: [try], data = [none] } catch (Exception e) { LOG.error(e.getLocalizedMessage(), e); return new StringTemplateGroup("dummy"); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @SuppressWarnings("unchecked") public <A extends SimpleAggregator> A createSimple(String name, Class<? extends Aggregator> klass) { Utils.checkState(!started, "Already started"); try { A aggregator = (A) CONSTRUCTORS.get(klass).newInstance(name); dataProvider.addAggregator(aggregator); aggregator.setDataProvider(dataProvider); return aggregator; } catch (Exception ex) { throw new RuntimeException(ex); } } }
public class class_name { @SuppressWarnings("unchecked") public <A extends SimpleAggregator> A createSimple(String name, Class<? extends Aggregator> klass) { Utils.checkState(!started, "Already started"); try { A aggregator = (A) CONSTRUCTORS.get(klass).newInstance(name); dataProvider.addAggregator(aggregator); // depends on control dependency: [try], data = [none] aggregator.setDataProvider(dataProvider); // depends on control dependency: [try], data = [none] return aggregator; // depends on control dependency: [try], data = [none] } catch (Exception ex) { throw new RuntimeException(ex); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static File writeFile(String filename, String content) { PrintWriter pw = null; try { pw = new PrintWriter(filename, FILE_ENCODING); pw.write(content); pw.flush(); } catch (FileNotFoundException e) { throw new IllegalArgumentException("Unable to write to: " + filename, e); } catch (UnsupportedEncodingException e) { throw new RuntimeException(e); } finally { if (pw != null) { pw.close(); } } return new File(filename); } }
public class class_name { public static File writeFile(String filename, String content) { PrintWriter pw = null; try { pw = new PrintWriter(filename, FILE_ENCODING); // depends on control dependency: [try], data = [none] pw.write(content); // depends on control dependency: [try], data = [none] pw.flush(); // depends on control dependency: [try], data = [none] } catch (FileNotFoundException e) { throw new IllegalArgumentException("Unable to write to: " + filename, e); } catch (UnsupportedEncodingException e) { // depends on control dependency: [catch], data = [none] throw new RuntimeException(e); } finally { // depends on control dependency: [catch], data = [none] if (pw != null) { pw.close(); // depends on control dependency: [if], data = [none] } } return new File(filename); } }
public class class_name { public static DocumentBuilder getDocumentBuilder() { DocumentBuilder builder; try { builder = factory.newDocumentBuilder(); } catch (final ParserConfigurationException e) { throw new RuntimeException(e); } if (Configuration.DEBUG) { builder = new DebugDocumentBuilder(builder); } return builder; } }
public class class_name { public static DocumentBuilder getDocumentBuilder() { DocumentBuilder builder; try { builder = factory.newDocumentBuilder(); // depends on control dependency: [try], data = [none] } catch (final ParserConfigurationException e) { throw new RuntimeException(e); } // depends on control dependency: [catch], data = [none] if (Configuration.DEBUG) { builder = new DebugDocumentBuilder(builder); // depends on control dependency: [if], data = [none] } return builder; } }
public class class_name { public static boolean hasNumber(final JSONObject json, final String key, final boolean coerce) { if (!coerce) { return hasNumber(json, key); } final Object o = json.opt(key); if (o == null || o == JSONObject.NULL) { return false; } if (o instanceof Number) { return true; } if (o instanceof Boolean) { return true; } if (o instanceof String) { final String s = (String) o; try { Double.valueOf(s); return true; } catch (NumberFormatException e) { Log.e(Log.SUBSYSTEM.JSON, TAG, e, "hasNumber(): failed to coerce value at '%s' (%s)", key, o); } } return false; } }
public class class_name { public static boolean hasNumber(final JSONObject json, final String key, final boolean coerce) { if (!coerce) { return hasNumber(json, key); // depends on control dependency: [if], data = [none] } final Object o = json.opt(key); if (o == null || o == JSONObject.NULL) { return false; // depends on control dependency: [if], data = [none] } if (o instanceof Number) { return true; // depends on control dependency: [if], data = [none] } if (o instanceof Boolean) { return true; // depends on control dependency: [if], data = [none] } if (o instanceof String) { final String s = (String) o; try { Double.valueOf(s); // depends on control dependency: [try], data = [none] return true; // depends on control dependency: [try], data = [none] } catch (NumberFormatException e) { Log.e(Log.SUBSYSTEM.JSON, TAG, e, "hasNumber(): failed to coerce value at '%s' (%s)", key, o); } // depends on control dependency: [catch], data = [none] } return false; } }
public class class_name { public boolean hasEvents(final EventType _eventtype) { if (!this.eventChecked) { this.eventChecked = true; try { EventDefinition.addEvents(this); } catch (final EFapsException e) { AbstractAdminObject.LOG.error("Could not read events for Name:; {}', UUID: {}", this.name, this.uuid); } } return this.events.get(_eventtype) != null; } }
public class class_name { public boolean hasEvents(final EventType _eventtype) { if (!this.eventChecked) { this.eventChecked = true; // depends on control dependency: [if], data = [none] try { EventDefinition.addEvents(this); // depends on control dependency: [try], data = [none] } catch (final EFapsException e) { AbstractAdminObject.LOG.error("Could not read events for Name:; {}', UUID: {}", this.name, this.uuid); } // depends on control dependency: [catch], data = [none] } return this.events.get(_eventtype) != null; } }
public class class_name { public void reload() { try { if (_toplevel.specifics != null) _toplevel.specifics.clear(); _toplevel.global = null; for (T object: _toplevel.retriever.call()) { _toplevel.insert(object, _toplevel.coordinates, 0); } if (_toplevel.global == null) throw new IllegalStateException("***GlobalDataObjectMissing"); _toplevel.spread(); } catch (Exception x) { throw new RuntimeException(x.getMessage(), x); } } }
public class class_name { public void reload() { try { if (_toplevel.specifics != null) _toplevel.specifics.clear(); _toplevel.global = null; // depends on control dependency: [try], data = [none] for (T object: _toplevel.retriever.call()) { _toplevel.insert(object, _toplevel.coordinates, 0); // depends on control dependency: [for], data = [object] } if (_toplevel.global == null) throw new IllegalStateException("***GlobalDataObjectMissing"); _toplevel.spread(); // depends on control dependency: [try], data = [none] } catch (Exception x) { throw new RuntimeException(x.getMessage(), x); } // depends on control dependency: [catch], data = [none] } }
public class class_name { synchronized boolean deleteMessageForId(String messageId, String userId){ if(messageId == null || userId == null) return false; final String tName = Table.INBOX_MESSAGES.getName(); try { final SQLiteDatabase db = dbHelper.getWritableDatabase(); db.delete(tName, _ID + " = ? AND " + USER_ID + " = ?", new String[]{messageId,userId}); return true; } catch (final SQLiteException e) { getConfigLogger().verbose("Error removing stale records from " + tName, e); return false; } finally { dbHelper.close(); } } }
public class class_name { synchronized boolean deleteMessageForId(String messageId, String userId){ if(messageId == null || userId == null) return false; final String tName = Table.INBOX_MESSAGES.getName(); try { final SQLiteDatabase db = dbHelper.getWritableDatabase(); db.delete(tName, _ID + " = ? AND " + USER_ID + " = ?", new String[]{messageId,userId}); // depends on control dependency: [try], data = [none] return true; // depends on control dependency: [try], data = [none] } catch (final SQLiteException e) { getConfigLogger().verbose("Error removing stale records from " + tName, e); return false; } finally { // depends on control dependency: [catch], data = [none] dbHelper.close(); } } }
public class class_name { @Override public boolean isReadOnly(ELContext context, Object base, Object property) { context.setPropertyResolved(false); for (int i = 0, l = resolvers.size(); i < l; i++) { boolean readOnly = resolvers.get(i).isReadOnly(context, base, property); if (context.isPropertyResolved()) { return readOnly; } } return false; } }
public class class_name { @Override public boolean isReadOnly(ELContext context, Object base, Object property) { context.setPropertyResolved(false); for (int i = 0, l = resolvers.size(); i < l; i++) { boolean readOnly = resolvers.get(i).isReadOnly(context, base, property); if (context.isPropertyResolved()) { return readOnly; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name {
    /**
     * Forces node x into the lower-bound graph of the domain.
     *
     * @return true when the node was newly added (propagators are notified),
     *         false when it was already enforced
     * @throws ContradictionException when x is not in the upper-bound domain
     */
    public boolean enforceNode(int x, ICause cause) throws ContradictionException {
        assert cause != null;
        assert (x >= 0 && x < n);
        // A node outside the upper bound can never be enforced.
        if (!UB.getNodes().contains(x)) {
            this.contradiction(cause, "enforce node which is not in the domain");
            return true;
        }
        // Already present in the lower bound: nothing changed.
        if (!LB.addNode(x)) {
            return false;
        }
        if (reactOnModification) {
            delta.add(x, GraphDelta.NE, cause);
        }
        notifyPropagators(GraphEventType.ADD_NODE, cause);
        return true;
    }
}
public class class_name {
    // Forces node x into the lower-bound graph. Returns true when the node was
    // newly added (propagators notified), false when it was already enforced;
    // raises a contradiction when x is not in the upper-bound domain.
    public boolean enforceNode(int x, ICause cause) throws ContradictionException {
        assert cause != null;
        assert (x >= 0 && x < n);
        if (UB.getNodes().contains(x)) {
            if (LB.addNode(x)) {
                if (reactOnModification) {
                    delta.add(x, GraphDelta.NE, cause); // depends on control dependency: [if], data = [none]
                }
                GraphEventType e = GraphEventType.ADD_NODE;
                notifyPropagators(e, cause); // depends on control dependency: [if], data = [none]
                return true; // depends on control dependency: [if], data = [none]
            }
            return false;
        }
        this.contradiction(cause, "enforce node which is not in the domain");
        return true;
    }
}
public class class_name {
    /**
     * Synchronously receives a JMS message from the given destination,
     * optionally filtered by a JMS message selector.
     *
     * @param destination the JMS destination to consume from
     * @param selector optional JMS selector expression; ignored when blank
     * @return the received message, never null
     * @throws ActionTimeoutException when the underlying JmsTemplate returned
     *         null, i.e. no message arrived within its receive timeout
     */
    private javax.jms.Message receive(Destination destination, String selector) {
        // Build the "destination(selector)" label once instead of three times.
        final String destinationDescription = destinationDescription(destination, selector);
        if (log.isDebugEnabled()) {
            log.debug("Receiving JMS message on destination: '" + destinationDescription + "'");
        }
        javax.jms.Message receivedJmsMessage;
        if (StringUtils.hasText(selector)) {
            receivedJmsMessage = endpointConfiguration.getJmsTemplate().receiveSelected(destination, selector);
        } else {
            receivedJmsMessage = endpointConfiguration.getJmsTemplate().receive(destination);
        }
        if (receivedJmsMessage == null) {
            throw new ActionTimeoutException("Action timed out while receiving JMS message on '" + destinationDescription + "'");
        }
        log.info("Received JMS message on destination: '" + destinationDescription + "'");
        return receivedJmsMessage;
    }

    /** Human-readable destination label with the selector appended in parentheses when present. */
    private String destinationDescription(Destination destination, String selector) {
        return endpointConfiguration.getDestinationName(destination)
                + (StringUtils.hasText(selector) ? "(" + selector + ")" : "");
    }
}
public class class_name {
    // Blocking JMS receive on the given destination, optionally filtered by a
    // selector; throws ActionTimeoutException when the template returns null.
    private javax.jms.Message receive(Destination destination, String selector) {
        javax.jms.Message receivedJmsMessage;
        if (log.isDebugEnabled()) {
            log.debug("Receiving JMS message on destination: '" + endpointConfiguration.getDestinationName(destination) + (StringUtils.hasText(selector) ? "(" + selector + ")" : "") + "'"); // depends on control dependency: [if], data = [none]
        }
        if (StringUtils.hasText(selector)) {
            receivedJmsMessage = endpointConfiguration.getJmsTemplate().receiveSelected(destination, selector); // depends on control dependency: [if], data = [none]
        } else {
            receivedJmsMessage = endpointConfiguration.getJmsTemplate().receive(destination); // depends on control dependency: [if], data = [none]
        }
        if (receivedJmsMessage == null) {
            throw new ActionTimeoutException("Action timed out while receiving JMS message on '" + endpointConfiguration.getDestinationName(destination) + (StringUtils.hasText(selector) ? "(" + selector + ")" : "") + "'");
        }
        log.info("Received JMS message on destination: '" + endpointConfiguration.getDestinationName(destination) + (StringUtils.hasText(selector) ? "(" + selector + ")" : "") + "'");
        return receivedJmsMessage;
    }
}
public class class_name { @Override public void write(byte[] bytes, int off, int len) throws IOException { // if httpResponse.setContentType(x) has been called with !x.contains("text/html"), // then no need to continue scanning for the beforeTag if (injectionCanceled) { super.write(bytes, off, len); } else { final int index = indexOf(bytes, beforeTag, off, len); if (index == -1) { // beforeTag not found yet super.write(bytes, off, len); } else { // beforeTag found: inject content. super.write(bytes, off, index); final String content = htmlToInject.getContent(); // HttpServletResponse.getCharacterEncoding() shouldn't return null according the spec super.write(content.getBytes(response.getCharacterEncoding())); super.write(bytes, off + index, len - index); } } } }
public class class_name {
    // Streams response bytes through, injecting htmlToInject just before the
    // first occurrence of beforeTag; plain pass-through once injection has
    // been canceled for non-HTML responses.
    @Override
    public void write(byte[] bytes, int off, int len) throws IOException {
        // if httpResponse.setContentType(x) has been called with !x.contains("text/html"),
        // then no need to continue scanning for the beforeTag
        if (injectionCanceled) {
            super.write(bytes, off, len);
        } else {
            final int index = indexOf(bytes, beforeTag, off, len);
            if (index == -1) {
                // beforeTag not found yet
                super.write(bytes, off, len); // depends on control dependency: [if], data = [none]
            } else {
                // beforeTag found: inject content.
                super.write(bytes, off, index); // depends on control dependency: [if], data = [none]
                final String content = htmlToInject.getContent();
                // HttpServletResponse.getCharacterEncoding() shouldn't return null according the spec
                super.write(content.getBytes(response.getCharacterEncoding())); // depends on control dependency: [if], data = [none]
                super.write(bytes, off + index, len - index); // depends on control dependency: [if], data = [none]
            }
        }
    }
}
public class class_name {
    /**
     * Idempotent close: optionally closes the wrapped stream, finalizes the
     * cipher when the input was not fully consumed, and resets the output
     * buffer cursors. Padding/block-size failures surface as CipherException.
     */
    @Override
    public void close() throws IOException {
        if (closed) {
            return; // subsequent calls are no-ops
        }
        closed = true;
        if (closeInput) {
            super.close();
        }
        if (!noMoreInput) {
            try {
                cipher.doFinal();
            } catch (IllegalBlockSizeException | BadPaddingException e) {
                throw new CipherException(e);
            }
        }
        ostart = 0;
        ofinish = 0;
    }
}
public class class_name {
    // Idempotent close: optionally closes the underlying stream, finalizes the
    // cipher when input was not exhausted, and resets the buffer cursors.
    @Override
    public void close() throws IOException {
        if (!closed) {
            closed = true;
            if (closeInput) {
                super.close();
            }
            if (!noMoreInput) {
                try {
                    cipher.doFinal(); // depends on control dependency: [try], data = [none]
                } catch (IllegalBlockSizeException | BadPaddingException e) {
                    throw new CipherException(e);
                } // depends on control dependency: [catch], data = [none]
            }
            ostart = ofinish = 0;
        }
    }
}
public class class_name { private static double score1static(Chunk[] chks, int treeIdx, double offset, double[] fs, int row, Distribution dist, int nClasses) { double f = chks[treeIdx].atd(row) + offset; double p = dist.linkInv(f); if (dist.distribution == DistributionFamily.modified_huber || dist.distribution == DistributionFamily.bernoulli || dist.distribution == DistributionFamily.quasibinomial) { fs[2] = p; fs[1] = 1.0 - p; return 1; // f2 = 1.0 - f1; so f1+f2 = 1.0 } else if (dist.distribution == DistributionFamily.multinomial) { if (nClasses == 2) { // This optimization assumes the 2nd tree of a 2-class system is the // inverse of the first. Fill in the missing tree fs[1] = p; fs[2] = 1 / p; return fs[1] + fs[2]; } // Multinomial loss function; sum(exp(data)). Load tree data assert (offset == 0); fs[1] = f; for (int k = 1; k < nClasses; k++) fs[k + 1] = chks[treeIdx + k].atd(row); // Rescale to avoid Infinities; return sum(exp(data)) return hex.genmodel.GenModel.log_rescale(fs); } else { return fs[0] = p; } } }
public class class_name {
    // Computes per-class scores for one row from tree predictions, writing
    // into fs and returning the distribution-dependent normalization value.
    private static double score1static(Chunk[] chks, int treeIdx, double offset, double[] fs, int row, Distribution dist, int nClasses) {
        double f = chks[treeIdx].atd(row) + offset;
        double p = dist.linkInv(f);
        if (dist.distribution == DistributionFamily.modified_huber
                || dist.distribution == DistributionFamily.bernoulli
                || dist.distribution == DistributionFamily.quasibinomial) {
            fs[2] = p; // depends on control dependency: [if], data = [none]
            fs[1] = 1.0 - p; // depends on control dependency: [if], data = [none]
            return 1; // f2 = 1.0 - f1; so f1+f2 = 1.0 // depends on control dependency: [if], data = [none]
        } else if (dist.distribution == DistributionFamily.multinomial) {
            if (nClasses == 2) {
                // This optimization assumes the 2nd tree of a 2-class system is the
                // inverse of the first. Fill in the missing tree
                fs[1] = p; // depends on control dependency: [if], data = [none]
                fs[2] = 1 / p; // depends on control dependency: [if], data = [none]
                return fs[1] + fs[2]; // depends on control dependency: [if], data = [none]
            }
            // Multinomial loss function; sum(exp(data)). Load tree data
            assert (offset == 0); // depends on control dependency: [if], data = [none]
            fs[1] = f; // depends on control dependency: [if], data = [none]
            for (int k = 1; k < nClasses; k++) fs[k + 1] = chks[treeIdx + k].atd(row);
            // Rescale to avoid Infinities; return sum(exp(data))
            return hex.genmodel.GenModel.log_rescale(fs); // depends on control dependency: [if], data = [none]
        } else {
            return fs[0] = p; // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Initializes the cache, deriving the default TTL from the configured
     * expiry settings and then applying the custom cache properties
     * key-mode, ttl-seconds and redis-password, each falling back to its
     * previous value when missing or unparsable.
     *
     * @return this instance, for chaining
     */
    @Override
    public BaseRedisCache init() {
        super.init();
        // Access-based expiry wins over write-based when both are configured;
        // -1 means "no TTL".
        final long afterWrite = getExpireAfterWrite();
        final long afterAccess = getExpireAfterAccess();
        if (afterAccess > 0 || afterWrite > 0) {
            timeToLiveSeconds = afterAccess > 0 ? afterAccess : afterWrite;
        } else {
            timeToLiveSeconds = -1;
        }

        // Custom property "key-mode": any failure (including a missing
        // property, which NPEs inside toUpperCase) restores the old value;
        // only genuinely bad values are logged.
        final KeyMode previousKeyMode = this.keyMode;
        try {
            this.keyMode = KeyMode.valueOf(getCacheProperty(CACHE_PROP_KEY_MODE).toUpperCase());
        } catch (Exception e) {
            this.keyMode = previousKeyMode;
            if (getCacheProperty(CACHE_PROP_KEY_MODE) != null) {
                LOGGER.warn(e.getMessage(), e);
            }
        }

        // Custom property "ttl-seconds": same restore-on-failure pattern.
        final long previousTtl = this.timeToLiveSeconds;
        try {
            this.timeToLiveSeconds = Long.parseLong(getCacheProperty(CACHE_PROP_TTL_SECONDS));
        } catch (Exception e) {
            this.timeToLiveSeconds = previousTtl;
            if (getCacheProperty(CACHE_PROP_TTL_SECONDS) != null) {
                LOGGER.warn(e.getMessage(), e);
            }
        }

        // Custom property "redis-password": only override when non-blank.
        final String password = getCacheProperty(CACHE_PROP_REDIS_PASSWORD);
        if (!StringUtils.isBlank(password)) {
            this.redisPassword = password;
        }
        return this;
    }
}
public class class_name {
    // Initializes the cache: derives the default TTL from the expiry settings,
    // then applies custom properties key-mode, ttl-seconds and redis-password,
    // each falling back to its previous value on a missing/unparsable entry.
    @Override
    public BaseRedisCache init() {
        super.init();
        long expireAfterWrite = getExpireAfterWrite();
        long expireAfterAccess = getExpireAfterAccess();
        if (expireAfterAccess > 0 || expireAfterWrite > 0) {
            timeToLiveSeconds = expireAfterAccess > 0 ? expireAfterAccess : expireAfterWrite; // depends on control dependency: [if], data = [none]
        } else {
            timeToLiveSeconds = -1; // depends on control dependency: [if], data = [none]
        }
        /*
         * Parse custom property: key-mode
         */
        KeyMode oldKeyMode = this.keyMode;
        try {
            this.keyMode = KeyMode.valueOf(getCacheProperty(CACHE_PROP_KEY_MODE).toUpperCase()); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            this.keyMode = oldKeyMode;
            if (getCacheProperty(CACHE_PROP_KEY_MODE) != null) {
                LOGGER.warn(e.getMessage(), e); // depends on control dependency: [if], data = [none]
            }
        } // depends on control dependency: [catch], data = [none]
        /*
         * Parse custom property: ttl-seconds
         */
        long oldTTL = this.timeToLiveSeconds;
        try {
            this.timeToLiveSeconds = Long.parseLong(getCacheProperty(CACHE_PROP_TTL_SECONDS)); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            this.timeToLiveSeconds = oldTTL;
            if (getCacheProperty(CACHE_PROP_TTL_SECONDS) != null) {
                LOGGER.warn(e.getMessage(), e); // depends on control dependency: [if], data = [none]
            }
        } // depends on control dependency: [catch], data = [none]
        /*
         * Parse custom property: redis-password
         */
        String password = getCacheProperty(CACHE_PROP_REDIS_PASSWORD);
        if (!StringUtils.isBlank(password)) {
            this.redisPassword = password; // depends on control dependency: [if], data = [none]
        }
        return this;
    }
}
public class class_name { public int[] buildSuffixArray(T[] tokens) { final int length = tokens.length; /* * Allocate slightly more space, some suffix construction strategies need it and * we don't want to waste space for multiple symbol mappings. */ input = new int[length + SuffixArrays.MAX_EXTRA_TRAILING_SPACE]; //System.out.println("Assigning token ids ..."); /* * We associate every token to an id, all `equal´ tokens to the same id. * The suffix array is built using only the the ids. */ tokIDs = new TreeMap<>(comparator); for (int i = 0; i < length; i++) { tokIDs.putIfAbsent(tokens[i], i); input[i] = tokIDs.get(tokens[i]); } //System.out.println("Token ids assigned."); return delegate.buildSuffixArray(input, 0, length); } }
public class class_name {
    // Builds a suffix array over token ids: each distinct token (per the
    // comparator) maps to the index of its first occurrence, and the delegate
    // algorithm runs on that integer sequence.
    public int[] buildSuffixArray(T[] tokens) {
        final int length = tokens.length;
        /*
         * Allocate slightly more space, some suffix construction strategies need it and
         * we don't want to waste space for multiple symbol mappings.
         */
        input = new int[length + SuffixArrays.MAX_EXTRA_TRAILING_SPACE];
        //System.out.println("Assigning token ids ...");
        /*
         * We associate every token to an id, all `equal´ tokens to the same id.
         * The suffix array is built using only the the ids.
         */
        tokIDs = new TreeMap<>(comparator);
        for (int i = 0; i < length; i++) {
            tokIDs.putIfAbsent(tokens[i], i); // depends on control dependency: [for], data = [i]
            input[i] = tokIDs.get(tokens[i]); // depends on control dependency: [for], data = [i]
        }
        //System.out.println("Token ids assigned.");
        return delegate.buildSuffixArray(input, 0, length);
    }
}
public class class_name { public static int gcd(int a, int b) { /* * The reason we require both arguments to be >= 0 is because otherwise, what do you return on * gcd(0, Integer.MIN_VALUE)? BigInteger.gcd would return positive 2^31, but positive 2^31 * isn't an int. */ checkNonNegative("a", a); checkNonNegative("b", b); if (a == 0) { // 0 % b == 0, so b divides a, but the converse doesn't hold. // BigInteger.gcd is consistent with this decision. return b; } else if (b == 0) { return a; // similar logic } /* * Uses the binary GCD algorithm; see http://en.wikipedia.org/wiki/Binary_GCD_algorithm. * This is >40% faster than the Euclidean algorithm in benchmarks. */ int aTwos = Integer.numberOfTrailingZeros(a); a >>= aTwos; // divide out all 2s int bTwos = Integer.numberOfTrailingZeros(b); b >>= bTwos; // divide out all 2s while (a != b) { // both a, b are odd // The key to the binary GCD algorithm is as follows: // Both a and b are odd. Assume a > b; then gcd(a - b, b) = gcd(a, b). // But in gcd(a - b, b), a - b is even and b is odd, so we can divide out powers of two. // We bend over backwards to avoid branching, adapting a technique from // http://graphics.stanford.edu/~seander/bithacks.html#IntegerMinOrMax int delta = a - b; // can't overflow, since a and b are nonnegative int minDeltaOrZero = delta & (delta >> (Integer.SIZE - 1)); // equivalent to Math.min(delta, 0) a = delta - minDeltaOrZero - minDeltaOrZero; // sets a to Math.abs(a - b) // a is now nonnegative and even b += minDeltaOrZero; // sets b to min(old a, b) a >>= Integer.numberOfTrailingZeros(a); // divide out all 2s, since 2 doesn't divide b } return a << min(aTwos, bTwos); } }
public class class_name {
    // Greatest common divisor of two nonnegative ints using the branchless
    // binary (Stein) GCD algorithm.
    public static int gcd(int a, int b) {
        /*
         * The reason we require both arguments to be >= 0 is because otherwise, what do you return on
         * gcd(0, Integer.MIN_VALUE)? BigInteger.gcd would return positive 2^31, but positive 2^31
         * isn't an int.
         */
        checkNonNegative("a", a);
        checkNonNegative("b", b);
        if (a == 0) {
            // 0 % b == 0, so b divides a, but the converse doesn't hold.
            // BigInteger.gcd is consistent with this decision.
            return b; // depends on control dependency: [if], data = [none]
        } else if (b == 0) {
            return a; // similar logic // depends on control dependency: [if], data = [none]
        }
        /*
         * Uses the binary GCD algorithm; see http://en.wikipedia.org/wiki/Binary_GCD_algorithm.
         * This is >40% faster than the Euclidean algorithm in benchmarks.
         */
        int aTwos = Integer.numberOfTrailingZeros(a);
        a >>= aTwos; // divide out all 2s
        int bTwos = Integer.numberOfTrailingZeros(b);
        b >>= bTwos; // divide out all 2s
        while (a != b) { // both a, b are odd
            // The key to the binary GCD algorithm is as follows:
            // Both a and b are odd. Assume a > b; then gcd(a - b, b) = gcd(a, b).
            // But in gcd(a - b, b), a - b is even and b is odd, so we can divide out powers of two.
            // We bend over backwards to avoid branching, adapting a technique from
            // http://graphics.stanford.edu/~seander/bithacks.html#IntegerMinOrMax
            int delta = a - b; // can't overflow, since a and b are nonnegative
            int minDeltaOrZero = delta & (delta >> (Integer.SIZE - 1)); // equivalent to Math.min(delta, 0)
            a = delta - minDeltaOrZero - minDeltaOrZero; // sets a to Math.abs(a - b) // depends on control dependency: [while], data = [none]
            // a is now nonnegative and even
            b += minDeltaOrZero; // sets b to min(old a, b) // depends on control dependency: [while], data = [none]
            a >>= Integer.numberOfTrailingZeros(a); // divide out all 2s, since 2 doesn't divide b // depends on control dependency: [while], data = [(a]
        }
        return a << min(aTwos, bTwos);
    }
}
public class class_name {
    /**
     * One belief-propagation message pass from {@code startFactor} to
     * {@code destFactor} in the clique tree. Folds any not-yet-merged
     * neighbor messages into startFactor's marginal, then marginalizes
     * (or max-marginalizes) down to the variables shared with destFactor
     * and stores the result as the outgoing message.
     *
     * @param useSumProduct true for sum-product, false for max-product
     */
    private void passMessage(CliqueTree cliqueTree, int startFactor, int destFactor, boolean useSumProduct) {
        VariableNumMap sharedVars = cliqueTree.getFactor(startFactor).getVars().intersection(cliqueTree.getFactor(destFactor).getVars());
        // Find the factors which have yet to be merged into the marginal
        // distribution of factor, but are necessary for computing the
        // specified message.
        Set<Integer> factorIndicesToCombine = Sets.newHashSet(cliqueTree.getNeighboringFactors(startFactor));
        factorIndicesToCombine.removeAll(cliqueTree.getFactorsInMarginal(startFactor));
        // If this is the upstream round of message passing, we might not have
        // received a message from destFactor yet. However, if we have received the
        // message, we should include it in the product as it will increase sparsity
        // and thereby improve efficiency.
        if (cliqueTree.getMessage(destFactor, startFactor) == null) {
            factorIndicesToCombine.remove(destFactor);
        }
        List<Factor> factorsToCombine = new ArrayList<Factor>();
        for (Integer adjacentFactorNum : factorIndicesToCombine) {
            factorsToCombine.add(cliqueTree.getMessage(adjacentFactorNum, startFactor));
        }
        // Update the marginal distribution of startFactor in the clique tree.
        Factor updatedMarginal = cliqueTree.getMarginal(startFactor).product(factorsToCombine);
        if (cliqueTree.getOutboundFactors(startFactor).size() == 0) {
            // If this factor has yet to send any outbound messages, we can
            // use it to renormalize the probability distribution to avoid
            // possible numerical overflow issues. Updating the marginal
            // at this point is equivalent to multiplying the original factor
            // by some constant value, which doesn't affect the probability
            // distribution.
            if (renormalize) {
                updatedMarginal = updatedMarginal.product(1.0 / updatedMarginal.getTotalUnnormalizedProbability());
            }
            // Can also prune this marginal if a pruning strategy was provided.
            if (pruningStrategy != null) {
                updatedMarginal = pruningStrategy.apply(updatedMarginal);
            }
        }
        cliqueTree.setMarginal(startFactor, updatedMarginal);
        cliqueTree.addFactorsToMarginal(startFactor, factorIndicesToCombine);
        // The message from startFactor to destFactor is the marginal of
        // productFactor, divided by the message from destFactor to
        // startFactor, if it exists.
        Factor messageFactor = null;
        if (useSumProduct) {
            messageFactor = updatedMarginal.marginalize(updatedMarginal.getVars().removeAll(sharedVars).getVariableNums());
        } else {
            messageFactor = updatedMarginal.maxMarginalize(updatedMarginal.getVars().removeAll(sharedVars).getVariableNums());
        }
        // Divide out the destFactor -> startFactor message if necessary.
        if (cliqueTree.getFactorsInMarginal(startFactor).contains(destFactor)) {
            messageFactor = messageFactor.product(cliqueTree.getMessage(destFactor, startFactor).inverse());
        }
        cliqueTree.addMessage(startFactor, destFactor, messageFactor);
    }
}
public class class_name {
    /**
     * One belief-propagation message pass from {@code startFactor} to
     * {@code destFactor} in the clique tree (sum-product or max-product
     * depending on {@code useSumProduct}). Inline annotations mark each
     * statement's control dependence.
     */
    private void passMessage(CliqueTree cliqueTree, int startFactor, int destFactor, boolean useSumProduct) {
        VariableNumMap sharedVars = cliqueTree.getFactor(startFactor).getVars().intersection(cliqueTree.getFactor(destFactor).getVars());
        // Find the factors which have yet to be merged into the marginal
        // distribution of factor, but are necessary for computing the
        // specified message.
        Set<Integer> factorIndicesToCombine = Sets.newHashSet(cliqueTree.getNeighboringFactors(startFactor));
        factorIndicesToCombine.removeAll(cliqueTree.getFactorsInMarginal(startFactor));
        // If this is the upstream round of message passing, we might not have
        // received a message from destFactor yet. However, if we have received the
        // message, we should include it in the product as it will increase sparsity
        // and thereby improve efficiency.
        if (cliqueTree.getMessage(destFactor, startFactor) == null) {
            factorIndicesToCombine.remove(destFactor); // depends on control dependency: [if], data = [none]
        }
        List<Factor> factorsToCombine = new ArrayList<Factor>();
        for (Integer adjacentFactorNum : factorIndicesToCombine) {
            factorsToCombine.add(cliqueTree.getMessage(adjacentFactorNum, startFactor)); // depends on control dependency: [for], data = [adjacentFactorNum]
        }
        // Update the marginal distribution of startFactor in the clique tree.
        Factor updatedMarginal = cliqueTree.getMarginal(startFactor).product(factorsToCombine);
        if (cliqueTree.getOutboundFactors(startFactor).size() == 0) {
            // If this factor has yet to send any outbound messages, we can
            // use it to renormalize the probability distribution to avoid
            // possible numerical overflow issues. Updating the marginal
            // at this point is equivalent to multiplying the original factor
            // by some constant value, which doesn't affect the probability
            // distribution.
            if (renormalize) {
                updatedMarginal = updatedMarginal.product(1.0 / updatedMarginal.getTotalUnnormalizedProbability()); // depends on control dependency: [if], data = [none]
            }
            // Can also prune this marginal if a pruning strategy was provided.
            if (pruningStrategy != null) {
                updatedMarginal = pruningStrategy.apply(updatedMarginal); // depends on control dependency: [if], data = [none]
            }
        }
        cliqueTree.setMarginal(startFactor, updatedMarginal);
        cliqueTree.addFactorsToMarginal(startFactor, factorIndicesToCombine);
        // The message from startFactor to destFactor is the marginal of
        // productFactor, divided by the message from destFactor to
        // startFactor, if it exists.
        Factor messageFactor = null;
        if (useSumProduct) {
            messageFactor = updatedMarginal.marginalize(updatedMarginal.getVars().removeAll(sharedVars).getVariableNums()); // depends on control dependency: [if], data = [none]
        } else {
            messageFactor = updatedMarginal.maxMarginalize(updatedMarginal.getVars().removeAll(sharedVars).getVariableNums()); // depends on control dependency: [if], data = [none]
        }
        // Divide out the destFactor -> startFactor message if necessary.
        if (cliqueTree.getFactorsInMarginal(startFactor).contains(destFactor)) {
            messageFactor = messageFactor.product(cliqueTree.getMessage(destFactor, startFactor).inverse()); // depends on control dependency: [if], data = [none]
        }
        cliqueTree.addMessage(startFactor, destFactor, messageFactor);
    }
}
public class class_name { public final void setOptimizationLevel(int optimizationLevel) { if (sealed) onSealedMutation(); if (optimizationLevel == -2) { // To be compatible with Cocoon fork optimizationLevel = -1; } checkOptimizationLevel(optimizationLevel); if (codegenClass == null) optimizationLevel = -1; this.optimizationLevel = optimizationLevel; } }
public class class_name {
    // Sets the optimization level, mapping the legacy Cocoon value (-2) to the
    // interpreter (-1) and forcing -1 when no code generator is available.
    public final void setOptimizationLevel(int optimizationLevel) {
        if (sealed) onSealedMutation();
        if (optimizationLevel == -2) {
            // To be compatible with Cocoon fork
            optimizationLevel = -1; // depends on control dependency: [if], data = [none]
        }
        checkOptimizationLevel(optimizationLevel);
        if (codegenClass == null) optimizationLevel = -1;
        this.optimizationLevel = optimizationLevel;
    }
}
public class class_name { public void executeSysProcPlanFragmentsAsync( SynthesizedPlanFragment pfs[]) { MpTransactionState txnState = (MpTransactionState)m_runner.getTxnState(); assert(txnState != null); int fragmentIndex = 0; for (SynthesizedPlanFragment pf : pfs) { assert (pf.parameters != null); FragmentTaskMessage task = FragmentTaskMessage.createWithOneFragment( txnState.initiatorHSId, m_site.getCorrespondingSiteId(), txnState.txnId, txnState.uniqueId, txnState.isReadOnly(), fragIdToHash(pf.fragmentId), pf.outputDepId, pf.parameters, false, txnState.isForReplay(), txnState.isNPartTxn(), txnState.getTimetamp()); //During @MigratePartitionLeader, a fragment may be mis-routed. fragmentIndex is used to check which fragment is mis-routed and //to determine how the follow-up fragments are processed. task.setBatch(fragmentIndex++); task.setFragmentTaskType(FragmentTaskMessage.SYS_PROC_PER_SITE); if (pf.multipartition) { // create a workunit for every execution site txnState.createAllParticipatingFragmentWork(task); } else { // create one workunit for the current site if (pf.siteId == -1) { txnState.createLocalFragmentWork(task, false); } else { txnState.createFragmentWork(new long[] { pf.siteId }, task); } } } } }
public class class_name {
    // Converts each synthesized system-procedure plan fragment into a
    // FragmentTaskMessage and schedules it as transaction work (all sites,
    // the local site, or one named site, depending on the fragment).
    public void executeSysProcPlanFragmentsAsync(SynthesizedPlanFragment pfs[]) {
        MpTransactionState txnState = (MpTransactionState)m_runner.getTxnState();
        assert(txnState != null);
        int fragmentIndex = 0;
        for (SynthesizedPlanFragment pf : pfs) {
            assert (pf.parameters != null); // depends on control dependency: [for], data = [pf]
            FragmentTaskMessage task = FragmentTaskMessage.createWithOneFragment(
                    txnState.initiatorHSId,
                    m_site.getCorrespondingSiteId(),
                    txnState.txnId,
                    txnState.uniqueId,
                    txnState.isReadOnly(),
                    fragIdToHash(pf.fragmentId),
                    pf.outputDepId,
                    pf.parameters,
                    false,
                    txnState.isForReplay(),
                    txnState.isNPartTxn(),
                    txnState.getTimetamp());
            //During @MigratePartitionLeader, a fragment may be mis-routed. fragmentIndex is used to check which fragment is mis-routed and
            //to determine how the follow-up fragments are processed.
            task.setBatch(fragmentIndex++); // depends on control dependency: [for], data = [none]
            task.setFragmentTaskType(FragmentTaskMessage.SYS_PROC_PER_SITE); // depends on control dependency: [for], data = [none]
            if (pf.multipartition) {
                // create a workunit for every execution site
                txnState.createAllParticipatingFragmentWork(task); // depends on control dependency: [if], data = [none]
            } else {
                // create one workunit for the current site
                if (pf.siteId == -1) {
                    txnState.createLocalFragmentWork(task, false); // depends on control dependency: [if], data = [none]
                } else {
                    txnState.createFragmentWork(new long[] { pf.siteId }, task); // depends on control dependency: [if], data = [none]
                }
            }
        }
    }
}
public class class_name {
    /**
     * Converts a raw duration value (in tenths of a minute) into a Duration
     * in the requested time unit, using the project's calendar settings
     * (minutes per day/week, days per month) for the non-elapsed units.
     *
     * @param properties project properties supplying calendar conversions
     * @param duration raw duration in tenths of a minute, or -1 for "not set"
     * @param timeUnit target unit of the result
     * @return the converted duration, or null when {@code duration} is -1
     */
    public static Duration getAdjustedDuration(ProjectProperties properties, int duration, TimeUnit timeUnit) {
        Duration result = null;
        if (duration != -1) {
            switch (timeUnit) {
                case DAYS: {
                    // Working-day length comes from the project calendar.
                    double unitsPerDay = properties.getMinutesPerDay().doubleValue() * 10d;
                    result = Duration.getInstance(safeRatio(duration, unitsPerDay), timeUnit);
                    break;
                }
                case ELAPSED_DAYS: {
                    // Elapsed days ignore the calendar: 24h * 60min * 10.
                    double unitsPerDay = 24d * 600d;
                    result = Duration.getInstance(safeRatio(duration, unitsPerDay), timeUnit);
                    break;
                }
                case WEEKS: {
                    double unitsPerWeek = properties.getMinutesPerWeek().doubleValue() * 10d;
                    result = Duration.getInstance(safeRatio(duration, unitsPerWeek), timeUnit);
                    break;
                }
                case ELAPSED_WEEKS: {
                    double unitsPerWeek = (60 * 24 * 7 * 10);
                    result = Duration.getInstance(safeRatio(duration, unitsPerWeek), timeUnit);
                    break;
                }
                case MONTHS: {
                    double unitsPerMonth = properties.getMinutesPerDay().doubleValue() * properties.getDaysPerMonth().doubleValue() * 10d;
                    result = Duration.getInstance(safeRatio(duration, unitsPerMonth), timeUnit);
                    break;
                }
                case ELAPSED_MONTHS: {
                    // NOTE(review): elapsed months assume a 30-day month.
                    double unitsPerMonth = (60 * 24 * 30 * 10);
                    result = Duration.getInstance(safeRatio(duration, unitsPerMonth), timeUnit);
                    break;
                }
                default: {
                    result = getDuration(duration, timeUnit);
                    break;
                }
            }
        }
        return (result);
    }

    /**
     * Divides {@code duration} by {@code unitsPer}, returning 0 when the
     * divisor is 0 (an unset calendar value) to avoid Infinity/NaN results.
     * This extends the zero-guard the original applied only to calendar-based
     * units; for the elapsed units the divisor is a nonzero constant, so
     * behavior there is unchanged.
     */
    private static double safeRatio(int duration, double unitsPer) {
        return unitsPer == 0 ? 0 : duration / unitsPer;
    }
}
public class class_name {
    // Converts a raw duration (tenths of a minute; -1 = not set, yields null)
    // into a Duration in the requested unit, using the project calendar for
    // the non-elapsed units and fixed constants for the elapsed ones.
    public static Duration getAdjustedDuration(ProjectProperties properties, int duration, TimeUnit timeUnit) {
        Duration result = null;
        if (duration != -1) {
            switch (timeUnit) {
                case DAYS: {
                    double unitsPerDay = properties.getMinutesPerDay().doubleValue() * 10d;
                    double totalDays = 0;
                    if (unitsPerDay != 0) {
                        totalDays = duration / unitsPerDay; // depends on control dependency: [if], data = [none]
                    }
                    result = Duration.getInstance(totalDays, timeUnit);
                    break;
                }
                case ELAPSED_DAYS: {
                    double unitsPerDay = 24d * 600d;
                    double totalDays = duration / unitsPerDay;
                    result = Duration.getInstance(totalDays, timeUnit); // depends on control dependency: [if], data = [none]
                    break;
                }
                case WEEKS: {
                    double unitsPerWeek = properties.getMinutesPerWeek().doubleValue() * 10d;
                    double totalWeeks = 0;
                    if (unitsPerWeek != 0) {
                        totalWeeks = duration / unitsPerWeek; // depends on control dependency: [if], data = [none]
                    }
                    result = Duration.getInstance(totalWeeks, timeUnit);
                    break;
                }
                case ELAPSED_WEEKS: {
                    double unitsPerWeek = (60 * 24 * 7 * 10);
                    double totalWeeks = duration / unitsPerWeek;
                    result = Duration.getInstance(totalWeeks, timeUnit);
                    break;
                }
                case MONTHS: {
                    double unitsPerMonth = properties.getMinutesPerDay().doubleValue() * properties.getDaysPerMonth().doubleValue() * 10d;
                    double totalMonths = 0;
                    if (unitsPerMonth != 0) {
                        totalMonths = duration / unitsPerMonth;
                    }
                    result = Duration.getInstance(totalMonths, timeUnit);
                    break;
                }
                case ELAPSED_MONTHS: {
                    double unitsPerMonth = (60 * 24 * 30 * 10);
                    double totalMonths = duration / unitsPerMonth;
                    result = Duration.getInstance(totalMonths, timeUnit);
                    break;
                }
                default: {
                    result = getDuration(duration, timeUnit);
                    break;
                }
            }
        }
        return (result);
    }
}
public class class_name {
    /**
     * Resolves the named exchange-rate providers; an empty argument list
     * means "all registered providers".
     *
     * @throws MonetaryException when a requested provider is not registered
     */
    public List<ExchangeRateProvider> getExchangeRateProviders(String... providers) {
        Collection<String> names = Arrays.asList(providers);
        if (names.isEmpty()) {
            names = getProviderNames();
        }
        final List<ExchangeRateProvider> instances = new ArrayList<>();
        for (String name : names) {
            final ExchangeRateProvider provider = getExchangeRateProvider(name);
            if (provider == null) {
                throw new MonetaryException("Unsupported conversion/rate provider: " + name);
            }
            instances.add(provider);
        }
        return instances;
    }
}
public class class_name {
    // Resolves the named exchange-rate providers (empty argument list means
    // all registered providers); throws MonetaryException for unknown names.
    public List<ExchangeRateProvider> getExchangeRateProviders(String... providers) {
        List<ExchangeRateProvider> provInstances = new ArrayList<>();
        Collection<String> providerNames = Arrays.asList(providers);
        if (providerNames.isEmpty()) {
            providerNames = getProviderNames(); // depends on control dependency: [if], data = [none]
        }
        for (String provName : providerNames) {
            ExchangeRateProvider provider = getExchangeRateProvider(provName);
            if(provider==null){
                throw new MonetaryException("Unsupported conversion/rate provider: " + provName);
            }
            provInstances.add(provider); // depends on control dependency: [for], data = [none]
        }
        return provInstances;
    }
}
public class class_name { private String docKindOfArg(final NamedArgumentDefinition argumentDefinition) { // deprecated // required (common or otherwise) // common optional // advanced // hidden // Required first (after positional, which are separate), regardless of what else it might be if (argumentDefinition.isDeprecated()) { return "deprecated"; } if (argumentDefinition.isControlledByPlugin()) { return "dependent"; } if (!argumentDefinition.isOptional()) { return "required"; } if (argumentDefinition.isCommon()) { // these will all be optional return "common"; } if (argumentDefinition.isAdvanced()) { return "advanced"; } if (argumentDefinition.isHidden()) { return "hidden"; } return "optional"; } }
public class class_name {
    // Classifies an argument for documentation; precedence (first match wins):
    // deprecated > dependent > required > common > advanced > hidden > optional.
    private String docKindOfArg(final NamedArgumentDefinition argumentDefinition) {
        // deprecated
        // required (common or otherwise)
        // common optional
        // advanced
        // hidden
        // Required first (after positional, which are separate), regardless of what else it might be
        if (argumentDefinition.isDeprecated()) {
            return "deprecated"; // depends on control dependency: [if], data = [none]
        }
        if (argumentDefinition.isControlledByPlugin()) {
            return "dependent"; // depends on control dependency: [if], data = [none]
        }
        if (!argumentDefinition.isOptional()) {
            return "required"; // depends on control dependency: [if], data = [none]
        }
        if (argumentDefinition.isCommon()) {
            // these will all be optional
            return "common"; // depends on control dependency: [if], data = [none]
        }
        if (argumentDefinition.isAdvanced()) {
            return "advanced"; // depends on control dependency: [if], data = [none]
        }
        if (argumentDefinition.isHidden()) {
            return "hidden"; // depends on control dependency: [if], data = [none]
        }
        return "optional";
    }
}
public class class_name {
    /**
     * Instantiates the configured factory resolver and delegates to it.
     * Returns an immutable empty list when no resolver class is configured.
     */
    @SuppressWarnings("unchecked")
    public <T extends DataSource> List<ConnectionAcquiringStrategyFactory<? extends ConnectionAcquiringStrategy, T>> getConnectionAcquiringStrategyFactories() {
        final ConnectionAcquiringStrategyFactoryResolver<T> resolver =
                instantiateClass(PropertyKey.POOL_STRATEGIES_FACTORY_RESOLVER);
        if (resolver == null) {
            return Collections.emptyList();
        }
        return resolver.resolveFactories();
    }
}
public class class_name {
    // Control-dependency-annotated variant: resolves strategy factories via the
    // configured resolver, or yields an empty list. The trailing
    // "// depends on control dependency" marker is a dataset annotation, not code.
    @SuppressWarnings("unchecked")
    public <T extends DataSource> List<ConnectionAcquiringStrategyFactory<? extends ConnectionAcquiringStrategy, T>> getConnectionAcquiringStrategyFactories() {
        ConnectionAcquiringStrategyFactoryResolver<T> connectionAcquiringStrategyFactoryResolver =
                instantiateClass(PropertyKey.POOL_STRATEGIES_FACTORY_RESOLVER);
        if (connectionAcquiringStrategyFactoryResolver != null) {
            return connectionAcquiringStrategyFactoryResolver.resolveFactories(); // depends on control dependency: [if], data = [none]
        }
        return Collections.emptyList();
    }
}
public class class_name {
    // Validates each clause of a compound query operator ($and/$or/$not).
    // Every clause must be a single-entry Map. Nested compound operators recurse;
    // "$text" clauses get dedicated validation (the shared one-per-query limit is
    // tracked through textClauseLimitReached); non-"$" keys are treated as field
    // clauses and validated individually; any other "$"-prefixed key is rejected.
    @SuppressWarnings("unchecked")
    private static void validateCompoundOperatorClauses(List<Object> clauses, Boolean[] textClauseLimitReached) throws QueryException {
        for (Object obj : clauses) {
            if (!(obj instanceof Map)) {
                String msg = String.format("Operator argument must be a Map %s", clauses.toString());
                throw new QueryException(msg);
            }
            Map<String, Object> clause = (Map<String, Object>) obj;
            if (clause.size() != 1) {
                String msg;
                msg = String.format("Operator argument clause should have one key value pair: %s", clauses.toString());
                throw new QueryException(msg);
            }
            String key = (String) clause.keySet().toArray()[0];
            if (Arrays.asList(OR, NOT, AND).contains(key)) {
                // this should have a list as top level type
                Object compoundClauses = clause.get(key);
                validateCompoundOperatorOperand(compoundClauses);
                // validate list
                validateCompoundOperatorClauses((List<Object>) compoundClauses, textClauseLimitReached);
            } else if (!(key.startsWith("$"))) {
                // this should have a map
                // send this for validation
                validateClause((Map<String, Object>) clause.get(key));
            } else if (key.equalsIgnoreCase(TEXT)) {
                // this should have a map
                // send this for validation
                validateTextClause(clause.get(key), textClauseLimitReached);
            } else {
                String msg = String.format("%s operator cannot be a top level operator", key);
                throw new QueryException(msg);
            }
        }
    }
}
public class class_name {
    // Control-dependency-annotated variant of validateCompoundOperatorClauses.
    // The trailing "// depends on control dependency" markers are dataset
    // annotations, not code.
    @SuppressWarnings("unchecked")
    private static void validateCompoundOperatorClauses(List<Object> clauses, Boolean[] textClauseLimitReached) throws QueryException {
        for (Object obj : clauses) {
            if (!(obj instanceof Map)) {
                String msg = String.format("Operator argument must be a Map %s", clauses.toString());
                throw new QueryException(msg);
            }
            Map<String, Object> clause = (Map<String, Object>) obj;
            if (clause.size() != 1) {
                String msg;
                msg = String.format("Operator argument clause should have one key value pair: %s", clauses.toString()); // depends on control dependency: [if], data = [none]
                throw new QueryException(msg);
            }
            String key = (String) clause.keySet().toArray()[0];
            if (Arrays.asList(OR, NOT, AND).contains(key)) {
                // this should have a list as top level type
                Object compoundClauses = clause.get(key);
                validateCompoundOperatorOperand(compoundClauses); // depends on control dependency: [if], data = [none]
                // validate list
                validateCompoundOperatorClauses((List<Object>) compoundClauses, textClauseLimitReached); // depends on control dependency: [if], data = [none]
            } else if (!(key.startsWith("$"))) {
                // this should have a map
                // send this for validation
                validateClause((Map<String, Object>) clause.get(key)); // depends on control dependency: [if], data = [none]
            } else if (key.equalsIgnoreCase(TEXT)) {
                // this should have a map
                // send this for validation
                validateTextClause(clause.get(key), textClauseLimitReached); // depends on control dependency: [if], data = [none]
            } else {
                String msg = String.format("%s operator cannot be a top level operator", key);
                throw new QueryException(msg);
            }
        }
    }
}
public class class_name { protected static void multTransABlockMinus_U( double[] dataA, double []dataC, int indexA, int indexB, int indexC, final int heightA, final int widthA, final int widthC ) { // for( int i = 0; i < widthA; i++ ) { // for( int k = 0; k < heightA; k++ ) { // // double valA = dataA[k*widthA + i + indexA]; // for( int j = i; j < widthC; j++ ) { // dataC[ i*widthC + j + indexC ] -= valA * dataA[k*widthC + j + indexB]; // } // } // } for( int i = 0; i < widthA; i++ ) { for( int k = 0; k < heightA; k++ ) { double valA = dataA[k*widthA + i + indexA]; int b = k*widthC + indexB + i; int c = i*widthC + indexC + i; int endC = (c-i)+widthC; while( c != endC ) { // for( int j = i; j < widthC; j++ ) { dataC[ c++ ] -= valA * dataA[b++]; } } } } }
public class class_name {
    // Control-dependency-annotated variant of multTransABlockMinus_U. The trailing
    // "// depends on control dependency" marker is a dataset annotation, not code.
    protected static void multTransABlockMinus_U( double[] dataA, double []dataC, int indexA, int indexB, int indexC, final int heightA, final int widthA, final int widthC ) {
        // for( int i = 0; i < widthA; i++ ) {
        // for( int k = 0; k < heightA; k++ ) {
        // // double valA = dataA[k*widthA + i + indexA];
        // for( int j = i; j < widthC; j++ ) {
        // dataC[ i*widthC + j + indexC ] -= valA * dataA[k*widthC + j + indexB];
        // }
        // }
        // }
        for( int i = 0; i < widthA; i++ ) {
            for( int k = 0; k < heightA; k++ ) {
                double valA = dataA[k*widthA + i + indexA];
                int b = k*widthC + indexB + i;
                int c = i*widthC + indexC + i;
                int endC = (c-i)+widthC;
                while( c != endC ) {
                    // for( int j = i; j < widthC; j++ ) {
                    dataC[ c++ ] -= valA * dataA[b++]; // depends on control dependency: [while], data = [none]
                }
            }
        }
    }
}
public class class_name {
    /**
     * Adds every supplied compartment name to this list.
     * @param theCompartments compartment names; must not be null
     * @return this instance, for chaining
     */
    public AuthorizedList addCompartments(String... theCompartments) {
        Validate.notNull(theCompartments, "theCompartments must not be null");
        for (int idx = 0; idx < theCompartments.length; idx++) {
            addCompartment(theCompartments[idx]);
        }
        return this;
    }
}
public class class_name {
    // Control-dependency-annotated variant of addCompartments: adds each supplied
    // name and returns this for chaining. The trailing marker is a dataset
    // annotation, not code.
    public AuthorizedList addCompartments(String... theCompartments) {
        Validate.notNull(theCompartments, "theCompartments must not be null");
        for (String next : theCompartments) {
            addCompartment(next); // depends on control dependency: [for], data = [next]
        }
        return this;
    }
}
public class class_name {
    /**
     * Nagios-style threshold check: true when {@code value} lies inside the alert
     * range described by {@code simpleRange}.
     * Supported forms: "N:" (alert below N), "~:N" (alert above N),
     * "@lo:hi" (alert inside [lo,hi]), "lo:hi" (alert outside [lo,hi]),
     * and a plain integer "N" (alert outside [0,N]). An empty range never alerts.
     */
    protected boolean valueCheck(double value, String simpleRange) {
        if (simpleRange.isEmpty()) {
            return false;
        }
        if (simpleRange.endsWith(":")) {
            final double min = Double.parseDouble(simpleRange.replace(":", ""));
            return value < min;
        }
        if (simpleRange.startsWith("~:")) {
            final double max = Double.parseDouble(simpleRange.replace("~:", ""));
            return value > max;
        }
        if (simpleRange.startsWith("@")) {
            final String[] bounds = simpleRange.replace("@", "").split(":");
            final double lo = Double.parseDouble(bounds[0]);
            final double hi = Double.parseDouble(bounds[1]);
            return lo <= value && value <= hi;
        }
        if (simpleRange.matches("^-{0,1}[0-9]+:-{0,1}[0-9]+$")) {
            final String[] bounds = simpleRange.split(":");
            return value < Double.parseDouble(bounds[0]) || value > Double.parseDouble(bounds[1]);
        }
        return simpleRange.matches("^-{0,1}[0-9]+$")
                && (value < 0 || value > Double.parseDouble(simpleRange));
    }
}
public class class_name {
    // Control-dependency-annotated variant of valueCheck (Nagios-style threshold
    // range test). The trailing "// depends on control dependency" markers are
    // dataset annotations, not code.
    protected boolean valueCheck(double value, String simpleRange) {
        if (simpleRange.isEmpty()) {
            return false; // depends on control dependency: [if], data = [none]
        }
        if (simpleRange.endsWith(":")) {
            return value < Double.parseDouble(simpleRange.replace(":", "")); // depends on control dependency: [if], data = [none]
        }
        if (simpleRange.startsWith("~:")) {
            return value > Double.parseDouble(simpleRange.replace("~:", "")); // depends on control dependency: [if], data = [none]
        }
        if (simpleRange.startsWith("@")) {
            String[] values = simpleRange.replace("@", "").split(":");
            return value >= Double.parseDouble(values[0]) && value <= Double.parseDouble(values[1]); // depends on control dependency: [if], data = [none]
        }
        if (simpleRange.matches("^-{0,1}[0-9]+:-{0,1}[0-9]+$")) {
            String[] values = simpleRange.split(":");
            return value < Double.parseDouble(values[0]) || value > Double.parseDouble(values[1]); // depends on control dependency: [if], data = [none]
        }
        return simpleRange.matches("^-{0,1}[0-9]+$") && (0 > value || value > Double.parseDouble(simpleRange));
    }
}
public class class_name {
    // Parses and merges query-string parameters for this request. When no saved
    // query-string list exists (no forward/include chain), the current query string
    // is parsed directly, merging into any already-parsed parameters. Otherwise each
    // saved QSListItem is parsed once (its parsed Hashtable is cached on the item and
    // the raw string cleared) and merged in order. Parsing is skipped unless the
    // string contains '=' or the ALLOW_QUERY_PARAM_WITH_NO_EQUAL property is set.
    // Numeric comments (321485, 258025, PM35450, ...) are original APAR references.
    private void parseQueryStringList(){
        //321485
        if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) { //306998.15
            logger.logp(Level.FINE, CLASS_NAME,"parseQueryStringList", "");
        }
        if (WCCustomProperties.CHECK_REQUEST_OBJECT_IN_USE){
            checkRequestObjectInUse();
        }
        SRTServletRequestThreadData reqData = SRTServletRequestThreadData.getInstance();
        Hashtable tmpQueryParams = null;
        LinkedList queryStringList = SRTServletRequestThreadData.getInstance().getQueryStringList();
        if (queryStringList ==null || queryStringList.isEmpty()){ //258025
            String queryString = getQueryString();
            if (queryString != null && ((queryString.indexOf('=') != -1) || WCCustomProperties.ALLOW_QUERY_PARAM_WITH_NO_EQUAL))//PM35450
            {
                if (reqData.getParameters() == null || reqData.getParameters().isEmpty())// 258025
                    reqData.setParameters(RequestUtils.parseQueryString(getQueryString(), getReaderEncoding(true)));
                else{
                    tmpQueryParams = RequestUtils.parseQueryString(getQueryString(), getReaderEncoding(true));
                    mergeQueryParams(tmpQueryParams);
                }
            }
        }
        else{
            Iterator i = queryStringList.iterator();
            QSListItem qsListItem = null;
            String queryString;
            while (i.hasNext()){
                qsListItem = ((QSListItem) i.next());
                queryString = qsListItem._qs;
                // 321485
                if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) { //306998.15
                    logger.logp(Level.FINE, CLASS_NAME,"parseQueryStringList", "queryString --> " + queryString);
                }
                if (qsListItem._qsHashtable != null)
                    mergeQueryParams(qsListItem._qsHashtable);
                else if (queryString != null && ((queryString.indexOf('=') != -1) || WCCustomProperties.ALLOW_QUERY_PARAM_WITH_NO_EQUAL))//PM35450
                {
                    if (reqData.getParameters() == null || reqData.getParameters().isEmpty())// 258025
                    {
                        qsListItem._qsHashtable = RequestUtils.parseQueryString(queryString, getReaderEncoding());
                        reqData.setParameters(qsListItem._qsHashtable);
                        qsListItem._qs = null;
                    }
                    else{
                        tmpQueryParams = RequestUtils.parseQueryString(queryString, getReaderEncoding());
                        qsListItem._qsHashtable = tmpQueryParams;
                        qsListItem._qs = null;
                        mergeQueryParams(tmpQueryParams);
                    }
                }
            }
        }
    }
}
public class class_name {
    // Control-dependency-annotated variant of parseQueryStringList. The trailing
    // "// depends on control dependency" markers are dataset annotations, not code.
    private void parseQueryStringList(){
        //321485
        if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) { //306998.15
            logger.logp(Level.FINE, CLASS_NAME,"parseQueryStringList", ""); // depends on control dependency: [if], data = [none]
        }
        if (WCCustomProperties.CHECK_REQUEST_OBJECT_IN_USE){
            checkRequestObjectInUse(); // depends on control dependency: [if], data = [none]
        }
        SRTServletRequestThreadData reqData = SRTServletRequestThreadData.getInstance();
        Hashtable tmpQueryParams = null;
        LinkedList queryStringList = SRTServletRequestThreadData.getInstance().getQueryStringList();
        if (queryStringList ==null || queryStringList.isEmpty()){ //258025
            String queryString = getQueryString();
            if (queryString != null && ((queryString.indexOf('=') != -1) || WCCustomProperties.ALLOW_QUERY_PARAM_WITH_NO_EQUAL))//PM35450
            {
                if (reqData.getParameters() == null || reqData.getParameters().isEmpty())// 258025
                    reqData.setParameters(RequestUtils.parseQueryString(getQueryString(), getReaderEncoding(true)));
                else{
                    tmpQueryParams = RequestUtils.parseQueryString(getQueryString(), getReaderEncoding(true)); // depends on control dependency: [if], data = [none]
                    mergeQueryParams(tmpQueryParams); // depends on control dependency: [if], data = [none]
                }
            }
        }
        else{
            Iterator i = queryStringList.iterator();
            QSListItem qsListItem = null;
            String queryString;
            while (i.hasNext()){
                qsListItem = ((QSListItem) i.next()); // depends on control dependency: [while], data = [none]
                queryString = qsListItem._qs; // depends on control dependency: [while], data = [none]
                // 321485
                if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) { //306998.15
                    logger.logp(Level.FINE, CLASS_NAME,"parseQueryStringList", "queryString --> " + queryString); // depends on control dependency: [if], data = [none]
                }
                if (qsListItem._qsHashtable != null)
                    mergeQueryParams(qsListItem._qsHashtable);
                else if (queryString != null && ((queryString.indexOf('=') != -1) || WCCustomProperties.ALLOW_QUERY_PARAM_WITH_NO_EQUAL))//PM35450
                {
                    if (reqData.getParameters() == null || reqData.getParameters().isEmpty())// 258025
                    {
                        qsListItem._qsHashtable = RequestUtils.parseQueryString(queryString, getReaderEncoding()); // depends on control dependency: [if], data = [none]
                        reqData.setParameters(qsListItem._qsHashtable); // depends on control dependency: [if], data = [none]
                        qsListItem._qs = null; // depends on control dependency: [if], data = [none]
                    }
                    else{
                        tmpQueryParams = RequestUtils.parseQueryString(queryString, getReaderEncoding()); // depends on control dependency: [if], data = [none]
                        qsListItem._qsHashtable = tmpQueryParams; // depends on control dependency: [if], data = [none]
                        qsListItem._qs = null; // depends on control dependency: [if], data = [none]
                        mergeQueryParams(tmpQueryParams); // depends on control dependency: [if], data = [none]
                    }
                }
            }
        }
    }
}
public class class_name {
    /**
     * Marshalls the given request into the wire format via the protocol marshaller.
     * @throws SdkClientException when the request is null or marshalling fails
     */
    public void marshall(GetDeploymentStatusRequest getDeploymentStatusRequest, ProtocolMarshaller protocolMarshaller) {
        if (getDeploymentStatusRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // field order mirrors the binding declarations
            protocolMarshaller.marshall(getDeploymentStatusRequest.getDeploymentId(), DEPLOYMENTID_BINDING);
            protocolMarshaller.marshall(getDeploymentStatusRequest.getGroupId(), GROUPID_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class class_name {
    // Control-dependency-annotated variant of marshall(GetDeploymentStatusRequest).
    // The trailing "// depends on control dependency" markers are dataset
    // annotations, not code.
    public void marshall(GetDeploymentStatusRequest getDeploymentStatusRequest, ProtocolMarshaller protocolMarshaller) {
        if (getDeploymentStatusRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getDeploymentStatusRequest.getDeploymentId(), DEPLOYMENTID_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(getDeploymentStatusRequest.getGroupId(), GROUPID_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    /**
     * Sets every complex element of the matrix to (real, imaginary).
     * Storage is interleaved: even indices hold real parts, odd indices imaginary.
     */
    public static void fill(ZMatrixD1 a, double real, double imaginary) {
        final int length = a.getDataLength();
        int idx = 0;
        while (idx < length) {
            a.data[idx++] = real;
            a.data[idx++] = imaginary;
        }
    }
}
public class class_name {
    // Control-dependency-annotated variant of fill: writes (real, imaginary) into
    // every interleaved complex element. The trailing markers are dataset
    // annotations, not code.
    public static void fill(ZMatrixD1 a, double real, double imaginary) {
        int N = a.getDataLength();
        for (int i = 0; i < N; i += 2) {
            a.data[i] = real; // depends on control dependency: [for], data = [i]
            a.data[i+1] = imaginary; // depends on control dependency: [for], data = [i]
        }
    }
}
public class class_name {
    // Returns the cached MetricRecorder for the given method, creating and caching
    // one on first request. The synchronized block makes the check-then-put atomic,
    // so concurrent callers never create duplicate recorders for the same method.
    @Override
    public MetricRecorder getMetricRecorder(Method method, RetryPolicy retryPolicy, CircuitBreakerPolicy circuitBreakerPolicy, TimeoutPolicy timeoutPolicy, BulkheadPolicy bulkheadPolicy, FallbackPolicy fallbackPolicy, AsyncType isAsync) {
        synchronized (recorders) {
            MetricRecorder recorder = recorders.get(method);
            if (recorder == null) {
                recorder = createNewRecorder(method, retryPolicy, circuitBreakerPolicy, timeoutPolicy, bulkheadPolicy, fallbackPolicy, isAsync);
                recorders.put(method, recorder);
            }
            return recorder;
        }
    }
}
public class class_name {
    // Control-dependency-annotated variant of getMetricRecorder (lock-guarded lazy
    // cache). The trailing markers are dataset annotations, not code.
    @Override
    public MetricRecorder getMetricRecorder(Method method, RetryPolicy retryPolicy, CircuitBreakerPolicy circuitBreakerPolicy, TimeoutPolicy timeoutPolicy, BulkheadPolicy bulkheadPolicy, FallbackPolicy fallbackPolicy, AsyncType isAsync) {
        synchronized (recorders) {
            MetricRecorder recorder = recorders.get(method);
            if (recorder == null) {
                recorder = createNewRecorder(method, retryPolicy, circuitBreakerPolicy, timeoutPolicy, bulkheadPolicy, fallbackPolicy, isAsync); // depends on control dependency: [if], data = [none]
                recorders.put(method, recorder); // depends on control dependency: [if], data = [none]
            }
            return recorder;
        }
    }
}
public class class_name {
    /**
     * Empties the URI index under the lock.
     * @return true if at least one entry was removed
     */
    public boolean removeAll() {
        lock.lock();
        try {
            final boolean hadEntries = !uriIndex.isEmpty();
            uriIndex.clear();
            return hadEntries;
        } finally {
            lock.unlock();
        }
    }
}
public class class_name {
    // Control-dependency-annotated variant of removeAll: clears the URI index under
    // the lock and reports whether it held entries. The trailing markers are dataset
    // annotations, not code.
    public boolean removeAll() {
        lock.lock();
        boolean result = false;
        try {
            result = !uriIndex.isEmpty(); // depends on control dependency: [try], data = [none]
            uriIndex.clear(); // depends on control dependency: [try], data = [none]
        } finally {
            lock.unlock();
        }
        return result;
    }
}
public class class_name {
    // Executes a batch UPDATE against the configured table for every buffered row.
    // Row layout: update-column values first, then the WHERE (condition) column
    // values. Individual failures are routed to errorOccurred() so one bad row does
    // not abort the batch; a summary message is published when anything succeeded.
    // The datastore connection is always closed.
    @Override
    public void run(final Iterable<Object[]> buffer) throws Exception {
        UpdateableDatastoreConnection con = datastore.openConnection();
        try {
            final Column[] updateColumns = con.getSchemaNavigator().convertToColumns(schemaName, tableName, columnNames);
            final Column[] whereColumns = con.getSchemaNavigator().convertToColumns(schemaName, tableName, conditionColumnNames);
            if (logger.isDebugEnabled()) {
                logger.debug("Updating columns: {}", Arrays.toString(updateColumns));
            }
            final UpdateableDataContext dc = con.getUpdateableDataContext();
            dc.executeUpdate(new BatchUpdateScript() {
                @Override
                public void run(UpdateCallback callback) {
                    int updateCount = 0;
                    for (Object[] rowData : buffer) {
                        RowUpdationBuilder updationBuilder = callback.update(updateColumns[0].getTable());
                        // SET clause: one value per update column
                        for (int i = 0; i < updateColumns.length; i++) {
                            final Object value = rowData[i];
                            updationBuilder = updationBuilder.value(updateColumns[i], value);
                        }
                        // WHERE clause: condition values follow the update values in rowData
                        for (int i = 0; i < whereColumns.length; i++) {
                            final Object value = rowData[i + updateColumns.length];
                            final Column whereColumn = whereColumns[i];
                            final FilterItem filterItem = new FilterItem(new SelectItem(whereColumn), OperatorType.EQUALS_TO, value);
                            updationBuilder = updationBuilder.where(filterItem);
                        }
                        if (logger.isDebugEnabled()) {
                            logger.debug("Updating: {}", Arrays.toString(rowData));
                        }
                        try {
                            updationBuilder.execute();
                            updateCount++;
                            _updatedRowCount.incrementAndGet();
                        } catch (final RuntimeException e) {
                            errorOccurred(rowData, e);
                        }
                    }
                    if (updateCount > 0) {
                        _componentContext.publishMessage(new ExecutionLogMessage(updateCount + " updates executed"));
                    }
                }
            });
        } finally {
            con.close();
        }
    }
}
public class class_name {
    // Control-dependency-annotated variant of the batch-update run() method. The
    // trailing "// depends on control dependency" markers are dataset annotations,
    // not code.
    @Override
    public void run(final Iterable<Object[]> buffer) throws Exception {
        UpdateableDatastoreConnection con = datastore.openConnection();
        try {
            final Column[] updateColumns = con.getSchemaNavigator().convertToColumns(schemaName, tableName, columnNames);
            final Column[] whereColumns = con.getSchemaNavigator().convertToColumns(schemaName, tableName, conditionColumnNames);
            if (logger.isDebugEnabled()) {
                logger.debug("Updating columns: {}", Arrays.toString(updateColumns)); // depends on control dependency: [if], data = [none]
            }
            final UpdateableDataContext dc = con.getUpdateableDataContext();
            dc.executeUpdate(new BatchUpdateScript() {
                @Override
                public void run(UpdateCallback callback) {
                    int updateCount = 0;
                    for (Object[] rowData : buffer) {
                        RowUpdationBuilder updationBuilder = callback.update(updateColumns[0].getTable());
                        for (int i = 0; i < updateColumns.length; i++) {
                            final Object value = rowData[i];
                            updationBuilder = updationBuilder.value(updateColumns[i], value); // depends on control dependency: [for], data = [i]
                        }
                        for (int i = 0; i < whereColumns.length; i++) {
                            final Object value = rowData[i + updateColumns.length];
                            final Column whereColumn = whereColumns[i];
                            final FilterItem filterItem = new FilterItem(new SelectItem(whereColumn), OperatorType.EQUALS_TO, value);
                            updationBuilder = updationBuilder.where(filterItem); // depends on control dependency: [for], data = [none]
                        }
                        if (logger.isDebugEnabled()) {
                            logger.debug("Updating: {}", Arrays.toString(rowData)); // depends on control dependency: [if], data = [none]
                        }
                        try {
                            updationBuilder.execute(); // depends on control dependency: [try], data = [none]
                            updateCount++; // depends on control dependency: [try], data = [none]
                            _updatedRowCount.incrementAndGet(); // depends on control dependency: [try], data = [none]
                        } catch (final RuntimeException e) {
                            errorOccurred(rowData, e);
                        } // depends on control dependency: [catch], data = [none]
                    }
                    if (updateCount > 0) {
                        _componentContext.publishMessage(new ExecutionLogMessage(updateCount + " updates executed")); // depends on control dependency: [if], data = [(updateCount]
                    }
                }
            });
        } finally {
            con.close();
        }
    }
}
public class class_name { private Map<String, String> getStrategyOptionsFromTree(Tree options) { //Check for old [{}] syntax if (options.getText().equalsIgnoreCase("ARRAY")) { System.err.println("WARNING: [{}] strategy_options syntax is deprecated, please use {}"); if (options.getChildCount() == 0) return Collections.EMPTY_MAP; return getStrategyOptionsFromTree(options.getChild(0)); } // this map will be returned Map<String, String> strategyOptions = new HashMap<String, String>(); // each child node is ^(PAIR $key $value) for (int j = 0; j < options.getChildCount(); j++) { Tree optionPair = options.getChild(j); // current $key String key = CliUtils.unescapeSQLString(optionPair.getChild(0).getText()); // current $value String val = CliUtils.unescapeSQLString(optionPair.getChild(1).getText()); strategyOptions.put(key, val); } return strategyOptions; } }
public class class_name {
    // Control-dependency-annotated variant of getStrategyOptionsFromTree. The
    // trailing "// depends on control dependency" markers are dataset annotations,
    // not code.
    private Map<String, String> getStrategyOptionsFromTree(Tree options) {
        //Check for old [{}] syntax
        if (options.getText().equalsIgnoreCase("ARRAY")) {
            System.err.println("WARNING: [{}] strategy_options syntax is deprecated, please use {}"); // depends on control dependency: [if], data = [none]
            if (options.getChildCount() == 0)
                return Collections.EMPTY_MAP;
            return getStrategyOptionsFromTree(options.getChild(0)); // depends on control dependency: [if], data = [none]
        }
        // this map will be returned
        Map<String, String> strategyOptions = new HashMap<String, String>();
        // each child node is ^(PAIR $key $value)
        for (int j = 0; j < options.getChildCount(); j++) {
            Tree optionPair = options.getChild(j);
            // current $key
            String key = CliUtils.unescapeSQLString(optionPair.getChild(0).getText());
            // current $value
            String val = CliUtils.unescapeSQLString(optionPair.getChild(1).getText());
            strategyOptions.put(key, val); // depends on control dependency: [for], data = [none]
        }
        return strategyOptions;
    }
}
public class class_name {
    /**
     * Parses a whitespace-separated line of "word/TAG" tokens into the parallel
     * {@code words} and {@code tags} fields.
     * Malformed tokens (not exactly one '/'-separated pair) are reported to stderr;
     * the available pieces are kept and the missing tag defaults to "" instead of
     * crashing with ArrayIndexOutOfBoundsException as the previous version did when
     * a token contained no '/'.
     */
    public void parseTagedLine(String line) {
        String[] toks = line.split("(\\s| | |\\t)+");
        words = new String[toks.length];
        tags = new String[toks.length];
        for (int i = 0; i < toks.length; i++) {
            String[] tt = toks[i].split("/");
            if (tt.length != 2) {
                System.err.println("Wrong Format");
                // degrade gracefully: keep whatever pieces exist
                words[i] = tt.length > 0 ? tt[0] : "";
                tags[i] = tt.length > 1 ? tt[1] : "";
            } else {
                words[i] = tt[0];
                tags[i] = tt[1];
            }
        }
    }
}
public class class_name {
    // Control-dependency-annotated variant of parseTagedLine: splits "word/TAG"
    // tokens into the parallel words/tags fields. NOTE(review): a token without '/'
    // makes tt[1] throw ArrayIndexOutOfBoundsException after the warning. The
    // trailing markers are dataset annotations, not code.
    public void parseTagedLine(String line) {
        String[] toks = line.split("(\\s| | |\\t)+");
        words = new String[toks.length];
        tags = new String[toks.length];
        for(int i=0;i<toks.length;i++){
            String[] tt = toks[i].split("/");
            if(tt.length!=2)
                System.err.println("Wrong Format");
            words[i] = tt[0]; // depends on control dependency: [for], data = [i]
            tags[i]=tt[1]; // depends on control dependency: [for], data = [i]
        }
    }
}
public class class_name {
    /**
     * Computes the SHA-1 digest of the input, reusing a per-thread MessageDigest.
     * The digest instance is created lazily on a thread's first call.
     */
    public static byte[] sha1(byte[] input) {
        MessageDigest digest = sha1Digest.get();
        if (digest == null) {
            try {
                digest = MessageDigest.getInstance("SHA1");
            } catch (NoSuchAlgorithmException e) {
                throw new UaRuntimeException(StatusCodes.Bad_InternalError, e);
            }
            // only reached when creation succeeded
            sha1Digest.set(digest);
        }
        return digest.digest(input);
    }
}
public class class_name {
    // Control-dependency-annotated variant of sha1: lazily creates a per-thread
    // MessageDigest and digests the input. The trailing markers are dataset
    // annotations, not code.
    public static byte[] sha1(byte[] input) {
        MessageDigest messageDigest = sha1Digest.get();
        if (messageDigest == null) {
            try {
                messageDigest = MessageDigest.getInstance("SHA1"); // depends on control dependency: [try], data = [none]
                sha1Digest.set(messageDigest); // depends on control dependency: [try], data = [none]
            } catch (NoSuchAlgorithmException e) {
                throw new UaRuntimeException(StatusCodes.Bad_InternalError, e);
            } // depends on control dependency: [catch], data = [none]
        }
        return messageDigest.digest(input);
    }
}
public class class_name {
    // Computes how many samples each chunk of the track contains.
    // referenceChunkStarts holds 1-based first-sample numbers per chunk; the final
    // chunk runs to the end of the sample list. The assert cross-checks that the
    // chunk sizes sum to the track's total sample count.
    int[] getChunkSizes(Track track) {
        long[] referenceChunkStarts = fragmenter.sampleNumbers(track);
        int[] chunkSizes = new int[referenceChunkStarts.length];
        for (int i = 0; i < referenceChunkStarts.length; i++) {
            long start = referenceChunkStarts[i] - 1;
            long end;
            if (referenceChunkStarts.length == i + 1) {
                end = track.getSamples().size();
            } else {
                end = referenceChunkStarts[i + 1] - 1;
            }
            chunkSizes[i] = l2i(end - start);
        }
        assert DefaultMp4Builder.this.track2Sample.get(track).size() == sum(chunkSizes) : "The number of samples and the sum of all chunk lengths must be equal";
        return chunkSizes;
    }
}
public class class_name {
    // Control-dependency-annotated variant of getChunkSizes. The trailing markers
    // are dataset annotations, not code.
    int[] getChunkSizes(Track track) {
        long[] referenceChunkStarts = fragmenter.sampleNumbers(track);
        int[] chunkSizes = new int[referenceChunkStarts.length];
        for (int i = 0; i < referenceChunkStarts.length; i++) {
            long start = referenceChunkStarts[i] - 1;
            long end;
            if (referenceChunkStarts.length == i + 1) {
                end = track.getSamples().size(); // depends on control dependency: [if], data = [none]
            } else {
                end = referenceChunkStarts[i + 1] - 1; // depends on control dependency: [if], data = [none]
            }
            chunkSizes[i] = l2i(end - start); // depends on control dependency: [for], data = [i]
        }
        assert DefaultMp4Builder.this.track2Sample.get(track).size() == sum(chunkSizes) : "The number of samples and the sum of all chunk lengths must be equal";
        return chunkSizes;
    }
}
public class class_name {
    // Replaces this data item's payload with the content of the given log record.
    // Bookkeeping: the old payload size is first removed from the containing
    // recoverable-unit section, then the new size (read from the record) is added
    // and marked written. MEMORY_BACKED mode copies the bytes into _data; otherwise
    // only the file position is tracked and the record cursor is advanced past the
    // data. Underflow or any other failure is FFDC-logged and rethrown as
    // InternalLogException.
    protected void setData(ReadableLogRecord logRecord) /* @MD19840AA*/ throws InternalLogException {
        if (tc.isEntryEnabled()) Tr.entry(tc, "setData", new Object[] {this, logRecord});
        int totalDataSize = _dataSize + HEADER_SIZE;
        int unwrittenDataSize = 0;
        if (!_written) {
            unwrittenDataSize = totalDataSize;
        }
        // Remove the old payload from the containing recoverable
        // unit section
        _rus.payloadDeleted(totalDataSize, unwrittenDataSize);
        // The new data is encapsulated in the logRecord. We need to
        // swap the logRecord in the DataItem class.
        _logRecord = logRecord;
        try {
            if (tc.isDebugEnabled()) Tr.debug(tc, "Reading data size field @ position " + logRecord.position());
            _dataSize = logRecord.getInt();
            if (tc.isDebugEnabled()) Tr.debug(tc, "This data item contains " + _dataSize + " bytes of data");
            _rus.payloadAdded(_dataSize + HEADER_SIZE);
            _rus.payloadWritten(_dataSize + HEADER_SIZE);
            _written = true;
            if (_storageMode == MultiScopeRecoveryLog.MEMORY_BACKED) {
                if (tc.isDebugEnabled()) Tr.debug(tc, "Reading " + _dataSize + "bytes of data @ position " + logRecord.position());
                _data = new byte[_dataSize];
                _logRecord.get(_data);
            } else {
                if (tc.isDebugEnabled()) Tr.debug(tc, "Tracking " + _dataSize + "bytes of data @ position " + logRecord.position());
                _filePosition = logRecord.position();
                _data = null;
                _logRecord.advancePosition(_dataSize);
            }
        } catch (java.nio.BufferUnderflowException exc) {
            FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.SingleDataItem.setData", "180", this);
            throw new InternalLogException(exc);
        } catch (Exception exc) {
            FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.SingleDataItem.setData", "185", this);
            throw new InternalLogException(exc);
        }
        if (tc.isEntryEnabled()) Tr.exit(tc, "setData");
    }
}
public class class_name {
    // Control-dependency-annotated variant of setData. The trailing
    // "// depends on control dependency" markers are dataset annotations, not code.
    protected void setData(ReadableLogRecord logRecord) /* @MD19840AA*/ throws InternalLogException {
        if (tc.isEntryEnabled()) Tr.entry(tc, "setData", new Object[] {this, logRecord});
        int totalDataSize = _dataSize + HEADER_SIZE;
        int unwrittenDataSize = 0;
        if (!_written) {
            unwrittenDataSize = totalDataSize; // depends on control dependency: [if], data = [none]
        }
        // Remove the old payload from the containing recoverable
        // unit section
        _rus.payloadDeleted(totalDataSize, unwrittenDataSize);
        // The new data is encapsulated in the logRecord. We need to
        // swap the logRecord in the DataItem class.
        _logRecord = logRecord;
        try {
            if (tc.isDebugEnabled()) Tr.debug(tc, "Reading data size field @ position " + logRecord.position());
            _dataSize = logRecord.getInt(); // depends on control dependency: [try], data = [none]
            if (tc.isDebugEnabled()) Tr.debug(tc, "This data item contains " + _dataSize + " bytes of data");
            _rus.payloadAdded(_dataSize + HEADER_SIZE); // depends on control dependency: [try], data = [none]
            _rus.payloadWritten(_dataSize + HEADER_SIZE); // depends on control dependency: [try], data = [none]
            _written = true; // depends on control dependency: [try], data = [none]
            if (_storageMode == MultiScopeRecoveryLog.MEMORY_BACKED) {
                if (tc.isDebugEnabled()) Tr.debug(tc, "Reading " + _dataSize + "bytes of data @ position " + logRecord.position());
                _data = new byte[_dataSize]; // depends on control dependency: [if], data = [none]
                _logRecord.get(_data); // depends on control dependency: [if], data = [none]
            } else {
                if (tc.isDebugEnabled()) Tr.debug(tc, "Tracking " + _dataSize + "bytes of data @ position " + logRecord.position());
                _filePosition = logRecord.position(); // depends on control dependency: [if], data = [none]
                _data = null; // depends on control dependency: [if], data = [none]
                _logRecord.advancePosition(_dataSize); // depends on control dependency: [if], data = [none]
            }
        } catch (java.nio.BufferUnderflowException exc) {
            FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.SingleDataItem.setData", "180", this);
            throw new InternalLogException(exc);
        } // depends on control dependency: [catch], data = [none]
        catch (Exception exc) {
            FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.SingleDataItem.setData", "185", this);
            throw new InternalLogException(exc);
        } // depends on control dependency: [catch], data = [none]
        if (tc.isEntryEnabled()) Tr.exit(tc, "setData");
    }
}
public class class_name {
    /**
     * Marshalls the given DeltaTime into the wire format via the protocol marshaller.
     * @throws SdkClientException when the argument is null or marshalling fails
     */
    public void marshall(DeltaTime deltaTime, ProtocolMarshaller protocolMarshaller) {
        if (deltaTime == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // field order mirrors the binding declarations
            protocolMarshaller.marshall(deltaTime.getOffsetSeconds(), OFFSETSECONDS_BINDING);
            protocolMarshaller.marshall(deltaTime.getTimeExpression(), TIMEEXPRESSION_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class class_name {
    // Control-dependency-annotated variant of marshall(DeltaTime). The trailing
    // markers are dataset annotations, not code.
    public void marshall(DeltaTime deltaTime, ProtocolMarshaller protocolMarshaller) {
        if (deltaTime == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(deltaTime.getOffsetSeconds(), OFFSETSECONDS_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(deltaTime.getTimeExpression(), TIMEEXPRESSION_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    /**
     * Formats a chain of durations without rounding, space-separated, then lets the
     * format of the final duration decorate the joined text (e.g. tense suffix).
     * A null or empty list falls back to formatting the current instant.
     */
    public String formatUnrounded(List<Duration> durations) {
        if (durations == null || durations.isEmpty())
            return format(now());
        final StringBuilder joined = new StringBuilder();
        final int last = durations.size() - 1;
        Duration current = null;
        TimeFormat fmt = null;
        for (int idx = 0; idx <= last; idx++) {
            current = durations.get(idx);
            fmt = getFormat(current.getUnit());
            joined.append(fmt.formatUnrounded(current));
            if (idx != last)
                joined.append(" ");
        }
        // decorate with the last (finest-grained) duration's format
        return fmt.decorateUnrounded(current, joined.toString());
    }
}
public class class_name {
    // Control-dependency-annotated variant of formatUnrounded. The trailing markers
    // are dataset annotations, not code.
    public String formatUnrounded(List<Duration> durations) {
        if (durations == null || durations.isEmpty())
            return format(now());
        StringBuilder result = new StringBuilder();
        Duration duration = null;
        TimeFormat format = null;
        for (int i = 0; i < durations.size(); i++) {
            duration = durations.get(i); // depends on control dependency: [for], data = [i]
            format = getFormat(duration.getUnit()); // depends on control dependency: [for], data = [none]
            result.append(format.formatUnrounded(duration)); // depends on control dependency: [for], data = [none]
            if (i < durations.size() - 1)
                result.append(" ");
        }
        return format.decorateUnrounded(duration, result.toString());
    }
}
public class class_name { public ManageableCollection getCollectionByQuery(Class collectionClass, Query query, boolean lazy) throws PersistenceBrokerException { ManageableCollection result; try { // BRJ: return empty Collection for null query if (query == null) { result = (ManageableCollection)collectionClass.newInstance(); } else { if (lazy) { result = pb.getProxyFactory().createCollectionProxy(pb.getPBKey(), query, collectionClass); } else { result = getCollectionByQuery(collectionClass, query.getSearchClass(), query); } } return result; } catch (Exception e) { if(e instanceof PersistenceBrokerException) { throw (PersistenceBrokerException) e; } else { throw new PersistenceBrokerException(e); } } } }
public class class_name { public ManageableCollection getCollectionByQuery(Class collectionClass, Query query, boolean lazy) throws PersistenceBrokerException { ManageableCollection result; try { // BRJ: return empty Collection for null query if (query == null) { result = (ManageableCollection)collectionClass.newInstance(); // depends on control dependency: [if], data = [none] } else { if (lazy) { result = pb.getProxyFactory().createCollectionProxy(pb.getPBKey(), query, collectionClass); // depends on control dependency: [if], data = [none] } else { result = getCollectionByQuery(collectionClass, query.getSearchClass(), query); // depends on control dependency: [if], data = [none] } } return result; } catch (Exception e) { if(e instanceof PersistenceBrokerException) { throw (PersistenceBrokerException) e; } else { throw new PersistenceBrokerException(e); } } } }
public class class_name {

    /**
     * Contributes environment variables for the given run. Only {@link AbstractBuild}
     * instances are supported; any other {@link Run} subtype is left untouched.
     *
     * @param build the run whose environment is being built
     * @param env   the mutable environment map to populate
     */
    public void buildEnvironment(@Nonnull Run<?,?> build, @Nonnull Map<String,String> env) {
        if (!(build instanceof AbstractBuild)) {
            return; // nothing to contribute for non-AbstractBuild runs
        }
        buildEnvVars((AbstractBuild) build, env);
    }
}
public class class_name { public void buildEnvironment(@Nonnull Run<?,?> build, @Nonnull Map<String,String> env) { if (build instanceof AbstractBuild) { buildEnvVars((AbstractBuild)build, env); // depends on control dependency: [if], data = [none] } } }
public class class_name {

    /**
     * Builds a keyword-driven summary of {@code content}.
     *
     * Sentences are scored against a trie of the supplied keywords; the contiguous run
     * of sentences (of at least {@code len} characters) with the best score * distinct
     * keyword count is chosen, then optionally trimmed to the target length at the
     * window that covers the highest-scoring keyword offsets.
     *
     * @param keywords scored keywords used to rate sentences
     * @param content  full text to summarize
     * @return the summary paired with the keywords it was built from
     */
    private Summary explan(List<Keyword> keywords, String content) {
        // Index the keywords in a trie for fast in-sentence matching.
        SmartForest<Double> sf = new SmartForest<Double>();
        for (Keyword keyword : keywords) {
            sf.add(keyword.getName(), keyword.getScore());
        }
        // Split the content into sentences first.
        List<Sentence> sentences = toSentenceList(content.toCharArray());
        // flag becomes true if any sentence matched at least one keyword.
        boolean flag = false;
        for (Sentence sentence : sentences) {
            flag = computeScore(sentence, sf, false) || flag ;
        }
        if (!flag) {
            // No keyword hit anywhere: fall back to a plain prefix of the content.
            if (content.length() <= len) {
                return new Summary(keywords, content);
            }
            return new Summary(keywords, content.substring(0, len));
        }
        double maxScore = 0;
        int maxIndex = 0;
        // mc accumulates distinct matched keywords for the current candidate window;
        // the window score is scaled by its size (presumably to reward keyword
        // diversity - TODO confirm against MapCount semantics).
        MapCount<String> mc = new MapCount<>();
        for (int i = 0; i < sentences.size(); i++) {
            double tempScore = sentences.get(i).score;
            int tempLength = sentences.get(i).value.length();
            mc.addAll(sentences.get(i).mc.get());
            if (tempLength >= len) {
                // A single sentence already reaches the target length.
                tempScore = tempScore * mc.get().size();
                if (maxScore <= tempScore) {
                    maxScore = tempScore;
                    maxIndex = i;
                } else {
                    mc.get().clear();
                }
                continue;
            }
            // Grow the window with following sentences until it reaches len chars.
            for (int j = i + 1; j < sentences.size(); j++) {
                tempScore += sentences.get(j).score;
                tempLength += sentences.get(j).value.length();
                mc.addAll(sentences.get(j).mc.get());
                if (tempLength >= len) {
                    tempScore = tempScore * mc.get().size();
                    if (maxScore <= tempScore) {
                        maxScore = tempScore;
                        maxIndex = i;
                    }
                    mc.get().clear();
                    break;
                }
            }
            // Tail window shorter than len (ran out of sentences): score it as-is
            // and stop scanning once it wins.
            if (tempLength < len) {
                tempScore = tempScore * mc.get().size();
                if (maxScore <= tempScore) {
                    maxScore = tempScore;
                    maxIndex = i;
                    break;
                }
                mc.get().clear();
            }
        }
        // Concatenate sentences from the winning start index until len is exceeded.
        StringBuilder sb = new StringBuilder();
        for (int i = maxIndex; i < sentences.size(); i++) {
            sb.append(sentences.get(i).value);
            if (sb.length() > len) {
                break;
            }
        }
        String summaryStr = sb.toString();
        /*
         * Whether to force the summary to the target text length. Latin characters
         * such as "abc" count as half a length unit.
         */
        if (isSplitSummary && sb.length() > len) {
            String str = sb.toString();
            Sentence sentence = new Sentence(str);
            // Recompute keyword offsets within the concatenated candidate text.
            computeScore(sentence, sf, true);
            List<Triplet<Integer, Integer, Double>> offset = sentence.offset;
            // Collect legal window start positions: not inside a keyword span and
            // leaving at least len characters to the right.
            List<Integer> beginArr = new ArrayList<>() ;
            f: for (int i = 0; i < str.length(); i++) {
                for (Triplet<Integer,Integer,Double> t : offset) {
                    if(i>t.getValue0() && i<t.getValue1()){
                        continue f;
                    }
                }
                if(str.length()-i < len){
                    break ;
                }
                beginArr.add(i);
            }
            // Pick the start whose len-wide window fully covers the highest total
            // keyword score.
            maxIndex = 0 ;
            maxScore = -10000 ;
            for (Integer begin : beginArr) {
                double score = 0 ;
                for (Triplet<Integer,Integer,Double> t : offset) {
                    if(begin<t.getValue0() && begin+len>t.getValue1()){
                        score += t.getValue2() ;
                    }
                }
                if(score>maxScore){
                    maxIndex = begin ;
                    maxScore = score ;
                }
            }
            summaryStr = str.substring(maxIndex, Math.min(maxIndex + len + 1, str.length()));
        }
        return new Summary(keywords, summaryStr);
    }
}
public class class_name { private Summary explan(List<Keyword> keywords, String content) { SmartForest<Double> sf = new SmartForest<Double>(); for (Keyword keyword : keywords) { sf.add(keyword.getName(), keyword.getScore()); // depends on control dependency: [for], data = [keyword] } // 先断句 List<Sentence> sentences = toSentenceList(content.toCharArray()); boolean flag = false; for (Sentence sentence : sentences) { flag = computeScore(sentence, sf, false) || flag ; // depends on control dependency: [for], data = [sentence] } if (!flag) { if (content.length() <= len) { return new Summary(keywords, content); // depends on control dependency: [if], data = [none] } return new Summary(keywords, content.substring(0, len)); // depends on control dependency: [if], data = [none] } double maxScore = 0; int maxIndex = 0; MapCount<String> mc = new MapCount<>(); for (int i = 0; i < sentences.size(); i++) { double tempScore = sentences.get(i).score; int tempLength = sentences.get(i).value.length(); mc.addAll(sentences.get(i).mc.get()); // depends on control dependency: [for], data = [i] if (tempLength >= len) { tempScore = tempScore * mc.get().size(); // depends on control dependency: [if], data = [none] if (maxScore <= tempScore) { maxScore = tempScore; // depends on control dependency: [if], data = [none] maxIndex = i; // depends on control dependency: [if], data = [none] } else { mc.get().clear(); // depends on control dependency: [if], data = [none] } continue; } for (int j = i + 1; j < sentences.size(); j++) { tempScore += sentences.get(j).score; // depends on control dependency: [for], data = [j] tempLength += sentences.get(j).value.length(); // depends on control dependency: [for], data = [j] mc.addAll(sentences.get(j).mc.get()); // depends on control dependency: [for], data = [j] if (tempLength >= len) { tempScore = tempScore * mc.get().size(); // depends on control dependency: [if], data = [none] if (maxScore <= tempScore) { maxScore = tempScore; // depends on control 
dependency: [if], data = [none] maxIndex = i; // depends on control dependency: [if], data = [none] } mc.get().clear(); // depends on control dependency: [if], data = [none] break; } } if (tempLength < len) { tempScore = tempScore * mc.get().size(); // depends on control dependency: [if], data = [none] if (maxScore <= tempScore) { maxScore = tempScore; // depends on control dependency: [if], data = [none] maxIndex = i; // depends on control dependency: [if], data = [none] break; } mc.get().clear(); // depends on control dependency: [if], data = [none] } } StringBuilder sb = new StringBuilder(); for (int i = maxIndex; i < sentences.size(); i++) { sb.append(sentences.get(i).value); // depends on control dependency: [for], data = [i] if (sb.length() > len) { break; } } String summaryStr = sb.toString(); /** * 是否强制文本长度。对于abc这种字符算半个长度 */ if (isSplitSummary && sb.length() > len) { String str = sb.toString(); Sentence sentence = new Sentence(str); computeScore(sentence, sf, true); // depends on control dependency: [if], data = [none] List<Triplet<Integer, Integer, Double>> offset = sentence.offset; List<Integer> beginArr = new ArrayList<>() ; f: for (int i = 0; i < str.length(); i++) { for (Triplet<Integer,Integer,Double> t : offset) { if(i>t.getValue0() && i<t.getValue1()){ continue f; } } if(str.length()-i < len){ break ; } beginArr.add(i); // depends on control dependency: [for], data = [i] } maxIndex = 0 ; // depends on control dependency: [if], data = [none] maxScore = -10000 ; // depends on control dependency: [if], data = [none] for (Integer begin : beginArr) { double score = 0 ; for (Triplet<Integer,Integer,Double> t : offset) { if(begin<t.getValue0() && begin+len>t.getValue1()){ score += t.getValue2() ; // depends on control dependency: [if], data = [none] } } if(score>maxScore){ maxIndex = begin ; // depends on control dependency: [if], data = [none] maxScore = score ; // depends on control dependency: [if], data = [none] } } summaryStr = str.substring(maxIndex, 
Math.min(maxIndex + len + 1, str.length())); // depends on control dependency: [if], data = [none] } return new Summary(keywords, summaryStr); } }
public class class_name {

    /**
     * Adds the non-null fields of this instance as POST parameters on the request.
     * Null fields are omitted entirely so the server does not interpret them as
     * explicit empty values.
     *
     * @param request the request to populate
     */
    private void addPostParams(final Request request) {
        addIfPresent(request, "Url", url);
        addIfPresent(request, "Method", method);
        addIfPresent(request, "Status", status);
        addIfPresent(request, "FallbackUrl", fallbackUrl);
        addIfPresent(request, "FallbackMethod", fallbackMethod);
        addIfPresent(request, "StatusCallback", statusCallback);
        addIfPresent(request, "StatusCallbackMethod", statusCallbackMethod);
    }

    /** Adds {@code name=value.toString()} as a POST parameter when {@code value} is non-null. */
    private static void addIfPresent(final Request request, final String name, final Object value) {
        if (value != null) {
            request.addPostParam(name, value.toString());
        }
    }
}
public class class_name { private void addPostParams(final Request request) { if (url != null) { request.addPostParam("Url", url.toString()); // depends on control dependency: [if], data = [none] } if (method != null) { request.addPostParam("Method", method.toString()); // depends on control dependency: [if], data = [none] } if (status != null) { request.addPostParam("Status", status.toString()); // depends on control dependency: [if], data = [none] } if (fallbackUrl != null) { request.addPostParam("FallbackUrl", fallbackUrl.toString()); // depends on control dependency: [if], data = [none] } if (fallbackMethod != null) { request.addPostParam("FallbackMethod", fallbackMethod.toString()); // depends on control dependency: [if], data = [none] } if (statusCallback != null) { request.addPostParam("StatusCallback", statusCallback.toString()); // depends on control dependency: [if], data = [none] } if (statusCallbackMethod != null) { request.addPostParam("StatusCallbackMethod", statusCallbackMethod.toString()); // depends on control dependency: [if], data = [none] } } }
public class class_name {

    /**
     * Records the word at {@code index} (if any) as a context word for {@code word}.
     * Out-of-range indices are ignored, as are candidates that are not Chinese words
     * of at least two characters.
     *
     * @param words    the token sequence being scanned
     * @param index    position of the candidate context word; may be out of range
     * @param distance signed distance between {@code word} and the candidate
     * @param word     the center word the context is collected for
     * @param map      accumulator mapping each word to its context words
     */
    private static void contextWord(String[] words, int index, int distance, String word,
                                    Map<String, List<ContextWord>> map) {
        boolean inRange = index > -1 && index < words.length;
        String candidate = inRange ? words[index] : null;
        if (candidate != null && Utils.isChineseCharAndLengthAtLeastTwo(candidate)) {
            addToMap(map, word, candidate, distance);
        }
    }
}
public class class_name { private static void contextWord(String[] words, int index, int distance, String word, Map<String, List<ContextWord>> map){ String _word = null; if(index > -1 && index < words.length){ _word = words[index]; // depends on control dependency: [if], data = [none] } if(_word != null && Utils.isChineseCharAndLengthAtLeastTwo(_word)){ addToMap(map, word, _word, distance); // depends on control dependency: [if], data = [none] } } }
public class class_name {

    /**
     * Copies every entry of {@code map} into {@code self} one entry at a time via
     * {@link Map#put}, so the destination map's own put semantics apply to each entry.
     *
     * @param self the destination map, mutated in place
     * @param map  the source entries to copy
     */
    static <K, V> void putAllImpl(Map<K, V> self, Map<? extends K, ? extends V> map) {
        map.forEach(self::put);
    }
}
public class class_name { static <K, V> void putAllImpl(Map<K, V> self, Map<? extends K, ? extends V> map) { for (Map.Entry<? extends K, ? extends V> entry : map.entrySet()) { self.put(entry.getKey(), entry.getValue()); // depends on control dependency: [for], data = [entry] } } }
public class class_name {

    /**
     * Attaches {@code child} (and its subtree, if it is the root of one) beneath this
     * vertex, merging the child's tree into this component's tree, then fires an
     * {@code Attached} event on the appropriate channel(s) and a {@code Start} event
     * for subtrees attached to an already-started tree.
     *
     * Lock order: this vertex, then the child vertex, then this tree, then (when
     * merging) the child's tree.
     *
     * @param child the component to attach
     * @return the attached child, for call chaining
     * @throws IllegalStateException if the child already has a parent or its subtree
     *         is already started
     */
    @Override
    @SuppressWarnings({ "PMD.CyclomaticComplexity", "PMD.NcssCount", "PMD.NPathComplexity" })
    public <T extends ComponentType> T attach(T child) {
        synchronized (this) {
            ComponentVertex childNode = componentVertex(child, null);
            List<Channel> attachedAsChannels = new ArrayList<>();
            synchronized (childNode) {
                // If our tree is already running, remember every channel in the
                // attached subtree so Start can be fired at them afterwards.
                if (tree != null && tree.isStarted()) {
                    for (TreeIterator itr = new TreeIterator(childNode); itr.hasNext();) {
                        attachedAsChannels.add(itr.next());
                    }
                }
                synchronized (tree()) {
                    if (childNode.tree == null) {
                        // Newly created, stand-alone child node
                        childNode.parent = this;
                        childNode.setTree(tree);
                        children.add(childNode);
                    } else {
                        // Attaching a tree...
                        synchronized (childNode.tree) {
                            if (childNode.parent != null) {
                                throw new IllegalStateException(
                                    "Cannot attach a node with a parent.");
                            }
                            if (childNode.tree.isStarted()) {
                                throw new IllegalStateException(
                                    "Cannot attach a started subtree.");
                            }
                            childNode.parent = this;
                            ComponentTree childTree = childNode.tree;
                            childNode.setTree(tree);
                            children.add(childNode);
                            // Pending events from the absorbed tree move to ours.
                            tree.mergeEvents(childTree);
                        }
                    }
                    tree.clearHandlerCache();
                }
            }
            // Determine the channel(s) the Attached event should be fired on.
            Channel parentChan = channel();
            if (parentChan == null) {
                parentChan = Channel.BROADCAST;
            }
            Channel childChan = childNode.channel();
            if (childChan == null) {
                // NOTE(review): assigning parentChan here looks like a copy-paste
                // slip (childChan was presumably intended). The observable outcome
                // is the same - either assignment makes the first branch below fire
                // on BROADCAST before childChan is dereferenced - but confirm intent.
                parentChan = Channel.BROADCAST;
            }
            Attached evt = new Attached(childNode.component(), component());
            if (parentChan.equals(Channel.BROADCAST)
                    || childChan.equals(Channel.BROADCAST)) {
                fire(evt, Channel.BROADCAST);
            } else if (parentChan.equals(childChan)) {
                fire(evt, parentChan);
            } else {
                fire(evt, parentChan, childChan);
            }
            // Subtrees attached to a started tree must themselves be started.
            if (!attachedAsChannels.isEmpty()) {
                fire(new Start(), attachedAsChannels.toArray(new Channel[0]));
            }
            return child;
        }
    }
}
public class class_name { @Override @SuppressWarnings({ "PMD.CyclomaticComplexity", "PMD.NcssCount", "PMD.NPathComplexity" }) public <T extends ComponentType> T attach(T child) { synchronized (this) { ComponentVertex childNode = componentVertex(child, null); List<Channel> attachedAsChannels = new ArrayList<>(); synchronized (childNode) { if (tree != null && tree.isStarted()) { for (TreeIterator itr = new TreeIterator(childNode); itr.hasNext();) { attachedAsChannels.add(itr.next()); // depends on control dependency: [for], data = [itr] } } synchronized (tree()) { if (childNode.tree == null) { // Newly created, stand-alone child node childNode.parent = this; // depends on control dependency: [if], data = [none] childNode.setTree(tree); // depends on control dependency: [if], data = [none] children.add(childNode); // depends on control dependency: [if], data = [none] } else { // Attaching a tree... synchronized (childNode.tree) { // depends on control dependency: [if], data = [(childNode.tree] if (childNode.parent != null) { throw new IllegalStateException( "Cannot attach a node with a parent."); } if (childNode.tree.isStarted()) { throw new IllegalStateException( "Cannot attach a started subtree."); } childNode.parent = this; ComponentTree childTree = childNode.tree; childNode.setTree(tree); children.add(childNode); tree.mergeEvents(childTree); } } tree.clearHandlerCache(); } } Channel parentChan = channel(); if (parentChan == null) { parentChan = Channel.BROADCAST; // depends on control dependency: [if], data = [none] } Channel childChan = childNode.channel(); if (childChan == null) { parentChan = Channel.BROADCAST; // depends on control dependency: [if], data = [none] } Attached evt = new Attached(childNode.component(), component()); if (parentChan.equals(Channel.BROADCAST) || childChan.equals(Channel.BROADCAST)) { fire(evt, Channel.BROADCAST); // depends on control dependency: [if], data = [none] } else if (parentChan.equals(childChan)) { fire(evt, parentChan); // 
depends on control dependency: [if], data = [none] } else { fire(evt, parentChan, childChan); // depends on control dependency: [if], data = [none] } if (!attachedAsChannels.isEmpty()) { fire(new Start(), attachedAsChannels.toArray(new Channel[0])); // depends on control dependency: [if], data = [none] } return child; } } }
public class class_name {

    /**
     * Handles a UID assignment request over HTTP. Accepts GET (values supplied as
     * comma-separated query-string parameters "metric", "tagk", "tagv") or POST
     * (values parsed from the request body). Each requested name is assigned a UID;
     * per-name failures are collected under a "&lt;type&gt;_errors" key and a
     * 400 response is sent if any assignment failed.
     *
     * @param tsdb  the TSDB instance that performs the assignments
     * @param query the HTTP query to parse and reply to
     * @throws BadRequestException for disallowed methods or an empty request
     */
    private void handleAssign(final TSDB tsdb, final HttpQuery query) {
        // only accept GET And POST
        if (query.method() != HttpMethod.GET && query.method() != HttpMethod.POST) {
            throw new BadRequestException(HttpResponseStatus.METHOD_NOT_ALLOWED,
                "Method not allowed", "The HTTP method [" + query.method().getName() +
                "] is not permitted for this endpoint");
        }
        final HashMap<String, List<String>> source;
        if (query.method() == HttpMethod.POST) {
            source = query.serializer().parseUidAssignV1();
        } else {
            source = new HashMap<String, List<String>>(3);
            // cut down on some repetitive code, split the query string values by
            // comma and add them to the source hash
            String[] types = {"metric", "tagk", "tagv"};
            for (int i = 0; i < types.length; i++) {
                final String values = query.getQueryStringParam(types[i]);
                if (values != null && !values.isEmpty()) {
                    final String[] metrics = values.split(",");
                    // NOTE(review): String.split never returns null, so the null
                    // half of this check is purely defensive.
                    if (metrics != null && metrics.length > 0) {
                        source.put(types[i], Arrays.asList(metrics));
                    }
                }
            }
        }
        if (source.size() < 1) {
            throw new BadRequestException("Missing values to assign UIDs");
        }
        // Per UID type: a sorted map of name -> assigned UID, plus an errors map.
        final Map<String, TreeMap<String, String>> response =
            new HashMap<String, TreeMap<String, String>>();
        int error_count = 0;
        for (Map.Entry<String, List<String>> entry : source.entrySet()) {
            final TreeMap<String, String> results = new TreeMap<String, String>();
            final TreeMap<String, String> errors = new TreeMap<String, String>();
            for (String name : entry.getValue()) {
                try {
                    final byte[] uid = tsdb.assignUid(entry.getKey(), name);
                    results.put(name, UniqueId.uidToString(uid));
                } catch (IllegalArgumentException e) {
                    // One bad name does not abort the batch; record it and continue.
                    errors.put(name, e.getMessage());
                    error_count++;
                }
            }
            response.put(entry.getKey(), results);
            if (errors.size() > 0) {
                response.put(entry.getKey() + "_errors", errors);
            }
        }
        // Any failure downgrades the whole reply to 400, but successful
        // assignments are still included in the body.
        if (error_count < 1) {
            query.sendReply(query.serializer().formatUidAssignV1(response));
        } else {
            query.sendReply(HttpResponseStatus.BAD_REQUEST,
                query.serializer().formatUidAssignV1(response));
        }
    }
}
public class class_name { private void handleAssign(final TSDB tsdb, final HttpQuery query) { // only accept GET And POST if (query.method() != HttpMethod.GET && query.method() != HttpMethod.POST) { throw new BadRequestException(HttpResponseStatus.METHOD_NOT_ALLOWED, "Method not allowed", "The HTTP method [" + query.method().getName() + "] is not permitted for this endpoint"); } final HashMap<String, List<String>> source; if (query.method() == HttpMethod.POST) { source = query.serializer().parseUidAssignV1(); // depends on control dependency: [if], data = [none] } else { source = new HashMap<String, List<String>>(3); // depends on control dependency: [if], data = [none] // cut down on some repetitive code, split the query string values by // comma and add them to the source hash String[] types = {"metric", "tagk", "tagv"}; for (int i = 0; i < types.length; i++) { final String values = query.getQueryStringParam(types[i]); if (values != null && !values.isEmpty()) { final String[] metrics = values.split(","); if (metrics != null && metrics.length > 0) { source.put(types[i], Arrays.asList(metrics)); // depends on control dependency: [if], data = [(metrics] } } } } if (source.size() < 1) { throw new BadRequestException("Missing values to assign UIDs"); } final Map<String, TreeMap<String, String>> response = new HashMap<String, TreeMap<String, String>>(); int error_count = 0; for (Map.Entry<String, List<String>> entry : source.entrySet()) { final TreeMap<String, String> results = new TreeMap<String, String>(); final TreeMap<String, String> errors = new TreeMap<String, String>(); for (String name : entry.getValue()) { try { final byte[] uid = tsdb.assignUid(entry.getKey(), name); results.put(name, UniqueId.uidToString(uid)); // depends on control dependency: [try], data = [none] } catch (IllegalArgumentException e) { errors.put(name, e.getMessage()); error_count++; } // depends on control dependency: [catch], data = [none] } response.put(entry.getKey(), results); // 
depends on control dependency: [for], data = [entry] if (errors.size() > 0) { response.put(entry.getKey() + "_errors", errors); // depends on control dependency: [if], data = [none] } } if (error_count < 1) { query.sendReply(query.serializer().formatUidAssignV1(response)); // depends on control dependency: [if], data = [none] } else { query.sendReply(HttpResponseStatus.BAD_REQUEST, query.serializer().formatUidAssignV1(response)); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static File getJNLPLocalScratch() { try { String machineName = InetAddress.getLocalHost().getHostName().split("\\.")[0]; String username = System.getProperty("user.name"); return new File("/"+machineName+"/scr1/"+username); } catch (Exception e) { return new File("./scr/"); // default scratch } } }
public class class_name { public static File getJNLPLocalScratch() { try { String machineName = InetAddress.getLocalHost().getHostName().split("\\.")[0]; String username = System.getProperty("user.name"); return new File("/"+machineName+"/scr1/"+username); // depends on control dependency: [try], data = [none] } catch (Exception e) { return new File("./scr/"); // default scratch } // depends on control dependency: [catch], data = [none] } }
public class class_name {

    /**
     * Pushes filter expressions down into branch children of this join node.
     * FULL joins accept no push-down at all; the outer (left) side accepts the
     * outer-where list for any other join type, while the inner (right) side only
     * accepts the inner-where list for INNER joins.
     *
     * @param noneList expressions that could not be pushed further down
     */
    protected void pushDownExpressions(List<AbstractExpression> noneList) {
        JoinType joinType = getJoinType();
        if (joinType == JoinType.FULL) {
            return;
        }
        JoinNode left = getLeftNode();
        if (left instanceof BranchNode) {
            ((BranchNode) left).pushDownExpressionsRecursively(m_whereOuterList, noneList);
        }
        JoinNode right = getRightNode();
        if (right instanceof BranchNode && joinType == JoinType.INNER) {
            ((BranchNode) right).pushDownExpressionsRecursively(m_whereInnerList, noneList);
        }
    }
}
public class class_name { protected void pushDownExpressions(List<AbstractExpression> noneList) { JoinType joinType = getJoinType(); if (joinType == JoinType.FULL) { return; // depends on control dependency: [if], data = [none] } JoinNode outerNode = getLeftNode(); if (outerNode instanceof BranchNode) { ((BranchNode)outerNode).pushDownExpressionsRecursively(m_whereOuterList, noneList); // depends on control dependency: [if], data = [none] } JoinNode innerNode = getRightNode(); if (innerNode instanceof BranchNode && joinType == JoinType.INNER) { ((BranchNode)innerNode).pushDownExpressionsRecursively(m_whereInnerList, noneList); // depends on control dependency: [if], data = [none] } } }
public class class_name {

    /**
     * Renders the HTML for one page of the user-editor dialog.
     *
     * Only the first page ({@code PAGES[0]}) has content: identification and address
     * blocks, an optional settings block (suppressed for web-user organizational
     * units), and an authentication block whose widget-row range depends on whether
     * password changes are allowed. The numeric row indices must stay in sync with
     * the order in which the dialog's widgets are registered elsewhere.
     *
     * @param dialog the key of the dialog page to render
     * @return the generated HTML
     */
    @Override
    protected String createDialogHtml(String dialog) {
        StringBuffer result = new StringBuffer(1024);
        result.append(createWidgetTableStart());
        // show error header once if there were validation errors
        result.append(createWidgetErrorHeader());
        if (dialog.equals(PAGES[0])) {
            boolean webuserOu = false;
            try {
                webuserOu = OpenCms.getOrgUnitManager().readOrganizationalUnit(
                    getCms(), getParamOufqn()).hasFlagWebuser();
            } catch (CmsException e) {
                // ignore - treat an unreadable OU as a non-web-user OU
            }
            // create the widgets for the first dialog page
            result.append(dialogBlockStart(key(Messages.GUI_USER_EDITOR_LABEL_IDENTIFICATION_BLOCK_0)));
            result.append(createWidgetTableStart());
            result.append(createDialogRowsHtml(0, 5));
            result.append(createWidgetTableEnd());
            result.append(dialogBlockEnd());
            result.append(dialogBlockStart(key(Messages.GUI_USER_EDITOR_LABEL_ADDRESS_BLOCK_0)));
            result.append(createWidgetTableStart());
            result.append(createDialogRowsHtml(6, 10));
            result.append(createWidgetTableEnd());
            result.append(dialogBlockEnd());
            // Last settings-row index; new users get two extra widget rows.
            int row = isNewUser() ? 17 : 15;
            if (!webuserOu) {
                if (getSites().isEmpty()) {
                    // No site selector widget: one row fewer.
                    row -= 1;
                }
                result.append(dialogBlockStart(key(Messages.GUI_USER_EDITOR_LABEL_SETTINGS_BLOCK_0)));
                result.append(createWidgetTableStart());
                result.append(createDialogRowsHtml(11, row));
                result.append(createWidgetTableEnd());
                result.append(dialogBlockEnd());
            } else {
                // Web-user OUs skip the settings block entirely; authentication
                // rows start right after the address block.
                row = 10;
            }
            row++;
            result.append(dialogBlockStart(key(Messages.GUI_USER_EDITOR_LABEL_AUTHENTIFICATION_BLOCK_0)));
            result.append(createWidgetTableStart());
            if (isPwdChangeAllowed(m_user)) {
                // Password-change widgets add two extra rows.
                result.append(createDialogRowsHtml(row, row + 3));
            } else {
                result.append(createDialogRowsHtml(row, row + 1));
            }
            result.append(createWidgetTableEnd());
            result.append(dialogBlockEnd());
        }
        result.append(createWidgetTableEnd());
        return result.toString();
    }
}
public class class_name { void setException(Throwable theException) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "setException - Future object: " + this, theException); } // set exception, we are done ivException = theException; done(); // F743-11774 } }
public class class_name { void setException(Throwable theException) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "setException - Future object: " + this, theException); // depends on control dependency: [if], data = [none] } // set exception, we are done ivException = theException; done(); // F743-11774 } }
public class class_name {

    /**
     * Removes a tracked call, dispatching on its operation type.
     *
     * @param call the call to unregister; must not be {@code null}
     * @throws IllegalArgumentException if the operation is neither a Query nor a Mutation
     */
    void unregisterCall(@NotNull ApolloCall call) {
        checkNotNull(call, "call == null");
        Operation operation = call.operation();
        if (operation instanceof Query) {
            unregisterQueryCall((ApolloQueryCall) call);
            return;
        }
        if (operation instanceof Mutation) {
            unregisterMutationCall((ApolloMutationCall) call);
            return;
        }
        throw new IllegalArgumentException("Unknown call type");
    }
}
public class class_name { void unregisterCall(@NotNull ApolloCall call) { checkNotNull(call, "call == null"); Operation operation = call.operation(); if (operation instanceof Query) { unregisterQueryCall((ApolloQueryCall) call); // depends on control dependency: [if], data = [none] } else if (operation instanceof Mutation) { unregisterMutationCall((ApolloMutationCall) call); // depends on control dependency: [if], data = [none] } else { throw new IllegalArgumentException("Unknown call type"); } } }
public class class_name {

    /**
     * Computes the overlap of two bounding boxes, first shifting the second box by a
     * full world width (2 * maxLongitude) when the boxes lie on opposite sides of the
     * antimeridian, so comparable longitude ranges are used.
     *
     * @param boundingBox  the first box
     * @param boundingBox2 the second box; never mutated (a shifted copy is used)
     * @param maxLongitude maximum longitude of the projection; non-positive disables shifting
     * @param allowEmpty   whether a zero-area overlap is allowed
     * @return the overlapping bounding box, per {@code overlap(box, box2, allowEmpty)}
     */
    public static BoundingBox overlap(BoundingBox boundingBox,
            BoundingBox boundingBox2, double maxLongitude, boolean allowEmpty) {
        double shift = 0.0;
        if (maxLongitude > 0) {
            if (boundingBox.getMinLongitude() > boundingBox2.getMaxLongitude()) {
                shift = maxLongitude * 2.0;
            } else if (boundingBox.getMaxLongitude() < boundingBox2.getMinLongitude()) {
                shift = maxLongitude * -2.0;
            }
        }
        BoundingBox adjusted = boundingBox2;
        if (shift != 0.0) {
            adjusted = new BoundingBox(boundingBox2);
            adjusted.setMinLongitude(adjusted.getMinLongitude() + shift);
            adjusted.setMaxLongitude(adjusted.getMaxLongitude() + shift);
        }
        return overlap(boundingBox, adjusted, allowEmpty);
    }
}
public class class_name { public static BoundingBox overlap(BoundingBox boundingBox, BoundingBox boundingBox2, double maxLongitude, boolean allowEmpty) { BoundingBox bbox2 = boundingBox2; double adjustment = 0.0; if (maxLongitude > 0) { if (boundingBox.getMinLongitude() > boundingBox2.getMaxLongitude()) { adjustment = maxLongitude * 2.0; // depends on control dependency: [if], data = [none] } else if (boundingBox.getMaxLongitude() < boundingBox2 .getMinLongitude()) { adjustment = maxLongitude * -2.0; // depends on control dependency: [if], data = [none] } } if (adjustment != 0.0) { bbox2 = new BoundingBox(boundingBox2); // depends on control dependency: [if], data = [none] bbox2.setMinLongitude(bbox2.getMinLongitude() + adjustment); // depends on control dependency: [if], data = [none] bbox2.setMaxLongitude(bbox2.getMaxLongitude() + adjustment); // depends on control dependency: [if], data = [none] } return overlap(boundingBox, bbox2, allowEmpty); } }
public class class_name { @Override protected void close() { this.stop = true; // Close the socket to force blocking operations to return. if (this.socket != null && !this.socket.isClosed()) { this.socket.close(); } } }
public class class_name { @Override protected void close() { this.stop = true; // Close the socket to force blocking operations to return. if (this.socket != null && !this.socket.isClosed()) { this.socket.close(); // depends on control dependency: [if], data = [none] } } }
public class class_name {

    /**
     * Creates a snippet of the resource's content between the given character offsets.
     * An unreadable resource degrades to {@link ResourceSnippet#Empty} rather than
     * propagating the I/O failure.
     *
     * @param res       the resource to read
     * @param startChar first character offset (inclusive)
     * @param endChar   last character offset
     * @param charset   charset used to decode the resource's bytes
     * @return the snippet, or the empty snippet if the resource cannot be opened
     */
    public static ResourceSnippet createResourceSnippet(Resource res, int startChar,
            int endChar, String charset) {
        try {
            return createResourceSnippet(res.getInputStream(), startChar, endChar, charset);
        } catch (IOException ex) {
            return ResourceSnippet.Empty;
        }
    }
}
public class class_name { public static ResourceSnippet createResourceSnippet(Resource res, int startChar, int endChar, String charset) { try { return createResourceSnippet(res.getInputStream(), startChar, endChar, charset); // depends on control dependency: [try], data = [none] } catch (IOException ex) { return ResourceSnippet.Empty; } // depends on control dependency: [catch], data = [none] } }
public class class_name {

    /**
     * Returns the Euclidean norm of row 0: the square root of the sum of squared
     * entries. Squaring is done in {@code float} (matching the stored element type);
     * accumulation uses {@code double} for precision.
     */
    public float norm () {
        double sumOfSquares = 0;
        for (int col = 0; col < m_nCols; ++col) {
            final float value = m_aValues[0][col];
            sumOfSquares += value * value;
        }
        return (float) Math.sqrt (sumOfSquares);
    }
}
public class class_name { public float norm () { double t = 0; for (int c = 0; c < m_nCols; ++c) { final float v = m_aValues[0][c]; t += v * v; // depends on control dependency: [for], data = [none] } return (float) Math.sqrt (t); } }
public class class_name { public static double[] getMin(SpatialComparable box) { final int dim = box.getDimensionality(); double[] min = new double[dim]; for(int i = 0; i < dim; i++) { min[i] = box.getMin(i); } return min; } }
public class class_name { public static double[] getMin(SpatialComparable box) { final int dim = box.getDimensionality(); double[] min = new double[dim]; for(int i = 0; i < dim; i++) { min[i] = box.getMin(i); // depends on control dependency: [for], data = [i] } return min; } }
public class class_name { private void removeNodes(String xpathExpression, Document domdoc) { List<Node> nodes = domdoc.selectNodes(xpathExpression); for (Node node : nodes) { node.detach(); } } }
public class class_name { private void removeNodes(String xpathExpression, Document domdoc) { List<Node> nodes = domdoc.selectNodes(xpathExpression); for (Node node : nodes) { node.detach(); // depends on control dependency: [for], data = [node] } } }
public class class_name { private AuthorizationIssueResponse callAuthorizationIssue( String ticket, String subject, long authTime, String acr, Map<String, Object> claims, Property[] properties, String[] scopes) { // Create a request for /api/auth/authorization/issue API. AuthorizationIssueRequest request = new AuthorizationIssueRequest() .setTicket(ticket) .setSubject(subject) .setAuthTime(authTime) .setAcr(acr) .setProperties(properties) .setScopes(scopes) ; if (claims != null && claims.size() != 0) { request.setClaims(claims); } try { // Call Authlete's /api/auth/authorization/issue API. return mApi.authorizationIssue(request); } catch (AuthleteApiException e) { // The API call failed. throw apiFailure("/api/auth/authorization/issue", e); } } }
public class class_name { private AuthorizationIssueResponse callAuthorizationIssue( String ticket, String subject, long authTime, String acr, Map<String, Object> claims, Property[] properties, String[] scopes) { // Create a request for /api/auth/authorization/issue API. AuthorizationIssueRequest request = new AuthorizationIssueRequest() .setTicket(ticket) .setSubject(subject) .setAuthTime(authTime) .setAcr(acr) .setProperties(properties) .setScopes(scopes) ; if (claims != null && claims.size() != 0) { request.setClaims(claims); // depends on control dependency: [if], data = [(claims] } try { // Call Authlete's /api/auth/authorization/issue API. return mApi.authorizationIssue(request); // depends on control dependency: [try], data = [none] } catch (AuthleteApiException e) { // The API call failed. throw apiFailure("/api/auth/authorization/issue", e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Nonnull public static ISimpleURL getAsURLData (@Nonnull final String sHref, @Nullable final IDecoder <String, String> aParameterDecoder) { ValueEnforcer.notNull (sHref, "Href"); final String sRealHref = sHref.trim (); // Is it a protocol that does not allow for query parameters? final IURLProtocol eProtocol = URLProtocolRegistry.getInstance ().getProtocol (sRealHref); if (eProtocol != null && !eProtocol.allowsForQueryParameters ()) return new URLData (sRealHref, null, null); if (GlobalDebug.isDebugMode ()) if (eProtocol != null) try { new URL (sRealHref); } catch (final MalformedURLException ex) { if (LOGGER.isWarnEnabled ()) LOGGER.warn ("java.net.URL claims URL '" + sRealHref + "' to be invalid: " + ex.getMessage ()); } String sPath; URLParameterList aParams = null; String sAnchor; // First get the anchor out String sRemainingHref = sRealHref; final int nIndexAnchor = sRemainingHref.indexOf (HASH); if (nIndexAnchor >= 0) { // Extract anchor sAnchor = sRemainingHref.substring (nIndexAnchor + 1).trim (); sRemainingHref = sRemainingHref.substring (0, nIndexAnchor).trim (); } else sAnchor = null; // Find parameters final int nQuestionIndex = sRemainingHref.indexOf (QUESTIONMARK); if (nQuestionIndex >= 0) { // Use everything after the '?' final String sQueryString = sRemainingHref.substring (nQuestionIndex + 1).trim (); // Maybe empty, if the URL ends with a '?' if (StringHelper.hasText (sQueryString)) aParams = getParsedQueryParameters (sQueryString, aParameterDecoder); sPath = sRemainingHref.substring (0, nQuestionIndex).trim (); } else sPath = sRemainingHref; return new URLData (sPath, aParams, sAnchor); } }
public class class_name { @Nonnull public static ISimpleURL getAsURLData (@Nonnull final String sHref, @Nullable final IDecoder <String, String> aParameterDecoder) { ValueEnforcer.notNull (sHref, "Href"); final String sRealHref = sHref.trim (); // Is it a protocol that does not allow for query parameters? final IURLProtocol eProtocol = URLProtocolRegistry.getInstance ().getProtocol (sRealHref); if (eProtocol != null && !eProtocol.allowsForQueryParameters ()) return new URLData (sRealHref, null, null); if (GlobalDebug.isDebugMode ()) if (eProtocol != null) try { new URL (sRealHref); // depends on control dependency: [try], data = [none] } catch (final MalformedURLException ex) { if (LOGGER.isWarnEnabled ()) LOGGER.warn ("java.net.URL claims URL '" + sRealHref + "' to be invalid: " + ex.getMessage ()); } // depends on control dependency: [catch], data = [none] String sPath; URLParameterList aParams = null; String sAnchor; // First get the anchor out String sRemainingHref = sRealHref; final int nIndexAnchor = sRemainingHref.indexOf (HASH); if (nIndexAnchor >= 0) { // Extract anchor sAnchor = sRemainingHref.substring (nIndexAnchor + 1).trim (); // depends on control dependency: [if], data = [(nIndexAnchor] sRemainingHref = sRemainingHref.substring (0, nIndexAnchor).trim (); // depends on control dependency: [if], data = [none] } else sAnchor = null; // Find parameters final int nQuestionIndex = sRemainingHref.indexOf (QUESTIONMARK); if (nQuestionIndex >= 0) { // Use everything after the '?' final String sQueryString = sRemainingHref.substring (nQuestionIndex + 1).trim (); // Maybe empty, if the URL ends with a '?' if (StringHelper.hasText (sQueryString)) aParams = getParsedQueryParameters (sQueryString, aParameterDecoder); sPath = sRemainingHref.substring (0, nQuestionIndex).trim (); // depends on control dependency: [if], data = [none] } else sPath = sRemainingHref; return new URLData (sPath, aParams, sAnchor); } }
public class class_name { @SuppressWarnings("unchecked") public <R> Value<R> map(Function<? super V, ? extends R> mapper) { LettuceAssert.notNull(mapper, "Mapper function must not be null"); if (hasValue()) { return new Value<R>(mapper.apply(getValue())); } return (Value<R>) this; } }
public class class_name { @SuppressWarnings("unchecked") public <R> Value<R> map(Function<? super V, ? extends R> mapper) { LettuceAssert.notNull(mapper, "Mapper function must not be null"); if (hasValue()) { return new Value<R>(mapper.apply(getValue())); // depends on control dependency: [if], data = [none] } return (Value<R>) this; } }
public class class_name { public ListEnabledProductsForImportResult withProductSubscriptions(String... productSubscriptions) { if (this.productSubscriptions == null) { setProductSubscriptions(new java.util.ArrayList<String>(productSubscriptions.length)); } for (String ele : productSubscriptions) { this.productSubscriptions.add(ele); } return this; } }
public class class_name { public ListEnabledProductsForImportResult withProductSubscriptions(String... productSubscriptions) { if (this.productSubscriptions == null) { setProductSubscriptions(new java.util.ArrayList<String>(productSubscriptions.length)); // depends on control dependency: [if], data = [none] } for (String ele : productSubscriptions) { this.productSubscriptions.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { public static String toStringUTF8(byte[] bytes) { if (bytes == null) { return null; } return toStringUTF8(bytes, 0, bytes.length); } }
public class class_name { public static String toStringUTF8(byte[] bytes) { if (bytes == null) { return null; // depends on control dependency: [if], data = [none] } return toStringUTF8(bytes, 0, bytes.length); } }
public class class_name { @Override public boolean launch(final Activity context, Bundle data) { if(entityLoaderUtils != null) { final SocializeEntityLoader entityLoader = entityLoaderUtils.initEntityLoader(); if(entityLoader != null) { Object idObj = data.get(Socialize.ENTITY_ID); if(idObj != null) { long id = Long.parseLong(idObj.toString()); try { SocializeSession session = notificationAuthenticator.authenticate(context); Entity entity = entitySystem.getEntitySynchronous(session, id); return loadEntity(context, entityLoader, entity); } catch (Exception e) { handleError("Failed to load entity", e); } } else { handleWarn("No entity id found. Entity based notification cannot be handled"); } } else { handleWarn("No entity loader found. Entity based notification cannot be handled"); } } return false; } }
public class class_name { @Override public boolean launch(final Activity context, Bundle data) { if(entityLoaderUtils != null) { final SocializeEntityLoader entityLoader = entityLoaderUtils.initEntityLoader(); if(entityLoader != null) { Object idObj = data.get(Socialize.ENTITY_ID); if(idObj != null) { long id = Long.parseLong(idObj.toString()); try { SocializeSession session = notificationAuthenticator.authenticate(context); Entity entity = entitySystem.getEntitySynchronous(session, id); return loadEntity(context, entityLoader, entity); // depends on control dependency: [try], data = [none] } catch (Exception e) { handleError("Failed to load entity", e); } // depends on control dependency: [catch], data = [none] } else { handleWarn("No entity id found. Entity based notification cannot be handled"); // depends on control dependency: [if], data = [none] } } else { handleWarn("No entity loader found. Entity based notification cannot be handled"); // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name { public void setDispatcherType(DispatcherType dispatcherType) { if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) logger.logp(Level.FINE, CLASS_NAME,"setDispatcherType","dispatcherType->"+dispatcherType); if (WCCustomProperties.CHECK_REQUEST_OBJECT_IN_USE){ checkRequestObjectInUse(); } this._srtRequestHelper.dispatcherType = dispatcherType; } }
public class class_name { public void setDispatcherType(DispatcherType dispatcherType) { if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) logger.logp(Level.FINE, CLASS_NAME,"setDispatcherType","dispatcherType->"+dispatcherType); if (WCCustomProperties.CHECK_REQUEST_OBJECT_IN_USE){ checkRequestObjectInUse(); // depends on control dependency: [if], data = [none] } this._srtRequestHelper.dispatcherType = dispatcherType; } }
public class class_name { @Override public void setAggregator(IAggregator aggregator) { if (this.aggregator != null) { throw new IllegalStateException(); } this.aggregator = aggregator; // See if the max cache entries init-param has changed and int newMaxCapacity = getMaxCapacity(aggregator); // If the maximum size has changed, then create a new layerBuildMap with the new // max size and copy the entries from the existing map to the new map ConcurrentLinkedHashMap<String, CacheEntry> oldLayerBuildMap = null; // have no layer builds in the layerBuildMap if (maxCapacity != newMaxCapacity) { maxCapacity = newMaxCapacity; oldLayerBuildMap = layerBuildMap; layerBuildMap = new ConcurrentLinkedHashMap.Builder<String, CacheEntry>() .maximumWeightedCapacity(maxCapacity) .listener(newEvictionListener()) .weigher(newWeigher()) .build(); // Need to call setLayerBuildAccessors BEFORE calling putAll because // it might result in the eviction handler being called. setLayerBuildAccessors(oldLayerBuildMap.keySet()); layerBuildMap.putAll(oldLayerBuildMap.ascendingMap()); oldLayerBuildMap.clear(); } else { setLayerBuildAccessors(layerBuildMap.keySet()); } } }
public class class_name { @Override public void setAggregator(IAggregator aggregator) { if (this.aggregator != null) { throw new IllegalStateException(); } this.aggregator = aggregator; // See if the max cache entries init-param has changed and int newMaxCapacity = getMaxCapacity(aggregator); // If the maximum size has changed, then create a new layerBuildMap with the new // max size and copy the entries from the existing map to the new map ConcurrentLinkedHashMap<String, CacheEntry> oldLayerBuildMap = null; // have no layer builds in the layerBuildMap if (maxCapacity != newMaxCapacity) { maxCapacity = newMaxCapacity; // depends on control dependency: [if], data = [none] oldLayerBuildMap = layerBuildMap; // depends on control dependency: [if], data = [none] layerBuildMap = new ConcurrentLinkedHashMap.Builder<String, CacheEntry>() .maximumWeightedCapacity(maxCapacity) .listener(newEvictionListener()) .weigher(newWeigher()) .build(); // depends on control dependency: [if], data = [none] // Need to call setLayerBuildAccessors BEFORE calling putAll because // it might result in the eviction handler being called. setLayerBuildAccessors(oldLayerBuildMap.keySet()); // depends on control dependency: [if], data = [none] layerBuildMap.putAll(oldLayerBuildMap.ascendingMap()); // depends on control dependency: [if], data = [none] oldLayerBuildMap.clear(); // depends on control dependency: [if], data = [none] } else { setLayerBuildAccessors(layerBuildMap.keySet()); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static Object newInstance(String s) { final ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); try { Class< ? > clazz = classLoader.loadClass(s); Constructor< ? > constructor = clazz.getConstructor(); return constructor.newInstance(); } catch (ClassNotFoundException e) { return null; } catch (IllegalAccessException e) { return null; } catch (InstantiationException e) { return null; } catch (NoSuchMethodException e) { // no default constructor return null; } catch (InvocationTargetException e) { // constructor threw an exception return null; } } }
public class class_name { public static Object newInstance(String s) { final ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); try { Class< ? > clazz = classLoader.loadClass(s); Constructor< ? > constructor = clazz.getConstructor(); return constructor.newInstance(); } catch (ClassNotFoundException e) { return null; } // depends on control dependency: [catch], data = [none] catch (IllegalAccessException e) { return null; } // depends on control dependency: [catch], data = [none] catch (InstantiationException e) { return null; } // depends on control dependency: [catch], data = [none] catch (NoSuchMethodException e) { // no default constructor return null; } // depends on control dependency: [catch], data = [none] catch (InvocationTargetException e) { // constructor threw an exception return null; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public String getRootCauseMessage() { String rcmessage = null; if (getRootCause() != null) { if (getRootCause().getCause() != null) { rcmessage = getRootCause().getCause().getMessage(); } rcmessage = rcmessage == null ? getRootCause().getMessage() : rcmessage; rcmessage = rcmessage == null ? super.getMessage() : rcmessage; rcmessage = rcmessage == null ? "NONE" : rcmessage; } return rcmessage; } }
public class class_name { public String getRootCauseMessage() { String rcmessage = null; if (getRootCause() != null) { if (getRootCause().getCause() != null) { rcmessage = getRootCause().getCause().getMessage(); // depends on control dependency: [if], data = [none] } rcmessage = rcmessage == null ? getRootCause().getMessage() : rcmessage; // depends on control dependency: [if], data = [none] rcmessage = rcmessage == null ? super.getMessage() : rcmessage; // depends on control dependency: [if], data = [none] rcmessage = rcmessage == null ? "NONE" : rcmessage; // depends on control dependency: [if], data = [none] } return rcmessage; } }
public class class_name { @Override public final String getIndexName() { final Note note = noteRenderer.getGedObject(); if (!note.isSet()) { return ""; } final String nameHtml = noteRenderer.getTitleString(); return "<a href=\"note?db=" + note.getDbName() + "&amp;id=" + note.getString() + "\" class=\"name\" id=\"note-" + note.getString() + "\">" + nameHtml + " (" + note.getString() + ")</a>"; } }
public class class_name { @Override public final String getIndexName() { final Note note = noteRenderer.getGedObject(); if (!note.isSet()) { return ""; // depends on control dependency: [if], data = [none] } final String nameHtml = noteRenderer.getTitleString(); return "<a href=\"note?db=" + note.getDbName() + "&amp;id=" + note.getString() + "\" class=\"name\" id=\"note-" + note.getString() + "\">" + nameHtml + " (" + note.getString() + ")</a>"; } }
public class class_name { public List<int[]> getNPaths(int n) { List<int[]> result = new ArrayList<int[]>(); n = Math.min(Predefine.MAX_SEGMENT_NUM, n); for (int i = 0; i < N && result.size() < n; ++i) { List<int[]> pathList = getPaths(i); for (int[] path : pathList) { if (result.size() == n) break; result.add(path); } } return result; } }
public class class_name { public List<int[]> getNPaths(int n) { List<int[]> result = new ArrayList<int[]>(); n = Math.min(Predefine.MAX_SEGMENT_NUM, n); for (int i = 0; i < N && result.size() < n; ++i) { List<int[]> pathList = getPaths(i); for (int[] path : pathList) { if (result.size() == n) break; result.add(path); // depends on control dependency: [for], data = [path] } } return result; } }
public class class_name { public static void convert(File infile, File xslfile, File outfile) { try { // Create transformer factory TransformerFactory factory = TransformerFactory.newInstance(); // Use the factory to create a template containing the xsl file Templates template = factory.newTemplates(new StreamSource( new FileInputStream(xslfile))); // Use the template to create a transformer Transformer xformer = template.newTransformer(); // passing 2 parameters String branch = outfile.getParentFile().getCanonicalPath().substring(root.length()); branch = branch.replace(File.separatorChar, '/'); StringBuffer path = new StringBuffer(); for (int i = 0; i < branch.length(); i++) { if (branch.charAt(i) == '/') path.append("/.."); } xformer.setParameter("branch", branch); xformer.setParameter("root", path.toString()); // Prepare the input and output files Source source = new StreamSource(new FileInputStream(infile)); Result result = new StreamResult(new FileOutputStream(outfile)); // Apply the xsl file to the source file and write the result to the // output file xformer.transform(source, result); } catch (Exception e) { e.printStackTrace(); } } }
public class class_name { public static void convert(File infile, File xslfile, File outfile) { try { // Create transformer factory TransformerFactory factory = TransformerFactory.newInstance(); // Use the factory to create a template containing the xsl file Templates template = factory.newTemplates(new StreamSource( new FileInputStream(xslfile))); // Use the template to create a transformer Transformer xformer = template.newTransformer(); // passing 2 parameters String branch = outfile.getParentFile().getCanonicalPath().substring(root.length()); branch = branch.replace(File.separatorChar, '/'); // depends on control dependency: [try], data = [none] StringBuffer path = new StringBuffer(); for (int i = 0; i < branch.length(); i++) { if (branch.charAt(i) == '/') path.append("/.."); } xformer.setParameter("branch", branch); // depends on control dependency: [try], data = [none] xformer.setParameter("root", path.toString()); // depends on control dependency: [try], data = [none] // Prepare the input and output files Source source = new StreamSource(new FileInputStream(infile)); Result result = new StreamResult(new FileOutputStream(outfile)); // Apply the xsl file to the source file and write the result to the // output file xformer.transform(source, result); // depends on control dependency: [try], data = [none] } catch (Exception e) { e.printStackTrace(); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void remove(int p) { npoints--; int q = index[p]; index[points[q] = points[npoints]] = q; for (int i = 0; i < npoints; i++) { if (neighbor[points[i]] == p) { findNeighbor(points[i]); } } } }
public class class_name { public void remove(int p) { npoints--; int q = index[p]; index[points[q] = points[npoints]] = q; for (int i = 0; i < npoints; i++) { if (neighbor[points[i]] == p) { findNeighbor(points[i]); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public EClass getIfcMaterialClassificationRelationship() { if (ifcMaterialClassificationRelationshipEClass == null) { ifcMaterialClassificationRelationshipEClass = (EClass) EPackage.Registry.INSTANCE .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(307); } return ifcMaterialClassificationRelationshipEClass; } }
public class class_name { public EClass getIfcMaterialClassificationRelationship() { if (ifcMaterialClassificationRelationshipEClass == null) { ifcMaterialClassificationRelationshipEClass = (EClass) EPackage.Registry.INSTANCE .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(307); // depends on control dependency: [if], data = [none] } return ifcMaterialClassificationRelationshipEClass; } }
public class class_name { public synchronized IndexTreePath<DeLiCluEntry> setHandled(DBID id, O obj) { if(LOG.isDebugging()) { LOG.debugFine("setHandled " + id + ", " + obj + "\n"); } // find the leaf node containing o IndexTreePath<DeLiCluEntry> pathToObject = findPathToObject(getRootPath(), obj, id); if(pathToObject == null) { throw new AbortException("Object not found in setHandled."); } // set o handled DeLiCluEntry entry = pathToObject.getEntry(); entry.setHasHandled(true); entry.setHasUnhandled(false); for(IndexTreePath<DeLiCluEntry> path = pathToObject; path.getParentPath() != null; path = path.getParentPath()) { DeLiCluEntry parentEntry = path.getParentPath().getEntry(); DeLiCluNode node = getNode(parentEntry); boolean hasHandled = false; boolean hasUnhandled = false; for(int i = 0; i < node.getNumEntries(); i++) { final DeLiCluEntry nodeEntry = node.getEntry(i); hasHandled = hasHandled || nodeEntry.hasHandled(); hasUnhandled = hasUnhandled || nodeEntry.hasUnhandled(); } parentEntry.setHasUnhandled(hasUnhandled); parentEntry.setHasHandled(hasHandled); } return pathToObject; } }
public class class_name { public synchronized IndexTreePath<DeLiCluEntry> setHandled(DBID id, O obj) { if(LOG.isDebugging()) { LOG.debugFine("setHandled " + id + ", " + obj + "\n"); // depends on control dependency: [if], data = [none] } // find the leaf node containing o IndexTreePath<DeLiCluEntry> pathToObject = findPathToObject(getRootPath(), obj, id); if(pathToObject == null) { throw new AbortException("Object not found in setHandled."); } // set o handled DeLiCluEntry entry = pathToObject.getEntry(); entry.setHasHandled(true); entry.setHasUnhandled(false); for(IndexTreePath<DeLiCluEntry> path = pathToObject; path.getParentPath() != null; path = path.getParentPath()) { DeLiCluEntry parentEntry = path.getParentPath().getEntry(); DeLiCluNode node = getNode(parentEntry); boolean hasHandled = false; boolean hasUnhandled = false; for(int i = 0; i < node.getNumEntries(); i++) { final DeLiCluEntry nodeEntry = node.getEntry(i); hasHandled = hasHandled || nodeEntry.hasHandled(); // depends on control dependency: [for], data = [none] hasUnhandled = hasUnhandled || nodeEntry.hasUnhandled(); // depends on control dependency: [for], data = [none] } parentEntry.setHasUnhandled(hasUnhandled); // depends on control dependency: [for], data = [none] parentEntry.setHasHandled(hasHandled); // depends on control dependency: [for], data = [none] } return pathToObject; } }
public class class_name { protected void closeConnection(final String meUuid, boolean alreadyClosed) { final String methodName = "closeConnection"; if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) { SibTr.entry(this, TRACE, methodName, meUuid); } synchronized (_sessionsByMeUuid) { super.closeConnection(meUuid, alreadyClosed); _sessionsByMeUuid.remove(meUuid); } if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) { SibTr.exit(this, TRACE, methodName); } } }
public class class_name { protected void closeConnection(final String meUuid, boolean alreadyClosed) { final String methodName = "closeConnection"; if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) { SibTr.entry(this, TRACE, methodName, meUuid); // depends on control dependency: [if], data = [none] } synchronized (_sessionsByMeUuid) { super.closeConnection(meUuid, alreadyClosed); _sessionsByMeUuid.remove(meUuid); } if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) { SibTr.exit(this, TRACE, methodName); // depends on control dependency: [if], data = [none] } } }
public class class_name { private String getTargetTable(ClassDescriptorDef targetClassDef, String indirectionTable, String foreignKeys) { ModelDef modelDef = (ModelDef)targetClassDef.getOwner(); String tableName = null; for (Iterator classIt = modelDef.getClasses(); classIt.hasNext();) { ClassDescriptorDef curClassDef = (ClassDescriptorDef)classIt.next(); if (!curClassDef.getBooleanProperty(PropertyHelper.OJB_PROPERTY_GENERATE_TABLE_INFO, true)) { continue; } for (Iterator collIt = curClassDef.getCollections(); collIt.hasNext();) { CollectionDescriptorDef curCollDef = (CollectionDescriptorDef)collIt.next(); if (!indirectionTable.equals(curCollDef.getProperty(PropertyHelper.OJB_PROPERTY_INDIRECTION_TABLE)) || !CommaListIterator.sameLists(foreignKeys, curCollDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY))) { continue; } // ok, collection fits if (tableName != null) { if (!tableName.equals(curClassDef.getProperty(PropertyHelper.OJB_PROPERTY_TABLE))) { // maps to a different table return null; } } else { tableName = curClassDef.getProperty(PropertyHelper.OJB_PROPERTY_TABLE); } } } if (tableName == null) { // no fitting collection found -> indirection table with only one collection // we have to check whether the hierarchy of the target class maps to one table only return getHierarchyTable(targetClassDef); } else { return tableName; } } }
public class class_name { private String getTargetTable(ClassDescriptorDef targetClassDef, String indirectionTable, String foreignKeys) { ModelDef modelDef = (ModelDef)targetClassDef.getOwner(); String tableName = null; for (Iterator classIt = modelDef.getClasses(); classIt.hasNext();) { ClassDescriptorDef curClassDef = (ClassDescriptorDef)classIt.next(); if (!curClassDef.getBooleanProperty(PropertyHelper.OJB_PROPERTY_GENERATE_TABLE_INFO, true)) { continue; } for (Iterator collIt = curClassDef.getCollections(); collIt.hasNext();) { CollectionDescriptorDef curCollDef = (CollectionDescriptorDef)collIt.next(); if (!indirectionTable.equals(curCollDef.getProperty(PropertyHelper.OJB_PROPERTY_INDIRECTION_TABLE)) || !CommaListIterator.sameLists(foreignKeys, curCollDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY))) { continue; } // ok, collection fits if (tableName != null) { if (!tableName.equals(curClassDef.getProperty(PropertyHelper.OJB_PROPERTY_TABLE))) { // maps to a different table return null; // depends on control dependency: [if], data = [none] } } else { tableName = curClassDef.getProperty(PropertyHelper.OJB_PROPERTY_TABLE); // depends on control dependency: [if], data = [none] } } } if (tableName == null) { // no fitting collection found -> indirection table with only one collection // we have to check whether the hierarchy of the target class maps to one table only return getHierarchyTable(targetClassDef); // depends on control dependency: [if], data = [none] } else { return tableName; // depends on control dependency: [if], data = [none] } } }
public class class_name { public static IAtomContainer convertExplicitToImplicitHydrogens(IAtomContainer atomContainer) { IAtomContainer mol = atomContainer.getBuilder().newInstance(IAtomContainer.class, atomContainer); convertImplicitToExplicitHydrogens(mol); if (mol.getAtomCount() > 1) { mol = removeHydrogens(mol); } else if (atomContainer.atoms().iterator().next().getSymbol().equalsIgnoreCase("H")) { System.err.println("WARNING: single hydrogen atom removal not supported!"); } mol.addProperties(atomContainer.getProperties()); mol.setFlags(atomContainer.getFlags()); if (atomContainer.getID() != null) { mol.setID(atomContainer.getID()); } return mol; } }
public class class_name { public static IAtomContainer convertExplicitToImplicitHydrogens(IAtomContainer atomContainer) { IAtomContainer mol = atomContainer.getBuilder().newInstance(IAtomContainer.class, atomContainer); convertImplicitToExplicitHydrogens(mol); if (mol.getAtomCount() > 1) { mol = removeHydrogens(mol); // depends on control dependency: [if], data = [none] } else if (atomContainer.atoms().iterator().next().getSymbol().equalsIgnoreCase("H")) { System.err.println("WARNING: single hydrogen atom removal not supported!"); // depends on control dependency: [if], data = [none] } mol.addProperties(atomContainer.getProperties()); mol.setFlags(atomContainer.getFlags()); if (atomContainer.getID() != null) { mol.setID(atomContainer.getID()); // depends on control dependency: [if], data = [(atomContainer.getID()] } return mol; } }
public class class_name { private void validateIfdBP(IFD ifd, int p) { IfdTags metadata = ifd.getMetadata(); if (p == 1 || p == 2) { checkRequiredTag(metadata, "NewSubfileType", 1, new long[]{0}); } checkRequiredTag(metadata, "ImageLength", 1); checkRequiredTag(metadata, "ImageWidth", 1); checkRequiredTag(metadata, "BitsPerSample", 1, new long[]{1}); if (p == 0) { checkRequiredTag(metadata, "Compression", 1, new long[]{1,4,8}); } else if (p == 1) { checkRequiredTag(metadata, "Compression", 1, new long[]{1}); } else { checkRequiredTag(metadata, "Compression", 1, new long[]{1,4,8}); } if (p == 0) { checkRequiredTag(metadata, "PhotometricInterpretation", 1); } else { checkRequiredTag(metadata, "PhotometricInterpretation", 1, new long[] {0}); } checkRequiredTag(metadata, "StripOffsets", 1); if (p == 0) { checkRequiredTag(metadata, "Orientation", 1, new long[]{1,4,5,8}); } else { checkRequiredTag(metadata, "Orientation", 1, new long[]{1}); } checkRequiredTag(metadata, "SamplesPerPixel", 1, new long[]{1}); checkRequiredTag(metadata, "StripBYTECount", 1); checkRequiredTag(metadata, "XResolution", 1); checkRequiredTag(metadata, "YResolution", 1); if (p == 1 || p == 2) { checkRequiredTag(metadata, "ResolutionUnit", 1, new long[]{2, 3}); checkRequiredTag(metadata, "DotRange", 2, new long[]{0, 255}); } checkRequiredTag(metadata, "ImageColorIndicator", 1, new long[]{0, 1, 2}); checkRequiredTag(metadata, "BackgroundColorIndicator", 1, new long[]{0, 1, 2}); } }
public class class_name {

    /**
     * Validates that the given IFD carries every tag required by baseline
     * profile {@code p}, applying profile-specific restrictions on the
     * permitted tag values. Profiles 1 and 2 add requirements (NewSubfileType,
     * ResolutionUnit, DotRange) on top of the common tag set.
     *
     * NOTE(review): the trailing "// depends on control dependency" comments
     * are pre-existing analysis annotations and have been kept as-is.
     *
     * @param ifd the image file directory under validation
     * @param p   profile selector (0, 1 or 2)
     */
    private void validateIfdBP(IFD ifd, int p) {
        IfdTags metadata = ifd.getMetadata();
        if (p == 1 || p == 2) {
            checkRequiredTag(metadata, "NewSubfileType", 1, new long[]{0}); // depends on control dependency: [if], data = [none]
        }
        // Tags required by every profile.
        checkRequiredTag(metadata, "ImageLength", 1);
        checkRequiredTag(metadata, "ImageWidth", 1);
        checkRequiredTag(metadata, "BitsPerSample", 1, new long[]{1});
        // Only profile 1 restricts compression to code 1; the p==0 and default
        // branches are identical ({1,4,8}).
        if (p == 0) {
            checkRequiredTag(metadata, "Compression", 1, new long[]{1,4,8}); // depends on control dependency: [if], data = [none]
        } else if (p == 1) {
            checkRequiredTag(metadata, "Compression", 1, new long[]{1}); // depends on control dependency: [if], data = [none]
        } else {
            checkRequiredTag(metadata, "Compression", 1, new long[]{1,4,8}); // depends on control dependency: [if], data = [none]
        }
        // Profile 0 leaves PhotometricInterpretation unconstrained; others pin it to 0.
        if (p == 0) {
            checkRequiredTag(metadata, "PhotometricInterpretation", 1); // depends on control dependency: [if], data = [none]
        } else {
            checkRequiredTag(metadata, "PhotometricInterpretation", 1, new long[] {0}); // depends on control dependency: [if], data = [none]
        }
        checkRequiredTag(metadata, "StripOffsets", 1);
        // Profile 0 admits several orientations; others require the default (1).
        if (p == 0) {
            checkRequiredTag(metadata, "Orientation", 1, new long[]{1,4,5,8}); // depends on control dependency: [if], data = [none]
        } else {
            checkRequiredTag(metadata, "Orientation", 1, new long[]{1}); // depends on control dependency: [if], data = [none]
        }
        checkRequiredTag(metadata, "SamplesPerPixel", 1, new long[]{1});
        // NOTE(review): tag name "StripBYTECount" has unusual casing — presumably
        // matches this project's tag dictionary; verify against checkRequiredTag's lookup.
        checkRequiredTag(metadata, "StripBYTECount", 1);
        checkRequiredTag(metadata, "XResolution", 1);
        checkRequiredTag(metadata, "YResolution", 1);
        // Extra requirements shared by profiles 1 and 2.
        if (p == 1 || p == 2) {
            checkRequiredTag(metadata, "ResolutionUnit", 1, new long[]{2, 3}); // depends on control dependency: [if], data = [none]
            checkRequiredTag(metadata, "DotRange", 2, new long[]{0, 255}); // depends on control dependency: [if], data = [none]
        }
        checkRequiredTag(metadata, "ImageColorIndicator", 1, new long[]{0, 1, 2});
        checkRequiredTag(metadata, "BackgroundColorIndicator", 1, new long[]{0,
            1, 2});
    }
}