code
stringlengths 130
281k
| code_dependency
stringlengths 182
306k
|
|---|---|
public class class_name {
@SuppressWarnings("unused")
private void close() {
try {
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Doing client selector close");
}
selector.close();
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Closed client selector");
}
} catch (IOException e) {
LOGGER.warn("Ignoring exception during selector close", e);
}
} }
|
public class class_name {
@SuppressWarnings("unused")
private void close() {
try {
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Doing client selector close"); // depends on control dependency: [if], data = [none]
}
selector.close(); // depends on control dependency: [try], data = [none]
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Closed client selector"); // depends on control dependency: [if], data = [none]
}
} catch (IOException e) {
LOGGER.warn("Ignoring exception during selector close", e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
private static List<EntryElement> convertAllObjectsToEntryElements(Collection<? extends Object> collection) {
List<EntryElement> result = new ArrayList<EntryElement>();
for (Object o : collection) {
result.add(transformObjectToEntryElement(o));
}
return result;
} }
|
public class class_name {
private static List<EntryElement> convertAllObjectsToEntryElements(Collection<? extends Object> collection) {
List<EntryElement> result = new ArrayList<EntryElement>();
for (Object o : collection) {
result.add(transformObjectToEntryElement(o)); // depends on control dependency: [for], data = [o]
}
return result;
} }
|
public class class_name {
public String signCookie(String str) {
if (str == null || str.isEmpty()) {
throw new IllegalArgumentException("NULL or empty string to sign");
}
String signature = getSignature(str);
if (LOG.isDebugEnabled()) {
LOG.debug("Signature generated for " + str + " is " + signature);
}
return str + SIGNATURE + signature;
} }
|
public class class_name {
public String signCookie(String str) {
if (str == null || str.isEmpty()) {
throw new IllegalArgumentException("NULL or empty string to sign");
}
String signature = getSignature(str);
if (LOG.isDebugEnabled()) {
LOG.debug("Signature generated for " + str + " is " + signature); // depends on control dependency: [if], data = [none]
}
return str + SIGNATURE + signature;
} }
|
public class class_name {
protected void setChangedInLastRequest(final boolean changed) {
if (isChangedInLastRequest() != changed) {
InputModel model = getOrCreateComponentModel();
model.changedInLastRequest = changed;
}
} }
|
public class class_name {
protected void setChangedInLastRequest(final boolean changed) {
if (isChangedInLastRequest() != changed) {
InputModel model = getOrCreateComponentModel();
model.changedInLastRequest = changed; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public boolean proofStep(ResolutionState state)
{
Functor goalTerm = state.getGoalStack().peek().getFunctor();
Functor matchTerm = state.getCurrentClause().getHead();
// This is used to record variables bound on the domain side of the unificiation. This information seems
// like is does not need to be kept because usually all of these bindings are in the stack frame.
// However, this is not always the case as unification can capture a variable on the domain side.
// These variables need to be unbound on backtracking too.
List<Variable> domainVariables = new LinkedList<Variable>();
// This is used to record variables bound on the query goal side of the unification. This information
// must be kept so that the undo operation can unbind these variables before placing the goal back
// onto the stack when backtracking.
List<Variable> boundVariables = new LinkedList<Variable>();
// Unify the current query goal with the possibly matching clause, creating variable bindings.
boolean matched = state.getUnifier().unifyInternal(goalTerm, matchTerm, boundVariables, domainVariables);
// Even if unification fails, any partial bindings created are remembered, to ensure that they are cleaned
// up when this proof steps state is undone.
for (Variable binding : boundVariables)
{
state.getVariableBindings().offer(binding);
}
for (Variable binding : domainVariables)
{
state.getVariableBindings().offer(binding);
}
// If the unification succeeded, establish a new state with the unified query removed from the goal stack, the
// body of the unified with clause added to it for resolution, and the variable binding trail extended with
// any additional bindings resulting from the unification.
if (matched)
{
if (TRACE)
{
/*trace.fine(state.getTraceIndenter().generateTraceIndent() + "Unify " +
goalTerm.toString(state.getInterner(), true, true) + " against " +
matchTerm.toString(state.getInterner(), true, true) + ", ok.");*/
}
// Consume the successfully unified goal from the goal stack.
state.getGoalStack().poll();
// Add all functors on the body side of the unified clause onto the goal stack for resolution.
Functor[] body = state.getCurrentClause().getBody();
if ((body != null) && (body.length != 0))
{
// The new goals are placed onto the goal stack backwards. It is a stack, hence they get
// explored first, depth first, but their insertion order is reversed for an intuitive
// left-to-right evaluation order.
for (int i = body.length - 1; i >= 0; i--)
{
BuiltInFunctor newGoal = state.getBuiltInTransform().apply(body[i]);
newGoal.setParentChoicePointState(state.getLastChoicePoint());
state.getGoalStack().offer(newGoal);
}
}
return true;
}
else
{
if (TRACE)
{
/*trace.fine(state.getTraceIndenter().generateTraceIndent() + "Failed to unify " +
goalTerm.toString(state.getInterner(), true, true) + " against " +
matchTerm.toString(state.getInterner(), true, true) + ".");*/
}
return false;
}
} }
|
public class class_name {
public boolean proofStep(ResolutionState state)
{
Functor goalTerm = state.getGoalStack().peek().getFunctor();
Functor matchTerm = state.getCurrentClause().getHead();
// This is used to record variables bound on the domain side of the unificiation. This information seems
// like is does not need to be kept because usually all of these bindings are in the stack frame.
// However, this is not always the case as unification can capture a variable on the domain side.
// These variables need to be unbound on backtracking too.
List<Variable> domainVariables = new LinkedList<Variable>();
// This is used to record variables bound on the query goal side of the unification. This information
// must be kept so that the undo operation can unbind these variables before placing the goal back
// onto the stack when backtracking.
List<Variable> boundVariables = new LinkedList<Variable>();
// Unify the current query goal with the possibly matching clause, creating variable bindings.
boolean matched = state.getUnifier().unifyInternal(goalTerm, matchTerm, boundVariables, domainVariables);
// Even if unification fails, any partial bindings created are remembered, to ensure that they are cleaned
// up when this proof steps state is undone.
for (Variable binding : boundVariables)
{
state.getVariableBindings().offer(binding); // depends on control dependency: [for], data = [binding]
}
for (Variable binding : domainVariables)
{
state.getVariableBindings().offer(binding); // depends on control dependency: [for], data = [binding]
}
// If the unification succeeded, establish a new state with the unified query removed from the goal stack, the
// body of the unified with clause added to it for resolution, and the variable binding trail extended with
// any additional bindings resulting from the unification.
if (matched)
{
if (TRACE)
{
/*trace.fine(state.getTraceIndenter().generateTraceIndent() + "Unify " +
goalTerm.toString(state.getInterner(), true, true) + " against " +
matchTerm.toString(state.getInterner(), true, true) + ", ok.");*/
}
// Consume the successfully unified goal from the goal stack.
state.getGoalStack().poll(); // depends on control dependency: [if], data = [none]
// Add all functors on the body side of the unified clause onto the goal stack for resolution.
Functor[] body = state.getCurrentClause().getBody();
if ((body != null) && (body.length != 0))
{
// The new goals are placed onto the goal stack backwards. It is a stack, hence they get
// explored first, depth first, but their insertion order is reversed for an intuitive
// left-to-right evaluation order.
for (int i = body.length - 1; i >= 0; i--)
{
BuiltInFunctor newGoal = state.getBuiltInTransform().apply(body[i]);
newGoal.setParentChoicePointState(state.getLastChoicePoint()); // depends on control dependency: [for], data = [none]
state.getGoalStack().offer(newGoal); // depends on control dependency: [for], data = [none]
}
}
return true; // depends on control dependency: [if], data = [none]
}
else
{
if (TRACE)
{
/*trace.fine(state.getTraceIndenter().generateTraceIndent() + "Failed to unify " +
goalTerm.toString(state.getInterner(), true, true) + " against " +
matchTerm.toString(state.getInterner(), true, true) + ".");*/
}
return false; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public static String getTargetFeaturesRange(
final TrainingParameters params) {
String lemmaRangeFlag = null;
if (params.getSettings().get("TargetFeaturesRange") != null) {
lemmaRangeFlag = params.getSettings().get("TargetFeaturesRange");
} else {
lemmaRangeFlag = Flags.DEFAULT_TARGET_RANGE;
}
return lemmaRangeFlag;
} }
|
public class class_name {
public static String getTargetFeaturesRange(
final TrainingParameters params) {
String lemmaRangeFlag = null;
if (params.getSettings().get("TargetFeaturesRange") != null) {
lemmaRangeFlag = params.getSettings().get("TargetFeaturesRange"); // depends on control dependency: [if], data = [none]
} else {
lemmaRangeFlag = Flags.DEFAULT_TARGET_RANGE; // depends on control dependency: [if], data = [none]
}
return lemmaRangeFlag;
} }
|
public class class_name {
public void runTask() {
MainTask tasks[] = new MainTask[jTableAlgorithms.getModel().getRowCount() * jTableStreams.getModel().getRowCount()];
int taskCount = 0;
String dir = "";
try {
this.currentTask = (MainTask) ClassOption.cliStringToObject(
this.jTextFieldTask.getText(), MainTask.class, null);
} catch (Exception ex) {
Logger.getLogger(TaskManagerTabPanel.class.getName()).log(Level.SEVERE, null, ex);
}
MainTask auxTask = (MainTask) this.currentTask.copy();
dir += this.resultsPath;
File f = new File(dir);
if (f.exists()) {
Object[] options = {"Yes", "No"};
String cancel = "NO";
int resp = JOptionPane.showOptionDialog(this,
"The selected folder is not empty. This action may overwrite "
+ "previous experiment results. Do you want to continue?", "Warning",
JOptionPane.OK_CANCEL_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, cancel);
if (resp == JOptionPane.OK_OPTION) {
ReadFile.deleteDrectory(f);
} else {
JOptionPane.showMessageDialog(this, "Please specify another directory", "Message",
JOptionPane.INFORMATION_MESSAGE);
return;
}
}
f.mkdir();
String algNames = "";
String streamNames = "";
for (int i = 0; i < jTableAlgorithms.getModel().getRowCount(); i++) {
String alg = jTableAlgorithms.getModel().getValueAt(i, 0).toString();
String algFile = jTableAlgorithms.getModel().getValueAt(i, 1).toString();
algNames += algFile;
if (i != jTableAlgorithms.getModel().getRowCount() - 1) {
algNames += ",";
}
for (int j = 0; j < jTableStreams.getModel().getRowCount(); j++) {
String stream = jTableStreams.getModel().getValueAt(j, 0).toString();
String streamFile = jTableStreams.getModel().getValueAt(j, 1).toString();
streamNames += streamFile.split(" ")[0];
if (j != jTableStreams.getModel().getRowCount() - 1) {
streamNames += ",";
}
if (i == 0) {
String sfile = FilenameUtils.separatorsToSystem(dir + "\\\\" + streamFile);
f = new File(sfile);
f.mkdir();
}
String task = " -l ";
if (alg.split(" ") != null) {
task += "(" + alg + ") -s (" + stream + ")" + " -d (" + dir + File.separator
+ streamFile.split(" ")[0] + File.separator + algFile + ".txt" + ")";
} else {
task += alg + " -s (" + stream + ")" + " -d (" + dir + File.separator
+ streamFile.split(" ")[0] + File.separator + algFile + ".txt" + ")";
}
// String task = FilenameUtils.separatorsToSystem(" -l (" + alg + ") -s (" + stream + ") " + " -d " + "(" + dir + "\\\\"
// + streamFile.split(" ")[0] + "\\\\" + algFile + ".txt" + ")");
auxTask.getOptions().setViaCLIString(task);
try {
tasks[taskCount] = (MainTask) auxTask.copy();
} catch (Exception ex) {
Logger.getLogger(TaskManagerTabPanel.class.getName()).log(Level.SEVERE, null, ex);
}
taskCount++;
}
}
this.jButtonRun.setEnabled(false);
Buffer buffer = new Buffer(tasks);
int proc = 1;
if (!this.jTextFieldProcess.getText().equals("")) {
proc = Integer.parseInt(this.jTextFieldProcess.getText());
}
if (proc > tasks.length) {
proc = tasks.length;
}
for (int i = 0; i < proc; i++) {
ExpTaskThread thread = new ExpTaskThread(buffer);
thread.start();
this.taskList.add(0, thread);
this.taskTableModel.fireTableDataChanged();
this.taskTable.setRowSelectionInterval(0, 0);
}
Thread obs = new Thread() {
public void run() {
while (true) {
int count = 0;
for (ExpTaskThread thread : TaskManagerTabPanel.this.taskList) {
if (thread.isCompleted == true) {
count++;
//System.out.println(count);
}
}
if (count == TaskManagerTabPanel.this.taskList.size()) {
TaskManagerTabPanel.this.summary.readData(resultsPath);
TaskManagerTabPanel.this.plot.readData(resultsPath);
TaskManagerTabPanel.this.analizeTab.readData(resultsPath);
TaskManagerTabPanel.this.jButtonRun.setEnabled(true);
break;
}
}
}
};
obs.start();
} }
|
public class class_name {
public void runTask() {
MainTask tasks[] = new MainTask[jTableAlgorithms.getModel().getRowCount() * jTableStreams.getModel().getRowCount()];
int taskCount = 0;
String dir = "";
try {
this.currentTask = (MainTask) ClassOption.cliStringToObject(
this.jTextFieldTask.getText(), MainTask.class, null); // depends on control dependency: [try], data = [none]
} catch (Exception ex) {
Logger.getLogger(TaskManagerTabPanel.class.getName()).log(Level.SEVERE, null, ex);
} // depends on control dependency: [catch], data = [none]
MainTask auxTask = (MainTask) this.currentTask.copy();
dir += this.resultsPath;
File f = new File(dir);
if (f.exists()) {
Object[] options = {"Yes", "No"};
String cancel = "NO";
int resp = JOptionPane.showOptionDialog(this,
"The selected folder is not empty. This action may overwrite "
+ "previous experiment results. Do you want to continue?", "Warning",
JOptionPane.OK_CANCEL_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, cancel);
if (resp == JOptionPane.OK_OPTION) {
ReadFile.deleteDrectory(f); // depends on control dependency: [if], data = [none]
} else {
JOptionPane.showMessageDialog(this, "Please specify another directory", "Message",
JOptionPane.INFORMATION_MESSAGE); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
}
f.mkdir();
String algNames = "";
String streamNames = "";
for (int i = 0; i < jTableAlgorithms.getModel().getRowCount(); i++) {
String alg = jTableAlgorithms.getModel().getValueAt(i, 0).toString();
String algFile = jTableAlgorithms.getModel().getValueAt(i, 1).toString();
algNames += algFile;
if (i != jTableAlgorithms.getModel().getRowCount() - 1) {
algNames += ","; // depends on control dependency: [if], data = [none]
}
for (int j = 0; j < jTableStreams.getModel().getRowCount(); j++) {
String stream = jTableStreams.getModel().getValueAt(j, 0).toString();
String streamFile = jTableStreams.getModel().getValueAt(j, 1).toString();
streamNames += streamFile.split(" ")[0];
if (j != jTableStreams.getModel().getRowCount() - 1) {
streamNames += ","; // depends on control dependency: [if], data = [none]
}
if (i == 0) {
String sfile = FilenameUtils.separatorsToSystem(dir + "\\\\" + streamFile);
f = new File(sfile); // depends on control dependency: [if], data = [none]
f.mkdir(); // depends on control dependency: [if], data = [none]
}
String task = " -l ";
if (alg.split(" ") != null) {
task += "(" + alg + ") -s (" + stream + ")" + " -d (" + dir + File.separator
+ streamFile.split(" ")[0] + File.separator + algFile + ".txt" + ")";
} else {
task += alg + " -s (" + stream + ")" + " -d (" + dir + File.separator
+ streamFile.split(" ")[0] + File.separator + algFile + ".txt" + ")";
}
// String task = FilenameUtils.separatorsToSystem(" -l (" + alg + ") -s (" + stream + ") " + " -d " + "(" + dir + "\\\\"
// + streamFile.split(" ")[0] + "\\\\" + algFile + ".txt" + ")");
auxTask.getOptions().setViaCLIString(task);
try {
tasks[taskCount] = (MainTask) auxTask.copy();
} catch (Exception ex) {
Logger.getLogger(TaskManagerTabPanel.class.getName()).log(Level.SEVERE, null, ex);
}
taskCount++;
}
}
this.jButtonRun.setEnabled(false);
Buffer buffer = new Buffer(tasks);
int proc = 1;
if (!this.jTextFieldProcess.getText().equals("")) {
proc = Integer.parseInt(this.jTextFieldProcess.getText());
}
if (proc > tasks.length) {
proc = tasks.length;
}
for (int i = 0; i < proc; i++) {
ExpTaskThread thread = new ExpTaskThread(buffer);
thread.start();
this.taskList.add(0, thread);
this.taskTableModel.fireTableDataChanged();
this.taskTable.setRowSelectionInterval(0, 0);
}
Thread obs = new Thread() {
public void run() {
while (true) {
int count = 0;
for (ExpTaskThread thread : TaskManagerTabPanel.this.taskList) {
if (thread.isCompleted == true) {
count++; // depends on control dependency: [if], data = [none]
//System.out.println(count);
}
}
if (count == TaskManagerTabPanel.this.taskList.size()) {
TaskManagerTabPanel.this.summary.readData(resultsPath); // depends on control dependency: [if], data = [none]
TaskManagerTabPanel.this.plot.readData(resultsPath); // depends on control dependency: [if], data = [none]
TaskManagerTabPanel.this.analizeTab.readData(resultsPath); // depends on control dependency: [if], data = [none]
TaskManagerTabPanel.this.jButtonRun.setEnabled(true); // depends on control dependency: [if], data = [none]
break;
}
}
}
};
obs.start();
} }
|
public class class_name {
@Override
public List<InstalledIdentity> getInstalledIdentities() throws PatchingException {
List<InstalledIdentity> installedIdentities;
final File metadataDir = installedImage.getInstallationMetadata();
if(!metadataDir.exists()) {
installedIdentities = Collections.singletonList(defaultIdentity);
} else {
final String defaultConf = defaultIdentity.getIdentity().getName() + Constants.DOT_CONF;
final File[] identityConfs = metadataDir.listFiles(new FileFilter() {
@Override
public boolean accept(File pathname) {
return pathname.isFile() &&
pathname.getName().endsWith(Constants.DOT_CONF) &&
!pathname.getName().equals(defaultConf);
}
});
if(identityConfs == null || identityConfs.length == 0) {
installedIdentities = Collections.singletonList(defaultIdentity);
} else {
installedIdentities = new ArrayList<InstalledIdentity>(identityConfs.length + 1);
installedIdentities.add(defaultIdentity);
for(File conf : identityConfs) {
final Properties props = loadProductConf(conf);
String productName = conf.getName();
productName = productName.substring(0, productName.length() - Constants.DOT_CONF.length());
final String productVersion = props.getProperty(Constants.CURRENT_VERSION);
InstalledIdentity identity;
try {
identity = LayersFactory.load(installedImage, new ProductConfig(productName, productVersion, null), moduleRoots, bundleRoots);
} catch (IOException e) {
throw new PatchingException(PatchLogger.ROOT_LOGGER.failedToLoadInfo(productName), e);
}
installedIdentities.add(identity);
}
}
}
return installedIdentities;
} }
|
public class class_name {
@Override
public List<InstalledIdentity> getInstalledIdentities() throws PatchingException {
List<InstalledIdentity> installedIdentities;
final File metadataDir = installedImage.getInstallationMetadata();
if(!metadataDir.exists()) {
installedIdentities = Collections.singletonList(defaultIdentity);
} else {
final String defaultConf = defaultIdentity.getIdentity().getName() + Constants.DOT_CONF;
final File[] identityConfs = metadataDir.listFiles(new FileFilter() {
@Override
public boolean accept(File pathname) {
return pathname.isFile() &&
pathname.getName().endsWith(Constants.DOT_CONF) &&
!pathname.getName().equals(defaultConf);
}
});
if(identityConfs == null || identityConfs.length == 0) {
installedIdentities = Collections.singletonList(defaultIdentity); // depends on control dependency: [if], data = [none]
} else {
installedIdentities = new ArrayList<InstalledIdentity>(identityConfs.length + 1); // depends on control dependency: [if], data = [(identityConfs]
installedIdentities.add(defaultIdentity); // depends on control dependency: [if], data = [none]
for(File conf : identityConfs) {
final Properties props = loadProductConf(conf);
String productName = conf.getName();
productName = productName.substring(0, productName.length() - Constants.DOT_CONF.length()); // depends on control dependency: [for], data = [none]
final String productVersion = props.getProperty(Constants.CURRENT_VERSION);
InstalledIdentity identity;
try {
identity = LayersFactory.load(installedImage, new ProductConfig(productName, productVersion, null), moduleRoots, bundleRoots); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
throw new PatchingException(PatchLogger.ROOT_LOGGER.failedToLoadInfo(productName), e);
} // depends on control dependency: [catch], data = [none]
installedIdentities.add(identity); // depends on control dependency: [for], data = [none]
}
}
}
return installedIdentities;
} }
|
public class class_name {
public List<Feature> getFeature(String label) {
if (label == null) {
throw new IllegalArgumentException("feature label is null");
}
label = label.toUpperCase();
List<Feature> foundFeatures = new ArrayList();
for( Feature feature:features ) {
if(feature.getLabel().equals(label)) {
foundFeatures.add(feature);
}
}
return foundFeatures;
} }
|
public class class_name {
public List<Feature> getFeature(String label) {
if (label == null) {
throw new IllegalArgumentException("feature label is null");
}
label = label.toUpperCase();
List<Feature> foundFeatures = new ArrayList();
for( Feature feature:features ) {
if(feature.getLabel().equals(label)) {
foundFeatures.add(feature); // depends on control dependency: [if], data = [none]
}
}
return foundFeatures;
} }
|
public class class_name {
@Override
public void visit(final FamS fams) {
final FamilyNavigator navigator = new FamilyNavigator(fams);
final Family family = navigator.getFamily();
if (family.isSet()) {
familySNavigators.add(navigator);
}
} }
|
public class class_name {
@Override
public void visit(final FamS fams) {
final FamilyNavigator navigator = new FamilyNavigator(fams);
final Family family = navigator.getFamily();
if (family.isSet()) {
familySNavigators.add(navigator); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
@Override
protected void doProcess(ITemplateContext context, IProcessableElementTag tag, AttributeName attributeName, String attributeValue,
Object expressionResult, IElementTagStructureHandler structureHandler) {
if (expressionResult == null) { // e.g. la:property="${detarame}"
throw new IllegalStateException("The expressionResult cannot be null: " + attributeName + ", " + attributeValue);
}
// #hope p1us2er0 pri.C nest property (2018/09/04)
final String propertyName = expressionResult.toString();
final boolean hasThName = tag.hasAttribute(StandardDialect.PREFIX, "name");
final boolean hasThText = tag.hasAttribute(StandardDialect.PREFIX, "text");
final boolean hasThValue = tag.hasAttribute(StandardDialect.PREFIX, "value");
switch (tag.getElementCompleteName()) {
case "input":
if (!hasThName) {
structureHandler.setAttribute("th:name", propertyName);
}
if (!hasThValue) {
if (!Arrays.asList("checkbox", "radio").contains(tag.getAttributeValue("type"))) {
structureHandler.setAttribute("th:value", "${" + propertyName + "}");
}
}
break;
case "select":
if (!hasThName) {
structureHandler.setAttribute("th:name", propertyName);
}
break;
case "textarea":
if (!hasThName) {
structureHandler.setAttribute("th:name", propertyName);
}
if (!hasThText) {
structureHandler.setAttribute("th:text", "${" + propertyName + "}");
}
break;
default:
if (!hasThText) {
structureHandler.setAttribute("th:text", "${" + propertyName + "}");
}
break;
}
} }
|
public class class_name {
@Override
protected void doProcess(ITemplateContext context, IProcessableElementTag tag, AttributeName attributeName, String attributeValue,
Object expressionResult, IElementTagStructureHandler structureHandler) {
if (expressionResult == null) { // e.g. la:property="${detarame}"
throw new IllegalStateException("The expressionResult cannot be null: " + attributeName + ", " + attributeValue);
}
// #hope p1us2er0 pri.C nest property (2018/09/04)
final String propertyName = expressionResult.toString();
final boolean hasThName = tag.hasAttribute(StandardDialect.PREFIX, "name");
final boolean hasThText = tag.hasAttribute(StandardDialect.PREFIX, "text");
final boolean hasThValue = tag.hasAttribute(StandardDialect.PREFIX, "value");
switch (tag.getElementCompleteName()) {
case "input":
if (!hasThName) {
structureHandler.setAttribute("th:name", propertyName); // depends on control dependency: [if], data = [none]
}
if (!hasThValue) {
if (!Arrays.asList("checkbox", "radio").contains(tag.getAttributeValue("type"))) {
structureHandler.setAttribute("th:value", "${" + propertyName + "}"); // depends on control dependency: [if], data = [none]
}
}
break;
case "select":
if (!hasThName) {
structureHandler.setAttribute("th:name", propertyName); // depends on control dependency: [if], data = [none]
}
break;
case "textarea":
if (!hasThName) {
structureHandler.setAttribute("th:name", propertyName); // depends on control dependency: [if], data = [none]
}
if (!hasThText) {
structureHandler.setAttribute("th:text", "${" + propertyName + "}"); // depends on control dependency: [if], data = [none]
}
break;
default:
if (!hasThText) {
structureHandler.setAttribute("th:text", "${" + propertyName + "}"); // depends on control dependency: [if], data = [none]
}
break;
}
} }
|
public class class_name {
@Override
public void clear(int fromIndex, int toIndex)
{
if (fromIndex > toIndex) {
throw new IndexOutOfBoundsException(
"fromIndex: " + fromIndex
+ " > toIndex: " + toIndex
);
}
if (fromIndex == toIndex) {
remove(fromIndex);
return;
}
int startWordIndex = wordIndex(fromIndex);
if (startWordIndex >= firstEmptyWord) {
return;
}
int endWordIndex = wordIndex(toIndex);
if (endWordIndex >= firstEmptyWord) {
toIndex = last();
endWordIndex = firstEmptyWord - 1;
}
final int[] localWords = words; // faster
boolean modified = false;
int firstWordMask = ALL_ONES_WORD << fromIndex;
int lastWordMask = ALL_ONES_WORD >>> -(toIndex + 1);
if (startWordIndex == endWordIndex) {
// Case 1: One word
int before = localWords[startWordIndex];
localWords[startWordIndex] &= ~(firstWordMask & lastWordMask);
modified = localWords[startWordIndex] != before;
} else {
// Case 2: Multiple words
// Handle first word
int before = localWords[startWordIndex];
localWords[startWordIndex] &= ~firstWordMask;
modified = localWords[startWordIndex] != before;
// Handle intermediate words, if any
for (int i = startWordIndex + 1; i < endWordIndex; i++) {
modified = modified || localWords[i] != 0;
localWords[i] = 0;
}
// Handle last word
before = localWords[endWordIndex];
localWords[endWordIndex] &= ~lastWordMask;
modified = modified || localWords[endWordIndex] != before;
}
if (modified) {
fixFirstEmptyWord();
size = -1;
}
} }
|
public class class_name {
@Override
public void clear(int fromIndex, int toIndex)
{
if (fromIndex > toIndex) {
throw new IndexOutOfBoundsException(
"fromIndex: " + fromIndex
+ " > toIndex: " + toIndex
);
}
if (fromIndex == toIndex) {
remove(fromIndex);
// depends on control dependency: [if], data = [(fromIndex]
return;
// depends on control dependency: [if], data = [none]
}
int startWordIndex = wordIndex(fromIndex);
if (startWordIndex >= firstEmptyWord) {
return;
// depends on control dependency: [if], data = [none]
}
int endWordIndex = wordIndex(toIndex);
if (endWordIndex >= firstEmptyWord) {
toIndex = last();
// depends on control dependency: [if], data = [none]
endWordIndex = firstEmptyWord - 1;
// depends on control dependency: [if], data = [none]
}
final int[] localWords = words; // faster
boolean modified = false;
int firstWordMask = ALL_ONES_WORD << fromIndex;
int lastWordMask = ALL_ONES_WORD >>> -(toIndex + 1);
if (startWordIndex == endWordIndex) {
// Case 1: One word
int before = localWords[startWordIndex];
localWords[startWordIndex] &= ~(firstWordMask & lastWordMask);
// depends on control dependency: [if], data = [none]
modified = localWords[startWordIndex] != before;
// depends on control dependency: [if], data = [none]
} else {
// Case 2: Multiple words
// Handle first word
int before = localWords[startWordIndex];
localWords[startWordIndex] &= ~firstWordMask;
// depends on control dependency: [if], data = [none]
modified = localWords[startWordIndex] != before;
// depends on control dependency: [if], data = [none]
// Handle intermediate words, if any
for (int i = startWordIndex + 1; i < endWordIndex; i++) {
modified = modified || localWords[i] != 0;
// depends on control dependency: [for], data = [i]
localWords[i] = 0;
// depends on control dependency: [for], data = [i]
}
// Handle last word
before = localWords[endWordIndex];
// depends on control dependency: [if], data = [none]
localWords[endWordIndex] &= ~lastWordMask;
// depends on control dependency: [if], data = [none]
modified = modified || localWords[endWordIndex] != before;
// depends on control dependency: [if], data = [none]
}
if (modified) {
fixFirstEmptyWord();
// depends on control dependency: [if], data = [none]
size = -1;
// depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public <D extends Declaration> Collection<D>
filter(Collection<? extends Declaration> decls, Class<D> resType) {
ArrayList<D> res = new ArrayList<D>(decls.size());
for (Declaration d : decls) {
if (resType.isInstance(d) && matches(d)) {
res.add(resType.cast(d));
}
}
return res;
} }
|
public class class_name {
public <D extends Declaration> Collection<D>
filter(Collection<? extends Declaration> decls, Class<D> resType) {
ArrayList<D> res = new ArrayList<D>(decls.size());
for (Declaration d : decls) {
if (resType.isInstance(d) && matches(d)) {
res.add(resType.cast(d));
// depends on control dependency: [if], data = [none]
}
}
return res;
} }
|
public class class_name {
private Element searchForKey(final Element root, final String key, final String tagName) {
if (root == null || StringUtils.isEmptyString(key)) {
return null;
}
final Queue<Element> queue = new LinkedList<>();
queue.offer(root);
while (!queue.isEmpty()) {
final Element pe = queue.poll();
final NodeList pchildrenList = pe.getChildNodes();
for (int i = 0; i < pchildrenList.getLength(); i++) {
final Node node = pchildrenList.item(i);
if (node.getNodeType() == Node.ELEMENT_NODE) {
queue.offer((Element)node);
}
}
String value = pe.getNodeName();
if (StringUtils.isEmptyString(value)||
!value.equals(tagName)) {
continue;
}
value = pe.getAttribute(ATTRIBUTE_NAME_NAME);
if (StringUtils.isEmptyString(value)) {
continue;
}
if (value.equals(key)) {
return pe;
}
}
return null;
} }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // BFS for the first element with the given tag name whose name attribute equals key.
    private Element searchForKey(final Element root, final String key, final String tagName) {
        if (root == null || StringUtils.isEmptyString(key)) {
            return null; // depends on control dependency: [if], data = [none]
        }
        final Queue<Element> queue = new LinkedList<>();
        queue.offer(root);
        while (!queue.isEmpty()) {
            final Element pe = queue.poll();
            final NodeList pchildrenList = pe.getChildNodes();
            for (int i = 0; i < pchildrenList.getLength(); i++) {
                final Node node = pchildrenList.item(i);
                if (node.getNodeType() == Node.ELEMENT_NODE) {
                    queue.offer((Element)node); // depends on control dependency: [if], data = [none]
                }
            }
            String value = pe.getNodeName();
            if (StringUtils.isEmptyString(value)||
                    !value.equals(tagName)) {
                continue;
            }
            value = pe.getAttribute(ATTRIBUTE_NAME_NAME); // depends on control dependency: [while], data = [none]
            if (StringUtils.isEmptyString(value)) {
                continue;
            }
            if (value.equals(key)) {
                return pe; // depends on control dependency: [if], data = [none]
            }
        }
        return null;
    } }
|
public class class_name {
protected void recoverFromException(final String channel, final Exception e) {
final RecoveryStrategy recoveryStrategy = this.exceptionHandlerRef.get().onException(this, e, channel);
switch (recoveryStrategy) {
case RECONNECT:
LOG.info("Reconnecting to Redis in response to exception", e);
final int reconAttempts = getReconnectAttempts();
if (!JedisUtils.reconnect(this.jedis, reconAttempts, RECONNECT_SLEEP_TIME)) {
LOG.warn("Terminating in response to exception after " + reconAttempts + " to reconnect", e);
end(false);
} else {
LOG.info("Reconnected to Redis");
}
break;
case TERMINATE:
LOG.warn("Terminating in response to exception", e);
end(false);
break;
case PROCEED:
break;
default:
LOG.error("Unknown RecoveryStrategy: " + recoveryStrategy
+ " while attempting to recover from the following exception; Admin proceeding...", e);
break;
}
} }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // Executes the handler-selected recovery strategy: reconnect, terminate, or proceed.
    protected void recoverFromException(final String channel, final Exception e) {
        final RecoveryStrategy recoveryStrategy = this.exceptionHandlerRef.get().onException(this, e, channel);
        switch (recoveryStrategy) {
        case RECONNECT:
            LOG.info("Reconnecting to Redis in response to exception", e);
            final int reconAttempts = getReconnectAttempts();
            if (!JedisUtils.reconnect(this.jedis, reconAttempts, RECONNECT_SLEEP_TIME)) {
                LOG.warn("Terminating in response to exception after " + reconAttempts + " to reconnect", e); // depends on control dependency: [if], data = [none]
                end(false); // depends on control dependency: [if], data = [none]
            } else {
                LOG.info("Reconnected to Redis"); // depends on control dependency: [if], data = [none]
            }
            break;
        case TERMINATE:
            LOG.warn("Terminating in response to exception", e);
            end(false);
            break;
        case PROCEED:
            break;
        default:
            LOG.error("Unknown RecoveryStrategy: " + recoveryStrategy
                    + " while attempting to recover from the following exception; Admin proceeding...", e);
            break;
        }
    } }
|
public class class_name {
    /**
     * Recursively applies the given lock state to every element in
     * {@code children} and to all of their descendants (pre-order).
     *
     * @param children elements whose subtrees should be (un)locked
     * @param lock     {@code true} to lock, {@code false} to unlock
     */
    private void lockDescendants(Iterable<ElementBase> children, boolean lock) {
        for (final ElementBase element : children) {
            // Lock the node itself before descending into its subtree.
            element.setLocked(lock);
            lockDescendants(element.getChildren(), lock);
        }
    }
}
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // Recursively applies the lock flag to every element and all its descendants.
    private void lockDescendants(Iterable<ElementBase> children, boolean lock) {
        for (ElementBase child : children) {
            child.setLocked(lock); // depends on control dependency: [for], data = [child]
            lockDescendants(child.getChildren(), lock); // depends on control dependency: [for], data = [child]
        }
    } }
|
public class class_name {
    /**
     * Reads the whole content of {@code reader} into a single string, joining
     * the lines with {@code LINE_SEPARATOR}. The reader is always closed
     * (quietly) before returning.
     *
     * @param reader source of characters; wrapped in a buffered reader
     * @return the full content with normalized line separators
     * @throws MustacheIOException if reading fails
     */
    public static String read(final Reader reader) {
        final BufferedReader bufferedReader = new BufferedReader(reader, BUFFER_SIZE);
        try {
            final StringBuilder result = new StringBuilder();
            boolean first = true;
            for (String line = bufferedReader.readLine(); line != null; line = bufferedReader.readLine()) {
                // Separator goes before every line except the very first one,
                // so the result carries no trailing separator.
                if (!first) {
                    result.append(LINE_SEPARATOR);
                }
                result.append(line);
                first = false;
            }
            return result.toString();
        }
        catch (IOException ex) {
            throw new MustacheIOException(ex);
        }
        finally {
            closeQuietly(bufferedReader);
        }
    }
}
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // Reads the entire reader into a string, joining lines with LINE_SEPARATOR.
    public static String read(final Reader reader) {
        final BufferedReader buffer = new BufferedReader(reader, BUFFER_SIZE);
        try {
            StringBuilder sb = new StringBuilder();
            String line;
            boolean started = false;
            // Read line by line
            while ((line = buffer.readLine()) != null) {
                if (started) {
                    sb.append(LINE_SEPARATOR); // depends on control dependency: [if], data = [none]
                }
                sb.append(line); // depends on control dependency: [while], data = [none]
                started = true; // depends on control dependency: [while], data = [none]
            }
            return sb.toString(); // depends on control dependency: [try], data = [none]
        }
        catch (IOException ex) {
            throw new MustacheIOException(ex);
        } // depends on control dependency: [catch], data = [none]
        finally {
            closeQuietly(buffer);
        }
    } }
|
public class class_name {
    /**
     * Returns the smallest key in the tree, or {@code null} when it is empty.
     *
     * @return the first key of the leftmost leaf, or {@code null}
     * @throws InvalidStateException if the structure is not in a valid state
     */
    public synchronized K firstKey() {
        if (!validState) {
            throw new InvalidStateException();
        }
        // true selects the leftmost leaf; null means the tree has no entries.
        final LeafNode<K, V> leftmost = findSideLeafNode(true);
        return (leftmost == null) ? null : leftmost.keys[0];
    }
}
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // Returns the smallest key (first key of the leftmost leaf), or null when empty.
    public synchronized K firstKey() {
        if (!validState) {
            throw new InvalidStateException();
        }
        final LeafNode<K, V> node = findSideLeafNode(true);
        if (node == null) {
            return null; // depends on control dependency: [if], data = [none]
        }
        return node.keys[0];
    } }
|
public class class_name {
    /**
     * Builds an alpha animator for {@code view} from {@code startAlpha} to
     * {@code endAlpha}, both scaled by the view's current alpha so the view
     * never becomes more opaque than it already is. If {@code values} carries a
     * captured alpha from an interrupted run, the animation resumes from it.
     * The original alpha is restored when the transition ends.
     *
     * @param view       the view whose alpha is animated
     * @param startAlpha relative start alpha (scaled by the current alpha)
     * @param endAlpha   relative end alpha (scaled by the current alpha)
     * @param values     optionally holds PROPNAME_ALPHA from an interrupted transition
     * @return the configured alpha animator
     */
    private Animator createAnimation(final View view, float startAlpha, float endAlpha, @Nullable TransitionValues values) {
        final float originalAlpha = view.getAlpha();
        startAlpha = originalAlpha * startAlpha;
        endAlpha = originalAlpha * endAlpha;
        if (values != null && values.values.containsKey(PROPNAME_ALPHA)) {
            final float capturedAlpha = (Float) values.values.get(PROPNAME_ALPHA);
            // A captured value that differs from the current one means the
            // previous transition was interrupted after onTransitionEnd restored
            // the listener alpha; resume from that state instead.
            if (capturedAlpha != originalAlpha) {
                startAlpha = capturedAlpha;
            }
        }
        view.setAlpha(startAlpha);
        final ObjectAnimator animator = ObjectAnimator.ofFloat(view, View.ALPHA, endAlpha);
        animator.addListener(new FadeAnimatorListener(view, originalAlpha));
        addListener(new TransitionListenerAdapter() {
            @Override
            public void onTransitionEnd(@NonNull Transition transition) {
                // Put the pre-transition alpha back and detach this one-shot listener.
                view.setAlpha(originalAlpha);
                transition.removeListener(this);
            }
        });
        return animator;
    }
}
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // Builds an alpha animator scaled by the view's current alpha; resumes from a
    // captured alpha if the previous transition was interrupted.
    private Animator createAnimation(final View view, float startAlpha, float endAlpha, @Nullable TransitionValues values) {
        final float curAlpha = view.getAlpha();
        startAlpha = curAlpha * startAlpha;
        endAlpha = curAlpha * endAlpha;
        if (values != null && values.values.containsKey(PROPNAME_ALPHA)) {
            float savedAlpha = (Float) values.values.get(PROPNAME_ALPHA);
            // if saved value is not equal curAlpha it means that previous
            // transition was interrupted and in the onTransitionEnd
            // we've applied endListenerAlpha. we should apply proper value to
            // continue animation from the interrupted state
            if (savedAlpha != curAlpha) {
                startAlpha = savedAlpha; // depends on control dependency: [if], data = [none]
            }
        }
        view.setAlpha(startAlpha);
        final ObjectAnimator anim = ObjectAnimator.ofFloat(view, View.ALPHA, endAlpha);
        final FadeAnimatorListener listener = new FadeAnimatorListener(view, curAlpha);
        anim.addListener(listener);
        addListener(new TransitionListenerAdapter() {
            @Override
            public void onTransitionEnd(@NonNull Transition transition) {
                view.setAlpha(curAlpha);
                transition.removeListener(this);
            }
        });
        return anim;
    } }
|
public class class_name {
    /**
     * Serializes this object with Java object serialization and returns the
     * resulting bytes.
     *
     * <p>Fix: the {@code ObjectOutputStream} is now closed (try-with-resources)
     * before the backing buffer is read. {@code ObjectOutputStream} buffers
     * block data internally, so reading the {@code ByteArrayOutputStream}
     * without flushing/closing the stream could return truncated output.
     *
     * @return the serialized form of this object
     * @throws RuntimeException (via E.ioException) if serialization fails
     */
    public byte[] toByteArray() {
        try {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
                oos.writeObject(this);
            }
            // Safe to read now: closing the stream flushed its block-data buffer.
            return baos.toByteArray();
        } catch (IOException e) {
            throw E.ioException(e);
        }
    }
}
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // Serializes this object to a byte array via Java object serialization.
    // NOTE(review): oos is never flushed/closed before baos.toByteArray() is read;
    // ObjectOutputStream buffers block data internally, so the result may be
    // truncated -- confirm and consider try-with-resources.
    public byte[] toByteArray() {
        try {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            ObjectOutputStream oos = new ObjectOutputStream(baos);
            oos.writeObject(this); // depends on control dependency: [try], data = [none]
            return baos.toByteArray(); // depends on control dependency: [try], data = [none]
        } catch (IOException e) {
            throw E.ioException(e);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Performs an in-place Fisher-Yates shuffle of {@code array}.
     *
     * @param array values to shuffle in place
     * @param seed  RNG seed; a value of 0 means "unseeded" (time-based generator)
     */
    public static void Shuffle(double[] array, long seed) {
        // A seed of 0 is the sentinel for "no seed": use the default generator.
        // new Random(seed) leaves the RNG in the same state as setSeed(seed).
        final Random random = (seed != 0) ? new Random(seed) : new Random();
        // Walk backwards, swapping each slot with a uniformly chosen slot at or
        // before it.
        for (int last = array.length - 1; last > 0; last--) {
            final int pick = random.nextInt(last + 1);
            final double swap = array[pick];
            array[pick] = array[last];
            array[last] = swap;
        }
    }
}
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // In-place Fisher-Yates shuffle; seed 0 means "use an unseeded generator".
    public static void Shuffle(double[] array, long seed) {
        Random random = new Random();
        if (seed != 0) random.setSeed(seed);
        for (int i = array.length - 1; i > 0; i--) {
            int index = random.nextInt(i + 1);
            double temp = array[index];
            array[index] = array[i]; // depends on control dependency: [for], data = [i]
            array[i] = temp; // depends on control dependency: [for], data = [i]
        }
    } }
|
public class class_name {
    /**
     * Copies all values of the given collection into a new {@code int[]},
     * preserving the collection's iteration order.
     *
     * @param coll source collection of numbers
     * @return a freshly allocated array with the collection's int values
     */
    public static int[] intArrayCopyOf(CollectionNumber coll) {
        final int[] result = new int[coll.size()];
        final IteratorNumber it = coll.iterator();
        // Fill slots in iteration order until the iterator is exhausted.
        for (int i = 0; it.hasNext(); i++) {
            result[i] = it.nextInt();
        }
        return result;
    }
}
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // Copies the collection's int values into a new array in iteration order.
    public static int[] intArrayCopyOf(CollectionNumber coll) {
        int[] data = new int[coll.size()];
        IteratorNumber iter = coll.iterator();
        int index = 0;
        while (iter.hasNext()) {
            data[index] = iter.nextInt();
            // depends on control dependency: [while], data = [none]
            index++;
            // depends on control dependency: [while], data = [none]
        }
        return data;
    } }
|
public class class_name {
    /**
     * Marshalls the given {@code EventDestination}'s fields onto the protocol
     * marshaller, one (value, binding) pair per member, in the generated order.
     *
     * @param eventDestination   the model object to serialize; must not be null
     * @param protocolMarshaller sink that encodes each field against its binding
     * @throws SdkClientException if the argument is null or any field fails to marshall
     */
    public void marshall(EventDestination eventDestination, ProtocolMarshaller protocolMarshaller) {
        if (eventDestination == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(eventDestination.getCloudWatchLogsDestination(), CLOUDWATCHLOGSDESTINATION_BINDING);
            protocolMarshaller.marshall(eventDestination.getEnabled(), ENABLED_BINDING);
            protocolMarshaller.marshall(eventDestination.getKinesisFirehoseDestination(), KINESISFIREHOSEDESTINATION_BINDING);
            protocolMarshaller.marshall(eventDestination.getMatchingEventTypes(), MATCHINGEVENTTYPES_BINDING);
            protocolMarshaller.marshall(eventDestination.getName(), NAME_BINDING);
            protocolMarshaller.marshall(eventDestination.getSnsDestination(), SNSDESTINATION_BINDING);
        } catch (Exception e) {
            // Broad catch is intentional: any failure is surfaced uniformly to the
            // caller as an SdkClientException carrying the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // Marshalls each EventDestination field onto the protocol marshaller in order.
    public void marshall(EventDestination eventDestination, ProtocolMarshaller protocolMarshaller) {
        if (eventDestination == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(eventDestination.getCloudWatchLogsDestination(), CLOUDWATCHLOGSDESTINATION_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(eventDestination.getEnabled(), ENABLED_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(eventDestination.getKinesisFirehoseDestination(), KINESISFIREHOSEDESTINATION_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(eventDestination.getMatchingEventTypes(), MATCHINGEVENTTYPES_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(eventDestination.getName(), NAME_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(eventDestination.getSnsDestination(), SNSDESTINATION_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Returns the dot product of this sorted sparse vector with {@code y}.
     * When {@code y} is also a LongDoubleSortedVector, the two index arrays are
     * merged in a single pass; otherwise each stored index is looked up in
     * {@code y}. Assumes both index arrays are sorted ascending -- TODO confirm.
     *
     * @param y the other vector
     * @return the scalar product of the two vectors
     */
    public double dot(LongDoubleVector y) {
        if (y instanceof LongDoubleSortedVector) {
            LongDoubleSortedVector other = ((LongDoubleSortedVector) y);
            double dot = 0;
            int oc = 0;
            for (int c = 0; c < used; c++) {
                // Advance oc past other's smaller indices; on an exact match
                // accumulate the product and move to the next local index.
                while (oc < other.used) {
                    if (other.indices[oc] < indices[c]) {
                        oc++;
                    } else if (indices[c] == other.indices[oc]) {
                        dot += values[c] * other.values[oc];
                        break;
                    } else {
                        // other.indices[oc] > indices[c]: no match for this c.
                        break;
                    }
                }
            }
            return dot;
        } else {
            // Generic fallback: random-access lookup per stored entry.
            double dot = 0;
            for (int c = 0; c < used; c++) {
                dot += this.values[c] * y.get(indices[c]);
            }
            return dot;
        }
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // Dot product: sorted-merge fast path when y is also sorted, else per-index lookup.
    public double dot(LongDoubleVector y) {
        if (y instanceof LongDoubleSortedVector) {
            LongDoubleSortedVector other = ((LongDoubleSortedVector) y);
            double dot = 0;
            int oc = 0;
            for (int c = 0; c < used; c++) {
                while (oc < other.used) {
                    if (other.indices[oc] < indices[c]) {
                        oc++; // depends on control dependency: [if], data = [none]
                    } else if (indices[c] == other.indices[oc]) {
                        dot += values[c] * other.values[oc]; // depends on control dependency: [if], data = [none]
                        break;
                    } else {
                        break;
                    }
                }
            }
            return dot; // depends on control dependency: [if], data = [none]
        } else {
            double dot = 0;
            for (int c = 0; c < used; c++) {
                dot += this.values[c] * y.get(indices[c]); // depends on control dependency: [for], data = [c]
            }
            return dot; // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Validates that the requested region [iOffset, iOffset + iLength) lies
     * inside the file, under the read lock, and translates the logical offset
     * to its physical position past the header.
     *
     * @param iOffset logical start offset of the requested region
     * @param iLength length in bytes of the requested region
     * @return the physical offset (logical offset plus HEADER_SIZE)
     * @throws OIOException if the region falls outside the file size
     */
    private long checkRegions(final long iOffset, final long iLength) {
        acquireReadLock();
        try {
            final boolean outOfBounds = iOffset < 0 || iOffset + iLength > size;
            if (outOfBounds) {
                throw new OIOException(
                    "You cannot access outside the file size (" + size + " bytes). You have requested portion " + iOffset + "-" + (iOffset
                        + iLength) + " bytes. File: " + this);
            }
            // Translate the logical offset past the fixed on-disk header.
            return iOffset + HEADER_SIZE;
        } finally {
            releaseReadLock();
        }
    }
}
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // Bounds-checks the requested region under the read lock and returns its
    // physical offset (logical offset plus HEADER_SIZE).
    private long checkRegions(final long iOffset, final long iLength) {
        acquireReadLock();
        try {
            if (iOffset < 0 || iOffset + iLength > size) {
                throw new OIOException(
                    "You cannot access outside the file size (" + size + " bytes). You have requested portion " + iOffset + "-" + (iOffset
                        + iLength) + " bytes. File: " + this);
            }
            return iOffset + HEADER_SIZE; // depends on control dependency: [try], data = [none]
        } finally {
            releaseReadLock();
        }
    } }
|
public class class_name {
    /**
     * Resolves the resource configured for the given catalog.
     *
     * @param catalog catalog identifier to look up
     * @return the resource for the catalog's selection, or {@code null} when
     *         the context has no selection for it
     */
    public Resource getResource(final String catalog) {
        // No selection configured for this catalog means no resource.
        final ResourceSelection selection = contextConfig.getSelection(catalog);
        return (selection == null) ? null : repository.getResource(selection);
    }
}
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // Resolves the catalog's configured selection to a resource, or null if none.
    public Resource getResource(final String catalog) {
        ResourceSelection selection = contextConfig.getSelection(catalog);
        if (selection == null) {
            return null; // depends on control dependency: [if], data = [none]
        }
        return repository.getResource(selection);
    } }
|
public class class_name {
    /**
     * Applies a Householder-style rank-1 update to every row block of A below
     * the block containing {@code row}: for each such row i,
     * A(i,:) -= gamma * (u^T A(i,:)) * u, where u is the vector stored in row
     * {@code row} with an implicit leading 1 at {@code zeroOffset} (entries
     * before it are skipped as zeros).
     * Operates directly on block-format storage -- assumes DSubmatrixD1 stores
     * blocks row-major with per-block contiguous data; TODO confirm.
     */
    public static void rank1UpdateMultL_LeftCol( final int blockLength ,
                                                 final DSubmatrixD1 A ,
                                                 final int row , final double gamma , int zeroOffset )
    {
        final int heightU = Math.min(blockLength,A.row1 - A.row0);
        final int width = Math.min(blockLength,A.col1-A.col0);
        final double data[] = A.original.data;
        for( int blockStart = A.row0+blockLength; blockStart < A.row1; blockStart += blockLength) {
            final int heightA = Math.min(blockLength,A.row1 - blockStart);
            for( int i = 0; i < heightA; i++ ) {
                // total = U^T * A(i,:)
                double total = innerProdRow(blockLength, A, row, A, i+(blockStart-A.row0), zeroOffset);
                total *= gamma;
                // A(i,:) - gamma*U*total
                // plusScale_row(blockLength,);
                // Raw offsets into the flat block-format array for U's row and A's row i.
                int indexU = A.row0*A.original.numCols + heightU*A.col0 + row*width;
                int indexA = blockStart*A.original.numCols + heightA*A.col0 + i*width;
                // skip over zeros and assume first element in U is 1
                indexU += zeroOffset+1;
                indexA += zeroOffset;
                data[indexA++] -= total;
                for( int k = zeroOffset+1; k < width; k++ ) {
                    data[ indexA++ ] -= total*data[ indexU++ ];
                }
            }
        }
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // Rank-1 Householder update of all row blocks below the block holding `row`.
    public static void rank1UpdateMultL_LeftCol( final int blockLength ,
                                                 final DSubmatrixD1 A ,
                                                 final int row , final double gamma , int zeroOffset )
    {
        final int heightU = Math.min(blockLength,A.row1 - A.row0);
        final int width = Math.min(blockLength,A.col1-A.col0);
        final double data[] = A.original.data;
        for( int blockStart = A.row0+blockLength; blockStart < A.row1; blockStart += blockLength) {
            final int heightA = Math.min(blockLength,A.row1 - blockStart);
            for( int i = 0; i < heightA; i++ ) {
                // total = U^T * A(i,:)
                double total = innerProdRow(blockLength, A, row, A, i+(blockStart-A.row0), zeroOffset);
                total *= gamma; // depends on control dependency: [for], data = [none]
                // A(i,:) - gamma*U*total
                // plusScale_row(blockLength,);
                int indexU = A.row0*A.original.numCols + heightU*A.col0 + row*width;
                int indexA = blockStart*A.original.numCols + heightA*A.col0 + i*width;
                // skip over zeros and assume first element in U is 1
                indexU += zeroOffset+1; // depends on control dependency: [for], data = [none]
                indexA += zeroOffset; // depends on control dependency: [for], data = [none]
                data[indexA++] -= total; // depends on control dependency: [for], data = [none]
                for( int k = zeroOffset+1; k < width; k++ ) {
                    data[ indexA++ ] -= total*data[ indexU++ ]; // depends on control dependency: [for], data = [none]
                }
            }
        }
    } }
|
public class class_name {
    /**
     * Evaluates whether {@code node} should be split and, if so, performs the
     * split. Ranks per-attribute split suggestions by the configured criterion,
     * uses a Hoeffding bound to decide between the top candidates, and either
     * installs a single SplitNode, installs an OptionNode holding several
     * near-equal splits (building additional option trees), or prunes poor
     * splits from the attribute observers when no split qualifies.
     *
     * @param node        the leaf under consideration for splitting
     * @param parent      the leaf's parent, or null if the leaf is the root
     * @param parentIndex index of the leaf within its parent (unused here)
     */
    protected void attemptToSplit(LeafNode node, Node parent, int parentIndex) {
        // Initialize the split criterion
        SplitCriterion splitCriterion = (SplitCriterion) getPreparedClassOption(splitCriterionOption);
        // Using this criterion, find the best split per attribute and rank the results
        AttributeSplitSuggestion[] bestSplitSuggestions = node.getBestSplitSuggestions(splitCriterion);
        List<AttributeSplitSuggestion> acceptedSplits = new LinkedList<AttributeSplitSuggestion>();
        Arrays.sort(bestSplitSuggestions);
        // Declare a variable to determine the number of splits to be performed
        int numSplits = 0;
        // If only one split was returned, use it (this generally shouldn't happen)
        if (bestSplitSuggestions.length == 1) {
            numSplits = 1;
            acceptedSplits.add(bestSplitSuggestions[0]);
        } else if (bestSplitSuggestions.length > 1) { // Otherwise, consider which of the splits proposed may be worth trying
            // Determine the Hoeffding bound value, used to select how many instances should be used to make a test decision
            // to feel reasonably confident that the test chosen by this sample is the same as what would be chosen using infinite examples
            double hoeffdingBound = computeHoeffdingBound(1, splitConfidenceOption.getValue(), node.examplesSeen);
            // Determine the top two ranked splitting suggestions
            AttributeSplitSuggestion bestSuggestion = bestSplitSuggestions[bestSplitSuggestions.length - 1];
            AttributeSplitSuggestion secondBestSuggestion = bestSplitSuggestions[bestSplitSuggestions.length - 2];
            // If the upper bound of the sample mean for the ratio of SDR(best suggestion) to SDR(second best suggestion),
            // as determined using the Hoeffding bound, is less than 1, then the true mean is also less than 1, and thus at this
            // particular moment of observation the bestSuggestion is indeed the best split option with confidence 1-delta, and
            // splitting should occur.
            // Alternatively, if two or more splits are very similar or identical in terms of their splits, then a threshold limit
            // (default 0.05) is applied to the Hoeffding bound; if the Hoeffding bound is smaller than this limit then the two
            // competing attributes are equally good, and the split will be made on the one with the higher SDR value.
            if (secondBestSuggestion.merit / bestSuggestion.merit < 1 - hoeffdingBound) {
                numSplits = 1;
                acceptedSplits.add(bestSuggestion);
            } else if (numTrees < maxTreesOption.getValue() && node.getLevel() <= maxOptionLevelOption.getValue()) {
                // Several splits are statistically indistinguishable: accept all
                // that fall within the Hoeffding bound of the best one.
                for (AttributeSplitSuggestion suggestion : bestSplitSuggestions) {
                    if (suggestion.merit / bestSuggestion.merit >= 1 - hoeffdingBound) {
                        numSplits++;
                        acceptedSplits.add(suggestion);
                    }
                }
            } else if (hoeffdingBound < tieThresholdOption.getValue()) {
                numSplits = 1;
                acceptedSplits.add(bestSplitSuggestions[0]);
            } else { // If the splitting criterion was not met, initiate pruning of the E-BST structures in each attribute observer
                for (int i = 0; i < node.attributeObservers.size(); i++) {
                    AttributeClassObserver obs = node.attributeObservers.get(i);
                    if (obs != null) {
                        ((FIMTDDNumericAttributeClassObserver) obs).removeBadSplits(splitCriterion, secondBestSuggestion.merit / bestSuggestion.merit, bestSuggestion.merit, hoeffdingBound);
                    }
                }
            }
        }
        // If the splitting criterion was met, split the current node using the chosen attribute test, and
        // make two new branches leading to (empty) leaves
        if (numSplits > 0) {
            double optionFactor = numSplits * Math.pow(optionDecayFactorOption.getValue(), (double) node.getLevel());
            if (numSplits == 1 || optionFactor < 2.0 || maxTreesOption.getValue() - numTrees <= 1) {
                // Plain split: replace the leaf with a single SplitNode.
                AttributeSplitSuggestion splitDecision = acceptedSplits.get(0);
                SplitNode newSplit = newSplitNode(splitDecision.splitTest);
                for (int i = 0; i < splitDecision.numSplits(); i++) {
                    LeafNode newChild = newLeafNode();
                    newChild.setParent(newSplit);
                    newSplit.setChild(i, newChild);
                }
                leafNodeCount--;
                innerNodeCount++;
                leafNodeCount += splitDecision.numSplits();
                if (parent == null) {
                    treeRoot = newSplit;
                } else {
                    parent.setChild(parent.getChildIndex(node), newSplit);
                    newSplit.setParent(parent);
                }
            } else {
                // Option split: install an OptionNode carrying one SplitNode per
                // accepted suggestion, up to the option factor / tree budget.
                OptionNode optionNode = newOptionNode();
                leafNodeCount--;
                int j = 0;
                for (AttributeSplitSuggestion splitDecision : acceptedSplits) {
                    if (j > optionFactor || maxTreesOption.getValue() - numTrees <= 0) {
                        break;
                    }
                    SplitNode newSplit = newSplitNode(splitDecision.splitTest);
                    for (int i = 0; i < splitDecision.numSplits(); i++) {
                        LeafNode newChild = newLeafNode();
                        newChild.setParent(newSplit);
                        newSplit.setChild(i, newChild);
                    }
                    leafNodeCount += splitDecision.numSplits();
                    innerNodeCount++;
                    numTrees++;
                    newSplit.setParent(optionNode);
                    optionNode.setChild(j, newSplit);
                    j++;
                }
                innerNodeCount++;
                optionNodeCount++;
                if (parent == null) {
                    treeRoot = optionNode;
                } else {
                    parent.setChild(parent.getChildIndex(node), optionNode);
                    optionNode.setParent(parent);
                }
                optionNode.resetFF();
            }
        }
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // Decides via a Hoeffding bound whether to split the leaf, installing either a
    // SplitNode, an OptionNode with several near-equal splits, or pruning observers.
    protected void attemptToSplit(LeafNode node, Node parent, int parentIndex) {
        // Initialize the split criterion
        SplitCriterion splitCriterion = (SplitCriterion) getPreparedClassOption(splitCriterionOption);
        // Using this criterion, find the best split per attribute and rank the results
        AttributeSplitSuggestion[] bestSplitSuggestions = node.getBestSplitSuggestions(splitCriterion);
        List<AttributeSplitSuggestion> acceptedSplits = new LinkedList<AttributeSplitSuggestion>();
        Arrays.sort(bestSplitSuggestions);
        // Declare a variable to determine the number of splits to be performed
        int numSplits = 0;
        // If only one split was returned, use it (this generally shouldn't happen)
        if (bestSplitSuggestions.length == 1) {
            numSplits = 1; // depends on control dependency: [if], data = [none]
            acceptedSplits.add(bestSplitSuggestions[0]); // depends on control dependency: [if], data = [none]
        } else if (bestSplitSuggestions.length > 1) { // Otherwise, consider which of the splits proposed may be worth trying
            // Determine the Hoeffding bound value, used to select how many instances should be used to make a test decision
            // to feel reasonably confident that the test chosen by this sample is the same as what would be chosen using infinite examples
            double hoeffdingBound = computeHoeffdingBound(1, splitConfidenceOption.getValue(), node.examplesSeen);
            // Determine the top two ranked splitting suggestions
            AttributeSplitSuggestion bestSuggestion = bestSplitSuggestions[bestSplitSuggestions.length - 1];
            AttributeSplitSuggestion secondBestSuggestion = bestSplitSuggestions[bestSplitSuggestions.length - 2];
            // If the upper bound of the sample mean for the ratio of SDR(best suggestion) to SDR(second best suggestion),
            // as determined using the Hoeffding bound, is less than 1, then the true mean is also less than 1, and thus at this
            // particular moment of observation the bestSuggestion is indeed the best split option with confidence 1-delta, and
            // splitting should occur.
            // Alternatively, if two or more splits are very similar or identical in terms of their splits, then a threshold limit
            // (default 0.05) is applied to the Hoeffding bound; if the Hoeffding bound is smaller than this limit then the two
            // competing attributes are equally good, and the split will be made on the one with the higher SDR value.
            if (secondBestSuggestion.merit / bestSuggestion.merit < 1 - hoeffdingBound) {
                numSplits = 1; // depends on control dependency: [if], data = [none]
                acceptedSplits.add(bestSuggestion); // depends on control dependency: [if], data = [none]
            } else if (numTrees < maxTreesOption.getValue() && node.getLevel() <= maxOptionLevelOption.getValue()) {
                for (AttributeSplitSuggestion suggestion : bestSplitSuggestions) {
                    if (suggestion.merit / bestSuggestion.merit >= 1 - hoeffdingBound) {
                        numSplits++; // depends on control dependency: [if], data = [none]
                        acceptedSplits.add(suggestion); // depends on control dependency: [if], data = [none]
                    }
                }
            } else if (hoeffdingBound < tieThresholdOption.getValue()) {
                numSplits = 1; // depends on control dependency: [if], data = [none]
                acceptedSplits.add(bestSplitSuggestions[0]); // depends on control dependency: [if], data = [none]
            } else { // If the splitting criterion was not met, initiate pruning of the E-BST structures in each attribute observer
                for (int i = 0; i < node.attributeObservers.size(); i++) {
                    AttributeClassObserver obs = node.attributeObservers.get(i);
                    if (obs != null) {
                        ((FIMTDDNumericAttributeClassObserver) obs).removeBadSplits(splitCriterion, secondBestSuggestion.merit / bestSuggestion.merit, bestSuggestion.merit, hoeffdingBound); // depends on control dependency: [if], data = [none]
                    }
                }
            }
        }
        // If the splitting criterion was met, split the current node using the chosen attribute test, and
        // make two new branches leading to (empty) leaves
        if (numSplits > 0) {
            double optionFactor = numSplits * Math.pow(optionDecayFactorOption.getValue(), (double) node.getLevel());
            if (numSplits == 1 || optionFactor < 2.0 || maxTreesOption.getValue() - numTrees <= 1) {
                AttributeSplitSuggestion splitDecision = acceptedSplits.get(0);
                SplitNode newSplit = newSplitNode(splitDecision.splitTest);
                for (int i = 0; i < splitDecision.numSplits(); i++) {
                    LeafNode newChild = newLeafNode();
                    newChild.setParent(newSplit); // depends on control dependency: [for], data = [none]
                    newSplit.setChild(i, newChild); // depends on control dependency: [for], data = [i]
                }
                leafNodeCount--; // depends on control dependency: [if], data = [none]
                innerNodeCount++; // depends on control dependency: [if], data = [none]
                leafNodeCount += splitDecision.numSplits(); // depends on control dependency: [if], data = [none]
                if (parent == null) {
                    treeRoot = newSplit; // depends on control dependency: [if], data = [none]
                } else {
                    parent.setChild(parent.getChildIndex(node), newSplit); // depends on control dependency: [if], data = [(parent]
                    newSplit.setParent(parent); // depends on control dependency: [if], data = [(parent]
                }
            } else {
                OptionNode optionNode = newOptionNode();
                leafNodeCount--; // depends on control dependency: [if], data = [none]
                int j = 0;
                for (AttributeSplitSuggestion splitDecision : acceptedSplits) {
                    if (j > optionFactor || maxTreesOption.getValue() - numTrees <= 0) {
                        break;
                    }
                    SplitNode newSplit = newSplitNode(splitDecision.splitTest);
                    for (int i = 0; i < splitDecision.numSplits(); i++) {
                        LeafNode newChild = newLeafNode();
                        newChild.setParent(newSplit); // depends on control dependency: [for], data = [none]
                        newSplit.setChild(i, newChild); // depends on control dependency: [for], data = [i]
                    }
                    leafNodeCount += splitDecision.numSplits(); // depends on control dependency: [for], data = [splitDecision]
                    innerNodeCount++; // depends on control dependency: [for], data = [none]
                    numTrees++; // depends on control dependency: [for], data = [none]
                    newSplit.setParent(optionNode); // depends on control dependency: [for], data = [none]
                    optionNode.setChild(j, newSplit); // depends on control dependency: [for], data = [none]
                    j++; // depends on control dependency: [for], data = [none]
                }
                innerNodeCount++; // depends on control dependency: [if], data = [none]
                optionNodeCount++; // depends on control dependency: [if], data = [none]
                if (parent == null) {
                    treeRoot = optionNode; // depends on control dependency: [if], data = [none]
                } else {
                    parent.setChild(parent.getChildIndex(node), optionNode); // depends on control dependency: [if], data = [(parent]
                    optionNode.setParent(parent); // depends on control dependency: [if], data = [(parent]
                }
                optionNode.resetFF(); // depends on control dependency: [if], data = [none]
            }
        }
    } }
|
public class class_name {
    /**
     * Deletes subdirectories of {@code tmpDir} whose last-modified time is more
     * than one day old. Missing {@code tmpDir} is a no-op; a failed listing or a
     * failed per-directory delete is logged and does not abort the sweep.
     *
     * @param tmpDir the @tmp directory holding per-build data directories
     * @param logger sink for error/debug messages
     */
    static void deleteOldBuildDataDirs(File tmpDir, Log logger) {
        if (!tmpDir.exists()) {
            // Before creation of the @tmp directory
            return;
        }
        // Hoisted out of the filter: one clock read instead of allocating a new
        // Date per listed file.
        final long now = System.currentTimeMillis();
        File[] buildDataDirs = tmpDir.listFiles(buildDataDir -> {
            long ageInMilliseconds = now - buildDataDir.lastModified();
            return ageInMilliseconds > TimeUnit.DAYS.toMillis(1);
        });
        if (buildDataDirs == null) {
            // listFiles returns null on I/O error or when tmpDir is not a directory.
            logger.error("Failed while attempting to delete old build data dirs. Could not list files in " + tmpDir);
            return;
        }
        for (File buildDataDir : buildDataDirs) {
            try {
                FileUtils.deleteDirectory(buildDataDir);
                logger.debug(buildDataDir.getAbsolutePath() + " deleted");
            } catch (IOException e) {
                // Best-effort cleanup: log and continue with the remaining dirs.
                logger.error("Failed while attempting to delete old build data dir: " + buildDataDir.toString(), e);
            }
        }
    }
}
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // Best-effort deletion of build-data subdirectories older than one day.
    static void deleteOldBuildDataDirs(File tmpDir, Log logger) {
        if (!tmpDir.exists()) {
            // Before creation of the @tmp directory
            return; // depends on control dependency: [if], data = [none]
        }
        File[] buildDataDirs = tmpDir.listFiles(buildDataDir -> {
            long ageInMilliseconds = new Date().getTime() - buildDataDir.lastModified();
            return ageInMilliseconds > TimeUnit.DAYS.toMillis(1);
        });
        if (buildDataDirs == null) {
            logger.error("Failed while attempting to delete old build data dirs. Could not list files in " + tmpDir);
            return;
        }
        for (File buildDataDir : buildDataDirs) {
            try {
                FileUtils.deleteDirectory(buildDataDir); // depends on control dependency: [try], data = [none]
                logger.debug(buildDataDir.getAbsolutePath() + " deleted"); // depends on control dependency: [try], data = [none]
            } catch (IOException e) {
                logger.error("Failed while attempting to delete old build data dir: " + buildDataDir.toString(), e);
            } // depends on control dependency: [catch], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Stores the new input references and repopulates the image combo box with
     * their labels on the Swing event-dispatch thread.
     *
     * @param inputRefs labeled input paths to expose in the UI
     */
    public void setInputList(final List<PathLabel> inputRefs) {
        this.inputRefs = inputRefs;
        // Swing components must only be touched on the EDT.
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                for (final PathLabel ref : inputRefs) {
                    imageBox.addItem(ref.getLabel());
                }
            }});
    }
}
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // Stores the inputs and fills the image combo box with their labels on the EDT.
    public void setInputList(final List<PathLabel> inputRefs) {
        this.inputRefs = inputRefs;
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                for( int i = 0; i < inputRefs.size(); i++ ) {
                    imageBox.addItem(inputRefs.get(i).getLabel()); // depends on control dependency: [for], data = [i]
                }
            }});
    } }
|
public class class_name {
    /**
     * Stops this finder if it is running: detaches from the VirtualCdj, halts
     * the queue-handler thread, announces (on the Swing EDT) the loss of all
     * hot-cached track metadata, clears the cache, and reports the lifecycle
     * change.
     */
    public synchronized void stop() {
        if (!isRunning()) {
            return;  // Already stopped; nothing to tear down.
        }
        VirtualCdj.getInstance().removeUpdateListener(updateListener);
        running.set(false);
        pendingUpdates.clear();
        queueHandler.interrupt();
        queueHandler = null;
        // Snapshot the hot cache so listeners can be told, on the proper thread
        // and outside our lock, which decks just lost their metadata.
        final Set<DeckReference> lostDecks = new HashSet<DeckReference>(hotCache.keySet());
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                for (DeckReference deck : lostDecks) {
                    if (deck.hotCue == 0) {
                        deliverTrackMetadataUpdate(deck.player, null);
                    }
                }
            }
        });
        hotCache.clear();
        deliverLifecycleAnnouncement(logger, false);
    }
}
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency: [...]"
    // comments record the controlling construct and data each statement depends on.
    // Stops the finder, announces lost hot-cached metadata on the EDT, clears state.
    public synchronized void stop() {
        if (isRunning()) {
            VirtualCdj.getInstance().removeUpdateListener(updateListener); // depends on control dependency: [if], data = [none]
            running.set(false); // depends on control dependency: [if], data = [none]
            pendingUpdates.clear(); // depends on control dependency: [if], data = [none]
            queueHandler.interrupt(); // depends on control dependency: [if], data = [none]
            queueHandler = null; // depends on control dependency: [if], data = [none]
            // Report the loss of our hot cached metadata on the proper thread, outside our lock
            final Set<DeckReference> dyingCache = new HashSet<DeckReference>(hotCache.keySet());
            SwingUtilities.invokeLater(new Runnable() {
                @Override
                public void run() {
                    for (DeckReference deck : dyingCache) {
                        if (deck.hotCue == 0) {
                            deliverTrackMetadataUpdate(deck.player, null); // depends on control dependency: [if], data = [none]
                        }
                    }
                }
            }); // depends on control dependency: [if], data = [none]
            hotCache.clear(); // depends on control dependency: [if], data = [none]
            deliverLifecycleAnnouncement(logger, false); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
  /**
   * Returns the next k-combination of the element array.
   *
   * <p>The very first call returns the elements in their original order; each
   * later call advances the index vector {@code m_aIndexResult} to the
   * lexicographically next combination. The remaining-combination counter
   * (a {@code long} or a {@link BigInteger}, selected by {@code m_bUseLong})
   * is decremented on every call.
   *
   * @return a fresh mutable list holding the next combination
   * @throws NoSuchElementException if no combinations remain
   */
  @Nonnull
  @ReturnsMutableCopy
  public ICommonsList <DATATYPE> next ()
  {
    if (!hasNext ())
      throw new NoSuchElementException ();
    // Not for the very first item, as the first item is the original order
    final boolean bFirstItem = m_bUseLong ? m_nCombinationsLeft == m_nTotalCombinations
                                          : m_aCombinationsLeft.equals (m_aTotalCombinations);
    if (!bFirstItem)
    {
      final int nElementCount = m_aElements.length;
      final int nSlotCount = m_aIndexResult.length;
      // Walk left from the last slot to the first index that can still be incremented.
      int i = nSlotCount - 1;
      while (m_aIndexResult[i] == (nElementCount - nSlotCount + i))
      {
        i--;
      }
      m_aIndexResult[i]++;
      final int nIndexResultI = m_aIndexResult[i];
      // Reset every slot to the right to the consecutive indices following slot i.
      for (int j = i + 1; j < nSlotCount; j++)
      {
        m_aIndexResult[j] = nIndexResultI + j - i;
      }
    }
    // One combination less
    if (m_bUseLong)
      m_nCombinationsLeft--;
    else
      m_aCombinationsLeft = m_aCombinationsLeft.subtract (BigInteger.ONE);
    // Build result list
    final ICommonsList <DATATYPE> aResult = new CommonsArrayList <> (m_aIndexResult.length);
    for (final int nIndex : m_aIndexResult)
      aResult.add (m_aElements[nIndex]);
    return aResult;
  } }
|
public class class_name {
  // Dependency-annotated copy: trailing "// depends on control dependency" comments
  // record each statement's controlling construct and data dependencies.
  @Nonnull
  @ReturnsMutableCopy
  public ICommonsList <DATATYPE> next ()
  {
    if (!hasNext ())
      throw new NoSuchElementException ();
    // Not for the very first item, as the first item is the original order
    final boolean bFirstItem = m_bUseLong ? m_nCombinationsLeft == m_nTotalCombinations
                                          : m_aCombinationsLeft.equals (m_aTotalCombinations);
    if (!bFirstItem)
    {
      final int nElementCount = m_aElements.length;
      final int nSlotCount = m_aIndexResult.length;
      int i = nSlotCount - 1;
      while (m_aIndexResult[i] == (nElementCount - nSlotCount + i))
      {
        i--; // depends on control dependency: [while], data = [none]
      }
      m_aIndexResult[i]++; // depends on control dependency: [if], data = [none]
      final int nIndexResultI = m_aIndexResult[i];
      for (int j = i + 1; j < nSlotCount; j++)
      {
        m_aIndexResult[j] = nIndexResultI + j - i; // depends on control dependency: [for], data = [j]
      }
    }
    // One combination less
    if (m_bUseLong)
      m_nCombinationsLeft--;
    else
      m_aCombinationsLeft = m_aCombinationsLeft.subtract (BigInteger.ONE);
    // Build result list
    final ICommonsList <DATATYPE> aResult = new CommonsArrayList <> (m_aIndexResult.length);
    for (final int nIndex : m_aIndexResult)
      aResult.add (m_aElements[nIndex]);
    return aResult;
  } }
|
public class class_name {
    /**
     * Stores or deletes a named item. A {@code null} value removes the entry
     * and its lowercase index key; otherwise the value is stored under the
     * canonical name returned by {@code lookupItemName}.
     *
     * @param itemName the item's name (canonicalized before use)
     * @param value    the value to store, or {@code null} to delete the item
     */
    public void setItem(String itemName, String value) {
        itemName = lookupItemName(itemName, value != null);
        if (value != null) {
            items.put(itemName, value);
        } else {
            items.remove(itemName);
            index.remove(itemName.toLowerCase());
        }
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency" comments
    // record each statement's controlling construct and data dependencies.
    public void setItem(String itemName, String value) {
        itemName = lookupItemName(itemName, value != null);
        if (value == null) {
            items.remove(itemName); // depends on control dependency: [if], data = [none]
            index.remove(itemName.toLowerCase()); // depends on control dependency: [if], data = [none]
        } else {
            items.put(itemName, value); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Parses the text box contents into a {@link Date}.
     *
     * @return the parsed date, or {@code null} when no format or text is
     *         available, or when parsing fails
     */
    @Override
    public Date getValue() {
        try {
            if (dateTimeFormat == null || textBox.getValue() == null) {
                return null;
            }
            return dateTimeFormat.parse(textBox.getValue());
        } catch (final Exception e) {
            // Unparseable or otherwise broken input is treated as "no value".
            return null;
        }
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency" comments
    // record each statement's controlling construct and data dependencies.
    @Override
    public Date getValue() {
        try {
            return dateTimeFormat != null && textBox.getValue() != null ? dateTimeFormat.parse(textBox.getValue()) : null; // depends on control dependency: [try], data = [none]
        } catch (final Exception e) {
            return null;
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Builds the BlazeDS {@link MessagingConfiguration} for the given servlet
     * environment: records the container's server info, installs a no-op login
     * command when none is configured, lazily creates the configuration
     * parser, and parses the file at {@code configurationPath}.
     *
     * @param servletConfig the servlet configuration; must not be {@code null}
     * @return the populated messaging configuration
     */
    @SuppressWarnings("unchecked")
    public MessagingConfiguration getMessagingConfiguration(ServletConfig servletConfig) {
        Assert.isTrue(JdkVersion.getMajorJavaVersion() >= JdkVersion.JAVA_15, "Spring BlazeDS Integration requires a minimum of Java 1.5");
        Assert.notNull(servletConfig, "FlexConfigurationManager requires a non-null ServletConfig - "
                + "Is it being used outside a WebApplicationContext?");
        MessagingConfiguration configuration = new MessagingConfiguration();
        configuration.getSecuritySettings().setServerInfo(servletConfig.getServletContext().getServerInfo());
        // Fall back to a no-op login command when none has been configured.
        if (CollectionUtils.isEmpty(configuration.getSecuritySettings().getLoginCommands())) {
            LoginCommandSettings settings = new LoginCommandSettings();
            settings.setClassName(NoOpLoginCommand.class.getName());
            configuration.getSecuritySettings().getLoginCommands().put(LoginCommandSettings.SERVER_MATCH_OVERRIDE, settings);
        }
        // Create the default parser lazily if none was injected.
        if (this.parser == null) {
            this.parser = getDefaultConfigurationParser();
        }
        Assert.notNull(this.parser, "Unable to create a parser to load Flex messaging configuration.");
        this.parser.parse(this.configurationPath, new ResourceResolverAdapter(this.resourceLoader), configuration);
        return configuration;
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency" comments
    // record each statement's controlling construct and data dependencies.
    @SuppressWarnings("unchecked")
    public MessagingConfiguration getMessagingConfiguration(ServletConfig servletConfig) {
        Assert.isTrue(JdkVersion.getMajorJavaVersion() >= JdkVersion.JAVA_15, "Spring BlazeDS Integration requires a minimum of Java 1.5");
        Assert.notNull(servletConfig, "FlexConfigurationManager requires a non-null ServletConfig - "
                + "Is it being used outside a WebApplicationContext?");
        MessagingConfiguration configuration = new MessagingConfiguration();
        configuration.getSecuritySettings().setServerInfo(servletConfig.getServletContext().getServerInfo());
        if (CollectionUtils.isEmpty(configuration.getSecuritySettings().getLoginCommands())) {
            LoginCommandSettings settings = new LoginCommandSettings();
            settings.setClassName(NoOpLoginCommand.class.getName()); // depends on control dependency: [if], data = [none]
            configuration.getSecuritySettings().getLoginCommands().put(LoginCommandSettings.SERVER_MATCH_OVERRIDE, settings); // depends on control dependency: [if], data = [none]
        }
        if (this.parser == null) {
            this.parser = getDefaultConfigurationParser(); // depends on control dependency: [if], data = [none]
        }
        Assert.notNull(this.parser, "Unable to create a parser to load Flex messaging configuration.");
        this.parser.parse(this.configurationPath, new ResourceResolverAdapter(this.resourceLoader), configuration);
        return configuration;
    } }
|
public class class_name {
    /**
     * Installs the loader configuration exactly once. A {@code null}
     * configuration is rejected; a second initialization attempt is ignored
     * with a warning, keeping the first configuration.
     *
     * @param configuration the configuration to install; must not be {@code null}
     * @throws IllegalArgumentException if {@code configuration} is {@code null}
     */
    public synchronized void init(ImageLoaderConfiguration configuration) {
        if (configuration == null) {
            throw new IllegalArgumentException(ERROR_INIT_CONFIG_WITH_NULL);
        }
        if (this.configuration != null) {
            // Already initialized: warn and keep the existing configuration.
            L.w(WARNING_RE_INIT_CONFIG);
            return;
        }
        L.d(LOG_INIT_CONFIG);
        engine = new ImageLoaderEngine(configuration);
        this.configuration = configuration;
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency" comments
    // record each statement's controlling construct and data dependencies.
    public synchronized void init(ImageLoaderConfiguration configuration) {
        if (configuration == null) {
            throw new IllegalArgumentException(ERROR_INIT_CONFIG_WITH_NULL);
        }
        if (this.configuration == null) {
            L.d(LOG_INIT_CONFIG); // depends on control dependency: [if], data = [none]
            engine = new ImageLoaderEngine(configuration); // depends on control dependency: [if], data = [none]
            this.configuration = configuration; // depends on control dependency: [if], data = [none]
        } else {
            L.w(WARNING_RE_INIT_CONFIG); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Performs a WebDAV COPY of this resource to {@code destination}.
     *
     * <p>The request body's property-behavior element either keeps all live
     * properties alive (when {@code propertyNames} is {@code null}) or lists
     * the given qualified property names explicitly. The server response,
     * including any multi-status body, is examined for errors and always
     * closed.
     *
     * @param destination   target locator for the copy
     * @param depth         WebDAV Depth header value
     * @param overwrite     whether an existing destination may be overwritten
     * @param propertyNames collection of {@code QualifiedName}s to keep alive,
     *                      or {@code null} to keep all
     * @throws DAVException if the server reports a failure
     */
    public void copy(ILocator destination, String depth, boolean overwrite, Collection propertyNames) throws DAVException {
        // Define the request context.
        IContext context = newContext();
        context.setDepth(depth);
        context.setOverwrite(overwrite);
        // Set up the request body to specify which properties should be kept alive.
        Document document = newDocument();
        PropertyBehavior propertyBehavior = PropertyBehavior.create(document);
        if (propertyNames == null)
            propertyBehavior.setIsKeepAllAlive(true);
        else {
            Iterator namesItr = propertyNames.iterator();
            while (namesItr.hasNext()) {
                QualifiedName name = (QualifiedName) namesItr.next();
                // Property URI is "<qualifier>/<local name>".
                String nameURI = name.getQualifier() + "/" + name.getLocalName(); //$NON-NLS-1$
                propertyBehavior.addProperty(nameURI);
            } // end-while
        } // end-if
        // Call the server to perform the copy.
        IResponse response = null;
        try {
            response = davClient.copy(locator, destination, context, document);
            examineResponse(response);
            examineMultiStatusResponse(response);
        } catch (IOException e) {
            throw new SystemException(e);
        } finally {
            // Always release the response, even on failure.
            closeResponse(response);
        }
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency" comments
    // record each statement's controlling construct and data dependencies.
    public void copy(ILocator destination, String depth, boolean overwrite, Collection propertyNames) throws DAVException {
        // Define the request context.
        IContext context = newContext();
        context.setDepth(depth);
        context.setOverwrite(overwrite);
        // Set up the request body to specify which properties should be kept alive.
        Document document = newDocument();
        PropertyBehavior propertyBehavior = PropertyBehavior.create(document);
        if (propertyNames == null)
            propertyBehavior.setIsKeepAllAlive(true);
        else {
            Iterator namesItr = propertyNames.iterator();
            while (namesItr.hasNext()) {
                QualifiedName name = (QualifiedName) namesItr.next();
                String nameURI = name.getQualifier() + "/" + name.getLocalName(); //$NON-NLS-1$
                propertyBehavior.addProperty(nameURI); // depends on control dependency: [while], data = [none]
            } // end-while
        } // end-if
        // Call the server to perform the copy.
        IResponse response = null;
        try {
            response = davClient.copy(locator, destination, context, document);
            examineResponse(response);
            examineMultiStatusResponse(response);
        } catch (IOException e) {
            throw new SystemException(e);
        } finally {
            closeResponse(response);
        }
    } }
|
public class class_name {
    /**
     * Returns a lazy iterator over the {@link Cell}s produced by {@code iter}.
     * As a side effect, every atom that is <em>not</em> a cell is added to
     * {@code returnCF} as it is passed over.
     *
     * <p>{@code remove()} is not supported by the returned iterator.
     *
     * @param returnCF column family that collects the non-cell atoms
     * @param iter     source iterator of on-disk atoms
     * @return an iterator yielding only the cells of {@code iter}
     */
    public static Iterator<Cell> gatherTombstones(final ColumnFamily returnCF, final Iterator<? extends OnDiskAtom> iter)
    {
        return new Iterator<Cell>()
        {
            // One-element lookahead buffer; null means "not fetched yet".
            private Cell next;
            public boolean hasNext()
            {
                if (next != null)
                    return true;
                getNext();
                return next != null;
            }
            public Cell next()
            {
                if (next == null)
                    getNext();
                assert next != null;
                Cell toReturn = next;
                next = null;
                return toReturn;
            }
            // Advances the source until the next cell, routing non-cell atoms into returnCF.
            private void getNext()
            {
                while (iter.hasNext())
                {
                    OnDiskAtom atom = iter.next();
                    if (atom instanceof Cell)
                    {
                        next = (Cell)atom;
                        break;
                    }
                    else
                    {
                        returnCF.addAtom(atom);
                    }
                }
            }
            public void remove()
            {
                throw new UnsupportedOperationException();
            }
        };
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency" comments
    // record each statement's controlling construct and data dependencies.
    public static Iterator<Cell> gatherTombstones(final ColumnFamily returnCF, final Iterator<? extends OnDiskAtom> iter)
    {
        return new Iterator<Cell>()
        {
            private Cell next;
            public boolean hasNext()
            {
                if (next != null)
                    return true;
                getNext();
                return next != null;
            }
            public Cell next()
            {
                if (next == null)
                    getNext();
                assert next != null;
                Cell toReturn = next;
                next = null;
                return toReturn;
            }
            private void getNext()
            {
                while (iter.hasNext())
                {
                    OnDiskAtom atom = iter.next();
                    if (atom instanceof Cell)
                    {
                        next = (Cell)atom; // depends on control dependency: [if], data = [none]
                        break;
                    }
                    else
                    {
                        returnCF.addAtom(atom); // depends on control dependency: [if], data = [none]
                    }
                }
            }
            public void remove()
            {
                throw new UnsupportedOperationException();
            }
        };
    } }
|
public class class_name {
    /**
     * Returns the EMF metaclass for {@code IfcDefinedSymbol}, resolving and
     * caching it from the registered Ifc2x3tc1 package on first access.
     *
     * @return the {@code IfcDefinedSymbol} {@link EClass}
     */
    public EClass getIfcDefinedSymbol() {
        if (ifcDefinedSymbolEClass != null) {
            return ifcDefinedSymbolEClass;
        }
        // Lazily resolve classifier #143 from the globally registered package.
        ifcDefinedSymbolEClass = (EClass) EPackage.Registry.INSTANCE
                .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(143);
        return ifcDefinedSymbolEClass;
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency" comments
    // record each statement's controlling construct and data dependencies.
    public EClass getIfcDefinedSymbol() {
        if (ifcDefinedSymbolEClass == null) {
            ifcDefinedSymbolEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
                    .getEClassifiers().get(143);
            // depends on control dependency: [if], data = [none]
        }
        return ifcDefinedSymbolEClass;
    } }
|
public class class_name {
    /**
     * Reads the next byte from the ring-buffered input, refilling the buffer
     * from the current include level when the cursor has caught up with the
     * fill position ({@code end}).
     *
     * <p>Refilling picks one or two buffer windows depending on whether the
     * free region wraps around the ring. On end-of-stream the include stack is
     * popped and reading restarts from the enclosing input; only when the
     * stack is exhausted is -1 returned. Every byte read advances the include
     * level, the running length, and (when present) the checksum.
     *
     * @return the next byte, or -1 at end of all input
     * @throws IOException        on non-blocking empty reads, counter overflow,
     *                            or when the input exceeds the buffer size
     * @throws UnderflowException if the free buffer region is empty
     */
    @Override
    public final int read() throws IOException
    {
        assert cursor <= end;
        if (cursor >= end)
        {
            if (includeLevel.in == null)
            {
                return -1;
            }
            // cp = cursor position inside the ring; len = free space available for filling.
            int cp = (int)(cursor % size);
            long len = size-(cursor-waterMark);
            int il;
            if (len > size - cp)
            {
                // Free region wraps: fill the tail of buffer1 plus the head of buffer2.
                buffer1.position(cp);
                buffer1.limit(size);
                buffer2.position(0);
                buffer2.limit((int)(len-(size-cp)));
                if (!buffer1.hasRemaining() && !buffer2.hasRemaining())
                {
                    throw new UnderflowException("Buffer size="+size+" too small for operation");
                }
                il = fill(includeLevel.in, array2);
            }
            else
            {
                // Free region is contiguous: fill within buffer1 only.
                buffer1.position(cp);
                buffer1.limit((int)(cp+len));
                if (!buffer1.hasRemaining())
                {
                    throw new UnderflowException("Buffer size="+size+" too small for operation");
                }
                il = fill(includeLevel.in, array1);
            }
            if (il == -1)
            {
                // End of this include level: pop back to the enclosing input, if any.
                if (includeStack != null)
                {
                    while (!includeStack.isEmpty() && il == -1)
                    {
                        close(includeLevel.in);
                        includeLevel = includeStack.pop();
                        return read();
                    }
                }
                return -1;
            }
            if (il == 0)
            {
                throw new IOException("No input! Use blocking mode?");
            }
            buffer1.clear();
            buffer2.clear();
            end+=il;
            if (end < 0)
            {
                throw new IOException("end = "+end);
            }
        }
        int rc = get(cursor++);
        if (cursor < 0)
        {
            throw new IOException("cursor = "+cursor);
        }
        includeLevel.forward(rc);
        length++;
        if (length > size)
        {
            throw new IOException("input size "+length+" exceeds buffer size "+size);
        }
        if (checksum != null)
        {
            checksum.update(cursor-1, rc);
        }
        return rc;
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency" comments
    // record each statement's controlling construct and data dependencies.
    @Override
    public final int read() throws IOException
    {
        assert cursor <= end;
        if (cursor >= end)
        {
            if (includeLevel.in == null)
            {
                return -1; // depends on control dependency: [if], data = [none]
            }
            int cp = (int)(cursor % size);
            long len = size-(cursor-waterMark);
            int il;
            if (len > size - cp)
            {
                buffer1.position(cp); // depends on control dependency: [if], data = [none]
                buffer1.limit(size); // depends on control dependency: [if], data = [none]
                buffer2.position(0); // depends on control dependency: [if], data = [none]
                buffer2.limit((int)(len-(size-cp))); // depends on control dependency: [if], data = [(len]
                if (!buffer1.hasRemaining() && !buffer2.hasRemaining())
                {
                    throw new UnderflowException("Buffer size="+size+" too small for operation");
                }
                il = fill(includeLevel.in, array2); // depends on control dependency: [if], data = [none]
            }
            else
            {
                buffer1.position(cp); // depends on control dependency: [if], data = [none]
                buffer1.limit((int)(cp+len)); // depends on control dependency: [if], data = [none]
                if (!buffer1.hasRemaining())
                {
                    throw new UnderflowException("Buffer size="+size+" too small for operation");
                }
                il = fill(includeLevel.in, array1); // depends on control dependency: [if], data = [none]
            }
            if (il == -1)
            {
                if (includeStack != null)
                {
                    while (!includeStack.isEmpty() && il == -1)
                    {
                        close(includeLevel.in); // depends on control dependency: [while], data = [none]
                        includeLevel = includeStack.pop(); // depends on control dependency: [while], data = [none]
                        return read(); // depends on control dependency: [while], data = [none]
                    }
                }
                return -1; // depends on control dependency: [if], data = [none]
            }
            if (il == 0)
            {
                throw new IOException("No input! Use blocking mode?");
            }
            buffer1.clear();
            buffer2.clear();
            end+=il;
            if (end < 0)
            {
                throw new IOException("end = "+end);
            }
        }
        int rc = get(cursor++);
        if (cursor < 0)
        {
            throw new IOException("cursor = "+cursor);
        }
        includeLevel.forward(rc);
        length++;
        if (length > size)
        {
            throw new IOException("input size "+length+" exceeds buffer size "+size);
        }
        if (checksum != null)
        {
            checksum.update(cursor-1, rc);
        }
        return rc;
    } }
|
public class class_name {
    /**
     * Merges the given lost-storage report into {@code mLostStorage},
     * appending each key's storage paths to any paths already recorded for
     * that key.
     *
     * @param lostStorage map from storage key to the list of lost storage paths
     */
    public void addLostStorage(Map<String, StorageList> lostStorage) {
        for (Map.Entry<String, StorageList> entry : lostStorage.entrySet()) {
            // Reuse the existing path list for this key, creating one on demand.
            mLostStorage.computeIfAbsent(entry.getKey(), key -> new ArrayList<>())
                    .addAll(entry.getValue().getStorageList());
        }
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency" comments
    // record each statement's controlling construct and data dependencies.
    public void addLostStorage(Map<String, StorageList> lostStorage) {
        for (Map.Entry<String, StorageList> entry : lostStorage.entrySet()) {
            List<String> paths = mLostStorage.getOrDefault(entry.getKey(), new ArrayList<>());
            paths.addAll(entry.getValue().getStorageList()); // depends on control dependency: [for], data = [entry]
            mLostStorage.put(entry.getKey(), paths); // depends on control dependency: [for], data = [entry]
        }
    } }
|
public class class_name {
    /**
     * Sends a SIP REGISTER for this user agent and processes the response.
     *
     * <p>Builds the request (deriving the request URI from this agent's
     * address when none is given), attaches the contact header with the given
     * expiry, replays any previously stored authorization headers, sends the
     * message with the supplied credentials, and finally updates the locally
     * cached contact info from the server's response.
     *
     * @param requestUri registrar URI, or {@code null} to derive it from this agent's address
     * @param user       username for authentication challenges
     * @param password   password for authentication challenges
     * @param contact    contact URI string, or {@code null} to keep the current contact
     * @param expiry     registration expiry in seconds (&gt; 0 adds an Expires header)
     * @param timeout    milliseconds to wait for the response
     * @return {@code true} on success; {@code false} otherwise (error details
     *         are recorded via setReturnCode/setErrorMessage/setException)
     */
    public boolean register(SipURI requestUri, String user, String password, String contact,
        int expiry, long timeout) {
      initErrorInfo();
      try {
        AddressFactory addr_factory = parent.getAddressFactory();
        HeaderFactory hdr_factory = parent.getHeaderFactory();
        if (requestUri == null) {
          // Derive the registrar URI (host/port/transport) from our own address.
          requestUri = addr_factory.createSipURI(null, ((SipURI) (myAddress.getURI())).getHost());
          requestUri.setPort(((SipURI) (myAddress.getURI())).getPort());
          if (((SipURI) (myAddress.getURI())).getTransportParam() != null) {
            requestUri.setTransportParam(((SipURI) (myAddress.getURI())).getTransportParam());
          }
        }
        String method = Request.REGISTER;
        ToHeader to_header = hdr_factory.createToHeader(myAddress, null);
        FromHeader from_header = hdr_factory.createFromHeader(myAddress, generateNewTag());
        CallIdHeader callid_header = hdr_factory.createCallIdHeader(myRegistrationId);
        cseq = hdr_factory.createCSeqHeader(cseq == null ? 1 : (cseq.getSeqNumber() + 1), method);
        MaxForwardsHeader max_forwards = hdr_factory.createMaxForwardsHeader(MAX_FORWARDS_DEFAULT);
        if (contact != null) {
          URI uri = addr_factory.createURI(contact);
          if (uri.isSipURI() == false) {
            setReturnCode(INVALID_ARGUMENT);
            setErrorMessage("URI " + contact + " is not a Sip URI");
            return false;
          }
          Address contact_address = addr_factory.createAddress(uri);
          ContactHeader hdr = hdr_factory.createContactHeader(contact_address);
          hdr.setExpires(expiry);
          synchronized (contactLock) {
            contactInfo = new SipContact();
            contactInfo.setContactHeader(hdr);
          }
        }
        List<ViaHeader> via_headers = getViaHeaders();
        Request msg = parent.getMessageFactory().createRequest(requestUri, method, callid_header,
            cseq, from_header, to_header, via_headers, max_forwards);
        msg.addHeader(contactInfo.getContactHeader()); // use
                                                       // setHeader()?
        if (expiry > 0) {
          ExpiresHeader expires = hdr_factory.createExpiresHeader(expiry);
          msg.setExpires(expires);
        }
        // include any auth information for this User Agent's registration
        // if any exists
        Map<String, AuthorizationHeader> auth_list =
            getAuthorizations().get(myRegistrationId);
        if (auth_list != null) {
          List<AuthorizationHeader> auth_headers =
              new ArrayList<>(auth_list.values());
          Iterator<AuthorizationHeader> i = auth_headers.iterator();
          while (i.hasNext()) {
            AuthorizationHeader auth = i.next();
            msg.addHeader(auth);
          }
        } else {
          // create the auth list entry for this phone's registrations
          enableAuthorization(myRegistrationId);
        }
        // send the REGISTRATION request and get the response
        Response response = sendRegistrationMessage(msg, user, password, timeout);
        if (response == null) {
          return false;
        }
        // update our contact info with that of the server response -
        // server may have reset our contact expiry
        ListIterator<?> contacts = response.getHeaders(ContactHeader.NAME);
        if (contacts != null) {
          while (contacts.hasNext()) {
            // TODO - at some point save ALL the contact headers and
            // provide a getter for the list of SipContact objects
            // (gobalContactList).
            // dispose() and unregister() can use the list of contact
            // headers.
            // for now just save this agent's info
            ContactHeader hdr = (ContactHeader) contacts.next();
            if (hdr.getAddress().getURI().toString().equals(contactInfo.getURI()) == true) {
              contactInfo.setContactHeader(hdr);
              break;
            }
          }
        }
        return true;
      } catch (Exception ex) {
        setReturnCode(EXCEPTION_ENCOUNTERED);
        setException(ex);
        setErrorMessage("Exception: " + ex.getClass().getName() + ": " + ex.getMessage());
        return false;
      }
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency" comments
    // record each statement's controlling construct and data dependencies.
    public boolean register(SipURI requestUri, String user, String password, String contact,
        int expiry, long timeout) {
      initErrorInfo();
      try {
        AddressFactory addr_factory = parent.getAddressFactory();
        HeaderFactory hdr_factory = parent.getHeaderFactory();
        if (requestUri == null) {
          requestUri = addr_factory.createSipURI(null, ((SipURI) (myAddress.getURI())).getHost()); // depends on control dependency: [if], data = [none]
          requestUri.setPort(((SipURI) (myAddress.getURI())).getPort()); // depends on control dependency: [if], data = [none]
          if (((SipURI) (myAddress.getURI())).getTransportParam() != null) {
            requestUri.setTransportParam(((SipURI) (myAddress.getURI())).getTransportParam()); // depends on control dependency: [if], data = [(((SipURI) (myAddress.getURI())).getTransportParam()]
          }
        }
        String method = Request.REGISTER;
        ToHeader to_header = hdr_factory.createToHeader(myAddress, null);
        FromHeader from_header = hdr_factory.createFromHeader(myAddress, generateNewTag());
        CallIdHeader callid_header = hdr_factory.createCallIdHeader(myRegistrationId);
        cseq = hdr_factory.createCSeqHeader(cseq == null ? 1 : (cseq.getSeqNumber() + 1), method); // depends on control dependency: [try], data = [none]
        MaxForwardsHeader max_forwards = hdr_factory.createMaxForwardsHeader(MAX_FORWARDS_DEFAULT);
        if (contact != null) {
          URI uri = addr_factory.createURI(contact);
          if (uri.isSipURI() == false) {
            setReturnCode(INVALID_ARGUMENT); // depends on control dependency: [if], data = [none]
            setErrorMessage("URI " + contact + " is not a Sip URI"); // depends on control dependency: [if], data = [none]
            return false; // depends on control dependency: [if], data = [none]
          }
          Address contact_address = addr_factory.createAddress(uri);
          ContactHeader hdr = hdr_factory.createContactHeader(contact_address);
          hdr.setExpires(expiry); // depends on control dependency: [if], data = [none]
          synchronized (contactLock) { // depends on control dependency: [if], data = [(contact]
            contactInfo = new SipContact();
            contactInfo.setContactHeader(hdr);
          }
        }
        List<ViaHeader> via_headers = getViaHeaders();
        Request msg = parent.getMessageFactory().createRequest(requestUri, method, callid_header,
            cseq, from_header, to_header, via_headers, max_forwards);
        msg.addHeader(contactInfo.getContactHeader()); // use // depends on control dependency: [try], data = [none]
                                                       // setHeader()?
        if (expiry > 0) {
          ExpiresHeader expires = hdr_factory.createExpiresHeader(expiry);
          msg.setExpires(expires); // depends on control dependency: [if], data = [none]
        }
        // include any auth information for this User Agent's registration
        // if any exists
        Map<String, AuthorizationHeader> auth_list =
            getAuthorizations().get(myRegistrationId);
        if (auth_list != null) {
          List<AuthorizationHeader> auth_headers =
              new ArrayList<>(auth_list.values());
          Iterator<AuthorizationHeader> i = auth_headers.iterator();
          while (i.hasNext()) {
            AuthorizationHeader auth = i.next();
            msg.addHeader(auth); // depends on control dependency: [while], data = [none]
          }
        } else {
          // create the auth list entry for this phone's registrations
          enableAuthorization(myRegistrationId); // depends on control dependency: [if], data = [none]
        }
        // send the REGISTRATION request and get the response
        Response response = sendRegistrationMessage(msg, user, password, timeout);
        if (response == null) {
          return false; // depends on control dependency: [if], data = [none]
        }
        // update our contact info with that of the server response -
        // server may have reset our contact expiry
        ListIterator<?> contacts = response.getHeaders(ContactHeader.NAME);
        if (contacts != null) {
          while (contacts.hasNext()) {
            // TODO - at some point save ALL the contact headers and
            // provide a getter for the list of SipContact objects
            // (gobalContactList).
            // dispose() and unregister() can use the list of contact
            // headers.
            // for now just save this agent's info
            ContactHeader hdr = (ContactHeader) contacts.next();
            if (hdr.getAddress().getURI().toString().equals(contactInfo.getURI()) == true) {
              contactInfo.setContactHeader(hdr); // depends on control dependency: [if], data = [none]
              break;
            }
          }
        }
        return true; // depends on control dependency: [try], data = [none]
      } catch (Exception ex) {
        setReturnCode(EXCEPTION_ENCOUNTERED);
        setException(ex);
        setErrorMessage("Exception: " + ex.getClass().getName() + ": " + ex.getMessage());
        return false;
      } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Formats a byte count as a human-readable size string with two decimal
     * places, using B, KB, MB or GB units.
     *
     * @param fileSize size in bytes
     * @return the formatted size, or {@code "0B"} when the size is zero
     */
    private static String formetFileSize(long fileSize) {
        if (fileSize == 0) {
            return "0B";
        }
        DecimalFormat twoDecimals = new DecimalFormat("#.00");
        double size = (double) fileSize;
        if (fileSize < 1024) {
            return twoDecimals.format(size) + "B";
        }
        if (fileSize < 1048576) {
            return twoDecimals.format(size / 1024) + "KB";
        }
        if (fileSize < 1073741824) {
            return twoDecimals.format(size / 1048576) + "MB";
        }
        return twoDecimals.format(size / 1073741824) + "GB";
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency" comments
    // record each statement's controlling construct and data dependencies.
    private static String formetFileSize(long fileSize) {
        DecimalFormat decimalFormat = new DecimalFormat("#.00");
        String fileSizeString = "";
        String wrongSize = "0B";
        if (fileSize == 0) {
            return wrongSize; // depends on control dependency: [if], data = [none]
        }
        if (fileSize < 1024) {
            fileSizeString = decimalFormat.format((double) fileSize) + "B"; // depends on control dependency: [if], data = [none]
        } else if (fileSize < 1048576) {
            fileSizeString = decimalFormat.format((double) fileSize / 1024)
                    + "KB"; // depends on control dependency: [if], data = [none]
        } else if (fileSize < 1073741824) {
            fileSizeString = decimalFormat.format((double) fileSize / 1048576)
                    + "MB"; // depends on control dependency: [if], data = [none]
        } else {
            fileSizeString = decimalFormat
                    .format((double) fileSize / 1073741824) + "GB"; // depends on control dependency: [if], data = [none]
        }
        return fileSizeString;
    } }
|
public class class_name {
    /**
     * Encodes {@code len} characters of {@code seq} as UTF-8 into
     * {@code buffer} starting at {@code writerIndex}, using the unchecked
     * {@code _setByte} accessors (the caller must have ensured capacity via
     * {@code ensureWritable}).
     *
     * <p>Lone or truncated surrogates are replaced with
     * {@code WRITE_UTF_UNKNOWN} instead of throwing.
     *
     * @return the number of bytes written
     */
    static int writeUtf8(AbstractByteBuf buffer, int writerIndex, CharSequence seq, int len) {
        int oldWriterIndex = writerIndex;
        // We can use the _set methods as these not need to do any index checks and reference checks.
        // This is possible as we called ensureWritable(...) before.
        for (int i = 0; i < len; i++) {
            char c = seq.charAt(i);
            if (c < 0x80) {
                // Single-byte (ASCII) encoding.
                buffer._setByte(writerIndex++, (byte) c);
            } else if (c < 0x800) {
                // Two-byte encoding.
                buffer._setByte(writerIndex++, (byte) (0xc0 | (c >> 6)));
                buffer._setByte(writerIndex++, (byte) (0x80 | (c & 0x3f)));
            } else if (isSurrogate(c)) {
                if (!Character.isHighSurrogate(c)) {
                    // Lone low surrogate: emit the replacement byte.
                    buffer._setByte(writerIndex++, WRITE_UTF_UNKNOWN);
                    continue;
                }
                final char c2;
                try {
                    // Surrogate Pair consumes 2 characters. Optimistically try to get the next character to avoid
                    // duplicate bounds checking with charAt. If an IndexOutOfBoundsException is thrown we will
                    // re-throw a more informative exception describing the problem.
                    c2 = seq.charAt(++i);
                } catch (IndexOutOfBoundsException ignored) {
                    // High surrogate at end of input: emit the replacement byte and stop.
                    buffer._setByte(writerIndex++, WRITE_UTF_UNKNOWN);
                    break;
                }
                // Extra method to allow inlining the rest of writeUtf8 which is the most likely code path.
                writerIndex = writeUtf8Surrogate(buffer, writerIndex, c, c2);
            } else {
                // Three-byte encoding.
                buffer._setByte(writerIndex++, (byte) (0xe0 | (c >> 12)));
                buffer._setByte(writerIndex++, (byte) (0x80 | ((c >> 6) & 0x3f)));
                buffer._setByte(writerIndex++, (byte) (0x80 | (c & 0x3f)));
            }
        }
        return writerIndex - oldWriterIndex;
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency" comments
    // record each statement's controlling construct and data dependencies.
    static int writeUtf8(AbstractByteBuf buffer, int writerIndex, CharSequence seq, int len) {
        int oldWriterIndex = writerIndex;
        // We can use the _set methods as these not need to do any index checks and reference checks.
        // This is possible as we called ensureWritable(...) before.
        for (int i = 0; i < len; i++) {
            char c = seq.charAt(i);
            if (c < 0x80) {
                buffer._setByte(writerIndex++, (byte) c); // depends on control dependency: [if], data = [none]
            } else if (c < 0x800) {
                buffer._setByte(writerIndex++, (byte) (0xc0 | (c >> 6))); // depends on control dependency: [if], data = [(c]
                buffer._setByte(writerIndex++, (byte) (0x80 | (c & 0x3f))); // depends on control dependency: [if], data = [(c]
            } else if (isSurrogate(c)) {
                if (!Character.isHighSurrogate(c)) {
                    buffer._setByte(writerIndex++, WRITE_UTF_UNKNOWN); // depends on control dependency: [if], data = [none]
                    continue;
                }
                final char c2;
                try {
                    // Surrogate Pair consumes 2 characters. Optimistically try to get the next character to avoid
                    // duplicate bounds checking with charAt. If an IndexOutOfBoundsException is thrown we will
                    // re-throw a more informative exception describing the problem.
                    c2 = seq.charAt(++i); // depends on control dependency: [try], data = [none]
                } catch (IndexOutOfBoundsException ignored) {
                    buffer._setByte(writerIndex++, WRITE_UTF_UNKNOWN);
                    break;
                } // depends on control dependency: [catch], data = [none]
                // Extra method to allow inlining the rest of writeUtf8 which is the most likely code path.
                writerIndex = writeUtf8Surrogate(buffer, writerIndex, c, c2); // depends on control dependency: [if], data = [none]
            } else {
                buffer._setByte(writerIndex++, (byte) (0xe0 | (c >> 12))); // depends on control dependency: [if], data = [none]
                buffer._setByte(writerIndex++, (byte) (0x80 | ((c >> 6) & 0x3f))); // depends on control dependency: [if], data = [none]
                buffer._setByte(writerIndex++, (byte) (0x80 | (c & 0x3f))); // depends on control dependency: [if], data = [none]
            }
        }
        return writerIndex - oldWriterIndex;
    } }
|
public class class_name {
    /**
     * Returns a snapshot of all cached services.
     *
     * @return a new mutable list holding every cached service, or an
     *         immutable empty list when the cache is absent or empty
     */
    public List<V> getAllServices(){
        if (cache == null || cache.isEmpty()) {
            return Collections.emptyList();
        }
        // Copy-constructor snapshot of the current cache values.
        return new ArrayList<V>(cache.values());
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency" comments
    // record each statement's controlling construct and data dependencies.
    public List<V> getAllServices(){
        if(cache == null || cache.size() == 0){
            return Collections.emptyList(); // depends on control dependency: [if], data = [none]
        }
        List<V> list = new ArrayList<V>();
        for(V svc : cache.values()){
            list.add(svc); // depends on control dependency: [for], data = [svc]
        }
        return list;
    } }
|
public class class_name {
    /**
     * Fills the template sheet from the given bean and returns the finished
     * workbook: every declared field that is neither ignored nor a "...Tmpl"
     * template field is run through the Excel cell/rows annotation processors;
     * then row heights are fixed up, other sheets are removed, and the print
     * area is set over the used cell range.
     *
     * @param bean bean supplying the cell/row values
     * @return the populated workbook
     */
    @SuppressWarnings("unchecked")
    public Workbook create(Object bean) {
        optionsSheet = sheet.getWorkbook().getSheet("Options");
        for (val field : bean.getClass().getDeclaredFields()) {
            // Skip fields marked to be ignored or acting as templates.
            if (Fields.shouldIgnored(field, ExcelColIgnore.class)) continue;
            if (field.getName().endsWith("Tmpl")) continue;
            processExcelCellAnnotation(field, bean);
            processExcelRowsAnnotation(field, bean);
        }
        fixRowsHeight();
        PoiUtil.removeOtherSheets(sheet);
        val wb = sheet.getWorkbook();
        wb.setPrintArea(0, 0, PoiUtil.findMaxCol(sheet), 0, sheet.getLastRowNum());
        return wb;
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency" comments
    // record each statement's controlling construct and data dependencies.
    @SuppressWarnings("unchecked")
    public Workbook create(Object bean) {
        optionsSheet = sheet.getWorkbook().getSheet("Options");
        for (val field : bean.getClass().getDeclaredFields()) {
            if (Fields.shouldIgnored(field, ExcelColIgnore.class)) continue;
            if (field.getName().endsWith("Tmpl")) continue;
            processExcelCellAnnotation(field, bean); // depends on control dependency: [for], data = [field]
            processExcelRowsAnnotation(field, bean); // depends on control dependency: [for], data = [field]
        }
        fixRowsHeight();
        PoiUtil.removeOtherSheets(sheet);
        val wb = sheet.getWorkbook();
        wb.setPrintArea(0, 0, PoiUtil.findMaxCol(sheet), 0, sheet.getLastRowNum());
        return wb;
    } }
|
public class class_name {
    /**
     * Creates the default {@link ExtensionLoader}: the implementation class is
     * located on the configured ClassLoaders and instantiated reflectively
     * with those loaders as its constructor argument.
     *
     * @return a new default extension loader instance
     * @throws IllegalStateException if the implementation class cannot be found
     */
    ExtensionLoader createDefaultExtensionLoader() {
        return SecurityActions.newInstance(findExtensionLoaderImplClass(),
                new Class<?>[] { Iterable.class },
                new Object[] { this.getClassLoaders() }, ExtensionLoader.class);
    }

    // Resolves the extension-loader implementation class from the configured ClassLoaders.
    private Class<?> findExtensionLoaderImplClass() {
        try {
            return ClassLoaderSearchUtil.findClassFromClassLoaders(EXTENSION_LOADER_IMPL,
                    getClassLoaders());
        } catch (final ClassNotFoundException cnfe) {
            throw new IllegalStateException(
                    "Could not find extension loader impl class in any of the configured ClassLoaders", cnfe);
        }
    } }
|
public class class_name {
    // Dependency-annotated copy: trailing "// depends on control dependency" comments
    // record each statement's controlling construct and data dependencies.
    ExtensionLoader createDefaultExtensionLoader() {
        // First find the right Class/ClassLoader
        final Class<?> extensionLoaderImplClass;
        try {
            extensionLoaderImplClass = ClassLoaderSearchUtil.findClassFromClassLoaders(EXTENSION_LOADER_IMPL,
                    getClassLoaders()); // depends on control dependency: [try], data = [none]
        } catch (final ClassNotFoundException cnfe) {
            throw new IllegalStateException(
                    "Could not find extension loader impl class in any of the configured ClassLoaders", cnfe);
        } // depends on control dependency: [catch], data = [none]
        // Return
        return SecurityActions.newInstance(extensionLoaderImplClass, new Class<?>[] { Iterable.class },
                new Object[] { this.getClassLoaders() }, ExtensionLoader.class);
    } }
|
public class class_name {
    /**
     * Formats a parameter value into name/value pairs according to the given
     * collection format.
     *
     * <p>Scalars become a single pair. Collections are either repeated per
     * element ("multi") or joined with a delimiter: "csv" (default) ",",
     * "ssv" " ", "tsv" tab, "pipes" "|". Unknown formats fall back to ",".
     *
     * @param collectionFormat one of csv/ssv/tsv/pipes/multi, or null/empty for csv
     * @param name             the parameter name; null/empty yields no pairs
     * @param value            the parameter value; null yields no pairs
     * @return the pairs to emit; empty list for missing name/value or empty collection
     */
    public List<Pair> parameterToPairs(String collectionFormat, String name, Object value){
        List<Pair> params = new ArrayList<Pair>();
        // preconditions
        if (name == null || name.isEmpty() || value == null) return params;
        // Scalars are emitted directly as a single pair.
        if (!(value instanceof Collection<?>)) {
            params.add(new Pair(name, parameterToString(value)));
            return params;
        }
        Collection<?> valueCollection = (Collection<?>) value;
        if (valueCollection.isEmpty()) {
            return params;
        }
        // Default collection format is comma-separated values.
        if (collectionFormat == null || collectionFormat.isEmpty()) {
            collectionFormat = "csv";
        }
        // "multi" repeats the parameter name once per element.
        if (collectionFormat.equals("multi")) {
            for (Object item : valueCollection) {
                params.add(new Pair(name, parameterToString(item)));
            }
            return params;
        }
        // All remaining formats join the elements with a single delimiter;
        // unknown formats keep the csv delimiter, as before.
        String delimiter;
        switch (collectionFormat) {
            case "ssv":
                delimiter = " ";
                break;
            case "tsv":
                delimiter = "\t";
                break;
            case "pipes":
                delimiter = "|";
                break;
            case "csv":
            default:
                delimiter = ",";
                break;
        }
        StringBuilder sb = new StringBuilder();
        for (Object item : valueCollection) {
            sb.append(delimiter).append(parameterToString(item));
        }
        // Drop the leading delimiter appended before the first element.
        params.add(new Pair(name, sb.substring(1)));
        return params;
    }
}
|
public class class_name {
public List<Pair> parameterToPairs(String collectionFormat, String name, Object value){
List<Pair> params = new ArrayList<Pair>();
// preconditions
if (name == null || name.isEmpty() || value == null) return params;
Collection<?> valueCollection = null;
if (value instanceof Collection<?>) {
valueCollection = (Collection<?>) value; // depends on control dependency: [if], data = [)]
} else {
params.add(new Pair(name, parameterToString(value))); // depends on control dependency: [if], data = [)]
return params; // depends on control dependency: [if], data = [none]
}
if (valueCollection.isEmpty()){
return params; // depends on control dependency: [if], data = [none]
}
// get the collection format
collectionFormat = (collectionFormat == null || collectionFormat.isEmpty() ? "csv" : collectionFormat); // default: csv
// create the params based on the collection format
if (collectionFormat.equals("multi")) {
for (Object item : valueCollection) {
params.add(new Pair(name, parameterToString(item))); // depends on control dependency: [for], data = [item]
}
return params; // depends on control dependency: [if], data = [none]
}
String delimiter = ",";
if (collectionFormat.equals("csv")) {
delimiter = ","; // depends on control dependency: [if], data = [none]
} else if (collectionFormat.equals("ssv")) {
delimiter = " "; // depends on control dependency: [if], data = [none]
} else if (collectionFormat.equals("tsv")) {
delimiter = "\t"; // depends on control dependency: [if], data = [none]
} else if (collectionFormat.equals("pipes")) {
delimiter = "|"; // depends on control dependency: [if], data = [none]
}
StringBuilder sb = new StringBuilder() ;
for (Object item : valueCollection) {
sb.append(delimiter); // depends on control dependency: [for], data = [none]
sb.append(parameterToString(item)); // depends on control dependency: [for], data = [item]
}
params.add(new Pair(name, sb.substring(1)));
return params;
} }
|
public class class_name {
    /**
     * Attempts to refine a detected polygon, first against the binary contour,
     * then against the gray image. Each stage is only kept when the edge score
     * (average outside intensity minus average inside intensity) does not
     * degrade relative to the best score so far.
     *
     * @param info candidate polygon; updated in place when a stage is accepted
     * @return true if at least one refinement stage was accepted
     */
    public boolean refine( DetectPolygonFromContour.Info info ) {
        double before,after;
        // Baseline edge score of the unrefined polygon; give up if it can't be computed.
        if( edgeIntensity.computeEdge(info.polygon,!detector.isOutputClockwise()) ) {
            before = edgeIntensity.getAverageOutside() - edgeIntensity.getAverageInside();
        } else {
            return false;
        }
        boolean success = false;
        // Stage 1: refine the polygon against the contour pixels.
        if( refineContour != null ) {
            List<Point2D_I32> contour = detector.getContour(info);
            refineContour.process(contour,info.splits,work);
            if( adjustForBias != null )
                adjustForBias.process(work, detector.isOutputClockwise());
            // Accept only if the score strictly improves; the improved score
            // becomes the new baseline for stage 2.
            if( edgeIntensity.computeEdge(work,!detector.isOutputClockwise()) ) {
                after = edgeIntensity.getAverageOutside() - edgeIntensity.getAverageInside();
                if( after > before ) {
                    info.edgeInside = edgeIntensity.getAverageInside();
                    info.edgeOutside = edgeIntensity.getAverageOutside();
                    info.polygon.set(work);
                    success = true;
                    before = after;
                }
            }
        }
        if( functionAdjust != null ) {
            functionAdjust.adjust(info, detector.isOutputClockwise());
        }
        // Stage 2: sub-pixel refinement against the gray image.
        if( refineGray != null ) {
            work.vertexes.resize(info.polygon.size());
            if( refineGray.refine(info.polygon,work) ) {
                if( edgeIntensity.computeEdge(work,!detector.isOutputClockwise()) ) {
                    after = edgeIntensity.getAverageOutside() - edgeIntensity.getAverageInside();
                    // basically, unless it diverged stick with this optimization
                    // a near tie
                    if( after*1.5 > before ) {
                        info.edgeInside = edgeIntensity.getAverageInside();
                        info.edgeOutside = edgeIntensity.getAverageOutside();
                        info.polygon.set(work);
                        success = true;
                    }
                }
            }
        }
        return success;
    }
}
|
public class class_name {
public boolean refine( DetectPolygonFromContour.Info info ) {
double before,after;
if( edgeIntensity.computeEdge(info.polygon,!detector.isOutputClockwise()) ) {
before = edgeIntensity.getAverageOutside() - edgeIntensity.getAverageInside(); // depends on control dependency: [if], data = [none]
} else {
return false; // depends on control dependency: [if], data = [none]
}
boolean success = false;
if( refineContour != null ) {
List<Point2D_I32> contour = detector.getContour(info);
refineContour.process(contour,info.splits,work); // depends on control dependency: [if], data = [none]
if( adjustForBias != null )
adjustForBias.process(work, detector.isOutputClockwise());
if( edgeIntensity.computeEdge(work,!detector.isOutputClockwise()) ) {
after = edgeIntensity.getAverageOutside() - edgeIntensity.getAverageInside(); // depends on control dependency: [if], data = [none]
if( after > before ) {
info.edgeInside = edgeIntensity.getAverageInside(); // depends on control dependency: [if], data = [none]
info.edgeOutside = edgeIntensity.getAverageOutside(); // depends on control dependency: [if], data = [none]
info.polygon.set(work); // depends on control dependency: [if], data = [none]
success = true; // depends on control dependency: [if], data = [none]
before = after; // depends on control dependency: [if], data = [none]
}
}
}
if( functionAdjust != null ) {
functionAdjust.adjust(info, detector.isOutputClockwise()); // depends on control dependency: [if], data = [none]
}
if( refineGray != null ) {
work.vertexes.resize(info.polygon.size()); // depends on control dependency: [if], data = [none]
if( refineGray.refine(info.polygon,work) ) {
if( edgeIntensity.computeEdge(work,!detector.isOutputClockwise()) ) {
after = edgeIntensity.getAverageOutside() - edgeIntensity.getAverageInside(); // depends on control dependency: [if], data = [none]
// basically, unless it diverged stick with this optimization
// a near tie
if( after*1.5 > before ) {
info.edgeInside = edgeIntensity.getAverageInside(); // depends on control dependency: [if], data = [none]
info.edgeOutside = edgeIntensity.getAverageOutside(); // depends on control dependency: [if], data = [none]
info.polygon.set(work); // depends on control dependency: [if], data = [none]
success = true; // depends on control dependency: [if], data = [none]
}
}
}
}
return success;
} }
|
public class class_name {
    /**
     * Loads (and optionally saves) all raster variants derived from the raster
     * file: {@code MAX_RASTERS} variants per pass, two passes when smoothing
     * is enabled, one otherwise.
     *
     * @param imageHeight the image height (unused in this method; kept for API
     *                    compatibility)
     * @param save        <code>true</code> to save generated rasters to disk
     * @param prefix      the destination folder prefix (must not be <code>null</code>)
     */
    public void loadRasters(int imageHeight, boolean save, String prefix)
    {
        Check.notNull(prefix);
        final Raster raster = Raster.load(rasterFile);
        // Two passes when smoothing is enabled, one otherwise.
        final int max = UtilConversion.boolToInt(rasterSmooth) + 1;
        // The destination folder does not depend on the loop indices; compute
        // it once instead of MAX_RASTERS * max times (was recomputed per raster).
        final String folder = prefix + Constant.UNDERSCORE + UtilFile.removeExtension(rasterFile.getName());
        for (int m = 0; m < max; m++)
        {
            for (int i = 0; i < MAX_RASTERS; i++)
            {
                // File name encodes the global raster index across both passes.
                final String file = String.valueOf(i + m * MAX_RASTERS) + Constant.DOT + ImageFormat.PNG;
                final Media rasterMedia = Medias.create(rasterFile.getParentPath(), folder, file);
                final ImageBuffer rasterBuffer = createRaster(rasterMedia, raster, i, save);
                rasters.add(rasterBuffer);
            }
        }
    }
}
|
public class class_name {
public void loadRasters(int imageHeight, boolean save, String prefix)
{
Check.notNull(prefix);
final Raster raster = Raster.load(rasterFile);
final int max = UtilConversion.boolToInt(rasterSmooth) + 1;
for (int m = 0; m < max; m++)
{
for (int i = 0; i < MAX_RASTERS; i++)
{
final String folder = prefix + Constant.UNDERSCORE + UtilFile.removeExtension(rasterFile.getName());
final String file = String.valueOf(i + m * MAX_RASTERS) + Constant.DOT + ImageFormat.PNG;
final Media rasterMedia = Medias.create(rasterFile.getParentPath(), folder, file);
final ImageBuffer rasterBuffer = createRaster(rasterMedia, raster, i, save);
rasters.add(rasterBuffer);
// depends on control dependency: [for], data = [none]
}
}
} }
|
public class class_name {
    /**
     * Squashes nulls in the given structure, then recurses into List elements
     * and Map values so nested containers receive the same treatment.
     *
     * @param input the object to process in place
     */
    public static void recursivelySquashNulls(Object input) {
        // First pass: squash nulls at this level.
        Objects.squashNulls( input );
        // Second pass: descend into child containers.
        if ( input instanceof List ) {
            for ( Object element : (List) input ) {
                recursivelySquashNulls( element );
            }
        }
        else if ( input instanceof Map ) {
            for ( Object value : ((Map<String,Object>) input).values() ) {
                recursivelySquashNulls( value );
            }
        }
    }
}
|
public class class_name {
public static void recursivelySquashNulls(Object input) {
// Makes two passes thru the data.
Objects.squashNulls( input );
if ( input instanceof List ) {
List inputList = (List) input;
inputList.forEach( i -> recursivelySquashNulls( i ) ); // depends on control dependency: [if], data = [none]
}
else if ( input instanceof Map ) {
Map<String,Object> inputMap = (Map<String,Object>) input;
for (Map.Entry<String,Object> entry : inputMap.entrySet()) {
recursivelySquashNulls( entry.getValue() ); // depends on control dependency: [for], data = [entry]
}
}
} }
|
public class class_name {
    /**
     * Sends the given bytes to one client asynchronously on that client's
     * dedicated single-purpose executor, so a slow client cannot stall faster
     * ones. On I/O failure the connection is closed and its resources released.
     *
     * @param buffer      the payload to send (wrapped, not copied)
     * @param destination the websocket session to send to
     */
    private void send(final byte[] buffer, final Session destination) {
        synchronized (connections) {
            // execute asynchronously to avoid slower clients from interfering with faster clients
            final ExecutorService connectionThread = connectionThreads.get(destination);
            if (connectionThread == null) {
                // Maybe the client has disconnected in the mean time.
                return;
            }
            connectionThread.execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        // for the case that the runnable was committed shortly before the connection was closed.
                        if (destination.isOpen()) {
                            // FIXME replace with getAsyncRemote and removeconnectionThreads as soon as
                            // getAsyncRemote on tomcat is thread-safe
                            destination.getBasicRemote().sendBinary(ByteBuffer.wrap(buffer));
                        }
                    } catch (final IOException e) {
                        // Send failed: close the connection (best effort), then
                        // report the error and release the client's resources.
                        try {
                            if (destination.isOpen()) {
                                destination.close(new CloseReason(CloseCodes.PROTOCOL_ERROR, "Failed to send data."));
                            }
                        } catch (final IOException e1) {
                            // The outer exception already indicated that something went wrong.
                            ignore(e1);
                        }
                        handleClientError(destination, e);
                        connectionCloses(destination);
                    }
                }
            });
        }
    }
}
|
public class class_name {
private void send(final byte[] buffer, final Session destination) {
synchronized (connections) {
// execute asynchronously to avoid slower clients from interfering with faster clients
final ExecutorService connectionThread = connectionThreads.get(destination);
if (connectionThread == null) {
// Maybe the client has disconnected in the mean time.
return; // depends on control dependency: [if], data = [none]
}
connectionThread.execute(new Runnable() {
@Override
public void run() {
try {
// for the case that the runnable was committed shortly before the connection was closed.
if (destination.isOpen()) {
// FIXME replace with getAsyncRemote and removeconnectionThreads as soon as
// getAsyncRemote on tomcat is thread-safe
destination.getBasicRemote().sendBinary(ByteBuffer.wrap(buffer)); // depends on control dependency: [if], data = [none]
}
} catch (final IOException e) {
try {
if (destination.isOpen()) {
destination.close(new CloseReason(CloseCodes.PROTOCOL_ERROR, "Failed to send data.")); // depends on control dependency: [if], data = [none]
}
} catch (final IOException e1) {
// The outer exception already indicated that something went wrong.
ignore(e1);
} // depends on control dependency: [catch], data = [none]
handleClientError(destination, e);
connectionCloses(destination);
} // depends on control dependency: [catch], data = [none]
}
});
}
} }
|
public class class_name {
    /**
     * Re-evaluates an ImportDeclaration after it was modified: existing links
     * are dropped, the modification is recorded, and new links are created
     * only if the declaration still matches.
     *
     * @param importDeclarationSRef the service reference of the modified declaration
     */
    @Modified(id = "importDeclarations")
    void modifiedImportDeclaration(ServiceReference<ImportDeclaration> importDeclarationSRef) {
        LOG.debug(linkerName + " : Modify the ImportDeclaration "
                + declarationsManager.getDeclaration(importDeclarationSRef));
        synchronized (lock) {
            declarationsManager.removeLinks(importDeclarationSRef);
            declarationsManager.modified(importDeclarationSRef);
            // Relink only declarations that still match after the modification.
            if (declarationsManager.matched(importDeclarationSRef)) {
                declarationsManager.createLinks(importDeclarationSRef);
            }
        }
    }
}
|
public class class_name {
@Modified(id = "importDeclarations")
void modifiedImportDeclaration(ServiceReference<ImportDeclaration> importDeclarationSRef) {
LOG.debug(linkerName + " : Modify the ImportDeclaration "
+ declarationsManager.getDeclaration(importDeclarationSRef));
synchronized (lock) {
declarationsManager.removeLinks(importDeclarationSRef);
declarationsManager.modified(importDeclarationSRef);
if (!declarationsManager.matched(importDeclarationSRef)) {
return; // depends on control dependency: [if], data = [none]
}
declarationsManager.createLinks(importDeclarationSRef);
}
} }
|
public class class_name {
    /**
     * Looks up a property scoped to the calling class and method, determined
     * by examining the current stack trace.
     *
     * @param key the property key
     * @return the property value resolved for the caller's class/method scope
     */
    public String getProperty(String key)
    {
        // Try to get the callers class name and method name by examining the stack.
        String className = null;
        String methodName = null;
        // Stack layout for Thread.currentThread().getStackTrace():
        // [0] Thread.getStackTrace, [1] this method (getProperty), [2] the caller.
        // NOTE(review): the previous code read stack[1], which names this method
        // itself rather than the caller; the caller lives at index 2. The JVM is
        // permitted to omit frames in rare cases, hence the defensive length check.
        StackTraceElement[] stack = Thread.currentThread().getStackTrace();
        if (stack.length >= 3)
        {
            className = stack[2].getClassName();
            methodName = stack[2].getMethodName();
        }
        return getProperty(className, methodName, key);
    }
}
|
public class class_name {
public String getProperty(String key)
{
// Try to get the callers class name and method name by examing the stack.
String className = null;
String methodName = null;
// Java 1.4 onwards only.
/*try
{
throw new Exception();
}
catch (Exception e)
{
StackTraceElement[] stack = e.getStackTrace();
// Check that the stack trace contains at least two elements, one for this method and one for the caller.
if (stack.length >= 2)
{
className = stack[1].getClassName();
methodName = stack[1].getMethodName();
}
}*/
// Java 1.5 onwards only.
StackTraceElement[] stack = Thread.currentThread().getStackTrace();
// Check that the stack trace contains at least two elements, one for this method and one for the caller.
if (stack.length >= 2)
{
className = stack[1].getClassName(); // depends on control dependency: [if], data = [none]
methodName = stack[1].getMethodName(); // depends on control dependency: [if], data = [none]
}
// Java 1.3 and before? Not sure, some horrible thing that parses the text spat out by printStackTrace?
return getProperty(className, methodName, key);
} }
|
public class class_name {
    /**
     * Converts the pending notifications into Notification objects and hands
     * them to the configured provider; hides the notification entirely when
     * nothing is pending.
     *
     * @param performUpdate true to update an existing notification instead of
     *                      showing a new one
     */
    private void performNotificationImp(boolean performUpdate) {
        // Getting pending notifications list
        List<PendingNotification> destNotifications = getNotifications();
        if (destNotifications.size() == 0) {
            // Nothing pending: remove any visible notification and stop.
            hideNotification();
            return;
        }
        // Converting to PendingNotifications
        List<Notification> res = new ArrayList<>();
        for (PendingNotification p : destNotifications) {
            boolean isChannel = false;
            if (p.getPeer().getPeerType() == PeerType.GROUP) {
                // Group peers may actually be channels; check the group type.
                isChannel = groups().getValue(p.getPeer().getPeerId()).getGroupType() == GroupType.CHANNEL;
            }
            res.add(new Notification(p.getPeer(), isChannel, p.getSender(), p.getContent()));
        }
        // Performing notifications
        if (performUpdate) {
            config().getNotificationProvider().onUpdateNotification(context().getMessenger(), res,
                    pendingStorage.getMessagesCount(), pendingStorage.getDialogsCount());
        } else {
            config().getNotificationProvider().onNotification(context().getMessenger(), res,
                    pendingStorage.getMessagesCount(), pendingStorage.getDialogsCount());
        }
    }
}
|
public class class_name {
private void performNotificationImp(boolean performUpdate) {
// Getting pending notifications list
List<PendingNotification> destNotifications = getNotifications();
if (destNotifications.size() == 0) {
hideNotification(); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
// Converting to PendingNotifications
List<Notification> res = new ArrayList<>();
for (PendingNotification p : destNotifications) {
boolean isChannel = false;
if (p.getPeer().getPeerType() == PeerType.GROUP) {
isChannel = groups().getValue(p.getPeer().getPeerId()).getGroupType() == GroupType.CHANNEL; // depends on control dependency: [if], data = [none]
}
res.add(new Notification(p.getPeer(), isChannel, p.getSender(), p.getContent())); // depends on control dependency: [for], data = [p]
}
// Performing notifications
if (performUpdate) {
config().getNotificationProvider().onUpdateNotification(context().getMessenger(), res,
pendingStorage.getMessagesCount(), pendingStorage.getDialogsCount()); // depends on control dependency: [if], data = [none]
} else {
config().getNotificationProvider().onNotification(context().getMessenger(), res,
pendingStorage.getMessagesCount(), pendingStorage.getDialogsCount()); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Returns true if the package-info source contains the given annotation,
     * either fully qualified ({@code @com.example.Foo}) or as a simple name
     * ({@code @Foo}) backed by a matching single or wildcard import.
     *
     * @param pkgInfo    the package-info.java source text
     * @param annotation the fully qualified annotation name; a warning is
     *                   emitted if it contains no dot
     */
    private static boolean hasAnnotation(String pkgInfo, String annotation) {
        if (!annotation.contains(".")) {
            ErrorUtil.warning(annotation + " is not a fully qualified name");
        }
        // Fully qualified usage.
        if (pkgInfo.contains("@" + annotation)) {
            return true;
        }
        int idx = annotation.lastIndexOf(".");
        String annotationPackageName = annotation.substring(0, idx);
        String annotationSimpleName = annotation.substring(idx + 1);
        // Simple-name usage requires a matching import.
        if (pkgInfo.contains("@" + annotationSimpleName)) {
            // Pattern.quote prevents the dots inside the package/simple name from
            // matching arbitrary characters, and \s+ requires actual whitespace
            // after "import" (the previous \s* also matched "importcom.example.Foo").
            String importRegex =
                "import\\s+" + Pattern.quote(annotationPackageName)
                    + "(\\.\\*|\\." + Pattern.quote(annotationSimpleName) + ")";
            Pattern p = Pattern.compile(importRegex);
            Matcher m = p.matcher(pkgInfo);
            if (m.find()) {
                return true;
            }
        }
        return false;
    }
}
|
public class class_name {
private static boolean hasAnnotation(String pkgInfo, String annotation) {
if (!annotation.contains(".")) {
ErrorUtil.warning(annotation + " is not a fully qualified name"); // depends on control dependency: [if], data = [none]
}
if (pkgInfo.contains("@" + annotation)) {
return true; // depends on control dependency: [if], data = [none]
}
int idx = annotation.lastIndexOf(".");
String annotationPackageName = annotation.substring(0, idx);
String annotationSimpleName = annotation.substring(idx + 1);
if (pkgInfo.contains("@" + annotationSimpleName)) {
String importRegex =
"import\\s*" + annotationPackageName + "(\\.\\*|\\." + annotationSimpleName + ")";
Pattern p = Pattern.compile(importRegex);
Matcher m = p.matcher(pkgInfo);
if (m.find()) {
return true; // depends on control dependency: [if], data = [none]
}
}
return false;
} }
|
public class class_name {
    /**
     * Toggles membership of the value x: added if absent, removed if present.
     * Containers are created and removed as needed so that no empty container
     * is left in the array.
     *
     * @param x the value whose membership is flipped
     */
    public void flip(final int x) {
        final short hb = BufferUtil.highbits(x);
        final int i = highLowContainer.getIndex(hb);
        if (i >= 0) {
            // A container for this high-bits key exists: flip within it.
            MappeableContainer c = highLowContainer.getContainerAtIndex(i);
            c = c.flip(BufferUtil.lowbits(x));
            if (!c.isEmpty()) {
                ((MutableRoaringArray) highLowContainer).setContainerAtIndex(i, c);
            } else {
                // Flip removed the last bit; drop the now-empty container.
                ((MutableRoaringArray) highLowContainer).removeAtIndex(i);
            }
        } else {
            // No container yet: insert a new array container holding just x.
            // -i - 1 converts the negative search result into the insertion point.
            final MappeableArrayContainer newac = new MappeableArrayContainer();
            ((MutableRoaringArray) highLowContainer).insertNewKeyValueAt(-i - 1, hb,
                newac.add(BufferUtil.lowbits(x)));
        }
    }
}
|
public class class_name {
public void flip(final int x) {
final short hb = BufferUtil.highbits(x);
final int i = highLowContainer.getIndex(hb);
if (i >= 0) {
MappeableContainer c = highLowContainer.getContainerAtIndex(i);
c = c.flip(BufferUtil.lowbits(x)); // depends on control dependency: [if], data = [none]
if (!c.isEmpty()) {
((MutableRoaringArray) highLowContainer).setContainerAtIndex(i, c); // depends on control dependency: [if], data = [none]
} else {
((MutableRoaringArray) highLowContainer).removeAtIndex(i); // depends on control dependency: [if], data = [none]
}
} else {
final MappeableArrayContainer newac = new MappeableArrayContainer();
((MutableRoaringArray) highLowContainer).insertNewKeyValueAt(-i - 1, hb,
newac.add(BufferUtil.lowbits(x))); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Emits the {@code @Protobuf(...)} annotation line for one field into the
     * generated source: its field type (falling back to ENUM/OBJECT when not in
     * the primitive mapping) and tag order, plus a required flag for
     * OPTIONAL/REQUIRED labels.
     *
     * @param code      destination source buffer
     * @param field     the protobuf field being generated
     * @param enumNames names of all known enum types
     */
    private static void generateProtobufDefinedForField(StringBuilder code, Field field, Set<String> enumNames) {
        code.append("@").append(Protobuf.class.getSimpleName()).append("(");
        String fieldType = fieldTypeMapping.get(field.getType());
        if (fieldType == null) {
            // Unmapped types are either known enums or plain objects.
            fieldType = enumNames.contains(field.getType()) ? "FieldType.ENUM" : "FieldType.OBJECT";
        }
        code.append("fieldType=").append(fieldType).append(", order=").append(field.getTag());
        // Only OPTIONAL and REQUIRED labels emit an explicit required flag.
        if (Label.OPTIONAL == field.getLabel()) {
            code.append(", required=false");
        } else if (Label.REQUIRED == field.getLabel()) {
            code.append(", required=true");
        }
        code.append(")\n");
    }
}
|
public class class_name {
private static void generateProtobufDefinedForField(StringBuilder code, Field field, Set<String> enumNames) {
code.append("@").append(Protobuf.class.getSimpleName()).append("(");
String fieldType = fieldTypeMapping.get(field.getType());
if (fieldType == null) {
if (enumNames.contains(field.getType())) {
fieldType = "FieldType.ENUM"; // depends on control dependency: [if], data = [none]
} else {
fieldType = "FieldType.OBJECT"; // depends on control dependency: [if], data = [none]
}
}
code.append("fieldType=").append(fieldType);
code.append(", order=").append(field.getTag());
if (Label.OPTIONAL == field.getLabel()) {
code.append(", required=false"); // depends on control dependency: [if], data = [none]
} else if (Label.REQUIRED == field.getLabel()) {
code.append(", required=true"); // depends on control dependency: [if], data = [none]
}
code.append(")\n");
} }
|
public class class_name {
    /**
     * Closes the given streams, ignoring null arguments; an IOException raised
     * on close is reported (printed) but not propagated.
     *
     * @param in  the input stream to close; may be null
     * @param out the output stream to close; may be null
     */
    public static void closeIOStream(InputStream in, OutputStream out) {
        closeQuietly(in);
        closeQuietly(out);
    }

    // Null-safe close that reports (but swallows) IOException; factors out the
    // try/catch previously duplicated for each stream.
    private static void closeQuietly(java.io.Closeable closeable) {
        if (closeable != null) {
            try {
                closeable.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
|
public class class_name {
public static void closeIOStream(InputStream in, OutputStream out) {
if (in != null) {
try {
in.close(); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
}
if (out != null) {
try {
out.close(); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
}
} }
|
public class class_name {
    /**
     * Writes a string into the column at idx, one character per row (rows are
     * rowOffset apart), padding with spaces when the string is shorter than
     * the row count.
     *
     * @param value the string to store
     * @param idx   the base index of the column
     */
    public void set(String value, int idx)
    {
        int rowOffset = getM();
        // Hoist the loop-invariant bounds; getN() and value.length() were
        // previously re-evaluated on every iteration.
        final int rows = getN();
        final int length = value.length();
        for ( int i = 0; i < rows; i++ )
        {
            // Pad with a space past the end of the string.
            final char c = i < length ? value.charAt(i) : ' ';
            setChar(c, idx + (rowOffset * i));
        }
    }
}
|
public class class_name {
public void set(String value, int idx)
{
int rowOffset = getM();
for ( int i = 0; i < getN(); i++ )
{
if ( i < value.length())
{
setChar( value.charAt(i), idx + (rowOffset * i)); // depends on control dependency: [if], data = [none]
}
else
{
setChar(' ', idx + (rowOffset * i)); // depends on control dependency: [if], data = [none]
}
}
} }
|
public class class_name {
    /**
     * Returns a new itemset with item prepended; a null itemset yields a
     * singleton array. The input array is never modified.
     *
     * @param itemset the existing itemset, or null
     * @param item    the item to prepend
     * @return a fresh array whose first element is item, followed by itemset
     */
    static int[] insert(int[] itemset, int item) {
        if (itemset == null) {
            return new int[] { item };
        }
        final int[] result = new int[itemset.length + 1];
        result[0] = item;
        // Copy the original items after the newly inserted head element.
        System.arraycopy(itemset, 0, result, 1, itemset.length);
        return result;
    }
}
|
public class class_name {
static int[] insert(int[] itemset, int item) {
if (itemset == null) {
int[] newItemset = {item};
return newItemset; // depends on control dependency: [if], data = [none]
} else {
int n = itemset.length + 1;
int[] newItemset = new int[n];
newItemset[0] = item; // depends on control dependency: [if], data = [none]
System.arraycopy(itemset, 0, newItemset, 1, n - 1); // depends on control dependency: [if], data = [(itemset]
return newItemset; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Finishes reading the current deflated entry: pushes back any bytes the
     * inflater did not consume, reads the optional "data descriptor" (EXT)
     * record in either classic or ZIP64 layout, and validates the entry's
     * size, compressed size and CRC against what was actually inflated.
     *
     * @param e the entry being finished; crc/csize/size may be filled in here
     * @throws IOException  on read failure
     * @throws ZipException if size, compressed size or CRC do not match
     */
    private void readEnd(ZipEntry e) throws IOException {
        int n = inf.getRemaining();
        if (n > 0) {
            // Return unconsumed bytes so the next entry starts at the right offset.
            ((PushbackInputStream)in).unread(buf, len - n, n);
        }
        if ((flag & 8) == 8) {
            /* "Data Descriptor" present */
            // Counts past the 32-bit magic value imply the ZIP64 descriptor layout.
            if (inf.getBytesWritten() > ZIP64_MAGICVAL ||
                inf.getBytesRead() > ZIP64_MAGICVAL) {
                // ZIP64 format
                readFully(tmpbuf, 0, ZIP64_EXTHDR);
                long sig = get32(tmpbuf, 0);
                if (sig != EXTSIG) { // no EXTSIG present
                    // Signature-less descriptor: the first word is already the CRC,
                    // so fields shift back by the CRC width; push back the surplus.
                    e.crc = sig;
                    e.csize = get64(tmpbuf, ZIP64_EXTSIZ - ZIP64_EXTCRC);
                    e.size = get64(tmpbuf, ZIP64_EXTLEN - ZIP64_EXTCRC);
                    ((PushbackInputStream)in).unread(
                        tmpbuf, ZIP64_EXTHDR - ZIP64_EXTCRC - 1, ZIP64_EXTCRC);
                } else {
                    e.crc = get32(tmpbuf, ZIP64_EXTCRC);
                    e.csize = get64(tmpbuf, ZIP64_EXTSIZ);
                    e.size = get64(tmpbuf, ZIP64_EXTLEN);
                }
            } else {
                readFully(tmpbuf, 0, EXTHDR);
                long sig = get32(tmpbuf, 0);
                if (sig != EXTSIG) { // no EXTSIG present
                    // Same signature-less handling for the classic 32-bit layout.
                    e.crc = sig;
                    e.csize = get32(tmpbuf, EXTSIZ - EXTCRC);
                    e.size = get32(tmpbuf, EXTLEN - EXTCRC);
                    ((PushbackInputStream)in).unread(
                        tmpbuf, EXTHDR - EXTCRC - 1, EXTCRC);
                } else {
                    e.crc = get32(tmpbuf, EXTCRC);
                    e.csize = get32(tmpbuf, EXTSIZ);
                    e.size = get32(tmpbuf, EXTLEN);
                }
            }
        }
        // Sanity checks: recorded metadata must match what was actually inflated.
        if (e.size != inf.getBytesWritten()) {
            throw new ZipException(
                "invalid entry size (expected " + e.size +
                " but got " + inf.getBytesWritten() + " bytes)");
        }
        if (e.csize != inf.getBytesRead()) {
            throw new ZipException(
                "invalid entry compressed size (expected " + e.csize +
                " but got " + inf.getBytesRead() + " bytes)");
        }
        if (e.crc != crc.getValue()) {
            throw new ZipException(
                "invalid entry CRC (expected 0x" + Long.toHexString(e.crc) +
                " but got 0x" + Long.toHexString(crc.getValue()) + ")");
        }
    }
}
|
public class class_name {
private void readEnd(ZipEntry e) throws IOException {
int n = inf.getRemaining();
if (n > 0) {
((PushbackInputStream)in).unread(buf, len - n, n);
}
if ((flag & 8) == 8) {
/* "Data Descriptor" present */
if (inf.getBytesWritten() > ZIP64_MAGICVAL ||
inf.getBytesRead() > ZIP64_MAGICVAL) {
// ZIP64 format
readFully(tmpbuf, 0, ZIP64_EXTHDR); // depends on control dependency: [if], data = [none]
long sig = get32(tmpbuf, 0);
if (sig != EXTSIG) { // no EXTSIG present
e.crc = sig; // depends on control dependency: [if], data = [none]
e.csize = get64(tmpbuf, ZIP64_EXTSIZ - ZIP64_EXTCRC); // depends on control dependency: [if], data = [none]
e.size = get64(tmpbuf, ZIP64_EXTLEN - ZIP64_EXTCRC); // depends on control dependency: [if], data = [none]
((PushbackInputStream)in).unread(
tmpbuf, ZIP64_EXTHDR - ZIP64_EXTCRC - 1, ZIP64_EXTCRC); // depends on control dependency: [if], data = [none]
} else {
e.crc = get32(tmpbuf, ZIP64_EXTCRC); // depends on control dependency: [if], data = [none]
e.csize = get64(tmpbuf, ZIP64_EXTSIZ); // depends on control dependency: [if], data = [none]
e.size = get64(tmpbuf, ZIP64_EXTLEN); // depends on control dependency: [if], data = [none]
}
} else {
readFully(tmpbuf, 0, EXTHDR); // depends on control dependency: [if], data = [none]
long sig = get32(tmpbuf, 0);
if (sig != EXTSIG) { // no EXTSIG present
e.crc = sig; // depends on control dependency: [if], data = [none]
e.csize = get32(tmpbuf, EXTSIZ - EXTCRC); // depends on control dependency: [if], data = [none]
e.size = get32(tmpbuf, EXTLEN - EXTCRC); // depends on control dependency: [if], data = [none]
((PushbackInputStream)in).unread(
tmpbuf, EXTHDR - EXTCRC - 1, EXTCRC); // depends on control dependency: [if], data = [none]
} else {
e.crc = get32(tmpbuf, EXTCRC); // depends on control dependency: [if], data = [none]
e.csize = get32(tmpbuf, EXTSIZ); // depends on control dependency: [if], data = [none]
e.size = get32(tmpbuf, EXTLEN); // depends on control dependency: [if], data = [none]
}
}
}
if (e.size != inf.getBytesWritten()) {
throw new ZipException(
"invalid entry size (expected " + e.size +
" but got " + inf.getBytesWritten() + " bytes)");
}
if (e.csize != inf.getBytesRead()) {
throw new ZipException(
"invalid entry compressed size (expected " + e.csize +
" but got " + inf.getBytesRead() + " bytes)");
}
if (e.crc != crc.getValue()) {
throw new ZipException(
"invalid entry CRC (expected 0x" + Long.toHexString(e.crc) +
" but got 0x" + Long.toHexString(crc.getValue()) + ")");
}
} }
|
public class class_name {
    /**
     * Applies the holder's color to the text view, falling back to the given
     * default color list when no holder is available. A null text view is a
     * no-op.
     *
     * @param colorHolder  the color holder, or null to use the default
     * @param textView     the target text view; may be null
     * @param colorDefault the fallback color state list
     */
    public static void applyToOr(ColorHolder colorHolder, TextView textView, ColorStateList colorDefault) {
        if (textView == null) {
            return;
        }
        if (colorHolder != null) {
            colorHolder.applyToOr(textView, colorDefault);
        } else {
            textView.setTextColor(colorDefault);
        }
    }
}
|
public class class_name {
public static void applyToOr(ColorHolder colorHolder, TextView textView, ColorStateList colorDefault) {
if (colorHolder != null && textView != null) {
colorHolder.applyToOr(textView, colorDefault); // depends on control dependency: [if], data = [none]
} else if (textView != null) {
textView.setTextColor(colorDefault); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Appends the values of all eligible declared fields of clazz (read from
     * the given object) to the hash code builder. The object is registered for
     * the duration of the call to guard against cycles in the object graph.
     *
     * <p>Skipped fields: names in excludeFields, synthetic fields (name
     * contains '$'), transients unless useTransients is set, and statics.
     *
     * @param object        the instance whose field values are hashed
     * @param clazz         the class whose declared fields are reflected over
     * @param builder       the accumulating hash code builder
     * @param useTransients whether transient fields participate
     * @param excludeFields field names to skip; may be null
     */
    private static void reflectionAppend(final Object object, final Class<?> clazz, final HashCodeBuilder builder, final boolean useTransients,
                                         final String[] excludeFields) {
        if (isRegistered(object)) {
            // Already being hashed higher up the call stack: avoid infinite recursion.
            return;
        }
        try {
            register(object);
            final Field[] fields = clazz.getDeclaredFields();
            // Allow reading private fields.
            AccessibleObject.setAccessible(fields, true);
            for (final Field field : fields) {
                if (false == ArrayUtil.contains(excludeFields, field.getName())
                    && (field.getName().indexOf('$') == -1)
                    && (useTransients || !Modifier.isTransient(field.getModifiers()))
                    && (!Modifier.isStatic(field.getModifiers()))) {
                    try {
                        final Object fieldValue = field.get(object);
                        builder.append(fieldValue);
                    } catch (final IllegalAccessException e) {
                        // this can't happen. Would get a Security exception instead
                        // throw a runtime exception in case the impossible happens.
                        throw new InternalError("Unexpected IllegalAccessException");
                    }
                }
            }
        } finally {
            // Always unregister, even if appending threw.
            unregister(object);
        }
    }
}
|
public class class_name {
    /**
     * Reflectively appends declared-field values of {@code object} (as seen through
     * {@code clazz}) to {@code builder}, skipping excluded, synthetic ('$'), static
     * and — unless {@code useTransients} — transient fields. Registration guards
     * against infinite recursion on cyclic graphs. The trailing
     * "depends on control dependency" comments are dataset annotations and are
     * intentionally preserved verbatim.
     */
    private static void reflectionAppend(final Object object, final Class<?> clazz, final HashCodeBuilder builder, final boolean useTransients,
        final String[] excludeFields) {
        if (isRegistered(object)) {
            return; // depends on control dependency: [if], data = [none]
        }
        try {
            register(object); // depends on control dependency: [try], data = [none]
            final Field[] fields = clazz.getDeclaredFields();
            AccessibleObject.setAccessible(fields, true); // depends on control dependency: [try], data = [none]
            for (final Field field : fields) {
                if (false == ArrayUtil.contains(excludeFields, field.getName())
                    && (field.getName().indexOf('$') == -1)
                    && (useTransients || !Modifier.isTransient(field.getModifiers()))
                    && (!Modifier.isStatic(field.getModifiers()))) {
                    try {
                        final Object fieldValue = field.get(object);
                        builder.append(fieldValue); // depends on control dependency: [try], data = [none]
                    } catch (final IllegalAccessException e) {
                        // this can't happen. Would get a Security exception instead
                        // throw a runtime exception in case the impossible happens.
                        throw new InternalError("Unexpected IllegalAccessException");
                    } // depends on control dependency: [catch], data = [none]
                }
            }
        } finally {
            unregister(object);
        }
    } }
|
public class class_name {
    /**
     * Deserializes a UTF-8 encoded JSON byte array into an instance of {@code cls}.
     *
     * @param in  raw JSON bytes; may be {@code null} or empty
     * @param cls target type to deserialize into
     * @return the deserialized object, or {@code null} when {@code in} is null/empty
     */
    protected Object jsonDeserialize(byte[] in, Class<?> cls) {
        if (in == null || in.length == 0) {
            return null;
        }
        // Using the Charset overload (instead of the charset *name* "utf-8")
        // removes the impossible UnsupportedEncodingException path entirely —
        // StandardCharsets.UTF_8 is guaranteed to exist on every JVM.
        return JsonUtils.json2Object(new String(in, java.nio.charset.StandardCharsets.UTF_8), cls);
    } }
|
public class class_name {
    /**
     * Deserializes a UTF-8 encoded JSON byte array into an instance of {@code cls};
     * returns {@code null} for null/empty input or when decoding fails (the error
     * is logged and swallowed). The "depends on control dependency" comments are
     * dataset annotations and are intentionally preserved verbatim.
     */
    protected Object jsonDeserialize(byte[] in, Class<?> cls) {
        if (in == null || in.length == 0) {
            return null; // depends on control dependency: [if], data = [none]
        }
        Object res = null;
        try {
            res = JsonUtils.json2Object(new String(in, "utf-8"), cls); // depends on control dependency: [try], data = [none]
        } catch (UnsupportedEncodingException e) {
            logger.error("DeSerialize object fail ", e);
        } // depends on control dependency: [catch], data = [none]
        return res;
    } }
|
public class class_name {
    /**
     * Navigates the UI to the page represented by the given sitemap node.
     * Prefers the node's default file id over the entry id; when {@code second}
     * is true, the node's linked resource is opened instead. If the resolved
     * resource lives in a different site than the current request context, an
     * error notification is shown instead of navigating. Read errors are logged
     * and swallowed.
     *
     * @param nodeData sitemap node whose target page should be opened
     * @param second   true to open the node's linked resource rather than the node itself
     */
    private void openTargetPage(CmsSitemapTreeNodeData nodeData, boolean second) {
        CmsUUID id = nodeData.getClientEntry().getId();
        CmsUUID defaultFileId = nodeData.getClientEntry().getDefaultFileId();
        CmsUUID targetId = defaultFileId;
        if (targetId == null) {
            // No default file: fall back to the entry id itself.
            targetId = id;
        }
        try {
            CmsResource resource = A_CmsUI.getCmsObject().readResource(targetId, CmsResourceFilter.IGNORE_EXPIRATION);
            String link = OpenCms.getLinkManager().substituteLink(A_CmsUI.getCmsObject(), resource);
            if (second) {
                // Replace both resource and link with the linked resource's data.
                resource = A_CmsUI.getCmsObject().readResource(
                    nodeData.getLinkedResource().getStructureId(),
                    CmsResourceFilter.IGNORE_EXPIRATION);
                link = OpenCms.getLinkManager().substituteLink(A_CmsUI.getCmsObject(), resource);
            }
            String mySiteRoot = A_CmsUI.getCmsObject().getRequestContext().getSiteRoot();
            final boolean sameSite = mySiteRoot.equals(OpenCms.getSiteManager().getSiteRoot(resource.getRootPath()));
            if (sameSite) {
                A_CmsUI.get().getPage().setLocation(link);
            } else {
                // Cross-site target: show an error instead of navigating.
                String message = CmsVaadinUtils.getMessageText(
                    Messages.GUI_LOCALECOMPARE_SHOW_WRONGSITE_1,
                    resource.getRootPath());
                Notification.show(message, Type.ERROR_MESSAGE);
            }
        } catch (CmsException e) {
            LOG.error(e.getLocalizedMessage(), e);
        }
    } }
|
public class class_name {
    /**
     * Navigates the UI to the page for the given sitemap node (default file id
     * preferred; linked resource when {@code second}); shows an error
     * notification for cross-site targets and logs read failures. The
     * "depends on control dependency" comments are dataset annotations and are
     * intentionally preserved verbatim.
     */
    private void openTargetPage(CmsSitemapTreeNodeData nodeData, boolean second) {
        CmsUUID id = nodeData.getClientEntry().getId();
        CmsUUID defaultFileId = nodeData.getClientEntry().getDefaultFileId();
        CmsUUID targetId = defaultFileId;
        if (targetId == null) {
            targetId = id; // depends on control dependency: [if], data = [none]
        }
        try {
            CmsResource resource = A_CmsUI.getCmsObject().readResource(targetId, CmsResourceFilter.IGNORE_EXPIRATION);
            String link = OpenCms.getLinkManager().substituteLink(A_CmsUI.getCmsObject(), resource);
            if (second) {
                resource = A_CmsUI.getCmsObject().readResource(
                    nodeData.getLinkedResource().getStructureId(),
                    CmsResourceFilter.IGNORE_EXPIRATION); // depends on control dependency: [if], data = [none]
                link = OpenCms.getLinkManager().substituteLink(A_CmsUI.getCmsObject(), resource); // depends on control dependency: [if], data = [none]
            }
            String mySiteRoot = A_CmsUI.getCmsObject().getRequestContext().getSiteRoot();
            final boolean sameSite = mySiteRoot.equals(OpenCms.getSiteManager().getSiteRoot(resource.getRootPath()));
            if (sameSite) {
                A_CmsUI.get().getPage().setLocation(link); // depends on control dependency: [if], data = [none]
            } else {
                String message = CmsVaadinUtils.getMessageText(
                    Messages.GUI_LOCALECOMPARE_SHOW_WRONGSITE_1,
                    resource.getRootPath());
                Notification.show(message, Type.ERROR_MESSAGE); // depends on control dependency: [if], data = [none]
            }
        } catch (CmsException e) {
            LOG.error(e.getLocalizedMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Emits Graphviz DOT edges from this node to its left and right children.
     * For each existing child it prints the edge header, collects the edge
     * labels ("[NA]" for NA-inclusive children, "[Not NA]"/"<"/">=" depending on
     * the split kind) and delegates the rest of the attribute output to
     * {@code printDotEdgesCommon}. The assertion documents the invariant that a
     * node has either both children or none.
     *
     * @param os                      destination stream for DOT output
     * @param maxLevelsToPrintPerEdge cap on split levels printed per edge
     * @param totalWeight             total observation weight (for edge styling)
     * @param detail                  whether to print detailed edge info
     * @param treeOptions             print options passed through to the common helper
     */
    void printDotEdges(PrintStream os, int maxLevelsToPrintPerEdge, float totalWeight, boolean detail,
                       PrintMojo.PrintTreeOptions treeOptions) {
        assert (leftChild == null) == (rightChild == null);
        if (leftChild != null) {
            os.print("\"" + getDotName() + "\"" + " -> " + "\"" + leftChild.getDotName() + "\"" + " [");
            ArrayList<String> arr = new ArrayList<>();
            if (leftChild.getInclusiveNa()) {
                arr.add("[NA]");
            }
            if (naVsRest) {
                arr.add("[Not NA]");
            }
            else {
                // Numeric splits label the left edge "<"; bitset splits carry no operator.
                if (! isBitset()) {
                    arr.add("<");
                }
            }
            printDotEdgesCommon(os, maxLevelsToPrintPerEdge, arr, leftChild, totalWeight, detail, treeOptions);
        }
        if (rightChild != null) {
            os.print("\"" + getDotName() + "\"" + " -> " + "\"" + rightChild.getDotName() + "\"" + " [");
            ArrayList<String> arr = new ArrayList<>();
            if (rightChild.getInclusiveNa()) {
                arr.add("[NA]");
            }
            if (! naVsRest) {
                // Numeric splits label the right edge ">="; bitset splits carry no operator.
                if (! isBitset()) {
                    arr.add(">=");
                }
            }
            printDotEdgesCommon(os, maxLevelsToPrintPerEdge, arr, rightChild, totalWeight, detail, treeOptions);
        }
    } }
|
public class class_name {
    /**
     * Emits Graphviz DOT edges from this node to its children, labelling them
     * with NA-inclusion and split-operator markers before delegating to
     * {@code printDotEdgesCommon}. The "depends on control dependency" comments
     * are dataset annotations and are intentionally preserved verbatim.
     */
    void printDotEdges(PrintStream os, int maxLevelsToPrintPerEdge, float totalWeight, boolean detail,
                       PrintMojo.PrintTreeOptions treeOptions) {
        assert (leftChild == null) == (rightChild == null);
        if (leftChild != null) {
            os.print("\"" + getDotName() + "\"" + " -> " + "\"" + leftChild.getDotName() + "\"" + " [");
            ArrayList<String> arr = new ArrayList<>(); // depends on control dependency: [if], data = [none]
            if (leftChild.getInclusiveNa()) {
                arr.add("[NA]"); // depends on control dependency: [if], data = [none]
            }
            if (naVsRest) {
                arr.add("[Not NA]"); // depends on control dependency: [if], data = [none]
            }
            else {
                if (! isBitset()) {
                    arr.add("<"); // depends on control dependency: [if], data = [none]
                }
            }
            printDotEdgesCommon(os, maxLevelsToPrintPerEdge, arr, leftChild, totalWeight, detail, treeOptions); // depends on control dependency: [if], data = [none]
        }
        if (rightChild != null) {
            os.print("\"" + getDotName() + "\"" + " -> " + "\"" + rightChild.getDotName() + "\"" + " [");
            ArrayList<String> arr = new ArrayList<>(); // depends on control dependency: [if], data = [none]
            if (rightChild.getInclusiveNa()) {
                arr.add("[NA]"); // depends on control dependency: [if], data = [none]
            }
            if (! naVsRest) {
                if (! isBitset()) {
                    arr.add(">="); // depends on control dependency: [if], data = [none]
                }
            }
            printDotEdgesCommon(os, maxLevelsToPrintPerEdge, arr, rightChild, totalWeight, detail, treeOptions); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Opens the set of rotating output log files, first acquiring an exclusive
     * inter-process lock via a ".lck" companion file. Lock acquisition retries
     * with an increasing "unique" suffix up to MAX_LOCKS, handling both
     * in-process duplicates (via the {@code locks} map) and OS-level file locks.
     * Any exception recorded by the temporary InitializationErrorManager during
     * setup is rethrown at the end. This mirrors the JDK FileHandler protocol.
     *
     * @throws IOException if no lock can be obtained or file creation fails
     */
    private void openFiles() throws IOException {
        LogManager manager = LogManager.getLogManager();
        manager.checkPermission();
        if (count < 1) {
            throw new IllegalArgumentException("file count = " + count);
        }
        if (limit < 0) {
            // Negative limit is normalized to "no limit".
            limit = 0;
        }
        // We register our own ErrorManager during initialization
        // so we can record exceptions.
        InitializationErrorManager em = new InitializationErrorManager();
        setErrorManager(em);
        // Create a lock file. This grants us exclusive access
        // to our set of output files, as long as we are alive.
        int unique = -1;
        for (;;) {
            unique++;
            if (unique > MAX_LOCKS) {
                throw new IOException("Couldn't get lock for " + pattern);
            }
            // Generate a lock file name from the "unique" int.
            lockFileName = generate(pattern, 0, unique).toString() + ".lck";
            // Now try to lock that filename.
            // Because some systems (e.g., Solaris) can only do file locks
            // between processes (and not within a process), we first check
            // if we ourself already have the file locked.
            synchronized(locks) {
                if (locks.get(lockFileName) != null) {
                    // We already own this lock, for a different FileHandler
                    // object.  Try again.
                    continue;
                }
                FileChannel fc;
                try {
                    lockStream = new FileOutputStream(lockFileName);
                    fc = lockStream.getChannel();
                } catch (IOException ix) {
                    // We got an IOException while trying to open the file.
                    // Try the next file.
                    continue;
                }
                boolean available;
                try {
                    available = fc.tryLock() != null;
                    // We got the lock OK.
                } catch (IOException ix) {
                    // We got an IOException while trying to get the lock.
                    // This normally indicates that locking is not supported
                    // on the target directory.  We have to proceed without
                    // getting a lock.   Drop through.
                    available = true;
                }
                if (available) {
                    // We got the lock.  Remember it.
                    locks.put(lockFileName, lockFileName);
                    break;
                }
                // We failed to get the lock.  Try next file.
                fc.close();
            }
        }
        files = new File[count];
        for (int i = 0; i < count; i++) {
            files[i] = generate(pattern, i, unique);
        }
        // Create the initial log file.
        if (append) {
            open(files[0], true);
        } else {
            rotate();
        }
        // Did we detect any exceptions during initialization?
        Exception ex = em.lastException;
        if (ex != null) {
            if (ex instanceof IOException) {
                throw (IOException) ex;
            } else if (ex instanceof SecurityException) {
                throw (SecurityException) ex;
            } else {
                throw new IOException("Exception: " + ex);
            }
        }
        // Install the normal default ErrorManager.
        setErrorManager(new ErrorManager());
    } }
|
public class class_name {
    /**
     * Opens the rotating output log files after acquiring an exclusive ".lck"
     * companion-file lock (retried up to MAX_LOCKS suffixes), then rethrows any
     * exception captured during initialization. The "depends on control
     * dependency" comments are dataset annotations and are intentionally
     * preserved verbatim.
     *
     * @throws IOException if no lock can be obtained or file creation fails
     */
    private void openFiles() throws IOException {
        LogManager manager = LogManager.getLogManager();
        manager.checkPermission();
        if (count < 1) {
            throw new IllegalArgumentException("file count = " + count);
        }
        if (limit < 0) {
            limit = 0;
        }
        // We register our own ErrorManager during initialization
        // so we can record exceptions.
        InitializationErrorManager em = new InitializationErrorManager();
        setErrorManager(em);
        // Create a lock file.  This grants us exclusive access
        // to our set of output files, as long as we are alive.
        int unique = -1;
        for (;;) {
            unique++;
            if (unique > MAX_LOCKS) {
                throw new IOException("Couldn't get lock for " + pattern);
            }
            // Generate a lock file name from the "unique" int.
            lockFileName = generate(pattern, 0, unique).toString() + ".lck";
            // Now try to lock that filename.
            // Because some systems (e.g., Solaris) can only do file locks
            // between processes (and not within a process), we first check
            // if we ourself already have the file locked.
            synchronized(locks) {
                if (locks.get(lockFileName) != null) {
                    // We already own this lock, for a different FileHandler
                    // object.  Try again.
                    continue;
                }
                FileChannel fc;
                try {
                    lockStream = new FileOutputStream(lockFileName); // depends on control dependency: [try], data = [none]
                    fc = lockStream.getChannel(); // depends on control dependency: [try], data = [none]
                } catch (IOException ix) {
                    // We got an IOException while trying to open the file.
                    // Try the next file.
                    continue;
                } // depends on control dependency: [catch], data = [none]
                boolean available;
                try {
                    available = fc.tryLock() != null; // depends on control dependency: [try], data = [none]
                    // We got the lock OK.
                } catch (IOException ix) {
                    // We got an IOException while trying to get the lock.
                    // This normally indicates that locking is not supported
                    // on the target directory.  We have to proceed without
                    // getting a lock.   Drop through.
                    available = true;
                } // depends on control dependency: [catch], data = [none]
                if (available) {
                    // We got the lock.  Remember it.
                    locks.put(lockFileName, lockFileName); // depends on control dependency: [if], data = [none]
                    break;
                }
                // We failed to get the lock.  Try next file.
                fc.close();
            }
        }
        files = new File[count];
        for (int i = 0; i < count; i++) {
            files[i] = generate(pattern, i, unique);
        }
        // Create the initial log file.
        if (append) {
            open(files[0], true);
        } else {
            rotate();
        }
        // Did we detect any exceptions during initialization?
        Exception ex = em.lastException;
        if (ex != null) {
            if (ex instanceof IOException) {
                throw (IOException) ex;
            } else if (ex instanceof SecurityException) {
                throw (SecurityException) ex;
            } else {
                throw new IOException("Exception: " + ex);
            }
        }
        // Install the normal default ErrorManager.
        setErrorManager(new ErrorManager());
    } }
|
public class class_name {
    /**
     * Fetches a CommerceNotificationQueueEntry by primary key, consulting the
     * entity cache first. A cached {@code nullModel} sentinel short-circuits to
     * {@code null}; on a cache miss the entity is loaded from the session and
     * either cached (if found) or recorded as {@code nullModel} (if absent).
     * On failure the cache entry is invalidated and the exception is rethrown
     * via {@code processException}.
     *
     * @param primaryKey the primary key to look up
     * @return the entity, or {@code null} if it does not exist
     */
    @Override
    public CommerceNotificationQueueEntry fetchByPrimaryKey(
        Serializable primaryKey) {
        Serializable serializable = entityCache.getResult(CommerceNotificationQueueEntryModelImpl.ENTITY_CACHE_ENABLED,
                CommerceNotificationQueueEntryImpl.class, primaryKey);
        if (serializable == nullModel) {
            // Negative-result sentinel: a previous lookup proved the key is absent.
            return null;
        }
        CommerceNotificationQueueEntry commerceNotificationQueueEntry = (CommerceNotificationQueueEntry)serializable;
        if (commerceNotificationQueueEntry == null) {
            Session session = null;
            try {
                session = openSession();
                commerceNotificationQueueEntry = (CommerceNotificationQueueEntry)session.get(CommerceNotificationQueueEntryImpl.class,
                        primaryKey);
                if (commerceNotificationQueueEntry != null) {
                    cacheResult(commerceNotificationQueueEntry);
                }
                else {
                    // Remember the miss so future lookups skip the database.
                    entityCache.putResult(CommerceNotificationQueueEntryModelImpl.ENTITY_CACHE_ENABLED,
                        CommerceNotificationQueueEntryImpl.class, primaryKey,
                        nullModel);
                }
            }
            catch (Exception e) {
                // Drop any stale cache entry before propagating the failure.
                entityCache.removeResult(CommerceNotificationQueueEntryModelImpl.ENTITY_CACHE_ENABLED,
                    CommerceNotificationQueueEntryImpl.class, primaryKey);
                throw processException(e);
            }
            finally {
                closeSession(session);
            }
        }
        return commerceNotificationQueueEntry;
    } }
|
public class class_name {
    /**
     * Cache-first fetch of a CommerceNotificationQueueEntry by primary key;
     * misses are loaded from the session and cached (or recorded as the
     * {@code nullModel} sentinel). The "depends on control dependency" comments
     * are dataset annotations and are intentionally preserved verbatim.
     */
    @Override
    public CommerceNotificationQueueEntry fetchByPrimaryKey(
        Serializable primaryKey) {
        Serializable serializable = entityCache.getResult(CommerceNotificationQueueEntryModelImpl.ENTITY_CACHE_ENABLED,
                CommerceNotificationQueueEntryImpl.class, primaryKey);
        if (serializable == nullModel) {
            return null; // depends on control dependency: [if], data = [none]
        }
        CommerceNotificationQueueEntry commerceNotificationQueueEntry = (CommerceNotificationQueueEntry)serializable;
        if (commerceNotificationQueueEntry == null) {
            Session session = null;
            try {
                session = openSession(); // depends on control dependency: [try], data = [none]
                commerceNotificationQueueEntry = (CommerceNotificationQueueEntry)session.get(CommerceNotificationQueueEntryImpl.class,
                        primaryKey); // depends on control dependency: [try], data = [none]
                if (commerceNotificationQueueEntry != null) {
                    cacheResult(commerceNotificationQueueEntry); // depends on control dependency: [if], data = [(commerceNotificationQueueEntry]
                }
                else {
                    entityCache.putResult(CommerceNotificationQueueEntryModelImpl.ENTITY_CACHE_ENABLED,
                        CommerceNotificationQueueEntryImpl.class, primaryKey,
                        nullModel); // depends on control dependency: [if], data = [none]
                }
            }
            catch (Exception e) {
                entityCache.removeResult(CommerceNotificationQueueEntryModelImpl.ENTITY_CACHE_ENABLED,
                    CommerceNotificationQueueEntryImpl.class, primaryKey);
                throw processException(e);
            } // depends on control dependency: [catch], data = [none]
            finally {
                closeSession(session);
            }
        }
        return commerceNotificationQueueEntry;
    } }
|
public class class_name {
    /**
     * Sets the media formats for this builder-style argument object.
     * A null or empty array clears the formats.
     *
     * @param values media formats to apply; null/empty clears the setting
     * @return this instance, for chaining
     */
    public @NotNull MediaArgs mediaFormats(MediaFormat... values) {
        if (values == null || values.length == 0) {
            this.mediaFormats = null;
        }
        else {
            // Defensive copy: never alias the caller-supplied varargs array, so a
            // later external mutation cannot silently change this object's state.
            this.mediaFormats = values.clone();
        }
        return this;
    } }
|
public class class_name {
    /**
     * Sets the media formats; a null or empty varargs array clears the setting.
     * Returns {@code this} for chaining. The "depends on control dependency"
     * comments are dataset annotations and are intentionally preserved verbatim.
     */
    public @NotNull MediaArgs mediaFormats(MediaFormat... values) {
        if (values == null || values.length == 0) {
            this.mediaFormats = null; // depends on control dependency: [if], data = [none]
        }
        else {
            this.mediaFormats = values; // depends on control dependency: [if], data = [none]
        }
        return this;
    } }
|
public class class_name {
    /**
     * Sets the maximum number of pool threads and triggers a pool update.
     * A value of zero or less selects DEFAULT_THREAD_MAX; an unchanged value is
     * a no-op to skip the update() overhead.
     *
     * @param max new thread maximum; must not be below the configured idle minimum
     * @throws ConfigException if the (normalized) maximum is below {@code _idleMin}
     */
    public void setThreadMax(int max)
    {
        if (max == _threadMax) {
            // avoid update() overhead if unchanged
            return;
        }
        if (max <= 0) {
            max = DEFAULT_THREAD_MAX;
        }
        if (max < _idleMin)
            throw new ConfigException(L.l("IdleMin ({0}) must be less than ThreadMax ({1})", _idleMin, max));
        // NOTE(review): this check looks unreachable if DEFAULT_THREAD_MAX >= 1,
        // since non-positive values were already replaced above — confirm the
        // constant's value before removing it.
        if (max < 1)
            throw new ConfigException(L.l("ThreadMax ({0}) must be greater than zero",
                                          max));
        _threadMax = max;
        update();
    } }
|
public class class_name {
    /**
     * Sets the maximum number of pool threads (non-positive input selects
     * DEFAULT_THREAD_MAX) and triggers update(); validates against the idle
     * minimum. The "depends on control dependency" comments are dataset
     * annotations and are intentionally preserved verbatim.
     */
    public void setThreadMax(int max)
    {
        if (max == _threadMax) {
            // avoid update() overhead if unchanged
            return; // depends on control dependency: [if], data = [none]
        }
        if (max <= 0) {
            max = DEFAULT_THREAD_MAX; // depends on control dependency: [if], data = [none]
        }
        if (max < _idleMin)
            throw new ConfigException(L.l("IdleMin ({0}) must be less than ThreadMax ({1})", _idleMin, max));
        if (max < 1)
            throw new ConfigException(L.l("ThreadMax ({0}) must be greater than zero",
                                          max));
        _threadMax = max;
        update();
    } }
|
public class class_name {
    /**
     * Builds a single-line, human-readable description of the running Java
     * platform (specification version, VM vendor/name/version/info), appending
     * the Android API level when running on Android.
     *
     * @return the platform description string
     */
    public static String describePlatform() {
        StringBuilder description = new StringBuilder("Java ")
            .append(SPECIFICATION_VERSION)
            .append(" (")
            .append("VM vendor name=\"").append(VENDOR).append("\", ")
            .append("VM vendor version=").append(VENDOR_VERSION).append(", ")
            .append("JVM name=\"").append(JVM_NAME).append("\", ")
            .append("JVM version=").append(VM_VERSION).append(", ")
            .append("JVM info=").append(VM_INFO);
        // ANDROID_VERSION is zero on non-Android platforms.
        if (ANDROID_VERSION != 0) {
            description.append(", API level=").append(ANDROID_VERSION);
        }
        return description.append(")").toString();
    } }
|
public class class_name {
    /**
     * Builds a one-line description of the running Java platform, including the
     * Android API level when non-zero. The "depends on control dependency"
     * comment is a dataset annotation and is intentionally preserved verbatim.
     */
    public static String describePlatform() {
        String desc = "Java " + SPECIFICATION_VERSION + " ("
            + "VM vendor name=\"" + VENDOR + "\", "
            + "VM vendor version=" + VENDOR_VERSION + ", "
            + "JVM name=\"" + JVM_NAME + "\", "
            + "JVM version=" + VM_VERSION + ", "
            + "JVM info=" + VM_INFO;
        // Add the API level if it's an Android platform
        if(ANDROID_VERSION != 0) {
            desc += ", API level=" + ANDROID_VERSION; // depends on control dependency: [if], data = [none]
        }
        desc += ")";
        return desc;
    } }
|
public class class_name {
    /**
     * Persists the serialized StepStatus for the given step execution by
     * updating the {@code stepstatus} row in place. SQL and serialization
     * failures are wrapped in PersistenceException; the connection and
     * statement are always released.
     *
     * @param stepExecutionId primary key of the step status row to update
     * @param stepStatus      status object to serialize and store
     */
    @Override
    public void updateStepStatus(long stepExecutionId, StepStatus stepStatus) {
        logger.entering(CLASSNAME, "updateStepStatus", new Object[] {stepExecutionId, stepStatus});
        if (logger.isLoggable(Level.FINE)) {
            logger.fine("Updating StepStatus to: " + stepStatus.getBatchStatus());
        }
        Connection conn = null;
        PreparedStatement statement = null;
        try {
            conn = getConnection();
            statement = conn.prepareStatement("UPDATE stepstatus SET obj = ? WHERE id = ?");
            statement.setBytes(1, serializeObject(stepStatus));
            statement.setLong(2, stepExecutionId);
            statement.executeUpdate();
        } catch (SQLException e) {
            throw new PersistenceException(e);
        } catch (IOException e) {
            // serializeObject may fail with IOException; surface it uniformly.
            throw new PersistenceException(e);
        } finally {
            cleanupConnection(conn, null, statement);
        }
        logger.exiting(CLASSNAME, "updateStepStatus");
    } }
|
public class class_name {
    /**
     * Persists the serialized StepStatus for the given step execution via a
     * single UPDATE; failures are wrapped in PersistenceException. The
     * "depends on control dependency" comments are dataset annotations (some
     * appear displaced onto following lines) and are intentionally preserved
     * verbatim.
     */
    @Override
    public void updateStepStatus(long stepExecutionId, StepStatus stepStatus) {
        logger.entering(CLASSNAME, "updateStepStatus", new Object[] {stepExecutionId, stepStatus});
        if (logger.isLoggable(Level.FINE)) {
            logger.fine("Updating StepStatus to: " + stepStatus.getBatchStatus());
            // depends on control dependency: [if], data = [none]
        }
        Connection conn = null;
        PreparedStatement statement = null;
        try {
            conn = getConnection();
            // depends on control dependency: [try], data = [none]
            statement = conn.prepareStatement("UPDATE stepstatus SET obj = ? WHERE id = ?");
            // depends on control dependency: [try], data = [none]
            statement.setBytes(1, serializeObject(stepStatus));
            // depends on control dependency: [try], data = [none]
            statement.setLong(2, stepExecutionId);
            // depends on control dependency: [try], data = [none]
            statement.executeUpdate();
            // depends on control dependency: [try], data = [none]
        } catch (SQLException e) {
            throw new PersistenceException(e);
        } catch (IOException e) {
            // depends on control dependency: [catch], data = [none]
            throw new PersistenceException(e);
        } finally {
            // depends on control dependency: [catch], data = [none]
            cleanupConnection(conn, null, statement);
        }
        logger.exiting(CLASSNAME, "updateStepStatus");
    } }
|
public class class_name {
    /**
     * Checks whether a regex match within a sentence aligns with token
     * boundaries. Accepts immediately when the match spans the whole sentence
     * or is flanked by whitespace; otherwise scans the sentence's tokens and
     * also tolerates the separator characters '.', '/', '-', '\u2013' which the
     * tokenizer does not split off from numbers.
     *
     * @param r    the regex match result, with offsets relative to the sentence text
     * @param s    the sentence annotation providing covered text and offsets
     * @param jcas CAS used to iterate the sentence's Token annotations
     * @return true if both match boundaries align with token boundaries
     */
    public static Boolean checkTokenBoundaries(MatchResult r, Sentence s, JCas jcas){
        // Primitive booleans: the original boxed Boolean locals add pointless
        // boxing on every assignment; the Boolean return type is kept for callers.
        boolean beginOK = false;
        boolean endOK = false;
        // whole expression is marked as a sentence
        if ((r.end() - r.start()) == (s.getEnd() - s.getBegin())){
            return true;
        }
        // Only check Token boundaries if no white-spaces in front of and behind the match-result
        if ((r.start() > 0)
                && ((s.getCoveredText().subSequence(r.start()-1, r.start()).equals(" ")))
                && ((r.end() < s.getCoveredText().length()) && ((s.getCoveredText().subSequence(r.end(), r.end()+1).equals(" "))))) {
            return true;
        }
        // other token boundaries than white-spaces
        else {
            FSIterator iterToken = jcas.getAnnotationIndex(Token.type).subiterator(s);
            while (iterToken.hasNext()) {
                Token t = (Token) iterToken.next();
                // Check begin
                if ((r.start() + s.getBegin()) == t.getBegin()){
                    beginOK = true;
                }
                // Tokenizer does not split number from some symbols (".", "/", "-", "–"),
                // e.g., "...12 August-24 Augsut..."
                else if ((r.start() > 0)
                        && ((s.getCoveredText().subSequence(r.start()-1, r.start()).equals("."))
                            || (s.getCoveredText().subSequence(r.start()-1, r.start()).equals("/"))
                            || (s.getCoveredText().subSequence(r.start()-1, r.start()).equals("–"))
                            || (s.getCoveredText().subSequence(r.start()-1, r.start()).equals("-")))) {
                    beginOK = true;
                }
                // Check end
                if ((r.end() + s.getBegin()) == t.getEnd()) {
                    endOK = true;
                }
                // Tokenizer does not split number from some symbols (".", "/", "-", "–"),
                // e.g., "... in 1990. New Sentence ..."
                else if ((r.end() < s.getCoveredText().length())
                        && ((s.getCoveredText().subSequence(r.end(), r.end()+1).equals("."))
                            || (s.getCoveredText().subSequence(r.end(), r.end()+1).equals("/"))
                            || (s.getCoveredText().subSequence(r.end(), r.end()+1).equals("–"))
                            || (s.getCoveredText().subSequence(r.end(), r.end()+1).equals("-")))) {
                    endOK = true;
                }
                if (beginOK && endOK)
                    return true;
            }
        }
        return false;
    } }
|
public class class_name {
    /**
     * Checks whether a regex match inside a sentence aligns with token
     * boundaries, tolerating the separator characters '.', '/', '-', '\u2013'.
     * The "depends on control dependency" comments are dataset annotations and
     * are intentionally preserved verbatim.
     */
    public static Boolean checkTokenBoundaries(MatchResult r, Sentence s, JCas jcas){
        Boolean beginOK = false;
        Boolean endOK = false;
        // whole expression is marked as a sentence
        if ((r.end() - r.start()) == (s.getEnd() -s.getBegin())){
            return true; // depends on control dependency: [if], data = [none]
        }
        // Only check Token boundaries if no white-spaces in front of and behind the match-result
        if ((r.start() > 0)
                && ((s.getCoveredText().subSequence(r.start()-1, r.start()).equals(" ")))
                && ((r.end() < s.getCoveredText().length()) && ((s.getCoveredText().subSequence(r.end(), r.end()+1).equals(" "))))) {
            return true; // depends on control dependency: [if], data = [none]
        }
        // other token boundaries than white-spaces
        else {
            FSIterator iterToken = jcas.getAnnotationIndex(Token.type).subiterator(s);
            while (iterToken.hasNext()) {
                Token t = (Token) iterToken.next();
                // Check begin
                if ((r.start() + s.getBegin()) == t.getBegin()){
                    beginOK = true; // depends on control dependency: [if], data = [none]
                }
                // Tokenizer does not split number from some symbols (".", "/", "-", "–"),
                // e.g., "...12 August-24 Augsut..."
                else if ((r.start() > 0)
                        && ((s.getCoveredText().subSequence(r.start()-1, r.start()).equals("."))
                            || (s.getCoveredText().subSequence(r.start()-1, r.start()).equals("/"))
                            || (s.getCoveredText().subSequence(r.start()-1, r.start()).equals("–"))
                            || (s.getCoveredText().subSequence(r.start()-1, r.start()).equals("-")))) {
                    beginOK = true; // depends on control dependency: [if], data = [none]
                }
                // Check end
                if ((r.end() + s.getBegin()) == t.getEnd()) {
                    endOK = true; // depends on control dependency: [if], data = [none]
                }
                // Tokenizer does not split number from some symbols (".", "/", "-", "–"),
                // e.g., "... in 1990. New Sentence ..."
                else if ((r.end() < s.getCoveredText().length())
                        && ((s.getCoveredText().subSequence(r.end(), r.end()+1).equals("."))
                            || (s.getCoveredText().subSequence(r.end(), r.end()+1).equals("/"))
                            || (s.getCoveredText().subSequence(r.end(), r.end()+1).equals("–"))
                            || (s.getCoveredText().subSequence(r.end(), r.end()+1).equals("-")))) {
                    endOK = true; // depends on control dependency: [if], data = [none]
                }
                if (beginOK && endOK)
                    return true;
            }
        }
        return false;
    } }
|
public class class_name {
    /**
     * Recursively deletes the item with the given key from the AVL-style
     * subtree rooted at {@code node}, rebalancing on the way back up.
     * A two-child node is replaced by its in-order successor's item; a node
     * with at most one child is replaced by that child. The returned
     * UpdateResult carries the new subtree root and, if found, the removed item.
     *
     * @param key  key of the item to remove
     * @param node root of the subtree to search (may be null)
     * @return result holding the rebalanced subtree root and the removed item (if any)
     */
    private UpdateResult delete(long key, Node node) {
        UpdateResult result;
        if (node == null) {
            // Item not found.
            result = new UpdateResult();
        } else {
            long itemKey = node.item.key();
            if (key < itemKey) {
                // Given key is smaller than the current node's item key; proceed to the node's left child.
                result = delete(key, node.left);
                node.left = result.node;
            } else if (key > itemKey) {
                // Given key is larger than the current node's item key; proceed to the node's right child.
                result = delete(key, node.right);
                node.right = result.node;
            } else {
                // Found the node. Remember it's item.
                result = new UpdateResult();
                result.updatedItem = node.item;
                if (node.left != null && node.right != null) {
                    // The node has two children. Replace the node's item with its in-order successor, and then remove
                    // that successor's node.
                    node.item = findSmallest(node.right);
                    node.right = delete(node.item.key(), node.right).node;
                } else {
                    // The node has just one child. Replace it with that child.
                    node = (node.left != null) ? node.left : node.right;
                    this.size--;
                    this.modCount++;
                }
            }
            // Rebalance the sub-tree, if necessary.
            result.node = balance(node);
        }
        return result;
    } }
|
public class class_name {
    /**
     * Recursive AVL-style delete: removes the item with the given key from the
     * subtree rooted at {@code node}, replacing two-child nodes with their
     * in-order successor and rebalancing on unwind. The "depends on control
     * dependency" comments are dataset annotations and are intentionally
     * preserved verbatim.
     */
    private UpdateResult delete(long key, Node node) {
        UpdateResult result;
        if (node == null) {
            // Item not found.
            result = new UpdateResult(); // depends on control dependency: [if], data = [none]
        } else {
            long itemKey = node.item.key();
            if (key < itemKey) {
                // Given key is smaller than the current node's item key; proceed to the node's left child.
                result = delete(key, node.left); // depends on control dependency: [if], data = [(key]
                node.left = result.node; // depends on control dependency: [if], data = [none]
            } else if (key > itemKey) {
                // Given key is larger than the current node's item key; proceed to the node's right child.
                result = delete(key, node.right); // depends on control dependency: [if], data = [(key]
                node.right = result.node; // depends on control dependency: [if], data = [none]
            } else {
                // Found the node. Remember it's item.
                result = new UpdateResult(); // depends on control dependency: [if], data = [none]
                result.updatedItem = node.item; // depends on control dependency: [if], data = [none]
                if (node.left != null && node.right != null) {
                    // The node has two children. Replace the node's item with its in-order successor, and then remove
                    // that successor's node.
                    node.item = findSmallest(node.right); // depends on control dependency: [if], data = [none]
                    node.right = delete(node.item.key(), node.right).node; // depends on control dependency: [if], data = [none]
                } else {
                    // The node has just one child. Replace it with that child.
                    node = (node.left != null) ? node.left : node.right; // depends on control dependency: [if], data = [(node.left]
                    this.size--; // depends on control dependency: [if], data = [none]
                    this.modCount++; // depends on control dependency: [if], data = [none]
                }
            }
            // Rebalance the sub-tree, if necessary.
            result.node = balance(node); // depends on control dependency: [if], data = [(node]
        }
        return result;
    } }
|
public class class_name {
    /**
     * REST endpoint that deletes the entity with the given id.
     * Responds 204 (No Content) on success and 404 (Not Found) on any failure.
     *
     * @param id primary key of the entity to delete
     * @return 204 response on success, 404 on failure
     */
    @RequestMapping(value = "/{id}", method = RequestMethod.DELETE)
    public ResponseEntity<E> delete(@PathVariable int id) {
        try {
            E entityToDelete = this.service.findById(id);
            this.service.delete(entityToDelete);
            // extract the original classname from the name of the proxy, which
            // also contains _$$_ and some kind of hash after the original
            // classname
            final String proxyClassName = entityToDelete.getClass().getSimpleName();
            final String simpleClassName = StringUtils.substringBefore(proxyClassName, "_$$_");
            LOG.trace("Deleted " + simpleClassName + " with ID " + id);
            return new ResponseEntity<E>(HttpStatus.NO_CONTENT);
        } catch (Exception e) {
            // Pass the exception object so the full stack trace is logged instead
            // of only the (possibly null) message.
            LOG.error("Error deleting entity with ID " + id + ": "
                + e.getMessage(), e);
            return new ResponseEntity<E>(HttpStatus.NOT_FOUND);
        }
    } }
|
public class class_name {
    /**
     * REST DELETE endpoint: removes the entity with the given id, returning 204
     * on success and 404 on any failure. The "depends on control dependency"
     * comments are dataset annotations and are intentionally preserved verbatim.
     */
    @RequestMapping(value = "/{id}", method = RequestMethod.DELETE)
    public ResponseEntity<E> delete(@PathVariable int id) {
        try {
            E entityToDelete = this.service.findById(id);
            this.service.delete(entityToDelete); // depends on control dependency: [try], data = [none]
            // extract the original classname from the name of the proxy, which
            // also contains _$$_ and some kind of hash after the original
            // classname
            final String proxyClassName = entityToDelete.getClass().getSimpleName();
            final String simpleClassName = StringUtils.substringBefore(proxyClassName, "_$$_");
            LOG.trace("Deleted " + simpleClassName + " with ID " + id); // depends on control dependency: [try], data = [none]
            return new ResponseEntity<E>(HttpStatus.NO_CONTENT); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            LOG.error("Error deleting entity with ID " + id + ": "
                + e.getMessage());
            return new ResponseEntity<E>(HttpStatus.NOT_FOUND);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Replaces every {@code tokenPattern} occurrence in {@code input} with the
     * corresponding variable value from {@code variables} (the token's wrapper
     * characters — first two and last one — are stripped before lookup).
     * Null values are substituted as the empty string.
     *
     * @param input     text containing variable tokens
     * @param variables variable instances to resolve token values from
     * @return the input with all tokens substituted
     * @throws MdwException if any substitution fails
     */
    public static String substitute(String input, List<VariableInstance> variables)
            throws MdwException {
        // StringBuilder instead of StringBuffer: the buffer is method-local, so
        // StringBuffer's per-call synchronization is pure overhead.
        StringBuilder substituted = new StringBuilder(input.length());
        try {
            Matcher matcher = tokenPattern.matcher(input);
            int index = 0;
            while (matcher.find()) {
                String match = matcher.group();
                // Copy the literal text between the previous token and this one.
                substituted.append(input.substring(index, matcher.start()));
                Object value = getVariableValue(match.substring(2, match.length() - 1), variables);
                if (value != null)
                    substituted.append(value);
                index = matcher.end();
            }
            // Copy the trailing literal text after the last token.
            substituted.append(input.substring(index));
            return substituted.toString();
        }
        catch (Exception ex) {
            throw new MdwException("Error substituting expression value(s) in input: '" + input + "'", ex);
        }
    } }
|
public class class_name {
    /**
     * Replaces every {@code tokenPattern} occurrence in {@code input} with the
     * matching variable value (wrapper characters stripped before lookup);
     * failures are wrapped in MdwException. The "depends on control dependency"
     * comments are dataset annotations and are intentionally preserved verbatim.
     */
    public static String substitute(String input, List<VariableInstance> variables)
            throws MdwException {
        StringBuffer substituted = new StringBuffer(input.length());
        try {
            Matcher matcher = tokenPattern.matcher(input);
            int index = 0;
            while (matcher.find()) {
                String match = matcher.group();
                substituted.append(input.substring(index, matcher.start())); // depends on control dependency: [while], data = [none]
                Object value = getVariableValue(match.substring(2, match.length() - 1), variables);
                if (value != null)
                    substituted.append(value);
                index = matcher.end(); // depends on control dependency: [while], data = [none]
            }
            substituted.append(input.substring(index));
            return substituted.toString();
        }
        catch (Exception ex) {
            throw new MdwException("Error substituting expression value(s) in input: '" + input + "'", ex);
        }
    } }
|
public class class_name {
    /**
     * Lists all executions of a job as an Observable of pages, transparently
     * following {@code nextPageLink} until a page reports no further link.
     *
     * @param resourceGroupName resource group containing the server
     * @param serverName        SQL server name
     * @param jobAgentName      job agent name
     * @param jobName           job whose executions are listed
     * @return observable emitting each page of job executions in order
     */
    public Observable<ServiceResponse<Page<JobExecutionInner>>> listByJobWithServiceResponseAsync(final String resourceGroupName, final String serverName, final String jobAgentName, final String jobName) {
        return listByJobSinglePageAsync(resourceGroupName, serverName, jobAgentName, jobName)
            .concatMap(new Func1<ServiceResponse<Page<JobExecutionInner>>, Observable<ServiceResponse<Page<JobExecutionInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<JobExecutionInner>>> call(ServiceResponse<Page<JobExecutionInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        // Last page: stop the recursive pagination here.
                        return Observable.just(page);
                    }
                    // Emit this page, then recursively fetch and emit the rest.
                    return Observable.just(page).concatWith(listByJobNextWithServiceResponseAsync(nextPageLink));
                }
            });
    } }
|
public class class_name {
    /**
     * Lists all executions of a job as an Observable of pages, following
     * {@code nextPageLink} recursively. The "depends on control dependency"
     * comment is a dataset annotation and is intentionally preserved verbatim.
     */
    public Observable<ServiceResponse<Page<JobExecutionInner>>> listByJobWithServiceResponseAsync(final String resourceGroupName, final String serverName, final String jobAgentName, final String jobName) {
        return listByJobSinglePageAsync(resourceGroupName, serverName, jobAgentName, jobName)
            .concatMap(new Func1<ServiceResponse<Page<JobExecutionInner>>, Observable<ServiceResponse<Page<JobExecutionInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<JobExecutionInner>>> call(ServiceResponse<Page<JobExecutionInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page); // depends on control dependency: [if], data = [none]
                    }
                    return Observable.just(page).concatWith(listByJobNextWithServiceResponseAsync(nextPageLink));
                }
            });
    } }
|
public class class_name {
    /**
     * Applies a new locale to this date component: rebuilds the medium-style
     * date formatter for that locale, updates the tooltip to the new pattern,
     * re-renders the current date and relays out. No-op when the locale is
     * unchanged or date-pattern changes are being ignored.
     *
     * @param locale the locale to switch to
     */
    public void setLocale(Locale locale) {
        if (locale == getLocale() || ignoreDatePatternChange) {
            return;
        }
        super.setLocale(locale);
        // Medium-style DateFormat instances are SimpleDateFormat in practice;
        // the cast exposes toPattern() for the tooltip.
        dateFormatter = (SimpleDateFormat) DateFormat.getDateInstance(DateFormat.MEDIUM, locale);
        setToolTipText(dateFormatter.toPattern());
        setDate(date, false);
        doLayout();
    } }
|
public class class_name {
    /**
     * Applies a new locale: rebuilds the medium date formatter, updates the
     * tooltip pattern, re-renders the date and relays out; no-op when unchanged.
     * The "depends on control dependency" comment is a dataset annotation and
     * is intentionally preserved verbatim.
     */
    public void setLocale(Locale locale) {
        if (locale == getLocale() || ignoreDatePatternChange) {
            return; // depends on control dependency: [if], data = [none]
        }
        super.setLocale(locale);
        dateFormatter = (SimpleDateFormat) DateFormat.getDateInstance(DateFormat.MEDIUM, locale);
        setToolTipText(dateFormatter.toPattern());
        setDate(date, false);
        doLayout();
    } }
|
public class class_name {
    /**
     * Assembles the Netty server channel pipeline for HTTP traffic. For plain
     * HTTP the codec, compressor, chunk writer and optional trace/access-log
     * handlers are installed first; then request-size validators, the WebSocket
     * handshake handler, back-pressure handling and the source handler are
     * appended, with an optional idle-state handler inserted before the source
     * handler. Handler order is the contract here — do not reorder.
     *
     * @param serverPipeline    the pipeline to populate
     * @param initialHttpScheme "http" to install the plain-HTTP codec stack
     */
    public void configureHttpPipeline(ChannelPipeline serverPipeline, String initialHttpScheme) {
        if (initialHttpScheme.equals(Constants.HTTP_SCHEME)) {
            serverPipeline.addLast(Constants.HTTP_ENCODER, new HttpResponseEncoder());
            serverPipeline.addLast(Constants.HTTP_DECODER,
                new HttpRequestDecoder(reqSizeValidationConfig.getMaxUriLength(),
                    reqSizeValidationConfig.getMaxHeaderSize(),
                    reqSizeValidationConfig.getMaxChunkSize()));
            serverPipeline.addLast(Constants.HTTP_COMPRESSOR, new CustomHttpContentCompressor());
            serverPipeline.addLast(Constants.HTTP_CHUNK_WRITER, new ChunkedWriteHandler());
            if (httpTraceLogEnabled) {
                serverPipeline.addLast(HTTP_TRACE_LOG_HANDLER, new HttpTraceLoggingHandler(TRACE_LOG_DOWNSTREAM));
            }
            if (httpAccessLogEnabled) {
                serverPipeline.addLast(HTTP_ACCESS_LOG_HANDLER, new HttpAccessLoggingHandler(ACCESS_LOG));
            }
        }
        serverPipeline.addLast("uriLengthValidator", new UriAndHeaderLengthValidator(this.serverName));
        // Entity-body validation is opt-in: -1 means unlimited.
        if (reqSizeValidationConfig.getMaxEntityBodySize() > -1) {
            serverPipeline.addLast("maxEntityBodyValidator", new MaxEntityBodyValidator(this.serverName,
                reqSizeValidationConfig.getMaxEntityBodySize()));
        }
        serverPipeline.addLast(Constants.WEBSOCKET_SERVER_HANDSHAKE_HANDLER,
            new WebSocketServerHandshakeHandler(this.serverConnectorFuture, this.interfaceId));
        serverPipeline.addLast(Constants.BACK_PRESSURE_HANDLER, new BackPressureHandler());
        serverPipeline.addLast(Constants.HTTP_SOURCE_HANDLER,
            new SourceHandler(this.serverConnectorFuture, this.interfaceId, this.chunkConfig,
                keepAliveConfig, this.serverName, this.allChannels,
                this.pipeliningEnabled, this.pipeliningLimit, this.pipeliningGroup));
        // A non-negative timeout installs an idle-state handler ahead of the source handler.
        if (socketIdleTimeout >= 0) {
            serverPipeline.addBefore(Constants.HTTP_SOURCE_HANDLER, Constants.IDLE_STATE_HANDLER,
                new IdleStateHandler(0, 0, socketIdleTimeout, TimeUnit.MILLISECONDS));
        }
    } }
|
public class class_name {
    /**
     * Annotated variant of configureHttpPipeline: trailing comments record
     * each statement's control/data dependency.
     */
    public void configureHttpPipeline(ChannelPipeline serverPipeline, String initialHttpScheme) {
        if (initialHttpScheme.equals(Constants.HTTP_SCHEME)) {
            serverPipeline.addLast(Constants.HTTP_ENCODER, new HttpResponseEncoder()); // depends on control dependency: [if], data = [none]
            serverPipeline.addLast(Constants.HTTP_DECODER,
                                   new HttpRequestDecoder(reqSizeValidationConfig.getMaxUriLength(),
                                                          reqSizeValidationConfig.getMaxHeaderSize(),
                                                          reqSizeValidationConfig.getMaxChunkSize())); // depends on control dependency: [if], data = [none]
            serverPipeline.addLast(Constants.HTTP_COMPRESSOR, new CustomHttpContentCompressor()); // depends on control dependency: [if], data = [none]
            serverPipeline.addLast(Constants.HTTP_CHUNK_WRITER, new ChunkedWriteHandler()); // depends on control dependency: [if], data = [none]
            if (httpTraceLogEnabled) {
                serverPipeline.addLast(HTTP_TRACE_LOG_HANDLER, new HttpTraceLoggingHandler(TRACE_LOG_DOWNSTREAM)); // depends on control dependency: [if], data = [none]
            }
            if (httpAccessLogEnabled) {
                serverPipeline.addLast(HTTP_ACCESS_LOG_HANDLER, new HttpAccessLoggingHandler(ACCESS_LOG)); // depends on control dependency: [if], data = [none]
            }
        }
        serverPipeline.addLast("uriLengthValidator", new UriAndHeaderLengthValidator(this.serverName));
        if (reqSizeValidationConfig.getMaxEntityBodySize() > -1) {
            serverPipeline.addLast("maxEntityBodyValidator", new MaxEntityBodyValidator(this.serverName,
                                                         reqSizeValidationConfig.getMaxEntityBodySize())); // depends on control dependency: [if], data = [none]
        }
        serverPipeline.addLast(Constants.WEBSOCKET_SERVER_HANDSHAKE_HANDLER,
                               new WebSocketServerHandshakeHandler(this.serverConnectorFuture, this.interfaceId));
        serverPipeline.addLast(Constants.BACK_PRESSURE_HANDLER, new BackPressureHandler());
        serverPipeline.addLast(Constants.HTTP_SOURCE_HANDLER,
                               new SourceHandler(this.serverConnectorFuture, this.interfaceId, this.chunkConfig,
                                                 keepAliveConfig, this.serverName, this.allChannels,
                                                 this.pipeliningEnabled, this.pipeliningLimit, this.pipeliningGroup));
        if (socketIdleTimeout >= 0) {
            serverPipeline.addBefore(Constants.HTTP_SOURCE_HANDLER, Constants.IDLE_STATE_HANDLER,
                                     new IdleStateHandler(0, 0, socketIdleTimeout, TimeUnit.MILLISECONDS)); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Sets the right-padding character on every content row of the table.
     * Non-content rows (e.g. rules) are left untouched.
     *
     * @return this table, for call chaining
     */
    public AsciiTable setPaddingRightChar(Character paddingRightChar) {
        for (AT_Row tableRow : this.rows) {
            if (tableRow.getType() != TableRowType.CONTENT) {
                continue; // padding only applies to content rows
            }
            tableRow.setPaddingRightChar(paddingRightChar);
        }
        return this;
    } }
|
public class class_name {
    /**
     * Annotated variant of setPaddingRightChar: trailing comments record
     * each statement's control/data dependency.
     */
    public AsciiTable setPaddingRightChar(Character paddingRightChar) {
        for(AT_Row row : this.rows){
            if(row.getType()==TableRowType.CONTENT){
                row.setPaddingRightChar(paddingRightChar);
                 // depends on control dependency: [if], data = [none]
            }
        }
        return this;
    } }
|
public class class_name {
    /**
     * Lazily creates and returns the singleton {@code StmtRunner},
     * registering the SQL runner implementation on first use.
     *
     * Declared {@code synchronized} to close the check-then-act race in
     * the original lazy initialization: without it, two threads could
     * both observe {@code RUNNER == null}, construct two instances and
     * register {@code SQLRunner} twice in {@code EQLRUNNERS}.
     *
     * @return the shared StmtRunner instance, never null
     */
    public static synchronized StmtRunner get()
    {
        if (StmtRunner.RUNNER == null) {
            StmtRunner.RUNNER = new StmtRunner();
            EQLRUNNERS.add(SQLRunner.class);
        }
        return StmtRunner.RUNNER;
    } }
|
public class class_name {
    /**
     * Annotated variant of get(): trailing comments record each
     * statement's control/data dependency.
     */
    public static StmtRunner get()
    {
        if (StmtRunner.RUNNER == null) {
            StmtRunner.RUNNER = new StmtRunner(); // depends on control dependency: [if], data = [none]
            EQLRUNNERS.add(SQLRunner.class); // depends on control dependency: [if], data = [none]
        }
        return StmtRunner.RUNNER;
    } }
|
public class class_name {
    /**
     * Converts a request-parameter value into a typed value (or array of
     * typed values) based on a "name@Type" suffix in the parameter name;
     * the type token defaults to "String" when no suffix is present.
     *
     * NOTE(review): the catch of IllegalArgumentException doubles as the
     * non-array fallback path — Array.getLength throws it when value is
     * not an array — so the exception flow here is load-bearing.
     */
    public static Object toValue(String name, Object value) {
        // Split "field@Type" into at most two parts.
        String[] parts = StringUtils.split(name, "@", 2);
        String fieldName = null;
        String fieldType = "String";
        if (parts.length == 2) {
            fieldType = parts[1];
            fieldName = parts[0];
        } else if (parts.length == 1) {
            fieldName = parts[0];
        } else {
            throw new IllegalArgumentException("Invalid property name");
        }
        try {
            // Throws IllegalArgumentException when value is not an array,
            // which routes non-array values to the catch block below.
            int l = Array.getLength(value);
            RequestParameterType<?> rpt = TYPES.get(fieldType);
            if (rpt == null) {
                // Unknown type token: fall back to the String converter.
                rpt = TYPES.get(RequestParameterType.STRING);
            }
            if (!fieldName.endsWith("[]") && l == 1) {
                // Scalar field backed by a single-element array: unwrap it.
                return rpt.newInstance(Array.get(value, 0));
            }
            Class<?> componentType = rpt.getComponentType();
            Object[] a = (Object[]) Array.newInstance(componentType, l);
            for (int i = 0; i < l; i++) {
                a[i] = rpt.newInstance(Array.get(value, i));
            }
            return a;
        } catch (IllegalArgumentException e) {
            // value is not an array (or conversion rejected it): convert
            // the raw object directly with the resolved type.
            RequestParameterType<?> rpt = TYPES.get(fieldType);
            if (rpt == null) {
                rpt = TYPES.get(RequestParameterType.STRING);
            }
            return rpt.newInstance(value);
        }
    } }
|
public class class_name {
    /**
     * Annotated variant of toValue: trailing comments record each
     * statement's control/data dependency.
     */
    public static Object toValue(String name, Object value) {
        String[] parts = StringUtils.split(name, "@", 2);
        String fieldName = null;
        String fieldType = "String";
        if (parts.length == 2) {
            fieldType = parts[1]; // depends on control dependency: [if], data = [none]
            fieldName = parts[0]; // depends on control dependency: [if], data = [none]
        } else if (parts.length == 1) {
            fieldName = parts[0]; // depends on control dependency: [if], data = [none]
        } else {
            throw new IllegalArgumentException("Invalid property name");
        }
        try {
            int l = Array.getLength(value);
            RequestParameterType<?> rpt = TYPES.get(fieldType);
            if (rpt == null) {
                rpt = TYPES.get(RequestParameterType.STRING); // depends on control dependency: [if], data = [none]
            }
            if (!fieldName.endsWith("[]") && l == 1) {
                return rpt.newInstance(Array.get(value, 0)); // depends on control dependency: [if], data = [none]
            }
            Class<?> componentType = rpt.getComponentType();
            Object[] a = (Object[]) Array.newInstance(componentType, l);
            for (int i = 0; i < l; i++) {
                a[i] = rpt.newInstance(Array.get(value, i)); // depends on control dependency: [for], data = [i]
            }
            return a; // depends on control dependency: [try], data = [none]
        } catch (IllegalArgumentException e) {
            RequestParameterType<?> rpt = TYPES.get(fieldType);
            if (rpt == null) {
                rpt = TYPES.get(RequestParameterType.STRING); // depends on control dependency: [if], data = [none]
            }
            return rpt.newInstance(value);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Registers a shutdown operation. When preserveForNextShutdown is
     * true, the operation is also kept for subsequent shutdown cycles.
     * Synchronized so concurrent registrations do not corrupt the lists.
     */
    public static synchronized void addOperation(Runnable runnable, boolean preserveForNextShutdown) {
        shutdownOperations.add(runnable);
        if (!preserveForNextShutdown) {
            return;
        }
        preservedShutdownOperations.add(runnable);
    } }
|
public class class_name {
    /**
     * Annotated variant of addOperation: trailing comments record each
     * statement's control/data dependency.
     */
    public static synchronized void addOperation(Runnable runnable, boolean preserveForNextShutdown) {
        shutdownOperations.add(runnable);
        if(preserveForNextShutdown) {
            preservedShutdownOperations.add(runnable); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Logs the destruction of the given context and notifies every
     * registered ContextRepositoryListener.
     */
    protected void fireContextDestroyed(AgentContext context) {
        final ContextRepositoryListener[] registered =
                this.listeners.getListeners(ContextRepositoryListener.class);
        this.logger.getKernelLogger().info(MessageFormat.format(Messages.StandardContextSpaceService_1, context.getID()));
        for (int i = 0; i < registered.length; i++) {
            registered[i].contextDestroyed(context);
        }
    } }
|
public class class_name {
    /**
     * Annotated variant of fireContextDestroyed: trailing comments record
     * each statement's control/data dependency.
     */
    protected void fireContextDestroyed(AgentContext context) {
        final ContextRepositoryListener[] ilisteners = this.listeners.getListeners(ContextRepositoryListener.class);
        this.logger.getKernelLogger().info(MessageFormat.format(Messages.StandardContextSpaceService_1, context.getID()));
        for (final ContextRepositoryListener listener : ilisteners) {
            listener.contextDestroyed(context); // depends on control dependency: [for], data = [listener]
        }
    } }
|
public class class_name {
    /**
     * Computes the coverage gain of a grammar rule: how many currently
     * uncovered points its intervals would cover, weighted down by
     * overlap with already-covered ranges and divided by the rule's
     * encoding size (expanded string length plus occurrence count).
     * Returns 0.0 when the rule covers nothing new.
     */
    public double getCoverDelta(GrammarRuleRecord rule) {
        int newlyCovered = 0;      // points not yet covered by any rule
        int alreadyCovered = 0;    // points overlapping previous coverage
        for (RuleInterval interval : rule.getRuleIntervals()) {
            int from = interval.getStart();
            int to = interval.getEnd();
            for (int pos = from; pos < to; pos++) {
                if (range[pos]) {
                    alreadyCovered++;
                }
                else {
                    newlyCovered++;
                }
            }
        }
        if (newlyCovered == 0) {
            // Rule adds no new coverage.
            return 0.0;
        }
        // Encoding size: rule string length plus number of occurrences.
        double encodingSize =
            rule.getExpandedRuleString().length() + rule.getRuleIntervals().size();
        if (alreadyCovered == 0) {
            // No overlap: full weighted cover.
            return newlyCovered / encodingSize;
        }
        // Discount new coverage by the overlap fraction, then normalize.
        return (newlyCovered / (double) (newlyCovered + alreadyCovered)) / encodingSize;
    } }
|
public class class_name {
    /**
     * Annotated variant of getCoverDelta: trailing comments record each
     * statement's control/data dependency.
     */
    public double getCoverDelta(GrammarRuleRecord rule) {
        // counts which uncovered points shall be covered
        int new_cover = 0;
        // counts overlaps with previously covered ranges
        int overlapping_cover = 0;
        // perform the sum computation
        for (RuleInterval i : rule.getRuleIntervals()) {
            int start = i.getStart();
            int end = i.getEnd();
            for (int j = start; j < end; j++) {
                if (range[j]) {
                    overlapping_cover++; // depends on control dependency: [if], data = [none]
                }
                else {
                    new_cover++; // depends on control dependency: [if], data = [none]
                }
            }
        }
        // if covers nothing, return 0
        if (0 == new_cover) {
            return 0.0; // depends on control dependency: [if], data = [none]
        }
        // if zero overlap, return full weighted cover
        if (0 == overlapping_cover) {
            return (double) new_cover
                    / (double) (rule.getExpandedRuleString().length() + rule.getRuleIntervals().size()); // depends on control dependency: [if], data = [none]
        }
        // else divide newly covered points amount by the sum of the rule string length and occurrence
        // (i.e. encoding size)
        return ((double) new_cover / (double) (new_cover + overlapping_cover))
                / (double) (rule.getExpandedRuleString().length() + rule.getRuleIntervals().size());
    } }
|
public class class_name {
    /**
     * Halts the JVM with a diagnostic exit code unless a sufficiently
     * recent JUnit 4 is on the classpath: ERR_NO_JUNIT when the
     * Description class is missing entirely, ERR_OLD_JUNIT when it exists
     * but is not Serializable (pre-serializable-Description releases).
     */
    private static void verifyJUnit4Present() {
        try {
            final Class<?> descriptionClass = Class.forName("org.junit.runner.Description");
            final boolean recentEnough = Serializable.class.isAssignableFrom(descriptionClass);
            if (!recentEnough) {
                JvmExit.halt(SlaveMain.ERR_OLD_JUNIT);
            }
        } catch (ClassNotFoundException e) {
            JvmExit.halt(SlaveMain.ERR_NO_JUNIT);
        }
    } }
|
public class class_name {
    /**
     * Annotated variant of verifyJUnit4Present: trailing comments record
     * each statement's control/data dependency.
     */
    private static void verifyJUnit4Present() {
        try {
            Class<?> clazz = Class.forName("org.junit.runner.Description");
            if (!Serializable.class.isAssignableFrom(clazz)) {
                JvmExit.halt(SlaveMain.ERR_OLD_JUNIT); // depends on control dependency: [if], data = [none]
            }
        } catch (ClassNotFoundException e) {
            JvmExit.halt(SlaveMain.ERR_NO_JUNIT);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Dispatches an HTTP request against a filesystem/cached resource.
     * Resolves the path (honouring servlet include attributes), rejects
     * methods outside the configured allow-list with 405 (resource
     * exists) or 404 (it does not), then routes GET/POST/HEAD, PUT,
     * DELETE, MOVE, OPTIONS and TRACE to their handlers; anything else
     * gets 501 or a not-found response. The resolved resource is always
     * released in the finally block unless it is cached.
     *
     * NOTE(review): PUT/DELETE/MOVE are gated on the _putAllowed /
     * _delAllowed flags in addition to the allow-string check.
     */
    protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
    {
        // Prefer the include attributes when this servlet is the target
        // of a RequestDispatcher.include().
        String servletPath = (String) request.getAttribute(Dispatcher.__INCLUDE_SERVLET_PATH);
        String pathInfo = null;
        if (servletPath == null)
        {
            servletPath = request.getServletPath();
            pathInfo = request.getPathInfo();
        }
        else
            pathInfo = (String) request.getAttribute(Dispatcher.__INCLUDE_PATH_INFO);
        String pathInContext = URI.addPaths(servletPath, pathInfo);
        boolean endsWithSlash = pathInContext.endsWith("/");
        Resource resource = getResource(pathInContext);
        // Is the method allowed?
        String method = request.getMethod();
        if (_AllowString.indexOf(method) < 0)
        {
            if (resource != null && resource.exists())
            {
                response.setHeader(HttpFields.__Allow, _AllowString);
                response.sendError(HttpResponse.__405_Method_Not_Allowed);
            }
            else
                response.sendError(HttpResponse.__404_Not_Found);
            return;
        }
        // Handle the request
        try
        {
            // handle by method.
            if (method.equals(HttpRequest.__GET) || method.equals(HttpRequest.__POST) || method.equals(HttpRequest.__HEAD))
                handleGet(request, response, pathInContext, resource, endsWithSlash);
            else if (_putAllowed && method.equals(HttpRequest.__PUT))
                handlePut(request, response, pathInContext, resource);
            else if (_delAllowed && method.equals(HttpRequest.__DELETE))
                handleDelete(request, response, pathInContext, resource);
            else if (_putAllowed && _delAllowed && method.equals(HttpRequest.__MOVE))
                handleMove(request, response, pathInContext, resource);
            else if (method.equals(HttpRequest.__OPTIONS))
                handleOptions(request, response);
            else if (method.equals(HttpRequest.__TRACE))
                _servletHandler.handleTrace(request, response);
            else
            {
                // anything else...
                try
                {
                    if (resource.exists())
                        response.sendError(HttpResponse.__501_Not_Implemented);
                    else
                        _servletHandler.notFound(request, response);
                }
                catch (Exception e)
                {
                    LogSupport.ignore(log, e);
                }
            }
        }
        catch (IllegalArgumentException e)
        {
            LogSupport.ignore(log, e);
        }
        finally
        {
            // Always release non-cached resources to avoid leaks.
            if (resource != null && !(resource instanceof CachedResource))
                resource.release();
        }
    } }
|
public class class_name {
    /**
     * Annotated variant of service: trailing comments record each
     * statement's control/data dependency.
     */
    protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
    {
        String servletPath = (String) request.getAttribute(Dispatcher.__INCLUDE_SERVLET_PATH);
        String pathInfo = null;
        if (servletPath == null)
        {
            servletPath = request.getServletPath();
            pathInfo = request.getPathInfo();
        }
        else
            pathInfo = (String) request.getAttribute(Dispatcher.__INCLUDE_PATH_INFO);
        String pathInContext = URI.addPaths(servletPath, pathInfo);
        boolean endsWithSlash = pathInContext.endsWith("/");
        Resource resource = getResource(pathInContext);
        // Is the method allowed?
        String method = request.getMethod();
        if (_AllowString.indexOf(method) < 0)
        {
            if (resource != null && resource.exists())
            {
                response.setHeader(HttpFields.__Allow, _AllowString); // depends on control dependency: [if], data = [none]
                response.sendError(HttpResponse.__405_Method_Not_Allowed); // depends on control dependency: [if], data = [none]
            }
            else
                response.sendError(HttpResponse.__404_Not_Found);
            return;
        }
        // Handle the request
        try
        {
            // handle by method.
            if (method.equals(HttpRequest.__GET) || method.equals(HttpRequest.__POST) || method.equals(HttpRequest.__HEAD))
                handleGet(request, response, pathInContext, resource, endsWithSlash);
            else if (_putAllowed && method.equals(HttpRequest.__PUT))
                handlePut(request, response, pathInContext, resource);
            else if (_delAllowed && method.equals(HttpRequest.__DELETE))
                handleDelete(request, response, pathInContext, resource);
            else if (_putAllowed && _delAllowed && method.equals(HttpRequest.__MOVE))
                handleMove(request, response, pathInContext, resource);
            else if (method.equals(HttpRequest.__OPTIONS))
                handleOptions(request, response);
            else if (method.equals(HttpRequest.__TRACE))
                _servletHandler.handleTrace(request, response);
            else
            {
                // anything else...
                try
                {
                    if (resource.exists())
                        response.sendError(HttpResponse.__501_Not_Implemented);
                    else
                        _servletHandler.notFound(request, response);
                }
                catch (Exception e)
                {
                    LogSupport.ignore(log, e);
                } // depends on control dependency: [catch], data = [none]
            }
        }
        catch (IllegalArgumentException e)
        {
            LogSupport.ignore(log, e);
        }
        finally
        {
            if (resource != null && !(resource instanceof CachedResource))
                resource.release();
        }
    } }
|
public class class_name {
    /**
     * Makes the given key reachable from the child injector by inheriting
     * the parent's binding, unless the parent is null (optional dependency
     * that could not be created), the child IS the parent, or the child
     * already binds the key.
     */
    private void ensureAccessible(Key<?> key, GinjectorBindings parent, GinjectorBindings child) {
        // Nothing to do when there is no distinct parent binding to inherit.
        if (parent == null || child.equals(parent) || child.isBound(key)) {
            return;
        }
        PrettyPrinter.log(logger, TreeLogger.DEBUG,
            "In %s: inheriting binding for %s from the parent %s", child, key, parent);
        Context context = Context.format("Inheriting %s from parent", key);
        // addBinding performs extra checks we don't strictly need here (no
        // unresolved bindings can exist for this key), but they are harmless.
        child.addBinding(key, bindingFactory.getParentBinding(key, parent, context));
    } }
|
public class class_name {
    /**
     * Annotated variant of ensureAccessible: trailing comments record
     * each statement's control/data dependency.
     */
    private void ensureAccessible(Key<?> key, GinjectorBindings parent, GinjectorBindings child) {
        // Parent will be null if it is was an optional dependency and it couldn't be created.
        if (parent != null && !child.equals(parent) && !child.isBound(key)) {
            PrettyPrinter.log(logger, TreeLogger.DEBUG,
                "In %s: inheriting binding for %s from the parent %s", child, key, parent); // depends on control dependency: [if], data = [none]
            Context context = Context.format("Inheriting %s from parent", key);
            // We don't strictly need all the extra checks in addBinding, but it can't hurt. We know, for
            // example, that there will not be any unresolved bindings for this key.
            child.addBinding(key, bindingFactory.getParentBinding(key, parent, context)); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Wraps the serialized-form tree in a styled DIV. When the HTML5
     * MAIN tag is allowed the DIV is attached to the main tree and the
     * main tree is returned; otherwise the DIV itself is returned.
     */
    public Content getSerializedContent(Content serializedTreeContent) {
        HtmlTree container =
                HtmlTree.DIV(HtmlStyle.serializedFormContainer, serializedTreeContent);
        if (!configuration.allowTag(HtmlTag.MAIN)) {
            return container;
        }
        mainTree.addContent(container);
        return mainTree;
    } }
|
public class class_name {
    /**
     * Annotated variant of getSerializedContent: trailing comments record
     * each statement's control/data dependency.
     */
    public Content getSerializedContent(Content serializedTreeContent) {
        HtmlTree divContent = HtmlTree.DIV(HtmlStyle.serializedFormContainer,
                serializedTreeContent);
        if (configuration.allowTag(HtmlTag.MAIN)) {
            mainTree.addContent(divContent); // depends on control dependency: [if], data = [none]
            return mainTree; // depends on control dependency: [if], data = [none]
        } else {
            return divContent; // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Consumes an XML entity reference starting at '&amp;' and appends its
     * resolution to {@code out}. Handles, in order: numeric character
     * references (&#...;/&#x...;), entity-declaration contexts (left
     * unresolved for later), the built-in default entities, entities
     * declared in the document's internal DTD (parsed as XML or inlined
     * as text depending on {@code processDocDecl}), entities possibly
     * declared in a skipped external DTD (mapped to the empty string),
     * and finally unknown entities, which are kept literally for relaxed
     * clients or rejected when {@code throwOnResolveFailure} is set.
     *
     * @param isEntityToken when true, the bare entity name is stored in {@code name}
     * @throws KriptonRuntimeException on malformed or unresolvable references
     */
    private void readEntity(StringBuilder out, boolean isEntityToken, boolean throwOnResolveFailure, ValueContext valueContext) throws IOException, KriptonRuntimeException {
        int start = out.length();
        if (buffer[position++] != '&') {
            throw new AssertionError();
        }
        out.append('&');
        // Accumulate the reference name up to the terminating ';'.
        while (true) {
            int c = peekCharacter();
            if (c == ';') {
                out.append(';');
                position++;
                break;
            } else if (c >= 128 || (c >= '0' && c <= '9') || (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_' || c == '-' || c == '#') {
                position++;
                out.append((char) c);
            } else if (relaxed) {
                // intentionally leave the partial reference in 'out'
                return;
            } else {
                throw new KriptonRuntimeException("unterminated entity ref", true, this.getLineNumber(), this.getColumnNumber(), getPositionDescription(), null);
            }
        }
        String code = out.substring(start + 1, out.length() - 1);
        if (isEntityToken) {
            name = code;
        }
        if (code.startsWith("#")) {
            // Numeric character reference: decimal or hex (#x...).
            try {
                int c = code.startsWith("#x") ? Integer.parseInt(code.substring(2), 16) : Integer.parseInt(code.substring(1));
                out.delete(start, out.length());
                out.appendCodePoint(c);
                unresolved = false;
                return;
            } catch (NumberFormatException notANumber) {
                throw new KriptonRuntimeException("Invalid character reference: &" + code);
            } catch (IllegalArgumentException invalidCodePoint) {
                throw new KriptonRuntimeException("Invalid character reference: &" + code);
            }
        }
        if (valueContext == ValueContext.ENTITY_DECLARATION) {
            // keep the unresolved &code; in the text to resolve later
            return;
        }
        String defaultEntity = DEFAULT_ENTITIES.get(code);
        if (defaultEntity != null) {
            out.delete(start, out.length());
            unresolved = false;
            out.append(defaultEntity);
            return;
        }
        char[] resolved;
        if (documentEntities != null && (resolved = documentEntities.get(code)) != null) {
            out.delete(start, out.length());
            unresolved = false;
            if (processDocDecl) {
                pushContentSource(resolved); // parse the entity as XML
            } else {
                out.append(resolved); // include the entity value as text
            }
            return;
        }
        /*
         * The parser skipped an external DTD, and now we've encountered an
         * unknown entity that could have been declared there. Map it to the
         * empty string. This is dishonest, but it's consistent with Android's
         * old ExpatPullParser.
         */
        if (systemId != null) {
            out.delete(start, out.length());
            return;
        }
        // keep the unresolved entity "&code;" in the text for relaxed clients
        unresolved = true;
        if (throwOnResolveFailure) {
            checkRelaxed("unresolved: &" + code + ";");
        }
    } }
|
public class class_name {
    /**
     * Annotated variant of readEntity: trailing comments record each
     * statement's control/data dependency.
     */
    private void readEntity(StringBuilder out, boolean isEntityToken, boolean throwOnResolveFailure, ValueContext valueContext) throws IOException, KriptonRuntimeException {
        int start = out.length();
        if (buffer[position++] != '&') {
            throw new AssertionError();
        }
        out.append('&');
        while (true) {
            int c = peekCharacter();
            if (c == ';') {
                out.append(';'); // depends on control dependency: [if], data = [';')]
                position++; // depends on control dependency: [if], data = [none]
                break;
            } else if (c >= 128 || (c >= '0' && c <= '9') || (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_' || c == '-' || c == '#') {
                position++; // depends on control dependency: [if], data = [none]
                out.append((char) c); // depends on control dependency: [if], data = [none]
            } else if (relaxed) {
                // intentionally leave the partial reference in 'out'
                return; // depends on control dependency: [if], data = [none]
            } else {
                throw new KriptonRuntimeException("unterminated entity ref", true, this.getLineNumber(), this.getColumnNumber(), getPositionDescription(), null);
            }
        }
        String code = out.substring(start + 1, out.length() - 1);
        if (isEntityToken) {
            name = code;
        }
        if (code.startsWith("#")) {
            try {
                int c = code.startsWith("#x") ? Integer.parseInt(code.substring(2), 16) : Integer.parseInt(code.substring(1));
                out.delete(start, out.length());
                out.appendCodePoint(c);
                unresolved = false;
                return;
            } catch (NumberFormatException notANumber) {
                throw new KriptonRuntimeException("Invalid character reference: &" + code);
            } catch (IllegalArgumentException invalidCodePoint) {
                throw new KriptonRuntimeException("Invalid character reference: &" + code);
            }
        }
        if (valueContext == ValueContext.ENTITY_DECLARATION) {
            // keep the unresolved &code; in the text to resolve later
            return;
        }
        String defaultEntity = DEFAULT_ENTITIES.get(code);
        if (defaultEntity != null) {
            out.delete(start, out.length());
            unresolved = false;
            out.append(defaultEntity);
            return;
        }
        char[] resolved;
        if (documentEntities != null && (resolved = documentEntities.get(code)) != null) {
            out.delete(start, out.length());
            unresolved = false;
            if (processDocDecl) {
                pushContentSource(resolved); // parse the entity as XML
            } else {
                out.append(resolved); // include the entity value as text
            }
            return;
        }
        /*
         * The parser skipped an external DTD, and now we've encountered an
         * unknown entity that could have been declared there. Map it to the
         * empty string. This is dishonest, but it's consistent with Android's
         * old ExpatPullParser.
         */
        if (systemId != null) {
            out.delete(start, out.length());
            return;
        }
        // keep the unresolved entity "&code;" in the text for relaxed clients
        unresolved = true;
        if (throwOnResolveFailure) {
            checkRelaxed("unresolved: &" + code + ";");
        }
    } }
|
public class class_name {
    /**
     * Builds the DCP control map for the given server version. When the
     * configured compression mode is unsupported by that version the
     * effective (fallback) mode is used and an informational message is
     * logged; the effective mode's controls are merged over the base
     * values.
     */
    public Map<String, String> getControls(Version serverVersion) {
        final CompressionMode effectiveMode = compression(serverVersion);
        if (compressionMode == effectiveMode) {
            LOGGER.debug("Compression mode: {}", compressionMode);
        } else {
            LOGGER.info("Couchbase Server version {} does not support {} compression mode; falling back to {}.",
                serverVersion, compressionMode, effectiveMode);
        }
        final Map<String, String> controls = new HashMap<>(values);
        controls.putAll(effectiveMode.getDcpControls(serverVersion));
        return controls;
    } }
|
public class class_name {
    /**
     * Annotated variant of getControls: trailing comments record each
     * statement's control/data dependency.
     */
    public Map<String, String> getControls(Version serverVersion) {
        final CompressionMode effectiveMode = compression(serverVersion);
        if (compressionMode != effectiveMode) {
            LOGGER.info("Couchbase Server version {} does not support {} compression mode; falling back to {}.",
                serverVersion, compressionMode, effectiveMode); // depends on control dependency: [if], data = [none]
        } else {
            LOGGER.debug("Compression mode: {}", compressionMode); // depends on control dependency: [if], data = [none]
        }
        final Map<String, String> result = new HashMap<>(values);
        result.putAll(effectiveMode.getDcpControls(serverVersion));
        return result;
    } }
|
public class class_name {
    /**
     * Guarantees that at least one clustering exists in the result
     * hierarchy, running a by-label (or all-in-one) clustering as a
     * fallback when none is present.
     */
    public static void ensureClusteringResult(final Database db, final Result result) {
        Collection<Clustering<?>> existing =
                ResultUtil.filterResults(db.getHierarchy(), result, Clustering.class);
        if (!existing.isEmpty()) {
            return; // a clustering is already available
        }
        ResultUtil.addChildResult(db, new ByLabelOrAllInOneClustering().run(db));
    } }
|
public class class_name {
    /**
     * Annotated variant of ensureClusteringResult: trailing comments
     * record each statement's control/data dependency.
     */
    public static void ensureClusteringResult(final Database db, final Result result) {
        Collection<Clustering<?>> clusterings = ResultUtil.filterResults(db.getHierarchy(), result, Clustering.class);
        if(clusterings.isEmpty()) {
            ResultUtil.addChildResult(db, new ByLabelOrAllInOneClustering().run(db)); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Parses a set-data construct: "{" [Expression ("," Expression)*] "}".
     * JavaCC-generated parser production (note the @bgen marker) — the
     * token-switch and jjtree bookkeeping follow the generator's fixed
     * shape; do not hand-edit the generated logic.
     *
     * @throws ParseException on a token sequence that does not match the production
     */
    final public void SetData() throws ParseException {
        /*@bgen(jjtree) SetData */
        AstSetData jjtn000 = new AstSetData(JJTSETDATA);
        boolean jjtc000 = true;
        jjtree.openNodeScope(jjtn000);
        try {
            jj_consume_token(START_SET_OR_MAP);
            switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
            case START_SET_OR_MAP:
            case INTEGER_LITERAL:
            case FLOATING_POINT_LITERAL:
            case STRING_LITERAL:
            case TRUE:
            case FALSE:
            case NULL:
            case LPAREN:
            case LBRACK:
            case NOT0:
            case NOT1:
            case EMPTY:
            case MINUS:
            case IDENTIFIER:
                // Non-empty set: first expression, then comma-separated rest.
                Expression();
                label_16:
                while (true) {
                    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
                    case COMMA:
                        ;
                        break;
                    default:
                        jj_la1[42] = jj_gen;
                        break label_16;
                    }
                    jj_consume_token(COMMA);
                    Expression();
                }
                break;
            default:
                // Empty set literal "{}".
                jj_la1[43] = jj_gen;
                ;
            }
            jj_consume_token(RBRACE);
        } catch (Throwable jjte000) {
            if (jjtc000) {
                jjtree.clearNodeScope(jjtn000);
                jjtc000 = false;
            } else {
                jjtree.popNode();
            }
            if (jjte000 instanceof RuntimeException) {
                {if (true) throw (RuntimeException)jjte000;}
            }
            if (jjte000 instanceof ParseException) {
                {if (true) throw (ParseException)jjte000;}
            }
            {if (true) throw (Error)jjte000;}
        } finally {
            if (jjtc000) {
                jjtree.closeNodeScope(jjtn000, true);
            }
        }
    } }
|
public class class_name {
    /**
     * Annotated variant of SetData (JavaCC-generated): trailing comments
     * record each statement's control/data dependency.
     */
    final public void SetData() throws ParseException {
        /*@bgen(jjtree) SetData */
        AstSetData jjtn000 = new AstSetData(JJTSETDATA);
        boolean jjtc000 = true;
        jjtree.openNodeScope(jjtn000);
        try {
            jj_consume_token(START_SET_OR_MAP);
            switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
            case START_SET_OR_MAP:
            case INTEGER_LITERAL:
            case FLOATING_POINT_LITERAL:
            case STRING_LITERAL:
            case TRUE:
            case FALSE:
            case NULL:
            case LPAREN:
            case LBRACK:
            case NOT0:
            case NOT1:
            case EMPTY:
            case MINUS:
            case IDENTIFIER:
                Expression();
                label_16:
                while (true) {
                    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
                    case COMMA:
                        ;
                        break;
                    default:
                        jj_la1[42] = jj_gen;
                        break label_16;
                    }
                    jj_consume_token(COMMA); // depends on control dependency: [while], data = [none]
                    Expression(); // depends on control dependency: [while], data = [none]
                }
                break;
            default:
                jj_la1[43] = jj_gen;
                ;
            }
            jj_consume_token(RBRACE);
        } catch (Throwable jjte000) {
            if (jjtc000) {
                jjtree.clearNodeScope(jjtn000); // depends on control dependency: [if], data = [none]
                jjtc000 = false; // depends on control dependency: [if], data = [none]
            } else {
                jjtree.popNode(); // depends on control dependency: [if], data = [none]
            }
            if (jjte000 instanceof RuntimeException) {
                {if (true) throw (RuntimeException)jjte000;}
            }
            if (jjte000 instanceof ParseException) {
                {if (true) throw (ParseException)jjte000;}
            }
            {if (true) throw (Error)jjte000;}
        } finally {
            if (jjtc000) {
                jjtree.closeNodeScope(jjtn000, true); // depends on control dependency: [if], data = [none]
            }
        }
    } }
|
public class class_name {
    /**
     * Removes the connection whose client id matches, delegating to the
     * session-id based removal. Logs a warning and returns null when no
     * connection with that id exists.
     */
    @Override
    public RTMPConnection removeConnection(int clientId) {
        log.trace("Removing connection with id: {}", clientId);
        // remove from map
        for (RTMPConnection candidate : connMap.values()) {
            if (candidate.getId() != clientId) {
                continue;
            }
            // found it: remove by session id
            return removeConnection(candidate.getSessionId());
        }
        log.warn("Connection was not removed by id: {}", clientId);
        return null;
    } }
|
public class class_name {
    /**
     * Annotated variant of removeConnection: trailing comments record
     * each statement's control/data dependency.
     */
    @Override
    public RTMPConnection removeConnection(int clientId) {
        log.trace("Removing connection with id: {}", clientId);
        // remove from map
        for (RTMPConnection conn : connMap.values()) {
            if (conn.getId() == clientId) {
                // remove the conn
                return removeConnection(conn.getSessionId()); // depends on control dependency: [if], data = [none]
            }
        }
        log.warn("Connection was not removed by id: {}", clientId);
        return null;
    } }
|
public class class_name {
    /**
     * Refreshes the item's LRU position, but only when it has not been
     * touched within the configured timeout window (avoids churning the
     * LRU structure for hot items).
     */
    private void updateLru(CacheItem<K,V> item)
    {
        long lruCounter = _lruCounter;
        long itemCounter = item._lruCounter;
        // 30-bit wrap-around distance between the cache clock and the
        // item's last-touch stamp. The mask keeps the value in
        // [0, 0x3fffffff], so it can never be negative — the original
        // "delta < 0" test was unreachable and has been removed.
        long delta = (lruCounter - itemCounter) & 0x3fffffff;
        if (_lruTimeout < delta) {
            // update LRU only if not used recently
            updateLruImpl(item);
        }
    } }
|
public class class_name {
    /**
     * Annotated variant of updateLru: trailing comments record each
     * statement's control/data dependency.
     */
    private void updateLru(CacheItem<K,V> item)
    {
        long lruCounter = _lruCounter;
        long itemCounter = item._lruCounter;
        // NOTE(review): the mask keeps delta in [0, 0x3fffffff], so the
        // "delta < 0" branch below can never fire.
        long delta = (lruCounter - itemCounter) & 0x3fffffff;
        if (_lruTimeout < delta || delta < 0) {
            // update LRU only if not used recently
            updateLruImpl(item); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Performs base initialization, then loads the Retire.js filter list
     * from settings — but only when this analyzer is enabled.
     */
    @Override
    public void initialize(Settings settings) {
        super.initialize(settings);
        if (!this.isEnabled()) {
            return; // disabled analyzers keep no filters
        }
        this.filters = settings.getArray(Settings.KEYS.ANALYZER_RETIREJS_FILTERS);
    } }
|
public class class_name {
    /**
     * Annotated variant of initialize: trailing comments record each
     * statement's control/data dependency.
     */
    @Override
    public void initialize(Settings settings) {
        super.initialize(settings);
        if (this.isEnabled()) {
            this.filters = settings.getArray(Settings.KEYS.ANALYZER_RETIREJS_FILTERS); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Builds a WaybackRequest from an HTTP form/query request. Returns
     * null when the query map is empty or the translated path matches no
     * known base (replay/query/xquery). All query parameters except the
     * submit button are copied into the request; a REQUEST_URL without a
     * scheme gets "http://" prepended. Start/end timestamps are derived
     * from the replay timestamp when present, otherwise defaulted to the
     * configured earliest/latest bounds.
     *
     * @return the populated WaybackRequest, or null when not parseable
     */
    public WaybackRequest parse(HttpServletRequest httpRequest,
            AccessPoint accessPoint) throws BetterRequestException {
        WaybackRequest wbRequest = null;
        @SuppressWarnings("unchecked")
        Map<String, String[]> queryMap = httpRequest.getParameterMap();
        if (queryMap.size() > 0) {
            wbRequest = new WaybackRequest();
            String base = accessPoint.translateRequestPath(httpRequest);
            // Classify the request by its base path.
            if (base.startsWith(REPLAY_BASE)) {
                wbRequest.setReplayRequest();
            } else if (base.startsWith(QUERY_BASE)) {
                wbRequest.setCaptureQueryRequest();
            } else if (base.startsWith(XQUERY_BASE)) {
                wbRequest.setCaptureQueryRequest();
                wbRequest.setXMLMode(true);
            } else {
                return null;
            }
            wbRequest.setResultsPerPage(getMaxRecords());
            Set<String> keys = queryMap.keySet();
            Iterator<String> itr = keys.iterator();
            while (itr.hasNext()) {
                String key = itr.next();
                if (key.equals(SUBMIT_BUTTON)) {
                    continue;
                }
                // just jam everything else in:
                String val = AccessPoint.getMapParam(queryMap, key);
                if (key.equals(WaybackRequest.REQUEST_URL)) {
                    String scheme = UrlOperations.urlToScheme(val);
                    if (scheme == null) {
                        // Default scheme-less URLs to http://.
                        val = UrlOperations.HTTP_SCHEME + val;
                    }
                }
                wbRequest.put(key, val);
            }
            String partialTS = wbRequest.getReplayTimestamp();
            if (partialTS != null) {
                // Derive missing bounds from the (possibly partial) replay timestamp.
                if (wbRequest.getStartTimestamp() == null) {
                    String startTS = Timestamp.parseBefore(partialTS)
                            .getDateStr();
                    wbRequest.setStartTimestamp(startTS);
                }
                if (wbRequest.getEndTimestamp() == null) {
                    String endTS = Timestamp.parseAfter(partialTS).getDateStr();
                    wbRequest.setEndTimestamp(endTS);
                }
            } else {
                // No replay timestamp: fall back to configured global bounds.
                if (wbRequest.getStartTimestamp() == null) {
                    wbRequest.setStartTimestamp(getEarliestTimestamp());
                }
                if (wbRequest.getEndTimestamp() == null) {
                    wbRequest.setEndTimestamp(getLatestTimestamp());
                }
            }
        }
        return wbRequest;
    } }
|
public class class_name {
    /**
     * Parses an incoming HTTP request into a {@link WaybackRequest}, classifying
     * it as a replay, capture-query, or XML capture-query request based on the
     * translated request path, copying the remaining query parameters into the
     * request, and filling in any missing start/end timestamps.
     *
     * @param httpRequest the incoming servlet request
     * @param accessPoint the access point used to translate the request path
     * @return a populated WaybackRequest, or {@code null} when the request has
     *         no query parameters or the path matches none of the known bases
     * @throws BetterRequestException if the request should be redirected to a
     *         canonical form
     */
    public WaybackRequest parse(HttpServletRequest httpRequest,
            AccessPoint accessPoint) throws BetterRequestException {
        WaybackRequest wbRequest = null;
        @SuppressWarnings("unchecked")
        Map<String, String[]> queryMap = httpRequest.getParameterMap();
        if (queryMap.size() > 0) {
            wbRequest = new WaybackRequest();
            // Path prefix decides the request mode; anything else is rejected.
            String base = accessPoint.translateRequestPath(httpRequest);
            if (base.startsWith(REPLAY_BASE)) {
                wbRequest.setReplayRequest();
            } else if (base.startsWith(QUERY_BASE)) {
                wbRequest.setCaptureQueryRequest();
            } else if (base.startsWith(XQUERY_BASE)) {
                wbRequest.setCaptureQueryRequest();
                wbRequest.setXMLMode(true);
            } else {
                return null;
            }
            wbRequest.setResultsPerPage(getMaxRecords());
            Set<String> keys = queryMap.keySet();
            Iterator<String> itr = keys.iterator();
            while (itr.hasNext()) {
                String key = itr.next();
                if (key.equals(SUBMIT_BUTTON)) {
                    continue;
                }
                // just jam everything else in:
                String val = AccessPoint.getMapParam(queryMap, key);
                if (key.equals(WaybackRequest.REQUEST_URL)) {
                    // Default to http:// when the requested URL has no scheme.
                    String scheme = UrlOperations.urlToScheme(val);
                    if (scheme == null) {
                        val = UrlOperations.HTTP_SCHEME + val; // depends on control dependency: [if], data = [none]
                    }
                }
                wbRequest.put(key, val);
            }
            // Derive missing start/end timestamps: from the (possibly partial)
            // replay timestamp when present, otherwise from the global bounds.
            String partialTS = wbRequest.getReplayTimestamp();
            if (partialTS != null) {
                if (wbRequest.getStartTimestamp() == null) {
                    String startTS = Timestamp.parseBefore(partialTS)
                            .getDateStr();
                    wbRequest.setStartTimestamp(startTS);
                }
                if (wbRequest.getEndTimestamp() == null) {
                    String endTS = Timestamp.parseAfter(partialTS).getDateStr();
                    wbRequest.setEndTimestamp(endTS);
                }
            } else {
                if (wbRequest.getStartTimestamp() == null) {
                    wbRequest.setStartTimestamp(getEarliestTimestamp());
                }
                if (wbRequest.getEndTimestamp() == null) {
                    wbRequest.setEndTimestamp(getLatestTimestamp());
                }
            }
        }
        return wbRequest;
    } }
|
public class class_name {
    /**
     * Builds a new list containing this list's contents repeated the given
     * number of times. A repetition count of zero yields an empty list.
     *
     * @param times how many copies of this list to concatenate
     * @return a new list holding {@code times} back-to-back copies of this list
     */
    public AbstractByteList times(int times) {
        final AbstractByteList result = new ByteArrayList(times * size());
        for (int rep = 0; rep < times; rep++) {
            result.addAllOfFromTo(this, 0, size() - 1);
        }
        return result;
    } }
|
public class class_name {
    /**
     * Builds a new list containing this list's contents repeated the given
     * number of times. A repetition count of zero yields an empty list.
     *
     * @param times how many copies of this list to concatenate
     * @return a new list holding {@code times} back-to-back copies of this list
     */
    public AbstractByteList times(int times) {
        // Pre-size the result to the exact final element count.
        AbstractByteList newList = new ByteArrayList(times*size());
        for (int i=times; --i >= 0; ) {
            newList.addAllOfFromTo(this,0,size()-1);
            // depends on control dependency: [for], data = [none]
        }
        return newList;
    } }
|
public class class_name {
    /**
     * Returns the instance status list, lazily creating the backing
     * {@code SdkInternalList} on first access so this never returns
     * {@code null}.
     *
     * @return the (possibly freshly created) list of instance statuses
     */
    public java.util.List<InstanceStatus> getInstanceStatuses() {
        if (instanceStatuses != null) {
            return instanceStatuses;
        }
        instanceStatuses = new com.amazonaws.internal.SdkInternalList<InstanceStatus>();
        return instanceStatuses;
    } }
|
public class class_name {
    /**
     * Returns the instance status list, lazily creating the backing
     * {@code SdkInternalList} on first access so this never returns
     * {@code null}.
     *
     * @return the (possibly freshly created) list of instance statuses
     */
    public java.util.List<InstanceStatus> getInstanceStatuses() {
        if (instanceStatuses == null) {
            instanceStatuses = new com.amazonaws.internal.SdkInternalList<InstanceStatus>(); // depends on control dependency: [if], data = [none]
        }
        return instanceStatuses;
    } }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.