code stringlengths 130 281k | code_dependency stringlengths 182 306k |
|---|---|
public class class_name {
private static void distributeShareMax(
Collection<? extends Schedulable> schedulableVector) {
for (Schedulable schedulable : schedulableVector) {
double minShare = Math.max(schedulable.getMinimum(),
schedulable.getRequested());
schedulable.share += Math.min(schedulable.getMaximum(),
minShare);
}
} } | public class class_name {
private static void distributeShareMax(
Collection<? extends Schedulable> schedulableVector) {
for (Schedulable schedulable : schedulableVector) {
double minShare = Math.max(schedulable.getMinimum(),
schedulable.getRequested());
schedulable.share += Math.min(schedulable.getMaximum(),
minShare); // depends on control dependency: [for], data = [schedulable]
}
} } |
public class class_name {
private synchronized void tryProcessMethod(Class<?> clazz, String methodName, boolean expectedToExist, Class<?> ... parameterTypes) {
final Method method;
try {
method = clazz.getMethod(methodName, parameterTypes);
} catch (NoSuchMethodException ex) {
Level logLevel = expectedToExist ? Level.INFO : Level.CONFIG;
if (LOGGER.isLoggable(logLevel)) {
String methodSpec = String.format("%s(%s)", methodName, StringUtils.join(parameterTypes, ","));
LOGGER.log(logLevel, "No method {0} for class {1}", new Object[] {methodSpec, clazz});
}
return;
} catch (RuntimeException ex) {
Level logLevel = expectedToExist ? Level.INFO : Level.CONFIG;
if (LOGGER.isLoggable(logLevel)) {
String methodSpec = String.format("%s(%s)", methodName, StringUtils.join(parameterTypes, ","));
LOGGER.log(logLevel, "Failed to retrieve the method {0} for class {1}", new Object[] {methodSpec, clazz});
}
return;
}
Class<?> returnType = method.getReturnType();
// We do not veto the the root class
if (ParameterValue.class.isAssignableFrom(returnType)) {
if (!ParameterValue.class.equals(returnType)) {
// Add this class to the cache
paramValueCache_maskedClasses.add(returnType.getName());
}
}
} } | public class class_name {
private synchronized void tryProcessMethod(Class<?> clazz, String methodName, boolean expectedToExist, Class<?> ... parameterTypes) {
final Method method;
try {
method = clazz.getMethod(methodName, parameterTypes); // depends on control dependency: [try], data = [none]
} catch (NoSuchMethodException ex) {
Level logLevel = expectedToExist ? Level.INFO : Level.CONFIG;
if (LOGGER.isLoggable(logLevel)) {
String methodSpec = String.format("%s(%s)", methodName, StringUtils.join(parameterTypes, ","));
LOGGER.log(logLevel, "No method {0} for class {1}", new Object[] {methodSpec, clazz}); // depends on control dependency: [if], data = [none]
}
return;
} catch (RuntimeException ex) { // depends on control dependency: [catch], data = [none]
Level logLevel = expectedToExist ? Level.INFO : Level.CONFIG;
if (LOGGER.isLoggable(logLevel)) {
String methodSpec = String.format("%s(%s)", methodName, StringUtils.join(parameterTypes, ","));
LOGGER.log(logLevel, "Failed to retrieve the method {0} for class {1}", new Object[] {methodSpec, clazz}); // depends on control dependency: [if], data = [none]
}
return;
} // depends on control dependency: [catch], data = [none]
Class<?> returnType = method.getReturnType();
// We do not veto the the root class
if (ParameterValue.class.isAssignableFrom(returnType)) {
if (!ParameterValue.class.equals(returnType)) {
// Add this class to the cache
paramValueCache_maskedClasses.add(returnType.getName()); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public String getObject(Component component) {
if(objectModel!=null)
{
T object = objectModel.getObject();
if(object==null) return null;
resourceKey = getResourceKey(object);
}
String defaultValue = getDefault();
if(defaultValue==null) defaultValue = Strings.lastPathComponent(resourceKey, '.');
if(defaultValue!=null) defaultValue = buitify(defaultValue);
return Application.get()
.getResourceSettings()
.getLocalizer()
.getString(resourceKey, null, defaultValue);
} } | public class class_name {
public String getObject(Component component) {
if(objectModel!=null)
{
T object = objectModel.getObject();
if(object==null) return null;
resourceKey = getResourceKey(object); // depends on control dependency: [if], data = [none]
}
String defaultValue = getDefault();
if(defaultValue==null) defaultValue = Strings.lastPathComponent(resourceKey, '.');
if(defaultValue!=null) defaultValue = buitify(defaultValue);
return Application.get()
.getResourceSettings()
.getLocalizer()
.getString(resourceKey, null, defaultValue);
} } |
public class class_name {
public List<Charset> getAcceptCharset() {
List<Charset> result = new ArrayList<Charset>();
String value = getFirst(ACCEPT_CHARSET);
if (value != null) {
String[] tokens = value.split(",\\s*");
for (String token : tokens) {
int paramIdx = token.indexOf(';');
String charsetName;
if (paramIdx == -1) {
charsetName = token;
}
else {
charsetName = token.substring(0, paramIdx);
}
if (!charsetName.equals("*")) {
result.add(Charset.forName(charsetName));
}
}
}
return result;
} } | public class class_name {
public List<Charset> getAcceptCharset() {
List<Charset> result = new ArrayList<Charset>();
String value = getFirst(ACCEPT_CHARSET);
if (value != null) {
String[] tokens = value.split(",\\s*");
for (String token : tokens) {
int paramIdx = token.indexOf(';');
String charsetName;
if (paramIdx == -1) {
charsetName = token; // depends on control dependency: [if], data = [none]
}
else {
charsetName = token.substring(0, paramIdx); // depends on control dependency: [if], data = [none]
}
if (!charsetName.equals("*")) {
result.add(Charset.forName(charsetName)); // depends on control dependency: [if], data = [none]
}
}
}
return result;
} } |
public class class_name {
@Override
public void removeByC_COC(long CPDefinitionId, long CPOptionCategoryId) {
for (CPDefinitionSpecificationOptionValue cpDefinitionSpecificationOptionValue : findByC_COC(
CPDefinitionId, CPOptionCategoryId, QueryUtil.ALL_POS,
QueryUtil.ALL_POS, null)) {
remove(cpDefinitionSpecificationOptionValue);
}
} } | public class class_name {
@Override
public void removeByC_COC(long CPDefinitionId, long CPOptionCategoryId) {
for (CPDefinitionSpecificationOptionValue cpDefinitionSpecificationOptionValue : findByC_COC(
CPDefinitionId, CPOptionCategoryId, QueryUtil.ALL_POS,
QueryUtil.ALL_POS, null)) {
remove(cpDefinitionSpecificationOptionValue); // depends on control dependency: [for], data = [cpDefinitionSpecificationOptionValue]
}
} } |
public class class_name {
public boolean isSameType(FieldInfo field)
{ // Copy this data to a field //Change this to lock the objects down first!
boolean bSameType = false;
if (this.getClass().getName().equals(field.getClass().getName()))
bSameType = true;
else
{
Object data = this.getData();
Class<?> classData = this.getDataClass();
if (data != null)
classData = data.getClass();
Object fieldData = field.getData();
Class<?> classField = field.getDataClass();
if (fieldData != null)
classField = fieldData.getClass();
if (classData.equals(classField))
bSameType = true;
}
return bSameType;
} } | public class class_name {
public boolean isSameType(FieldInfo field)
{ // Copy this data to a field //Change this to lock the objects down first!
boolean bSameType = false;
if (this.getClass().getName().equals(field.getClass().getName()))
bSameType = true;
else
{
Object data = this.getData();
Class<?> classData = this.getDataClass(); // depends on control dependency: [if], data = [none]
if (data != null)
classData = data.getClass();
Object fieldData = field.getData();
Class<?> classField = field.getDataClass();
if (fieldData != null)
classField = fieldData.getClass();
if (classData.equals(classField))
bSameType = true;
}
return bSameType;
} } |
public class class_name {
public static String[] getMethodParamNames(final Method m) {
if (METHOD_NAMES_POOL.containsKey(m)) return METHOD_NAMES_POOL.get(m);
final String[] paramNames = new String[m.getParameterTypes().length];
final String n = m.getDeclaringClass().getName();
ClassReader cr;
try {
cr = new ClassReader(n);
} catch (IOException e) {
return null;
}
cr.accept(new ClassVisitor(Opcodes.ASM5) {
@Override
public MethodVisitor visitMethod(final int access, final String name, final String desc, final String signature, final String[] exceptions) {
final Type[] args = Type.getArgumentTypes(desc);
// The method name is the same and the number of parameters is the same
if (!name.equals(m.getName()) || !sameType(args, m.getParameterTypes())) {
return super.visitMethod(access, name, desc, signature, exceptions);
}
MethodVisitor v = super.visitMethod(access, name, desc, signature, exceptions);
return new MethodVisitor(Opcodes.ASM5, v) {
@Override
public void visitLocalVariable(String name, String desc, String signature, Label start, Label end, int index) {
int i = index - 1;
// if it is a static method, the first is the parameter
// if it's not a static method, the first one is "this" and then the parameter of the method
if (Modifier.isStatic(m.getModifiers())) {
i = index;
}
if (i >= 0 && i < paramNames.length) {
paramNames[i] = name;
}
super.visitLocalVariable(name, desc, signature, start, end, index);
}
};
}
}, 0);
METHOD_NAMES_POOL.put(m, paramNames);
return paramNames;
} } | public class class_name {
public static String[] getMethodParamNames(final Method m) {
if (METHOD_NAMES_POOL.containsKey(m)) return METHOD_NAMES_POOL.get(m);
final String[] paramNames = new String[m.getParameterTypes().length];
final String n = m.getDeclaringClass().getName();
ClassReader cr;
try {
cr = new ClassReader(n); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
return null;
} // depends on control dependency: [catch], data = [none]
cr.accept(new ClassVisitor(Opcodes.ASM5) {
@Override
public MethodVisitor visitMethod(final int access, final String name, final String desc, final String signature, final String[] exceptions) {
final Type[] args = Type.getArgumentTypes(desc);
// The method name is the same and the number of parameters is the same
if (!name.equals(m.getName()) || !sameType(args, m.getParameterTypes())) {
return super.visitMethod(access, name, desc, signature, exceptions); // depends on control dependency: [if], data = [none]
}
MethodVisitor v = super.visitMethod(access, name, desc, signature, exceptions);
return new MethodVisitor(Opcodes.ASM5, v) {
@Override
public void visitLocalVariable(String name, String desc, String signature, Label start, Label end, int index) {
int i = index - 1;
// if it is a static method, the first is the parameter
// if it's not a static method, the first one is "this" and then the parameter of the method
if (Modifier.isStatic(m.getModifiers())) {
i = index; // depends on control dependency: [if], data = [none]
}
if (i >= 0 && i < paramNames.length) {
paramNames[i] = name; // depends on control dependency: [if], data = [none]
}
super.visitLocalVariable(name, desc, signature, start, end, index);
}
};
}
}, 0);
METHOD_NAMES_POOL.put(m, paramNames);
return paramNames;
} } |
public class class_name {
public Node insertBefore(Node newChild, Node refChild) throws DOMException
{
if(null == refChild)
{
appendChild(newChild);
return newChild;
}
if(newChild == refChild)
{
// hmm...
return newChild;
}
Node node = m_firstChild;
Node prev = null;
boolean foundit = false;
while (null != node)
{
// If the newChild is already in the tree, it is first removed.
if(newChild == node)
{
if(null != prev)
((ElemTemplateElement)prev).m_nextSibling =
(ElemTemplateElement)node.getNextSibling();
else
m_firstChild = (ElemTemplateElement)node.getNextSibling();
node = node.getNextSibling();
continue; // prev remains the same.
}
if(refChild == node)
{
if(null != prev)
{
((ElemTemplateElement)prev).m_nextSibling = (ElemTemplateElement)newChild;
}
else
{
m_firstChild = (ElemTemplateElement)newChild;
}
((ElemTemplateElement)newChild).m_nextSibling = (ElemTemplateElement)refChild;
((ElemTemplateElement)newChild).setParentElem(this);
prev = newChild;
node = node.getNextSibling();
foundit = true;
continue;
}
prev = node;
node = node.getNextSibling();
}
if(!foundit)
throw new DOMException(DOMException.NOT_FOUND_ERR,
"refChild was not found in insertBefore method!");
else
return newChild;
} } | public class class_name {
public Node insertBefore(Node newChild, Node refChild) throws DOMException
{
if(null == refChild)
{
appendChild(newChild);
return newChild;
}
if(newChild == refChild)
{
// hmm...
return newChild;
}
Node node = m_firstChild;
Node prev = null;
boolean foundit = false;
while (null != node)
{
// If the newChild is already in the tree, it is first removed.
if(newChild == node)
{
if(null != prev)
((ElemTemplateElement)prev).m_nextSibling =
(ElemTemplateElement)node.getNextSibling();
else
m_firstChild = (ElemTemplateElement)node.getNextSibling();
node = node.getNextSibling(); // depends on control dependency: [if], data = [none]
continue; // prev remains the same.
}
if(refChild == node)
{
if(null != prev)
{
((ElemTemplateElement)prev).m_nextSibling = (ElemTemplateElement)newChild; // depends on control dependency: [if], data = [prev)]
}
else
{
m_firstChild = (ElemTemplateElement)newChild; // depends on control dependency: [if], data = [none]
}
((ElemTemplateElement)newChild).m_nextSibling = (ElemTemplateElement)refChild; // depends on control dependency: [if], data = [none]
((ElemTemplateElement)newChild).setParentElem(this); // depends on control dependency: [if], data = [none]
prev = newChild; // depends on control dependency: [if], data = [none]
node = node.getNextSibling(); // depends on control dependency: [if], data = [none]
foundit = true; // depends on control dependency: [if], data = [none]
continue;
}
prev = node;
node = node.getNextSibling();
}
if(!foundit)
throw new DOMException(DOMException.NOT_FOUND_ERR,
"refChild was not found in insertBefore method!");
else
return newChild;
} } |
public class class_name {
public static TerminalOp<Long, OptionalLong>
makeLong(LongBinaryOperator operator) {
Objects.requireNonNull(operator);
class ReducingSink
implements AccumulatingSink<Long, OptionalLong, ReducingSink>, Sink.OfLong {
private boolean empty;
private long state;
public void begin(long size) {
empty = true;
state = 0;
}
@Override
public void accept(long t) {
if (empty) {
empty = false;
state = t;
}
else {
state = operator.applyAsLong(state, t);
}
}
@Override
public OptionalLong get() {
return empty ? OptionalLong.empty() : OptionalLong.of(state);
}
@Override
public void combine(ReducingSink other) {
if (!other.empty)
accept(other.state);
}
}
return new ReduceOp<Long, OptionalLong, ReducingSink>(StreamShape.LONG_VALUE) {
@Override
public ReducingSink makeSink() {
return new ReducingSink();
}
};
} } | public class class_name {
public static TerminalOp<Long, OptionalLong>
makeLong(LongBinaryOperator operator) {
Objects.requireNonNull(operator);
class ReducingSink
implements AccumulatingSink<Long, OptionalLong, ReducingSink>, Sink.OfLong {
private boolean empty;
private long state;
public void begin(long size) {
empty = true;
state = 0;
}
@Override
public void accept(long t) {
if (empty) {
empty = false; // depends on control dependency: [if], data = [none]
state = t; // depends on control dependency: [if], data = [none]
}
else {
state = operator.applyAsLong(state, t); // depends on control dependency: [if], data = [none]
}
}
@Override
public OptionalLong get() {
return empty ? OptionalLong.empty() : OptionalLong.of(state);
}
@Override
public void combine(ReducingSink other) {
if (!other.empty)
accept(other.state);
}
}
return new ReduceOp<Long, OptionalLong, ReducingSink>(StreamShape.LONG_VALUE) {
@Override
public ReducingSink makeSink() {
return new ReducingSink();
}
};
} } |
public class class_name {
public static void waitUntilJobEnded(Key jobkey, int pollingIntervalMillis) {
while (true) {
if (Job.isEnded(jobkey)) {
return;
}
try { Thread.sleep (pollingIntervalMillis); } catch (Exception ignore) {}
}
} } | public class class_name {
public static void waitUntilJobEnded(Key jobkey, int pollingIntervalMillis) {
while (true) {
if (Job.isEnded(jobkey)) {
return; // depends on control dependency: [if], data = [none]
}
try { Thread.sleep (pollingIntervalMillis); } catch (Exception ignore) {} // depends on control dependency: [try], data = [none] // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
@ConfigArg(0)
public void setPath(PathImpl path)
{
_classDir = path;
if (_sourceDir == null) {
_sourceDir = path;
}
} } | public class class_name {
@ConfigArg(0)
public void setPath(PathImpl path)
{
_classDir = path;
if (_sourceDir == null) {
_sourceDir = path; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static void assertFalse(String message, boolean value) {
if (!value) {
pass(message);
} else {
fail(message, null);
}
} } | public class class_name {
public static void assertFalse(String message, boolean value) {
if (!value) {
pass(message); // depends on control dependency: [if], data = [none]
} else {
fail(message, null); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static String arrayToString(String[] strs) {
if (strs.length == 0) { return ""; }
StringBuilder sbuf = new StringBuilder();
sbuf.append(strs[0]);
for (int idx = 1; idx < strs.length; idx++) {
sbuf.append(",");
sbuf.append(strs[idx]);
}
return sbuf.toString();
} } | public class class_name {
public static String arrayToString(String[] strs) {
if (strs.length == 0) { return ""; } // depends on control dependency: [if], data = [none]
StringBuilder sbuf = new StringBuilder();
sbuf.append(strs[0]);
for (int idx = 1; idx < strs.length; idx++) {
sbuf.append(","); // depends on control dependency: [for], data = [none]
sbuf.append(strs[idx]); // depends on control dependency: [for], data = [idx]
}
return sbuf.toString();
} } |
public class class_name {
public static void mult(DMatrix1Row a , DMatrix1Row b , DMatrix1Row c )
{
if( b.numCols == 1 ) {
MatrixVectorMult_DDRM.mult(a, b, c);
} else if( b.numCols >= EjmlParameters.MULT_COLUMN_SWITCH ) {
MatrixMatrixMult_DDRM.mult_reorder(a,b,c);
} else {
MatrixMatrixMult_DDRM.mult_small(a,b,c);
}
} } | public class class_name {
public static void mult(DMatrix1Row a , DMatrix1Row b , DMatrix1Row c )
{
if( b.numCols == 1 ) {
MatrixVectorMult_DDRM.mult(a, b, c); // depends on control dependency: [if], data = [none]
} else if( b.numCols >= EjmlParameters.MULT_COLUMN_SWITCH ) {
MatrixMatrixMult_DDRM.mult_reorder(a,b,c); // depends on control dependency: [if], data = [none]
} else {
MatrixMatrixMult_DDRM.mult_small(a,b,c); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static TreeMap<Integer,SpaceGroup> parseSymopLib(InputStream symoplibIS) {
TreeMap<Integer, SpaceGroup> map = new TreeMap<Integer, SpaceGroup>();
name2sgs = new HashMap<String, SpaceGroup>();
try {
BufferedReader br = new BufferedReader(new InputStreamReader(symoplibIS));
String line;
SpaceGroup currentSG = null;
while ((line=br.readLine())!=null) {
if (!line.startsWith(" ")) {
if (currentSG!=null) {
map.put(currentSG.getId(),currentSG);
name2sgs.put(currentSG.getShortSymbol(), currentSG);
if (currentSG.getAltShortSymbol()!=null) {
// we add also alternative name to map so we can look it up
name2sgs.put(currentSG.getAltShortSymbol(), currentSG);
}
}
int idxFirstSpace = line.indexOf(' ');
int idxSecondSpace = line.indexOf(' ',idxFirstSpace+1);
int idxThirdSpace = line.indexOf(' ',idxSecondSpace+1);
int id = Integer.parseInt(line.substring(0, idxFirstSpace));
int multiplicity = Integer.parseInt(line.substring(idxFirstSpace+1, idxSecondSpace));
int primitiveMultiplicity = Integer.parseInt(line.substring(idxSecondSpace+1, idxThirdSpace));
Matcher m = namePat.matcher(line);
String shortSymbol = null;
String altShortSymbol = null;
String brav = null;
if (m.matches()) {
brav = m.group(1);
altShortSymbol = m.group(2); // null if there is no match
if (altShortSymbol!=null) altShortSymbol = altShortSymbol.trim().replaceAll("'", "");
shortSymbol = m.group(3);
}
currentSG = new SpaceGroup(id, multiplicity, primitiveMultiplicity, shortSymbol, altShortSymbol, BravaisLattice.getByName(brav));
} else {
currentSG.addTransformation(line.trim());
}
}
br.close();
// and we add the last SG
map.put(currentSG.getId(), currentSG);
name2sgs.put(currentSG.getShortSymbol(), currentSG);
if (currentSG.getAltShortSymbol()!=null) {
// we add also alternative name to map so we can look it up
name2sgs.put(currentSG.getAltShortSymbol(), currentSG);
}
} catch (IOException e) {
logger.error("Fatal error! Can't read symop.lib file. Error: "+e.getMessage()+". ");
System.exit(1);
}
for (SpaceGroup sg:map.values()) {
sg.initializeCellTranslations();
}
return map;
} } | public class class_name {
public static TreeMap<Integer,SpaceGroup> parseSymopLib(InputStream symoplibIS) {
TreeMap<Integer, SpaceGroup> map = new TreeMap<Integer, SpaceGroup>();
name2sgs = new HashMap<String, SpaceGroup>();
try {
BufferedReader br = new BufferedReader(new InputStreamReader(symoplibIS));
String line;
SpaceGroup currentSG = null;
while ((line=br.readLine())!=null) {
if (!line.startsWith(" ")) {
if (currentSG!=null) {
map.put(currentSG.getId(),currentSG); // depends on control dependency: [if], data = [(currentSG]
name2sgs.put(currentSG.getShortSymbol(), currentSG); // depends on control dependency: [if], data = [(currentSG]
if (currentSG.getAltShortSymbol()!=null) {
// we add also alternative name to map so we can look it up
name2sgs.put(currentSG.getAltShortSymbol(), currentSG); // depends on control dependency: [if], data = [(currentSG.getAltShortSymbol()]
}
}
int idxFirstSpace = line.indexOf(' ');
int idxSecondSpace = line.indexOf(' ',idxFirstSpace+1);
int idxThirdSpace = line.indexOf(' ',idxSecondSpace+1);
int id = Integer.parseInt(line.substring(0, idxFirstSpace));
int multiplicity = Integer.parseInt(line.substring(idxFirstSpace+1, idxSecondSpace));
int primitiveMultiplicity = Integer.parseInt(line.substring(idxSecondSpace+1, idxThirdSpace));
Matcher m = namePat.matcher(line);
String shortSymbol = null;
String altShortSymbol = null;
String brav = null;
if (m.matches()) {
brav = m.group(1); // depends on control dependency: [if], data = [none]
altShortSymbol = m.group(2); // null if there is no match // depends on control dependency: [if], data = [none]
if (altShortSymbol!=null) altShortSymbol = altShortSymbol.trim().replaceAll("'", "");
shortSymbol = m.group(3); // depends on control dependency: [if], data = [none]
}
currentSG = new SpaceGroup(id, multiplicity, primitiveMultiplicity, shortSymbol, altShortSymbol, BravaisLattice.getByName(brav)); // depends on control dependency: [if], data = [none]
} else {
currentSG.addTransformation(line.trim()); // depends on control dependency: [if], data = [none]
}
}
br.close(); // depends on control dependency: [try], data = [none]
// and we add the last SG
map.put(currentSG.getId(), currentSG); // depends on control dependency: [try], data = [none]
name2sgs.put(currentSG.getShortSymbol(), currentSG); // depends on control dependency: [try], data = [none]
if (currentSG.getAltShortSymbol()!=null) {
// we add also alternative name to map so we can look it up
name2sgs.put(currentSG.getAltShortSymbol(), currentSG); // depends on control dependency: [if], data = [(currentSG.getAltShortSymbol()]
}
} catch (IOException e) {
logger.error("Fatal error! Can't read symop.lib file. Error: "+e.getMessage()+". ");
System.exit(1);
} // depends on control dependency: [catch], data = [none]
for (SpaceGroup sg:map.values()) {
sg.initializeCellTranslations(); // depends on control dependency: [for], data = [sg]
}
return map;
} } |
public class class_name {
static ThreadLocal<Map<String, String>> createThreadLocalMap(final boolean isMapEnabled) {
final PropertiesUtil managerProps = PropertiesUtil.getProperties();
final boolean inheritable = managerProps.getBooleanProperty(INHERITABLE_MAP);
if (inheritable) {
return new InheritableThreadLocal<Map<String, String>>() {
@Override
protected Map<String, String> childValue(final Map<String, String> parentValue) {
return parentValue != null && isMapEnabled //
? Collections.unmodifiableMap(new HashMap<>(parentValue)) //
: null;
}
};
}
// if not inheritable, return plain ThreadLocal with null as initial value
return new ThreadLocal<>();
} } | public class class_name {
static ThreadLocal<Map<String, String>> createThreadLocalMap(final boolean isMapEnabled) {
final PropertiesUtil managerProps = PropertiesUtil.getProperties();
final boolean inheritable = managerProps.getBooleanProperty(INHERITABLE_MAP);
if (inheritable) {
return new InheritableThreadLocal<Map<String, String>>() {
@Override
protected Map<String, String> childValue(final Map<String, String> parentValue) {
return parentValue != null && isMapEnabled //
? Collections.unmodifiableMap(new HashMap<>(parentValue)) //
: null;
}
}; // depends on control dependency: [if], data = [none]
}
// if not inheritable, return plain ThreadLocal with null as initial value
return new ThreadLocal<>();
} } |
public class class_name {
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
boolean xml = false;
logger.debug("Got request: {}?{}", request.getRequestURL(),
request.getQueryString());
// Check for xml parameter.
for (Enumeration<?> e = request.getParameterNames(); e.hasMoreElements();) {
String name = URLDecoder.decode((String) e.nextElement(), "UTF-8");
if (name.equalsIgnoreCase("xml")) {
xml = Boolean.parseBoolean(request.getParameter(name));
}
}
Context context =
ReadOnlyContext.getContext(Constants.HTTP_REQUEST.REST.uri,
request);
try {
describeRepository(context, xml, response);
} catch (AuthzException ae) {
throw RootException.getServletException(ae,
request,
ACTION_LABEL,
EMPTY_STRING_ARRAY);
} catch (Throwable th) {
throw new InternalError500Exception("",
th,
request,
ACTION_LABEL,
"",
EMPTY_STRING_ARRAY);
}
} } | public class class_name {
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
boolean xml = false;
logger.debug("Got request: {}?{}", request.getRequestURL(),
request.getQueryString());
// Check for xml parameter.
for (Enumeration<?> e = request.getParameterNames(); e.hasMoreElements();) {
String name = URLDecoder.decode((String) e.nextElement(), "UTF-8");
if (name.equalsIgnoreCase("xml")) {
xml = Boolean.parseBoolean(request.getParameter(name)); // depends on control dependency: [if], data = [none]
}
}
Context context =
ReadOnlyContext.getContext(Constants.HTTP_REQUEST.REST.uri,
request);
try {
describeRepository(context, xml, response);
} catch (AuthzException ae) {
throw RootException.getServletException(ae,
request,
ACTION_LABEL,
EMPTY_STRING_ARRAY);
} catch (Throwable th) {
throw new InternalError500Exception("",
th,
request,
ACTION_LABEL,
"",
EMPTY_STRING_ARRAY);
}
} } |
public class class_name {
protected Fieldable createFulltextField(Reader value)
{
if (supportHighlighting)
{
return new TextFieldExtractor(FieldNames.FULLTEXT, value, true, true);
}
else
{
return new TextFieldExtractor(FieldNames.FULLTEXT, value, false, false);
}
} } | public class class_name {
protected Fieldable createFulltextField(Reader value)
{
if (supportHighlighting)
{
return new TextFieldExtractor(FieldNames.FULLTEXT, value, true, true); // depends on control dependency: [if], data = [none]
}
else
{
return new TextFieldExtractor(FieldNames.FULLTEXT, value, false, false); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public boolean isSet(PropertyKey key) {
if (isSetByUser(key)) {
return true;
}
// In case key is not the reference to the original key
return PropertyKey.fromString(key.toString()).getDefaultValue() != null;
} } | public class class_name {
public boolean isSet(PropertyKey key) {
if (isSetByUser(key)) {
return true; // depends on control dependency: [if], data = [none]
}
// In case key is not the reference to the original key
return PropertyKey.fromString(key.toString()).getDefaultValue() != null;
} } |
public class class_name {
public void setAlgorithm(KeyAgreementType dh)
{
log("DH algorithm set: " + getDHName(dhMode) + " -> " + getDHName(dh));
try {
if(dhMode != null && dh.keyType == dhMode.keyType) return;
dhMode = dh;
switch (dhMode.keyType) {
case KeyAgreementType.DH_MODE_DH3K:
DHParameterSpec paramSpec = new DHParameterSpec(dhP, dhG, DH_EXP_LENGTH);
dhKeyGen = KeyPairGenerator.getInstance(ALGORITHM_DH);
dhKeyGen.initialize(paramSpec, sr);
dhKeyPair = dhKeyGen.generateKeyPair();
clearEcdh();
break;
case KeyAgreementType.DH_MODE_EC25:
setupEC(256);
break;
case KeyAgreementType.DH_MODE_EC38:
default:
setupEC(384);
break;
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
throw new RuntimeException("Failed init Diffie-Hellman: " + e.getClass().getName() + ": " + e.getMessage() + ", bitlength p = " + dhP.bitCount());
}
} } | public class class_name {
public void setAlgorithm(KeyAgreementType dh)
{
log("DH algorithm set: " + getDHName(dhMode) + " -> " + getDHName(dh));
try {
if(dhMode != null && dh.keyType == dhMode.keyType) return;
dhMode = dh; // depends on control dependency: [try], data = [none]
switch (dhMode.keyType) {
case KeyAgreementType.DH_MODE_DH3K:
DHParameterSpec paramSpec = new DHParameterSpec(dhP, dhG, DH_EXP_LENGTH);
dhKeyGen = KeyPairGenerator.getInstance(ALGORITHM_DH);
dhKeyGen.initialize(paramSpec, sr);
dhKeyPair = dhKeyGen.generateKeyPair();
clearEcdh();
break;
case KeyAgreementType.DH_MODE_EC25:
setupEC(256);
break;
case KeyAgreementType.DH_MODE_EC38:
default:
setupEC(384);
break;
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
throw new RuntimeException("Failed init Diffie-Hellman: " + e.getClass().getName() + ": " + e.getMessage() + ", bitlength p = " + dhP.bitCount());
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void marshall(CreateInterconnectRequest createInterconnectRequest, ProtocolMarshaller protocolMarshaller) {
if (createInterconnectRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(createInterconnectRequest.getInterconnectName(), INTERCONNECTNAME_BINDING);
protocolMarshaller.marshall(createInterconnectRequest.getBandwidth(), BANDWIDTH_BINDING);
protocolMarshaller.marshall(createInterconnectRequest.getLocation(), LOCATION_BINDING);
protocolMarshaller.marshall(createInterconnectRequest.getLagId(), LAGID_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(CreateInterconnectRequest createInterconnectRequest, ProtocolMarshaller protocolMarshaller) {
if (createInterconnectRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(createInterconnectRequest.getInterconnectName(), INTERCONNECTNAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(createInterconnectRequest.getBandwidth(), BANDWIDTH_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(createInterconnectRequest.getLocation(), LOCATION_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(createInterconnectRequest.getLagId(), LAGID_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
void onEvent(Device device, ProfilingEvent event) {
if (event == null) {
logger.log(Level.WARNING, "Discarding profiling event " + event + " for null device, for Kernel class: " + kernelClass.getName());
return;
}
final KernelDeviceProfile deviceProfile = deviceProfiles.get(device);
switch (event) {
case CLASS_MODEL_BUILT: // fallthrough
case OPENCL_GENERATED: // fallthrough
case INIT_JNI: // fallthrough
case OPENCL_COMPILED: // fallthrough
case PREPARE_EXECUTE: // fallthrough
case EXECUTED: // fallthrough
{
if (deviceProfile == null) {
logger.log(Level.SEVERE, "Error in KernelProfile, no currentDevice (synchronization error?");
}
deviceProfile.onEvent(event);
break;
}
case START:
throw new IllegalArgumentException("must use onStart(Device) to start profiling");
default:
throw new IllegalArgumentException("Unhandled event " + event);
}
} } | public class class_name {
void onEvent(Device device, ProfilingEvent event) {
if (event == null) {
logger.log(Level.WARNING, "Discarding profiling event " + event + " for null device, for Kernel class: " + kernelClass.getName()); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
final KernelDeviceProfile deviceProfile = deviceProfiles.get(device);
switch (event) {
case CLASS_MODEL_BUILT: // fallthrough
case OPENCL_GENERATED: // fallthrough
case INIT_JNI: // fallthrough
case OPENCL_COMPILED: // fallthrough
case PREPARE_EXECUTE: // fallthrough
case EXECUTED: // fallthrough
{
if (deviceProfile == null) {
logger.log(Level.SEVERE, "Error in KernelProfile, no currentDevice (synchronization error?"); // depends on control dependency: [if], data = [none]
}
deviceProfile.onEvent(event);
break;
}
case START:
throw new IllegalArgumentException("must use onStart(Device) to start profiling");
default:
throw new IllegalArgumentException("Unhandled event " + event);
}
} } |
public class class_name {
protected StrongCounter getCounterOrCreateIt(String counterName, int initialValue) {
CounterManager counterManager = EmbeddedCounterManagerFactory.asCounterManager( cacheManager );
if ( !counterManager.isDefined( counterName ) ) {
LOG.tracef( "Counter %s is not defined, creating it", counterName );
// global configuration is mandatory in order to define
// a new clustered counter with persistent storage
validateGlobalConfiguration();
counterManager.defineCounter( counterName,
CounterConfiguration.builder(
CounterType.UNBOUNDED_STRONG )
.initialValue( initialValue )
.storage( Storage.PERSISTENT )
.build() );
}
StrongCounter strongCounter = counterManager.getStrongCounter( counterName );
return strongCounter;
} } | public class class_name {
protected StrongCounter getCounterOrCreateIt(String counterName, int initialValue) {
CounterManager counterManager = EmbeddedCounterManagerFactory.asCounterManager( cacheManager );
if ( !counterManager.isDefined( counterName ) ) {
LOG.tracef( "Counter %s is not defined, creating it", counterName ); // depends on control dependency: [if], data = [none]
// global configuration is mandatory in order to define
// a new clustered counter with persistent storage
validateGlobalConfiguration(); // depends on control dependency: [if], data = [none]
counterManager.defineCounter( counterName,
CounterConfiguration.builder(
CounterType.UNBOUNDED_STRONG )
.initialValue( initialValue )
.storage( Storage.PERSISTENT )
.build() ); // depends on control dependency: [if], data = [none]
}
StrongCounter strongCounter = counterManager.getStrongCounter( counterName );
return strongCounter;
} } |
public class class_name {
public static AbstractButton makeButtcon( Icon icon, Icon selected, String tooltip, boolean is_toggle) {
AbstractButton butt;
if (is_toggle)
butt = new JToggleButton();
else
butt = new JButton();
if (debug) System.out.println(" makeButtcon"+ icon+ " "+ selected+ " "+ tooltip+ " "+ is_toggle);
if (icon != null)
butt.setIcon(icon);
if (selected != null) {
if (is_toggle) {
butt.setSelectedIcon( selected);
} else {
butt.setRolloverIcon(selected);
butt.setRolloverSelectedIcon(selected);
butt.setPressedIcon(selected);
butt.setRolloverEnabled(true);
}
}
butt.setMaximumSize(new Dimension(28,28)); // kludge
butt.setPreferredSize(new Dimension(28,28));
butt.setToolTipText(tooltip);
butt.setFocusPainted(false);
return butt;
} } | public class class_name {
public static AbstractButton makeButtcon( Icon icon, Icon selected, String tooltip, boolean is_toggle) {
AbstractButton butt;
if (is_toggle)
butt = new JToggleButton();
else
butt = new JButton();
if (debug) System.out.println(" makeButtcon"+ icon+ " "+ selected+ " "+ tooltip+ " "+ is_toggle);
if (icon != null)
butt.setIcon(icon);
if (selected != null) {
if (is_toggle) {
butt.setSelectedIcon( selected); // depends on control dependency: [if], data = [none]
} else {
butt.setRolloverIcon(selected); // depends on control dependency: [if], data = [none]
butt.setRolloverSelectedIcon(selected); // depends on control dependency: [if], data = [none]
butt.setPressedIcon(selected); // depends on control dependency: [if], data = [none]
butt.setRolloverEnabled(true); // depends on control dependency: [if], data = [none]
}
}
butt.setMaximumSize(new Dimension(28,28)); // kludge
butt.setPreferredSize(new Dimension(28,28));
butt.setToolTipText(tooltip);
butt.setFocusPainted(false);
return butt;
} } |
public class class_name {
    /**
     * Reads the next byte of the chunk-encoded view of the underlying stream
     * (presumably an AWS V4-style chunked upload encoding -- TODO confirm).
     *
     * <p>Bookkeeping: {@code length}/{@code bytesRead} track the total chunked
     * output, while {@code streamSize}/{@code streamBytesRead} track the raw source
     * stream. When the current chunk body ({@code chunkBody}/{@code chunkPos}) is
     * exhausted, the next chunk of up to {@code CHUNK_SIZE} source bytes is built;
     * once the source is fully consumed, a final zero-length chunk terminates the
     * encoding.
     *
     * @return the next byte as an int in [0, 255], or -1 at end of stream
     * @throws IOException if chunk construction fails (digest/signing errors are wrapped)
     */
    public int read() throws IOException {
        if (this.bytesRead == this.length) {
            // All chunks and final additional chunk are read.
            // This means we have reached EOF.
            return -1;
        }
        try {
            // Read a chunk from given input stream when
            // it is first chunk or all bytes in chunk body is read
            if (this.streamBytesRead == 0 || this.chunkPos == this.chunkBody.length) {
                // Check if there are data available to read from given input stream.
                if (this.streamBytesRead != this.streamSize) {
                    // Send all data chunks.
                    int chunkSize = CHUNK_SIZE;
                    if (this.streamBytesRead + chunkSize > this.streamSize) {
                        // Last data chunk: only the remaining source bytes.
                        chunkSize = this.streamSize - this.streamBytesRead;
                    }
                    if (readChunk(chunkSize) < 0) {
                        return -1;
                    }
                    this.streamBytesRead += chunkSize;
                } else {
                    // Send final additional chunk to complete chunk upload.
                    byte[] chunk = new byte[0];
                    createChunkBody(chunk);
                }
            }
            this.bytesRead++;
            // Value must be between 0 to 255.
            int value = this.chunkBody[this.chunkPos] & 0xFF;
            this.chunkPos++;
            return value;
        } catch (NoSuchAlgorithmException | InvalidKeyException | InsufficientDataException | InternalException e) {
            // NOTE(review): this wraps e.getCause() rather than e itself, so the
            // immediate exception's message and stack are lost; consider
            // new IOException(e). Left unchanged here.
            throw new IOException(e.getCause());
        }
    }
}
public int read() throws IOException {
if (this.bytesRead == this.length) {
// All chunks and final additional chunk are read.
// This means we have reached EOF.
return -1;
}
try {
// Read a chunk from given input stream when
// it is first chunk or all bytes in chunk body is read
if (this.streamBytesRead == 0 || this.chunkPos == this.chunkBody.length) {
// Check if there are data available to read from given input stream.
if (this.streamBytesRead != this.streamSize) {
// Send all data chunks.
int chunkSize = CHUNK_SIZE;
if (this.streamBytesRead + chunkSize > this.streamSize) {
chunkSize = this.streamSize - this.streamBytesRead; // depends on control dependency: [if], data = [none]
}
if (readChunk(chunkSize) < 0) {
return -1; // depends on control dependency: [if], data = [none]
}
this.streamBytesRead += chunkSize; // depends on control dependency: [if], data = [none]
} else {
// Send final additional chunk to complete chunk upload.
byte[] chunk = new byte[0];
createChunkBody(chunk); // depends on control dependency: [if], data = [none]
}
}
this.bytesRead++;
// Value must be between 0 to 255.
int value = this.chunkBody[this.chunkPos] & 0xFF;
this.chunkPos++;
return value;
} catch (NoSuchAlgorithmException | InvalidKeyException | InsufficientDataException | InternalException e) {
throw new IOException(e.getCause());
}
} } |
public class class_name {
public static List<String> getDistinctCssUrls(String htmlContent) {
Document doc = Jsoup.parse(htmlContent);
Elements els = doc.select(CSS_LINKS_SELECTOR);
List<String> cssFiles = new ArrayList<>(els.size());
for (Element e : els) {
String path = e.attr(HREF_ATTR);
if (!cssFiles.contains(path)) {
cssFiles.add(path);
}
}
return cssFiles;
} } | public class class_name {
public static List<String> getDistinctCssUrls(String htmlContent) {
Document doc = Jsoup.parse(htmlContent);
Elements els = doc.select(CSS_LINKS_SELECTOR);
List<String> cssFiles = new ArrayList<>(els.size());
for (Element e : els) {
String path = e.attr(HREF_ATTR);
if (!cssFiles.contains(path)) {
cssFiles.add(path); // depends on control dependency: [if], data = [none]
}
}
return cssFiles;
} } |
public class class_name {
    /**
     * Returns the simple name of {@code type} prefixed by the simple names of all
     * of its enclosing classes, outermost first, joined with {@code separator}.
     * A {@code null} separator joins the names directly; a {@code null} type yields
     * the empty string.
     *
     * @param type      the class whose qualified simple name is wanted
     * @param separator text placed between enclosing-class names, or {@code null}
     * @return e.g. {@code "Map.Entry"} for {@code Map.Entry.class} with {@code "."}
     */
    public static String getSimpleName(Class<?> type, String separator) {
        // Walk the enclosing-class chain inner -> outer, prepending so the
        // collected names end up outermost first.
        final java.util.Deque<String> names = new java.util.ArrayDeque<>();
        for (Class<?> c = type; c != null; c = c.getEnclosingClass()) {
            names.addFirst(c.getSimpleName());
        }
        final StringBuilder result = new StringBuilder();
        for (String name : names) {
            if (result.length() > 0 && separator != null) {
                result.append(separator);
            }
            result.append(name);
        }
        return result.toString();
    }
}
public static String getSimpleName(Class<?> type, String separator) {
final StringBuilder buf = new StringBuilder();
Class<?> c = type;
while (c != null) {
if (buf.length() > 0 && separator != null) {
buf.insert(0, separator); // depends on control dependency: [if], data = [none]
}
buf.insert(0, c.getSimpleName()); // depends on control dependency: [while], data = [none]
c = c.getEnclosingClass(); // depends on control dependency: [while], data = [none]
}
return buf.toString();
} } |
public class class_name {
    /**
     * Returns the difference engine to use: the explicitly configured
     * {@code differenceEngine} if one was set, otherwise a fresh engine chosen from
     * the current XMLUnit settings. The newer engine is selected only when
     * attribute order is ignored and either no unknown-element qualifier is in use
     * or unmatched nodes are compared; every other combination falls back to the
     * legacy engine.
     */
    private DifferenceEngineContract getDifferenceEngine() {
        if (differenceEngine == null) {
            if (
                XMLUnit.getIgnoreAttributeOrder()
                &&
                (!usesUnknownElementQualifier()
                 || XMLUnit.getCompareUnmatched())
                ) {
                return new NewDifferenceEngine(this, matchTrackerDelegate);
            }
            // Legacy engine handles the remaining configurations.
            return new DifferenceEngine(this, matchTrackerDelegate);
        }
        return differenceEngine;
    }
}
private DifferenceEngineContract getDifferenceEngine() {
if (differenceEngine == null) {
if (
XMLUnit.getIgnoreAttributeOrder()
&&
(!usesUnknownElementQualifier()
|| XMLUnit.getCompareUnmatched())
) {
return new NewDifferenceEngine(this, matchTrackerDelegate); // depends on control dependency: [if], data = [(]
}
return new DifferenceEngine(this, matchTrackerDelegate); // depends on control dependency: [if], data = [none]
}
return differenceEngine;
} } |
public class class_name {
    /**
     * Lazily creates, and thereafter returns, the temporary file backing this
     * upload. The file name combines the instance {@code UID} with a global
     * 8-digit zero-padded counter so that distinct uploads get distinct files.
     *
     * <p>NOTE(review): the lazy initialization is not synchronized; concurrent
     * first calls could assign two different files -- confirm single-threaded use.
     *
     * @return the temp file for this upload, never {@code null}
     */
    @Nonnull
    protected File getTempFile ()
    {
        if (m_aTempFile == null)
        {
            // If you manage to get more than 100 million of ids, you'll
            // start getting ids longer than 8 characters.
            final String sUniqueID = StringHelper.getLeadingZero (s_aTempFileCounter.getAndIncrement (), 8);
            final String sTempFileName = "upload_" + UID + "_" + sUniqueID + ".tmp";
            m_aTempFile = new File (m_aTempDir, sTempFileName);
        }
        return m_aTempFile;
    }
}
@Nonnull
protected File getTempFile ()
{
if (m_aTempFile == null)
{
// If you manage to get more than 100 million of ids, you'll
// start getting ids longer than 8 characters.
final String sUniqueID = StringHelper.getLeadingZero (s_aTempFileCounter.getAndIncrement (), 8);
final String sTempFileName = "upload_" + UID + "_" + sUniqueID + ".tmp";
m_aTempFile = new File (m_aTempDir, sTempFileName); // depends on control dependency: [if], data = [none]
}
return m_aTempFile;
} } |
public class class_name {
    /**
     * Sets the request body from the given DSL part, registering the part's
     * matchers and generators, and defaults the Content-Type header to
     * application/json when no Content-Type has been set yet.
     *
     * @param body the body DSL to attach (closed here to finalize it)
     * @return this builder, for chaining
     */
    public PactDslRequestWithoutPath body(DslPart body) {
        DslPart parent = body.close();
        requestMatchers.addCategory(parent.matchers);
        requestGenerators.addGenerators(parent.generators);
        // NOTE(review): getBytes() uses the platform default charset -- confirm
        // UTF-8 is intended here.
        requestBody = OptionalBody.body(parent.toString().getBytes());
        if (!requestHeaders.containsKey(CONTENT_TYPE)) {
            requestHeaders.put(CONTENT_TYPE, Collections.singletonList(ContentType.APPLICATION_JSON.toString()));
        }
        return this;
    }
}
public PactDslRequestWithoutPath body(DslPart body) {
DslPart parent = body.close();
requestMatchers.addCategory(parent.matchers);
requestGenerators.addGenerators(parent.generators);
requestBody = OptionalBody.body(parent.toString().getBytes());
if (!requestHeaders.containsKey(CONTENT_TYPE)) {
requestHeaders.put(CONTENT_TYPE, Collections.singletonList(ContentType.APPLICATION_JSON.toString())); // depends on control dependency: [if], data = [none]
}
return this;
} } |
public class class_name {
    /**
     * Authenticates the identity described by the callback handler. Basic-auth
     * logins get a basic-auth subject directly; otherwise the authentication cache
     * is consulted first, and on a miss a JAAS login against {@code jaasEntryName}
     * is performed and its result cached.
     *
     * @param jaasEntryName   the JAAS configuration entry to log in against
     * @param callbackHandler supplies the credentials
     * @param subject         optional partial subject passed through to login/cache lookup
     * @return the authenticated subject
     * @throws AuthenticationException if authentication data cannot be built or login fails
     */
    @Override
    public Subject authenticate(String jaasEntryName, CallbackHandler callbackHandler, Subject subject) throws AuthenticationException {
        CallbackHandlerAuthenticationData cAuthData = new CallbackHandlerAuthenticationData(callbackHandler);
        AuthenticationData authenticationData = null;
        try {
            authenticationData = cAuthData.createAuthenticationData();
        } catch (Exception e) {
            // NOTE(review): the original cause is dropped; only its message survives.
            throw new AuthenticationException(e.getMessage());
        }
        // Presumably serializes concurrent logins for identical credentials so the
        // cache is filled only once -- TODO confirm.
        ReentrantLock currentLock = optionallyObtainLockedLock(authenticationData);
        try {
            // If basic auth login to a different realm, then create a basic auth subject
            if (isBasicAuthLogin(authenticationData)) {
                return createBasicAuthSubject(authenticationData, subject);
            } else {
                Subject authenticatedSubject = findSubjectInAuthCache(authenticationData, subject);
                if (authenticatedSubject == null) {
                    // Cache miss: perform the real JAAS login and remember the result.
                    authenticatedSubject = performJAASLogin(jaasEntryName, callbackHandler, subject);
                    insertSubjectInAuthCache(authenticationData, authenticatedSubject);
                }
                return authenticatedSubject;
            }
        } finally {
            // Always release the per-credentials lock, even when login throws.
            releaseLock(authenticationData, currentLock);
        }
    }
}
@Override
public Subject authenticate(String jaasEntryName, CallbackHandler callbackHandler, Subject subject) throws AuthenticationException {
CallbackHandlerAuthenticationData cAuthData = new CallbackHandlerAuthenticationData(callbackHandler);
AuthenticationData authenticationData = null;
try {
authenticationData = cAuthData.createAuthenticationData();
} catch (Exception e) {
throw new AuthenticationException(e.getMessage());
}
ReentrantLock currentLock = optionallyObtainLockedLock(authenticationData);
try {
// If basic auth login to a different realm, then create a basic auth subject
if (isBasicAuthLogin(authenticationData)) {
return createBasicAuthSubject(authenticationData, subject); // depends on control dependency: [if], data = [none]
} else {
Subject authenticatedSubject = findSubjectInAuthCache(authenticationData, subject);
if (authenticatedSubject == null) {
authenticatedSubject = performJAASLogin(jaasEntryName, callbackHandler, subject); // depends on control dependency: [if], data = [none]
insertSubjectInAuthCache(authenticationData, authenticatedSubject); // depends on control dependency: [if], data = [none]
}
return authenticatedSubject; // depends on control dependency: [if], data = [none]
}
} finally {
releaseLock(authenticationData, currentLock);
}
} } |
public class class_name {
    /**
     * Starts asynchronous batch parsing of the given Java sources with the Eclipse
     * JDT {@link ASTParser}, one thread-pool task per batch of files.
     *
     * <p>Each parsed compilation unit is walked by a {@link ReferenceResolvingVisitor};
     * the discovered references are reported per file via
     * {@code listener.processed(...)}, and any per-file failure is reported via
     * {@code listener.failed(...)} so the rest of the batch continues.
     *
     * @param listener       receives per-file results and failures; callbacks arrive
     *                       from multiple worker threads, so it should be thread-safe
     *                       -- TODO confirm
     * @param importResolver resolves wildcard imports during AST visiting
     * @param libraryPaths   classpath entries for JDT binding resolution
     * @param sourcePaths    source roots for JDT binding resolution
     * @param sourceFiles    the source files to parse
     * @return a future whose {@code isDone()} reflects termination of the internal executor
     */
    public static BatchASTFuture analyze(final BatchASTListener listener, final WildcardImportResolver importResolver,
                final Set<String> libraryPaths,
                final Set<String> sourcePaths, Set<Path> sourceFiles)
    {
        final String[] encodings = null; // null lets JDT pick the default encoding per file
        final String[] bindingKeys = new String[0];
        final ExecutorService executor = WindupExecutors.newFixedThreadPool(WindupExecutors.getDefaultThreadCount());
        final FileASTRequestor requestor = new FileASTRequestor()
        {
            @Override
            public void acceptAST(String sourcePath, CompilationUnit ast)
            {
                try
                {
                    /*
                     * This super() call doesn't do anything, but we call it just to be nice, in case that ever changes.
                     */
                    super.acceptAST(sourcePath, ast);
                    ReferenceResolvingVisitor visitor = new ReferenceResolvingVisitor(importResolver, ast, sourcePath);
                    ast.accept(visitor);
                    listener.processed(Paths.get(sourcePath), visitor.getJavaClassReferences());
                }
                catch (WindupStopException ex)
                {
                    // Cancellation must propagate; do not convert it into a per-file failure.
                    throw ex;
                }
                catch (Throwable t)
                {
                    // Any other problem is confined to this one file so the batch continues.
                    listener.failed(Paths.get(sourcePath), t);
                }
            }
        };
        List<List<String>> batches = createBatches(sourceFiles);
        for (final List<String> batch : batches)
        {
            executor.submit(new Callable<Void>()
            {
                @Override
                public Void call() throws Exception
                {
                    // A fresh parser per task: ASTParser instances are not shared across threads.
                    ASTParser parser = ASTParser.newParser(AST.JLS8);
                    parser.setBindingsRecovery(false);
                    parser.setResolveBindings(true);
                    Map<String, String> options = JavaCore.getOptions();
                    JavaCore.setComplianceOptions(JavaCore.VERSION_1_8, options);
                    // these options seem to slightly reduce the number of times that JDT aborts on compilation errors
                    options.put(JavaCore.CORE_INCOMPLETE_CLASSPATH, "warning");
                    options.put(JavaCore.COMPILER_PB_ENUM_IDENTIFIER, "warning");
                    options.put(JavaCore.COMPILER_PB_FORBIDDEN_REFERENCE, "warning");
                    options.put(JavaCore.CORE_CIRCULAR_CLASSPATH, "warning");
                    options.put(JavaCore.COMPILER_PB_ASSERT_IDENTIFIER, "warning");
                    options.put(JavaCore.COMPILER_PB_NULL_SPECIFICATION_VIOLATION, "warning");
                    options.put(JavaCore.CORE_JAVA_BUILD_INVALID_CLASSPATH, "ignore");
                    options.put(JavaCore.COMPILER_PB_NULL_ANNOTATION_INFERENCE_CONFLICT, "warning");
                    options.put(JavaCore.CORE_OUTPUT_LOCATION_OVERLAPPING_ANOTHER_SOURCE, "warning");
                    options.put(JavaCore.CORE_JAVA_BUILD_DUPLICATE_RESOURCE, "warning");
                    parser.setCompilerOptions(options);
                    parser.setEnvironment(libraryPaths.toArray(new String[libraryPaths.size()]),
                                sourcePaths.toArray(new String[sourcePaths.size()]),
                                null,
                                true);
                    parser.createASTs(batch.toArray(new String[batch.size()]), encodings, bindingKeys, requestor, null);
                    return null;
                }
            });
        }
        // No further tasks are submitted; isDone() below reflects completion of all batches.
        executor.shutdown();
        return new BatchASTFuture()
        {
            @Override
            public boolean isDone()
            {
                return executor.isTerminated();
            }
        };
    }
}
public static BatchASTFuture analyze(final BatchASTListener listener, final WildcardImportResolver importResolver,
final Set<String> libraryPaths,
final Set<String> sourcePaths, Set<Path> sourceFiles)
{
final String[] encodings = null;
final String[] bindingKeys = new String[0];
final ExecutorService executor = WindupExecutors.newFixedThreadPool(WindupExecutors.getDefaultThreadCount());
final FileASTRequestor requestor = new FileASTRequestor()
{
@Override
public void acceptAST(String sourcePath, CompilationUnit ast)
{
try
{
/*
* This super() call doesn't do anything, but we call it just to be nice, in case that ever changes.
*/
super.acceptAST(sourcePath, ast); // depends on control dependency: [try], data = [none]
ReferenceResolvingVisitor visitor = new ReferenceResolvingVisitor(importResolver, ast, sourcePath);
ast.accept(visitor); // depends on control dependency: [try], data = [none]
listener.processed(Paths.get(sourcePath), visitor.getJavaClassReferences()); // depends on control dependency: [try], data = [none]
}
catch (WindupStopException ex)
{
throw ex;
} // depends on control dependency: [catch], data = [none]
catch (Throwable t)
{
listener.failed(Paths.get(sourcePath), t);
} // depends on control dependency: [catch], data = [none]
}
};
List<List<String>> batches = createBatches(sourceFiles);
for (final List<String> batch : batches)
{
executor.submit(new Callable<Void>()
{
@Override
public Void call() throws Exception
{
ASTParser parser = ASTParser.newParser(AST.JLS8);
parser.setBindingsRecovery(false);
parser.setResolveBindings(true);
Map<String, String> options = JavaCore.getOptions();
JavaCore.setComplianceOptions(JavaCore.VERSION_1_8, options);
// these options seem to slightly reduce the number of times that JDT aborts on compilation errors
options.put(JavaCore.CORE_INCOMPLETE_CLASSPATH, "warning");
options.put(JavaCore.COMPILER_PB_ENUM_IDENTIFIER, "warning");
options.put(JavaCore.COMPILER_PB_FORBIDDEN_REFERENCE, "warning");
options.put(JavaCore.CORE_CIRCULAR_CLASSPATH, "warning");
options.put(JavaCore.COMPILER_PB_ASSERT_IDENTIFIER, "warning");
options.put(JavaCore.COMPILER_PB_NULL_SPECIFICATION_VIOLATION, "warning");
options.put(JavaCore.CORE_JAVA_BUILD_INVALID_CLASSPATH, "ignore");
options.put(JavaCore.COMPILER_PB_NULL_ANNOTATION_INFERENCE_CONFLICT, "warning");
options.put(JavaCore.CORE_OUTPUT_LOCATION_OVERLAPPING_ANOTHER_SOURCE, "warning");
options.put(JavaCore.CORE_JAVA_BUILD_DUPLICATE_RESOURCE, "warning");
parser.setCompilerOptions(options);
parser.setEnvironment(libraryPaths.toArray(new String[libraryPaths.size()]),
sourcePaths.toArray(new String[sourcePaths.size()]),
null,
true); // depends on control dependency: [for], data = [none]
parser.createASTs(batch.toArray(new String[batch.size()]), encodings, bindingKeys, requestor, null); // depends on control dependency: [for], data = [batch]
return null; // depends on control dependency: [for], data = [none]
}
});
}
executor.shutdown();
return new BatchASTFuture()
{
@Override
public boolean isDone()
{
return executor.isTerminated();
}
};
} } |
public class class_name {
    /**
     * Converts the per-locator Cassandra result futures into {@link MetricData}.
     * For each locator, its own IO helper decodes the futures into a
     * (locator, timestamp, value) table; the timestamp/value row is then turned
     * into {@link Points} and paired with the locator's unit string from the
     * metadata cache.
     *
     * @param resultSets   outstanding query futures, keyed by locator
     * @param locatorIO    the IO helper that issued each locator's queries
     * @param columnFamily the column family the rows were read from
     * @param range        the time range the queries covered
     * @return map from locator to its assembled MetricData
     */
    protected Map<Locator, MetricData> resultSetsToMetricData(Map<Locator, List<ResultSetFuture>> resultSets,
                Map<Locator, DAbstractMetricIO> locatorIO,
                String columnFamily,
                Range range) {
        MetadataCache metadataCache = MetadataCache.getInstance();
        // iterate through all ResultSetFuture
        Map<Locator, MetricData> locatorMetricDataMap = new HashMap<Locator, MetricData>();
        for (Map.Entry<Locator, List<ResultSetFuture>> entry : resultSets.entrySet() ) {
            Locator locator = entry.getKey();
            List<ResultSetFuture> futures = entry.getValue();
            DAbstractMetricIO io = locatorIO.get(locator);
            // get ResultSets to a Table of locator, timestamp, rollup
            Table<Locator, Long, Object> locatorTimestampRollup = io.toLocatorTimestampValue(futures, locator, columnFamily, range);
            Map<Long, Object> tsRollupMap = locatorTimestampRollup.row( locator );
            // convert to Points and MetricData
            Points points = convertToPoints( tsRollupMap );
            // create MetricData
            MetricData metricData = new MetricData( points, metadataCache.getUnitString( locator ) );
            locatorMetricDataMap.put( locator, metricData );
        }
        return locatorMetricDataMap;
    }
}
protected Map<Locator, MetricData> resultSetsToMetricData(Map<Locator, List<ResultSetFuture>> resultSets,
Map<Locator, DAbstractMetricIO> locatorIO,
String columnFamily,
Range range) {
MetadataCache metadataCache = MetadataCache.getInstance();
// iterate through all ResultSetFuture
Map<Locator, MetricData> locatorMetricDataMap = new HashMap<Locator, MetricData>();
for (Map.Entry<Locator, List<ResultSetFuture>> entry : resultSets.entrySet() ) {
Locator locator = entry.getKey();
List<ResultSetFuture> futures = entry.getValue();
DAbstractMetricIO io = locatorIO.get(locator);
// get ResultSets to a Table of locator, timestamp, rollup
Table<Locator, Long, Object> locatorTimestampRollup = io.toLocatorTimestampValue(futures, locator, columnFamily, range);
Map<Long, Object> tsRollupMap = locatorTimestampRollup.row( locator );
// convert to Points and MetricData
Points points = convertToPoints( tsRollupMap );
// create MetricData
MetricData metricData = new MetricData( points, metadataCache.getUnitString( locator ) );
locatorMetricDataMap.put( locator, metricData ); // depends on control dependency: [for], data = [none]
}
return locatorMetricDataMap;
} } |
public class class_name {
public static Integer isInteger(String value) {
Validate.notNull(value);
Validate.notEmpty(value);
Integer intValue = null;
try{
intValue = Integer.parseInt(value);
} catch(NumberFormatException e){
throw new TypeException("Integer");
}
return intValue;
} } | public class class_name {
public static Integer isInteger(String value) {
Validate.notNull(value);
Validate.notEmpty(value);
Integer intValue = null;
try{
intValue = Integer.parseInt(value); // depends on control dependency: [try], data = [none]
} catch(NumberFormatException e){
throw new TypeException("Integer");
} // depends on control dependency: [catch], data = [none]
return intValue;
} } |
public class class_name {
public String getDependencyFileVersion( Artifact artifact, Boolean useUniqueVersions )
{
if ( useUniqueVersions != null && useUniqueVersions )
{
return UniqueVersionsHelper.getUniqueVersion( artifact );
}
return artifact.getVersion();
} } | public class class_name {
public String getDependencyFileVersion( Artifact artifact, Boolean useUniqueVersions )
{
if ( useUniqueVersions != null && useUniqueVersions )
{
return UniqueVersionsHelper.getUniqueVersion( artifact ); // depends on control dependency: [if], data = [none]
}
return artifact.getVersion();
} } |
public class class_name {
public List<CmsFormatterChangeSet> getFormatterChangeSets() {
CmsADEConfigData currentConfig = this;
List<CmsFormatterChangeSet> result = Lists.newArrayList();
while (currentConfig != null) {
CmsFormatterChangeSet changes = currentConfig.getOwnFormatterChangeSet();
if (changes != null) {
result.add(changes);
}
currentConfig = currentConfig.parent();
}
Collections.reverse(result);
return result;
} } | public class class_name {
public List<CmsFormatterChangeSet> getFormatterChangeSets() {
CmsADEConfigData currentConfig = this;
List<CmsFormatterChangeSet> result = Lists.newArrayList();
while (currentConfig != null) {
CmsFormatterChangeSet changes = currentConfig.getOwnFormatterChangeSet();
if (changes != null) {
result.add(changes); // depends on control dependency: [if], data = [(changes]
}
currentConfig = currentConfig.parent(); // depends on control dependency: [while], data = [none]
}
Collections.reverse(result);
return result;
} } |
public class class_name {
public static boolean getIdentifierToken(final Parser parser, final char separator,
final char separatorReplace) {
boolean consumedChar = false;
while (parser.hasMore()) {
final char c = parser.peek();
if (c == separator) {
parser.appendToToken(separatorReplace);
parser.next();
consumedChar = true;
} else if (c != ';' && c != '[' && c != '<' && c != '>' && c != ':' && c != '/' && c != '.') {
parser.appendToToken(c);
parser.next();
consumedChar = true;
} else {
break;
}
}
return consumedChar;
} } | public class class_name {
public static boolean getIdentifierToken(final Parser parser, final char separator,
final char separatorReplace) {
boolean consumedChar = false;
while (parser.hasMore()) {
final char c = parser.peek();
if (c == separator) {
parser.appendToToken(separatorReplace); // depends on control dependency: [if], data = [none]
parser.next(); // depends on control dependency: [if], data = [none]
consumedChar = true; // depends on control dependency: [if], data = [none]
} else if (c != ';' && c != '[' && c != '<' && c != '>' && c != ':' && c != '/' && c != '.') {
parser.appendToToken(c); // depends on control dependency: [if], data = [(c]
parser.next(); // depends on control dependency: [if], data = [none]
consumedChar = true; // depends on control dependency: [if], data = [none]
} else {
break;
}
}
return consumedChar;
} } |
public class class_name {
    /**
     * Provisioning strategy: walks each Docker cloud (in configured or default
     * order) and each of its templates matching the label, provisioning nodes
     * whenever current queue demand exceeds available-plus-planned capacity.
     *
     * @param strategyState the provisioner's current label, load snapshot, and planned capacity
     * @return {@code PROVISIONING_COMPLETED} once capacity meets demand, otherwise
     *         {@code CONSULT_REMAINING_STRATEGIES}
     */
    @Nonnull
    @Override
    public NodeProvisioner.StrategyDecision apply(@Nonnull NodeProvisioner.StrategyState strategyState) {
        LOG.debug("Applying provisioning.");
        final Label label = strategyState.getLabel();
        LoadStatisticsSnapshot snapshot = strategyState.getSnapshot();
        List<DockerCloud> provisionClouds;
        DockerCloudOrder cloudOrder = dockerGlobalConfig().getCloudOrder();
        if (isNull(cloudOrder)) {
            // No explicit ordering configured: fall back to the default cloud order.
            provisionClouds = DEFAULT.getDockerClouds(label);
        } else {
            provisionClouds = cloudOrder.getDockerClouds(label);
        }
        for (DockerCloud dockerCloud : provisionClouds) {
            for (DockerSlaveTemplate template : dockerCloud.getTemplates(label)) {
                if (notAllowedStrategy(template)) {
                    continue;
                }
                // Capacity = executors available/connecting plus nodes already planned.
                int availableCapacity = snapshot.getAvailableExecutors() +
                        snapshot.getConnectingExecutors() +
                        strategyState.getAdditionalPlannedCapacity() +
                        strategyState.getPlannedCapacitySnapshot();
                int currentDemand = snapshot.getQueueLength();
                LOG.debug("Available capacity={}, currentDemand={}", availableCapacity, currentDemand);
                if (availableCapacity < currentDemand) {
                    // may happen that would be provisioned with other template
                    Collection<PlannedNode> plannedNodes = dockerCloud.provision(label, currentDemand - availableCapacity);
                    LOG.debug("Planned {} new nodes", plannedNodes.size());
                    strategyState.recordPendingLaunches(plannedNodes);
                    // FIXME calculate executors number?
                    // NOTE(review): counts one executor per planned node -- confirm.
                    availableCapacity += plannedNodes.size();
                    LOG.debug("After '{}' provisioning, available capacity={}, currentDemand={}",
                            dockerCloud, availableCapacity, currentDemand);
                }
                if (availableCapacity >= currentDemand) {
                    LOG.debug("Provisioning completed");
                    return NodeProvisioner.StrategyDecision.PROVISIONING_COMPLETED;
                } else {
                    LOG.debug("Provisioning not complete, trying next template");
                }
            }
            LOG.debug("Provisioning not complete, trying next YAD Cloud");
        }
        LOG.debug("Provisioning not complete, consulting remaining strategies");
        return NodeProvisioner.StrategyDecision.CONSULT_REMAINING_STRATEGIES;
    }
}
@Nonnull
@Override
public NodeProvisioner.StrategyDecision apply(@Nonnull NodeProvisioner.StrategyState strategyState) {
LOG.debug("Applying provisioning.");
final Label label = strategyState.getLabel();
LoadStatisticsSnapshot snapshot = strategyState.getSnapshot();
List<DockerCloud> provisionClouds;
DockerCloudOrder cloudOrder = dockerGlobalConfig().getCloudOrder();
if (isNull(cloudOrder)) {
provisionClouds = DEFAULT.getDockerClouds(label); // depends on control dependency: [if], data = [none]
} else {
provisionClouds = cloudOrder.getDockerClouds(label); // depends on control dependency: [if], data = [none]
}
for (DockerCloud dockerCloud : provisionClouds) {
for (DockerSlaveTemplate template : dockerCloud.getTemplates(label)) {
if (notAllowedStrategy(template)) {
continue;
}
int availableCapacity = snapshot.getAvailableExecutors() +
snapshot.getConnectingExecutors() +
strategyState.getAdditionalPlannedCapacity() +
strategyState.getPlannedCapacitySnapshot();
int currentDemand = snapshot.getQueueLength();
LOG.debug("Available capacity={}, currentDemand={}", availableCapacity, currentDemand); // depends on control dependency: [for], data = [none]
if (availableCapacity < currentDemand) {
// may happen that would be provisioned with other template
Collection<PlannedNode> plannedNodes = dockerCloud.provision(label, currentDemand - availableCapacity);
LOG.debug("Planned {} new nodes", plannedNodes.size()); // depends on control dependency: [if], data = [none]
strategyState.recordPendingLaunches(plannedNodes); // depends on control dependency: [if], data = [none]
// FIXME calculate executors number?
availableCapacity += plannedNodes.size(); // depends on control dependency: [if], data = [none]
LOG.debug("After '{}' provisioning, available capacity={}, currentDemand={}",
dockerCloud, availableCapacity, currentDemand); // depends on control dependency: [if], data = [none]
}
if (availableCapacity >= currentDemand) {
LOG.debug("Provisioning completed"); // depends on control dependency: [if], data = [none]
return NodeProvisioner.StrategyDecision.PROVISIONING_COMPLETED; // depends on control dependency: [if], data = [none]
} else {
LOG.debug("Provisioning not complete, trying next template"); // depends on control dependency: [if], data = [none]
}
}
LOG.debug("Provisioning not complete, trying next YAD Cloud"); // depends on control dependency: [for], data = [none]
}
LOG.debug("Provisioning not complete, consulting remaining strategies");
return NodeProvisioner.StrategyDecision.CONSULT_REMAINING_STRATEGIES;
} } |
public class class_name {
    /**
     * Returns the members inherited from package-private, non-linkable ancestor
     * classes in {@code visibleClasses}. Such parents are inaccessible in the
     * generated docs, so their members must be documented on this child class.
     *
     * @param configuration the doclet configuration used for the linkability check
     * @return the inherited members to document locally (possibly empty)
     */
    private List<ProgramElementDoc> getInheritedPackagePrivateMethods(Configuration configuration) {
        List<ProgramElementDoc> results = new ArrayList<>();
        for (ClassDoc currentClass : visibleClasses) {
            if (currentClass != classdoc &&
                currentClass.isPackagePrivate() &&
                !utils.isLinkable(currentClass, configuration)) {
                // Document these members in the child class because
                // the parent is inaccessible.
                results.addAll(getMembersFor(currentClass));
            }
        }
        return results;
    }
}
private List<ProgramElementDoc> getInheritedPackagePrivateMethods(Configuration configuration) {
List<ProgramElementDoc> results = new ArrayList<>();
for (ClassDoc currentClass : visibleClasses) {
if (currentClass != classdoc &&
currentClass.isPackagePrivate() &&
!utils.isLinkable(currentClass, configuration)) {
// Document these members in the child class because
// the parent is inaccessible.
results.addAll(getMembersFor(currentClass)); // depends on control dependency: [if], data = [(currentClass]
}
}
return results;
} } |
public class class_name {
private String convertMethod(List<String> methodList, boolean negative) {
boolean first = true;
StringBuffer methodSB = new StringBuffer();
for (String method : methodList) {
if (first) {
first = false;
if (negative) {
methodSB.append("!");
}
} else {
methodSB.append(",");
}
methodSB.append(method);
}
if (tc.isDebugEnabled())
Tr.debug(tc, "convertMethod : " + methodSB.toString());
return methodSB.toString();
} } | public class class_name {
private String convertMethod(List<String> methodList, boolean negative) {
boolean first = true;
StringBuffer methodSB = new StringBuffer();
for (String method : methodList) {
if (first) {
first = false; // depends on control dependency: [if], data = [none]
if (negative) {
methodSB.append("!"); // depends on control dependency: [if], data = [none]
}
} else {
methodSB.append(","); // depends on control dependency: [if], data = [none]
}
methodSB.append(method); // depends on control dependency: [for], data = [method]
}
if (tc.isDebugEnabled())
Tr.debug(tc, "convertMethod : " + methodSB.toString());
return methodSB.toString();
} } |
public class class_name {
public void marshall(DeploymentStyle deploymentStyle, ProtocolMarshaller protocolMarshaller) {
if (deploymentStyle == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(deploymentStyle.getDeploymentType(), DEPLOYMENTTYPE_BINDING);
protocolMarshaller.marshall(deploymentStyle.getDeploymentOption(), DEPLOYMENTOPTION_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(DeploymentStyle deploymentStyle, ProtocolMarshaller protocolMarshaller) {
if (deploymentStyle == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(deploymentStyle.getDeploymentType(), DEPLOYMENTTYPE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(deploymentStyle.getDeploymentOption(), DEPLOYMENTOPTION_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public OperaSettings merge(Capabilities capabilities) {
for (Map.Entry<String, ?> capability : capabilities.asMap().entrySet()) {
Capability capabilityReference = Capability.findCapability(capability.getKey());
if (options.containsKey(capabilityReference)) {
options.get(capabilityReference)
.setValue(capabilityReference.sanitize(capability.getValue()));
continue;
}
// Store the rest as surplus capabilities
surplusCapabilities.setCapability(capability.getKey(), capability.getValue());
}
initializeLogging();
return this;
} } | public class class_name {
public OperaSettings merge(Capabilities capabilities) {
for (Map.Entry<String, ?> capability : capabilities.asMap().entrySet()) {
Capability capabilityReference = Capability.findCapability(capability.getKey());
if (options.containsKey(capabilityReference)) {
options.get(capabilityReference)
.setValue(capabilityReference.sanitize(capability.getValue())); // depends on control dependency: [if], data = [none]
continue;
}
// Store the rest as surplus capabilities
surplusCapabilities.setCapability(capability.getKey(), capability.getValue()); // depends on control dependency: [for], data = [capability]
}
initializeLogging();
return this;
} } |
public class class_name {
private CharSequence recurseElement(Element element, int indexWithinParent, int numberOfSiblings,
ImageGetter imageGetter) {
Type type = element.getType();
boolean isOrderedList = false;
if (type == Type.LIST) {
String flagsStr = element.getAttribute("flags");
if (flagsStr != null) {
int flags = Integer.parseInt(flagsStr);
isOrderedList = (flags & Element.F_LIST_ORDERED) != 0;
if (isOrderedList) {
mOrderedListNumber.put(element, 1);
}
}
}
int size = element.size();
CharSequence[] spans = new CharSequence[size];
for (int i = 0; i < size; i++) {
spans[i] = recurseElement(element.children[i], i, size, imageGetter);
}
// Clean up after we're done
if (isOrderedList) {
mOrderedListNumber.remove(this);
}
CharSequence concat = TextUtils.concat(spans);
SpannableStringBuilder builder = new ReverseSpannableStringBuilder();
String text = element.getText();
if (element.size() == 0
&& element.getParent() != null
&& element.getParent().getType() != Type.BLOCK_CODE) {
text = text.replace('\n', ' ');
}
// Retrieve the image now so we know whether we're going to have something to display later
// If we don't, then show the alt text instead (if available).
Drawable imageDrawable = null;
if (type == Type.IMAGE && imageGetter != null && !TextUtils.isEmpty(element.getAttribute("link"))) {
imageDrawable = imageGetter.getDrawable(element.getAttribute("link"));
}
switch (type) {
case LIST:
if (element.getParent() != null
&& element.getParent().getType() == Type.LIST_ITEM) {
builder.append("\n");
}
break;
case LINEBREAK:
builder.append("\n");
break;
case LIST_ITEM:
builder.append(" ");
if (mOrderedListNumber.containsKey(element.getParent())) {
int number = mOrderedListNumber.get(element.getParent());
builder.append(Integer.toString(number) + ".");
mOrderedListNumber.put(element.getParent(), number + 1);
}
else {
builder.append(mOptions.mUnorderedListItem);
}
builder.append(" ");
break;
case AUTOLINK:
builder.append(element.getAttribute("link"));
break;
case HRULE:
// This ultimately gets drawn over by the line span, but
// we need something here or the span isn't even drawn.
builder.append("-");
break;
case IMAGE:
// Display alt text (or title text) if there is no image
if (imageDrawable == null) {
String show = element.getAttribute("alt");
if (TextUtils.isEmpty(show)) {
show = element.getAttribute("title");
}
if (!TextUtils.isEmpty(show)) {
show = "[" + show + "]";
builder.append(show);
}
}
else {
// Character to be replaced
builder.append("\uFFFC");
}
break;
}
builder.append(text);
builder.append(concat);
// Don't auto-append whitespace after last item in document. The 'numberOfSiblings'
// is the number of children the parent of the current element has (including the
// element itself), hence subtracting a number from that count gives us the index
// of the last child within the parent.
if (element.getParent() != null || indexWithinParent < (numberOfSiblings - 1)) {
if (type == Type.LIST_ITEM) {
if (element.size() == 0 || !element.children[element.size() - 1].isBlockElement()) {
builder.append("\n");
}
}
else if (element.isBlockElement() && type != Type.BLOCK_QUOTE) {
if (type == Type.LIST) {
// If this is a nested list, don't include newlines
if (element.getParent() == null || element.getParent().getType() != Type.LIST_ITEM) {
builder.append("\n");
}
}
else if (element.getParent() != null
&& element.getParent().getType() == Type.LIST_ITEM) {
// List items should never double-space their entries
builder.append("\n");
}
else {
builder.append("\n\n");
}
}
}
switch (type) {
case HEADER:
String levelStr = element.getAttribute("level");
int level = Integer.parseInt(levelStr);
setSpan(builder, new RelativeSizeSpan(mOptions.mHeaderSizes[level - 1]));
setSpan(builder, new StyleSpan(Typeface.BOLD));
break;
case LIST:
setBlockSpan(builder, new LeadingMarginSpan.Standard(mListItemIndent));
break;
case EMPHASIS:
setSpan(builder, new StyleSpan(Typeface.ITALIC));
break;
case DOUBLE_EMPHASIS:
setSpan(builder, new StyleSpan(Typeface.BOLD));
break;
case TRIPLE_EMPHASIS:
setSpan(builder, new StyleSpan(Typeface.BOLD_ITALIC));
break;
case BLOCK_CODE:
setSpan(builder, new LeadingMarginSpan.Standard(mCodeBlockIndent));
setSpan(builder, new TypefaceSpan("monospace"));
break;
case CODE_SPAN:
setSpan(builder, new TypefaceSpan("monospace"));
break;
case LINK:
case AUTOLINK:
String link = element.getAttribute("link");
if (!TextUtils.isEmpty(link) && Patterns.EMAIL_ADDRESS.matcher(link).matches()) {
link = "mailto:" + link;
}
setSpan(builder, new URLSpan(link));
break;
case BLOCK_QUOTE:
// We add two leading margin spans so that when the order is reversed,
// the QuoteSpan will always be in the same spot.
setBlockSpan(builder, new LeadingMarginSpan.Standard(mBlockQuoteIndent));
setBlockSpan(builder, new QuoteSpan(mOptions.mBlockQuoteColor));
setBlockSpan(builder, new LeadingMarginSpan.Standard(mBlockQuoteIndent));
setBlockSpan(builder, new StyleSpan(Typeface.ITALIC));
break;
case STRIKETHROUGH:
setSpan(builder, new StrikethroughSpan());
break;
case HRULE:
setSpan(builder, new HorizontalLineSpan(mOptions.mHruleColor, mHruleSize, mHruleTopBottomPadding));
break;
case IMAGE:
if (imageDrawable != null) {
setSpan(builder, new ImageSpan(imageDrawable));
}
break;
}
return builder;
} } | public class class_name {
private CharSequence recurseElement(Element element, int indexWithinParent, int numberOfSiblings,
ImageGetter imageGetter) {
Type type = element.getType();
boolean isOrderedList = false;
if (type == Type.LIST) {
String flagsStr = element.getAttribute("flags");
if (flagsStr != null) {
int flags = Integer.parseInt(flagsStr);
isOrderedList = (flags & Element.F_LIST_ORDERED) != 0; // depends on control dependency: [if], data = [none]
if (isOrderedList) {
mOrderedListNumber.put(element, 1); // depends on control dependency: [if], data = [none]
}
}
}
int size = element.size();
CharSequence[] spans = new CharSequence[size];
for (int i = 0; i < size; i++) {
spans[i] = recurseElement(element.children[i], i, size, imageGetter); // depends on control dependency: [for], data = [i]
}
// Clean up after we're done
if (isOrderedList) {
mOrderedListNumber.remove(this); // depends on control dependency: [if], data = [none]
}
CharSequence concat = TextUtils.concat(spans);
SpannableStringBuilder builder = new ReverseSpannableStringBuilder();
String text = element.getText();
if (element.size() == 0
&& element.getParent() != null
&& element.getParent().getType() != Type.BLOCK_CODE) {
text = text.replace('\n', ' '); // depends on control dependency: [if], data = [none]
}
// Retrieve the image now so we know whether we're going to have something to display later
// If we don't, then show the alt text instead (if available).
Drawable imageDrawable = null;
if (type == Type.IMAGE && imageGetter != null && !TextUtils.isEmpty(element.getAttribute("link"))) {
imageDrawable = imageGetter.getDrawable(element.getAttribute("link")); // depends on control dependency: [if], data = [none]
}
switch (type) {
case LIST:
if (element.getParent() != null
&& element.getParent().getType() == Type.LIST_ITEM) {
builder.append("\n"); // depends on control dependency: [if], data = [none]
}
break;
case LINEBREAK:
builder.append("\n");
break;
case LIST_ITEM:
builder.append(" ");
if (mOrderedListNumber.containsKey(element.getParent())) {
int number = mOrderedListNumber.get(element.getParent());
builder.append(Integer.toString(number) + "."); // depends on control dependency: [if], data = [none]
mOrderedListNumber.put(element.getParent(), number + 1); // depends on control dependency: [if], data = [none]
}
else {
builder.append(mOptions.mUnorderedListItem); // depends on control dependency: [if], data = [none]
}
builder.append(" ");
break;
case AUTOLINK:
builder.append(element.getAttribute("link"));
break;
case HRULE:
// This ultimately gets drawn over by the line span, but
// we need something here or the span isn't even drawn.
builder.append("-");
break;
case IMAGE:
// Display alt text (or title text) if there is no image
if (imageDrawable == null) {
String show = element.getAttribute("alt");
if (TextUtils.isEmpty(show)) {
show = element.getAttribute("title"); // depends on control dependency: [if], data = [none]
}
if (!TextUtils.isEmpty(show)) {
show = "[" + show + "]"; // depends on control dependency: [if], data = [none]
builder.append(show); // depends on control dependency: [if], data = [none]
}
}
else {
// Character to be replaced
builder.append("\uFFFC"); // depends on control dependency: [if], data = [none]
}
break;
}
builder.append(text);
builder.append(concat);
// Don't auto-append whitespace after last item in document. The 'numberOfSiblings'
// is the number of children the parent of the current element has (including the
// element itself), hence subtracting a number from that count gives us the index
// of the last child within the parent.
if (element.getParent() != null || indexWithinParent < (numberOfSiblings - 1)) {
if (type == Type.LIST_ITEM) {
if (element.size() == 0 || !element.children[element.size() - 1].isBlockElement()) {
builder.append("\n"); // depends on control dependency: [if], data = [none]
}
}
else if (element.isBlockElement() && type != Type.BLOCK_QUOTE) {
if (type == Type.LIST) {
// If this is a nested list, don't include newlines
if (element.getParent() == null || element.getParent().getType() != Type.LIST_ITEM) {
builder.append("\n"); // depends on control dependency: [if], data = [none]
}
}
else if (element.getParent() != null
&& element.getParent().getType() == Type.LIST_ITEM) {
// List items should never double-space their entries
builder.append("\n"); // depends on control dependency: [if], data = [none]
}
else {
builder.append("\n\n"); // depends on control dependency: [if], data = [none]
}
}
}
switch (type) {
case HEADER:
String levelStr = element.getAttribute("level");
int level = Integer.parseInt(levelStr);
setSpan(builder, new RelativeSizeSpan(mOptions.mHeaderSizes[level - 1]));
setSpan(builder, new StyleSpan(Typeface.BOLD));
break;
case LIST:
setBlockSpan(builder, new LeadingMarginSpan.Standard(mListItemIndent));
break;
case EMPHASIS:
setSpan(builder, new StyleSpan(Typeface.ITALIC));
break;
case DOUBLE_EMPHASIS:
setSpan(builder, new StyleSpan(Typeface.BOLD));
break;
case TRIPLE_EMPHASIS:
setSpan(builder, new StyleSpan(Typeface.BOLD_ITALIC));
break;
case BLOCK_CODE:
setSpan(builder, new LeadingMarginSpan.Standard(mCodeBlockIndent));
setSpan(builder, new TypefaceSpan("monospace"));
break;
case CODE_SPAN:
setSpan(builder, new TypefaceSpan("monospace"));
break;
case LINK:
case AUTOLINK:
String link = element.getAttribute("link");
if (!TextUtils.isEmpty(link) && Patterns.EMAIL_ADDRESS.matcher(link).matches()) {
link = "mailto:" + link; // depends on control dependency: [if], data = [none]
}
setSpan(builder, new URLSpan(link));
break;
case BLOCK_QUOTE:
// We add two leading margin spans so that when the order is reversed,
// the QuoteSpan will always be in the same spot.
setBlockSpan(builder, new LeadingMarginSpan.Standard(mBlockQuoteIndent));
setBlockSpan(builder, new QuoteSpan(mOptions.mBlockQuoteColor));
setBlockSpan(builder, new LeadingMarginSpan.Standard(mBlockQuoteIndent));
setBlockSpan(builder, new StyleSpan(Typeface.ITALIC));
break;
case STRIKETHROUGH:
setSpan(builder, new StrikethroughSpan());
break;
case HRULE:
setSpan(builder, new HorizontalLineSpan(mOptions.mHruleColor, mHruleSize, mHruleTopBottomPadding));
break;
case IMAGE:
if (imageDrawable != null) {
setSpan(builder, new ImageSpan(imageDrawable)); // depends on control dependency: [if], data = [(imageDrawable]
}
break;
}
return builder;
} } |
public class class_name {
public void clearLocalPrefixes() {
final Map<Integer, PrefixConfig> prefixConfigs = getPrefixConfigs();
for (final PrefixConfig config : prefixConfigs.values()) {
config.setPrefix(null);
}
} } | public class class_name {
public void clearLocalPrefixes() {
final Map<Integer, PrefixConfig> prefixConfigs = getPrefixConfigs();
for (final PrefixConfig config : prefixConfigs.values()) {
config.setPrefix(null); // depends on control dependency: [for], data = [config]
}
} } |
public class class_name {
private static void insertionSortReverse(double[] keys, int[] vals, final int start, final int end) {
// Classic insertion sort.
for(int i = start + 1; i < end; i++) {
for(int j = i; j > start; j--) {
if(keys[j] <= keys[j - 1]) {
break;
}
swap(keys, vals, j, j - 1);
}
}
} } | public class class_name {
private static void insertionSortReverse(double[] keys, int[] vals, final int start, final int end) {
// Classic insertion sort.
for(int i = start + 1; i < end; i++) {
for(int j = i; j > start; j--) {
if(keys[j] <= keys[j - 1]) {
break;
}
swap(keys, vals, j, j - 1); // depends on control dependency: [for], data = [j]
}
}
} } |
public class class_name {
protected void selectBoundaryCorners() {
List<Point2D_F64> layout = detector.getLayout();
Polygon2D_F64 hull = new Polygon2D_F64();
UtilPolygons2D_F64.convexHull(layout,hull);
UtilPolygons2D_F64.removeAlmostParallel(hull,0.02);
boundaryIndexes = new int[hull.size()];
for (int i = 0; i < hull.size(); i++) {
Point2D_F64 h = hull.get(i);
boolean matched = false;
for (int j = 0; j < layout.size(); j++) {
if( h.isIdentical(layout.get(j),1e-6)) {
matched = true;
boundaryIndexes[i] = j;
break;
}
}
if( !matched )
throw new RuntimeException("Bug!");
}
} } | public class class_name {
protected void selectBoundaryCorners() {
List<Point2D_F64> layout = detector.getLayout();
Polygon2D_F64 hull = new Polygon2D_F64();
UtilPolygons2D_F64.convexHull(layout,hull);
UtilPolygons2D_F64.removeAlmostParallel(hull,0.02);
boundaryIndexes = new int[hull.size()];
for (int i = 0; i < hull.size(); i++) {
Point2D_F64 h = hull.get(i);
boolean matched = false;
for (int j = 0; j < layout.size(); j++) {
if( h.isIdentical(layout.get(j),1e-6)) {
matched = true; // depends on control dependency: [if], data = [none]
boundaryIndexes[i] = j; // depends on control dependency: [if], data = [none]
break;
}
}
if( !matched )
throw new RuntimeException("Bug!");
}
} } |
public class class_name {
List<String> findInPackage(Test test, String packageName) {
List<String> localClsssOrPkgs = new ArrayList<String>();
packageName = packageName.replace('.', '/');
Enumeration<URL> urls;
try {
urls = classloader.getResources(packageName);
// test for empty
if (!urls.hasMoreElements())
{
log.warn("Unable to find any resources for package '" + packageName + "'");
}
}
catch (IOException ioe) {
log.warn("Could not read package: " + packageName);
return localClsssOrPkgs;
}
return findInPackageWithUrls(test, packageName, urls);
} } | public class class_name {
List<String> findInPackage(Test test, String packageName) {
List<String> localClsssOrPkgs = new ArrayList<String>();
packageName = packageName.replace('.', '/');
Enumeration<URL> urls;
try {
urls = classloader.getResources(packageName); // depends on control dependency: [try], data = [none]
// test for empty
if (!urls.hasMoreElements())
{
log.warn("Unable to find any resources for package '" + packageName + "'"); // depends on control dependency: [if], data = [none]
}
}
catch (IOException ioe) {
log.warn("Could not read package: " + packageName);
return localClsssOrPkgs;
} // depends on control dependency: [catch], data = [none]
return findInPackageWithUrls(test, packageName, urls);
} } |
public class class_name {
public static List<FieldInfo> processDefaultValue(List<Field> fields, boolean ignoreNoAnnotation,
boolean isZipZap) {
if (fields == null) {
return null;
}
List<FieldInfo> ret = new ArrayList<FieldInfo>(fields.size());
int maxOrder = 0;
List<FieldInfo> unorderFields = new ArrayList<FieldInfo>(fields.size());
Set<Integer> orders = new HashSet<Integer>();
for (Field field : fields) {
Ignore ignore = field.getAnnotation(Ignore.class);
if (ignore != null) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Field name '{}' marked @Ignore annotation will be ignored.", field.getName());
}
continue;
}
Protobuf protobuf = field.getAnnotation(Protobuf.class);
if (protobuf == null && !ignoreNoAnnotation) {
throw new RuntimeException("Field '" + field.getName() + "' has no @Protobuf annotation");
}
// check field is support for protocol buffer
// any array except byte array is not support
String simpleName = field.getType().getName();
if (simpleName.startsWith("[")) {
if ((!simpleName.equals(byte[].class.getName())) && (!simpleName.equals(Byte[].class.getName()))) {
throw new RuntimeException("Array type of field '" + field.getName() + "' on class '"
+ field.getDeclaringClass().getName() + "' is not support, please use List instead.");
}
}
FieldInfo fieldInfo = new FieldInfo(field);
FieldType annFieldType = FieldType.DEFAULT;
int order = -1;
if (protobuf != null) {
fieldInfo.setRequired(protobuf.required());
fieldInfo.setDescription(protobuf.description());
annFieldType = protobuf.fieldType();
order = protobuf.order();
} else {
fieldInfo.setRequired(false);
}
// process type
if (annFieldType == FieldType.DEFAULT) {
Class fieldTypeClass = field.getType();
// if list
boolean isList = fieldInfo.isList();
if (isList) {
fieldTypeClass = fieldInfo.getGenericKeyType();
}
FieldType fieldType = TYPE_MAPPING.get(fieldTypeClass);
if (fieldType == null) {
// check if type is enum
if (Enum.class.isAssignableFrom(fieldTypeClass)) {
fieldType = FieldType.ENUM;
} else if (fieldInfo.isMap()) {
fieldType = FieldType.MAP;
} else {
fieldType = FieldType.OBJECT;
}
}
// check if enable zagzip
if (isZipZap) {
if (fieldType == FieldType.INT32) {
fieldType = FieldType.SINT32; // to convert to sint32 to enable zagzip
} else if (fieldType == FieldType.INT64) {
fieldType = FieldType.SINT64; // to convert to sint64 to enable zagzip
}
}
fieldInfo.setFieldType(fieldType);
} else {
fieldInfo.setFieldType(annFieldType);
}
if (order > 0) {
if (orders.contains(order)) {
throw new RuntimeException(
"order id '" + order + "' from field name '" + field.getName() + "' is duplicate");
}
orders.add(order);
fieldInfo.setOrder(order);
if (order > maxOrder) {
maxOrder = order;
}
} else {
unorderFields.add(fieldInfo);
}
if (fieldInfo.isList() && (fieldInfo.getFieldType().isPrimitive() || fieldInfo.getFieldType().isEnum())) {
Packed packed = field.getAnnotation(Packed.class);
if (packed == null) {
fieldInfo.setPacked(true);
} else {
fieldInfo.setPacked(packed.value());
}
}
ret.add(fieldInfo);
}
if (unorderFields.isEmpty()) {
return ret;
}
for (FieldInfo fieldInfo : unorderFields) {
maxOrder++;
fieldInfo.setOrder(maxOrder);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(
"Field '{}' from {} with @Protobuf annotation but not set order or order is 0,"
+ " It will set order value to {}",
fieldInfo.getField().getName(), fieldInfo.getField().getDeclaringClass().getName(), maxOrder);
}
}
return ret;
} } | public class class_name {
public static List<FieldInfo> processDefaultValue(List<Field> fields, boolean ignoreNoAnnotation,
boolean isZipZap) {
if (fields == null) {
return null;
// depends on control dependency: [if], data = [none]
}
List<FieldInfo> ret = new ArrayList<FieldInfo>(fields.size());
int maxOrder = 0;
List<FieldInfo> unorderFields = new ArrayList<FieldInfo>(fields.size());
Set<Integer> orders = new HashSet<Integer>();
for (Field field : fields) {
Ignore ignore = field.getAnnotation(Ignore.class);
if (ignore != null) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Field name '{}' marked @Ignore annotation will be ignored.", field.getName());
// depends on control dependency: [if], data = [none]
}
continue;
}
Protobuf protobuf = field.getAnnotation(Protobuf.class);
if (protobuf == null && !ignoreNoAnnotation) {
throw new RuntimeException("Field '" + field.getName() + "' has no @Protobuf annotation");
}
// check field is support for protocol buffer
// any array except byte array is not support
String simpleName = field.getType().getName();
if (simpleName.startsWith("[")) {
if ((!simpleName.equals(byte[].class.getName())) && (!simpleName.equals(Byte[].class.getName()))) {
throw new RuntimeException("Array type of field '" + field.getName() + "' on class '"
+ field.getDeclaringClass().getName() + "' is not support, please use List instead.");
}
}
FieldInfo fieldInfo = new FieldInfo(field);
FieldType annFieldType = FieldType.DEFAULT;
int order = -1;
if (protobuf != null) {
fieldInfo.setRequired(protobuf.required());
// depends on control dependency: [if], data = [(protobuf]
fieldInfo.setDescription(protobuf.description());
// depends on control dependency: [if], data = [(protobuf]
annFieldType = protobuf.fieldType();
// depends on control dependency: [if], data = [none]
order = protobuf.order();
// depends on control dependency: [if], data = [none]
} else {
fieldInfo.setRequired(false);
// depends on control dependency: [if], data = [none]
}
// process type
if (annFieldType == FieldType.DEFAULT) {
Class fieldTypeClass = field.getType();
// if list
boolean isList = fieldInfo.isList();
if (isList) {
fieldTypeClass = fieldInfo.getGenericKeyType();
// depends on control dependency: [if], data = [none]
}
FieldType fieldType = TYPE_MAPPING.get(fieldTypeClass);
if (fieldType == null) {
// check if type is enum
if (Enum.class.isAssignableFrom(fieldTypeClass)) {
fieldType = FieldType.ENUM;
// depends on control dependency: [if], data = [none]
} else if (fieldInfo.isMap()) {
fieldType = FieldType.MAP;
// depends on control dependency: [if], data = [none]
} else {
fieldType = FieldType.OBJECT;
// depends on control dependency: [if], data = [none]
}
}
// check if enable zagzip
if (isZipZap) {
if (fieldType == FieldType.INT32) {
fieldType = FieldType.SINT32; // to convert to sint32 to enable zagzip
// depends on control dependency: [if], data = [none]
} else if (fieldType == FieldType.INT64) {
fieldType = FieldType.SINT64; // to convert to sint64 to enable zagzip
// depends on control dependency: [if], data = [none]
}
}
fieldInfo.setFieldType(fieldType);
// depends on control dependency: [if], data = [none]
} else {
fieldInfo.setFieldType(annFieldType);
// depends on control dependency: [if], data = [(annFieldType]
}
if (order > 0) {
if (orders.contains(order)) {
throw new RuntimeException(
"order id '" + order + "' from field name '" + field.getName() + "' is duplicate");
}
orders.add(order);
// depends on control dependency: [if], data = [(order]
fieldInfo.setOrder(order);
// depends on control dependency: [if], data = [(order]
if (order > maxOrder) {
maxOrder = order;
// depends on control dependency: [if], data = [none]
}
} else {
unorderFields.add(fieldInfo);
// depends on control dependency: [if], data = [none]
}
if (fieldInfo.isList() && (fieldInfo.getFieldType().isPrimitive() || fieldInfo.getFieldType().isEnum())) {
Packed packed = field.getAnnotation(Packed.class);
if (packed == null) {
fieldInfo.setPacked(true);
// depends on control dependency: [if], data = [none]
} else {
fieldInfo.setPacked(packed.value());
// depends on control dependency: [if], data = [(packed]
}
}
ret.add(fieldInfo);
// depends on control dependency: [for], data = [field]
}
if (unorderFields.isEmpty()) {
return ret;
// depends on control dependency: [if], data = [none]
}
for (FieldInfo fieldInfo : unorderFields) {
maxOrder++;
// depends on control dependency: [for], data = [none]
fieldInfo.setOrder(maxOrder);
// depends on control dependency: [for], data = [fieldInfo]
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(
"Field '{}' from {} with @Protobuf annotation but not set order or order is 0,"
+ " It will set order value to {}",
fieldInfo.getField().getName(), fieldInfo.getField().getDeclaringClass().getName(), maxOrder);
// depends on control dependency: [if], data = [none]
}
}
return ret;
} } |
public class class_name {
void setMasterAddress(Address master) {
assert lock.isHeldByCurrentThread() : "Called without holding cluster service lock!";
if (logger.isFineEnabled()) {
logger.fine("Setting master address to " + master);
}
masterAddress = master;
} } | public class class_name {
void setMasterAddress(Address master) {
assert lock.isHeldByCurrentThread() : "Called without holding cluster service lock!";
if (logger.isFineEnabled()) {
logger.fine("Setting master address to " + master); // depends on control dependency: [if], data = [none]
}
masterAddress = master;
} } |
public class class_name {
public static <L, R> Either<L, R> create(L l, R r) {
if (l == null && r != null) {
return createRight(r);
} else if (l != null && r == null) {
return createLeft(l);
} else {
if (l == null) {
throw new IllegalArgumentException("Both arguments were null.");
} else {
throw new IllegalArgumentException("Both arguments were non-null: " + l + " " + r);
}
}
} } | public class class_name {
public static <L, R> Either<L, R> create(L l, R r) {
if (l == null && r != null) {
return createRight(r); // depends on control dependency: [if], data = [none]
} else if (l != null && r == null) {
return createLeft(l); // depends on control dependency: [if], data = [none]
} else {
if (l == null) {
throw new IllegalArgumentException("Both arguments were null.");
} else {
throw new IllegalArgumentException("Both arguments were non-null: " + l + " " + r);
}
}
} } |
public class class_name {
public final Object get2d(Object key1, Object key2) {
AssociativeArray tmp = internalData.get(key1);
if(tmp == null) {
return null;
}
return tmp.internalData.get(key2);
} } | public class class_name {
public final Object get2d(Object key1, Object key2) {
AssociativeArray tmp = internalData.get(key1);
if(tmp == null) {
return null; // depends on control dependency: [if], data = [none]
}
return tmp.internalData.get(key2);
} } |
public class class_name {
public Map<String, Object> getFileMetaData() {
if (file == null) {
file = new HashMap<String, Object>();
}
Map<String, Object> meta;
if (!file.containsKey(FILE_META)) {
meta = new HashMap<String, Object>();
meta.put(FILE_SIZE, actualFile.getSize());
} else {
meta = (Map<String, Object>) file.get(FILE_META);
}
return meta;
} } | public class class_name {
public Map<String, Object> getFileMetaData() {
if (file == null) {
file = new HashMap<String, Object>(); // depends on control dependency: [if], data = [none]
}
Map<String, Object> meta;
if (!file.containsKey(FILE_META)) {
meta = new HashMap<String, Object>(); // depends on control dependency: [if], data = [none]
meta.put(FILE_SIZE, actualFile.getSize()); // depends on control dependency: [if], data = [none]
} else {
meta = (Map<String, Object>) file.get(FILE_META); // depends on control dependency: [if], data = [none]
}
return meta;
} } |
public class class_name {
public boolean isValid(String value) {
if (value == null) {
return false;
}
for (int i = 0; i < patterns.length; i++) {
if (patterns[i].matcher(value).matches()) {
return true;
}
}
return false;
} } | public class class_name {
public boolean isValid(String value) {
if (value == null) {
return false; // depends on control dependency: [if], data = [none]
}
for (int i = 0; i < patterns.length; i++) {
if (patterns[i].matcher(value).matches()) {
return true; // depends on control dependency: [if], data = [none]
}
}
return false;
} } |
public class class_name {
void proceedTLSReceived() throws Exception {
SSLContext context = this.config.getCustomSSLContext();
KeyStore ks = null;
KeyManager[] kms = null;
PasswordCallback pcb = null;
if (config.getCallbackHandler() == null) {
ks = null;
} else if (context == null) {
if (config.getKeystoreType().equals("NONE")) {
ks = null;
pcb = null;
} else if (config.getKeystoreType().equals("PKCS11")) {
try {
Constructor<?> c = Class.forName(
"sun.security.pkcs11.SunPKCS11").getConstructor(
InputStream.class);
String pkcs11Config = "name = SmartCard\nlibrary = "
+ config.getPKCS11Library();
ByteArrayInputStream config = new ByteArrayInputStream(
pkcs11Config.getBytes());
Provider p = (Provider) c.newInstance(config);
Security.addProvider(p);
ks = KeyStore.getInstance("PKCS11", p);
pcb = new PasswordCallback("PKCS11 Password: ", false);
this.config.getCallbackHandler().handle(
new Callback[] { pcb });
ks.load(null, pcb.getPassword());
} catch (Exception e) {
ks = null;
pcb = null;
}
} else if (config.getKeystoreType().equals("Apple")) {
ks = KeyStore.getInstance("KeychainStore", "Apple");
ks.load(null, null);
// pcb = new PasswordCallback("Apple Keychain",false);
// pcb.setPassword(null);
} else {
ks = KeyStore.getInstance(config.getKeystoreType());
try {
pcb = new PasswordCallback("Keystore Password: ", false);
config.getCallbackHandler().handle(new Callback[] { pcb });
ks.load(new FileInputStream(config.getKeystorePath()),
pcb.getPassword());
} catch (Exception e) {
ks = null;
pcb = null;
}
}
KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
try {
if (pcb == null) {
kmf.init(ks, null);
} else {
kmf.init(ks, pcb.getPassword());
pcb.clearPassword();
}
kms = kmf.getKeyManagers();
} catch (NullPointerException npe) {
kms = null;
}
}
// Verify certificate presented by the server
if (context == null) {
context = SSLContext.getInstance("TLS");
context.init(kms, null, new java.security.SecureRandom());
}
Socket plain = socket;
// Secure the plain connection
socket = context.getSocketFactory().createSocket(plain,
plain.getInetAddress().getHostAddress(), plain.getPort(), true);
socket.setSoTimeout(0);
socket.setKeepAlive(true);
// Initialize the reader and writer with the new secured version
initReaderAndWriter();
// Proceed to do the handshake
((SSLSocket) socket).startHandshake();
// if (((SSLSocket) socket).getWantClientAuth()) {
// System.err.println("Connection wants client auth");
// }
// else if (((SSLSocket) socket).getNeedClientAuth()) {
// System.err.println("Connection needs client auth");
// }
// else {
// System.err.println("Connection does not require client auth");
// }
// Set that TLS was successful
usingTLS = true;
// Set the new writer to use
packetWriter.setWriter(writer);
// Send a new opening stream to the server
packetWriter.openStream();
} } | public class class_name {
void proceedTLSReceived() throws Exception {
SSLContext context = this.config.getCustomSSLContext();
KeyStore ks = null;
KeyManager[] kms = null;
PasswordCallback pcb = null;
if (config.getCallbackHandler() == null) {
ks = null;
} else if (context == null) {
if (config.getKeystoreType().equals("NONE")) {
ks = null;
pcb = null;
} else if (config.getKeystoreType().equals("PKCS11")) {
try {
Constructor<?> c = Class.forName(
"sun.security.pkcs11.SunPKCS11").getConstructor(
InputStream.class);
String pkcs11Config = "name = SmartCard\nlibrary = "
+ config.getPKCS11Library();
ByteArrayInputStream config = new ByteArrayInputStream(
pkcs11Config.getBytes());
Provider p = (Provider) c.newInstance(config);
Security.addProvider(p); // depends on control dependency: [try], data = [none]
ks = KeyStore.getInstance("PKCS11", p); // depends on control dependency: [try], data = [none]
pcb = new PasswordCallback("PKCS11 Password: ", false); // depends on control dependency: [try], data = [none]
this.config.getCallbackHandler().handle(
new Callback[] { pcb }); // depends on control dependency: [try], data = [none]
ks.load(null, pcb.getPassword()); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
ks = null;
pcb = null;
} // depends on control dependency: [catch], data = [none]
} else if (config.getKeystoreType().equals("Apple")) {
ks = KeyStore.getInstance("KeychainStore", "Apple");
ks.load(null, null);
// pcb = new PasswordCallback("Apple Keychain",false);
// pcb.setPassword(null);
} else {
ks = KeyStore.getInstance(config.getKeystoreType());
try {
pcb = new PasswordCallback("Keystore Password: ", false);
config.getCallbackHandler().handle(new Callback[] { pcb });
ks.load(new FileInputStream(config.getKeystorePath()),
pcb.getPassword());
} catch (Exception e) {
ks = null;
pcb = null;
}
}
KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
try {
if (pcb == null) {
kmf.init(ks, null);
} else {
kmf.init(ks, pcb.getPassword());
pcb.clearPassword();
}
kms = kmf.getKeyManagers();
} catch (NullPointerException npe) {
kms = null;
}
}
// Verify certificate presented by the server
if (context == null) {
context = SSLContext.getInstance("TLS");
context.init(kms, null, new java.security.SecureRandom());
}
Socket plain = socket;
// Secure the plain connection
socket = context.getSocketFactory().createSocket(plain,
plain.getInetAddress().getHostAddress(), plain.getPort(), true);
socket.setSoTimeout(0);
socket.setKeepAlive(true);
// Initialize the reader and writer with the new secured version
initReaderAndWriter();
// Proceed to do the handshake
((SSLSocket) socket).startHandshake();
// if (((SSLSocket) socket).getWantClientAuth()) {
// System.err.println("Connection wants client auth");
// }
// else if (((SSLSocket) socket).getNeedClientAuth()) {
// System.err.println("Connection needs client auth");
// }
// else {
// System.err.println("Connection does not require client auth");
// }
// Set that TLS was successful
usingTLS = true;
// Set the new writer to use
packetWriter.setWriter(writer);
// Send a new opening stream to the server
packetWriter.openStream();
} } |
public class class_name {
public byte[] initDelegation(GSSCredential credential,
Oid mechanism,
int lifetime,
byte[] buf, int off, int len)
throws GSSException {
logger.debug("Enter initDelegation: " + delegationState);
if (mechanism != null && !mechanism.equals(getMech())) {
throw new GSSException(GSSException.BAD_MECH);
}
if (this.gssMode != GSIConstants.MODE_SSL && buf != null && len > 0) {
buf = unwrap(buf, off, len);
off = 0;
len = buf.length;
}
byte [] token = null;
switch (delegationState) {
case DELEGATION_START:
this.delegationFinished = false;
token = DELEGATION_TOKEN;
this.delegationState = DELEGATION_SIGN_CERT;
break;
case DELEGATION_SIGN_CERT:
if (credential == null) {
// get default credential
GSSManager manager = new GlobusGSSManagerImpl();
credential = manager.createCredential(GSSCredential.INITIATE_AND_ACCEPT);
}
if (!(credential instanceof GlobusGSSCredentialImpl)) {
throw new GSSException(GSSException.DEFECTIVE_CREDENTIAL);
}
X509Credential cred =
((GlobusGSSCredentialImpl)credential).getX509Credential();
X509Certificate [] chain = cred.getCertificateChain();
int time = (lifetime == GSSCredential.DEFAULT_LIFETIME) ? -1 : lifetime;
ByteArrayInputStream inData = null;
ByteArrayOutputStream out = null;
try {
inData = new ByteArrayInputStream(buf, off, len);
X509Certificate cert =
this.certFactory.createCertificate(inData,
chain[0],
cred.getPrivateKey(),
time,
/*DEL
getDelegationType(chain[0]));
*/
BouncyCastleCertProcessingFactory.decideProxyType(chain[0], this.delegationType));
out = new ByteArrayOutputStream();
out.write(cert.getEncoded());
for (int i=0;i<chain.length;i++) {
out.write(chain[i].getEncoded());
}
token = out.toByteArray();
} catch (Exception e) {
throw new GlobusGSSException(GSSException.FAILURE, e);
}finally{
if (inData != null) {
try {
inData.close();
} catch (Exception e) {
logger.warn("Unable to close stream.");
}
}
if (out != null) {
try {
out.close();
} catch (Exception e) {
logger.warn("Unable to close stream.");
}
}
}
this.delegationState = DELEGATION_START;
this.delegationFinished = true;
break;
default:
throw new GSSException(GSSException.FAILURE);
}
logger.debug("Exit initDelegation");
if (this.gssMode != GSIConstants.MODE_SSL && token != null) {
// XXX: Why wrap() only when not in MODE_SSL?
return wrap(token, 0, token.length);
} else {
return token;
}
} } | public class class_name {
public byte[] initDelegation(GSSCredential credential,
Oid mechanism,
int lifetime,
byte[] buf, int off, int len)
throws GSSException {
logger.debug("Enter initDelegation: " + delegationState);
if (mechanism != null && !mechanism.equals(getMech())) {
throw new GSSException(GSSException.BAD_MECH);
}
if (this.gssMode != GSIConstants.MODE_SSL && buf != null && len > 0) {
buf = unwrap(buf, off, len);
off = 0;
len = buf.length;
}
byte [] token = null;
switch (delegationState) {
case DELEGATION_START:
this.delegationFinished = false;
token = DELEGATION_TOKEN;
this.delegationState = DELEGATION_SIGN_CERT;
break;
case DELEGATION_SIGN_CERT:
if (credential == null) {
// get default credential
GSSManager manager = new GlobusGSSManagerImpl();
credential = manager.createCredential(GSSCredential.INITIATE_AND_ACCEPT); // depends on control dependency: [if], data = [none]
}
if (!(credential instanceof GlobusGSSCredentialImpl)) {
throw new GSSException(GSSException.DEFECTIVE_CREDENTIAL);
}
X509Credential cred =
((GlobusGSSCredentialImpl)credential).getX509Credential();
X509Certificate [] chain = cred.getCertificateChain();
int time = (lifetime == GSSCredential.DEFAULT_LIFETIME) ? -1 : lifetime;
ByteArrayInputStream inData = null;
ByteArrayOutputStream out = null;
try {
inData = new ByteArrayInputStream(buf, off, len); // depends on control dependency: [try], data = [none]
X509Certificate cert =
this.certFactory.createCertificate(inData,
chain[0],
cred.getPrivateKey(),
time,
/*DEL
getDelegationType(chain[0]));
*/
BouncyCastleCertProcessingFactory.decideProxyType(chain[0], this.delegationType));
out = new ByteArrayOutputStream(); // depends on control dependency: [try], data = [none]
out.write(cert.getEncoded()); // depends on control dependency: [try], data = [none]
for (int i=0;i<chain.length;i++) {
out.write(chain[i].getEncoded()); // depends on control dependency: [for], data = [i]
}
token = out.toByteArray(); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new GlobusGSSException(GSSException.FAILURE, e);
}finally{ // depends on control dependency: [catch], data = [none]
if (inData != null) {
try {
inData.close(); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
logger.warn("Unable to close stream.");
} // depends on control dependency: [catch], data = [none]
}
if (out != null) {
try {
out.close(); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
logger.warn("Unable to close stream.");
} // depends on control dependency: [catch], data = [none]
}
}
this.delegationState = DELEGATION_START;
this.delegationFinished = true;
break;
default:
throw new GSSException(GSSException.FAILURE);
}
logger.debug("Exit initDelegation");
if (this.gssMode != GSIConstants.MODE_SSL && token != null) {
// XXX: Why wrap() only when not in MODE_SSL?
return wrap(token, 0, token.length);
} else {
return token;
}
} } |
public class class_name {
private static List<MultiWorkUnit> bestFitDecreasingBinPacking(List<WorkUnit> workUnits, double avgGroupSize) {
// Sort workunits by data size desc
Collections.sort(workUnits, LOAD_DESC_COMPARATOR);
PriorityQueue<MultiWorkUnit> pQueue = new PriorityQueue<>(workUnits.size(), LOAD_DESC_COMPARATOR);
for (WorkUnit workUnit : workUnits) {
MultiWorkUnit bestGroup = findAndPopBestFitGroup(workUnit, pQueue, avgGroupSize);
if (bestGroup != null) {
addWorkUnitToMultiWorkUnit(workUnit, bestGroup);
} else {
bestGroup = MultiWorkUnit.createEmpty();
addWorkUnitToMultiWorkUnit(workUnit, bestGroup);
}
pQueue.add(bestGroup);
}
return Lists.newArrayList(pQueue);
} } | public class class_name {
private static List<MultiWorkUnit> bestFitDecreasingBinPacking(List<WorkUnit> workUnits, double avgGroupSize) {
// Sort workunits by data size desc
Collections.sort(workUnits, LOAD_DESC_COMPARATOR);
PriorityQueue<MultiWorkUnit> pQueue = new PriorityQueue<>(workUnits.size(), LOAD_DESC_COMPARATOR);
for (WorkUnit workUnit : workUnits) {
MultiWorkUnit bestGroup = findAndPopBestFitGroup(workUnit, pQueue, avgGroupSize);
if (bestGroup != null) {
addWorkUnitToMultiWorkUnit(workUnit, bestGroup); // depends on control dependency: [if], data = [none]
} else {
bestGroup = MultiWorkUnit.createEmpty(); // depends on control dependency: [if], data = [none]
addWorkUnitToMultiWorkUnit(workUnit, bestGroup); // depends on control dependency: [if], data = [none]
}
pQueue.add(bestGroup); // depends on control dependency: [for], data = [none]
}
return Lists.newArrayList(pQueue);
} } |
public class class_name {
public @NotNull CharAssert isGreaterThanOrEqualTo(char other) {
if (actual >= other) {
return this;
}
failIfCustomMessageIsSet();
throw failure(unexpectedLessThan(actual, other));
} } | public class class_name {
public @NotNull CharAssert isGreaterThanOrEqualTo(char other) {
if (actual >= other) {
return this; // depends on control dependency: [if], data = [none]
}
failIfCustomMessageIsSet();
throw failure(unexpectedLessThan(actual, other));
} } |
public class class_name {
private double nextRadicalInverse() {
counter++;
// Do at most MAXFAST appromate steps
if(counter >= MAXFAST) {
counter = 0;
inverse += MAXFAST;
current = radicalInverse(inverse);
return current;
}
// Fast approximation:
double nextInverse = current + invbase;
if(nextInverse < ALMOST_ONE) {
current = nextInverse;
return current;
}
else {
double digit1 = invbase, digit2 = invbase * invbase;
while(current + digit2 >= ALMOST_ONE) {
digit1 = digit2;
digit2 *= invbase;
}
current += (digit1 - 1.0) + digit2;
return current;
}
} } | public class class_name {
private double nextRadicalInverse() {
counter++;
// Do at most MAXFAST appromate steps
if(counter >= MAXFAST) {
counter = 0; // depends on control dependency: [if], data = [none]
inverse += MAXFAST; // depends on control dependency: [if], data = [none]
current = radicalInverse(inverse); // depends on control dependency: [if], data = [none]
return current; // depends on control dependency: [if], data = [none]
}
// Fast approximation:
double nextInverse = current + invbase;
if(nextInverse < ALMOST_ONE) {
current = nextInverse; // depends on control dependency: [if], data = [none]
return current; // depends on control dependency: [if], data = [none]
}
else {
double digit1 = invbase, digit2 = invbase * invbase;
while(current + digit2 >= ALMOST_ONE) {
digit1 = digit2; // depends on control dependency: [while], data = [none]
digit2 *= invbase; // depends on control dependency: [while], data = [none]
}
current += (digit1 - 1.0) + digit2; // depends on control dependency: [if], data = [none]
return current; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static boolean isAtLeastOneChildComponentVisible(Container component) {
for (Component comp : component.getComponents()) {
if (comp.isVisible()) {
return true;
}
}
return false;
} } | public class class_name {
public static boolean isAtLeastOneChildComponentVisible(Container component) {
for (Component comp : component.getComponents()) {
if (comp.isVisible()) {
return true; // depends on control dependency: [if], data = [none]
}
}
return false;
} } |
public class class_name {
public static int mode(File f) throws PosixException {
if(Functions.isWindows()) return -1;
try {
if (Util.NATIVE_CHMOD_MODE) {
return PosixAPI.jnr().stat(f.getPath()).mode();
} else {
return Util.permissionsToMode(Files.getPosixFilePermissions(fileToPath(f)));
}
} catch (IOException cause) {
PosixException e = new PosixException("Unable to get file permissions", null);
e.initCause(cause);
throw e;
}
} } | public class class_name {
public static int mode(File f) throws PosixException {
if(Functions.isWindows()) return -1;
try {
if (Util.NATIVE_CHMOD_MODE) {
return PosixAPI.jnr().stat(f.getPath()).mode(); // depends on control dependency: [if], data = [none]
} else {
return Util.permissionsToMode(Files.getPosixFilePermissions(fileToPath(f))); // depends on control dependency: [if], data = [none]
}
} catch (IOException cause) {
PosixException e = new PosixException("Unable to get file permissions", null);
e.initCause(cause);
throw e;
}
} } |
public class class_name {
public HashMap<String, String> computeSoil(Map<String, String> fullCurrentSoil, Map<String, String> previousSoil) {
HashMap<String, String> aggregatedSoil;
String fullCurrentValue;
String previousValue;
Float newValue;
newValue = 0f;
aggregatedSoil = new HashMap<String, String>();
for (String p : allParams) {
if (SLLB.equals(p)) {
newValue = (parseFloat(fullCurrentSoil.get(p)) + parseFloat(previousSoil.get(p)));
} else if ((ICNH4.equals(p) && fullCurrentSoil.containsKey(ICNH4) && previousSoil.containsKey(ICNH4)) || ICNO3.equals(p) && fullCurrentSoil.containsKey(ICNO3)
&& previousSoil.containsKey(ICNO3)) {
newValue = computeInitialConditions(p, fullCurrentSoil, previousSoil);
} else {
fullCurrentValue = fullCurrentSoil.get(p) == null ? LayerReducerUtil.defaultValue(p) : fullCurrentSoil.get(p);
previousValue = previousSoil.get(p) == null ? LayerReducerUtil.defaultValue(p) : previousSoil.get(p);
newValue = (parseFloat(fullCurrentValue) + parseFloat(previousValue)) / 2f;
}
aggregatedSoil.put(p, newValue.toString());
}
return aggregatedSoil;
} } | public class class_name {
public HashMap<String, String> computeSoil(Map<String, String> fullCurrentSoil, Map<String, String> previousSoil) {
HashMap<String, String> aggregatedSoil;
String fullCurrentValue;
String previousValue;
Float newValue;
newValue = 0f;
aggregatedSoil = new HashMap<String, String>();
for (String p : allParams) {
if (SLLB.equals(p)) {
newValue = (parseFloat(fullCurrentSoil.get(p)) + parseFloat(previousSoil.get(p))); // depends on control dependency: [if], data = [none]
} else if ((ICNH4.equals(p) && fullCurrentSoil.containsKey(ICNH4) && previousSoil.containsKey(ICNH4)) || ICNO3.equals(p) && fullCurrentSoil.containsKey(ICNO3)
&& previousSoil.containsKey(ICNO3)) {
newValue = computeInitialConditions(p, fullCurrentSoil, previousSoil); // depends on control dependency: [if], data = [fullCu]
} else {
fullCurrentValue = fullCurrentSoil.get(p) == null ? LayerReducerUtil.defaultValue(p) : fullCurrentSoil.get(p); // depends on control dependency: [if], data = [fullCu]
previousValue = previousSoil.get(p) == null ? LayerReducerUtil.defaultValue(p) : previousSoil.get(p); // depends on control dependency: [if], data = [none]
newValue = (parseFloat(fullCurrentValue) + parseFloat(previousValue)) / 2f; // depends on control dependency: [if], data = [fullCu]
}
aggregatedSoil.put(p, newValue.toString()); // depends on control dependency: [for], data = [p]
}
return aggregatedSoil;
} } |
public class class_name {
@Process(actionType = LoadThreadPoolMetrics.class)
public void loadMetrics(final LoadThreadPoolMetrics action, Channel channel) {
ResourceAddress address = METRICS_ROOT_ADDRESS.resolve(statementContext);
Operation operation = new Operation.Builder(READ_CHILDREN_RESOURCES_OPERATION, address)
.param(CHILD_TYPE, "thread-pool")
.param(INCLUDE_RUNTIME, true)
.build();
dispatcher.execute(new DMRAction(operation), new AsyncCallback<DMRResponse>() {
@Override
public void onFailure(final Throwable caught) {
channel.nack(caught);
}
@Override
public void onSuccess(final DMRResponse response) {
ModelNode result = response.get();
if (result.isFailure()) {
channel.nack(result.getFailureDescription());
} else {
threadPoolMetrics.clear();
threadPoolMetrics.addAll(result.get(RESULT).asPropertyList());
channel.ack();
}
}
});
} } | public class class_name {
@Process(actionType = LoadThreadPoolMetrics.class)
public void loadMetrics(final LoadThreadPoolMetrics action, Channel channel) {
ResourceAddress address = METRICS_ROOT_ADDRESS.resolve(statementContext);
Operation operation = new Operation.Builder(READ_CHILDREN_RESOURCES_OPERATION, address)
.param(CHILD_TYPE, "thread-pool")
.param(INCLUDE_RUNTIME, true)
.build();
dispatcher.execute(new DMRAction(operation), new AsyncCallback<DMRResponse>() {
@Override
public void onFailure(final Throwable caught) {
channel.nack(caught);
}
@Override
public void onSuccess(final DMRResponse response) {
ModelNode result = response.get();
if (result.isFailure()) {
channel.nack(result.getFailureDescription()); // depends on control dependency: [if], data = [none]
} else {
threadPoolMetrics.clear(); // depends on control dependency: [if], data = [none]
threadPoolMetrics.addAll(result.get(RESULT).asPropertyList()); // depends on control dependency: [if], data = [none]
channel.ack(); // depends on control dependency: [if], data = [none]
}
}
});
} } |
public class class_name {
public static Expression constant(String value, ClassFieldManager manager) {
int encodedLength = Utf8.encodedLength(value);
if (encodedLength <= MAX_CONSTANT_STRING_LENGTH) {
return stringConstant(value);
}
// else it is too big for a single constant pool entry so split it into a small number of
// entries and generate a static final field to hold the cat'ed value.
int startIndex = 0;
Expression stringExpression = null;
int length = value.length();
do {
int endIndex = offsetOf65KUtf8Bytes(value, startIndex, length);
// N.B. we may end up splitting the string at a surrogate pair, but the class format uses
// modified utf8 which is forgiving about such things.
Expression substringConstant = stringConstant(value.substring(startIndex, endIndex));
startIndex = endIndex;
if (stringExpression == null) {
stringExpression = substringConstant;
} else {
stringExpression = stringExpression.invoke(MethodRef.STRING_CONCAT, substringConstant);
}
} while (startIndex < length);
FieldRef fieldRef = manager.addStaticField(LARGE_STRING_CONSTANT_NAME, stringExpression);
return fieldRef.accessor();
} } | public class class_name {
public static Expression constant(String value, ClassFieldManager manager) {
int encodedLength = Utf8.encodedLength(value);
if (encodedLength <= MAX_CONSTANT_STRING_LENGTH) {
return stringConstant(value); // depends on control dependency: [if], data = [none]
}
// else it is too big for a single constant pool entry so split it into a small number of
// entries and generate a static final field to hold the cat'ed value.
int startIndex = 0;
Expression stringExpression = null;
int length = value.length();
do {
int endIndex = offsetOf65KUtf8Bytes(value, startIndex, length);
// N.B. we may end up splitting the string at a surrogate pair, but the class format uses
// modified utf8 which is forgiving about such things.
Expression substringConstant = stringConstant(value.substring(startIndex, endIndex));
startIndex = endIndex;
if (stringExpression == null) {
stringExpression = substringConstant; // depends on control dependency: [if], data = [none]
} else {
stringExpression = stringExpression.invoke(MethodRef.STRING_CONCAT, substringConstant); // depends on control dependency: [if], data = [none]
}
} while (startIndex < length);
FieldRef fieldRef = manager.addStaticField(LARGE_STRING_CONSTANT_NAME, stringExpression);
return fieldRef.accessor();
} } |
public class class_name {
@SuppressWarnings("nls")
protected void processQueue() {
try {
RequestMetric metric = queue.take();
QueryRunner run = new QueryRunner(ds);
Calendar cal = Calendar.getInstance();
cal.setTimeZone(TimeZone.getTimeZone("UTC"));
cal.setTime(metric.getRequestStart());
long rstart = cal.getTimeInMillis();
long rend = metric.getRequestEnd().getTime();
long duration = metric.getRequestDuration();
cal.set(Calendar.MILLISECOND, 0);
cal.set(Calendar.SECOND, 0);
long minute = cal.getTimeInMillis();
cal.set(Calendar.MINUTE, 0);
long hour = cal.getTimeInMillis();
cal.set(Calendar.HOUR_OF_DAY, 0);
long day = cal.getTimeInMillis();
cal.set(Calendar.DAY_OF_WEEK, cal.getFirstDayOfWeek());
long week = cal.getTimeInMillis();
cal.set(Calendar.DAY_OF_MONTH, 1);
long month = cal.getTimeInMillis();
String api_org_id = metric.getApiOrgId();
String api_id = metric.getApiId();
String api_version = metric.getApiVersion();
String client_org_id = metric.getClientOrgId();
String client_id = metric.getClientId();
String client_version = metric.getClientVersion();
String plan = metric.getPlanId();
String user_id = metric.getUser();
String rtype = null;
if (metric.isFailure()) {
rtype = "failure";
} else if (metric.isError()) {
rtype = "error";
}
long bytes_up = metric.getBytesUploaded();
long bytes_down = metric.getBytesDownloaded();
// Now insert a row for the metric.
run.update("INSERT INTO gw_requests ("
+ "rstart, rend, duration, month, week, day, hour, minute, "
+ "api_org_id, api_id, api_version, "
+ "client_org_id, client_id, client_version, plan, "
+ "user_id, resp_type, bytes_up, bytes_down) VALUES ("
+ "?, ?, ?, ?, ?, ?, ?, ?,"
+ "?, ?, ?,"
+ "?, ?, ?, ?,"
+ "?, ?, ?, ?)",
rstart, rend, duration, month, week, day, hour, minute,
api_org_id, api_id, api_version,
client_org_id, client_id, client_version, plan,
user_id, rtype, bytes_up, bytes_down
);
} catch (InterruptedException ie) {
// This means that the thread was stopped.
} catch (Exception e) {
// TODO better logging of this unlikely error
System.err.println("Error adding metric to database:"); //$NON-NLS-1$
e.printStackTrace();
return;
}
} } | public class class_name {
@SuppressWarnings("nls")
protected void processQueue() {
try {
RequestMetric metric = queue.take();
QueryRunner run = new QueryRunner(ds);
Calendar cal = Calendar.getInstance();
cal.setTimeZone(TimeZone.getTimeZone("UTC")); // depends on control dependency: [try], data = [none]
cal.setTime(metric.getRequestStart()); // depends on control dependency: [try], data = [none]
long rstart = cal.getTimeInMillis();
long rend = metric.getRequestEnd().getTime();
long duration = metric.getRequestDuration();
cal.set(Calendar.MILLISECOND, 0); // depends on control dependency: [try], data = [none]
cal.set(Calendar.SECOND, 0); // depends on control dependency: [try], data = [none]
long minute = cal.getTimeInMillis();
cal.set(Calendar.MINUTE, 0); // depends on control dependency: [try], data = [none]
long hour = cal.getTimeInMillis();
cal.set(Calendar.HOUR_OF_DAY, 0); // depends on control dependency: [try], data = [none]
long day = cal.getTimeInMillis();
cal.set(Calendar.DAY_OF_WEEK, cal.getFirstDayOfWeek()); // depends on control dependency: [try], data = [none]
long week = cal.getTimeInMillis();
cal.set(Calendar.DAY_OF_MONTH, 1); // depends on control dependency: [try], data = [none]
long month = cal.getTimeInMillis();
String api_org_id = metric.getApiOrgId();
String api_id = metric.getApiId();
String api_version = metric.getApiVersion();
String client_org_id = metric.getClientOrgId();
String client_id = metric.getClientId();
String client_version = metric.getClientVersion();
String plan = metric.getPlanId();
String user_id = metric.getUser();
String rtype = null;
if (metric.isFailure()) {
rtype = "failure"; // depends on control dependency: [if], data = [none]
} else if (metric.isError()) {
rtype = "error"; // depends on control dependency: [if], data = [none]
}
long bytes_up = metric.getBytesUploaded();
long bytes_down = metric.getBytesDownloaded();
// Now insert a row for the metric.
run.update("INSERT INTO gw_requests ("
+ "rstart, rend, duration, month, week, day, hour, minute, "
+ "api_org_id, api_id, api_version, "
+ "client_org_id, client_id, client_version, plan, "
+ "user_id, resp_type, bytes_up, bytes_down) VALUES ("
+ "?, ?, ?, ?, ?, ?, ?, ?,"
+ "?, ?, ?,"
+ "?, ?, ?, ?,"
+ "?, ?, ?, ?)",
rstart, rend, duration, month, week, day, hour, minute,
api_org_id, api_id, api_version,
client_org_id, client_id, client_version, plan,
user_id, rtype, bytes_up, bytes_down
); // depends on control dependency: [try], data = [none]
} catch (InterruptedException ie) {
// This means that the thread was stopped.
} catch (Exception e) { // depends on control dependency: [catch], data = [none]
// TODO better logging of this unlikely error
System.err.println("Error adding metric to database:"); //$NON-NLS-1$
e.printStackTrace();
return;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
private void setContentType(String contentType) {
// The content type may contain the charset, parse it and set it.
if (contentType != null && contentType.contains("charset=")) {
charset = Charset.forName(contentType.substring(contentType.indexOf("charset=") + 8).trim());
}
headers.put(HeaderNames.CONTENT_TYPE, contentType);
} } | public class class_name {
private void setContentType(String contentType) {
// The content type may contain the charset, parse it and set it.
if (contentType != null && contentType.contains("charset=")) {
charset = Charset.forName(contentType.substring(contentType.indexOf("charset=") + 8).trim()); // depends on control dependency: [if], data = [(contentType]
}
headers.put(HeaderNames.CONTENT_TYPE, contentType);
} } |
public class class_name {
public Observable<ServiceResponse<Page<EventHubConsumerGroupInfoInner>>> listEventHubConsumerGroupsSinglePageAsync(final String resourceGroupName, final String resourceName, final String eventHubEndpointName) {
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (resourceName == null) {
throw new IllegalArgumentException("Parameter resourceName is required and cannot be null.");
}
if (eventHubEndpointName == null) {
throw new IllegalArgumentException("Parameter eventHubEndpointName is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
return service.listEventHubConsumerGroups(this.client.subscriptionId(), resourceGroupName, resourceName, eventHubEndpointName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<EventHubConsumerGroupInfoInner>>>>() {
@Override
public Observable<ServiceResponse<Page<EventHubConsumerGroupInfoInner>>> call(Response<ResponseBody> response) {
try {
ServiceResponse<PageImpl<EventHubConsumerGroupInfoInner>> result = listEventHubConsumerGroupsDelegate(response);
return Observable.just(new ServiceResponse<Page<EventHubConsumerGroupInfoInner>>(result.body(), result.response()));
} catch (Throwable t) {
return Observable.error(t);
}
}
});
} } | public class class_name {
public Observable<ServiceResponse<Page<EventHubConsumerGroupInfoInner>>> listEventHubConsumerGroupsSinglePageAsync(final String resourceGroupName, final String resourceName, final String eventHubEndpointName) {
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (resourceName == null) {
throw new IllegalArgumentException("Parameter resourceName is required and cannot be null.");
}
if (eventHubEndpointName == null) {
throw new IllegalArgumentException("Parameter eventHubEndpointName is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
return service.listEventHubConsumerGroups(this.client.subscriptionId(), resourceGroupName, resourceName, eventHubEndpointName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<EventHubConsumerGroupInfoInner>>>>() {
@Override
public Observable<ServiceResponse<Page<EventHubConsumerGroupInfoInner>>> call(Response<ResponseBody> response) {
try {
ServiceResponse<PageImpl<EventHubConsumerGroupInfoInner>> result = listEventHubConsumerGroupsDelegate(response);
return Observable.just(new ServiceResponse<Page<EventHubConsumerGroupInfoInner>>(result.body(), result.response())); // depends on control dependency: [try], data = [none]
} catch (Throwable t) {
return Observable.error(t);
} // depends on control dependency: [catch], data = [none]
}
});
} } |
public class class_name {
public void addComments(Type holder, MethodDoc method, Content methodDocTree) {
ClassDoc holderClassDoc = holder.asClassDoc();
if (method.inlineTags().length > 0) {
if (holder.asClassDoc().equals(classdoc) ||
(! (holderClassDoc.isPublic() ||
Util.isLinkable(holderClassDoc, configuration)))) {
writer.addInlineComment(method, methodDocTree);
} else {
Content link =
writer.getDocLink(LinkInfoImpl.Kind.METHOD_DOC_COPY,
holder.asClassDoc(), method,
holder.asClassDoc().isIncluded() ?
holder.typeName() : holder.qualifiedTypeName(),
false);
Content codelLink = HtmlTree.CODE(link);
Content descfrmLabel = HtmlTree.SPAN(HtmlStyle.descfrmTypeLabel, holder.asClassDoc().isClass()?
writer.descfrmClassLabel : writer.descfrmInterfaceLabel);
descfrmLabel.addContent(writer.getSpace());
descfrmLabel.addContent(codelLink);
methodDocTree.addContent(HtmlTree.DIV(HtmlStyle.block, descfrmLabel));
writer.addInlineComment(method, methodDocTree);
}
}
} } | public class class_name {
public void addComments(Type holder, MethodDoc method, Content methodDocTree) {
ClassDoc holderClassDoc = holder.asClassDoc();
if (method.inlineTags().length > 0) {
if (holder.asClassDoc().equals(classdoc) ||
(! (holderClassDoc.isPublic() ||
Util.isLinkable(holderClassDoc, configuration)))) {
writer.addInlineComment(method, methodDocTree); // depends on control dependency: [if], data = [none]
} else {
Content link =
writer.getDocLink(LinkInfoImpl.Kind.METHOD_DOC_COPY,
holder.asClassDoc(), method,
holder.asClassDoc().isIncluded() ?
holder.typeName() : holder.qualifiedTypeName(),
false);
Content codelLink = HtmlTree.CODE(link);
Content descfrmLabel = HtmlTree.SPAN(HtmlStyle.descfrmTypeLabel, holder.asClassDoc().isClass()?
writer.descfrmClassLabel : writer.descfrmInterfaceLabel);
descfrmLabel.addContent(writer.getSpace()); // depends on control dependency: [if], data = [none]
descfrmLabel.addContent(codelLink); // depends on control dependency: [if], data = [none]
methodDocTree.addContent(HtmlTree.DIV(HtmlStyle.block, descfrmLabel)); // depends on control dependency: [if], data = [none]
writer.addInlineComment(method, methodDocTree); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
/**
 * Marshalls every field of the given {@code SynthesisTask} into the protocol
 * stream using the pre-built field bindings.
 *
 * The calls are intentionally sequential: the order of marshall() calls
 * defines the serialized layout, so it must not be reordered.
 *
 * @param synthesisTask      the task to serialize; must not be {@code null}
 * @param protocolMarshaller the marshaller receiving each field
 * @throws SdkClientException if {@code synthesisTask} is {@code null}, or if
 *         any field fails to marshall (the original exception is preserved
 *         as the cause)
 */
public void marshall(SynthesisTask synthesisTask, ProtocolMarshaller protocolMarshaller) {
if (synthesisTask == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(synthesisTask.getTaskId(), TASKID_BINDING);
protocolMarshaller.marshall(synthesisTask.getTaskStatus(), TASKSTATUS_BINDING);
protocolMarshaller.marshall(synthesisTask.getTaskStatusReason(), TASKSTATUSREASON_BINDING);
protocolMarshaller.marshall(synthesisTask.getOutputUri(), OUTPUTURI_BINDING);
protocolMarshaller.marshall(synthesisTask.getCreationTime(), CREATIONTIME_BINDING);
protocolMarshaller.marshall(synthesisTask.getRequestCharacters(), REQUESTCHARACTERS_BINDING);
protocolMarshaller.marshall(synthesisTask.getSnsTopicArn(), SNSTOPICARN_BINDING);
protocolMarshaller.marshall(synthesisTask.getLexiconNames(), LEXICONNAMES_BINDING);
protocolMarshaller.marshall(synthesisTask.getOutputFormat(), OUTPUTFORMAT_BINDING);
protocolMarshaller.marshall(synthesisTask.getSampleRate(), SAMPLERATE_BINDING);
protocolMarshaller.marshall(synthesisTask.getSpeechMarkTypes(), SPEECHMARKTYPES_BINDING);
protocolMarshaller.marshall(synthesisTask.getTextType(), TEXTTYPE_BINDING);
protocolMarshaller.marshall(synthesisTask.getVoiceId(), VOICEID_BINDING);
protocolMarshaller.marshall(synthesisTask.getLanguageCode(), LANGUAGECODE_BINDING);
} catch (Exception e) {
// Wrap any marshalling failure; cause is preserved for diagnostics.
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} }
public void marshall(SynthesisTask synthesisTask, ProtocolMarshaller protocolMarshaller) {
if (synthesisTask == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(synthesisTask.getTaskId(), TASKID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(synthesisTask.getTaskStatus(), TASKSTATUS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(synthesisTask.getTaskStatusReason(), TASKSTATUSREASON_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(synthesisTask.getOutputUri(), OUTPUTURI_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(synthesisTask.getCreationTime(), CREATIONTIME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(synthesisTask.getRequestCharacters(), REQUESTCHARACTERS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(synthesisTask.getSnsTopicArn(), SNSTOPICARN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(synthesisTask.getLexiconNames(), LEXICONNAMES_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(synthesisTask.getOutputFormat(), OUTPUTFORMAT_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(synthesisTask.getSampleRate(), SAMPLERATE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(synthesisTask.getSpeechMarkTypes(), SPEECHMARKTYPES_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(synthesisTask.getTextType(), TEXTTYPE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(synthesisTask.getVoiceId(), VOICEID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(synthesisTask.getLanguageCode(), LANGUAGECODE_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Sorts the logical records [p, r) of a segmented buffer with an introspective
 * three-way quicksort.
 *
 * Each logical index x is carried alongside its physical coordinates
 * (xN, xO) = (segment number, byte offset within that segment); the code
 * advances/retreats all three together so no division is needed per step.
 * Small ranges (< 13 records) fall back to insertion sort; once the depth
 * budget is exhausted the range is handed to {@code alt} (heap sort) to
 * guarantee O(n log n) worst case.
 *
 * NOTE(review): statement order and the paired index/coordinate updates are
 * load-bearing throughout — do not reorder.
 *
 * @param s                 the sortable providing compare/swap on (segment, offset) pairs
 * @param recordsPerSegment number of records per memory segment
 * @param recordSize        size of one record in bytes
 * @param maxOffset         offset of the last record within a segment
 * @param p, pN, pO         inclusive start index and its coordinates
 * @param r, rN, rO         exclusive end index and its coordinates
 * @param depth             remaining quicksort depth before switching to {@code alt}
 */
private static void sortInternal(final IndexedSortable s, int recordsPerSegment, int recordSize, int maxOffset,
int p, int pN, int pO, int r, int rN, int rO, int depth) {
while (true) {
if (r - p < 13) {
// switch to insertion sort
int i = p+1, iN, iO; if (pO == maxOffset) { iN = pN+1; iO = 0; } else { iN = pN; iO = pO+recordSize; }
while (i < r) {
int j = i, jN = iN, jO = iO;
int jd = j-1, jdN, jdO; if (jO == 0) { jdN = jN-1; jdO = maxOffset; } else { jdN = jN; jdO = jO-recordSize; }
while (j > p && s.compare(jdN, jdO, jN, jO) > 0) {
s.swap(jN, jO, jdN, jdO);
j = jd; jN = jdN; jO = jdO;
jd--; if (jdO == 0) { jdN--; jdO = maxOffset; } else { jdO -= recordSize; }
}
i++; if (iO == maxOffset) { iN++; iO = 0; } else { iO += recordSize; }
}
return;
}
if (--depth < 0) {
// switch to heap sort
alt.sort(s, p, r);
return;
}
// rd = r - 1 (last record of the range), in segment coordinates.
int rdN, rdO; if (rO == 0) { rdN = rN-1; rdO = maxOffset; } else { rdN = rN; rdO = rO-recordSize; }
int m = (p+r)>>>1, mN = m / recordsPerSegment, mO = (m % recordsPerSegment) * recordSize;
// select, move pivot into first position
fix(s, mN, mO, pN, pO);
fix(s, mN, mO, rdN, rdO);
fix(s, pN, pO, rdN, rdO);
// Divide
// Three-way partition: [ll..i) holds values equal to the pivot from the
// left scan, (j..rr] equal values from the right scan.
int i = p, iN = pN, iO = pO;
int j = r, jN = rN, jO = rO;
int ll = p, llN = pN, llO = pO;
int rr = r, rrN = rN, rrO = rO;
int cr;
while (true) {
i++; if (iO == maxOffset) { iN++; iO = 0; } else { iO += recordSize; }
while (i < j) {
if ((cr = s.compare(iN, iO, pN, pO)) > 0) {
break;
}
if (0 == cr) {
ll++; if (llO == maxOffset) { llN++; llO = 0; } else { llO += recordSize; }
if (ll != i) {
s.swap(llN, llO, iN, iO);
}
}
i++; if (iO == maxOffset) { iN++; iO = 0; } else { iO += recordSize; }
}
j--; if (jO == 0) { jN--; jO = maxOffset; } else { jO -= recordSize; }
while (j > i) {
if ((cr = s.compare(pN, pO, jN, jO)) > 0) {
break;
}
if (0 == cr) {
rr--; if (rrO == 0) { rrN--; rrO = maxOffset; } else { rrO -= recordSize; }
if (rr != j) {
s.swap(rrN, rrO, jN, jO);
}
}
j--; if (jO == 0) { jN--; jO = maxOffset; } else { jO -= recordSize; }
}
if (i < j) {
s.swap(iN, iO, jN, jO);
} else {
break;
}
}
j = i; jN = iN; jO = iO;
// swap pivot- and all eq values- into position
while (ll >= p) {
i--; if (iO == 0) { iN--; iO = maxOffset; } else { iO -= recordSize; }
s.swap(llN, llO, iN, iO);
ll--; if (llO == 0) { llN--; llO = maxOffset; } else { llO -= recordSize; }
}
while (rr < r) {
s.swap(rrN, rrO, jN, jO);
rr++; if (rrO == maxOffset) { rrN++; rrO = 0; } else { rrO += recordSize; }
j++; if (jO == maxOffset) { jN++; jO = 0; } else { jO += recordSize; }
}
// Conquer
// Recurse on smaller interval first to keep stack shallow
assert i != j;
if (i - p < r - j) {
sortInternal(s, recordsPerSegment, recordSize, maxOffset, p, pN, pO, i, iN, iO, depth);
p = j; pN = jN; pO = jO;
} else {
sortInternal(s, recordsPerSegment, recordSize, maxOffset, j, jN, jO, r, rN, rO, depth);
r = i; rN = iN; rO = iO;
}
}
} }
private static void sortInternal(final IndexedSortable s, int recordsPerSegment, int recordSize, int maxOffset,
int p, int pN, int pO, int r, int rN, int rO, int depth) {
while (true) {
if (r - p < 13) {
// switch to insertion sort
int i = p+1, iN, iO; if (pO == maxOffset) { iN = pN+1; iO = 0; } else { iN = pN; iO = pO+recordSize; } // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
while (i < r) {
int j = i, jN = iN, jO = iO;
int jd = j-1, jdN, jdO; if (jO == 0) { jdN = jN-1; jdO = maxOffset; } else { jdN = jN; jdO = jO-recordSize; } // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
while (j > p && s.compare(jdN, jdO, jN, jO) > 0) {
s.swap(jN, jO, jdN, jdO); // depends on control dependency: [while], data = [(j]
j = jd; jN = jdN; jO = jdO; // depends on control dependency: [while], data = [none] // depends on control dependency: [while], data = [none] // depends on control dependency: [while], data = [none]
jd--; if (jdO == 0) { jdN--; jdO = maxOffset; } else { jdO -= recordSize; } // depends on control dependency: [while], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
}
i++; if (iO == maxOffset) { iN++; iO = 0; } else { iO += recordSize; } // depends on control dependency: [while], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
}
return; // depends on control dependency: [if], data = [none]
}
if (--depth < 0) {
// switch to heap sort
alt.sort(s, p, r); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
int rdN, rdO; if (rO == 0) { rdN = rN-1; rdO = maxOffset; } else { rdN = rN; rdO = rO-recordSize; } // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
int m = (p+r)>>>1, mN = m / recordsPerSegment, mO = (m % recordsPerSegment) * recordSize;
// select, move pivot into first position
fix(s, mN, mO, pN, pO); // depends on control dependency: [while], data = [none]
fix(s, mN, mO, rdN, rdO); // depends on control dependency: [while], data = [none]
fix(s, pN, pO, rdN, rdO); // depends on control dependency: [while], data = [none]
// Divide
int i = p, iN = pN, iO = pO;
int j = r, jN = rN, jO = rO;
int ll = p, llN = pN, llO = pO;
int rr = r, rrN = rN, rrO = rO;
int cr;
while (true) {
i++; if (iO == maxOffset) { iN++; iO = 0; } else { iO += recordSize; } // depends on control dependency: [while], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
while (i < j) {
if ((cr = s.compare(iN, iO, pN, pO)) > 0) {
break;
}
if (0 == cr) {
ll++; if (llO == maxOffset) { llN++; llO = 0; } else { llO += recordSize; } // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
if (ll != i) {
s.swap(llN, llO, iN, iO); // depends on control dependency: [if], data = [(ll]
}
}
i++; if (iO == maxOffset) { iN++; iO = 0; } else { iO += recordSize; } // depends on control dependency: [while], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
}
j--; if (jO == 0) { jN--; jO = maxOffset; } else { jO -= recordSize; } // depends on control dependency: [while], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
while (j > i) {
if ((cr = s.compare(pN, pO, jN, jO)) > 0) {
break;
}
if (0 == cr) {
rr--; if (rrO == 0) { rrN--; rrO = maxOffset; } else { rrO -= recordSize; } // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
if (rr != j) {
s.swap(rrN, rrO, jN, jO); // depends on control dependency: [if], data = [(rr]
}
}
j--; if (jO == 0) { jN--; jO = maxOffset; } else { jO -= recordSize; } // depends on control dependency: [while], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
}
if (i < j) {
s.swap(iN, iO, jN, jO); // depends on control dependency: [if], data = [(i]
} else {
break;
}
}
j = i; jN = iN; jO = iO; // depends on control dependency: [while], data = [none] // depends on control dependency: [while], data = [none] // depends on control dependency: [while], data = [none]
// swap pivot- and all eq values- into position
while (ll >= p) {
i--; if (iO == 0) { iN--; iO = maxOffset; } else { iO -= recordSize; } // depends on control dependency: [while], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
s.swap(llN, llO, iN, iO); // depends on control dependency: [while], data = [(ll]
ll--; if (llO == 0) { llN--; llO = maxOffset; } else { llO -= recordSize; } // depends on control dependency: [while], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
}
while (rr < r) {
s.swap(rrN, rrO, jN, jO); // depends on control dependency: [while], data = [(rr]
rr++; if (rrO == maxOffset) { rrN++; rrO = 0; } else { rrO += recordSize; } // depends on control dependency: [while], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
j++; if (jO == maxOffset) { jN++; jO = 0; } else { jO += recordSize; } // depends on control dependency: [while], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
}
// Conquer
// Recurse on smaller interval first to keep stack shallow
assert i != j;
if (i - p < r - j) {
sortInternal(s, recordsPerSegment, recordSize, maxOffset, p, pN, pO, i, iN, iO, depth); // depends on control dependency: [if], data = [none]
p = j; pN = jN; pO = jO; // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
} else {
sortInternal(s, recordsPerSegment, recordSize, maxOffset, j, jN, jO, r, rN, rO, depth); // depends on control dependency: [if], data = [none]
r = i; rN = iN; rO = iO; // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
static List<IMavenProjectImportResult> runFixedImportJob(boolean addSarlSpecificSourceFolders,
Collection<MavenProjectInfo> projectInfos,
ProjectImportConfiguration importConfiguration, IProjectCreationListener projectCreationListener,
IProgressMonitor monitor) throws CoreException {
final List<IMavenProjectImportResult> importResults = MavenPlugin.getProjectConfigurationManager()
.importProjects(projectInfos, importConfiguration, projectCreationListener, monitor);
if (addSarlSpecificSourceFolders) {
final SubMonitor submon = SubMonitor.convert(monitor, importResults.size());
for (final IMavenProjectImportResult importResult : importResults) {
SARLProjectConfigurator.configureSARLSourceFolders(
// Project to configure
importResult.getProject(),
// Create folders
true,
// Monitor
submon.newChild(1));
final WorkspaceJob job = new WorkspaceJob("Creating Maven project") { //$NON-NLS-1$
@SuppressWarnings("synthetic-access")
@Override
public IStatus runInWorkspace(IProgressMonitor monitor) throws CoreException {
try {
runBugFix(importResult.getProject(), monitor);
} catch (Exception exception) {
return SARLMavenEclipsePlugin.getDefault().createStatus(IStatus.ERROR, exception);
}
return Status.OK_STATUS;
}
};
job.setRule(MavenPlugin.getProjectConfigurationManager().getRule());
job.schedule();
}
}
return importResults;
} } | public class class_name {
public class class_name {
// NOTE(review): dependency-annotated copy of runFixedImportJob; the trailing
// "// depends on control dependency: ..." comments are machine-generated
// dataset annotations — keep them verbatim.
static List<IMavenProjectImportResult> runFixedImportJob(boolean addSarlSpecificSourceFolders,
Collection<MavenProjectInfo> projectInfos,
ProjectImportConfiguration importConfiguration, IProjectCreationListener projectCreationListener,
IProgressMonitor monitor) throws CoreException {
final List<IMavenProjectImportResult> importResults = MavenPlugin.getProjectConfigurationManager()
.importProjects(projectInfos, importConfiguration, projectCreationListener, monitor);
if (addSarlSpecificSourceFolders) {
final SubMonitor submon = SubMonitor.convert(monitor, importResults.size());
for (final IMavenProjectImportResult importResult : importResults) {
SARLProjectConfigurator.configureSARLSourceFolders(
// Project to configure
importResult.getProject(),
// Create folders
true,
// Monitor
submon.newChild(1)); // depends on control dependency: [for], data = [none]
final WorkspaceJob job = new WorkspaceJob("Creating Maven project") { //$NON-NLS-1$
@SuppressWarnings("synthetic-access")
@Override
public IStatus runInWorkspace(IProgressMonitor monitor) throws CoreException {
try {
runBugFix(importResult.getProject(), monitor);
} catch (Exception exception) { // depends on control dependency: [for], data = [none]
return SARLMavenEclipsePlugin.getDefault().createStatus(IStatus.ERROR, exception);
}
return Status.OK_STATUS; // depends on control dependency: [for], data = [none]
}
};
job.setRule(MavenPlugin.getProjectConfigurationManager().getRule());
job.schedule();
}
}
return importResults;
} }
public class class_name {
private OutlookFieldInformation analyzeDocumentEntry(final DocumentEntry de) {
final String name = de.getName();
// we are only interested in document entries
// with names starting with __substg1.
LOGGER.trace("Document entry: {}", name);
if (name.startsWith(PROPERTY_STREAM_PREFIX)) {
final String clazz;
final String type;
final int mapiType;
try {
final String val = name.substring(PROPERTY_STREAM_PREFIX.length()).toLowerCase();
// the first 4 digits of the remainder
// defines the field class (or field name)
// and the last 4 digits indicate the
// data type.
clazz = val.substring(0, 4);
type = val.substring(4);
LOGGER.trace(" Found document entry: class={}, type={}", clazz, type);
mapiType = Integer.parseInt(type, 16);
} catch (final RuntimeException re) {
LOGGER.error("Could not parse directory entry {}", name, re);
return new OutlookFieldInformation();
}
return new OutlookFieldInformation(clazz, mapiType);
} else {
LOGGER.trace("Ignoring entry with name {}", name);
}
// we are not interested in the field
// and return an empty OutlookFieldInformation object
return new OutlookFieldInformation();
} } | public class class_name {
private OutlookFieldInformation analyzeDocumentEntry(final DocumentEntry de) {
final String name = de.getName();
// we are only interested in document entries
// with names starting with __substg1.
LOGGER.trace("Document entry: {}", name);
if (name.startsWith(PROPERTY_STREAM_PREFIX)) {
final String clazz;
final String type;
final int mapiType;
try {
final String val = name.substring(PROPERTY_STREAM_PREFIX.length()).toLowerCase();
// the first 4 digits of the remainder
// defines the field class (or field name)
// and the last 4 digits indicate the
// data type.
clazz = val.substring(0, 4); // depends on control dependency: [try], data = [none]
type = val.substring(4); // depends on control dependency: [try], data = [none]
LOGGER.trace(" Found document entry: class={}, type={}", clazz, type); // depends on control dependency: [try], data = [none]
mapiType = Integer.parseInt(type, 16); // depends on control dependency: [try], data = [none]
} catch (final RuntimeException re) {
LOGGER.error("Could not parse directory entry {}", name, re);
return new OutlookFieldInformation();
} // depends on control dependency: [catch], data = [none]
return new OutlookFieldInformation(clazz, mapiType); // depends on control dependency: [if], data = [none]
} else {
LOGGER.trace("Ignoring entry with name {}", name); // depends on control dependency: [if], data = [none]
}
// we are not interested in the field
// and return an empty OutlookFieldInformation object
return new OutlookFieldInformation();
} } |
public class class_name {
@Api
public void setNamedRoles(Map<String, List<NamedRoleInfo>> namedRoles) {
this.namedRoles = namedRoles;
ldapRoleMapping = new HashMap<String, Set<String>>();
for (String roleName : namedRoles.keySet()) {
if (!ldapRoleMapping.containsKey(roleName)) {
ldapRoleMapping.put(roleName, new HashSet<String>());
}
for (NamedRoleInfo role : namedRoles.get(roleName)) {
ldapRoleMapping.get(roleName).add(role.getName());
}
}
} } | public class class_name {
@Api
public void setNamedRoles(Map<String, List<NamedRoleInfo>> namedRoles) {
this.namedRoles = namedRoles;
ldapRoleMapping = new HashMap<String, Set<String>>();
for (String roleName : namedRoles.keySet()) {
if (!ldapRoleMapping.containsKey(roleName)) {
ldapRoleMapping.put(roleName, new HashSet<String>()); // depends on control dependency: [if], data = [none]
}
for (NamedRoleInfo role : namedRoles.get(roleName)) {
ldapRoleMapping.get(roleName).add(role.getName()); // depends on control dependency: [for], data = [role]
}
}
} } |
public class class_name {
/**
 * Builds and returns the service call for a federated query across the
 * collections of an environment (POST v1/environments/{environmentId}/query).
 *
 * Only options explicitly set on {@code federatedQueryOptions} are written
 * into the JSON request body; unset options are omitted entirely.
 *
 * @param federatedQueryOptions the options for the call; must not be {@code null}
 *        (its {@code environmentId} becomes the path parameter)
 * @return a {@code ServiceCall} yielding the {@code QueryResponse}
 */
public ServiceCall<QueryResponse> federatedQuery(FederatedQueryOptions federatedQueryOptions) {
Validator.notNull(federatedQueryOptions, "federatedQueryOptions cannot be null");
String[] pathSegments = { "v1/environments", "query" };
String[] pathParameters = { federatedQueryOptions.environmentId() };
RequestBuilder builder = RequestBuilder.post(RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments,
pathParameters));
builder.query("version", versionDate);
// Attach the SDK analytics headers for this operation.
Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("discovery", "v1", "federatedQuery");
for (Entry<String, String> header : sdkHeaders.entrySet()) {
builder.header(header.getKey(), header.getValue());
}
builder.header("Accept", "application/json");
if (federatedQueryOptions.loggingOptOut() != null) {
builder.header("X-Watson-Logging-Opt-Out", federatedQueryOptions.loggingOptOut());
}
// Serialize only the options that were explicitly set.
final JsonObject contentJson = new JsonObject();
if (federatedQueryOptions.filter() != null) {
contentJson.addProperty("filter", federatedQueryOptions.filter());
}
if (federatedQueryOptions.query() != null) {
contentJson.addProperty("query", federatedQueryOptions.query());
}
if (federatedQueryOptions.naturalLanguageQuery() != null) {
contentJson.addProperty("natural_language_query", federatedQueryOptions.naturalLanguageQuery());
}
if (federatedQueryOptions.passages() != null) {
contentJson.addProperty("passages", federatedQueryOptions.passages());
}
if (federatedQueryOptions.aggregation() != null) {
contentJson.addProperty("aggregation", federatedQueryOptions.aggregation());
}
if (federatedQueryOptions.count() != null) {
contentJson.addProperty("count", federatedQueryOptions.count());
}
if (federatedQueryOptions.returnFields() != null) {
contentJson.addProperty("return", federatedQueryOptions.returnFields());
}
if (federatedQueryOptions.offset() != null) {
contentJson.addProperty("offset", federatedQueryOptions.offset());
}
if (federatedQueryOptions.sort() != null) {
contentJson.addProperty("sort", federatedQueryOptions.sort());
}
if (federatedQueryOptions.highlight() != null) {
contentJson.addProperty("highlight", federatedQueryOptions.highlight());
}
if (federatedQueryOptions.passagesFields() != null) {
contentJson.addProperty("passages.fields", federatedQueryOptions.passagesFields());
}
if (federatedQueryOptions.passagesCount() != null) {
contentJson.addProperty("passages.count", federatedQueryOptions.passagesCount());
}
if (federatedQueryOptions.passagesCharacters() != null) {
contentJson.addProperty("passages.characters", federatedQueryOptions.passagesCharacters());
}
if (federatedQueryOptions.deduplicate() != null) {
contentJson.addProperty("deduplicate", federatedQueryOptions.deduplicate());
}
if (federatedQueryOptions.deduplicateField() != null) {
contentJson.addProperty("deduplicate.field", federatedQueryOptions.deduplicateField());
}
if (federatedQueryOptions.collectionIds() != null) {
contentJson.addProperty("collection_ids", federatedQueryOptions.collectionIds());
}
if (federatedQueryOptions.similar() != null) {
contentJson.addProperty("similar", federatedQueryOptions.similar());
}
if (federatedQueryOptions.similarDocumentIds() != null) {
contentJson.addProperty("similar.document_ids", federatedQueryOptions.similarDocumentIds());
}
if (federatedQueryOptions.similarFields() != null) {
contentJson.addProperty("similar.fields", federatedQueryOptions.similarFields());
}
if (federatedQueryOptions.bias() != null) {
contentJson.addProperty("bias", federatedQueryOptions.bias());
}
builder.bodyJson(contentJson);
return createServiceCall(builder.build(), ResponseConverterUtils.getObject(QueryResponse.class));
} }
public ServiceCall<QueryResponse> federatedQuery(FederatedQueryOptions federatedQueryOptions) {
Validator.notNull(federatedQueryOptions, "federatedQueryOptions cannot be null");
String[] pathSegments = { "v1/environments", "query" };
String[] pathParameters = { federatedQueryOptions.environmentId() };
RequestBuilder builder = RequestBuilder.post(RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments,
pathParameters));
builder.query("version", versionDate);
Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("discovery", "v1", "federatedQuery");
for (Entry<String, String> header : sdkHeaders.entrySet()) {
builder.header(header.getKey(), header.getValue()); // depends on control dependency: [for], data = [header]
}
builder.header("Accept", "application/json");
if (federatedQueryOptions.loggingOptOut() != null) {
builder.header("X-Watson-Logging-Opt-Out", federatedQueryOptions.loggingOptOut()); // depends on control dependency: [if], data = [none]
}
final JsonObject contentJson = new JsonObject();
if (federatedQueryOptions.filter() != null) {
contentJson.addProperty("filter", federatedQueryOptions.filter()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.query() != null) {
contentJson.addProperty("query", federatedQueryOptions.query()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.naturalLanguageQuery() != null) {
contentJson.addProperty("natural_language_query", federatedQueryOptions.naturalLanguageQuery()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.passages() != null) {
contentJson.addProperty("passages", federatedQueryOptions.passages()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.aggregation() != null) {
contentJson.addProperty("aggregation", federatedQueryOptions.aggregation()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.count() != null) {
contentJson.addProperty("count", federatedQueryOptions.count()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.returnFields() != null) {
contentJson.addProperty("return", federatedQueryOptions.returnFields()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.offset() != null) {
contentJson.addProperty("offset", federatedQueryOptions.offset()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.sort() != null) {
contentJson.addProperty("sort", federatedQueryOptions.sort()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.highlight() != null) {
contentJson.addProperty("highlight", federatedQueryOptions.highlight()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.passagesFields() != null) {
contentJson.addProperty("passages.fields", federatedQueryOptions.passagesFields()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.passagesCount() != null) {
contentJson.addProperty("passages.count", federatedQueryOptions.passagesCount()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.passagesCharacters() != null) {
contentJson.addProperty("passages.characters", federatedQueryOptions.passagesCharacters()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.deduplicate() != null) {
contentJson.addProperty("deduplicate", federatedQueryOptions.deduplicate()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.deduplicateField() != null) {
contentJson.addProperty("deduplicate.field", federatedQueryOptions.deduplicateField()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.collectionIds() != null) {
contentJson.addProperty("collection_ids", federatedQueryOptions.collectionIds()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.similar() != null) {
contentJson.addProperty("similar", federatedQueryOptions.similar()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.similarDocumentIds() != null) {
contentJson.addProperty("similar.document_ids", federatedQueryOptions.similarDocumentIds()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.similarFields() != null) {
contentJson.addProperty("similar.fields", federatedQueryOptions.similarFields()); // depends on control dependency: [if], data = [none]
}
if (federatedQueryOptions.bias() != null) {
contentJson.addProperty("bias", federatedQueryOptions.bias()); // depends on control dependency: [if], data = [none]
}
builder.bodyJson(contentJson);
return createServiceCall(builder.build(), ResponseConverterUtils.getObject(QueryResponse.class));
} } |
public class class_name {
public AwsSecurityFindingFilters withType(StringFilter... type) {
if (this.type == null) {
setType(new java.util.ArrayList<StringFilter>(type.length));
}
for (StringFilter ele : type) {
this.type.add(ele);
}
return this;
} } | public class class_name {
public AwsSecurityFindingFilters withType(StringFilter... type) {
if (this.type == null) {
setType(new java.util.ArrayList<StringFilter>(type.length)); // depends on control dependency: [if], data = [none]
}
for (StringFilter ele : type) {
this.type.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
protected String makePrefixedPropertyName(String base, String prefix)
{
String name = base;
if (prefix != null)
{
StringBuffer buff = new StringBuffer(base);
if (prefix != null)
{
buff.insert(0, PROPERTY_NAME_SEPARATOR);
buff.insert(0, prefix);
}
return buff.toString();
}
return name;
} } | public class class_name {
protected String makePrefixedPropertyName(String base, String prefix)
{
String name = base;
if (prefix != null)
{
StringBuffer buff = new StringBuffer(base);
if (prefix != null)
{
buff.insert(0, PROPERTY_NAME_SEPARATOR); // depends on control dependency: [if], data = [none]
buff.insert(0, prefix); // depends on control dependency: [if], data = [none]
}
return buff.toString(); // depends on control dependency: [if], data = [none]
}
return name;
} } |
public class class_name {
private void handleResourceRequest(EventModel eventModel) {
if (MusicUsageResource.isPermanent(eventModel)) {
ResourceModel resourceModel = eventModel.getListResourceContainer()
.provideResource(MusicUsageResource.ID)
.stream()
.filter(MusicUsageResource::isPermanent)
.findAny()
.orElse(null);//should not happen
//a partially applied function which takes an Identification an returns an Optional StartMusicRequest
Function<Identification, Optional<StartMusicRequest>> getStartMusicRequest = own ->
StartMusicRequest.createStartMusicRequest(resourceModel.getProvider(), own);
//if we have a trackInfo we create it with the trackInfo as a parameter
getStartMusicRequest = TrackInfoResource.getTrackInfo(eventModel)
.map(trackInfo -> (Function<Identification, Optional<StartMusicRequest>>) own ->
StartMusicRequest.createStartMusicRequest(resourceModel.getProvider(), own, trackInfo))
.orElse(getStartMusicRequest);
//if we have a trackInfo we create it with the playlist as a parameter
getStartMusicRequest = PlaylistResource.getPlaylist(eventModel)
.map(playlist -> (Function<Identification, Optional<StartMusicRequest>>) own ->
StartMusicRequest.createStartMusicRequest(resourceModel.getProvider(), own, playlist))
.orElse(getStartMusicRequest);
//composes a new Function which appends the Volume to the result
getStartMusicRequest = getStartMusicRequest.andThen(
VolumeResource.getVolume(eventModel)
.flatMap(volume -> IdentificationManagerM.getInstance().getIdentification(this)
.map(identification -> new VolumeResource(identification, volume)))
.map(resource -> (Function<Optional<StartMusicRequest>, Optional<StartMusicRequest>>) opt ->
opt.map(event -> (StartMusicRequest) event.addResource(resource))
)
.orElse(Function.identity())::apply);
IdentificationManagerM.getInstance().getIdentification(this)
.flatMap(getStartMusicRequest::apply)
.ifPresent(this::fire);
} else {
play(eventModel);
if (!runsInPlay) {
blockRequest = lock.newCondition();
lock.lock();
try {
blockRequest.await(10, TimeUnit.MINUTES);
} catch (InterruptedException e) {
debug("interrupted", e);
} finally {
lock.unlock();
}
}
}
} } | public class class_name {
private void handleResourceRequest(EventModel eventModel) {
if (MusicUsageResource.isPermanent(eventModel)) {
ResourceModel resourceModel = eventModel.getListResourceContainer()
.provideResource(MusicUsageResource.ID)
.stream()
.filter(MusicUsageResource::isPermanent)
.findAny()
.orElse(null);//should not happen
//a partially applied function which takes an Identification an returns an Optional StartMusicRequest
Function<Identification, Optional<StartMusicRequest>> getStartMusicRequest = own ->
StartMusicRequest.createStartMusicRequest(resourceModel.getProvider(), own);
//if we have a trackInfo we create it with the trackInfo as a parameter
getStartMusicRequest = TrackInfoResource.getTrackInfo(eventModel)
.map(trackInfo -> (Function<Identification, Optional<StartMusicRequest>>) own ->
StartMusicRequest.createStartMusicRequest(resourceModel.getProvider(), own, trackInfo))
.orElse(getStartMusicRequest); // depends on control dependency: [if], data = [none]
//if we have a trackInfo we create it with the playlist as a parameter
getStartMusicRequest = PlaylistResource.getPlaylist(eventModel)
.map(playlist -> (Function<Identification, Optional<StartMusicRequest>>) own ->
StartMusicRequest.createStartMusicRequest(resourceModel.getProvider(), own, playlist))
.orElse(getStartMusicRequest); // depends on control dependency: [if], data = [none]
//composes a new Function which appends the Volume to the result
getStartMusicRequest = getStartMusicRequest.andThen(
VolumeResource.getVolume(eventModel)
.flatMap(volume -> IdentificationManagerM.getInstance().getIdentification(this)
.map(identification -> new VolumeResource(identification, volume)))
.map(resource -> (Function<Optional<StartMusicRequest>, Optional<StartMusicRequest>>) opt ->
opt.map(event -> (StartMusicRequest) event.addResource(resource))
)
.orElse(Function.identity())::apply); // depends on control dependency: [if], data = [none]
IdentificationManagerM.getInstance().getIdentification(this)
.flatMap(getStartMusicRequest::apply)
.ifPresent(this::fire); // depends on control dependency: [if], data = [none]
} else {
play(eventModel); // depends on control dependency: [if], data = [none]
if (!runsInPlay) {
blockRequest = lock.newCondition(); // depends on control dependency: [if], data = [none]
lock.lock(); // depends on control dependency: [if], data = [none]
try {
blockRequest.await(10, TimeUnit.MINUTES); // depends on control dependency: [try], data = [none]
} catch (InterruptedException e) {
debug("interrupted", e);
} finally { // depends on control dependency: [catch], data = [none]
lock.unlock();
}
}
}
} } |
public class class_name {
public void addListener(HealthCheckRegistryListener listener) {
listeners.add(listener);
for (Map.Entry<String, HealthCheck> entry : healthChecks.entrySet()) {
listener.onHealthCheckAdded(entry.getKey(), entry.getValue());
}
} } | public class class_name {
public void addListener(HealthCheckRegistryListener listener) {
listeners.add(listener);
for (Map.Entry<String, HealthCheck> entry : healthChecks.entrySet()) {
listener.onHealthCheckAdded(entry.getKey(), entry.getValue()); // depends on control dependency: [for], data = [entry]
}
} } |
public class class_name {
public static ErrorItem toErrorItem(final String logMessage, final String className, final String methodName, final int lineNumber) {
ErrorItem.Builder builder = ErrorItem.newBuilder();
builder.message(logMessage);
builder.errorType("StringException");
builder.sourceMethod(className + "." + methodName);
List<TraceFrame> stackFrames = new ArrayList<TraceFrame>();
StackTraceElement[] stackTrace = (new Throwable()).getStackTrace();
int start = 0;
if ((className != null) && (methodName != null)) {
for (int i = 0; i < stackTrace.length; ++i) {
StackTraceElement ste = (stackTrace[i]);
if (className.equals(ste.getClassName()) &&
methodName.equals(ste.getMethodName()) &&
lineNumber == ste.getLineNumber()) {
start = i;
break;
}
}
}
for (int i = start; i < stackTrace.length; ++i) {
TraceFrame stackFrame = StackTraceElements.toTraceFrame(stackTrace[i]);
stackFrames.add(stackFrame);
}
builder.stackTrace(stackFrames);
return builder.build();
} } | public class class_name {
public static ErrorItem toErrorItem(final String logMessage, final String className, final String methodName, final int lineNumber) {
ErrorItem.Builder builder = ErrorItem.newBuilder();
builder.message(logMessage);
builder.errorType("StringException");
builder.sourceMethod(className + "." + methodName);
List<TraceFrame> stackFrames = new ArrayList<TraceFrame>();
StackTraceElement[] stackTrace = (new Throwable()).getStackTrace();
int start = 0;
if ((className != null) && (methodName != null)) {
for (int i = 0; i < stackTrace.length; ++i) {
StackTraceElement ste = (stackTrace[i]);
if (className.equals(ste.getClassName()) &&
methodName.equals(ste.getMethodName()) &&
lineNumber == ste.getLineNumber()) {
start = i; // depends on control dependency: [if], data = [none]
break;
}
}
}
for (int i = start; i < stackTrace.length; ++i) {
TraceFrame stackFrame = StackTraceElements.toTraceFrame(stackTrace[i]);
stackFrames.add(stackFrame); // depends on control dependency: [for], data = [none]
}
builder.stackTrace(stackFrames);
return builder.build();
} } |
public class class_name {
@Deprecated
public Set<String> getAffectedSecurityRoles() {
if (CollectionUtils.isEmpty(securityRoles)) {
Stream<MenuEntry> parents = reverseFlattened();
Iterator<MenuEntry> entry = parents.iterator();
while (entry.hasNext()) {
MenuEntry menuEntry = (MenuEntry) entry.next();
if (!CollectionUtils.isEmpty(menuEntry.getSecurityRoles())) {
return Collections.unmodifiableSet(menuEntry.getSecurityRoles());
}
//root?
if (null != menuEntry.getComponent()) {
if (null != menuEntry.getComponent().getSecurityRoles()) {
return Collections.unmodifiableSet(menuEntry.getComponent().getSecurityRoles());
}
}
}
return Collections.emptySet();
}
return Collections.unmodifiableSet(securityRoles);
} } | public class class_name {
@Deprecated
public Set<String> getAffectedSecurityRoles() {
if (CollectionUtils.isEmpty(securityRoles)) {
Stream<MenuEntry> parents = reverseFlattened();
Iterator<MenuEntry> entry = parents.iterator();
while (entry.hasNext()) {
MenuEntry menuEntry = (MenuEntry) entry.next();
if (!CollectionUtils.isEmpty(menuEntry.getSecurityRoles())) {
return Collections.unmodifiableSet(menuEntry.getSecurityRoles());
// depends on control dependency: [if], data = [none]
}
//root?
if (null != menuEntry.getComponent()) {
if (null != menuEntry.getComponent().getSecurityRoles()) {
return Collections.unmodifiableSet(menuEntry.getComponent().getSecurityRoles());
// depends on control dependency: [if], data = [menuEntry.getComponent().getSecurityRoles())]
}
}
}
return Collections.emptySet();
// depends on control dependency: [if], data = [none]
}
return Collections.unmodifiableSet(securityRoles);
} } |
public class class_name {
public DescribeHostReservationsRequest withHostReservationIdSet(String... hostReservationIdSet) {
if (this.hostReservationIdSet == null) {
setHostReservationIdSet(new com.amazonaws.internal.SdkInternalList<String>(hostReservationIdSet.length));
}
for (String ele : hostReservationIdSet) {
this.hostReservationIdSet.add(ele);
}
return this;
} } | public class class_name {
public DescribeHostReservationsRequest withHostReservationIdSet(String... hostReservationIdSet) {
if (this.hostReservationIdSet == null) {
setHostReservationIdSet(new com.amazonaws.internal.SdkInternalList<String>(hostReservationIdSet.length)); // depends on control dependency: [if], data = [none]
}
for (String ele : hostReservationIdSet) {
this.hostReservationIdSet.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
void endOffering(IOException exception) {
if (closed) {
return;
}
// closed should never be set by this method--only the polling side should do it, as it means
// that the CLOSED marker has been encountered.
if (this.exception == null) {
this.exception = exception;
}
// Since this is an unbounded queue, offer() always succeeds.
queue.offer(CLOSED);
} } | public class class_name {
void endOffering(IOException exception) {
if (closed) {
return; // depends on control dependency: [if], data = [none]
}
// closed should never be set by this method--only the polling side should do it, as it means
// that the CLOSED marker has been encountered.
if (this.exception == null) {
this.exception = exception; // depends on control dependency: [if], data = [none]
}
// Since this is an unbounded queue, offer() always succeeds.
queue.offer(CLOSED);
} } |
public class class_name {
private void documentChanged() {
// View of the component has not been updated at the time
// the DocumentEvent is fired
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
try {
int endPos = component.getDocument().getLength();
Rectangle rect = component.modelToView(endPos);
if (rect != null && rect.y != lastHeight) {
setPreferredWidth();
repaint();
lastHeight = rect.y;
}
} catch (BadLocationException ex) {
/* nothing to do */
}
}
});
} } | public class class_name {
private void documentChanged() {
// View of the component has not been updated at the time
// the DocumentEvent is fired
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
try {
int endPos = component.getDocument().getLength();
Rectangle rect = component.modelToView(endPos);
if (rect != null && rect.y != lastHeight) {
setPreferredWidth(); // depends on control dependency: [if], data = [none]
repaint(); // depends on control dependency: [if], data = [none]
lastHeight = rect.y; // depends on control dependency: [if], data = [none]
}
} catch (BadLocationException ex) {
/* nothing to do */
} // depends on control dependency: [catch], data = [none]
}
});
} } |
public class class_name {
static String toJsonString(final DataModelDto dto) {
try { return mapper.writeValueAsString(dto); }
catch (IOException e) { throw new CalculationEngineException(e); }
} } | public class class_name {
static String toJsonString(final DataModelDto dto) {
try { return mapper.writeValueAsString(dto); } // depends on control dependency: [try], data = [none]
catch (IOException e) { throw new CalculationEngineException(e); } // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public synchronized Object put(Object key, Object value) {
if (value instanceof String
&& key instanceof String) {
boolean append = ((String) key).endsWith("+");
boolean remove = ((String) key).endsWith("-");
if (append || remove) {
key = ((String) key).substring(0, ((String) key).length() - 1);
Object v = linkMap.get(key);
if (v instanceof String) {
if (append) {
value = v + "," + value;
} else {
value = ((String) v).replaceAll(Pattern.quote((String) value), "").replaceAll(",{2,}", ",");
}
} else if (remove) {
key = null;
}
}
}
if (key != null)
return linkMap.put(key, value);
else return null;
} } | public class class_name {
@Override
public synchronized Object put(Object key, Object value) {
if (value instanceof String
&& key instanceof String) {
boolean append = ((String) key).endsWith("+");
boolean remove = ((String) key).endsWith("-");
if (append || remove) {
key = ((String) key).substring(0, ((String) key).length() - 1); // depends on control dependency: [if], data = [none]
Object v = linkMap.get(key);
if (v instanceof String) {
if (append) {
value = v + "," + value; // depends on control dependency: [if], data = [none]
} else {
value = ((String) v).replaceAll(Pattern.quote((String) value), "").replaceAll(",{2,}", ","); // depends on control dependency: [if], data = [none]
}
} else if (remove) {
key = null; // depends on control dependency: [if], data = [none]
}
}
}
if (key != null)
return linkMap.put(key, value);
else return null;
} } |
public class class_name {
public void addMessage(Message message) {
synchronized (messages) {
api.addMessageToCache(message);
if (messages.stream().map(Reference::get).anyMatch(message::equals)) {
return;
}
// Add the message in the correct order
messages.removeIf(messageRef -> messageRef.get() == null);
Reference<Message> messageRef = new SoftReference<>(message, messagesCleanupQueue);
int pos = Collections.binarySearch(messages, messageRef, Comparator.comparing(Reference::get));
if (pos < 0) {
pos = -pos - 1;
}
messages.add(pos, messageRef);
}
} } | public class class_name {
public void addMessage(Message message) {
synchronized (messages) {
api.addMessageToCache(message);
if (messages.stream().map(Reference::get).anyMatch(message::equals)) {
return; // depends on control dependency: [if], data = [none]
}
// Add the message in the correct order
messages.removeIf(messageRef -> messageRef.get() == null);
Reference<Message> messageRef = new SoftReference<>(message, messagesCleanupQueue);
int pos = Collections.binarySearch(messages, messageRef, Comparator.comparing(Reference::get));
if (pos < 0) {
pos = -pos - 1; // depends on control dependency: [if], data = [none]
}
messages.add(pos, messageRef);
}
} } |
public class class_name {
public final EObject entryRuleXPostfixOperation() throws RecognitionException {
EObject current = null;
EObject iv_ruleXPostfixOperation = null;
try {
// InternalSARL.g:13146:58: (iv_ruleXPostfixOperation= ruleXPostfixOperation EOF )
// InternalSARL.g:13147:2: iv_ruleXPostfixOperation= ruleXPostfixOperation EOF
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXPostfixOperationRule());
}
pushFollow(FOLLOW_1);
iv_ruleXPostfixOperation=ruleXPostfixOperation();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
current =iv_ruleXPostfixOperation;
}
match(input,EOF,FOLLOW_2); if (state.failed) return current;
}
}
catch (RecognitionException re) {
recover(input,re);
appendSkippedTokens();
}
finally {
}
return current;
} } | public class class_name {
public final EObject entryRuleXPostfixOperation() throws RecognitionException {
EObject current = null;
EObject iv_ruleXPostfixOperation = null;
try {
// InternalSARL.g:13146:58: (iv_ruleXPostfixOperation= ruleXPostfixOperation EOF )
// InternalSARL.g:13147:2: iv_ruleXPostfixOperation= ruleXPostfixOperation EOF
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXPostfixOperationRule()); // depends on control dependency: [if], data = [none]
}
pushFollow(FOLLOW_1);
iv_ruleXPostfixOperation=ruleXPostfixOperation();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
current =iv_ruleXPostfixOperation; // depends on control dependency: [if], data = [none]
}
match(input,EOF,FOLLOW_2); if (state.failed) return current;
}
}
catch (RecognitionException re) {
recover(input,re);
appendSkippedTokens();
}
finally {
}
return current;
} } |
public class class_name {
public long applyAndJournal(Supplier<JournalContext> context, NewBlockEntry entry) {
try {
long id = applyNewBlock(entry);
context.get().append(JournalEntry.newBuilder().setNewBlock(entry).build());
return id;
} catch (Throwable t) {
ProcessUtils.fatalError(LOG, t, "Failed to apply %s", entry);
throw t; // fatalError will usually system.exit
}
} } | public class class_name {
public long applyAndJournal(Supplier<JournalContext> context, NewBlockEntry entry) {
try {
long id = applyNewBlock(entry);
context.get().append(JournalEntry.newBuilder().setNewBlock(entry).build()); // depends on control dependency: [try], data = [none]
return id; // depends on control dependency: [try], data = [none]
} catch (Throwable t) {
ProcessUtils.fatalError(LOG, t, "Failed to apply %s", entry);
throw t; // fatalError will usually system.exit
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void addBluetoothRoute() {
updateBluetoothAudioRoute(BLUETOOTH_DEVICE_NAME);
if (RuntimeEnvironment.getApiLevel() <= JELLY_BEAN_MR1) {
ReflectionHelpers.callInstanceMethod(
MediaRouter.class,
realObject,
"selectRouteInt",
ClassParameter.from(int.class, MediaRouter.ROUTE_TYPE_LIVE_AUDIO),
ClassParameter.from(RouteInfo.class, getBluetoothA2dpRoute()));
} else {
realObject.selectRoute(MediaRouter.ROUTE_TYPE_LIVE_AUDIO, getBluetoothA2dpRoute());
}
} } | public class class_name {
public void addBluetoothRoute() {
updateBluetoothAudioRoute(BLUETOOTH_DEVICE_NAME);
if (RuntimeEnvironment.getApiLevel() <= JELLY_BEAN_MR1) {
ReflectionHelpers.callInstanceMethod(
MediaRouter.class,
realObject,
"selectRouteInt",
ClassParameter.from(int.class, MediaRouter.ROUTE_TYPE_LIVE_AUDIO),
ClassParameter.from(RouteInfo.class, getBluetoothA2dpRoute())); // depends on control dependency: [if], data = [none]
} else {
realObject.selectRoute(MediaRouter.ROUTE_TYPE_LIVE_AUDIO, getBluetoothA2dpRoute()); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
public long getFileSize() {
try {
final rh_protoClient client = getRpcClient(OncRpcProtocols.ONCRPC_UDP);
try {
return BlockingWrapper.execute(() -> client.getFileSize_1());
} finally {
client.close();
}
} catch (OncRpcException | IOException | InterruptedException | RuntimeException ex) {
LOG.log(Level.SEVERE, "getFileSize RPC call failed", ex);
throw new RuntimeException("RPC call failed", ex);
}
} } | public class class_name {
@Override
public long getFileSize() {
try {
final rh_protoClient client = getRpcClient(OncRpcProtocols.ONCRPC_UDP);
try {
return BlockingWrapper.execute(() -> client.getFileSize_1()); // depends on control dependency: [try], data = [none]
} finally {
client.close();
}
} catch (OncRpcException | IOException | InterruptedException | RuntimeException ex) {
LOG.log(Level.SEVERE, "getFileSize RPC call failed", ex);
throw new RuntimeException("RPC call failed", ex);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void addAttribute(Attribute attr) {
if (attr instanceof SourceFileAttr) {
if (mSource != null) {
mAttributes.remove(mSource);
}
mSource = (SourceFileAttr)attr;
}
else if (attr instanceof InnerClassesAttr) {
if (mInnerClassesAttr != null) {
mAttributes.remove(mInnerClassesAttr);
}
mInnerClassesAttr = (InnerClassesAttr)attr;
}
mAttributes.add(attr);
} } | public class class_name {
public void addAttribute(Attribute attr) {
if (attr instanceof SourceFileAttr) {
if (mSource != null) {
mAttributes.remove(mSource); // depends on control dependency: [if], data = [(mSource]
}
mSource = (SourceFileAttr)attr; // depends on control dependency: [if], data = [none]
}
else if (attr instanceof InnerClassesAttr) {
if (mInnerClassesAttr != null) {
mAttributes.remove(mInnerClassesAttr); // depends on control dependency: [if], data = [(mInnerClassesAttr]
}
mInnerClassesAttr = (InnerClassesAttr)attr; // depends on control dependency: [if], data = [none]
}
mAttributes.add(attr);
} } |
public class class_name {
static String cosNameToInsString(NameComponent[] cname) {
StringBuffer str = new StringBuffer();
for (int i = 0; i < cname.length; i++) {
if (i > 0) {
str.append(compSeparator);
}
str.append(stringifyComponent(cname[i]));
}
return str.toString();
} } | public class class_name {
static String cosNameToInsString(NameComponent[] cname) {
StringBuffer str = new StringBuffer();
for (int i = 0; i < cname.length; i++) {
if (i > 0) {
str.append(compSeparator); // depends on control dependency: [if], data = [none]
}
str.append(stringifyComponent(cname[i])); // depends on control dependency: [for], data = [i]
}
return str.toString();
} } |
public class class_name {
public synchronized void initTasks()
throws IOException, KillInterruptedException {
if (tasksInited.get() || isComplete()) {
return;
}
synchronized(jobInitKillStatus){
if(jobInitKillStatus.killed || jobInitKillStatus.initStarted) {
return;
}
jobInitKillStatus.initStarted = true;
}
LOG.info("Initializing " + jobId);
// log job info
JobHistory.JobInfo.logSubmitted(getJobID(), conf, jobFile.toString(),
this.startTime, hasRestarted());
// log the job priority
setPriority(this.priority);
//
// read input splits and create a map per a split
//
String jobFile = profile.getJobFile();
Path sysDir = new Path(this.jobtracker.getSystemDir());
FileSystem fs = sysDir.getFileSystem(conf);
DataInputStream splitFile =
fs.open(new Path(conf.get("mapred.job.split.file")));
JobClient.RawSplit[] splits;
try {
splits = JobClient.readSplitFile(splitFile);
} finally {
splitFile.close();
}
numMapTasks = splits.length;
// if the number of splits is larger than a configured value
// then fail the job.
int maxTasks = jobtracker.getMaxTasksPerJob();
if (maxTasks > 0 && numMapTasks + numReduceTasks > maxTasks) {
throw new IOException(
"The number of tasks for this job " +
(numMapTasks + numReduceTasks) +
" exceeds the configured limit " + maxTasks);
}
jobtracker.getInstrumentation().addWaitingMaps(getJobID(), numMapTasks);
jobtracker.getInstrumentation().addWaitingReduces(getJobID(), numReduceTasks);
maps = new TaskInProgress[numMapTasks];
for(int i=0; i < numMapTasks; ++i) {
inputLength += splits[i].getDataLength();
maps[i] = new TaskInProgress(jobId, jobFile,
splits[i],
conf, this, i, numSlotsPerMap);
}
LOG.info("Input size for job " + jobId + " = " + inputLength
+ ". Number of splits = " + splits.length);
if (numMapTasks > 0) {
nonRunningMapCache = createCache(splits, maxLevel);
}
// set the launch time
this.launchTime = JobTracker.getClock().getTime();
jobtracker.getInstrumentation().addLaunchedJobs(
this.launchTime - this.startTime);
//
// Create reduce tasks
//
this.reduces = new TaskInProgress[numReduceTasks];
for (int i = 0; i < numReduceTasks; i++) {
reduces[i] = new TaskInProgress(jobId, jobFile,
numMapTasks, i,
conf, this, numSlotsPerReduce);
nonRunningReduces.add(reduces[i]);
}
// Calculate the minimum number of maps to be complete before
// we should start scheduling reduces
completedMapsForReduceSlowstart =
(int)Math.ceil(
(conf.getFloat("mapred.reduce.slowstart.completed.maps",
DEFAULT_COMPLETED_MAPS_PERCENT_FOR_REDUCE_SLOWSTART) *
numMapTasks));
// The thresholds of total maps and reduces for scheduling reducers
// immediately.
rushReduceMaps =
conf.getInt(RUSH_REDUCER_MAP_THRESHOLD, rushReduceMaps);
rushReduceReduces =
conf.getInt(RUSH_REDUCER_REDUCE_THRESHOLD, rushReduceReduces);
maxFetchFailuresPerMapper = conf.getInt(MAX_FETCH_FAILURES_PER_MAP_KEY,
MAX_FETCH_FAILURES_PER_MAP_DEFAULT);
initSetupCleanupTasks(jobFile);
synchronized(jobInitKillStatus){
jobInitKillStatus.initDone = true;
if(jobInitKillStatus.killed) {
throw new KillInterruptedException("Job " + jobId + " killed in init");
}
}
tasksInited.set(true);
JobHistory.JobInfo.logInited(profile.getJobID(), this.launchTime,
numMapTasks, numReduceTasks);
// Log the number of map and reduce tasks
LOG.info("Job " + jobId + " initialized successfully with " + numMapTasks
+ " map tasks and " + numReduceTasks + " reduce tasks.");
refreshIfNecessary();
} } | public class class_name {
public synchronized void initTasks()
throws IOException, KillInterruptedException {
if (tasksInited.get() || isComplete()) {
return;
}
synchronized(jobInitKillStatus){
if(jobInitKillStatus.killed || jobInitKillStatus.initStarted) {
return; // depends on control dependency: [if], data = [none]
}
jobInitKillStatus.initStarted = true;
}
LOG.info("Initializing " + jobId);
// log job info
JobHistory.JobInfo.logSubmitted(getJobID(), conf, jobFile.toString(),
this.startTime, hasRestarted());
// log the job priority
setPriority(this.priority);
//
// read input splits and create a map per a split
//
String jobFile = profile.getJobFile();
Path sysDir = new Path(this.jobtracker.getSystemDir());
FileSystem fs = sysDir.getFileSystem(conf);
DataInputStream splitFile =
fs.open(new Path(conf.get("mapred.job.split.file")));
JobClient.RawSplit[] splits;
try {
splits = JobClient.readSplitFile(splitFile);
} finally {
splitFile.close();
}
numMapTasks = splits.length;
// if the number of splits is larger than a configured value
// then fail the job.
int maxTasks = jobtracker.getMaxTasksPerJob();
if (maxTasks > 0 && numMapTasks + numReduceTasks > maxTasks) {
throw new IOException(
"The number of tasks for this job " +
(numMapTasks + numReduceTasks) +
" exceeds the configured limit " + maxTasks);
}
jobtracker.getInstrumentation().addWaitingMaps(getJobID(), numMapTasks);
jobtracker.getInstrumentation().addWaitingReduces(getJobID(), numReduceTasks);
maps = new TaskInProgress[numMapTasks];
for(int i=0; i < numMapTasks; ++i) {
inputLength += splits[i].getDataLength();
maps[i] = new TaskInProgress(jobId, jobFile,
splits[i],
conf, this, i, numSlotsPerMap);
}
LOG.info("Input size for job " + jobId + " = " + inputLength
+ ". Number of splits = " + splits.length);
if (numMapTasks > 0) {
nonRunningMapCache = createCache(splits, maxLevel);
}
// set the launch time
this.launchTime = JobTracker.getClock().getTime();
jobtracker.getInstrumentation().addLaunchedJobs(
this.launchTime - this.startTime);
//
// Create reduce tasks
//
this.reduces = new TaskInProgress[numReduceTasks];
for (int i = 0; i < numReduceTasks; i++) {
reduces[i] = new TaskInProgress(jobId, jobFile,
numMapTasks, i,
conf, this, numSlotsPerReduce);
nonRunningReduces.add(reduces[i]);
}
// Calculate the minimum number of maps to be complete before
// we should start scheduling reduces
completedMapsForReduceSlowstart =
(int)Math.ceil(
(conf.getFloat("mapred.reduce.slowstart.completed.maps",
DEFAULT_COMPLETED_MAPS_PERCENT_FOR_REDUCE_SLOWSTART) *
numMapTasks));
// The thresholds of total maps and reduces for scheduling reducers
// immediately.
rushReduceMaps =
conf.getInt(RUSH_REDUCER_MAP_THRESHOLD, rushReduceMaps);
rushReduceReduces =
conf.getInt(RUSH_REDUCER_REDUCE_THRESHOLD, rushReduceReduces);
maxFetchFailuresPerMapper = conf.getInt(MAX_FETCH_FAILURES_PER_MAP_KEY,
MAX_FETCH_FAILURES_PER_MAP_DEFAULT);
initSetupCleanupTasks(jobFile);
synchronized(jobInitKillStatus){
jobInitKillStatus.initDone = true;
if(jobInitKillStatus.killed) {
throw new KillInterruptedException("Job " + jobId + " killed in init");
}
}
tasksInited.set(true);
JobHistory.JobInfo.logInited(profile.getJobID(), this.launchTime,
numMapTasks, numReduceTasks);
// Log the number of map and reduce tasks
LOG.info("Job " + jobId + " initialized successfully with " + numMapTasks
+ " map tasks and " + numReduceTasks + " reduce tasks.");
refreshIfNecessary();
} } |
public class class_name {
@VisibleForTesting
static String embedCssIntoHtml(String css) {
// `</style` can close a containing style element in HTML.
// `]]>` can similarly close a CDATA element in XHTML.
// Scan for "</" and "]]>" and escape enough to remove the token seen by
// the HTML parser.
// For well-formed CSS, these string might validly appear in a few contexts:
// 1. comments
// 2. string bodies
// 3. url(...) bodies.
// Appending \ should be semantics preserving in comments and string bodies.
// This may not be semantics preserving in url content.
// The substring "]>" can validly appear in a selector
// a[href]>b
// but the substring "]]>" cannot.
// This should not affect how a CSS parser recovers from syntax errors.
int indexOfEndTag = css.indexOf("</");
int indexOfEndCData = css.indexOf("]]>");
if (indexOfEndTag != -1) {
if (indexOfEndCData != -1) {
return embedCssIntoHtmlSlow(
css,
Math.min(indexOfEndTag, indexOfEndCData),
/* searchForEndCData= */ true,
/* searchForEndTag= */ true);
}
return embedCssIntoHtmlSlow(
css, indexOfEndTag, /* searchForEndCData= */ false, /* searchForEndTag= */ true);
} else if (indexOfEndCData != -1) {
return embedCssIntoHtmlSlow(
css, indexOfEndCData, /* searchForEndCData= */ true, /* searchForEndTag= */ false);
}
return css;
} } | public class class_name {
@VisibleForTesting
static String embedCssIntoHtml(String css) {
// `</style` can close a containing style element in HTML.
// `]]>` can similarly close a CDATA element in XHTML.
// Scan for "</" and "]]>" and escape enough to remove the token seen by
// the HTML parser.
// For well-formed CSS, these string might validly appear in a few contexts:
// 1. comments
// 2. string bodies
// 3. url(...) bodies.
// Appending \ should be semantics preserving in comments and string bodies.
// This may not be semantics preserving in url content.
// The substring "]>" can validly appear in a selector
// a[href]>b
// but the substring "]]>" cannot.
// This should not affect how a CSS parser recovers from syntax errors.
int indexOfEndTag = css.indexOf("</");
int indexOfEndCData = css.indexOf("]]>");
if (indexOfEndTag != -1) {
if (indexOfEndCData != -1) {
return embedCssIntoHtmlSlow(
css,
Math.min(indexOfEndTag, indexOfEndCData),
/* searchForEndCData= */ true,
/* searchForEndTag= */ true); // depends on control dependency: [if], data = [none]
}
return embedCssIntoHtmlSlow(
css, indexOfEndTag, /* searchForEndCData= */ false, /* searchForEndTag= */ true); // depends on control dependency: [if], data = [none]
} else if (indexOfEndCData != -1) {
return embedCssIntoHtmlSlow(
css, indexOfEndCData, /* searchForEndCData= */ true, /* searchForEndTag= */ false); // depends on control dependency: [if], data = [none]
}
return css;
} } |
public class class_name {
public Context conclude()
{
super.conclude();
if (null == clientLock)
{
clientLock = new ReentrantLock();
}
if (null == epochClock)
{
epochClock = new SystemEpochClock();
}
if (null == nanoClock)
{
nanoClock = new SystemNanoClock();
}
if (null == idleStrategy)
{
idleStrategy = new SleepingMillisIdleStrategy(Configuration.IDLE_SLEEP_MS);
}
if (cncFile() != null)
{
connectToDriver();
}
interServiceTimeoutNs = CncFileDescriptor.clientLivenessTimeout(cncMetaDataBuffer);
if (interServiceTimeoutNs <= keepAliveIntervalNs)
{
throw new ConfigurationException("interServiceTimeoutNs=" + interServiceTimeoutNs +
" <= keepAliveIntervalNs=" + keepAliveIntervalNs);
}
if (null == toDriverBuffer)
{
toDriverBuffer = new ManyToOneRingBuffer(
CncFileDescriptor.createToDriverBuffer(cncByteBuffer, cncMetaDataBuffer));
}
if (null == toClientBuffer)
{
toClientBuffer = new CopyBroadcastReceiver(new BroadcastReceiver(
CncFileDescriptor.createToClientsBuffer(cncByteBuffer, cncMetaDataBuffer)));
}
if (countersMetaDataBuffer() == null)
{
countersMetaDataBuffer(
CncFileDescriptor.createCountersMetaDataBuffer(cncByteBuffer, cncMetaDataBuffer));
}
if (countersValuesBuffer() == null)
{
countersValuesBuffer(CncFileDescriptor.createCountersValuesBuffer(cncByteBuffer, cncMetaDataBuffer));
}
if (null == logBuffersFactory)
{
logBuffersFactory = new MappedLogBuffersFactory();
}
if (null == errorHandler)
{
errorHandler = Configuration.DEFAULT_ERROR_HANDLER;
}
if (null == driverProxy)
{
clientId = toDriverBuffer.nextCorrelationId();
driverProxy = new DriverProxy(toDriverBuffer, clientId);
}
return this;
} } | public class class_name {
public Context conclude()
{
super.conclude();
if (null == clientLock)
{
clientLock = new ReentrantLock(); // depends on control dependency: [if], data = [none]
}
if (null == epochClock)
{
epochClock = new SystemEpochClock(); // depends on control dependency: [if], data = [none]
}
if (null == nanoClock)
{
nanoClock = new SystemNanoClock(); // depends on control dependency: [if], data = [none]
}
if (null == idleStrategy)
{
idleStrategy = new SleepingMillisIdleStrategy(Configuration.IDLE_SLEEP_MS); // depends on control dependency: [if], data = [none]
}
if (cncFile() != null)
{
connectToDriver(); // depends on control dependency: [if], data = [none]
}
interServiceTimeoutNs = CncFileDescriptor.clientLivenessTimeout(cncMetaDataBuffer);
if (interServiceTimeoutNs <= keepAliveIntervalNs)
{
throw new ConfigurationException("interServiceTimeoutNs=" + interServiceTimeoutNs +
" <= keepAliveIntervalNs=" + keepAliveIntervalNs);
}
if (null == toDriverBuffer)
{
toDriverBuffer = new ManyToOneRingBuffer(
CncFileDescriptor.createToDriverBuffer(cncByteBuffer, cncMetaDataBuffer)); // depends on control dependency: [if], data = [none]
}
if (null == toClientBuffer)
{
toClientBuffer = new CopyBroadcastReceiver(new BroadcastReceiver(
CncFileDescriptor.createToClientsBuffer(cncByteBuffer, cncMetaDataBuffer))); // depends on control dependency: [if], data = [none]
}
if (countersMetaDataBuffer() == null)
{
countersMetaDataBuffer(
CncFileDescriptor.createCountersMetaDataBuffer(cncByteBuffer, cncMetaDataBuffer)); // depends on control dependency: [if], data = [none]
}
if (countersValuesBuffer() == null)
{
countersValuesBuffer(CncFileDescriptor.createCountersValuesBuffer(cncByteBuffer, cncMetaDataBuffer)); // depends on control dependency: [if], data = [none]
}
if (null == logBuffersFactory)
{
logBuffersFactory = new MappedLogBuffersFactory(); // depends on control dependency: [if], data = [none]
}
if (null == errorHandler)
{
errorHandler = Configuration.DEFAULT_ERROR_HANDLER; // depends on control dependency: [if], data = [none]
}
if (null == driverProxy)
{
clientId = toDriverBuffer.nextCorrelationId(); // depends on control dependency: [if], data = [none]
driverProxy = new DriverProxy(toDriverBuffer, clientId); // depends on control dependency: [if], data = [none]
}
return this;
} } |
public class class_name {
/**
 * Applies a new configuration under CONFIG_LOCK: stores it, notifies the
 * config observers, re-initialises on a scope change if already active,
 * then fires the config-update hook.
 *
 * @param config the new configuration to apply.
 * @return the applied configuration (this.config).
 * @throws CouldNotPerformException if observer notification, scope
 *         detection, or reinit fails (notifyConfigUpdate failures are only
 *         logged, not rethrown).
 * @throws InterruptedException if interrupted while applying the update.
 */
@Override
public CONFIG applyConfigUpdate(final CONFIG config) throws CouldNotPerformException, InterruptedException {
    synchronized (CONFIG_LOCK) {
        try {
            this.config = config;
            configObservable.notifyObservers(config);
            // detect scope change if instance is already active and reinit if needed.
            try {
                // NOTE(review): the change check uses detectScope(config) but
                // the new scope is taken from the no-arg detectScope() —
                // presumably equivalent since this.config was set above; confirm.
                if (isActive() && !currentScope.equals(detectScope(config))) {
                    currentScope = detectScope();
                    reinit(currentScope);
                }
            } catch (CouldNotPerformException ex) {
                throw new CouldNotPerformException("Could not verify scope changes!", ex);
            }
            // Hook failures are deliberately non-fatal: log and continue.
            try {
                notifyConfigUpdate(config);
            } catch (CouldNotPerformException ex) {
                ExceptionPrinter.printHistory(new CouldNotPerformException("Could not notify config update!", ex), logger);
            }
            return this.config;
        } catch (CouldNotPerformException ex) {
            throw new CouldNotPerformException("Could not apply config update!", ex);
        }
    }
} }
@Override
public CONFIG applyConfigUpdate(final CONFIG config) throws CouldNotPerformException, InterruptedException {
synchronized (CONFIG_LOCK) {
try {
this.config = config;
configObservable.notifyObservers(config);
// detect scope change if instance is already active and reinit if needed.
try {
if (isActive() && !currentScope.equals(detectScope(config))) {
currentScope = detectScope(); // depends on control dependency: [if], data = [none]
reinit(currentScope); // depends on control dependency: [if], data = [none]
}
} catch (CouldNotPerformException ex) {
throw new CouldNotPerformException("Could not verify scope changes!", ex);
}
try {
notifyConfigUpdate(config);
} catch (CouldNotPerformException ex) {
ExceptionPrinter.printHistory(new CouldNotPerformException("Could not notify config update!", ex), logger);
}
return this.config;
} catch (CouldNotPerformException ex) {
throw new CouldNotPerformException("Could not apply config update!", ex);
}
}
} } |
public class class_name {
void insertFunctionsAndVariables(TokenList tokens ) {
TokenList.Token t = tokens.getFirst();
while( t != null ) {
if( t.getType() == Type.WORD ) {
Variable v = lookupVariable(t.word);
if (v != null) {
t.variable = v;
t.word = null;
} else if (functions.isFunctionName(t.word)) {
t.function = (new Function(t.word));
t.word = null;
}
}
t = t.next;
}
} } | public class class_name {
void insertFunctionsAndVariables(TokenList tokens ) {
TokenList.Token t = tokens.getFirst();
while( t != null ) {
if( t.getType() == Type.WORD ) {
Variable v = lookupVariable(t.word);
if (v != null) {
t.variable = v; // depends on control dependency: [if], data = [none]
t.word = null; // depends on control dependency: [if], data = [none]
} else if (functions.isFunctionName(t.word)) {
t.function = (new Function(t.word)); // depends on control dependency: [if], data = [none]
t.word = null; // depends on control dependency: [if], data = [none]
}
}
t = t.next; // depends on control dependency: [while], data = [none]
}
} } |
public class class_name {
public DescribeServiceErrorsResult withServiceErrors(ServiceError... serviceErrors) {
if (this.serviceErrors == null) {
setServiceErrors(new com.amazonaws.internal.SdkInternalList<ServiceError>(serviceErrors.length));
}
for (ServiceError ele : serviceErrors) {
this.serviceErrors.add(ele);
}
return this;
} } | public class class_name {
public DescribeServiceErrorsResult withServiceErrors(ServiceError... serviceErrors) {
if (this.serviceErrors == null) {
setServiceErrors(new com.amazonaws.internal.SdkInternalList<ServiceError>(serviceErrors.length)); // depends on control dependency: [if], data = [none]
}
for (ServiceError ele : serviceErrors) {
this.serviceErrors.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
/**
 * Builds one metadata(...) call expression per annotation on the rule.
 * Fully-qualified annotations are resolved through the type resolver and
 * rendered either as their single "value" or as a key/value map; plain
 * annotations fall back to their raw single value, value map, or null.
 *
 * @param context   rule context supplying the type resolver.
 * @param ruleDescr descriptor whose annotations are converted.
 * @return one MethodCallExpr per annotation, in annotation-name order.
 * @throws RuntimeException wrapping resolution failures
 *         (NoSuchMethodException / ClassNotFoundException).
 */
private static List<MethodCallExpr> ruleMetaAttributes(RuleContext context, RuleDescr ruleDescr) {
    List<MethodCallExpr> ruleMetaAttributes = new ArrayList<>();
    for (String metaAttr : ruleDescr.getAnnotationNames()) {
        MethodCallExpr metaAttributeCall = new MethodCallExpr(METADATA_CALL);
        metaAttributeCall.addArgument(new StringLiteralExpr(metaAttr));
        AnnotationDescr ad = ruleDescr.getAnnotation( metaAttr );
        String adFqn = ad.getFullyQualifiedName();
        if (adFqn != null) {
            // Resolvable annotation type: build a full AnnotationDefinition.
            AnnotationDefinition annotationDefinition;
            try {
                annotationDefinition = AnnotationDefinition.build(context.getTypeResolver().resolveType(adFqn),
                                                                  ad.getValueMap(),
                                                                  context.getTypeResolver());
            } catch (NoSuchMethodException | ClassNotFoundException e) {
                throw new RuntimeException( e );
            }
            if ( annotationDefinition.getValues().size() == 1 && annotationDefinition.getValues().containsKey( AnnotationDescr.VALUE ) ) {
                // Single-member annotation: emit its "value" as a string.
                Object annValue = annotationDefinition.getPropertyValue(AnnotationDescr.VALUE);
                metaAttributeCall.addArgument(new StringLiteralExpr(annValue.toString()));
            } else {
                // Multi-member annotation: emit the whole property map.
                Map<String, Object> map = new HashMap<>( annotationDefinition.getValues().size() );
                for ( String key : annotationDefinition.getValues().keySet() ) {
                    map.put( key, annotationDefinition.getPropertyValue( key ) );
                }
                metaAttributeCall.addArgument(objectAsJPExpression(map));
            }
        } else {
            // Unresolved annotation: use the raw descriptor values directly.
            if ( ad.hasValue() ) {
                if ( ad.getValues().size() == 1 ) {
                    metaAttributeCall.addArgument(objectAsJPExpression(resolveValue(ad.getSingleValueAsString())));
                } else {
                    metaAttributeCall.addArgument(objectAsJPExpression(ad.getValueMap()));
                }
            } else {
                metaAttributeCall.addArgument(new NullLiteralExpr());
            }
        }
        ruleMetaAttributes.add(metaAttributeCall);
    }
    return ruleMetaAttributes;
} }
private static List<MethodCallExpr> ruleMetaAttributes(RuleContext context, RuleDescr ruleDescr) {
List<MethodCallExpr> ruleMetaAttributes = new ArrayList<>();
for (String metaAttr : ruleDescr.getAnnotationNames()) {
MethodCallExpr metaAttributeCall = new MethodCallExpr(METADATA_CALL);
metaAttributeCall.addArgument(new StringLiteralExpr(metaAttr)); // depends on control dependency: [for], data = [metaAttr]
AnnotationDescr ad = ruleDescr.getAnnotation( metaAttr );
String adFqn = ad.getFullyQualifiedName();
if (adFqn != null) {
AnnotationDefinition annotationDefinition;
try {
annotationDefinition = AnnotationDefinition.build(context.getTypeResolver().resolveType(adFqn),
ad.getValueMap(),
context.getTypeResolver()); // depends on control dependency: [try], data = [none]
} catch (NoSuchMethodException | ClassNotFoundException e) {
throw new RuntimeException( e );
} // depends on control dependency: [catch], data = [none]
if ( annotationDefinition.getValues().size() == 1 && annotationDefinition.getValues().containsKey( AnnotationDescr.VALUE ) ) {
Object annValue = annotationDefinition.getPropertyValue(AnnotationDescr.VALUE);
metaAttributeCall.addArgument(new StringLiteralExpr(annValue.toString())); // depends on control dependency: [if], data = [none]
} else {
Map<String, Object> map = new HashMap<>( annotationDefinition.getValues().size() );
for ( String key : annotationDefinition.getValues().keySet() ) {
map.put( key, annotationDefinition.getPropertyValue( key ) ); // depends on control dependency: [for], data = [key]
}
metaAttributeCall.addArgument(objectAsJPExpression(map)); // depends on control dependency: [if], data = [none]
}
} else {
if ( ad.hasValue() ) {
if ( ad.getValues().size() == 1 ) {
metaAttributeCall.addArgument(objectAsJPExpression(resolveValue(ad.getSingleValueAsString()))); // depends on control dependency: [if], data = [none]
} else {
metaAttributeCall.addArgument(objectAsJPExpression(ad.getValueMap())); // depends on control dependency: [if], data = [none]
}
} else {
metaAttributeCall.addArgument(new NullLiteralExpr()); // depends on control dependency: [if], data = [none]
}
}
ruleMetaAttributes.add(metaAttributeCall); // depends on control dependency: [for], data = [metaAttr]
}
return ruleMetaAttributes;
} } |
public class class_name {
public static Object newInstance(final IClassWriter classMock) {
try {
final Class<?> clazz = classMock.build();
return clazz.newInstance();
} catch (final Exception e) {
throw new RuntimeException("Can't intanciate class", e);
}
} } | public class class_name {
public static Object newInstance(final IClassWriter classMock) {
try {
final Class<?> clazz = classMock.build();
return clazz.newInstance(); // depends on control dependency: [try], data = [none]
} catch (final Exception e) {
throw new RuntimeException("Can't intanciate class", e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public EClass getIfcSoundPressureMeasure() {
if (ifcSoundPressureMeasureEClass == null) {
ifcSoundPressureMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(868);
}
return ifcSoundPressureMeasureEClass;
} } | public class class_name {
@Override
public EClass getIfcSoundPressureMeasure() {
if (ifcSoundPressureMeasureEClass == null) {
ifcSoundPressureMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(868);
// depends on control dependency: [if], data = [none]
}
return ifcSoundPressureMeasureEClass;
} } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.