| code (string, lengths 130–281k) | code_dependency (string, lengths 182–306k) |
|---|---|
public class class_name {
/**
 * Registers every path in {@code paths} with the given bundle path mapping,
 * delegating each one to the single-path overload.
 */
private void addFilePathMapping(BundlePathMapping bundlePathMapping, Set<String> paths) {
    for (String singlePath : paths) {
        addFilePathMapping(bundlePathMapping, singlePath);
    }
} }
|
public class class_name {
/**
 * Registers each path in {@code paths} with the given bundle path mapping by
 * delegating to the single-path overload.
 * NOTE: the trailing "depends on control dependency" comments are dataset
 * labels, not code comments — keep them intact.
 */
private void addFilePathMapping(BundlePathMapping bundlePathMapping, Set<String> paths) {
for (String path : paths) {
addFilePathMapping(bundlePathMapping, path); // depends on control dependency: [for], data = [path]
}
} }
|
public class class_name {
/**
 * Removes the entry for {@code key} only if it is currently mapped to
 * {@code value} (reference equality), in an open-addressing, linear-probe
 * table where a key sits at an even index and its value at the next index.
 *
 * @param key   key whose mapping is to be removed (null is masked via maskNull)
 * @param value value expected to be associated with the key (compared with ==)
 * @return true if the mapping was removed; false if the key was absent or
 *         mapped to a different value
 */
private boolean removeMapping(Object key, Object value) {
Object k = maskNull(key);
Object[] tab = table;
int len = tab.length;
int i = hash(k, len);
// Linear probe: scan until we hit the key or an empty slot.
while (true) {
Object item = tab[i];
if (item == k) {
// Key found; only remove when the stored value matches by reference.
if (tab[i + 1] != value)
return false;
modCount++;
size--;
tab[i] = null;
tab[i + 1] = null;
// Close the gap left by the deletion so later probes still find
// entries that hashed earlier in the probe sequence.
closeDeletion(i);
return true;
}
// An empty key slot terminates the probe: key is not present.
if (item == null)
return false;
i = nextKeyIndex(i, len);
}
} }
|
public class class_name {
/**
 * Removes the entry for {@code key} only if it is currently mapped (by
 * reference) to {@code value}, in a linear-probe table with keys at even
 * indices and values at the following index.
 * NOTE: the trailing "depends on control dependency" comments are dataset
 * labels — keep them intact.
 */
private boolean removeMapping(Object key, Object value) {
Object k = maskNull(key);
Object[] tab = table;
int len = tab.length;
int i = hash(k, len);
while (true) {
Object item = tab[i];
if (item == k) {
if (tab[i + 1] != value)
return false;
modCount++; // depends on control dependency: [if], data = [none]
size--; // depends on control dependency: [if], data = [none]
tab[i] = null; // depends on control dependency: [if], data = [none]
tab[i + 1] = null; // depends on control dependency: [if], data = [none]
closeDeletion(i); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
if (item == null)
return false;
i = nextKeyIndex(i, len); // depends on control dependency: [while], data = [none]
}
} }
|
public class class_name {
/**
 * Returns true when an element-wise op on {@code x} and {@code y} can run
 * directly over the underlying buffers, i.e. the elements of both arrays
 * line up contiguously in the same order.
 */
public static boolean canDoOpDirectly(INDArray x, INDArray y) {
    // Vectors are always safe.
    if (x.isVector()) {
        return true;
    }
    // Other than vectors, elements in f vs. c NDArrays will never line up.
    if (x.ordering() != y.ordering()) {
        return false;
    }
    if (x.elementWiseStride() < 1 || y.elementWiseStride() < 1) {
        return false;
    }
    // Full buffer + matching strides -> all elements are contiguous (and match).
    // Strides must match, otherwise elements in the buffer won't line up
    // (i.e., c vs. f order arrays).
    long xLength = x.lengthLong();
    long xBufferLength = x.data().length();
    long yLength = y.lengthLong();
    long yBufferLength = y.data().length();
    long[] xStrides = x.stride();
    long[] yStrides = y.stride();
    boolean sameStrides = Arrays.equals(xStrides, yStrides);
    if (xLength == xBufferLength && yLength == yBufferLength && sameStrides) {
        return true;
    }
    // Strides match + equal those of a zero-offset NDArray -> all elements
    // are contiguous (and match).
    if (sameStrides) {
        long[] xShape = x.shape();
        long[] defaultStrides = x.ordering() == 'c'
                ? ArrayUtil.calcStrides(xShape)
                : ArrayUtil.calcStridesFortran(xShape);
        return Arrays.equals(xStrides, defaultStrides);
    }
    return false;
} }
|
public class class_name {
/**
 * Returns true when an element-wise op on x and y can run directly over the
 * underlying buffers (elements of both arrays line up contiguously).
 * NOTE: the trailing "depends on control dependency" comment is a dataset
 * label — keep it intact.
 */
public static boolean canDoOpDirectly(INDArray x, INDArray y) {
if (x.isVector())
return true;
if (x.ordering() != y.ordering())
return false; //other than vectors, elements in f vs. c NDArrays will never line up
if (x.elementWiseStride() < 1 || y.elementWiseStride() < 1)
return false;
//Full buffer + matching strides -> implies all elements are contiguous (and match)
//Need strides to match, otherwise elements in buffer won't line up (i.e., c vs. f order arrays)
long l1 = x.lengthLong();
long dl1 = x.data().length();
long l2 = y.lengthLong();
long dl2 = y.data().length();
long[] strides1 = x.stride();
long[] strides2 = y.stride();
boolean equalStrides = Arrays.equals(strides1, strides2);
if (l1 == dl1 && l2 == dl2 && equalStrides)
return true;
//Strides match + are same as a zero offset NDArray -> all elements are contiguous (and match)
if (equalStrides) {
long[] shape1 = x.shape();
long[] stridesAsInit = (x.ordering() == 'c' ? ArrayUtil.calcStrides(shape1)
: ArrayUtil.calcStridesFortran(shape1));
boolean stridesSameAsInit = Arrays.equals(strides1, stridesAsInit);
return stridesSameAsInit; // depends on control dependency: [if], data = [none]
}
return false;
} }
|
public class class_name {
/**
 * Serializes all fields of {@code sr} as "key:value" pairs, each pair
 * followed by FIELD_SEPARATOR (including the last one). When the record is
 * valid, keys are first arranged into their canonical order via
 * {@link SerializedRecord#arrangeKeys}; otherwise the key set's iteration
 * order is used.
 *
 * @param sr record to pack; its fields map is read but not modified
 * @return a StringBuffer holding the packed representation
 */
public static StringBuffer pack(SerializedRecord sr) {
    StringBuffer sb = new StringBuffer();
    ArrayList<String> keys = new ArrayList<String>(sr.fields.keySet());
    if (sr.isValid()) {
        SerializedRecord.arrangeKeys(keys);
    }
    for (int i = 0; i < keys.size(); i++) {
        String key = keys.get(i);
        // Append pieces individually instead of concatenating first:
        // avoids building a throwaway String per field, and avoids the
        // second keys.get(i) lookup.
        sb.append(key).append(':').append(sr.fields.get(key));
        sb.append(FIELD_SEPARATOR);
    }
    return sb;
} }
|
public class class_name {
/**
 * Serializes all fields of sr as "key:value" pairs separated by
 * FIELD_SEPARATOR; valid records get their keys arranged first.
 * NOTE: the trailing "depends on control dependency" comments are dataset
 * labels — keep them intact.
 */
public static StringBuffer pack(SerializedRecord sr) {
StringBuffer sb = new StringBuffer();
ArrayList<String> keys = new ArrayList<String>(sr.fields.keySet());
if (sr.isValid())
SerializedRecord.arrangeKeys(keys);
for (int i = 0; i < keys.size(); i++) {
String value = sr.fields.get(keys.get(i));
sb.append(keys.get(i) + ":" + value); // depends on control dependency: [for], data = [i]
sb.append(FIELD_SEPARATOR); // depends on control dependency: [for], data = [none]
}
return sb;
} }
|
public class class_name {
/**
 * Returns whether {@code propertyName} denotes a readable property.
 *
 * For an indexed property (per PropertyAccessorUtils) this requires: the
 * root property is readable, the key types check out, the parent — when it
 * is an array — passes a size check or is writable, and the parent itself is
 * either readable with a non-null value or writable. For a plain property it
 * simply checks the read-accessor registry.
 *
 * NOTE(review): getPropertyType(parentProperty) is dereferenced without a
 * null check — confirm it cannot return null for an indexed property.
 */
public boolean isReadableProperty(String propertyName) {
if (PropertyAccessorUtils.isIndexedProperty(propertyName)) {
String rootProperty = getRootPropertyName(propertyName);
String parentProperty = getParentPropertyName(propertyName);
// Short-circuit order matters: getPropertyValue is only invoked when
// the parent is readable.
return isReadableProperty(rootProperty)
&& checkKeyTypes(propertyName)
&& (!getPropertyType(parentProperty).isArray() || checkSize(propertyName) || isWritableProperty(parentProperty))
&& ((isReadableProperty(parentProperty) && getPropertyValue(parentProperty) != null) || isWritableProperty(parentProperty));
}
else {
return readAccessors.containsKey(propertyName);
}
} }
|
public class class_name {
/**
 * Returns whether propertyName denotes a readable property; indexed
 * properties additionally require a readable root and a usable parent.
 * NOTE: the trailing "depends on control dependency" comments are dataset
 * labels — keep them intact.
 */
public boolean isReadableProperty(String propertyName) {
if (PropertyAccessorUtils.isIndexedProperty(propertyName)) {
String rootProperty = getRootPropertyName(propertyName);
String parentProperty = getParentPropertyName(propertyName);
return isReadableProperty(rootProperty)
&& checkKeyTypes(propertyName)
&& (!getPropertyType(parentProperty).isArray() || checkSize(propertyName) || isWritableProperty(parentProperty))
&& ((isReadableProperty(parentProperty) && getPropertyValue(parentProperty) != null) || isWritableProperty(parentProperty)); // depends on control dependency: [if], data = [none]
}
else {
return readAccessors.containsKey(propertyName); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
/**
 * Returns the first converter — scanning factories after {@code skipPast}
 * onward — that can produce {@code type} from a ResponseBody.
 *
 * @param skipPast factory to resume after; null starts from the beginning
 *                 (indexOf(null) == -1, so start becomes 0)
 * @param type     target type (must not be null)
 * @param annotations method annotations (must not be null)
 * @throws IllegalArgumentException when no factory matches; the message
 *         lists the skipped and tried factory class names
 */
public <T> Converter<ResponseBody, T> nextResponseBodyConverter(
@Nullable Converter.Factory skipPast, Type type, Annotation[] annotations) {
checkNotNull(type, "type == null");
checkNotNull(annotations, "annotations == null");
int start = converterFactories.indexOf(skipPast) + 1;
for (int i = start, count = converterFactories.size(); i < count; i++) {
Converter<ResponseBody, ?> converter =
converterFactories.get(i).responseBodyConverter(type, annotations, this);
if (converter != null) {
//noinspection unchecked
return (Converter<ResponseBody, T>) converter;
}
}
// No factory matched: build a diagnostic listing what was skipped/tried.
StringBuilder builder = new StringBuilder("Could not locate ResponseBody converter for ")
.append(type)
.append(".\n");
if (skipPast != null) {
builder.append("  Skipped:");
for (int i = 0; i < start; i++) {
builder.append("\n   * ").append(converterFactories.get(i).getClass().getName());
}
builder.append('\n');
}
builder.append("  Tried:");
for (int i = start, count = converterFactories.size(); i < count; i++) {
builder.append("\n   * ").append(converterFactories.get(i).getClass().getName());
}
throw new IllegalArgumentException(builder.toString());
} }
|
public class class_name {
/**
 * Returns the first converter after skipPast that can produce type from a
 * ResponseBody, or throws IllegalArgumentException listing skipped/tried
 * factories.
 * NOTE: the trailing "depends on control dependency" comments are dataset
 * labels — keep them intact.
 */
public <T> Converter<ResponseBody, T> nextResponseBodyConverter(
@Nullable Converter.Factory skipPast, Type type, Annotation[] annotations) {
checkNotNull(type, "type == null");
checkNotNull(annotations, "annotations == null");
int start = converterFactories.indexOf(skipPast) + 1;
for (int i = start, count = converterFactories.size(); i < count; i++) {
Converter<ResponseBody, ?> converter =
converterFactories.get(i).responseBodyConverter(type, annotations, this);
if (converter != null) {
//noinspection unchecked
return (Converter<ResponseBody, T>) converter; // depends on control dependency: [if], data = [none]
}
}
StringBuilder builder = new StringBuilder("Could not locate ResponseBody converter for ")
.append(type)
.append(".\n");
if (skipPast != null) {
builder.append("  Skipped:"); // depends on control dependency: [if], data = [none]
for (int i = 0; i < start; i++) {
builder.append("\n   * ").append(converterFactories.get(i).getClass().getName()); // depends on control dependency: [for], data = [i]
}
builder.append('\n'); // depends on control dependency: [if], data = [none]
}
builder.append("  Tried:");
for (int i = start, count = converterFactories.size(); i < count; i++) {
builder.append("\n   * ").append(converterFactories.get(i).getClass().getName()); // depends on control dependency: [for], data = [i]
}
throw new IllegalArgumentException(builder.toString());
} }
|
public class class_name {
/**
 * Recursive-descent DOM builder driven by a pull parser.
 *
 * Precondition: the parser is already positioned on the token to process;
 * each loop iteration (and the recursive call for START_TAG) re-establishes
 * this. Consumes tokens until {@code endToken} or END_DOCUMENT, appending
 * the corresponding DOM children under {@code node}.
 *
 * @param parser   pull parser supplying the token stream
 * @param document document used as the node factory
 * @param node     parent that receives the created children
 * @param endToken token type that terminates this nesting level
 * @throws XmlPullParserException on malformed XML (e.g. mismatched end tag)
 * @throws IOException            on underlying read errors
 */
private void parse(KXmlParser parser, DocumentImpl document, Node node,
int endToken) throws XmlPullParserException, IOException {
int token = parser.getEventType();
/*
* The main parsing loop. The precondition is that we are already on the
* token to be processed. This holds for each iteration of the loop, so
* the inner statements have to ensure that (in particular the recursive
* call).
*/
while (token != endToken && token != XmlPullParser.END_DOCUMENT) {
if (token == XmlPullParser.PROCESSING_INSTRUCTION) {
/*
* Found a processing instructions. We need to split the token
* text at the first whitespace character.
*/
String text = parser.getText();
int dot = text.indexOf(' ');
String target = (dot != -1 ? text.substring(0, dot) : text);
String data = (dot != -1 ? text.substring(dot + 1) : "");
node.appendChild(document.createProcessingInstruction(target,
data));
} else if (token == XmlPullParser.DOCDECL) {
String name = parser.getRootElementName();
String publicId = parser.getPublicId();
String systemId = parser.getSystemId();
document.appendChild(new DocumentTypeImpl(document, name, publicId, systemId));
} else if (token == XmlPullParser.COMMENT) {
/*
* Found a comment. We simply take the token text, but we only
* create a node if the client wants to see comments at all.
*/
if (!ignoreComments) {
node.appendChild(document.createComment(parser.getText()));
}
} else if (token == XmlPullParser.IGNORABLE_WHITESPACE) {
/*
* Found some ignorable whitespace. We only add it if the client
* wants to see whitespace. Whitespace before and after the
* document element is always ignored.
*/
if (!ignoreElementContentWhitespace && document != node) {
appendText(document, node, token, parser.getText());
}
} else if (token == XmlPullParser.TEXT || token == XmlPullParser.CDSECT) {
/*
* Found a piece of text (possibly encoded as a CDATA section).
* That's the easiest case. We simply take it and create a new text node,
* or merge with an adjacent text node.
*/
appendText(document, node, token, parser.getText());
} else if (token == XmlPullParser.ENTITY_REF) {
/*
* Found an entity reference. If an entity resolver is
* installed, we replace it by text (if possible). Otherwise we
* add an entity reference node.
*/
String entity = parser.getName();
if (entityResolver != null) {
// TODO Implement this...
}
String resolved = resolvePredefinedOrCharacterEntity(entity);
if (resolved != null) {
appendText(document, node, token, resolved);
} else {
node.appendChild(document.createEntityReference(entity));
}
} else if (token == XmlPullParser.START_TAG) {
/*
* Found an element start tag. We create an element node with
* the proper info and attributes. We then invoke parse()
* recursively to handle the next level of nesting. When we
* return from this call, we check that we are on the proper
* element end tag. The whole handling differs somewhat
* depending on whether the parser is namespace-aware or not.
*/
if (namespaceAware) {
// Collect info for element node
String namespace = parser.getNamespace();
String name = parser.getName();
String prefix = parser.getPrefix();
if ("".equals(namespace)) {
namespace = null;
}
// Create element node and wire it correctly
Element element = document.createElementNS(namespace, name);
element.setPrefix(prefix);
node.appendChild(element);
for (int i = 0; i < parser.getAttributeCount(); i++) {
// Collect info for a single attribute node
String attrNamespace = parser.getAttributeNamespace(i);
String attrPrefix = parser.getAttributePrefix(i);
String attrName = parser.getAttributeName(i);
String attrValue = parser.getAttributeValue(i);
if ("".equals(attrNamespace)) {
attrNamespace = null;
}
// Create attribute node and wire it correctly
Attr attr = document.createAttributeNS(attrNamespace, attrName);
attr.setPrefix(attrPrefix);
attr.setValue(attrValue);
element.setAttributeNodeNS(attr);
}
// Recursive descent
token = parser.nextToken();
parse(parser, document, element, XmlPullParser.END_TAG);
// Expect the element's end tag here
parser.require(XmlPullParser.END_TAG, namespace, name);
} else {
// Collect info for element node
String name = parser.getName();
// Create element node and wire it correctly
Element element = document.createElement(name);
node.appendChild(element);
for (int i = 0; i < parser.getAttributeCount(); i++) {
// Collect info for a single attribute node
String attrName = parser.getAttributeName(i);
String attrValue = parser.getAttributeValue(i);
// Create attribute node and wire it correctly
Attr attr = document.createAttribute(attrName);
attr.setValue(attrValue);
element.setAttributeNode(attr);
}
// Recursive descent
token = parser.nextToken();
parse(parser, document, element, XmlPullParser.END_TAG);
// Expect the element's end tag here
parser.require(XmlPullParser.END_TAG, "", name);
}
}
token = parser.nextToken();
}
} }
|
public class class_name {
/**
 * Recursive-descent DOM builder driven by a pull parser; consumes tokens
 * until endToken or END_DOCUMENT, appending DOM children under node.
 * NOTE: the trailing "depends on control dependency" comments are dataset
 * labels — keep them intact.
 */
private void parse(KXmlParser parser, DocumentImpl document, Node node,
int endToken) throws XmlPullParserException, IOException {
int token = parser.getEventType();
/*
* The main parsing loop. The precondition is that we are already on the
* token to be processed. This holds for each iteration of the loop, so
* the inner statements have to ensure that (in particular the recursive
* call).
*/
while (token != endToken && token != XmlPullParser.END_DOCUMENT) {
if (token == XmlPullParser.PROCESSING_INSTRUCTION) {
/*
* Found a processing instructions. We need to split the token
* text at the first whitespace character.
*/
String text = parser.getText();
int dot = text.indexOf(' ');
String target = (dot != -1 ? text.substring(0, dot) : text);
String data = (dot != -1 ? text.substring(dot + 1) : "");
node.appendChild(document.createProcessingInstruction(target,
data));
} else if (token == XmlPullParser.DOCDECL) {
String name = parser.getRootElementName();
String publicId = parser.getPublicId();
String systemId = parser.getSystemId();
document.appendChild(new DocumentTypeImpl(document, name, publicId, systemId)); // depends on control dependency: [if], data = [none]
} else if (token == XmlPullParser.COMMENT) {
/*
* Found a comment. We simply take the token text, but we only
* create a node if the client wants to see comments at all.
*/
if (!ignoreComments) {
node.appendChild(document.createComment(parser.getText())); // depends on control dependency: [if], data = [none]
}
} else if (token == XmlPullParser.IGNORABLE_WHITESPACE) {
/*
* Found some ignorable whitespace. We only add it if the client
* wants to see whitespace. Whitespace before and after the
* document element is always ignored.
*/
if (!ignoreElementContentWhitespace && document != node) {
appendText(document, node, token, parser.getText()); // depends on control dependency: [if], data = [none]
}
} else if (token == XmlPullParser.TEXT || token == XmlPullParser.CDSECT) {
/*
* Found a piece of text (possibly encoded as a CDATA section).
* That's the easiest case. We simply take it and create a new text node,
* or merge with an adjacent text node.
*/
appendText(document, node, token, parser.getText()); // depends on control dependency: [if], data = [none]
} else if (token == XmlPullParser.ENTITY_REF) {
/*
* Found an entity reference. If an entity resolver is
* installed, we replace it by text (if possible). Otherwise we
* add an entity reference node.
*/
String entity = parser.getName();
if (entityResolver != null) {
// TODO Implement this...
}
String resolved = resolvePredefinedOrCharacterEntity(entity);
if (resolved != null) {
appendText(document, node, token, resolved); // depends on control dependency: [if], data = [none]
} else {
node.appendChild(document.createEntityReference(entity)); // depends on control dependency: [if], data = [none]
}
} else if (token == XmlPullParser.START_TAG) {
/*
* Found an element start tag. We create an element node with
* the proper info and attributes. We then invoke parse()
* recursively to handle the next level of nesting. When we
* return from this call, we check that we are on the proper
* element end tag. The whole handling differs somewhat
* depending on whether the parser is namespace-aware or not.
*/
if (namespaceAware) {
// Collect info for element node
String namespace = parser.getNamespace();
String name = parser.getName();
String prefix = parser.getPrefix();
if ("".equals(namespace)) {
namespace = null; // depends on control dependency: [if], data = [none]
}
// Create element node and wire it correctly
Element element = document.createElementNS(namespace, name);
element.setPrefix(prefix); // depends on control dependency: [if], data = [none]
node.appendChild(element); // depends on control dependency: [if], data = [none]
for (int i = 0; i < parser.getAttributeCount(); i++) {
// Collect info for a single attribute node
String attrNamespace = parser.getAttributeNamespace(i);
String attrPrefix = parser.getAttributePrefix(i);
String attrName = parser.getAttributeName(i);
String attrValue = parser.getAttributeValue(i);
if ("".equals(attrNamespace)) {
attrNamespace = null; // depends on control dependency: [if], data = [none]
}
// Create attribute node and wire it correctly
Attr attr = document.createAttributeNS(attrNamespace, attrName);
attr.setPrefix(attrPrefix); // depends on control dependency: [for], data = [none]
attr.setValue(attrValue); // depends on control dependency: [for], data = [none]
element.setAttributeNodeNS(attr); // depends on control dependency: [for], data = [none]
}
// Recursive descent
token = parser.nextToken(); // depends on control dependency: [if], data = [none]
parse(parser, document, element, XmlPullParser.END_TAG); // depends on control dependency: [if], data = [none]
// Expect the element's end tag here
parser.require(XmlPullParser.END_TAG, namespace, name); // depends on control dependency: [if], data = [none]
} else {
// Collect info for element node
String name = parser.getName();
// Create element node and wire it correctly
Element element = document.createElement(name);
node.appendChild(element); // depends on control dependency: [if], data = [none]
for (int i = 0; i < parser.getAttributeCount(); i++) {
// Collect info for a single attribute node
String attrName = parser.getAttributeName(i);
String attrValue = parser.getAttributeValue(i);
// Create attribute node and wire it correctly
Attr attr = document.createAttribute(attrName);
attr.setValue(attrValue); // depends on control dependency: [for], data = [none]
element.setAttributeNode(attr); // depends on control dependency: [for], data = [none]
}
// Recursive descent
token = parser.nextToken(); // depends on control dependency: [if], data = [none]
parse(parser, document, element, XmlPullParser.END_TAG); // depends on control dependency: [if], data = [none]
// Expect the element's end tag here
parser.require(XmlPullParser.END_TAG, "", name); // depends on control dependency: [if], data = [none]
}
}
token = parser.nextToken();
}
} }
|
public class class_name {
/**
 * Computes the total combined buffer capacity (in items) required for a
 * quantiles sketch with parameter {@code k} after {@code n} updates.
 */
static int computeCombinedBufferItemCapacity(final int k, final long n) {
    final int levels = computeNumLevelsNeeded(k, n);
    if (levels != 0) {
        // Base buffer (2k) plus one buffer of size k per level.
        return (2 + levels) * k;
    }
    // No levels yet: size only for the base buffer, rounded up to a power of
    // two and never below 2 * MIN_K.
    final int baseBufferItems = computeBaseBufferItems(k, n);
    final int roundedUp = ceilingPowerOf2(baseBufferItems);
    return Math.max(2 * DoublesSketch.MIN_K, roundedUp);
} }
|
public class class_name {
/**
 * Computes the total combined buffer capacity (in items) required for a
 * quantiles sketch with parameter k after n updates.
 * NOTE: the trailing "depends on control dependency" comment is a dataset
 * label — keep it intact.
 */
static int computeCombinedBufferItemCapacity(final int k, final long n) {
final int totLevels = computeNumLevelsNeeded(k, n);
if (totLevels == 0) {
final int bbItems = computeBaseBufferItems(k, n);
return Math.max(2 * DoublesSketch.MIN_K, ceilingPowerOf2(bbItems)); // depends on control dependency: [if], data = [none]
}
return (2 + totLevels) * k;
} }
|
public class class_name {
/**
 * Returns whether subscriber {@code s} accepts {@code message}.
 * Non-predicated subscribers always accept.
 */
private boolean predicateApplies(Subscriber<?> s, Object message) {
    // Only a PredicatedSubscriber can reject a message.
    return !(s instanceof PredicatedSubscriber)
            || ((PredicatedSubscriber<?>) s).appliesO(message);
} }
|
public class class_name {
/**
 * Returns whether subscriber s accepts message; non-predicated subscribers
 * always accept.
 * NOTE: the "depends on control dependency" comment is a dataset label —
 * keep it intact.
 */
private boolean predicateApplies(Subscriber<?> s, Object message) {
if (s instanceof PredicatedSubscriber
&& !((PredicatedSubscriber<?>) s).appliesO(message)) {
return false;
// depends on control dependency: [if], data = [none]
}
return true;
} }
|
public class class_name {
/**
 * Returns the expanded-type ID for (namespace, localName, type), interning a
 * new ExtendedType when necessary.
 *
 * Null namespace/localName are normalized to "". Lookup uses a chained hash
 * table (m_table); on a miss with {@code searchOnly == true}, DTM.NULL is
 * returned without interning. Otherwise the table is rehashed and the
 * m_extendedTypes array doubled as needed, and the new type gets the next ID.
 *
 * NOTE(review): hashET is a shared mutable field redefined on every call —
 * this method does not look thread-safe; confirm external synchronization.
 */
public int getExpandedTypeID(String namespace, String localName, int type, boolean searchOnly)
{
if (null == namespace)
namespace = "";
if (null == localName)
localName = "";
// Calculate the hash code
int hash = type + namespace.hashCode() + localName.hashCode();
// Redefine the hashET object to represent the new expanded name.
hashET.redefine(type, namespace, localName, hash);
// Calculate the index into the HashEntry table.
int index = hash % m_capacity;
if (index < 0)
index = -index;
// Look up the expanded name in the hash table. Return the id if
// the expanded name is already in the hash table.
for (HashEntry e = m_table[index]; e != null; e = e.next)
{
if (e.hash == hash && e.key.equals(hashET))
return e.value;
}
if (searchOnly)
{
return DTM.NULL;
}
// Expand the internal HashEntry array if necessary.
if (m_nextType > m_threshold) {
rehash();
// Recompute the bucket: rehash() may have changed m_capacity.
index = hash % m_capacity;
if (index < 0)
index = -index;
}
// Create a new ExtendedType object
ExtendedType newET = new ExtendedType(type, namespace, localName, hash);
// Expand the m_extendedTypes array if necessary.
if (m_extendedTypes.length == m_nextType) {
ExtendedType[] newArray = new ExtendedType[m_extendedTypes.length * 2];
System.arraycopy(m_extendedTypes, 0, newArray, 0,
m_extendedTypes.length);
m_extendedTypes = newArray;
}
m_extendedTypes[m_nextType] = newET;
// Create a new hash entry for the new ExtendedType and put it into
// the table.
HashEntry entry = new HashEntry(newET, m_nextType, hash, m_table[index]);
m_table[index] = entry;
return m_nextType++;
} }
|
public class class_name {
/**
 * Returns the expanded-type ID for (namespace, localName, type), interning a
 * new ExtendedType when necessary; searchOnly misses return DTM.NULL.
 * NOTE: the trailing "depends on control dependency" comments are dataset
 * labels — keep them intact.
 */
public int getExpandedTypeID(String namespace, String localName, int type, boolean searchOnly)
{
if (null == namespace)
namespace = "";
if (null == localName)
localName = "";
// Calculate the hash code
int hash = type + namespace.hashCode() + localName.hashCode();
// Redefine the hashET object to represent the new expanded name.
hashET.redefine(type, namespace, localName, hash);
// Calculate the index into the HashEntry table.
int index = hash % m_capacity;
if (index < 0)
index = -index;
// Look up the expanded name in the hash table. Return the id if
// the expanded name is already in the hash table.
for (HashEntry e = m_table[index]; e != null; e = e.next)
{
if (e.hash == hash && e.key.equals(hashET))
return e.value;
}
if (searchOnly)
{
return DTM.NULL; // depends on control dependency: [if], data = [none]
}
// Expand the internal HashEntry array if necessary.
if (m_nextType > m_threshold) {
rehash(); // depends on control dependency: [if], data = [none]
index = hash % m_capacity; // depends on control dependency: [if], data = [none]
if (index < 0)
index = -index;
}
// Create a new ExtendedType object
ExtendedType newET = new ExtendedType(type, namespace, localName, hash);
// Expand the m_extendedTypes array if necessary.
if (m_extendedTypes.length == m_nextType) {
ExtendedType[] newArray = new ExtendedType[m_extendedTypes.length * 2];
System.arraycopy(m_extendedTypes, 0, newArray, 0,
m_extendedTypes.length); // depends on control dependency: [if], data = [none]
m_extendedTypes = newArray; // depends on control dependency: [if], data = [none]
}
m_extendedTypes[m_nextType] = newET;
// Create a new hash entry for the new ExtendedType and put it into
// the table.
HashEntry entry = new HashEntry(newET, m_nextType, hash, m_table[index]);
m_table[index] = entry;
return m_nextType++;
} }
|
public class class_name {
/**
 * Selects the hosts worth attempting a connection to: hosts with no status,
 * hosts whose status is older than hostRecheckMillis, and hosts whose known
 * status is acceptable for the target server type.
 */
static List<HostSpec> getCandidateHosts(HostSpec[] hostSpecs,
HostRequirement targetServerType, long hostRecheckMillis) {
    List<HostSpec> result = new ArrayList<HostSpec>(hostSpecs.length);
    long staleBefore = currentTimeMillis() - hostRecheckMillis;
    synchronized (hostStatusMap) {
        for (HostSpec candidate : hostSpecs) {
            HostSpecStatus known = hostStatusMap.get(candidate);
            boolean unknownOrStale = known == null || known.lastUpdated < staleBefore;
            // Candidates are nodes we know nothing fresh about, and nodes of
            // the correct type.
            if (unknownOrStale || targetServerType.allowConnectingTo(known.status)) {
                result.add(candidate);
            }
        }
    }
    return result;
} }
|
public class class_name {
/**
 * Selects hosts worth attempting a connection to: unknown hosts, hosts with
 * stale status, and hosts whose status matches the target server type.
 * NOTE: the "depends on control dependency" comment is a dataset label —
 * keep it intact.
 */
static List<HostSpec> getCandidateHosts(HostSpec[] hostSpecs,
HostRequirement targetServerType, long hostRecheckMillis) {
List<HostSpec> candidates = new ArrayList<HostSpec>(hostSpecs.length);
long latestAllowedUpdate = currentTimeMillis() - hostRecheckMillis;
synchronized (hostStatusMap) {
for (HostSpec hostSpec : hostSpecs) {
HostSpecStatus hostInfo = hostStatusMap.get(hostSpec);
// candidates are nodes we do not know about and the nodes with correct type
if (hostInfo == null
|| hostInfo.lastUpdated < latestAllowedUpdate
|| targetServerType.allowConnectingTo(hostInfo.status)) {
candidates.add(hostSpec); // depends on control dependency: [if], data = [none]
}
}
}
return candidates;
} }
|
public class class_name {
/**
 * Rewrites a constructor for instrumentation: the original body (with the
 * super-constructor call extracted via extractCallToSuperConstructor) is
 * moved to a private, renamed "direct" method, a redirector method is added,
 * and a fresh public &lt;init&gt; is generated that runs the extracted super
 * call, invokes the Robolectric init hook, and dispatches to the class
 * handler.
 *
 * For stub classes the original body (which just throws) is first replaced
 * with a plain call to the superclass no-arg constructor.
 */
private void instrumentConstructor(MutableClass mutableClass, MethodNode method) {
makeMethodPrivate(method);
if (mutableClass.containsStubs) {
// method.instructions just throws a `stub!` exception, replace it with something anodyne...
method.instructions.clear();
RobolectricGeneratorAdapter generator = new RobolectricGeneratorAdapter(method);
generator.loadThis();
generator.visitMethodInsn(Opcodes.INVOKESPECIAL, mutableClass.classNode.superName, "<init>", "()V", false);
generator.returnValue();
generator.endMethod();
}
InsnList callSuper = extractCallToSuperConstructor(mutableClass, method);
method.name = directMethodName(mutableClass, ShadowConstants.CONSTRUCTOR_METHOD_NAME);
mutableClass.addMethod(redirectorMethod(mutableClass, method, ShadowConstants.CONSTRUCTOR_METHOD_NAME));
String[] exceptions = exceptionArray(method);
// Build the replacement <init> with the same descriptor/signature/exceptions.
MethodNode initMethodNode = new MethodNode(method.access, "<init>", method.desc, method.signature, exceptions);
makeMethodPublic(initMethodNode);
RobolectricGeneratorAdapter generator = new RobolectricGeneratorAdapter(initMethodNode);
// Seed the new <init> with the extracted super-constructor call.
initMethodNode.instructions = callSuper;
generator.loadThis();
generator.invokeVirtual(mutableClass.classType, new Method(ROBO_INIT_METHOD_NAME, "()V"));
generateClassHandlerCall(mutableClass, method, ShadowConstants.CONSTRUCTOR_METHOD_NAME, generator);
generator.endMethod();
mutableClass.addMethod(initMethodNode);
} }
|
public class class_name {
/**
 * Rewrites a constructor for instrumentation: moves the original body to a
 * private "direct" method, adds a redirector, and generates a new public
 * &lt;init&gt; that calls super, the Robolectric init hook, and the handler.
 * NOTE: the trailing "depends on control dependency" comments are dataset
 * labels — keep them intact.
 */
private void instrumentConstructor(MutableClass mutableClass, MethodNode method) {
makeMethodPrivate(method);
if (mutableClass.containsStubs) {
// method.instructions just throws a `stub!` exception, replace it with something anodyne...
method.instructions.clear(); // depends on control dependency: [if], data = [none]
RobolectricGeneratorAdapter generator = new RobolectricGeneratorAdapter(method);
generator.loadThis(); // depends on control dependency: [if], data = [none]
generator.visitMethodInsn(Opcodes.INVOKESPECIAL, mutableClass.classNode.superName, "<init>", "()V", false);
generator.returnValue();
generator.endMethod(); // depends on control dependency: [if], data = [none]
}
InsnList callSuper = extractCallToSuperConstructor(mutableClass, method);
method.name = directMethodName(mutableClass, ShadowConstants.CONSTRUCTOR_METHOD_NAME);
mutableClass.addMethod(redirectorMethod(mutableClass, method, ShadowConstants.CONSTRUCTOR_METHOD_NAME));
String[] exceptions = exceptionArray(method);
MethodNode initMethodNode = new MethodNode(method.access, "<init>", method.desc, method.signature, exceptions);
makeMethodPublic(initMethodNode);
RobolectricGeneratorAdapter generator = new RobolectricGeneratorAdapter(initMethodNode);
initMethodNode.instructions = callSuper;
generator.loadThis();
generator.invokeVirtual(mutableClass.classType, new Method(ROBO_INIT_METHOD_NAME, "()V"));
generateClassHandlerCall(mutableClass, method, ShadowConstants.CONSTRUCTOR_METHOD_NAME, generator);
generator.endMethod();
mutableClass.addMethod(initMethodNode);
} }
|
public class class_name {
/**
 * Closes the wrapped InputStream, if any. An IOException from the close is
 * logged and rethrown as a MarkLogicIOException.
 */
@Override
public void close() {
    if ( content == null ) {
        return;
    }
    try {
        content.close();
    } catch (IOException e) {
        logger.error("Failed to close underlying InputStream",e);
        throw new MarkLogicIOException(e);
    }
} }
|
public class class_name {
/**
 * Closes the wrapped InputStream, if any; IOException is logged and wrapped
 * in MarkLogicIOException.
 * NOTE: the trailing "depends on control dependency" comments are dataset
 * labels — keep them intact.
 */
@Override
public void close() {
if ( content != null ) {
try {
content.close(); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
logger.error("Failed to close underlying InputStream",e);
throw new MarkLogicIOException(e);
} // depends on control dependency: [catch], data = [none]
}
} }
|
public class class_name {
/**
 * Rolls back the transaction branch identified by {@code xid}: rolls back
 * the underlying resource and clears the prepared flag, marking the branch
 * successful. All state access is guarded by the cpoXaStateMap lock.
 *
 * @param xid branch identifier
 * @throws XAException XAER_NOTA when the XID is unknown; XAER_PROTO when the
 *         branch is still associated (rollback requires XA_UNASSOCIATED)
 */
@Override
public void rollback(Xid xid) throws XAException {
synchronized (cpoXaStateMap) {
CpoXaState<T> cpoXaState = cpoXaStateMap.getXidStateMap().get(xid);
if (cpoXaState == null)
throw CpoXaError.createXAException(CpoXaError.XAER_NOTA, "Unknown XID");
if (cpoXaState.getAssociation()==CpoXaState.XA_UNASSOCIATED) {
rollbackResource(cpoXaState.getResource());
cpoXaState.setPrepared(false);
cpoXaState.setSuccess(true);
} else {
throw CpoXaError.createXAException(CpoXaError.XAER_PROTO, "Rollback can only be called on an unassociated XID");
}
}
} }
|
public class class_name {
/**
 * Rolls back the branch identified by xid; throws XAER_NOTA for unknown XIDs
 * and XAER_PROTO when the branch is still associated.
 * NOTE: the trailing "depends on control dependency" comments are dataset
 * labels — keep them intact.
 */
@Override
public void rollback(Xid xid) throws XAException {
synchronized (cpoXaStateMap) {
CpoXaState<T> cpoXaState = cpoXaStateMap.getXidStateMap().get(xid);
if (cpoXaState == null)
throw CpoXaError.createXAException(CpoXaError.XAER_NOTA, "Unknown XID");
if (cpoXaState.getAssociation()==CpoXaState.XA_UNASSOCIATED) {
rollbackResource(cpoXaState.getResource()); // depends on control dependency: [if], data = [none]
cpoXaState.setPrepared(false); // depends on control dependency: [if], data = [none]
cpoXaState.setSuccess(true); // depends on control dependency: [if], data = [none]
} else {
throw CpoXaError.createXAException(CpoXaError.XAER_PROTO, "Rollback can only be called on an unassociated XID");
}
}
} }
|
public class class_name {
/**
 * Ensures {@code xml} carries an XML preamble and a DOCTYPE whose root
 * element is {@code dtdRootEleName}, whose system id is {@code dtdFileName},
 * and whose internal subset holds {@code entities}.
 *
 * If a DOCTYPE already exists (per DOCTYPE_PATTERN's named groups), its name
 * and system id are replaced and its internal subset is substituted with the
 * given entities; a preamble is prepended when missing. If no DOCTYPE
 * exists, one is appended after the existing preamble, or both are prepended
 * when the document has neither.
 *
 * @return the adjusted XML document text
 */
private String setXmlPreambleAndDTD(final String xml, final String dtdFileName, final String entities, final String dtdRootEleName) {
// Check if the XML already has a DOCTYPE. If it does then replace the values and remove entities for processing
final Matcher matcher = DOCTYPE_PATTERN.matcher(xml);
if (matcher.find()) {
String preamble = matcher.group("Preamble");
String name = matcher.group("Name");
String systemId = matcher.group("SystemId");
String declaredEntities = matcher.group("Entities");
String doctype = matcher.group();
String newDoctype = doctype.replace(name, dtdRootEleName);
if (systemId != null) {
newDoctype = newDoctype.replace(systemId, dtdFileName);
}
if (declaredEntities != null) {
// Replace the existing internal subset with the supplied entities.
newDoctype = newDoctype.replace(declaredEntities, " [\n" + entities + "\n]");
} else {
// No internal subset: splice one in just before the closing '>'.
newDoctype = newDoctype.substring(0, newDoctype.length() - 1) + " [\n" + entities + "\n]>";
}
if (preamble == null) {
final StringBuilder output = new StringBuilder();
output.append("<?xml version='1.0' encoding='UTF-8' ?>\n");
output.append(xml.replace(doctype, newDoctype));
return output.toString();
} else {
return xml.replace(doctype, newDoctype);
}
} else {
// The XML doesn't have any doctype so add it
final String preamble = XMLUtilities.findPreamble(xml);
if (preamble != null) {
final StringBuilder doctype = new StringBuilder();
doctype.append(preamble);
appendDoctype(doctype, dtdRootEleName, dtdFileName, entities);
return xml.replace(preamble, doctype.toString());
} else {
final StringBuilder output = new StringBuilder();
output.append("<?xml version='1.0' encoding='UTF-8' ?>\n");
appendDoctype(output, dtdRootEleName, dtdFileName, entities);
output.append(xml);
return output.toString();
}
}
} }
|
public class class_name {
    /**
     * Ensures {@code xml} carries an XML declaration and a DOCTYPE pointing at
     * {@code dtdFileName} with root element {@code dtdRootEleName} and the given internal
     * entity declarations; rewrites an existing DOCTYPE or inserts a new one.
     */
    private String setXmlPreambleAndDTD(final String xml, final String dtdFileName, final String entities, final String dtdRootEleName) {
        // Check if the XML already has a DOCTYPE. If it does then replace the values and remove entities for processing
        final Matcher matcher = DOCTYPE_PATTERN.matcher(xml);
        if (matcher.find()) {
            String preamble = matcher.group("Preamble");
            String name = matcher.group("Name");
            String systemId = matcher.group("SystemId");
            String declaredEntities = matcher.group("Entities");
            String doctype = matcher.group();
            String newDoctype = doctype.replace(name, dtdRootEleName);
            if (systemId != null) {
                newDoctype = newDoctype.replace(systemId, dtdFileName); // depends on control dependency: [if], data = [(systemId]
            }
            if (declaredEntities != null) {
                newDoctype = newDoctype.replace(declaredEntities, " [\n" + entities + "\n]"); // depends on control dependency: [if], data = [(declaredEntities]
            } else {
                newDoctype = newDoctype.substring(0, newDoctype.length() - 1) + " [\n" + entities + "\n]>"; // depends on control dependency: [if], data = [none]
            }
            if (preamble == null) {
                final StringBuilder output = new StringBuilder();
                output.append("<?xml version='1.0' encoding='UTF-8' ?>\n");
                output.append(xml.replace(doctype, newDoctype)); // depends on control dependency: [if], data = [none]
                return output.toString(); // depends on control dependency: [if], data = [none]
            } else {
                return xml.replace(doctype, newDoctype); // depends on control dependency: [if], data = [none]
            }
        } else {
            // The XML doesn't have any doctype so add it
            final String preamble = XMLUtilities.findPreamble(xml);
            if (preamble != null) {
                final StringBuilder doctype = new StringBuilder();
                doctype.append(preamble); // depends on control dependency: [if], data = [(preamble]
                appendDoctype(doctype, dtdRootEleName, dtdFileName, entities); // depends on control dependency: [if], data = [none]
                return xml.replace(preamble, doctype.toString()); // depends on control dependency: [if], data = [(preamble]
            } else {
                final StringBuilder output = new StringBuilder();
                output.append("<?xml version='1.0' encoding='UTF-8' ?>\n");
                appendDoctype(output, dtdRootEleName, dtdFileName, entities);
                output.append(xml); // depends on control dependency: [if], data = [none]
                return output.toString(); // depends on control dependency: [if], data = [none]
            }
        }
    } }
|
public class class_name {
    /**
     * Applies configuration from {@code properties}: for each recognized key that is
     * present, the corresponding setter is invoked with the parsed value. Keys that are
     * absent leave the current configuration untouched.
     *
     * @param properties raw configuration source; wrapped in {@link Props} for typed access
     */
    private void setProperties(Properties properties) {
        Props props = new Props(properties);
        if(props.containsKey(BOOTSTRAP_URLS_PROPERTY)) {
            setBootstrapURLs(props.getList(BOOTSTRAP_URLS_PROPERTY));
        }
        if(props.containsKey(FAT_CLIENTS_CONFIG_SOURCE)) {
            setFatClientConfigSource(StoreClientConfigSource.get(props.getString(FAT_CLIENTS_CONFIG_SOURCE)));
        }
        if(props.containsKey(FAT_CLIENTS_CONFIG_FILE_PATH_PROPERTY)) {
            setFatClientConfigPath(props.getString(FAT_CLIENTS_CONFIG_FILE_PATH_PROPERTY));
        }
        if(props.containsKey(METADATA_CHECK_INTERVAL_IN_MS)) {
            setMetadataCheckIntervalInMs(props.getInt(METADATA_CHECK_INTERVAL_IN_MS));
        }
        if(props.containsKey(NETTY_SERVER_PORT)) {
            setServerPort(props.getInt(NETTY_SERVER_PORT));
        }
        if(props.containsKey(NETTY_SERVER_BACKLOG)) {
            setNettyServerBacklog(props.getInt(NETTY_SERVER_BACKLOG));
        }
        if(props.containsKey(COORDINATOR_CORE_THREADS)) {
            setCoordinatorCoreThreads(props.getInt(COORDINATOR_CORE_THREADS));
        }
        if(props.containsKey(COORDINATOR_MAX_THREADS)) {
            setCoordinatorMaxThreads(props.getInt(COORDINATOR_MAX_THREADS));
        }
        if(props.containsKey(COORDINATOR_QUEUED_REQUESTS)) {
            setCoordinatorQueuedRequestsSize(props.getInt(COORDINATOR_QUEUED_REQUESTS));
        }
        if(props.containsKey(HTTP_MESSAGE_DECODER_MAX_INITIAL_LINE_LENGTH)) {
            setHttpMessageDecoderMaxInitialLength(props.getInt(HTTP_MESSAGE_DECODER_MAX_INITIAL_LINE_LENGTH));
        }
        if(props.containsKey(HTTP_MESSAGE_DECODER_MAX_HEADER_SIZE)) {
            setHttpMessageDecoderMaxHeaderSize(props.getInt(HTTP_MESSAGE_DECODER_MAX_HEADER_SIZE));
        }
        if(props.containsKey(HTTP_MESSAGE_DECODER_MAX_CHUNK_SIZE)) {
            setHttpMessageDecoderMaxChunkSize(props.getInt(HTTP_MESSAGE_DECODER_MAX_CHUNK_SIZE));
        }
        if(props.containsKey(ADMIN_ENABLE)) {
            setAdminServiceEnabled(props.getBoolean(ADMIN_ENABLE));
        }
        if(props.containsKey(ADMIN_PORT)) {
            setAdminPort(props.getInt(ADMIN_PORT));
        }
    } }
|
public class class_name {
    /**
     * Applies configuration from {@code properties}: each recognized key that is present
     * drives the matching setter; absent keys leave current values unchanged.
     */
    private void setProperties(Properties properties) {
        Props props = new Props(properties);
        if(props.containsKey(BOOTSTRAP_URLS_PROPERTY)) {
            setBootstrapURLs(props.getList(BOOTSTRAP_URLS_PROPERTY)); // depends on control dependency: [if], data = [none]
        }
        if(props.containsKey(FAT_CLIENTS_CONFIG_SOURCE)) {
            setFatClientConfigSource(StoreClientConfigSource.get(props.getString(FAT_CLIENTS_CONFIG_SOURCE))); // depends on control dependency: [if], data = [none]
        }
        if(props.containsKey(FAT_CLIENTS_CONFIG_FILE_PATH_PROPERTY)) {
            setFatClientConfigPath(props.getString(FAT_CLIENTS_CONFIG_FILE_PATH_PROPERTY)); // depends on control dependency: [if], data = [none]
        }
        if(props.containsKey(METADATA_CHECK_INTERVAL_IN_MS)) {
            setMetadataCheckIntervalInMs(props.getInt(METADATA_CHECK_INTERVAL_IN_MS)); // depends on control dependency: [if], data = [none]
        }
        if(props.containsKey(NETTY_SERVER_PORT)) {
            setServerPort(props.getInt(NETTY_SERVER_PORT)); // depends on control dependency: [if], data = [none]
        }
        if(props.containsKey(NETTY_SERVER_BACKLOG)) {
            setNettyServerBacklog(props.getInt(NETTY_SERVER_BACKLOG)); // depends on control dependency: [if], data = [none]
        }
        if(props.containsKey(COORDINATOR_CORE_THREADS)) {
            setCoordinatorCoreThreads(props.getInt(COORDINATOR_CORE_THREADS)); // depends on control dependency: [if], data = [none]
        }
        if(props.containsKey(COORDINATOR_MAX_THREADS)) {
            setCoordinatorMaxThreads(props.getInt(COORDINATOR_MAX_THREADS)); // depends on control dependency: [if], data = [none]
        }
        if(props.containsKey(COORDINATOR_QUEUED_REQUESTS)) {
            setCoordinatorQueuedRequestsSize(props.getInt(COORDINATOR_QUEUED_REQUESTS)); // depends on control dependency: [if], data = [none]
        }
        if(props.containsKey(HTTP_MESSAGE_DECODER_MAX_INITIAL_LINE_LENGTH)) {
            setHttpMessageDecoderMaxInitialLength(props.getInt(HTTP_MESSAGE_DECODER_MAX_INITIAL_LINE_LENGTH)); // depends on control dependency: [if], data = [none]
        }
        if(props.containsKey(HTTP_MESSAGE_DECODER_MAX_HEADER_SIZE)) {
            setHttpMessageDecoderMaxHeaderSize(props.getInt(HTTP_MESSAGE_DECODER_MAX_HEADER_SIZE)); // depends on control dependency: [if], data = [none]
        }
        if(props.containsKey(HTTP_MESSAGE_DECODER_MAX_CHUNK_SIZE)) {
            setHttpMessageDecoderMaxChunkSize(props.getInt(HTTP_MESSAGE_DECODER_MAX_CHUNK_SIZE)); // depends on control dependency: [if], data = [none]
        }
        if(props.containsKey(ADMIN_ENABLE)) {
            setAdminServiceEnabled(props.getBoolean(ADMIN_ENABLE)); // depends on control dependency: [if], data = [none]
        }
        if(props.containsKey(ADMIN_PORT)) {
            setAdminPort(props.getInt(ADMIN_PORT)); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Refreshes the in-app billing inventory: queries owned in-app purchases, optionally
     * their SKU details, and (when subscriptions are supported) owned subscriptions plus
     * their details.
     *
     * @param querySkuDetails whether to also fetch SKU details (prices etc.)
     * @param moreItemSkus    extra in-app SKUs to include in the details query; passed through
     * @param moreSubsSkus    extra subscription SKUs to include in the details query; passed through
     * @return the populated {@link Inventory}
     * @throws IabException if any query returns a non-OK billing response, or on
     *                      remote/JSON failures (cause preserved)
     */
    public Inventory queryInventory(boolean querySkuDetails, List<String> moreItemSkus,
                                        List<String> moreSubsSkus) throws IabException {
        checkSetupDone("queryInventory");
        try {
            Inventory inv = new Inventory();
            int r = queryPurchases(inv, ITEM_TYPE_INAPP);
            if (r != BILLING_RESPONSE_RESULT_OK) {
                throw new IabException(r, "Error refreshing inventory (querying owned items).");
            }
            if (querySkuDetails) {
                r = querySkuDetails(ITEM_TYPE_INAPP, inv, moreItemSkus);
                if (r != BILLING_RESPONSE_RESULT_OK) {
                    throw new IabException(r, "Error refreshing inventory (querying prices of items).");
                }
            }
            // if subscriptions are supported, then also query for subscriptions
            if (mSubscriptionsSupported) {
                r = queryPurchases(inv, ITEM_TYPE_SUBS);
                if (r != BILLING_RESPONSE_RESULT_OK) {
                    throw new IabException(r, "Error refreshing inventory (querying owned subscriptions).");
                }
                if (querySkuDetails) {
                    r = querySkuDetails(ITEM_TYPE_SUBS, inv, moreSubsSkus);
                    if (r != BILLING_RESPONSE_RESULT_OK) {
                        throw new IabException(r, "Error refreshing inventory (querying prices of subscriptions).");
                    }
                }
            }
            return inv;
        } catch (RemoteException e) {
            throw new IabException(IABHELPER_REMOTE_EXCEPTION, "Remote exception while refreshing inventory.", e);
        } catch (JSONException e) {
            throw new IabException(IABHELPER_BAD_RESPONSE, "Error parsing JSON response while refreshing inventory.", e);
        }
    } }
|
public class class_name {
    /**
     * Refreshes the in-app billing inventory (owned items, optional SKU details, and —
     * when supported — subscriptions); throws {@link IabException} on any non-OK response.
     */
    public Inventory queryInventory(boolean querySkuDetails, List<String> moreItemSkus,
                                        List<String> moreSubsSkus) throws IabException {
        checkSetupDone("queryInventory");
        try {
            Inventory inv = new Inventory();
            int r = queryPurchases(inv, ITEM_TYPE_INAPP);
            if (r != BILLING_RESPONSE_RESULT_OK) {
                throw new IabException(r, "Error refreshing inventory (querying owned items).");
            }
            if (querySkuDetails) {
                r = querySkuDetails(ITEM_TYPE_INAPP, inv, moreItemSkus); // depends on control dependency: [if], data = [none]
                if (r != BILLING_RESPONSE_RESULT_OK) {
                    throw new IabException(r, "Error refreshing inventory (querying prices of items).");
                }
            }
            // if subscriptions are supported, then also query for subscriptions
            if (mSubscriptionsSupported) {
                r = queryPurchases(inv, ITEM_TYPE_SUBS); // depends on control dependency: [if], data = [none]
                if (r != BILLING_RESPONSE_RESULT_OK) {
                    throw new IabException(r, "Error refreshing inventory (querying owned subscriptions).");
                }
                if (querySkuDetails) {
                    r = querySkuDetails(ITEM_TYPE_SUBS, inv, moreSubsSkus); // depends on control dependency: [if], data = [none]
                    if (r != BILLING_RESPONSE_RESULT_OK) {
                        throw new IabException(r, "Error refreshing inventory (querying prices of subscriptions).");
                    }
                }
            }
            return inv;
        } catch (RemoteException e) {
            throw new IabException(IABHELPER_REMOTE_EXCEPTION, "Remote exception while refreshing inventory.", e);
        } catch (JSONException e) {
            throw new IabException(IABHELPER_BAD_RESPONSE, "Error parsing JSON response while refreshing inventory.", e);
        }
    } }
|
public class class_name {
    /**
     * Marshals the application id and job id of {@code getExportJobRequest} through the
     * given {@link ProtocolMarshaller} using their field bindings.
     *
     * @throws SdkClientException if the request is {@code null} or marshalling fails
     *                            (original exception preserved as the cause)
     */
    public void marshall(GetExportJobRequest getExportJobRequest, ProtocolMarshaller protocolMarshaller) {
        if (getExportJobRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getExportJobRequest.getApplicationId(), APPLICATIONID_BINDING);
            protocolMarshaller.marshall(getExportJobRequest.getJobId(), JOBID_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    } }
|
public class class_name {
    /**
     * Marshals the application id and job id of {@code getExportJobRequest}; wraps any
     * failure in {@link SdkClientException} with the cause preserved.
     */
    public void marshall(GetExportJobRequest getExportJobRequest, ProtocolMarshaller protocolMarshaller) {
        if (getExportJobRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getExportJobRequest.getApplicationId(), APPLICATIONID_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(getExportJobRequest.getJobId(), JOBID_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Takes a state-machine snapshot if none is pending, the state machine is
     * {@link Snapshottable}, and the log's compact index and lastApplied have both passed
     * the current snapshot's index. The snapshot data is written asynchronously on the
     * snapshot executor; completion is deliberately deferred (see inline comment).
     */
    private void takeSnapshot() {
        state.checkThread();
        // If no snapshot has been taken, take a snapshot and hold it in memory until the complete
        // index has met the snapshot index. Note that this will be executed in the state machine thread.
        // Snapshots are only taken of the state machine when the log becomes compactable. If the log compactor's
        // compactIndex is greater than the last snapshot index and the lastApplied index is greater than the
        // last snapshot index, take the snapshot.
        Snapshot currentSnapshot = state.getSnapshotStore().currentSnapshot();
        if (pendingSnapshot == null && stateMachine instanceof Snapshottable
                && (currentSnapshot == null || (log.compactor().compactIndex() > currentSnapshot.index() && lastApplied > currentSnapshot.index()))) {
            pendingSnapshot = state.getSnapshotStore().createSnapshot(lastApplied);
            // Write the snapshot data. Note that we don't complete the snapshot here since the completion
            // of a snapshot is predicated on session events being received by clients up to the snapshot index.
            LOGGER.info("{} - Taking snapshot {}", state.getCluster().member().address(), pendingSnapshot.index());
            // NOTE(review): the lambda reads the pendingSnapshot FIELD (not a captured local);
            // if the field is reassigned/cleared before the executor runs, the lambda would see
            // the new value — confirm the lifecycle guarantees this cannot happen.
            executor.executor().execute(() -> {
                synchronized (pendingSnapshot) {
                    try (SnapshotWriter writer = pendingSnapshot.writer()) {
                        ((Snapshottable) stateMachine).snapshot(writer);
                    }
                }
            });
        }
    } }
|
public class class_name {
    /**
     * Takes a state-machine snapshot when none is pending, the state machine is
     * snapshottable, and both the compact index and lastApplied have passed the current
     * snapshot's index; the data is written asynchronously on the snapshot executor.
     */
    private void takeSnapshot() {
        state.checkThread();
        // If no snapshot has been taken, take a snapshot and hold it in memory until the complete
        // index has met the snapshot index. Note that this will be executed in the state machine thread.
        // Snapshots are only taken of the state machine when the log becomes compactable. If the log compactor's
        // compactIndex is greater than the last snapshot index and the lastApplied index is greater than the
        // last snapshot index, take the snapshot.
        Snapshot currentSnapshot = state.getSnapshotStore().currentSnapshot();
        if (pendingSnapshot == null && stateMachine instanceof Snapshottable
                && (currentSnapshot == null || (log.compactor().compactIndex() > currentSnapshot.index() && lastApplied > currentSnapshot.index()))) {
            pendingSnapshot = state.getSnapshotStore().createSnapshot(lastApplied); // depends on control dependency: [if], data = [none]
            // Write the snapshot data. Note that we don't complete the snapshot here since the completion
            // of a snapshot is predicated on session events being received by clients up to the snapshot index.
            LOGGER.info("{} - Taking snapshot {}", state.getCluster().member().address(), pendingSnapshot.index()); // depends on control dependency: [if], data = [none]
            executor.executor().execute(() -> {
                synchronized (pendingSnapshot) { // depends on control dependency: [if], data = [none]
                    try (SnapshotWriter writer = pendingSnapshot.writer()) {
                        ((Snapshottable) stateMachine).snapshot(writer);
                    }
                }
            });
        }
    } }
|
public class class_name {
    /**
     * Parses {@code string} as an int, falling back to {@code def} when parsing is not
     * possible.
     *
     * @param string       the text to parse; may be {@code null}, in which case the
     *                     default is returned
     * @param abortOnError if {@code true}, a non-numeric (non-null) string aborts the
     *                     step with a {@link TransformationOperationException}; otherwise
     *                     {@code def} is returned and a warning is logged
     * @param def          the fallback value
     * @return the parsed value, or {@code def} on fallback
     * @throws TransformationOperationException if {@code string} is non-null, not a
     *                                          number, and {@code abortOnError} is set
     */
    protected Integer parseIntString(String string, boolean abortOnError, int def)
            throws TransformationOperationException {
        if (string == null) {
            // Bug fix: the original fell through to Integer.parseInt(null), which throws
            // NumberFormatException and could abort the step — contradicting this log
            // message, which documents the intended behavior (use the default).
            logger.debug("Given string is empty so the default value is taken.");
            return def;
        }
        try {
            return Integer.parseInt(string);
        } catch (NumberFormatException e) {
            StringBuilder builder = new StringBuilder();
            builder.append("The string ").append(string).append(" is not a number. ");
            if (abortOnError) {
                builder.append("The step will be skipped.");
            } else {
                builder.append(def).append(" will be taken instead.");
            }
            logger.warn(builder.toString());
            if (abortOnError) {
                throw new TransformationOperationException(builder.toString());
            }
            return def;
        }
    } }
|
public class class_name {
    /**
     * Parses {@code string} as an int; on {@link NumberFormatException} either aborts
     * with a {@link TransformationOperationException} (when {@code abortOnError}) or
     * returns {@code def}.
     */
    protected Integer parseIntString(String string, boolean abortOnError, int def)
            throws TransformationOperationException {
        Integer integer = def;
        if (string == null) {
            // NOTE(review): despite this message, execution still falls through to
            // Integer.parseInt(null) below, which throws and may abort — confirm intent.
            logger.debug("Given string is empty so the default value is taken.");
        }
        try {
            integer = Integer.parseInt(string);
        } catch (NumberFormatException e) {
            StringBuilder builder = new StringBuilder();
            builder.append("The string ").append(string).append(" is not a number. ");
            if (abortOnError) {
                builder.append("The step will be skipped."); // depends on control dependency: [if], data = [none]
            } else {
                builder.append(def).append(" will be taken instead."); // depends on control dependency: [if], data = [none]
            }
            logger.warn(builder.toString());
            if (abortOnError) {
                throw new TransformationOperationException(builder.toString());
            }
        }
        return integer;
    } }
|
public class class_name {
    /**
     * Reads up to {@code len} bytes of chunked data into {@code b}, never reading past
     * the current chunk boundary; advances to the next chunk when the current one is
     * exhausted.
     *
     * @return the number of bytes read, or -1 at end of the chunked stream
     * @throws IOException if the stream is closed or the underlying stream ends before
     *                     the current chunk is complete
     */
    @Override
    public int read (byte[] b, int off, int len) throws IOException {
        if (closed) {
            throw new IOException("Attempted read from closed stream.");
        }
        if (eof) {
            return -1;
        }
        if (pos >= chunkSize) {
            nextChunk();
            if (eof) {
                return -1;
            }
        }
        // Never read across the chunk boundary.
        len = Math.min(len, chunkSize - pos);
        int count = in.read(b, off, len);
        // Bug fix: the original did "pos += count" unconditionally, so a -1 from the
        // underlying stream (premature EOF inside a chunk) silently corrupted pos.
        if (count == -1) {
            throw new IOException("Truncated chunk: expected " + (chunkSize - pos) + " more bytes.");
        }
        pos += count;
        return count;
    } }
|
public class class_name {
    /**
     * Reads up to {@code len} bytes of chunked data into {@code b}, capped at the current
     * chunk boundary; advances to the next chunk when the current one is exhausted.
     * Returns -1 at end of stream.
     */
    @Override
    public int read (byte[] b, int off, int len) throws IOException {
        if (closed) {
            throw new IOException("Attempted read from closed stream.");
        }
        if (eof) {
            return -1;
        }
        if (pos >= chunkSize) {
            nextChunk();
            if (eof) {
                return -1; // depends on control dependency: [if], data = [none]
            }
        }
        // NOTE(review): in.read may return -1 mid-chunk; "pos += count" would then
        // decrement pos — confirm the underlying stream cannot end inside a chunk.
        len = Math.min(len, chunkSize - pos);
        int count = in.read(b, off, len);
        pos += count;
        return count;
    } }
|
public class class_name {
    /**
     * Records {@code value} under {@code key} as its decimal string form; null or
     * non-positive values are silently ignored.
     */
    public void add(final Param key, final Integer value) {
        // Guard clause: only positive, non-null values are stored.
        if (value == null || value <= 0) {
            return;
        }
        parameters.put(key, String.valueOf(value));
    } }
|
public class class_name {
    /**
     * Records {@code value} under {@code key} as its decimal string form; null or
     * non-positive values are ignored.
     */
    public void add(final Param key, final Integer value) {
        if (value != null && value > 0) {
            parameters.put(key, String.valueOf(value)); // depends on control dependency: [if], data = [(value]
        }
    } }
|
public class class_name {
    /**
     * Commits a cache entry, coordinating with state-transfer / cross-site tracking.
     * L1 invalidations and untracked normal puts commit immediately; state-transfer puts
     * whose tracking is disabled are dropped; everything else commits through the tracker,
     * which can discard entries already overwritten per their {@code DiscardPolicy}.
     *
     * @param entry     the entry to commit
     * @param operation flag identifying the operation kind; {@code null} means a normal put
     * @param segment   the entry's segment
     * @param l1Only    whether this is only an L1 write/invalidation
     * @param ctx       the invocation context
     */
    public final void commit(final CacheEntry entry, final Flag operation, int segment,
                                 boolean l1Only, InvocationContext ctx) {
        if (trace) {
            log.tracef("Trying to commit. Key=%s. Operation Flag=%s, L1 write/invalidation=%s", toStr(entry.getKey()),
                    operation, l1Only);
        }
        if (l1Only || (operation == null && !trackStateTransfer && !trackXSiteStateTransfer)) {
            //track == null means that it is a normal put and the tracking is not enabled!
            //if it is a L1 invalidation, commit without track it.
            if (trace) {
                log.tracef("Committing key=%s. It is a L1 invalidation or a normal put and no tracking is enabled!",
                        toStr(entry.getKey()));
            }
            commitEntry(entry, segment, ctx);
            return;
        }
        if (isTrackDisabled(operation)) {
            //this a put for state transfer but we are not tracking it. This means that the state transfer has ended
            //or canceled due to a clear command.
            if (trace) {
                log.tracef("Not committing key=%s. It is a state transfer key but no track is enabled!",
                        toStr(entry.getKey()));
            }
            return;
        }
        // Atomic per-key decision: commit unless the current discard policy says this
        // operation was already superseded; then record the policy for the next writer.
        tracker.compute(entry.getKey(), (o, discardPolicy) -> {
            if (discardPolicy != null && discardPolicy.ignore(operation)) {
                if (trace) {
                    log.tracef("Not committing key=%s. It was already overwritten! Discard policy=%s",
                            toStr(entry.getKey()), discardPolicy);
                }
                return discardPolicy;
            }
            commitEntry(entry, segment, ctx);
            DiscardPolicy newDiscardPolicy = calculateDiscardPolicy(operation);
            if (trace) {
                log.tracef("Committed key=%s. Old discard policy=%s. New discard policy=%s", toStr(entry.getKey()),
                        discardPolicy, newDiscardPolicy);
            }
            return newDiscardPolicy;
        });
    } }
|
public class class_name {
    /**
     * Commits a cache entry, coordinating with state-transfer / cross-site tracking:
     * L1/untracked puts commit directly, disabled-tracking state-transfer puts are
     * dropped, and the rest go through the per-key tracker with discard policies.
     */
    public final void commit(final CacheEntry entry, final Flag operation, int segment,
                                 boolean l1Only, InvocationContext ctx) {
        if (trace) {
            log.tracef("Trying to commit. Key=%s. Operation Flag=%s, L1 write/invalidation=%s", toStr(entry.getKey()),
                    operation, l1Only); // depends on control dependency: [if], data = [none]
        }
        if (l1Only || (operation == null && !trackStateTransfer && !trackXSiteStateTransfer)) {
            //track == null means that it is a normal put and the tracking is not enabled!
            //if it is a L1 invalidation, commit without track it.
            if (trace) {
                log.tracef("Committing key=%s. It is a L1 invalidation or a normal put and no tracking is enabled!",
                        toStr(entry.getKey())); // depends on control dependency: [if], data = [none]
            }
            commitEntry(entry, segment, ctx); // depends on control dependency: [if], data = [none]
            return; // depends on control dependency: [if], data = [none]
        }
        if (isTrackDisabled(operation)) {
            //this a put for state transfer but we are not tracking it. This means that the state transfer has ended
            //or canceled due to a clear command.
            if (trace) {
                log.tracef("Not committing key=%s. It is a state transfer key but no track is enabled!",
                        toStr(entry.getKey())); // depends on control dependency: [if], data = [none]
            }
            return; // depends on control dependency: [if], data = [none]
        }
        tracker.compute(entry.getKey(), (o, discardPolicy) -> {
            if (discardPolicy != null && discardPolicy.ignore(operation)) {
                if (trace) {
                    log.tracef("Not committing key=%s. It was already overwritten! Discard policy=%s",
                            toStr(entry.getKey()), discardPolicy); // depends on control dependency: [if], data = [none]
                }
                return discardPolicy; // depends on control dependency: [if], data = [none]
            }
            commitEntry(entry, segment, ctx);
            DiscardPolicy newDiscardPolicy = calculateDiscardPolicy(operation);
            if (trace) {
                log.tracef("Committed key=%s. Old discard policy=%s. New discard policy=%s", toStr(entry.getKey()),
                        discardPolicy, newDiscardPolicy); // depends on control dependency: [if], data = [none]
            }
            return newDiscardPolicy;
        });
    } }
|
public class class_name {
    /**
     * Blocking left-pop across the given keys, converting the raw byte reply into a
     * key/value list. The borrowed {@link Jedis} connection is always released.
     *
     * @param timeout block timeout in seconds (0 blocks indefinitely, per Redis BLPOP)
     * @param keys    keys to pop from, in priority order
     * @return the converted key/value list
     */
    @SuppressWarnings("rawtypes")
    public List blpop(int timeout, Object... keys) {
        final Jedis jedis = getJedis();
        try {
            return keyValueListFromBytesList(jedis.blpop(timeout, keysToBytesArray(keys)));
        } finally {
            close(jedis);
        }
    } }
|
public class class_name {
    /**
     * Blocking left-pop across the given keys; converts the raw byte reply into a
     * key/value list and always releases the borrowed connection.
     */
    @SuppressWarnings("rawtypes")
    public List blpop(int timeout, Object... keys) {
        Jedis jedis = getJedis();
        try {
            List<byte[]> data = jedis.blpop(timeout, keysToBytesArray(keys));
            return keyValueListFromBytesList(data);
            // depends on control dependency: [try], data = [none]
        }
        finally {close(jedis);}
    } }
|
public class class_name {
    /**
     * Builds a {@link CmsLinkBean} from the text box content and the internal flag.
     *
     * @return the link bean, or {@code null} when the text box is empty or whitespace-only
     */
    public CmsLinkBean getLinkBean() {
        final String link = m_textbox.getText();
        return CmsStringUtil.isEmptyOrWhitespaceOnly(link)
            ? null
            : new CmsLinkBean(link, m_internal);
    } }
|
public class class_name {
    /**
     * Builds a {@link CmsLinkBean} from the text box content and the internal flag;
     * returns {@code null} when the text box is empty or whitespace-only.
     */
    public CmsLinkBean getLinkBean() {
        String link = m_textbox.getText();
        if (CmsStringUtil.isEmptyOrWhitespaceOnly(link)) {
            return null; // depends on control dependency: [if], data = [none]
        }
        return new CmsLinkBean(m_textbox.getText(), m_internal);
    } }
|
public class class_name {
    /**
     * Claims a coroutine ID in this set.
     * <p>
     * A non-negative {@code coroutineID} is a caller-chosen ID: it is claimed if it is in
     * range and not already active. A negative value requests auto-assignment of the
     * lowest free ID.
     *
     * @param coroutineID requested ID, or any negative value for auto-assignment
     * @return the claimed ID, or -1 if the requested ID is taken/out of range or no free
     *         ID exists
     */
    public synchronized int co_joinCoroutineSet(int coroutineID)
    {
        if (coroutineID >= 0)
        {
            // Caller-chosen ID: reject out-of-range or already-active values.
            if (coroutineID >= m_unreasonableId || m_activeIDs.get(coroutineID))
                return -1;
        }
        else
        {
            // Auto-assignment: linear scan for the lowest clear bit
            // (the bit-set API offers no "find first clear bit").
            int candidate = 0;
            while (candidate < m_unreasonableId && m_activeIDs.get(candidate))
            {
                ++candidate;
            }
            if (candidate >= m_unreasonableId)
                return -1;
            coroutineID = candidate;
        }
        m_activeIDs.set(coroutineID);
        return coroutineID;
    } }
|
public class class_name {
    /**
     * Claims a coroutine ID: a non-negative argument is claimed if free and in range;
     * a negative argument auto-assigns the lowest free ID. Returns -1 on failure.
     */
    public synchronized int co_joinCoroutineSet(int coroutineID)
    {
        if(coroutineID>=0)
        {
            if(coroutineID>=m_unreasonableId || m_activeIDs.get(coroutineID))
                return -1;
        }
        else
        {
            // What I want is "Find first clear bit". That doesn't exist.
            // JDK1.2 added "find last set bit", but that doesn't help now.
            coroutineID=0; // depends on control dependency: [if], data = [none]
            while(coroutineID<m_unreasonableId)
            {
                if(m_activeIDs.get(coroutineID))
                    ++coroutineID;
                else
                    break;
            }
            if(coroutineID>=m_unreasonableId)
                return -1;
        }
        m_activeIDs.set(coroutineID);
        return coroutineID;
    } }
|
public class class_name {
    /**
     * Closes this statement wrapper: detaches it from the parent wrapper and closes the
     * underlying JDBC statement.
     *
     * @param closeWrapperOnly NOTE(review): this parameter is never referenced in the
     *                         body — confirm whether it should gate stmtImpl.close()
     * @return a mapped SQLException if closing the JDBC statement failed, else {@code null}
     */
    protected SQLException closeWrapper(boolean closeWrapperOnly)
    {
        // Indicate the statement is closed by setting the parent object's statement to null.
        // This will allow us to be garbage collected.
        try // Connection wrapper can close at any time.
        {
            parentWrapper.childWrappers.remove(this);
        } catch (RuntimeException runtimeX) {
            // No FFDC code needed; parent wrapper might be closed.
            // Only tolerate the failure when the parent is already closed.
            if (parentWrapper.state != State.CLOSED)
                throw runtimeX;
        }
        try // Close the JDBC driver ResultSet implemenation object.
        {
            stmtImpl.close();
        } catch (SQLException closeX) {
            FFDCFilter.processException(closeX,
                                        "com.ibm.ws.rsadapter.jdbc.WSJdbcStatement.closeWrapper", "314", this);
            stmtImpl = null;
            return WSJdbcUtil.mapException(this, closeX);
        }
        stmtImpl = null;
        return null;
    } }
|
public class class_name {
    /**
     * Closes this statement wrapper: detaches from the parent wrapper, closes the
     * underlying JDBC statement, and returns a mapped SQLException on failure (else null).
     */
    protected SQLException closeWrapper(boolean closeWrapperOnly)
    {
        // Indicate the statement is closed by setting the parent object's statement to null.
        // This will allow us to be garbage collected.
        try // Connection wrapper can close at any time.
        {
            parentWrapper.childWrappers.remove(this); // depends on control dependency: [try], data = [none]
        } catch (RuntimeException runtimeX) {
            // No FFDC code needed; parent wrapper might be closed.
            if (parentWrapper.state != State.CLOSED)
                throw runtimeX;
        } // depends on control dependency: [catch], data = [none]
        try // Close the JDBC driver ResultSet implemenation object.
        {
            stmtImpl.close(); // depends on control dependency: [try], data = [none]
        } catch (SQLException closeX) {
            FFDCFilter.processException(closeX,
                                        "com.ibm.ws.rsadapter.jdbc.WSJdbcStatement.closeWrapper", "314", this);
            stmtImpl = null;
            return WSJdbcUtil.mapException(this, closeX);
        } // depends on control dependency: [catch], data = [none]
        stmtImpl = null;
        return null;
    } }
|
public class class_name {
    /**
     * Builds the bloom filter for one hash bucket: reads all stored hash codes first
     * (because the hash codes and the bloom filter share the same bytes), then points the
     * bloom filter at that region and replays the hashes into it, finally handling any
     * overflow segments.
     *
     * @param bucketInSegmentPos byte offset of the bucket within {@code bucket}
     * @param bucket             the memory segment holding the bucket
     * @param p                  the partition the bucket belongs to
     */
    final void buildBloomFilterForBucket(int bucketInSegmentPos, MemorySegment bucket, HashPartition<BT, PT> p) {
        final int count = bucket.getShort(bucketInSegmentPos + HEADER_COUNT_OFFSET);
        if (count <= 0) {
            return;
        }
        int[] hashCodes = new int[count];
        // As the hashcode and bloom filter occupy same bytes, so we read all hashcode out at first and then write back to bloom filter.
        for (int i = 0; i < count; i++) {
            hashCodes[i] = bucket.getInt(bucketInSegmentPos + BUCKET_HEADER_LENGTH + i * HASH_CODE_LEN);
        }
        this.bloomFilter.setBitsLocation(bucket, bucketInSegmentPos + BUCKET_HEADER_LENGTH);
        for (int hashCode : hashCodes) {
            this.bloomFilter.addHash(hashCode);
        }
        buildBloomFilterForExtraOverflowSegments(bucketInSegmentPos, bucket, p);
    } }
|
public class class_name {
    /**
     * Builds the bloom filter for one hash bucket: reads all stored hash codes first
     * (they share bytes with the filter), then replays them into the filter and handles
     * overflow segments.
     */
    final void buildBloomFilterForBucket(int bucketInSegmentPos, MemorySegment bucket, HashPartition<BT, PT> p) {
        final int count = bucket.getShort(bucketInSegmentPos + HEADER_COUNT_OFFSET);
        if (count <= 0) {
            return; // depends on control dependency: [if], data = [none]
        }
        int[] hashCodes = new int[count];
        // As the hashcode and bloom filter occupy same bytes, so we read all hashcode out at first and then write back to bloom filter.
        for (int i = 0; i < count; i++) {
            hashCodes[i] = bucket.getInt(bucketInSegmentPos + BUCKET_HEADER_LENGTH + i * HASH_CODE_LEN); // depends on control dependency: [for], data = [i]
        }
        this.bloomFilter.setBitsLocation(bucket, bucketInSegmentPos + BUCKET_HEADER_LENGTH);
        for (int hashCode : hashCodes) {
            this.bloomFilter.addHash(hashCode); // depends on control dependency: [for], data = [hashCode]
        }
        buildBloomFilterForExtraOverflowSegments(bucketInSegmentPos, bucket, p);
    } }
|
public class class_name {
    /**
     * Monadic bind: on success, applies {@code mapper} to the contained value; on
     * failure, propagates this failure's throwable without consulting the mapper.
     *
     * @param mapper function producing the next {@code Try}
     * @return the mapped {@code Try}, or a failed one carrying this throwable
     */
    public <U> Try<U> flatMap(Function<? super V, Try<U>> mapper) {
        // Failures short-circuit past the mapper.
        return isSuccess() ? mapper.apply(value) : failed(this.throwable);
    } }
|
public class class_name {
    /**
     * Monadic bind: applies {@code mapper} on success; propagates this failure's
     * throwable otherwise.
     */
    public <U> Try<U> flatMap(Function<? super V, Try<U>> mapper) {
        if (isSuccess()) {
            return mapper.apply(value); // depends on control dependency: [if], data = [none]
        }
        return failed(this.throwable);
    } }
|
public class class_name {
    /**
     * Validates {@code value} for {@code element}, preferring the chronology's
     * primitive-int rule (avoids boxing) and falling back to the generic overload when
     * no int rule exists.
     *
     * @param element the element being validated
     * @param value   the candidate value
     * @return whether the value is valid in this context
     */
    public boolean isValid(
        ChronoElement<Integer> element,
        int value
    ) {
        final IntElementRule<T> intRule = this.getChronology().getIntegerRule(element);
        return (intRule == null)
            ? this.isValid(element, Integer.valueOf(value))
            : intRule.isValid(this.getContext(), value);
    } }
|
public class class_name {
    /**
     * Validates {@code value} for {@code element} via the chronology's primitive-int
     * rule when available, else via the generic boxed overload.
     */
    public boolean isValid(
        ChronoElement<Integer> element,
        int value
    ) {
        IntElementRule<T> intRule = this.getChronology().getIntegerRule(element);
        if (intRule != null) {
            return intRule.isValid(this.getContext(), value); // depends on control dependency: [if], data = [none]
        }
        return this.isValid(element, Integer.valueOf(value));
    } }
|
public class class_name {
    /**
     * Replaces the stored CloudWatch logging option descriptions with a defensive copy
     * of {@code cloudWatchLoggingOptionDescriptions}; a {@code null} argument clears
     * the field.
     */
    public void setCloudWatchLoggingOptionDescriptions(java.util.Collection<CloudWatchLoggingOptionDescription> cloudWatchLoggingOptionDescriptions) {
        this.cloudWatchLoggingOptionDescriptions = (cloudWatchLoggingOptionDescriptions == null)
            ? null
            : new java.util.ArrayList<CloudWatchLoggingOptionDescription>(cloudWatchLoggingOptionDescriptions);
    } }
|
public class class_name {
    /**
     * Replaces the stored CloudWatch logging option descriptions with a defensive copy;
     * {@code null} clears the field.
     */
    public void setCloudWatchLoggingOptionDescriptions(java.util.Collection<CloudWatchLoggingOptionDescription> cloudWatchLoggingOptionDescriptions) {
        if (cloudWatchLoggingOptionDescriptions == null) {
            this.cloudWatchLoggingOptionDescriptions = null; // depends on control dependency: [if], data = [none]
            return; // depends on control dependency: [if], data = [none]
        }
        this.cloudWatchLoggingOptionDescriptions = new java.util.ArrayList<CloudWatchLoggingOptionDescription>(cloudWatchLoggingOptionDescriptions);
    } }
|
public class class_name {
    /**
     * Toggles debug mode: records the flag and switches the log level of every non-null
     * REST adapter to FULL (debug on) or NONE (debug off).
     *
     * @return this instance, for call chaining
     */
    public GetGlue setIsDebug(boolean isDebug) {
        this.isDebug = isDebug;
        final RestAdapter.LogLevel logLevel;
        if (isDebug) {
            logLevel = RestAdapter.LogLevel.FULL;
        } else {
            logLevel = RestAdapter.LogLevel.NONE;
        }
        if (restAdapter != null) {
            restAdapter.setLogLevel(logLevel);
        }
        if (restAdapterApiFour != null) {
            restAdapterApiFour.setLogLevel(logLevel);
        }
        return this;
    } }
|
public class class_name {
    /**
     * Toggles debug mode: records the flag and switches each non-null REST adapter to
     * FULL (debug on) or NONE (debug off). Returns this for chaining.
     */
    public GetGlue setIsDebug(boolean isDebug) {
        this.isDebug = isDebug;
        RestAdapter.LogLevel logLevel = isDebug ? RestAdapter.LogLevel.FULL : RestAdapter.LogLevel.NONE;
        if (restAdapter != null) {
            restAdapter.setLogLevel(logLevel); // depends on control dependency: [if], data = [none]
        }
        if (restAdapterApiFour != null) {
            restAdapterApiFour.setLogLevel(logLevel); // depends on control dependency: [if], data = [none]
        }
        return this;
    } }
|
public class class_name {
    /**
     * Collects the addresses of visible, local, non-system queue-style destinations that
     * match the requested type, availability, and (optional) name pattern.
     *
     * @param destinationNamePattern optional name pattern; {@code null} matches everything
     * @param destinationType        required destination type
     * @param destinationAvailability required availability, checked via isAvailable
     * @return the matching destination addresses
     */
    private SIDestinationAddress[] getDestinationAddresses(
                                                               DestinationNamePattern destinationNamePattern,
                                                               DestinationType destinationType,
                                                               DestinationAvailability destinationAvailability)
    {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc,
                        "getDestinationAddresses",
                        new Object[] { destinationNamePattern, destinationType, destinationAvailability });
        BaseDestinationHandler destinationHandler = null;
        SIDestinationAddress[] destinationAddresses = null;
        // Restrict the index scan to visible, local, non-alias, non-foreign queues.
        DestinationTypeFilter filter = new DestinationTypeFilter();
        filter.LOCAL = Boolean.TRUE;
        filter.FOREIGN_DESTINATION = Boolean.FALSE;
        filter.ALIAS = Boolean.FALSE;
        filter.QUEUE = Boolean.TRUE; //Any destination type apart from topicspace is treated as a queue
        filter.VISIBLE = Boolean.TRUE;
        SIMPIterator iterator = destinationIndex.iterator(filter);
        int i = 0;
        List<String> addresses = new Vector<String>();
        while (iterator.hasNext())
        {
            destinationHandler = (BaseDestinationHandler) iterator.next();
            if (isAvailable(destinationHandler, destinationAvailability) && (!(destinationHandler.isSystem())))
            {
                // Check that the destination is the type that we want
                if (destinationHandler.getDestinationType() == destinationType)
                {
                    String destinationName = destinationHandler.getName();
                    // Check whether the user has specified a pattern to match against.
                    if (destinationNamePattern == null || destinationNamePattern.match(destinationName))
                    {
                        // Add this destination to the Address list.
                        addresses.add(i, destinationName);
                        i++;
                    }
                }
            }
        }
        // Convert the collected names into SIDestinationAddress objects.
        destinationAddresses = new SIDestinationAddress[addresses.size()];
        for (int cursor = 0; cursor < addresses.size(); cursor++)
        {
            // Create the new SIDestinationAddress
            destinationAddresses[cursor] =
                            ((SIDestinationAddressFactory) MessageProcessor.getSingletonInstance(
                                                                                                 SIMPConstants.SI_DESTINATION_ADDRESS_FACTORY)).createSIDestinationAddress(
                                                                                                                                                                           addresses.get(cursor),
                                                                                                                                                                           true);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "getDestinationAddresses", new Object[] { destinationAddresses });
        return destinationAddresses;
    } }
|
public class class_name {
    /**
     * Collects the addresses of visible, local, non-system queue-style destinations
     * matching the requested type, availability, and optional name pattern.
     */
    private SIDestinationAddress[] getDestinationAddresses(
                                                               DestinationNamePattern destinationNamePattern,
                                                               DestinationType destinationType,
                                                               DestinationAvailability destinationAvailability)
    {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc,
                        "getDestinationAddresses",
                        new Object[] { destinationNamePattern, destinationType, destinationAvailability });
        BaseDestinationHandler destinationHandler = null;
        SIDestinationAddress[] destinationAddresses = null;
        DestinationTypeFilter filter = new DestinationTypeFilter();
        filter.LOCAL = Boolean.TRUE;
        filter.FOREIGN_DESTINATION = Boolean.FALSE;
        filter.ALIAS = Boolean.FALSE;
        filter.QUEUE = Boolean.TRUE; //Any destination type apart from topicspace is treated as a queue
        filter.VISIBLE = Boolean.TRUE;
        SIMPIterator iterator = destinationIndex.iterator(filter);
        int i = 0;
        List<String> addresses = new Vector<String>();
        while (iterator.hasNext())
        {
            destinationHandler = (BaseDestinationHandler) iterator.next(); // depends on control dependency: [while], data = [none]
            if (isAvailable(destinationHandler, destinationAvailability) && (!(destinationHandler.isSystem())))
            {
                // Check that the destination is the type that we want
                if (destinationHandler.getDestinationType() == destinationType)
                {
                    String destinationName = destinationHandler.getName();
                    // Check whether the user has specified a pattern to match against.
                    if (destinationNamePattern == null || destinationNamePattern.match(destinationName))
                    {
                        // Add this destination to the Address list.
                        addresses.add(i, destinationName); // depends on control dependency: [if], data = [none]
                        i++; // depends on control dependency: [if], data = [none]
                    }
                }
            }
        }
        destinationAddresses = new SIDestinationAddress[addresses.size()];
        for (int cursor = 0; cursor < addresses.size(); cursor++)
        {
            // Create the new SIDestinationAddress
            destinationAddresses[cursor] =
                            ((SIDestinationAddressFactory) MessageProcessor.getSingletonInstance(
                                                                                                 SIMPConstants.SI_DESTINATION_ADDRESS_FACTORY)).createSIDestinationAddress(
                                                                                                                                                                           addresses.get(cursor),
                                                                                                                                                                           true); // depends on control dependency: [for], data = [cursor]
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "getDestinationAddresses", new Object[] { destinationAddresses });
        return destinationAddresses;
    } }
|
public class class_name {
    /**
     * Splits a clock value of the form {@code "HH:MM"} or {@code "HH:MM:SS"}
     * on {@code ':'} and feeds the hour, minute and (when present) second
     * components into the date/time word pool as CJK time-entity words.
     *
     * @param wPool   the word pool to fill
     * @param timeVal colon-separated time string; hour and minute are required
     */
    public static final void fillTimeToPool(
        IWord[] wPool, String timeVal)
    {
        final String[] parts = timeVal.split(":");
        // hour component
        final Word hour = new Word(parts[0]+"点", IWord.T_CJK_WORD, Entity.E_TIME_H_A);
        TimeUtil.fillDateTimePool(wPool, TimeUtil.DATETIME_H, hour);
        // minute component
        final Word minute = new Word(parts[1]+"分", IWord.T_CJK_WORD, Entity.E_TIME_I_A);
        TimeUtil.fillDateTimePool(wPool, TimeUtil.DATETIME_I, minute);
        // second component is optional
        if ( parts.length == 3 ) {
            final Word second = new Word(parts[2]+"秒", IWord.T_CJK_WORD, Entity.E_TIME_S_A);
            TimeUtil.fillDateTimePool(wPool, TimeUtil.DATETIME_S, second);
        }
    } }
|
public class class_name {
    /**
     * Splits a clock value of the form {@code "HH:MM"} or {@code "HH:MM:SS"}
     * on {@code ':'} and feeds the hour, minute and (when present) second
     * components into the date/time word pool as CJK time-entity words.
     *
     * @param wPool   the word pool to fill
     * @param timeVal colon-separated time string; hour and minute are required
     */
    public static final void fillTimeToPool(
        IWord[] wPool, String timeVal)
    {
        String[] p = timeVal.split(":");
        // hour component
        TimeUtil.fillDateTimePool(wPool, TimeUtil.DATETIME_H,
            new Word(p[0]+"点", IWord.T_CJK_WORD, Entity.E_TIME_H_A));
        // minute component
        TimeUtil.fillDateTimePool(wPool, TimeUtil.DATETIME_I,
            new Word(p[1]+"分", IWord.T_CJK_WORD, Entity.E_TIME_I_A));
        if ( p.length == 3 ) {
            // second component is optional
            TimeUtil.fillDateTimePool(wPool, TimeUtil.DATETIME_S,
                new Word(p[2]+"秒", IWord.T_CJK_WORD, Entity.E_TIME_S_A));
        }
    } }
|
public class class_name {
    /**
     * JSON accessor for the "id" property. Site-local entities are not
     * serialized with an id: for those this returns {@code null}, which the
     * {@code NON_EMPTY} inclusion rule then omits from the output entirely.
     *
     * @return the entity id, or {@code null} when the site IRI is site-local
     */
    @JsonInclude(Include.NON_EMPTY)
    @JsonProperty("id")
    public String getJsonId() {
        // Guard clause: suppress the id for site-local entities.
        if (EntityIdValue.SITE_LOCAL.equals(this.siteIri)) {
            return null;
        }
        return this.entityId;
    } }
|
public class class_name {
    /**
     * JSON accessor for the "id" property. Site-local entities are not
     * serialized with an id: for those this returns {@code null}, which the
     * {@code NON_EMPTY} inclusion rule then omits from the output entirely.
     *
     * @return the entity id, or {@code null} when the site IRI is site-local
     */
    @JsonInclude(Include.NON_EMPTY)
    @JsonProperty("id")
    public String getJsonId() {
        if (!EntityIdValue.SITE_LOCAL.equals(this.siteIri)) {
            return this.entityId;
        } else {
            // site-local: id suppressed from serialization
            return null;
        }
    } }
|
public class class_name {
    /**
     * Returns the next word from the corpus, refilling the internal word
     * buffer one line at a time. Blank lines are skipped; each non-blank
     * line is whitespace-split into words, and a {@code -3} sentinel is
     * written to the cache to mark the sentence boundary before the new
     * line is buffered.
     *
     * @param raf reader over the corpus
     * @return the next word, or {@code null} at end of corpus (eoc is set)
     * @throws IOException on read or cache-write failure
     */
    String readWord(BufferedReader raf) throws IOException
    {
        for (;;)
        {
            // drain the current buffer before touching the reader
            if (wbp < wordsBuffer.length)
            {
                return wordsBuffer[wbp++];
            }
            final String next = raf.readLine();
            if (next == null)
            {
                // no more lines: flag end of corpus
                eoc = true;
                return null;
            }
            final String trimmed = next.trim();
            if (trimmed.isEmpty())
            {
                // ignore blank lines
                continue;
            }
            cache.writeInt(-3); // mark end of sentence
            wordsBuffer = trimmed.split("\\s+");
            wbp = 0;
            eoc = false;
        }
    } }
|
public class class_name {
    /**
     * Returns the next word from the corpus, refilling the internal word
     * buffer one line at a time. Blank lines are skipped; each non-blank
     * line is whitespace-split into words, and a {@code -3} sentinel is
     * written to the cache to mark the sentence boundary before the new
     * line is buffered.
     *
     * @param raf reader over the corpus
     * @return the next word, or {@code null} at end of corpus (eoc is set)
     * @throws IOException on read or cache-write failure
     */
    String readWord(BufferedReader raf) throws IOException
    {
        while (true)
        {
            // check the buffer first
            if (wbp < wordsBuffer.length)
            {
                return wordsBuffer[wbp++];
            }
            String line = raf.readLine();
            if (line == null)
            { // end of corpus
                eoc = true;
                return null;
            }
            line = line.trim();
            if (line.length() == 0)
            {
                // skip blank lines
                continue;
            }
            cache.writeInt(-3); // mark end of sentence
            wordsBuffer = line.split("\\s+");
            wbp = 0;
            eoc = false;
        }
    } }
|
public class class_name {
@ShellMethod(key = "validate-ldap", value = "Test connections to an LDAP server to verify connectivity, SSL, etc")
public static void validateLdap(
@ShellOption(value = {"url"},
help = "LDAP URL to test, comma-separated.") final String url,
@ShellOption(value = {"bindDn"},
help = "bindDn to use when testing the LDAP server") final String bindDn,
@ShellOption(value = {"bindCredential"},
help = "bindCredential to use when testing the LDAP server") final String bindCredential,
@ShellOption(value = {"baseDn"},
help = "baseDn to use when testing the LDAP server, searching for accounts (i.e. OU=some,DC=org,DC=edu)") final String baseDn,
@ShellOption(value = {"searchFilter"},
help = "Filter to use when searching for accounts (i.e. (&(objectClass=*) (sAMAccountName=user)))") final String searchFilter,
@ShellOption(value = {"userPassword"},
help = "Password for the user found in the search result, to attempt authentication") final String userPassword,
@ShellOption(value = {"userAttributes"},
help = "User attributes, comma-separated, to fetch for the user found in the search result") final String userAttributes) {
try {
connect(url, bindDn, bindCredential, baseDn, searchFilter, userAttributes, userPassword);
} catch (final Exception e) {
LOGGER.error(e.getMessage(), e);
}
} }
|
public class class_name {
    /**
     * Shell command that attempts an LDAP connection with the supplied
     * parameters to verify connectivity, credentials, SSL, etc. Failures
     * are logged rather than propagated, since this is an interactive
     * diagnostic command.
     */
    @ShellMethod(key = "validate-ldap", value = "Test connections to an LDAP server to verify connectivity, SSL, etc")
    public static void validateLdap(
        @ShellOption(value = {"url"},
            help = "LDAP URL to test, comma-separated.") final String url,
        @ShellOption(value = {"bindDn"},
            help = "bindDn to use when testing the LDAP server") final String bindDn,
        @ShellOption(value = {"bindCredential"},
            help = "bindCredential to use when testing the LDAP server") final String bindCredential,
        @ShellOption(value = {"baseDn"},
            help = "baseDn to use when testing the LDAP server, searching for accounts (i.e. OU=some,DC=org,DC=edu)") final String baseDn,
        @ShellOption(value = {"searchFilter"},
            help = "Filter to use when searching for accounts (i.e. (&(objectClass=*) (sAMAccountName=user)))") final String searchFilter,
        @ShellOption(value = {"userPassword"},
            help = "Password for the user found in the search result, to attempt authentication") final String userPassword,
        @ShellOption(value = {"userAttributes"},
            help = "User attributes, comma-separated, to fetch for the user found in the search result") final String userAttributes) {
        try {
            connect(url, bindDn, bindCredential, baseDn, searchFilter, userAttributes, userPassword);
        } catch (final Exception e) {
            // best-effort diagnostic: log the failure instead of aborting the shell
            LOGGER.error(e.getMessage(), e);
        }
    } }
|
public class class_name {
    /**
     * Lists the read-only access keys for the specified Azure Cosmos DB
     * database account, asynchronously, wrapped in a ServiceResponse.
     *
     * @param resourceGroupName name of the resource group; must not be null
     * @param accountName Cosmos DB database account name; must not be null
     * @return an Observable emitting one ServiceResponse with the read-only keys
     * @throws IllegalArgumentException if any required parameter is null
     */
    public Observable<ServiceResponse<DatabaseAccountListReadOnlyKeysResultInner>> listReadOnlyKeysWithServiceResponseAsync(String resourceGroupName, String accountName) {
        // Validate required parameters eagerly, before issuing the request.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.listReadOnlyKeys(this.client.subscriptionId(), resourceGroupName, accountName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<DatabaseAccountListReadOnlyKeysResultInner>>>() {
                @Override
                public Observable<ServiceResponse<DatabaseAccountListReadOnlyKeysResultInner>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize the raw HTTP response into the typed result.
                        ServiceResponse<DatabaseAccountListReadOnlyKeysResultInner> clientResponse = listReadOnlyKeysDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Surface deserialization/service errors through the Observable.
                        return Observable.error(t);
                    }
                }
            });
    } }
|
public class class_name {
    /**
     * Lists the read-only access keys for the specified Azure Cosmos DB
     * database account, asynchronously, wrapped in a ServiceResponse.
     *
     * @param resourceGroupName name of the resource group; must not be null
     * @param accountName Cosmos DB database account name; must not be null
     * @return an Observable emitting one ServiceResponse with the read-only keys
     * @throws IllegalArgumentException if any required parameter is null
     */
    public Observable<ServiceResponse<DatabaseAccountListReadOnlyKeysResultInner>> listReadOnlyKeysWithServiceResponseAsync(String resourceGroupName, String accountName) {
        // Validate required parameters eagerly, before issuing the request.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.listReadOnlyKeys(this.client.subscriptionId(), resourceGroupName, accountName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<DatabaseAccountListReadOnlyKeysResultInner>>>() {
                @Override
                public Observable<ServiceResponse<DatabaseAccountListReadOnlyKeysResultInner>> call(Response<ResponseBody> response) {
                    try {
                        // Deserialize the raw HTTP response into the typed result.
                        ServiceResponse<DatabaseAccountListReadOnlyKeysResultInner> clientResponse = listReadOnlyKeysDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Surface deserialization/service errors through the Observable.
                        return Observable.error(t);
                    }
                }
            });
    } }
|
public class class_name {
  /**
   * Central collection pass for one leaf-reader segment: works out which
   * statistics the requested components need (span matches/counts, position
   * counts, token counts, facet values), gathers them in a single sweep over
   * the segment, and then hands the gathered data to the per-component
   * create* builders (stats, list, group, kwic, facet, termvector, ...).
   *
   * Phases:
   *  1. Inspect fieldInfo's component lists to decide what is needed
   *     (needSpans / needPositions / needTokens) and pre-register the
   *     per-query result maps (spansNumberData / spansMatchData / facetData).
   *  2. If spans are needed: collect facet-field values (docValues preferred,
   *     falling back to a terms scan) and walk each span query's Spans over
   *     the sorted docSet/docList to record match lists and/or counts.
   *  3. If needed, collect per-document position and token counts from the
   *     Mtas codec (choosing per-doc lookup vs. bulk lookup by docSet size).
   *  4. Dispatch the collected data to the create* component builders.
   *
   * @param spansQueryWeight precomputed SpanWeight per span query
   * @param searcher         searcher used by the downstream builders
   * @param mtasCodecInfo    codec info for position/token counts; may be null
   * @param r                the leaf reader for this segment
   * @param lrc              leaf context (supplies docBase for global doc ids)
   * @param field            field being processed
   * @param t                terms for the field (passed to builders)
   * @param docSet           sorted global doc ids selected by the query
   * @param docList          doc ids for document-oriented components (kwic, document)
   * @param fieldInfo        requested components for this field
   * @param fieldInfos       per-segment field metadata (docValues types, ...)
   * @param status           progress/status object forwarded to group creation
   * @throws IOException on index access failure
   */
  private static void collectSpansPositionsAndTokens(
      Map<MtasSpanQuery, SpanWeight> spansQueryWeight, IndexSearcher searcher,
      CodecInfo mtasCodecInfo, LeafReader r, LeafReaderContext lrc,
      String field, Terms t, List<Integer> docSet, List<Integer> docList,
      ComponentField fieldInfo, FieldInfos fieldInfos, Status status)
      throws IOException {
    boolean needSpans = false;
    boolean needPositions = false;
    boolean needTokens = false;
    // results
    Map<Integer, Integer> positionsData = null;
    Map<Integer, Integer> tokensData = null;
    // queries whose counts can be derived from position counts (match-all queries)
    Set<MtasSpanQuery> spansNumberByPositions = null;
    Map<MtasSpanQuery, Map<Integer, Integer>> spansNumberData = null;
    Map<MtasSpanQuery, Map<Integer, List<Match>>> spansMatchData = null;
    Map<String, SortedMap<String, int[]>> facetData = null;
    Map<String, String> facetDataType = null;
    // collect position stats
    if (!fieldInfo.statsPositionList.isEmpty()) {
      needPositions = true;
    }
    // collect token stats
    if (!fieldInfo.statsTokenList.isEmpty()) {
      needTokens = true;
    }
    if (!fieldInfo.termVectorList.isEmpty()) {
      for (ComponentTermVector ctv : fieldInfo.termVectorList) {
        if (!needPositions) {
          needPositions = (ctv.functions == null
              ? ctv.subComponentFunction.parserFunction.needPositions()
              : ctv.functionNeedPositions());
        }
      }
    }
    // compute from spans for selected docs
    if (!fieldInfo.spanQueryList.isEmpty()) {
      // check for statsSpans
      spansNumberByPositions = new HashSet<>();
      spansNumberData = new HashMap<>();
      spansMatchData = new HashMap<>();
      facetData = new HashMap<>();
      facetDataType = new HashMap<>();
      // spans: register count maps for every query argument a stats
      // component (or one of its functions) needs
      if (!fieldInfo.statsSpanList.isEmpty()) {
        for (ComponentSpan cs : fieldInfo.statsSpanList) {
          needPositions = (!needPositions) ? cs.parser.needPositions()
              : needPositions;
          needPositions = (!needPositions) ? cs.functionNeedPositions()
              : needPositions;
          needSpans = (!needSpans) ? cs.parser.needArgumentsNumber() > 0
              : needSpans;
          HashSet<Integer> arguments = cs.parser.needArgument();
          arguments.addAll(cs.functionNeedArguments());
          for (int a : arguments) {
            if (cs.queries.length > a) {
              MtasSpanQuery q = cs.queries[a];
              if (!spansNumberData.containsKey(q)) {
                spansNumberData.put(q, new HashMap<Integer, Integer>());
              }
            }
          }
        }
      }
      // kwic: full match lists are required
      if (!fieldInfo.kwicList.isEmpty()) {
        needSpans = true;
        for (ComponentKwic ck : fieldInfo.kwicList) {
          if (!spansMatchData.containsKey(ck.query)) {
            spansMatchData.put(ck.query, new HashMap<Integer, List<Match>>());
          }
        }
      }
      // list: matches when the requested window is reachable, counts otherwise
      if (!fieldInfo.listList.isEmpty()) {
        needSpans = true;
        for (ComponentList cl : fieldInfo.listList) {
          if (!spansMatchData.containsKey(cl.spanQuery)) {
            if (cl.number > 0) {
              // only if needed
              if (cl.position < (cl.start + cl.number)) {
                spansMatchData.put(cl.spanQuery,
                    new HashMap<Integer, List<Match>>());
              } else {
                spansNumberData.put(cl.spanQuery,
                    new HashMap<Integer, Integer>());
              }
            } else if (!spansNumberData.containsKey(cl.spanQuery)) {
              spansNumberData.put(cl.spanQuery,
                  new HashMap<Integer, Integer>());
            }
          }
        }
      }
      // group: full match lists are required
      if (!fieldInfo.groupList.isEmpty()) {
        needSpans = true;
        for (ComponentGroup cg : fieldInfo.groupList) {
          if (!spansMatchData.containsKey(cg.spanQuery)) {
            spansMatchData.put(cg.spanQuery,
                new HashMap<Integer, List<Match>>());
          }
        }
      }
      // facet: register count maps for base-parser and function arguments,
      // plus one (value -> docIds) map per base field
      if (!fieldInfo.facetList.isEmpty()) {
        for (ComponentFacet cf : fieldInfo.facetList) {
          needPositions = !needPositions ? cf.baseParserNeedPositions()
              : needPositions;
          needPositions = !needPositions ? cf.functionNeedPositions()
              : needPositions;
          for (int i = 0; i < cf.baseFields.length; i++) {
            needSpans = !needSpans ? cf.baseParsers[i].needArgumentsNumber() > 0
                : needSpans;
            HashSet<Integer> arguments = cf.baseParsers[i].needArgument();
            for (int a : arguments) {
              if (cf.spanQueries.length > a) {
                MtasSpanQuery q = cf.spanQueries[a];
                if (!spansNumberData.containsKey(q)) {
                  spansNumberData.put(q, new HashMap<Integer, Integer>());
                }
              }
            }
            for (MtasFunctionParserFunction function : cf.baseFunctionParserFunctions[i]) {
              needSpans = !needSpans ? function.needArgumentsNumber() > 0
                  : needSpans;
              arguments = function.needArgument();
              for (int a : arguments) {
                if (cf.spanQueries.length > a) {
                  MtasSpanQuery q = cf.spanQueries[a];
                  if (!spansNumberData.containsKey(q)) {
                    spansNumberData.put(q, new HashMap<Integer, Integer>());
                  }
                }
              }
            }
            if (!facetData.containsKey(cf.baseFields[i])) {
              facetData.put(cf.baseFields[i], new TreeMap<String, int[]>());
              facetDataType.put(cf.baseFields[i], cf.baseFieldTypes[i]);
            }
          }
        }
      }
      // termvector
      if (!fieldInfo.termVectorList.isEmpty()) {
        for (ComponentTermVector ctv : fieldInfo.termVectorList) {
          if ((ctv.subComponentFunction.parserFunction != null
              && ctv.subComponentFunction.parserFunction.needPositions())
              || (ctv.functions != null && ctv.functionNeedPositions())) {
            needPositions = true;
          }
        }
      }
    }
    if (needSpans) {
      Map<Integer, Integer> numberData;
      Map<Integer, List<Match>> matchData;
      // collect values for facetFields: map each facet value to the list of
      // matching (global) doc ids, using docValues when available
      for (Entry<String, SortedMap<String, int[]>> entry : facetData
          .entrySet()) {
        FieldInfo fi = fieldInfos.fieldInfo(entry.getKey());
        if (fi != null) {
          // prefer to use pointvalue
          if (!fi.getDocValuesType().equals(DocValuesType.NONE)) {
            Iterator<Integer> docIterator = docSet.iterator();
            // numeric or sorted
            if (fi.getDocValuesType().equals(DocValuesType.NUMERIC)
                || fi.getDocValuesType().equals(DocValuesType.SORTED)
                || fi.getDocValuesType().equals(DocValuesType.SORTED_SET)
                || fi.getDocValuesType().equals(DocValuesType.SORTED_NUMERIC)
                || fi.getDocValuesType().equals(DocValuesType.BINARY)) {
              // create map of values to corresponding docIds
              Map<Object, List<Integer>> facetDataSubList = new HashMap<>();
              // numeric
              if (fi.getDocValuesType().equals(DocValuesType.NUMERIC)) {
                NumericDocValues docValues = r.getContext().reader()
                    .getNumericDocValues(entry.getKey());
                int docId;
                while (docIterator.hasNext()) {
                  // docSet holds global ids; docValues work on segment-local ids
                  docId = docIterator.next() - lrc.docBase;
                  if (docValues.advanceExact(docId)) {
                    long value = docValues.longValue();
                    if (!facetDataSubList.containsKey(value)) {
                      List<Integer> facetDataSubListItem = new ArrayList<>();
                      facetDataSubListItem.add(docId + lrc.docBase);
                      facetDataSubList.put(value, facetDataSubListItem);
                    } else {
                      facetDataSubList.get(value).add(docId + lrc.docBase);
                    }
                  }
                }
                // sorted numeric
              } else if (fi.getDocValuesType().equals(DocValuesType.SORTED_NUMERIC)) {
                SortedNumericDocValues docValues = r.getContext().reader()
                    .getSortedNumericDocValues(entry.getKey());
                int docId;
                while (docIterator.hasNext()) {
                  docId = docIterator.next() - lrc.docBase;
                  if (docValues.advanceExact(docId)) {
                    // multi-valued: register the doc under every value
                    int n = docValues.docValueCount();
                    for(int i =0; i<n; i++) {
                      long value = docValues.nextValue();
                      if (!facetDataSubList.containsKey(value)) {
                        List<Integer> facetDataSubListItem = new ArrayList<>();
                        facetDataSubListItem.add(docId + lrc.docBase);
                        facetDataSubList.put(value, facetDataSubListItem);
                      } else {
                        facetDataSubList.get(value).add(docId + lrc.docBase);
                      }
                    }
                  }
                }
                // sorted set
              } else if (fi.getDocValuesType().equals(DocValuesType.SORTED_SET)) {
                SortedSetDocValues docValues = r.getContext().reader()
                    .getSortedSetDocValues(entry.getKey());
                int docId;
                // cache ord -> term lookups: ords repeat across documents
                Map<Long, String> dictionary = new HashMap<>();
                while (docIterator.hasNext()) {
                  docId = docIterator.next() - lrc.docBase;
                  if (docValues.advanceExact(docId)) {
                    long tmpValue;
                    String value;
                    while((tmpValue=docValues.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
                      if(!dictionary.containsKey(tmpValue)) {
                        value = docValues.lookupOrd(tmpValue).utf8ToString();
                        dictionary.put(tmpValue, value);
                      } else {
                        value = dictionary.get(tmpValue);
                      }
                      if (!facetDataSubList.containsKey(value)) {
                        List<Integer> facetDataSubListItem = new ArrayList<>();
                        facetDataSubListItem.add(docId + lrc.docBase);
                        facetDataSubList.put(value, facetDataSubListItem);
                      } else {
                        facetDataSubList.get(value).add(docId + lrc.docBase);
                      }
                    }
                  }
                }
                // sorted
              } else if (fi.getDocValuesType().equals(DocValuesType.SORTED)) {
                SortedDocValues docValues = r.getContext().reader()
                    .getSortedDocValues(entry.getKey());
                int docId;
                while (docIterator.hasNext()) {
                  docId = docIterator.next() - lrc.docBase;
                  if (docValues.advanceExact(docId)) {
                    String value = docValues.binaryValue().utf8ToString();
                    if (!facetDataSubList.containsKey(value)) {
                      List<Integer> facetDataSubListItem = new ArrayList<>();
                      facetDataSubListItem.add(docId + lrc.docBase);
                      facetDataSubList.put(value, facetDataSubListItem);
                    } else {
                      facetDataSubList.get(value).add(docId + lrc.docBase);
                    }
                  }
                }
              }
              // NOTE: the BINARY docValues type is accepted by the guard above
              // but has no handling branch here — such fields silently produce
              // no facet values. Looks intentional-but-fragile; verify upstream.
              if (!facetDataSubList.isEmpty()) {
                SortedMap<String, int[]> facetDataList = entry.getValue();
                SimpleDateFormat sdf = new SimpleDateFormat(
                    "yyyy-MM-dd'T'HH:mm:ss'Z'");
                sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
                for (Entry<Object, List<Integer>> facetEntry : facetDataSubList
                    .entrySet()) {
                  int[] docIdList = facetEntry.getValue().stream()
                      .mapToInt(i -> i).toArray();
                  String termValue;
                  if (facetDataType.get(entry.getKey())
                      .equals(NumberType.DATE.name())) {
                    // DATE facets store epoch millis; render as ISO-8601 UTC
                    Date date = new Date((Long) facetEntry.getKey());
                    termValue = sdf.format(date);
                  } else {
                    termValue = facetEntry.getKey().toString();
                  }
                  if (!facetDataList.containsKey(termValue)) {
                    facetDataList.put(termValue, docIdList);
                  } else {
                    // merge with doc ids gathered for the same value earlier
                    int[] oldList = facetDataList.get(termValue);
                    int[] newList = new int[oldList.length + docIdList.length];
                    System.arraycopy(oldList, 0, newList, 0, oldList.length);
                    System.arraycopy(docIdList, 0, newList, oldList.length,
                        docIdList.length);
                    facetDataList.put(termValue, newList);
                  }
                }
              }
            } else {
              throw new IOException("facets for docValues of type "
                  + fi.getDocValuesType() + " not implemented");
            }
          } else if (!docSet.isEmpty()) {
            // no docValues: fall back to scanning the field's terms and
            // intersecting each term's postings with docSet
            if (facetDataType.get(entry.getKey())
                .equals(ComponentFacet.TYPE_POINTFIELD_WITHOUT_DOCVALUES)) {
              throw new IOException(
                  "can't create facets for pointField without docValues");
            }
            Terms fft = r.terms(entry.getKey());
            if (fft != null) {
              TermsEnum termsEnum = fft.iterator();
              BytesRef term = null;
              PostingsEnum postingsEnum = null;
              SortedMap<String, int[]> facetDataList = entry.getValue();
              while ((term = termsEnum.next()) != null) {
                int docId;
                int termDocId = -1;
                int[] facetDataSublist = new int[docSet.size()];
                int facetDataSublistCounter = 0;
                Iterator<Integer> docIterator = docSet.iterator();
                postingsEnum = termsEnum.postings(postingsEnum);
                while (docIterator.hasNext()) {
                  docId = docIterator.next() - lrc.docBase;
                  // advance postings only forward; relies on docSet being sorted
                  if (docId >= termDocId
                      && ((docId == termDocId) || ((termDocId = postingsEnum
                          .advance(docId)) == docId))) {
                    facetDataSublist[facetDataSublistCounter] = docId
                        + lrc.docBase;
                    facetDataSublistCounter++;
                  }
                }
                if (facetDataSublistCounter > 0) {
                  String termValue = null;
                  if (facetDataType.get(entry.getKey())
                      .equals(NumberType.INTEGER.name())) {
                    // only values without shifting bits
                    if (term.bytes[term.offset] == LegacyNumericUtils.SHIFT_START_INT) {
                      termValue = Integer
                          .toString(LegacyNumericUtils.prefixCodedToInt(term));
                    } else {
                      continue;
                    }
                  } else if (facetDataType.get(entry.getKey())
                      .equals(NumberType.LONG.name())) {
                    if (term.bytes[term.offset] == LegacyNumericUtils.SHIFT_START_LONG) {
                      termValue = Long
                          .toString(LegacyNumericUtils.prefixCodedToLong(term));
                    } else {
                      continue;
                    }
                  } else {
                    termValue = term.utf8ToString();
                  }
                  if (!facetDataList.containsKey(termValue)) {
                    facetDataList.put(termValue, Arrays.copyOf(facetDataSublist,
                        facetDataSublistCounter));
                  } else {
                    int[] oldList = facetDataList.get(termValue);
                    int[] newList = new int[oldList.length
                        + facetDataSublistCounter];
                    System.arraycopy(oldList, 0, newList, 0, oldList.length);
                    System.arraycopy(facetDataSublist, 0, newList,
                        oldList.length, facetDataSublistCounter);
                    facetDataList.put(termValue, newList);
                  }
                }
              }
            }
          }
        }
      }
      // collect matches and numbers for queries
      for (MtasSpanQuery sq : fieldInfo.spanQueryList) {
        // what to collect : numbers
        if (spansNumberData.containsKey(sq)) {
          numberData = spansNumberData.get(sq);
        } else {
          numberData = null;
        }
        // what to collect: matches
        if (spansMatchData.containsKey(sq)) {
          matchData = spansMatchData.get(sq);
        } else {
          matchData = null;
        }
        boolean doNormalCollection = true;
        // if only number is needed, possibly termvectors can be used
        if ((numberData != null) && (matchData == null)) {
          if (sq.isMatchAllPositionsQuery()) {
            // count equals the position count: defer to the positions pass below
            spansNumberByPositions.add(sq);
            needPositions = true;
            doNormalCollection = false;
          }
        }
        // collect (if termvector collection didn't work)
        if (doNormalCollection
            && ((numberData != null) || (matchData != null))) {
          Spans spans = spansQueryWeight.get(sq).getSpans(lrc,
              SpanWeight.Postings.POSITIONS);
          if (spans != null) {
            Iterator<Integer> it;
            if (docSet != null) {
              it = docSet.iterator();
            } else {
              it = docList.iterator();
            }
            if (it.hasNext()) {
              int docId = it.next();
              int number;
              ArrayList<Match> matchDataList;
              Integer spansDocId = null;
              // merge-join between the sorted doc iterator and the spans
              while (docId != DocIdSetIterator.NO_MORE_DOCS) {
                if (spans.advance(
                    (docId - lrc.docBase)) == DocIdSetIterator.NO_MORE_DOCS) {
                  break;
                }
                spansDocId = spans.docID() + lrc.docBase;
                while ((docId < spansDocId) && it.hasNext()) {
                  docId = it.next();
                }
                if (docId < spansDocId) {
                  break;
                }
                if (spansDocId.equals(docId)) {
                  number = 0;
                  matchDataList = new ArrayList<>();
                  int tmpStartPosition;
                  while ((tmpStartPosition = spans
                      .nextStartPosition()) != Spans.NO_MORE_POSITIONS) {
                    number++;
                    if (matchData != null) {
                      Match m = new Match(tmpStartPosition,
                          spans.endPosition());
                      matchDataList.add(m);
                    }
                  }
                  if ((numberData != null)) {
                    numberData.put(spansDocId, number);
                  }
                  if ((matchData != null)) {
                    matchData.put(spansDocId, matchDataList);
                  }
                  if (it.hasNext()) {
                    docId = it.next();
                  } else {
                    break;
                  }
                }
              }
            }
          }
        }
      }
    }
    // collect position stats
    if (needPositions) {
      if (mtasCodecInfo != null) {
        // for relatively small numbers, compute only what is needed
        if (docSet.size() < Math.log(r.maxDoc())) {
          positionsData = new HashMap<>();
          for (int docId : docSet) {
            positionsData.put(docId, mtasCodecInfo.getNumberOfPositions(field,
                (docId - lrc.docBase)));
          }
          // compute everything, only use what is needed
        } else {
          positionsData = mtasCodecInfo.getAllNumberOfPositions(field,
              lrc.docBase);
          for (int docId : docSet) {
            if (!positionsData.containsKey(docId)) {
              positionsData.put(docId, 0);
            }
          }
        }
      } else {
        // no codec info: fall back to zero positions for every doc
        positionsData = new HashMap<>();
        for (int docId : docSet) {
          positionsData.put(docId, 0);
        }
      }
      // resolve counts for match-all-positions queries from the position data
      if (spansNumberByPositions != null && spansNumberData != null) {
        for (MtasSpanQuery sq : spansNumberByPositions) {
          Map<Integer,Integer> numberData = spansNumberData.get(sq);
          positionsData.forEach((k,v) -> numberData.put(k,v!=null?v:0));
        }
      }
    }
    // collect token stats
    if (needTokens) {
      if (mtasCodecInfo != null) {
        // for relatively small numbers, compute only what is needed
        if (docSet.size() < Math.log(r.maxDoc())) {
          tokensData = new HashMap<>();
          for (int docId : docSet) {
            tokensData.put(docId,
                mtasCodecInfo.getNumberOfTokens(field, (docId - lrc.docBase)));
          }
          // compute everything, only use what is needed
        } else {
          tokensData = mtasCodecInfo.getAllNumberOfTokens(field, lrc.docBase);
          for (int docId : docSet) {
            if (!tokensData.containsKey(docId)) {
              tokensData.put(docId, 0);
            }
          }
        }
      } else {
        // no codec info: fall back to zero tokens for every doc
        tokensData = new HashMap<>();
        for (int docId : docSet) {
          tokensData.put(docId, 0);
        }
      }
    }
    // Phase 4: hand the collected data to the per-component builders.
    if (!fieldInfo.statsPositionList.isEmpty()) {
      // create positions
      createPositions(fieldInfo.statsPositionList, positionsData, docSet);
    }
    if (!fieldInfo.statsTokenList.isEmpty()) {
      // create positions
      createTokens(fieldInfo.statsTokenList, tokensData, docSet);
    }
    if (!fieldInfo.documentList.isEmpty()) {
      // create document
      createDocument(fieldInfo.documentList, docList, fieldInfo.uniqueKeyField,
          searcher, t, lrc);
    }
    if (!fieldInfo.spanQueryList.isEmpty()) {
      if (!fieldInfo.statsSpanList.isEmpty()) {
        // create stats
        createStats(fieldInfo.statsSpanList, positionsData, spansNumberData,
            docSet.toArray(new Integer[docSet.size()]));
      }
      if (!fieldInfo.listList.isEmpty()) {
        // create list
        createList(fieldInfo.listList, spansNumberData, spansMatchData, docSet,
            field, lrc.docBase, fieldInfo.uniqueKeyField, mtasCodecInfo,
            searcher);
      }
      if (!fieldInfo.groupList.isEmpty()) {
        // create group
        createGroup(fieldInfo.groupList, spansMatchData, docSet,
            fieldInfos.fieldInfo(field), field, lrc.docBase, mtasCodecInfo,
            searcher, lrc, status);
      }
      if (!fieldInfo.kwicList.isEmpty()) {
        // create kwic
        createKwic(fieldInfo.kwicList, spansMatchData, docList, field,
            lrc.docBase, fieldInfo.uniqueKeyField, mtasCodecInfo, searcher);
      }
      if (!fieldInfo.facetList.isEmpty()) {
        // create facets
        createFacet(fieldInfo.facetList, positionsData, spansNumberData,
            facetData, docSet);
      }
    }
    if (!fieldInfo.termVectorList.isEmpty()) {
      createTermvectorFull(fieldInfo.termVectorList, positionsData, docSet, t,
          r, lrc);
      createTermvectorFirstRound(fieldInfo.termVectorList, positionsData,
          docSet, t, r, lrc);
    }
  } }
|
public class class_name {
private static void collectSpansPositionsAndTokens(
Map<MtasSpanQuery, SpanWeight> spansQueryWeight, IndexSearcher searcher,
CodecInfo mtasCodecInfo, LeafReader r, LeafReaderContext lrc,
String field, Terms t, List<Integer> docSet, List<Integer> docList,
ComponentField fieldInfo, FieldInfos fieldInfos, Status status)
throws IOException {
boolean needSpans = false;
boolean needPositions = false;
boolean needTokens = false;
// results
Map<Integer, Integer> positionsData = null;
Map<Integer, Integer> tokensData = null;
Set<MtasSpanQuery> spansNumberByPositions = null;
Map<MtasSpanQuery, Map<Integer, Integer>> spansNumberData = null;
Map<MtasSpanQuery, Map<Integer, List<Match>>> spansMatchData = null;
Map<String, SortedMap<String, int[]>> facetData = null;
Map<String, String> facetDataType = null;
// collect position stats
if (!fieldInfo.statsPositionList.isEmpty()) {
needPositions = true;
}
// collect token stats
if (!fieldInfo.statsTokenList.isEmpty()) {
needTokens = true;
}
if (!fieldInfo.termVectorList.isEmpty()) {
for (ComponentTermVector ctv : fieldInfo.termVectorList) {
if (!needPositions) {
needPositions = (ctv.functions == null
? ctv.subComponentFunction.parserFunction.needPositions()
: ctv.functionNeedPositions()); // depends on control dependency: [if], data = [none]
}
}
}
// compute from spans for selected docs
if (!fieldInfo.spanQueryList.isEmpty()) {
// check for statsSpans
spansNumberByPositions = new HashSet<>();
spansNumberData = new HashMap<>();
spansMatchData = new HashMap<>();
facetData = new HashMap<>();
facetDataType = new HashMap<>();
// spans
if (!fieldInfo.statsSpanList.isEmpty()) {
for (ComponentSpan cs : fieldInfo.statsSpanList) {
needPositions = (!needPositions) ? cs.parser.needPositions()
: needPositions;
needPositions = (!needPositions) ? cs.functionNeedPositions()
: needPositions;
needSpans = (!needSpans) ? cs.parser.needArgumentsNumber() > 0
: needSpans;
HashSet<Integer> arguments = cs.parser.needArgument();
arguments.addAll(cs.functionNeedArguments());
for (int a : arguments) {
if (cs.queries.length > a) {
MtasSpanQuery q = cs.queries[a];
if (!spansNumberData.containsKey(q)) {
spansNumberData.put(q, new HashMap<Integer, Integer>());
}
}
}
}
}
// kwic
if (!fieldInfo.kwicList.isEmpty()) {
needSpans = true;
for (ComponentKwic ck : fieldInfo.kwicList) {
if (!spansMatchData.containsKey(ck.query)) {
spansMatchData.put(ck.query, new HashMap<Integer, List<Match>>());
}
}
}
// list
if (!fieldInfo.listList.isEmpty()) {
needSpans = true;
for (ComponentList cl : fieldInfo.listList) {
if (!spansMatchData.containsKey(cl.spanQuery)) {
if (cl.number > 0) {
// only if needed
if (cl.position < (cl.start + cl.number)) {
spansMatchData.put(cl.spanQuery,
new HashMap<Integer, List<Match>>());
} else {
spansNumberData.put(cl.spanQuery,
new HashMap<Integer, Integer>());
}
} else if (!spansNumberData.containsKey(cl.spanQuery)) {
spansNumberData.put(cl.spanQuery,
new HashMap<Integer, Integer>());
}
}
}
}
// group
if (!fieldInfo.groupList.isEmpty()) {
needSpans = true;
for (ComponentGroup cg : fieldInfo.groupList) {
if (!spansMatchData.containsKey(cg.spanQuery)) {
spansMatchData.put(cg.spanQuery,
new HashMap<Integer, List<Match>>());
}
}
}
// facet
if (!fieldInfo.facetList.isEmpty()) {
for (ComponentFacet cf : fieldInfo.facetList) {
needPositions = !needPositions ? cf.baseParserNeedPositions()
: needPositions;
needPositions = !needPositions ? cf.functionNeedPositions()
: needPositions;
for (int i = 0; i < cf.baseFields.length; i++) {
needSpans = !needSpans ? cf.baseParsers[i].needArgumentsNumber() > 0
: needSpans;
HashSet<Integer> arguments = cf.baseParsers[i].needArgument();
for (int a : arguments) {
if (cf.spanQueries.length > a) {
MtasSpanQuery q = cf.spanQueries[a];
if (!spansNumberData.containsKey(q)) {
spansNumberData.put(q, new HashMap<Integer, Integer>());
}
}
}
for (MtasFunctionParserFunction function : cf.baseFunctionParserFunctions[i]) {
needSpans = !needSpans ? function.needArgumentsNumber() > 0
: needSpans;
arguments = function.needArgument();
for (int a : arguments) {
if (cf.spanQueries.length > a) {
MtasSpanQuery q = cf.spanQueries[a];
if (!spansNumberData.containsKey(q)) {
spansNumberData.put(q, new HashMap<Integer, Integer>());
}
}
}
}
if (!facetData.containsKey(cf.baseFields[i])) {
facetData.put(cf.baseFields[i], new TreeMap<String, int[]>());
facetDataType.put(cf.baseFields[i], cf.baseFieldTypes[i]);
}
}
}
}
// termvector
if (!fieldInfo.termVectorList.isEmpty()) {
for (ComponentTermVector ctv : fieldInfo.termVectorList) {
if ((ctv.subComponentFunction.parserFunction != null
&& ctv.subComponentFunction.parserFunction.needPositions())
|| (ctv.functions != null && ctv.functionNeedPositions())) {
needPositions = true;
}
}
}
}
if (needSpans) {
Map<Integer, Integer> numberData;
Map<Integer, List<Match>> matchData;
// collect values for facetFields
for (Entry<String, SortedMap<String, int[]>> entry : facetData
.entrySet()) {
FieldInfo fi = fieldInfos.fieldInfo(entry.getKey());
if (fi != null) {
// prefer to use pointvalue
if (!fi.getDocValuesType().equals(DocValuesType.NONE)) {
Iterator<Integer> docIterator = docSet.iterator();
// numeric or sorted
if (fi.getDocValuesType().equals(DocValuesType.NUMERIC)
|| fi.getDocValuesType().equals(DocValuesType.SORTED)
|| fi.getDocValuesType().equals(DocValuesType.SORTED_SET)
|| fi.getDocValuesType().equals(DocValuesType.SORTED_NUMERIC)
|| fi.getDocValuesType().equals(DocValuesType.BINARY)) {
// create map of values to corresponding docIds
Map<Object, List<Integer>> facetDataSubList = new HashMap<>();
// numeric
if (fi.getDocValuesType().equals(DocValuesType.NUMERIC)) {
NumericDocValues docValues = r.getContext().reader()
.getNumericDocValues(entry.getKey());
int docId;
while (docIterator.hasNext()) {
docId = docIterator.next() - lrc.docBase;
if (docValues.advanceExact(docId)) {
long value = docValues.longValue();
if (!facetDataSubList.containsKey(value)) {
List<Integer> facetDataSubListItem = new ArrayList<>();
facetDataSubListItem.add(docId + lrc.docBase);
facetDataSubList.put(value, facetDataSubListItem);
} else {
facetDataSubList.get(value).add(docId + lrc.docBase);
}
}
}
// sorted numeric
} else if (fi.getDocValuesType().equals(DocValuesType.SORTED_NUMERIC)) {
SortedNumericDocValues docValues = r.getContext().reader()
.getSortedNumericDocValues(entry.getKey());
int docId;
while (docIterator.hasNext()) {
docId = docIterator.next() - lrc.docBase;
if (docValues.advanceExact(docId)) {
int n = docValues.docValueCount();
for(int i =0; i<n; i++) {
long value = docValues.nextValue();
if (!facetDataSubList.containsKey(value)) {
List<Integer> facetDataSubListItem = new ArrayList<>();
facetDataSubListItem.add(docId + lrc.docBase);
facetDataSubList.put(value, facetDataSubListItem);
} else {
facetDataSubList.get(value).add(docId + lrc.docBase);
}
}
}
}
// sorted set
} else if (fi.getDocValuesType().equals(DocValuesType.SORTED_SET)) {
SortedSetDocValues docValues = r.getContext().reader()
.getSortedSetDocValues(entry.getKey());
int docId;
Map<Long, String> dictionary = new HashMap<>();
while (docIterator.hasNext()) {
docId = docIterator.next() - lrc.docBase;
if (docValues.advanceExact(docId)) {
long tmpValue;
String value;
while((tmpValue=docValues.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
if(!dictionary.containsKey(tmpValue)) {
value = docValues.lookupOrd(tmpValue).utf8ToString();
dictionary.put(tmpValue, value);
} else {
value = dictionary.get(tmpValue);
}
if (!facetDataSubList.containsKey(value)) {
List<Integer> facetDataSubListItem = new ArrayList<>();
facetDataSubListItem.add(docId + lrc.docBase);
facetDataSubList.put(value, facetDataSubListItem);
} else {
facetDataSubList.get(value).add(docId + lrc.docBase);
}
}
}
}
// sorted
} else if (fi.getDocValuesType().equals(DocValuesType.SORTED)) {
SortedDocValues docValues = r.getContext().reader()
.getSortedDocValues(entry.getKey());
int docId;
while (docIterator.hasNext()) {
docId = docIterator.next() - lrc.docBase;
if (docValues.advanceExact(docId)) {
String value = docValues.binaryValue().utf8ToString();
if (!facetDataSubList.containsKey(value)) {
List<Integer> facetDataSubListItem = new ArrayList<>();
facetDataSubListItem.add(docId + lrc.docBase);
facetDataSubList.put(value, facetDataSubListItem);
} else {
facetDataSubList.get(value).add(docId + lrc.docBase);
}
}
}
}
if (!facetDataSubList.isEmpty()) {
SortedMap<String, int[]> facetDataList = entry.getValue();
SimpleDateFormat sdf = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss'Z'");
sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
for (Entry<Object, List<Integer>> facetEntry : facetDataSubList
.entrySet()) {
int[] docIdList = facetEntry.getValue().stream()
.mapToInt(i -> i).toArray();
String termValue;
if (facetDataType.get(entry.getKey())
.equals(NumberType.DATE.name())) {
Date date = new Date((Long) facetEntry.getKey());
termValue = sdf.format(date);
} else {
termValue = facetEntry.getKey().toString();
}
if (!facetDataList.containsKey(termValue)) {
facetDataList.put(termValue, docIdList);
} else {
int[] oldList = facetDataList.get(termValue);
int[] newList = new int[oldList.length + docIdList.length];
System.arraycopy(oldList, 0, newList, 0, oldList.length);
System.arraycopy(docIdList, 0, newList, oldList.length,
docIdList.length);
facetDataList.put(termValue, newList);
}
}
}
} else {
throw new IOException("facets for docValues of type "
+ fi.getDocValuesType() + " not implemented");
}
} else if (!docSet.isEmpty()) {
if (facetDataType.get(entry.getKey())
.equals(ComponentFacet.TYPE_POINTFIELD_WITHOUT_DOCVALUES)) {
throw new IOException(
"can't create facets for pointField without docValues");
}
Terms fft = r.terms(entry.getKey());
if (fft != null) {
TermsEnum termsEnum = fft.iterator();
BytesRef term = null;
PostingsEnum postingsEnum = null;
SortedMap<String, int[]> facetDataList = entry.getValue();
while ((term = termsEnum.next()) != null) {
int docId;
int termDocId = -1;
int[] facetDataSublist = new int[docSet.size()];
int facetDataSublistCounter = 0;
Iterator<Integer> docIterator = docSet.iterator();
postingsEnum = termsEnum.postings(postingsEnum);
while (docIterator.hasNext()) {
docId = docIterator.next() - lrc.docBase;
if (docId >= termDocId
&& ((docId == termDocId) || ((termDocId = postingsEnum
.advance(docId)) == docId))) {
facetDataSublist[facetDataSublistCounter] = docId
+ lrc.docBase;
facetDataSublistCounter++;
}
}
if (facetDataSublistCounter > 0) {
String termValue = null;
if (facetDataType.get(entry.getKey())
.equals(NumberType.INTEGER.name())) {
// only values without shifting bits
if (term.bytes[term.offset] == LegacyNumericUtils.SHIFT_START_INT) {
termValue = Integer
.toString(LegacyNumericUtils.prefixCodedToInt(term));
} else {
continue;
}
} else if (facetDataType.get(entry.getKey())
.equals(NumberType.LONG.name())) {
if (term.bytes[term.offset] == LegacyNumericUtils.SHIFT_START_LONG) {
termValue = Long
.toString(LegacyNumericUtils.prefixCodedToLong(term));
} else {
continue;
}
} else {
termValue = term.utf8ToString();
}
if (!facetDataList.containsKey(termValue)) {
facetDataList.put(termValue, Arrays.copyOf(facetDataSublist,
facetDataSublistCounter));
} else {
int[] oldList = facetDataList.get(termValue);
int[] newList = new int[oldList.length
+ facetDataSublistCounter];
System.arraycopy(oldList, 0, newList, 0, oldList.length);
System.arraycopy(facetDataSublist, 0, newList,
oldList.length, facetDataSublistCounter);
facetDataList.put(termValue, newList);
}
}
}
}
}
}
}
// collect matches and numbers for queries
for (MtasSpanQuery sq : fieldInfo.spanQueryList) {
// what to collect : numbers
if (spansNumberData.containsKey(sq)) {
numberData = spansNumberData.get(sq);
} else {
numberData = null;
}
// what to collect: matches
if (spansMatchData.containsKey(sq)) {
matchData = spansMatchData.get(sq);
} else {
matchData = null;
}
boolean doNormalCollection = true;
// if only number is needed, possibly termvectors can be used
if ((numberData != null) && (matchData == null)) {
if (sq.isMatchAllPositionsQuery()) {
spansNumberByPositions.add(sq);
needPositions = true;
doNormalCollection = false;
}
}
// collect (if termvector collection didn't work)
if (doNormalCollection
&& ((numberData != null) || (matchData != null))) {
Spans spans = spansQueryWeight.get(sq).getSpans(lrc,
SpanWeight.Postings.POSITIONS);
if (spans != null) {
Iterator<Integer> it;
if (docSet != null) {
it = docSet.iterator();
} else {
it = docList.iterator();
}
if (it.hasNext()) {
int docId = it.next();
int number;
ArrayList<Match> matchDataList;
Integer spansDocId = null;
while (docId != DocIdSetIterator.NO_MORE_DOCS) {
if (spans.advance(
(docId - lrc.docBase)) == DocIdSetIterator.NO_MORE_DOCS) {
break;
}
spansDocId = spans.docID() + lrc.docBase;
while ((docId < spansDocId) && it.hasNext()) {
docId = it.next();
}
if (docId < spansDocId) {
break;
}
if (spansDocId.equals(docId)) {
number = 0;
matchDataList = new ArrayList<>();
int tmpStartPosition;
while ((tmpStartPosition = spans
.nextStartPosition()) != Spans.NO_MORE_POSITIONS) {
number++;
if (matchData != null) {
Match m = new Match(tmpStartPosition,
spans.endPosition());
matchDataList.add(m);
}
}
if ((numberData != null)) {
numberData.put(spansDocId, number);
}
if ((matchData != null)) {
matchData.put(spansDocId, matchDataList);
}
if (it.hasNext()) {
docId = it.next();
} else {
break;
}
}
}
}
}
}
}
}
// collect position stats
if (needPositions) {
if (mtasCodecInfo != null) {
// for relatively small numbers, compute only what is needed
if (docSet.size() < Math.log(r.maxDoc())) {
positionsData = new HashMap<>();
for (int docId : docSet) {
positionsData.put(docId, mtasCodecInfo.getNumberOfPositions(field,
(docId - lrc.docBase)));
}
// compute everything, only use what is needed
} else {
positionsData = mtasCodecInfo.getAllNumberOfPositions(field,
lrc.docBase);
for (int docId : docSet) {
if (!positionsData.containsKey(docId)) {
positionsData.put(docId, 0);
}
}
}
} else {
positionsData = new HashMap<>();
for (int docId : docSet) {
positionsData.put(docId, 0);
}
}
if (spansNumberByPositions != null && spansNumberData != null) {
for (MtasSpanQuery sq : spansNumberByPositions) {
Map<Integer,Integer> numberData = spansNumberData.get(sq);
positionsData.forEach((k,v) -> numberData.put(k,v!=null?v:0));
}
}
}
// collect token stats
if (needTokens) {
if (mtasCodecInfo != null) {
// for relatively small numbers, compute only what is needed
if (docSet.size() < Math.log(r.maxDoc())) {
tokensData = new HashMap<>();
for (int docId : docSet) {
tokensData.put(docId,
mtasCodecInfo.getNumberOfTokens(field, (docId - lrc.docBase)));
}
// compute everything, only use what is needed
} else {
tokensData = mtasCodecInfo.getAllNumberOfTokens(field, lrc.docBase);
for (int docId : docSet) {
if (!tokensData.containsKey(docId)) {
tokensData.put(docId, 0);
}
}
}
} else {
tokensData = new HashMap<>();
for (int docId : docSet) {
tokensData.put(docId, 0);
}
}
}
if (!fieldInfo.statsPositionList.isEmpty()) {
// create positions
createPositions(fieldInfo.statsPositionList, positionsData, docSet);
}
if (!fieldInfo.statsTokenList.isEmpty()) {
// create positions
createTokens(fieldInfo.statsTokenList, tokensData, docSet);
}
if (!fieldInfo.documentList.isEmpty()) {
// create document
createDocument(fieldInfo.documentList, docList, fieldInfo.uniqueKeyField,
searcher, t, lrc);
}
if (!fieldInfo.spanQueryList.isEmpty()) {
if (!fieldInfo.statsSpanList.isEmpty()) {
// create stats
createStats(fieldInfo.statsSpanList, positionsData, spansNumberData,
docSet.toArray(new Integer[docSet.size()]));
}
if (!fieldInfo.listList.isEmpty()) {
// create list
createList(fieldInfo.listList, spansNumberData, spansMatchData, docSet,
field, lrc.docBase, fieldInfo.uniqueKeyField, mtasCodecInfo,
searcher);
}
if (!fieldInfo.groupList.isEmpty()) {
// create group
createGroup(fieldInfo.groupList, spansMatchData, docSet,
fieldInfos.fieldInfo(field), field, lrc.docBase, mtasCodecInfo,
searcher, lrc, status);
}
if (!fieldInfo.kwicList.isEmpty()) {
// create kwic
createKwic(fieldInfo.kwicList, spansMatchData, docList, field,
lrc.docBase, fieldInfo.uniqueKeyField, mtasCodecInfo, searcher);
}
if (!fieldInfo.facetList.isEmpty()) {
// create facets
createFacet(fieldInfo.facetList, positionsData, spansNumberData,
facetData, docSet);
}
}
if (!fieldInfo.termVectorList.isEmpty()) {
createTermvectorFull(fieldInfo.termVectorList, positionsData, docSet, t,
r, lrc);
createTermvectorFirstRound(fieldInfo.termVectorList, positionsData,
docSet, t, r, lrc);
}
} }
|
public class class_name {
    /**
     * Returns the root of the tree that already contains {@code e}, or — when
     * {@code e} has never been seen — registers a fresh singleton node for it
     * and returns that node.
     *
     * @param e the element to resolve
     * @return the root node for {@code e}'s tree, or a newly created node
     */
    private Node<E> findRootOrCreateNode(E e) {
        final Node<E> existing = elmap.get(e);
        if (existing != null) {
            // e is known: walk up to the representative of its set.
            return findRoot(existing);
        }
        // First time we see e: create and register a singleton node.
        final Node<E> created = new Node<E>(e);
        elmap.put(e, created);
        return created;
    } }
|
public class class_name {
    // Annotated variant of findRootOrCreateNode above: trailing comments record
    // the control dependency (and the data it carries) for each dependent line.
    private Node<E> findRootOrCreateNode(E e) {
        Node<E> node = elmap.get(e);
        if (node != null) {
            return findRoot(node); // depends on control dependency: [if], data = [(node]
        }
        node = new Node<E>(e);
        elmap.put(e, node);
        return node;
    } }
|
public class class_name {
    /**
     * Reads the external package list for the documentation at {@code url},
     * fetching it over the network when {@code pkglisturl} is a URL and from
     * the local file system otherwise, and records the offline-link flag.
     *
     * @param url         base URL of the external documentation
     * @param pkglisturl  location of the package list (URL or file path)
     * @param reporter    sink for warnings raised while reading
     * @param linkoffline whether this is a -linkoffline style link
     * @return {@code true} when the package list was read; {@code false} when
     *         a {@code Fault} occurred (reported as a warning)
     */
    public boolean link(String url, String pkglisturl, Reporter reporter, boolean linkoffline)
            throws DocFileIOException {
        this.linkoffline = linkoffline;
        try {
            final String base = adjustEndFileSeparator(url);
            if (!isUrl(pkglisturl)) {
                readPackageListFromFile(base, DocFile.createFileForInput(configuration, pkglisturl));
            } else {
                readPackageListFromURL(base, toURL(adjustEndFileSeparator(pkglisturl)));
            }
            return true;
        } catch (Fault f) {
            // Reading the package list failed; surface it as a warning only.
            reporter.print(Diagnostic.Kind.WARNING, f.getMessage());
            return false;
        }
    } }
|
public class class_name {
    // Annotated variant of link() above: trailing comments record the control
    // dependency (and associated data) for each dependent line.
    public boolean link(String url, String pkglisturl, Reporter reporter, boolean linkoffline)
            throws DocFileIOException {
        this.linkoffline = linkoffline;
        try {
            url = adjustEndFileSeparator(url);
            if (isUrl(pkglisturl)) {
                readPackageListFromURL(url, toURL(adjustEndFileSeparator(pkglisturl))); // depends on control dependency: [if], data = [none]
            } else {
                readPackageListFromFile(url, DocFile.createFileForInput(configuration, pkglisturl)); // depends on control dependency: [if], data = [none]
            }
            return true;
        } catch (Fault f) {
            reporter.print(Diagnostic.Kind.WARNING, f.getMessage());
            return false;
        }
    } }
|
public class class_name {
    /**
     * Builds the MongoDB update clause for the pending changes.
     * After clear() a {@code $set} of the whole (possibly empty) map is
     * emitted under the prefix-less name; with no recorded operation an empty
     * document is returned; otherwise the recorded operation wraps the
     * collected updates.
     */
    @Override
    public DBObject getUpdateClause() {
        if (clear) {
            // clear() was called: updates holds either nothing or items keyed without prefix.
            return new BasicDBObject($set.name(), new BasicDBObject(getNameFromPrefix(), updates));
        }
        if (operation == null) {
            // Nothing recorded: empty update document.
            return new BasicDBObject();
        }
        return new BasicDBObject(operation.name(), updates);
    } }
|
public class class_name {
    // Annotated variant of getUpdateClause() above: trailing comments record
    // the control dependency (and associated data) for each dependent return.
    @Override
    public DBObject getUpdateClause() {
        if (clear) {
            // If clear() was called, updates will either be an empty Map or added items with prefix-less keys
            return new BasicDBObject($set.name(), new BasicDBObject(getNameFromPrefix(), updates)); // depends on control dependency: [if], data = [none]
        } else if (operation == null) {
            return new BasicDBObject(); // If there are no updates, return an empty DBObject // depends on control dependency: [if], data = [none]
        } else {
            return new BasicDBObject(operation.name(), updates); // depends on control dependency: [if], data = [(operation]
        }
    } }
|
public class class_name {
    /**
     * Verifies that the container has reached at least {@code minimalState}
     * before {@code methodName} may be invoked. A no-op in non-portable mode;
     * lazily resolves the container instance on first use.
     *
     * @throws the logger-built exception when the container has shut down or
     *         has not yet reached the required state
     */
    private void checkContainerState(String methodName, ContainerState minimalState) {
        if (nonPortableMode) {
            return;
        }
        if (container == null) {
            // Lazily resolve the container for this manager.
            container = Container.instance(manager);
        }
        final ContainerState current = container.getState();
        if (SHUTDOWN.equals(current)) {
            throw BeanManagerLogger.LOG.methodNotAvailableAfterShutdown(methodName);
        }
        if (current.compareTo(minimalState) < 0) {
            throw BeanManagerLogger.LOG.methodNotAvailableDuringInitialization(methodName, current);
        }
    } }
|
public class class_name {
    // Annotated variant of checkContainerState() above: trailing comments
    // record the control dependency (and associated data) for dependent lines.
    private void checkContainerState(String methodName, ContainerState minimalState) {
        if (nonPortableMode) {
            return; // depends on control dependency: [if], data = [none]
        }
        if (this.container == null) {
            this.container = Container.instance(manager); // depends on control dependency: [if], data = [none]
        }
        ContainerState state = container.getState();
        if (SHUTDOWN.equals(state)) {
            throw BeanManagerLogger.LOG.methodNotAvailableAfterShutdown(methodName);
        }
        if (state.compareTo(minimalState) < 0) {
            throw BeanManagerLogger.LOG.methodNotAvailableDuringInitialization(methodName, state);
        }
    } }
|
public class class_name {
    /**
     * Scans every root element of the round for fields annotated with
     * {@code @LoggerContext}, validates each such field — it must be of type
     * StructLogger, its generic type argument must match the context provider
     * class named in the annotation, and the provider must pass
     * checkVarContextProvider — and then runs the log-invocation scanner over
     * each class that declares at least one valid field. On the first invalid
     * field a compiler ERROR is emitted and processing aborts.
     *
     * @param roundEnv the current annotation-processing round
     */
    private void processStructLogExpressions(final RoundEnvironment roundEnv) {
        for (Element element : roundEnv.getRootElements()) {
            // field name -> context descriptor for this class
            final Map<Name, StructLoggerFieldContext> fields = new HashMap<>();
            for (Element enclosed : element.getEnclosedElements()) {
                if (enclosed.getKind().isField()) {
                    final LoggerContext annotation = enclosed.getAnnotation(LoggerContext.class);
                    if (annotation != null) {
                        final TypeMirror typeMirrorOfField = enclosed.asType();
                        final TypeMirror typeMirrorOfStructlogger = elements.getTypeElement(StructLogger.class.getCanonicalName()).asType();
                        if (!types.isSubtype(typeMirrorOfField, types.erasure(typeMirrorOfStructlogger))) { //check that annotated field is of type StructLogger
                            messager.printMessage(
                                    Diagnostic.Kind.ERROR,
                                    format("field %s in %s should be of type StructLogger", enclosed, element),
                                    enclosed
                            );
                            return;
                        }
                        // Reading a Class-valued annotation member at processing time
                        // throws MirroredTypeException carrying the TypeMirror we need.
                        try {
                            annotation.context(); //throws exception
                            //TODO class is already compiled
                        } catch (MirroredTypeException ex) {
                            final TypeMirror contextProviderTypeMirror = ex.getTypeMirror();
                            //check that type specified by @LoggerContext annotation matches type specified by generic parameter
                            final List<? extends TypeMirror> typeArguments = ((DeclaredType) typeMirrorOfField).getTypeArguments();
                            if (typeArguments.size() != 1) {
                                messager.printMessage(
                                        Diagnostic.Kind.ERROR,
                                        format("Expected 1 type argument specified in field %s in %s", enclosed, element),
                                        enclosed
                                );
                                return;
                            }
                            if (!types.isSameType(typeArguments.get(0), contextProviderTypeMirror)) {
                                messager.printMessage(
                                        Diagnostic.Kind.ERROR,
                                        format("Generic type of field %s in class %s differs from type specified in @LoggerContext annotation", enclosed, element),
                                        enclosed
                                );
                                return;
                            }
                            if (!checkVarContextProvider(contextProviderTypeMirror)) {
                                return;
                            }
                            fields.put(enclosed.getSimpleName(), new StructLoggerFieldContext(contextProviderTypeMirror));
                        }
                    }
                }
            }
            final TypeElement typeElement = (TypeElement) element;
            final TreePath path = trees.getPath(element);
            // do not do any code replacement in such class which do not specify any LoggerContext annotated StructLogger
            if (!fields.isEmpty()) {
                logInvocationScanner.scan(
                        path,
                        new ScannerParams(
                                typeElement,
                                path.getCompilationUnit(),
                                varsHashMap,
                                fields,
                                generatedClassesInfo
                        )
                );
            }
        }
    } }
|
public class class_name {
    // Annotated variant of processStructLogExpressions() above: trailing
    // comments record the control dependency (and associated data) for
    // dependent lines.
    private void processStructLogExpressions(final RoundEnvironment roundEnv) {
        for (Element element : roundEnv.getRootElements()) {
            final Map<Name, StructLoggerFieldContext> fields = new HashMap<>();
            for (Element enclosed : element.getEnclosedElements()) {
                if (enclosed.getKind().isField()) {
                    final LoggerContext annotation = enclosed.getAnnotation(LoggerContext.class);
                    if (annotation != null) {
                        final TypeMirror typeMirrorOfField = enclosed.asType();
                        final TypeMirror typeMirrorOfStructlogger = elements.getTypeElement(StructLogger.class.getCanonicalName()).asType();
                        if (!types.isSubtype(typeMirrorOfField, types.erasure(typeMirrorOfStructlogger))) { //check that annotated field is of type StructLogger
                            messager.printMessage(
                                    Diagnostic.Kind.ERROR,
                                    format("field %s in %s should be of type StructLogger", enclosed, element),
                                    enclosed
                            ); // depends on control dependency: [if], data = [none]
                            return; // depends on control dependency: [if], data = [none]
                        }
                        try {
                            annotation.context(); //throws exception // depends on control dependency: [try], data = [none]
                            //TODO class is already compiled
                        } catch (MirroredTypeException ex) {
                            final TypeMirror contextProviderTypeMirror = ex.getTypeMirror();
                            //check that type specified by @LoggerContext annotation matches type specified by generic parameter
                            final List<? extends TypeMirror> typeArguments = ((DeclaredType) typeMirrorOfField).getTypeArguments();
                            if (typeArguments.size() != 1) {
                                messager.printMessage(
                                        Diagnostic.Kind.ERROR,
                                        format("Expected 1 type argument specified in field %s in %s", enclosed, element),
                                        enclosed
                                ); // depends on control dependency: [if], data = [none]
                                return; // depends on control dependency: [if], data = [none]
                            }
                            if (!types.isSameType(typeArguments.get(0), contextProviderTypeMirror)) {
                                messager.printMessage(
                                        Diagnostic.Kind.ERROR,
                                        format("Generic type of field %s in class %s differs from type specified in @LoggerContext annotation", enclosed, element),
                                        enclosed
                                );
                                return; // depends on control dependency: [if], data = [none]
                            }
                            if (!checkVarContextProvider(contextProviderTypeMirror)) {
                                return; // depends on control dependency: [if], data = [none]
                            }
                            fields.put(enclosed.getSimpleName(), new StructLoggerFieldContext(contextProviderTypeMirror));
                        } // depends on control dependency: [catch], data = [none]
                    }
                }
            }
            final TypeElement typeElement = (TypeElement) element;
            final TreePath path = trees.getPath(element);
            // do not do any code replacement in such class which do not specify any LoggerContext annotated StructLogger
            if (!fields.isEmpty()) {
                logInvocationScanner.scan(
                        path,
                        new ScannerParams(
                                typeElement,
                                path.getCompilationUnit(),
                                varsHashMap,
                                fields,
                                generatedClassesInfo
                        )
                ); // depends on control dependency: [if], data = [none]
            }
        }
    } }
|
public class class_name {
    /**
     * Returns the EClass for PTD1, resolving it lazily from the registered
     * Afplib package model on first access and caching it thereafter.
     */
    public EClass getPTD1() {
        if (ptd1EClass != null) {
            return ptd1EClass;
        }
        // First access: look the classifier up in the package registry.
        ptd1EClass = (EClass) EPackage.Registry.INSTANCE
                .getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(318);
        return ptd1EClass;
    } }
|
public class class_name {
    // Annotated variant of getPTD1() above: the trailing comment records the
    // control dependency for the lazy-initialization line.
    public EClass getPTD1() {
        if (ptd1EClass == null) {
            ptd1EClass = (EClass)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(318); // depends on control dependency: [if], data = [none]
        }
        return ptd1EClass;
    } }
|
public class class_name {
    /**
     * Applies the move-to-front (MTF) transform to the sorted block data and
     * run-length-encodes runs of MTF value 0 as RUNA/RUNB symbol sequences,
     * writing the resulting symbol stream to {@code sfmap} and accumulating
     * per-symbol frequencies in {@code mtfFreq}. Also builds the
     * byte-value -> sequence-number map ({@code unseqToSeq}) and sets
     * {@code nInUse} and {@code nMTF}.
     */
    private void generateMTFValues() {
        final int lastShadow = this.last;
        final Data dataShadow = this.data;
        final boolean[] inUse = dataShadow.inUse;
        final byte[] block = dataShadow.block;
        final int[] fmap = dataShadow.fmap;
        final char[] sfmap = dataShadow.sfmap;
        final int[] mtfFreq = dataShadow.mtfFreq;
        final byte[] unseqToSeq = dataShadow.unseqToSeq;
        final byte[] yy = dataShadow.generateMTFValues_yy;

        // Map each byte value that occurs in the block to a compact sequence number.
        int nInUseShadow = 0;
        for (int i = 0; i < 256; i++) {
            if (inUse[i]) {
                unseqToSeq[i] = (byte) nInUseShadow;
                nInUseShadow++;
            }
        }
        this.nInUse = nInUseShadow;

        final int eob = nInUseShadow + 1; // end-of-block symbol
        for (int i = eob; i >= 0; i--) {
            mtfFreq[i] = 0;
        }
        // Initialize the MTF list to the identity permutation.
        for (int i = nInUseShadow; --i >= 0;) {
            yy[i] = (byte) i;
        }

        int wr = 0;    // next write position in sfmap
        int zPend = 0; // length of the pending run of MTF zeros
        for (int i = 0; i <= lastShadow; i++) {
            final byte ll_i = unseqToSeq[block[fmap[i]] & 0xff];

            // Move-to-front: locate ll_i in yy, shifting entries toward the back.
            byte tmp = yy[0];
            int j = 0;
            while (ll_i != tmp) {
                j++;
                byte tmp2 = tmp;
                tmp = yy[j];
                yy[j] = tmp2;
            }
            yy[0] = tmp;

            if (j == 0) {
                // Symbol was already at the front: extend the run of zeros.
                zPend++;
            } else {
                if (zPend > 0) {
                    wr = writeZeroRun(zPend, wr, sfmap, mtfFreq);
                    zPend = 0;
                }
                sfmap[wr] = (char) (j + 1);
                wr++;
                mtfFreq[j + 1]++;
            }
        }

        // Flush a zero run that reaches the end of the block.
        if (zPend > 0) {
            wr = writeZeroRun(zPend, wr, sfmap, mtfFreq);
        }

        sfmap[wr] = (char) eob;
        mtfFreq[eob]++;
        this.nMTF = wr + 1;
    }

    /**
     * Encodes a run of {@code zPend} MTF zeros as a sequence of RUNA/RUNB
     * symbols (low bit selects the symbol, then the count is halved after
     * subtracting 2), writing them to {@code sfmap} starting at {@code wr}
     * and updating their frequencies.
     *
     * @return the write position immediately after the emitted symbols
     */
    private int writeZeroRun(int zPend, int wr, final char[] sfmap, final int[] mtfFreq) {
        zPend--;
        while (true) {
            if ((zPend & 1) == 0) {
                sfmap[wr] = RUNA;
                wr++;
                mtfFreq[RUNA]++;
            } else {
                sfmap[wr] = RUNB;
                wr++;
                mtfFreq[RUNB]++;
            }
            if (zPend >= 2) {
                zPend = (zPend - 2) >> 1;
            } else {
                break;
            }
        }
        return wr;
    } }
|
public class class_name {
    // Annotated variant of generateMTFValues() above: trailing comments record
    // the control dependency (and associated data) for each dependent line.
    private void generateMTFValues() {
        final int lastShadow = this.last;
        final Data dataShadow = this.data;
        final boolean[] inUse = dataShadow.inUse;
        final byte[] block = dataShadow.block;
        final int[] fmap = dataShadow.fmap;
        final char[] sfmap = dataShadow.sfmap;
        final int[] mtfFreq = dataShadow.mtfFreq;
        final byte[] unseqToSeq = dataShadow.unseqToSeq;
        final byte[] yy = dataShadow.generateMTFValues_yy;
        // make maps
        int nInUseShadow = 0;
        for (int i = 0; i < 256; i++) {
            if (inUse[i]) {
                unseqToSeq[i] = (byte) nInUseShadow; // depends on control dependency: [if], data = [none]
                nInUseShadow++; // depends on control dependency: [if], data = [none]
            }
        }
        this.nInUse = nInUseShadow;
        final int eob = nInUseShadow + 1;
        for (int i = eob; i >= 0; i--) {
            mtfFreq[i] = 0; // depends on control dependency: [for], data = [i]
        }
        for (int i = nInUseShadow; --i >= 0;) {
            yy[i] = (byte) i; // depends on control dependency: [for], data = [i]
        }
        int wr = 0;
        int zPend = 0;
        for (int i = 0; i <= lastShadow; i++) {
            final byte ll_i = unseqToSeq[block[fmap[i]] & 0xff];
            byte tmp = yy[0];
            int j = 0;
            while (ll_i != tmp) {
                j++; // depends on control dependency: [while], data = [none]
                byte tmp2 = tmp;
                tmp = yy[j]; // depends on control dependency: [while], data = [none]
                yy[j] = tmp2; // depends on control dependency: [while], data = [none]
            }
            yy[0] = tmp; // depends on control dependency: [for], data = [none]
            if (j == 0) {
                zPend++; // depends on control dependency: [if], data = [none]
            } else {
                if (zPend > 0) {
                    zPend--; // depends on control dependency: [if], data = [none]
                    while (true) {
                        if ((zPend & 1) == 0) {
                            sfmap[wr] = RUNA; // depends on control dependency: [if], data = [none]
                            wr++; // depends on control dependency: [if], data = [none]
                            mtfFreq[RUNA]++; // depends on control dependency: [if], data = [none]
                        } else {
                            sfmap[wr] = RUNB; // depends on control dependency: [if], data = [none]
                            wr++; // depends on control dependency: [if], data = [none]
                            mtfFreq[RUNB]++; // depends on control dependency: [if], data = [none]
                        }
                        if (zPend >= 2) {
                            zPend = (zPend - 2) >> 1; // depends on control dependency: [if], data = [(zPend]
                        } else {
                            break;
                        }
                    }
                    zPend = 0; // depends on control dependency: [if], data = [none]
                }
                sfmap[wr] = (char) (j + 1); // depends on control dependency: [if], data = [(j]
                wr++; // depends on control dependency: [if], data = [none]
                mtfFreq[j + 1]++; // depends on control dependency: [if], data = [none]
            }
        }
        if (zPend > 0) {
            zPend--; // depends on control dependency: [if], data = [none]
            while (true) {
                if ((zPend & 1) == 0) {
                    sfmap[wr] = RUNA; // depends on control dependency: [if], data = [none]
                    wr++; // depends on control dependency: [if], data = [none]
                    mtfFreq[RUNA]++; // depends on control dependency: [if], data = [none]
                } else {
                    sfmap[wr] = RUNB; // depends on control dependency: [if], data = [none]
                    wr++; // depends on control dependency: [if], data = [none]
                    mtfFreq[RUNB]++; // depends on control dependency: [if], data = [none]
                }
                if (zPend >= 2) {
                    zPend = (zPend - 2) >> 1; // depends on control dependency: [if], data = [(zPend]
                } else {
                    break;
                }
            }
        }
        sfmap[wr] = (char) eob;
        mtfFreq[eob]++;
        this.nMTF = wr + 1;
    } }
|
public class class_name {
    /**
     * Converts the calendar's time value into field values. When the calendar
     * is partially normalized, only the non-set fields are recomputed;
     * otherwise every field is recomputed from scratch. All fields end up in
     * the COMPUTED state.
     */
    @Override
    protected void computeFields() {
        int mask;
        if (!isPartiallyNormalized()) {
            // Full recomputation of every calendar field.
            mask = ALL_FIELDS;
            computeFields(mask, 0);
        } else {
            // Determine which calendar fields need to be computed.
            mask = getSetStateFields();
            final int fieldMask = ~mask & ALL_FIELDS;
            // computeFields must still run when calsys == null so that calsys
            // and cdate get initialized. (6263644)
            if (fieldMask != 0 || calsys == null) {
                mask |= computeFields(fieldMask, mask & (ZONE_OFFSET_MASK | DST_OFFSET_MASK));
                assert mask == ALL_FIELDS;
            }
        }
        // After computing all the fields, mark them COMPUTED.
        setFieldsComputed(mask);
    } }
|
public class class_name {
    // Annotated variant of computeFields() above: trailing comments record the
    // control dependency (and associated data) for each dependent line.
    @Override
    protected void computeFields() {
        int mask;
        if (isPartiallyNormalized()) {
            // Determine which calendar fields need to be computed.
            mask = getSetStateFields(); // depends on control dependency: [if], data = [none]
            int fieldMask = ~mask & ALL_FIELDS;
            // We have to call computTime in case calsys == null in
            // order to set calsys and cdate. (6263644)
            if (fieldMask != 0 || calsys == null) {
                mask |= computeFields(fieldMask,
                                      mask & (ZONE_OFFSET_MASK|DST_OFFSET_MASK)); // depends on control dependency: [if], data = [none]
                assert mask == ALL_FIELDS;
            }
        } else {
            mask = ALL_FIELDS; // depends on control dependency: [if], data = [none]
            computeFields(mask, 0); // depends on control dependency: [if], data = [none]
        }
        // After computing all the fields, set the field state to `COMPUTED'.
        setFieldsComputed(mask);
    } }
|
public class class_name {
    /**
     * Returns an input stream for the named stream of a saved version of the
     * content at {@code path}, or {@code null} when the path, content item,
     * version history, or requested version does not exist. Read permission
     * on the path is checked first; the call is always recorded in the stats
     * service.
     *
     * @param path      content path being read
     * @param versionId id of the saved version to open
     * @param streamId  id of the stream within that version
     * @throws AccessDeniedException when the current user may not read the path
     */
    public InputStream getVersionInputStream(String path, String versionId, String streamId) throws AccessDeniedException,
            StorageClientException, IOException {
        long t = System.currentTimeMillis();
        try {
            accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_READ);
            checkOpen();
            Map<String, Object> structure = getCached(keySpace, contentColumnFamily, path);
            if (exists(structure)) {
                String contentId = (String) structure.get(STRUCTURE_UUID_FIELD);
                Map<String, Object> content = getCached(keySpace, contentColumnFamily, contentId);
                if (exists(content)) {
                    String versionHistoryId = (String) content.get(VERSION_HISTORY_ID_FIELD);
                    if (versionHistoryId != null) {
                        Map<String, Object> versionHistory = getCached(keySpace, contentColumnFamily, versionHistoryId);
                        // Only serve streams for versions actually recorded in the history.
                        if (versionHistory != null && versionHistory.containsKey(versionId)) {
                            return internalGetInputStream(versionId, streamId);
                        }
                    }
                }
            }
            return null;
        } finally {
            // Fixed stats key: was misspelled "egtVersionInputStream".
            statsService.apiCall(ContentManagerImpl.class.getName(), "getVersionInputStream", System.currentTimeMillis() - t);
        }
    } }
|
public class class_name {
    // Annotated variant of getVersionInputStream() above: the trailing comment
    // records the control dependency for the dependent return.
    public InputStream getVersionInputStream(String path, String versionId, String streamId) throws AccessDeniedException,
        StorageClientException, IOException {
      long t = System.currentTimeMillis();
      try {
        accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_READ);
        checkOpen();
        Map<String, Object> structure = getCached(keySpace, contentColumnFamily, path);
        if (exists(structure)) {
          String contentId = (String) structure.get(STRUCTURE_UUID_FIELD);
          Map<String, Object> content = getCached(keySpace, contentColumnFamily, contentId);
          if (exists(content)) {
            String versionHistoryId = (String) content.get(VERSION_HISTORY_ID_FIELD);
            if (versionHistoryId != null) {
              Map<String, Object> versionHistory = getCached(keySpace, contentColumnFamily, versionHistoryId);
              if (versionHistory != null && versionHistory.containsKey(versionId)) {
                return internalGetInputStream(versionId, streamId); // depends on control dependency: [if], data = [none]
              }
            }
          }
        }
        return null;
      } finally {
        statsService.apiCall(ContentManagerImpl.class.getName(), "egtVersionInputStream", System.currentTimeMillis() - t);
      }
    } }
|
public class class_name {
    /**
     * Writes "label=object" as a comment line, omitting the "label=" part
     * when label is null and the value part when object is null.
     */
    @Override
    public void write(TextWriterStream out, String label, Object object) {
        final StringBuilder line = new StringBuilder(100);
        if (label != null) {
            line.append(label);
            line.append('=');
        }
        if (object != null) {
            // append(Object) renders via toString() for non-null values.
            line.append(object);
        }
        out.commentPrintLn(line);
    } }
|
public class class_name {
    // Annotated variant of write() above: trailing comments record the control
    // dependency (and associated data) for each dependent append.
    @Override
    public void write(TextWriterStream out, String label, Object object) {
        StringBuilder buf = new StringBuilder(100);
        if(label != null) {
            buf.append(label).append('='); // depends on control dependency: [if], data = [(label]
        }
        if(object != null) {
            buf.append(object.toString()); // depends on control dependency: [if], data = [(object]
        }
        out.commentPrintLn(buf);
    } }
|
public class class_name {
    /**
     * Accepts {@code current} into the filtered list unless some previously
     * accepted group already contains it.
     */
    private void filter(CloneGroup current) {
        boolean alreadyCovered = false;
        for (CloneGroup earlier : filtered) {
            if (containsIn(current, earlier)) {
                alreadyCovered = true;
                break;
            }
        }
        if (!alreadyCovered) {
            filtered.add(current);
        }
    } }
|
public class class_name {
    // Annotated variant of filter() above: the trailing comment records the
    // control dependency for the early return.
    private void filter(CloneGroup current) {
        for (CloneGroup earlier : filtered) {
            if (containsIn(current, earlier)) {
                return; // depends on control dependency: [if], data = [none]
            }
        }
        filtered.add(current);
    } }
|
public class class_name {
    /**
     * Stages the margins to take effect on the next page.
     *
     * @return {@code false} when the writer is paused and the change is
     *         refused; {@code true} otherwise
     */
    public boolean setMargins(float marginLeft, float marginRight, float marginTop, float marginBottom) {
        final boolean paused = writer != null && writer.isPaused();
        if (paused) {
            return false;
        }
        nextMarginLeft = marginLeft;
        nextMarginRight = marginRight;
        nextMarginTop = marginTop;
        nextMarginBottom = marginBottom;
        return true;
    } }
|
public class class_name {
    // Annotated variant of setMargins() above: the trailing comment records
    // the control dependency for the early return.
    public boolean setMargins(float marginLeft, float marginRight, float marginTop, float marginBottom) {
        if (writer != null && writer.isPaused()) {
            return false; // depends on control dependency: [if], data = [none]
        }
        nextMarginLeft = marginLeft;
        nextMarginRight = marginRight;
        nextMarginTop = marginTop;
        nextMarginBottom = marginBottom;
        return true;
    } }
|
public class class_name {
    /**
     * Upserts checkpoint data: updates the existing row for the key when one
     * exists, otherwise creates a new one. Entry and exit are traced.
     */
    @Override
    public void updateCheckpointData(CheckpointDataKey key, CheckpointData value) {
        logger.entering(CLASSNAME, "updateCheckpointData", new Object[] {key, value});
        final String dbKey = key.getCommaSeparatedKey();
        if (queryCheckpointData(dbKey) == null) {
            createCheckpointData(key, value);
        } else {
            // Delegates to the String-keyed overload for the actual update.
            updateCheckpointData(dbKey, value);
        }
        logger.exiting(CLASSNAME, "updateCheckpointData");
    } }
|
public class class_name {
    // Annotated variant of updateCheckpointData() above: comments on the lines
    // after each call record the control dependency.
    @Override
    public void updateCheckpointData(CheckpointDataKey key, CheckpointData value) {
        logger.entering(CLASSNAME, "updateCheckpointData", new Object[] {key, value});
        CheckpointData data = queryCheckpointData(key.getCommaSeparatedKey());
        if(data != null) {
            updateCheckpointData(key.getCommaSeparatedKey(), value);
            // depends on control dependency: [if], data = [none]
        } else {
            createCheckpointData(key, value);
            // depends on control dependency: [if], data = [none]
        }
        logger.exiting(CLASSNAME, "updateCheckpointData");
    } }
|
public class class_name {
    /**
     * Collects the indices of input fields the aggregate reads: the group-by
     * keys, every aggregate-call argument and filter column, and — when the
     * aggregate is a LogicalWindowAggregate — its window time field.
     *
     * @return a builder seeded with all used input-field indices
     */
    private ImmutableBitSet.Builder getInputFieldUsed(Aggregate aggregate, RelNode input) {
        // Group fields are always used.
        final ImmutableBitSet.Builder used = aggregate.getGroupSet().rebuild();
        // Arguments and filter columns of each aggregate function.
        for (AggregateCall call : aggregate.getAggCallList()) {
            for (int argIndex : call.getArgList()) {
                used.set(argIndex);
            }
            if (call.filterArg >= 0) {
                used.set(call.filterArg);
            }
        }
        // Window time field for group window aggregates.
        if (aggregate instanceof LogicalWindowAggregate) {
            used.set(getWindowTimeFieldIndex((LogicalWindowAggregate) aggregate, input));
        }
        return used;
    } }
|
public class class_name {
private ImmutableBitSet.Builder getInputFieldUsed(Aggregate aggregate, RelNode input) {
// 1. group fields are always used
final ImmutableBitSet.Builder inputFieldsUsed =
aggregate.getGroupSet().rebuild();
// 2. agg functions
for (AggregateCall aggCall : aggregate.getAggCallList()) {
for (int i : aggCall.getArgList()) {
inputFieldsUsed.set(i); // depends on control dependency: [for], data = [i]
}
if (aggCall.filterArg >= 0) {
inputFieldsUsed.set(aggCall.filterArg); // depends on control dependency: [if], data = [(aggCall.filterArg]
}
}
// 3. window time field if the aggregate is a group window aggregate.
if (aggregate instanceof LogicalWindowAggregate) {
inputFieldsUsed.set(getWindowTimeFieldIndex((LogicalWindowAggregate) aggregate, input)); // depends on control dependency: [if], data = [none]
}
return inputFieldsUsed;
} }
|
public class class_name {
    /**
     * Reads the property objects of a resource. With {@code search} enabled the
     * method walks from the resource up to the root folder so that properties
     * defined lower in the tree override inherited ones. Results are frozen and,
     * for a null project id, cached under a key derived from the root path.
     *
     * @param dbc      the current database context
     * @param resource the resource whose properties are read
     * @param search   if true, also inherit properties from parent folders
     * @return a fresh mutable list wrapping the (frozen) properties
     * @throws CmsException if reading a resource or its properties fails
     */
    public List<CmsProperty> readPropertyObjects(CmsDbContext dbc, CmsResource resource, boolean search)
    throws CmsException {
        // check if we have the result already cached
        CmsUUID projectId = getProjectIdForContext(dbc);
        String cacheKey = getCacheKey(CACHE_ALL_PROPERTIES, search, projectId, resource.getRootPath());
        List<CmsProperty> properties = m_monitor.getCachedPropertyList(cacheKey);
        if ((properties == null) || !dbc.getProjectId().isNullUUID()) {
            // result not cached, let's look it up in the DB
            if (search) {
                boolean cont;
                properties = new ArrayList<CmsProperty>();
                List<CmsProperty> parentProperties = null;
                // walk up the folder tree, merging each level's properties
                do {
                    try {
                        parentProperties = readPropertyObjects(dbc, resource, false);
                        // make sure properties from lower folders "overwrite" properties from upper folders
                        parentProperties.removeAll(properties);
                        parentProperties.addAll(properties);
                        properties.clear();
                        properties.addAll(parentProperties);
                        // continue until the root folder "/" has been processed
                        cont = resource.getRootPath().length() > 1;
                    } catch (CmsSecurityException se) {
                        // a security exception (probably no read permission) we return the current result
                        cont = false;
                    }
                    if (cont) {
                        // no permission check on parent folder is required since we must have "read"
                        // permissions to read the child resource anyway
                        resource = readResource(
                            dbc,
                            CmsResource.getParentFolder(resource.getRootPath()),
                            CmsResourceFilter.ALL);
                    }
                } while (cont);
            } else {
                properties = getVfsDriver(dbc).readPropertyObjects(dbc, dbc.currentProject(), resource);
                // for (CmsProperty prop : properties) {
                //     prop.setOrigin(resource.getRootPath());
                // }
            }
            // set all properties in the result list as frozen
            CmsProperty.setFrozen(properties);
            if (dbc.getProjectId().isNullUUID()) {
                // store the result in the cache if needed
                m_monitor.cachePropertyList(cacheKey, properties);
            }
        }
        return new ArrayList<CmsProperty>(properties);
    } }
|
public class class_name {
public List<CmsProperty> readPropertyObjects(CmsDbContext dbc, CmsResource resource, boolean search)
throws CmsException {
// check if we have the result already cached
CmsUUID projectId = getProjectIdForContext(dbc);
String cacheKey = getCacheKey(CACHE_ALL_PROPERTIES, search, projectId, resource.getRootPath());
List<CmsProperty> properties = m_monitor.getCachedPropertyList(cacheKey);
if ((properties == null) || !dbc.getProjectId().isNullUUID()) {
// result not cached, let's look it up in the DB
if (search) {
boolean cont;
properties = new ArrayList<CmsProperty>(); // depends on control dependency: [if], data = [none]
List<CmsProperty> parentProperties = null;
do {
try {
parentProperties = readPropertyObjects(dbc, resource, false); // depends on control dependency: [try], data = [none]
// make sure properties from lower folders "overwrite" properties from upper folders
parentProperties.removeAll(properties); // depends on control dependency: [try], data = [none]
parentProperties.addAll(properties); // depends on control dependency: [try], data = [none]
properties.clear(); // depends on control dependency: [try], data = [none]
properties.addAll(parentProperties); // depends on control dependency: [try], data = [none]
cont = resource.getRootPath().length() > 1; // depends on control dependency: [try], data = [none]
} catch (CmsSecurityException se) {
// a security exception (probably no read permission) we return the current result
cont = false;
} // depends on control dependency: [catch], data = [none]
if (cont) {
// no permission check on parent folder is required since we must have "read"
// permissions to read the child resource anyway
resource = readResource(
dbc,
CmsResource.getParentFolder(resource.getRootPath()),
CmsResourceFilter.ALL); // depends on control dependency: [if], data = [none]
}
} while (cont);
} else {
properties = getVfsDriver(dbc).readPropertyObjects(dbc, dbc.currentProject(), resource); // depends on control dependency: [if], data = [none]
// for (CmsProperty prop : properties) {
// prop.setOrigin(resource.getRootPath());
// }
}
// set all properties in the result list as frozen
CmsProperty.setFrozen(properties);
if (dbc.getProjectId().isNullUUID()) {
// store the result in the cache if needed
m_monitor.cachePropertyList(cacheKey, properties); // depends on control dependency: [if], data = [none]
}
}
return new ArrayList<CmsProperty>(properties);
} }
|
public class class_name {
    /**
     * Validates the scope attribute: when an absolute href points to a
     * different scheme/authority than the current file but no scope is set,
     * LAX mode forces {@code scope="external"} (warning DOTJ075W); any other
     * mode only warns (DOTJ076W) and leaves the attributes untouched.
     *
     * @param atts     the original, read-only attributes
     * @param modified a mutable copy of {@code atts}, or null if none was created yet
     * @return the mutable attribute set (possibly created here), or null if untouched
     */
    private AttributesImpl validateScope(final Attributes atts, final AttributesImpl modified) {
        AttributesImpl res = modified;
        final String scope = atts.getValue(ATTRIBUTE_NAME_SCOPE);
        final URI href = toURI(atts.getValue(ATTRIBUTE_NAME_HREF));
        if (scope == null && href != null && href.isAbsolute()) {
            final boolean sameScheme = Objects.equals(currentFile.getScheme(), href.getScheme());
            final boolean sameAuthority = Objects.equals(currentFile.getRawAuthority(), href.getRawAuthority());
            if (!(sameScheme && sameAuthority)) {
                switch (processingMode) {
                    case LAX:
                        // Lazily clone the attributes only when we must mutate them.
                        if (res == null) {
                            res = new AttributesImpl(atts);
                        }
                        addOrSetAttribute(res, ATTRIBUTE_NAME_SCOPE, ATTR_SCOPE_VALUE_EXTERNAL);
                        logger.warn(MessageUtils.getMessage("DOTJ075W", href.toString()).setLocation(locator).toString());
                        break;
                    default:
                        logger.warn(MessageUtils.getMessage("DOTJ076W", href.toString()).setLocation(locator) + ", using invalid value.");
                        break;
                }
            }
        }
        return res;
    } }
|
public class class_name {
private AttributesImpl validateScope(final Attributes atts, final AttributesImpl modified) {
AttributesImpl res = modified;
final String scope = atts.getValue(ATTRIBUTE_NAME_SCOPE);
final URI href = toURI(atts.getValue(ATTRIBUTE_NAME_HREF));
if (scope == null && href != null && href.isAbsolute()) {
final boolean sameScheme = Objects.equals(currentFile.getScheme(), href.getScheme());
final boolean sameAuthority = Objects.equals(currentFile.getRawAuthority(), href.getRawAuthority());
if (!(sameScheme && sameAuthority)) {
switch (processingMode) {
case LAX:
if (res == null) {
res = new AttributesImpl(atts); // depends on control dependency: [if], data = [none]
}
addOrSetAttribute(res, ATTRIBUTE_NAME_SCOPE, ATTR_SCOPE_VALUE_EXTERNAL);
logger.warn(MessageUtils.getMessage("DOTJ075W", href.toString()).setLocation(locator).toString());
break;
default:
logger.warn(MessageUtils.getMessage("DOTJ076W", href.toString()).setLocation(locator) + ", using invalid value.");
break;
}
}
}
return res;
} }
|
public class class_name {
    /**
     * Enables or disables blinking of the yellow light. Disabling also turns
     * the light off and halts the blink timer; enabling starts the timer.
     */
    public void setYellowBlinking(final boolean YELLOW_BLINKING) {
        yellowBlinking = YELLOW_BLINKING;
        if (YELLOW_BLINKING) {
            TIMER.start();
        } else {
            yellowOn = false;
            TIMER.stop();
        }
    } }
|
public class class_name {
public void setYellowBlinking(final boolean YELLOW_BLINKING) {
yellowBlinking = YELLOW_BLINKING;
if (!YELLOW_BLINKING) {
yellowOn = false; // depends on control dependency: [if], data = [none]
TIMER.stop(); // depends on control dependency: [if], data = [none]
} else {
TIMER.start(); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Returns {@code providerId_} as a UTF-8 {@link com.google.protobuf.ByteString},
     * caching the converted value back into the field on first access
     * (standard generated protobuf accessor).
     */
    public com.google.protobuf.ByteString
        getProviderIdBytes() {
        final java.lang.Object ref = providerId_;
        if (!(ref instanceof java.lang.String)) {
            // Already stored as bytes.
            return (com.google.protobuf.ByteString) ref;
        }
        final com.google.protobuf.ByteString bytes =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        providerId_ = bytes;
        return bytes;
    } }
|
public class class_name {
public com.google.protobuf.ByteString
getProviderIdBytes() {
java.lang.Object ref = providerId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
providerId_ = b; // depends on control dependency: [if], data = [none]
return b; // depends on control dependency: [if], data = [none]
} else {
return (com.google.protobuf.ByteString) ref; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Resolves a replication conflict for the document with the given id
     * inside a single transaction: reads the local revision and the
     * conflicting remote revision, asks {@code resolveConflict} for a winner,
     * and saves the result. Skips silently when no conflicting revision can
     * be selected. The transaction is committed only on full success.
     *
     * @throws CouchbaseLiteException if a LiteCore call fails
     */
    void resolveConflictInDocument(String docID) throws CouchbaseLiteException {
        synchronized (lock) {
            boolean commit = false;
            beginTransaction();
            try {
                // Read local document:
                final Document localDoc = new Document((Database) this, docID, true);
                // Read the conflicting remote revision:
                final Document remoteDoc = new Document((Database) this, docID, true);
                try {
                    if (!remoteDoc.selectConflictingRevision()) {
                        // Nothing to resolve (the conflict may already be gone).
                        Log.w(DOMAIN, "Unable to select conflicting revision for '%s', skipping...", docID);
                        return;
                    }
                }
                catch (LiteCoreException e) {
                    throw CBLStatus.convertException(e);
                }
                // Resolve conflict:
                Log.v(DOMAIN, "Resolving doc '%s' (local=%s and remote=%s)", docID,
                    localDoc.getRevID(), remoteDoc.getRevID());
                final Document resolvedDoc = resolveConflict(localDoc, remoteDoc);
                // Save resolved document:
                try {
                    saveResolvedDocument(resolvedDoc, localDoc, remoteDoc);
                }
                catch (LiteCoreException e) {
                    throw CBLStatus.convertException(e);
                }
                commit = true;
            }
            finally {
                // commit == false aborts the transaction (early return or exception).
                endTransaction(commit);
            }
        }
    } }
|
public class class_name {
void resolveConflictInDocument(String docID) throws CouchbaseLiteException {
synchronized (lock) {
boolean commit = false;
beginTransaction();
try {
// Read local document:
final Document localDoc = new Document((Database) this, docID, true);
// Read the conflicting remote revision:
final Document remoteDoc = new Document((Database) this, docID, true);
try {
if (!remoteDoc.selectConflictingRevision()) {
Log.w(DOMAIN, "Unable to select conflicting revision for '%s', skipping...", docID);
return;
}
}
catch (LiteCoreException e) {
throw CBLStatus.convertException(e);
}
// Resolve conflict:
Log.v(DOMAIN, "Resolving doc '%s' (local=%s and remote=%s)", docID,
localDoc.getRevID(), remoteDoc.getRevID()); // depends on control dependency: [if], data = [none]
final Document resolvedDoc = resolveConflict(localDoc, remoteDoc);
// Save resolved document:
try {
saveResolvedDocument(resolvedDoc, localDoc, remoteDoc); // depends on control dependency: [try], data = [none]
}
catch (LiteCoreException e) {
throw CBLStatus.convertException(e);
} // depends on control dependency: [catch], data = [none]
commit = true; // depends on control dependency: [if], data = [none]
}
finally {
endTransaction(commit);
}
}
} }
|
public class class_name {
    /**
     * Decodes a little-endian base-128 varint starting at {@code offset} and
     * returns its low 32 bits as an int. Over-long encodings (up to the
     * 10-byte maximum) are accepted; their upper bits are discarded.
     *
     * @param bytes  buffer containing the varint
     * @param offset index of the varint's first byte
     * @return the decoded 32-bit value
     * @throws IllegalStateException if the continuation bits never terminate
     *         within the maximum varint length
     */
    public static int varIntBytesToInt(byte[] bytes, int offset) {
        int value = 0;
        // Low 32 bits: up to five payload bytes, 7 bits each (the fifth byte
        // contributes only its low 4 bits once shifted into place).
        for (int shift = 0; shift <= 28; shift += 7) {
            final byte b = bytes[offset++];
            value |= (b & 0x7f) << shift;
            if (b >= 0) {
                return value;
            }
        }
        // Encoding spilled past 32 bits: skip the remaining continuation
        // bytes, keeping only the already-decoded low word.
        for (int i = 0; i < 5; i++) {
            if (bytes[offset++] >= 0) {
                return value;
            }
        }
        // Should never happen since we wrote the varint value. If this occurs due to an internal bug
        // this exception is caught and wrapped further up the chain.
        throw new IllegalStateException("Varint representation is invalid or exceeds 32-bit value");
    } }
|
public class class_name {
public static int varIntBytesToInt(byte[] bytes, int offset) {
byte tmp = bytes[offset++];
if (tmp >= 0) {
return tmp; // depends on control dependency: [if], data = [none]
}
int result = tmp & 0x7f;
if ((tmp = bytes[offset++]) >= 0) {
result |= tmp << 7; // depends on control dependency: [if], data = [none]
} else {
result |= (tmp & 0x7f) << 7; // depends on control dependency: [if], data = [none]
if ((tmp = bytes[offset++]) >= 0) {
result |= tmp << 14; // depends on control dependency: [if], data = [none]
} else {
result |= (tmp & 0x7f) << 14; // depends on control dependency: [if], data = [none]
if ((tmp = bytes[offset++]) >= 0) {
result |= tmp << 21; // depends on control dependency: [if], data = [none]
} else {
result |= (tmp & 0x7f) << 21; // depends on control dependency: [if], data = [none]
result |= (tmp = bytes[offset++]) << 28; // depends on control dependency: [if], data = [none]
if (tmp < 0) {
// Discard upper 32 bits.
for (int i = 0; i < 5; i++) {
if (bytes[offset++] >= 0) {
return result; // depends on control dependency: [if], data = [none]
}
}
//Should never happen since we wrote the varint value. If this occurs due to an internal bug
//this exception is caught and wrapped further up the chain.
throw new IllegalStateException("Varint representation is invalid or exceeds 32-bit value");
}
}
}
}
return result;
} }
|
public class class_name {
    /**
     * True when this rule is a MONTHLY recurrence defined by exactly one
     * positive BYDAY ordinal (e.g. "2nd Tuesday") and no BYMONTHDAY part.
     * Short-circuiting guarantees bydayNum[0] is only read when bydayCount == 1.
     */
    public boolean repeatsMonthlyOnDayCount() {
        return this.freq == MONTHLY
                && bydayCount == 1
                && bymonthdayCount == 0
                && bydayNum[0] > 0;
    } }
|
public class class_name {
public boolean repeatsMonthlyOnDayCount() {
if (this.freq != MONTHLY) {
return false; // depends on control dependency: [if], data = [none]
}
if (bydayCount != 1 || bymonthdayCount != 0) {
return false; // depends on control dependency: [if], data = [none]
}
if (bydayNum[0] <= 0) {
return false; // depends on control dependency: [if], data = [none]
}
return true;
} }
|
public class class_name {
    /**
     * Sets the domain validation options. The supplied collection is
     * defensively copied; {@code null} clears the field (standard SDK setter).
     */
    public void setDomainValidationOptions(java.util.Collection<LoadBalancerTlsCertificateDomainValidationOption> domainValidationOptions) {
        this.domainValidationOptions = (domainValidationOptions == null)
                ? null
                : new java.util.ArrayList<LoadBalancerTlsCertificateDomainValidationOption>(domainValidationOptions);
    } }
|
public class class_name {
public void setDomainValidationOptions(java.util.Collection<LoadBalancerTlsCertificateDomainValidationOption> domainValidationOptions) {
if (domainValidationOptions == null) {
this.domainValidationOptions = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.domainValidationOptions = new java.util.ArrayList<LoadBalancerTlsCertificateDomainValidationOption>(domainValidationOptions);
} }
|
public class class_name {
    /**
     * Builds a {@code Metadata} from a JAR manifest read off {@code is}.
     * Best-effort: any I/O problem while parsing is deliberately swallowed
     * and an empty {@code Metadata} is returned instead.
     * NOTE(review): {@code is} is not closed here - presumably the caller
     * owns the stream; confirm callers close it.
     */
    public static Metadata fromManifest(InputStream is) {
        try {
            Manifest mf = new Manifest(is);
            return fromManifest(mf);
        } catch (IOException e) {
            // Problems? Too bad!
        }
        return new Metadata();
    } }
|
public class class_name {
public static Metadata fromManifest(InputStream is) {
try {
Manifest mf = new Manifest(is);
return fromManifest(mf); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
// Problems? Too bad!
} // depends on control dependency: [catch], data = [none]
return new Metadata();
} }
|
public class class_name {
    /**
     * Snapshots the session's torrents, wrapping each native handle in a
     * {@link TorrentHandle}.
     */
    public List<TorrentHandle> torrents() {
        final torrent_handle_vector handles = s.get_torrents();
        final int count = (int) handles.size();
        final ArrayList<TorrentHandle> result = new ArrayList<>(count);
        int i = 0;
        while (i < count) {
            result.add(new TorrentHandle(handles.get(i)));
            i++;
        }
        return result;
    } }
|
public class class_name {
public List<TorrentHandle> torrents() {
torrent_handle_vector v = s.get_torrents();
int size = (int) v.size();
ArrayList<TorrentHandle> l = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
l.add(new TorrentHandle(v.get(i))); // depends on control dependency: [for], data = [i]
}
return l;
} }
|
public class class_name {
    /**
     * Reports whether a previous run recorded an uncaught exception in the
     * shared preferences. When no Context has been supplied yet, warns once
     * and returns {@code false}.
     */
    static public boolean isUnCaughtExceptionDetected () {
        if (context != null) {
            return context.getSharedPreferences(SHARED_PREF_KEY, Context.MODE_PRIVATE)
                    .getBoolean(SHARED_PREF_KEY_CRASH_DETECTED, false);
        }
        // Warn about the missing context only on the first call.
        if (!context_null_msg_already_printed) {
            Log.w(LOG_TAG_NAME, CONTEXT_NULL_MSG);
            context_null_msg_already_printed = true;
        }
        return false;
    } }
|
public class class_name {
static public boolean isUnCaughtExceptionDetected () {
if (context == null) {
if (!context_null_msg_already_printed) {
Log.w(LOG_TAG_NAME, CONTEXT_NULL_MSG); // depends on control dependency: [if], data = [none]
context_null_msg_already_printed = true; // depends on control dependency: [if], data = [none]
}
return false; // depends on control dependency: [if], data = [none]
}
return context.getSharedPreferences (SHARED_PREF_KEY, Context.MODE_PRIVATE).getBoolean (SHARED_PREF_KEY_CRASH_DETECTED, false);
} }
|
public class class_name {
    /**
     * Returns true when {@code coordinate} touches the given geometry:
     * multi-polygons recurse into their parts; polygons and multi-line-strings
     * test each sub-geometry's line string; rings/line strings are tested
     * directly; points compare by distance against the default delta.
     * Unknown geometry types yield false.
     */
    public static boolean touches(Geometry geometry, Coordinate coordinate) {
        final String type = geometry.getGeometryType();
        if (Geometry.MULTI_POLYGON.equals(type)) {
            for (Geometry part : geometry.getGeometries()) {
                if (touches(part, coordinate)) {
                    return true;
                }
            }
        } else if (Geometry.POLYGON.equals(type) || Geometry.MULTI_LINE_STRING.equals(type)) {
            // Both cases test each sub-geometry as a line string.
            for (Geometry part : geometry.getGeometries()) {
                if (touchesLineString(part, coordinate)) {
                    return true;
                }
            }
        } else if (Geometry.LINEAR_RING.equals(type) || Geometry.LINE_STRING.equals(type)) {
            return touchesLineString(geometry, coordinate);
        } else if (Geometry.POINT.equals(type)) {
            return distance(geometry.getCoordinates()[0], coordinate) < PARAM_DEFAULT_DELTA;
        }
        return false;
    } }
|
public class class_name {
public static boolean touches(Geometry geometry, Coordinate coordinate) {
if (Geometry.MULTI_POLYGON.equals(geometry.getGeometryType())) {
for (int i = 0; i < geometry.getGeometries().length; i++) {
if (touches(geometry.getGeometries()[i], coordinate)) {
return true; // depends on control dependency: [if], data = [none]
}
}
return false; // depends on control dependency: [if], data = [none]
} else if (Geometry.POLYGON.equals(geometry.getGeometryType())) {
for (int i = 0; i < geometry.getGeometries().length; i++) {
if (touchesLineString(geometry.getGeometries()[i], coordinate)) {
return true; // depends on control dependency: [if], data = [none]
}
}
return false; // depends on control dependency: [if], data = [none]
} else if (Geometry.MULTI_LINE_STRING.equals(geometry.getGeometryType())) {
for (int i = 0; i < geometry.getGeometries().length; i++) {
if (touchesLineString(geometry.getGeometries()[i], coordinate)) {
return true; // depends on control dependency: [if], data = [none]
}
}
return false; // depends on control dependency: [if], data = [none]
} else if (Geometry.LINEAR_RING.equals(geometry.getGeometryType())) {
return touchesLineString(geometry, coordinate); // depends on control dependency: [if], data = [none]
} else if (Geometry.LINE_STRING.equals(geometry.getGeometryType())) {
return touchesLineString(geometry, coordinate); // depends on control dependency: [if], data = [none]
} else if (Geometry.POINT.equals(geometry.getGeometryType())) {
return distance(geometry.getCoordinates()[0], coordinate) < PARAM_DEFAULT_DELTA; // depends on control dependency: [if], data = [none]
}
return false;
} }
|
public class class_name {
    /**
     * Copies every HTTP session attribute into the Velocity context under its
     * attribute name. Null sessions and null attribute enumerations are
     * tolerated; the (possibly unchanged) context is returned.
     */
    public static VelocityContext parseSession(VelocityContext context, javax.servlet.http.HttpSession session) {
        if (session == null) {
            return context;
        }
        final Enumeration<String> names = session.getAttributeNames();
        if (names != null) {
            while (names.hasMoreElements()) {
                final String name = names.nextElement();
                context.put(name, session.getAttribute(name));
            }
        }
        return context;
    } }
|
public class class_name {
public static VelocityContext parseSession(VelocityContext context, javax.servlet.http.HttpSession session) {
if (null != session) {
final Enumeration<String> sessionAttrs = session.getAttributeNames();
if (sessionAttrs != null) {
String attrName = null;
while (sessionAttrs.hasMoreElements()) {
attrName = sessionAttrs.nextElement();
// depends on control dependency: [while], data = [none]
context.put(attrName, session.getAttribute(attrName));
// depends on control dependency: [while], data = [none]
}
}
}
return context;
} }
|
public class class_name {
    /**
     * Resolves the converter registered for the given concrete value type,
     * lazily registering one on demand: self-converting types register
     * themselves; otherwise an entry from the abstract-converters map whose
     * key is a supertype of {@code valueType} is bound to it. Enums with no
     * explicit converter fall back to the shared enums converter.
     *
     * @param valueType concrete value type to resolve
     * @return the converter, or null when none applies
     */
    @SuppressWarnings("unchecked")
    private Converter getConverter(Class<?> valueType)
    {
        Converter c = converters.get(valueType);
        if(c == null) {
            synchronized(converters) {
                // BUG FIX: re-read under the lock. The original re-checked the
                // pre-lock local 'c', which is necessarily still null here, so
                // the double-checked locking was a no-op and two threads could
                // both run the registration logic.
                c = converters.get(valueType);
                if(c == null) {
                    if(Types.isKindOf(valueType, Converter.class)) {
                        // self-converting value type are instances that contains both data model and converting logic
                        registerConverter(valueType, (Class<? extends Converter>)valueType);
                    }
                    else {
                        // not concrete value types, i.e. interface or abstract class, are bound to abstract converters map
                        // on the other hand this method got a concrete value type as argument
                        // lookup an entry into abstract converters map that is super-class for requested value type, that is,
                        // requested value type is a kind of a class from abstract converters map
                        // then uses that converter instance to create a concrete bind
                        // as a rational for abstract converters map think of Java time zone
                        // time zone instances returned by JRE are not directly implementing TimeZone interface
                        // there is an internal abstract base class that cannot be bound to a converter instance
                        for(Map.Entry<Class<?>, Converter> entries : abstractConverters.entrySet()) {
                            if(Types.isKindOf(valueType, entries.getKey())) {
                                registerConverterInstance(valueType, entries.getValue());
                            }
                        }
                    }
                    c = converters.get(valueType);
                }
            }
        }
        // at this point converter can still be null
        if(c == null && valueType.isEnum()) {
            return enumsConverter;
        }
        return c;
    } }
|
public class class_name {
@SuppressWarnings("unchecked")
private Converter getConverter(Class<?> valueType)
{
Converter c = converters.get(valueType);
if(c == null) {
synchronized(converters) {
// depends on control dependency: [if], data = [(c]
if(c == null) {
if(Types.isKindOf(valueType, Converter.class)) {
// self-converting value type are instances that contains both data model and converting logic
registerConverter(valueType, (Class<? extends Converter>)valueType);
// depends on control dependency: [if], data = [none]
}
else {
// not concrete value types, i.e. interface or abstract class, are bound to abstract converters map
// on the other hand this method got a concrete value type as argument
// lookup an entry into abstract converters map that is super-class for requested value type, that is,
// requested value type is a kind of a class from abstract converters map
// then uses that converter instance to create a concrete bind
// as a rational for abstract converters map think of Java time zone
// time zone instances returned by JRE are not directly implementing TimeZone interface
// there is an internal abstract base class that cannot be bound to a converter instance
for(Map.Entry<Class<?>, Converter> entries : abstractConverters.entrySet()) {
if(Types.isKindOf(valueType, entries.getKey())) {
registerConverterInstance(valueType, entries.getValue());
// depends on control dependency: [if], data = [none]
}
}
}
c = converters.get(valueType);
// depends on control dependency: [if], data = [none]
}
}
}
// at this point converter can still be null
if(c == null && valueType.isEnum()) {
return enumsConverter;
// depends on control dependency: [if], data = [none]
}
return c;
} }
|
public class class_name {
    /** Rebuilds the name-to-plugin lookup table from the original plugin list. */
    private void buildNameMap() {
        final HashMap<String, GrailsPlugin> byName = new HashMap<String, GrailsPlugin>();
        for (GrailsPlugin plugin : originalPlugins) {
            byName.put(plugin.getName(), plugin);
        }
        nameMap = byName;
    } }
|
public class class_name {
private void buildNameMap() {
nameMap = new HashMap<String, GrailsPlugin>();
for (GrailsPlugin plugin : originalPlugins) {
nameMap.put(plugin.getName(), plugin); // depends on control dependency: [for], data = [plugin]
}
} }
|
public class class_name {
    /**
     * Undoes the most recent push: moves both the back and end positions one
     * slot backwards, dropping back to the previous chunk when a position
     * underflows slot 0.
     * NOTE(review): assumes at least one element has been pushed - an
     * underflow past the first chunk would dereference a null prev; confirm
     * callers uphold this.
     */
    public void unpush()
    {
        // First, move 'back' one position backwards.
        if (backPos > 0) {
            --backPos;
        }
        else {
            // Wrap to the last slot of the previous chunk.
            backPos = size - 1;
            backChunk = backChunk.prev;
        }
        // Now, move 'end' position backwards. Note that obsolete end chunk
        // is not used as a spare chunk. The analysis shows that doing so
        // would require free and atomic operation per chunk deallocated
        // instead of a simple free.
        if (endPos > 0) {
            --endPos;
        }
        else {
            // Wrap to the previous chunk and detach the now-unused tail chunk.
            endPos = size - 1;
            endChunk = endChunk.prev;
            endChunk.next = null;
        }
    } }
|
public class class_name {
public void unpush()
{
// First, move 'back' one position backwards.
if (backPos > 0) {
--backPos; // depends on control dependency: [if], data = [none]
}
else {
backPos = size - 1; // depends on control dependency: [if], data = [none]
backChunk = backChunk.prev; // depends on control dependency: [if], data = [none]
}
// Now, move 'end' position backwards. Note that obsolete end chunk
// is not used as a spare chunk. The analysis shows that doing so
// would require free and atomic operation per chunk deallocated
// instead of a simple free.
if (endPos > 0) {
--endPos; // depends on control dependency: [if], data = [none]
}
else {
endPos = size - 1; // depends on control dependency: [if], data = [none]
endChunk = endChunk.prev; // depends on control dependency: [if], data = [none]
endChunk.next = null; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /** Lazily resolves and caches the child INPUTS model. */
    @Override
    public InputsModel getInputs() {
        if (_inputs != null) {
            return _inputs;
        }
        _inputs = (InputsModel) getFirstChildModel(INPUTS);
        return _inputs;
    } }
|
public class class_name {
@Override
public InputsModel getInputs() {
if (_inputs == null) {
_inputs = (InputsModel)getFirstChildModel(INPUTS); // depends on control dependency: [if], data = [none]
}
return _inputs;
} }
|
public class class_name {
    /**
     * In-place elimination row operation:
     * {@code matrix[row][c] -= factor * matrix[diag][c]} for every column
     * {@code c >= fromCol}. The pivot row {@code matrix[diag]} is unchanged.
     */
    public static void addRowTimes(double[][] matrix, int diag, int fromCol, int row, double factor) {
        final double[] pivotRow = matrix[diag];
        final double[] targetRow = matrix[row];
        final int cols = matrix[0].length;
        // Per-element updates are independent, so direction does not matter.
        for (int c = cols - 1; c >= fromCol; c--) {
            targetRow[c] -= factor * pivotRow[c];
        }
    } }
|
public class class_name {
public static void addRowTimes(double[][] matrix, int diag, int fromCol, int row, double factor) {
int cols = matrix[0].length;
double[] d = matrix[diag];
double[] r = matrix[row];
for (int col = fromCol; col < cols; col++) {
r[col] -= factor * d[col]; // depends on control dependency: [for], data = [col]
}
} }
|
public class class_name {
    /**
     * Updates the use-camera flag, touching the component model only when the
     * value actually changes.
     */
    public void setUseCamera(final boolean useCamera) {
        if (getUseCamera() == useCamera) {
            return;
        }
        getOrCreateComponentModel().useCamera = useCamera;
    } }
|
public class class_name {
public void setUseCamera(final boolean useCamera) {
if (useCamera != getUseCamera()) {
ImageEditModel model = getOrCreateComponentModel();
model.useCamera = useCamera; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Finds which {@link AttributesType} matches a single-parameter method
     * named {@code methodName} on {@code obj}'s class by probing each
     * candidate's parameter class with {@code getMethod}.
     *
     * @param obj        object whose class is inspected
     * @param methodName name of the setter-style method to look up
     * @return the first matching attribute type, or {@code null} if none
     */
    private static AttributesType matchParaMeterOfMethod(final Object obj,
            final String methodName) {
        for (AttributesType attr : AttributesType.values()) {
            try {
                // Varargs form replaces the explicit Class[] array.
                obj.getClass().getMethod(methodName, attr.clazz);
                return attr;
            } catch (NoSuchMethodException | SecurityException ex) {
                // BUG FIX: catch only what getMethod declares. The original
                // broad catch(Exception) also swallowed programming errors
                // such as an NPE from a null 'obj' on every iteration.
                LOG.log(Level.FINE, " error in matchParaMeterOfMethod = "
                        + ex.getLocalizedMessage(), ex);
            }
        }
        return null;
    } }
|
public class class_name {
private static AttributesType matchParaMeterOfMethod(final Object obj,
final String methodName) {
for (AttributesType attr : AttributesType.values()) {
try {
obj.getClass().getMethod(methodName,
new Class[] { attr.clazz });
// depends on control dependency: [try], data = [none]
return attr;
// depends on control dependency: [try], data = [none]
} catch (Exception ex) {
LOG.log(Level.FINE, " error in matchParaMeterOfMethod = "
+ ex.getLocalizedMessage(), ex);
}
// depends on control dependency: [catch], data = [none]
}
return null;
} }
|
public class class_name {
    /**
     * (Re)initializes the access caches: an existing cache is cleared, a new
     * one is created and wired to the shared log listener.
     */
    public static void initialize()
    {
        // IMPROVEMENT: the two branches were byte-identical apart from the
        // cache name; factored into a single helper.
        resetCache(AccessCache.PERMISSIONCACHE);
        resetCache(AccessCache.STATUSCACHE);
    }

    /**
     * Clears {@code cacheName} when it already exists, otherwise creates it
     * and attaches a {@link CacheLogListener} logging to {@code AccessCache.LOG}.
     */
    private static void resetCache(final String cacheName)
    {
        if (InfinispanCache.get().exists(cacheName)) {
            InfinispanCache.get().getCache(cacheName).clear();
        } else {
            InfinispanCache.get().getCache(cacheName)
                            .addListener(new CacheLogListener(AccessCache.LOG));
        }
    } }
|
public class class_name {
public static void initialize()
{
if (InfinispanCache.get().exists(AccessCache.PERMISSIONCACHE)) {
InfinispanCache.get().<UUID, AccessType>getCache(AccessCache.PERMISSIONCACHE).clear(); // depends on control dependency: [if], data = [none]
} else {
InfinispanCache.get().<UUID, AccessType>getCache(AccessCache.PERMISSIONCACHE)
.addListener(new CacheLogListener(AccessCache.LOG)); // depends on control dependency: [if], data = [none]
}
if (InfinispanCache.get().exists(AccessCache.STATUSCACHE)) {
InfinispanCache.get().<Long, AccessType>getCache(AccessCache.STATUSCACHE).clear(); // depends on control dependency: [if], data = [none]
} else {
InfinispanCache.get().<Long, AccessType>getCache(AccessCache.STATUSCACHE)
.addListener(new CacheLogListener(AccessCache.LOG)); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Registers a reachability listener; the platform listener is started
     * when the collection reaches its first entry. Guarded by the listeners
     * monitor.
     */
    void addNetworkReachabilityListener(NetworkReachabilityListener listener) {
        synchronized (listeners) {
            listeners.add(listener);
            final boolean firstSubscriber = listeners.size() == 1;
            if (firstSubscriber) {
                startListening();
            }
        }
    } }
|
public class class_name {
void addNetworkReachabilityListener(NetworkReachabilityListener listener) {
synchronized (listeners) {
listeners.add(listener);
if (listeners.size() == 1) { startListening(); } // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Serializes each object (strings pass through unchanged, everything else
     * via the JSON mapper) and hands it to a producer worker, blocking on
     * each submission. Objects that fail to serialize are skipped.
     *
     * @param topic   destination topic
     * @param objects payloads to enqueue
     * @return the number of messages actually added to the buffer
     * @throws SystemException if a producer worker fails
     */
    public <T extends Serializable> int enqueue(final String topic, List<T> objects) {
        int messagesBuffered = 0;
        for (T object : objects) {
            final String value;
            if (String.class.isAssignableFrom(object.getClass())) {
                value = String.class.cast(object);
            } else {
                try {
                    value = _mapper.writeValueAsString(object);
                } catch (JsonProcessingException e) {
                    _logger.warn("Exception while serializing the object to a string. Skipping this object.", e);
                    continue;
                }
            }
            try {
                boolean addedToBuffer = _executorService.submit(new ProducerWorker(topic, value)).get();
                if (addedToBuffer) {
                    messagesBuffered++;
                }
            } catch (InterruptedException e) {
                _logger.warn("Enqueue operation was interrupted by calling code.");
                Thread.currentThread().interrupt();
                // BUG FIX: stop iterating. Once this thread is re-interrupted,
                // every subsequent get() throws immediately, so the original
                // loop spammed the log and submitted work whose result was
                // never observed.
                break;
            } catch (ExecutionException e) {
                throw new SystemException(e);
            }
        }
        return messagesBuffered;
    } }
|
public class class_name {
public <T extends Serializable> int enqueue(final String topic, List<T> objects) {
int messagesBuffered = 0;
for (T object : objects) {
final String value;
if (String.class.isAssignableFrom(object.getClass())) {
value = String.class.cast(object); // depends on control dependency: [if], data = [none]
} else {
try {
value = _mapper.writeValueAsString(object); // depends on control dependency: [try], data = [none]
} catch (JsonProcessingException e) {
_logger.warn("Exception while serializing the object to a string. Skipping this object.", e);
continue;
} // depends on control dependency: [catch], data = [none]
}
try {
boolean addedToBuffer = _executorService.submit(new ProducerWorker(topic, value)).get();
if (addedToBuffer) {
messagesBuffered++; // depends on control dependency: [if], data = [none]
}
} catch (InterruptedException e) {
_logger.warn("Enqueue operation was interrupted by calling code.");
Thread.currentThread().interrupt();
} catch (ExecutionException e) { // depends on control dependency: [catch], data = [none]
throw new SystemException(e);
} // depends on control dependency: [catch], data = [none]
}
return messagesBuffered;
} }
|
public class class_name {
public static IAtomContainer copyAndSuppressedHydrogens(IAtomContainer org) {
try {
return suppressHydrogens(org.clone());
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("atom container could not be cloned");
}
} }
|
public class class_name {
public static IAtomContainer copyAndSuppressedHydrogens(IAtomContainer org) {
try {
return suppressHydrogens(org.clone()); // depends on control dependency: [try], data = [none]
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("atom container could not be cloned");
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
public void marshall(DescribeComplianceByResourceRequest describeComplianceByResourceRequest, ProtocolMarshaller protocolMarshaller) {
if (describeComplianceByResourceRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(describeComplianceByResourceRequest.getResourceType(), RESOURCETYPE_BINDING);
protocolMarshaller.marshall(describeComplianceByResourceRequest.getResourceId(), RESOURCEID_BINDING);
protocolMarshaller.marshall(describeComplianceByResourceRequest.getComplianceTypes(), COMPLIANCETYPES_BINDING);
protocolMarshaller.marshall(describeComplianceByResourceRequest.getLimit(), LIMIT_BINDING);
protocolMarshaller.marshall(describeComplianceByResourceRequest.getNextToken(), NEXTTOKEN_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} }
|
public class class_name {
public void marshall(DescribeComplianceByResourceRequest describeComplianceByResourceRequest, ProtocolMarshaller protocolMarshaller) {
if (describeComplianceByResourceRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(describeComplianceByResourceRequest.getResourceType(), RESOURCETYPE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(describeComplianceByResourceRequest.getResourceId(), RESOURCEID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(describeComplianceByResourceRequest.getComplianceTypes(), COMPLIANCETYPES_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(describeComplianceByResourceRequest.getLimit(), LIMIT_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(describeComplianceByResourceRequest.getNextToken(), NEXTTOKEN_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
private void buildSpectrogram() {
short[] amplitudes = wave.getSampleAmplitudes();
int numSamples = amplitudes.length;
int pointer = 0;
// overlapping
if (overlapFactor > 1) {
int numOverlappedSamples = numSamples * overlapFactor;
int backSamples = fftSampleSize * (overlapFactor - 1) / overlapFactor;
short[] overlapAmp = new short[numOverlappedSamples];
pointer = 0;
for (int i = 0; i < amplitudes.length; i++) {
overlapAmp[pointer++] = amplitudes[i];
if (pointer % fftSampleSize == 0) {
// overlap
i -= backSamples;
}
}
numSamples = numOverlappedSamples;
amplitudes = overlapAmp;
}
// end overlapping
numFrames = numSamples / fftSampleSize;
framesPerSecond = (int) (numFrames / wave.length());
// set signals for fft
WindowFunction window = new WindowFunction();
window.setWindowType("Hamming");
double[] win = window.generate(fftSampleSize);
double[][] signals = new double[numFrames][];
for (int f = 0; f < numFrames; f++) {
signals[f] = new double[fftSampleSize];
int startSample = f * fftSampleSize;
for (int n = 0; n < fftSampleSize; n++) {
signals[f][n] = amplitudes[startSample + n] * win[n];
}
}
// end set signals for fft
absoluteSpectrogram = new double[numFrames][];
// for each frame in signals, do fft on it
FastFourierTransform fft = new FastFourierTransform();
for (int i = 0; i < numFrames; i++) {
absoluteSpectrogram[i] = fft.getMagnitudes(signals[i], false);
}
if (absoluteSpectrogram.length > 0) {
numFrequencyUnit = absoluteSpectrogram[0].length;
unitFrequency = (double) wave.getWaveHeader().getSampleRate() / 2 / numFrequencyUnit; // frequency could be caught within the half of nSamples according to Nyquist theory
// normalization of absoultSpectrogram
spectrogram = new double[numFrames][numFrequencyUnit];
// set max and min amplitudes
double maxAmp = Double.MIN_VALUE;
double minAmp = Double.MAX_VALUE;
for (int i = 0; i < numFrames; i++) {
for (int j = 0; j < numFrequencyUnit; j++) {
if (absoluteSpectrogram[i][j] > maxAmp) {
maxAmp = absoluteSpectrogram[i][j];
} else if (absoluteSpectrogram[i][j] < minAmp) {
minAmp = absoluteSpectrogram[i][j];
}
}
}
// end set max and min amplitudes
// normalization
// avoiding divided by zero
double minValidAmp = 0.00000000001F;
if (minAmp == 0) {
minAmp = minValidAmp;
}
double diff = Math.log10(maxAmp / minAmp); // perceptual difference
for (int i = 0; i < numFrames; i++) {
for (int j = 0; j < numFrequencyUnit; j++) {
if (absoluteSpectrogram[i][j] < minValidAmp) {
spectrogram[i][j] = 0;
} else {
spectrogram[i][j] = (Math.log10(absoluteSpectrogram[i][j] / minAmp)) / diff;
}
}
}
// end normalization
}
} }
|
public class class_name {
private void buildSpectrogram() {
short[] amplitudes = wave.getSampleAmplitudes();
int numSamples = amplitudes.length;
int pointer = 0;
// overlapping
if (overlapFactor > 1) {
int numOverlappedSamples = numSamples * overlapFactor;
int backSamples = fftSampleSize * (overlapFactor - 1) / overlapFactor;
short[] overlapAmp = new short[numOverlappedSamples];
pointer = 0; // depends on control dependency: [if], data = [none]
for (int i = 0; i < amplitudes.length; i++) {
overlapAmp[pointer++] = amplitudes[i]; // depends on control dependency: [for], data = [i]
if (pointer % fftSampleSize == 0) {
// overlap
i -= backSamples; // depends on control dependency: [if], data = [none]
}
}
numSamples = numOverlappedSamples; // depends on control dependency: [if], data = [none]
amplitudes = overlapAmp; // depends on control dependency: [if], data = [none]
}
// end overlapping
numFrames = numSamples / fftSampleSize;
framesPerSecond = (int) (numFrames / wave.length());
// set signals for fft
WindowFunction window = new WindowFunction();
window.setWindowType("Hamming");
double[] win = window.generate(fftSampleSize);
double[][] signals = new double[numFrames][];
for (int f = 0; f < numFrames; f++) {
signals[f] = new double[fftSampleSize]; // depends on control dependency: [for], data = [f]
int startSample = f * fftSampleSize;
for (int n = 0; n < fftSampleSize; n++) {
signals[f][n] = amplitudes[startSample + n] * win[n]; // depends on control dependency: [for], data = [n]
}
}
// end set signals for fft
absoluteSpectrogram = new double[numFrames][];
// for each frame in signals, do fft on it
FastFourierTransform fft = new FastFourierTransform();
for (int i = 0; i < numFrames; i++) {
absoluteSpectrogram[i] = fft.getMagnitudes(signals[i], false); // depends on control dependency: [for], data = [i]
}
if (absoluteSpectrogram.length > 0) {
numFrequencyUnit = absoluteSpectrogram[0].length; // depends on control dependency: [if], data = [none]
unitFrequency = (double) wave.getWaveHeader().getSampleRate() / 2 / numFrequencyUnit; // frequency could be caught within the half of nSamples according to Nyquist theory // depends on control dependency: [if], data = [none]
// normalization of absoultSpectrogram
spectrogram = new double[numFrames][numFrequencyUnit]; // depends on control dependency: [if], data = [none]
// set max and min amplitudes
double maxAmp = Double.MIN_VALUE;
double minAmp = Double.MAX_VALUE;
for (int i = 0; i < numFrames; i++) {
for (int j = 0; j < numFrequencyUnit; j++) {
if (absoluteSpectrogram[i][j] > maxAmp) {
maxAmp = absoluteSpectrogram[i][j]; // depends on control dependency: [if], data = [none]
} else if (absoluteSpectrogram[i][j] < minAmp) {
minAmp = absoluteSpectrogram[i][j]; // depends on control dependency: [if], data = [none]
}
}
}
// end set max and min amplitudes
// normalization
// avoiding divided by zero
double minValidAmp = 0.00000000001F;
if (minAmp == 0) {
minAmp = minValidAmp; // depends on control dependency: [if], data = [none]
}
double diff = Math.log10(maxAmp / minAmp); // perceptual difference
for (int i = 0; i < numFrames; i++) {
for (int j = 0; j < numFrequencyUnit; j++) {
if (absoluteSpectrogram[i][j] < minValidAmp) {
spectrogram[i][j] = 0; // depends on control dependency: [if], data = [none]
} else {
spectrogram[i][j] = (Math.log10(absoluteSpectrogram[i][j] / minAmp)) / diff; // depends on control dependency: [if], data = [(absoluteSpectrogram[i][j]]
}
}
}
// end normalization
}
} }
|
public class class_name {
public void addColumn(CmsListColumnDefinition listColumn, int position) {
setListIdForColumn(listColumn);
if (m_columns.elementList().isEmpty()) {
listColumn.setPrintable(true);
} else {
listColumn.setPrintable(listColumn.isSorteable());
}
if ((listColumn.getName() == null) && listColumn.isPrintable()) {
listColumn.setPrintable(false);
}
m_columns.addIdentifiableObject(listColumn.getId(), listColumn, position);
} }
|
public class class_name {
public void addColumn(CmsListColumnDefinition listColumn, int position) {
setListIdForColumn(listColumn);
if (m_columns.elementList().isEmpty()) {
listColumn.setPrintable(true); // depends on control dependency: [if], data = [none]
} else {
listColumn.setPrintable(listColumn.isSorteable()); // depends on control dependency: [if], data = [none]
}
if ((listColumn.getName() == null) && listColumn.isPrintable()) {
listColumn.setPrintable(false); // depends on control dependency: [if], data = [none]
}
m_columns.addIdentifiableObject(listColumn.getId(), listColumn, position);
} }
|
public class class_name {
private Set<Field> getItemFields() {
Class next = Item.class;
Set<Field> fields = new HashSet<>(getFields(next));
while (next.getSuperclass() != Object.class) {
next = next.getSuperclass();
fields.addAll(getFields(next));
}
return fields;
} }
|
public class class_name {
private Set<Field> getItemFields() {
Class next = Item.class;
Set<Field> fields = new HashSet<>(getFields(next));
while (next.getSuperclass() != Object.class) {
next = next.getSuperclass(); // depends on control dependency: [while], data = [none]
fields.addAll(getFields(next)); // depends on control dependency: [while], data = [none]
}
return fields;
} }
|
public class class_name {
private void handleArgUninstall(final String arg, final Deque<String> args) {
install = true;
String name = arg;
final int posEq = name.indexOf("=");
String value;
if (posEq != -1) {
value = name.substring(posEq + 1);
} else {
value = args.peek();
if (value != null && !value.startsWith("-")) {
value = args.pop();
} else {
value = null;
}
}
if (value == null) {
throw new BuildException("You must specify a installation package when using the --uninstall argument");
}
uninstallId = value;
} }
|
public class class_name {
private void handleArgUninstall(final String arg, final Deque<String> args) {
install = true;
String name = arg;
final int posEq = name.indexOf("=");
String value;
if (posEq != -1) {
value = name.substring(posEq + 1); // depends on control dependency: [if], data = [(posEq]
} else {
value = args.peek(); // depends on control dependency: [if], data = [none]
if (value != null && !value.startsWith("-")) {
value = args.pop(); // depends on control dependency: [if], data = [none]
} else {
value = null; // depends on control dependency: [if], data = [none]
}
}
if (value == null) {
throw new BuildException("You must specify a installation package when using the --uninstall argument");
}
uninstallId = value;
} }
|
public class class_name {
public double getRank(final double value) {
if (isEmpty()) { return Double.NaN; }
final DoublesSketchAccessor samples = DoublesSketchAccessor.wrap(this);
long total = 0;
int weight = 1;
samples.setLevel(DoublesSketchAccessor.BB_LVL_IDX);
for (int i = 0; i < samples.numItems(); i++) {
if (samples.get(i) < value) {
total += weight;
}
}
long bitPattern = getBitPattern();
for (int lvl = 0; bitPattern != 0L; lvl++, bitPattern >>>= 1) {
weight *= 2;
if ((bitPattern & 1L) > 0) { // level is not empty
samples.setLevel(lvl);
for (int i = 0; i < samples.numItems(); i++) {
if (samples.get(i) < value) {
total += weight;
} else {
break; // levels are sorted, no point comparing further
}
}
}
}
return (double) total / getN();
} }
|
public class class_name {
public double getRank(final double value) {
if (isEmpty()) { return Double.NaN; } // depends on control dependency: [if], data = [none]
final DoublesSketchAccessor samples = DoublesSketchAccessor.wrap(this);
long total = 0;
int weight = 1;
samples.setLevel(DoublesSketchAccessor.BB_LVL_IDX);
for (int i = 0; i < samples.numItems(); i++) {
if (samples.get(i) < value) {
total += weight; // depends on control dependency: [if], data = [none]
}
}
long bitPattern = getBitPattern();
for (int lvl = 0; bitPattern != 0L; lvl++, bitPattern >>>= 1) {
weight *= 2; // depends on control dependency: [for], data = [none]
if ((bitPattern & 1L) > 0) { // level is not empty
samples.setLevel(lvl); // depends on control dependency: [if], data = [none]
for (int i = 0; i < samples.numItems(); i++) {
if (samples.get(i) < value) {
total += weight; // depends on control dependency: [if], data = [none]
} else {
break; // levels are sorted, no point comparing further
}
}
}
}
return (double) total / getN();
} }
|
public class class_name {
public void setEnabled(boolean enabled) {
super.setEnabled(enabled);
comboBox.setEnabled(enabled);
if (spinner != null) {
spinner.setEnabled(enabled);
}
} }
|
public class class_name {
public void setEnabled(boolean enabled) {
super.setEnabled(enabled);
comboBox.setEnabled(enabled);
if (spinner != null) {
spinner.setEnabled(enabled); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
static boolean containsAllImpl(Collection<?> self, Collection<?> c) {
for (Object o : c) {
if (!self.contains(o)) {
return false;
}
}
return true;
} }
|
public class class_name {
static boolean containsAllImpl(Collection<?> self, Collection<?> c) {
for (Object o : c) {
if (!self.contains(o)) {
return false; // depends on control dependency: [if], data = [none]
}
}
return true;
} }
|
public class class_name {
public Map<String, String> map() {
Map<String, String> map = new HashMap<String, String>();
int i = 0;
while (i < size) {
map.put(items[i], items[i + 1]);
i += 2;
}
return map;
} }
|
public class class_name {
public Map<String, String> map() {
Map<String, String> map = new HashMap<String, String>();
int i = 0;
while (i < size) {
map.put(items[i], items[i + 1]); // depends on control dependency: [while], data = [(i]
i += 2; // depends on control dependency: [while], data = [none]
}
return map;
} }
|
public class class_name {
public static Nature convertStringToNature(String name, LinkedHashSet<Nature> customNatureCollector)
{
Nature nature = Nature.fromString(name);
if (nature == null)
{
nature = Nature.create(name);
if (customNatureCollector != null) customNatureCollector.add(nature);
}
return nature;
} }
|
public class class_name {
public static Nature convertStringToNature(String name, LinkedHashSet<Nature> customNatureCollector)
{
Nature nature = Nature.fromString(name);
if (nature == null)
{
nature = Nature.create(name); // depends on control dependency: [if], data = [none]
if (customNatureCollector != null) customNatureCollector.add(nature);
}
return nature;
} }
|
public class class_name {
public WebSocketCall createWebSocketCall(String url, Map<String, String> headers) {
String base64Credentials = buildBase64Credentials();
Request.Builder requestBuilder = new Request.Builder()
.url(url)
.addHeader("Authorization", "Basic " + base64Credentials)
.addHeader("Accept", "application/json");
if (headers != null) {
for (Map.Entry<String, String> header : headers.entrySet()) {
requestBuilder.addHeader(header.getKey(), header.getValue());
}
}
Request request = requestBuilder.build();
WebSocketCall wsc = WebSocketCall.create(getHttpClient(), request);
return wsc;
} }
|
public class class_name {
public WebSocketCall createWebSocketCall(String url, Map<String, String> headers) {
String base64Credentials = buildBase64Credentials();
Request.Builder requestBuilder = new Request.Builder()
.url(url)
.addHeader("Authorization", "Basic " + base64Credentials)
.addHeader("Accept", "application/json");
if (headers != null) {
for (Map.Entry<String, String> header : headers.entrySet()) {
requestBuilder.addHeader(header.getKey(), header.getValue()); // depends on control dependency: [for], data = [header]
}
}
Request request = requestBuilder.build();
WebSocketCall wsc = WebSocketCall.create(getHttpClient(), request);
return wsc;
} }
|
public class class_name {
public Builder done() {
if (done) {
return parent;
}
this.tree.fragments = fragments.toArray(new QueryFragment[fragments.size()]);
if (parent != null) {
parent.tree.subTrees.add(this.tree);
parent.children.remove(this);
}
//done will remove the child from the children, so we'd get concurrent modification exceptions
//avoid that stupidly by working on a copy of children
new ArrayList<>(children).forEach(Query.Builder::done);
done = true;
return parent;
} }
|
public class class_name {
public Builder done() {
if (done) {
return parent; // depends on control dependency: [if], data = [none]
}
this.tree.fragments = fragments.toArray(new QueryFragment[fragments.size()]);
if (parent != null) {
parent.tree.subTrees.add(this.tree); // depends on control dependency: [if], data = [none]
parent.children.remove(this); // depends on control dependency: [if], data = [none]
}
//done will remove the child from the children, so we'd get concurrent modification exceptions
//avoid that stupidly by working on a copy of children
new ArrayList<>(children).forEach(Query.Builder::done);
done = true;
return parent;
} }
|
public class class_name {
private static void drawOctogon()
{
// Do the following 8 times --#5.1
for (int i = 0; i < 8; i++)
{
// Change the pen color of the line the tortoise draws to the next color on the color wheel --#3
Tortoise.setPenColor(ColorWheel.getNextColor());
// Move the tortoise 50 pixels --#2
Tortoise.move(50);
// Turn the tortoise 1/8th of 360 degrees to the right --#4
Tortoise.turn(360.0 / 8);
// End Repeat --#5.2
}
// ------------- End of drawOctogon recipe --#10.3
} }
|
public class class_name {
private static void drawOctogon()
{
// Do the following 8 times --#5.1
for (int i = 0; i < 8; i++)
{
// Change the pen color of the line the tortoise draws to the next color on the color wheel --#3
Tortoise.setPenColor(ColorWheel.getNextColor()); // depends on control dependency: [for], data = [none]
// Move the tortoise 50 pixels --#2
Tortoise.move(50); // depends on control dependency: [for], data = [none]
// Turn the tortoise 1/8th of 360 degrees to the right --#4
Tortoise.turn(360.0 / 8); // depends on control dependency: [for], data = [none]
// End Repeat --#5.2
}
// ------------- End of drawOctogon recipe --#10.3
} }
|
public class class_name {
public static long[] reductionShape(INDArray x, int[] dimension, boolean newFormat, boolean keepDims){
boolean wholeArray = Shape.wholeArrayDimension(dimension) || dimension.length == x.rank();
long[] retShape;
if(!newFormat) {
retShape = wholeArray ? new long[] {1, 1} : ArrayUtil.removeIndex(x.shape(), dimension);
//ensure vector is proper shape (if old format)
if (retShape.length == 1) {
if (dimension[0] == 0)
retShape = new long[]{1, retShape[0]};
else
retShape = new long[]{retShape[0], 1};
} else if (retShape.length == 0) {
retShape = new long[]{1, 1};
}
} else {
if(keepDims){
retShape = x.shape().clone();
if(wholeArray){
for( int i=0; i<retShape.length; i++ ){
retShape[i] = 1;
}
} else {
for (int d : dimension) {
retShape[d] = 1;
}
}
} else {
retShape = wholeArray ? new long[0] : ArrayUtil.removeIndex(x.shape(), dimension);
}
}
return retShape;
} }
|
public class class_name {
public static long[] reductionShape(INDArray x, int[] dimension, boolean newFormat, boolean keepDims){
boolean wholeArray = Shape.wholeArrayDimension(dimension) || dimension.length == x.rank();
long[] retShape;
if(!newFormat) {
retShape = wholeArray ? new long[] {1, 1} : ArrayUtil.removeIndex(x.shape(), dimension); // depends on control dependency: [if], data = [none]
//ensure vector is proper shape (if old format)
if (retShape.length == 1) {
if (dimension[0] == 0)
retShape = new long[]{1, retShape[0]};
else
retShape = new long[]{retShape[0], 1};
} else if (retShape.length == 0) {
retShape = new long[]{1, 1}; // depends on control dependency: [if], data = [none]
}
} else {
if(keepDims){
retShape = x.shape().clone(); // depends on control dependency: [if], data = [none]
if(wholeArray){
for( int i=0; i<retShape.length; i++ ){
retShape[i] = 1; // depends on control dependency: [for], data = [i]
}
} else {
for (int d : dimension) {
retShape[d] = 1; // depends on control dependency: [for], data = [d]
}
}
} else {
retShape = wholeArray ? new long[0] : ArrayUtil.removeIndex(x.shape(), dimension); // depends on control dependency: [if], data = [none]
}
}
return retShape;
} }
|
public class class_name {
private org.ironjacamar.core.connectionmanager.listener.ConnectionListener
associateConnectionListener(Credential credential, Object connection)
throws ResourceException
{
log.tracef("associateConnectionListener(%s, %s)", credential, connection);
if (isShutdown())
{
throw new ResourceException();
}
if (!cmConfiguration.isSharable())
throw new ResourceException();
org.ironjacamar.core.connectionmanager.listener.ConnectionListener cl =
pool.getActiveConnectionListener(credential);
if (cl == null)
{
if (!pool.isFull())
{
try
{
cl = pool.getConnectionListener(credential);
}
catch (ResourceException re)
{
// Ignore
}
}
if (cl == null)
{
org.ironjacamar.core.connectionmanager.listener.ConnectionListener removeCl =
pool.removeConnectionListener(null);
if (removeCl != null)
{
try
{
if (ccm != null)
{
for (Object c : removeCl.getConnections())
{
ccm.unregisterConnection(this, removeCl, c);
}
}
returnConnectionListener(removeCl, true);
cl = pool.getConnectionListener(credential);
}
catch (ResourceException ire)
{
// Nothing we can do
}
}
else
{
if (getTransactionSupport() == TransactionSupportLevel.NoTransaction)
{
org.ironjacamar.core.connectionmanager.listener.ConnectionListener targetCl =
pool.removeConnectionListener(credential);
if (targetCl != null)
{
if (targetCl.getManagedConnection() instanceof DissociatableManagedConnection)
{
DissociatableManagedConnection dmc =
(DissociatableManagedConnection)targetCl.getManagedConnection();
if (ccm != null)
{
for (Object c : targetCl.getConnections())
{
ccm.unregisterConnection(this, targetCl, c);
}
}
dmc.dissociateConnections();
targetCl.clearConnections();
cl = targetCl;
}
else
{
try
{
if (ccm != null)
{
for (Object c : targetCl.getConnections())
{
ccm.unregisterConnection(this, targetCl, c);
}
}
returnConnectionListener(targetCl, true);
cl = pool.getConnectionListener(credential);
}
catch (ResourceException ire)
{
// Nothing we can do
}
}
}
}
}
}
}
if (cl == null)
throw new ResourceException();
if (connection != null)
{
// Associate managed connection with the connection
cl.getManagedConnection().associateConnection(connection);
cl.addConnection(connection);
if (ccm != null)
{
ccm.registerConnection(this, cl, connection);
}
}
return cl;
} }
|
public class class_name {
private org.ironjacamar.core.connectionmanager.listener.ConnectionListener
associateConnectionListener(Credential credential, Object connection)
throws ResourceException
{
log.tracef("associateConnectionListener(%s, %s)", credential, connection);
if (isShutdown())
{
throw new ResourceException();
}
if (!cmConfiguration.isSharable())
throw new ResourceException();
org.ironjacamar.core.connectionmanager.listener.ConnectionListener cl =
pool.getActiveConnectionListener(credential);
if (cl == null)
{
if (!pool.isFull())
{
try
{
cl = pool.getConnectionListener(credential); // depends on control dependency: [try], data = [none]
}
catch (ResourceException re)
{
// Ignore
} // depends on control dependency: [catch], data = [none]
}
if (cl == null)
{
org.ironjacamar.core.connectionmanager.listener.ConnectionListener removeCl =
pool.removeConnectionListener(null);
if (removeCl != null)
{
try
{
if (ccm != null)
{
for (Object c : removeCl.getConnections())
{
ccm.unregisterConnection(this, removeCl, c); // depends on control dependency: [for], data = [c]
}
}
returnConnectionListener(removeCl, true); // depends on control dependency: [try], data = [none]
cl = pool.getConnectionListener(credential); // depends on control dependency: [try], data = [none]
}
catch (ResourceException ire)
{
// Nothing we can do
} // depends on control dependency: [catch], data = [none]
}
else
{
if (getTransactionSupport() == TransactionSupportLevel.NoTransaction)
{
org.ironjacamar.core.connectionmanager.listener.ConnectionListener targetCl =
pool.removeConnectionListener(credential);
if (targetCl != null)
{
if (targetCl.getManagedConnection() instanceof DissociatableManagedConnection)
{
DissociatableManagedConnection dmc =
(DissociatableManagedConnection)targetCl.getManagedConnection();
if (ccm != null)
{
for (Object c : targetCl.getConnections())
{
ccm.unregisterConnection(this, targetCl, c); // depends on control dependency: [for], data = [c]
}
}
dmc.dissociateConnections(); // depends on control dependency: [if], data = [none]
targetCl.clearConnections(); // depends on control dependency: [if], data = [none]
cl = targetCl; // depends on control dependency: [if], data = [none]
}
else
{
try
{
if (ccm != null)
{
for (Object c : targetCl.getConnections())
{
ccm.unregisterConnection(this, targetCl, c); // depends on control dependency: [for], data = [c]
}
}
returnConnectionListener(targetCl, true); // depends on control dependency: [try], data = [none]
cl = pool.getConnectionListener(credential); // depends on control dependency: [try], data = [none]
}
catch (ResourceException ire)
{
// Nothing we can do
} // depends on control dependency: [catch], data = [none]
}
}
}
}
}
}
if (cl == null)
throw new ResourceException();
if (connection != null)
{
// Associate managed connection with the connection
cl.getManagedConnection().associateConnection(connection);
cl.addConnection(connection);
if (ccm != null)
{
ccm.registerConnection(this, cl, connection);
}
}
return cl;
} }
|
public class class_name {
public static String getText(Activity context, int id) {
TextView view = findViewById(context, id);
String text = "";
if (view != null) {
text = view.getText().toString();
} else {
Log.e("Caffeine", "Null view given to getText(). \"\" will be returned.");
}
return text;
} }
|
public class class_name {
public static String getText(Activity context, int id) {
TextView view = findViewById(context, id);
String text = "";
if (view != null) {
text = view.getText().toString(); // depends on control dependency: [if], data = [none]
} else {
Log.e("Caffeine", "Null view given to getText(). \"\" will be returned."); // depends on control dependency: [if], data = [none]
}
return text;
} }
|
public class class_name {
private static void modifyField(Calendar calendar, int field, ModifyType modifyType) {
// Console.log("# {} {}", DateField.of(field), calendar.getActualMinimum(field));
switch (modifyType) {
case TRUNCATE:
calendar.set(field, DateUtil.getBeginValue(calendar, field));
break;
case CEILING:
calendar.set(field, DateUtil.getEndValue(calendar, field));
break;
case ROUND:
int min = DateUtil.getBeginValue(calendar, field);
int max = DateUtil.getEndValue(calendar, field);
int href;
if (Calendar.DAY_OF_WEEK == field) {
// 星期特殊处理,假设周一是第一天,中间的为周四
href = (min + 3) % 7;
} else {
href = (max - min) / 2 + 1;
}
int value = calendar.get(field);
calendar.set(field, (value < href) ? min : max);
break;
}
} }
|
public class class_name {
private static void modifyField(Calendar calendar, int field, ModifyType modifyType) {
// Console.log("# {} {}", DateField.of(field), calendar.getActualMinimum(field));
switch (modifyType) {
case TRUNCATE:
calendar.set(field, DateUtil.getBeginValue(calendar, field));
break;
case CEILING:
calendar.set(field, DateUtil.getEndValue(calendar, field));
break;
case ROUND:
int min = DateUtil.getBeginValue(calendar, field);
int max = DateUtil.getEndValue(calendar, field);
int href;
if (Calendar.DAY_OF_WEEK == field) {
// 星期特殊处理,假设周一是第一天,中间的为周四
href = (min + 3) % 7;
// depends on control dependency: [if], data = [none]
} else {
href = (max - min) / 2 + 1;
// depends on control dependency: [if], data = [none]
}
int value = calendar.get(field);
calendar.set(field, (value < href) ? min : max);
break;
}
} }
|
public class class_name {
public void setObjectProjectionAttribute(String objectProjectionAttribute)
{
ClassDescriptor baseCld = MetadataManager.getInstance().getRepository().getDescriptorFor(m_baseClass);
ArrayList descs = baseCld.getAttributeDescriptorsForPath(objectProjectionAttribute);
int pathLen = descs.size();
if ((pathLen > 0) && (descs.get(pathLen - 1) instanceof ObjectReferenceDescriptor))
{
ObjectReferenceDescriptor ord =
((ObjectReferenceDescriptor) descs.get(pathLen - 1));
setObjectProjectionAttribute(objectProjectionAttribute,
ord.getItemClass());
}
} }
|
public class class_name {
public void setObjectProjectionAttribute(String objectProjectionAttribute)
{
ClassDescriptor baseCld = MetadataManager.getInstance().getRepository().getDescriptorFor(m_baseClass);
ArrayList descs = baseCld.getAttributeDescriptorsForPath(objectProjectionAttribute);
int pathLen = descs.size();
if ((pathLen > 0) && (descs.get(pathLen - 1) instanceof ObjectReferenceDescriptor))
{
ObjectReferenceDescriptor ord =
((ObjectReferenceDescriptor) descs.get(pathLen - 1));
setObjectProjectionAttribute(objectProjectionAttribute,
ord.getItemClass());
// depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public static String removeSemicolonContent(String requestUri) {
int semicolonIndex = requestUri.indexOf(';');
while (semicolonIndex != -1) {
int slashIndex = requestUri.indexOf('/', semicolonIndex);
String start = requestUri.substring(0, semicolonIndex);
requestUri = (slashIndex != -1) ? start + requestUri.substring(slashIndex) : start;
semicolonIndex = requestUri.indexOf(';', semicolonIndex);
}
return requestUri;
} }
|
public class class_name {
public static String removeSemicolonContent(String requestUri) {
int semicolonIndex = requestUri.indexOf(';');
while (semicolonIndex != -1) {
int slashIndex = requestUri.indexOf('/', semicolonIndex);
String start = requestUri.substring(0, semicolonIndex);
requestUri = (slashIndex != -1) ? start + requestUri.substring(slashIndex) : start; // depends on control dependency: [while], data = [-1)]
semicolonIndex = requestUri.indexOf(';', semicolonIndex); // depends on control dependency: [while], data = [none]
}
return requestUri;
} }
|
public class class_name {
    /**
     * Builds a recurrence iterator for the given observance: the union of the
     * DTSTART, RRULE, and RDATE occurrences, minus the union of the EXRULE and
     * EXDATE occurrences.
     *
     * @param observance the timezone observance to expand
     * @return an iterator over the observance's effective occurrence dates
     */
    RecurrenceIterator createIterator(Observance observance) {
        List<RecurrenceIterator> include = new ArrayList<RecurrenceIterator>();
        List<RecurrenceIterator> exclude = new ArrayList<RecurrenceIterator>();

        ICalDate start = getValue(observance.getDateStart());
        if (start != null) {
            DateValue startValue = convertFromRawComponents(start);

            //the DTSTART date itself is always an occurrence
            include.add(new DateValueRecurrenceIterator(Arrays.asList(startValue)));

            //expand each RRULE relative to DTSTART
            for (RecurrenceRule rrule : observance.getProperties(RecurrenceRule.class)) {
                Recurrence recurrence = rrule.getValue();
                if (recurrence == null) {
                    continue;
                }
                include.add(RecurrenceIteratorFactory.createRecurrenceIterator(recurrence, startValue, utc));
            }

            //expand each EXRULE relative to DTSTART
            for (ExceptionRule exrule : observance.getProperties(ExceptionRule.class)) {
                Recurrence recurrence = exrule.getValue();
                if (recurrence == null) {
                    continue;
                }
                exclude.add(RecurrenceIteratorFactory.createRecurrenceIterator(recurrence, startValue, utc));
            }
        }

        //collect explicit RDATE occurrences (sorted before iteration)
        List<ICalDate> recurrenceDates = new ArrayList<ICalDate>();
        for (RecurrenceDates rdate : observance.getRecurrenceDates()) {
            recurrenceDates.addAll(rdate.getDates());
        }
        Collections.sort(recurrenceDates);
        include.add(new DateRecurrenceIterator(recurrenceDates));

        //collect explicit EXDATE exclusions (sorted before iteration)
        List<ICalDate> exceptionDates = new ArrayList<ICalDate>();
        for (ExceptionDates exdate : observance.getProperties(ExceptionDates.class)) {
            exceptionDates.addAll(exdate.getValues());
        }
        Collections.sort(exceptionDates);
        exclude.add(new DateRecurrenceIterator(exceptionDates));

        RecurrenceIterator included = join(include);
        if (exclude.isEmpty()) {
            return included;
        }
        return RecurrenceIteratorFactory.except(included, join(exclude));
    }
}
|
public class class_name {
    /**
     * Builds a recurrence iterator for the given observance: the union of the
     * DTSTART, RRULE, and RDATE occurrences, minus the union of the EXRULE and
     * EXDATE occurrences. RDATE and EXDATE lists are sorted before being wrapped,
     * presumably because DateRecurrenceIterator requires ordered input — TODO confirm.
     *
     * @param observance the timezone observance to expand
     * @return an iterator over the observance's effective occurrence dates
     */
    RecurrenceIterator createIterator(Observance observance) {
        List<RecurrenceIterator> inclusions = new ArrayList<RecurrenceIterator>();
        List<RecurrenceIterator> exclusions = new ArrayList<RecurrenceIterator>();
        ICalDate dtstart = getValue(observance.getDateStart());
        if (dtstart != null) {
            DateValue dtstartValue = convertFromRawComponents(dtstart);
            //add DTSTART property
            inclusions.add(new DateValueRecurrenceIterator(Arrays.asList(dtstartValue))); // depends on control dependency: [if], data = [(dtstart]
            //add RRULE properties
            for (RecurrenceRule rrule : observance.getProperties(RecurrenceRule.class)) {
                Recurrence recur = rrule.getValue();
                if (recur != null) {
                    inclusions.add(RecurrenceIteratorFactory.createRecurrenceIterator(recur, dtstartValue, utc)); // depends on control dependency: [if], data = [(recur]
                }
            }
            //add EXRULE properties
            for (ExceptionRule exrule : observance.getProperties(ExceptionRule.class)) {
                Recurrence recur = exrule.getValue();
                if (recur != null) {
                    exclusions.add(RecurrenceIteratorFactory.createRecurrenceIterator(recur, dtstartValue, utc)); // depends on control dependency: [if], data = [(recur]
                }
            }
        }
        //add RDATE properties
        List<ICalDate> rdates = new ArrayList<ICalDate>();
        for (RecurrenceDates rdate : observance.getRecurrenceDates()) {
            rdates.addAll(rdate.getDates()); // depends on control dependency: [for], data = [rdate]
        }
        Collections.sort(rdates);
        inclusions.add(new DateRecurrenceIterator(rdates));
        //add EXDATE properties
        List<ICalDate> exdates = new ArrayList<ICalDate>();
        for (ExceptionDates exdate : observance.getProperties(ExceptionDates.class)) {
            exdates.addAll(exdate.getValues()); // depends on control dependency: [for], data = [exdate]
        }
        Collections.sort(exdates);
        exclusions.add(new DateRecurrenceIterator(exdates));
        RecurrenceIterator included = join(inclusions);
        //NOTE(review): exclusions always receives the EXDATE iterator above, so this
        //isEmpty() branch appears unreachable — confirm against join()/iterator semantics
        if (exclusions.isEmpty()) {
            return included; // depends on control dependency: [if], data = [none]
        }
        RecurrenceIterator excluded = join(exclusions);
        return RecurrenceIteratorFactory.except(included, excluded);
    } }
|
public class class_name {
    /**
     * Parses a semicolon-separated list of {@code key=value} pairs (e.g.
     * {@code "a=1; b=2"}) from the named attribute of the given block.
     *
     * @param part the block whose attribute map is consulted
     * @param attributeName the attribute holding the property string
     * @return the parsed properties; empty when the attribute is absent or
     *         contains no pairs
     */
    private Properties parseProperties(AbstractBlock part, String attributeName) {
        Properties properties = new Properties();
        Object attribute = part.getAttributes().get(attributeName);
        if (attribute == null) {
            return properties;
        }
        //try-with-resources: the original leaked both Scanners (never closed)
        try (Scanner propertiesScanner = new Scanner(attribute.toString())) {
            propertiesScanner.useDelimiter(";");
            while (propertiesScanner.hasNext()) {
                String next = propertiesScanner.next().trim();
                if (next.length() > 0) {
                    try (Scanner propertyScanner = new Scanner(next)) {
                        propertyScanner.useDelimiter("=");
                        String key = propertyScanner.next().trim();
                        //guard: an entry without '=' previously threw NoSuchElementException;
                        //treat it as a key with an empty value instead
                        String value = propertyScanner.hasNext() ? propertyScanner.next().trim() : "";
                        properties.setProperty(key, value);
                    }
                }
            }
        }
        return properties;
    }
}
|
public class class_name {
    /**
     * Parses a semicolon-separated list of {@code key=value} pairs (e.g.
     * {@code "a=1; b=2"}) from the named attribute of the given block.
     *
     * NOTE(review): both Scanners opened below are never closed (resource leak),
     * and an entry containing no '=' makes the second next() call throw
     * NoSuchElementException.
     *
     * @param part the block whose attribute map is consulted
     * @param attributeName the attribute holding the property string
     * @return the parsed properties; empty when the attribute is absent
     */
    private Properties parseProperties(AbstractBlock part, String attributeName) {
        Properties properties = new Properties();
        Object attribute = part.getAttributes().get(attributeName);
        if (attribute == null) {
            return properties; // depends on control dependency: [if], data = [none]
        }
        Scanner propertiesScanner = new Scanner(attribute.toString());
        propertiesScanner.useDelimiter(";");
        while (propertiesScanner.hasNext()) {
            String next = propertiesScanner.next().trim();
            if (next.length() > 0) {
                Scanner propertyScanner = new Scanner(next);
                propertyScanner.useDelimiter("="); // depends on control dependency: [if], data = [none]
                String key = propertyScanner.next().trim();
                String value = propertyScanner.next().trim();
                properties.setProperty(key, value); // depends on control dependency: [if], data = [none]
            }
        }
        return properties;
    } }
|
public class class_name {
    /**
     * Queues the given operation id as complete and, when a start timestamp was
     * recorded for it, reports the elapsed time to the stats collector.
     *
     * @param id the id of the operation that may now be completed
     */
    public void markCanBeCompleted(long id) {
        Long operationId = Long.valueOf(id);
        completedOperationIds.offerLast(operationId);
        Long startedAt = starTimes.remove(operationId);
        if (startedAt == null) {
            return; //no timing was recorded for this operation
        }
        stats.markRpcComplete(clock.nanoTime() - startedAt);
    }
}
|
public class class_name {
    /**
     * Queues the given operation id as complete and, when a start timestamp was
     * recorded for it, reports the elapsed time to the stats collector.
     *
     * @param id the id of the operation that may now be completed
     */
    public void markCanBeCompleted(long id) {
        Long opId = id; // boxed once so the same key object is used for queue and map
        completedOperationIds.offerLast(opId);
        //NOTE(review): field name "starTimes" looks like a typo for "startTimes" — declared elsewhere, verify
        Long start = starTimes.remove(opId);
        if (start != null) {
            stats.markRpcComplete(clock.nanoTime() - start); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Returns the cached read {@code BeanInvoker} for the given property name,
     * creating and caching one on first use.
     *
     * <p>NOTE(review): not synchronized — concurrent callers may both miss the
     * cache and create duplicate invokers; confirm whether callers are
     * single-threaded.
     *
     * @param name the property name to resolve
     * @return the invoker used to read that property
     */
    public BeanInvoker getReadInvoker(String name) {
        //single get() instead of containsKey()+get(): one map lookup instead of two
        BeanInvoker invoker = readInvokers.get(name);
        if (invoker == null) {
            invoker = getInvoker(readHandleType.get(name), name);
            readInvokers.put(name, invoker);
        }
        return invoker;
    }
}
|
public class class_name {
    /**
     * Returns the cached read {@code BeanInvoker} for the given property name,
     * creating and caching one on first use.
     *
     * NOTE(review): containsKey()+get() performs two lookups and is a
     * check-then-act race if the map is shared between threads — verify callers.
     *
     * @param name the property name to resolve
     * @return the invoker used to read that property
     */
    public BeanInvoker getReadInvoker(String name) {
        BeanInvoker invoker;
        if (readInvokers.containsKey(name)) {
            invoker = readInvokers.get(name); // depends on control dependency: [if], data = [none]
        } else {
            invoker = getInvoker(readHandleType.get(name), name); // depends on control dependency: [if], data = [none]
            readInvokers.put(name, invoker); // depends on control dependency: [if], data = [none]
        }
        return invoker;
    } }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.