| code (string, lengths 130–281k) | code_dependency (string, lengths 182–306k) |
|---|---|
public class class_name {
/**
 * Resolves the CAS response type for the given web application service.
 * Precedence: the "method" HTTP request parameter, when non-blank, wins;
 * otherwise the registered service's configured response type is used;
 * a blank result falls back to REDIRECT.
 */
protected Response.ResponseType getWebApplicationServiceResponseType(final WebApplicationService finalService) {
// May be null when invoked outside an active HTTP request.
val request = HttpRequestUtils.getHttpServletRequestFromRequestAttributes();
val methodRequest = request != null ? request.getParameter(CasProtocolConstants.PARAMETER_METHOD) : null;
// doIf: when the input is blank, fall back to the service registration's
// configured response type; otherwise echo the request parameter unchanged.
final Function<String, String> func = FunctionUtils.doIf(StringUtils::isBlank,
t -> {
val registeredService = this.servicesManager.findServiceBy(finalService);
if (registeredService != null) {
return registeredService.getResponseType();
}
return null;
},
f -> methodRequest);
val method = func.apply(methodRequest);
if (StringUtils.isBlank(method)) {
return Response.ResponseType.REDIRECT;
}
// NOTE(review): valueOf throws IllegalArgumentException for unrecognized
// method values — presumably validated upstream; confirm.
return Response.ResponseType.valueOf(method.toUpperCase());
} } | public class class_name {
protected Response.ResponseType getWebApplicationServiceResponseType(final WebApplicationService finalService) {
val request = HttpRequestUtils.getHttpServletRequestFromRequestAttributes();
val methodRequest = request != null ? request.getParameter(CasProtocolConstants.PARAMETER_METHOD) : null;
final Function<String, String> func = FunctionUtils.doIf(StringUtils::isBlank,
t -> {
val registeredService = this.servicesManager.findServiceBy(finalService);
if (registeredService != null) {
return registeredService.getResponseType(); // depends on control dependency: [if], data = [none]
}
return null;
},
f -> methodRequest);
val method = func.apply(methodRequest);
if (StringUtils.isBlank(method)) {
return Response.ResponseType.REDIRECT;
}
return Response.ResponseType.valueOf(method.toUpperCase());
} } |
public class class_name {
/**
 * Joins a base path and an endpoint into a single absolute path, inserting a
 * single PATH_SEPARATOR between them and collapsing a duplicate separator at
 * the join point. A null/empty endpoint returns the absolutized base alone.
 *
 * @param path     base path; absolutized via ensureAbsolutePath
 * @param endpoint path suffix to append; may be null or empty
 * @return the concatenated path
 */
public static String concatenatePath(String path, String endpoint) {
String firstSegment = ensureAbsolutePath(path);
if (endpoint == null || endpoint.isEmpty()) {
return firstSegment;
}
StringBuilder sb = new StringBuilder(firstSegment);
// Inspect firstSegment — the string actually being appended to — rather than
// the raw path: ensureAbsolutePath may alter it, and an empty path would make
// path.charAt(path.length() - 1) throw StringIndexOutOfBoundsException.
if (firstSegment.isEmpty() || firstSegment.charAt(firstSegment.length() - 1) != PATH_SEPARATOR) {
sb.append(PATH_SEPARATOR);
}
if (endpoint.charAt(0) == PATH_SEPARATOR) {
sb.append(endpoint.substring(1));
}
else {
sb.append(endpoint);
}
return sb.toString();
} } | public class class_name {
public static String concatenatePath(String path, String endpoint) {
String firstSegment = ensureAbsolutePath(path);
if (endpoint == null || endpoint.isEmpty()) {
return firstSegment; // depends on control dependency: [if], data = [none]
}
StringBuilder sb = new StringBuilder(firstSegment);
if (path.charAt(path.length() - 1) != PATH_SEPARATOR) {
sb.append(PATH_SEPARATOR); // depends on control dependency: [if], data = [PATH_SEPARATOR)]
}
if (endpoint.charAt(0) == PATH_SEPARATOR) {
sb.append(endpoint.substring(1)); // depends on control dependency: [if], data = [none]
}
else {
sb.append(endpoint); // depends on control dependency: [if], data = [none]
}
return sb.toString();
} } |
public class class_name {
/**
 * Replaces all preferences stored under the given key prefix with the entries
 * of pMap: every existing key starting with pPrefix is removed, then each map
 * entry is written as pPrefix + entryKey.
 * NOTE(review): pEdit is not committed/applied here — presumably the caller
 * does that; confirm.
 */
private static void save(final SharedPreferences pPrefs, final SharedPreferences.Editor pEdit,
final Map<String, String> pMap, final String pPrefix) {
// Drop stale entries belonging to this prefix before re-writing.
for (final String key : pPrefs.getAll().keySet()) {
if (key.startsWith(pPrefix)) {
pEdit.remove(key);
}
}
for (final Map.Entry<String, String> entry : pMap.entrySet()) {
final String key = pPrefix + entry.getKey();
pEdit.putString(key, entry.getValue());
}
} } | public class class_name {
private static void save(final SharedPreferences pPrefs, final SharedPreferences.Editor pEdit,
final Map<String, String> pMap, final String pPrefix) {
for (final String key : pPrefs.getAll().keySet()) {
if (key.startsWith(pPrefix)) {
pEdit.remove(key); // depends on control dependency: [if], data = [none]
}
}
for (final Map.Entry<String, String> entry : pMap.entrySet()) {
final String key = pPrefix + entry.getKey();
pEdit.putString(key, entry.getValue()); // depends on control dependency: [for], data = [entry]
}
} } |
public class class_name {
/**
 * Marshals the given ServiceSpecification via the protocol marshaller.
 *
 * @throws SdkClientException if serviceSpecification is null or marshalling fails
 */
public void marshall(ServiceSpecification serviceSpecification, ProtocolMarshaller protocolMarshaller) {
if (serviceSpecification == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(serviceSpecification.getEC2Specification(), EC2SPECIFICATION_BINDING);
} catch (Exception e) {
// Wrap any marshalling failure, preserving the original cause.
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(ServiceSpecification serviceSpecification, ProtocolMarshaller protocolMarshaller) {
if (serviceSpecification == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(serviceSpecification.getEC2Specification(), EC2SPECIFICATION_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Returns a lazily-created, per-thread, namespace-aware DocumentBuilderFactory.
 * Caching in the threadDBF ThreadLocal avoids repeated factory creation and
 * sidesteps the factory's lack of thread safety.
 * NOTE(review): DTD/external-entity processing is not disabled here — if any
 * caller parses untrusted XML this is an XXE risk; confirm input sources.
 */
static DocumentBuilderFactory getDBF() {
DocumentBuilderFactory dbf = threadDBF.get();
if (dbf == null) {
dbf = DocumentBuilderFactory.newInstance();
dbf.setNamespaceAware(true);
threadDBF.set(dbf);
}
return dbf;
} } | public class class_name {
static DocumentBuilderFactory getDBF() {
DocumentBuilderFactory dbf = threadDBF.get();
if (dbf == null) {
dbf = DocumentBuilderFactory.newInstance(); // depends on control dependency: [if], data = [none]
dbf.setNamespaceAware(true); // depends on control dependency: [if], data = [none]
threadDBF.set(dbf); // depends on control dependency: [if], data = [(dbf]
}
return dbf;
} } |
public class class_name {
/**
 * Returns the first property whose key equals {@code key}, or {@code null}
 * when no property matches.
 */
public Property get(String key) {
Property match = null;
for (Property candidate : properties) {
if (candidate.getKey().equals(key)) {
match = candidate;
break;
}
}
return match;
} } | public class class_name {
public Property get(String key) {
for (Property property : properties) {
if (property.getKey().equals(key)) {
return property; // depends on control dependency: [if], data = [none]
}
}
return null;
} } |
public class class_name {
/**
 * Stores the value, returning any previous value. A non-null previous value
 * that differs from the new one is an ambivalent (conflicting) assignment and
 * triggers an AmbivalentValueException; a null key, no previous value, or an
 * equal previous value is returned as-is.
 */
@Override
public Object put(ChronoElement<?> key, Object value) {
final Object previous = super.put(key, value);
// Conflict iff a real key already held a different non-null value (De Morgan
// inversion of the original accept-condition).
final boolean conflict = (key != null) && (previous != null) && !previous.equals(value);
if (conflict) {
throw new AmbivalentValueException(key);
}
return previous;
} } | public class class_name {
@Override
public Object put(ChronoElement<?> key, Object value) {
Object obj = super.put(key, value);
if (
(key == null)
|| (obj == null)
|| obj.equals(value)
) {
return obj; // depends on control dependency: [if], data = []
} else {
throw new AmbivalentValueException(key);
}
} } |
public class class_name {
/** Uppercase hex digit lookup table (replaces per-digit arithmetic and branching). */
private static final char[] HEX_DIGITS = "0123456789ABCDEF".toCharArray();

/**
 * Appends the two-character uppercase hexadecimal representation of {@code b}
 * to {@code buf}. An IOException from the Appendable is rethrown as an
 * unchecked RuntimeException, matching the original contract.
 *
 * @param b   byte to format (treated as unsigned)
 * @param buf destination to append to
 */
public static void toHex(byte b, Appendable buf) {
try {
final int unsigned = b & 0xFF;
buf.append(HEX_DIGITS[unsigned >>> 4]);
buf.append(HEX_DIGITS[unsigned & 0xF]);
} catch (IOException e) {
// Appendable.append declares IOException; surface it unchecked, as before.
throw new RuntimeException(e);
}
} } | public class class_name {
public static void toHex(byte b, Appendable buf) {
try {
int bi = _0XFF & b;
int c = '0' + (bi / SIXTEEN) % SIXTEEN;
if (c > '9') {
c = 'A' + (c - '0' - TEN); // depends on control dependency: [if], data = [(c]
}
buf.append((char) c); // depends on control dependency: [try], data = [none]
c = '0' + bi % SIXTEEN; // depends on control dependency: [try], data = [none]
if (c > '9') {
c = 'A' + (c - '0' - TEN); // depends on control dependency: [if], data = [(c]
}
buf.append((char) c); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
throw new RuntimeException(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Decodes PackBits run-length-encoded bytes from {@code data} into {@code dst},
 * stopping once dst is full. Packet forms: header byte 0..127 = literal run of
 * (header + 1) bytes; -1..-127 = next byte repeated (-header + 1) times;
 * -128 = no-op.
 * NOTE(review): the blanket catch deliberately treats truncated or overrunning
 * input as best-effort (dst is left partially filled) rather than failing —
 * confirm callers rely on this.
 */
public static void decodePackbits(byte data[], byte[] dst) {
int srcCount = 0, dstCount = 0;
byte repeat, b;
try {
while (dstCount < dst.length) {
b = data[srcCount++];
if (b >= 0 && b <= 127) {
// literal run packet
for (int i=0; i<(b + 1); i++) {
dst[dstCount++] = data[srcCount++];
}
} else if (b <= -1 && b >= -127) {
// 2 byte encoded run packet
repeat = data[srcCount++];
for (int i=0; i<(-b + 1); i++) {
dst[dstCount++] = repeat;
}
} else {
// no-op packet. Do nothing
srcCount++;
}
}
}
catch (Exception e) {
// do nothing
}
} } | public class class_name {
public static void decodePackbits(byte data[], byte[] dst) {
int srcCount = 0, dstCount = 0;
byte repeat, b;
try {
while (dstCount < dst.length) {
b = data[srcCount++]; // depends on control dependency: [while], data = [none]
if (b >= 0 && b <= 127) {
// literal run packet
for (int i=0; i<(b + 1); i++) {
dst[dstCount++] = data[srcCount++]; // depends on control dependency: [for], data = [none]
}
} else if (b <= -1 && b >= -127) {
// 2 byte encoded run packet
repeat = data[srcCount++]; // depends on control dependency: [if], data = [none]
for (int i=0; i<(-b + 1); i++) {
dst[dstCount++] = repeat; // depends on control dependency: [for], data = [none]
}
} else {
// no-op packet. Do nothing
srcCount++; // depends on control dependency: [if], data = [none]
}
}
}
catch (Exception e) {
// do nothing
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Records a namespace prefix mapping. The mapping is always stored in the
 * global prefix map; it is additionally tracked locally only while inside
 * XML metadata and before the audit element has been seen.
 */
@Override
public void startPrefixMapping(String prefix, String uri) {
m_prefixMap.put(prefix, uri);
// Guard clause: no local tracking outside metadata or once the audit is seen.
if (!m_inXMLMetadata || m_gotAudit) {
return;
}
m_localPrefixMap.put(prefix, uri);
m_prefixList.add(prefix);
} } | public class class_name {
@Override
public void startPrefixMapping(String prefix, String uri) {
m_prefixMap.put(prefix, uri);
if (m_inXMLMetadata && !m_gotAudit) {
m_localPrefixMap.put(prefix, uri); // depends on control dependency: [if], data = [none]
m_prefixList.add(prefix); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
/**
 * Adds a bundle's resource info to the symbolic-name cache and records its
 * file location. putIfAbsent lets concurrent first insertions for the same
 * symbolic name converge on a single list; when an existing list is returned,
 * the new entry is appended under that list's monitor.
 */
private void addToCache(BundleInfo bInfo) {
List<Resource> info = _cacheBySymbolicName.get(bInfo.symbolicName);
if (info == null) {
// No cached list yet: seed a fresh one already containing this entry.
info = new ArrayList<Resource>();
info.add(bInfo);
}
// Non-null return means a list already existed (or another thread won the
// race); our entry must be appended to that winning list instead.
info = _cacheBySymbolicName.putIfAbsent(bInfo.symbolicName, info);
if (info != null) {
synchronized (info) {
info.add(bInfo);
}
}
_bundleLocations.add(bInfo.file);
} } | public class class_name {
private void addToCache(BundleInfo bInfo) {
List<Resource> info = _cacheBySymbolicName.get(bInfo.symbolicName);
if (info == null) {
info = new ArrayList<Resource>(); // depends on control dependency: [if], data = [none]
info.add(bInfo); // depends on control dependency: [if], data = [none]
}
info = _cacheBySymbolicName.putIfAbsent(bInfo.symbolicName, info);
if (info != null) {
synchronized (info) { // depends on control dependency: [if], data = [(info]
info.add(bInfo);
}
}
_bundleLocations.add(bInfo.file);
} } |
public class class_name {
/**
 * Builds an object-literal AST node containing one string-keyed entry per
 * compiler default-value override, attaching source info from
 * {@code sourceInformationNode} so the synthesized nodes map back to a real
 * source location.
 */
private Node createCompilerDefaultValueOverridesVarNode(
Node sourceInformationNode) {
Node objNode = IR.objectlit().srcref(sourceInformationNode);
for (Entry<String, Node> entry : compilerDefaultValueOverrides.entrySet()) {
Node objKeyNode = IR.stringKey(entry.getKey())
.useSourceInfoIfMissingFrom(sourceInformationNode);
// Clone the override value so the original node stays untouched.
Node objValueNode = entry.getValue().cloneNode()
.useSourceInfoIfMissingFrom(sourceInformationNode);
objKeyNode.addChildToBack(objValueNode);
objNode.addChildToBack(objKeyNode);
}
return objNode;
} } | public class class_name {
private Node createCompilerDefaultValueOverridesVarNode(
Node sourceInformationNode) {
Node objNode = IR.objectlit().srcref(sourceInformationNode);
for (Entry<String, Node> entry : compilerDefaultValueOverrides.entrySet()) {
Node objKeyNode = IR.stringKey(entry.getKey())
.useSourceInfoIfMissingFrom(sourceInformationNode);
Node objValueNode = entry.getValue().cloneNode()
.useSourceInfoIfMissingFrom(sourceInformationNode);
objKeyNode.addChildToBack(objValueNode); // depends on control dependency: [for], data = [none]
objNode.addChildToBack(objKeyNode); // depends on control dependency: [for], data = [none]
}
return objNode;
} } |
public class class_name {
/**
 * Looks up the event channel struct for the given channel name: first by exact
 * key, then by re-keying the name's path part with each alternative TANGO host
 * prefix. Returns null when no entry matches.
 */
private EventChannelStruct getEventChannelStruct(String channelName) {
if (channel_map.containsKey(channelName)) {
return channel_map.get(channelName);
}
// Check with other TangoHosts using possibleTangoHosts as header
int index = channelName.indexOf("//");
if (index>0) {
// Advance past "//host:port" to the first '/' after the host section.
index = channelName.indexOf('/', index+2); // "//".length()
for (String possibleTangoHost : possibleTangoHosts) {
String key = possibleTangoHost + channelName.substring(index);
if (channel_map.containsKey(key))
return channel_map.get(key);
}
}
return null;
} } | public class class_name {
private EventChannelStruct getEventChannelStruct(String channelName) {
if (channel_map.containsKey(channelName)) {
return channel_map.get(channelName); // depends on control dependency: [if], data = [none]
}
// Check with other TangoHosts using possibleTangoHosts as header
int index = channelName.indexOf("//");
if (index>0) {
index = channelName.indexOf('/', index+2); // "//".length()
for (String possibleTangoHost : possibleTangoHosts) {
String key = possibleTangoHost + channelName.substring(index);
if (channel_map.containsKey(key))
return channel_map.get(key);
}
}
return null;
} } |
public class class_name {
/**
 * Sets child slot 8 (index 7), detaching any previous occupant and firing
 * remove/add child property events. Returns false when newChild is already in
 * the slot. Statement order matters: the old child is detached and its removal
 * fired before the new child is unlinked from its prior parent and attached.
 */
@SuppressWarnings("checkstyle:magicnumber")
private boolean setChild8(N newChild) {
if (this.child8 == newChild) {
return false;
}
if (this.child8 != null) {
// Detach the current occupant and announce its removal.
this.child8.setParentNodeReference(null, true);
--this.notNullChildCount;
firePropertyChildRemoved(7, this.child8);
}
if (newChild != null) {
final N oldParent = newChild.getParentNode();
if (oldParent != this) {
// Unlink from the previous parent before re-homing here.
newChild.removeFromParent();
}
}
this.child8 = newChild;
if (newChild != null) {
newChild.setParentNodeReference(toN(), true);
++this.notNullChildCount;
firePropertyChildAdded(7, newChild);
}
return true;
} } | public class class_name {
@SuppressWarnings("checkstyle:magicnumber")
private boolean setChild8(N newChild) {
if (this.child8 == newChild) {
return false; // depends on control dependency: [if], data = [none]
}
if (this.child8 != null) {
this.child8.setParentNodeReference(null, true); // depends on control dependency: [if], data = [none]
--this.notNullChildCount; // depends on control dependency: [if], data = [none]
firePropertyChildRemoved(7, this.child8); // depends on control dependency: [if], data = [none]
}
if (newChild != null) {
final N oldParent = newChild.getParentNode();
if (oldParent != this) {
newChild.removeFromParent(); // depends on control dependency: [if], data = [none]
}
}
this.child8 = newChild;
if (newChild != null) {
newChild.setParentNodeReference(toN(), true); // depends on control dependency: [if], data = [none]
++this.notNullChildCount; // depends on control dependency: [if], data = [none]
firePropertyChildAdded(7, newChild); // depends on control dependency: [if], data = [none]
}
return true;
} } |
public class class_name {
/**
 * Marshals the given DescribePlacementRequest (placement name and project
 * name) via the protocol marshaller.
 *
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(DescribePlacementRequest describePlacementRequest, ProtocolMarshaller protocolMarshaller) {
if (describePlacementRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(describePlacementRequest.getPlacementName(), PLACEMENTNAME_BINDING);
protocolMarshaller.marshall(describePlacementRequest.getProjectName(), PROJECTNAME_BINDING);
} catch (Exception e) {
// Wrap any marshalling failure, preserving the original cause.
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(DescribePlacementRequest describePlacementRequest, ProtocolMarshaller protocolMarshaller) {
if (describePlacementRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(describePlacementRequest.getPlacementName(), PLACEMENTNAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(describePlacementRequest.getProjectName(), PROJECTNAME_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Marks this operator as (non-)combinable. Guard clause: a UDF may only be
 * flagged combinable when its user-code class implements GroupCombineFunction.
 */
public void setCombinable(boolean combinable) {
if (combinable && !GroupCombineFunction.class.isAssignableFrom(this.userFunction.getUserCodeClass())) {
throw new IllegalArgumentException("Cannot set a UDF as combinable if it does not implement the interface " +
GroupCombineFunction.class.getName());
}
// Either combinable is false, or the UDF supports combining — record it.
this.combinable = combinable;
} } | public class class_name {
public void setCombinable(boolean combinable) {
// sanity check
if (combinable && !GroupCombineFunction.class.isAssignableFrom(this.userFunction.getUserCodeClass())) {
throw new IllegalArgumentException("Cannot set a UDF as combinable if it does not implement the interface " +
GroupCombineFunction.class.getName());
} else {
this.combinable = combinable; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
/**
 * Copies all bytes from a readable channel to a writable channel.
 *
 * @param in             source channel (not closed here)
 * @param out            destination channel (not closed here)
 * @param bufferSize     transfer buffer size; values <= 0 use DEFAULT_BUFFER_SIZE
 * @param streamProgress optional progress callback; may be null
 * @return total number of bytes written
 * @throws IORuntimeException wrapping any underlying IOException
 */
public static long copy(ReadableByteChannel in, WritableByteChannel out, int bufferSize, StreamProgress streamProgress) throws IORuntimeException {
Assert.notNull(in, "InputStream is null !");
Assert.notNull(out, "OutputStream is null !");
ByteBuffer byteBuffer = ByteBuffer.allocate(bufferSize <= 0 ? DEFAULT_BUFFER_SIZE : bufferSize);
long size = 0;
if (null != streamProgress) {
streamProgress.start();
}
try {
while (in.read(byteBuffer) != EOF) {
byteBuffer.flip();// 写转读 (switch buffer from write mode to read mode)
size += out.write(byteBuffer);
byteBuffer.clear();
if (null != streamProgress) {
streamProgress.progress(size);
}
}
} catch (IOException e) {
throw new IORuntimeException(e);
}
// NOTE(review): finish() is skipped when an IOException aborts the copy —
// confirm that is intended.
if (null != streamProgress) {
streamProgress.finish();
}
return size;
} } | public class class_name {
public static long copy(ReadableByteChannel in, WritableByteChannel out, int bufferSize, StreamProgress streamProgress) throws IORuntimeException {
Assert.notNull(in, "InputStream is null !");
Assert.notNull(out, "OutputStream is null !");
ByteBuffer byteBuffer = ByteBuffer.allocate(bufferSize <= 0 ? DEFAULT_BUFFER_SIZE : bufferSize);
long size = 0;
if (null != streamProgress) {
streamProgress.start();
}
try {
while (in.read(byteBuffer) != EOF) {
byteBuffer.flip();// 写转读
// depends on control dependency: [while], data = [none]
size += out.write(byteBuffer);
// depends on control dependency: [while], data = [none]
byteBuffer.clear();
// depends on control dependency: [while], data = [none]
if (null != streamProgress) {
streamProgress.progress(size);
// depends on control dependency: [if], data = [none]
}
}
} catch (IOException e) {
throw new IORuntimeException(e);
}
if (null != streamProgress) {
streamProgress.finish();
}
return size;
} } |
public class class_name {
/**
 * Resolves this EJB reference's binding name from the module's ejb-ref
 * bindings. A binding explicitly set to the empty string is treated as
 * unbound: a warning is always logged, and when validation failure is
 * enabled an InjectionConfigurationException is thrown instead.
 *
 * @throws InjectionException (as InjectionConfigurationException) for an
 *         empty-string binding when validation is failable
 */
private void setBindingName() // d681743
throws InjectionException
{
Map<String, String> ejbRefBindings = ivNameSpaceConfig.getEJBRefBindings();
if (ejbRefBindings != null)
{
ivBindingName = ejbRefBindings.get(getJndiName());
if (ivBindingName != null && ivBindingName.equals(""))
{
// Empty string is not a usable JNDI binding; treat as unbound.
ivBindingName = null;
Tr.warning(tc, "EJB_BOUND_TO_EMPTY_STRING_CWNEN0025W");
if (isValidationFailable()) // fail if enabled F743-14449
{
InjectionConfigurationException icex = new InjectionConfigurationException
("The " + getJndiName() + " EJB reference in the " + ivModule +
" module of the " + ivApplication + " application has been" +
" bound to the empty string in the global Java Naming and Directory Interface (JNDI) namespace.");
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.exit(tc, "resolve : " + icex);
throw icex;
}
}
}
} } | public class class_name {
private void setBindingName() // d681743
throws InjectionException
{
Map<String, String> ejbRefBindings = ivNameSpaceConfig.getEJBRefBindings();
if (ejbRefBindings != null)
{
ivBindingName = ejbRefBindings.get(getJndiName());
if (ivBindingName != null && ivBindingName.equals(""))
{
ivBindingName = null;
Tr.warning(tc, "EJB_BOUND_TO_EMPTY_STRING_CWNEN0025W");
if (isValidationFailable()) // fail if enabled F743-14449
{
InjectionConfigurationException icex = new InjectionConfigurationException
("The " + getJndiName() + " EJB reference in the " + ivModule +
" module of the " + ivApplication + " application has been" +
" bound to the empty string in the global Java Naming and Directory Interface (JNDI) namespace."); // depends on control dependency: [if], data = [none]
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.exit(tc, "resolve : " + icex);
throw icex;
}
}
}
} } |
public class class_name {
/**
 * Empties both the queue and the map, notifying the removal listener (when
 * present) for every cached value before the map is cleared.
 */
public void clear() {
queue.clear();
if (listener != null) {
map.values().forEach(listener::onRemove);
}
map.clear();
} } | public class class_name {
public void clear() {
queue.clear();
if (listener != null) {
for (V item : map.values()) {
listener.onRemove(item); // depends on control dependency: [for], data = [item]
}
}
map.clear();
} } |
public class class_name {
/**
 * Returns the allowed package with the given name, or {@code null} when no
 * allowed package matches.
 */
public final Package<DependsOn> findAllowedByName(final String packageName) {
// First allowed package whose name matches exactly, else null.
return getAllowed().stream()
.filter(pkg -> pkg.getName().equals(packageName))
.findFirst()
.orElse(null);
} } | public class class_name {
public final Package<DependsOn> findAllowedByName(final String packageName) {
final List<Package<DependsOn>> list = getAllowed();
for (final Package<DependsOn> pkg : list) {
if (pkg.getName().equals(packageName)) {
return pkg;
// depends on control dependency: [if], data = [none]
}
}
return null;
} } |
public class class_name {
/**
 * Schedules a redelivery attempt for the given notification roughly
 * TWO_MINUTES out. Only one pending retry is tracked per subscriber callback:
 * the first retry re-enqueues the notification directly when its timer fires,
 * while any subsequent retry for the same callback just calls retry() again
 * later, deferring until the earlier attempt has cleared.
 */
protected void retry(final Notification not) {
if (!pendings.contains(not.subscriber.getCallback())) {
// We don't have a current retry for this callback pending, so we
// will schedule the retry
pendings.add(not.subscriber.getCallback());
timer.schedule(new TimerTask() {
@Override
public void run() {
pendings.remove(not.subscriber.getCallback());
enqueueNotification(not);
}
}, TWO_MINUTES);
} else {
// There is a retry in front of this one, so we will just schedule
// it to retry again in a bit
timer.schedule(new TimerTask() {
@Override
public void run() {
retry(not);
}
}, TWO_MINUTES);
}
} } | public class class_name {
protected void retry(final Notification not) {
if (!pendings.contains(not.subscriber.getCallback())) {
// We don't have a current retry for this callback pending, so we
// will schedule the retry
pendings.add(not.subscriber.getCallback()); // depends on control dependency: [if], data = [none]
timer.schedule(new TimerTask() {
@Override
public void run() {
pendings.remove(not.subscriber.getCallback());
enqueueNotification(not);
}
}, TWO_MINUTES); // depends on control dependency: [if], data = [none]
} else {
// There is a retry in front of this one, so we will just schedule
// it to retry again in a bit
timer.schedule(new TimerTask() {
@Override
public void run() {
retry(not);
}
}, TWO_MINUTES); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
/**
 * Marshals the given Relationship (resource type/id/name and relationship
 * name) via the protocol marshaller.
 *
 * @throws SdkClientException if the relationship is null or marshalling fails
 */
public void marshall(Relationship relationship, ProtocolMarshaller protocolMarshaller) {
if (relationship == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(relationship.getResourceType(), RESOURCETYPE_BINDING);
protocolMarshaller.marshall(relationship.getResourceId(), RESOURCEID_BINDING);
protocolMarshaller.marshall(relationship.getResourceName(), RESOURCENAME_BINDING);
protocolMarshaller.marshall(relationship.getRelationshipName(), RELATIONSHIPNAME_BINDING);
} catch (Exception e) {
// Wrap any marshalling failure, preserving the original cause.
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(Relationship relationship, ProtocolMarshaller protocolMarshaller) {
if (relationship == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(relationship.getResourceType(), RESOURCETYPE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(relationship.getResourceId(), RESOURCEID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(relationship.getResourceName(), RESOURCENAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(relationship.getRelationshipName(), RELATIONSHIPNAME_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Lazily resolves and caches the IfcStructuralSurfaceConnection EClass from
 * the registered Ifc4 package (classifier index 661).
 * NOTE(review): unsynchronized lazy init — benign if the registry lookup is
 * idempotent, but confirm access is effectively single-threaded.
 */
@Override
public EClass getIfcStructuralSurfaceConnection() {
if (ifcStructuralSurfaceConnectionEClass == null) {
ifcStructuralSurfaceConnectionEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(661);
}
return ifcStructuralSurfaceConnectionEClass;
} } | public class class_name {
@Override
public EClass getIfcStructuralSurfaceConnection() {
if (ifcStructuralSurfaceConnectionEClass == null) {
ifcStructuralSurfaceConnectionEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(661);
// depends on control dependency: [if], data = [none]
}
return ifcStructuralSurfaceConnectionEClass;
} } |
public class class_name {
/**
 * Parses this page's markup into {@code parsedDocument}. When {@code code} is
 * present it is parsed directly — as a body fragment or full document, using
 * an XML parser in deployment mode — otherwise the content is first fetched
 * from {@code address} and parsed as HTML.
 *
 * @param fragment whether to treat the code as a markup fragment
 * @return always true
 * @throws FrameworkException propagated from init()
 */
public boolean parse(final boolean fragment) throws FrameworkException {
init();
if (StringUtils.isNotBlank(code)) {
if (!isDeployment) {
logger.info("##### Start parsing code for page {} #####", new Object[]{name});
} else {
// a trailing slash to all void/self-closing tags so the XML parser can parse it correctly
code = code.replaceAll("<(area|base|br|col|command|embed|hr|img|input|keygen|link|meta|param|source|track|wbr)([^>]*)>", "<$1$2/>");
}
if (fragment) {
if (isDeployment) {
final List<Node> nodeList = Parser.parseXmlFragment(code, "");
parsedDocument = Document.createShell("");
final Element body = parsedDocument.body();
final Node[] nodes = nodeList.toArray(new Node[nodeList.size()]);
// Detach all but the first node from their parse-time parent before
// re-homing every node under body.
// NOTE(review): the loop stops at i > 0, leaving nodes[0] attached —
// presumably appendChild reparents it anyway; confirm intent.
for (int i = nodes.length - 1; i > 0; i--) {
nodes[i].remove();
}
for (Node node : nodes) {
body.appendChild(node);
}
} else {
parsedDocument = Jsoup.parseBodyFragment(code);
}
} else {
if (isDeployment) {
parsedDocument = Jsoup.parse(code, "", Parser.xmlParser());
} else {
parsedDocument = Jsoup.parse(code);
}
}
} else {
if (!isDeployment) {
logger.info("##### Start fetching {} for page {} #####", new Object[]{address, name});
}
// No inline code: fetch the markup over HTTP and parse it as HTML.
code = HttpHelper.get(address);
parsedDocument = Jsoup.parse(code);
}
return true;
} } | public class class_name {
public boolean parse(final boolean fragment) throws FrameworkException {
init();
if (StringUtils.isNotBlank(code)) {
if (!isDeployment) {
logger.info("##### Start parsing code for page {} #####", new Object[]{name}); // depends on control dependency: [if], data = [none]
} else {
// a trailing slash to all void/self-closing tags so the XML parser can parse it correctly
code = code.replaceAll("<(area|base|br|col|command|embed|hr|img|input|keygen|link|meta|param|source|track|wbr)([^>]*)>", "<$1$2/>"); // depends on control dependency: [if], data = [none]
}
if (fragment) {
if (isDeployment) {
final List<Node> nodeList = Parser.parseXmlFragment(code, "");
parsedDocument = Document.createShell(""); // depends on control dependency: [if], data = [none]
final Element body = parsedDocument.body();
final Node[] nodes = nodeList.toArray(new Node[nodeList.size()]);
for (int i = nodes.length - 1; i > 0; i--) {
nodes[i].remove(); // depends on control dependency: [for], data = [i]
}
for (Node node : nodes) {
body.appendChild(node); // depends on control dependency: [for], data = [node]
}
} else {
parsedDocument = Jsoup.parseBodyFragment(code); // depends on control dependency: [if], data = [none]
}
} else {
if (isDeployment) {
parsedDocument = Jsoup.parse(code, "", Parser.xmlParser()); // depends on control dependency: [if], data = [none]
} else {
parsedDocument = Jsoup.parse(code); // depends on control dependency: [if], data = [none]
}
}
} else {
if (!isDeployment) {
logger.info("##### Start fetching {} for page {} #####", new Object[]{address, name}); // depends on control dependency: [if], data = [none]
}
code = HttpHelper.get(address);
parsedDocument = Jsoup.parse(code);
}
return true;
} } |
public class class_name {
/**
 * Deletes the underlying OS file, retrying indefinitely on IOException.
 * Each attempt takes the write lock, closes the file, then removes it if it
 * still exists; on failure the error is logged (with the attempt count) and
 * reopenFile() prepares the next try.
 *
 * @throws IOException propagated from reopenFile when it gives up
 */
public void delete() throws IOException {
int attempts = 0;
while (true) {
try {
acquireWriteLock();
try {
close();
if (osFile != null) {
Files.deleteIfExists(osFile);
}
} finally {
// Always release the lock and count the attempt, even on failure.
releaseWriteLock();
attempts++;
}
break;
} catch (final IOException ioe) {
OLogManager.instance().error(this, "Error during deletion of file '" + getName() + "' " + attempts + "-th attempt", ioe);
reopenFile(attempts, ioe);
}
}
} } | public class class_name {
public void delete() throws IOException {
int attempts = 0;
while (true) {
try {
acquireWriteLock(); // depends on control dependency: [try], data = [none]
try {
close(); // depends on control dependency: [try], data = [none]
if (osFile != null) {
Files.deleteIfExists(osFile); // depends on control dependency: [if], data = [(osFile]
}
} finally {
releaseWriteLock();
attempts++;
}
break;
} catch (final IOException ioe) {
OLogManager.instance().error(this, "Error during deletion of file '" + getName() + "' " + attempts + "-th attempt", ioe);
reopenFile(attempts, ioe);
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
/**
 * Returns an unmodifiable view of {@code input}, passing {@code null}
 * through unchanged (null-in/null-out).
 */
public <E> List<E> unmodifiable(List<E> input)
{
return (input == null) ? null : Collections.unmodifiableList(input);
} } | public class class_name {
public <E> List<E> unmodifiable(List<E> input)
{
if (input == null) {
return null; // depends on control dependency: [if], data = [none]
}
return Collections.unmodifiableList(input);
} } |
public class class_name {
/**
 * Fetches the last CPDefinition matching the product id and status in the
 * given comparator order, or {@code null} when there is no match.
 */
@Override
public CPDefinition fetchByC_S_Last(long CProductId, int status,
OrderByComparator<CPDefinition> orderByComparator) {
// Total matching rows; zero means there is no "last" row to fetch.
int rowCount = countByC_S(CProductId, status);
if (rowCount == 0) {
return null;
}
// Fetch exactly the final row in comparator order.
List<CPDefinition> matches = findByC_S(CProductId, status, rowCount - 1,
rowCount, orderByComparator);
return matches.isEmpty() ? null : matches.get(0);
} } | public class class_name {
@Override
public CPDefinition fetchByC_S_Last(long CProductId, int status,
OrderByComparator<CPDefinition> orderByComparator) {
int count = countByC_S(CProductId, status);
if (count == 0) {
return null; // depends on control dependency: [if], data = [none]
}
List<CPDefinition> list = findByC_S(CProductId, status, count - 1,
count, orderByComparator);
if (!list.isEmpty()) {
return list.get(0); // depends on control dependency: [if], data = [none]
}
return null;
} } |
public class class_name {
/**
 * Estimates one quotient digit (qhat) for a schoolbook long-division step,
 * given the top three dividend digits (u2, u1, u0) and the top two divisor
 * digits (v1, v0), all base 2^32. The initial estimate can exceed the true
 * digit by at most 2; the correction loop below tightens it, and an estimate
 * more than 2 too large indicates a logic error and throws.
 */
private static int estimateQuotient(int u2, int u1, int u0, int v1, int v0)
{
// estimate qhat based on the first 2 digits of divisor divided by the first digit of a dividend
long u21 = combineInts(u2, u1);
long qhat;
if (u2 == v1) {
// Leading digits equal: division would overflow one digit; clamp to base-1.
qhat = INT_BASE - 1;
}
else if (u21 >= 0) {
qhat = u21 / (v1 & LONG_MASK);
}
else {
// u21 has its sign bit set; divide as unsigned.
qhat = divideUnsignedLong(u21, v1);
}
if (qhat == 0) {
return 0;
}
// Check if qhat is greater than expected considering only first 3 digits of a dividend
// This step help to eliminate all the cases when the estimation is greater than q by 2
// and eliminates most of the cases when qhat is greater than q by 1
//
// u2 * b * b + u1 * b + u0 >= (v1 * b + v0) * qhat
// u2 * b * b + u1 * b + u0 >= v1 * b * qhat + v0 * qhat
// u2 * b * b + u1 * b - v1 * b * qhat >= v0 * qhat - u0
// (u21 - v1 * qhat) * b >= v0 * qhat - u0
// (u21 - v1 * qhat) * b + u0 >= v0 * qhat
// When ((u21 - v1 * qhat) * b + u0) is less than (v0 * qhat) decrease qhat by one
int iterations = 0;
long rhat = u21 - (v1 & LONG_MASK) * qhat;
while (Long.compareUnsigned(rhat, INT_BASE) < 0 && Long.compareUnsigned((v0 & LONG_MASK) * qhat, combineInts(lowInt(rhat), u0)) > 0) {
iterations++;
qhat--;
rhat += (v1 & LONG_MASK);
}
if (iterations > 2) {
throw new IllegalStateException("qhat is greater than q by more than 2: " + iterations);
}
return (int) qhat;
} } | public class class_name {
private static int estimateQuotient(int u2, int u1, int u0, int v1, int v0)
{
// estimate qhat based on the first 2 digits of divisor divided by the first digit of a dividend
long u21 = combineInts(u2, u1);
long qhat;
if (u2 == v1) {
qhat = INT_BASE - 1; // depends on control dependency: [if], data = [none]
}
else if (u21 >= 0) {
qhat = u21 / (v1 & LONG_MASK); // depends on control dependency: [if], data = [none]
}
else {
qhat = divideUnsignedLong(u21, v1); // depends on control dependency: [if], data = [(u21]
}
if (qhat == 0) {
return 0; // depends on control dependency: [if], data = [none]
}
// Check if qhat is greater than expected considering only first 3 digits of a dividend
// This step help to eliminate all the cases when the estimation is greater than q by 2
// and eliminates most of the cases when qhat is greater than q by 1
//
// u2 * b * b + u1 * b + u0 >= (v1 * b + v0) * qhat
// u2 * b * b + u1 * b + u0 >= v1 * b * qhat + v0 * qhat
// u2 * b * b + u1 * b - v1 * b * qhat >= v0 * qhat - u0
// (u21 - v1 * qhat) * b >= v0 * qhat - u0
// (u21 - v1 * qhat) * b + u0 >= v0 * qhat
// When ((u21 - v1 * qhat) * b + u0) is less than (v0 * qhat) decrease qhat by one
int iterations = 0;
long rhat = u21 - (v1 & LONG_MASK) * qhat;
while (Long.compareUnsigned(rhat, INT_BASE) < 0 && Long.compareUnsigned((v0 & LONG_MASK) * qhat, combineInts(lowInt(rhat), u0)) > 0) {
iterations++; // depends on control dependency: [while], data = [none]
qhat--; // depends on control dependency: [while], data = [none]
rhat += (v1 & LONG_MASK); // depends on control dependency: [while], data = [none]
}
if (iterations > 2) {
throw new IllegalStateException("qhat is greater than q by more than 2: " + iterations);
}
return (int) qhat;
} } |
public class class_name {
@Override
public WikipediaExample.Response<X> read(JsonReader reader) throws IOException {
WikipediaExample.Response<X> pages = new WikipediaExample.Response<X>();
reader.beginObject();
while (reader.hasNext()) {
String nextName = reader.nextName();
if ("query".equals(nextName)) {
reader.beginObject();
while (reader.hasNext()) {
if (query().equals(reader.nextName())) {
reader.beginObject();
while (reader.hasNext()) {
// each element is in form: "id" : { object }
// this advances the pointer to the value and skips the key
reader.nextName();
reader.beginObject();
pages.add(build(reader));
reader.endObject();
}
reader.endObject();
} else {
reader.skipValue();
}
}
reader.endObject();
} else if ("continue".equals(nextName)) {
reader.beginObject();
while (reader.hasNext()) {
if ("gsroffset".equals(reader.nextName())) {
pages.nextOffset = reader.nextLong();
} else {
reader.skipValue();
}
}
reader.endObject();
} else {
reader.skipValue();
}
}
reader.endObject();
return pages;
} } | public class class_name {
@Override
public WikipediaExample.Response<X> read(JsonReader reader) throws IOException {
WikipediaExample.Response<X> pages = new WikipediaExample.Response<X>();
reader.beginObject();
while (reader.hasNext()) {
String nextName = reader.nextName();
if ("query".equals(nextName)) {
reader.beginObject();
while (reader.hasNext()) {
if (query().equals(reader.nextName())) {
reader.beginObject(); // depends on control dependency: [if], data = [none]
while (reader.hasNext()) {
// each element is in form: "id" : { object }
// this advances the pointer to the value and skips the key
reader.nextName(); // depends on control dependency: [while], data = [none]
reader.beginObject(); // depends on control dependency: [while], data = [none]
pages.add(build(reader)); // depends on control dependency: [while], data = [none]
reader.endObject(); // depends on control dependency: [while], data = [none]
}
reader.endObject(); // depends on control dependency: [if], data = [none]
} else {
reader.skipValue(); // depends on control dependency: [if], data = [none]
}
}
reader.endObject();
} else if ("continue".equals(nextName)) {
reader.beginObject();
while (reader.hasNext()) {
if ("gsroffset".equals(reader.nextName())) {
pages.nextOffset = reader.nextLong(); // depends on control dependency: [if], data = [none]
} else {
reader.skipValue(); // depends on control dependency: [if], data = [none]
}
}
reader.endObject();
} else {
reader.skipValue();
}
}
reader.endObject();
return pages;
} } |
public class class_name {
public void marshall(StartContinuousExportRequest startContinuousExportRequest, ProtocolMarshaller protocolMarshaller) {
if (startContinuousExportRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(StartContinuousExportRequest startContinuousExportRequest, ProtocolMarshaller protocolMarshaller) {
if (startContinuousExportRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public static List<Locale> getPossibleLocales(CmsObject cms, CmsResource currentResource) {
CmsSite site = OpenCms.getSiteManager().getSiteForRootPath(currentResource.getRootPath());
List<Locale> secondaryLocales = Lists.newArrayList();
Locale mainLocale = null;
if (site != null) {
List<Locale> siteLocales = site.getSecondaryTranslationLocales();
mainLocale = site.getMainTranslationLocale(null);
if ((siteLocales == null) || siteLocales.isEmpty()) {
siteLocales = OpenCms.getLocaleManager().getAvailableLocales();
if (mainLocale == null) {
mainLocale = siteLocales.get(0);
}
}
secondaryLocales.addAll(siteLocales);
}
try {
CmsProperty secondaryLocaleProp = cms.readPropertyObject(
currentResource,
CmsPropertyDefinition.PROPERTY_SECONDARY_LOCALES,
true);
String propValue = secondaryLocaleProp.getValue();
if (!CmsStringUtil.isEmptyOrWhitespaceOnly(propValue)) {
List<Locale> restrictionLocales = Lists.newArrayList();
String[] tokens = propValue.trim().split(" *, *"); //$NON-NLS-1$
for (String token : tokens) {
OpenCms.getLocaleManager();
Locale localeForToken = CmsLocaleManager.getLocale(token);
restrictionLocales.add(localeForToken);
}
if (!restrictionLocales.isEmpty()) {
secondaryLocales.retainAll(restrictionLocales);
}
}
} catch (CmsException e) {
LOG.error(e.getLocalizedMessage(), e);
}
List<Locale> result = new ArrayList<Locale>();
result.add(mainLocale);
for (Locale secondaryLocale : secondaryLocales) {
if (!result.contains(secondaryLocale)) {
result.add(secondaryLocale);
}
}
return result;
} } | public class class_name {
public static List<Locale> getPossibleLocales(CmsObject cms, CmsResource currentResource) {
CmsSite site = OpenCms.getSiteManager().getSiteForRootPath(currentResource.getRootPath());
List<Locale> secondaryLocales = Lists.newArrayList();
Locale mainLocale = null;
if (site != null) {
List<Locale> siteLocales = site.getSecondaryTranslationLocales();
mainLocale = site.getMainTranslationLocale(null); // depends on control dependency: [if], data = [null)]
if ((siteLocales == null) || siteLocales.isEmpty()) {
siteLocales = OpenCms.getLocaleManager().getAvailableLocales(); // depends on control dependency: [if], data = [none]
if (mainLocale == null) {
mainLocale = siteLocales.get(0); // depends on control dependency: [if], data = [none]
}
}
secondaryLocales.addAll(siteLocales); // depends on control dependency: [if], data = [(site]
}
try {
CmsProperty secondaryLocaleProp = cms.readPropertyObject(
currentResource,
CmsPropertyDefinition.PROPERTY_SECONDARY_LOCALES,
true);
String propValue = secondaryLocaleProp.getValue();
if (!CmsStringUtil.isEmptyOrWhitespaceOnly(propValue)) {
List<Locale> restrictionLocales = Lists.newArrayList();
String[] tokens = propValue.trim().split(" *, *"); //$NON-NLS-1$
for (String token : tokens) {
OpenCms.getLocaleManager(); // depends on control dependency: [for], data = [none]
Locale localeForToken = CmsLocaleManager.getLocale(token);
restrictionLocales.add(localeForToken); // depends on control dependency: [for], data = [none]
}
if (!restrictionLocales.isEmpty()) {
secondaryLocales.retainAll(restrictionLocales); // depends on control dependency: [if], data = [none]
}
}
} catch (CmsException e) {
LOG.error(e.getLocalizedMessage(), e);
} // depends on control dependency: [catch], data = [none]
List<Locale> result = new ArrayList<Locale>();
result.add(mainLocale);
for (Locale secondaryLocale : secondaryLocales) {
if (!result.contains(secondaryLocale)) {
result.add(secondaryLocale); // depends on control dependency: [if], data = [none]
}
}
return result;
} } |
public class class_name {
public int deQueue()
{
if ( size == 0 ) {
return -1;
}
//remove the first element
Entry o = head.next;
head.next = o.next;
o.next.prev = head;
//backup the data
int v = o.data;
size--;
o = null; //Let gc do its work
return v;
} } | public class class_name {
public int deQueue()
{
if ( size == 0 ) {
return -1; // depends on control dependency: [if], data = [none]
}
//remove the first element
Entry o = head.next;
head.next = o.next;
o.next.prev = head;
//backup the data
int v = o.data;
size--;
o = null; //Let gc do its work
return v;
} } |
public class class_name {
private String formatPhoneNumber() {
StringBuilder buffer = new StringBuilder();
buffer.append(this.countryCode);
buffer.append(' ');
buffer.append(this.areaCode);
buffer.append(' ');
buffer.append(this.localNumber);
if ((this.extension != null) && (this.extension.length() > 0)) {
buffer.append('-');
buffer.append(this.extension);
}
return buffer.toString();
} } | public class class_name {
private String formatPhoneNumber() {
StringBuilder buffer = new StringBuilder();
buffer.append(this.countryCode);
buffer.append(' ');
buffer.append(this.areaCode);
buffer.append(' ');
buffer.append(this.localNumber);
if ((this.extension != null) && (this.extension.length() > 0)) {
buffer.append('-'); // depends on control dependency: [if], data = [none]
buffer.append(this.extension); // depends on control dependency: [if], data = [none]
}
return buffer.toString();
} } |
public class class_name {
@Override
public ICxDxSessionData getAppSessionData(Class<? extends AppSession> clazz, String sessionId) {
if (clazz.equals(ClientCxDxSession.class)) {
ClientCxDxSessionDataLocalImpl data = new ClientCxDxSessionDataLocalImpl();
data.setSessionId(sessionId);
return data;
}
else if (clazz.equals(ServerCxDxSession.class)) {
ServerCxDxSessionDataLocalImpl data = new ServerCxDxSessionDataLocalImpl();
data.setSessionId(sessionId);
return data;
}
throw new IllegalArgumentException(clazz.toString());
} } | public class class_name {
@Override
public ICxDxSessionData getAppSessionData(Class<? extends AppSession> clazz, String sessionId) {
if (clazz.equals(ClientCxDxSession.class)) {
ClientCxDxSessionDataLocalImpl data = new ClientCxDxSessionDataLocalImpl();
data.setSessionId(sessionId);
// depends on control dependency: [if], data = [none]
return data;
// depends on control dependency: [if], data = [none]
}
else if (clazz.equals(ServerCxDxSession.class)) {
ServerCxDxSessionDataLocalImpl data = new ServerCxDxSessionDataLocalImpl();
data.setSessionId(sessionId);
// depends on control dependency: [if], data = [none]
return data;
// depends on control dependency: [if], data = [none]
}
throw new IllegalArgumentException(clazz.toString());
} } |
public class class_name {
private void updateMax(int idx, double v) {
AtomicLong current = max.getCurrent(idx);
long m = current.get();
while (v > Double.longBitsToDouble(m)) {
if (current.compareAndSet(m, Double.doubleToLongBits(v))) {
break;
}
m = current.get();
}
} } | public class class_name {
private void updateMax(int idx, double v) {
AtomicLong current = max.getCurrent(idx);
long m = current.get();
while (v > Double.longBitsToDouble(m)) {
if (current.compareAndSet(m, Double.doubleToLongBits(v))) {
break;
}
m = current.get(); // depends on control dependency: [while], data = [none]
}
} } |
public class class_name {
public Team createTeam(final String name, final boolean createApiKey) {
pm.currentTransaction().begin();
final Team team = new Team();
team.setName(name);
//todo assign permissions
pm.makePersistent(team);
pm.currentTransaction().commit();
if (createApiKey) {
createApiKey(team);
}
return getObjectByUuid(Team.class, team.getUuid(), Team.FetchGroup.ALL.name());
} } | public class class_name {
public Team createTeam(final String name, final boolean createApiKey) {
pm.currentTransaction().begin();
final Team team = new Team();
team.setName(name);
//todo assign permissions
pm.makePersistent(team);
pm.currentTransaction().commit();
if (createApiKey) {
createApiKey(team); // depends on control dependency: [if], data = [none]
}
return getObjectByUuid(Team.class, team.getUuid(), Team.FetchGroup.ALL.name());
} } |
public class class_name {
public <T> T convert(final Object source, final Class<T> targetclass) {
if (source == null) {
return null;
}
final Class<?> sourceclass = source.getClass();
if (targetclass.isPrimitive() && String.class.isAssignableFrom(sourceclass)) {
return (T) parsePrimitive(source.toString(), targetclass);
}
final int sourceId = ids.getId(sourceclass);
final int targetId = ids.getId(targetclass);
final SourceTargetPairKey key = new SourceTargetPairKey(sourceId, targetId);
Converter converter = cache.get(key);
if (converter != null) {
return (T) converter.convert(source, targetclass);
}
final LinkedList<SourceTargetPairMatch> matches = new LinkedList<>();
for (SourceTargetPair pair : converters.values()) {
SourceTargetPairMatch match = pair.match(sourceclass, targetclass);
if (match.matchesSource() && match.matchesTarget()) {
matches.add(match);
}
}
if (matches.size() == 0) {
throw new ConversionException("No suitable converter found for target class ["
+ targetclass.getName() + "] and source value [" + sourceclass.getName()
+ "]. The following converters are available [" + converters.keySet() + "]");
}
Collections.sort(matches, SourceTargetPairMatch.bestTargetMatch());
converter = matches.get(0).pair.converter;
cache.put(key, converter);
return (T) converter.convert(source, targetclass);
} } | public class class_name {
public <T> T convert(final Object source, final Class<T> targetclass) {
if (source == null) {
return null; // depends on control dependency: [if], data = [none]
}
final Class<?> sourceclass = source.getClass();
if (targetclass.isPrimitive() && String.class.isAssignableFrom(sourceclass)) {
return (T) parsePrimitive(source.toString(), targetclass); // depends on control dependency: [if], data = [none]
}
final int sourceId = ids.getId(sourceclass);
final int targetId = ids.getId(targetclass);
final SourceTargetPairKey key = new SourceTargetPairKey(sourceId, targetId);
Converter converter = cache.get(key);
if (converter != null) {
return (T) converter.convert(source, targetclass); // depends on control dependency: [if], data = [none]
}
final LinkedList<SourceTargetPairMatch> matches = new LinkedList<>();
for (SourceTargetPair pair : converters.values()) {
SourceTargetPairMatch match = pair.match(sourceclass, targetclass);
if (match.matchesSource() && match.matchesTarget()) {
matches.add(match); // depends on control dependency: [if], data = [none]
}
}
if (matches.size() == 0) {
throw new ConversionException("No suitable converter found for target class ["
+ targetclass.getName() + "] and source value [" + sourceclass.getName()
+ "]. The following converters are available [" + converters.keySet() + "]");
}
Collections.sort(matches, SourceTargetPairMatch.bestTargetMatch());
converter = matches.get(0).pair.converter;
cache.put(key, converter);
return (T) converter.convert(source, targetclass);
} } |
public class class_name {
private final CClassLoader getLoaderByName(final String name) {
try {
CClassLoader loader = (CClassLoader) this.childrenMap.get(name);
if (loader == null) {
loader = CClassLoader.createLoader(this, name);
this.childrenMap.put(name, loader);
}
return loader;
} finally {
try {
} catch (final Exception ignore) {
}
}
} } | public class class_name {
private final CClassLoader getLoaderByName(final String name) {
try {
CClassLoader loader = (CClassLoader) this.childrenMap.get(name);
if (loader == null) {
loader = CClassLoader.createLoader(this, name); // depends on control dependency: [if], data = [none]
this.childrenMap.put(name, loader); // depends on control dependency: [if], data = [none]
}
return loader; // depends on control dependency: [try], data = [none]
} finally {
try {
} catch (final Exception ignore) {
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
public static Optional<Dependency> parseJarName(String sourceName, String jarName) {
if(SPECIAL_CASES.containsKey(jarName)) {
Dependency specialCase = SPECIAL_CASES.get(jarName);
return Optional.of(new Dependency(sourceName, specialCase.getName(), specialCase.getVersion()));
}
// Go over all known patterns
for(Pattern p : PATTERNS) {
Matcher m = p.matcher(jarName);
if (m.matches()) {
LOG.trace("Applied pattern '{}' to {}", p.pattern(), jarName);
return Optional.of(new Dependency(sourceName, m.group(1), m.group(2)));
}
}
// Otherwise this jar name is unknown to us
return Optional.empty();
} } | public class class_name {
public static Optional<Dependency> parseJarName(String sourceName, String jarName) {
if(SPECIAL_CASES.containsKey(jarName)) {
Dependency specialCase = SPECIAL_CASES.get(jarName);
return Optional.of(new Dependency(sourceName, specialCase.getName(), specialCase.getVersion())); // depends on control dependency: [if], data = [none]
}
// Go over all known patterns
for(Pattern p : PATTERNS) {
Matcher m = p.matcher(jarName);
if (m.matches()) {
LOG.trace("Applied pattern '{}' to {}", p.pattern(), jarName); // depends on control dependency: [if], data = [none]
return Optional.of(new Dependency(sourceName, m.group(1), m.group(2))); // depends on control dependency: [if], data = [none]
}
}
// Otherwise this jar name is unknown to us
return Optional.empty();
} } |
public class class_name {
public static Triple<String, String, String> buldIndexes(final SQLiteEntity entity,
ArrayList<Pair<List<String>, Boolean>> indexList, boolean unique, int counter) {
Triple<String, String, String> result = new Triple<>();
result.value0 = "";
result.value1 = "";
result.value2 = "";
if (indexList.size() == 0)
return result;
String uniqueString;
if (unique) {
uniqueString = "UNIQUE ";
} else {
uniqueString = "";
}
List<String> listCreateIndex = new ArrayList<>();
List<String> listDropIndex = new ArrayList<>();
List<String> listUniqueConstraint = new ArrayList<>();
for (Pair<List<String>, Boolean> index : indexList) {
final List<String> listUniqueFields = new ArrayList<>();
String createIndex = String.format(" CREATE %sINDEX idx_%s_%s on %s (%s)", uniqueString,
entity.getTableName(), counter++, entity.getTableName(), StringUtils.join(index.value0, ", "));
String dropIndex = String.format(" DROP INDEX IF EXISTS idx_%s_%s", entity.getTableName(), counter);
final One<Integer> fieldCounter = new One<Integer>(0);
createIndex = JQLChecker.getInstance().replace(new JQLContext() {
@Override
public String getContextDescription() {
return "While table definition generation for entity " + entity.getName();
}
@Override
public String getName() {
// TODO Auto-generated method stub
return null;
}
@Override
public String getParentName() {
// TODO Auto-generated method stub
return null;
}
@Override
public Finder<SQLProperty> findEntityByName(String entityName) {
// TODO Auto-generated method stub
return null;
}
}, createIndex, new JQLReplacerListenerImpl(null) {
@Override
public String onColumnName(String columnName) {
fieldCounter.value0++;
SQLProperty property = entity.findPropertyByName(columnName);
AssertKripton.assertTrue(property != null, "class '%s' in @%s(indexes) use unknown property '%s'",
entity.getName(), BindSqlType.class.getSimpleName(), columnName);
listUniqueFields.add(property.columnName);
return property.columnName;
}
@Override
public String onColumnFullyQualifiedName(String tableName, String columnName) {
AssertKripton.fail("Inconsistent state");
return null;
}
});
AssertKripton.assertTrue(fieldCounter.value0 > 0,
"class '%s' have @%s(indexes) with no well formed indexes", entity.getName(),
BindSqlType.class.getSimpleName());
if (unique) {
// add unique constraint
listUniqueConstraint.add(String.format(", UNIQUE (%s)", StringUtils.join(listUniqueFields, ", ")));
}
listCreateIndex.add(createIndex);
listDropIndex.add(dropIndex);
}
result.value0 = StringUtils.join(listCreateIndex, ";");
result.value1 = StringUtils.join(listDropIndex, ";");
result.value2 = StringUtils.join(listUniqueConstraint, "");
return result;
} } | public class class_name {
public static Triple<String, String, String> buldIndexes(final SQLiteEntity entity,
ArrayList<Pair<List<String>, Boolean>> indexList, boolean unique, int counter) {
Triple<String, String, String> result = new Triple<>();
result.value0 = "";
result.value1 = "";
result.value2 = "";
if (indexList.size() == 0)
return result;
String uniqueString;
if (unique) {
uniqueString = "UNIQUE "; // depends on control dependency: [if], data = [none]
} else {
uniqueString = ""; // depends on control dependency: [if], data = [none]
}
List<String> listCreateIndex = new ArrayList<>();
List<String> listDropIndex = new ArrayList<>();
List<String> listUniqueConstraint = new ArrayList<>();
for (Pair<List<String>, Boolean> index : indexList) {
final List<String> listUniqueFields = new ArrayList<>();
String createIndex = String.format(" CREATE %sINDEX idx_%s_%s on %s (%s)", uniqueString,
entity.getTableName(), counter++, entity.getTableName(), StringUtils.join(index.value0, ", "));
String dropIndex = String.format(" DROP INDEX IF EXISTS idx_%s_%s", entity.getTableName(), counter);
final One<Integer> fieldCounter = new One<Integer>(0);
createIndex = JQLChecker.getInstance().replace(new JQLContext() {
@Override
public String getContextDescription() {
return "While table definition generation for entity " + entity.getName();
}
@Override
public String getName() {
// TODO Auto-generated method stub
return null;
}
@Override
public String getParentName() {
// TODO Auto-generated method stub
return null;
}
@Override
public Finder<SQLProperty> findEntityByName(String entityName) {
// TODO Auto-generated method stub
return null;
}
}, createIndex, new JQLReplacerListenerImpl(null) {
@Override
public String onColumnName(String columnName) {
fieldCounter.value0++;
SQLProperty property = entity.findPropertyByName(columnName);
AssertKripton.assertTrue(property != null, "class '%s' in @%s(indexes) use unknown property '%s'",
entity.getName(), BindSqlType.class.getSimpleName(), columnName);
listUniqueFields.add(property.columnName);
return property.columnName;
}
@Override
public String onColumnFullyQualifiedName(String tableName, String columnName) {
AssertKripton.fail("Inconsistent state");
return null;
}
});
AssertKripton.assertTrue(fieldCounter.value0 > 0,
"class '%s' have @%s(indexes) with no well formed indexes", entity.getName(),
BindSqlType.class.getSimpleName());
if (unique) {
// add unique constraint
listUniqueConstraint.add(String.format(", UNIQUE (%s)", StringUtils.join(listUniqueFields, ", "))); // depends on control dependency: [if], data = [none]
}
listCreateIndex.add(createIndex);
listDropIndex.add(dropIndex);
}
result.value0 = StringUtils.join(listCreateIndex, ";");
result.value1 = StringUtils.join(listDropIndex, ";");
result.value2 = StringUtils.join(listUniqueConstraint, "");
return result;
} } // depends on control dependency: [for], data = [none] |
public class class_name {
public int download() {
try {
//if (url.startsWith("https://packagist.org") || url.startsWith("https://getcomposer.org")) {
if (url.startsWith("https://")) {
registerSSLContext(client.getBackend());
}
final HttpGet httpGet = new HttpGet(url);
if (httpGet.isAborted()) {
httpGet.abort();
}
client.execute(httpGet, new FutureCallback<HttpResponse>() {
@Override
public void failed(Exception e) {
for (DownloadListenerInterface listener : listeners) {
listener.errorOccured(e);
}
}
@Override
public void completed(HttpResponse response) {
for (DownloadListenerInterface listener : listeners) {
try {
listener.dataReceived(response.getEntity().getContent(), httpGet.getURI().toString());
} catch (Exception e) {
listener.errorOccured(e);
}
}
}
@Override
public void cancelled() {
for (DownloadListenerInterface listener : listeners) {
listener.aborted(httpGet.getURI().toString());
}
}
});
httpGets.add(httpGet);
// client.shutdown();
lastSlot = httpGets.size() - 1;
return lastSlot;
} catch (Exception e) {
for (DownloadListenerInterface listener : listeners) {
listener.errorOccured(e);
}
}
return -1;
} } | public class class_name {
public int download() {
try {
//if (url.startsWith("https://packagist.org") || url.startsWith("https://getcomposer.org")) {
if (url.startsWith("https://")) {
registerSSLContext(client.getBackend());
}
final HttpGet httpGet = new HttpGet(url);
if (httpGet.isAborted()) {
httpGet.abort();
}
client.execute(httpGet, new FutureCallback<HttpResponse>() {
@Override
public void failed(Exception e) {
for (DownloadListenerInterface listener : listeners) {
listener.errorOccured(e); // depends on control dependency: [for], data = [listener]
}
}
@Override
public void completed(HttpResponse response) {
for (DownloadListenerInterface listener : listeners) {
try {
listener.dataReceived(response.getEntity().getContent(), httpGet.getURI().toString()); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
listener.errorOccured(e);
} // depends on control dependency: [catch], data = [none]
}
}
@Override
public void cancelled() {
for (DownloadListenerInterface listener : listeners) {
listener.aborted(httpGet.getURI().toString()); // depends on control dependency: [for], data = [listener]
}
}
});
httpGets.add(httpGet);
// client.shutdown();
lastSlot = httpGets.size() - 1; // depends on control dependency: [try], data = [none]
return lastSlot; // depends on control dependency: [try], data = [none]
} catch (Exception e) {
for (DownloadListenerInterface listener : listeners) {
listener.errorOccured(e); // depends on control dependency: [for], data = [listener]
}
} // depends on control dependency: [catch], data = [none]
return -1;
} } |
public class class_name {
public void setUpgradeHistories(java.util.Collection<UpgradeHistory> upgradeHistories) {
if (upgradeHistories == null) {
this.upgradeHistories = null;
return;
}
this.upgradeHistories = new java.util.ArrayList<UpgradeHistory>(upgradeHistories);
} } | public class class_name {
public void setUpgradeHistories(java.util.Collection<UpgradeHistory> upgradeHistories) {
if (upgradeHistories == null) {
this.upgradeHistories = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.upgradeHistories = new java.util.ArrayList<UpgradeHistory>(upgradeHistories);
} } |
public class class_name {
protected void appendJsNode(HttpServletRequest request, Document document, DocumentFragment head, Js js, String relativeRoot) {
final String scriptPath = getElementPath(request, js, relativeRoot);
if (resourcesDao.isConditional(js)) {
Comment c = document.createComment("");
c.appendData(OPEN_COND_COMMENT_PRE);
c.appendData(js.getConditional());
c.appendData(OPEN_COND_COMMENT_POST);
c.appendData(OPEN_SCRIPT);
c.appendData(scriptPath);
c.appendData(CLOSE_SCRIPT);
c.appendData(CLOSE_COND_COMMENT);
head.appendChild(c);
}
else {
Element element = document.createElement(SCRIPT);
element.setAttribute(TYPE, "text/javascript");
element.setAttribute(SRC, scriptPath);
element.appendChild(document.createTextNode(" "));
head.appendChild(element);
}
} } | public class class_name {
protected void appendJsNode(HttpServletRequest request, Document document, DocumentFragment head, Js js, String relativeRoot) {
final String scriptPath = getElementPath(request, js, relativeRoot);
if (resourcesDao.isConditional(js)) {
Comment c = document.createComment("");
c.appendData(OPEN_COND_COMMENT_PRE); // depends on control dependency: [if], data = [none]
c.appendData(js.getConditional()); // depends on control dependency: [if], data = [none]
c.appendData(OPEN_COND_COMMENT_POST); // depends on control dependency: [if], data = [none]
c.appendData(OPEN_SCRIPT); // depends on control dependency: [if], data = [none]
c.appendData(scriptPath); // depends on control dependency: [if], data = [none]
c.appendData(CLOSE_SCRIPT); // depends on control dependency: [if], data = [none]
c.appendData(CLOSE_COND_COMMENT); // depends on control dependency: [if], data = [none]
head.appendChild(c); // depends on control dependency: [if], data = [none]
}
else {
Element element = document.createElement(SCRIPT);
element.setAttribute(TYPE, "text/javascript"); // depends on control dependency: [if], data = [none]
element.setAttribute(SRC, scriptPath); // depends on control dependency: [if], data = [none]
element.appendChild(document.createTextNode(" ")); // depends on control dependency: [if], data = [none]
head.appendChild(element); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
HistoricEra getPreferredEra(
HistoricDate hd,
PlainDate date
) {
if ((this.era == null) || date.isBefore(this.start) || date.isAfter(this.end)) {
return ((hd.compareTo(AD1) < 0) ? HistoricEra.BC : HistoricEra.AD);
} else if ((this.era == HistoricEra.HISPANIC) && (hd.compareTo(BC38) < 0)) {
return HistoricEra.BC; // exceptional case
} else {
return this.era;
}
} } | public class class_name {
HistoricEra getPreferredEra(
HistoricDate hd,
PlainDate date
) {
if ((this.era == null) || date.isBefore(this.start) || date.isAfter(this.end)) {
return ((hd.compareTo(AD1) < 0) ? HistoricEra.BC : HistoricEra.AD); // depends on control dependency: [if], data = [none]
} else if ((this.era == HistoricEra.HISPANIC) && (hd.compareTo(BC38) < 0)) {
return HistoricEra.BC; // exceptional case // depends on control dependency: [if], data = [none]
} else {
return this.era; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static <T> boolean retainAll(Collection<T> self, @ClosureParams(FirstParam.FirstGenericType.class) Closure condition) {
Iterator iter = InvokerHelper.asIterator(self);
BooleanClosureWrapper bcw = new BooleanClosureWrapper(condition);
boolean result = false;
while (iter.hasNext()) {
Object value = iter.next();
if (!bcw.call(value)) {
iter.remove();
result = true;
}
}
return result;
} } | public class class_name {
public static <T> boolean retainAll(Collection<T> self, @ClosureParams(FirstParam.FirstGenericType.class) Closure condition) {
Iterator iter = InvokerHelper.asIterator(self);
BooleanClosureWrapper bcw = new BooleanClosureWrapper(condition);
boolean result = false;
while (iter.hasNext()) {
Object value = iter.next();
if (!bcw.call(value)) {
iter.remove(); // depends on control dependency: [if], data = [none]
result = true; // depends on control dependency: [if], data = [none]
}
}
return result;
} } |
public class class_name {
public synchronized CmsListMetadata getMetadata(String listDialogName, String listId) {
getSettings();
String metaDataKey = listDialogName + listId;
if ((getMetadataCache().get(metaDataKey) == null) || getMetadataCache().get(metaDataKey).isVolatile()) {
if (LOG.isDebugEnabled()) {
LOG.debug(Messages.get().getBundle().key(Messages.LOG_START_METADATA_LIST_1, getListId()));
}
CmsListMetadata metadata = new CmsListMetadata(listId);
setColumns(metadata);
// always check the search action
setSearchAction(metadata, m_searchColId);
setIndependentActions(metadata);
metadata.addIndependentAction(new CmsListPrintIAction());
setMultiActions(metadata);
metadata.checkIds();
getMetadataCache().put(metaDataKey, metadata);
if (LOG.isDebugEnabled()) {
LOG.debug(Messages.get().getBundle().key(Messages.LOG_END_METADATA_LIST_1, getListId()));
}
}
return getMetadata(metaDataKey);
} } | public class class_name {
public synchronized CmsListMetadata getMetadata(String listDialogName, String listId) {
getSettings();
String metaDataKey = listDialogName + listId;
if ((getMetadataCache().get(metaDataKey) == null) || getMetadataCache().get(metaDataKey).isVolatile()) {
if (LOG.isDebugEnabled()) {
LOG.debug(Messages.get().getBundle().key(Messages.LOG_START_METADATA_LIST_1, getListId())); // depends on control dependency: [if], data = [none]
}
CmsListMetadata metadata = new CmsListMetadata(listId);
setColumns(metadata); // depends on control dependency: [if], data = [none]
// always check the search action
setSearchAction(metadata, m_searchColId); // depends on control dependency: [if], data = [none]
setIndependentActions(metadata); // depends on control dependency: [if], data = [none]
metadata.addIndependentAction(new CmsListPrintIAction()); // depends on control dependency: [if], data = [none]
setMultiActions(metadata); // depends on control dependency: [if], data = [none]
metadata.checkIds(); // depends on control dependency: [if], data = [none]
getMetadataCache().put(metaDataKey, metadata); // depends on control dependency: [if], data = [none]
if (LOG.isDebugEnabled()) {
LOG.debug(Messages.get().getBundle().key(Messages.LOG_END_METADATA_LIST_1, getListId())); // depends on control dependency: [if], data = [none]
}
}
return getMetadata(metaDataKey);
} } |
public class class_name {
public static void disableSSLValidation() {
try {
SSLContext context = SSLContext.getInstance("SSL");
context.init(null, new TrustManager[]{new UnsafeTrustManager()}, null);
HttpsURLConnection.setDefaultSSLSocketFactory(context.getSocketFactory());
} catch (Exception e) {
throw new RuntimeException(e);
}
} } | public class class_name {
public static void disableSSLValidation() {
try {
SSLContext context = SSLContext.getInstance("SSL");
context.init(null, new TrustManager[]{new UnsafeTrustManager()}, null); // depends on control dependency: [try], data = [none]
HttpsURLConnection.setDefaultSSLSocketFactory(context.getSocketFactory()); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new RuntimeException(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@SuppressWarnings("unchecked")
private static Method getMethodOfInterest(Object cmp, Class<? extends Annotation> ann) {
Class cmpClass = cmp.getClass();
Class infoClass = infoClass(cmpClass);
Method[] ms = infoClass.getMethods();
for (Method m : ms) {
if (m.getAnnotation(ann) != null) {
if (m.getReturnType() != Void.TYPE || m.getParameterTypes().length > 0) {
throw new IllegalArgumentException("Invalid Method signature: " + m);
}
try {
return cmpClass.getMethod(m.getName());
} catch (Exception ex) {
throw new ComponentException("Cannot find/access method: " + m);
}
}
}
throw new IllegalArgumentException("No " + ann.getCanonicalName() + " found in " + cmp.getClass());
} } | public class class_name {
@SuppressWarnings("unchecked")
private static Method getMethodOfInterest(Object cmp, Class<? extends Annotation> ann) {
Class cmpClass = cmp.getClass();
Class infoClass = infoClass(cmpClass);
Method[] ms = infoClass.getMethods();
for (Method m : ms) {
if (m.getAnnotation(ann) != null) {
if (m.getReturnType() != Void.TYPE || m.getParameterTypes().length > 0) {
throw new IllegalArgumentException("Invalid Method signature: " + m);
}
try {
return cmpClass.getMethod(m.getName()); // depends on control dependency: [try], data = [none]
} catch (Exception ex) {
throw new ComponentException("Cannot find/access method: " + m);
} // depends on control dependency: [catch], data = [none]
}
}
throw new IllegalArgumentException("No " + ann.getCanonicalName() + " found in " + cmp.getClass());
} } |
public class class_name {
public Object get(Id id, Mode mode) {
Object value = writeCache.get(id);
if (value == null) {
value = readCache.get(new CacheKey(id));
if (value != null && Mode.WRITE.equals(mode)) {
writeCache.put(id, value);
}
}
return value;
} } | public class class_name {
public Object get(Id id, Mode mode) {
Object value = writeCache.get(id);
if (value == null) {
value = readCache.get(new CacheKey(id)); // depends on control dependency: [if], data = [none]
if (value != null && Mode.WRITE.equals(mode)) {
writeCache.put(id, value); // depends on control dependency: [if], data = [none]
}
}
return value;
} } |
public class class_name {
public void removeLocation(final String key, final CompletionListener completionListener) {
if (key == null) {
throw new NullPointerException();
}
DatabaseReference keyRef = this.getDatabaseRefForKey(key);
if (completionListener != null) {
keyRef.setValue(null, new DatabaseReference.CompletionListener() {
@Override
public void onComplete(DatabaseError databaseError, DatabaseReference databaseReference) {
completionListener.onComplete(key, databaseError);
}
});
} else {
keyRef.setValue(null);
}
} } | public class class_name {
public void removeLocation(final String key, final CompletionListener completionListener) {
if (key == null) {
throw new NullPointerException();
}
DatabaseReference keyRef = this.getDatabaseRefForKey(key);
if (completionListener != null) {
keyRef.setValue(null, new DatabaseReference.CompletionListener() {
@Override
public void onComplete(DatabaseError databaseError, DatabaseReference databaseReference) {
completionListener.onComplete(key, databaseError);
}
}); // depends on control dependency: [if], data = [none]
} else {
keyRef.setValue(null); // depends on control dependency: [if], data = [null)]
}
} } |
public class class_name {
@SuppressWarnings("unchecked")
private Map<String, Object> _getExecuteFlashMap(FacesContext context)
{
// Note that we don't have to synchronize here, because it is no problem
// if we create more SubKeyMaps with the same subkey, because they are
// totally equal and point to the same entries in the SessionMap.
Map<String, Object> requestMap = context.getExternalContext().getRequestMap();
Map<String, Object> map = (Map<String, Object>) requestMap.get(FLASH_EXECUTE_MAP);
if (map == null)
{
String token = (String) requestMap.get(FLASH_EXECUTE_MAP_TOKEN);
String fullToken = FLASH_SESSION_MAP_SUBKEY_PREFIX + SEPARATOR_CHAR + token + SEPARATOR_CHAR;
map = _createSubKeyMap(context, fullToken);
requestMap.put(FLASH_EXECUTE_MAP, map);
}
return map;
} } | public class class_name {
@SuppressWarnings("unchecked")
private Map<String, Object> _getExecuteFlashMap(FacesContext context)
{
// Note that we don't have to synchronize here, because it is no problem
// if we create more SubKeyMaps with the same subkey, because they are
// totally equal and point to the same entries in the SessionMap.
Map<String, Object> requestMap = context.getExternalContext().getRequestMap();
Map<String, Object> map = (Map<String, Object>) requestMap.get(FLASH_EXECUTE_MAP);
if (map == null)
{
String token = (String) requestMap.get(FLASH_EXECUTE_MAP_TOKEN);
String fullToken = FLASH_SESSION_MAP_SUBKEY_PREFIX + SEPARATOR_CHAR + token + SEPARATOR_CHAR;
map = _createSubKeyMap(context, fullToken); // depends on control dependency: [if], data = [none]
requestMap.put(FLASH_EXECUTE_MAP, map); // depends on control dependency: [if], data = [none]
}
return map;
} } |
public class class_name {
public static void shutdown(Duration timeout, ExecutorService... pools) {
// Prevent new tasks from being submitted.
for (ExecutorService pool : pools) {
pool.shutdown();
}
TimeoutTimer timer = new TimeoutTimer(timeout);
for (ExecutorService pool : pools) {
try {
// Wait a while for existing tasks to terminate. Note that subsequent pools will be given a smaller timeout,
// since they all started shutting down at the same time (above), and they can shut down in parallel.
if (!pool.awaitTermination(timer.getRemaining().toMillis(), TimeUnit.MILLISECONDS)) {
// Cancel currently executing tasks and wait for them to respond to being cancelled.
pool.shutdownNow();
pool.awaitTermination(timer.getRemaining().toMillis(), TimeUnit.MILLISECONDS);
}
} catch (InterruptedException ie) {
pool.shutdownNow();
Thread.currentThread().interrupt();
}
}
} } | public class class_name {
public static void shutdown(Duration timeout, ExecutorService... pools) {
// Prevent new tasks from being submitted.
for (ExecutorService pool : pools) {
pool.shutdown(); // depends on control dependency: [for], data = [pool]
}
TimeoutTimer timer = new TimeoutTimer(timeout);
for (ExecutorService pool : pools) {
try {
// Wait a while for existing tasks to terminate. Note that subsequent pools will be given a smaller timeout,
// since they all started shutting down at the same time (above), and they can shut down in parallel.
if (!pool.awaitTermination(timer.getRemaining().toMillis(), TimeUnit.MILLISECONDS)) {
// Cancel currently executing tasks and wait for them to respond to being cancelled.
pool.shutdownNow(); // depends on control dependency: [if], data = [none]
pool.awaitTermination(timer.getRemaining().toMillis(), TimeUnit.MILLISECONDS); // depends on control dependency: [if], data = [none]
}
} catch (InterruptedException ie) {
pool.shutdownNow();
Thread.currentThread().interrupt();
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
public static String removeLast(String original, String string) {
int lastIndexOf = original.lastIndexOf(string);
if (lastIndexOf == -1) {
return original;
}
return original.substring(0, lastIndexOf);
} } | public class class_name {
public static String removeLast(String original, String string) {
int lastIndexOf = original.lastIndexOf(string);
if (lastIndexOf == -1) {
return original;
// depends on control dependency: [if], data = [none]
}
return original.substring(0, lastIndexOf);
} } |
public class class_name {
private CompletableFuture<WriterFlushResult> flushOnce(TimeoutTimer timer) {
boolean hasDelete = this.hasDeletePending.get();
boolean hasMerge = this.mergeTransactionCount.get() > 0;
boolean hasSeal = this.hasSealPending.get();
boolean hasTruncate = this.truncateCount.get() > 0;
long traceId = LoggerHelpers.traceEnterWithContext(log, this.traceObjectId, "flushOnce", this.operations.size(),
this.mergeTransactionCount, hasSeal, hasTruncate, hasDelete);
CompletableFuture<WriterFlushResult> result;
if (hasDelete) {
// If we have a Deletes, simply delete the Segment and move on. No other operation matters now.
result = deleteSegment(timer);
} else if (hasSeal || hasMerge || hasTruncate) {
// If we have a Seal, Merge or Truncate Pending, flush everything until we reach that operation.
result = flushFully(timer);
if (hasMerge) {
// If we have a merge, do it after we flush fully.
result = result.thenComposeAsync(flushResult -> mergeIfNecessary(flushResult, timer), this.executor);
}
if (hasSeal) {
// If we have a seal, do it after every other operation.
result = result.thenComposeAsync(flushResult -> sealIfNecessary(flushResult, timer), this.executor);
}
} else {
// Otherwise, just flush the excess as long as we have something to flush.
result = flushExcess(timer);
}
if (log.isTraceEnabled()) {
result = result.thenApply(r -> {
LoggerHelpers.traceLeave(log, this.traceObjectId, "flushOnce", traceId, r);
return r;
});
}
return result;
} } | public class class_name {
private CompletableFuture<WriterFlushResult> flushOnce(TimeoutTimer timer) {
boolean hasDelete = this.hasDeletePending.get();
boolean hasMerge = this.mergeTransactionCount.get() > 0;
boolean hasSeal = this.hasSealPending.get();
boolean hasTruncate = this.truncateCount.get() > 0;
long traceId = LoggerHelpers.traceEnterWithContext(log, this.traceObjectId, "flushOnce", this.operations.size(),
this.mergeTransactionCount, hasSeal, hasTruncate, hasDelete);
CompletableFuture<WriterFlushResult> result;
if (hasDelete) {
// If we have a Deletes, simply delete the Segment and move on. No other operation matters now.
result = deleteSegment(timer); // depends on control dependency: [if], data = [none]
} else if (hasSeal || hasMerge || hasTruncate) {
// If we have a Seal, Merge or Truncate Pending, flush everything until we reach that operation.
result = flushFully(timer); // depends on control dependency: [if], data = [none]
if (hasMerge) {
// If we have a merge, do it after we flush fully.
result = result.thenComposeAsync(flushResult -> mergeIfNecessary(flushResult, timer), this.executor); // depends on control dependency: [if], data = [none]
}
if (hasSeal) {
// If we have a seal, do it after every other operation.
result = result.thenComposeAsync(flushResult -> sealIfNecessary(flushResult, timer), this.executor); // depends on control dependency: [if], data = [none]
}
} else {
// Otherwise, just flush the excess as long as we have something to flush.
result = flushExcess(timer); // depends on control dependency: [if], data = [none]
}
if (log.isTraceEnabled()) {
result = result.thenApply(r -> {
LoggerHelpers.traceLeave(log, this.traceObjectId, "flushOnce", traceId, r); // depends on control dependency: [if], data = [none]
return r; // depends on control dependency: [if], data = [none]
});
}
return result;
} } |
public class class_name {
public ReplicationInstance withVpcSecurityGroups(VpcSecurityGroupMembership... vpcSecurityGroups) {
if (this.vpcSecurityGroups == null) {
setVpcSecurityGroups(new java.util.ArrayList<VpcSecurityGroupMembership>(vpcSecurityGroups.length));
}
for (VpcSecurityGroupMembership ele : vpcSecurityGroups) {
this.vpcSecurityGroups.add(ele);
}
return this;
} } | public class class_name {
public ReplicationInstance withVpcSecurityGroups(VpcSecurityGroupMembership... vpcSecurityGroups) {
if (this.vpcSecurityGroups == null) {
setVpcSecurityGroups(new java.util.ArrayList<VpcSecurityGroupMembership>(vpcSecurityGroups.length)); // depends on control dependency: [if], data = [none]
}
for (VpcSecurityGroupMembership ele : vpcSecurityGroups) {
this.vpcSecurityGroups.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
private String getFeatureString(Set<String> featureNames) {
StringBuilder featureBuilder = new StringBuilder();
if (featureNames != null) {
Iterator<String> iter = featureNames.iterator();
while (iter.hasNext()) {
featureBuilder.append(iter.next());
if (iter.hasNext())
featureBuilder.append(',');
}
}
return featureBuilder.toString();
} } | public class class_name {
private String getFeatureString(Set<String> featureNames) {
StringBuilder featureBuilder = new StringBuilder();
if (featureNames != null) {
Iterator<String> iter = featureNames.iterator();
while (iter.hasNext()) {
featureBuilder.append(iter.next()); // depends on control dependency: [while], data = [none]
if (iter.hasNext())
featureBuilder.append(',');
}
}
return featureBuilder.toString();
} } |
public class class_name {
public static boolean isSingleWord(String value) {
if (value == null) {
return false;
}
value = value.trim();
if (value.isEmpty()) {
return false;
}
return !SINGLE_WORD_PATTERN.matcher(value).matches();
} } | public class class_name {
public static boolean isSingleWord(String value) {
if (value == null) {
return false; // depends on control dependency: [if], data = [none]
}
value = value.trim();
if (value.isEmpty()) {
return false; // depends on control dependency: [if], data = [none]
}
return !SINGLE_WORD_PATTERN.matcher(value).matches();
} } |
public class class_name {
protected ActionFormProperty findProperty(String propertyName) {
final ActionFormProperty property = formMeta.getProperty(propertyName);
if (property == null) {
throwFormPropertyNotFoundException(propertyName);
}
return property;
} } | public class class_name {
protected ActionFormProperty findProperty(String propertyName) {
final ActionFormProperty property = formMeta.getProperty(propertyName);
if (property == null) {
throwFormPropertyNotFoundException(propertyName); // depends on control dependency: [if], data = [(property]
}
return property;
} } |
public class class_name {
private List<String> loadBalanceSuites(List<ForkedJvmInfo> jvmInfo,
TestsCollection testsCollection, List<SuiteBalancer> balancers) {
// Order test suites identically for balancers.
// and split into replicated and non-replicated suites.
Map<Boolean,List<String>> partitioned = sortAndSplitReplicated(testsCollection.testClasses);
Collection<String> replicated = partitioned.get(true);
Collection<String> suites = partitioned.get(false);
final List<SuiteBalancer> balancersWithFallback = new ArrayList<>(balancers);
balancersWithFallback.add(new RoundRobinBalancer());
// Go through all the balancers, the first one to assign a suite wins.
final List<String> remaining = new ArrayList<>(suites);
Collections.sort(remaining);
final Map<Integer,List<Assignment>> perJvmAssignments = new HashMap<>();
for (ForkedJvmInfo si : jvmInfo) {
perJvmAssignments.put(si.id, new ArrayList<Assignment>());
}
final int jvmCount = jvmInfo.size();
for (SuiteBalancer balancer : balancersWithFallback) {
balancer.setOwner(this);
final List<Assignment> assignments =
balancer.assign(
Collections.unmodifiableCollection(remaining), jvmCount, masterSeed());
for (Assignment e : assignments) {
if (e == null) {
throw new RuntimeException("Balancer must return non-null assignments.");
}
if (!remaining.remove(e.suiteName)) {
throw new RuntimeException("Balancer must return suite name as a key: " + e.suiteName);
}
log(String.format(Locale.ROOT,
"Assignment hint: J%-2d (cost %5d) %s (by %s)",
e.slaveId,
e.estimatedCost,
e.suiteName,
balancer.getClass().getSimpleName()), Project.MSG_VERBOSE);
perJvmAssignments.get(e.slaveId).add(e);
}
}
if (remaining.size() != 0) {
throw new RuntimeException("Not all suites assigned?: " + remaining);
}
if (shuffleOnSlave) {
// Shuffle suites on slaves so that the result is always the same wrt master seed
// (sort first, then shuffle with a constant seed).
for (List<Assignment> assignments : perJvmAssignments.values()) {
Collections.sort(assignments);
Collections.shuffle(assignments, new Random(this.masterSeed()));
}
}
// Take a fraction of suites scheduled as last on each slave and move them to a common
// job-stealing queue.
List<SuiteHint> stealingQueueWithHints = new ArrayList<>();
for (ForkedJvmInfo si : jvmInfo) {
final List<Assignment> assignments = perJvmAssignments.get(si.id);
int moveToCommon = (int) (assignments.size() * dynamicAssignmentRatio);
if (moveToCommon > 0) {
final List<Assignment> movedToCommon =
assignments.subList(assignments.size() - moveToCommon, assignments.size());
for (Assignment a : movedToCommon) {
stealingQueueWithHints.add(new SuiteHint(a.suiteName, a.estimatedCost));
}
movedToCommon.clear();
}
final ArrayList<String> slaveSuites = (si.testSuites = new ArrayList<>());
for (Assignment a : assignments) {
slaveSuites.add(a.suiteName);
}
}
// Sort stealing queue according to descending cost.
Collections.sort(stealingQueueWithHints, SuiteHint.DESCENDING_BY_WEIGHT);
// Append all replicated suites to each forked JVM, AFTER we process the stealing queue
// to enforce all replicated suites run on each bound JVM.
if (!replicated.isEmpty()) {
for (ForkedJvmInfo si : jvmInfo) {
for (String suite : replicated) {
si.testSuites.add(suite);
}
if (shuffleOnSlave) {
// Shuffle suites on slaves so that the result is always the same wrt master seed
// (sort first, then shuffle with a constant seed).
Collections.shuffle(si.testSuites, new Random(this.masterSeed()));
}
}
}
// Dump scheduling information.
for (ForkedJvmInfo si : jvmInfo) {
log("Forked JVM J" + si.id + " assignments (after shuffle):", Project.MSG_VERBOSE);
for (String suiteName : si.testSuites) {
log(" " + suiteName, Project.MSG_VERBOSE);
}
}
log("Stealing queue:", Project.MSG_VERBOSE);
for (SuiteHint suiteHint : stealingQueueWithHints) {
log(" " + suiteHint.suiteName + " " + suiteHint.cost, Project.MSG_VERBOSE);
}
List<String> stealingQueue = new ArrayList<>(stealingQueueWithHints.size());
for (SuiteHint suiteHint : stealingQueueWithHints) {
stealingQueue.add(suiteHint.suiteName);
}
return stealingQueue;
} } | public class class_name {
private List<String> loadBalanceSuites(List<ForkedJvmInfo> jvmInfo,
TestsCollection testsCollection, List<SuiteBalancer> balancers) {
// Order test suites identically for balancers.
// and split into replicated and non-replicated suites.
Map<Boolean,List<String>> partitioned = sortAndSplitReplicated(testsCollection.testClasses);
Collection<String> replicated = partitioned.get(true);
Collection<String> suites = partitioned.get(false);
final List<SuiteBalancer> balancersWithFallback = new ArrayList<>(balancers);
balancersWithFallback.add(new RoundRobinBalancer());
// Go through all the balancers, the first one to assign a suite wins.
final List<String> remaining = new ArrayList<>(suites);
Collections.sort(remaining);
final Map<Integer,List<Assignment>> perJvmAssignments = new HashMap<>();
for (ForkedJvmInfo si : jvmInfo) {
perJvmAssignments.put(si.id, new ArrayList<Assignment>()); // depends on control dependency: [for], data = [si]
}
final int jvmCount = jvmInfo.size();
for (SuiteBalancer balancer : balancersWithFallback) {
balancer.setOwner(this); // depends on control dependency: [for], data = [balancer]
final List<Assignment> assignments =
balancer.assign(
Collections.unmodifiableCollection(remaining), jvmCount, masterSeed());
for (Assignment e : assignments) {
if (e == null) {
throw new RuntimeException("Balancer must return non-null assignments.");
}
if (!remaining.remove(e.suiteName)) {
throw new RuntimeException("Balancer must return suite name as a key: " + e.suiteName);
}
log(String.format(Locale.ROOT,
"Assignment hint: J%-2d (cost %5d) %s (by %s)",
e.slaveId,
e.estimatedCost,
e.suiteName,
balancer.getClass().getSimpleName()), Project.MSG_VERBOSE); // depends on control dependency: [for], data = [e]
perJvmAssignments.get(e.slaveId).add(e); // depends on control dependency: [for], data = [e]
}
}
if (remaining.size() != 0) {
throw new RuntimeException("Not all suites assigned?: " + remaining);
}
if (shuffleOnSlave) {
// Shuffle suites on slaves so that the result is always the same wrt master seed
// (sort first, then shuffle with a constant seed).
for (List<Assignment> assignments : perJvmAssignments.values()) {
Collections.sort(assignments); // depends on control dependency: [for], data = [assignments]
Collections.shuffle(assignments, new Random(this.masterSeed())); // depends on control dependency: [for], data = [assignments]
}
}
// Take a fraction of suites scheduled as last on each slave and move them to a common
// job-stealing queue.
List<SuiteHint> stealingQueueWithHints = new ArrayList<>();
for (ForkedJvmInfo si : jvmInfo) {
final List<Assignment> assignments = perJvmAssignments.get(si.id);
int moveToCommon = (int) (assignments.size() * dynamicAssignmentRatio);
if (moveToCommon > 0) {
final List<Assignment> movedToCommon =
assignments.subList(assignments.size() - moveToCommon, assignments.size());
for (Assignment a : movedToCommon) {
stealingQueueWithHints.add(new SuiteHint(a.suiteName, a.estimatedCost)); // depends on control dependency: [for], data = [a]
}
movedToCommon.clear(); // depends on control dependency: [if], data = [none]
}
final ArrayList<String> slaveSuites = (si.testSuites = new ArrayList<>());
for (Assignment a : assignments) {
slaveSuites.add(a.suiteName); // depends on control dependency: [for], data = [a]
}
}
// Sort stealing queue according to descending cost.
Collections.sort(stealingQueueWithHints, SuiteHint.DESCENDING_BY_WEIGHT);
// Append all replicated suites to each forked JVM, AFTER we process the stealing queue
// to enforce all replicated suites run on each bound JVM.
if (!replicated.isEmpty()) {
for (ForkedJvmInfo si : jvmInfo) {
for (String suite : replicated) {
si.testSuites.add(suite); // depends on control dependency: [for], data = [suite]
}
if (shuffleOnSlave) {
// Shuffle suites on slaves so that the result is always the same wrt master seed
// (sort first, then shuffle with a constant seed).
Collections.shuffle(si.testSuites, new Random(this.masterSeed())); // depends on control dependency: [if], data = [none]
}
}
}
// Dump scheduling information.
for (ForkedJvmInfo si : jvmInfo) {
log("Forked JVM J" + si.id + " assignments (after shuffle):", Project.MSG_VERBOSE); // depends on control dependency: [for], data = [si]
for (String suiteName : si.testSuites) {
log(" " + suiteName, Project.MSG_VERBOSE); // depends on control dependency: [for], data = [suiteName]
}
}
log("Stealing queue:", Project.MSG_VERBOSE);
for (SuiteHint suiteHint : stealingQueueWithHints) {
log(" " + suiteHint.suiteName + " " + suiteHint.cost, Project.MSG_VERBOSE); // depends on control dependency: [for], data = [suiteHint]
}
List<String> stealingQueue = new ArrayList<>(stealingQueueWithHints.size());
for (SuiteHint suiteHint : stealingQueueWithHints) {
stealingQueue.add(suiteHint.suiteName); // depends on control dependency: [for], data = [suiteHint]
}
return stealingQueue;
} } |
public class class_name {
public static String getAdjustedSiteRoot(String siteRoot, String resourcename) {
if (resourcename.startsWith(CmsWorkplace.VFS_PATH_SYSTEM)
|| OpenCms.getSiteManager().startsWithShared(resourcename)
|| (resourcename.startsWith(CmsWorkplace.VFS_PATH_SITES) && !resourcename.startsWith(siteRoot))) {
return "";
} else {
return siteRoot;
}
} } | public class class_name {
public static String getAdjustedSiteRoot(String siteRoot, String resourcename) {
if (resourcename.startsWith(CmsWorkplace.VFS_PATH_SYSTEM)
|| OpenCms.getSiteManager().startsWithShared(resourcename)
|| (resourcename.startsWith(CmsWorkplace.VFS_PATH_SITES) && !resourcename.startsWith(siteRoot))) {
return ""; // depends on control dependency: [if], data = [none]
} else {
return siteRoot; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public IClientBroadcastStream newBroadcastStream(Number streamId) {
if (isValidStreamId(streamId)) {
// get ClientBroadcastStream defined as a prototype in red5-common.xml
ClientBroadcastStream cbs = (ClientBroadcastStream) scope.getContext().getBean("clientBroadcastStream");
customizeStream(streamId, cbs);
if (!registerStream(cbs)) {
cbs = null;
}
return cbs;
}
return null;
} } | public class class_name {
public IClientBroadcastStream newBroadcastStream(Number streamId) {
if (isValidStreamId(streamId)) {
// get ClientBroadcastStream defined as a prototype in red5-common.xml
ClientBroadcastStream cbs = (ClientBroadcastStream) scope.getContext().getBean("clientBroadcastStream");
customizeStream(streamId, cbs);
// depends on control dependency: [if], data = [none]
if (!registerStream(cbs)) {
cbs = null;
// depends on control dependency: [if], data = [none]
}
return cbs;
// depends on control dependency: [if], data = [none]
}
return null;
} } |
public class class_name {
public java.util.List<StackEvent> getStackEvents() {
if (stackEvents == null) {
stackEvents = new com.amazonaws.internal.SdkInternalList<StackEvent>();
}
return stackEvents;
} } | public class class_name {
public java.util.List<StackEvent> getStackEvents() {
if (stackEvents == null) {
stackEvents = new com.amazonaws.internal.SdkInternalList<StackEvent>(); // depends on control dependency: [if], data = [none]
}
return stackEvents;
} } |
public class class_name {
public static <T> int compareTo(Comparable<T> object1, T object2) {
if (object1 == object2) {
return 0;
}
if (object1 == null) {
return Integer.MIN_VALUE;
}
if (object2 == null) {
return Integer.MAX_VALUE;
}
assert object1 != null && object2 != null;
return object1.compareTo(object2);
} } | public class class_name {
public static <T> int compareTo(Comparable<T> object1, T object2) {
if (object1 == object2) {
return 0; // depends on control dependency: [if], data = [none]
}
if (object1 == null) {
return Integer.MIN_VALUE; // depends on control dependency: [if], data = [none]
}
if (object2 == null) {
return Integer.MAX_VALUE; // depends on control dependency: [if], data = [none]
}
assert object1 != null && object2 != null;
return object1.compareTo(object2);
} } |
public class class_name {
public void setValue(Change change, Object value) {
if ((value instanceof String) && (!"string".equals(dataType))) {
try {
switch (dataType) {
case "bigInteger":
value = new BigInteger((String) value);
break;
case "databaseFunction":
value = new DatabaseFunction((String) value);
break;
default:
throw new UnexpectedLiquibaseException("Unknown data type: " + dataType);
}
} catch (Exception e) {
throw new UnexpectedLiquibaseException("Cannot convert string value '" + value + "' to " +
dataType + ": " + e.getMessage());
}
}
try {
for (PropertyDescriptor descriptor : PropertyUtils.getInstance().getDescriptors(change.getClass())) {
if (descriptor.getDisplayName().equals(this.parameterName)) {
Method writeMethod = descriptor.getWriteMethod();
if (writeMethod == null) {
throw new UnexpectedLiquibaseException("Could not find writeMethod for " + this.parameterName);
}
Class<?> expectedWriteType = writeMethod.getParameterTypes()[0];
if ((value != null) && !expectedWriteType.isAssignableFrom(value.getClass())) {
if (expectedWriteType.equals(String.class)) {
value = value.toString();
} else {
throw new UnexpectedLiquibaseException(
"Could not convert " + value.getClass().getName() +
" to " +
expectedWriteType.getName()
);
}
}
writeMethod.invoke(change, value);
}
}
} catch (Exception e) {
throw new UnexpectedLiquibaseException("Error setting " + this.parameterName + " to " + value, e);
}
} } | public class class_name {
public void setValue(Change change, Object value) {
if ((value instanceof String) && (!"string".equals(dataType))) {
try {
switch (dataType) {
case "bigInteger":
value = new BigInteger((String) value);
break;
case "databaseFunction":
value = new DatabaseFunction((String) value);
break;
default:
throw new UnexpectedLiquibaseException("Unknown data type: " + dataType);
}
} catch (Exception e) {
throw new UnexpectedLiquibaseException("Cannot convert string value '" + value + "' to " +
dataType + ": " + e.getMessage());
} // depends on control dependency: [catch], data = [none]
}
try {
for (PropertyDescriptor descriptor : PropertyUtils.getInstance().getDescriptors(change.getClass())) {
if (descriptor.getDisplayName().equals(this.parameterName)) {
Method writeMethod = descriptor.getWriteMethod();
if (writeMethod == null) {
throw new UnexpectedLiquibaseException("Could not find writeMethod for " + this.parameterName);
}
Class<?> expectedWriteType = writeMethod.getParameterTypes()[0];
if ((value != null) && !expectedWriteType.isAssignableFrom(value.getClass())) {
if (expectedWriteType.equals(String.class)) {
value = value.toString(); // depends on control dependency: [if], data = [none]
} else {
throw new UnexpectedLiquibaseException(
"Could not convert " + value.getClass().getName() +
" to " +
expectedWriteType.getName()
);
}
}
writeMethod.invoke(change, value); // depends on control dependency: [if], data = [none]
}
}
} catch (Exception e) {
throw new UnexpectedLiquibaseException("Error setting " + this.parameterName + " to " + value, e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void setSourceIds(java.util.Collection<String> sourceIds) {
if (sourceIds == null) {
this.sourceIds = null;
return;
}
this.sourceIds = new java.util.ArrayList<String>(sourceIds);
} } | public class class_name {
public void setSourceIds(java.util.Collection<String> sourceIds) {
if (sourceIds == null) {
this.sourceIds = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.sourceIds = new java.util.ArrayList<String>(sourceIds);
} } |
public class class_name {
public static Map<String, String[]> params(){
SimpleHash params = new SimpleHash(RequestContext.getHttpRequest().getParameterMap());
if(getId() != null)
params.put("id", new String[]{getId()});
Map<String, String> userSegments = RequestContext.getRequestVo().getUserSegments();
for(String name:userSegments.keySet()){
params.put(name, new String[]{userSegments.get(name)});
}
return params;
} } | public class class_name {
public static Map<String, String[]> params(){
SimpleHash params = new SimpleHash(RequestContext.getHttpRequest().getParameterMap());
if(getId() != null)
params.put("id", new String[]{getId()});
Map<String, String> userSegments = RequestContext.getRequestVo().getUserSegments();
for(String name:userSegments.keySet()){
params.put(name, new String[]{userSegments.get(name)}); // depends on control dependency: [for], data = [name]
}
return params;
} } |
public class class_name {
public List<String> getBodyLanguages() {
Body defaultBody = getMessageBody(null);
List<String> languages = new ArrayList<String>();
for (Body body : getBodies()) {
if (!body.equals(defaultBody)) {
languages.add(body.language);
}
}
return Collections.unmodifiableList(languages);
} } | public class class_name {
public List<String> getBodyLanguages() {
Body defaultBody = getMessageBody(null);
List<String> languages = new ArrayList<String>();
for (Body body : getBodies()) {
if (!body.equals(defaultBody)) {
languages.add(body.language); // depends on control dependency: [if], data = [none]
}
}
return Collections.unmodifiableList(languages);
} } |
public class class_name {
public void setColor(@ColorInt int color) {
this.color = color;
if (!style.isFullyLoaded()) {
// We are in progress of loading a new style
return;
}
fillExtrusionLayer.setProperties(fillExtrusionColor(color));
} } | public class class_name {
public void setColor(@ColorInt int color) {
this.color = color;
if (!style.isFullyLoaded()) {
// We are in progress of loading a new style
return; // depends on control dependency: [if], data = [none]
}
fillExtrusionLayer.setProperties(fillExtrusionColor(color));
} } |
public class class_name {
    /**
     * Heuristically decides whether a line looks like a HIN-format atom record: it must
     * start with "atom " and end with a single bond-type letter (s, d, t or a), and its
     * space-separated token count must be even.
     *
     * @param lineNumber line number within the file (not used by this check)
     * @param line       the raw line to test
     * @return true when the line matches the HIN atom-record shape
     */
    @Override
    public boolean matches(int lineNumber, String line) {
        if (line.startsWith("atom ")
                && (line.endsWith(" s") || line.endsWith(" d") || line.endsWith(" t") || line.endsWith(" a"))) {
            StringTokenizer tokenizer = new StringTokenizer(line, " ");
            // NOTE(review): the code accepts an EVEN token count, but the original comment
            // claimed "odd number of values found, typical for HIN" — confirm which is intended.
            if ((tokenizer.countTokens() % 2) == 0) {
                return true;
            }
        }
        return false;
    } }
@Override
public boolean matches(int lineNumber, String line) {
if (line.startsWith("atom ")
&& (line.endsWith(" s") || line.endsWith(" d") || line.endsWith(" t") || line.endsWith(" a"))) {
StringTokenizer tokenizer = new StringTokenizer(line, " ");
if ((tokenizer.countTokens() % 2) == 0) {
// odd number of values found, typical for HIN
return true; // depends on control dependency: [if], data = [none]
}
}
return false;
} } |
public class class_name {
@Override
public int getIsotopeCount(IIsotope isotope) {
int count = 0;
Iterator<IMolecularFormula> componentIterator = components.iterator();
while (componentIterator.hasNext()) {
count += componentIterator.next().getIsotopeCount(isotope);
}
return count;
} } | public class class_name {
@Override
public int getIsotopeCount(IIsotope isotope) {
int count = 0;
Iterator<IMolecularFormula> componentIterator = components.iterator();
while (componentIterator.hasNext()) {
count += componentIterator.next().getIsotopeCount(isotope); // depends on control dependency: [while], data = [none]
}
return count;
} } |
public class class_name {
    /**
     * Dispatches a distributed request to the worker queue(s) selected by the task's
     * partition keys. Special key values: multiple keys (or a single -1) involve several
     * queues which are first synchronised via a latch; -2 picks any free queue (or the
     * first empty one, or queue 0); -3 routes to the lock worker; -4 routes to the
     * no-wait worker; any other single key maps to one specific queue.
     *
     * @param request                  the request to execute
     * @param waitForAcceptingRequests when true, blocks until this node is ready to
     *                                 accept requests before dispatching
     * @throws ODistributedException if the server is shutting down or the database is
     *                               being removed
     */
    public void processRequest(final ODistributedRequest request, final boolean waitForAcceptingRequests) {
        if (!running) {
            throw new ODistributedException("Server is going down or is removing the database:'" + getDatabaseName() + "' discarding");
        }
        final ORemoteTask task = request.getTask();
        if (waitForAcceptingRequests) {
            waitIsReady(task);
            // re-check after the wait: the node may have started shutting down meanwhile
            if (!running) {
                throw new ODistributedException("Server is going down or is removing the database:'" + getDatabaseName() + "' discarding");
            }
        }
        totalReceivedRequests.incrementAndGet();
        // final ODistributedMomentum lastMomentum = filterByMomentum.get();
        // if (lastMomentum != null && task instanceof OAbstractReplicatedTask) {
        //   final OLogSequenceNumber taskLastLSN = ((OAbstractReplicatedTask) task).getLastLSN();
        //
        //   final String sourceServer = manager.getNodeNameById(request.getId().getNodeId());
        //   final OLogSequenceNumber lastLSNFromMomentum = lastMomentum.getLSN(sourceServer);
        //
        //   if (taskLastLSN != null && lastLSNFromMomentum != null && taskLastLSN.compareTo(lastLSNFromMomentum) < 0) {
        //     // SKIP REQUEST BECAUSE CONTAINS AN OLD LSN
        //     final String msg = String.format("Skipped request %s on database '%s' because %s < current %s", request, databaseName,
        //         taskLastLSN, lastLSNFromMomentum);
        //     ODistributedServerLog.info(this, localNodeName, null, DIRECTION.NONE, msg);
        //     ODistributedWorker.sendResponseBack(this, manager, request, new ODistributedException(msg));
        //     return;
        //   }
        // }
        final int[] partitionKeys = task.getPartitionKey();
        if (ODistributedServerLog.isDebugEnabled())
            ODistributedServerLog
                .debug(this, localNodeName, task.getNodeSource(), DIRECTION.IN, "Request %s on database '%s' partitionKeys=%s task=%s",
                    request, databaseName, Arrays.toString(partitionKeys), task);
        if (partitionKeys.length > 1 || partitionKeys[0] == -1) {
            final Set<Integer> involvedWorkerQueues;
            if (partitionKeys.length > 1)
                involvedWorkerQueues = getInvolvedQueuesByPartitionKeys(partitionKeys);
            else
                // LOCK ALL THE QUEUES
                involvedWorkerQueues = ALL_QUEUES;
            // if (ODistributedServerLog.isDebugEnabled())
            ODistributedServerLog
                .debug(this, localNodeName, null, DIRECTION.NONE, "Request %s on database '%s' involvedQueues=%s", request, databaseName,
                    involvedWorkerQueues);
            if (involvedWorkerQueues.size() == 1)
                // JUST ONE QUEUE INVOLVED: PROCESS IT IMMEDIATELY
                processRequest(involvedWorkerQueues.iterator().next(), request);
            else {
                // INVOLVING MULTIPLE QUEUES
                // if (ODistributedServerLog.isDebugEnabled())
                ODistributedServerLog.debug(this, localNodeName, null, DIRECTION.NONE,
                    "Request %s on database '%s' waiting for all the previous requests to be completed", request, databaseName);
                // WAIT ALL THE INVOLVED QUEUES ARE FREE AND SYNCHRONIZED
                final CountDownLatch syncLatch = new CountDownLatch(involvedWorkerQueues.size());
                final ODistributedRequest syncRequest = new ODistributedRequest(null, request.getId().getNodeId(), -1, databaseName,
                    new OSynchronizedTaskWrapper(syncLatch));
                for (int queue : involvedWorkerQueues) {
                    ODistributedWorker worker = workerThreads.get(queue);
                    worker.processRequest(syncRequest);
                }
                // Make infinite timeout everytime
                long taskTimeout = 0;
                try {
                    if (taskTimeout <= 0)
                        syncLatch.await();
                    else {
                        // WAIT FOR COMPLETION. THE TIMEOUT IS MANAGED IN SMALLER CYCLES TO PROPERLY RECOGNIZE WHEN THE DB IS REMOVED
                        final long start = System.currentTimeMillis();
                        final long cycleTimeout = Math.min(taskTimeout, 2000);
                        boolean locked = false;
                        do {
                            if (syncLatch.await(cycleTimeout, TimeUnit.MILLISECONDS)) {
                                // DONE
                                locked = true;
                                break;
                            }
                            if (this.workerThreads.size() == 0)
                                // DATABASE WAS SHUTDOWN
                                break;
                        } while (System.currentTimeMillis() - start < taskTimeout);
                        if (!locked) {
                            final String msg = String.format(
                                "Cannot execute distributed request (%s) because all worker threads (%d) are busy (pending=%d timeout=%d)",
                                request, workerThreads.size(), syncLatch.getCount(), taskTimeout);
                            ODistributedWorker.sendResponseBack(this, manager, request, new ODistributedOperationException(msg));
                            return;
                        }
                    }
                } catch (InterruptedException e) {
                    // IGNORE
                    Thread.currentThread().interrupt();
                    final String msg = String
                        .format("Cannot execute distributed request (%s) because all worker threads (%d) are busy", request,
                            workerThreads.size());
                    ODistributedWorker.sendResponseBack(this, manager, request, new ODistributedOperationException(msg));
                    return;
                }
                // PUT THE TASK TO EXECUTE ONLY IN THE FIRST QUEUE AND PUT WAIT-FOR TASKS IN THE OTHERS. WHEN THE REAL TASK IS EXECUTED,
                // ALL THE OTHER TASKS WILL RETURN, SO THE QUEUES WILL BE BUSY DURING THE EXECUTION OF THE TASK. THIS AVOID CONCURRENT
                // EXECUTION FOR THE SAME PARTITION
                final CountDownLatch queueLatch = new CountDownLatch(1);
                int i = 0;
                for (int queue : involvedWorkerQueues) {
                    final ODistributedRequest req;
                    if (i++ == 0) {
                        // USE THE FIRST QUEUE TO PROCESS THE REQUEST
                        final String senderNodeName = manager.getNodeNameById(request.getId().getNodeId());
                        request.setTask(new OSynchronizedTaskWrapper(queueLatch, senderNodeName, task));
                        req = request;
                    } else
                        req = new ODistributedRequest(manager, request.getId().getNodeId(), -1, databaseName, new OWaitForTask(queueLatch));
                    workerThreads.get(queue).processRequest(req);
                }
            }
        } else if (partitionKeys.length == 1 && partitionKeys[0] == -2) {
            // ANY PARTITION: USE THE FIRST EMPTY IF ANY, OTHERWISE THE FIRST IN THE LIST
            boolean found = false;
            for (ODistributedWorker q : workerThreads) {
                if (q.isWaitingForNextRequest() && q.localQueue.isEmpty()) {
                    q.processRequest(request);
                    found = true;
                    break;
                }
            }
            if (!found)
                // ALL THE THREADS ARE BUSY, SELECT THE FIRST EMPTY ONE
                for (ODistributedWorker q : workerThreads) {
                    if (q.localQueue.isEmpty()) {
                        q.processRequest(request);
                        found = true;
                        break;
                    }
                }
            if (!found)
                // EXEC ON THE FIRST QUEUE
                workerThreads.get(0).processRequest(request);
        } else if (partitionKeys.length == 1 && partitionKeys[0] == -3) {
            // SERVICE - LOCK
            ODistributedServerLog.debug(this, localNodeName, request.getTask().getNodeSource(), DIRECTION.IN,
                "Request %s on database '%s' dispatched to the lock worker", request, databaseName);
            lockThread.processRequest(request);
        } else if (partitionKeys.length == 1 && partitionKeys[0] == -4) {
            // SERVICE - FAST_NOLOCK
            ODistributedServerLog.debug(this, localNodeName, request.getTask().getNodeSource(), DIRECTION.IN,
                "Request %s on database '%s' dispatched to the nowait worker", request, databaseName);
            nowaitThread.processRequest(request);
        } else {
            processRequest(partitionKeys[0], request);
        }
    } }
public void processRequest(final ODistributedRequest request, final boolean waitForAcceptingRequests) {
if (!running) {
throw new ODistributedException("Server is going down or is removing the database:'" + getDatabaseName() + "' discarding");
}
final ORemoteTask task = request.getTask();
if (waitForAcceptingRequests) {
waitIsReady(task); // depends on control dependency: [if], data = [none]
if (!running) {
throw new ODistributedException("Server is going down or is removing the database:'" + getDatabaseName() + "' discarding");
}
}
totalReceivedRequests.incrementAndGet();
// final ODistributedMomentum lastMomentum = filterByMomentum.get();
// if (lastMomentum != null && task instanceof OAbstractReplicatedTask) {
// final OLogSequenceNumber taskLastLSN = ((OAbstractReplicatedTask) task).getLastLSN();
//
// final String sourceServer = manager.getNodeNameById(request.getId().getNodeId());
// final OLogSequenceNumber lastLSNFromMomentum = lastMomentum.getLSN(sourceServer);
//
// if (taskLastLSN != null && lastLSNFromMomentum != null && taskLastLSN.compareTo(lastLSNFromMomentum) < 0) {
// // SKIP REQUEST BECAUSE CONTAINS AN OLD LSN
// final String msg = String.format("Skipped request %s on database '%s' because %s < current %s", request, databaseName,
// taskLastLSN, lastLSNFromMomentum);
// ODistributedServerLog.info(this, localNodeName, null, DIRECTION.NONE, msg);
// ODistributedWorker.sendResponseBack(this, manager, request, new ODistributedException(msg));
// return;
// }
// }
final int[] partitionKeys = task.getPartitionKey();
if (ODistributedServerLog.isDebugEnabled())
ODistributedServerLog
.debug(this, localNodeName, task.getNodeSource(), DIRECTION.IN, "Request %s on database '%s' partitionKeys=%s task=%s",
request, databaseName, Arrays.toString(partitionKeys), task);
if (partitionKeys.length > 1 || partitionKeys[0] == -1) {
final Set<Integer> involvedWorkerQueues;
if (partitionKeys.length > 1)
involvedWorkerQueues = getInvolvedQueuesByPartitionKeys(partitionKeys);
else
// LOCK ALL THE QUEUES
involvedWorkerQueues = ALL_QUEUES;
// if (ODistributedServerLog.isDebugEnabled())
ODistributedServerLog
.debug(this, localNodeName, null, DIRECTION.NONE, "Request %s on database '%s' involvedQueues=%s", request, databaseName,
involvedWorkerQueues);
if (involvedWorkerQueues.size() == 1)
// JUST ONE QUEUE INVOLVED: PROCESS IT IMMEDIATELY
processRequest(involvedWorkerQueues.iterator().next(), request);
else {
// INVOLVING MULTIPLE QUEUES
// if (ODistributedServerLog.isDebugEnabled())
ODistributedServerLog.debug(this, localNodeName, null, DIRECTION.NONE,
"Request %s on database '%s' waiting for all the previous requests to be completed", request, databaseName);
// WAIT ALL THE INVOLVED QUEUES ARE FREE AND SYNCHRONIZED
final CountDownLatch syncLatch = new CountDownLatch(involvedWorkerQueues.size());
final ODistributedRequest syncRequest = new ODistributedRequest(null, request.getId().getNodeId(), -1, databaseName,
new OSynchronizedTaskWrapper(syncLatch));
for (int queue : involvedWorkerQueues) {
ODistributedWorker worker = workerThreads.get(queue);
worker.processRequest(syncRequest);
}
// Make infinite timeout everytime
long taskTimeout = 0;
try {
if (taskTimeout <= 0)
syncLatch.await();
else {
// WAIT FOR COMPLETION. THE TIMEOUT IS MANAGED IN SMALLER CYCLES TO PROPERLY RECOGNIZE WHEN THE DB IS REMOVED
final long start = System.currentTimeMillis();
final long cycleTimeout = Math.min(taskTimeout, 2000);
boolean locked = false;
do {
if (syncLatch.await(cycleTimeout, TimeUnit.MILLISECONDS)) {
// DONE
locked = true;
break;
}
if (this.workerThreads.size() == 0)
// DATABASE WAS SHUTDOWN
break;
} while (System.currentTimeMillis() - start < taskTimeout);
if (!locked) {
final String msg = String.format(
"Cannot execute distributed request (%s) because all worker threads (%d) are busy (pending=%d timeout=%d)",
request, workerThreads.size(), syncLatch.getCount(), taskTimeout);
ODistributedWorker.sendResponseBack(this, manager, request, new ODistributedOperationException(msg));
return;
}
}
} catch (InterruptedException e) {
// IGNORE
Thread.currentThread().interrupt();
final String msg = String
.format("Cannot execute distributed request (%s) because all worker threads (%d) are busy", request,
workerThreads.size());
ODistributedWorker.sendResponseBack(this, manager, request, new ODistributedOperationException(msg));
return;
}
// PUT THE TASK TO EXECUTE ONLY IN THE FIRST QUEUE AND PUT WAIT-FOR TASKS IN THE OTHERS. WHEN THE REAL TASK IS EXECUTED,
// ALL THE OTHER TASKS WILL RETURN, SO THE QUEUES WILL BE BUSY DURING THE EXECUTION OF THE TASK. THIS AVOID CONCURRENT
// EXECUTION FOR THE SAME PARTITION
final CountDownLatch queueLatch = new CountDownLatch(1);
int i = 0;
for (int queue : involvedWorkerQueues) {
final ODistributedRequest req;
if (i++ == 0) {
// USE THE FIRST QUEUE TO PROCESS THE REQUEST
final String senderNodeName = manager.getNodeNameById(request.getId().getNodeId());
request.setTask(new OSynchronizedTaskWrapper(queueLatch, senderNodeName, task));
req = request;
} else
req = new ODistributedRequest(manager, request.getId().getNodeId(), -1, databaseName, new OWaitForTask(queueLatch));
workerThreads.get(queue).processRequest(req);
}
}
} else if (partitionKeys.length == 1 && partitionKeys[0] == -2) {
// ANY PARTITION: USE THE FIRST EMPTY IF ANY, OTHERWISE THE FIRST IN THE LIST
boolean found = false;
for (ODistributedWorker q : workerThreads) {
if (q.isWaitingForNextRequest() && q.localQueue.isEmpty()) {
q.processRequest(request);
found = true;
break;
}
}
if (!found)
// ALL THE THREADS ARE BUSY, SELECT THE FIRST EMPTY ONE
for (ODistributedWorker q : workerThreads) {
if (q.localQueue.isEmpty()) {
q.processRequest(request);
found = true;
break;
}
}
if (!found)
// EXEC ON THE FIRST QUEUE
workerThreads.get(0).processRequest(request);
} else if (partitionKeys.length == 1 && partitionKeys[0] == -3) {
// SERVICE - LOCK
ODistributedServerLog.debug(this, localNodeName, request.getTask().getNodeSource(), DIRECTION.IN,
"Request %s on database '%s' dispatched to the lock worker", request, databaseName);
lockThread.processRequest(request);
} else if (partitionKeys.length == 1 && partitionKeys[0] == -4) {
// SERVICE - FAST_NOLOCK
ODistributedServerLog.debug(this, localNodeName, request.getTask().getNodeSource(), DIRECTION.IN,
"Request %s on database '%s' dispatched to the nowait worker", request, databaseName);
nowaitThread.processRequest(request);
} else {
processRequest(partitionKeys[0], request);
}
} } |
public class class_name {
    /**
     * Creates a TempFileProvider rooted at TMP_ROOT/providerType. When cleanExisting is
     * true, any pre-existing root for that provider type is renamed and deleted in the
     * background on the given executor, so the returned provider starts from an empty
     * directory; failures to clean old content are only logged, never propagated.
     *
     * @param providerType  name of the provider subdirectory under TMP_ROOT
     * @param executor      executor used for background deletion of old content
     * @param cleanExisting whether to discard pre-existing content for this provider type
     * @return a provider backed by a fresh temp directory for {@code providerType}
     * @throws IOException if the fresh temp directory cannot be created
     */
    public static TempFileProvider create(final String providerType, final ScheduledExecutorService executor, final boolean cleanExisting) throws IOException {
        if (cleanExisting) {
            try {
                // The "clean existing" logic is as follows:
                // 1) Rename the root directory "foo" corresponding to the provider type to "bar"
                // 2) Submit a task to delete "bar" and its contents, in a background thread, to the the passed executor.
                // 3) Create a "foo" root directory for the provider type and return that TempFileProvider (while at the same time the background task is in progress)
                // This ensures that the "foo" root directory for the providerType is empty and the older content is being cleaned up in the background (without affecting the current processing),
                // thus simulating a "cleanup existing content"
                final File possiblyExistingProviderRoot = new File(TMP_ROOT, providerType);
                if (possiblyExistingProviderRoot.exists()) {
                    // rename it so that it can be deleted as a separate (background) task
                    final File toBeDeletedProviderRoot = new File(TMP_ROOT, createTempName(providerType + "-to-be-deleted-", ""));
                    final boolean renamed = possiblyExistingProviderRoot.renameTo(toBeDeletedProviderRoot);
                    if (!renamed) {
                        throw new IOException("Failed to rename " + possiblyExistingProviderRoot.getAbsolutePath() + " to " + toBeDeletedProviderRoot.getAbsolutePath());
                    } else {
                        // delete in the background
                        executor.submit(new DeleteTask(toBeDeletedProviderRoot, executor));
                    }
                }
            } catch (Throwable t) {
                // intentional best-effort: cleanup failure must not prevent creating a new provider
                // just log a message if existing contents couldn't be deleted
                VFSLogger.ROOT_LOGGER.failedToCleanExistingContentForTempFileProvider(providerType);
                // log the cause of the failure
                VFSLogger.ROOT_LOGGER.debug("Failed to clean existing content for temp file provider of type " + providerType, t);
            }
        }
        // now create and return the TempFileProvider for the providerType
        final File providerRoot = new File(TMP_ROOT, providerType);
        return new TempFileProvider(createTempDir(providerType, "", providerRoot), executor);
    } }
public static TempFileProvider create(final String providerType, final ScheduledExecutorService executor, final boolean cleanExisting) throws IOException {
if (cleanExisting) {
try {
// The "clean existing" logic is as follows:
// 1) Rename the root directory "foo" corresponding to the provider type to "bar"
// 2) Submit a task to delete "bar" and its contents, in a background thread, to the the passed executor.
// 3) Create a "foo" root directory for the provider type and return that TempFileProvider (while at the same time the background task is in progress)
// This ensures that the "foo" root directory for the providerType is empty and the older content is being cleaned up in the background (without affecting the current processing),
// thus simulating a "cleanup existing content"
final File possiblyExistingProviderRoot = new File(TMP_ROOT, providerType);
if (possiblyExistingProviderRoot.exists()) {
// rename it so that it can be deleted as a separate (background) task
final File toBeDeletedProviderRoot = new File(TMP_ROOT, createTempName(providerType + "-to-be-deleted-", ""));
final boolean renamed = possiblyExistingProviderRoot.renameTo(toBeDeletedProviderRoot);
if (!renamed) {
throw new IOException("Failed to rename " + possiblyExistingProviderRoot.getAbsolutePath() + " to " + toBeDeletedProviderRoot.getAbsolutePath());
} else {
// delete in the background
executor.submit(new DeleteTask(toBeDeletedProviderRoot, executor)); // depends on control dependency: [if], data = [none]
}
}
} catch (Throwable t) {
// just log a message if existing contents couldn't be deleted
VFSLogger.ROOT_LOGGER.failedToCleanExistingContentForTempFileProvider(providerType);
// log the cause of the failure
VFSLogger.ROOT_LOGGER.debug("Failed to clean existing content for temp file provider of type " + providerType, t);
} // depends on control dependency: [catch], data = [none]
}
// now create and return the TempFileProvider for the providerType
final File providerRoot = new File(TMP_ROOT, providerType);
return new TempFileProvider(createTempDir(providerType, "", providerRoot), executor);
} } |
public class class_name {
    /**
     * Copies the atom-atom mappings of {@code reaction} onto {@code copyOfReaction},
     * sorted by the global index of each mapping's first chem object within the copy,
     * and tags both ends of every mapping with a sequential ATOM_ATOM_MAPPING property.
     *
     * @param reaction       the source reaction whose mappings are cloned
     * @param copyOfReaction the target reaction that receives the sorted mappings
     * @param permutationMap per-container atom permutations applied when cloning
     */
    private void cloneAndSortMappings(IReaction reaction, IReaction copyOfReaction,
            Map<IAtomContainer, int[]> permutationMap) {
        // make a lookup for the indices of the atoms in the copy
        final Map<IChemObject, Integer> indexMap = new HashMap<IChemObject, Integer>();
        List<IAtomContainer> all = ReactionManipulator.getAllAtomContainers(copyOfReaction);
        int globalIndex = 0;
        for (IAtomContainer ac : all) {
            for (IAtom atom : ac.atoms()) {
                indexMap.put(atom, globalIndex);
                globalIndex++;
            }
        }
        Map<IAtom, IAtom> atomAtomMap = atomAtomMap(reaction, copyOfReaction, permutationMap);
        List<IMapping> map = cloneMappings(reaction, atomAtomMap);
        // orders mappings by the position of their first chem object in the copied reaction
        Comparator<IMapping> mappingSorter = new Comparator<IMapping>() {
            /**
             * {@inheritDoc}
             */
            @Override
            public int compare(IMapping o1, IMapping o2) {
                IChemObject o10 = o1.getChemObject(0);
                IChemObject o20 = o2.getChemObject(0);
                return indexMap.get(o10).compareTo(indexMap.get(o20));
            }
        };
        Collections.sort(map, mappingSorter);
        int mappingIndex = 0;
        for (IMapping mapping : map) {
            mapping.getChemObject(0).setProperty(CDKConstants.ATOM_ATOM_MAPPING, mappingIndex);
            mapping.getChemObject(1).setProperty(CDKConstants.ATOM_ATOM_MAPPING, mappingIndex);
            copyOfReaction.addMapping(mapping);
            mappingIndex++;
        }
    } }
private void cloneAndSortMappings(IReaction reaction, IReaction copyOfReaction,
Map<IAtomContainer, int[]> permutationMap) {
// make a lookup for the indices of the atoms in the copy
final Map<IChemObject, Integer> indexMap = new HashMap<IChemObject, Integer>();
List<IAtomContainer> all = ReactionManipulator.getAllAtomContainers(copyOfReaction);
int globalIndex = 0;
for (IAtomContainer ac : all) {
for (IAtom atom : ac.atoms()) {
indexMap.put(atom, globalIndex); // depends on control dependency: [for], data = [atom]
globalIndex++; // depends on control dependency: [for], data = [none]
}
}
Map<IAtom, IAtom> atomAtomMap = atomAtomMap(reaction, copyOfReaction, permutationMap);
List<IMapping> map = cloneMappings(reaction, atomAtomMap);
Comparator<IMapping> mappingSorter = new Comparator<IMapping>() {
/**
* {@inheritDoc}
*/
@Override
public int compare(IMapping o1, IMapping o2) {
IChemObject o10 = o1.getChemObject(0);
IChemObject o20 = o2.getChemObject(0);
return indexMap.get(o10).compareTo(indexMap.get(o20));
}
};
Collections.sort(map, mappingSorter);
int mappingIndex = 0;
for (IMapping mapping : map) {
mapping.getChemObject(0).setProperty(CDKConstants.ATOM_ATOM_MAPPING, mappingIndex); // depends on control dependency: [for], data = [mapping]
mapping.getChemObject(1).setProperty(CDKConstants.ATOM_ATOM_MAPPING, mappingIndex); // depends on control dependency: [for], data = [mapping]
copyOfReaction.addMapping(mapping); // depends on control dependency: [for], data = [mapping]
mappingIndex++; // depends on control dependency: [for], data = [mapping]
}
} } |
public class class_name {
    /**
     * 3D-places the longest unplaced chain hanging off each branch-start atom. For every
     * start atom, a seed of two already-placed neighbours plus the start atom is extended
     * by the longest unplaced chain; chains of at least four atoms are converted from a
     * Z-matrix to Cartesian coordinates and their side branches placed afterwards.
     * With no start atoms the molecule is a linear chain and nothing is done.
     *
     * @throws CDKException propagated from the underlying chain-placement steps
     */
    private void placeLinearChains3D(IAtomContainer molecule, IAtomContainer startAtoms, AtomPlacer3D ap3d,
            AtomTetrahedralLigandPlacer3D atlp3d, AtomPlacer atomPlacer) throws CDKException {
        //logger.debug("****** PLACE LINEAR CHAINS ******");
        IAtom dihPlacedAtom = null;
        IAtom thirdPlacedAtom = null;
        IAtomContainer longestUnplacedChain = molecule.getBuilder().newInstance(IAtomContainer.class);
        if (startAtoms.getAtomCount() == 0) {
            //no branch points ->linear chain
            //logger.debug("------ LINEAR CHAIN - FINISH ------");
        } else {
            for (int i = 0; i < startAtoms.getAtomCount(); i++) {
                //logger.debug("FOUND BRANCHED ALKAN");
                //logger.debug("Atom NOT NULL:" + molecule.indexOf(startAtoms.getAtomAt(i)));
                thirdPlacedAtom = ap3d.getPlacedHeavyAtom(molecule, startAtoms.getAtom(i));
                dihPlacedAtom = ap3d.getPlacedHeavyAtom(molecule, thirdPlacedAtom, startAtoms.getAtom(i));
                longestUnplacedChain.addAtom(dihPlacedAtom);
                longestUnplacedChain.addAtom(thirdPlacedAtom);
                longestUnplacedChain.addAtom(startAtoms.getAtom(i));
                longestUnplacedChain.add(atomPlacer.getLongestUnplacedChain(molecule, startAtoms.getAtom(i)));
                setAtomsToUnVisited(molecule);
                if (longestUnplacedChain.getAtomCount() < 4) {
                    //di,third,sec
                    //logger.debug("------ SINGLE BRANCH METHYLTYP ------");
                    //break;
                } else {
                    //logger.debug("LongestUnchainLength:"+longestUnplacedChain.getAtomCount());
                    ap3d.placeAliphaticHeavyChain(molecule, longestUnplacedChain);
                    ap3d.zmatrixChainToCartesian(molecule, true);
                    searchAndPlaceBranches(molecule, longestUnplacedChain, ap3d, atlp3d, atomPlacer);
                }
                // reset the working chain before handling the next branch start
                longestUnplacedChain.removeAllElements();
            }//for
        }
        //logger.debug("****** HANDLE ALIPHATICS END ******");
    } }
private void placeLinearChains3D(IAtomContainer molecule, IAtomContainer startAtoms, AtomPlacer3D ap3d,
AtomTetrahedralLigandPlacer3D atlp3d, AtomPlacer atomPlacer) throws CDKException {
//logger.debug("****** PLACE LINEAR CHAINS ******");
IAtom dihPlacedAtom = null;
IAtom thirdPlacedAtom = null;
IAtomContainer longestUnplacedChain = molecule.getBuilder().newInstance(IAtomContainer.class);
if (startAtoms.getAtomCount() == 0) {
//no branch points ->linear chain
//logger.debug("------ LINEAR CHAIN - FINISH ------");
} else {
for (int i = 0; i < startAtoms.getAtomCount(); i++) {
//logger.debug("FOUND BRANCHED ALKAN");
//logger.debug("Atom NOT NULL:" + molecule.indexOf(startAtoms.getAtomAt(i)));
thirdPlacedAtom = ap3d.getPlacedHeavyAtom(molecule, startAtoms.getAtom(i)); // depends on control dependency: [for], data = [i]
dihPlacedAtom = ap3d.getPlacedHeavyAtom(molecule, thirdPlacedAtom, startAtoms.getAtom(i)); // depends on control dependency: [for], data = [i]
longestUnplacedChain.addAtom(dihPlacedAtom); // depends on control dependency: [for], data = [none]
longestUnplacedChain.addAtom(thirdPlacedAtom); // depends on control dependency: [for], data = [none]
longestUnplacedChain.addAtom(startAtoms.getAtom(i)); // depends on control dependency: [for], data = [i]
longestUnplacedChain.add(atomPlacer.getLongestUnplacedChain(molecule, startAtoms.getAtom(i))); // depends on control dependency: [for], data = [i]
setAtomsToUnVisited(molecule); // depends on control dependency: [for], data = [none]
if (longestUnplacedChain.getAtomCount() < 4) {
//di,third,sec
//logger.debug("------ SINGLE BRANCH METHYLTYP ------");
//break;
} else {
//logger.debug("LongestUnchainLength:"+longestUnplacedChain.getAtomCount());
ap3d.placeAliphaticHeavyChain(molecule, longestUnplacedChain); // depends on control dependency: [if], data = [none]
ap3d.zmatrixChainToCartesian(molecule, true); // depends on control dependency: [if], data = [none]
searchAndPlaceBranches(molecule, longestUnplacedChain, ap3d, atlp3d, atomPlacer); // depends on control dependency: [if], data = [none]
}
longestUnplacedChain.removeAllElements(); // depends on control dependency: [for], data = [none]
}//for
}
//logger.debug("****** HANDLE ALIPHATICS END ******");
} } |
public class class_name {
public void addJar(Path jar) {
if (jar == null) {
throw new IllegalArgumentException();
}
if (!userJars.contains(jar)) {
userJars.add(jar);
}
} } | public class class_name {
public void addJar(Path jar) {
if (jar == null) {
throw new IllegalArgumentException();
}
if (!userJars.contains(jar)) {
userJars.add(jar); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
protected String renderLink(String fullPath) {
StringBuilder sb = new StringBuilder();
// displayAlternateStyles
if (alternate) {
sb.append(PRE_TAG_ALTERNATE);
} else {
sb.append(PRE_TAG);
}
sb.append(media);
if (StringUtils.isNotEmpty(title)) {
sb.append(TITLE_PREFIX_TAG).append(title);
}
sb.append(MID_TAG).append(fullPath).append(closingFlavor);
return sb.toString();
} } | public class class_name {
@Override
protected String renderLink(String fullPath) {
StringBuilder sb = new StringBuilder();
// displayAlternateStyles
if (alternate) {
sb.append(PRE_TAG_ALTERNATE); // depends on control dependency: [if], data = [none]
} else {
sb.append(PRE_TAG); // depends on control dependency: [if], data = [none]
}
sb.append(media);
if (StringUtils.isNotEmpty(title)) {
sb.append(TITLE_PREFIX_TAG).append(title); // depends on control dependency: [if], data = [none]
}
sb.append(MID_TAG).append(fullPath).append(closingFlavor);
return sb.toString();
} } |
public class class_name {
protected void removeExpired(Map<Integer, Context> contexts) {
Iterator<Context> contextsIter = contexts.values().iterator();
while (contextsIter.hasNext()) {
Context context = contextsIter.next();
if (context.completeMoment != null
&& System.currentTimeMillis() - context.completeMoment > contextTimeout) {
contextsIter.remove();
}
}
} } | public class class_name {
protected void removeExpired(Map<Integer, Context> contexts) {
Iterator<Context> contextsIter = contexts.values().iterator();
while (contextsIter.hasNext()) {
Context context = contextsIter.next();
if (context.completeMoment != null
&& System.currentTimeMillis() - context.completeMoment > contextTimeout) {
contextsIter.remove(); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public boolean isTransient() {
Transient ann = getAnnotation(Transient.class);
if(ann != null){
return true;
}
if(field != null){
return Modifier.isTransient(field.getModifiers());
}
return false;
} } | public class class_name {
public boolean isTransient() {
Transient ann = getAnnotation(Transient.class);
if(ann != null){
return true;
// depends on control dependency: [if], data = [none]
}
if(field != null){
return Modifier.isTransient(field.getModifiers());
// depends on control dependency: [if], data = [(field]
}
return false;
} } |
public class class_name {
    /**
     * Adds {@code newNode} under {@code parentINode} at {@code childIndex} while holding
     * the directory write lock, caches the new node's name, and — for files — registers
     * every block in the namesystem's blocks map. Does NOT update space counts for
     * parents (see inline note).
     *
     * @return the new parent directory, or {@code null} when the path cannot be
     *         resolved or the parent lookup fails
     */
    INodeDirectory addToParent( byte[] src,
            INodeDirectory parentINode,
            INode newNode,
            boolean propagateModTime,
            int childIndex) {
        // NOTE: This does not update space counts for parents
        // add new node to the parent
        INodeDirectory newParent = null;
        writeLock();
        try {
            try {
                newParent = rootDir.addToParent(src, newNode, parentINode,
                        false, propagateModTime, childIndex);
                cacheName(newNode);
            } catch (FileNotFoundException e) {
                // path could not be resolved — signal failure to the caller
                return null;
            }
            if(newParent == null)
                return null;
            if(!newNode.isDirectory()) {
                // Add block->file mapping
                INodeFile newF = (INodeFile)newNode;
                BlockInfo[] blocks = newF.getBlocks();
                for (int i = 0; i < blocks.length; i++) {
                    newF.setBlock(i, getFSNamesystem().blocksMap.addINodeForLoading(blocks[i], newF));
                }
            }
        } finally {
            // always release the write lock, even on early return
            writeUnlock();
        }
        return newParent;
    } }
INodeDirectory addToParent( byte[] src,
INodeDirectory parentINode,
INode newNode,
boolean propagateModTime,
int childIndex) {
// NOTE: This does not update space counts for parents
// add new node to the parent
INodeDirectory newParent = null;
writeLock();
try {
try {
newParent = rootDir.addToParent(src, newNode, parentINode,
false, propagateModTime, childIndex); // depends on control dependency: [try], data = [none]
cacheName(newNode); // depends on control dependency: [try], data = [none]
} catch (FileNotFoundException e) {
return null;
} // depends on control dependency: [catch], data = [none]
if(newParent == null)
return null;
if(!newNode.isDirectory()) {
// Add block->file mapping
INodeFile newF = (INodeFile)newNode;
BlockInfo[] blocks = newF.getBlocks();
for (int i = 0; i < blocks.length; i++) {
newF.setBlock(i, getFSNamesystem().blocksMap.addINodeForLoading(blocks[i], newF)); // depends on control dependency: [for], data = [i]
}
}
} finally {
writeUnlock();
}
return newParent;
} } |
public class class_name {
public WriteCampaignRequest withAdditionalTreatments(WriteTreatmentResource... additionalTreatments) {
if (this.additionalTreatments == null) {
setAdditionalTreatments(new java.util.ArrayList<WriteTreatmentResource>(additionalTreatments.length));
}
for (WriteTreatmentResource ele : additionalTreatments) {
this.additionalTreatments.add(ele);
}
return this;
} } | public class class_name {
public WriteCampaignRequest withAdditionalTreatments(WriteTreatmentResource... additionalTreatments) {
if (this.additionalTreatments == null) {
setAdditionalTreatments(new java.util.ArrayList<WriteTreatmentResource>(additionalTreatments.length)); // depends on control dependency: [if], data = [none]
}
for (WriteTreatmentResource ele : additionalTreatments) {
this.additionalTreatments.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
    /**
     * Replaces occurrences matched by {@code UNICODE_PATTERN} (presumably an
     * escape of the form {@code \\uXXXX} with the hex digits in group 1 —
     * TODO confirm against the pattern's definition) with the corresponding
     * character. A match immediately preceded by the control character
     * {@code '\u0003'} is treated as escaped: the marker is dropped and the
     * matched text is kept literally.
     *
     * @param s input string; returned unchanged when null or empty
     * @return the decoded string
     */
    public static String decodeUnicodeString(String s) {
        if (Strings.isNullOrEmpty(s)) return s;
        int length = s.length();
        StringBuilder sb = new StringBuilder(length);
        Matcher m = UNICODE_PATTERN.matcher(s);
        // Index of the first character not yet copied into the output.
        int begin = 0;
        while (m.find()) {
            int start = m.start();
            if (start > 0 && s.charAt(start - 1) == '\u0003') {
                // Escaped match: copy everything up to (but excluding) the
                // \u0003 marker, then leave the matched text to be copied
                // verbatim by a later append.
                if (start - 1 > begin) {
                    sb.append(s, begin, start - 1);
                }
                begin = start;
                continue;
            }
            sb.append(s, begin, start);
            String mcStr = m.group(1);
            try {
                // Hex digits -> UTF-16 code unit.
                char charValue = (char) Integer.parseInt(mcStr, 16);
                sb.append(charValue);
                begin = m.end();
            } catch (NumberFormatException e) {
                // NOTE(review): failure is reported to stdout and 'begin' is not
                // advanced, so the raw escape text ends up in the output — looks
                // like a deliberate best-effort fallback, but consider a logger.
                System.out.println(e.getMessage());
            }
        }
        // Copy the tail after the last match.
        if (begin < length) {
            sb.append(s, begin, length);
        }
        return sb.toString();
    }
}
public static String decodeUnicodeString(String s) {
if (Strings.isNullOrEmpty(s)) return s;
int length = s.length();
StringBuilder sb = new StringBuilder(length);
Matcher m = UNICODE_PATTERN.matcher(s);
int begin = 0;
while (m.find()) {
int start = m.start();
if (start > 0 && s.charAt(start - 1) == '\u0003') {
if (start - 1 > begin) {
sb.append(s, begin, start - 1); // depends on control dependency: [if], data = [none]
}
begin = start; // depends on control dependency: [if], data = [none]
continue;
}
sb.append(s, begin, start); // depends on control dependency: [while], data = [none]
String mcStr = m.group(1);
try {
char charValue = (char) Integer.parseInt(mcStr, 16);
sb.append(charValue); // depends on control dependency: [try], data = [none]
begin = m.end(); // depends on control dependency: [try], data = [none]
} catch (NumberFormatException e) {
System.out.println(e.getMessage());
} // depends on control dependency: [catch], data = [none]
}
if (begin < length) {
sb.append(s, begin, length); // depends on control dependency: [if], data = [length)]
}
return sb.toString();
} } |
public class class_name {
public static String getContext(final HttpServletRequest request) {
String context = request.getContextPath();
if ((context == null) || (context.equals("."))) {
context = "";
}
return context;
} } | public class class_name {
public static String getContext(final HttpServletRequest request) {
String context = request.getContextPath();
if ((context == null) || (context.equals("."))) {
context = ""; // depends on control dependency: [if], data = [none]
}
return context;
} } |
public class class_name {
@Override
public boolean onTap(LatLong tapLatLong, Point layerXY, Point tapXY) {
this.expanded = !this.expanded;
double centerX = layerXY.x + this.getHorizontalOffset();
double centerY = layerXY.y + this.getVerticalOffset();
double radiusX = this.getBitmap().getWidth() / 2;
double radiusY = this.getBitmap().getHeight() / 2;
double distX = Math.abs(centerX - tapXY.x);
double distY = Math.abs(centerY - tapXY.y);
if (distX < radiusX && distY < radiusY) {
if (this.expanded) {
// remove all child markers
for (Layer elt : this.layers) {
if (elt instanceof ChildMarker) {
this.layers.remove(elt);
}
}
// begin with (n). than the child marker will be over the line.
int i = this.children.size();
for (ChildMarker marker : this.children) {
marker.init(i, getBitmap(), getHorizontalOffset(), getVerticalOffset());
// add child to layer
this.layers.add(marker);
i--;
}
} else {
// remove all child layers
for (ChildMarker childMarker : this.children) {
this.layers.remove(childMarker);
}
}
return true;
}
return false;
} } | public class class_name {
@Override
public boolean onTap(LatLong tapLatLong, Point layerXY, Point tapXY) {
this.expanded = !this.expanded;
double centerX = layerXY.x + this.getHorizontalOffset();
double centerY = layerXY.y + this.getVerticalOffset();
double radiusX = this.getBitmap().getWidth() / 2;
double radiusY = this.getBitmap().getHeight() / 2;
double distX = Math.abs(centerX - tapXY.x);
double distY = Math.abs(centerY - tapXY.y);
if (distX < radiusX && distY < radiusY) {
if (this.expanded) {
// remove all child markers
for (Layer elt : this.layers) {
if (elt instanceof ChildMarker) {
this.layers.remove(elt); // depends on control dependency: [if], data = [none]
}
}
// begin with (n). than the child marker will be over the line.
int i = this.children.size();
for (ChildMarker marker : this.children) {
marker.init(i, getBitmap(), getHorizontalOffset(), getVerticalOffset()); // depends on control dependency: [for], data = [marker]
// add child to layer
this.layers.add(marker); // depends on control dependency: [for], data = [marker]
i--; // depends on control dependency: [for], data = [none]
}
} else {
// remove all child layers
for (ChildMarker childMarker : this.children) {
this.layers.remove(childMarker); // depends on control dependency: [for], data = [childMarker]
}
}
return true; // depends on control dependency: [if], data = [none]
}
return false;
} } |
public class class_name {
@Nonnull
public static UpdateCenter createUpdateCenter(@CheckForNull UpdateCenterConfiguration config) {
String requiredClassName = SystemProperties.getString(UpdateCenter.class.getName()+".className", null);
if (requiredClassName == null) {
// Use the default Update Center
LOGGER.log(Level.FINE, "Using the default Update Center implementation");
return createDefaultUpdateCenter(config);
}
LOGGER.log(Level.FINE, "Using the custom update center: {0}", requiredClassName);
try {
final Class<?> clazz = Class.forName(requiredClassName).asSubclass(UpdateCenter.class);
if (!UpdateCenter.class.isAssignableFrom(clazz)) {
LOGGER.log(Level.SEVERE, "The specified custom Update Center {0} is not an instance of {1}. Falling back to default.",
new Object[] {requiredClassName, UpdateCenter.class.getName()});
return createDefaultUpdateCenter(config);
}
final Class<? extends UpdateCenter> ucClazz = clazz.asSubclass(UpdateCenter.class);
final Constructor<? extends UpdateCenter> defaultConstructor = ucClazz.getConstructor();
final Constructor<? extends UpdateCenter> configConstructor = ucClazz.getConstructor(UpdateCenterConfiguration.class);
LOGGER.log(Level.FINE, "Using the constructor {0} Update Center configuration for {1}",
new Object[] {config != null ? "with" : "without", requiredClassName});
return config != null ? configConstructor.newInstance(config) : defaultConstructor.newInstance();
} catch(ClassCastException e) {
// Should never happen
LOGGER.log(WARNING, "UpdateCenter class {0} does not extend hudson.model.UpdateCenter. Using default.", requiredClassName);
} catch(NoSuchMethodException e) {
LOGGER.log(WARNING, String.format("UpdateCenter class %s does not define one of the required constructors. Using default", requiredClassName), e);
} catch(Exception e) {
LOGGER.log(WARNING, String.format("Unable to instantiate custom plugin manager [%s]. Using default.", requiredClassName), e);
}
return createDefaultUpdateCenter(config);
} } | public class class_name {
@Nonnull
public static UpdateCenter createUpdateCenter(@CheckForNull UpdateCenterConfiguration config) {
String requiredClassName = SystemProperties.getString(UpdateCenter.class.getName()+".className", null);
if (requiredClassName == null) {
// Use the default Update Center
LOGGER.log(Level.FINE, "Using the default Update Center implementation"); // depends on control dependency: [if], data = [none]
return createDefaultUpdateCenter(config); // depends on control dependency: [if], data = [none]
}
LOGGER.log(Level.FINE, "Using the custom update center: {0}", requiredClassName);
try {
final Class<?> clazz = Class.forName(requiredClassName).asSubclass(UpdateCenter.class);
if (!UpdateCenter.class.isAssignableFrom(clazz)) {
LOGGER.log(Level.SEVERE, "The specified custom Update Center {0} is not an instance of {1}. Falling back to default.",
new Object[] {requiredClassName, UpdateCenter.class.getName()}); // depends on control dependency: [if], data = [none]
return createDefaultUpdateCenter(config); // depends on control dependency: [if], data = [none]
}
final Class<? extends UpdateCenter> ucClazz = clazz.asSubclass(UpdateCenter.class);
final Constructor<? extends UpdateCenter> defaultConstructor = ucClazz.getConstructor();
final Constructor<? extends UpdateCenter> configConstructor = ucClazz.getConstructor(UpdateCenterConfiguration.class);
LOGGER.log(Level.FINE, "Using the constructor {0} Update Center configuration for {1}",
new Object[] {config != null ? "with" : "without", requiredClassName}); // depends on control dependency: [try], data = [none]
return config != null ? configConstructor.newInstance(config) : defaultConstructor.newInstance(); // depends on control dependency: [try], data = [none]
} catch(ClassCastException e) {
// Should never happen
LOGGER.log(WARNING, "UpdateCenter class {0} does not extend hudson.model.UpdateCenter. Using default.", requiredClassName);
} catch(NoSuchMethodException e) { // depends on control dependency: [catch], data = [none]
LOGGER.log(WARNING, String.format("UpdateCenter class %s does not define one of the required constructors. Using default", requiredClassName), e);
} catch(Exception e) { // depends on control dependency: [catch], data = [none]
LOGGER.log(WARNING, String.format("Unable to instantiate custom plugin manager [%s]. Using default.", requiredClassName), e);
} // depends on control dependency: [catch], data = [none]
return createDefaultUpdateCenter(config);
} } |
public class class_name {
    /**
     * Tells whether the given string parses as a signed 64-bit integer.
     *
     * @param value candidate string; {@code null} yields {@code false}
     * @return {@code true} iff {@link Long#parseLong(String)} accepts the input
     */
    public static boolean isLongValue(final String value) {
        try {
            Long.parseLong(value);
        } catch (final NumberFormatException ignored) {
            // Not parsable (parseLong also reports null input this way).
            return false;
        }
        return true;
    }
}
public static boolean isLongValue(final String value) {
try {
Long.parseLong(value); // depends on control dependency: [try], data = [none]
return true; // depends on control dependency: [try], data = [none]
} catch (final NumberFormatException ex) {
return false;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public Object setEL(PageContext pc, Collection.Key name, Object value) {
try {
return set(pc, name, value);
}
catch (PageException e) {
return null;
}
} } | public class class_name {
@Override
public Object setEL(PageContext pc, Collection.Key name, Object value) {
try {
return set(pc, name, value); // depends on control dependency: [try], data = [none]
}
catch (PageException e) {
return null;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
    /**
     * Handles a PUBREL in the QoS2 flow: looks up the publish stored at
     * PUBLISH time, fans it out to subscribers, drops the stored copy,
     * persists it as retained when flagged, and answers with PUBCOMP.
     */
    void processPubRel(String clientID, int messageID) {
        Log.trace("PUB --PUBREL--> SRV processPubRel invoked for clientID {} ad messageID {}", clientID, messageID);
        // Key under which the inflight QoS2 publish was stored: clientID + messageID.
        String publishKey = String.format("%s%d", clientID, messageID);
        PublishEvent evt = m_storageService.retrieveQoS2Message(publishKey);
        // NOTE(review): evt is dereferenced unconditionally — presumably a PUBREL
        // only arrives after the matching PUBLISH was stored; confirm, else NPE.
        final String topic = evt.getTopic();
        final AbstractMessage.QOSType qos = evt.getQos();
        // Deliver to subscribers before discarding the stored message.
        publish2Subscribers(topic, qos, evt.getMessage(), evt.isRetain(), evt.getMessageID());
        m_storageService.removeQoS2Message(publishKey);
        if (evt.isRetain()) {
            // Keep as the topic's retained message for future subscribers.
            m_storageService.storeRetained(topic, evt.getMessage(), qos);
        }
        // Complete the QoS2 handshake.
        sendPubComp(clientID, messageID);
    }
}
void processPubRel(String clientID, int messageID) {
Log.trace("PUB --PUBREL--> SRV processPubRel invoked for clientID {} ad messageID {}", clientID, messageID);
String publishKey = String.format("%s%d", clientID, messageID);
PublishEvent evt = m_storageService.retrieveQoS2Message(publishKey);
final String topic = evt.getTopic();
final AbstractMessage.QOSType qos = evt.getQos();
publish2Subscribers(topic, qos, evt.getMessage(), evt.isRetain(), evt.getMessageID());
m_storageService.removeQoS2Message(publishKey);
if (evt.isRetain()) {
m_storageService.storeRetained(topic, evt.getMessage(), qos); // depends on control dependency: [if], data = [none]
}
sendPubComp(clientID, messageID);
} } |
public class class_name {
private PathMacros createPathMacroInstance() {
try {
return ClassUtil.newInstance(actionsManager.getPathMacroClass());
} catch (Exception ex) {
throw new MadvocException(ex);
}
} } | public class class_name {
private PathMacros createPathMacroInstance() {
try {
return ClassUtil.newInstance(actionsManager.getPathMacroClass()); // depends on control dependency: [try], data = [none]
} catch (Exception ex) {
throw new MadvocException(ex);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void marshall(SigningMaterial signingMaterial, ProtocolMarshaller protocolMarshaller) {
if (signingMaterial == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(signingMaterial.getCertificateArn(), CERTIFICATEARN_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(SigningMaterial signingMaterial, ProtocolMarshaller protocolMarshaller) {
if (signingMaterial == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(signingMaterial.getCertificateArn(), CERTIFICATEARN_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public CmsFileExplorerSettings getTableSettings() {
CmsFileExplorerSettings fileTableState = new CmsFileExplorerSettings();
fileTableState.setSortAscending(m_fileTable.isSortAscending());
fileTableState.setSortColumnId((CmsResourceTableProperty)m_fileTable.getSortContainerPropertyId());
List<CmsResourceTableProperty> collapsedCollumns = new ArrayList<CmsResourceTableProperty>();
Object[] visibleCols = m_fileTable.getVisibleColumns();
for (int i = 0; i < visibleCols.length; i++) {
if (m_fileTable.isColumnCollapsed(visibleCols[i])) {
collapsedCollumns.add((CmsResourceTableProperty)visibleCols[i]);
}
}
fileTableState.setCollapsedColumns(collapsedCollumns);
return fileTableState;
} } | public class class_name {
public CmsFileExplorerSettings getTableSettings() {
CmsFileExplorerSettings fileTableState = new CmsFileExplorerSettings();
fileTableState.setSortAscending(m_fileTable.isSortAscending());
fileTableState.setSortColumnId((CmsResourceTableProperty)m_fileTable.getSortContainerPropertyId());
List<CmsResourceTableProperty> collapsedCollumns = new ArrayList<CmsResourceTableProperty>();
Object[] visibleCols = m_fileTable.getVisibleColumns();
for (int i = 0; i < visibleCols.length; i++) {
if (m_fileTable.isColumnCollapsed(visibleCols[i])) {
collapsedCollumns.add((CmsResourceTableProperty)visibleCols[i]); // depends on control dependency: [if], data = [none]
}
}
fileTableState.setCollapsedColumns(collapsedCollumns);
return fileTableState;
} } |
public class class_name {
public static GrayU8 gaussian(GrayU8 input, @Nullable GrayU8 output, double sigma , int radius,
@Nullable GrayU8 storage ) {
output = InputSanityCheck.checkDeclare(input,output);
storage = InputSanityCheck.checkDeclare(input,storage,GrayU8.class);
boolean processed = BOverrideBlurImageOps.invokeNativeGaussian(input, output, sigma,radius, storage);
if( !processed ) {
Kernel1D_S32 kernel = FactoryKernelGaussian.gaussian(Kernel1D_S32.class, sigma, radius);
ConvolveImageNormalized.horizontal(kernel, input, storage);
ConvolveImageNormalized.vertical(kernel, storage, output);
}
return output;
} } | public class class_name {
public static GrayU8 gaussian(GrayU8 input, @Nullable GrayU8 output, double sigma , int radius,
@Nullable GrayU8 storage ) {
output = InputSanityCheck.checkDeclare(input,output);
storage = InputSanityCheck.checkDeclare(input,storage,GrayU8.class);
boolean processed = BOverrideBlurImageOps.invokeNativeGaussian(input, output, sigma,radius, storage);
if( !processed ) {
Kernel1D_S32 kernel = FactoryKernelGaussian.gaussian(Kernel1D_S32.class, sigma, radius);
ConvolveImageNormalized.horizontal(kernel, input, storage); // depends on control dependency: [if], data = [none]
ConvolveImageNormalized.vertical(kernel, storage, output); // depends on control dependency: [if], data = [none]
}
return output;
} } |
public class class_name {
public EEnum getFNCFNMRGLen() {
if (fncfnmrgLenEEnum == null) {
fncfnmrgLenEEnum = (EEnum)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(28);
}
return fncfnmrgLenEEnum;
} } | public class class_name {
public EEnum getFNCFNMRGLen() {
if (fncfnmrgLenEEnum == null) {
fncfnmrgLenEEnum = (EEnum)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(28); // depends on control dependency: [if], data = [none]
}
return fncfnmrgLenEEnum;
} } |
public class class_name {
private static String getToken(URI blobId) {
String[] parts = blobId.getSchemeSpecificPart().split("/");
if (parts.length == 2) {
return parts[1];
} else if (parts.length == 4) {
return parts[1] + "+" + uriDecode(parts[2]) + "+"
+ uriDecode(parts[3]);
} else {
throw new IllegalArgumentException("Malformed token-as-blobId: "
+ blobId);
}
} } | public class class_name {
private static String getToken(URI blobId) {
String[] parts = blobId.getSchemeSpecificPart().split("/");
if (parts.length == 2) {
return parts[1]; // depends on control dependency: [if], data = [none]
} else if (parts.length == 4) {
return parts[1] + "+" + uriDecode(parts[2]) + "+"
+ uriDecode(parts[3]); // depends on control dependency: [if], data = [none]
} else {
throw new IllegalArgumentException("Malformed token-as-blobId: "
+ blobId);
}
} } |
public class class_name {
@Override
public Optional<DigestCredentials> extract(WebContext context) {
final Optional<TokenCredentials> credentials = this.extractor.extract(context);
if (!credentials.isPresent()) {
return Optional.empty();
}
String token = credentials.get().getToken();
Map<String, String> valueMap = parseTokenValue(token);
String username = valueMap.get("username");
String response = valueMap.get("response");
if (CommonHelper.isBlank(username) || CommonHelper.isBlank(response)) {
throw new CredentialsException("Bad format of the digest auth header");
}
String realm = valueMap.get("realm");
String nonce = valueMap.get("nonce");
String uri = valueMap.get("uri");
String cnonce = valueMap.get("cnonce");
String nc = valueMap.get("nc");
String qop = valueMap.get("qop");
String method = context.getRequestMethod();
return Optional.of(new DigestCredentials(response, method, username, realm, nonce, uri, cnonce, nc, qop));
} } | public class class_name {
@Override
public Optional<DigestCredentials> extract(WebContext context) {
final Optional<TokenCredentials> credentials = this.extractor.extract(context);
if (!credentials.isPresent()) {
return Optional.empty(); // depends on control dependency: [if], data = [none]
}
String token = credentials.get().getToken();
Map<String, String> valueMap = parseTokenValue(token);
String username = valueMap.get("username");
String response = valueMap.get("response");
if (CommonHelper.isBlank(username) || CommonHelper.isBlank(response)) {
throw new CredentialsException("Bad format of the digest auth header");
}
String realm = valueMap.get("realm");
String nonce = valueMap.get("nonce");
String uri = valueMap.get("uri");
String cnonce = valueMap.get("cnonce");
String nc = valueMap.get("nc");
String qop = valueMap.get("qop");
String method = context.getRequestMethod();
return Optional.of(new DigestCredentials(response, method, username, realm, nonce, uri, cnonce, nc, qop));
} } |
public class class_name {
public void initializeCurrency() {
if (!currencyInitialized) {
sendConsoleMessage(Level.INFO, getLanguageManager().getString("loading_currency_manager"));
currencyManager = new CurrencyManager();
currencyInitialized = true;
sendConsoleMessage(Level.INFO, getLanguageManager().getString("currency_manager_loaded"));
}
} } | public class class_name {
public void initializeCurrency() {
if (!currencyInitialized) {
sendConsoleMessage(Level.INFO, getLanguageManager().getString("loading_currency_manager")); // depends on control dependency: [if], data = [none]
currencyManager = new CurrencyManager(); // depends on control dependency: [if], data = [none]
currencyInitialized = true; // depends on control dependency: [if], data = [none]
sendConsoleMessage(Level.INFO, getLanguageManager().getString("currency_manager_loaded")); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
    /**
     * Closes the gap left by removed runs: shifts the runs following
     * {@code end} left to start right after {@code begin}, then shrinks the
     * run count accordingly.
     */
    private void recoverRoomsInRange(int begin, int end) {
        // assumes valueslength is a run-length-encoded array of (value, length)
        // pairs and that begin <= end index valid runs — TODO confirm against
        // the container's invariants.
        if (end + 1 < this.nbrruns) {
            // Shift the trailing runs left over the removed range.
            copyValuesLength(this.valueslength, end + 1, this.valueslength, begin + 1,
                this.nbrruns - 1 - end);
        }
        // (end - begin) runs were removed.
        this.nbrruns -= end - begin;
    }
}
} } | public class class_name {
private void recoverRoomsInRange(int begin, int end) {
if (end + 1 < this.nbrruns) {
copyValuesLength(this.valueslength, end + 1, this.valueslength, begin + 1,
this.nbrruns - 1 - end); // depends on control dependency: [if], data = [none]
}
this.nbrruns -= end - begin;
} } |
public class class_name {
    /**
     * Runs a Lucene search for the given words, restricted to the supplied
     * help sets, and delivers the hits to the listener.
     *
     * @param words query terms (all must match the "content" field)
     * @param helpSets modules to restrict the search to
     * @param listener receives the completed hit list
     */
    @Override
    public void search(String words, Collection<IHelpSet> helpSets, IHelpSearchListener listener) {
        try {
            // Lazily build the shared query builder on first use.
            if (queryBuilder == null) {
                initQueryBuilder();
            }
            Query searchForWords = queryBuilder.createBooleanQuery("content", words, Occur.MUST);
            Query searchForModules = queryBuilder.createBooleanQuery("module", StrUtil.fromList(helpSets, " "));
            // Both the module filter and the word query must match.
            BooleanQuery query = new BooleanQuery();
            query.add(searchForModules, Occur.MUST);
            query.add(searchForWords, Occur.MUST);
            TopDocs docs = indexSearcher.search(query, 9999);
            List<HelpSearchHit> hits = new ArrayList<>(docs.totalHits);
            // Materialise each scored document into a HelpSearchHit.
            for (ScoreDoc sdoc : docs.scoreDocs) {
                Document doc = indexSearcher.doc(sdoc.doc);
                String source = doc.get("source");
                String title = doc.get("title");
                String url = doc.get("url");
                HelpTopic topic = new HelpTopic(new URL(url), title, source);
                HelpSearchHit hit = new HelpSearchHit(topic, sdoc.score);
                hits.add(hit);
            }
            listener.onSearchComplete(hits);
        } catch (Exception e) {
            // NOTE(review): toUnchecked's result is discarded — if it returns
            // (rather than throws) the converted exception, failures are silently
            // swallowed here; likely intended as 'throw MiscUtil.toUnchecked(e)'.
            // Confirm that helper's contract.
            MiscUtil.toUnchecked(e);
        }
    }
}
} } | public class class_name {
@Override
public void search(String words, Collection<IHelpSet> helpSets, IHelpSearchListener listener) {
try {
if (queryBuilder == null) {
initQueryBuilder(); // depends on control dependency: [if], data = [none]
}
Query searchForWords = queryBuilder.createBooleanQuery("content", words, Occur.MUST);
Query searchForModules = queryBuilder.createBooleanQuery("module", StrUtil.fromList(helpSets, " "));
BooleanQuery query = new BooleanQuery();
query.add(searchForModules, Occur.MUST); // depends on control dependency: [try], data = [none]
query.add(searchForWords, Occur.MUST); // depends on control dependency: [try], data = [none]
TopDocs docs = indexSearcher.search(query, 9999);
List<HelpSearchHit> hits = new ArrayList<>(docs.totalHits);
for (ScoreDoc sdoc : docs.scoreDocs) {
Document doc = indexSearcher.doc(sdoc.doc);
String source = doc.get("source");
String title = doc.get("title");
String url = doc.get("url");
HelpTopic topic = new HelpTopic(new URL(url), title, source);
HelpSearchHit hit = new HelpSearchHit(topic, sdoc.score);
hits.add(hit); // depends on control dependency: [for], data = [none]
}
listener.onSearchComplete(hits); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
MiscUtil.toUnchecked(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public String validateValue(final UIValue _value)
{
String ret = null;
try {
if (_value.getDbValue() != null) {
Long.valueOf(String.valueOf(_value.getDbValue()));
}
} catch (final NumberFormatException e) {
ret = DBProperties.getProperty(NumberUI.class.getName() + ".InvalidValue");
}
return ret;
} } | public class class_name {
@Override
public String validateValue(final UIValue _value)
{
String ret = null;
try {
if (_value.getDbValue() != null) {
Long.valueOf(String.valueOf(_value.getDbValue())); // depends on control dependency: [if], data = [(_value.getDbValue()]
}
} catch (final NumberFormatException e) {
ret = DBProperties.getProperty(NumberUI.class.getName() + ".InvalidValue");
} // depends on control dependency: [catch], data = [none]
return ret;
} } |
public class class_name {
public static void shredInputStream(final INodeWriteTrx wtx, final InputStream value,
final EShredderInsert child) {
final XMLInputFactory factory = XMLInputFactory.newInstance();
factory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
XMLEventReader parser;
try {
parser = factory.createXMLEventReader(value);
} catch (final XMLStreamException xmlse) {
throw new WebApplicationException(xmlse);
}
try {
final XMLShredder shredder = new XMLShredder(wtx, parser, child);
shredder.call();
} catch (final Exception exce) {
throw new WebApplicationException(exce);
}
} } | public class class_name {
public static void shredInputStream(final INodeWriteTrx wtx, final InputStream value,
final EShredderInsert child) {
final XMLInputFactory factory = XMLInputFactory.newInstance();
factory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
XMLEventReader parser;
try {
parser = factory.createXMLEventReader(value); // depends on control dependency: [try], data = [none]
} catch (final XMLStreamException xmlse) {
throw new WebApplicationException(xmlse);
} // depends on control dependency: [catch], data = [none]
try {
final XMLShredder shredder = new XMLShredder(wtx, parser, child);
shredder.call(); // depends on control dependency: [try], data = [none]
} catch (final Exception exce) {
throw new WebApplicationException(exce);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
    /**
     * Asynchronously asks the server whether the element with the given id is
     * a deletion candidate after its removal, and if so prompts the user to
     * decide whether it should be deleted. Does nothing for a null id.
     *
     * @param id the structure id of the removed element; may be null
     */
    protected void checkReferencesToRemovedElement(final String id) {
        if (id != null) {
            //NOTE: We only use an RPC call here to check for references on the server side. If, at a later point, we decide
            //to add a save button again, this will have to be changed, because then we have to consider client-side state.
            CmsRpcAction<CmsRemovedElementStatus> getStatusAction = new CmsRpcAction<CmsRemovedElementStatus>() {
                @Override
                public void execute() {
                    // Show the wait indicator after 200ms while the RPC runs.
                    start(200, true);
                    getContainerpageService().getRemovedElementStatus(id, null, this);
                }
                @Override
                public void onResponse(final CmsRemovedElementStatus status) {
                    stop(false);
                    // Only prompt when the server says nothing references it anymore.
                    if (status.isDeletionCandidate()) {
                        askWhetherRemovedElementShouldBeDeleted(status);
                    }
                }
            };
            getStatusAction.execute();
        }
    }
}
} } | public class class_name {
protected void checkReferencesToRemovedElement(final String id) {
if (id != null) {
//NOTE: We only use an RPC call here to check for references on the server side. If, at a later point, we decide
//to add a save button again, this will have to be changed, because then we have to consider client-side state.
CmsRpcAction<CmsRemovedElementStatus> getStatusAction = new CmsRpcAction<CmsRemovedElementStatus>() {
@Override
public void execute() {
start(200, true);
getContainerpageService().getRemovedElementStatus(id, null, this);
}
@Override
public void onResponse(final CmsRemovedElementStatus status) {
stop(false);
if (status.isDeletionCandidate()) {
askWhetherRemovedElementShouldBeDeleted(status); // depends on control dependency: [if], data = [none]
}
}
};
getStatusAction.execute(); // depends on control dependency: [if], data = [none]
}
} } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.