code stringlengths 130 281k | code_dependency stringlengths 182 306k |
|---|---|
public class class_name {
/**
 * Leniently parses {@code text} as a Long.
 *
 * @param text the text to parse; must not be null
 * @return the parsed value, or null when the text is empty or not a valid long
 */
private static Long parseLong(final String text) {
    // Empty input is treated as "no value" rather than a parse error.
    if (text.isEmpty()) {
        return null;
    }
    try {
        return Long.parseLong(text);
    } catch (NumberFormatException ex) {
        // Non-numeric text also maps to null (lenient contract).
        return null;
    }
} } | public class class_name {
private static Long parseLong(final String text) {
if (text.length() == 0) {
return null; // depends on control dependency: [if], data = [none]
} else {
try {
return Long.parseLong(text); // depends on control dependency: [try], data = [none]
} catch (NumberFormatException ex) {
return null;
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
/**
 * Resolves (and optionally creates) the DOM node addressed by a '/'-separated
 * key path below nodeStart, with an optional "[n]" 1-based index suffix on the
 * leaf segment.
 *
 * @param nodeStart       node to start from; when null the document root is used
 * @param key             path such as "a/b" or "a[2]"; segment names are
 *                        normalized via Util.fixDOMElementName
 * @param createMode      whether missing nodes are created; CREATE_NEW_NODE
 *                        only duplicates nodes at the leaf level
 * @param bReturnTextNode when true, the first child (text/CDATA) of the
 *                        resolved element is returned instead of the element
 * @return the resolved node, or null when not found and creation is disallowed
 */
private Node getNode(Node nodeStart, String key, CreateMode createMode, boolean bReturnTextNode)
{
// Recursive step: resolve the parent path segment first, then the remainder.
if (key.indexOf('/') != -1)
{
String strKeyParent = key.substring(0, key.indexOf('/'));
CreateMode createModeParent = createMode;
if (createModeParent == CreateMode.CREATE_NEW_NODE)
createModeParent = CreateMode.CREATE_IF_NOT_FOUND; // Only create a duplicate node at the leaf level.
nodeStart = this.getNode(nodeStart, strKeyParent, createModeParent, false);
if (nodeStart == null)
return null;
key = key.substring(key.indexOf('/') + 1);
return this.getNode(nodeStart, key, createMode, bReturnTextNode);
}
if (nodeStart == null)
nodeStart = this.getNode(true);
// iIndex == MAX_VALUE means "no [n] suffix": keep the last matching child.
int iIndex = Integer.MAX_VALUE;
if (key.indexOf('[') != -1)
{
iIndex = Integer.parseInt(key.substring(key.indexOf('[') + 1, key.indexOf(']')));
key = key.substring(0, key.indexOf('['));
}
key = Util.fixDOMElementName(key);
Node node = null;
// getElementsByTagName searches the whole subtree, so filter to direct
// children of nodeStart below.
NodeList nodeList = ((Element)nodeStart).getElementsByTagName(key);
if (nodeList != null)
{
for (int i = 0; i < nodeList.getLength(); i++)
{
if (nodeList.item(i).getParentNode() == nodeStart)
{
node = nodeList.item(i); // Get the last child.
if (iIndex != Integer.MAX_VALUE)
{
if (--iIndex == 0)
break; // This is the correct node
node = null; // If you are looking for a specific node, and it doesn't exist, return null.
}
}
}
}
// Create the node when missing (or unconditionally for CREATE_NEW_NODE).
if (((node == null) && (createMode != CreateMode.DONT_CREATE))
|| (createMode == CreateMode.CREATE_NEW_NODE))
{
Document doc = null;
if (m_data instanceof Document)
doc = (Document)m_data;
if (doc == null)
doc = ((Node)m_data).getOwnerDocument();
node = doc.createElement(key);
if (bReturnTextNode)
{
// Seed an empty text/CDATA child so the caller can be handed it below.
if (createMode != CreateMode.CREATE_CDATA_NODE)
node.appendChild(doc.createTextNode(Constant.BLANK));
else
node.appendChild(doc.createCDATASection(Constant.BLANK));
}
nodeStart.appendChild(node);
}
if (node != null)
if (bReturnTextNode)
node = node.getFirstChild();
return node; // Return new node or null.
} } | public class class_name {
private Node getNode(Node nodeStart, String key, CreateMode createMode, boolean bReturnTextNode)
{
if (key.indexOf('/') != -1)
{
String strKeyParent = key.substring(0, key.indexOf('/'));
CreateMode createModeParent = createMode;
if (createModeParent == CreateMode.CREATE_NEW_NODE)
createModeParent = CreateMode.CREATE_IF_NOT_FOUND; // Only create a duplicate node at the leaf level.
nodeStart = this.getNode(nodeStart, strKeyParent, createModeParent, false); // depends on control dependency: [if], data = [none]
if (nodeStart == null)
return null;
key = key.substring(key.indexOf('/') + 1); // depends on control dependency: [if], data = [(key.indexOf('/')]
return this.getNode(nodeStart, key, createMode, bReturnTextNode); // depends on control dependency: [if], data = [none]
}
if (nodeStart == null)
nodeStart = this.getNode(true);
int iIndex = Integer.MAX_VALUE;
if (key.indexOf('[') != -1)
{
iIndex = Integer.parseInt(key.substring(key.indexOf('[') + 1, key.indexOf(']'))); // depends on control dependency: [if], data = [(key.indexOf('[')]
key = key.substring(0, key.indexOf('[')); // depends on control dependency: [if], data = [none]
}
key = Util.fixDOMElementName(key);
Node node = null;
NodeList nodeList = ((Element)nodeStart).getElementsByTagName(key);
if (nodeList != null)
{
for (int i = 0; i < nodeList.getLength(); i++)
{
if (nodeList.item(i).getParentNode() == nodeStart)
{
node = nodeList.item(i); // Get the last child. // depends on control dependency: [if], data = [none]
if (iIndex != Integer.MAX_VALUE)
{
if (--iIndex == 0)
break; // This is the correct node
node = null; // If you are looking for a specific node, and it doesn't exist, return null. // depends on control dependency: [if], data = [none]
}
}
}
}
if (((node == null) && (createMode != CreateMode.DONT_CREATE))
|| (createMode == CreateMode.CREATE_NEW_NODE))
{
Document doc = null;
if (m_data instanceof Document)
doc = (Document)m_data;
if (doc == null)
doc = ((Node)m_data).getOwnerDocument();
node = doc.createElement(key); // depends on control dependency: [if], data = [none]
if (bReturnTextNode)
{
if (createMode != CreateMode.CREATE_CDATA_NODE)
node.appendChild(doc.createTextNode(Constant.BLANK));
else
node.appendChild(doc.createCDATASection(Constant.BLANK));
}
nodeStart.appendChild(node); // depends on control dependency: [if], data = [none]
}
if (node != null)
if (bReturnTextNode)
node = node.getFirstChild();
return node; // Return new node or null.
} } |
public class class_name {
/**
 * Builds the Maven project structure (parent POM, BOM, and modules) for the
 * given ProjectModel and writes the resulting pom.xml tree to disk.
 *
 * @param projectModel the analyzed project to mavenize
 */
void mavenizeApp(ProjectModel projectModel)
{
LOG.info("Mavenizing ProjectModel " + projectModel.toPrettyString());
MavenizationContext mavCtx = new MavenizationContext();
mavCtx.graphContext = grCtx;
WindupConfigurationModel config = grCtx.getUnique(WindupConfigurationModel.class);
mavCtx.mavenizedBaseDir = config.getOutputPath().asFile().toPath().resolve(OUTPUT_SUBDIR_MAVENIZED);
mavCtx.unifiedGroupId = new ModuleAnalysisHelper(grCtx).deriveGroupId(projectModel);
mavCtx.unifiedAppName = normalizeDirName(projectModel.getName());
mavCtx.unifiedVersion = "1.0";
// 1) create the overall structure - a parent, and a BOM.
// Root pom.xml ( serves as a parent pom.xml in our resulting structure).
mavCtx.rootPom = new Pom(new MavenCoord(mavCtx.getUnifiedGroupId(), mavCtx.getUnifiedAppName() + "-parent", mavCtx.getUnifiedVersion()));
mavCtx.rootPom.role = Pom.ModuleRole.PARENT;
mavCtx.rootPom.name = projectModel.getName() + " - Parent";
mavCtx.rootPom.description = "Parent of " + projectModel.getName();
mavCtx.rootPom.root = true;
final String bomArtifactId = mavCtx.getUnifiedAppName() + "-bom";
// BOM
// NOTE(review): BOM flavor is keyed on the "eap7" target technology -
// confirm this mapping still holds when new targets are added.
Pom bom = new Pom(new MavenCoord(mavCtx.getUnifiedGroupId(), bomArtifactId, mavCtx.getUnifiedVersion()));
bom.bom = getTargetTechnologies().contains("eap7")
? MavenizeRuleProvider.JBOSS_BOM_JAVAEE7_WITH_ALL
: MavenizeRuleProvider.JBOSS_BOM_JAVAEE6_WITH_ALL;
bom.role = Pom.ModuleRole.BOM;
bom.parent = new Pom(MavenizeRuleProvider.JBOSS_PARENT);
bom.description = "Bill of Materials. See https://maven.apache.org/guides/introduction/introduction-to-dependency-mechanism.html";
bom.name = projectModel.getName() + " - BOM";
mavCtx.getRootPom().submodules.put(bomArtifactId, bom);
mavCtx.bom = bom;
// BOM - dependencyManagement dependencies
for( ArchiveCoordinateModel dep : grCtx.getUnique(GlobalBomModel.class).getDependencies() ){
LOG.info("Adding dep to BOM: " + dep.toPrettyString());
bom.dependencies.add(new SimpleDependency(Dependency.Role.LIBRARY, MavenCoord.from(dep)));
}
// 2) Recursively add the modules.
mavCtx.rootAppPom = mavenizeModule(mavCtx, projectModel, null);
// TODO: MIGR-236 Sort the modules.
///mavCtx.rootPom.submodules = sortSubmodulesToReflectDependencies(mavCtx.rootAppPom);
// 3) Write the pom.xml's.
new MavenStructureRenderer(mavCtx).createMavenProjectDirectoryTree();
} } | public class class_name {
void mavenizeApp(ProjectModel projectModel)
{
LOG.info("Mavenizing ProjectModel " + projectModel.toPrettyString());
MavenizationContext mavCtx = new MavenizationContext();
mavCtx.graphContext = grCtx;
WindupConfigurationModel config = grCtx.getUnique(WindupConfigurationModel.class);
mavCtx.mavenizedBaseDir = config.getOutputPath().asFile().toPath().resolve(OUTPUT_SUBDIR_MAVENIZED);
mavCtx.unifiedGroupId = new ModuleAnalysisHelper(grCtx).deriveGroupId(projectModel);
mavCtx.unifiedAppName = normalizeDirName(projectModel.getName());
mavCtx.unifiedVersion = "1.0";
// 1) create the overall structure - a parent, and a BOM.
// Root pom.xml ( serves as a parent pom.xml in our resulting structure).
mavCtx.rootPom = new Pom(new MavenCoord(mavCtx.getUnifiedGroupId(), mavCtx.getUnifiedAppName() + "-parent", mavCtx.getUnifiedVersion()));
mavCtx.rootPom.role = Pom.ModuleRole.PARENT;
mavCtx.rootPom.name = projectModel.getName() + " - Parent";
mavCtx.rootPom.description = "Parent of " + projectModel.getName();
mavCtx.rootPom.root = true;
final String bomArtifactId = mavCtx.getUnifiedAppName() + "-bom";
// BOM
Pom bom = new Pom(new MavenCoord(mavCtx.getUnifiedGroupId(), bomArtifactId, mavCtx.getUnifiedVersion()));
bom.bom = getTargetTechnologies().contains("eap7")
? MavenizeRuleProvider.JBOSS_BOM_JAVAEE7_WITH_ALL
: MavenizeRuleProvider.JBOSS_BOM_JAVAEE6_WITH_ALL;
bom.role = Pom.ModuleRole.BOM;
bom.parent = new Pom(MavenizeRuleProvider.JBOSS_PARENT);
bom.description = "Bill of Materials. See https://maven.apache.org/guides/introduction/introduction-to-dependency-mechanism.html";
bom.name = projectModel.getName() + " - BOM";
mavCtx.getRootPom().submodules.put(bomArtifactId, bom);
mavCtx.bom = bom;
// BOM - dependencyManagement dependencies
for( ArchiveCoordinateModel dep : grCtx.getUnique(GlobalBomModel.class).getDependencies() ){
LOG.info("Adding dep to BOM: " + dep.toPrettyString()); // depends on control dependency: [for], data = [dep]
bom.dependencies.add(new SimpleDependency(Dependency.Role.LIBRARY, MavenCoord.from(dep))); // depends on control dependency: [for], data = [dep]
}
// 2) Recursively add the modules.
mavCtx.rootAppPom = mavenizeModule(mavCtx, projectModel, null);
// TODO: MIGR-236 Sort the modules.
///mavCtx.rootPom.submodules = sortSubmodulesToReflectDependencies(mavCtx.rootAppPom);
// 3) Write the pom.xml's.
new MavenStructureRenderer(mavCtx).createMavenProjectDirectoryTree();
} } |
public class class_name {
/**
 * Returns the first existing "property" element wrapped as a PropertyType,
 * creating a fresh one only when none exists yet.
 *
 * @return a PropertyType over the first existing node, or a newly created one
 */
public PropertyType<ValidationConfigurationDescriptor> getOrCreateProperty()
{
    List<Node> existing = model.get("property");
    // Guard clause: nothing to reuse, so delegate to creation.
    if (existing == null || existing.isEmpty())
    {
        return createProperty();
    }
    return new PropertyTypeImpl<ValidationConfigurationDescriptor>(this, "property", model, existing.get(0));
} } | public class class_name {
public PropertyType<ValidationConfigurationDescriptor> getOrCreateProperty()
{
List<Node> nodeList = model.get("property");
if (nodeList != null && nodeList.size() > 0)
{
return new PropertyTypeImpl<ValidationConfigurationDescriptor>(this, "property", model, nodeList.get(0)); // depends on control dependency: [if], data = [none]
}
return createProperty();
} } |
public class class_name {
/**
 * Derives the RmiServiceInvocation from the message payload.
 *
 * @param message               message carrying the invocation payload
 * @param endpointConfiguration supplies the unmarshaller for XML payloads
 * @return the invocation, or null when the payload itself is null
 */
private RmiServiceInvocation getServiceInvocation(Message message, RmiEndpointConfiguration endpointConfiguration) {
    Object payload = message.getPayload();
    RmiServiceInvocation serviceInvocation = null;
    if (payload != null) {
        if (payload instanceof RmiServiceInvocation) {
            // Already the right type - use it directly.
            serviceInvocation = (RmiServiceInvocation) payload;
        } else if (StringUtils.hasText(message.getPayload(String.class))) {
            // Non-empty textual payload: unmarshal from XML. (The original
            // re-checked payload != null here, which is always true inside
            // this branch; the dead check has been removed.)
            serviceInvocation = (RmiServiceInvocation) endpointConfiguration.getMarshaller()
                    .unmarshal(message.getPayload(Source.class));
        } else {
            // Empty payload: fall back to a default invocation object.
            serviceInvocation = new RmiServiceInvocation();
        }
    }
    return serviceInvocation;
} } | public class class_name {
private RmiServiceInvocation getServiceInvocation(Message message, RmiEndpointConfiguration endpointConfiguration) {
Object payload = message.getPayload();
RmiServiceInvocation serviceInvocation = null;
if (payload != null) {
if (payload instanceof RmiServiceInvocation) {
serviceInvocation = (RmiServiceInvocation) payload; // depends on control dependency: [if], data = [none]
} else if (payload != null && StringUtils.hasText(message.getPayload(String.class))) {
serviceInvocation = (RmiServiceInvocation) endpointConfiguration.getMarshaller()
.unmarshal(message.getPayload(Source.class)); // depends on control dependency: [if], data = [none]
} else {
serviceInvocation = new RmiServiceInvocation(); // depends on control dependency: [if], data = [none]
}
}
return serviceInvocation;
} } |
public class class_name {
/**
 * Returns the TypeInfo cached for {@code path}, creating and caching a new
 * {@code TypeInfo(superType)} on first access.
 *
 * @param typeMaps  cache keyed by path
 * @param path      lookup key
 * @param superType super type used only when a new TypeInfo must be created
 * @return the cached or newly created TypeInfo (never null)
 */
private TypeInfo getTypeInfo(Map<String, TypeInfo> typeMaps, String path, Class<?> superType) {
    // computeIfAbsent collapses the original get / null-check / put sequence
    // into a single map operation with identical semantics.
    return typeMaps.computeIfAbsent(path, key -> new TypeInfo(superType));
} } | public class class_name {
private TypeInfo getTypeInfo(Map<String, TypeInfo> typeMaps, String path, Class<?> superType) {
TypeInfo typeInfo = typeMaps.get(path);
if (typeInfo == null) {
typeInfo = new TypeInfo(superType);
// depends on control dependency: [if], data = [none]
typeMaps.put(path, typeInfo);
// depends on control dependency: [if], data = [none]
}
return typeInfo;
} } |
public class class_name {
/**
 * Collects the values of all public static String constants declared on
 * SpPermission as the list of known authorities.
 *
 * @return the authority strings (never null; empty when none are declared)
 */
public static List<String> getAllAuthorities() {
    final List<String> allPermissions = new ArrayList<>();
    final Field[] declaredFields = SpPermission.class.getDeclaredFields();
    for (final Field field : declaredFields) {
        // Guard on the field type as well: the original cast every public
        // static field to String and would throw ClassCastException if a
        // non-String constant were ever added to SpPermission.
        if (Modifier.isPublic(field.getModifiers()) && Modifier.isStatic(field.getModifiers())
                && field.getType() == String.class) {
            field.setAccessible(true);
            try {
                final String role = (String) field.get(null);
                allPermissions.add(role);
            } catch (final IllegalAccessException e) {
                // Skip unreadable fields but keep collecting the rest.
                LOGGER.error(e.getMessage(), e);
            }
        }
    }
    return allPermissions;
} } | public class class_name {
public static List<String> getAllAuthorities() {
final List<String> allPermissions = new ArrayList<>();
final Field[] declaredFields = SpPermission.class.getDeclaredFields();
for (final Field field : declaredFields) {
if (Modifier.isPublic(field.getModifiers()) && Modifier.isStatic(field.getModifiers())) {
field.setAccessible(true); // depends on control dependency: [if], data = [none]
try {
final String role = (String) field.get(null);
allPermissions.add(role); // depends on control dependency: [try], data = [none]
} catch (final IllegalAccessException e) {
LOGGER.error(e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
}
}
return allPermissions;
} } |
public class class_name {
/**
 * Stops the timers involved in finishing an HK-timed operation: the overall
 * Guicey timer, the aggregate HK timer, and the timer for {@code name}.
 * The HK timer is skipped when {@code name} IS the HK timer, so it is not
 * stopped twice.
 *
 * @param name the specific stat timer to stop
 */
public void stopHkTimer(final Stat name) {
timers.get(GuiceyTime).stop();
if (!HKTime.equals(name)) {
timers.get(HKTime).stop();
}
timers.get(name).stop();
} } | public class class_name {
public void stopHkTimer(final Stat name) {
timers.get(GuiceyTime).stop();
if (!HKTime.equals(name)) {
timers.get(HKTime).stop(); // depends on control dependency: [if], data = [none]
}
timers.get(name).stop();
} } |
public class class_name {
/**
 * Combines this filter with another using logical AND.
 *
 * When both filters are still simple (single-predicate) evaluations, a new
 * combined filter is returned; otherwise THIS filter is mutated to hold an
 * AndEvaluation and returned, so callers should use the returned reference.
 *
 * @param otherFilter the filter to conjoin; must not be null
 * @return the combined filter (may be this instance, mutated)
 */
@Override
public AutoscalePolicyFilter and(AutoscalePolicyFilter otherFilter) {
checkNotNull(otherFilter, "Other filter must be not a null");
if (evaluation instanceof SingleFilterEvaluation &&
otherFilter.evaluation instanceof SingleFilterEvaluation) {
return
new AutoscalePolicyFilter(
getPredicate().and(otherFilter.getPredicate())
);
}
evaluation = new AndEvaluation<>(evaluation, otherFilter, AutoscalePolicyMetadata::getName);
return this;
} } | public class class_name {
@Override
public AutoscalePolicyFilter and(AutoscalePolicyFilter otherFilter) {
checkNotNull(otherFilter, "Other filter must be not a null");
if (evaluation instanceof SingleFilterEvaluation &&
otherFilter.evaluation instanceof SingleFilterEvaluation) {
return
new AutoscalePolicyFilter(
getPredicate().and(otherFilter.getPredicate())
); // depends on control dependency: [if], data = [none]
}
evaluation = new AndEvaluation<>(evaluation, otherFilter, AutoscalePolicyMetadata::getName);
return this;
} } |
public class class_name {
/**
 * Reads the given file fully into a byte array.
 *
 * @param res the file to read
 * @return the file contents, or null when the file cannot be read
 */
public static byte[] getLocalByteArray(File res) {
    // try-with-resources closes the stream even when toByteArray throws;
    // the original leaked the FileInputStream on every call.
    try (FileInputStream in = new FileInputStream(res)) {
        return toByteArray(in);
    } catch (FileNotFoundException e) {
        logger.error("FileNotFound", e);
    } catch (java.io.IOException e) {
        // close() failure; fully qualified to avoid requiring a new import
        logger.error("IO error reading " + res, e);
    }
    return null;
} } | public class class_name {
public static byte[] getLocalByteArray(File res) {
try {
return toByteArray(new FileInputStream(res)); // depends on control dependency: [try], data = [none]
} catch (FileNotFoundException e) {
logger.error("FileNotFound", e);
} // depends on control dependency: [catch], data = [none]
return null;
} } |
public class class_name {
/**
 * Returns the set of system time-zone IDs, excluding Etc/Unknown.
 *
 * The result is cached through a SoftReference so it can be reclaimed under
 * memory pressure and rebuilt on demand; the method is synchronized so the
 * check-then-rebuild of the cache is race-free.
 *
 * @return an unmodifiable, sorted set of system zone IDs
 */
private static synchronized Set<String> getSystemZIDs() {
Set<String> systemZones = null;
if (REF_SYSTEM_ZONES != null) {
systemZones = REF_SYSTEM_ZONES.get();
}
// Cache miss, or the soft reference was collected: rebuild the set.
if (systemZones == null) {
Set<String> systemIDs = new TreeSet<String>();
String[] allIDs = getZoneIDs();
for (String id : allIDs) {
// exclude Etc/Unknown
if (id.equals(TimeZone.UNKNOWN_ZONE_ID)) {
continue;
}
systemIDs.add(id);
}
systemZones = Collections.unmodifiableSet(systemIDs);
REF_SYSTEM_ZONES = new SoftReference<Set<String>>(systemZones);
}
return systemZones;
} } | public class class_name {
private static synchronized Set<String> getSystemZIDs() {
Set<String> systemZones = null;
if (REF_SYSTEM_ZONES != null) {
systemZones = REF_SYSTEM_ZONES.get(); // depends on control dependency: [if], data = [none]
}
if (systemZones == null) {
Set<String> systemIDs = new TreeSet<String>();
String[] allIDs = getZoneIDs();
for (String id : allIDs) {
// exclude Etc/Unknown
if (id.equals(TimeZone.UNKNOWN_ZONE_ID)) {
continue;
}
systemIDs.add(id); // depends on control dependency: [for], data = [id]
}
systemZones = Collections.unmodifiableSet(systemIDs); // depends on control dependency: [if], data = [none]
REF_SYSTEM_ZONES = new SoftReference<Set<String>>(systemZones); // depends on control dependency: [if], data = [(systemZones]
}
return systemZones;
} } |
public class class_name {
/**
 * Collects the declared fields of {@code targetClass} and its superclasses
 * (stopping before Object unless Object itself was passed in). When
 * {@code ann} is null every field matches; otherwise only fields carrying
 * that annotation are returned.
 *
 * @param targetClass class whose hierarchy is scanned; may be null
 * @param ann         annotation class to match, or null for "all fields"
 * @return the matching fields, hierarchy order (never null)
 */
public static List<Field> findMatchedFields(Class targetClass, Class ann) {
    List<Field> matched = new ArrayList<Field>();
    if (targetClass == null) {
        return matched;
    }
    // do-while: the class passed in is always scanned at least once, even
    // when it is Object.class itself.
    Class current = targetClass;
    do {
        for (Field field : current.getDeclaredFields()) {
            // null annotation means "match everything"
            if (ann == null || field.getAnnotation(ann) != null) {
                matched.add(field);
            }
        }
        current = current.getSuperclass();
    } while (current != null && current != Object.class);
    return matched;
} } | public class class_name {
public static List<Field> findMatchedFields(Class targetClass, Class ann) {
List<Field> ret = new ArrayList<Field>();
if (targetClass == null) {
return ret;
// depends on control dependency: [if], data = [none]
}
// Keep backing up the inheritance hierarchy.
do {
// Copy each field declared on this class unless it's static or
// file.
Field[] fields = targetClass.getDeclaredFields();
for (int i = 0; i < fields.length; i++) {
if (ann == null) {
ret.add(fields[i]);
// depends on control dependency: [if], data = [none]
continue;
}
Annotation protobuf = fields[i].getAnnotation(ann);
if (protobuf != null) {
ret.add(fields[i]);
// depends on control dependency: [if], data = [none]
}
}
targetClass = targetClass.getSuperclass();
} while (targetClass != null && targetClass != Object.class);
return ret;
} } |
public class class_name {
/**
 * Factory for a plain EJBException: unwraps WebSphere-specific wrapper
 * exception layers from {@code cause} so the returned exception is a plain
 * EJBException whose cause chain and stack trace point at the root failure.
 *
 * @param message message used when no cause is supplied
 * @param cause   the originating throwable; may be null
 * @return a plain EJBException with cause and stack preserved
 */
public static EJBException EJBException(String message,
Throwable cause) {
EJBException ejbex = null;
// -----------------------------------------------------------------------
// If a cause was not specified, then this method has been called to
// just create a generic EJBException with the specified message.
// -----------------------------------------------------------------------
if (cause == null) {
ejbex = new EJBException(message);
}
// -----------------------------------------------------------------------
// If the cause happens to be a WebSphere specific subclass of
// EJBException or RemoteException, then convert it to a plain
// EJBException or at least unwrap it, so a plain EJBException
// is created below.
// -----------------------------------------------------------------------
String causeMessage = null;
while (cause != null &&
(!(cause instanceof RecursiveInjectionException)) && // d408351
(cause instanceof ContainerException ||
cause instanceof UncheckedException ||
cause instanceof EJSPersistenceException ||
cause instanceof CPIException ||
cause instanceof CPMIException ||
cause instanceof CSIException ||
cause instanceof InjectionException || // d436080
cause instanceof ManagedObjectException ||
(cause instanceof EJBException &&
cause instanceof WsNestedException))) {
Throwable nextCause = cause.getCause();
if (nextCause == null) {
// Nothing was nested in the WebSphere specific exception,
// so convert to EJBException, copying the message and stack.
if (causeMessage == null) {
causeMessage = cause.getMessage();
}
ejbex = new EJBException(causeMessage);
ejbex.setStackTrace(cause.getStackTrace());
} else if (causeMessage == null && cause instanceof InjectionException) {
// Remember the outermost InjectionException message for later use.
causeMessage = cause.getMessage();
}
cause = nextCause;
}
// -----------------------------------------------------------------------
// If the cause is not already an EJBException, then create a new
// EJBException. Since EJBException doesn't have a constructor that
// accepts a Throwable, wrap any Throwable in an Exception...
// but note that the cause on Throwable will be the root cause
// (i.e. not wrapped in Exception). In all cases, insure getCause()
// works if there is a cause, and clear the stack if the EJBException
// wasn't thrown by the customer.... let the cause stack point to
// the failure.
// -----------------------------------------------------------------------
if (ejbex == null) {
if (cause instanceof EJBException) {
ejbex = (EJBException) cause;
// EJBException doesn't normally set the cause on Throwable, so
// let's do that to be nice :-)
// Geronimo EJBException.getCause returns getCausedbyException, so
// we do not expect this code to be used. F53643
cause = ejbex.getCausedByException();
if (cause != null && ejbex.getCause() == null)
ejbex.initCause(cause);
} else {
if (causeMessage == null) {
causeMessage = message;
}
ejbex = new EJBException(causeMessage, Exception(cause));
// And finally... insure the cause is set on Throwable.
// Geronimo EJBException.getCause returns getCausedbyException, so
// we do not expect this code to be used. F53643
if (ejbex.getCause() == null) { // F743-16279
ejbex.initCause(cause);
}
}
}
return ejbex;
} } | public class class_name {
public static EJBException EJBException(String message,
Throwable cause) {
EJBException ejbex = null;
// -----------------------------------------------------------------------
// If a cause was not specified, then this method has been called to
// just create a generic EJBException with the specified message.
// -----------------------------------------------------------------------
if (cause == null) {
ejbex = new EJBException(message); // depends on control dependency: [if], data = [none]
}
// -----------------------------------------------------------------------
// If the cause happens to be a WebSphere specific subclass of
// EJBException or RemoteException, then convert it to a plain
// EJBException or at least unwrap it, so a plain EJBException
// is created below.
// -----------------------------------------------------------------------
String causeMessage = null;
while (cause != null &&
(!(cause instanceof RecursiveInjectionException)) && // d408351
(cause instanceof ContainerException ||
cause instanceof UncheckedException ||
cause instanceof EJSPersistenceException ||
cause instanceof CPIException ||
cause instanceof CPMIException ||
cause instanceof CSIException ||
cause instanceof InjectionException || // d436080
cause instanceof ManagedObjectException ||
(cause instanceof EJBException &&
cause instanceof WsNestedException))) {
Throwable nextCause = cause.getCause();
if (nextCause == null) {
// Nothing was nested in the WebSphere specific exception,
// so convert to EJBException, copying the message and stack.
if (causeMessage == null) {
causeMessage = cause.getMessage(); // depends on control dependency: [if], data = [none]
}
ejbex = new EJBException(causeMessage); // depends on control dependency: [if], data = [none]
ejbex.setStackTrace(cause.getStackTrace()); // depends on control dependency: [if], data = [none]
} else if (causeMessage == null && cause instanceof InjectionException) {
causeMessage = cause.getMessage(); // depends on control dependency: [if], data = [none]
}
cause = nextCause; // depends on control dependency: [while], data = [none]
}
// -----------------------------------------------------------------------
// If the cause is not already an EJBException, then create a new
// EJBException. Since EJBException doesn't have a constructor that
// accepts a Throwable, wrap any Throwable in an Exception...
// but note that the cause on Throwable will be the root cause
// (i.e. not wrapped in Exception). In all cases, insure getCause()
// works if there is a cause, and clear the stack if the EJBException
// wasn't thrown by the customer.... let the cause stack point to
// the failure.
// -----------------------------------------------------------------------
if (ejbex == null) {
if (cause instanceof EJBException) {
ejbex = (EJBException) cause; // depends on control dependency: [if], data = [none]
// EJBException doesn't normally set the cause on Throwable, so
// let's do that to be nice :-)
// Geronimo EJBException.getCause returns getCausedbyException, so
// we do not expect this code to be used. F53643
cause = ejbex.getCausedByException(); // depends on control dependency: [if], data = [none]
if (cause != null && ejbex.getCause() == null)
ejbex.initCause(cause);
} else {
if (causeMessage == null) {
causeMessage = message; // depends on control dependency: [if], data = [none]
}
ejbex = new EJBException(causeMessage, Exception(cause)); // depends on control dependency: [if], data = [none]
// And finally... insure the cause is set on Throwable.
// Geronimo EJBException.getCause returns getCausedbyException, so
// we do not expect this code to be used. F53643
if (ejbex.getCause() == null) { // F743-16279
ejbex.initCause(cause); // depends on control dependency: [if], data = [none]
}
}
}
return ejbex;
} } |
public class class_name {
/**
 * Returns the union of the nested class expressions of every axiom in this
 * justification.
 *
 * @return a new mutable set of all nested class expressions
 */
public Set<OWLClassExpression> getNestedClassExpressions() {
    Set<OWLClassExpression> nested = new HashSet<OWLClassExpression>();
    // Accumulate each axiom's nested expressions into one set.
    justification.forEach(axiom -> nested.addAll(axiom.getNestedClassExpressions()));
    return nested;
} } | public class class_name {
public Set<OWLClassExpression> getNestedClassExpressions() {
Set<OWLClassExpression> subConcepts = new HashSet<OWLClassExpression>();
for (OWLAxiom ax : justification) {
subConcepts.addAll(ax.getNestedClassExpressions()); // depends on control dependency: [for], data = [ax]
}
return subConcepts;
} } |
public class class_name {
/**
 * Expands simple macros in {@code input}, substituting each occurrence of the
 * MACRO_BEGIN/MACRO_END pair with the next argument in order. An ESCAPE
 * character before ESCAPE or MACRO_BEGIN emits that character literally.
 * Surplus macros (more macros than args) are left verbatim.
 *
 * @param input template text; returned unchanged when null, empty, or when
 *              no args are supplied
 * @param args  values substituted for successive macro occurrences
 * @return the expanded string
 */
public static String processSimpleMacros(String input, Object[] args)
{
if (input == null || input.isEmpty() || args.length == 0)
{
return input;
}
StringBuilder out = new StringBuilder();
int stringIndex = 0;
int argIndex = 0;
int len = input.length();
char current;
char next;
// Single left-to-right scan; stringIndex is advanced an extra step when a
// two-character sequence (escape pair or macro) is consumed.
while (stringIndex < len)
{
current = input.charAt(stringIndex);
if (current == ESCAPE && stringIndex + 1 < len)
{
next = input.charAt(stringIndex + 1);
if (next == ESCAPE || next == MACRO_BEGIN)
{
// Escaped escape or macro-begin: emit the escaped char literally.
stringIndex++;
out.append(next);
}
else
{
// Escape before anything else is not special; keep it.
out.append(current);
}
}
else if (current == MACRO_BEGIN && stringIndex + 1 < len)
{
next = input.charAt(stringIndex + 1);
if (next == MACRO_END && argIndex < args.length)
{
// Complete macro with an argument remaining: substitute it.
stringIndex++;
out.append(args[argIndex++]);
}
else
{
// Incomplete macro, or args exhausted: keep the char verbatim.
out.append(current);
}
}
else
{
out.append(current);
}
stringIndex++;
}
return out.toString();
} } | public class class_name {
public static String processSimpleMacros(String input, Object[] args)
{
if (input == null || input.isEmpty() || args.length == 0)
{
return input; // depends on control dependency: [if], data = [none]
}
StringBuilder out = new StringBuilder();
int stringIndex = 0;
int argIndex = 0;
int len = input.length();
char current;
char next;
while (stringIndex < len)
{
current = input.charAt(stringIndex); // depends on control dependency: [while], data = [(stringIndex]
if (current == ESCAPE && stringIndex + 1 < len)
{
next = input.charAt(stringIndex + 1); // depends on control dependency: [if], data = [none]
if (next == ESCAPE || next == MACRO_BEGIN)
{
stringIndex++; // depends on control dependency: [if], data = [none]
out.append(next); // depends on control dependency: [if], data = [(next]
}
else
{
out.append(current); // depends on control dependency: [if], data = [none]
}
}
else if (current == MACRO_BEGIN && stringIndex + 1 < len)
{
next = input.charAt(stringIndex + 1); // depends on control dependency: [if], data = [none]
if (next == MACRO_END && argIndex < args.length)
{
stringIndex++; // depends on control dependency: [if], data = [none]
out.append(args[argIndex++]); // depends on control dependency: [if], data = [none]
}
else
{
out.append(current); // depends on control dependency: [if], data = [none]
}
}
else
{
out.append(current); // depends on control dependency: [if], data = [(current]
}
stringIndex++; // depends on control dependency: [while], data = [none]
}
return out.toString();
} } |
public class class_name {
/**
 * Marshals the LaunchDetails fields (latest launch time, stack name,
 * stack id) into the request using the supplied ProtocolMarshaller.
 *
 * @param launchDetails      the object to marshall; must not be null
 * @param protocolMarshaller the marshaller to write into
 * @throws SdkClientException when launchDetails is null or marshalling fails
 */
public void marshall(LaunchDetails launchDetails, ProtocolMarshaller protocolMarshaller) {
if (launchDetails == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(launchDetails.getLatestLaunchTime(), LATESTLAUNCHTIME_BINDING);
protocolMarshaller.marshall(launchDetails.getStackName(), STACKNAME_BINDING);
protocolMarshaller.marshall(launchDetails.getStackId(), STACKID_BINDING);
} catch (Exception e) {
// Wrap every failure in the SDK's client exception, keeping the cause.
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(LaunchDetails launchDetails, ProtocolMarshaller protocolMarshaller) {
if (launchDetails == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(launchDetails.getLatestLaunchTime(), LATESTLAUNCHTIME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(launchDetails.getStackName(), STACKNAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(launchDetails.getStackId(), STACKID_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Tests whether {@code string} equals {@code pattern} exactly, or matches it
 * via the pattern-matching helper.
 *
 * @param string  the value to test
 * @param pattern the literal or pattern to test against
 * @return true when equal or matching
 */
public static boolean equalsOrMatch(final CharSequence string, final CharSequence pattern) {
    // Fast path (exact equality) short-circuits the pattern match.
    return string.equals(pattern) || match(string, pattern, 0, 0);
} } | public class class_name {
public static boolean equalsOrMatch(final CharSequence string, final CharSequence pattern) {
if (string.equals(pattern)) {
return true; // depends on control dependency: [if], data = [none]
}
return match(string, pattern, 0, 0);
} } |
public class class_name {
/**
 * Loads an access-control model (ACL or RBAC) from a properties file and
 * binds it to one of the supplied contexts.
 *
 * @param acFile            path of the AC model properties file
 * @param availableContexts contexts indexed by name; must contain the context
 *                          named in the file
 * @param validateModel     when true, the parsed model is validity-checked
 * @return the loaded ACL or RBAC model
 * @throws Exception on load failure, missing/null context, or invalid model
 */
public static <C extends SOABase> AbstractACModel loadACModel(String acFile, Map<String, C> availableContexts, boolean validateModel) throws Exception {
    ACModelProperties testProperties = new ACModelProperties();
    try {
        testProperties.load(acFile);
    } catch (IOException e) {
        // Preserve the original I/O failure as the cause (it was dropped before).
        throw new IOException("Cannot load properties file: " + acFile + ".", e);
    }
    String contextName = null;
    try {
        contextName = testProperties.getContextName();
    } catch (Exception e) {
        throw new Exception("Exception while extracting context name from AC model properties", e);
    }
    if (contextName == null) {
        throw new PropertyException(ACModelProperty.CONTEXT_NAME, null, "Cannot extract context name from AC model properties");
    }
    // Single availability check: the original repeated this test a second
    // time with a different message, but the duplicate was unreachable.
    if (!availableContexts.containsKey(contextName)) {
        throw new PropertyException(ACModelProperty.CONTEXT_NAME, contextName, "No context with adequate name available.");
    }
    SOABase context = availableContexts.get(contextName);
    if (context == null) {
        throw new Exception("Required SOABase \"" + contextName + "\" is NULL");
    }
    // Check ACModel type
    AbstractACModel newModel = null;
    if (testProperties.getType() == ACModelType.ACL) {
        ACLModelProperties aclProperties = new ACLModelProperties();
        aclProperties.load(acFile);
        newModel = new ACLModel(aclProperties, context);
    } else {
        RBACModelProperties rbacProperties = new RBACModelProperties();
        rbacProperties.load(acFile);
        newModel = new RBACModel(rbacProperties, context);
    }
    if (validateModel) {
        try {
            newModel.checkValidity();
        } catch (ACMValidationException e) {
            throw new ParameterException("Parsed AC-model is not valid", e);
        }
    }
    return newModel;
} } | public class class_name {
public static <C extends SOABase> AbstractACModel loadACModel(String acFile, Map<String, C> availableContexts, boolean validateModel) throws Exception {
ACModelProperties testProperties = new ACModelProperties();
try {
testProperties.load(acFile);
} catch (IOException e) {
throw new IOException("Cannot load properties file: " + acFile + ".");
}
String contextName = null;
try {
contextName = testProperties.getContextName();
} catch (Exception e) {
throw new Exception("Exception while extracting context name from AC model properties", e);
}
if (contextName == null) {
throw new PropertyException(ACModelProperty.CONTEXT_NAME, null, "Cannot extract context name from AC model properties");
}
if (!availableContexts.containsKey(contextName)) {
throw new PropertyException(ACModelProperty.CONTEXT_NAME, contextName, "No context with adequate name available.");
}
// Check if suitable context is available
if (!availableContexts.containsKey(contextName)) {
throw new Exception("Required SOABase \"" + contextName + "\" is not available");
}
SOABase context = availableContexts.get(contextName);
if (context == null) {
throw new Exception("Required SOABase \"" + contextName + "\" is NULL");
}
// Check ACModel type
AbstractACModel newModel = null;
if (testProperties.getType() == ACModelType.ACL) {
ACLModelProperties aclProperties = new ACLModelProperties();
aclProperties.load(acFile);
newModel = new ACLModel(aclProperties, context);
} else {
RBACModelProperties rbacProperties = new RBACModelProperties();
rbacProperties.load(acFile);
newModel = new RBACModel(rbacProperties, context);
}
if (validateModel) {
try {
newModel.checkValidity(); // depends on control dependency: [try], data = [none]
} catch (ACMValidationException e) {
throw new ParameterException("Parsed AC-model is not valid", e);
} // depends on control dependency: [catch], data = [none]
}
return newModel;
} } |
public class class_name {
   /**
    * Applies a saved batch of item-state changes to the cache.
    *
    * All updates run inside a single cache transaction: the transaction is begun
    * up front, each {@code ItemState} is dispatched to the matching cache
    * operation (put / buffered update / remove / rename / path change / mixin
    * change), and the transaction is committed at the end. If anything throws
    * before the commit succeeds, the {@code finally} block rolls the
    * transaction back.
    *
    * @param itemStates the change log whose states are applied in order
    */
   public void onSaveItems(final ItemStateChangesLog itemStates)
   {
      // if something happen we will rollback changes
      boolean rollback = true;
      try
      {
         // Most recent delete state seen so far.
         // NOTE(review): renameItem consumes it, so a rename appears to be
         // modeled as a delete followed by a rename state — confirm against the
         // change-log producer.
         ItemState lastDelete = null;
         cache.beginTransaction();
         // Ids already handled by updateInBuffer; carried across *consecutive*
         // persisted node updates only (see the look-ahead reset below).
         Set<String> idsToSkip = null;
         List<ItemState> states = itemStates.getAllStates();
         for (int i = 0, length = states.size(); i < length; i++)
         {
            ItemState state = states.get(i);
            if (state.isAdded())
            {
               if (state.isPersisted())
               {
                  putItem(state.getData());
               }
            }
            else if (state.isUpdated())
            {
               if (state.isPersisted())
               {
                  // There was a problem with removing a list of samename siblings in on transaction,
                  // so putItemInBufferedCache(..) and updateInBufferedCache(..) used instead put(..) and update (..) methods.
                  ItemData prevItem = putItemInBufferedCache(state.getData());
                  if (state.isNode() && (prevItem != null || state.getOldPath() != null))
                  {
                     // nodes reordered, if previous is null it's InvalidItemState case
                     idsToSkip =
                        updateInBuffer((NodeData)state.getData(),
                           prevItem != null ? prevItem.getQPath() : state.getOldPath(), idsToSkip);
                     if (i + 1 < length)
                     {
                        // We check if the next state is another update on a persisted node, because if so we can keep the skip list otherwise we can get
                        // rid of it
                        ItemState nextState = states.get(i + 1);
                        if (!nextState.isUpdated() || !nextState.isNode() || !nextState.isPersisted())
                        {
                           // No order before has been detected so we have no need to keep the list of ids
                           idsToSkip = null;
                        }
                     }
                  }
               }
            }
            else if (state.isDeleted())
            {
               if (state.isPersisted())
               {
                  removeItem(state.getData());
               }
            }
            else if (state.isRenamed())
            {
               renameItem(state, lastDelete);
            }
            else if (state.isPathChanged())
            {
               updateTreePath(state.getOldPath(), state.getData().getQPath(), (Set<String>)null);
            }
            else if (state.isMixinChanged())
            {
               if (state.isPersisted())
               {
                  // update subtree ACLs
                  updateMixin((NodeData)state.getData());
               }
            }
            // Track deletes regardless of which branch handled the state above.
            if (state.isDeleted())
            {
               lastDelete = state;
            }
         }
         cache.commitTransaction();
         // Commit succeeded — suppress the rollback in the finally block.
         rollback = false;
      }
      finally
      {
         if (rollback)
         {
            cache.rollbackTransaction();
         }
      }
   }
} | public class class_name {
public void onSaveItems(final ItemStateChangesLog itemStates)
{
// if something happen we will rollback changes
boolean rollback = true;
try
{
ItemState lastDelete = null;
cache.beginTransaction();
// depends on control dependency: [try], data = [none]
Set<String> idsToSkip = null;
List<ItemState> states = itemStates.getAllStates();
for (int i = 0, length = states.size(); i < length; i++)
{
ItemState state = states.get(i);
if (state.isAdded())
{
if (state.isPersisted())
{
putItem(state.getData());
// depends on control dependency: [if], data = [none]
}
}
else if (state.isUpdated())
{
if (state.isPersisted())
{
// There was a problem with removing a list of samename siblings in on transaction,
// so putItemInBufferedCache(..) and updateInBufferedCache(..) used instead put(..) and update (..) methods.
ItemData prevItem = putItemInBufferedCache(state.getData());
if (state.isNode() && (prevItem != null || state.getOldPath() != null))
{
// nodes reordered, if previous is null it's InvalidItemState case
idsToSkip =
updateInBuffer((NodeData)state.getData(),
prevItem != null ? prevItem.getQPath() : state.getOldPath(), idsToSkip);
// depends on control dependency: [if], data = [none]
if (i + 1 < length)
{
// We check if the next state is another update on a persisted node, because if so we can keep the skip list otherwise we can get
// rid of it
ItemState nextState = states.get(i + 1);
if (!nextState.isUpdated() || !nextState.isNode() || !nextState.isPersisted())
{
// No order before has been detected so we have no need to keep the list of ids
idsToSkip = null;
// depends on control dependency: [if], data = [none]
}
}
}
}
}
else if (state.isDeleted())
{
if (state.isPersisted())
{
removeItem(state.getData());
// depends on control dependency: [if], data = [none]
}
}
else if (state.isRenamed())
{
renameItem(state, lastDelete);
// depends on control dependency: [if], data = [none]
}
else if (state.isPathChanged())
{
updateTreePath(state.getOldPath(), state.getData().getQPath(), (Set<String>)null);
// depends on control dependency: [if], data = [none]
}
else if (state.isMixinChanged())
{
if (state.isPersisted())
{
// update subtree ACLs
updateMixin((NodeData)state.getData());
// depends on control dependency: [if], data = [none]
}
}
if (state.isDeleted())
{
lastDelete = state;
// depends on control dependency: [if], data = [none]
}
}
cache.commitTransaction();
// depends on control dependency: [try], data = [none]
rollback = false;
// depends on control dependency: [try], data = [none]
}
finally
{
if (rollback)
{
cache.rollbackTransaction();
// depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
    /**
     * Inserts {@code e} into its per-element queue while honouring the overall
     * capacity, all under the write lock.
     *
     * @param e            the element to enqueue
     * @param failWhenFull when {@code true}, a full queue raises
     *                     {@link IllegalStateException}; otherwise {@code false} is returned
     * @return whether the element was actually added
     */
    private boolean add(T e, boolean failWhenFull) {
        final Queue<T> queue = this.getOrCreateQueue(e);
        this.writeLock.lock();
        try {
            // Capacity applies to the aggregate size across all per-element queues.
            if (this.size == this.capacity) {
                if (!failWhenFull) {
                    return false;
                }
                throw new IllegalStateException("Queue is at capacity: " + this.capacity);
            }
            if (!queue.add(e)) {
                return false;
            }
            this.size++;
            this.notEmpty.signal();
            return true;
        } finally {
            this.writeLock.unlock();
        }
    }
} | public class class_name {
private boolean add(T e, boolean failWhenFull) {
final Queue<T> queue = this.getOrCreateQueue(e);
this.writeLock.lock();
try {
if (this.size == this.capacity) {
if (failWhenFull) {
throw new IllegalStateException("Queue is at capacity: " + this.capacity);
}
return false; // depends on control dependency: [if], data = [none]
}
final boolean added = queue.add(e);
if (added) {
this.size++; // depends on control dependency: [if], data = [none]
this.notEmpty.signal(); // depends on control dependency: [if], data = [none]
}
return added; // depends on control dependency: [try], data = [none]
} finally {
this.writeLock.unlock();
}
} } |
public class class_name {
	/**
	 * Executes a SELECT built from the given selectors against the table backing
	 * this instance and returns the result rows as a JSON array.
	 *
	 * @param recordSelectors   WHERE-clause selectors, combined with AND (augmented by access rules)
	 * @param fieldSelectors    columns/expressions to select; {@code null} selects every readable column
	 * @param groupBySelectors  GROUP BY selectors, or {@code null}
	 * @param orderBySpecifiers ORDER BY specifiers, or {@code null}
	 * @param limit             maximum number of rows, or {@code null} for no limit
	 * @param offset            row offset (only applied when {@code limit} is set), or {@code null}
	 * @return the selected rows as JSON
	 * @throws Exception if querying is not allowed, a selector refers to an
	 *         unreadable column, or SQL preparation/execution fails
	 */
	public JSONArray query(
		List<RecordSelector> recordSelectors
		,List<FieldSelector> fieldSelectors
		,List<FieldSelector> groupBySelectors
		,List<OrderSpecifier> orderBySpecifiers
		,Integer limit
		,Integer offset
		) throws Exception {
		OperationAccess operationAccess = tableSchema.getQueryAccess();
		if( !operationAccess.isAllowed() ) {
			throw new Exception("Attempting to query a table while the privilege is not allowed: "+tableSchema.getLogicalName()+" ("+tableSchema.getPhysicalName()+")");
		}
		// SELECT list: either the explicit selectors (validated as readable) or all readable columns.
		List<FieldSelector> effectiveFieldSelectors = new Vector<FieldSelector>();
		if( null == fieldSelectors ) {
			// Select all available columns for read
			for( ColumnData columnData : tableSchema.getColumns() ) {
				if( columnData.isReadable() ) {
					effectiveFieldSelectors.add(new FieldSelectorColumn(columnData.getColumnName()));
				}
			}
		} else {
			for(FieldSelector fieldSelector : fieldSelectors) {
				for( ColumnData columnData : fieldSelector.getColumnData(tableSchema) ) {
					if( null == columnData || !columnData.isReadable() ) {
						throw new Exception(
							"Invalid selection on "+fieldSelector
							+" which is not available in table "+tableSchema.getLogicalName()
							+"("+tableSchema.getPhysicalName()+")"
						);
					}
				}
				effectiveFieldSelectors.add(fieldSelector);
			}
		}
		// Sort. This offers greater reusability of the prepared statement.
		Collections.sort(effectiveFieldSelectors, fieldSelectorComparator);
		// GROUP BY selectors, validated the same way and sorted for statement reuse.
		List<FieldSelector> effectiveGroupBySelectors = new Vector<FieldSelector>();
		if( null != groupBySelectors ) {
			for(FieldSelector fieldSelector : groupBySelectors) {
				for( ColumnData columnData : fieldSelector.getColumnData(tableSchema) ) {
					if( null == columnData || !columnData.isReadable() ) {
						throw new Exception(
							"Invalid GROUP BY on "+fieldSelector
							+" which is not available in table "+tableSchema.getLogicalName()
							+"("+tableSchema.getPhysicalName()+")"
						);
					}
				}
				effectiveGroupBySelectors.add(fieldSelector);
			}
		}
		Collections.sort(effectiveGroupBySelectors,fieldSelectorComparator);
		// ORDER BY specifiers, validated but kept in caller order (ordering is significant).
		List<OrderSpecifier> effectiveOrderBySelectors = new Vector<OrderSpecifier>();
		if( null != orderBySpecifiers ) {
			for(OrderSpecifier orderSpecifier : orderBySpecifiers) {
				for( ColumnData columnData : orderSpecifier.getColumnData(tableSchema) ) {
					if( null == columnData || !columnData.isReadable() ) {
						throw new Exception(
							"Invalid ORDER BY on "+orderSpecifier
							+" which is not available in table "+tableSchema.getLogicalName()
							+"("+tableSchema.getPhysicalName()+")"
						);
					}
				}
				effectiveOrderBySelectors.add(orderSpecifier);
			}
		}
		// Figure out all WHERE clauses
		List<RecordSelector> effectiveRecordSelectors = computeEffectiveWhereClauses(recordSelectors, operationAccess);
		// Build the SQL text. Placeholders must be emitted in the same order as the
		// value-binding loops below.
		StringWriter sw = new StringWriter();
		PrintWriter pw = new PrintWriter(sw);
		pw.print("SELECT ");
		{
			boolean first = true;
			for(FieldSelector fieldSelector : effectiveFieldSelectors) {
				pw.print( first ? "" : "," );
				first = false;
				pw.print( fieldSelector.getQueryString(tableSchema, SqlElement.Phase.SELECT) );
			}
		}
		pw.print(" FROM ");
		pw.print(tableSchema.getPhysicalName());
		{
			boolean first = true;
			for( RecordSelector exp : effectiveRecordSelectors ) {
				pw.print( first ? " WHERE " : " AND " );
				first = false;
				pw.print( exp.getQueryString(tableSchema, SqlElement.Phase.WHERE) );
			}
		}
		if( !effectiveGroupBySelectors.isEmpty() ) {
			boolean first = true;
			for( FieldSelector groupColumn : effectiveGroupBySelectors ) {
				pw.print( first ? " GROUP BY " : "," );
				first = false;
				pw.print( groupColumn.getQueryString(tableSchema, SqlElement.Phase.GROUP_BY) );
			}
		}
		if( !effectiveOrderBySelectors.isEmpty() ) {
			boolean first = true;
			for( OrderSpecifier orderSpecifier : effectiveOrderBySelectors ) {
				pw.print( first ? " ORDER BY " : "," );
				first = false;
				pw.print( orderSpecifier.getQueryString(tableSchema, SqlElement.Phase.ORDER_BY) );
			}
		}
		if( null != limit ) {
			pw.print(" LIMIT ");
			pw.print(limit.intValue());
			// OFFSET is only meaningful together with LIMIT.
			if( null != offset ) {
				pw.print(" OFFSET ");
				pw.print(offset.intValue());
			}
		}
		pw.flush();
		String sqlQuery = sw.toString();
		PreparedStatement pstmt = connection.prepareStatement(sqlQuery);
		try {
			// Bind values in the exact order their SQL fragments were printed above.
			int index = 1;
			for( FieldSelector fs : effectiveFieldSelectors ) {
				for(TypedValue value : fs.getQueryValues(tableSchema, variables)) {
					ColumnDataUtils.writeToPreparedStatement(pstmt, index, value);
					++index;
				}
			}
			for( RecordSelector exp : effectiveRecordSelectors ) {
				for(TypedValue value : exp.getQueryValues(tableSchema, variables)) {
					ColumnDataUtils.writeToPreparedStatement(pstmt, index, value);
					++index;
				}
			}
			for( FieldSelector groupBySelector : effectiveGroupBySelectors ) {
				for(TypedValue value : groupBySelector.getQueryValues(tableSchema, variables)) {
					ColumnDataUtils.writeToPreparedStatement(pstmt, index, value);
					++index;
				}
			}
			for( OrderSpecifier orderSpecifier : effectiveOrderBySelectors ) {
				for(TypedValue value : orderSpecifier.getQueryValues(tableSchema, variables)) {
					ColumnDataUtils.writeToPreparedStatement(pstmt, index, value);
					++index;
				}
			}
			// Now, we need to retrieve the objects
			return ColumnDataUtils.executeStatementToJson(pstmt);
		} catch (Exception e) {
			// BUGFIX: the statement used to leak when binding or execution failed.
			try { pstmt.close(); } catch (Exception ignored) { /* best effort */ }
			throw e;
		}
	}
} | public class class_name {
public JSONArray query(
List<RecordSelector> recordSelectors
,List<FieldSelector> fieldSelectors
,List<FieldSelector> groupBySelectors
,List<OrderSpecifier> orderBySpecifiers
,Integer limit
,Integer offset
) throws Exception {
OperationAccess operationAccess = tableSchema.getQueryAccess();
if( false == operationAccess.isAllowed() ) {
throw new Exception("Attempting to query a table while the privilege is not allowed: "+tableSchema.getLogicalName()+" ("+tableSchema.getPhysicalName()+")");
}
List<FieldSelector> effectiveFieldSelectors = new Vector<FieldSelector>();
{
// Create a list of queried fields
if( null == fieldSelectors ) {
// Select all available column for read
for( ColumnData columnData : tableSchema.getColumns() ) {
if( columnData.isReadable() ) {
effectiveFieldSelectors.add(new FieldSelectorColumn(columnData.getColumnName()));
}
}
} else {
for(FieldSelector fieldSelector : fieldSelectors) {
for( ColumnData columnData : fieldSelector.getColumnData(tableSchema) ) {
if( null == columnData || false == columnData.isReadable() ) {
throw new Exception(
"Invalid selection on "+fieldSelector
+" which is not available in table "+tableSchema.getLogicalName()
+"("+tableSchema.getPhysicalName()+")"
);
}
}
effectiveFieldSelectors.add(fieldSelector);
}
}
// Sort. This offers greater reusability of the prepared statement.
Collections.sort(effectiveFieldSelectors, fieldSelectorComparator);
}
// groupBy fields
List<FieldSelector> effectiveGroupBySelectors = new Vector<FieldSelector>();
{
if( null != groupBySelectors ) {
for(FieldSelector fieldSelector : groupBySelectors) {
for( ColumnData columnData : fieldSelector.getColumnData(tableSchema) ) {
if( null == columnData || false == columnData.isReadable() ) {
throw new Exception(
"Invalid GROUP BY on "+fieldSelector
+" which is not available in table "+tableSchema.getLogicalName()
+"("+tableSchema.getPhysicalName()+")"
);
}
}
effectiveGroupBySelectors.add(fieldSelector);
}
}
Collections.sort(effectiveGroupBySelectors,fieldSelectorComparator);
}
// ORDER BY specifiers
List<OrderSpecifier> effectiveOrderBySelectors = new Vector<OrderSpecifier>();
{
if( null != orderBySpecifiers ) {
for(OrderSpecifier orderSpecifier : orderBySpecifiers) {
for( ColumnData columnData : orderSpecifier.getColumnData(tableSchema) ) {
if( null == columnData || false == columnData.isReadable() ) {
throw new Exception(
"Invalid ORDER BY on "+orderSpecifier
+" which is not available in table "+tableSchema.getLogicalName()
+"("+tableSchema.getPhysicalName()+")"
);
}
}
effectiveOrderBySelectors.add(orderSpecifier); // depends on control dependency: [for], data = [orderSpecifier]
}
}
}
// Figure out all WHERE clauses
List<RecordSelector> effectiveRecordSelectors = computeEffectiveWhereClauses(recordSelectors, operationAccess);
// Create SQL command
PreparedStatement pstmt = null;
{
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
pw.print("SELECT ");
{
boolean first = true;
for(FieldSelector fieldSelector : effectiveFieldSelectors) {
if( first ) {
first = false; // depends on control dependency: [if], data = [none]
} else {
pw.print(","); // depends on control dependency: [if], data = [none]
}
pw.print( fieldSelector.getQueryString(tableSchema, SqlElement.Phase.SELECT) ); // depends on control dependency: [for], data = [fieldSelector]
}
}
pw.print(" FROM ");
pw.print(tableSchema.getPhysicalName());
{
boolean first = true;
for( RecordSelector exp : effectiveRecordSelectors ) {
if( first ) {
pw.print(" WHERE "); // depends on control dependency: [if], data = [none]
first = false; // depends on control dependency: [if], data = [none]
} else {
pw.print(" AND "); // depends on control dependency: [if], data = [none]
}
pw.print( exp.getQueryString(tableSchema, SqlElement.Phase.WHERE) ); // depends on control dependency: [for], data = [exp]
}
}
if( effectiveGroupBySelectors.size() > 0 ) {
boolean first = true;
for( FieldSelector groupColumn : effectiveGroupBySelectors ) {
if( first ) {
pw.print(" GROUP BY "); // depends on control dependency: [if], data = [none]
first = false; // depends on control dependency: [if], data = [none]
} else {
pw.print(","); // depends on control dependency: [if], data = [none]
}
pw.print( groupColumn.getQueryString(tableSchema, SqlElement.Phase.GROUP_BY) ); // depends on control dependency: [for], data = [groupColumn]
}
}
if( effectiveOrderBySelectors.size() > 0 ) {
boolean first = true;
for( OrderSpecifier orderSpecifier : effectiveOrderBySelectors ) {
if( first ) {
pw.print(" ORDER BY "); // depends on control dependency: [if], data = [none]
first = false; // depends on control dependency: [if], data = [none]
} else {
pw.print(","); // depends on control dependency: [if], data = [none]
}
pw.print( orderSpecifier.getQueryString(tableSchema, SqlElement.Phase.ORDER_BY) ); // depends on control dependency: [for], data = [orderSpecifier]
}
}
if( null != limit ) {
int limitInt = limit.intValue();
pw.print(" LIMIT "); // depends on control dependency: [if], data = [none]
pw.print(limitInt); // depends on control dependency: [if], data = [none]
if( null != offset ) {
int offsetInt = offset.intValue();
pw.print(" OFFSET "); // depends on control dependency: [if], data = [none]
pw.print(offsetInt); // depends on control dependency: [if], data = [none]
}
}
pw.flush();
String sqlQuery = sw.toString();
pstmt = connection.prepareStatement(sqlQuery);
//logger.info("SQL Query: "+sqlQuery);
// Populate prepared statement
int index = 1;
for( FieldSelector fs : effectiveFieldSelectors ) {
for(TypedValue value : fs.getQueryValues(tableSchema, variables)) {
//logger.info("Value "+value.getValue()+" ("+value.getColumnDataType()+")");
ColumnDataUtils.writeToPreparedStatement(pstmt, index, value); // depends on control dependency: [for], data = [value]
++index; // depends on control dependency: [for], data = [none]
}
}
for( RecordSelector exp : effectiveRecordSelectors ) {
for(TypedValue value : exp.getQueryValues(tableSchema, variables)) {
//logger.info("Value "+value.getValue()+" ("+value.getColumnDataType()+")");
ColumnDataUtils.writeToPreparedStatement(pstmt, index, value); // depends on control dependency: [for], data = [value]
++index; // depends on control dependency: [for], data = [none]
}
}
for( FieldSelector groupBySelector : effectiveGroupBySelectors ) {
for(TypedValue value : groupBySelector.getQueryValues(tableSchema, variables)) {
//logger.info("Value "+value.getValue()+" ("+value.getColumnDataType()+")");
ColumnDataUtils.writeToPreparedStatement(pstmt, index, value); // depends on control dependency: [for], data = [value]
++index; // depends on control dependency: [for], data = [none]
}
}
for( OrderSpecifier orderSpecifier : effectiveOrderBySelectors ) {
for(TypedValue value : orderSpecifier.getQueryValues(tableSchema, variables)) {
//logger.info("Value "+value.getValue()+" ("+value.getColumnDataType()+")");
ColumnDataUtils.writeToPreparedStatement(pstmt, index, value); // depends on control dependency: [for], data = [value]
++index; // depends on control dependency: [for], data = [none]
}
}
}
// Now, we need to retrieve the objects
JSONArray array = ColumnDataUtils.executeStatementToJson(pstmt);
return array;
} } |
public class class_name {
    /**
     * Attempts to re-create the original remote exception type named by
     * {@code getClassName()}; falls back to this instance when the class cannot
     * be loaded, is not an {@link IOException}, or cannot be instantiated.
     *
     * @return the reconstructed exception, or {@code this} on any failure
     */
    public IOException unwrapRemoteException() {
        try {
            final Class<? extends IOException> realClass =
                Class.forName(getClassName()).asSubclass(IOException.class);
            return instantiateException(realClass);
        } catch (Exception ignored) {
            // Fall through: the wrapped type is unavailable — return the wrapper itself.
        }
        return this;
    }
} | public class class_name {
public IOException unwrapRemoteException() {
try {
Class<?> realClass = Class.forName(getClassName());
return instantiateException(realClass.asSubclass(IOException.class)); // depends on control dependency: [try], data = [none]
} catch(Exception e) {
// cannot instantiate the original exception, just return this
} // depends on control dependency: [catch], data = [none]
return this;
} } |
public class class_name {
    /**
     * Creates a {@code DomainObjectMatch} for a single domain object, choosing
     * the generic-model path when the object is exactly a {@link DomainObject}
     * and the typed path otherwise, then records the assignment for query
     * recording.
     *
     * @param domainObject the object to match
     * @return the created match
     */
    @SuppressWarnings("unchecked")
    public <T> DomainObjectMatch<T> createMatchFor(T domainObject) {
        final DomainObjectMatch<T> result;
        if (DomainObject.class.equals(domainObject.getClass())) { // generic model
            List<DomainObject> sources = new ArrayList<DomainObject>();
            sources.add((DomainObject) domainObject);
            String typeName = ((DomainObject) domainObject).getDomainObjectType().getName();
            result = (DomainObjectMatch<T>) createGenMatchForInternal(sources, typeName);
        } else {
            List<T> sources = new ArrayList<T>();
            sources.add(domainObject);
            result = this.createMatchForInternal(sources, (Class<T>) domainObject.getClass());
        }
        // Record against the delegate when one exists, otherwise against the match itself.
        DomainObjectMatch<?> delegate = APIAccess.getDelegate(result);
        QueryRecorder.recordAssignment(this, "createMatchFor", delegate != null ? delegate : result,
                QueryRecorder.reference(domainObject));
        return result;
    }
} | public class class_name {
@SuppressWarnings("unchecked")
public <T> DomainObjectMatch<T> createMatchFor(T domainObject) {
DomainObjectMatch<T> ret;
if (domainObject.getClass().equals(DomainObject.class)) { // generic model
List<DomainObject> source = new ArrayList<DomainObject>();
source.add((DomainObject) domainObject); // depends on control dependency: [if], data = [none]
String typeName = ((DomainObject)domainObject).getDomainObjectType().getName();
ret = (DomainObjectMatch<T>) createGenMatchForInternal(source, typeName); // depends on control dependency: [if], data = [none]
} else {
List<T> source = new ArrayList<T>();
source.add(domainObject); // depends on control dependency: [if], data = [none]
ret = this.createMatchForInternal(source, (Class<T>)domainObject.getClass()); // depends on control dependency: [if], data = [none]
}
DomainObjectMatch<?> delegate = APIAccess.getDelegate(ret);
DomainObjectMatch<?> match = delegate != null ? delegate : ret;
QueryRecorder.recordAssignment(this, "createMatchFor", match,
QueryRecorder.reference(domainObject));
return ret;
} } |
public class class_name {
    /**
     * Reads the configuration's DTD from its system location and registers it
     * with the XML entity resolver under the configured URL prefix.
     * Does nothing when no DTD system location is configured; read failures are
     * logged, not rethrown.
     *
     * @param configuration the XML configuration providing the DTD location/name
     */
    private void cacheDtdSystemId(I_CmsXmlConfiguration configuration) {
        if (configuration.getDtdSystemLocation() == null) {
            return;
        }
        String dtdFilename = configuration.getDtdFilename();
        String systemPath = configuration.getDtdSystemLocation() + dtdFilename;
        String urlId = configuration.getDtdUrlPrefix() + dtdFilename;
        try {
            String file = CmsFileUtil.readFile(systemPath, CmsEncoder.ENCODING_UTF_8);
            CmsXmlEntityResolver.cacheSystemId(urlId, file.getBytes(CmsEncoder.ENCODING_UTF_8));
            if (LOG.isDebugEnabled()) {
                LOG.debug(
                    Messages.get().getBundle().key(
                        Messages.LOG_CACHE_DTD_SYSTEM_ID_1,
                        urlId + " --> " + systemPath));
            }
        } catch (IOException e) {
            LOG.error(
                Messages.get().getBundle().key(Messages.LOG_CACHE_DTD_SYSTEM_ID_FAILURE_1, systemPath),
                e);
        }
    }
} | public class class_name {
private void cacheDtdSystemId(I_CmsXmlConfiguration configuration) {
if (configuration.getDtdSystemLocation() != null) {
try {
String file = CmsFileUtil.readFile(
configuration.getDtdSystemLocation() + configuration.getDtdFilename(),
CmsEncoder.ENCODING_UTF_8);
CmsXmlEntityResolver.cacheSystemId(
configuration.getDtdUrlPrefix() + configuration.getDtdFilename(),
file.getBytes(CmsEncoder.ENCODING_UTF_8)); // depends on control dependency: [try], data = [none]
if (LOG.isDebugEnabled()) {
LOG.debug(
Messages.get().getBundle().key(
Messages.LOG_CACHE_DTD_SYSTEM_ID_1,
configuration.getDtdUrlPrefix()
+ configuration.getDtdFilename()
+ " --> "
+ configuration.getDtdSystemLocation()
+ configuration.getDtdFilename())); // depends on control dependency: [if], data = [none]
}
} catch (IOException e) {
LOG.error(
Messages.get().getBundle().key(
Messages.LOG_CACHE_DTD_SYSTEM_ID_FAILURE_1,
configuration.getDtdSystemLocation() + configuration.getDtdFilename()),
e);
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
    /**
     * Copies the given properties into a plain string map.
     *
     * @param props     the properties source
     * @param localOnly when {@code true}, only the local key set is copied;
     *                  otherwise the full (inherited) key set
     * @return a mutable map of key to resolved value
     */
    public static Map<String, String> toStringMap(final Props props, final boolean localOnly) {
        final Set<String> keys = localOnly ? props.localKeySet() : props.getKeySet();
        final HashMap<String, String> result = new HashMap<>();
        for (final String key : keys) {
            result.put(key, props.get(key));
        }
        return result;
    }
} | public class class_name {
public static Map<String, String> toStringMap(final Props props, final boolean localOnly) {
final HashMap<String, String> map = new HashMap<>();
final Set<String> keyset = localOnly ? props.localKeySet() : props.getKeySet();
for (final String key : keyset) {
final String value = props.get(key);
map.put(key, value); // depends on control dependency: [for], data = [key]
}
return map;
} } |
public class class_name {
    /**
     * Returns all public methods of the target class whose names fully match
     * the given pattern.
     *
     * @param methodNamePattern pattern applied to each method name via {@code matches()}
     * @return the matching methods (possibly empty, never {@code null})
     */
    public Method[] findMethods( Pattern methodNamePattern ) {
        final List<Method> matches = new ArrayList<Method>();
        for (final Method candidate : this.targetClass.getMethods()) {
            if (methodNamePattern.matcher(candidate.getName()).matches()) {
                matches.add(candidate);
            }
        }
        return matches.toArray(new Method[matches.size()]);
    }
} | public class class_name {
public Method[] findMethods( Pattern methodNamePattern ) {
final Method[] allMethods = this.targetClass.getMethods();
final List<Method> result = new ArrayList<Method>();
for (int i = 0; i < allMethods.length; i++) {
final Method m = allMethods[i];
if (methodNamePattern.matcher(m.getName()).matches()) {
result.add(m); // depends on control dependency: [if], data = [none]
}
}
return result.toArray(new Method[result.size()]);
} } |
public class class_name {
    /**
     * Encodes the given content into a barcode/QR bit matrix.
     *
     * @param content the text to encode
     * @param format  the barcode format
     * @param config  encoding configuration; {@code null} falls back to defaults
     * @return the encoded bit matrix
     * @throws QrCodeException wrapping any {@link WriterException} from the encoder
     */
    public static BitMatrix encode(String content, BarcodeFormat format, QrConfig config) {
        if (null == config) {
            // Default configuration
            config = new QrConfig();
        }
        try {
            return new MultiFormatWriter().encode(content, format, config.width, config.height, config.toHints());
        } catch (WriterException e) {
            throw new QrCodeException(e);
        }
    }
} | public class class_name {
public static BitMatrix encode(String content, BarcodeFormat format, QrConfig config) {
final MultiFormatWriter multiFormatWriter = new MultiFormatWriter();
if (null == config) {
// 默认配置
config = new QrConfig();
// depends on control dependency: [if], data = [none]
}
BitMatrix bitMatrix;
try {
bitMatrix = multiFormatWriter.encode(content, format, config.width, config.height, config.toHints());
// depends on control dependency: [try], data = [none]
} catch (WriterException e) {
throw new QrCodeException(e);
}
// depends on control dependency: [catch], data = [none]
return bitMatrix;
} } |
public class class_name {
    /**
     * Cucumber step asserting that a table with the given name exists in the
     * PostgreSQL catalog ({@code pg_tables}).
     *
     * @param tableName the table name captured from the step text
     * @throws Exception propagated from connection setup
     */
    @Then("^table '(.+?)' exists$")
    public void checkTable(String tableName) throws Exception {
        Connection myConnection = this.commonspec.getConnection();
        // BUGFIX: bind the step argument as a parameter instead of concatenating it
        // into the SQL text (injection-safe), and close resources via try-with-resources.
        String query = "SELECT tablename FROM pg_tables WHERE tablename = ?";
        try (java.sql.PreparedStatement myStatement = myConnection.prepareStatement(query)) {
            myStatement.setString(1, tableName);
            try (java.sql.ResultSet rs = myStatement.executeQuery()) {
                if (!rs.next()) {
                    // No row at all: fail the step explicitly.
                    Assertions.assertThat(false).as("there are no table " + tableName).isTrue();
                } else {
                    String resultTableName = rs.getString(1);
                    assertThat(resultTableName).as("there are incorrect table name " + tableName).contains(tableName);
                }
            }
        } catch (Exception e) {
            // Preserved from the original: SQL errors are reported but do not fail
            // the step (assertion failures are Errors and still propagate).
            e.printStackTrace();
        }
    }
} | public class class_name {
@Then("^table '(.+?)' exists$")
public void checkTable(String tableName) throws Exception {
Statement myStatement = null;
Connection myConnection = this.commonspec.getConnection();
//query checks table existence, existence table name in system table pg_tables
String query = "SELECT * FROM pg_tables WHERE tablename = " + "\'" + tableName + "\'" + ";";
try {
myStatement = myConnection.createStatement();
java.sql.ResultSet rs = myStatement.executeQuery(query);
//if there are no data row
if (rs.next() == false) {
Assertions.assertThat(rs.next()).as("there are no table " + tableName).isTrue(); // depends on control dependency: [if], data = [(rs.next()]
} else {
//data exist
String resultTableName = rs.getString(2);
assertThat(resultTableName).as("there are incorrect table name " + tableName).contains(tableName); // depends on control dependency: [if], data = [none]
}
rs.close();
myStatement.close();
} catch (Exception e) {
e.printStackTrace();
}
} } |
public class class_name {
  /**
   * Rewrites {@code docs} in place so that no document exceeds
   * {@code flags.maxDocSize} elements (when that limit is positive).
   * Oversized documents are split at sentence boundaries via
   * {@code WordToSentenceProcessor}; a chunk is flushed *before* a sentence
   * would overflow it, so a single sentence longer than the limit is still
   * kept intact. Empty documents and empty chunks are dropped.
   *
   * @param docs the documents to normalize; mutated in place
   */
  private void fixDocLengths(List<List<IN>> docs) {
    final int maxDocSize = flags.maxDocSize;
    WordToSentenceProcessor<IN> wts = new WordToSentenceProcessor<IN>();
    List<List<IN>> newDocuments = new ArrayList<List<IN>>();
    for (List<IN> document : docs) {
      // Short enough (or limit disabled): keep the document as-is, dropping empties.
      if (maxDocSize <= 0 || document.size() <= maxDocSize) {
        if (!document.isEmpty()) {
          newDocuments.add(document);
        }
        continue;
      }
      List<List<IN>> sentences = wts.process(document);
      List<IN> newDocument = new ArrayList<IN>();
      for (List<IN> sentence : sentences) {
        // Flush the current chunk before this sentence would push it over the limit.
        if (newDocument.size() + sentence.size() > maxDocSize) {
          if (!newDocument.isEmpty()) {
            newDocuments.add(newDocument);
          }
          newDocument = new ArrayList<IN>();
        }
        newDocument.addAll(sentence);
      }
      // Flush the trailing chunk.
      if (!newDocument.isEmpty()) {
        newDocuments.add(newDocument);
      }
    }
    // Replace the contents of the caller's list in place.
    docs.clear();
    docs.addAll(newDocuments);
  }
} | public class class_name {
private void fixDocLengths(List<List<IN>> docs) {
final int maxDocSize = flags.maxDocSize;
WordToSentenceProcessor<IN> wts = new WordToSentenceProcessor<IN>();
List<List<IN>> newDocuments = new ArrayList<List<IN>>();
for (List<IN> document : docs) {
if (maxDocSize <= 0 || document.size() <= maxDocSize) {
if (!document.isEmpty()) {
newDocuments.add(document);
// depends on control dependency: [if], data = [none]
}
continue;
}
List<List<IN>> sentences = wts.process(document);
List<IN> newDocument = new ArrayList<IN>();
for (List<IN> sentence : sentences) {
if (newDocument.size() + sentence.size() > maxDocSize) {
if (!newDocument.isEmpty()) {
newDocuments.add(newDocument);
// depends on control dependency: [if], data = [none]
}
newDocument = new ArrayList<IN>();
// depends on control dependency: [if], data = [none]
}
newDocument.addAll(sentence);
// depends on control dependency: [for], data = [sentence]
}
if (!newDocument.isEmpty()) {
newDocuments.add(newDocument);
// depends on control dependency: [if], data = [none]
}
}
docs.clear();
docs.addAll(newDocuments);
} } |
public class class_name {
public void set(String name, String namespace, Object value) {
if (value != null) {
if (value instanceof Date) {
attributes.put(new XAttributeName(name, namespace, null), formatDateTime((Date)value));
} else {
attributes.put(new XAttributeName(name, namespace, null), value.toString());
}
} else {
attributes.remove(new XAttributeName(name, namespace, null));
}
} } | public class class_name {
public void set(String name, String namespace, Object value) {
if (value != null) {
if (value instanceof Date) {
attributes.put(new XAttributeName(name, namespace, null), formatDateTime((Date)value)); // depends on control dependency: [if], data = [none]
} else {
attributes.put(new XAttributeName(name, namespace, null), value.toString()); // depends on control dependency: [if], data = [none]
}
} else {
attributes.remove(new XAttributeName(name, namespace, null)); // depends on control dependency: [if], data = [null)]
}
} } |
public class class_name {
private InboundTransferTask addTransfer(Address source, IntSet segmentsFromSource) {
final InboundTransferTask inboundTransfer;
synchronized (transferMapsLock) {
if (trace) {
log.tracef("Adding transfer from %s for segments %s", source, segmentsFromSource);
}
segmentsFromSource.removeAll(transfersBySegment.keySet()); // already in progress segments are excluded
if (segmentsFromSource.isEmpty()) {
if (trace) {
log.tracef("All segments are already in progress, skipping");
}
return null;
}
inboundTransfer = new InboundTransferTask(segmentsFromSource, source, cacheTopology.getTopologyId(),
rpcManager, commandsFactory, timeout, cacheName, true);
addTransfer(inboundTransfer, segmentsFromSource);
}
stateRequestExecutor.executeAsync(() -> {
CompletableFuture<Void> transferStarted = inboundTransfer.requestSegments();
if (trace)
log.tracef("Waiting for inbound transfer to finish: %s", inboundTransfer);
return transferStarted.whenComplete((aVoid, throwable) -> onTaskCompletion(inboundTransfer));
});
return inboundTransfer;
} } | public class class_name {
private InboundTransferTask addTransfer(Address source, IntSet segmentsFromSource) {
final InboundTransferTask inboundTransfer;
synchronized (transferMapsLock) {
if (trace) {
log.tracef("Adding transfer from %s for segments %s", source, segmentsFromSource); // depends on control dependency: [if], data = [none]
}
segmentsFromSource.removeAll(transfersBySegment.keySet()); // already in progress segments are excluded
if (segmentsFromSource.isEmpty()) {
if (trace) {
log.tracef("All segments are already in progress, skipping"); // depends on control dependency: [if], data = [none]
}
return null; // depends on control dependency: [if], data = [none]
}
inboundTransfer = new InboundTransferTask(segmentsFromSource, source, cacheTopology.getTopologyId(),
rpcManager, commandsFactory, timeout, cacheName, true);
addTransfer(inboundTransfer, segmentsFromSource);
}
stateRequestExecutor.executeAsync(() -> {
CompletableFuture<Void> transferStarted = inboundTransfer.requestSegments();
if (trace)
log.tracef("Waiting for inbound transfer to finish: %s", inboundTransfer);
return transferStarted.whenComplete((aVoid, throwable) -> onTaskCompletion(inboundTransfer));
});
return inboundTransfer;
} } |
public class class_name {
private int determineVisibilityForSharedFieldAccessor(Field field) {
if (field.getOwner() == null) { // true field
int visibility = AstUtil.getVisibility(field.getAst());
if (visibility == Opcodes.ACC_PRIVATE) visibility = Opcodes.ACC_PROTECTED;
return visibility;
} else { // property
return Opcodes.ACC_PUBLIC;
}
} } | public class class_name {
private int determineVisibilityForSharedFieldAccessor(Field field) {
if (field.getOwner() == null) { // true field
int visibility = AstUtil.getVisibility(field.getAst());
if (visibility == Opcodes.ACC_PRIVATE) visibility = Opcodes.ACC_PROTECTED;
return visibility; // depends on control dependency: [if], data = [none]
} else { // property
return Opcodes.ACC_PUBLIC; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static int getTrimmedLength(CharSequence s) {
int len = s.length();
int start = 0;
while (start < len && s.charAt(start) <= ' ') {
start++;
}
int end = len;
while (end > start && s.charAt(end - 1) <= ' ') {
end--;
}
return end - start;
} } | public class class_name {
public static int getTrimmedLength(CharSequence s) {
int len = s.length();
int start = 0;
while (start < len && s.charAt(start) <= ' ') {
start++; // depends on control dependency: [while], data = [none]
}
int end = len;
while (end > start && s.charAt(end - 1) <= ' ') {
end--; // depends on control dependency: [while], data = [none]
}
return end - start;
} } |
public class class_name {
public String[] getProperty(final String key) {
final List<String> entriesList = new ArrayList<String>();
for (final Properties property : properties) {
final String values = property.getProperty(key);
if (values != null) {
entriesList.add(values);
}
}
final String[] entries = new String[entriesList.size()];
entriesList.toArray(entries);
return entries;
} } | public class class_name {
public String[] getProperty(final String key) {
final List<String> entriesList = new ArrayList<String>();
for (final Properties property : properties) {
final String values = property.getProperty(key);
if (values != null) {
entriesList.add(values); // depends on control dependency: [if], data = [(values]
}
}
final String[] entries = new String[entriesList.size()];
entriesList.toArray(entries);
return entries;
} } |
public class class_name {
public Set<Range> getRangeSet() {
if (rangeSet != null) {
return EnumSet.copyOf(rangeSet);
}
return Range.maskToRangeSet(mask);
} } | public class class_name {
public Set<Range> getRangeSet() {
if (rangeSet != null) {
return EnumSet.copyOf(rangeSet); // depends on control dependency: [if], data = [(rangeSet]
}
return Range.maskToRangeSet(mask);
} } |
public class class_name {
public static boolean isOptionalLong(TypeRef type) {
if (!(type instanceof ClassRef)) {
return false;
}
return JAVA_UTIL_OPTIONAL_LONG.equals(((ClassRef)type).getDefinition().getFullyQualifiedName());
} } | public class class_name {
public static boolean isOptionalLong(TypeRef type) {
if (!(type instanceof ClassRef)) {
return false; // depends on control dependency: [if], data = [none]
}
return JAVA_UTIL_OPTIONAL_LONG.equals(((ClassRef)type).getDefinition().getFullyQualifiedName());
} } |
public class class_name {
public final synchronized void add(final double v) {
sum0 += 1;
sum1 += v;
sum2 += v * v;
min = Math.min(min, v);
max = Math.max(max, v);
if (Math.abs(v) < com.simiacryptus.util.data.ScalarStatistics.zeroTol) {
zeros++;
}
else {
if (v < 0) {
negatives++;
}
else {
positives++;
}
sumLog += Math.log10(Math.abs(v));
}
} } | public class class_name {
public final synchronized void add(final double v) {
sum0 += 1;
sum1 += v;
sum2 += v * v;
min = Math.min(min, v);
max = Math.max(max, v);
if (Math.abs(v) < com.simiacryptus.util.data.ScalarStatistics.zeroTol) {
zeros++; // depends on control dependency: [if], data = [none]
}
else {
if (v < 0) {
negatives++; // depends on control dependency: [if], data = [none]
}
else {
positives++; // depends on control dependency: [if], data = [none]
}
sumLog += Math.log10(Math.abs(v)); // depends on control dependency: [if], data = [(Math.abs(v)]
}
} } |
public class class_name {
private void connect() {
try {
URL url = new URL(this.endpoint);
connection = (HttpURLConnection) url.openConnection();
connection.setReadTimeout(this.readTimeoutMs);
connection.setConnectTimeout(this.connectTimeoutMs);
connection.setRequestMethod(this.method.name());
if (method == HttpMethod.POST || method == HttpMethod.PUT) {
connection.setDoOutput(true);
} else {
connection.setDoOutput(false);
}
connection.setDoInput(true);
connection.setUseCaches(false);
for (String headerName : headers.keySet()) {
String headerValue = headers.get(headerName);
connection.setRequestProperty(headerName, headerValue);
}
connection.connect();
} catch (Exception e) {
throw new RuntimeException(e);
}
} } | public class class_name {
private void connect() {
try {
URL url = new URL(this.endpoint);
connection = (HttpURLConnection) url.openConnection(); // depends on control dependency: [try], data = [none]
connection.setReadTimeout(this.readTimeoutMs); // depends on control dependency: [try], data = [none]
connection.setConnectTimeout(this.connectTimeoutMs); // depends on control dependency: [try], data = [none]
connection.setRequestMethod(this.method.name()); // depends on control dependency: [try], data = [none]
if (method == HttpMethod.POST || method == HttpMethod.PUT) {
connection.setDoOutput(true); // depends on control dependency: [if], data = [none]
} else {
connection.setDoOutput(false); // depends on control dependency: [if], data = [none]
}
connection.setDoInput(true); // depends on control dependency: [try], data = [none]
connection.setUseCaches(false); // depends on control dependency: [try], data = [none]
for (String headerName : headers.keySet()) {
String headerValue = headers.get(headerName);
connection.setRequestProperty(headerName, headerValue); // depends on control dependency: [for], data = [headerName]
}
connection.connect(); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new RuntimeException(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public static String charToEscape(char ch) {
String hexValue = Integer.toHexString(ch);
if (hexValue.length() == 1) {
return "\\u000" + hexValue;
}
if (hexValue.length() == 2) {
return "\\u00" + hexValue;
}
if (hexValue.length() == 3) {
return "\\u0" + hexValue;
}
return "\\u" + hexValue;
} } | public class class_name {
public static String charToEscape(char ch) {
String hexValue = Integer.toHexString(ch);
if (hexValue.length() == 1) {
return "\\u000" + hexValue;
// depends on control dependency: [if], data = [none]
}
if (hexValue.length() == 2) {
return "\\u00" + hexValue;
// depends on control dependency: [if], data = [none]
}
if (hexValue.length() == 3) {
return "\\u0" + hexValue;
// depends on control dependency: [if], data = [none]
}
return "\\u" + hexValue;
} } |
public class class_name {
public Record parseRecord(DocumentBuilder stringdb, Element elParent, String strTagName, String strDatabase, BaseTable table, BaseTable tableMain, boolean bIsMainHasReferenceField)
{
Record record = null;
if (table != null)
record = table.getRecord();
if (record == null)
{
record = this.getRecord(strTagName);
if (record == null)
{ // Not found, create it!
record = new XmlRecord();
((XmlRecord)record).setTableName(strTagName);
if (strDatabase != null)
((XmlRecord)record).setDatabaseName(strDatabase);
((XmlRecord)record).setDatabaseType(DBConstants.REMOTE);
record.init(this);
Record recMain = null;
if (tableMain != null)
recMain = tableMain.getRecord();
if (recMain != null) // If this is a sub-record
if (!bIsMainHasReferenceField) // And the main record doesn't have a reference to my key
{
ReferenceField fld = new ReferenceField(record, recMain.getTableNames(false) + "ID", DBConstants.DEFAULT_FIELD_LENGTH, null, null);
KeyArea keyArea = record.makeIndex(DBConstants.NOT_UNIQUE, recMain.getTableNames(false) + "ID");
keyArea.addKeyField(fld, DBConstants.ASCENDING);
}
RecordParser parser = new RecordParser(elParent);
while (parser.next(stringdb) != null)
{ // First, add the fields to the record
parser.findField(record, true); // This will create if not found
}
}
table = record.getTable();
}
// Now its time to move the data to the fields and write the record.
RecordParser parser = new RecordParser(elParent);
boolean bFirstTime = true;
while (parser.next(stringdb) != null)
{
Node elChild = parser.getChildNode();
String strName = parser.getName();
String strValue = parser.getValue();
boolean bIsRecord = parser.isRecord();
if (bFirstTime)
{ // First time thru
bFirstTime = false;
try {
table.addNew();
Record recMain = null;
if (tableMain != null)
recMain = tableMain.getRecord();
if (recMain != null)
{ // If this is a sub-file, be sure to update the reference field.
BaseField field = record.getField(recMain.getTableNames(false) + "ID");
if (field != null)
if (field instanceof ReferenceField)
if (!bIsMainHasReferenceField)
{ // If the main record has to get me, but doesn't have my key, save it's key
if (recMain.getEditMode() == DBConstants.EDIT_ADD)
{ // Main record not added yet, add it now
recMain = this.updateRecord(tableMain, true); // And return the record written
tableMain.edit();
}
((ReferenceField)field).setReference(recMain); // Reference the main record.
}
}
} catch (DBException ex) {
ex.printStackTrace();
}
}
BaseField field = parser.findField(record, true);
if (bIsRecord)
{ // If this field directly references a record, you have to write the sub-record first.
boolean bIsReferenceField = (parser.getReferenceName() != null);
Record recNew = this.parseRecord(stringdb, (Element)elChild, strName, strDatabase, null, table, bIsReferenceField);
if (bIsReferenceField) if (recNew != null)
{
((ReferenceField)field).setReference(recNew);
field = null; // Don't set it twice
}
}
if (field != null)
{ // Set the data
if (field instanceof ObjectField)
{
try {
XmlUtilities.decodeFieldData(field, strValue);
} catch (Exception e) {
Utility.getLogger().severe("Error - bad base64 input field: " + record.getRecordName());
e.printStackTrace();
}
}
else if (field instanceof DateTimeField)
XmlUtilities.decodeDateTime((DateTimeField)field, strValue);
else
field.setString(strValue);
}
}
record = this.updateRecord(table, bIsMainHasReferenceField);
return record;
} } | public class class_name {
public Record parseRecord(DocumentBuilder stringdb, Element elParent, String strTagName, String strDatabase, BaseTable table, BaseTable tableMain, boolean bIsMainHasReferenceField)
{
Record record = null;
if (table != null)
record = table.getRecord();
if (record == null)
{
record = this.getRecord(strTagName); // depends on control dependency: [if], data = [none]
if (record == null)
{ // Not found, create it!
record = new XmlRecord(); // depends on control dependency: [if], data = [none]
((XmlRecord)record).setTableName(strTagName); // depends on control dependency: [if], data = [none]
if (strDatabase != null)
((XmlRecord)record).setDatabaseName(strDatabase);
((XmlRecord)record).setDatabaseType(DBConstants.REMOTE); // depends on control dependency: [if], data = [none]
record.init(this); // depends on control dependency: [if], data = [none]
Record recMain = null;
if (tableMain != null)
recMain = tableMain.getRecord();
if (recMain != null) // If this is a sub-record
if (!bIsMainHasReferenceField) // And the main record doesn't have a reference to my key
{
ReferenceField fld = new ReferenceField(record, recMain.getTableNames(false) + "ID", DBConstants.DEFAULT_FIELD_LENGTH, null, null);
KeyArea keyArea = record.makeIndex(DBConstants.NOT_UNIQUE, recMain.getTableNames(false) + "ID");
keyArea.addKeyField(fld, DBConstants.ASCENDING); // depends on control dependency: [if], data = [none]
}
RecordParser parser = new RecordParser(elParent);
while (parser.next(stringdb) != null)
{ // First, add the fields to the record
parser.findField(record, true); // This will create if not found // depends on control dependency: [while], data = [none]
}
}
table = record.getTable(); // depends on control dependency: [if], data = [none]
}
// Now its time to move the data to the fields and write the record.
RecordParser parser = new RecordParser(elParent);
boolean bFirstTime = true;
while (parser.next(stringdb) != null)
{
Node elChild = parser.getChildNode();
String strName = parser.getName();
String strValue = parser.getValue();
boolean bIsRecord = parser.isRecord();
if (bFirstTime)
{ // First time thru
bFirstTime = false; // depends on control dependency: [if], data = [none]
try {
table.addNew(); // depends on control dependency: [try], data = [none]
Record recMain = null;
if (tableMain != null)
recMain = tableMain.getRecord();
if (recMain != null)
{ // If this is a sub-file, be sure to update the reference field.
BaseField field = record.getField(recMain.getTableNames(false) + "ID");
if (field != null)
if (field instanceof ReferenceField)
if (!bIsMainHasReferenceField)
{ // If the main record has to get me, but doesn't have my key, save it's key
if (recMain.getEditMode() == DBConstants.EDIT_ADD)
{ // Main record not added yet, add it now
recMain = this.updateRecord(tableMain, true); // And return the record written // depends on control dependency: [if], data = [none]
tableMain.edit(); // depends on control dependency: [if], data = [none]
}
((ReferenceField)field).setReference(recMain); // Reference the main record. // depends on control dependency: [if], data = [none]
}
}
} catch (DBException ex) {
ex.printStackTrace();
} // depends on control dependency: [catch], data = [none]
}
BaseField field = parser.findField(record, true);
if (bIsRecord)
{ // If this field directly references a record, you have to write the sub-record first.
boolean bIsReferenceField = (parser.getReferenceName() != null);
Record recNew = this.parseRecord(stringdb, (Element)elChild, strName, strDatabase, null, table, bIsReferenceField);
if (bIsReferenceField) if (recNew != null)
{
((ReferenceField)field).setReference(recNew); // depends on control dependency: [if], data = [(recNew]
field = null; // Don't set it twice // depends on control dependency: [if], data = [none]
}
}
if (field != null)
{ // Set the data
if (field instanceof ObjectField)
{
try {
XmlUtilities.decodeFieldData(field, strValue); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
Utility.getLogger().severe("Error - bad base64 input field: " + record.getRecordName());
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
}
else if (field instanceof DateTimeField)
XmlUtilities.decodeDateTime((DateTimeField)field, strValue);
else
field.setString(strValue);
}
}
record = this.updateRecord(table, bIsMainHasReferenceField);
return record;
} } |
public class class_name {
public static synchronized Map<String,OpDef> opDescs() {
if(DESCRIPTORS != null){
return DESCRIPTORS;
}
try (InputStream contents = new ClassPathResource("ops.proto").getInputStream(); BufferedInputStream bis2 = new BufferedInputStream(contents); BufferedReader reader = new BufferedReader(new InputStreamReader(bis2))) {
org.tensorflow.framework.OpList.Builder builder = org.tensorflow.framework.OpList.newBuilder();
StringBuilder str = new StringBuilder();
String line = null;
while ((line = reader.readLine()) != null) {
str.append(line);//.append("\n");
}
TextFormat.getParser().merge(str.toString(), builder);
List<OpDef> list = builder.getOpList();
Map<String,OpDef> map = new HashMap<>();
for(OpDef opDef : list) {
map.put(opDef.getName(),opDef);
}
DESCRIPTORS = map;
return DESCRIPTORS;
} catch (Exception e) {
throw new ND4JIllegalStateException("Unable to load tensorflow descriptors", e);
}
} } | public class class_name {
public static synchronized Map<String,OpDef> opDescs() {
if(DESCRIPTORS != null){
return DESCRIPTORS; // depends on control dependency: [if], data = [none]
}
try (InputStream contents = new ClassPathResource("ops.proto").getInputStream(); BufferedInputStream bis2 = new BufferedInputStream(contents); BufferedReader reader = new BufferedReader(new InputStreamReader(bis2))) {
org.tensorflow.framework.OpList.Builder builder = org.tensorflow.framework.OpList.newBuilder();
StringBuilder str = new StringBuilder();
String line = null;
while ((line = reader.readLine()) != null) {
str.append(line);//.append("\n"); // depends on control dependency: [while], data = [none]
}
TextFormat.getParser().merge(str.toString(), builder);
List<OpDef> list = builder.getOpList();
Map<String,OpDef> map = new HashMap<>();
for(OpDef opDef : list) {
map.put(opDef.getName(),opDef); // depends on control dependency: [for], data = [opDef]
}
DESCRIPTORS = map;
return DESCRIPTORS;
} catch (Exception e) {
throw new ND4JIllegalStateException("Unable to load tensorflow descriptors", e);
}
} } |
public class class_name {
public Integer getNumberFor(WebElement element) {
Integer number = null;
if ("li".equalsIgnoreCase(element.getTagName())
&& element.isDisplayed()) {
int num;
String ownVal = element.getAttribute("value");
if (ownVal != null && !"0".equals(ownVal)) {
num = toInt(ownVal, 0);
} else {
String start = element.findElement(By.xpath("ancestor::ol")).getAttribute("start");
num = toInt(start, 1);
List<WebElement> allItems = element.findElements(By.xpath("ancestor::ol/li"));
int index = allItems.indexOf(element);
for (int i = 0; i < index; i++) {
WebElement item = allItems.get(i);
if (item.isDisplayed()) {
num++;
String val = item.getAttribute("value");
int valNum = toInt(val, num);
if (valNum != 0) {
num = valNum + 1;
}
}
}
}
number = num;
}
return number;
} } | public class class_name {
public Integer getNumberFor(WebElement element) {
Integer number = null;
if ("li".equalsIgnoreCase(element.getTagName())
&& element.isDisplayed()) {
int num;
String ownVal = element.getAttribute("value");
if (ownVal != null && !"0".equals(ownVal)) {
num = toInt(ownVal, 0); // depends on control dependency: [if], data = [(ownVal]
} else {
String start = element.findElement(By.xpath("ancestor::ol")).getAttribute("start");
num = toInt(start, 1); // depends on control dependency: [if], data = [none]
List<WebElement> allItems = element.findElements(By.xpath("ancestor::ol/li"));
int index = allItems.indexOf(element);
for (int i = 0; i < index; i++) {
WebElement item = allItems.get(i);
if (item.isDisplayed()) {
num++; // depends on control dependency: [if], data = [none]
String val = item.getAttribute("value");
int valNum = toInt(val, num);
if (valNum != 0) {
num = valNum + 1; // depends on control dependency: [if], data = [none]
}
}
}
}
number = num; // depends on control dependency: [if], data = [none]
}
return number;
} } |
public class class_name {
protected final StackMapType[] calculate_live_local_types(MethodGen mg, int location) {
int max_local_index = -1;
StackMapType[] local_map_types = new StackMapType[mg.getMaxLocals()];
Arrays.fill(local_map_types, new StackMapType(Const.ITEM_Bogus, -1, pool.getConstantPool()));
for (LocalVariableGen lv : mg.getLocalVariables()) {
if (location >= lv.getStart().getPosition()) {
if (lv.getLiveToEnd() || location < lv.getEnd().getPosition()) {
int i = lv.getIndex();
local_map_types[i] = generate_StackMapType_from_Type(lv.getType());
max_local_index = Math.max(max_local_index, i);
}
}
}
return Arrays.copyOf(local_map_types, max_local_index + 1);
} } | public class class_name {
protected final StackMapType[] calculate_live_local_types(MethodGen mg, int location) {
int max_local_index = -1;
StackMapType[] local_map_types = new StackMapType[mg.getMaxLocals()];
Arrays.fill(local_map_types, new StackMapType(Const.ITEM_Bogus, -1, pool.getConstantPool()));
for (LocalVariableGen lv : mg.getLocalVariables()) {
if (location >= lv.getStart().getPosition()) {
if (lv.getLiveToEnd() || location < lv.getEnd().getPosition()) {
int i = lv.getIndex();
local_map_types[i] = generate_StackMapType_from_Type(lv.getType()); // depends on control dependency: [if], data = [none]
max_local_index = Math.max(max_local_index, i); // depends on control dependency: [if], data = [none]
}
}
}
return Arrays.copyOf(local_map_types, max_local_index + 1);
} } |
public class class_name {
boolean isPredecessorOf(final ControlFlowBlock possibleSuccessor) {
boolean result = false;
for (final ControlFlowBlock directSuccessor : successors) {
if (directSuccessor.equals(possibleSuccessor) || directSuccessor.isPredecessorOf(possibleSuccessor)) {
result = true;
break;
}
}
return result;
} } | public class class_name {
boolean isPredecessorOf(final ControlFlowBlock possibleSuccessor) {
boolean result = false;
for (final ControlFlowBlock directSuccessor : successors) {
if (directSuccessor.equals(possibleSuccessor) || directSuccessor.isPredecessorOf(possibleSuccessor)) {
result = true; // depends on control dependency: [if], data = [none]
break;
}
}
return result;
} } |
public class class_name {
public String getExponentPartAfter(final int digit) {
final int length = exponentPart.length();
if(length < digit || digit <= 0) {
return "";
}
return exponentPart.substring(0, length - digit + 1);
} } | public class class_name {
public String getExponentPartAfter(final int digit) {
final int length = exponentPart.length();
if(length < digit || digit <= 0) {
return "";
// depends on control dependency: [if], data = [none]
}
return exponentPart.substring(0, length - digit + 1);
} } |
public class class_name {
public ByteBuffer createMessage(boolean compress, int version)
{
int header = 0;
// set compression bit.
if (compress)
header |= 4;
// set streaming bit
header |= 8;
// Setting up the version bit
header |= (version << 8);
byte[] bytes;
try
{
int size = (int)StreamInitMessage.serializer.serializedSize(this, version);
DataOutputBuffer buffer = new DataOutputBuffer(size);
StreamInitMessage.serializer.serialize(this, buffer, version);
bytes = buffer.getData();
}
catch (IOException e)
{
throw new RuntimeException(e);
}
assert bytes.length > 0;
ByteBuffer buffer = ByteBuffer.allocate(4 + 4 + bytes.length);
buffer.putInt(MessagingService.PROTOCOL_MAGIC);
buffer.putInt(header);
buffer.put(bytes);
buffer.flip();
return buffer;
} } | public class class_name {
public ByteBuffer createMessage(boolean compress, int version)
{
int header = 0;
// set compression bit.
if (compress)
header |= 4;
// set streaming bit
header |= 8;
// Setting up the version bit
header |= (version << 8);
byte[] bytes;
try
{
int size = (int)StreamInitMessage.serializer.serializedSize(this, version);
DataOutputBuffer buffer = new DataOutputBuffer(size);
StreamInitMessage.serializer.serialize(this, buffer, version); // depends on control dependency: [try], data = [none]
bytes = buffer.getData(); // depends on control dependency: [try], data = [none]
}
catch (IOException e)
{
throw new RuntimeException(e);
} // depends on control dependency: [catch], data = [none]
assert bytes.length > 0;
ByteBuffer buffer = ByteBuffer.allocate(4 + 4 + bytes.length);
buffer.putInt(MessagingService.PROTOCOL_MAGIC);
buffer.putInt(header);
buffer.put(bytes);
buffer.flip();
return buffer;
} } |
public class class_name {
public Block getStoredBlock(int namespaceId, long blkid,
boolean useOnDiskLength) throws IOException {
lock.readLock().lock();
try {
ReplicaToRead replica = getReplicaToRead(namespaceId, new Block(
blkid));
if (replica == null) {
return null;
}
File blockfile = replica.getDataFileToRead();
if (blockfile == null) {
return null;
}
File metafile = null;
if (!replica.isInlineChecksum()) {
metafile = BlockWithChecksumFileWriter.findMetaFile(blockfile, true);
if (metafile == null) {
return null;
}
}
Block block = new Block(blkid);
if (useOnDiskLength) {
block.setNumBytes(replica.getBytesWritten());
} else {
block.setNumBytes(replica.getBytesVisible());
}
if (replica.isInlineChecksum()) {
block.setGenerationStamp(BlockInlineChecksumReader
.getGenerationStampFromInlineChecksumFile(blockfile.getName()));
} else {
block.setGenerationStamp(BlockWithChecksumFileReader
.parseGenerationStampInMetaFile(blockfile, metafile));
}
return block;
} finally {
lock.readLock().unlock();
}
} } | public class class_name {
public Block getStoredBlock(int namespaceId, long blkid,
boolean useOnDiskLength) throws IOException {
lock.readLock().lock();
try {
ReplicaToRead replica = getReplicaToRead(namespaceId, new Block(
blkid));
if (replica == null) {
return null; // depends on control dependency: [if], data = [none]
}
File blockfile = replica.getDataFileToRead();
if (blockfile == null) {
return null; // depends on control dependency: [if], data = [none]
}
File metafile = null;
if (!replica.isInlineChecksum()) {
metafile = BlockWithChecksumFileWriter.findMetaFile(blockfile, true); // depends on control dependency: [if], data = [none]
if (metafile == null) {
return null; // depends on control dependency: [if], data = [none]
}
}
Block block = new Block(blkid);
if (useOnDiskLength) {
block.setNumBytes(replica.getBytesWritten()); // depends on control dependency: [if], data = [none]
} else {
block.setNumBytes(replica.getBytesVisible()); // depends on control dependency: [if], data = [none]
}
if (replica.isInlineChecksum()) {
block.setGenerationStamp(BlockInlineChecksumReader
.getGenerationStampFromInlineChecksumFile(blockfile.getName())); // depends on control dependency: [if], data = [none]
} else {
block.setGenerationStamp(BlockWithChecksumFileReader
.parseGenerationStampInMetaFile(blockfile, metafile)); // depends on control dependency: [if], data = [none]
}
return block;
} finally {
lock.readLock().unlock();
}
} } |
public class class_name {
public ProviderGroup add(ProviderInfo providerInfo) {
if (providerInfo == null) {
return this;
}
ConcurrentHashSet<ProviderInfo> tmp = new ConcurrentHashSet<ProviderInfo>(providerInfos);
tmp.add(providerInfo); // 排重
this.providerInfos = new ArrayList<ProviderInfo>(tmp);
return this;
} } | public class class_name {
public ProviderGroup add(ProviderInfo providerInfo) {
if (providerInfo == null) {
return this; // depends on control dependency: [if], data = [none]
}
ConcurrentHashSet<ProviderInfo> tmp = new ConcurrentHashSet<ProviderInfo>(providerInfos);
tmp.add(providerInfo); // 排重
this.providerInfos = new ArrayList<ProviderInfo>(tmp);
return this;
} } |
public class class_name {
public LightweightTypeReference getReturnTypeOfOverriddenOperation(JvmOperation operation, LightweightTypeReference context) {
if (operation.getVisibility() == JvmVisibility.PRIVATE || !InferredTypeIndicator.isInferred(operation.getReturnType())) {
return null;
}
BottomResolvedOperation resolvedOperation = new BottomResolvedOperation(operation, context, overrideTester);
List<IResolvedOperation> overriddenMethods = resolvedOperation.getOverriddenAndImplementedMethods();
if (overriddenMethods.isEmpty())
return null;
LightweightTypeReference result = overriddenMethods.get(0).getResolvedReturnType();
return result;
} } | public class class_name {
public LightweightTypeReference getReturnTypeOfOverriddenOperation(JvmOperation operation, LightweightTypeReference context) {
if (operation.getVisibility() == JvmVisibility.PRIVATE || !InferredTypeIndicator.isInferred(operation.getReturnType())) {
return null; // depends on control dependency: [if], data = [none]
}
BottomResolvedOperation resolvedOperation = new BottomResolvedOperation(operation, context, overrideTester);
List<IResolvedOperation> overriddenMethods = resolvedOperation.getOverriddenAndImplementedMethods();
if (overriddenMethods.isEmpty())
return null;
LightweightTypeReference result = overriddenMethods.get(0).getResolvedReturnType();
return result;
} } |
public class class_name {
private void bootFromDsqls(String path) {
File[] files = new File(path).listFiles(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.endsWith(".sql");
}
});
for (File file:files) {//In this context only add/update is cared for.
sqlSniffer.onCreate(Paths.get(file.toURI()));
}
} } | public class class_name {
private void bootFromDsqls(String path) {
File[] files = new File(path).listFiles(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.endsWith(".sql");
}
});
for (File file:files) {//In this context only add/update is cared for.
sqlSniffer.onCreate(Paths.get(file.toURI())); // depends on control dependency: [for], data = [file]
}
} } |
public class class_name {
/**
 * Looks up the EntityManagerFactory for the given persistence unit on behalf of
 * the named J2EE component, optionally wrapping it via the JPA runtime.
 *
 * @param puId identifier of the persistence unit.
 * @param j2eeName name of the requesting J2EE component.
 * @param getEmfWrapper when {@code true}, the factory is wrapped by the JPA runtime.
 * @return the (possibly wrapped) factory, or {@code null} when the persistence
 *     unit is unknown.
 */
@Override
public EntityManagerFactory getEntityManagerFactory
                (JPAPuId puId,
                 J2EEName j2eeName,
                 boolean getEmfWrapper)
{
    final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
    if (isTraceOn && tc.isEntryEnabled()) {
        Tr.entry(tc, "getEntityManagerFactory : " + puId + ", " + j2eeName);
    }
    EntityManagerFactory factory = null;
    final JPAPUnitInfo unitInfo = findPersistenceUnitInfo(puId);
    if (unitInfo != null) {
        factory = unitInfo.getEntityManagerFactory(j2eeName);
        if (getEmfWrapper) {
            // Hand back the runtime's wrapper rather than the raw provider factory.
            factory = getJPARuntime().createJPAEMFactory(puId, j2eeName, factory);
        }
    }
    if (isTraceOn && tc.isEntryEnabled()) {
        Tr.exit(tc, "getEntityManagerFactory : " + factory);
    }
    return factory;
} } | public class class_name {
/**
 * Dependency-annotated variant of {@code getEntityManagerFactory}; trailing
 * "depends on control dependency" comments are generated labels.
 */
@Override
public EntityManagerFactory getEntityManagerFactory
                (JPAPuId puId,
                 J2EEName j2eeName, // d510184
                 boolean getEmfWrapper) // d416151.3.1
{
    final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
    if (isTraceOn && tc.isEntryEnabled())
        Tr.entry(tc, "getEntityManagerFactory : " + puId + ", " + j2eeName);
    EntityManagerFactory rtnFactory = null;
    JPAPUnitInfo puInfo = findPersistenceUnitInfo(puId);
    if (puInfo != null)
    {
        rtnFactory = puInfo.getEntityManagerFactory(j2eeName); // d510184 // depends on control dependency: [if], data = [none]
        if (getEmfWrapper) // d416151.3.1 d510184
        {
            rtnFactory = getJPARuntime().createJPAEMFactory(puId, j2eeName, rtnFactory); // d416151.3.1 d510184, d706751 // depends on control dependency: [if], data = [none]
        }
    }
    if (isTraceOn && tc.isEntryEnabled())
        Tr.exit(tc, "getEntityManagerFactory : " + rtnFactory);
    return rtnFactory;
} } |
public class class_name {
/**
 * Returns the {@code IfcProcedureTypeEnum} EEnum, lazily resolving classifier
 * index 1036 from the registered Ifc4 package on first access.
 *
 * @return the cached EEnum instance.
 */
@Override
public EEnum getIfcProcedureTypeEnum() {
    if (ifcProcedureTypeEnumEEnum == null) {
        // Resolve once from the global package registry, then cache.
        final EPackage ifc4Package = EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI);
        ifcProcedureTypeEnumEEnum = (EEnum) ifc4Package.getEClassifiers().get(1036);
    }
    return ifcProcedureTypeEnumEEnum;
} } | public class class_name {
/**
 * Dependency-annotated variant of {@code getIfcProcedureTypeEnum}; the
 * standalone "depends on" comment is a generated label.
 */
@Override
public EEnum getIfcProcedureTypeEnum() {
    if (ifcProcedureTypeEnumEEnum == null) {
        ifcProcedureTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
                .getEClassifiers().get(1036);
        // depends on control dependency: [if], data = [none]
    }
    return ifcProcedureTypeEnumEEnum;
} } |
public class class_name {
/**
 * Draws an axis-aligned cross centered at {@code mid}: for every dimension
 * visible in the 2D projection, a segment from {@code mid - radius} to
 * {@code mid + radius} along that dimension is projected and rendered.
 *
 * @param svgp plot that owns the generated element.
 * @param proj projection from data space to render space.
 * @param mid cross center in data space.
 * @param radius half-length of each cross arm, in data units.
 * @return the SVG element holding the cross path.
 */
public static Element drawCross(SVGPlot svgp, Projection2D proj, NumberVector mid, double radius) {
    final double[] center = mid.toArray();
    final long[] visible = proj.getVisibleDimensions2D();
    final SVGPath cross = new SVGPath();
    for (int d = BitsUtil.nextSetBit(visible, 0); d >= 0; d = BitsUtil.nextSetBit(visible, d + 1)) {
        // Shift the center in-place to ±radius, project both endpoints, then restore.
        center[d] += radius;
        final double[] hi = proj.fastProjectDataToRenderSpace(center);
        center[d] -= 2 * radius;
        final double[] lo = proj.fastProjectDataToRenderSpace(center);
        center[d] += radius;
        cross.moveTo(hi[0], hi[1]).drawTo(lo[0], lo[1]).close();
    }
    return cross.makeElement(svgp);
} } | public class class_name {
/**
 * Dependency-annotated variant of {@code drawCross}; trailing "depends on"
 * comments are generated labels.
 */
public static Element drawCross(SVGPlot svgp, Projection2D proj, NumberVector mid, double radius) {
    final double[] v_mid = mid.toArray();
    final long[] dims = proj.getVisibleDimensions2D();
    SVGPath path = new SVGPath();
    for(int dim = BitsUtil.nextSetBit(dims, 0); dim >= 0; dim = BitsUtil.nextSetBit(dims, dim + 1)) {
        v_mid[dim] += radius; // depends on control dependency: [for], data = [dim]
        double[] p1 = proj.fastProjectDataToRenderSpace(v_mid);
        v_mid[dim] -= radius * 2; // depends on control dependency: [for], data = [dim]
        double[] p2 = proj.fastProjectDataToRenderSpace(v_mid);
        v_mid[dim] += radius; // depends on control dependency: [for], data = [dim]
        path.moveTo(p1[0], p1[1]).drawTo(p2[0], p2[1]).close(); // depends on control dependency: [for], data = [none]
    }
    return path.makeElement(svgp);
} } |
public class class_name {
/**
 * Configures and runs the SARL batch compiler over the given source folders,
 * writing generated artifacts to {@code sarlOutputPath} and classes to
 * {@code classOutputPath}. The build fails ({@link MojoFailureException}) when
 * compilation reports a syntax error or an ERROR-severity issue.
 *
 * @param classPath the compile-time class path.
 * @param sourcePaths candidate source folders; entries that are not directories are skipped.
 * @param sarlOutputPath output folder for generated SARL artifacts.
 * @param classOutputPath output folder for compiled classes.
 * @throws MojoExecutionException on execution-environment failures.
 * @throws MojoFailureException when the batch compiler reports failure.
 */
@SuppressWarnings("checkstyle:npathcomplexity")
protected void compile(List<File> classPath, List<File> sourcePaths, File sarlOutputPath,
        File classOutputPath) throws MojoExecutionException, MojoFailureException {
    final SarlBatchCompiler compiler = getBatchCompiler();
    final MavenProject project = getProject();
    compiler.setResourceSetProvider(new MavenProjectResourceSetProvider(project));
    // Keep only real directories; nothing to compile when none remain.
    final Iterable<File> filtered = Iterables.filter(sourcePaths, input -> input.isDirectory());
    if (Iterables.isEmpty(filtered)) {
        final String dir = Iterables.toString(sourcePaths);
        getLog().info(MessageFormat.format(Messages.AbstractSarlBatchCompilerMojo_1, dir));
        return;
    }
    final String baseDir = project.getBasedir().getAbsolutePath();
    final JavaCompiler compilerType = getJavaCompiler();
    compiler.setJavaPostCompilationEnable(compilerType != JavaCompiler.NONE);
    compiler.setOptimizationLevel(getOptimization());
    compiler.setClassOutputPath(classOutputPath);
    compiler.setJavaSourceVersion(getSourceVersion());
    compiler.setBasePath(baseDir);
    compiler.setTempDirectory(getTempDirectory());
    compiler.setDeleteTempDirectory(false);
    compiler.setClassPath(classPath);
    final String bootClassPath = getBootClassPath();
    compiler.setBootClassPath(bootClassPath);
    final List<File> filteredSourcePaths = Lists.newArrayList(filtered);
    compiler.setSourcePath(filteredSourcePaths);
    compiler.setOutputPath(sarlOutputPath);
    compiler.setFileEncoding(getEncoding());
    compiler.setWriteTraceFiles(getGenerateTraceFiles());
    compiler.setWriteStorageFiles(getGenerateStorageFiles())
;
    compiler.setGenerateInlineAnnotation(getGenerateInlines());
    compiler.setGeneratePureAnnotation(getGeneratePures());
    compiler.setGenerateEqualityTestFunctions(getGenerateEqualityTestFunctions());
    compiler.setGenerateToStringFunctions(getGenerateToStringFunctions());
    compiler.setGenerateCloneFunctions(getGenerateCloneFunctions());
    compiler.setGenerateSerialNumberFields(getGenerateSerialNumberFields());
    // Extra generators are passed as a single path-separator-joined string.
    final StringBuilder builder = new StringBuilder();
    for (final String identifier : getExtraGenerators()) {
        if (builder.length() > 0) {
            builder.append(File.pathSeparator);
        }
        builder.append(identifier);
    }
    compiler.setExtraLanguageGenerators(builder.toString());
    StaticLoggerBinder.getSingleton().registerMavenLogger(getLog());
    final Logger logger = LoggerFactory.getLogger(getClass());
    compiler.setLogger(logger);
    compiler.setIssueMessageFormatter((issue, uriToProblem) -> {
        final String filename;
        if (uriToProblem != null) {
            filename = uriToProblem.toFileString();
        } else {
            filename = Messages.AbstractSarlBatchCompilerMojo_2;
        }
        return MessageFormat.format(Messages.AbstractSarlBatchCompilerMojo_3,
                filename, issue.getLineNumber(),
                issue.getColumn(), issue.getMessage());
    });
    // Capture the first error-level message so it can become the build failure text.
    final String[] errorMessage = new String[] {null};
    compiler.addIssueMessageListener((issue, uri, message) -> {
        if ((issue.isSyntaxError() || issue.getSeverity() == Severity.ERROR) && (Strings.isEmpty(errorMessage[0]))) {
            errorMessage[0] = message;
        }
    });
    if (!compiler.compile()) {
        final StringBuilder dir = new StringBuilder();
        for (final File file : filtered) {
            if (dir.length() > 0) {
                dir.append(File.pathSeparator);
            }
            dir.append(file.getAbsolutePath());
        }
        if (Strings.isEmpty(errorMessage[0])) {
            throw new MojoFailureException(Messages.AbstractSarlBatchCompilerMojo_4);
        }
        throw new MojoFailureException(errorMessage[0]);
    }
} } | public class class_name {
/**
 * Dependency-annotated variant of {@code compile}; the trailing "depends on"
 * comment is a generated label.
 */
@SuppressWarnings("checkstyle:npathcomplexity")
protected void compile(List<File> classPath, List<File> sourcePaths, File sarlOutputPath,
        File classOutputPath) throws MojoExecutionException, MojoFailureException {
    final SarlBatchCompiler compiler = getBatchCompiler();
    final MavenProject project = getProject();
    compiler.setResourceSetProvider(new MavenProjectResourceSetProvider(project));
    final Iterable<File> filtered = Iterables.filter(sourcePaths, input -> input.isDirectory());
    if (Iterables.isEmpty(filtered)) {
        final String dir = Iterables.toString(sourcePaths);
        getLog().info(MessageFormat.format(Messages.AbstractSarlBatchCompilerMojo_1, dir));
        return;
    }
    final String baseDir = project.getBasedir().getAbsolutePath();
    final JavaCompiler compilerType = getJavaCompiler();
    compiler.setJavaPostCompilationEnable(compilerType != JavaCompiler.NONE);
    compiler.setOptimizationLevel(getOptimization());
    compiler.setClassOutputPath(classOutputPath);
    compiler.setJavaSourceVersion(getSourceVersion());
    compiler.setBasePath(baseDir);
    compiler.setTempDirectory(getTempDirectory());
    compiler.setDeleteTempDirectory(false);
    compiler.setClassPath(classPath);
    final String bootClassPath = getBootClassPath();
    compiler.setBootClassPath(bootClassPath);
    final List<File> filteredSourcePaths = Lists.newArrayList(filtered);
    compiler.setSourcePath(filteredSourcePaths);
    compiler.setOutputPath(sarlOutputPath);
    compiler.setFileEncoding(getEncoding());
    compiler.setWriteTraceFiles(getGenerateTraceFiles());
    compiler.setWriteStorageFiles(getGenerateStorageFiles());
    compiler.setGenerateInlineAnnotation(getGenerateInlines());
    compiler.setGeneratePureAnnotation(getGeneratePures());
    compiler.setGenerateEqualityTestFunctions(getGenerateEqualityTestFunctions());
    compiler.setGenerateToStringFunctions(getGenerateToStringFunctions());
    compiler.setGenerateCloneFunctions(getGenerateCloneFunctions());
    compiler.setGenerateSerialNumberFields(getGenerateSerialNumberFields());
    final StringBuilder builder = new StringBuilder();
    for (final String identifier : getExtraGenerators()) {
        if (builder.length() > 0) {
            builder.append(File.pathSeparator); // depends on control dependency: [if], data = [none]
        }
        builder.append(identifier);
    }
    compiler.setExtraLanguageGenerators(builder.toString());
    StaticLoggerBinder.getSingleton().registerMavenLogger(getLog());
    final Logger logger = LoggerFactory.getLogger(getClass());
    compiler.setLogger(logger);
    compiler.setIssueMessageFormatter((issue, uriToProblem) -> {
        final String filename;
        if (uriToProblem != null) {
            filename = uriToProblem.toFileString();
        } else {
            filename = Messages.AbstractSarlBatchCompilerMojo_2;
        }
        return MessageFormat.format(Messages.AbstractSarlBatchCompilerMojo_3,
                filename, issue.getLineNumber(),
                issue.getColumn(), issue.getMessage());
    });
    final String[] errorMessage = new String[] {null};
    compiler.addIssueMessageListener((issue, uri, message) -> {
        if ((issue.isSyntaxError() || issue.getSeverity() == Severity.ERROR) && (Strings.isEmpty(errorMessage[0]))) {
            errorMessage[0] = message;
        }
    });
    if (!compiler.compile()) {
        final StringBuilder dir = new StringBuilder();
        for (final File file : filtered) {
            if (dir.length() > 0) {
                dir.append(File.pathSeparator);
            }
            dir.append(file.getAbsolutePath());
        }
        if (Strings.isEmpty(errorMessage[0])) {
            throw new MojoFailureException(Messages.AbstractSarlBatchCompilerMojo_4);
        }
        throw new MojoFailureException(errorMessage[0]);
    }
} } |
public class class_name {
/**
 * Adds an expired replacement cookie to the response so the browser discards
 * the named cookie (empty value, max-age 0).
 *
 * @param req current request, used to scope the WASReqURL cookie's path.
 * @param res response that receives the expired cookie.
 * @param cookieName name of the cookie to invalidate.
 * @param enableHttpOnly whether the HttpOnly flag may be applied (still gated
 *     by the web-app security configuration).
 */
public void invalidateCookie(HttpServletRequest req, HttpServletResponse res, String cookieName, boolean enableHttpOnly) {
    // A zero max-age cookie with an empty value tells the client to delete it.
    final Cookie expired = new Cookie(cookieName, "");
    expired.setMaxAge(0);
    // WASReqURL is scoped to the request path; all other cookies live at the root.
    expired.setPath(cookieName.equals("WASReqURL") ? getPathName(req) : "/");
    if (enableHttpOnly && webAppSecConfig.getHttpOnlyCookies()) {
        expired.setHttpOnly(true);
    }
    if (webAppSecConfig.getSSORequiresSSL()) {
        expired.setSecure(true);
    }
    res.addCookie(expired);
} } | public class class_name {
/**
 * Dependency-annotated variant of {@code invalidateCookie}; trailing
 * "depends on" comments are generated labels.
 */
public void invalidateCookie(HttpServletRequest req, HttpServletResponse res, String cookieName, boolean enableHttpOnly) {
    Cookie c = new Cookie(cookieName, "");
    if (cookieName.equals("WASReqURL")) {
        c.setPath(getPathName(req)); // depends on control dependency: [if], data = [none]
    } else {
        c.setPath("/"); // depends on control dependency: [if], data = [none]
    }
    c.setMaxAge(0);
    if (enableHttpOnly && webAppSecConfig.getHttpOnlyCookies()) {
        c.setHttpOnly(true); // depends on control dependency: [if], data = [none]
    }
    if (webAppSecConfig.getSSORequiresSSL()) {
        c.setSecure(true); // depends on control dependency: [if], data = [none]
    }
    res.addCookie(c);
} } |
public class class_name {
/**
 * Peeks ({@code claimTtl == null}) or polls events for this dedup'd channel,
 * draining the read channel first and then consulting the write channel and
 * the persistent sorted queue as described inline below.
 *
 * @param claimTtl claim duration for a poll, or {@code null} to peek only.
 * @param rawSink destination sink for the events.
 * @return {@code true} when more events may remain after this call.
 */
private boolean peekOrPoll(@Nullable Duration claimTtl, EventSink rawSink) {
    // If dedup activity is disabled then we're in the process of falling back to regular non-dedup'd channels.
    // Don't do anything that might move events around--we're likely racing a call to moveToRawChannel().
    if (!_dedupEnabled.get()) {
        return false;
    }
    // When polling, protect from bad pollers that never ack.
    if (claimTtl != null && _eventStore.getClaimCount(_readChannel) >= Limits.MAX_CLAIMS_OUTSTANDING) {
        return false;
    }
    TrackingEventSink sink = new TrackingEventSink(rawSink);
    // There are three places in which to look for events:
    // 1. Read channel
    // 2. Persistent sorted queue
    // 3. Write channel
    // This code is designed to, in the *common* case, poll only one of those sources.  As things transition
    // between the following 3 phases it will occasionally poll multiple sources, but hopefully a server will
    // spend most of its time in one particular phase, resulting in efficient polling overall.
    // Phase 1: Try to keep the read channel empty.  Start by draining the read channel of un-acked events.
    // Phase 2: When readers are slower than writers, events build up in the sorted queue.  Source from the sorted queue.
    // Phase 3: When readers are faster than writers, the sorted queue isn't useful.  Source directly from the write channel.
    // The read channel only contains unclaimed events when (a) claims have expired because something has gone
    // wrong (crash, restart, timeout, etc.) or (b) polling the sorted queue or write channel pulled more items
    // than the sink would accept and we had to write the overflow to the read channel.  Neither is the common
    // case, so rely on the DefaultEventStore "empty channel" cache to rate limit actual reads to one-per-second.
    boolean moreRead = peekOrPollReadChannel(claimTtl, sink);
    if (sink.isDone()) {
        return moreRead || !getQueue().isEmpty() || !isWriteChannelEmpty();
    }
    // Do NOT dedup events in-memory between the read channel and the other sources.  Once an event makes it to
    // the read channel we can't dedup it with anything else.  That lets us support the following sequence:
    // 1. process A adds EventX
    // 2. process B polls EventX and begins working on it
    // 3. process A makes a change and adds EventX to be re-evaluated
    // 4. process B acks EventX
    // The event added in step 3 must be kept separate from the event added in step 1 so it doesn't get acked
    // in step 4.  Essentially, once a poller starts working on an event (indicated by its presence in the read
    // channel) we can't dedup/consolidate that event with any other events.
    Set<ByteBuffer> unique = Sets.newHashSet();
    // Search for events in the write channel, copying them to the sorted queue or, under certain circumstances,
    // copying them directly to the read channel before returning.
    boolean moreWrite = peekOrPollWriteChannel(claimTtl, sink, unique);
    if (moreWrite) {
        // There are more unconsumed write channel events.  Move them asynchronously to the sorted queue.
        _asyncFiller.start();
    }
    // Search for events in the sorted queue, copying them to the read channel before returning.
    boolean moreSorted = peekOrPollSortedQueue(claimTtl, sink, unique);
    return moreWrite || moreSorted;
} } | public class class_name {
/**
 * Dependency-annotated variant of {@code peekOrPoll}; trailing "depends on"
 * comments are generated labels.
 */
private boolean peekOrPoll(@Nullable Duration claimTtl, EventSink rawSink) {
    // If dedup activity is disabled then we're in the process of falling back to regular non-dedup'd channels.
    // Don't do anything that might move events around--we're likely racing a call to moveToRawChannel().
    if (!_dedupEnabled.get()) {
        return false; // depends on control dependency: [if], data = [none]
    }
    // When polling, protect from bad pollers that never ack.
    if (claimTtl != null && _eventStore.getClaimCount(_readChannel) >= Limits.MAX_CLAIMS_OUTSTANDING) {
        return false; // depends on control dependency: [if], data = [none]
    }
    TrackingEventSink sink = new TrackingEventSink(rawSink);
    // There are three places in which to look for events:
    // 1. Read channel
    // 2. Persistent sorted queue
    // 3. Write channel
    // This code is designed to, in the *common* case, poll only one of those sources.  As things transition
    // between the following 3 phases it will occasionally poll multiple sources, but hopefully a server will
    // spend most of its time in one particular phase, resulting in efficient polling overall.
    // Phase 1: Try to keep the read channel empty.  Start by draining the read channel of un-acked events.
    // Phase 2: When readers are slower than writers, events build up in the sorted queue.  Source from the sorted queue.
    // Phase 3: When readers are faster than writers, the sorted queue isn't useful.  Source directly from the write channel.
    // The read channel only contains unclaimed events when (a) claims have expired because something has gone
    // wrong (crash, restart, timeout, etc.) or (b) polling the sorted queue or write channel pulled more items
    // than the sink would accept and we had to write the overflow to the read channel.  Neither is the common
    // case, so rely on the DefaultEventStore "empty channel" cache to rate limit actual reads to one-per-second.
    boolean moreRead = peekOrPollReadChannel(claimTtl, sink);
    if (sink.isDone()) {
        return moreRead || !getQueue().isEmpty() || !isWriteChannelEmpty(); // depends on control dependency: [if], data = [none]
    }
    // Do NOT dedup events in-memory between the read channel and the other sources.  Once an event makes it to
    // the read channel we can't dedup it with anything else.  That lets us support the following sequence:
    // 1. process A adds EventX
    // 2. process B polls EventX and begins working on it
    // 3. process A makes a change and adds EventX to be re-evaluated
    // 4. process B acks EventX
    // The event added in step 3 must be kept separate from the event added in step 1 so it doesn't get acked
    // in step 4.  Essentially, once a poller starts working on an event (indicated by its presence in the read
    // channel) we can't dedup/consolidate that event with any other events.
    Set<ByteBuffer> unique = Sets.newHashSet();
    // Search for events in the write channel, copying them to the sorted queue or, under certain circumstances,
    // copying them directly to the read channel before returning.
    boolean moreWrite = peekOrPollWriteChannel(claimTtl, sink, unique);
    if (moreWrite) {
        // There are more unconsumed write channel events.  Move them asynchronously to the sorted queue.
        _asyncFiller.start(); // depends on control dependency: [if], data = [none]
    }
    // Search for events in the sorted queue, copying them to the read channel before returning.
    boolean moreSorted = peekOrPollSortedQueue(claimTtl, sink, unique);
    return moreWrite || moreSorted;
} } |
public class class_name {
/**
 * Creates the interceptor instances declared for this managed bean and stores
 * them in {@code ivInterceptors}. Any failure is recorded via FFDC and debug
 * trace, then rethrown as an EJBException.
 *
 * @param imd interceptor metadata describing the interceptor classes.
 */
private void createInterceptors(InterceptorMetaData imd) {
    // One slot per declared interceptor class; populated by the metadata helper.
    ivInterceptors = new Object[imd.ivInterceptorClasses.length];
    try {
        imd.createInterceptorInstances(getInjectionEngine(), ivInterceptors, ivEjbManagedObjectContext, this);
    } catch (Throwable t) {
        FFDCFilter.processException(t, CLASS_NAME + ".ManagedBeanOBase", "177", this);
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            Tr.debug(tc, "interceptor creation failure", t);
        throw ExceptionUtil.EJBException("Interceptor creation failure", t);
    }
} } | public class class_name {
/**
 * Dependency-annotated variant of {@code createInterceptors}; trailing
 * "depends on" comments are generated labels.
 */
private void createInterceptors(InterceptorMetaData imd) {
    ivInterceptors = new Object[imd.ivInterceptorClasses.length];
    try {
        imd.createInterceptorInstances(getInjectionEngine(), ivInterceptors, ivEjbManagedObjectContext, this); // depends on control dependency: [try], data = [none]
    } catch (Throwable t) {
        FFDCFilter.processException(t, CLASS_NAME + ".ManagedBeanOBase", "177", this);
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            Tr.debug(tc, "interceptor creation failure", t);
        throw ExceptionUtil.EJBException("Interceptor creation failure", t);
    } // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Loads the metadata profile associated with this object unless it is already
 * the current profile. Does nothing when no profile key is available.
 */
protected void loadProfileIfNeeded()
{
    final Object key = getProfileKey();
    if (key == null)
    {
        return; // no profile associated with this context
    }
    final MetadataManager manager = MetadataManager.getInstance();
    // Only switch profiles when the requested key differs from the active one.
    if (!key.equals(manager.getCurrentProfileKey()))
    {
        manager.loadProfile(key);
    }
} } | public class class_name {
/**
 * Dependency-annotated variant of {@code loadProfileIfNeeded}; the standalone
 * "depends on" comment is a generated label.
 */
protected void loadProfileIfNeeded()
{
    final Object key = getProfileKey();
    if (key != null)
    {
        final MetadataManager mm = MetadataManager.getInstance();
        if (!key.equals(mm.getCurrentProfileKey()))
        {
            mm.loadProfile(key);
            // depends on control dependency: [if], data = [none]
        }
    }
} } |
public class class_name {
/**
 * Declares a property on this RAML type. Optional properties are stored under
 * {@code name + "?"} per RAML convention; when enum values are supplied the
 * property maps to an {@code enum} facet instead of a plain type name.
 *
 * @param name property name.
 * @param type RAML type of the property (used when no enum values are given).
 * @param required whether the property is mandatory.
 * @param values optional enumeration of allowed values.
 * @return this type, for call chaining.
 */
public RamlType newProperty(String name, String type, boolean required, String... values) {
    if (properties == null) {
        properties = new LinkedHashMap<>(); // preserve declaration order
    }
    final String key = required ? name : name + "?";
    if (values.length == 0) {
        properties.put(key, type);
    } else {
        properties.put(key, ImmutableMap.of("enum", values));
    }
    return this;
} } | public class class_name {
/**
 * Dependency-annotated variant of {@code newProperty}; trailing "depends on"
 * comments are generated labels.
 */
public RamlType newProperty(String name, String type, boolean required, String... values) {
    if (properties == null) {
        properties = new LinkedHashMap<>(); // depends on control dependency: [if], data = [none]
    }
    if (values.length > 0) {
        properties.put(required ? name : name + "?", ImmutableMap.of("enum", values)); // depends on control dependency: [if], data = [none]
    } else {
        properties.put(required ? name : name + "?", type); // depends on control dependency: [if], data = [none]
    }
    return this;
} } |
public class class_name {
/**
 * Rebuilds the cached battery image for the given component size. Degenerate
 * widths (&lt;= 1 px) are ignored; any previously cached image is flushed
 * before being replaced.
 *
 * @param WIDTH target width in pixels.
 * @param HEIGHT target height in pixels (bottom inset is subtracted).
 */
private void init(final int WIDTH, final int HEIGHT) {
    // Nothing sensible can be rendered at a width of 0 or 1 pixel.
    if (WIDTH > 1) {
        if (batteryImage != null) {
            batteryImage.flush(); // release resources held by the stale image
        }
        batteryImage = create_BATTERY_Image(WIDTH, HEIGHT - getInsets().bottom, value);
    }
} } | public class class_name {
/**
 * Dependency-annotated variant of {@code init}; trailing "depends on"
 * comments are generated labels.
 */
private void init(final int WIDTH, final int HEIGHT) {
    if (WIDTH <= 1) {
        return; // depends on control dependency: [if], data = [none]
    }
    if (batteryImage != null) {
        batteryImage.flush(); // depends on control dependency: [if], data = [none]
    }
    batteryImage = create_BATTERY_Image(WIDTH, HEIGHT - getInsets().bottom, value);
} } |
public class class_name {
/**
 * Scans {@code packageName} for {@code @ImportResource} declarations and
 * registers a wrapped package resource reference for every declared JS or CSS
 * resource; resources of any other type are ignored.
 *
 * @param packageName package to scan for import-resource declarations.
 * @throws ClassNotFoundException if a scanned class cannot be loaded.
 * @throws IOException if the package contents cannot be read.
 * @throws URISyntaxException if a resource location is malformed.
 */
public void initializeResources(final String packageName)
    throws ClassNotFoundException, IOException, URISyntaxException
{
    final Map<Class<?>, ImportResource[]> resourcesMap = ImportResourcesExtensions
        .getImportResources(packageName);
    for (final Entry<Class<?>, ImportResource[]> entry : resourcesMap.entrySet())
    {
        final Class<?> owner = entry.getKey();
        final Set<PackageResourceReferenceWrapper> references = new LinkedHashSet<>();
        for (final ImportResource importResource : entry.getValue())
        {
            final ResourceReferenceType referenceType;
            if (importResource.resourceType().equalsIgnoreCase("js"))
            {
                referenceType = ResourceReferenceType.JS;
            }
            else if (importResource.resourceType().equalsIgnoreCase("css"))
            {
                referenceType = ResourceReferenceType.CSS;
            }
            else
            {
                continue; // only js and css resources are registered
            }
            references.add(new PackageResourceReferenceWrapper(
                new PackageResourceReference(owner, importResource.resourceName()), referenceType));
        }
        PackageResourceReferences.getInstance().getPackageResourceReferenceMap().put(owner,
            references);
    }
} } | public class class_name {
/**
 * Dependency-annotated variant of {@code initializeResources}; trailing
 * "depends on" comments are generated labels.
 */
public void initializeResources(final String packageName)
    throws ClassNotFoundException, IOException, URISyntaxException
{
    final Map<Class<?>, ImportResource[]> resourcesMap = ImportResourcesExtensions
        .getImportResources(packageName);
    for (final Entry<Class<?>, ImportResource[]> entry : resourcesMap.entrySet())
    {
        final Class<?> key = entry.getKey();
        final ImportResource[] value = entry.getValue();
        final Set<PackageResourceReferenceWrapper> packageResourceReferences = new LinkedHashSet<>();
        for (final ImportResource importResource : value)
        {
            if (importResource.resourceType().equalsIgnoreCase("js"))
            {
                final PackageResourceReference t = new PackageResourceReference(key,
                    importResource.resourceName());
                packageResourceReferences
                    .add(new PackageResourceReferenceWrapper(t, ResourceReferenceType.JS)); // depends on control dependency: [if], data = [none]
            }
            else if (importResource.resourceType().equalsIgnoreCase("css"))
            {
                final PackageResourceReference t = new PackageResourceReference(key,
                    importResource.resourceName());
                packageResourceReferences
                    .add(new PackageResourceReferenceWrapper(t, ResourceReferenceType.CSS)); // depends on control dependency: [if], data = [none]
            }
        }
        PackageResourceReferences.getInstance().getPackageResourceReferenceMap().put(key,
            packageResourceReferences);
    }
} } |
public class class_name {
/**
 * Reverses the order of the elements in the half-open range
 * {@code [first, last)}, delegating each element exchange to {@code swapper}.
 *
 * @param first index of the first element of the range (inclusive).
 * @param last index one past the final element of the range (exclusive).
 * @param swapper callback that exchanges the elements at two indices.
 */
private static void reverse(int first, int last, Swapper swapper) {
    // Classic two-pointer reversal: walk inwards from both ends.
    for (int lo = first, hi = last - 1; lo < hi; lo++, hi--) {
        swapper.swap(lo, hi);
    }
} } | public class class_name {
/**
 * Dependency-annotated variant of {@code reverse}; the standalone
 * "depends on" comment is a generated label.
 */
private static void reverse(int first, int last, Swapper swapper) {
    // no more needed since manually inlined
    while (first < --last) {
        swapper.swap(first++,last);
        // depends on control dependency: [while], data = [(first]
    }
} } |
public class class_name {
/**
 * Registers one or more node addresses with this replicated-servers
 * configuration; each textual address is converted to a URI on registration.
 *
 * @param addresses node addresses to add.
 * @return this configuration, for call chaining.
 */
public ReplicatedServersConfig addNodeAddress(String... addresses) {
    for (int i = 0; i < addresses.length; i++) {
        nodeAddresses.add(URIBuilder.create(addresses[i]));
    }
    return this;
} } | public class class_name {
/**
 * Dependency-annotated variant of {@code addNodeAddress}; the trailing
 * "depends on" comment is a generated label.
 */
public ReplicatedServersConfig addNodeAddress(String... addresses) {
    for (String address : addresses) {
        nodeAddresses.add(URIBuilder.create(address)); // depends on control dependency: [for], data = [address]
    }
    return this;
} } |
public class class_name {
/**
 * Parses raw BLE advertisement bytes into a ScanRecord by walking the
 * length-prefixed AD structures (flags, service UUIDs, local name, TX power,
 * service data, manufacturer data). A {@code null} input yields {@code null};
 * any parse error yields an "empty" record that still carries the raw bytes
 * rather than throwing.
 *
 * @param scanRecord raw advertisement + scan-response payload, may be null.
 * @return the parsed record, or {@code null} when {@code scanRecord} is null.
 */
@Nullable
/* package */ static ScanRecord parseFromBytes(@Nullable final byte[] scanRecord) {
    if (scanRecord == null) {
        return null;
    }
    int currentPos = 0;
    int advertiseFlag = -1;
    int txPowerLevel = Integer.MIN_VALUE;
    String localName = null;
    List<ParcelUuid> serviceUuids = null;
    SparseArray<byte[]> manufacturerData = null;
    Map<ParcelUuid, byte[]> serviceData = null;
    try {
        while (currentPos < scanRecord.length) {
            // length is unsigned int.
            final int length = scanRecord[currentPos++] & 0xFF;
            if (length == 0) {
                break;
            }
            // Note the length includes the length of the field type itself.
            final int dataLength = length - 1;
            // fieldType is unsigned int.
            final int fieldType = scanRecord[currentPos++] & 0xFF;
            switch (fieldType) {
                case DATA_TYPE_FLAGS:
                    advertiseFlag = scanRecord[currentPos] & 0xFF;
                    break;
                case DATA_TYPE_SERVICE_UUIDS_16_BIT_PARTIAL:
                case DATA_TYPE_SERVICE_UUIDS_16_BIT_COMPLETE:
                    if (serviceUuids == null)
                        serviceUuids = new ArrayList<>();
                    parseServiceUuid(scanRecord, currentPos,
                            dataLength, BluetoothUuid.UUID_BYTES_16_BIT, serviceUuids);
                    break;
                case DATA_TYPE_SERVICE_UUIDS_32_BIT_PARTIAL:
                case DATA_TYPE_SERVICE_UUIDS_32_BIT_COMPLETE:
                    if (serviceUuids == null)
                        serviceUuids = new ArrayList<>();
                    parseServiceUuid(scanRecord, currentPos, dataLength,
                            BluetoothUuid.UUID_BYTES_32_BIT, serviceUuids);
                    break;
                case DATA_TYPE_SERVICE_UUIDS_128_BIT_PARTIAL:
                case DATA_TYPE_SERVICE_UUIDS_128_BIT_COMPLETE:
                    if (serviceUuids == null)
                        serviceUuids = new ArrayList<>();
                    parseServiceUuid(scanRecord, currentPos, dataLength,
                            BluetoothUuid.UUID_BYTES_128_BIT, serviceUuids);
                    break;
                case DATA_TYPE_LOCAL_NAME_SHORT:
                case DATA_TYPE_LOCAL_NAME_COMPLETE:
                    // NOTE(review): decodes with the platform default charset;
                    // BLE local names are expected to be UTF-8 — confirm.
                    localName = new String(
                            extractBytes(scanRecord, currentPos, dataLength));
                    break;
                case DATA_TYPE_TX_POWER_LEVEL:
                    txPowerLevel = scanRecord[currentPos];
                    break;
                case DATA_TYPE_SERVICE_DATA_16_BIT:
                case DATA_TYPE_SERVICE_DATA_32_BIT:
                case DATA_TYPE_SERVICE_DATA_128_BIT:
                    int serviceUuidLength = BluetoothUuid.UUID_BYTES_16_BIT;
                    if (fieldType == DATA_TYPE_SERVICE_DATA_32_BIT) {
                        serviceUuidLength = BluetoothUuid.UUID_BYTES_32_BIT;
                    } else if (fieldType == DATA_TYPE_SERVICE_DATA_128_BIT) {
                        serviceUuidLength = BluetoothUuid.UUID_BYTES_128_BIT;
                    }
                    final byte[] serviceDataUuidBytes = extractBytes(scanRecord, currentPos,
                            serviceUuidLength);
                    final ParcelUuid serviceDataUuid = BluetoothUuid.parseUuidFrom(
                            serviceDataUuidBytes);
                    final byte[] serviceDataArray = extractBytes(scanRecord,
                            currentPos + serviceUuidLength, dataLength - serviceUuidLength);
                    if (serviceData == null)
                        serviceData = new HashMap<>();
                    serviceData.put(serviceDataUuid, serviceDataArray);
                    break;
                case DATA_TYPE_MANUFACTURER_SPECIFIC_DATA:
                    // The first two bytes of the manufacturer specific data are
                    // manufacturer ids in little endian.
                    final int manufacturerId = ((scanRecord[currentPos + 1] & 0xFF) << 8) +
                            (scanRecord[currentPos] & 0xFF);
                    final byte[] manufacturerDataBytes = extractBytes(scanRecord, currentPos + 2,
                            dataLength - 2);
                    if (manufacturerData == null)
                        manufacturerData = new SparseArray<>();
                    manufacturerData.put(manufacturerId, manufacturerDataBytes);
                    break;
                default:
                    // Just ignore, we don't handle such data type.
                    break;
            }
            currentPos += dataLength;
        }
        return new ScanRecord(serviceUuids, manufacturerData, serviceData,
                advertiseFlag, txPowerLevel, localName, scanRecord);
    } catch (final Exception e) {
        Log.e(TAG, "unable to parse scan record: " + Arrays.toString(scanRecord));
        // As the record is invalid, ignore all the parsed results for this packet
        // and return an empty record with raw scanRecord bytes in results
        return new ScanRecord(null, null, null,
                -1, Integer.MIN_VALUE, null, scanRecord);
    }
} } | public class class_name {
/**
 * Dependency-annotated variant of {@code parseFromBytes}; trailing
 * "depends on" comments are generated labels.
 */
@Nullable
/* package */ static ScanRecord parseFromBytes(@Nullable final byte[] scanRecord) {
    if (scanRecord == null) {
        return null; // depends on control dependency: [if], data = [none]
    }
    int currentPos = 0;
    int advertiseFlag = -1;
    int txPowerLevel = Integer.MIN_VALUE;
    String localName = null;
    List<ParcelUuid> serviceUuids = null;
    SparseArray<byte[]> manufacturerData = null;
    Map<ParcelUuid, byte[]> serviceData = null;
    try {
        while (currentPos < scanRecord.length) {
            // length is unsigned int.
            final int length = scanRecord[currentPos++] & 0xFF;
            if (length == 0) {
                break;
            }
            // Note the length includes the length of the field type itself.
            final int dataLength = length - 1;
            // fieldType is unsigned int.
            final int fieldType = scanRecord[currentPos++] & 0xFF;
            switch (fieldType) {
                case DATA_TYPE_FLAGS:
                    advertiseFlag = scanRecord[currentPos] & 0xFF;
                    break;
                case DATA_TYPE_SERVICE_UUIDS_16_BIT_PARTIAL:
                case DATA_TYPE_SERVICE_UUIDS_16_BIT_COMPLETE:
                    if (serviceUuids == null)
                        serviceUuids = new ArrayList<>();
                    parseServiceUuid(scanRecord, currentPos,
                            dataLength, BluetoothUuid.UUID_BYTES_16_BIT, serviceUuids);
                    break;
                case DATA_TYPE_SERVICE_UUIDS_32_BIT_PARTIAL:
                case DATA_TYPE_SERVICE_UUIDS_32_BIT_COMPLETE:
                    if (serviceUuids == null)
                        serviceUuids = new ArrayList<>();
                    parseServiceUuid(scanRecord, currentPos, dataLength,
                            BluetoothUuid.UUID_BYTES_32_BIT, serviceUuids);
                    break;
                case DATA_TYPE_SERVICE_UUIDS_128_BIT_PARTIAL:
                case DATA_TYPE_SERVICE_UUIDS_128_BIT_COMPLETE:
                    if (serviceUuids == null)
                        serviceUuids = new ArrayList<>();
                    parseServiceUuid(scanRecord, currentPos, dataLength,
                            BluetoothUuid.UUID_BYTES_128_BIT, serviceUuids);
                    break;
                case DATA_TYPE_LOCAL_NAME_SHORT:
                case DATA_TYPE_LOCAL_NAME_COMPLETE:
                    localName = new String(
                            extractBytes(scanRecord, currentPos, dataLength));
                    break;
                case DATA_TYPE_TX_POWER_LEVEL:
                    txPowerLevel = scanRecord[currentPos];
                    break;
                case DATA_TYPE_SERVICE_DATA_16_BIT:
                case DATA_TYPE_SERVICE_DATA_32_BIT:
                case DATA_TYPE_SERVICE_DATA_128_BIT:
                    int serviceUuidLength = BluetoothUuid.UUID_BYTES_16_BIT;
                    if (fieldType == DATA_TYPE_SERVICE_DATA_32_BIT) {
                        serviceUuidLength = BluetoothUuid.UUID_BYTES_32_BIT; // depends on control dependency: [if], data = [none]
                    } else if (fieldType == DATA_TYPE_SERVICE_DATA_128_BIT) {
                        serviceUuidLength = BluetoothUuid.UUID_BYTES_128_BIT; // depends on control dependency: [if], data = [none]
                    }
                    final byte[] serviceDataUuidBytes = extractBytes(scanRecord, currentPos,
                            serviceUuidLength);
                    final ParcelUuid serviceDataUuid = BluetoothUuid.parseUuidFrom(
                            serviceDataUuidBytes);
                    final byte[] serviceDataArray = extractBytes(scanRecord,
                            currentPos + serviceUuidLength, dataLength - serviceUuidLength);
                    if (serviceData == null)
                        serviceData = new HashMap<>();
                    serviceData.put(serviceDataUuid, serviceDataArray);
                    break;
                case DATA_TYPE_MANUFACTURER_SPECIFIC_DATA:
                    // The first two bytes of the manufacturer specific data are
                    // manufacturer ids in little endian.
                    final int manufacturerId = ((scanRecord[currentPos + 1] & 0xFF) << 8) +
                            (scanRecord[currentPos] & 0xFF);
                    final byte[] manufacturerDataBytes = extractBytes(scanRecord, currentPos + 2,
                            dataLength - 2);
                    if (manufacturerData == null)
                        manufacturerData = new SparseArray<>();
                    manufacturerData.put(manufacturerId, manufacturerDataBytes);
                    break;
                default:
                    // Just ignore, we don't handle such data type.
                    break;
            }
            currentPos += dataLength; // depends on control dependency: [while], data = [none]
        }
        return new ScanRecord(serviceUuids, manufacturerData, serviceData,
                advertiseFlag, txPowerLevel, localName, scanRecord); // depends on control dependency: [try], data = [none]
    } catch (final Exception e) {
        Log.e(TAG, "unable to parse scan record: " + Arrays.toString(scanRecord));
        // As the record is invalid, ignore all the parsed results for this packet
        // and return an empty record with raw scanRecord bytes in results
        return new ScanRecord(null, null, null,
                -1, Integer.MIN_VALUE, null, scanRecord);
    } // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Draws every configured axis (Y-left, Y-right, X-bottom, X-top) onto the
 * background canvas. An axis that is not configured (null) is skipped; each
 * present axis is first prepared via prepareAxisToDraw and then rendered with
 * drawAxisLines, using the matching position constant (LEFT/RIGHT/BOTTOM/TOP).
 */
public void drawInBackground(Canvas canvas) {
Axis axis = chart.getChartData().getAxisYLeft();
if (null != axis) {
prepareAxisToDraw(axis, LEFT);
drawAxisLines(canvas, axis, LEFT);
}
// Same prepare-then-draw sequence for the remaining three positions.
axis = chart.getChartData().getAxisYRight();
if (null != axis) {
prepareAxisToDraw(axis, RIGHT);
drawAxisLines(canvas, axis, RIGHT);
}
axis = chart.getChartData().getAxisXBottom();
if (null != axis) {
prepareAxisToDraw(axis, BOTTOM);
drawAxisLines(canvas, axis, BOTTOM);
}
axis = chart.getChartData().getAxisXTop();
if (null != axis) {
prepareAxisToDraw(axis, TOP);
drawAxisLines(canvas, axis, TOP);
}
} } | public class class_name {
public void drawInBackground(Canvas canvas) {
Axis axis = chart.getChartData().getAxisYLeft();
if (null != axis) {
prepareAxisToDraw(axis, LEFT); // depends on control dependency: [if], data = [none]
drawAxisLines(canvas, axis, LEFT); // depends on control dependency: [if], data = [none]
}
axis = chart.getChartData().getAxisYRight();
if (null != axis) {
prepareAxisToDraw(axis, RIGHT); // depends on control dependency: [if], data = [none]
drawAxisLines(canvas, axis, RIGHT); // depends on control dependency: [if], data = [none]
}
axis = chart.getChartData().getAxisXBottom();
if (null != axis) {
prepareAxisToDraw(axis, BOTTOM); // depends on control dependency: [if], data = [none]
drawAxisLines(canvas, axis, BOTTOM); // depends on control dependency: [if], data = [none]
}
axis = chart.getChartData().getAxisXTop();
if (null != axis) {
prepareAxisToDraw(axis, TOP); // depends on control dependency: [if], data = [none]
drawAxisLines(canvas, axis, TOP); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
	/**
	 * Resolves the current user's home directory.
	 *
	 * <p>The {@code user.home} system property is preferred when it names an
	 * existing directory. On Android the platform home below the first
	 * filesystem root is used as a fallback.
	 *
	 * @return the user home directory
	 * @throws FileNotFoundException if no usable home directory can be found
	 */
	@Pure
	public static File getUserHomeDirectory() throws FileNotFoundException {
		final String homeProp = System.getProperty("user.home"); //$NON-NLS-1$
		final File candidate = (homeProp == null || homeProp.isEmpty()) ? null : new File(homeProp);
		if (candidate != null && candidate.isDirectory()) {
			return candidate;
		}
		if (OperatingSystem.ANDROID.isCurrentOS()) {
			return join(File.listRoots()[0], Android.HOME_DIRECTORY);
		}
		throw new FileNotFoundException();
} } | public class class_name {
@Pure
public static File getUserHomeDirectory() throws FileNotFoundException {
final String userHome = System.getProperty("user.home"); //$NON-NLS-1$
if (userHome != null && !userHome.isEmpty()) {
final File file = new File(userHome);
if (file.isDirectory()) {
return file; // depends on control dependency: [if], data = [none]
}
}
if (OperatingSystem.ANDROID.isCurrentOS()) {
return join(File.listRoots()[0], Android.HOME_DIRECTORY);
}
throw new FileNotFoundException();
} } |
public class class_name {
/**
 * Converts a server-side DTO geometry into its GWT client-side counterpart.
 * Dispatches on the geometry type string; composite types (polygon and the
 * multi-* types) recurse into their sub-geometries. Returns null for a null
 * input; logs and throws IllegalStateException for an unrecognized type.
 */
public static org.geomajas.gwt.client.spatial.geometry.Geometry toGwt(Geometry geometry) {
if (geometry == null) {
return null;
}
// Factory carries the SRID and precision of the source geometry.
GeometryFactory factory = new GeometryFactory(geometry.getSrid(), geometry.getPrecision());
org.geomajas.gwt.client.spatial.geometry.Geometry gwt;
String geometryType = geometry.getGeometryType();
if (Geometry.POINT.equals(geometryType)) {
if (geometry.getCoordinates() != null) {
gwt = factory.createPoint(geometry.getCoordinates()[0]);
} else {
// Empty point: no coordinates present.
gwt = factory.createPoint(null);
}
} else if (Geometry.LINEAR_RING.equals(geometryType)) {
gwt = factory.createLinearRing(geometry.getCoordinates());
} else if (Geometry.LINE_STRING.equals(geometryType)) {
gwt = factory.createLineString(geometry.getCoordinates());
} else if (Geometry.POLYGON.equals(geometryType)) {
if (geometry.getGeometries() == null) {
gwt = factory.createPolygon(null, null);
} else {
// Sub-geometry 0 is the exterior ring; the remaining ones are holes.
LinearRing exteriorRing = (LinearRing) toGwt(geometry.getGeometries()[0]);
LinearRing[] interiorRings = new LinearRing[geometry.getGeometries().length - 1];
for (int i = 0; i < interiorRings.length; i++) {
interiorRings[i] = (LinearRing) toGwt(geometry.getGeometries()[i + 1]);
}
gwt = factory.createPolygon(exteriorRing, interiorRings);
}
} else if (Geometry.MULTI_POINT.equals(geometryType)) {
if (geometry.getGeometries() == null) {
gwt = factory.createMultiPoint(null);
} else {
Point[] points = new Point[geometry.getGeometries().length];
gwt = factory.createMultiPoint((Point[]) convertGeometries(geometry, points));
}
} else if (Geometry.MULTI_LINE_STRING.equals(geometryType)) {
if (geometry.getGeometries() == null) {
gwt = factory.createMultiLineString(null);
} else {
LineString[] lineStrings = new LineString[geometry.getGeometries().length];
gwt = factory.createMultiLineString((LineString[]) convertGeometries(geometry, lineStrings));
}
} else if (Geometry.MULTI_POLYGON.equals(geometryType)) {
if (geometry.getGeometries() == null) {
gwt = factory.createMultiPolygon(null);
} else {
Polygon[] polygons = new Polygon[geometry.getGeometries().length];
gwt = factory.createMultiPolygon((Polygon[]) convertGeometries(geometry, polygons));
}
} else {
String msg = "GeometryConverter.toGwt() unrecognized geometry type " + geometryType;
Log.logServer(Log.LEVEL_ERROR, msg);
throw new IllegalStateException(msg);
}
return gwt;
} } | public class class_name {
public static org.geomajas.gwt.client.spatial.geometry.Geometry toGwt(Geometry geometry) {
if (geometry == null) {
return null; // depends on control dependency: [if], data = [none]
}
GeometryFactory factory = new GeometryFactory(geometry.getSrid(), geometry.getPrecision());
org.geomajas.gwt.client.spatial.geometry.Geometry gwt;
String geometryType = geometry.getGeometryType();
if (Geometry.POINT.equals(geometryType)) {
if (geometry.getCoordinates() != null) {
gwt = factory.createPoint(geometry.getCoordinates()[0]); // depends on control dependency: [if], data = [(geometry.getCoordinates()]
} else {
gwt = factory.createPoint(null); // depends on control dependency: [if], data = [null)]
}
} else if (Geometry.LINEAR_RING.equals(geometryType)) {
gwt = factory.createLinearRing(geometry.getCoordinates()); // depends on control dependency: [if], data = [none]
} else if (Geometry.LINE_STRING.equals(geometryType)) {
gwt = factory.createLineString(geometry.getCoordinates()); // depends on control dependency: [if], data = [none]
} else if (Geometry.POLYGON.equals(geometryType)) {
if (geometry.getGeometries() == null) {
gwt = factory.createPolygon(null, null); // depends on control dependency: [if], data = [null)]
} else {
LinearRing exteriorRing = (LinearRing) toGwt(geometry.getGeometries()[0]);
LinearRing[] interiorRings = new LinearRing[geometry.getGeometries().length - 1];
for (int i = 0; i < interiorRings.length; i++) {
interiorRings[i] = (LinearRing) toGwt(geometry.getGeometries()[i + 1]); // depends on control dependency: [for], data = [i]
}
gwt = factory.createPolygon(exteriorRing, interiorRings); // depends on control dependency: [if], data = [none]
}
} else if (Geometry.MULTI_POINT.equals(geometryType)) {
if (geometry.getGeometries() == null) {
gwt = factory.createMultiPoint(null); // depends on control dependency: [if], data = [null)]
} else {
Point[] points = new Point[geometry.getGeometries().length];
gwt = factory.createMultiPoint((Point[]) convertGeometries(geometry, points)); // depends on control dependency: [if], data = [none]
}
} else if (Geometry.MULTI_LINE_STRING.equals(geometryType)) {
if (geometry.getGeometries() == null) {
gwt = factory.createMultiLineString(null); // depends on control dependency: [if], data = [null)]
} else {
LineString[] lineStrings = new LineString[geometry.getGeometries().length];
gwt = factory.createMultiLineString((LineString[]) convertGeometries(geometry, lineStrings)); // depends on control dependency: [if], data = [none]
}
} else if (Geometry.MULTI_POLYGON.equals(geometryType)) {
if (geometry.getGeometries() == null) {
gwt = factory.createMultiPolygon(null); // depends on control dependency: [if], data = [null)]
} else {
Polygon[] polygons = new Polygon[geometry.getGeometries().length];
gwt = factory.createMultiPolygon((Polygon[]) convertGeometries(geometry, polygons)); // depends on control dependency: [if], data = [none]
}
} else {
String msg = "GeometryConverter.toGwt() unrecognized geometry type " + geometryType;
Log.logServer(Log.LEVEL_ERROR, msg); // depends on control dependency: [if], data = [none]
throw new IllegalStateException(msg);
}
return gwt;
} } |
public class class_name {
/**
 * Collapses duplicate KAM edges sharing the same (source, relationship,
 * target) triple. Only ADJACENT result rows are compared, so correctness
 * assumes SELECTED_ORDERED_EDGES returns edges ordered such that duplicates
 * are consecutive — the query name suggests so; TODO confirm.
 * For each duplicate, its statements are re-pointed to the first-seen edge
 * and the duplicate edge is then removed.
 *
 * @return the number of duplicate edges removed
 * @throws SQLException on any database failure
 */
@Override
public int coalesceKamEdges() throws SQLException {
// Prepared statements are obtained via getPreparedStatement and are not
// closed here — presumably cached/managed by that helper; verify.
PreparedStatement eps = getPreparedStatement(SELECTED_ORDERED_EDGES);
PreparedStatement kesps = getPreparedStatement(UPDATE_KAM_EDGE_STATEMENT);
ResultSet rset = null;
int coalesced = 0;
try {
rset = eps.executeQuery();
if (rset.next()) {
// Seed the "current triple" from the first row.
int xSource = rset.getInt(2);
int xRel = rset.getInt(3);
int xTarget = rset.getInt(4);
int xEdgeId = rset.getInt(1);
int[] xTriple = new int[] {xSource, xRel, xTarget};
while (rset.next()) {
int edgeId = rset.getInt(1);
int source = rset.getInt(2);
int rel = rset.getInt(3);
int target = rset.getInt(4);
int[] triple = new int[] {source, rel, target};
if (Arrays.equals(triple, xTriple)) {
// duplicate triple, move over statements
kesps.setInt(1, xEdgeId);
kesps.setInt(2, edgeId);
kesps.executeUpdate();
// remove duplicate
removeKamEdges(new int[] {edgeId});
coalesced++;
} else {
// move to next unseen triple
xTriple = triple;
xEdgeId = edgeId;
}
}
}
} finally {
if (rset != null) {
try {
rset.close();
} catch (Exception e) {
// ignored
}
}
}
return coalesced;
} } | public class class_name {
@Override
public int coalesceKamEdges() throws SQLException {
PreparedStatement eps = getPreparedStatement(SELECTED_ORDERED_EDGES);
PreparedStatement kesps = getPreparedStatement(UPDATE_KAM_EDGE_STATEMENT);
ResultSet rset = null;
int coalesced = 0;
try {
rset = eps.executeQuery();
if (rset.next()) {
int xSource = rset.getInt(2);
int xRel = rset.getInt(3);
int xTarget = rset.getInt(4);
int xEdgeId = rset.getInt(1);
int[] xTriple = new int[] {xSource, xRel, xTarget};
while (rset.next()) {
int edgeId = rset.getInt(1);
int source = rset.getInt(2);
int rel = rset.getInt(3);
int target = rset.getInt(4);
int[] triple = new int[] {source, rel, target};
if (Arrays.equals(triple, xTriple)) {
// duplicate triple, move over statements
kesps.setInt(1, xEdgeId); // depends on control dependency: [if], data = [none]
kesps.setInt(2, edgeId); // depends on control dependency: [if], data = [none]
kesps.executeUpdate(); // depends on control dependency: [if], data = [none]
// remove duplicate
removeKamEdges(new int[] {edgeId}); // depends on control dependency: [if], data = [none]
coalesced++; // depends on control dependency: [if], data = [none]
} else {
// move to next unseen triple
xTriple = triple; // depends on control dependency: [if], data = [none]
xEdgeId = edgeId; // depends on control dependency: [if], data = [none]
}
}
}
} finally {
if (rset != null) {
try {
rset.close(); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
// ignored
} // depends on control dependency: [catch], data = [none]
}
}
return coalesced;
} } |
public class class_name {
	/**
	 * Removes a previously registered search listener. The search must be idle;
	 * otherwise an exception is raised by the idle assertion.
	 *
	 * @param listener the listener to remove
	 * @return {@code true} if the listener was registered and has been removed
	 */
	public boolean removeSearchListener(SearchListener<? super SolutionType> listener){
		synchronized(statusLock){
			// May only modify listeners while no search is running.
			assertIdle("Cannot remove search listener.");
			final boolean removed = searchListeners.remove(listener);
			if (removed) {
				LOGGER.debug("{}: removed search listener {}", this, listener);
			}
			return removed;
		}
} } | public class class_name {
public boolean removeSearchListener(SearchListener<? super SolutionType> listener){
// acquire status lock
synchronized(statusLock){
// assert idle
assertIdle("Cannot remove search listener.");
// remove listener
if (searchListeners.remove(listener)){
// log
LOGGER.debug("{}: removed search listener {}", this, listener); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
} else {
return false; // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
/**
 * Returns the cumulative probability P(X <= x) of this normal distribution.
 * Only valid when a single mean is configured; throws otherwise.
 */
public double cumulativeProbability(double x) {
if (means != null)
throw new IllegalStateException("Unable to sample from more than one mean");
final double dev = x - mean;
// Beyond 40 standard deviations the CDF saturates to 0 or 1; skip erf.
if (FastMath.abs(dev) > 40 * standardDeviation) {
return dev < 0 ? 0.0d : 1.0d;
}
// Normal CDF: 0.5 * (1 + erf(dev / (sigma * sqrt(2)))).
return 0.5 * (1 + Erf.erf(dev / (standardDeviation * SQRT2)));
} } | public class class_name {
public double cumulativeProbability(double x) {
if (means != null)
throw new IllegalStateException("Unable to sample from more than one mean");
final double dev = x - mean;
if (FastMath.abs(dev) > 40 * standardDeviation) {
return dev < 0 ? 0.0d : 1.0d; // depends on control dependency: [if], data = [none]
}
return 0.5 * (1 + Erf.erf(dev / (standardDeviation * SQRT2)));
} } |
public class class_name {
	/**
	 * Returns the principal's granted authorities as an unmodifiable view,
	 * or an empty list when no principal is set.
	 */
	public Collection<? extends GrantedAuthority> getAuthorities() {
		return principal == null
				? Collections.emptyList()
				: Collections.unmodifiableCollection(principal.getAuthorities());
} } | public class class_name {
public Collection<? extends GrantedAuthority> getAuthorities() {
if (principal == null) {
return Collections.emptyList(); // depends on control dependency: [if], data = [none]
}
return Collections.unmodifiableCollection(principal.getAuthorities());
} } |
public class class_name {
	/**
	 * Concatenates two lists into a newly allocated list.
	 * Either argument may be {@code null}, in which case it contributes nothing.
	 *
	 * @param list1 the first list, possibly null
	 * @param list2 the second list, possibly null
	 * @return a new mutable list holding the elements of list1 followed by list2
	 */
	public static <T> List<T> mergeLists(List<T> list1, List<T> list2) {
		final List<T> merged = new LinkedList<T>();
		appendIfPresent(merged, list1);
		appendIfPresent(merged, list2);
		return merged;
	}

	/** Appends all elements of {@code source} to {@code target}; no-op when source is null. */
	private static <T> void appendIfPresent(List<T> target, List<T> source) {
		if (source != null) {
			target.addAll(source);
		}
} } | public class class_name {
public static <T> List<T> mergeLists(List<T> list1, List<T> list2) {
List<T> merged = new LinkedList<T>();
if (list1 != null) {
merged.addAll(list1); // depends on control dependency: [if], data = [(list1]
}
if (list2 != null) {
merged.addAll(list2); // depends on control dependency: [if], data = [(list2]
}
return merged;
} } |
public class class_name {
/**
 * Translates a clause formula into the solver's integer literal encoding.
 * Variables are allocated on the fly for names not seen before. A literal
 * is encoded as 2*index for positive phase and (2*index)^1 (= 2*index + 1)
 * for negative phase.
 */
private LNGIntVector generateClauseVector(final Formula clause) {
final LNGIntVector clauseVec = new LNGIntVector(clause.numberOfOperands());
for (final Literal lit : clause.literals()) {
int index = this.idxForName(lit.name());
if (index == -1) {
// Unknown variable name: allocate a fresh solver variable and register it.
index = this.newVar(false, true);
this.addName(lit.name(), index);
}
final int litNum = lit.phase() ? index * 2 : (index * 2) ^ 1;
clauseVec.push(litNum);
}
return clauseVec;
} } | public class class_name {
private LNGIntVector generateClauseVector(final Formula clause) {
final LNGIntVector clauseVec = new LNGIntVector(clause.numberOfOperands());
for (final Literal lit : clause.literals()) {
int index = this.idxForName(lit.name());
if (index == -1) {
index = this.newVar(false, true); // depends on control dependency: [if], data = [none]
this.addName(lit.name(), index); // depends on control dependency: [if], data = [none]
}
final int litNum = lit.phase() ? index * 2 : (index * 2) ^ 1;
clauseVec.push(litNum); // depends on control dependency: [for], data = [lit]
}
return clauseVec;
} } |
public class class_name {
	/**
	 * Finds the first public method with the given simple name on the supplied
	 * class, ignoring parameter types (overloads are not distinguished).
	 *
	 * @param clazz the class whose public methods are searched
	 * @param name the method name to look for
	 * @return the first matching method, or {@code null} when none matches
	 */
	public static Method findMethod (Class<?> clazz, String name)
	{
		final Method[] candidates = clazz.getMethods();
		for (int i = 0; i < candidates.length; i++) {
			if (candidates[i].getName().equals(name)) {
				return candidates[i];
			}
		}
		return null;
} } | public class class_name {
public static Method findMethod (Class<?> clazz, String name)
{
Method[] methods = clazz.getMethods();
for (Method method : methods) {
if (method.getName().equals(name)) {
return method; // depends on control dependency: [if], data = [none]
}
}
return null;
} } |
public class class_name {
/**
 * Returns the message, lazily materializing it from the builder on first
 * access. The cast is unchecked because buildPartial's static type does not
 * carry MType.
 */
@SuppressWarnings("unchecked")
public MType getMessage() {
if (message == null) {
// If message is null, the invariant is that we must have a builder.
message = (MType) builder.buildPartial();
}
return message;
} } | public class class_name {
@SuppressWarnings("unchecked")
public MType getMessage() {
if (message == null) {
// If message is null, the invariant is that we must be have a builder.
message = (MType) builder.buildPartial();
// depends on control dependency: [if], data = [none]
}
return message;
} } |
public class class_name {
	/**
	 * Builds an immutable list from the half-open slice {@code el[start, end)}.
	 *
	 * @param el the source array
	 * @param start inclusive start index
	 * @param end exclusive end index
	 * @return the immutable list over the slice; empty when start == end
	 */
	@Nonnull
	public static <A> ImmutableList<A> fromBounded(@Nonnull A[] el, int start, int end) {
		if (start == end) {
			return empty();
		}
		// Build the cons-list back to front so the head ends up at el[start].
		NonEmptyImmutableList<A> result = cons(el[end - 1], ImmutableList.empty());
		int i = end - 2;
		while (i >= start) {
			result = cons(el[i], result);
			i--;
		}
		return result;
} } | public class class_name {
@Nonnull
public static <A> ImmutableList<A> fromBounded(@Nonnull A[] el, int start, int end) {
if (end == start) {
return empty(); // depends on control dependency: [if], data = [none]
}
NonEmptyImmutableList<A> l = cons(el[end - 1], ImmutableList.empty());
for (int i = end - 2; i >= start; i--) {
l = cons(el[i], l); // depends on control dependency: [for], data = [i]
}
return l;
} } |
public class class_name {
	/**
	 * Produces a random alphabetic string of the requested length.
	 *
	 * @param stringLength number of random characters to generate (0 or negative yields "")
	 * @return a string of {@code stringLength} random alphabetic characters
	 */
	public static String getRandomString(final int stringLength) {
		final StringBuilder builder = getStringBuild();
		int remaining = stringLength;
		while (remaining > 0) {
			builder.append(getRandomAlphabetic());
			remaining--;
		}
		return builder.toString();
} } | public class class_name {
public static String getRandomString(final int stringLength) {
StringBuilder stringBuilder = getStringBuild();
for (int i = 0; i < stringLength; i++) {
stringBuilder.append(getRandomAlphabetic()); // depends on control dependency: [for], data = [none]
}
return stringBuilder.toString();
} } |
public class class_name {
/**
 * Registers a listener for the given message class on the given channel,
 * creating the per-class multimap on demand. Re-registering the same
 * listener on the same channel is a no-op.
 */
private void addListener(Integer channel, Class<? extends Message> messageClass, MessageListener messageListener) {
// Build up the empty class listener map if not already there
if (!messageListenerMap.containsKey(messageClass)) {
messageListenerMap.put(messageClass, ArrayListMultimap.create());
}
// Get the message listener map for the given class
ArrayListMultimap<Integer, MessageListener> listenerMap = messageListenerMap.get(messageClass);
// Get the message listener array for the given channel
// NOTE(review): per Guava's docs, Multimap.get returns a live view, so the
// add below writes through into the multimap — confirm this is relied upon.
List<MessageListener> messageListeners = listenerMap.get(channel);
if (!messageListeners.contains(messageListener)) {
messageListeners.add(messageListener);
}
} } | public class class_name {
private void addListener(Integer channel, Class<? extends Message> messageClass, MessageListener messageListener) {
// Build up the empty class listener map if not already there
if (!messageListenerMap.containsKey(messageClass)) {
messageListenerMap.put(messageClass, ArrayListMultimap.create()); // depends on control dependency: [if], data = [none]
}
// Get the message listener map for the given class
ArrayListMultimap<Integer, MessageListener> listenerMap = messageListenerMap.get(messageClass);
// Get the message listener array for the given channel
List<MessageListener> messageListeners = listenerMap.get(channel);
if (!messageListeners.contains(messageListener)) {
messageListeners.add(messageListener); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
	/**
	 * Converts a raw value to the Java type matching the given primitive type
	 * info. Only {@link String} inputs are actively converted; null passes
	 * through as null and any other value is returned unchanged.
	 */
	public static Object convert(Object value, PrimitiveTypeInfo typeInfo) {
		if (!(value instanceof String)) {
			// null and already-typed values need no conversion.
			return value;
		}
		return CONVERTER.convert((String) value, type(typeInfo));
} } | public class class_name {
public static Object convert(Object value, PrimitiveTypeInfo typeInfo) {
if (value == null) {
return null; // depends on control dependency: [if], data = [none]
}
if (value instanceof String) {
return CONVERTER.convert((String) value, type(typeInfo)); // depends on control dependency: [if], data = [none]
}
return value;
} } |
public class class_name {
/**
 * Fetches a site's diagnostic category asynchronously.
 *
 * @param resourceGroupName the resource group containing the web app
 * @param siteName the web app name
 * @param diagnosticCategory the diagnostic category identifier
 * @return an Observable emitting the service response with the category
 * @throws IllegalArgumentException if any required parameter or client setting is null
 */
public Observable<ServiceResponse<DiagnosticCategoryInner>> getSiteDiagnosticCategoryWithServiceResponseAsync(String resourceGroupName, String siteName, String diagnosticCategory) {
// Validate required parameters and client-level settings up front.
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (siteName == null) {
throw new IllegalArgumentException("Parameter siteName is required and cannot be null.");
}
if (diagnosticCategory == null) {
throw new IllegalArgumentException("Parameter diagnosticCategory is required and cannot be null.");
}
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
// Issue the REST call, then deserialize the raw response; any parsing
// failure is propagated through the Observable rather than thrown.
return service.getSiteDiagnosticCategory(resourceGroupName, siteName, diagnosticCategory, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<DiagnosticCategoryInner>>>() {
@Override
public Observable<ServiceResponse<DiagnosticCategoryInner>> call(Response<ResponseBody> response) {
try {
ServiceResponse<DiagnosticCategoryInner> clientResponse = getSiteDiagnosticCategoryDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
} } | public class class_name {
public Observable<ServiceResponse<DiagnosticCategoryInner>> getSiteDiagnosticCategoryWithServiceResponseAsync(String resourceGroupName, String siteName, String diagnosticCategory) {
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (siteName == null) {
throw new IllegalArgumentException("Parameter siteName is required and cannot be null.");
}
if (diagnosticCategory == null) {
throw new IllegalArgumentException("Parameter diagnosticCategory is required and cannot be null.");
}
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
return service.getSiteDiagnosticCategory(resourceGroupName, siteName, diagnosticCategory, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<DiagnosticCategoryInner>>>() {
@Override
public Observable<ServiceResponse<DiagnosticCategoryInner>> call(Response<ResponseBody> response) {
try {
ServiceResponse<DiagnosticCategoryInner> clientResponse = getSiteDiagnosticCategoryDelegate(response);
return Observable.just(clientResponse); // depends on control dependency: [try], data = [none]
} catch (Throwable t) {
return Observable.error(t);
} // depends on control dependency: [catch], data = [none]
}
});
} } |
public class class_name {
/**
 * Renders the dirty column families as "name (cfId), " pairs for logging.
 * CFs whose metadata is gone are shown as "<deleted>".
 * NOTE(review): a trailing ", " is left after the last entry — presumably
 * acceptable for a debug string; confirm before changing.
 */
public String dirtyString()
{
StringBuilder sb = new StringBuilder();
for (UUID cfId : getDirtyCFIDs())
{
CFMetaData m = Schema.instance.getCFMetaData(cfId);
sb.append(m == null ? "<deleted>" : m.cfName).append(" (").append(cfId).append("), ");
}
return sb.toString();
} } | public class class_name {
public String dirtyString()
{
StringBuilder sb = new StringBuilder();
for (UUID cfId : getDirtyCFIDs())
{
CFMetaData m = Schema.instance.getCFMetaData(cfId);
sb.append(m == null ? "<deleted>" : m.cfName).append(" (").append(cfId).append("), "); // depends on control dependency: [for], data = [cfId]
}
return sb.toString();
} } |
public class class_name {
/**
 * Assigns {@code value} to the matching attribute(s) of an any-type object.
 * A null value clears every attribute. Otherwise each attribute except the
 * type attribute either receives the value (when its implementation class
 * can hold it) or is cleared; if no attribute can hold the value an
 * IllegalStateException is thrown.
 */
public static void setValue(MetaPartition partition, AnyTypeObject dataObject, Object value) {
MetaDataObject meta = (MetaDataObject) partition.getMeta(dataObject.getClass());
if (value == null) {
// Clearing: null out every attribute, including the type attribute.
for (MetaAttribute attr : meta.getAttributes()) {
attr.setValue(dataObject, null);
}
} else {
boolean found = false;
for (MetaAttribute attr : meta.getAttributes()) {
// The type attribute is never overwritten here.
if (attr.getName().equals(TYPE_ATTRIBUTE)) {
continue;
}
if (attr.getType().getImplementationClass().isAssignableFrom(value.getClass())) {
// NOTE(review): no break — if several attributes are assignable,
// each of them receives the value; confirm this is intended.
attr.setValue(dataObject, value);
found = true;
} else {
attr.setValue(dataObject, null);
}
}
if (!found) {
throw new IllegalStateException("cannot assign " + value + " to " + dataObject);
}
}
} } | public class class_name {
public static void setValue(MetaPartition partition, AnyTypeObject dataObject, Object value) {
MetaDataObject meta = (MetaDataObject) partition.getMeta(dataObject.getClass());
if (value == null) {
for (MetaAttribute attr : meta.getAttributes()) {
attr.setValue(dataObject, null); // depends on control dependency: [for], data = [attr]
}
} else {
boolean found = false;
for (MetaAttribute attr : meta.getAttributes()) {
if (attr.getName().equals(TYPE_ATTRIBUTE)) {
continue;
}
if (attr.getType().getImplementationClass().isAssignableFrom(value.getClass())) {
attr.setValue(dataObject, value); // depends on control dependency: [if], data = [none]
found = true; // depends on control dependency: [if], data = [none]
} else {
attr.setValue(dataObject, null); // depends on control dependency: [if], data = [none]
}
}
if (!found) {
throw new IllegalStateException("cannot assign " + value + " to " + dataObject);
}
}
} } |
public class class_name {
	/**
	 * Checks whether the given file extension denotes a known image format.
	 * The comparison is case-insensitive.
	 *
	 * @param fileExtension the extension to test; may be null or empty
	 * @return true if the extension is contained in IMAGE_FILE_EXTENSIONS
	 */
	public static boolean isImage(@Nullable String fileExtension) {
		// NOTE(review): toLowerCase() uses the default locale; consider
		// Locale.ROOT to avoid surprises (e.g. Turkish dotless i) — confirm.
		return !StringUtils.isEmpty(fileExtension)
				&& IMAGE_FILE_EXTENSIONS.contains(fileExtension.toLowerCase());
} } | public class class_name {
public static boolean isImage(@Nullable String fileExtension) {
if (StringUtils.isEmpty(fileExtension)) {
return false; // depends on control dependency: [if], data = [none]
}
return IMAGE_FILE_EXTENSIONS.contains(fileExtension.toLowerCase());
} } |
public class class_name {
	/**
	 * Tests whether the annotation's {@code cases} attribute covers the given
	 * build case. An absent attribute or an explicitly empty array means the
	 * annotation applies to every build case.
	 *
	 * @param anno the annotation to inspect; must not be null
	 * @param buildCase the build case to look for; must not be null
	 * @return whether the build case applies to this annotation
	 */
	public static boolean containsBuildCase(final Annotation anno, final BuildCase buildCase) {
		Objects.requireNonNull(anno);
		Objects.requireNonNull(buildCase);
		final Optional<BuildCase[]> attrCases = getAnnotationAttribute(anno, "cases", BuildCase[].class);
		if (!attrCases.isPresent()) {
			// No "cases" attribute at all: matches every build case.
			return true;
		}
		final BuildCase[] declared = attrCases.get();
		if (declared.length == 0) {
			// Explicitly empty array: also matches every build case.
			return true;
		}
		for (final BuildCase candidate : declared) {
			if (candidate == buildCase) {
				return true;
			}
		}
		return false;
} } | public class class_name {
public static boolean containsBuildCase(final Annotation anno, final BuildCase buildCase) {
Objects.requireNonNull(anno);
Objects.requireNonNull(buildCase);
final Optional<BuildCase[]> attrCases = getAnnotationAttribute(anno, "cases", BuildCase[].class);
if(attrCases.isPresent()) {
final BuildCase[] casesValue = attrCases.get();
if(casesValue.length == 0) {
// 値が空の配列の場合
return true;
// depends on control dependency: [if], data = [none]
}
for(BuildCase value : casesValue) {
if(value == buildCase) {
return true;
// depends on control dependency: [if], data = [none]
}
}
return false;
// depends on control dependency: [if], data = [none]
}
// 属性を持たない場合
return true;
} } |
public class class_name {
/**
 * Marshalls the given request's fields into the protocol marshaller.
 *
 * @param getCampaignsRequest the request to marshall; must not be null
 * @param protocolMarshaller the marshaller receiving the bound fields
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(GetCampaignsRequest getCampaignsRequest, ProtocolMarshaller protocolMarshaller) {
if (getCampaignsRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(getCampaignsRequest.getApplicationId(), APPLICATIONID_BINDING);
protocolMarshaller.marshall(getCampaignsRequest.getPageSize(), PAGESIZE_BINDING);
protocolMarshaller.marshall(getCampaignsRequest.getToken(), TOKEN_BINDING);
} catch (Exception e) {
// Wrap any marshalling failure, preserving the original cause.
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(GetCampaignsRequest getCampaignsRequest, ProtocolMarshaller protocolMarshaller) {
if (getCampaignsRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(getCampaignsRequest.getApplicationId(), APPLICATIONID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(getCampaignsRequest.getPageSize(), PAGESIZE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(getCampaignsRequest.getToken(), TOKEN_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Demonstrates MongoDB change streams: a basic watch, the UPDATE_LOOKUP
 * option, a $match pipeline, and resuming from a resume token.
 *
 * Fix: the MongoClient was never closed; the demo body is now wrapped in
 * try/finally so the client (and its connection pool) is always released.
 *
 * @param args optional single connection string; defaults to a local 3-node replica set
 */
public static void main(final String[] args) {
MongoClient mongoClient;
if (args.length == 0) {
// connect to the local database server
mongoClient = MongoClients.create("mongodb://localhost:27017,localhost:27018,localhost:27019");
} else {
mongoClient = MongoClients.create(args[0]);
}
try {
// Select the MongoDB database.
MongoDatabase database = mongoClient.getDatabase("testChangeStreams");
database.drop();
sleep();
// Select the collection to query.
MongoCollection<Document> collection = database.getCollection("documents");
/*
 * Example 1
 * Create a simple change stream against an existing collection.
 */
System.out.println("1. Initial document from the Change Stream:");
// Create the change stream cursor.
MongoCursor<ChangeStreamDocument<Document>> cursor = collection.watch().iterator();
// Insert a test document into the collection.
collection.insertOne(Document.parse("{username: 'alice123', name: 'Alice'}"));
ChangeStreamDocument<Document> next = cursor.next();
System.out.println(next);
cursor.close();
sleep();
/*
 * Example 2
 * Create a change stream with 'lookup' option enabled.
 * The test document will be returned with a full version of the updated document.
 */
System.out.println("2. Document from the Change Stream, with lookup enabled:");
// Create the change stream cursor.
cursor = collection.watch().fullDocument(FullDocument.UPDATE_LOOKUP).iterator();
// Update the test document.
collection.updateOne(Document.parse("{username: 'alice123'}"), Document.parse("{$set : { email: 'alice@example.com'}}"));
// Block until the next result is returned
next = cursor.next();
System.out.println(next);
cursor.close();
sleep();
/*
 * Example 3
 * Create a change stream with 'lookup' option using a $match and ($redact or $project) stage.
 */
System.out.println("3. Document from the Change Stream, with lookup enabled, matching `update` operations only: ");
// Insert some dummy data.
collection.insertMany(asList(Document.parse("{updateMe: 1}"), Document.parse("{replaceMe: 1}")));
// Create $match pipeline stage.
List<Bson> pipeline = singletonList(
Aggregates.match(
Filters.or(
Document.parse("{'fullDocument.username': 'alice123'}"),
Filters.in("operationType", asList("update", "replace", "delete"))
)
)
);
// Create the change stream cursor with $match.
cursor = collection.watch(pipeline).fullDocument(FullDocument.UPDATE_LOOKUP).iterator();
// Forward to the end of the change stream
next = cursor.tryNext();
// Update the test document.
collection.updateOne(Filters.eq("updateMe", 1), Updates.set("updated", true));
next = cursor.next();
System.out.println(format("Update operationType: %s %n %s", next.getUpdateDescription(), next));
// Replace the test document.
collection.replaceOne(Filters.eq("replaceMe", 1), Document.parse("{replaced: true}"));
next = cursor.next();
System.out.println(format("Replace operationType: %s", next));
// Delete the test document.
collection.deleteOne(Filters.eq("username", "alice123"));
next = cursor.next();
System.out.println(format("Delete operationType: %s", next));
cursor.close();
sleep();
/*
 * Example 4
 * Resume a change stream using a resume token.
 */
System.out.println("4. Document from the Change Stream including a resume token:");
// Get the resume token from the last document we saw in the previous change stream cursor.
BsonDocument resumeToken = next.getResumeToken();
System.out.println(resumeToken);
// Pass the resume token to the resume after function to continue the change stream cursor.
cursor = collection.watch().resumeAfter(resumeToken).iterator();
// Insert a test document.
collection.insertOne(Document.parse("{test: 'd'}"));
// Block until the next result is returned
next = cursor.next();
System.out.println(next);
cursor.close();
} finally {
// Always release the client's resources (connections, monitor threads).
mongoClient.close();
}
} } | public class class_name {
public static void main(final String[] args) {
MongoClient mongoClient;
if (args.length == 0) {
// connect to the local database server
mongoClient = MongoClients.create("mongodb://localhost:27017,localhost:27018,localhost:27019");
} else {
mongoClient = MongoClients.create(args[0]); // depends on control dependency: [if], data = [none]
}
// Select the MongoDB database.
MongoDatabase database = mongoClient.getDatabase("testChangeStreams");
database.drop();
sleep();
// Select the collection to query.
MongoCollection<Document> collection = database.getCollection("documents");
/*
* Example 1
* Create a simple change stream against an existing collection.
*/
System.out.println("1. Initial document from the Change Stream:");
// Create the change stream cursor.
MongoCursor<ChangeStreamDocument<Document>> cursor = collection.watch().iterator();
// Insert a test document into the collection.
collection.insertOne(Document.parse("{username: 'alice123', name: 'Alice'}"));
ChangeStreamDocument<Document> next = cursor.next();
System.out.println(next);
cursor.close();
sleep();
/*
* Example 2
* Create a change stream with 'lookup' option enabled.
* The test document will be returned with a full version of the updated document.
*/
System.out.println("2. Document from the Change Stream, with lookup enabled:");
// Create the change stream cursor.
cursor = collection.watch().fullDocument(FullDocument.UPDATE_LOOKUP).iterator();
// Update the test document.
collection.updateOne(Document.parse("{username: 'alice123'}"), Document.parse("{$set : { email: 'alice@example.com'}}"));
// Block until the next result is returned
next = cursor.next();
System.out.println(next);
cursor.close();
sleep();
/*
* Example 3
* Create a change stream with 'lookup' option using a $match and ($redact or $project) stage.
*/
System.out.println("3. Document from the Change Stream, with lookup enabled, matching `update` operations only: ");
// Insert some dummy data.
collection.insertMany(asList(Document.parse("{updateMe: 1}"), Document.parse("{replaceMe: 1}")));
// Create $match pipeline stage.
List<Bson> pipeline = singletonList(
Aggregates.match(
Filters.or(
Document.parse("{'fullDocument.username': 'alice123'}"),
Filters.in("operationType", asList("update", "replace", "delete"))
)
)
);
// Create the change stream cursor with $match.
cursor = collection.watch(pipeline).fullDocument(FullDocument.UPDATE_LOOKUP).iterator();
// Forward to the end of the change stream
next = cursor.tryNext();
// Update the test document.
collection.updateOne(Filters.eq("updateMe", 1), Updates.set("updated", true));
next = cursor.next();
System.out.println(format("Update operationType: %s %n %s", next.getUpdateDescription(), next));
// Replace the test document.
collection.replaceOne(Filters.eq("replaceMe", 1), Document.parse("{replaced: true}"));
next = cursor.next();
System.out.println(format("Replace operationType: %s", next));
// Delete the test document.
collection.deleteOne(Filters.eq("username", "alice123"));
next = cursor.next();
System.out.println(format("Delete operationType: %s", next));
cursor.close();
sleep();
/**
* Example 4
* Resume a change stream using a resume token.
*/
System.out.println("4. Document from the Change Stream including a resume token:");
// Get the resume token from the last document we saw in the previous change stream cursor.
BsonDocument resumeToken = next.getResumeToken();
System.out.println(resumeToken);
// Pass the resume token to the resume after function to continue the change stream cursor.
cursor = collection.watch().resumeAfter(resumeToken).iterator();
// Insert a test document.
collection.insertOne(Document.parse("{test: 'd'}"));
// Block until the next result is returned
next = cursor.next();
System.out.println(next);
cursor.close();
} } |
public class class_name {
protected Object getDataValue(Trace trace, Node node, Direction direction, Map<String, ?> headers,
Object[] values) {
if (source == DataSource.Content) {
return values[index];
} else if (source == DataSource.Header) {
return headers.get(key);
}
return null;
} } | public class class_name {
protected Object getDataValue(Trace trace, Node node, Direction direction, Map<String, ?> headers,
Object[] values) {
if (source == DataSource.Content) {
return values[index]; // depends on control dependency: [if], data = [none]
} else if (source == DataSource.Header) {
return headers.get(key); // depends on control dependency: [if], data = [none]
}
return null;
} } |
public class class_name {
public static String convertFormat(String format) {
if (format == null)
return null;
else {
// day of week
format = format.replaceAll("EEE", "D");
// year
format = format.replaceAll("yy", "y");
// month
if (format.indexOf("MMM") != -1) {
format = format.replaceAll("MMM", "M");
} else {
format = format.replaceAll("M", "m");
}
return format;
}
} } | public class class_name {
public static String convertFormat(String format) {
if (format == null)
return null;
else {
// day of week
format = format.replaceAll("EEE", "D"); // depends on control dependency: [if], data = [none]
// year
format = format.replaceAll("yy", "y"); // depends on control dependency: [if], data = [none]
// month
if (format.indexOf("MMM") != -1) {
format = format.replaceAll("MMM", "M"); // depends on control dependency: [if], data = [none]
} else {
format = format.replaceAll("M", "m"); // depends on control dependency: [if], data = [none]
}
return format; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
protected int writeUTF16Surrogate(char c, char ch[], int i, int end)
throws IOException
{
int codePoint = 0;
if (i + 1 >= end)
{
throw new IOException(
Utils.messages.createMessage(
MsgKey.ER_INVALID_UTF16_SURROGATE,
new Object[] { Integer.toHexString((int) c)}));
}
final char high = c;
final char low = ch[i+1];
if (!Encodings.isLowUTF16Surrogate(low)) {
throw new IOException(
Utils.messages.createMessage(
MsgKey.ER_INVALID_UTF16_SURROGATE,
new Object[] {
Integer.toHexString((int) c)
+ " "
+ Integer.toHexString(low)}));
}
final java.io.Writer writer = m_writer;
// If we make it to here we have a valid high, low surrogate pair
if (m_encodingInfo.isInEncoding(c,low)) {
// If the character formed by the surrogate pair
// is in the encoding, so just write it out
writer.write(ch,i,2);
}
else {
// Don't know what to do with this char, it is
// not in the encoding and not a high char in
// a surrogate pair, so write out as an entity ref
final String encoding = getEncoding();
if (encoding != null) {
/* The output encoding is known,
* so somthing is wrong.
*/
codePoint = Encodings.toCodePoint(high, low);
// not in the encoding, so write out a character reference
writer.write('&');
writer.write('#');
writer.write(Integer.toString(codePoint));
writer.write(';');
} else {
/* The output encoding is not known,
* so just write it out as-is.
*/
writer.write(ch, i, 2);
}
}
// non-zero only if character reference was written out.
return codePoint;
} } | public class class_name {
protected int writeUTF16Surrogate(char c, char ch[], int i, int end)
throws IOException
{
int codePoint = 0;
if (i + 1 >= end)
{
throw new IOException(
Utils.messages.createMessage(
MsgKey.ER_INVALID_UTF16_SURROGATE,
new Object[] { Integer.toHexString((int) c)}));
}
final char high = c;
final char low = ch[i+1];
if (!Encodings.isLowUTF16Surrogate(low)) {
throw new IOException(
Utils.messages.createMessage(
MsgKey.ER_INVALID_UTF16_SURROGATE,
new Object[] {
Integer.toHexString((int) c)
+ " "
+ Integer.toHexString(low)}));
}
final java.io.Writer writer = m_writer;
// If we make it to here we have a valid high, low surrogate pair
if (m_encodingInfo.isInEncoding(c,low)) {
// If the character formed by the surrogate pair
// is in the encoding, so just write it out
writer.write(ch,i,2);
}
else {
// Don't know what to do with this char, it is
// not in the encoding and not a high char in
// a surrogate pair, so write out as an entity ref
final String encoding = getEncoding();
if (encoding != null) {
/* The output encoding is known,
* so somthing is wrong.
*/
codePoint = Encodings.toCodePoint(high, low); // depends on control dependency: [if], data = [none]
// not in the encoding, so write out a character reference
writer.write('&'); // depends on control dependency: [if], data = [none]
writer.write('#'); // depends on control dependency: [if], data = [none]
writer.write(Integer.toString(codePoint)); // depends on control dependency: [if], data = [none]
writer.write(';'); // depends on control dependency: [if], data = [none]
} else {
/* The output encoding is not known,
* so just write it out as-is.
*/
writer.write(ch, i, 2); // depends on control dependency: [if], data = [none]
}
}
// non-zero only if character reference was written out.
return codePoint;
} } |
public class class_name {
synchronized long calculateNextExpiration()
{
boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
if (isTraceOn && tc.isEntryEnabled())
Tr.entry(tc, "calculateNextExpiration: " + this);
ivLastExpiration = ivExpiration; // 598265
if (ivParsedScheduleExpression != null) {
// F7437591.codRev - getNextTimeout returns -1 for "no more timeouts",
// but this class uses ivExpiration=0.
ivExpiration = Math.max(0, ivParsedScheduleExpression.getNextTimeout(ivExpiration));
}
else {
if (ivInterval > 0) {
ivExpiration += ivInterval; // 597753
}
else {
ivExpiration = 0;
}
}
if (isTraceOn && tc.isEntryEnabled())
Tr.exit(tc, "calculateNextExpiration: " + ivExpiration); // 597753
return ivExpiration;
} } | public class class_name {
synchronized long calculateNextExpiration()
{
boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
if (isTraceOn && tc.isEntryEnabled())
Tr.entry(tc, "calculateNextExpiration: " + this);
ivLastExpiration = ivExpiration; // 598265
if (ivParsedScheduleExpression != null) {
// F7437591.codRev - getNextTimeout returns -1 for "no more timeouts",
// but this class uses ivExpiration=0.
ivExpiration = Math.max(0, ivParsedScheduleExpression.getNextTimeout(ivExpiration)); // depends on control dependency: [if], data = [none]
}
else {
if (ivInterval > 0) {
ivExpiration += ivInterval; // 597753 // depends on control dependency: [if], data = [none]
}
else {
ivExpiration = 0; // depends on control dependency: [if], data = [none]
}
}
if (isTraceOn && tc.isEntryEnabled())
Tr.exit(tc, "calculateNextExpiration: " + ivExpiration); // 597753
return ivExpiration;
} } |
public class class_name {
protected Object getNextEntryFromReader(Writer writer)
{
try
{
parseError = false;
ValidationResult parseResult = reader.read();
if (parseResult.getMessages("FT.10").size() >= 1 && (fixMode || fixDiagnoseMode))
{
parseResult.removeMessage("FT.10"); // writer fixes automatically if quotes are not given for qualifier value in fix/fix_diagnose mode.
}
if (parseResult.count() != 0)
{
parseResults.add(parseResult);
writer.write("\n");
for (ValidationMessage<Origin> validationMessage : parseResult.getMessages())
{
validationMessage.writeMessage(writer);
}
parseError = true;
}
if (!reader.isEntry())
{
return null;
}
}
catch (IOException e)
{
e.printStackTrace();
}
return reader.getEntry();
} } | public class class_name {
protected Object getNextEntryFromReader(Writer writer)
{
try
{
parseError = false; // depends on control dependency: [try], data = [none]
ValidationResult parseResult = reader.read();
if (parseResult.getMessages("FT.10").size() >= 1 && (fixMode || fixDiagnoseMode))
{
parseResult.removeMessage("FT.10"); // writer fixes automatically if quotes are not given for qualifier value in fix/fix_diagnose mode. // depends on control dependency: [if], data = [none]
}
if (parseResult.count() != 0)
{
parseResults.add(parseResult); // depends on control dependency: [if], data = [none]
writer.write("\n"); // depends on control dependency: [if], data = [none]
for (ValidationMessage<Origin> validationMessage : parseResult.getMessages())
{
validationMessage.writeMessage(writer); // depends on control dependency: [for], data = [validationMessage]
}
parseError = true; // depends on control dependency: [if], data = [none]
}
if (!reader.isEntry())
{
return null; // depends on control dependency: [if], data = [none]
}
}
catch (IOException e)
{
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
return reader.getEntry();
} } |
public class class_name {
private void writeByteBufferFull(ByteBuffer buf, boolean withHeader)
{
if (withHeader) {
writeByteBufferSerdeHeader(buf, FULL_ENCODING_MODE);
}
writeByteBufferCommonFields(buf);
buf.asLongBuffer().put(histogram);
buf.position(buf.position() + Long.BYTES * histogram.length);
} } | public class class_name {
private void writeByteBufferFull(ByteBuffer buf, boolean withHeader)
{
if (withHeader) {
writeByteBufferSerdeHeader(buf, FULL_ENCODING_MODE); // depends on control dependency: [if], data = [none]
}
writeByteBufferCommonFields(buf);
buf.asLongBuffer().put(histogram);
buf.position(buf.position() + Long.BYTES * histogram.length);
} } |
public class class_name {
public boolean contains(Object value) {
if (value == null) {
throw new NullPointerException();
}
FastHashtableEntry tab[] = table;
for (int i = tab.length ; i-- > 0 ;) {
for (FastHashtableEntry e = tab[i] ; e != null ; e = e.next) {
if (e.value.equals(value)) {
return true;
}
}
}
return false;
} } | public class class_name {
public boolean contains(Object value) {
if (value == null) {
throw new NullPointerException();
}
FastHashtableEntry tab[] = table;
for (int i = tab.length ; i-- > 0 ;) {
for (FastHashtableEntry e = tab[i] ; e != null ; e = e.next) {
if (e.value.equals(value)) {
return true; // depends on control dependency: [if], data = [none]
}
}
}
return false;
} } |
public class class_name {
public void setReceiversClassesBlackList(final List<Class> receiversBlacklist) {
List<String> values = new LinkedList<String>();
for (Class aClass : receiversBlacklist) {
values.add(aClass.getName());
}
setReceiversBlackList(values);
} } | public class class_name {
public void setReceiversClassesBlackList(final List<Class> receiversBlacklist) {
List<String> values = new LinkedList<String>();
for (Class aClass : receiversBlacklist) {
values.add(aClass.getName()); // depends on control dependency: [for], data = [aClass]
}
setReceiversBlackList(values);
} } |
public class class_name {
public int getValue(int ch, boolean [] inBlockZero)
{
// valid, uncompacted trie and valid c?
if (m_isCompacted_ || ch > UCharacter.MAX_VALUE || ch < 0) {
if (inBlockZero != null) {
inBlockZero[0] = true;
}
return 0;
}
int block = m_index_[ch >> SHIFT_];
if (inBlockZero != null) {
inBlockZero[0] = (block == 0);
}
return m_data_[Math.abs(block) + (ch & MASK_)];
} } | public class class_name {
public int getValue(int ch, boolean [] inBlockZero)
{
// valid, uncompacted trie and valid c?
if (m_isCompacted_ || ch > UCharacter.MAX_VALUE || ch < 0) {
if (inBlockZero != null) {
inBlockZero[0] = true; // depends on control dependency: [if], data = [none]
}
return 0; // depends on control dependency: [if], data = [none]
}
int block = m_index_[ch >> SHIFT_];
if (inBlockZero != null) {
inBlockZero[0] = (block == 0); // depends on control dependency: [if], data = [none]
}
return m_data_[Math.abs(block) + (ch & MASK_)];
} } |
public class class_name {
public static String getIp() {
if (null != cachedIpAddress) {
return cachedIpAddress;
}
Enumeration<NetworkInterface> netInterfaces;
try {
netInterfaces = NetworkInterface.getNetworkInterfaces();
} catch (final SocketException ex) {
throw new EagleFrameException(ex);
}
String localIpAddress = null;
while (netInterfaces.hasMoreElements()) {
NetworkInterface netInterface = netInterfaces.nextElement();
Enumeration<InetAddress> ipAddresses = netInterface.getInetAddresses();
while (ipAddresses.hasMoreElements()) {
InetAddress ipAddress = ipAddresses.nextElement();
if (isPublicIpAddress(ipAddress)) {
String publicIpAddress = ipAddress.getHostAddress();
cachedIpAddress = publicIpAddress;
return publicIpAddress;
}
if (isLocalIpAddress(ipAddress)) {
localIpAddress = ipAddress.getHostAddress();
}
}
}
cachedIpAddress = localIpAddress;
return localIpAddress;
} } | public class class_name {
public static String getIp() {
if (null != cachedIpAddress) {
return cachedIpAddress; // depends on control dependency: [if], data = [none]
}
Enumeration<NetworkInterface> netInterfaces;
try {
netInterfaces = NetworkInterface.getNetworkInterfaces(); // depends on control dependency: [try], data = [none]
} catch (final SocketException ex) {
throw new EagleFrameException(ex);
} // depends on control dependency: [catch], data = [none]
String localIpAddress = null;
while (netInterfaces.hasMoreElements()) {
NetworkInterface netInterface = netInterfaces.nextElement();
Enumeration<InetAddress> ipAddresses = netInterface.getInetAddresses();
while (ipAddresses.hasMoreElements()) {
InetAddress ipAddress = ipAddresses.nextElement();
if (isPublicIpAddress(ipAddress)) {
String publicIpAddress = ipAddress.getHostAddress();
cachedIpAddress = publicIpAddress; // depends on control dependency: [if], data = [none]
return publicIpAddress; // depends on control dependency: [if], data = [none]
}
if (isLocalIpAddress(ipAddress)) {
localIpAddress = ipAddress.getHostAddress(); // depends on control dependency: [if], data = [none]
}
}
}
cachedIpAddress = localIpAddress;
return localIpAddress;
} } |
public class class_name {
public double toPValue(double d, int n) {
double b = d / 30 + 1. / (36 * n);
double z = .5 * MathUtil.PISQUARE * MathUtil.PISQUARE * n * b;
// Exponential approximation
if(z < 1.1 || z > 8.5) {
double e = FastMath.exp(0.3885037 - 1.164879 * z);
return (e > 1) ? 1 : (e < 0) ? 0 : e;
}
// Tabular approximation
for(int i = 0; i < 86; i++) {
if(TABPOS[i] >= z) {
// Exact table value
if(TABPOS[i] == z) {
return TABVAL[i];
}
// Linear interpolation
double x1 = TABPOS[i], x0 = TABPOS[i - 1];
double y1 = TABVAL[i], y0 = TABVAL[i - 1];
return y0 + (y1 - y0) * (z - x0) / (x1 - x0);
}
}
return -1;
} } | public class class_name {
public double toPValue(double d, int n) {
double b = d / 30 + 1. / (36 * n);
double z = .5 * MathUtil.PISQUARE * MathUtil.PISQUARE * n * b;
// Exponential approximation
if(z < 1.1 || z > 8.5) {
double e = FastMath.exp(0.3885037 - 1.164879 * z);
return (e > 1) ? 1 : (e < 0) ? 0 : e; // depends on control dependency: [if], data = [none]
}
// Tabular approximation
for(int i = 0; i < 86; i++) {
if(TABPOS[i] >= z) {
// Exact table value
if(TABPOS[i] == z) {
return TABVAL[i]; // depends on control dependency: [if], data = [none]
}
// Linear interpolation
double x1 = TABPOS[i], x0 = TABPOS[i - 1];
double y1 = TABVAL[i], y0 = TABVAL[i - 1];
return y0 + (y1 - y0) * (z - x0) / (x1 - x0); // depends on control dependency: [if], data = [none]
}
}
return -1;
} } |
public class class_name {
public synchronized Thread getFreeThreadAndRun(Runnable runnable) {
for (int i=0; i<pool.length; i++) {
if (pool[i] == null || !pool[i].isAlive()) {
pool[i] = new Thread(runnable);
if (threadsBaseName != null) {
pool[i].setName(threadsBaseName + i);
}
pool[i].setDaemon(true);
pool[i].start();
return pool[i];
}
}
return null;
} } | public class class_name {
public synchronized Thread getFreeThreadAndRun(Runnable runnable) {
for (int i=0; i<pool.length; i++) {
if (pool[i] == null || !pool[i].isAlive()) {
pool[i] = new Thread(runnable);
// depends on control dependency: [if], data = [none]
if (threadsBaseName != null) {
pool[i].setName(threadsBaseName + i);
// depends on control dependency: [if], data = [(threadsBaseName]
}
pool[i].setDaemon(true);
// depends on control dependency: [if], data = [none]
pool[i].start();
// depends on control dependency: [if], data = [none]
return pool[i];
// depends on control dependency: [if], data = [none]
}
}
return null;
} } |
public class class_name {
public double scoreOf(Datum<L, F> example, L label) {
if(example instanceof RVFDatum<?, ?>)return scoreOfRVFDatum((RVFDatum<L,F>)example, label);
int iLabel = labelIndex.indexOf(label);
double score = 0.0;
for (F f : example.asFeatures()) {
score += weight(f, iLabel);
}
return score + thresholds[iLabel];
} } | public class class_name {
public double scoreOf(Datum<L, F> example, L label) {
if(example instanceof RVFDatum<?, ?>)return scoreOfRVFDatum((RVFDatum<L,F>)example, label);
int iLabel = labelIndex.indexOf(label);
double score = 0.0;
for (F f : example.asFeatures()) {
score += weight(f, iLabel);
// depends on control dependency: [for], data = [f]
}
return score + thresholds[iLabel];
} } |
public class class_name {
public HostName toHostName() {
HostName host = fromHost;
if(host == null) {
fromHost = host = toCanonicalHostName();
}
return host;
} } | public class class_name {
public HostName toHostName() {
HostName host = fromHost;
if(host == null) {
fromHost = host = toCanonicalHostName(); // depends on control dependency: [if], data = [none]
}
return host;
} } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.