code
stringlengths 130
281k
| code_dependency
stringlengths 182
306k
|
|---|---|
public class class_name {
void gotoState(@Nonnull ConnectivityState newState) {
checkNotNull(newState, "newState");
if (state != newState && state != ConnectivityState.SHUTDOWN) {
state = newState;
if (listeners.isEmpty()) {
return;
}
// Swap out callback list before calling them, because a callback may register new callbacks,
// if run in direct executor, can cause ConcurrentModificationException.
ArrayList<Listener> savedListeners = listeners;
listeners = new ArrayList<>();
for (Listener listener : savedListeners) {
listener.runInExecutor();
}
}
} }
|
public class class_name {
void gotoState(@Nonnull ConnectivityState newState) {
checkNotNull(newState, "newState");
if (state != newState && state != ConnectivityState.SHUTDOWN) {
state = newState; // depends on control dependency: [if], data = [none]
if (listeners.isEmpty()) {
return; // depends on control dependency: [if], data = [none]
}
// Swap out callback list before calling them, because a callback may register new callbacks,
// if run in direct executor, can cause ConcurrentModificationException.
ArrayList<Listener> savedListeners = listeners;
listeners = new ArrayList<>(); // depends on control dependency: [if], data = [none]
for (Listener listener : savedListeners) {
listener.runInExecutor(); // depends on control dependency: [for], data = [listener]
}
}
} }
|
public class class_name {
private String format(long number, NFRuleSet ruleSet) {
// all API format() routines that take a double vector through
// here. We have these two identical functions-- one taking a
// double and one taking a long-- the couple digits of precision
// that long has but double doesn't (both types are 8 bytes long,
// but double has to borrow some of the mantissa bits to hold
// the exponent).
// Create an empty string buffer where the result will
// be built, and pass it to the rule set (along with an insertion
// position of 0 and the number being formatted) to the rule set
// for formatting
StringBuilder result = new StringBuilder();
if (number == Long.MIN_VALUE) {
// We can't handle this value right now. Provide an accurate default value.
result.append(getDecimalFormat().format(Long.MIN_VALUE));
}
else {
ruleSet.format(number, result, 0, 0);
}
postProcess(result, ruleSet);
return result.toString();
} }
|
public class class_name {
private String format(long number, NFRuleSet ruleSet) {
// all API format() routines that take a double vector through
// here. We have these two identical functions-- one taking a
// double and one taking a long-- the couple digits of precision
// that long has but double doesn't (both types are 8 bytes long,
// but double has to borrow some of the mantissa bits to hold
// the exponent).
// Create an empty string buffer where the result will
// be built, and pass it to the rule set (along with an insertion
// position of 0 and the number being formatted) to the rule set
// for formatting
StringBuilder result = new StringBuilder();
if (number == Long.MIN_VALUE) {
// We can't handle this value right now. Provide an accurate default value.
result.append(getDecimalFormat().format(Long.MIN_VALUE)); // depends on control dependency: [if], data = [Long.MIN_VALUE)]
}
else {
ruleSet.format(number, result, 0, 0); // depends on control dependency: [if], data = [(number]
}
postProcess(result, ruleSet);
return result.toString();
} }
|
public class class_name {
public static int getCDSPosForward(int chromPos, List<Integer> exonStarts, List<Integer> exonEnds,
int cdsStart, int cdsEnd) {
// the genetic coordinate is not in a coding region
if ( (chromPos < (cdsStart+base) ) || ( chromPos > (cdsEnd+base) ) ) {
logger.debug("The "+format(chromPos)+" position is not in a coding region");
return -1;
}
logger.debug("looking for CDS position for " +format(chromPos));
// map the genetic coordinates of coding region on a stretch of a reverse strand
List<Range<Integer>> cdsRegions = getCDSRegions(exonStarts, exonEnds, cdsStart, cdsEnd);
int codingLength = 0;
int lengthExon = 0;
for (Range<Integer> range : cdsRegions) {
int start = range.lowerEndpoint();
int end = range.upperEndpoint();
lengthExon = end - start;
if (start+base <= chromPos && end >= chromPos ) {
return codingLength + (chromPos-start);
}
else {
codingLength += lengthExon;
}
}
return -1;
} }
|
public class class_name {
public static int getCDSPosForward(int chromPos, List<Integer> exonStarts, List<Integer> exonEnds,
int cdsStart, int cdsEnd) {
// the genetic coordinate is not in a coding region
if ( (chromPos < (cdsStart+base) ) || ( chromPos > (cdsEnd+base) ) ) {
logger.debug("The "+format(chromPos)+" position is not in a coding region"); // depends on control dependency: [if], data = [none]
return -1; // depends on control dependency: [if], data = [none]
}
logger.debug("looking for CDS position for " +format(chromPos));
// map the genetic coordinates of coding region on a stretch of a reverse strand
List<Range<Integer>> cdsRegions = getCDSRegions(exonStarts, exonEnds, cdsStart, cdsEnd);
int codingLength = 0;
int lengthExon = 0;
for (Range<Integer> range : cdsRegions) {
int start = range.lowerEndpoint();
int end = range.upperEndpoint();
lengthExon = end - start; // depends on control dependency: [for], data = [none]
if (start+base <= chromPos && end >= chromPos ) {
return codingLength + (chromPos-start); // depends on control dependency: [if], data = [none]
}
else {
codingLength += lengthExon; // depends on control dependency: [if], data = [none]
}
}
return -1;
} }
|
public class class_name {
public void deleteEntities(Collection<AtlasVertex> instanceVertices) throws AtlasException {
RequestContext requestContext = RequestContext.get();
Set<AtlasVertex> deletionCandidateVertices = new HashSet<>();
for (AtlasVertex instanceVertex : instanceVertices) {
String guid = GraphHelper.getGuid(instanceVertex);
Id.EntityState state = GraphHelper.getState(instanceVertex);
if (requestContext.getDeletedEntityIds().contains(guid) || state == Id.EntityState.DELETED) {
if (LOG.isDebugEnabled()) {
LOG.debug("Skipping deletion of {} as it is already deleted", guid);
}
continue;
}
// Get GUIDs and vertices for all deletion candidates.
Set<VertexInfo> compositeVertices = graphHelper.getCompositeVertices(instanceVertex);
// Record all deletion candidate GUIDs in RequestContext
// and gather deletion candidate vertices.
for (VertexInfo vertexInfo : compositeVertices) {
requestContext.recordEntityDelete(vertexInfo.getGuid(), vertexInfo.getTypeName());
deletionCandidateVertices.add(vertexInfo.getVertex());
}
}
// Delete traits and vertices.
for (AtlasVertex deletionCandidateVertex : deletionCandidateVertices) {
deleteAllTraits(deletionCandidateVertex);
deleteTypeVertex(deletionCandidateVertex, false);
}
} }
|
public class class_name {
public void deleteEntities(Collection<AtlasVertex> instanceVertices) throws AtlasException {
RequestContext requestContext = RequestContext.get();
Set<AtlasVertex> deletionCandidateVertices = new HashSet<>();
for (AtlasVertex instanceVertex : instanceVertices) {
String guid = GraphHelper.getGuid(instanceVertex);
Id.EntityState state = GraphHelper.getState(instanceVertex);
if (requestContext.getDeletedEntityIds().contains(guid) || state == Id.EntityState.DELETED) {
if (LOG.isDebugEnabled()) {
LOG.debug("Skipping deletion of {} as it is already deleted", guid); // depends on control dependency: [if], data = [none]
}
continue;
}
// Get GUIDs and vertices for all deletion candidates.
Set<VertexInfo> compositeVertices = graphHelper.getCompositeVertices(instanceVertex);
// Record all deletion candidate GUIDs in RequestContext
// and gather deletion candidate vertices.
for (VertexInfo vertexInfo : compositeVertices) {
requestContext.recordEntityDelete(vertexInfo.getGuid(), vertexInfo.getTypeName()); // depends on control dependency: [for], data = [vertexInfo]
deletionCandidateVertices.add(vertexInfo.getVertex()); // depends on control dependency: [for], data = [vertexInfo]
}
}
// Delete traits and vertices.
for (AtlasVertex deletionCandidateVertex : deletionCandidateVertices) {
deleteAllTraits(deletionCandidateVertex);
deleteTypeVertex(deletionCandidateVertex, false);
}
} }
|
public class class_name {
public static void transformMessage(Reader reader, Writer stringWriter, Reader readerxsl)
{
try {
StreamSource source = new StreamSource(reader);
Result result = new StreamResult(stringWriter);
TransformerFactory tFact = TransformerFactory.newInstance();
StreamSource streamTransformer = new StreamSource(readerxsl);
Transformer transformer = tFact.newTransformer(streamTransformer);
transformer.transform(source, result);
} catch (TransformerConfigurationException ex) {
ex.printStackTrace();
} catch (TransformerException ex) {
ex.printStackTrace();
}
} }
|
public class class_name {
public static void transformMessage(Reader reader, Writer stringWriter, Reader readerxsl)
{
try {
StreamSource source = new StreamSource(reader);
Result result = new StreamResult(stringWriter);
TransformerFactory tFact = TransformerFactory.newInstance();
StreamSource streamTransformer = new StreamSource(readerxsl);
Transformer transformer = tFact.newTransformer(streamTransformer);
transformer.transform(source, result); // depends on control dependency: [try], data = [none]
} catch (TransformerConfigurationException ex) {
ex.printStackTrace();
} catch (TransformerException ex) { // depends on control dependency: [catch], data = [none]
ex.printStackTrace();
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
@SuppressWarnings("StatementWithEmptyBody")
public static byte[] decode(final String input) {
ByteArrayInputStream in = new ByteArrayInputStream(input.getBytes());
ByteArrayOutputStream out = new ByteArrayOutputStream();
try {
while (decodeChunk(out, in)) {
// do nothing
}
} catch (IOException ex) {
throw new RuntimeException(ex);
}
return out.toByteArray();
} }
|
public class class_name {
@SuppressWarnings("StatementWithEmptyBody")
public static byte[] decode(final String input) {
ByteArrayInputStream in = new ByteArrayInputStream(input.getBytes());
ByteArrayOutputStream out = new ByteArrayOutputStream();
try {
while (decodeChunk(out, in)) {
// do nothing
}
} catch (IOException ex) {
throw new RuntimeException(ex);
} // depends on control dependency: [catch], data = [none]
return out.toByteArray();
} }
|
public class class_name {
public static Method[] getMethods(Class<?> clazz, String... methodNames) {
if (methodNames == null || methodNames.length == 0) {
throw new IllegalArgumentException("You must supply at least one method name.");
}
final List<Method> methodsToMock = new LinkedList<Method>();
Method[] allMethods = null;
if (clazz.isInterface()) {
allMethods = getAllPublicMethods(clazz);
} else {
allMethods = getAllMethods(clazz);
}
for (Method method : allMethods) {
for (String methodName : methodNames) {
if (method.getName().equals(methodName)) {
method.setAccessible(true);
methodsToMock.add(method);
}
}
}
final Method[] methodArray = methodsToMock.toArray(new Method[0]);
if (methodArray.length == 0) {
throw new MethodNotFoundException(String.format(
"No methods matching the name(s) %s were found in the class hierarchy of %s.",
concatenateStrings(methodNames), getType(clazz)));
}
return methodArray;
} }
|
public class class_name {
public static Method[] getMethods(Class<?> clazz, String... methodNames) {
if (methodNames == null || methodNames.length == 0) {
throw new IllegalArgumentException("You must supply at least one method name.");
}
final List<Method> methodsToMock = new LinkedList<Method>();
Method[] allMethods = null;
if (clazz.isInterface()) {
allMethods = getAllPublicMethods(clazz); // depends on control dependency: [if], data = [none]
} else {
allMethods = getAllMethods(clazz); // depends on control dependency: [if], data = [none]
}
for (Method method : allMethods) {
for (String methodName : methodNames) {
if (method.getName().equals(methodName)) {
method.setAccessible(true); // depends on control dependency: [if], data = [none]
methodsToMock.add(method); // depends on control dependency: [if], data = [none]
}
}
}
final Method[] methodArray = methodsToMock.toArray(new Method[0]);
if (methodArray.length == 0) {
throw new MethodNotFoundException(String.format(
"No methods matching the name(s) %s were found in the class hierarchy of %s.",
concatenateStrings(methodNames), getType(clazz)));
}
return methodArray;
} }
|
public class class_name {
private Signer computeSignerByServiceRegion(
String serviceName, String regionId,
String signerRegionOverride,
boolean isRegionIdAsSignerParam) {
String signerType = clientConfiguration.getSignerOverride();
Signer signer = signerType == null
? SignerFactory.getSigner(serviceName, regionId)
: SignerFactory.getSignerByTypeAndService(signerType, serviceName)
;
if (signer instanceof RegionAwareSigner) {
// Overrides the default region computed
RegionAwareSigner regionAwareSigner = (RegionAwareSigner)signer;
// (signerRegionOverride != null) means that it is likely to be AWS
// internal dev work, as "signerRegionOverride" is typically null
// when used in the external release
if (signerRegionOverride != null)
regionAwareSigner.setRegionName(signerRegionOverride);
else if (regionId != null && isRegionIdAsSignerParam)
regionAwareSigner.setRegionName(regionId);
}
if (signer instanceof EndpointPrefixAwareSigner) {
EndpointPrefixAwareSigner endpointPrefixAwareSigner = (EndpointPrefixAwareSigner) signer;
/*
* This will be used to compute the region name required for signing
* if signerRegionOverride is not provided
*/
endpointPrefixAwareSigner.setEndpointPrefix(endpointPrefix);
}
return signer;
} }
|
public class class_name {
private Signer computeSignerByServiceRegion(
String serviceName, String regionId,
String signerRegionOverride,
boolean isRegionIdAsSignerParam) {
String signerType = clientConfiguration.getSignerOverride();
Signer signer = signerType == null
? SignerFactory.getSigner(serviceName, regionId)
: SignerFactory.getSignerByTypeAndService(signerType, serviceName)
;
if (signer instanceof RegionAwareSigner) {
// Overrides the default region computed
RegionAwareSigner regionAwareSigner = (RegionAwareSigner)signer;
// (signerRegionOverride != null) means that it is likely to be AWS
// internal dev work, as "signerRegionOverride" is typically null
// when used in the external release
if (signerRegionOverride != null)
regionAwareSigner.setRegionName(signerRegionOverride);
else if (regionId != null && isRegionIdAsSignerParam)
regionAwareSigner.setRegionName(regionId);
}
if (signer instanceof EndpointPrefixAwareSigner) {
EndpointPrefixAwareSigner endpointPrefixAwareSigner = (EndpointPrefixAwareSigner) signer;
/*
* This will be used to compute the region name required for signing
* if signerRegionOverride is not provided
*/
endpointPrefixAwareSigner.setEndpointPrefix(endpointPrefix); // depends on control dependency: [if], data = [none]
}
return signer;
} }
|
public class class_name {
public List<ProgramElementDoc> excludeDeprecatedMembersAsList(
ProgramElementDoc[] members) {
List<ProgramElementDoc> list = new ArrayList<>();
for (ProgramElementDoc member : members) {
if (member.tags("deprecated").length == 0) {
list.add(member);
}
}
Collections.sort(list);
return list;
} }
|
public class class_name {
public List<ProgramElementDoc> excludeDeprecatedMembersAsList(
ProgramElementDoc[] members) {
List<ProgramElementDoc> list = new ArrayList<>();
for (ProgramElementDoc member : members) {
if (member.tags("deprecated").length == 0) {
list.add(member); // depends on control dependency: [if], data = [none]
}
}
Collections.sort(list);
return list;
} }
|
public class class_name {
protected void addModulesList(Collection<ModuleElement> modules, String text, String tableSummary, Content body) {
Content table = (configuration.isOutputHtml5())
? HtmlTree.TABLE(HtmlStyle.overviewSummary, getTableCaption(new RawHtml(text)))
: HtmlTree.TABLE(HtmlStyle.overviewSummary, tableSummary, getTableCaption(new RawHtml(text)));
table.addContent(getSummaryTableHeader(moduleTableHeader, "col"));
Content tbody = new HtmlTree(HtmlTag.TBODY);
addModulesList(modules, tbody);
table.addContent(tbody);
Content anchor = getMarkerAnchor(text);
Content div = HtmlTree.DIV(HtmlStyle.contentContainer, anchor);
div.addContent(table);
if (configuration.allowTag(HtmlTag.MAIN)) {
htmlTree.addContent(div);
} else {
body.addContent(div);
}
} }
|
public class class_name {
protected void addModulesList(Collection<ModuleElement> modules, String text, String tableSummary, Content body) {
Content table = (configuration.isOutputHtml5())
? HtmlTree.TABLE(HtmlStyle.overviewSummary, getTableCaption(new RawHtml(text)))
: HtmlTree.TABLE(HtmlStyle.overviewSummary, tableSummary, getTableCaption(new RawHtml(text)));
table.addContent(getSummaryTableHeader(moduleTableHeader, "col"));
Content tbody = new HtmlTree(HtmlTag.TBODY);
addModulesList(modules, tbody);
table.addContent(tbody);
Content anchor = getMarkerAnchor(text);
Content div = HtmlTree.DIV(HtmlStyle.contentContainer, anchor);
div.addContent(table);
if (configuration.allowTag(HtmlTag.MAIN)) {
htmlTree.addContent(div); // depends on control dependency: [if], data = [none]
} else {
body.addContent(div); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public final AbstractItem next(boolean allowUnavailable) throws SevereMessageStoreException
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "next", Boolean.valueOf(allowUnavailable));
AbstractItem found = null;
// check from current position
AbstractItemLink lookAt = (AbstractItemLink)advance();
while (null != lookAt && null == found)
{
found = lookAt.matches(_filter, allowUnavailable);
if (null == found)
{
lookAt = (AbstractItemLink)advance();
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "next", found);
return found;
} }
|
public class class_name {
public final AbstractItem next(boolean allowUnavailable) throws SevereMessageStoreException
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "next", Boolean.valueOf(allowUnavailable));
AbstractItem found = null;
// check from current position
AbstractItemLink lookAt = (AbstractItemLink)advance();
while (null != lookAt && null == found)
{
found = lookAt.matches(_filter, allowUnavailable);
if (null == found)
{
lookAt = (AbstractItemLink)advance(); // depends on control dependency: [if], data = [none]
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "next", found);
return found;
} }
|
public class class_name {
public PeriodDuration multipliedBy(int scalar) {
if (scalar == 1) {
return this;
}
return of(period.multipliedBy(scalar), duration.multipliedBy(scalar));
} }
|
public class class_name {
public PeriodDuration multipliedBy(int scalar) {
if (scalar == 1) {
return this; // depends on control dependency: [if], data = [none]
}
return of(period.multipliedBy(scalar), duration.multipliedBy(scalar));
} }
|
public class class_name {
public Collection<FileAnnotation> parse(final File file, final String moduleName) throws InvocationTargetException {
String oldProperty = System.getProperty(SAX_DRIVER_PROPERTY);
if (oldProperty != null) {
System.setProperty(SAX_DRIVER_PROPERTY, SAXParser.class.getName());
}
FileInputStream inputStream = null;
try {
for (AbstractDryParser parser : parsers) {
inputStream = new FileInputStream(file);
if (parser.accepts(inputStream)) {
IOUtils.closeQuietly(inputStream);
inputStream = new FileInputStream(file);
Collection<DuplicateCode> result = parser.parse(inputStream, moduleName);
createLinkNames(result);
Set<FileAnnotation> warnings = Sets.newHashSet();
warnings.addAll(result);
ContextHashCode hashCode = new ContextHashCode();
for (FileAnnotation duplication : warnings) {
duplication.setContextHashCode(hashCode.create(duplication.getFileName(), duplication.getPrimaryLineNumber(),
defaultEncoding));
}
return warnings;
}
}
throw new IOException("No parser found for duplicated code results file " + file.getAbsolutePath());
}
catch (IOException exception) {
throw new InvocationTargetException(exception);
}
finally {
IOUtils.closeQuietly(inputStream);
if (oldProperty != null) {
System.setProperty(SAX_DRIVER_PROPERTY, oldProperty);
}
}
} }
|
public class class_name {
public Collection<FileAnnotation> parse(final File file, final String moduleName) throws InvocationTargetException {
String oldProperty = System.getProperty(SAX_DRIVER_PROPERTY);
if (oldProperty != null) {
System.setProperty(SAX_DRIVER_PROPERTY, SAXParser.class.getName());
}
FileInputStream inputStream = null;
try {
for (AbstractDryParser parser : parsers) {
inputStream = new FileInputStream(file);
if (parser.accepts(inputStream)) {
IOUtils.closeQuietly(inputStream); // depends on control dependency: [if], data = [none]
inputStream = new FileInputStream(file); // depends on control dependency: [if], data = [none]
Collection<DuplicateCode> result = parser.parse(inputStream, moduleName);
createLinkNames(result); // depends on control dependency: [if], data = [none]
Set<FileAnnotation> warnings = Sets.newHashSet();
warnings.addAll(result); // depends on control dependency: [if], data = [none]
ContextHashCode hashCode = new ContextHashCode();
for (FileAnnotation duplication : warnings) {
duplication.setContextHashCode(hashCode.create(duplication.getFileName(), duplication.getPrimaryLineNumber(),
defaultEncoding)); // depends on control dependency: [for], data = [duplication]
}
return warnings; // depends on control dependency: [if], data = [none]
}
}
throw new IOException("No parser found for duplicated code results file " + file.getAbsolutePath());
}
catch (IOException exception) {
throw new InvocationTargetException(exception);
}
finally {
IOUtils.closeQuietly(inputStream);
if (oldProperty != null) {
System.setProperty(SAX_DRIVER_PROPERTY, oldProperty);
}
}
} }
|
public class class_name {
TableMapping generate(TableMetaData tableMetaData, ClassLoader classLoader) {
try {
String className = packageName + "." + naming.createClassName(tableMetaData.getTableRef());
LOGGER.info(String.format("Mapping table %s to %s", tableMetaData.getTableRef(), className));
TableMapping result = new TableMapping(tableMetaData);
CtClass ctClass = pool.makeClass(className);
for (ColumnMetaData ai : tableMetaData.getColumnMetaData()) {
generatePropertyForAttribute(result, ctClass, ai);
}
Class<?> clazz = loadClass(classLoader, ctClass);
result.setGeneratedClass(clazz);
return result;
} catch (CannotCompileException e) {
throw new RuntimeException("Problem generating class for table " + tableMetaData.getTableRef(), e);
}
} }
|
public class class_name {
TableMapping generate(TableMetaData tableMetaData, ClassLoader classLoader) {
try {
String className = packageName + "." + naming.createClassName(tableMetaData.getTableRef());
LOGGER.info(String.format("Mapping table %s to %s", tableMetaData.getTableRef(), className)); // depends on control dependency: [try], data = [none]
TableMapping result = new TableMapping(tableMetaData);
CtClass ctClass = pool.makeClass(className);
for (ColumnMetaData ai : tableMetaData.getColumnMetaData()) {
generatePropertyForAttribute(result, ctClass, ai); // depends on control dependency: [for], data = [ai]
}
Class<?> clazz = loadClass(classLoader, ctClass);
result.setGeneratedClass(clazz);
return result; // depends on control dependency: [try], data = [none]
} catch (CannotCompileException e) {
throw new RuntimeException("Problem generating class for table " + tableMetaData.getTableRef(), e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
@Override
public int compareTo(Object other) {
Index o = (Index) other;
int returnValue = 0;
if ((this.getRelation() != null) && (o.getRelation() != null)) {
returnValue = this.getRelation().compareTo(o.getRelation());
if ((returnValue == 0) && (this.getRelation().getSchema() != null) && (o.getRelation().getSchema() != null)) {
returnValue = StringUtil.trimToEmpty(this.getRelation().getSchema().getName()).compareToIgnoreCase(StringUtil.trimToEmpty(o.getRelation().getSchema().getName()));
}
}
if (returnValue == 0) {
String thisName = StringUtil.trimToEmpty(this.getName());
String oName = StringUtil.trimToEmpty(o.getName());
returnValue = thisName.compareTo(oName);
}
//We should not have two indexes that have the same name and tablename
/*if (returnValue == 0) {
returnValue = this.getColumnName().compareTo(o.getColumnName());
}*/
return returnValue;
} }
|
public class class_name {
@Override
public int compareTo(Object other) {
Index o = (Index) other;
int returnValue = 0;
if ((this.getRelation() != null) && (o.getRelation() != null)) {
returnValue = this.getRelation().compareTo(o.getRelation()); // depends on control dependency: [if], data = [none]
if ((returnValue == 0) && (this.getRelation().getSchema() != null) && (o.getRelation().getSchema() != null)) {
returnValue = StringUtil.trimToEmpty(this.getRelation().getSchema().getName()).compareToIgnoreCase(StringUtil.trimToEmpty(o.getRelation().getSchema().getName())); // depends on control dependency: [if], data = [none]
}
}
if (returnValue == 0) {
String thisName = StringUtil.trimToEmpty(this.getName());
String oName = StringUtil.trimToEmpty(o.getName());
returnValue = thisName.compareTo(oName); // depends on control dependency: [if], data = [none]
}
//We should not have two indexes that have the same name and tablename
/*if (returnValue == 0) {
returnValue = this.getColumnName().compareTo(o.getColumnName());
}*/
return returnValue;
} }
|
public class class_name {
public synchronized void updateBrowserInfo(URL url, String browserName, int maxInstances) {
logger.entering(new Object[] { url, browserName, maxInstances });
BrowserInformation browserInformation = BrowserInformation.createBrowserInfo(browserName, maxInstances);
TestSlotInformation testSlotInformation = (nodeMap.get(url) == null) ? new TestSlotInformation() : nodeMap
.get(url);
testSlotInformation.addBrowserInfo(browserInformation);
if (nodeMap.get(url) == null) {
logger.log(Level.FINE, "Creating new entry -> " + url + " : [" + browserName + ":" + maxInstances + "]");
nodeMap.put(url, testSlotInformation);
} else {
logger.log(Level.FINE, "Added entry -> " + url + " : " + " : [" + browserName + ":" + maxInstances + "]");
}
} }
|
public class class_name {
public synchronized void updateBrowserInfo(URL url, String browserName, int maxInstances) {
logger.entering(new Object[] { url, browserName, maxInstances });
BrowserInformation browserInformation = BrowserInformation.createBrowserInfo(browserName, maxInstances);
TestSlotInformation testSlotInformation = (nodeMap.get(url) == null) ? new TestSlotInformation() : nodeMap
.get(url);
testSlotInformation.addBrowserInfo(browserInformation);
if (nodeMap.get(url) == null) {
logger.log(Level.FINE, "Creating new entry -> " + url + " : [" + browserName + ":" + maxInstances + "]"); // depends on control dependency: [if], data = [none]
nodeMap.put(url, testSlotInformation); // depends on control dependency: [if], data = [none]
} else {
logger.log(Level.FINE, "Added entry -> " + url + " : " + " : [" + browserName + ":" + maxInstances + "]"); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
@SuppressWarnings("unchecked")
public void registerTransport(String transportClazz) {
if(transportClazz == null){
return;
}
try {
registerTransport((Class<Transport>)Class.forName(transportClazz));
} catch (ClassNotFoundException e) {
return;
} catch(ClassCastException cce){
throw new ApiTransportException(cce);
}
} }
|
public class class_name {
@SuppressWarnings("unchecked")
public void registerTransport(String transportClazz) {
if(transportClazz == null){
return; // depends on control dependency: [if], data = [none]
}
try {
registerTransport((Class<Transport>)Class.forName(transportClazz)); // depends on control dependency: [try], data = [none]
} catch (ClassNotFoundException e) {
return;
} catch(ClassCastException cce){ // depends on control dependency: [catch], data = [none]
throw new ApiTransportException(cce);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
public static String cleanPath(String path) {
if (path == null) {
return null;
}
String pathToUse = replace(path, StringPool.BACK_SLASH, StringPool.SLASH);
// Strip prefix from path to analyze, to not treat it as part of the
// first path element. This is necessary to correctly parse paths like
// "file:core/../core/io/Resource.class", where the ".." should just
// strip the first "core" directory while keeping the "file:" prefix.
int prefixIndex = pathToUse.indexOf(":");
String prefix = "";
if (prefixIndex != -1) {
prefix = pathToUse.substring(0, prefixIndex + 1);
if (prefix.contains("/")) {
prefix = "";
} else {
pathToUse = pathToUse.substring(prefixIndex + 1);
}
}
if (pathToUse.startsWith(StringPool.SLASH)) {
prefix = prefix + StringPool.SLASH;
pathToUse = pathToUse.substring(1);
}
String[] pathArray = delimitedListToStringArray(pathToUse, StringPool.SLASH);
List<String> pathElements = new LinkedList<String>();
int tops = 0;
for (int i = pathArray.length - 1; i >= 0; i--) {
String element = pathArray[i];
// if (StringPool.DOT.equals(element)) {
// Points to current directory - drop it.
// }
/* else*/
if (StringPool.DOTDOT.equals(element)) {
// Registering top path found.
tops++;
} else {
if (tops > 0) {
// Merging path element with element corresponding to top path.
tops--;
} else {
// Normal path element found.
pathElements.add(0, element);
}
}
}
// Remaining top paths need to be retained.
for (int i = 0; i < tops; i++) {
pathElements.add(0, StringPool.DOTDOT);
}
return prefix + collectionToDelimitedString(pathElements, StringPool.SLASH);
} }
|
public class class_name {
public static String cleanPath(String path) {
if (path == null) {
return null; // depends on control dependency: [if], data = [none]
}
String pathToUse = replace(path, StringPool.BACK_SLASH, StringPool.SLASH);
// Strip prefix from path to analyze, to not treat it as part of the
// first path element. This is necessary to correctly parse paths like
// "file:core/../core/io/Resource.class", where the ".." should just
// strip the first "core" directory while keeping the "file:" prefix.
int prefixIndex = pathToUse.indexOf(":");
String prefix = "";
if (prefixIndex != -1) {
prefix = pathToUse.substring(0, prefixIndex + 1); // depends on control dependency: [if], data = [none]
if (prefix.contains("/")) {
prefix = ""; // depends on control dependency: [if], data = [none]
} else {
pathToUse = pathToUse.substring(prefixIndex + 1); // depends on control dependency: [if], data = [none]
}
}
if (pathToUse.startsWith(StringPool.SLASH)) {
prefix = prefix + StringPool.SLASH; // depends on control dependency: [if], data = [none]
pathToUse = pathToUse.substring(1); // depends on control dependency: [if], data = [none]
}
String[] pathArray = delimitedListToStringArray(pathToUse, StringPool.SLASH);
List<String> pathElements = new LinkedList<String>();
int tops = 0;
for (int i = pathArray.length - 1; i >= 0; i--) {
String element = pathArray[i];
// if (StringPool.DOT.equals(element)) {
// Points to current directory - drop it.
// }
/* else*/
if (StringPool.DOTDOT.equals(element)) {
// Registering top path found.
tops++; // depends on control dependency: [if], data = [none]
} else {
if (tops > 0) {
// Merging path element with element corresponding to top path.
tops--; // depends on control dependency: [if], data = [none]
} else {
// Normal path element found.
pathElements.add(0, element); // depends on control dependency: [if], data = [none]
}
}
}
// Remaining top paths need to be retained.
for (int i = 0; i < tops; i++) {
pathElements.add(0, StringPool.DOTDOT); // depends on control dependency: [for], data = [none]
}
return prefix + collectionToDelimitedString(pathElements, StringPool.SLASH);
} }
|
public class class_name {
public int[] computeUnlimitedChunking(List<Dimension> dims, int elemSize) {
int maxElements = defaultChunkSize / elemSize;
int[] result = fillRightmost(convertUnlimitedShape(dims), maxElements);
long resultSize = new Section(result).computeSize();
if (resultSize < minChunksize) {
maxElements = minChunksize / elemSize;
result = incrUnlimitedShape(dims, result, maxElements);
}
return result;
} }
|
public class class_name {
public int[] computeUnlimitedChunking(List<Dimension> dims, int elemSize) {
int maxElements = defaultChunkSize / elemSize;
int[] result = fillRightmost(convertUnlimitedShape(dims), maxElements);
long resultSize = new Section(result).computeSize();
if (resultSize < minChunksize) {
maxElements = minChunksize / elemSize; // depends on control dependency: [if], data = [none]
result = incrUnlimitedShape(dims, result, maxElements); // depends on control dependency: [if], data = [none]
}
return result;
} }
|
public class class_name {
public void visitParameter(String name, int access) {
if (api < Opcodes.ASM5) {
throw new RuntimeException();
}
if (mv != null) {
mv.visitParameter(name, access);
}
} }
|
public class class_name {
public void visitParameter(String name, int access) {
if (api < Opcodes.ASM5) {
throw new RuntimeException();
}
if (mv != null) {
mv.visitParameter(name, access); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public void deltaStart(long time)
{
startCount.incrementAndGet();
if (time > 0)
{
startTotalTime.addAndGet(time);
if (time > startMaxTime.get())
startMaxTime.set(time);
}
} }
|
public class class_name {
public void deltaStart(long time)
{
startCount.incrementAndGet();
if (time > 0)
{
startTotalTime.addAndGet(time); // depends on control dependency: [if], data = [(time]
if (time > startMaxTime.get())
startMaxTime.set(time);
}
} }
|
public class class_name {
public static String toBinaryString(final byte[] data) {
if (data == null) {
return null;
}
final StringBuilder buf = new StringBuilder(data.length * 8);
for (final byte element : data) {
byte bits = element;
for (int j = 0; j < 8; j++) {
if ((bits & 0x80) == 0x80) {
buf.append('1');
} else {
buf.append('0');
}
bits <<= 1;
}
}
return buf.toString();
} }
|
public class class_name {
public static String toBinaryString(final byte[] data) {
if (data == null) {
return null; // depends on control dependency: [if], data = [none]
}
final StringBuilder buf = new StringBuilder(data.length * 8);
for (final byte element : data) {
byte bits = element;
for (int j = 0; j < 8; j++) {
if ((bits & 0x80) == 0x80) {
buf.append('1'); // depends on control dependency: [if], data = [none]
} else {
buf.append('0'); // depends on control dependency: [if], data = [none]
}
bits <<= 1; // depends on control dependency: [for], data = [none]
}
}
return buf.toString();
} }
|
public class class_name {
@Override
@Nullable
public String getMetricQueryServiceGatewayRpcAddress() {
if (queryService != null) {
return queryService.getSelfGateway(MetricQueryServiceGateway.class).getAddress();
} else {
return null;
}
} }
|
public class class_name {
@Override
@Nullable
public String getMetricQueryServiceGatewayRpcAddress() {
if (queryService != null) {
return queryService.getSelfGateway(MetricQueryServiceGateway.class).getAddress(); // depends on control dependency: [if], data = [none]
} else {
return null; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
protected void createIndexesOnColumns(EntityMetadata m, String tableName, List<Column> columns, Class columnType) {
Object pooledConnection = null;
try {
Cassandra.Client api = null;
pooledConnection = getConnection();
api = (org.apache.cassandra.thrift.Cassandra.Client) getConnection(pooledConnection);
KsDef ksDef = api.describe_keyspace(m.getSchema());
List<CfDef> cfDefs = ksDef.getCf_defs();
// Column family definition on which secondary index creation is
// required
CfDef columnFamilyDefToUpdate = null;
boolean isUpdatable = false;
for (CfDef cfDef : cfDefs) {
if (cfDef.getName().equals(tableName)) {
columnFamilyDefToUpdate = cfDef;
break;
}
}
if (columnFamilyDefToUpdate == null) {
log.error("Join table {} not available.", tableName);
throw new PersistenceException("table" + tableName + " not found!");
}
// create a column family, in case it is not already available.
// Get list of indexes already created
List<ColumnDef> columnMetadataList = columnFamilyDefToUpdate.getColumn_metadata();
List<String> indexList = new ArrayList<String>();
if (columnMetadataList != null) {
for (ColumnDef columnDef : columnMetadataList) {
indexList.add(new StringAccessor().fromBytes(String.class, columnDef.getName()));
}
// need to set them to null else it is giving problem on update
// column family and trying to add again existing indexes.
// columnFamilyDefToUpdate.column_metadata = null;
}
// Iterate over all columns for creating secondary index on them
for (Column column : columns) {
ColumnDef columnDef = new ColumnDef();
columnDef.setName(column.getName());
columnDef.setValidation_class(CassandraValidationClassMapper.getValidationClass(columnType, false));
columnDef.setIndex_type(IndexType.KEYS);
// Add secondary index only if it's not already created
// (if already created, it would be there in column family
// definition)
if (!indexList.contains(new StringAccessor().fromBytes(String.class, column.getName()))) {
isUpdatable = true;
columnFamilyDefToUpdate.addToColumn_metadata(columnDef);
}
}
// Finally, update column family with modified column family
// definition
if (isUpdatable) {
columnFamilyDefToUpdate.setKey_validation_class(CassandraValidationClassMapper
.getValidationClass(m.getIdAttribute().getJavaType(), isCql3Enabled(m)));
api.system_update_column_family(columnFamilyDefToUpdate);
}
} catch (Exception e) {
log.warn("Could not create secondary index on column family {}, Caused by: . ", tableName, e);
} finally {
releaseConnection(pooledConnection);
}
} }
|
public class class_name {
protected void createIndexesOnColumns(EntityMetadata m, String tableName, List<Column> columns, Class columnType) {
Object pooledConnection = null;
try {
Cassandra.Client api = null;
pooledConnection = getConnection(); // depends on control dependency: [try], data = [none]
api = (org.apache.cassandra.thrift.Cassandra.Client) getConnection(pooledConnection); // depends on control dependency: [try], data = [none]
KsDef ksDef = api.describe_keyspace(m.getSchema());
List<CfDef> cfDefs = ksDef.getCf_defs();
// Column family definition on which secondary index creation is
// required
CfDef columnFamilyDefToUpdate = null;
boolean isUpdatable = false;
for (CfDef cfDef : cfDefs) {
if (cfDef.getName().equals(tableName)) {
columnFamilyDefToUpdate = cfDef; // depends on control dependency: [if], data = [none]
break;
}
}
if (columnFamilyDefToUpdate == null) {
log.error("Join table {} not available.", tableName); // depends on control dependency: [if], data = [none]
throw new PersistenceException("table" + tableName + " not found!");
}
// create a column family, in case it is not already available.
// Get list of indexes already created
List<ColumnDef> columnMetadataList = columnFamilyDefToUpdate.getColumn_metadata();
List<String> indexList = new ArrayList<String>();
if (columnMetadataList != null) {
for (ColumnDef columnDef : columnMetadataList) {
indexList.add(new StringAccessor().fromBytes(String.class, columnDef.getName())); // depends on control dependency: [for], data = [columnDef]
}
// need to set them to null else it is giving problem on update
// column family and trying to add again existing indexes.
// columnFamilyDefToUpdate.column_metadata = null;
}
// Iterate over all columns for creating secondary index on them
for (Column column : columns) {
ColumnDef columnDef = new ColumnDef();
columnDef.setName(column.getName()); // depends on control dependency: [for], data = [column]
columnDef.setValidation_class(CassandraValidationClassMapper.getValidationClass(columnType, false)); // depends on control dependency: [for], data = [column]
columnDef.setIndex_type(IndexType.KEYS); // depends on control dependency: [for], data = [column]
// Add secondary index only if it's not already created
// (if already created, it would be there in column family
// definition)
if (!indexList.contains(new StringAccessor().fromBytes(String.class, column.getName()))) {
isUpdatable = true; // depends on control dependency: [if], data = [none]
columnFamilyDefToUpdate.addToColumn_metadata(columnDef); // depends on control dependency: [if], data = [none]
}
}
// Finally, update column family with modified column family
// definition
if (isUpdatable) {
columnFamilyDefToUpdate.setKey_validation_class(CassandraValidationClassMapper
.getValidationClass(m.getIdAttribute().getJavaType(), isCql3Enabled(m))); // depends on control dependency: [if], data = [none]
api.system_update_column_family(columnFamilyDefToUpdate); // depends on control dependency: [if], data = [none]
}
} catch (Exception e) {
log.warn("Could not create secondary index on column family {}, Caused by: . ", tableName, e);
} finally { // depends on control dependency: [catch], data = [none]
releaseConnection(pooledConnection);
}
} }
|
public class class_name {
public void marshall(UpdateMatchmakingConfigurationRequest updateMatchmakingConfigurationRequest, ProtocolMarshaller protocolMarshaller) {
if (updateMatchmakingConfigurationRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getName(), NAME_BINDING);
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getDescription(), DESCRIPTION_BINDING);
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getGameSessionQueueArns(), GAMESESSIONQUEUEARNS_BINDING);
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getRequestTimeoutSeconds(), REQUESTTIMEOUTSECONDS_BINDING);
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getAcceptanceTimeoutSeconds(), ACCEPTANCETIMEOUTSECONDS_BINDING);
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getAcceptanceRequired(), ACCEPTANCEREQUIRED_BINDING);
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getRuleSetName(), RULESETNAME_BINDING);
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getNotificationTarget(), NOTIFICATIONTARGET_BINDING);
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getAdditionalPlayerCount(), ADDITIONALPLAYERCOUNT_BINDING);
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getCustomEventData(), CUSTOMEVENTDATA_BINDING);
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getGameProperties(), GAMEPROPERTIES_BINDING);
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getGameSessionData(), GAMESESSIONDATA_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} }
|
public class class_name {
public void marshall(UpdateMatchmakingConfigurationRequest updateMatchmakingConfigurationRequest, ProtocolMarshaller protocolMarshaller) {
if (updateMatchmakingConfigurationRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getName(), NAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getDescription(), DESCRIPTION_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getGameSessionQueueArns(), GAMESESSIONQUEUEARNS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getRequestTimeoutSeconds(), REQUESTTIMEOUTSECONDS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getAcceptanceTimeoutSeconds(), ACCEPTANCETIMEOUTSECONDS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getAcceptanceRequired(), ACCEPTANCEREQUIRED_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getRuleSetName(), RULESETNAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getNotificationTarget(), NOTIFICATIONTARGET_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getAdditionalPlayerCount(), ADDITIONALPLAYERCOUNT_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getCustomEventData(), CUSTOMEVENTDATA_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getGameProperties(), GAMEPROPERTIES_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateMatchmakingConfigurationRequest.getGameSessionData(), GAMESESSIONDATA_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
private void addAttribute(int list, TreeHtmlAttributeInfo attr, RemoveInfo removes)
{
ArrayList al = _lists[list];
// if the array list is the empty list then we need to allocate a new array list
if (al == empty) {
al = new ArrayList();
_lists[list] = al;
}
// check to see if this attribute is already inside the tree.
int cnt = al.size();
for (int i = 0; i < cnt; i++) {
TreeHtmlAttributeInfo a = (TreeHtmlAttributeInfo) al.get(i);
assert(a != null);
if (a.getAttribute().equals(attr.getAttribute())) {
removes.removes.add(a);
if (!attr.isApplyToDescendents()) {
removes.scopeOverrides = true;
}
al.remove(a);
break;
}
}
// add this to the list
al.add(attr);
} }
|
public class class_name {
private void addAttribute(int list, TreeHtmlAttributeInfo attr, RemoveInfo removes)
{
ArrayList al = _lists[list];
// if the array list is the empty list then we need to allocate a new array list
if (al == empty) {
al = new ArrayList(); // depends on control dependency: [if], data = [none]
_lists[list] = al; // depends on control dependency: [if], data = [none]
}
// check to see if this attribute is already inside the tree.
int cnt = al.size();
for (int i = 0; i < cnt; i++) {
TreeHtmlAttributeInfo a = (TreeHtmlAttributeInfo) al.get(i);
assert(a != null); // depends on control dependency: [for], data = [none]
if (a.getAttribute().equals(attr.getAttribute())) {
removes.removes.add(a); // depends on control dependency: [if], data = [none]
if (!attr.isApplyToDescendents()) {
removes.scopeOverrides = true; // depends on control dependency: [if], data = [none]
}
al.remove(a); // depends on control dependency: [if], data = [none]
break;
}
}
// add this to the list
al.add(attr);
} }
|
public class class_name {
private List<BindingConfiguration> getPersistenceUnitRefs(DeploymentUnit deploymentUnit, DeploymentDescriptorEnvironment environment, ClassLoader classLoader, DeploymentReflectionIndex deploymentReflectionIndex, ResourceInjectionTarget resourceInjectionTarget) throws
DeploymentUnitProcessingException {
final List<BindingConfiguration> bindingConfigurations = new ArrayList<BindingConfiguration>();
if (environment.getEnvironment() == null) {
return bindingConfigurations;
}
PersistenceUnitReferencesMetaData persistenceUnitRefs = environment.getEnvironment().getPersistenceUnitRefs();
if (persistenceUnitRefs != null) {
if (persistenceUnitRefs.size() > 0) {
JPADeploymentMarker.mark(deploymentUnit);
}
for (PersistenceUnitReferenceMetaData puRef : persistenceUnitRefs) {
String name = puRef.getName();
String persistenceUnitName = puRef.getPersistenceUnitName();
String lookup = puRef.getLookupName();
if (!isEmpty(lookup) && !isEmpty(persistenceUnitName)) {
throw JpaLogger.ROOT_LOGGER.cannotSpecifyBoth("<lookup-name>", lookup, "persistence-unit-name", persistenceUnitName, "<persistence-unit-ref/>", resourceInjectionTarget);
}
if (!name.startsWith("java:")) {
name = environment.getDefaultContext() + name;
}
// our injection (source) comes from the local (ENC) lookup, no matter what.
LookupInjectionSource injectionSource = new LookupInjectionSource(name);
//add any injection targets
processInjectionTargets(resourceInjectionTarget, injectionSource, classLoader, deploymentReflectionIndex, puRef, EntityManagerFactory.class);
BindingConfiguration bindingConfiguration = null;
if (!isEmpty(lookup)) {
bindingConfiguration = new BindingConfiguration(name, new LookupInjectionSource(lookup));
} else {
InjectionSource puBindingSource = this.getPersistenceUnitBindingSource(deploymentUnit, persistenceUnitName);
bindingConfiguration = new BindingConfiguration(name, puBindingSource);
}
bindingConfigurations.add(bindingConfiguration);
}
}
return bindingConfigurations;
} }
|
public class class_name {
private List<BindingConfiguration> getPersistenceUnitRefs(DeploymentUnit deploymentUnit, DeploymentDescriptorEnvironment environment, ClassLoader classLoader, DeploymentReflectionIndex deploymentReflectionIndex, ResourceInjectionTarget resourceInjectionTarget) throws
DeploymentUnitProcessingException {
final List<BindingConfiguration> bindingConfigurations = new ArrayList<BindingConfiguration>();
if (environment.getEnvironment() == null) {
return bindingConfigurations;
}
PersistenceUnitReferencesMetaData persistenceUnitRefs = environment.getEnvironment().getPersistenceUnitRefs();
if (persistenceUnitRefs != null) {
if (persistenceUnitRefs.size() > 0) {
JPADeploymentMarker.mark(deploymentUnit); // depends on control dependency: [if], data = [none]
}
for (PersistenceUnitReferenceMetaData puRef : persistenceUnitRefs) {
String name = puRef.getName();
String persistenceUnitName = puRef.getPersistenceUnitName();
String lookup = puRef.getLookupName();
if (!isEmpty(lookup) && !isEmpty(persistenceUnitName)) {
throw JpaLogger.ROOT_LOGGER.cannotSpecifyBoth("<lookup-name>", lookup, "persistence-unit-name", persistenceUnitName, "<persistence-unit-ref/>", resourceInjectionTarget);
}
if (!name.startsWith("java:")) {
name = environment.getDefaultContext() + name; // depends on control dependency: [if], data = [none]
}
// our injection (source) comes from the local (ENC) lookup, no matter what.
LookupInjectionSource injectionSource = new LookupInjectionSource(name);
//add any injection targets
processInjectionTargets(resourceInjectionTarget, injectionSource, classLoader, deploymentReflectionIndex, puRef, EntityManagerFactory.class); // depends on control dependency: [for], data = [puRef]
BindingConfiguration bindingConfiguration = null;
if (!isEmpty(lookup)) {
bindingConfiguration = new BindingConfiguration(name, new LookupInjectionSource(lookup)); // depends on control dependency: [if], data = [none]
} else {
InjectionSource puBindingSource = this.getPersistenceUnitBindingSource(deploymentUnit, persistenceUnitName);
bindingConfiguration = new BindingConfiguration(name, puBindingSource); // depends on control dependency: [if], data = [none]
}
bindingConfigurations.add(bindingConfiguration); // depends on control dependency: [for], data = [none]
}
}
return bindingConfigurations;
} }
|
public class class_name {
public float getFloat(String path) {
final Object value = get(path);
//Groovy will always return a Double for floating point values.
if (value instanceof Double) {
return ((Double) value).floatValue();
} else {
return ObjectConverter.convertObjectTo(value, Float.class);
}
} }
|
public class class_name {
public float getFloat(String path) {
final Object value = get(path);
//Groovy will always return a Double for floating point values.
if (value instanceof Double) {
return ((Double) value).floatValue(); // depends on control dependency: [if], data = [none]
} else {
return ObjectConverter.convertObjectTo(value, Float.class); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public final synchronized FileChannel getChannel() {
if(channel == null) {
channel = NioUtils.newFileChannel(this, fd, mode);
}
return channel;
} }
|
public class class_name {
public final synchronized FileChannel getChannel() {
if(channel == null) {
channel = NioUtils.newFileChannel(this, fd, mode); // depends on control dependency: [if], data = [none]
}
return channel;
} }
|
public class class_name {
@Override
protected HandlerMethod lookupHandlerMethod(String urlPath, HttpServletRequest request) {
logger.debug("looking up handler for path: " + urlPath);
HandlerMethod handlerMethod = handlerMethods.get(urlPath);
if (handlerMethod != null) {
return handlerMethod;
}
for (String path : handlerMethods.keySet()) {
UriTemplate template = new UriTemplate(path);
if (template.matches(urlPath)) {
request.setAttribute(
HandlerMapping.URI_TEMPLATE_VARIABLES_ATTRIBUTE,
template.match(urlPath));
return handlerMethods.get(path);
}
}
return null;
} }
|
public class class_name {
@Override
protected HandlerMethod lookupHandlerMethod(String urlPath, HttpServletRequest request) {
logger.debug("looking up handler for path: " + urlPath);
HandlerMethod handlerMethod = handlerMethods.get(urlPath);
if (handlerMethod != null) {
return handlerMethod; // depends on control dependency: [if], data = [none]
}
for (String path : handlerMethods.keySet()) {
UriTemplate template = new UriTemplate(path);
if (template.matches(urlPath)) {
request.setAttribute(
HandlerMapping.URI_TEMPLATE_VARIABLES_ATTRIBUTE,
template.match(urlPath)); // depends on control dependency: [if], data = [none]
return handlerMethods.get(path); // depends on control dependency: [if], data = [none]
}
}
return null;
} }
|
public class class_name {
protected void initHtmlImportObject() {
Object o;
String uri = getJsp().getRequestContext().getUri();
if ((uri == null) || uri.endsWith(IMPORT_STANDARD_PATH)) {
m_dialogMode = MODE_STANDARD;
} else if (uri.endsWith(IMPORT_DEFAULT_PATH)) {
m_dialogMode = MODE_DEFAULT;
} else {
m_dialogMode = MODE_ADVANCED;
}
if (CmsStringUtil.isEmpty(getParamAction())) {
o = new CmsHtmlImport(getJsp().getCmsObject());
} else {
// this is not the initial call, get the job object from session
o = getDialogObject();
}
if (!(o instanceof CmsHtmlImport)) {
// create a new HTML import handler object
m_htmlimport = new CmsHtmlImport(getJsp().getCmsObject());
} else {
// reuse HTML import handler object stored in session
m_htmlimport = (CmsHtmlImport)o;
// this is needed, because the user can switch between the sites, now get the current
m_htmlimport.setCmsObject(getJsp().getCmsObject());
}
// gets the data from the configuration file
fillHtmlImport();
} }
|
public class class_name {
protected void initHtmlImportObject() {
Object o;
String uri = getJsp().getRequestContext().getUri();
if ((uri == null) || uri.endsWith(IMPORT_STANDARD_PATH)) {
m_dialogMode = MODE_STANDARD; // depends on control dependency: [if], data = [none]
} else if (uri.endsWith(IMPORT_DEFAULT_PATH)) {
m_dialogMode = MODE_DEFAULT; // depends on control dependency: [if], data = [none]
} else {
m_dialogMode = MODE_ADVANCED; // depends on control dependency: [if], data = [none]
}
if (CmsStringUtil.isEmpty(getParamAction())) {
o = new CmsHtmlImport(getJsp().getCmsObject()); // depends on control dependency: [if], data = [none]
} else {
// this is not the initial call, get the job object from session
o = getDialogObject(); // depends on control dependency: [if], data = [none]
}
if (!(o instanceof CmsHtmlImport)) {
// create a new HTML import handler object
m_htmlimport = new CmsHtmlImport(getJsp().getCmsObject()); // depends on control dependency: [if], data = [none]
} else {
// reuse HTML import handler object stored in session
m_htmlimport = (CmsHtmlImport)o; // depends on control dependency: [if], data = [none]
// this is needed, because the user can switch between the sites, now get the current
m_htmlimport.setCmsObject(getJsp().getCmsObject()); // depends on control dependency: [if], data = [none]
}
// gets the data from the configuration file
fillHtmlImport();
} }
|
public class class_name {
public static String removeOpenCmsContext(final String path) {
String context = OpenCms.getSystemInfo().getOpenCmsContext();
if (path.startsWith(context + "/")) {
return path.substring(context.length());
}
String renderPrefix = OpenCms.getStaticExportManager().getVfsPrefix();
if (path.startsWith(renderPrefix + "/")) {
return path.substring(renderPrefix.length());
}
return path;
} }
|
public class class_name {
public static String removeOpenCmsContext(final String path) {
String context = OpenCms.getSystemInfo().getOpenCmsContext();
if (path.startsWith(context + "/")) {
return path.substring(context.length()); // depends on control dependency: [if], data = [none]
}
String renderPrefix = OpenCms.getStaticExportManager().getVfsPrefix();
if (path.startsWith(renderPrefix + "/")) {
return path.substring(renderPrefix.length()); // depends on control dependency: [if], data = [none]
}
return path;
} }
|
public class class_name {
public void optimizeLineSection() {
/* Some debugging code
for (int i = 0; i < lineData.size(); i++) {
LineInfo li = (LineInfo)lineData.get(i);
System.out.print(li.toString());
}
*/
//Incorporate each LineInfo into the previous LineInfo's
//outputLineIncrement, if possible
int i = 0;
while (i < lineData.size() - 1) {
LineInfo li = lineData.get(i);
LineInfo liNext = lineData.get(i + 1);
if (!liNext.lineFileIDSet
&& liNext.inputStartLine == li.inputStartLine
&& liNext.inputLineCount == 1
&& li.inputLineCount == 1
&& liNext.outputStartLine
== li.outputStartLine
+ li.inputLineCount * li.outputLineIncrement) {
li.setOutputLineIncrement(
liNext.outputStartLine
- li.outputStartLine
+ liNext.outputLineIncrement);
lineData.remove(i + 1);
} else {
i++;
}
}
//Incorporate each LineInfo into the previous LineInfo's
//inputLineCount, if possible
i = 0;
while (i < lineData.size() - 1) {
LineInfo li = lineData.get(i);
LineInfo liNext = lineData.get(i + 1);
if (!liNext.lineFileIDSet
&& liNext.inputStartLine == li.inputStartLine + li.inputLineCount
&& liNext.outputLineIncrement == li.outputLineIncrement
&& liNext.outputStartLine
== li.outputStartLine
+ li.inputLineCount * li.outputLineIncrement) {
li.setInputLineCount(li.inputLineCount + liNext.inputLineCount);
lineData.remove(i + 1);
} else {
i++;
}
}
} }
|
public class class_name {
public void optimizeLineSection() {
/* Some debugging code
for (int i = 0; i < lineData.size(); i++) {
LineInfo li = (LineInfo)lineData.get(i);
System.out.print(li.toString());
}
*/
//Incorporate each LineInfo into the previous LineInfo's
//outputLineIncrement, if possible
int i = 0;
while (i < lineData.size() - 1) {
LineInfo li = lineData.get(i);
LineInfo liNext = lineData.get(i + 1);
if (!liNext.lineFileIDSet
&& liNext.inputStartLine == li.inputStartLine
&& liNext.inputLineCount == 1
&& li.inputLineCount == 1
&& liNext.outputStartLine
== li.outputStartLine
+ li.inputLineCount * li.outputLineIncrement) {
li.setOutputLineIncrement(
liNext.outputStartLine
- li.outputStartLine
+ liNext.outputLineIncrement); // depends on control dependency: [if], data = [none]
lineData.remove(i + 1); // depends on control dependency: [if], data = [none]
} else {
i++; // depends on control dependency: [if], data = [none]
}
}
//Incorporate each LineInfo into the previous LineInfo's
//inputLineCount, if possible
i = 0;
while (i < lineData.size() - 1) {
LineInfo li = lineData.get(i);
LineInfo liNext = lineData.get(i + 1);
if (!liNext.lineFileIDSet
&& liNext.inputStartLine == li.inputStartLine + li.inputLineCount
&& liNext.outputLineIncrement == li.outputLineIncrement
&& liNext.outputStartLine
== li.outputStartLine
+ li.inputLineCount * li.outputLineIncrement) {
li.setInputLineCount(li.inputLineCount + liNext.inputLineCount); // depends on control dependency: [if], data = [none]
lineData.remove(i + 1); // depends on control dependency: [if], data = [none]
} else {
i++; // depends on control dependency: [if], data = [none]
}
}
} }
|
public class class_name {
public static String escapeForJava( String string )
{
String result;
StringBuffer resultBuffer = null;
for( int i = 0, length = string.length(); i < length; i++ )
{
char ch = string.charAt( i );
String escape = escapeForJava( ch );
if( escape != null )
{
if( resultBuffer == null )
{
resultBuffer = new StringBuffer( string );
resultBuffer.setLength( i );
}
resultBuffer.append( escape );
}
else if( resultBuffer != null )
{
resultBuffer.append( ch );
}
}
result = (resultBuffer != null) ? resultBuffer.toString() : string;
return result;
} }
|
public class class_name {
public static String escapeForJava( String string )
{
String result;
StringBuffer resultBuffer = null;
for( int i = 0, length = string.length(); i < length; i++ )
{
char ch = string.charAt( i );
String escape = escapeForJava( ch );
if( escape != null )
{
if( resultBuffer == null )
{
resultBuffer = new StringBuffer( string ); // depends on control dependency: [if], data = [none]
resultBuffer.setLength( i ); // depends on control dependency: [if], data = [none]
}
resultBuffer.append( escape ); // depends on control dependency: [if], data = [( escape]
}
else if( resultBuffer != null )
{
resultBuffer.append( ch ); // depends on control dependency: [if], data = [none]
}
}
result = (resultBuffer != null) ? resultBuffer.toString() : string;
return result;
} }
|
public class class_name {
public Observable<Boolean> upsertConversations(List<ChatConversation> conversationsToAdd) {
return asObservable(new Executor<Boolean>() {
@Override
void execute(ChatStore store, Emitter<Boolean> emitter) {
store.beginTransaction();
boolean isSuccess = true;
for (ChatConversation conversation : conversationsToAdd) {
ChatConversation.Builder toSave = ChatConversation.builder().populate(conversation);
ChatConversationBase saved = store.getConversation(conversation.getConversationId());
if (saved == null) {
toSave.setFirstLocalEventId(-1L);
toSave.setLastLocalEventId(-1L);
if (conversation.getLastRemoteEventId() == null) {
toSave.setLastRemoteEventId(-1L);
} else {
toSave.setLastRemoteEventId(conversation.getLastRemoteEventId());
}
if (conversation.getUpdatedOn() == null) {
toSave.setUpdatedOn(System.currentTimeMillis());
} else {
toSave.setUpdatedOn(conversation.getUpdatedOn());
}
} else {
toSave.setFirstLocalEventId(saved.getFirstLocalEventId());
toSave.setLastLocalEventId(saved.getLastLocalEventId());
if (conversation.getLastRemoteEventId() == null) {
toSave.setLastRemoteEventId(saved.getLastRemoteEventId());
} else {
toSave.setLastRemoteEventId(Math.max(saved.getLastRemoteEventId(), conversation.getLastRemoteEventId()));
}
if (conversation.getUpdatedOn() == null) {
toSave.setUpdatedOn(System.currentTimeMillis());
} else {
toSave.setUpdatedOn(conversation.getUpdatedOn());
}
}
isSuccess = isSuccess && store.upsert(toSave.build());
}
store.endTransaction();
emitter.onNext(isSuccess);
emitter.onCompleted();
}
});
} }
|
public class class_name {
public Observable<Boolean> upsertConversations(List<ChatConversation> conversationsToAdd) {
return asObservable(new Executor<Boolean>() {
@Override
void execute(ChatStore store, Emitter<Boolean> emitter) {
store.beginTransaction();
boolean isSuccess = true;
for (ChatConversation conversation : conversationsToAdd) {
ChatConversation.Builder toSave = ChatConversation.builder().populate(conversation);
ChatConversationBase saved = store.getConversation(conversation.getConversationId());
if (saved == null) {
toSave.setFirstLocalEventId(-1L); // depends on control dependency: [if], data = [none]
toSave.setLastLocalEventId(-1L); // depends on control dependency: [if], data = [none]
if (conversation.getLastRemoteEventId() == null) {
toSave.setLastRemoteEventId(-1L); // depends on control dependency: [if], data = [none]
} else {
toSave.setLastRemoteEventId(conversation.getLastRemoteEventId()); // depends on control dependency: [if], data = [(conversation.getLastRemoteEventId()]
}
if (conversation.getUpdatedOn() == null) {
toSave.setUpdatedOn(System.currentTimeMillis()); // depends on control dependency: [if], data = [none]
} else {
toSave.setUpdatedOn(conversation.getUpdatedOn()); // depends on control dependency: [if], data = [(conversation.getUpdatedOn()]
}
} else {
toSave.setFirstLocalEventId(saved.getFirstLocalEventId()); // depends on control dependency: [if], data = [(saved]
toSave.setLastLocalEventId(saved.getLastLocalEventId()); // depends on control dependency: [if], data = [(saved]
if (conversation.getLastRemoteEventId() == null) {
toSave.setLastRemoteEventId(saved.getLastRemoteEventId()); // depends on control dependency: [if], data = [none]
} else {
toSave.setLastRemoteEventId(Math.max(saved.getLastRemoteEventId(), conversation.getLastRemoteEventId())); // depends on control dependency: [if], data = [none]
}
if (conversation.getUpdatedOn() == null) {
toSave.setUpdatedOn(System.currentTimeMillis()); // depends on control dependency: [if], data = [none]
} else {
toSave.setUpdatedOn(conversation.getUpdatedOn()); // depends on control dependency: [if], data = [(conversation.getUpdatedOn()]
}
}
isSuccess = isSuccess && store.upsert(toSave.build()); // depends on control dependency: [for], data = [none]
}
store.endTransaction();
emitter.onNext(isSuccess);
emitter.onCompleted();
}
});
} }
|
public class class_name {
private static void includeColumnRecursive(List<OrcProto.Type> types,
boolean[] result,
int typeId) {
result[typeId] = true;
OrcProto.Type type = types.get(typeId);
int children = type.getSubtypesCount();
for(int i=0; i < children; ++i) {
includeColumnRecursive(types, result, type.getSubtypes(i));
}
} }
|
public class class_name {
private static void includeColumnRecursive(List<OrcProto.Type> types,
boolean[] result,
int typeId) {
result[typeId] = true;
OrcProto.Type type = types.get(typeId);
int children = type.getSubtypesCount();
for(int i=0; i < children; ++i) {
includeColumnRecursive(types, result, type.getSubtypes(i)); // depends on control dependency: [for], data = [i]
}
} }
|
public class class_name {
public void setKeyPhrases(java.util.Collection<KeyPhrase> keyPhrases) {
if (keyPhrases == null) {
this.keyPhrases = null;
return;
}
this.keyPhrases = new java.util.ArrayList<KeyPhrase>(keyPhrases);
} }
|
public class class_name {
public void setKeyPhrases(java.util.Collection<KeyPhrase> keyPhrases) {
if (keyPhrases == null) {
this.keyPhrases = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.keyPhrases = new java.util.ArrayList<KeyPhrase>(keyPhrases);
} }
|
public class class_name {
public void unregisterWorkManager(WorkManager wm)
{
if (wm != null)
{
if (wm.getName() == null || wm.getName().trim().equals(""))
throw new IllegalArgumentException("The name of WorkManager is invalid: " + wm);
if (trace)
log.tracef("Unregistering WorkManager: %s", wm);
if (workmanagers.keySet().contains(wm.getName()))
{
workmanagers.remove(wm.getName());
// Clear any events
if (wm instanceof DistributedWorkManager)
{
WorkManagerEventQueue wmeq = WorkManagerEventQueue.getInstance();
List<WorkManagerEvent> events = wmeq.getEvents(wm.getName());
events.clear();
}
}
}
} }
|
public class class_name {
public void unregisterWorkManager(WorkManager wm)
{
if (wm != null)
{
if (wm.getName() == null || wm.getName().trim().equals(""))
throw new IllegalArgumentException("The name of WorkManager is invalid: " + wm);
if (trace)
log.tracef("Unregistering WorkManager: %s", wm);
if (workmanagers.keySet().contains(wm.getName()))
{
workmanagers.remove(wm.getName()); // depends on control dependency: [if], data = [none]
// Clear any events
if (wm instanceof DistributedWorkManager)
{
WorkManagerEventQueue wmeq = WorkManagerEventQueue.getInstance();
List<WorkManagerEvent> events = wmeq.getEvents(wm.getName());
events.clear(); // depends on control dependency: [if], data = [none]
}
}
}
} }
|
public class class_name {
private static URIRef relativeResolution(URI target, URI base) {
URIRef Base=URIRef.create(base); // NOSONAR
URIRef R=URIRef.create(target); // NOSONAR
URIRef T=URIRef.create(); // NOSONAR
if(defined(R.scheme)) {
T.scheme = R.scheme;
T.authority = R.authority;
T.path = removeDotSegments(R.path);
T.query = R.query;
} else {
if(defined(R.authority)) {
T.authority = R.authority;
T.path = removeDotSegments(R.path);
T.query = R.query;
} else {
resolvePathOnlyTarget(Base, R, T);
}
T.scheme = Base.scheme;
}
T.fragment = R.fragment;
return T;
} }
|
public class class_name {
private static URIRef relativeResolution(URI target, URI base) {
URIRef Base=URIRef.create(base); // NOSONAR
URIRef R=URIRef.create(target); // NOSONAR
URIRef T=URIRef.create(); // NOSONAR
if(defined(R.scheme)) {
T.scheme = R.scheme; // depends on control dependency: [if], data = [none]
T.authority = R.authority; // depends on control dependency: [if], data = [none]
T.path = removeDotSegments(R.path); // depends on control dependency: [if], data = [none]
T.query = R.query; // depends on control dependency: [if], data = [none]
} else {
if(defined(R.authority)) {
T.authority = R.authority; // depends on control dependency: [if], data = [none]
T.path = removeDotSegments(R.path); // depends on control dependency: [if], data = [none]
T.query = R.query; // depends on control dependency: [if], data = [none]
} else {
resolvePathOnlyTarget(Base, R, T); // depends on control dependency: [if], data = [none]
}
T.scheme = Base.scheme; // depends on control dependency: [if], data = [none]
}
T.fragment = R.fragment;
return T;
} }
|
public class class_name {
protected Dimension getDimensions(Image pImage, int pWidth, int pHeight,
int pUnits, boolean pUniformScale) {
// If uniform, make sure width and height are scaled the same ammount
// (use ONLY height or ONLY width).
//
// Algoritm:
// if uniform
// if newHeight not set
// find ratio newWidth / oldWidth
// oldHeight *= ratio
// else if newWidth not set
// find ratio newWidth / oldWidth
// oldHeight *= ratio
// else
// find both ratios and use the smallest one
// (this will be the largest version of the image that fits
// inside the rectangle given)
// (if PERCENT, just use smallest percentage).
//
// If units is percent, we only need old height and width
int oldWidth = ImageUtil.getWidth(pImage);
int oldHeight = ImageUtil.getHeight(pImage);
float ratio;
if (pUnits == UNITS_PERCENT) {
if (pWidth >= 0 && pHeight >= 0) {
// Non-uniform
pWidth = (int) ((float) oldWidth * (float) pWidth / 100f);
pHeight = (int) ((float) oldHeight * (float) pHeight / 100f);
}
else if (pWidth >= 0) {
// Find ratio from pWidth
ratio = (float) pWidth / 100f;
pWidth = (int) ((float) oldWidth * ratio);
pHeight = (int) ((float) oldHeight * ratio);
}
else if (pHeight >= 0) {
// Find ratio from pHeight
ratio = (float) pHeight / 100f;
pWidth = (int) ((float) oldWidth * ratio);
pHeight = (int) ((float) oldHeight * ratio);
}
// Else: No scale
}
else if (pUnits == UNITS_PIXELS) {
if (pUniformScale) {
if (pWidth >= 0 && pHeight >= 0) {
// Compute both ratios
ratio = (float) pWidth / (float) oldWidth;
float heightRatio = (float) pHeight / (float) oldHeight;
// Find the largest ratio, and use that for both
if (heightRatio < ratio) {
ratio = heightRatio;
pWidth = (int) ((float) oldWidth * ratio);
}
else {
pHeight = (int) ((float) oldHeight * ratio);
}
}
else if (pWidth >= 0) {
// Find ratio from pWidth
ratio = (float) pWidth / (float) oldWidth;
pHeight = (int) ((float) oldHeight * ratio);
}
else if (pHeight >= 0) {
// Find ratio from pHeight
ratio = (float) pHeight / (float) oldHeight;
pWidth = (int) ((float) oldWidth * ratio);
}
// Else: No scale
}
}
// Default is no scale, just work as a proxy
if (pWidth < 0) {
pWidth = oldWidth;
}
if (pHeight < 0) {
pHeight = oldHeight;
}
// Create new Dimension object and return
return new Dimension(pWidth, pHeight);
} }
|
public class class_name {
protected Dimension getDimensions(Image pImage, int pWidth, int pHeight,
int pUnits, boolean pUniformScale) {
// If uniform, make sure width and height are scaled the same ammount
// (use ONLY height or ONLY width).
//
// Algoritm:
// if uniform
// if newHeight not set
// find ratio newWidth / oldWidth
// oldHeight *= ratio
// else if newWidth not set
// find ratio newWidth / oldWidth
// oldHeight *= ratio
// else
// find both ratios and use the smallest one
// (this will be the largest version of the image that fits
// inside the rectangle given)
// (if PERCENT, just use smallest percentage).
//
// If units is percent, we only need old height and width
int oldWidth = ImageUtil.getWidth(pImage);
int oldHeight = ImageUtil.getHeight(pImage);
float ratio;
if (pUnits == UNITS_PERCENT) {
if (pWidth >= 0 && pHeight >= 0) {
// Non-uniform
pWidth = (int) ((float) oldWidth * (float) pWidth / 100f);
// depends on control dependency: [if], data = [none]
pHeight = (int) ((float) oldHeight * (float) pHeight / 100f);
// depends on control dependency: [if], data = [none]
}
else if (pWidth >= 0) {
// Find ratio from pWidth
ratio = (float) pWidth / 100f;
// depends on control dependency: [if], data = [none]
pWidth = (int) ((float) oldWidth * ratio);
// depends on control dependency: [if], data = [none]
pHeight = (int) ((float) oldHeight * ratio);
// depends on control dependency: [if], data = [none]
}
else if (pHeight >= 0) {
// Find ratio from pHeight
ratio = (float) pHeight / 100f;
// depends on control dependency: [if], data = [none]
pWidth = (int) ((float) oldWidth * ratio);
// depends on control dependency: [if], data = [none]
pHeight = (int) ((float) oldHeight * ratio);
// depends on control dependency: [if], data = [none]
}
// Else: No scale
}
else if (pUnits == UNITS_PIXELS) {
if (pUniformScale) {
if (pWidth >= 0 && pHeight >= 0) {
// Compute both ratios
ratio = (float) pWidth / (float) oldWidth;
// depends on control dependency: [if], data = [none]
float heightRatio = (float) pHeight / (float) oldHeight;
// Find the largest ratio, and use that for both
if (heightRatio < ratio) {
ratio = heightRatio;
// depends on control dependency: [if], data = [none]
pWidth = (int) ((float) oldWidth * ratio);
// depends on control dependency: [if], data = [ratio)]
}
else {
pHeight = (int) ((float) oldHeight * ratio);
// depends on control dependency: [if], data = [ratio)]
}
}
else if (pWidth >= 0) {
// Find ratio from pWidth
ratio = (float) pWidth / (float) oldWidth;
// depends on control dependency: [if], data = [none]
pHeight = (int) ((float) oldHeight * ratio);
// depends on control dependency: [if], data = [none]
}
else if (pHeight >= 0) {
// Find ratio from pHeight
ratio = (float) pHeight / (float) oldHeight;
// depends on control dependency: [if], data = [none]
pWidth = (int) ((float) oldWidth * ratio);
// depends on control dependency: [if], data = [none]
}
// Else: No scale
}
}
// Default is no scale, just work as a proxy
if (pWidth < 0) {
pWidth = oldWidth;
// depends on control dependency: [if], data = [none]
}
if (pHeight < 0) {
pHeight = oldHeight;
// depends on control dependency: [if], data = [none]
}
// Create new Dimension object and return
return new Dimension(pWidth, pHeight);
} }
|
public class class_name {
public void marshall(DirectoryConnectSettings directoryConnectSettings, ProtocolMarshaller protocolMarshaller) {
if (directoryConnectSettings == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(directoryConnectSettings.getVpcId(), VPCID_BINDING);
protocolMarshaller.marshall(directoryConnectSettings.getSubnetIds(), SUBNETIDS_BINDING);
protocolMarshaller.marshall(directoryConnectSettings.getCustomerDnsIps(), CUSTOMERDNSIPS_BINDING);
protocolMarshaller.marshall(directoryConnectSettings.getCustomerUserName(), CUSTOMERUSERNAME_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} }
|
public class class_name {
public void marshall(DirectoryConnectSettings directoryConnectSettings, ProtocolMarshaller protocolMarshaller) {
if (directoryConnectSettings == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(directoryConnectSettings.getVpcId(), VPCID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(directoryConnectSettings.getSubnetIds(), SUBNETIDS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(directoryConnectSettings.getCustomerDnsIps(), CUSTOMERDNSIPS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(directoryConnectSettings.getCustomerUserName(), CUSTOMERUSERNAME_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
public Observable<ServiceResponse<Page<SiteInner>>> beginResumeWithServiceResponseAsync(final String resourceGroupName, final String name) {
return beginResumeSinglePageAsync(resourceGroupName, name)
.concatMap(new Func1<ServiceResponse<Page<SiteInner>>, Observable<ServiceResponse<Page<SiteInner>>>>() {
@Override
public Observable<ServiceResponse<Page<SiteInner>>> call(ServiceResponse<Page<SiteInner>> page) {
String nextPageLink = page.body().nextPageLink();
if (nextPageLink == null) {
return Observable.just(page);
}
return Observable.just(page).concatWith(beginResumeNextWithServiceResponseAsync(nextPageLink));
}
});
} }
|
public class class_name {
public Observable<ServiceResponse<Page<SiteInner>>> beginResumeWithServiceResponseAsync(final String resourceGroupName, final String name) {
return beginResumeSinglePageAsync(resourceGroupName, name)
.concatMap(new Func1<ServiceResponse<Page<SiteInner>>, Observable<ServiceResponse<Page<SiteInner>>>>() {
@Override
public Observable<ServiceResponse<Page<SiteInner>>> call(ServiceResponse<Page<SiteInner>> page) {
String nextPageLink = page.body().nextPageLink();
if (nextPageLink == null) {
return Observable.just(page); // depends on control dependency: [if], data = [none]
}
return Observable.just(page).concatWith(beginResumeNextWithServiceResponseAsync(nextPageLink));
}
});
} }
|
public class class_name {
@Nullable
public AbsoluteRange getHistogramBoundaries() {
if (boundaries == null) {
boundaries = Tools.extractHistogramBoundaries(getBuiltQuery()).orElse(null);
}
return boundaries;
} }
|
public class class_name {
@Nullable
public AbsoluteRange getHistogramBoundaries() {
if (boundaries == null) {
boundaries = Tools.extractHistogramBoundaries(getBuiltQuery()).orElse(null); // depends on control dependency: [if], data = [null)]
}
return boundaries;
} }
|
public class class_name {
private MapRow getRow(int index)
{
MapRow result;
if (index == m_rows.size())
{
result = new MapRow(this, new HashMap<FastTrackField, Object>());
m_rows.add(result);
}
else
{
result = m_rows.get(index);
}
return result;
} }
|
public class class_name {
private MapRow getRow(int index)
{
MapRow result;
if (index == m_rows.size())
{
result = new MapRow(this, new HashMap<FastTrackField, Object>()); // depends on control dependency: [if], data = [none]
m_rows.add(result); // depends on control dependency: [if], data = [none]
}
else
{
result = m_rows.get(index); // depends on control dependency: [if], data = [(index]
}
return result;
} }
|
public class class_name {
private double stretch(AtomPair pair, IntStack stack, Point2d[] coords, Map<IBond,AtomPair> firstVisit) {
stackBackup.clear();
final double score = congestion.score();
double min = score;
for (IBond bond : pair.bndAt) {
// don't stretch ring bonds
if (bond.isInRing())
continue;
if (bfix.contains(bond)) continue;
// has this bond already been tested as part of another pair
AtomPair first = firstVisit.get(bond);
if (first == null)
firstVisit.put(bond, first = pair);
if (first != pair)
continue;
final IAtom beg = bond.getBegin();
final IAtom end = bond.getEnd();
final int begIdx = idxs.get(beg);
final int endIdx = idxs.get(end);
int begPriority = beg.getProperty(AtomPlacer.PRIORITY);
int endPriority = end.getProperty(AtomPlacer.PRIORITY);
Arrays.fill(visited, false);
if (begPriority < endPriority)
stack.len = visit(visited, stack.xs, endIdx, begIdx, 0);
else
stack.len = visit(visited, stack.xs, begIdx, endIdx, 0);
backupCoords(backup, stack);
if (begPriority < endPriority)
stretch(stack, end, beg, pair.attempt * STRETCH_STEP);
else
stretch(stack, beg, end, pair.attempt * STRETCH_STEP);
congestion.update(visited, stack.xs, stack.len);
if (percDiff(score, congestion.score()) >= IMPROVEMENT_PERC_THRESHOLD && congestion.score() < min) {
backupCoords(coords, stack);
min = congestion.score();
stackBackup.copyFrom(stack);
}
restoreCoords(stack, backup);
congestion.update(visited, stack.xs, stack.len);
congestion.score = score;
}
stack.copyFrom(stackBackup);
return min;
} }
|
public class class_name {
private double stretch(AtomPair pair, IntStack stack, Point2d[] coords, Map<IBond,AtomPair> firstVisit) {
stackBackup.clear();
final double score = congestion.score();
double min = score;
for (IBond bond : pair.bndAt) {
// don't stretch ring bonds
if (bond.isInRing())
continue;
if (bfix.contains(bond)) continue;
// has this bond already been tested as part of another pair
AtomPair first = firstVisit.get(bond);
if (first == null)
firstVisit.put(bond, first = pair);
if (first != pair)
continue;
final IAtom beg = bond.getBegin();
final IAtom end = bond.getEnd();
final int begIdx = idxs.get(beg);
final int endIdx = idxs.get(end);
int begPriority = beg.getProperty(AtomPlacer.PRIORITY);
int endPriority = end.getProperty(AtomPlacer.PRIORITY);
Arrays.fill(visited, false); // depends on control dependency: [for], data = [none]
if (begPriority < endPriority)
stack.len = visit(visited, stack.xs, endIdx, begIdx, 0);
else
stack.len = visit(visited, stack.xs, begIdx, endIdx, 0);
backupCoords(backup, stack); // depends on control dependency: [for], data = [none]
if (begPriority < endPriority)
stretch(stack, end, beg, pair.attempt * STRETCH_STEP);
else
stretch(stack, beg, end, pair.attempt * STRETCH_STEP);
congestion.update(visited, stack.xs, stack.len); // depends on control dependency: [for], data = [none]
if (percDiff(score, congestion.score()) >= IMPROVEMENT_PERC_THRESHOLD && congestion.score() < min) {
backupCoords(coords, stack); // depends on control dependency: [if], data = [none]
min = congestion.score(); // depends on control dependency: [if], data = [none]
stackBackup.copyFrom(stack); // depends on control dependency: [if], data = [none]
}
restoreCoords(stack, backup); // depends on control dependency: [for], data = [none]
congestion.update(visited, stack.xs, stack.len); // depends on control dependency: [for], data = [none]
congestion.score = score; // depends on control dependency: [for], data = [none]
}
stack.copyFrom(stackBackup);
return min;
} }
|
public class class_name {
protected final void loadKeys(String key, String[] value) {
String[] parsed = parseKey(key);
if (parsed == null) {
return;
}
if (!queryMap.containsKey(parsed[0])) {
queryMap.put(parsed[0], new QueryParamsMap());
}
if (!parsed[1].isEmpty()) {
queryMap.get(parsed[0]).loadKeys(parsed[1], value);
} else {
queryMap.get(parsed[0]).values = value.clone();
}
} }
|
public class class_name {
protected final void loadKeys(String key, String[] value) {
String[] parsed = parseKey(key);
if (parsed == null) {
return; // depends on control dependency: [if], data = [none]
}
if (!queryMap.containsKey(parsed[0])) {
queryMap.put(parsed[0], new QueryParamsMap()); // depends on control dependency: [if], data = [none]
}
if (!parsed[1].isEmpty()) {
queryMap.get(parsed[0]).loadKeys(parsed[1], value); // depends on control dependency: [if], data = [none]
} else {
queryMap.get(parsed[0]).values = value.clone(); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public void touchModule(String moduleName) {
CmsModule module = OpenCms.getModuleManager().getModule(moduleName);
if (module == null) {
m_shell.getOut().println("touchModule: could not find module " + moduleName);
} else {
m_shell.getOut().println("Touching module: " + moduleName);
module.setCheckpointTime(System.currentTimeMillis());
m_shell.getOut().println("Writing module configuration.");
OpenCms.getModuleManager().updateModuleConfiguration();
}
} }
|
public class class_name {
public void touchModule(String moduleName) {
CmsModule module = OpenCms.getModuleManager().getModule(moduleName);
if (module == null) {
m_shell.getOut().println("touchModule: could not find module " + moduleName); // depends on control dependency: [if], data = [none]
} else {
m_shell.getOut().println("Touching module: " + moduleName); // depends on control dependency: [if], data = [none]
module.setCheckpointTime(System.currentTimeMillis()); // depends on control dependency: [if], data = [none]
m_shell.getOut().println("Writing module configuration."); // depends on control dependency: [if], data = [none]
OpenCms.getModuleManager().updateModuleConfiguration(); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public static boolean areValid(String... strings) {
boolean result = false;
if(strings != null) {
result = true;
for(String string : strings) {
result = result && isValid(string);
}
}
return result;
} }
|
public class class_name {
public static boolean areValid(String... strings) {
boolean result = false;
if(strings != null) {
result = true; // depends on control dependency: [if], data = [none]
for(String string : strings) {
result = result && isValid(string); // depends on control dependency: [for], data = [string]
}
}
return result;
} }
|
public class class_name {
public SignatureVisitor onNonGenericType(Generic typeDescription) {
if (typeDescription.isArray()) {
typeDescription.getComponentType().accept(new ForSignatureVisitor(signatureVisitor.visitArrayType()));
} else if (typeDescription.isPrimitive()) {
signatureVisitor.visitBaseType(typeDescription.asErasure().getDescriptor().charAt(ONLY_CHARACTER));
} else {
signatureVisitor.visitClassType(typeDescription.asErasure().getInternalName());
signatureVisitor.visitEnd();
}
return signatureVisitor;
} }
|
public class class_name {
public SignatureVisitor onNonGenericType(Generic typeDescription) {
if (typeDescription.isArray()) {
typeDescription.getComponentType().accept(new ForSignatureVisitor(signatureVisitor.visitArrayType())); // depends on control dependency: [if], data = [none]
} else if (typeDescription.isPrimitive()) {
signatureVisitor.visitBaseType(typeDescription.asErasure().getDescriptor().charAt(ONLY_CHARACTER)); // depends on control dependency: [if], data = [none]
} else {
signatureVisitor.visitClassType(typeDescription.asErasure().getInternalName()); // depends on control dependency: [if], data = [none]
signatureVisitor.visitEnd(); // depends on control dependency: [if], data = [none]
}
return signatureVisitor;
} }
|
public class class_name {
@Override
public void delete(Object entity, Object pKey)
{
EntityMetadata entityMetadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, entity.getClass());
MetamodelImpl metaModel = (MetamodelImpl) kunderaMetadata.getApplicationMetadata().getMetamodel(
entityMetadata.getPersistenceUnit());
AbstractManagedType managedType = (AbstractManagedType) metaModel.entity(entityMetadata.getEntityClazz());
DBObject query = new BasicDBObject();
if (managedType.hasLobAttribute())
{
KunderaGridFS gfs = new KunderaGridFS(mongoDb, entityMetadata.getTableName());
String id = ((AbstractAttribute) entityMetadata.getIdAttribute()).getJPAColumnName();
query.put("metadata." + id, pKey);
gfs.remove(query);
}
else
{
if (metaModel.isEmbeddable(entityMetadata.getIdAttribute().getBindableJavaType()))
{
MongoDBUtils.populateCompoundKey(query, entityMetadata, metaModel, pKey);
}
else
{
query.put("_id", MongoDBUtils.populateValue(pKey, pKey.getClass()));
}
// For secondary tables.
List<String> secondaryTables = ((DefaultEntityAnnotationProcessor) managedType.getEntityAnnotation())
.getSecondaryTablesName();
secondaryTables.add(entityMetadata.getTableName());
for (String collectionName : secondaryTables)
{
KunderaCoreUtils.printQuery("Drop existing collection:" + query, showQuery);
DBCollection dbCollection = mongoDb.getCollection(collectionName);
dbCollection.remove(query, getWriteConcern(), encoder);
}
getIndexManager().remove(entityMetadata, entity, pKey);
}
} }
|
public class class_name {
@Override
public void delete(Object entity, Object pKey)
{
EntityMetadata entityMetadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, entity.getClass());
MetamodelImpl metaModel = (MetamodelImpl) kunderaMetadata.getApplicationMetadata().getMetamodel(
entityMetadata.getPersistenceUnit());
AbstractManagedType managedType = (AbstractManagedType) metaModel.entity(entityMetadata.getEntityClazz());
DBObject query = new BasicDBObject();
if (managedType.hasLobAttribute())
{
KunderaGridFS gfs = new KunderaGridFS(mongoDb, entityMetadata.getTableName());
String id = ((AbstractAttribute) entityMetadata.getIdAttribute()).getJPAColumnName();
query.put("metadata." + id, pKey); // depends on control dependency: [if], data = [none]
gfs.remove(query); // depends on control dependency: [if], data = [none]
}
else
{
if (metaModel.isEmbeddable(entityMetadata.getIdAttribute().getBindableJavaType()))
{
MongoDBUtils.populateCompoundKey(query, entityMetadata, metaModel, pKey); // depends on control dependency: [if], data = [none]
}
else
{
query.put("_id", MongoDBUtils.populateValue(pKey, pKey.getClass())); // depends on control dependency: [if], data = [none]
}
// For secondary tables.
List<String> secondaryTables = ((DefaultEntityAnnotationProcessor) managedType.getEntityAnnotation())
.getSecondaryTablesName();
secondaryTables.add(entityMetadata.getTableName()); // depends on control dependency: [if], data = [none]
for (String collectionName : secondaryTables)
{
KunderaCoreUtils.printQuery("Drop existing collection:" + query, showQuery); // depends on control dependency: [for], data = [none]
DBCollection dbCollection = mongoDb.getCollection(collectionName);
dbCollection.remove(query, getWriteConcern(), encoder); // depends on control dependency: [for], data = [none]
}
getIndexManager().remove(entityMetadata, entity, pKey); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
protected Type getType()
throws EFapsException
{
init();
final Type ret;
if (getValues().isEmpty()) {
final List<EventDefinition> events = getCommand().getEvents(EventType.UI_TABLE_EVALUATE);
String typeName = null;
if (events.size() > 1) {
throw new EFapsException(this.getClass(), "execute4NoInstance.moreThanOneEvaluate");
} else {
final EventDefinition event = events.get(0);
// test for basic or abstract types
if (event.getProperty("Type") != null) {
typeName = event.getProperty("Type");
}
// no type yet search alternatives
if (typeName == null) {
for (int i = 1; i < 100; i++) {
final String nameTmp = "Type" + String.format("%02d", i);
if (event.getProperty(nameTmp) != null) {
typeName = event.getProperty(nameTmp);
} else {
break;
}
}
}
}
ret = typeName == null ? null : Type.get(typeName);
} else {
ret = getValues().get(0).getInstance().getType();
}
return ret;
} }
|
public class class_name {
protected Type getType()
throws EFapsException
{
init();
final Type ret;
if (getValues().isEmpty()) {
final List<EventDefinition> events = getCommand().getEvents(EventType.UI_TABLE_EVALUATE);
String typeName = null;
if (events.size() > 1) {
throw new EFapsException(this.getClass(), "execute4NoInstance.moreThanOneEvaluate");
} else {
final EventDefinition event = events.get(0);
// test for basic or abstract types
if (event.getProperty("Type") != null) {
typeName = event.getProperty("Type"); // depends on control dependency: [if], data = [none]
}
// no type yet search alternatives
if (typeName == null) {
for (int i = 1; i < 100; i++) {
final String nameTmp = "Type" + String.format("%02d", i);
if (event.getProperty(nameTmp) != null) {
typeName = event.getProperty(nameTmp); // depends on control dependency: [if], data = [none]
} else {
break;
}
}
}
}
ret = typeName == null ? null : Type.get(typeName);
} else {
ret = getValues().get(0).getInstance().getType();
}
return ret;
} }
|
public class class_name {
private void ensureReferencedPKs(ModelDef modelDef, CollectionDescriptorDef collDef) throws ConstraintException
{
String elementClassName = collDef.getProperty(PropertyHelper.OJB_PROPERTY_ELEMENT_CLASS_REF);
ClassDescriptorDef elementClassDef = modelDef.getClass(elementClassName);
String indirTable = collDef.getProperty(PropertyHelper.OJB_PROPERTY_INDIRECTION_TABLE);
String localKey = collDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY);
String remoteKey = collDef.getProperty(PropertyHelper.OJB_PROPERTY_REMOTE_FOREIGNKEY);
boolean hasRemoteKey = remoteKey != null;
ArrayList fittingCollections = new ArrayList();
// we're checking for the fitting remote collection(s) and also
// use their foreignkey as remote-foreignkey in the original collection definition
for (Iterator it = elementClassDef.getAllExtentClasses(); it.hasNext();)
{
ClassDescriptorDef subTypeDef = (ClassDescriptorDef)it.next();
// find the collection in the element class that has the same indirection table
for (Iterator collIt = subTypeDef.getCollections(); collIt.hasNext();)
{
CollectionDescriptorDef curCollDef = (CollectionDescriptorDef)collIt.next();
if (indirTable.equals(curCollDef.getProperty(PropertyHelper.OJB_PROPERTY_INDIRECTION_TABLE)) &&
(collDef != curCollDef) &&
(!hasRemoteKey || CommaListIterator.sameLists(remoteKey, curCollDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY))) &&
(!curCollDef.hasProperty(PropertyHelper.OJB_PROPERTY_REMOTE_FOREIGNKEY) ||
CommaListIterator.sameLists(localKey, curCollDef.getProperty(PropertyHelper.OJB_PROPERTY_REMOTE_FOREIGNKEY))))
{
fittingCollections.add(curCollDef);
}
}
}
if (!fittingCollections.isEmpty())
{
// if there is more than one, check that they match, i.e. that they all have the same foreignkeys
if (!hasRemoteKey && (fittingCollections.size() > 1))
{
CollectionDescriptorDef firstCollDef = (CollectionDescriptorDef)fittingCollections.get(0);
String foreignKey = firstCollDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY);
for (int idx = 1; idx < fittingCollections.size(); idx++)
{
CollectionDescriptorDef curCollDef = (CollectionDescriptorDef)fittingCollections.get(idx);
if (!CommaListIterator.sameLists(foreignKey, curCollDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY)))
{
throw new ConstraintException("Cannot determine the element-side collection that corresponds to the collection "+
collDef.getName()+" in type "+collDef.getOwner().getName()+
" because there are at least two different collections that would fit."+
" Specifying remote-foreignkey in the original collection "+collDef.getName()+
" will perhaps help");
}
}
// store the found keys at the collections
collDef.setProperty(PropertyHelper.OJB_PROPERTY_REMOTE_FOREIGNKEY, foreignKey);
for (int idx = 0; idx < fittingCollections.size(); idx++)
{
CollectionDescriptorDef curCollDef = (CollectionDescriptorDef)fittingCollections.get(idx);
curCollDef.setProperty(PropertyHelper.OJB_PROPERTY_REMOTE_FOREIGNKEY, localKey);
}
}
}
// copy subclass pk fields into target class (if not already present)
ensurePKsFromHierarchy(elementClassDef);
} }
|
public class class_name {
private void ensureReferencedPKs(ModelDef modelDef, CollectionDescriptorDef collDef) throws ConstraintException
{
String elementClassName = collDef.getProperty(PropertyHelper.OJB_PROPERTY_ELEMENT_CLASS_REF);
ClassDescriptorDef elementClassDef = modelDef.getClass(elementClassName);
String indirTable = collDef.getProperty(PropertyHelper.OJB_PROPERTY_INDIRECTION_TABLE);
String localKey = collDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY);
String remoteKey = collDef.getProperty(PropertyHelper.OJB_PROPERTY_REMOTE_FOREIGNKEY);
boolean hasRemoteKey = remoteKey != null;
ArrayList fittingCollections = new ArrayList();
// we're checking for the fitting remote collection(s) and also
// use their foreignkey as remote-foreignkey in the original collection definition
for (Iterator it = elementClassDef.getAllExtentClasses(); it.hasNext();)
{
ClassDescriptorDef subTypeDef = (ClassDescriptorDef)it.next();
// find the collection in the element class that has the same indirection table
for (Iterator collIt = subTypeDef.getCollections(); collIt.hasNext();)
{
CollectionDescriptorDef curCollDef = (CollectionDescriptorDef)collIt.next();
if (indirTable.equals(curCollDef.getProperty(PropertyHelper.OJB_PROPERTY_INDIRECTION_TABLE)) &&
(collDef != curCollDef) &&
(!hasRemoteKey || CommaListIterator.sameLists(remoteKey, curCollDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY))) &&
(!curCollDef.hasProperty(PropertyHelper.OJB_PROPERTY_REMOTE_FOREIGNKEY) ||
CommaListIterator.sameLists(localKey, curCollDef.getProperty(PropertyHelper.OJB_PROPERTY_REMOTE_FOREIGNKEY))))
{
fittingCollections.add(curCollDef);
// depends on control dependency: [if], data = [none]
}
}
}
if (!fittingCollections.isEmpty())
{
// if there is more than one, check that they match, i.e. that they all have the same foreignkeys
if (!hasRemoteKey && (fittingCollections.size() > 1))
{
CollectionDescriptorDef firstCollDef = (CollectionDescriptorDef)fittingCollections.get(0);
String foreignKey = firstCollDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY);
for (int idx = 1; idx < fittingCollections.size(); idx++)
{
CollectionDescriptorDef curCollDef = (CollectionDescriptorDef)fittingCollections.get(idx);
if (!CommaListIterator.sameLists(foreignKey, curCollDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY)))
{
throw new ConstraintException("Cannot determine the element-side collection that corresponds to the collection "+
collDef.getName()+" in type "+collDef.getOwner().getName()+
" because there are at least two different collections that would fit."+
" Specifying remote-foreignkey in the original collection "+collDef.getName()+
" will perhaps help");
}
}
// store the found keys at the collections
collDef.setProperty(PropertyHelper.OJB_PROPERTY_REMOTE_FOREIGNKEY, foreignKey);
for (int idx = 0; idx < fittingCollections.size(); idx++)
{
CollectionDescriptorDef curCollDef = (CollectionDescriptorDef)fittingCollections.get(idx);
curCollDef.setProperty(PropertyHelper.OJB_PROPERTY_REMOTE_FOREIGNKEY, localKey);
// depends on control dependency: [for], data = [none]
}
}
}
// copy subclass pk fields into target class (if not already present)
ensurePKsFromHierarchy(elementClassDef);
} }
|
public class class_name {
public static List<String> readBuildConfigFile( String fileName, File outputDir )
throws IOException
{
List<String> arguments = new ArrayList<String>();
File argFile = new File( outputDir, fileName );
if ( FileUtils.fileExists( argFile.getAbsolutePath() ) )
{
FileReader reader = new FileReader( argFile );
BufferedReader bufRead = new BufferedReader( reader );
String line = null;
do
{
line = bufRead.readLine();
if ( null != line )
{
arguments.add( line );
}
}
while ( null != line );
}
return arguments;
} }
|
public class class_name {
public static List<String> readBuildConfigFile( String fileName, File outputDir )
throws IOException
{
List<String> arguments = new ArrayList<String>();
File argFile = new File( outputDir, fileName );
if ( FileUtils.fileExists( argFile.getAbsolutePath() ) )
{
FileReader reader = new FileReader( argFile );
BufferedReader bufRead = new BufferedReader( reader );
String line = null;
do
{
line = bufRead.readLine();
if ( null != line )
{
arguments.add( line ); // depends on control dependency: [if], data = [line )]
}
}
while ( null != line );
}
return arguments;
} }
|
public class class_name {
@Override
public boolean isValidName(String src) {
StringTokenizer tokens = new StringTokenizer(src, Path.SEPARATOR);
while (tokens.hasMoreTokens()) {
String element = tokens.nextToken();
if (element.equals("..") || element.equals(".")) {
return false;
}
}
return true;
} }
|
public class class_name {
@Override
public boolean isValidName(String src) {
StringTokenizer tokens = new StringTokenizer(src, Path.SEPARATOR);
while (tokens.hasMoreTokens()) {
String element = tokens.nextToken();
if (element.equals("..") || element.equals(".")) {
return false; // depends on control dependency: [if], data = [none]
}
}
return true;
} }
|
public class class_name {
public static List<Failure> validateConfigPropertiesType(ValidateClass vo, String section, String failMsg)
{
List<Failure> failures = new ArrayList<Failure>(1);
for (ConfigProperty cpmd : vo.getConfigProperties())
{
try
{
containGetOrIsMethod(vo, "get", cpmd, section, failMsg, failures);
}
catch (Throwable t)
{
try
{
containGetOrIsMethod(vo, "is", cpmd, section, failMsg, failures);
}
catch (Throwable it)
{
// Ignore
}
}
}
if (failures.isEmpty())
return null;
return failures;
} }
|
public class class_name {
public static List<Failure> validateConfigPropertiesType(ValidateClass vo, String section, String failMsg)
{
List<Failure> failures = new ArrayList<Failure>(1);
for (ConfigProperty cpmd : vo.getConfigProperties())
{
try
{
containGetOrIsMethod(vo, "get", cpmd, section, failMsg, failures); // depends on control dependency: [try], data = [none]
}
catch (Throwable t)
{
try
{
containGetOrIsMethod(vo, "is", cpmd, section, failMsg, failures); // depends on control dependency: [try], data = [none]
}
catch (Throwable it)
{
// Ignore
} // depends on control dependency: [catch], data = [none]
} // depends on control dependency: [catch], data = [none]
}
if (failures.isEmpty())
return null;
return failures;
} }
|
public class class_name {
public static boolean encryptionIsReversible(ECKey originalKey, ECKey encryptedKey, KeyCrypter keyCrypter, KeyParameter aesKey) {
try {
ECKey rebornUnencryptedKey = encryptedKey.decrypt(keyCrypter, aesKey);
byte[] originalPrivateKeyBytes = originalKey.getPrivKeyBytes();
byte[] rebornKeyBytes = rebornUnencryptedKey.getPrivKeyBytes();
if (!Arrays.equals(originalPrivateKeyBytes, rebornKeyBytes)) {
log.error("The check that encryption could be reversed failed for {}", originalKey);
return false;
}
return true;
} catch (KeyCrypterException kce) {
log.error(kce.getMessage());
return false;
}
} }
|
public class class_name {
public static boolean encryptionIsReversible(ECKey originalKey, ECKey encryptedKey, KeyCrypter keyCrypter, KeyParameter aesKey) {
try {
ECKey rebornUnencryptedKey = encryptedKey.decrypt(keyCrypter, aesKey);
byte[] originalPrivateKeyBytes = originalKey.getPrivKeyBytes();
byte[] rebornKeyBytes = rebornUnencryptedKey.getPrivKeyBytes();
if (!Arrays.equals(originalPrivateKeyBytes, rebornKeyBytes)) {
log.error("The check that encryption could be reversed failed for {}", originalKey); // depends on control dependency: [if], data = [none]
return false; // depends on control dependency: [if], data = [none]
}
return true; // depends on control dependency: [try], data = [none]
} catch (KeyCrypterException kce) {
log.error(kce.getMessage());
return false;
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
static public String[] getMultiElementContent( XMLNode root, String path )
{
final List list = new ArrayList();
visitElements( root, path, new ElementVisitor()
{
public void visitElement( XMLNode n )
{
String value = getElementContent( n, "" );
if ( value != null )
{
list.add( value );
}
}
} );
if ( list.size() == 0 )
{
return null;
}
return (String[]) list.toArray( new String[list.size()] );
} }
|
public class class_name {
static public String[] getMultiElementContent( XMLNode root, String path )
{
final List list = new ArrayList();
visitElements( root, path, new ElementVisitor()
{
public void visitElement( XMLNode n )
{
String value = getElementContent( n, "" );
if ( value != null )
{
list.add( value ); // depends on control dependency: [if], data = [( value]
}
}
} );
if ( list.size() == 0 )
{
return null; // depends on control dependency: [if], data = [none]
}
return (String[]) list.toArray( new String[list.size()] );
} }
|
public class class_name {
public static boolean hasModifier(Constructor<?> constructor, ModifierType... modifierTypes) {
if (null == constructor || ArrayUtil.isEmpty(modifierTypes)) {
return false;
}
return 0 != (constructor.getModifiers() & modifiersToInt(modifierTypes));
} }
|
public class class_name {
public static boolean hasModifier(Constructor<?> constructor, ModifierType... modifierTypes) {
if (null == constructor || ArrayUtil.isEmpty(modifierTypes)) {
return false;
// depends on control dependency: [if], data = [none]
}
return 0 != (constructor.getModifiers() & modifiersToInt(modifierTypes));
} }
|
public class class_name {
@Override
public EClass getIfcThermalConductivityMeasure() {
if (ifcThermalConductivityMeasureEClass == null) {
ifcThermalConductivityMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(880);
}
return ifcThermalConductivityMeasureEClass;
} }
|
public class class_name {
@Override
public EClass getIfcThermalConductivityMeasure() {
if (ifcThermalConductivityMeasureEClass == null) {
ifcThermalConductivityMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(880);
// depends on control dependency: [if], data = [none]
}
return ifcThermalConductivityMeasureEClass;
} }
|
public class class_name {
public Request getRequest() {
if (clientTransaction != null) {
return clientTransaction.getRequest();
}
if (serverTransaction != null) {
return serverTransaction.getRequest();
}
return null;
} }
|
public class class_name {
public Request getRequest() {
if (clientTransaction != null) {
return clientTransaction.getRequest(); // depends on control dependency: [if], data = [none]
}
if (serverTransaction != null) {
return serverTransaction.getRequest(); // depends on control dependency: [if], data = [none]
}
return null;
} }
|
public class class_name {
void setInConditionsAsTables() {
for (int i = rangeVariables.length - 1; i >= 0; i--) {
RangeVariable rangeVar = rangeVariables[i];
Expression in = inExpressions[i];
if (in != null) {
Index index = rangeVar.rangeTable.getIndexForColumn(
in.getLeftNode().nodes[0].getColumnIndex());
RangeVariable newRangeVar =
new RangeVariable(in.getRightNode().subQuery.getTable(),
null, null, null, compileContext);
RangeVariable[] newList =
new RangeVariable[rangeVariables.length + 1];
ArrayUtil.copyAdjustArray(rangeVariables, newList,
newRangeVar, i, 1);
rangeVariables = newList;
// make two columns as arg
ColumnSchema left = rangeVar.rangeTable.getColumn(
in.getLeftNode().nodes[0].getColumnIndex());
ColumnSchema right = newRangeVar.rangeTable.getColumn(0);
Expression e = new ExpressionLogical(rangeVar, left,
newRangeVar, right);
rangeVar.addIndexCondition(e, index, flags[i]);
}
}
} }
|
public class class_name {
void setInConditionsAsTables() {
for (int i = rangeVariables.length - 1; i >= 0; i--) {
RangeVariable rangeVar = rangeVariables[i];
Expression in = inExpressions[i];
if (in != null) {
Index index = rangeVar.rangeTable.getIndexForColumn(
in.getLeftNode().nodes[0].getColumnIndex());
RangeVariable newRangeVar =
new RangeVariable(in.getRightNode().subQuery.getTable(),
null, null, null, compileContext);
RangeVariable[] newList =
new RangeVariable[rangeVariables.length + 1];
ArrayUtil.copyAdjustArray(rangeVariables, newList,
newRangeVar, i, 1); // depends on control dependency: [if], data = [none]
rangeVariables = newList; // depends on control dependency: [if], data = [none]
// make two columns as arg
ColumnSchema left = rangeVar.rangeTable.getColumn(
in.getLeftNode().nodes[0].getColumnIndex());
ColumnSchema right = newRangeVar.rangeTable.getColumn(0);
Expression e = new ExpressionLogical(rangeVar, left,
newRangeVar, right);
rangeVar.addIndexCondition(e, index, flags[i]); // depends on control dependency: [if], data = [none]
}
}
} }
|
public class class_name {
public static String getIpInformation(final WebBrowser webBrowser) {
String ipInformation=webBrowser.getAddress();
final HttpServletRequest httpRequest=((ServletRequestAttributes)RequestContextHolder.currentRequestAttributes()).getRequest();
final String xForwardedForHeader = httpRequest.getHeader(X_FORWARDED_FOR);
if (xForwardedForHeader != null) {
final String[] split = xForwardedForHeader.split(",");
if (split.length != 0) {
ipInformation = split[0];
}
}
return ipInformation;
} }
|
public class class_name {
public static String getIpInformation(final WebBrowser webBrowser) {
String ipInformation=webBrowser.getAddress();
final HttpServletRequest httpRequest=((ServletRequestAttributes)RequestContextHolder.currentRequestAttributes()).getRequest();
final String xForwardedForHeader = httpRequest.getHeader(X_FORWARDED_FOR);
if (xForwardedForHeader != null) {
final String[] split = xForwardedForHeader.split(",");
if (split.length != 0) {
ipInformation = split[0]; // depends on control dependency: [if], data = [none]
}
}
return ipInformation;
} }
|
public class class_name {
static public String getStackTrace(Exception e) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
e.printStackTrace(ps);
ps.flush();
try {
return baos.toString("UTF-8");
} catch (UnsupportedEncodingException ee) {
return baos.toString();
}
} }
|
public class class_name {
static public String getStackTrace(Exception e) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
e.printStackTrace(ps);
ps.flush();
try {
return baos.toString("UTF-8"); // depends on control dependency: [try], data = [none]
} catch (UnsupportedEncodingException ee) {
return baos.toString();
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
private void overlayValue(HeaderElement elem) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Overlaying existing header: " + elem.getName());
}
int next_index = this.lastCRLFBufferIndex;
int next_pos = this.lastCRLFPosition;
if (null != elem.nextSequence && !elem.nextSequence.wasAdded()) {
next_index = elem.nextSequence.getLastCRLFBufferIndex();
next_pos = elem.nextSequence.getLastCRLFPosition();
}
WsByteBuffer buffer = this.parseBuffers[elem.getLastCRLFBufferIndex()];
buffer.position(elem.getLastCRLFPosition() + (elem.isLastCRLFaCR() ? 2 : 1));
if (next_index == elem.getLastCRLFBufferIndex()) {
// all in one buffer
buffer.put(elem.getKey().getMarshalledByteArray(foundCompactHeader()));
buffer.put(elem.asRawBytes(), elem.getOffset(), elem.getValueLength());
} else {
// header straddles buffers
int index = elem.getLastCRLFBufferIndex();
index = overlayBytes(elem.getKey().getMarshalledByteArray(foundCompactHeader()), 0, -1, index);
index = overlayBytes(elem.asRawBytes(), elem.getOffset(), elem.getValueLength(), index);
buffer = this.parseBuffers[index];
}
// pad trailing whitespace if we need it
int start = buffer.position();
if (start < next_pos) {
scribbleWhiteSpace(buffer, start, next_pos);
}
} }
|
public class class_name {
private void overlayValue(HeaderElement elem) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Overlaying existing header: " + elem.getName()); // depends on control dependency: [if], data = [none]
}
int next_index = this.lastCRLFBufferIndex;
int next_pos = this.lastCRLFPosition;
if (null != elem.nextSequence && !elem.nextSequence.wasAdded()) {
next_index = elem.nextSequence.getLastCRLFBufferIndex(); // depends on control dependency: [if], data = [none]
next_pos = elem.nextSequence.getLastCRLFPosition(); // depends on control dependency: [if], data = [none]
}
WsByteBuffer buffer = this.parseBuffers[elem.getLastCRLFBufferIndex()];
buffer.position(elem.getLastCRLFPosition() + (elem.isLastCRLFaCR() ? 2 : 1));
if (next_index == elem.getLastCRLFBufferIndex()) {
// all in one buffer
buffer.put(elem.getKey().getMarshalledByteArray(foundCompactHeader())); // depends on control dependency: [if], data = [none]
buffer.put(elem.asRawBytes(), elem.getOffset(), elem.getValueLength()); // depends on control dependency: [if], data = [none]
} else {
// header straddles buffers
int index = elem.getLastCRLFBufferIndex();
index = overlayBytes(elem.getKey().getMarshalledByteArray(foundCompactHeader()), 0, -1, index); // depends on control dependency: [if], data = [none]
index = overlayBytes(elem.asRawBytes(), elem.getOffset(), elem.getValueLength(), index); // depends on control dependency: [if], data = [none]
buffer = this.parseBuffers[index]; // depends on control dependency: [if], data = [none]
}
// pad trailing whitespace if we need it
int start = buffer.position();
if (start < next_pos) {
scribbleWhiteSpace(buffer, start, next_pos); // depends on control dependency: [if], data = [next_pos)]
}
} }
|
public class class_name {
public void marshall(StartFlowRequest startFlowRequest, ProtocolMarshaller protocolMarshaller) {
if (startFlowRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(startFlowRequest.getFlowArn(), FLOWARN_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} }
|
public class class_name {
public void marshall(StartFlowRequest startFlowRequest, ProtocolMarshaller protocolMarshaller) {
if (startFlowRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(startFlowRequest.getFlowArn(), FLOWARN_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
public MapView addCoordinateLine(final CoordinateLine coordinateLine) {
if (!getInitialized()) {
if (logger.isWarnEnabled()) {
logger.warn(MAP_VIEW_NOT_YET_INITIALIZED);
}
} else {
// sync on the coordinatesLines map as the cleaner thread accesses this as well
synchronized (coordinateLines) {
final String id = requireNonNull(coordinateLine).getId();
if (!coordinateLines.containsKey(id)) {
if (logger.isDebugEnabled()) {
logger.debug("adding coordinate line {}", coordinateLine);
}
final JSObject jsCoordinateLine = (JSObject) jsMapView.call("getCoordinateLine", id);
coordinateLine.getCoordinateStream().forEach(
(coord) -> jsCoordinateLine
.call("addCoordinate", coord.getLatitude(), coord.getLongitude()));
final javafx.scene.paint.Color color = coordinateLine.getColor();
jsCoordinateLine.call("setColor",
color.getRed() * 255, color.getGreen() * 255, color.getBlue() * 255,
color.getOpacity());
final javafx.scene.paint.Color fillColor = coordinateLine.getFillColor();
jsCoordinateLine.call("setFillColor",
fillColor.getRed() * 255, fillColor.getGreen() * 255, fillColor.getBlue() * 255,
fillColor.getOpacity());
jsCoordinateLine.call("setWidth", coordinateLine.getWidth());
jsCoordinateLine.call("setClosed", coordinateLine.isClosed());
jsCoordinateLine.call("seal");
final ChangeListener<Boolean> changeListener =
(observable, newValue, oldValue) -> setCoordinateLineVisibleInMap(id);
coordinateLine.visibleProperty().addListener(changeListener);
// store the listener as we must unregister on removeCooridnateLine
coordinateLineListeners.put(id, new CoordinateLineListener(changeListener));
// store a weak reference to be able to remove the line from the map if the caller forgets to do so
coordinateLines.put(id, new WeakReference<>(coordinateLine, weakReferenceQueue));
setCoordinateLineVisibleInMap(id);
}
}
}
return this;
} }
|
public class class_name {
public MapView addCoordinateLine(final CoordinateLine coordinateLine) {
if (!getInitialized()) {
if (logger.isWarnEnabled()) {
logger.warn(MAP_VIEW_NOT_YET_INITIALIZED); // depends on control dependency: [if], data = [none]
}
} else {
// sync on the coordinatesLines map as the cleaner thread accesses this as well
synchronized (coordinateLines) { // depends on control dependency: [if], data = [none]
final String id = requireNonNull(coordinateLine).getId();
if (!coordinateLines.containsKey(id)) {
if (logger.isDebugEnabled()) {
logger.debug("adding coordinate line {}", coordinateLine); // depends on control dependency: [if], data = [none]
}
final JSObject jsCoordinateLine = (JSObject) jsMapView.call("getCoordinateLine", id);
coordinateLine.getCoordinateStream().forEach(
(coord) -> jsCoordinateLine
.call("addCoordinate", coord.getLatitude(), coord.getLongitude())); // depends on control dependency: [if], data = [none]
final javafx.scene.paint.Color color = coordinateLine.getColor();
jsCoordinateLine.call("setColor",
color.getRed() * 255, color.getGreen() * 255, color.getBlue() * 255,
color.getOpacity()); // depends on control dependency: [if], data = [none]
final javafx.scene.paint.Color fillColor = coordinateLine.getFillColor();
jsCoordinateLine.call("setFillColor",
fillColor.getRed() * 255, fillColor.getGreen() * 255, fillColor.getBlue() * 255,
fillColor.getOpacity()); // depends on control dependency: [if], data = [none]
jsCoordinateLine.call("setWidth", coordinateLine.getWidth()); // depends on control dependency: [if], data = [none]
jsCoordinateLine.call("setClosed", coordinateLine.isClosed()); // depends on control dependency: [if], data = [none]
jsCoordinateLine.call("seal"); // depends on control dependency: [if], data = [none]
final ChangeListener<Boolean> changeListener =
(observable, newValue, oldValue) -> setCoordinateLineVisibleInMap(id); // depends on control dependency: [if], data = [none]
coordinateLine.visibleProperty().addListener(changeListener); // depends on control dependency: [if], data = [none]
// store the listener as we must unregister on removeCooridnateLine
coordinateLineListeners.put(id, new CoordinateLineListener(changeListener)); // depends on control dependency: [if], data = [none]
// store a weak reference to be able to remove the line from the map if the caller forgets to do so
coordinateLines.put(id, new WeakReference<>(coordinateLine, weakReferenceQueue)); // depends on control dependency: [if], data = [none]
setCoordinateLineVisibleInMap(id); // depends on control dependency: [if], data = [none]
}
}
}
return this;
} }
|
public class class_name {
public void setZoom(float scale, float focusX, float focusY, ScaleType scaleType) {
//
// setZoom can be called before the image is on the screen, but at this point,
// image and view sizes have not yet been calculated in onMeasure. Thus, we should
// delay calling setZoom until the view has been measured.
//
if (!onDrawReady) {
delayedZoomVariables = new ZoomVariables(scale, focusX, focusY, scaleType);
return;
}
if (scaleType != mScaleType) {
setScaleType(scaleType);
}
resetZoom();
scaleImage(scale, viewWidth / 2, viewHeight / 2, true);
matrix.getValues(m);
m[Matrix.MTRANS_X] = -((focusX * getImageWidth()) - (viewWidth * 0.5f));
m[Matrix.MTRANS_Y] = -((focusY * getImageHeight()) - (viewHeight * 0.5f));
matrix.setValues(m);
fixTrans();
setImageMatrix(matrix);
} }
|
public class class_name {
public void setZoom(float scale, float focusX, float focusY, ScaleType scaleType) {
//
// setZoom can be called before the image is on the screen, but at this point,
// image and view sizes have not yet been calculated in onMeasure. Thus, we should
// delay calling setZoom until the view has been measured.
//
if (!onDrawReady) {
delayedZoomVariables = new ZoomVariables(scale, focusX, focusY, scaleType); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
if (scaleType != mScaleType) {
setScaleType(scaleType); // depends on control dependency: [if], data = [(scaleType]
}
resetZoom();
scaleImage(scale, viewWidth / 2, viewHeight / 2, true);
matrix.getValues(m);
m[Matrix.MTRANS_X] = -((focusX * getImageWidth()) - (viewWidth * 0.5f));
m[Matrix.MTRANS_Y] = -((focusY * getImageHeight()) - (viewHeight * 0.5f));
matrix.setValues(m);
fixTrans();
setImageMatrix(matrix);
} }
|
public class class_name {
private void populateEntity(EntityMetadata entityMetadata, List entities, DBObject fetchedDocument)
{
// handler.getEntityFromGFSDBFile(entityClazz, entity, m, outputFile,
// kunderaMetadata)
Map<String, Object> relationValue = null;
if (fetchedDocument != null)
{
MetamodelImpl metaModel = (MetamodelImpl) kunderaMetadata.getApplicationMetadata().getMetamodel(
entityMetadata.getPersistenceUnit());
EntityType entityType = metaModel.entity(entityMetadata.getEntityClazz());
List<AbstractManagedType> subManagedType = ((AbstractManagedType) entityType).getSubManagedType();
EntityMetadata subEntityMetadata = null;
Object enhancedEntity = null;
if (!subManagedType.isEmpty())
{
for (AbstractManagedType subEntity : subManagedType)
{
String discColumn = subEntity.getDiscriminatorColumn();
String disColValue = subEntity.getDiscriminatorValue();
Object value = fetchedDocument.get(discColumn);
if (value != null && value.toString().equals(disColValue))
{
subEntityMetadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata,
subEntity.getJavaType());
break;
}
}
enhancedEntity = instantiateEntity(subEntityMetadata.getEntityClazz(), enhancedEntity);
relationValue = handler.getEntityFromDocument(subEntityMetadata.getEntityClazz(), enhancedEntity,
subEntityMetadata, fetchedDocument, subEntityMetadata.getRelationNames(), relationValue,
kunderaMetadata);
}
else
{
enhancedEntity = instantiateEntity(entityMetadata.getEntityClazz(), enhancedEntity);
relationValue = handler.getEntityFromDocument(entityMetadata.getEntityClazz(), enhancedEntity,
entityMetadata, fetchedDocument, entityMetadata.getRelationNames(), relationValue,
kunderaMetadata);
}
if (relationValue != null && !relationValue.isEmpty())
{
enhancedEntity = new EnhanceEntity(enhancedEntity, PropertyAccessorHelper.getId(enhancedEntity,
entityMetadata), relationValue);
}
if (enhancedEntity != null)
{
entities.add(enhancedEntity);
}
}
} }
|
public class class_name {
private void populateEntity(EntityMetadata entityMetadata, List entities, DBObject fetchedDocument)
{
// handler.getEntityFromGFSDBFile(entityClazz, entity, m, outputFile,
// kunderaMetadata)
Map<String, Object> relationValue = null;
if (fetchedDocument != null)
{
MetamodelImpl metaModel = (MetamodelImpl) kunderaMetadata.getApplicationMetadata().getMetamodel(
entityMetadata.getPersistenceUnit());
EntityType entityType = metaModel.entity(entityMetadata.getEntityClazz());
List<AbstractManagedType> subManagedType = ((AbstractManagedType) entityType).getSubManagedType();
EntityMetadata subEntityMetadata = null;
Object enhancedEntity = null;
if (!subManagedType.isEmpty())
{
for (AbstractManagedType subEntity : subManagedType)
{
String discColumn = subEntity.getDiscriminatorColumn();
String disColValue = subEntity.getDiscriminatorValue();
Object value = fetchedDocument.get(discColumn);
if (value != null && value.toString().equals(disColValue))
{
subEntityMetadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata,
subEntity.getJavaType()); // depends on control dependency: [if], data = [none]
break;
}
}
enhancedEntity = instantiateEntity(subEntityMetadata.getEntityClazz(), enhancedEntity); // depends on control dependency: [if], data = [none]
relationValue = handler.getEntityFromDocument(subEntityMetadata.getEntityClazz(), enhancedEntity,
subEntityMetadata, fetchedDocument, subEntityMetadata.getRelationNames(), relationValue,
kunderaMetadata); // depends on control dependency: [if], data = [none]
}
else
{
enhancedEntity = instantiateEntity(entityMetadata.getEntityClazz(), enhancedEntity); // depends on control dependency: [if], data = [none]
relationValue = handler.getEntityFromDocument(entityMetadata.getEntityClazz(), enhancedEntity,
entityMetadata, fetchedDocument, entityMetadata.getRelationNames(), relationValue,
kunderaMetadata); // depends on control dependency: [if], data = [none]
}
if (relationValue != null && !relationValue.isEmpty())
{
enhancedEntity = new EnhanceEntity(enhancedEntity, PropertyAccessorHelper.getId(enhancedEntity,
entityMetadata), relationValue); // depends on control dependency: [if], data = [none]
}
if (enhancedEntity != null)
{
entities.add(enhancedEntity); // depends on control dependency: [if], data = [(enhancedEntity]
}
}
} }
|
public class class_name {
public static double reduce(
DoubleTuple t0, double identity, DoubleBinaryOperator op)
{
double result = identity;
int n = t0.getSize();
for (int i=0; i<n; i++)
{
double operand0 = t0.get(i);
result = op.applyAsDouble(result, operand0);
}
return result;
} }
|
public class class_name {
public static double reduce(
DoubleTuple t0, double identity, DoubleBinaryOperator op)
{
double result = identity;
int n = t0.getSize();
for (int i=0; i<n; i++)
{
double operand0 = t0.get(i);
result = op.applyAsDouble(result, operand0);
// depends on control dependency: [for], data = [none]
}
return result;
} }
|
public class class_name {
public void specifyAmbiguousValues(JCas jcas) {
// build up a list with all found TIMEX expressions
List<Timex3> linearDates = new ArrayList<Timex3>();
FSIterator iterTimex = jcas.getAnnotationIndex(Timex3.type).iterator();
// Create List of all Timexes of types "date" and "time"
while (iterTimex.hasNext()) {
Timex3 timex = (Timex3) iterTimex.next();
if (timex.getTimexType().equals("DATE") || timex.getTimexType().equals("TIME")) {
linearDates.add(timex);
}
if(timex.getTimexType().equals("DURATION") && !timex.getEmptyValue().equals("")) {
linearDates.add(timex);
}
}
//////////////////////////////////////////////
// go through list of Date and Time timexes //
//////////////////////////////////////////////
for (int i = 0; i < linearDates.size(); i++) {
Timex3 t_i = (Timex3) linearDates.get(i);
String value_i = t_i.getTimexValue();
String valueNew = value_i;
// handle the value attribute only if we have a TIME or DATE
if(t_i.getTimexType().equals("TIME") || t_i.getTimexType().equals("DATE"))
valueNew = specifyAmbiguousValuesString(value_i, t_i, i, linearDates, jcas);
// handle the emptyValue attribute for any type
if(t_i.getEmptyValue() != null && t_i.getEmptyValue().length() > 0) {
String emptyValueNew = specifyAmbiguousValuesString(t_i.getEmptyValue(), t_i, i, linearDates, jcas);
t_i.setEmptyValue(emptyValueNew);
}
t_i.removeFromIndexes();
Logger.printDetail(t_i.getTimexId()+" DISAMBIGUATION PHASE: foundBy:"+t_i.getFoundByRule()+" text:"+t_i.getCoveredText()+" value:"+t_i.getTimexValue()+" NEW value:"+valueNew);
t_i.setTimexValue(valueNew);
t_i.addToIndexes();
linearDates.set(i, t_i);
}
} }
|
public class class_name {
public void specifyAmbiguousValues(JCas jcas) {
// build up a list with all found TIMEX expressions
List<Timex3> linearDates = new ArrayList<Timex3>();
FSIterator iterTimex = jcas.getAnnotationIndex(Timex3.type).iterator();
// Create List of all Timexes of types "date" and "time"
while (iterTimex.hasNext()) {
Timex3 timex = (Timex3) iterTimex.next();
if (timex.getTimexType().equals("DATE") || timex.getTimexType().equals("TIME")) {
linearDates.add(timex); // depends on control dependency: [if], data = [none]
}
if(timex.getTimexType().equals("DURATION") && !timex.getEmptyValue().equals("")) {
linearDates.add(timex); // depends on control dependency: [if], data = [none]
}
}
//////////////////////////////////////////////
// go through list of Date and Time timexes //
//////////////////////////////////////////////
for (int i = 0; i < linearDates.size(); i++) {
Timex3 t_i = (Timex3) linearDates.get(i);
String value_i = t_i.getTimexValue();
String valueNew = value_i;
// handle the value attribute only if we have a TIME or DATE
if(t_i.getTimexType().equals("TIME") || t_i.getTimexType().equals("DATE"))
valueNew = specifyAmbiguousValuesString(value_i, t_i, i, linearDates, jcas);
// handle the emptyValue attribute for any type
if(t_i.getEmptyValue() != null && t_i.getEmptyValue().length() > 0) {
String emptyValueNew = specifyAmbiguousValuesString(t_i.getEmptyValue(), t_i, i, linearDates, jcas);
t_i.setEmptyValue(emptyValueNew); // depends on control dependency: [if], data = [none]
}
t_i.removeFromIndexes(); // depends on control dependency: [for], data = [none]
Logger.printDetail(t_i.getTimexId()+" DISAMBIGUATION PHASE: foundBy:"+t_i.getFoundByRule()+" text:"+t_i.getCoveredText()+" value:"+t_i.getTimexValue()+" NEW value:"+valueNew); // depends on control dependency: [for], data = [none]
t_i.setTimexValue(valueNew); // depends on control dependency: [for], data = [none]
t_i.addToIndexes(); // depends on control dependency: [for], data = [none]
linearDates.set(i, t_i); // depends on control dependency: [for], data = [i]
}
} }
|
public class class_name {
@Override
public boolean remove(String name) {
final Metric metric = metrics.remove(name);
metadata.remove(name);
if (metric != null) {
return true;
}
return false;
} }
|
public class class_name {
@Override
public boolean remove(String name) {
final Metric metric = metrics.remove(name);
metadata.remove(name);
if (metric != null) {
return true; // depends on control dependency: [if], data = [none]
}
return false;
} }
|
public class class_name {
@Override
public ImageBuffer filter(ImageBuffer source)
{
final int width = source.getWidth();
final int height = source.getHeight();
if (width < MIN_SIZE || height < MIN_SIZE)
{
return source;
}
final int[] inPixels = new int[width * height];
final int[] outPixels = new int[width * height];
source.getRgb(0, 0, width, height, inPixels, 0, width);
final Kernel kernel = createKernel(radius, width, height);
compute(kernel, inPixels, outPixels, width, height, alpha, edge);
compute(kernel, outPixels, inPixels, height, width, alpha, edge);
final ImageBuffer dest = Graphics.createImageBuffer(width, height, source.getTransparentColor());
dest.setRgb(0, 0, width, height, inPixels, 0, width);
return dest;
} }
|
public class class_name {
@Override
public ImageBuffer filter(ImageBuffer source)
{
final int width = source.getWidth();
final int height = source.getHeight();
if (width < MIN_SIZE || height < MIN_SIZE)
{
return source;
// depends on control dependency: [if], data = [none]
}
final int[] inPixels = new int[width * height];
final int[] outPixels = new int[width * height];
source.getRgb(0, 0, width, height, inPixels, 0, width);
final Kernel kernel = createKernel(radius, width, height);
compute(kernel, inPixels, outPixels, width, height, alpha, edge);
compute(kernel, outPixels, inPixels, height, width, alpha, edge);
final ImageBuffer dest = Graphics.createImageBuffer(width, height, source.getTransparentColor());
dest.setRgb(0, 0, width, height, inPixels, 0, width);
return dest;
} }
|
public class class_name {
public T removeHead() {
if( first == null )
throw new IllegalArgumentException("Empty list");
T ret = first.getObject();
Element<T> e = first;
available.push(first);
if( first.next != null ) {
first.next.previous = null;
first = first.next;
} else {
// there's only one element in the list
first = last = null;
}
e.clear();
size--;
return ret;
} }
|
public class class_name {
public T removeHead() {
if( first == null )
throw new IllegalArgumentException("Empty list");
T ret = first.getObject();
Element<T> e = first;
available.push(first);
if( first.next != null ) {
first.next.previous = null; // depends on control dependency: [if], data = [none]
first = first.next; // depends on control dependency: [if], data = [none]
} else {
// there's only one element in the list
first = last = null; // depends on control dependency: [if], data = [none]
}
e.clear();
size--;
return ret;
} }
|
public class class_name {
Object pop() {
Object ret = callStack.pop();
if (ret != null && !SIMPLE_TYPES.contains(ret.getClass())) {
objectsInCallStack.remove(ret);
}
return ret;
} }
|
public class class_name {
Object pop() {
Object ret = callStack.pop();
if (ret != null && !SIMPLE_TYPES.contains(ret.getClass())) {
objectsInCallStack.remove(ret); // depends on control dependency: [if], data = [(ret]
}
return ret;
} }
|
public class class_name {
public static void fillUniform(GrayF64 img, Random rand , double min , double max) {
double range = max-min;
double[] data = img.data;
for (int y = 0; y < img.height; y++) {
int index = img.getStartIndex() + y * img.getStride();
for (int x = 0; x < img.width; x++) {
data[index++] = rand.nextDouble()*range+min;
}
}
} }
|
public class class_name {
public static void fillUniform(GrayF64 img, Random rand , double min , double max) {
double range = max-min;
double[] data = img.data;
for (int y = 0; y < img.height; y++) {
int index = img.getStartIndex() + y * img.getStride();
for (int x = 0; x < img.width; x++) {
data[index++] = rand.nextDouble()*range+min; // depends on control dependency: [for], data = [none]
}
}
} }
|
public class class_name {
public static <ITEM> List<Map<String, Object>> selectFrom(List<Selector> selectors, Collection<ITEM> results, Map<String, FieldAccess> fields) {
List<Map<String, Object>> rows = new ArrayList<>( results.size() );
for ( Selector s : selectors ) {
s.handleStart( results );
}
int index = 0;
for ( ITEM item : results ) {
Map<String, Object> row = new LinkedHashMap<>();
for ( Selector s : selectors ) {
s.handleRow( index, row, item, fields );
}
index++;
rows.add( row );
}
for ( Selector s : selectors ) {
s.handleComplete( rows );
}
return rows;
} }
|
public class class_name {
public static <ITEM> List<Map<String, Object>> selectFrom(List<Selector> selectors, Collection<ITEM> results, Map<String, FieldAccess> fields) {
List<Map<String, Object>> rows = new ArrayList<>( results.size() );
for ( Selector s : selectors ) {
s.handleStart( results ); // depends on control dependency: [for], data = [s]
}
int index = 0;
for ( ITEM item : results ) {
Map<String, Object> row = new LinkedHashMap<>();
for ( Selector s : selectors ) {
s.handleRow( index, row, item, fields ); // depends on control dependency: [for], data = [s]
}
index++; // depends on control dependency: [for], data = [none]
rows.add( row ); // depends on control dependency: [for], data = [none]
}
for ( Selector s : selectors ) {
s.handleComplete( rows ); // depends on control dependency: [for], data = [s]
}
return rows;
} }
|
public class class_name {
public static ArgumentListExpression createArgumentListFromParameters(Parameter[] parameterTypes, boolean thisAsFirstArgument, Map<String, ClassNode> genericsPlaceholders) {
ArgumentListExpression arguments = new ArgumentListExpression();
if (thisAsFirstArgument) {
arguments.addExpression(new VariableExpression("this"));
}
for (Parameter parameterType : parameterTypes) {
arguments.addExpression(new VariableExpression(parameterType.getName(), replaceGenericsPlaceholders(parameterType.getType(), genericsPlaceholders)));
}
return arguments;
} }
|
public class class_name {
public static ArgumentListExpression createArgumentListFromParameters(Parameter[] parameterTypes, boolean thisAsFirstArgument, Map<String, ClassNode> genericsPlaceholders) {
ArgumentListExpression arguments = new ArgumentListExpression();
if (thisAsFirstArgument) {
arguments.addExpression(new VariableExpression("this")); // depends on control dependency: [if], data = [none]
}
for (Parameter parameterType : parameterTypes) {
arguments.addExpression(new VariableExpression(parameterType.getName(), replaceGenericsPlaceholders(parameterType.getType(), genericsPlaceholders))); // depends on control dependency: [for], data = [parameterType]
}
return arguments;
} }
|
public class class_name {
@Override
public JspConfigDescriptor getJspConfigDescriptor() {
if (withinContextInitOfProgAddListener) {
throw new UnsupportedOperationException(MessageFormat.format(
nls.getString("Unsupported.op.from.servlet.context.listener"),
new Object[] {"getJspConfigDescriptor", lastProgAddListenerInitialized, getApplicationName()})); // PI41941
}
JspConfigDescriptorImpl jspConfigDescriptor = new JspConfigDescriptorImpl(this);
if (jspConfigDescriptor.getJspPropertyGroups().isEmpty() && jspConfigDescriptor.getTaglibs().isEmpty()) {
return null;
}
return jspConfigDescriptor;
} }
|
public class class_name {
@Override
public JspConfigDescriptor getJspConfigDescriptor() {
if (withinContextInitOfProgAddListener) {
throw new UnsupportedOperationException(MessageFormat.format(
nls.getString("Unsupported.op.from.servlet.context.listener"),
new Object[] {"getJspConfigDescriptor", lastProgAddListenerInitialized, getApplicationName()})); // PI41941
}
JspConfigDescriptorImpl jspConfigDescriptor = new JspConfigDescriptorImpl(this);
if (jspConfigDescriptor.getJspPropertyGroups().isEmpty() && jspConfigDescriptor.getTaglibs().isEmpty()) {
return null; // depends on control dependency: [if], data = [none]
}
return jspConfigDescriptor;
} }
|
public class class_name {
private void processSpecialSubjects(ConfigurationAdmin configAdmin,
String roleName,
Dictionary<String, Object> roleProps, Set<String> pids) {
String[] specialSubjectPids = (String[]) roleProps.get(CFG_KEY_SPECIAL_SUBJECT);
if (specialSubjectPids == null || specialSubjectPids.length == 0) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "No special subjects in role " + roleName);
}
} else {
Set<String> badSpecialSubjects = new HashSet<String>();
for (int i = 0; i < specialSubjectPids.length; i++) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "special subject pid " + i + ": " + specialSubjectPids[i]);
}
pids.add(specialSubjectPids[i]);
Configuration specialSubjectConfig = null;
try {
specialSubjectConfig = configAdmin.getConfiguration(specialSubjectPids[i], bundleLocation);
} catch (IOException ioe) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Invalid special subject entry " + specialSubjectPids[i]);
}
continue;
}
if (specialSubjectConfig == null || specialSubjectConfig.getProperties() == null) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Null special subject element", specialSubjectPids[i]);
}
continue;
}
Dictionary<String, Object> specialSubjectProps = specialSubjectConfig.getProperties();
final String type = (String) specialSubjectProps.get("type");
if (type == null || type.trim().isEmpty()) {
continue;
}
if (badSpecialSubjects.contains(type)) {
// This special subject is already flagged as a duplicate
continue;
}
// TODO: check for invalid type
if (type.trim().isEmpty()) {
// Empty entry, ignoring
continue;
}
if (!specialSubjects.add(type)) {
Tr.error(tc, "AUTHZ_TABLE_DUPLICATE_ROLE_MEMBER", getRoleName(), CFG_KEY_SPECIAL_SUBJECT, type);
badSpecialSubjects.add(type);
specialSubjects.remove(type);
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Role " + roleName + " has special subjects:", specialSubjects);
}
}
} }
|
public class class_name {
private void processSpecialSubjects(ConfigurationAdmin configAdmin,
String roleName,
Dictionary<String, Object> roleProps, Set<String> pids) {
String[] specialSubjectPids = (String[]) roleProps.get(CFG_KEY_SPECIAL_SUBJECT);
if (specialSubjectPids == null || specialSubjectPids.length == 0) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "No special subjects in role " + roleName); // depends on control dependency: [if], data = [none]
}
} else {
Set<String> badSpecialSubjects = new HashSet<String>();
for (int i = 0; i < specialSubjectPids.length; i++) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "special subject pid " + i + ": " + specialSubjectPids[i]); // depends on control dependency: [if], data = [none]
}
pids.add(specialSubjectPids[i]); // depends on control dependency: [for], data = [i]
Configuration specialSubjectConfig = null;
try {
specialSubjectConfig = configAdmin.getConfiguration(specialSubjectPids[i], bundleLocation); // depends on control dependency: [try], data = [none]
} catch (IOException ioe) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Invalid special subject entry " + specialSubjectPids[i]); // depends on control dependency: [if], data = [none]
}
continue;
} // depends on control dependency: [catch], data = [none]
if (specialSubjectConfig == null || specialSubjectConfig.getProperties() == null) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Null special subject element", specialSubjectPids[i]); // depends on control dependency: [if], data = [none]
}
continue;
}
Dictionary<String, Object> specialSubjectProps = specialSubjectConfig.getProperties();
final String type = (String) specialSubjectProps.get("type");
if (type == null || type.trim().isEmpty()) {
continue;
}
if (badSpecialSubjects.contains(type)) {
// This special subject is already flagged as a duplicate
continue;
}
// TODO: check for invalid type
if (type.trim().isEmpty()) {
// Empty entry, ignoring
continue;
}
if (!specialSubjects.add(type)) {
Tr.error(tc, "AUTHZ_TABLE_DUPLICATE_ROLE_MEMBER", getRoleName(), CFG_KEY_SPECIAL_SUBJECT, type); // depends on control dependency: [if], data = [none]
badSpecialSubjects.add(type); // depends on control dependency: [if], data = [none]
specialSubjects.remove(type); // depends on control dependency: [if], data = [none]
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Role " + roleName + " has special subjects:", specialSubjects); // depends on control dependency: [if], data = [none]
}
}
} }
|
public class class_name {
public Set<GraphObject> run()
{
/**
* Candidate contains all the graph objects that are the results of BFS.
* Eliminating nodes from candidate according to their labels will
* yield result.
*/
Map<GraphObject, Integer> candidate = new HashMap<GraphObject, Integer>();
Set<GraphObject> result = new HashSet<GraphObject>();
BFS bfsFwd = null;
BFS bfsRev = null;
if (limitType == LimitType.NORMAL && !strict)
{
bfsFwd = new BFS(sourceSet, null, Direction.DOWNSTREAM, stopDistance);
bfsRev = new BFS(targetSet, null, Direction.UPSTREAM, stopDistance);
}
else if (limitType == LimitType.NORMAL && strict)
{
bfsFwd = new BFS(sourceSet, targetSet, Direction.DOWNSTREAM, stopDistance);
bfsRev = new BFS(targetSet, sourceSet, Direction.UPSTREAM, stopDistance);
}
else if (limitType == LimitType.SHORTEST_PLUS_K && !strict)
{
bfsFwd = new BFS(sourceSet, null, Direction.DOWNSTREAM, LIMIT_FOR_SP_SEARCH);
bfsRev = new BFS(targetSet, null, Direction.UPSTREAM, LIMIT_FOR_SP_SEARCH);
}
else if (limitType == LimitType.SHORTEST_PLUS_K && strict)
{
bfsFwd = new BFS(sourceSet, targetSet, Direction.DOWNSTREAM, LIMIT_FOR_SP_SEARCH);
bfsRev = new BFS(targetSet, sourceSet, Direction.UPSTREAM, LIMIT_FOR_SP_SEARCH);
}
candidate.putAll(bfsFwd.run());
candidate.putAll(bfsRev.run());
int limit = stopDistance;
if(limitType == LimitType.NORMAL)
{
/**
* Only the graph objects whose sum of two search labels being
* smaller than or equal to the distance limit will be in the result.
*/
for (GraphObject go : candidate.keySet())
{
if ((bfsFwd.getLabel(go) + bfsRev.getLabel(go)) <= limit)
{
result.add(go);
}
}
}
else
{
int shortestPath = Integer.MAX_VALUE;
/**
* Summing up the labels of two search will give the length of the
* path that passes through that particular graph object and the
* minimum of those lengths will be the length of the shortest path.
*/
for (GraphObject go : candidate.keySet())
{
if ((bfsFwd.getLabel(go) + bfsRev.getLabel(go)) <= shortestPath)
{
shortestPath = (bfsFwd.getLabel(go) + bfsRev.getLabel(go));
}
}
limit = shortestPath + stopDistance;
// Proceed only if there is a shortest path found
if (shortestPath < Integer.MAX_VALUE / 2)
{
/**
* Only the graph objects whose sum of two search labels being
* smaller than or equal to the "shortest + limit" will be in the
* result.
*/
for (GraphObject go : candidate.keySet())
{
if ((bfsFwd.getLabel(go) + bfsRev.getLabel(go)) <= limit)
{
result.add(go);
}
}
}
}
Set<Node> ST = new HashSet<Node>(sourceSet);
ST.addAll(targetSet);
CycleBreaker breaker = new CycleBreaker(result, ST, limit);
breaker.breakCycles();
Prune prune = new Prune(result, ST);
prune.run();
return result;
} }
|
public class class_name {
public Set<GraphObject> run()
{
/**
* Candidate contains all the graph objects that are the results of BFS.
* Eliminating nodes from candidate according to their labels will
* yield result.
*/
Map<GraphObject, Integer> candidate = new HashMap<GraphObject, Integer>();
Set<GraphObject> result = new HashSet<GraphObject>();
BFS bfsFwd = null;
BFS bfsRev = null;
if (limitType == LimitType.NORMAL && !strict)
{
bfsFwd = new BFS(sourceSet, null, Direction.DOWNSTREAM, stopDistance);
// depends on control dependency: [if], data = [none]
bfsRev = new BFS(targetSet, null, Direction.UPSTREAM, stopDistance);
// depends on control dependency: [if], data = [none]
}
else if (limitType == LimitType.NORMAL && strict)
{
bfsFwd = new BFS(sourceSet, targetSet, Direction.DOWNSTREAM, stopDistance);
// depends on control dependency: [if], data = [none]
bfsRev = new BFS(targetSet, sourceSet, Direction.UPSTREAM, stopDistance);
// depends on control dependency: [if], data = [none]
}
else if (limitType == LimitType.SHORTEST_PLUS_K && !strict)
{
bfsFwd = new BFS(sourceSet, null, Direction.DOWNSTREAM, LIMIT_FOR_SP_SEARCH);
// depends on control dependency: [if], data = [none]
bfsRev = new BFS(targetSet, null, Direction.UPSTREAM, LIMIT_FOR_SP_SEARCH);
// depends on control dependency: [if], data = [none]
}
else if (limitType == LimitType.SHORTEST_PLUS_K && strict)
{
bfsFwd = new BFS(sourceSet, targetSet, Direction.DOWNSTREAM, LIMIT_FOR_SP_SEARCH);
// depends on control dependency: [if], data = [none]
bfsRev = new BFS(targetSet, sourceSet, Direction.UPSTREAM, LIMIT_FOR_SP_SEARCH);
// depends on control dependency: [if], data = [none]
}
candidate.putAll(bfsFwd.run());
candidate.putAll(bfsRev.run());
int limit = stopDistance;
if(limitType == LimitType.NORMAL)
{
/**
* Only the graph objects whose sum of two search labels being
* smaller than or equal to the distance limit will be in the result.
*/
for (GraphObject go : candidate.keySet())
{
if ((bfsFwd.getLabel(go) + bfsRev.getLabel(go)) <= limit)
{
result.add(go);
// depends on control dependency: [if], data = [none]
}
}
}
else
{
int shortestPath = Integer.MAX_VALUE;
/**
* Summing up the labels of two search will give the length of the
* path that passes through that particular graph object and the
* minimum of those lengths will be the length of the shortest path.
*/
for (GraphObject go : candidate.keySet())
{
if ((bfsFwd.getLabel(go) + bfsRev.getLabel(go)) <= shortestPath)
{
shortestPath = (bfsFwd.getLabel(go) + bfsRev.getLabel(go));
// depends on control dependency: [if], data = [none]
}
}
limit = shortestPath + stopDistance;
// depends on control dependency: [if], data = [none]
// Proceed only if there is a shortest path found
if (shortestPath < Integer.MAX_VALUE / 2)
{
/**
* Only the graph objects whose sum of two search labels being
* smaller than or equal to the "shortest + limit" will be in the
* result.
*/
for (GraphObject go : candidate.keySet())
{
if ((bfsFwd.getLabel(go) + bfsRev.getLabel(go)) <= limit)
{
result.add(go);
// depends on control dependency: [if], data = [none]
}
}
}
}
Set<Node> ST = new HashSet<Node>(sourceSet);
ST.addAll(targetSet);
CycleBreaker breaker = new CycleBreaker(result, ST, limit);
breaker.breakCycles();
Prune prune = new Prune(result, ST);
prune.run();
return result;
} }
|
public class class_name {
@Override
public void nextPage() {
if (!paged) {
this.firstPage();
paged = true;
} else if (!isLast()) {
offset = (getPageNumber() + 1) * pageSize;
execute();
}
} }
|
public class class_name {
@Override
public void nextPage() {
if (!paged) {
this.firstPage(); // depends on control dependency: [if], data = [none]
paged = true; // depends on control dependency: [if], data = [none]
} else if (!isLast()) {
offset = (getPageNumber() + 1) * pageSize; // depends on control dependency: [if], data = [none]
execute(); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
@Override
public void configureConstraintWeight(Rule rule, HardSoftScore constraintWeight) {
super.configureConstraintWeight(rule, constraintWeight);
BiConsumer<RuleContext, Integer> matchExecutor;
if (constraintWeight.equals(HardSoftScore.ZERO)) {
matchExecutor = (RuleContext kcontext, Integer matchWeight) -> {};
} else if (constraintWeight.getSoftScore() == 0) {
matchExecutor = (RuleContext kcontext, Integer matchWeight)
-> addHardConstraintMatch(kcontext, constraintWeight.getHardScore() * matchWeight);
} else if (constraintWeight.getHardScore() == 0) {
matchExecutor = (RuleContext kcontext, Integer matchWeight)
-> addSoftConstraintMatch(kcontext, constraintWeight.getSoftScore() * matchWeight);
} else {
matchExecutor = (RuleContext kcontext, Integer matchWeight)
-> addMultiConstraintMatch(kcontext,
constraintWeight.getHardScore() * matchWeight,
constraintWeight.getSoftScore() * matchWeight);
}
matchExecutorByNumberMap.put(rule, matchExecutor);
matchExecutorByScoreMap.put(rule, (RuleContext kcontext,
HardSoftScore weightMultiplier) -> addMultiConstraintMatch(kcontext,
constraintWeight.getHardScore() * weightMultiplier.getHardScore(),
constraintWeight.getSoftScore() * weightMultiplier.getSoftScore()));
} }
|
public class class_name {
@Override
public void configureConstraintWeight(Rule rule, HardSoftScore constraintWeight) {
super.configureConstraintWeight(rule, constraintWeight);
BiConsumer<RuleContext, Integer> matchExecutor;
if (constraintWeight.equals(HardSoftScore.ZERO)) {
matchExecutor = (RuleContext kcontext, Integer matchWeight) -> {};
} else if (constraintWeight.getSoftScore() == 0) {
matchExecutor = (RuleContext kcontext, Integer matchWeight)
-> addHardConstraintMatch(kcontext, constraintWeight.getHardScore() * matchWeight); // depends on control dependency: [if], data = [none]
} else if (constraintWeight.getHardScore() == 0) {
matchExecutor = (RuleContext kcontext, Integer matchWeight)
-> addSoftConstraintMatch(kcontext, constraintWeight.getSoftScore() * matchWeight); // depends on control dependency: [if], data = [none]
} else {
matchExecutor = (RuleContext kcontext, Integer matchWeight)
-> addMultiConstraintMatch(kcontext,
constraintWeight.getHardScore() * matchWeight,
constraintWeight.getSoftScore() * matchWeight); // depends on control dependency: [if], data = [none]
}
matchExecutorByNumberMap.put(rule, matchExecutor);
matchExecutorByScoreMap.put(rule, (RuleContext kcontext,
HardSoftScore weightMultiplier) -> addMultiConstraintMatch(kcontext,
constraintWeight.getHardScore() * weightMultiplier.getHardScore(),
constraintWeight.getSoftScore() * weightMultiplier.getSoftScore()));
} }
|
public class class_name {
public static void stopJob(String jobName, String key, String pValue)
throws SundialSchedulerException {
logger.debug("key= " + key);
logger.debug("value= " + pValue);
try {
List<JobExecutionContext> currentlyExecutingJobs = getScheduler().getCurrentlyExecutingJobs();
for (JobExecutionContext jobExecutionContext : currentlyExecutingJobs) {
String currentlyExecutingJobName = jobExecutionContext.getJobDetail().getName();
if (currentlyExecutingJobName.equals(jobName)) {
if (jobExecutionContext.getJobInstance() instanceof Job) {
JobDataMap jobDataMap = jobExecutionContext.getMergedJobDataMap();
String value = jobDataMap.getString(key);
if (value != null & value.equalsIgnoreCase(pValue)) {
((Job) jobExecutionContext.getJobInstance()).interrupt();
}
} else {
logger.warn("CANNOT STOP NON-INTERRUPTABLE JOB!!!");
}
} else {
logger.debug("Non-matching Job found. Not Stopping!");
}
}
} catch (SchedulerException e) {
throw new SundialSchedulerException("ERROR DURING STOP JOB!!!", e);
}
} }
|
public class class_name {
public static void stopJob(String jobName, String key, String pValue)
throws SundialSchedulerException {
logger.debug("key= " + key);
logger.debug("value= " + pValue);
try {
List<JobExecutionContext> currentlyExecutingJobs = getScheduler().getCurrentlyExecutingJobs();
for (JobExecutionContext jobExecutionContext : currentlyExecutingJobs) {
String currentlyExecutingJobName = jobExecutionContext.getJobDetail().getName();
if (currentlyExecutingJobName.equals(jobName)) {
if (jobExecutionContext.getJobInstance() instanceof Job) {
JobDataMap jobDataMap = jobExecutionContext.getMergedJobDataMap();
String value = jobDataMap.getString(key);
if (value != null & value.equalsIgnoreCase(pValue)) {
((Job) jobExecutionContext.getJobInstance()).interrupt();
// depends on control dependency: [if], data = [none]
}
} else {
logger.warn("CANNOT STOP NON-INTERRUPTABLE JOB!!!");
// depends on control dependency: [if], data = [none]
}
} else {
logger.debug("Non-matching Job found. Not Stopping!");
// depends on control dependency: [if], data = [none]
}
}
} catch (SchedulerException e) {
throw new SundialSchedulerException("ERROR DURING STOP JOB!!!", e);
}
} }
|
public class class_name {
private String englishLanguageFirstSentence(String s) {
if (s == null) {
return null;
}
int len = s.length();
boolean period = false;
for (int i = 0 ; i < len ; i++) {
switch (s.charAt(i)) {
case '.':
period = true;
break;
case ' ':
case '\t':
case '\n':
case '\r':
case '\f':
if (period) {
return s.substring(0, i);
}
break;
case '<':
if (i > 0) {
if (htmlSentenceTerminatorFound(s, i)) {
return s.substring(0, i);
}
}
break;
default:
period = false;
}
}
return s;
} }
|
public class class_name {
private String englishLanguageFirstSentence(String s) {
if (s == null) {
return null; // depends on control dependency: [if], data = [none]
}
int len = s.length();
boolean period = false;
for (int i = 0 ; i < len ; i++) {
switch (s.charAt(i)) { // depends on control dependency: [for], data = [i]
case '.':
period = true;
break;
case ' ':
case '\t':
case '\n':
case '\r':
case '\f':
if (period) {
return s.substring(0, i); // depends on control dependency: [if], data = [none]
}
break;
case '<':
if (i > 0) {
if (htmlSentenceTerminatorFound(s, i)) {
return s.substring(0, i); // depends on control dependency: [if], data = [none]
}
}
break;
default:
period = false;
}
}
return s;
} }
|
public class class_name {
public static void shutdownAll() {
for (HazelcastClientProxy proxy : CLIENTS.values()) {
HazelcastClientInstanceImpl client = proxy.client;
if (client == null) {
continue;
}
proxy.client = null;
try {
client.shutdown();
} catch (Throwable ignored) {
EmptyStatement.ignore(ignored);
}
}
OutOfMemoryErrorDispatcher.clearClients();
CLIENTS.clear();
} }
|
public class class_name {
public static void shutdownAll() {
for (HazelcastClientProxy proxy : CLIENTS.values()) {
HazelcastClientInstanceImpl client = proxy.client;
if (client == null) {
continue;
}
proxy.client = null; // depends on control dependency: [for], data = [proxy]
try {
client.shutdown(); // depends on control dependency: [try], data = [none]
} catch (Throwable ignored) {
EmptyStatement.ignore(ignored);
} // depends on control dependency: [catch], data = [none]
}
OutOfMemoryErrorDispatcher.clearClients();
CLIENTS.clear();
} }
|
public class class_name {
public String dump(final boolean printDateTime, final boolean includeNamespacePrefix) {
final StringWriter sOut = new StringWriter();
final PrintWriter out = new PrintWriter(sOut);
if (printDateTime) {
// output a timestamp comment
final SimpleDateFormat timeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
out.format("#%s\n", timeFormat.format(new Date()));
}
List<String> keys = new ArrayList<>(params.keySet());
Collections.sort(keys);
for (final String rawKey : keys) {
final String key;
if (includeNamespacePrefix) {
key = fullString(rawKey);
} else {
key = rawKey;
}
out.format("%s: %s\n", key, params.get(rawKey));
}
out.close();
return sOut.toString();
} }
|
public class class_name {
public String dump(final boolean printDateTime, final boolean includeNamespacePrefix) {
final StringWriter sOut = new StringWriter();
final PrintWriter out = new PrintWriter(sOut);
if (printDateTime) {
// output a timestamp comment
final SimpleDateFormat timeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
out.format("#%s\n", timeFormat.format(new Date())); // depends on control dependency: [if], data = [none]
}
List<String> keys = new ArrayList<>(params.keySet());
Collections.sort(keys);
for (final String rawKey : keys) {
final String key;
if (includeNamespacePrefix) {
key = fullString(rawKey); // depends on control dependency: [if], data = [none]
} else {
key = rawKey; // depends on control dependency: [if], data = [none]
}
out.format("%s: %s\n", key, params.get(rawKey)); // depends on control dependency: [for], data = [none] // depends on control dependency: [for], data = [rawKey]
}
out.close();
return sOut.toString();
} }
|
public class class_name {
public char getCharAt(int c, int r)
{
Character character = data.get((long) c, (long) r);
if (character == null)
{
return ' ';
}
else
{
return character;
}
} }
|
public class class_name {
public char getCharAt(int c, int r)
{
Character character = data.get((long) c, (long) r);
if (character == null)
{
return ' '; // depends on control dependency: [if], data = [none]
}
else
{
return character; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public static INDArray[] adjustSchemeToBounds(INDArray x,INDArray h,int numSteps,INDArray lowerBound,INDArray upperBound) {
INDArray oneSided = Nd4j.onesLike(h);
if(and(lowerBound.eq(Double.NEGATIVE_INFINITY),upperBound.eq(Double.POSITIVE_INFINITY)).sumNumber().doubleValue() > 0) {
return new INDArray[] {h,oneSided};
}
INDArray hTotal = h.mul(numSteps);
INDArray hAdjusted = h.dup();
INDArray lowerDist = x.sub(lowerBound);
INDArray upperBound2 = upperBound.sub(x);
INDArray central = and(greaterThanOrEqual(lowerDist,hTotal),greaterThanOrEqual(upperBound2,hTotal));
INDArray forward = and(greaterThanOrEqual(upperBound,lowerDist),not(central));
hAdjusted.put(forward,min(h.get(forward),upperBound2.get(forward).mul(0.5).divi(numSteps)));
oneSided.put(forward,Nd4j.scalar(1.0));
INDArray backward = and(upperBound2.lt(lowerBound),not(central));
hAdjusted.put(backward,min(h.get(backward),lowerDist.get(backward).mul(0.5).divi(numSteps)));
oneSided.put(backward,Nd4j.scalar(1.0));
INDArray minDist = min(upperBound2,lowerDist).divi(numSteps);
INDArray adjustedCentral = and(not(central),lessThanOrEqual(abs(hAdjusted),minDist));
hAdjusted.put(adjustedCentral,minDist.get(adjustedCentral));
oneSided.put(adjustedCentral,Nd4j.scalar(0.0));
return new INDArray[] {hAdjusted,oneSided};
} }
|
public class class_name {
public static INDArray[] adjustSchemeToBounds(INDArray x,INDArray h,int numSteps,INDArray lowerBound,INDArray upperBound) {
INDArray oneSided = Nd4j.onesLike(h);
if(and(lowerBound.eq(Double.NEGATIVE_INFINITY),upperBound.eq(Double.POSITIVE_INFINITY)).sumNumber().doubleValue() > 0) {
return new INDArray[] {h,oneSided}; // depends on control dependency: [if], data = [none]
}
INDArray hTotal = h.mul(numSteps);
INDArray hAdjusted = h.dup();
INDArray lowerDist = x.sub(lowerBound);
INDArray upperBound2 = upperBound.sub(x);
INDArray central = and(greaterThanOrEqual(lowerDist,hTotal),greaterThanOrEqual(upperBound2,hTotal));
INDArray forward = and(greaterThanOrEqual(upperBound,lowerDist),not(central));
hAdjusted.put(forward,min(h.get(forward),upperBound2.get(forward).mul(0.5).divi(numSteps)));
oneSided.put(forward,Nd4j.scalar(1.0));
INDArray backward = and(upperBound2.lt(lowerBound),not(central));
hAdjusted.put(backward,min(h.get(backward),lowerDist.get(backward).mul(0.5).divi(numSteps)));
oneSided.put(backward,Nd4j.scalar(1.0));
INDArray minDist = min(upperBound2,lowerDist).divi(numSteps);
INDArray adjustedCentral = and(not(central),lessThanOrEqual(abs(hAdjusted),minDist));
hAdjusted.put(adjustedCentral,minDist.get(adjustedCentral));
oneSided.put(adjustedCentral,Nd4j.scalar(0.0));
return new INDArray[] {hAdjusted,oneSided};
} }
|
public class class_name {
private void initPlatformSEorEE() {
String seMode = serviceImplClassNames.get(Name.JAVA_EDITION_IS_SE_DUMMY_SERVICE);
if (seMode.equalsIgnoreCase("true")) {
platformMode = PlatformMode.SE;
batchConfigImpl.setJ2seMode(true);
} else {
platformMode = PlatformMode.EE;
batchConfigImpl.setJ2seMode(false);
}
} }
|
public class class_name {
private void initPlatformSEorEE() {
String seMode = serviceImplClassNames.get(Name.JAVA_EDITION_IS_SE_DUMMY_SERVICE);
if (seMode.equalsIgnoreCase("true")) {
platformMode = PlatformMode.SE;
// depends on control dependency: [if], data = [none]
batchConfigImpl.setJ2seMode(true);
// depends on control dependency: [if], data = [none]
} else {
platformMode = PlatformMode.EE;
// depends on control dependency: [if], data = [none]
batchConfigImpl.setJ2seMode(false);
// depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
AsteriskQueueImpl getQueueByName(String queueName)
{
refreshQueueIfForced(queueName);
AsteriskQueueImpl queue = getInternalQueueByName(queueName);
if (queue == null)
{
logger.error("Requested queue '" + queueName + "' not found!");
}
return queue;
} }
|
public class class_name {
AsteriskQueueImpl getQueueByName(String queueName)
{
refreshQueueIfForced(queueName);
AsteriskQueueImpl queue = getInternalQueueByName(queueName);
if (queue == null)
{
logger.error("Requested queue '" + queueName + "' not found!"); // depends on control dependency: [if], data = [none]
}
return queue;
} }
|
public class class_name {
public static Method[] getMethods(Class<?> clazz, String methodName, Class<?>[] expectedTypes,
boolean exactParameterTypeMatch) {
List<Method> matchingArgumentTypes = new LinkedList<Method>();
Method[] methods = getMethods(clazz, methodName);
for (Method method : methods) {
final Class<?>[] parameterTypes = method.getParameterTypes();
if (checkIfParameterTypesAreSame(method.isVarArgs(), expectedTypes, parameterTypes)
|| (!exactParameterTypeMatch && checkIfParameterTypesAreSame(method.isVarArgs(),
convertParameterTypesToPrimitive(expectedTypes), parameterTypes))) {
matchingArgumentTypes.add(method);
}
}
final Method[] methodArray = matchingArgumentTypes.toArray(new Method[0]);
if (methodArray.length == 0) {
throw new MethodNotFoundException(String.format(
"No methods matching the name(s) %s were found in the class hierarchy of %s.",
concatenateStrings(methodName), getType(clazz)));
}
return matchingArgumentTypes.toArray(new Method[matchingArgumentTypes.size()]);
} }
|
public class class_name {
public static Method[] getMethods(Class<?> clazz, String methodName, Class<?>[] expectedTypes,
boolean exactParameterTypeMatch) {
List<Method> matchingArgumentTypes = new LinkedList<Method>();
Method[] methods = getMethods(clazz, methodName);
for (Method method : methods) {
final Class<?>[] parameterTypes = method.getParameterTypes();
if (checkIfParameterTypesAreSame(method.isVarArgs(), expectedTypes, parameterTypes)
|| (!exactParameterTypeMatch && checkIfParameterTypesAreSame(method.isVarArgs(),
convertParameterTypesToPrimitive(expectedTypes), parameterTypes))) {
matchingArgumentTypes.add(method); // depends on control dependency: [if], data = [none]
}
}
final Method[] methodArray = matchingArgumentTypes.toArray(new Method[0]);
if (methodArray.length == 0) {
throw new MethodNotFoundException(String.format(
"No methods matching the name(s) %s were found in the class hierarchy of %s.",
concatenateStrings(methodName), getType(clazz)));
}
return matchingArgumentTypes.toArray(new Method[matchingArgumentTypes.size()]);
} }
|
public class class_name {
protected Throwable decomposeException(Exception e) {
if (e instanceof IllegalStateException) {
return (IllegalStateException) e;
}
if (e instanceof HystrixBadRequestException) {
if (shouldNotBeWrapped(e.getCause())) {
return e.getCause();
}
return (HystrixBadRequestException) e;
}
if (e.getCause() instanceof HystrixBadRequestException) {
if(shouldNotBeWrapped(e.getCause().getCause())) {
return e.getCause().getCause();
}
return (HystrixBadRequestException) e.getCause();
}
if (e instanceof HystrixRuntimeException) {
return (HystrixRuntimeException) e;
}
// if we have an exception we know about we'll throw it directly without the wrapper exception
if (e.getCause() instanceof HystrixRuntimeException) {
return (HystrixRuntimeException) e.getCause();
}
if (shouldNotBeWrapped(e)) {
return e;
}
if (shouldNotBeWrapped(e.getCause())) {
return e.getCause();
}
// we don't know what kind of exception this is so create a generic message and throw a new HystrixRuntimeException
String message = getLogMessagePrefix() + " failed while executing.";
logger.debug(message, e); // debug only since we're throwing the exception and someone higher will do something with it
return new HystrixRuntimeException(FailureType.COMMAND_EXCEPTION, this.getClass(), message, e, null);
} }
|
public class class_name {
protected Throwable decomposeException(Exception e) {
if (e instanceof IllegalStateException) {
return (IllegalStateException) e; // depends on control dependency: [if], data = [none]
}
if (e instanceof HystrixBadRequestException) {
if (shouldNotBeWrapped(e.getCause())) {
return e.getCause(); // depends on control dependency: [if], data = [none]
}
return (HystrixBadRequestException) e; // depends on control dependency: [if], data = [none]
}
if (e.getCause() instanceof HystrixBadRequestException) {
if(shouldNotBeWrapped(e.getCause().getCause())) {
return e.getCause().getCause(); // depends on control dependency: [if], data = [none]
}
return (HystrixBadRequestException) e.getCause(); // depends on control dependency: [if], data = [none]
}
if (e instanceof HystrixRuntimeException) {
return (HystrixRuntimeException) e; // depends on control dependency: [if], data = [none]
}
// if we have an exception we know about we'll throw it directly without the wrapper exception
if (e.getCause() instanceof HystrixRuntimeException) {
return (HystrixRuntimeException) e.getCause(); // depends on control dependency: [if], data = [none]
}
if (shouldNotBeWrapped(e)) {
return e; // depends on control dependency: [if], data = [none]
}
if (shouldNotBeWrapped(e.getCause())) {
return e.getCause(); // depends on control dependency: [if], data = [none]
}
// we don't know what kind of exception this is so create a generic message and throw a new HystrixRuntimeException
String message = getLogMessagePrefix() + " failed while executing.";
logger.debug(message, e); // debug only since we're throwing the exception and someone higher will do something with it
return new HystrixRuntimeException(FailureType.COMMAND_EXCEPTION, this.getClass(), message, e, null);
} }
|
public class class_name {
public static Object gatRandomItem(final List items) {
if (items == null || items.size() == 0) {
return null;
}
final int itemIndex = (int) (Math.random() * items.size());
return items.get(itemIndex);
} }
|
public class class_name {
public static Object gatRandomItem(final List items) {
if (items == null || items.size() == 0) {
return null;
// depends on control dependency: [if], data = [none]
}
final int itemIndex = (int) (Math.random() * items.size());
return items.get(itemIndex);
} }
|
public class class_name {
private Model buildModel(final MP params, Grid<MP> grid, int paramsIdx, String protoModelKey) {
// Make sure that the model is not yet built (can be case of duplicated hyper parameters).
// We first look in the grid _models cache, then we look in the DKV.
// FIXME: get checksum here since model builder will modify instance of params!!!
final long checksum = params.checksum();
Key<Model> key = grid.getModelKey(checksum);
if (key != null) {
if (DKV.get(key) == null) {
// We know about a model that's been removed; rebuild.
Log.info("GridSearch.buildModel(): model with these parameters was built but removed, rebuilding; checksum: " + checksum);
} else {
Log.info("GridSearch.buildModel(): model with these parameters already exists, skipping; checksum: " + checksum);
return key.get();
}
}
// Is there a model with the same params in the DKV?
@SuppressWarnings("unchecked")
final Key<Model>[] modelKeys = KeySnapshot.globalSnapshot().filter(new KeySnapshot.KVFilter() {
@Override
public boolean filter(KeySnapshot.KeyInfo k) {
if (! Value.isSubclassOf(k._type, Model.class))
return false;
Model m = ((Model)k._key.get());
if ((m == null) || (m._parms == null))
return false;
try {
return m._parms.checksum() == checksum;
} catch (H2OConcurrentModificationException e) {
// We are inspecting model parameters that doesn't belong to us - they might be modified (or deleted) while
// checksum is being calculated: we skip them (see PUBDEV-5286)
Log.warn("GridSearch encountered concurrent modification while searching DKV", e);
return false;
} catch (final RuntimeException e) {
Throwable ex = e;
boolean concurrentModification = false;
while (ex.getCause() != null) {
ex = ex.getCause();
if (ex instanceof H2OConcurrentModificationException) {
concurrentModification = true;
break;
}
}
if (! concurrentModification)
throw e;
Log.warn("GridSearch encountered concurrent modification while searching DKV", e);
return false;
}
}
}).keys();
if (modelKeys.length > 0) {
grid.putModel(checksum, modelKeys[0]);
return modelKeys[0].get();
}
// Modify model key to have nice version with counter
// Note: Cannot create it before checking the cache since checksum would differ for each model
Key<Model> result = Key.make(protoModelKey + paramsIdx);
// Build a new model
// THIS IS BLOCKING call since we do not have enough information about free resources
// FIXME: we should allow here any launching strategy (not only sequential)
assert grid.getModel(params) == null;
Model m = ModelBuilder.trainModelNested(_job, result, params, null);
grid.putModel(checksum, result);
return m;
} }
|
public class class_name {
private Model buildModel(final MP params, Grid<MP> grid, int paramsIdx, String protoModelKey) {
// Make sure that the model is not yet built (can be case of duplicated hyper parameters).
// We first look in the grid _models cache, then we look in the DKV.
// FIXME: get checksum here since model builder will modify instance of params!!!
final long checksum = params.checksum();
Key<Model> key = grid.getModelKey(checksum);
if (key != null) {
if (DKV.get(key) == null) {
// We know about a model that's been removed; rebuild.
Log.info("GridSearch.buildModel(): model with these parameters was built but removed, rebuilding; checksum: " + checksum); // depends on control dependency: [if], data = [none]
} else {
Log.info("GridSearch.buildModel(): model with these parameters already exists, skipping; checksum: " + checksum); // depends on control dependency: [if], data = [none]
return key.get(); // depends on control dependency: [if], data = [none]
}
}
// Is there a model with the same params in the DKV?
@SuppressWarnings("unchecked")
final Key<Model>[] modelKeys = KeySnapshot.globalSnapshot().filter(new KeySnapshot.KVFilter() {
@Override
public boolean filter(KeySnapshot.KeyInfo k) {
if (! Value.isSubclassOf(k._type, Model.class))
return false;
Model m = ((Model)k._key.get());
if ((m == null) || (m._parms == null))
return false;
try {
return m._parms.checksum() == checksum; // depends on control dependency: [try], data = [none]
} catch (H2OConcurrentModificationException e) {
// We are inspecting model parameters that doesn't belong to us - they might be modified (or deleted) while
// checksum is being calculated: we skip them (see PUBDEV-5286)
Log.warn("GridSearch encountered concurrent modification while searching DKV", e);
return false;
} catch (final RuntimeException e) { // depends on control dependency: [catch], data = [none]
Throwable ex = e;
boolean concurrentModification = false;
while (ex.getCause() != null) {
ex = ex.getCause(); // depends on control dependency: [while], data = [none]
if (ex instanceof H2OConcurrentModificationException) {
concurrentModification = true; // depends on control dependency: [if], data = [none]
break;
}
}
if (! concurrentModification)
throw e;
Log.warn("GridSearch encountered concurrent modification while searching DKV", e);
return false;
} // depends on control dependency: [catch], data = [none]
}
}).keys();
if (modelKeys.length > 0) {
grid.putModel(checksum, modelKeys[0]); // depends on control dependency: [if], data = [none]
return modelKeys[0].get(); // depends on control dependency: [if], data = [none]
}
// Modify model key to have nice version with counter
// Note: Cannot create it before checking the cache since checksum would differ for each model
Key<Model> result = Key.make(protoModelKey + paramsIdx);
// Build a new model
// THIS IS BLOCKING call since we do not have enough information about free resources
// FIXME: we should allow here any launching strategy (not only sequential)
assert grid.getModel(params) == null;
Model m = ModelBuilder.trainModelNested(_job, result, params, null);
grid.putModel(checksum, result);
return m;
} }
|
public class class_name {
protected Selector selector() throws ParserException {
int initialPosition = pos;
StringBuilder sb = new StringBuilder();
List<SimpleSelectorSequence> simpleSelectors = new ArrayList<SimpleSelectorSequence>();
List<Combinator> combinators = new ArrayList<Combinator>();
int lastChar = pos;
while (!end()) {
//finds combinator, but not in the first iteration
Combinator combinator = null;
if (!simpleSelectors.isEmpty()) {
//stores if it has spaces until the next token
boolean hasWhitespace = false;
if (!end() && Character.isWhitespace(current)) {
hasWhitespace = true;
ignoreWhitespaces();
}
if (!end()) {
//implements "combinator" rule
if (current == '+') combinator = Combinator.ADJASCENT_SIBLING;
else if (current == '>') combinator = Combinator.CHILD;
else if (current == '~') combinator = Combinator.GENERAL_SIBLING;
//if hasn't any but spaces
else if (hasWhitespace) combinator = Combinator.DESCENDANT;
//is it the end?
if (combinator == null || current == ',') {
break;
}
//don't advance because spaces were just advanced
if (combinator != Combinator.DESCENDANT) {
sb.append(current);
next();
} else {
sb.append(' ');
}
ignoreWhitespaces();
if (end()) {
throw new ParserException("Unexpected end of selector at position " + pos);
}
} else {
break;
}
combinators.add(combinator);
}
//get next sequence
SimpleSelectorSequence simpleSelectorSequence = simpleSelectorSequence();
sb.append(simpleSelectorSequence);
//sends combinator here (the first case it's null)
simpleSelectors.add(simpleSelectorSequence);
parserListener.selectorSequence(simpleSelectorSequence, combinator);
lastChar = pos;
}
return new Selector(simpleSelectors, combinators, new Context(content, sb.toString(), initialPosition, lastChar));
} }
|
public class class_name {
protected Selector selector() throws ParserException {
int initialPosition = pos;
StringBuilder sb = new StringBuilder();
List<SimpleSelectorSequence> simpleSelectors = new ArrayList<SimpleSelectorSequence>();
List<Combinator> combinators = new ArrayList<Combinator>();
int lastChar = pos;
while (!end()) {
//finds combinator, but not in the first iteration
Combinator combinator = null;
if (!simpleSelectors.isEmpty()) {
//stores if it has spaces until the next token
boolean hasWhitespace = false;
if (!end() && Character.isWhitespace(current)) {
hasWhitespace = true;
// depends on control dependency: [if], data = [none]
ignoreWhitespaces();
// depends on control dependency: [if], data = [none]
}
if (!end()) {
//implements "combinator" rule
if (current == '+') combinator = Combinator.ADJASCENT_SIBLING;
else if (current == '>') combinator = Combinator.CHILD;
else if (current == '~') combinator = Combinator.GENERAL_SIBLING;
//if hasn't any but spaces
else if (hasWhitespace) combinator = Combinator.DESCENDANT;
//is it the end?
if (combinator == null || current == ',') {
break;
}
//don't advance because spaces were just advanced
if (combinator != Combinator.DESCENDANT) {
sb.append(current);
// depends on control dependency: [if], data = [none]
next();
// depends on control dependency: [if], data = [none]
} else {
sb.append(' ');
// depends on control dependency: [if], data = [none]
}
ignoreWhitespaces();
// depends on control dependency: [if], data = [none]
if (end()) {
throw new ParserException("Unexpected end of selector at position " + pos);
}
} else {
break;
}
combinators.add(combinator);
// depends on control dependency: [if], data = [none]
}
//get next sequence
SimpleSelectorSequence simpleSelectorSequence = simpleSelectorSequence();
sb.append(simpleSelectorSequence);
//sends combinator here (the first case it's null)
simpleSelectors.add(simpleSelectorSequence);
parserListener.selectorSequence(simpleSelectorSequence, combinator);
lastChar = pos;
}
return new Selector(simpleSelectors, combinators, new Context(content, sb.toString(), initialPosition, lastChar));
} }
|
public class class_name {
public static int toShort(byte[] b, int off, boolean littleEndian) {
if (littleEndian) {
return ((b[off] & 0xFF) | ((b[off + 1] & 0xFF) << 8));
}
return (((b[off] & 0xFF) << 8) | (b[off + 1] & 0xFF));
} }
|
public class class_name {
public static int toShort(byte[] b, int off, boolean littleEndian) {
if (littleEndian) {
return ((b[off] & 0xFF) | ((b[off + 1] & 0xFF) << 8));
// depends on control dependency: [if], data = [none]
}
return (((b[off] & 0xFF) << 8) | (b[off + 1] & 0xFF));
} }
|
public class class_name {
@Nullable
public final Node getAncestor(int level) {
checkArgument(level >= 0);
Node node = this;
while (node != null && level-- > 0) {
node = node.getParent();
}
return node;
} }
|
public class class_name {
@Nullable
public final Node getAncestor(int level) {
checkArgument(level >= 0);
Node node = this;
while (node != null && level-- > 0) {
node = node.getParent(); // depends on control dependency: [while], data = [none]
}
return node;
} }
|
public class class_name {
public static DeploymentReflectionIndex create() {
final SecurityManager sm = System.getSecurityManager();
if (sm != null) {
sm.checkPermission(ServerPermission.CREATE_DEPLOYMENT_REFLECTION_INDEX);
}
return new DeploymentReflectionIndex();
} }
|
public class class_name {
public static DeploymentReflectionIndex create() {
final SecurityManager sm = System.getSecurityManager();
if (sm != null) {
sm.checkPermission(ServerPermission.CREATE_DEPLOYMENT_REFLECTION_INDEX); // depends on control dependency: [if], data = [none]
}
return new DeploymentReflectionIndex();
} }
|
public class class_name {
public IntervalCollection<T> withComplement(ChronoInterval<T> timeWindow) {
if (timeWindow.isEmpty()) {
List<ChronoInterval<T>> zero = Collections.emptyList();
return this.create(zero);
}
ChronoInterval<T> window = this.adjust(timeWindow);
IntervalCollection<T> coll = this.withFilter(window);
if (coll.isEmpty()) {
return this.create(Collections.singletonList(window));
}
Boundary<T> lower = window.getStart();
Boundary<T> upper = window.getEnd();
List<ChronoInterval<T>> gaps = new ArrayList<>();
// left edge
T min = coll.getMinimum();
if (min != null) {
if (lower.isInfinite()) {
this.addLeft(gaps, min);
} else {
T s = lower.getTemporal();
if (lower.isOpen()) {
s = this.getTimeLine().stepBackwards(s);
if (s == null) {
this.addLeft(gaps, min);
} else {
this.addLeft(gaps, s, min);
}
} else {
this.addLeft(gaps, s, min);
}
}
}
// inner gaps
gaps.addAll(coll.withGaps().getIntervals());
// right edge
T max = coll.getMaximum();
if (max != null) {
T s = this.getTimeLine().stepForward(max);
if (s != null) {
Boundary<T> bs = Boundary.ofClosed(s);
Boundary<T> be;
if (upper.isInfinite()) {
be = upper;
gaps.add(this.newInterval(bs, be));
} else if (this.isCalendrical()) {
if (upper.isClosed()) {
be = upper;
} else {
T e = upper.getTemporal();
e = this.getTimeLine().stepBackwards(e);
be = Boundary.ofClosed(e);
}
if (!this.isAfter(s, be.getTemporal())) {
gaps.add(this.newInterval(bs, be));
}
} else {
if (upper.isOpen()) {
be = upper;
} else {
T e = upper.getTemporal();
e = this.getTimeLine().stepForward(e);
if (e == null) {
be = Boundary.infiniteFuture();
} else {
be = Boundary.ofOpen(e);
}
}
if (this.isBefore(s, be.getTemporal())) {
gaps.add(this.newInterval(bs, be));
}
}
}
}
return this.create(gaps);
} }
|
public class class_name {
public IntervalCollection<T> withComplement(ChronoInterval<T> timeWindow) {
if (timeWindow.isEmpty()) {
List<ChronoInterval<T>> zero = Collections.emptyList();
return this.create(zero); // depends on control dependency: [if], data = [none]
}
ChronoInterval<T> window = this.adjust(timeWindow);
IntervalCollection<T> coll = this.withFilter(window);
if (coll.isEmpty()) {
return this.create(Collections.singletonList(window)); // depends on control dependency: [if], data = [none]
}
Boundary<T> lower = window.getStart();
Boundary<T> upper = window.getEnd();
List<ChronoInterval<T>> gaps = new ArrayList<>();
// left edge
T min = coll.getMinimum();
if (min != null) {
if (lower.isInfinite()) {
this.addLeft(gaps, min); // depends on control dependency: [if], data = [none]
} else {
T s = lower.getTemporal();
if (lower.isOpen()) {
s = this.getTimeLine().stepBackwards(s); // depends on control dependency: [if], data = [none]
if (s == null) {
this.addLeft(gaps, min); // depends on control dependency: [if], data = [none]
} else {
this.addLeft(gaps, s, min); // depends on control dependency: [if], data = [none]
}
} else {
this.addLeft(gaps, s, min); // depends on control dependency: [if], data = [none]
}
}
}
// inner gaps
gaps.addAll(coll.withGaps().getIntervals());
// right edge
T max = coll.getMaximum();
if (max != null) {
T s = this.getTimeLine().stepForward(max);
if (s != null) {
Boundary<T> bs = Boundary.ofClosed(s);
Boundary<T> be;
if (upper.isInfinite()) {
be = upper; // depends on control dependency: [if], data = [none]
gaps.add(this.newInterval(bs, be)); // depends on control dependency: [if], data = [none]
} else if (this.isCalendrical()) {
if (upper.isClosed()) {
be = upper; // depends on control dependency: [if], data = [none]
} else {
T e = upper.getTemporal();
e = this.getTimeLine().stepBackwards(e); // depends on control dependency: [if], data = [none]
be = Boundary.ofClosed(e); // depends on control dependency: [if], data = [none]
}
if (!this.isAfter(s, be.getTemporal())) {
gaps.add(this.newInterval(bs, be)); // depends on control dependency: [if], data = [none]
}
} else {
if (upper.isOpen()) {
be = upper; // depends on control dependency: [if], data = [none]
} else {
T e = upper.getTemporal();
e = this.getTimeLine().stepForward(e); // depends on control dependency: [if], data = [none]
if (e == null) {
be = Boundary.infiniteFuture(); // depends on control dependency: [if], data = [none]
} else {
be = Boundary.ofOpen(e); // depends on control dependency: [if], data = [(e]
}
}
if (this.isBefore(s, be.getTemporal())) {
gaps.add(this.newInterval(bs, be)); // depends on control dependency: [if], data = [none]
}
}
}
}
return this.create(gaps);
} }
|
public class class_name {
public TableRef on(StorageEvent eventType, final ItemAttribute primary, final OnItemSnapshot onItemSnapshot, final OnError onError) {
if(eventType == StorageEvent.PUT) {
final TableRef self = this;
TableMetadata tm = context.getTableMeta(this.name);
if(tm == null) {
this.meta(new OnTableMetadata(){
@Override
public void run(TableMetadata tableMetadata) {
// see if equals filter exists over the primary key
if(!filterExists(StorageFilter.EQUALS, tableMetadata.getPrimaryKeyName())) {
self.equals(tableMetadata.getPrimaryKeyName(), primary);
}
_getItems(onItemSnapshot, onError);
}
}, onError);
}
else {
// see if equals filter exists over the primary key
if(!filterExists(StorageFilter.EQUALS, tm.getPrimaryKeyName())) {
equals(tm.getPrimaryKeyName(), primary);
}
_getItems(onItemSnapshot, onError);
}
}
Event ev = new Event(eventType, this.name, primary, null, false, true, pushNotificationsEnabled, onItemSnapshot);
context.addEvent(ev);
//if(eventType.compareTo(StorageEvent.PUT)==0){
// this.getItems(onItemSnapshot, null);
//}
return this;
} }
|
public class class_name {
public TableRef on(StorageEvent eventType, final ItemAttribute primary, final OnItemSnapshot onItemSnapshot, final OnError onError) {
if(eventType == StorageEvent.PUT) {
final TableRef self = this;
TableMetadata tm = context.getTableMeta(this.name);
if(tm == null) {
this.meta(new OnTableMetadata(){
@Override
public void run(TableMetadata tableMetadata) {
// see if equals filter exists over the primary key
if(!filterExists(StorageFilter.EQUALS, tableMetadata.getPrimaryKeyName())) {
self.equals(tableMetadata.getPrimaryKeyName(), primary); // depends on control dependency: [if], data = [none]
}
_getItems(onItemSnapshot, onError);
}
}, onError); // depends on control dependency: [if], data = [none]
}
else {
// see if equals filter exists over the primary key
if(!filterExists(StorageFilter.EQUALS, tm.getPrimaryKeyName())) {
equals(tm.getPrimaryKeyName(), primary); // depends on control dependency: [if], data = [none]
}
_getItems(onItemSnapshot, onError); // depends on control dependency: [if], data = [none]
}
}
Event ev = new Event(eventType, this.name, primary, null, false, true, pushNotificationsEnabled, onItemSnapshot);
context.addEvent(ev);
//if(eventType.compareTo(StorageEvent.PUT)==0){
// this.getItems(onItemSnapshot, null);
//}
return this;
} }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.