| code (string, lengths 130–281k) | code_dependency (string, lengths 182–306k) |
|---|---|
public class class_name {
/**
 * Resolves the Page to use for the current query.
 * Precedence: an existing thread-local Page, then RowBounds (when not
 * RowBounds.DEFAULT), then the parameter object / method arguments.
 *
 * @param parameterObject the mapper method parameter (may carry paging info)
 * @param rowBounds       MyBatis row bounds; RowBounds.DEFAULT means "no bounds"
 * @return the resolved Page, or null when paging should not be applied
 */
public Page getPage(Object parameterObject, RowBounds rowBounds) {
Page page = PageHelper.getLocalPage();
if (page == null) {
if (rowBounds != RowBounds.DEFAULT) {
// offsetAsPageNum / rowBoundsWithCount are configuration fields of the
// enclosing class (not visible in this snippet)
if (offsetAsPageNum) {
// treat RowBounds.offset as a page number directly
page = new Page(rowBounds.getOffset(), rowBounds.getLimit(), rowBoundsWithCount);
} else {
// treat RowBounds as a raw [offset, limit] window
page = new Page(new int[]{rowBounds.getOffset(), rowBounds.getLimit()}, rowBoundsWithCount);
// when offsetAsPageNum=false, "reasonable" cannot be used because of the
// page-number semantics, so it is forced to false here (translated comment)
page.setReasonable(false);
}
if(rowBounds instanceof PageRowBounds){
PageRowBounds pageRowBounds = (PageRowBounds)rowBounds;
// default to running the count query when no preference was given
page.setCount(pageRowBounds.getCount() == null || pageRowBounds.getCount());
}
} else if(parameterObject instanceof IPage || supportMethodsArguments){
try {
page = PageObjectUtil.getPageFromObject(parameterObject, false);
} catch (Exception e) {
// paging info could not be extracted from the parameter -> no paging
return null;
}
}
if(page == null){
return null;
}
// publish the resolved page so later stages can see it on this thread
PageHelper.setLocalPage(page);
}
// pagination "reasonable" normalization: fall back to the configured default
// (translated comment)
if (page.getReasonable() == null) {
page.setReasonable(reasonable);
}
// when set to true and pageSize is 0 (or RowBounds limit = 0), paging is
// skipped and the full result set is returned (translated comment)
if (page.getPageSizeZero() == null) {
page.setPageSizeZero(pageSizeZero);
}
return page;
} } | public class class_name {
public Page getPage(Object parameterObject, RowBounds rowBounds) {
Page page = PageHelper.getLocalPage();
if (page == null) {
if (rowBounds != RowBounds.DEFAULT) {
if (offsetAsPageNum) {
page = new Page(rowBounds.getOffset(), rowBounds.getLimit(), rowBoundsWithCount); // depends on control dependency: [if], data = [none]
} else {
page = new Page(new int[]{rowBounds.getOffset(), rowBounds.getLimit()}, rowBoundsWithCount); // depends on control dependency: [if], data = [none]
//offsetAsPageNum=false的时候,由于PageNum问题,不能使用reasonable,这里会强制为false
page.setReasonable(false); // depends on control dependency: [if], data = [none]
}
if(rowBounds instanceof PageRowBounds){
PageRowBounds pageRowBounds = (PageRowBounds)rowBounds;
page.setCount(pageRowBounds.getCount() == null || pageRowBounds.getCount()); // depends on control dependency: [if], data = [none]
}
} else if(parameterObject instanceof IPage || supportMethodsArguments){
try {
page = PageObjectUtil.getPageFromObject(parameterObject, false); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
return null;
} // depends on control dependency: [catch], data = [none]
}
if(page == null){
return null; // depends on control dependency: [if], data = [none]
}
PageHelper.setLocalPage(page); // depends on control dependency: [if], data = [(page]
}
//分页合理化
if (page.getReasonable() == null) {
page.setReasonable(reasonable); // depends on control dependency: [if], data = [none]
}
//当设置为true的时候,如果pagesize设置为0(或RowBounds的limit=0),就不执行分页,返回全部结果
if (page.getPageSizeZero() == null) {
page.setPageSizeZero(pageSizeZero); // depends on control dependency: [if], data = [none]
}
return page;
} } |
public class class_name {
/**
 * Removes from the given set every value already present in either the
 * explicit buffer (as a key) or the scan buffer.
 * Synchronized: guards access to the two buffers during filtering.
 *
 * @param filterValueSet the set to filter in place; may be null
 */
protected synchronized void filter(ValueSet filterValueSet) {
// snapshot the emptiness checks once so the loop can skip lookups cheaply
boolean explicitBufferEmpty = this.explicitBuffer.isEmpty();
boolean scanBufferEmpty = this.scanBuffer.isEmpty();
if (filterValueSet != null && !filterValueSet.isEmpty() && (!explicitBufferEmpty || !scanBufferEmpty)) {
Iterator it = filterValueSet.iterator();
while (it.hasNext()) {
Object o = it.next();
if (!explicitBufferEmpty && this.explicitBuffer.containsKey(o)) {
// remove via the iterator to avoid ConcurrentModificationException
it.remove();
} else if (!scanBufferEmpty && this.scanBuffer.contains(o)) {
it.remove();
}
}
}
} } | public class class_name {
protected synchronized void filter(ValueSet filterValueSet) {
boolean explicitBufferEmpty = this.explicitBuffer.isEmpty();
boolean scanBufferEmpty = this.scanBuffer.isEmpty();
if (filterValueSet != null && !filterValueSet.isEmpty() && (!explicitBufferEmpty || !scanBufferEmpty)) {
Iterator it = filterValueSet.iterator();
while (it.hasNext()) {
Object o = it.next();
if (!explicitBufferEmpty && this.explicitBuffer.containsKey(o)) {
it.remove(); // depends on control dependency: [if], data = [none]
} else if (!scanBufferEmpty && this.scanBuffer.contains(o)) {
it.remove(); // depends on control dependency: [if], data = [none]
}
}
}
} } |
public class class_name {
/**
 * Looks up (and removes) the creation point recorded for the stack item at
 * the given depth index.
 *
 * @param index stack position to inspect
 * @return the recorded creation point, or null when the stack is shallower
 *         than the index or no creation point was recorded for the item
 */
@Nullable
private Integer findDOMNodeCreationPoint(int index) {
if (stack.getStackDepth() <= index) {
return null;
}
// Map.remove returns null when no entry exists for the stack item
return nodeCreations.remove(stack.getStackItem(index));
} } | public class class_name {
@Nullable
private Integer findDOMNodeCreationPoint(int index) {
if (stack.getStackDepth() <= index) {
return null; // depends on control dependency: [if], data = [none]
}
return nodeCreations.remove(stack.getStackItem(index));
} } |
public class class_name {
/**
 * Registers a named option under its primary name and every alias.
 * NOTE(review): a later registration with the same name or alias silently
 * overwrites an earlier one -- confirm collisions should not be rejected.
 *
 * @param namedOption the option to register (must not be null)
 */
public void addOption(@NonNull NamedOption namedOption) {
options.put(namedOption.getName(), namedOption);
optionSet.add(namedOption);
if (namedOption.getAliases() != null) {
for (String alias : namedOption.getAliases()) {
// the same option instance is reachable under each alias
options.put(alias, namedOption);
}
}
} } | public class class_name {
public void addOption(@NonNull NamedOption namedOption) {
options.put(namedOption.getName(), namedOption);
optionSet.add(namedOption);
if (namedOption.getAliases() != null) {
for (String alias : namedOption.getAliases()) {
options.put(alias, namedOption); // depends on control dependency: [for], data = [alias]
}
}
} } |
public class class_name {
/**
 * Backbone computation loop: pops candidate literals and either refines the
 * upper bound (literal is satisfiable) or records it as a backbone literal.
 *
 * @param variables the variables used to seed the candidate stack
 */
private void compute(final List<Integer> variables) {
final Stack<Integer> candidates = createInitialCandidates(variables);
while (candidates.size() > 0) {
final int lit = candidates.pop();
if (solve(lit)) {
// SAT under the candidate: it is not forced, shrink the candidate set
refineUpperBound();
} else {
// UNSAT: the literal is entailed and belongs to the backbone
addBackboneLiteral(lit);
}
}
} } | public class class_name {
private void compute(final List<Integer> variables) {
final Stack<Integer> candidates = createInitialCandidates(variables);
while (candidates.size() > 0) {
final int lit = candidates.pop();
if (solve(lit)) {
refineUpperBound(); // depends on control dependency: [if], data = [none]
} else {
addBackboneLiteral(lit); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
/**
 * Builds the "prepared" listener structures from the raw listener maps.
 * Idempotent: returns immediately when both prepared structures exist.
 * Every listener/supplier/function is normalized to a
 * Function&lt;DiscordApi, GloballyAttachableListener&gt;.
 */
@SuppressWarnings("unchecked")
private void prepareListeners() {
if (preparedListeners != null && preparedUnspecifiedListeners != null) {
// Already created, skip
return;
}
preparedListeners = new ConcurrentHashMap<>();
// union of all event types that have at least one registration of any flavor
Stream<Class<? extends GloballyAttachableListener>> eventTypes = Stream.concat(
listeners.keySet().stream(),
Stream.concat(listenerSuppliers.keySet().stream(),
listenerFunctions.keySet().stream())
).distinct();
eventTypes.forEach(type -> {
ArrayList<Function<DiscordApi, GloballyAttachableListener>> typeListenerFunctions = new ArrayList<>();
// plain listeners: wrapped as constant functions
listeners.getOrDefault(type, Collections.emptyList()).forEach(
listener -> typeListenerFunctions.add(api -> listener)
);
// suppliers: evaluated lazily, once per api invocation
listenerSuppliers.getOrDefault(type, Collections.emptyList()).forEach(
supplier -> typeListenerFunctions.add(api -> supplier.get())
);
// functions: already the right shape, only a cast is needed
listenerFunctions.getOrDefault(type, Collections.emptyList()).forEach(
function -> typeListenerFunctions.add((Function<DiscordApi, GloballyAttachableListener>) function)
);
preparedListeners.put(type, typeListenerFunctions);
});
// Unspecified Listeners
preparedUnspecifiedListeners = new CopyOnWriteArrayList<>(unspecifiedListenerFunctions);
unspecifiedListenerSuppliers.forEach(supplier -> preparedUnspecifiedListeners.add((api) -> supplier.get()));
unspecifiedListeners.forEach(listener -> preparedUnspecifiedListeners.add((api) -> listener));
} } | public class class_name {
@SuppressWarnings("unchecked")
private void prepareListeners() {
if (preparedListeners != null && preparedUnspecifiedListeners != null) {
// Already created, skip
return; // depends on control dependency: [if], data = [none]
}
preparedListeners = new ConcurrentHashMap<>();
Stream<Class<? extends GloballyAttachableListener>> eventTypes = Stream.concat(
listeners.keySet().stream(),
Stream.concat(listenerSuppliers.keySet().stream(),
listenerFunctions.keySet().stream())
).distinct();
eventTypes.forEach(type -> {
ArrayList<Function<DiscordApi, GloballyAttachableListener>> typeListenerFunctions = new ArrayList<>();
listeners.getOrDefault(type, Collections.emptyList()).forEach(
listener -> typeListenerFunctions.add(api -> listener)
);
listenerSuppliers.getOrDefault(type, Collections.emptyList()).forEach(
supplier -> typeListenerFunctions.add(api -> supplier.get())
);
listenerFunctions.getOrDefault(type, Collections.emptyList()).forEach(
function -> typeListenerFunctions.add((Function<DiscordApi, GloballyAttachableListener>) function)
);
preparedListeners.put(type, typeListenerFunctions);
});
// Unspecified Listeners
preparedUnspecifiedListeners = new CopyOnWriteArrayList<>(unspecifiedListenerFunctions);
unspecifiedListenerSuppliers.forEach(supplier -> preparedUnspecifiedListeners.add((api) -> supplier.get()));
unspecifiedListeners.forEach(listener -> preparedUnspecifiedListeners.add((api) -> listener));
} } |
public class class_name {
/**
 * Performs a face-audit request for one or more images.
 * Images are base64-encoded and joined with ',' into one "images" field.
 *
 * @param imgData raw image bytes, one array per image
 * @param options optional extra request parameters; may be null
 * @return the JSON response produced by faceAuditHelper
 */
public JSONObject faceAudit(byte[][] imgData, HashMap<String, String> options) {
AipRequest request = new AipRequest();
ArrayList<String> buffer = new ArrayList<String>();
for (byte[] data : imgData) {
String base64Str = Base64Util.encode(data);
buffer.add(base64Str);
}
// comma-separated list of base64 images, as the API expects
String imgDataAll = Util.mkString(buffer.iterator(), ',');
request.addBody("images", imgDataAll);
if (options != null) {
for (Map.Entry<String, String> entry : options.entrySet()) {
request.addBody(entry.getKey(), entry.getValue());
}
}
return faceAuditHelper(request, options);
} } | public class class_name {
public JSONObject faceAudit(byte[][] imgData, HashMap<String, String> options) {
AipRequest request = new AipRequest();
ArrayList<String> buffer = new ArrayList<String>();
for (byte[] data : imgData) {
String base64Str = Base64Util.encode(data);
buffer.add(base64Str); // depends on control dependency: [for], data = [none]
}
String imgDataAll = Util.mkString(buffer.iterator(), ',');
request.addBody("images", imgDataAll);
if (options != null) {
for (Map.Entry<String, String> entry : options.entrySet()) {
request.addBody(entry.getKey(), entry.getValue()); // depends on control dependency: [for], data = [entry]
}
}
return faceAuditHelper(request, options);
} } |
public class class_name {
/**
 * Runs the consumer against a read-only view of the search-unit map while
 * holding the read lock, so the map cannot change during the callback.
 *
 * @param consumer callback receiving an unmodifiable view of the map;
 *                 the view is only safe to use while the callback runs
 */
public static void searchUnitMap(Consumer<Map<String, Unit>> consumer) {
readWriteLock.readLock().lock();
try {
// unmodifiableMap is a live view over the locked map, not a copy
consumer.accept(Collections.unmodifiableMap(searchUnitMap));
} finally {
readWriteLock.readLock().unlock();
}
} } | public class class_name {
public static void searchUnitMap(Consumer<Map<String, Unit>> consumer) {
readWriteLock.readLock().lock();
try {
consumer.accept(Collections.unmodifiableMap(searchUnitMap)); // depends on control dependency: [try], data = [none]
} finally {
readWriteLock.readLock().unlock();
}
} } |
public class class_name {
/**
 * Instantiates the parameters-schema type for this builder schema.
 * ModelBuilderSchema itself is special-cased because it sits at the top of
 * the hierarchy and is parameterized differently.
 *
 * @return a new parameters-schema instance of type P
 */
final public P createParametersSchema() {
// special case, because ModelBuilderSchema is the top of the tree and is parameterized differently
if (ModelBuilderSchema.class == this.getClass()) {
return (P)new ModelParametersSchemaV3();
}
try {
// reflect the concrete schema class from the actual type parameter at index 2
Class<? extends ModelParametersSchemaV3> parameters_class = ReflectionUtils.findActualClassParameter(this.getClass(), 2);
return (P)parameters_class.newInstance();
}
catch (Exception e) {
// instantiation failure is unrecoverable for the schema machinery
throw H2O.fail("Caught exception trying to instantiate a builder instance for ModelBuilderSchema: " + this + ": " + e, e);
}
} } | public class class_name {
final public P createParametersSchema() {
// special case, because ModelBuilderSchema is the top of the tree and is parameterized differently
if (ModelBuilderSchema.class == this.getClass()) {
return (P)new ModelParametersSchemaV3(); // depends on control dependency: [if], data = [none]
}
try {
Class<? extends ModelParametersSchemaV3> parameters_class = ReflectionUtils.findActualClassParameter(this.getClass(), 2); // depends on control dependency: [try], data = [none]
return (P)parameters_class.newInstance(); // depends on control dependency: [try], data = [none]
}
catch (Exception e) {
throw H2O.fail("Caught exception trying to instantiate a builder instance for ModelBuilderSchema: " + this + ": " + e, e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Attempts to close the entry registered under the given key.
 *
 * @param key the entry key
 * @return true when the entry is absent or was moved to the closed state;
 *         false when it could not be closed (per makeClosed())
 */
public boolean close(K key) {
emptyBuffers();
final OClosableEntry<K, V> entry = data.get(key);
if (entry == null)
// nothing registered under the key counts as already closed
return true;
if (entry.makeClosed()) {
countClosedFiles();
return true;
}
return false;
} } | public class class_name {
public boolean close(K key) {
emptyBuffers();
final OClosableEntry<K, V> entry = data.get(key);
if (entry == null)
return true;
if (entry.makeClosed()) {
countClosedFiles(); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
return false;
} } |
public class class_name {
/**
 * Adds a (coord, value) observation. While caching is active (cachefill >= 0)
 * observations are buffered; once the cache is full it is materialized and
 * the current observation falls through to the regular histogram path.
 *
 * @param coord bin coordinate
 * @param value amount to add at that coordinate
 */
@Override
public void increment(double coord, double value) {
// Store in cache
if (cachefill >= 0) {
if (cachefill < cachec.length) {
cachec[cachefill] = coord;
cachev[cachefill] = value;
cachefill ++;
return;
} else {
// cache full: flush it into the histogram, then handle this value below
materialize();
// But continue below!
}
}
// Check if we need to resample to accomodate this bin.
testResample(coord);
// super class will handle histogram resizing / shifting
super.increment(coord, value);
} } | public class class_name {
@Override
public void increment(double coord, double value) {
// Store in cache
if (cachefill >= 0) {
if (cachefill < cachec.length) {
cachec[cachefill] = coord; // depends on control dependency: [if], data = [none]
cachev[cachefill] = value; // depends on control dependency: [if], data = [none]
cachefill ++; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
} else {
materialize(); // depends on control dependency: [if], data = [none]
// But continue below!
}
}
// Check if we need to resample to accomodate this bin.
testResample(coord);
// super class will handle histogram resizing / shifting
super.increment(coord, value);
} } |
public class class_name {
/**
 * Parses the given service descriptor inside the deployable-unit jar and
 * builds one ServiceComponentImpl per descriptor found.
 *
 * @param serviceDescriptorFileName jar-entry name of the descriptor
 * @param deployableUnitJar         the jar containing the descriptor
 * @return the components built from the descriptor (possibly empty)
 * @throws DeploymentException when the descriptor cannot be read or parsed
 */
public List<ServiceComponentImpl> buildComponents(String serviceDescriptorFileName, JarFile deployableUnitJar) throws DeploymentException {
// make component jar entry
JarEntry componentDescriptor = deployableUnitJar.getJarEntry(serviceDescriptorFileName);
InputStream componentDescriptorInputStream = null;
List<ServiceComponentImpl> result = new ArrayList<ServiceComponentImpl>();
try {
componentDescriptorInputStream = deployableUnitJar.getInputStream(componentDescriptor);
ServiceDescriptorFactoryImpl descriptorFactory = componentManagement.getComponentDescriptorFactory().getServiceDescriptorFactory();
for (ServiceDescriptorImpl descriptor : descriptorFactory.parse(componentDescriptorInputStream)) {
result.add(new ServiceComponentImpl(descriptor));
}
} catch (IOException e) {
throw new DeploymentException("failed to parse service descriptor from "+componentDescriptor.getName(),e);
}
finally {
// best-effort close; a close failure is logged, not rethrown
if (componentDescriptorInputStream != null) {
try {
componentDescriptorInputStream.close();
} catch (IOException e) {
logger.error("failed to close inputstream of descriptor for jar "+componentDescriptor.getName());
}
}
}
return result;
} } | public class class_name {
public List<ServiceComponentImpl> buildComponents(String serviceDescriptorFileName, JarFile deployableUnitJar) throws DeploymentException {
// make component jar entry
JarEntry componentDescriptor = deployableUnitJar.getJarEntry(serviceDescriptorFileName);
InputStream componentDescriptorInputStream = null;
List<ServiceComponentImpl> result = new ArrayList<ServiceComponentImpl>();
try {
componentDescriptorInputStream = deployableUnitJar.getInputStream(componentDescriptor);
ServiceDescriptorFactoryImpl descriptorFactory = componentManagement.getComponentDescriptorFactory().getServiceDescriptorFactory();
for (ServiceDescriptorImpl descriptor : descriptorFactory.parse(componentDescriptorInputStream)) {
result.add(new ServiceComponentImpl(descriptor)); // depends on control dependency: [for], data = [descriptor]
}
} catch (IOException e) {
throw new DeploymentException("failed to parse service descriptor from "+componentDescriptor.getName(),e);
}
finally {
if (componentDescriptorInputStream != null) {
try {
componentDescriptorInputStream.close(); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
logger.error("failed to close inputstream of descriptor for jar "+componentDescriptor.getName());
} // depends on control dependency: [catch], data = [none]
}
}
return result;
} } |
public class class_name {
/**
 * Adds src/main/scala and src/test/scala as (test) compile source roots for
 * non-pom projects, skipping directories that do not exist and roots that
 * are already registered.
 */
@Override
public void execute()
{
// packaging "pom" aggregates modules only -- nothing to compile
if ( "pom".equals( project.getPackaging() ) )
{
return;
}
File baseDir = project.getBasedir();
File mainScalaPath = new File( baseDir, "src/main/scala" );
if ( mainScalaPath.isDirectory() )
{
String mainScalaPathStr = mainScalaPath.getAbsolutePath();
// avoid registering the same root twice
if ( !project.getCompileSourceRoots().contains( mainScalaPathStr ) )
{
project.addCompileSourceRoot( mainScalaPathStr );
getLog().debug( "Added source directory: " + mainScalaPathStr );
}
}
File testScalaPath = new File( baseDir, "src/test/scala" );
if ( testScalaPath.isDirectory() )
{
String testScalaPathStr = testScalaPath.getAbsolutePath();
if ( !project.getTestCompileSourceRoots().contains( testScalaPathStr ) )
{
project.addTestCompileSourceRoot( testScalaPathStr );
getLog().debug( "Added test source directory: " + testScalaPathStr );
}
}
} } | public class class_name {
@Override
public void execute()
{
if ( "pom".equals( project.getPackaging() ) )
{
return; // depends on control dependency: [if], data = [none]
}
File baseDir = project.getBasedir();
File mainScalaPath = new File( baseDir, "src/main/scala" );
if ( mainScalaPath.isDirectory() )
{
String mainScalaPathStr = mainScalaPath.getAbsolutePath();
if ( !project.getCompileSourceRoots().contains( mainScalaPathStr ) )
{
project.addCompileSourceRoot( mainScalaPathStr ); // depends on control dependency: [if], data = [none]
getLog().debug( "Added source directory: " + mainScalaPathStr ); // depends on control dependency: [if], data = [none]
}
}
File testScalaPath = new File( baseDir, "src/test/scala" );
if ( testScalaPath.isDirectory() )
{
String testScalaPathStr = testScalaPath.getAbsolutePath();
if ( !project.getTestCompileSourceRoots().contains( testScalaPathStr ) )
{
project.addTestCompileSourceRoot( testScalaPathStr ); // depends on control dependency: [if], data = [none]
getLog().debug( "Added test source directory: " + testScalaPathStr ); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
/**
 * Populates this object from the template's declared attributes and the
 * request JSON. Per-attribute failures other than ObjectMissingException /
 * IllegalArgumentException are wrapped in an AttributeParsingException that
 * carries the template name, the request JSON and (when available) the
 * attribute defaults. When the configuration demands it, request properties
 * not declared by the template are rejected with an ExtraPropertyException.
 *
 * @param template              the template whose attributes are populated
 * @param attributes            declared attribute name -&gt; Attribute
 * @param requestJsonAttributes the raw request attribute values
 */
public void populateFromAttributes(
@Nonnull final Template template,
@Nonnull final Map<String, Attribute> attributes,
@Nonnull final PObject requestJsonAttributes) {
// implicitly support request headers when the request supplies them
// and the template did not declare the attribute itself
if (requestJsonAttributes.has(JSON_REQUEST_HEADERS) &&
requestJsonAttributes.getObject(JSON_REQUEST_HEADERS).has(JSON_REQUEST_HEADERS) &&
!attributes.containsKey(JSON_REQUEST_HEADERS)) {
attributes.put(JSON_REQUEST_HEADERS, new HttpRequestHeadersAttribute());
}
for (Map.Entry<String, Attribute> attribute: attributes.entrySet()) {
try {
put(attribute.getKey(),
attribute.getValue().getValue(template, attribute.getKey(), requestJsonAttributes));
} catch (ObjectMissingException | IllegalArgumentException e) {
// these are already meaningful to callers; rethrow untouched
throw e;
} catch (Throwable e) {
// build a descriptive error: find this template's name by identity
String templateName = "unknown";
for (Map.Entry<String, Template> entry: template.getConfiguration().getTemplates()
.entrySet()) {
if (entry.getValue() == template) {
templateName = entry.getKey();
break;
}
}
String defaults = "";
// BUG FIX: the original tested 'attribute instanceof ReflectiveAttribute',
// but 'attribute' is a Map.Entry, so the check was always false and the
// defaults text was never appended. Test the attribute value instead.
if (attribute.getValue() instanceof ReflectiveAttribute<?>) {
ReflectiveAttribute<?> reflectiveAttribute = (ReflectiveAttribute<?>) attribute.getValue();
defaults = "\n\n The attribute defaults are: " + reflectiveAttribute.getDefaultValue();
}
String errorMsg = "An error occurred when creating a value from the '" + attribute.getKey() +
"' attribute for the '" +
templateName + "' template.\n\nThe JSON is: \n" + requestJsonAttributes + defaults +
"\n" +
e.toString();
throw new AttributeParsingException(errorMsg, e);
}
}
if (template.getConfiguration().isThrowErrorOnExtraParameters()) {
// strict mode: reject request properties the template did not declare
final List<String> extraProperties = new ArrayList<>();
for (Iterator<String> it = requestJsonAttributes.keys(); it.hasNext(); ) {
final String attributeName = it.next();
if (!attributes.containsKey(attributeName)) {
extraProperties.add(attributeName);
}
}
if (!extraProperties.isEmpty()) {
throw new ExtraPropertyException("Extra properties found in the request attributes",
extraProperties, attributes.keySet());
}
}
} } | public class class_name {
public void populateFromAttributes(
@Nonnull final Template template,
@Nonnull final Map<String, Attribute> attributes,
@Nonnull final PObject requestJsonAttributes) {
if (requestJsonAttributes.has(JSON_REQUEST_HEADERS) &&
requestJsonAttributes.getObject(JSON_REQUEST_HEADERS).has(JSON_REQUEST_HEADERS) &&
!attributes.containsKey(JSON_REQUEST_HEADERS)) {
attributes.put(JSON_REQUEST_HEADERS, new HttpRequestHeadersAttribute()); // depends on control dependency: [if], data = [none]
}
for (Map.Entry<String, Attribute> attribute: attributes.entrySet()) {
try {
put(attribute.getKey(),
attribute.getValue().getValue(template, attribute.getKey(), requestJsonAttributes)); // depends on control dependency: [try], data = [none]
} catch (ObjectMissingException | IllegalArgumentException e) {
throw e;
} catch (Throwable e) { // depends on control dependency: [catch], data = [none]
String templateName = "unknown";
for (Map.Entry<String, Template> entry: template.getConfiguration().getTemplates()
.entrySet()) {
if (entry.getValue() == template) {
templateName = entry.getKey(); // depends on control dependency: [if], data = [none]
break;
}
}
String defaults = "";
if (attribute instanceof ReflectiveAttribute<?>) {
ReflectiveAttribute<?> reflectiveAttribute = (ReflectiveAttribute<?>) attribute;
defaults = "\n\n The attribute defaults are: " + reflectiveAttribute.getDefaultValue(); // depends on control dependency: [if], data = [)]
}
String errorMsg = "An error occurred when creating a value from the '" + attribute.getKey() +
"' attribute for the '" +
templateName + "' template.\n\nThe JSON is: \n" + requestJsonAttributes + defaults +
"\n" +
e.toString();
throw new AttributeParsingException(errorMsg, e);
} // depends on control dependency: [catch], data = [none]
}
if (template.getConfiguration().isThrowErrorOnExtraParameters()) {
final List<String> extraProperties = new ArrayList<>();
for (Iterator<String> it = requestJsonAttributes.keys(); it.hasNext(); ) {
final String attributeName = it.next();
if (!attributes.containsKey(attributeName)) {
extraProperties.add(attributeName); // depends on control dependency: [if], data = [none]
}
}
if (!extraProperties.isEmpty()) {
throw new ExtraPropertyException("Extra properties found in the request attributes",
extraProperties, attributes.keySet());
}
}
} } |
public class class_name {
/**
 * Returns the VTL device ARN list, lazily initializing it to an empty
 * SdkInternalList so callers never observe null.
 *
 * @return the (possibly empty) list of VTL device ARNs
 */
public java.util.List<String> getVTLDeviceARNs() {
if (vTLDeviceARNs == null) {
vTLDeviceARNs = new com.amazonaws.internal.SdkInternalList<String>();
}
return vTLDeviceARNs;
} } | public class class_name {
public java.util.List<String> getVTLDeviceARNs() {
if (vTLDeviceARNs == null) {
vTLDeviceARNs = new com.amazonaws.internal.SdkInternalList<String>(); // depends on control dependency: [if], data = [none]
}
return vTLDeviceARNs;
} } |
public class class_name {
/**
 * Posts the given Messenger-profile payload to the Facebook Graph API,
 * silently returning when the configured page token fails validation.
 *
 * @param input the JSON entity to post
 */
public static void postMessengerProfile(StringEntity input) {
String pageToken = FbBotMillContext.getInstance().getPageToken();
// If the page token is invalid, returns.
if (!validatePageToken(pageToken)) {
return;
}
// endpoint = base URL + messenger-profile path + page token
String url = FbBotMillNetworkConstants.FACEBOOK_BASE_URL
+ FbBotMillNetworkConstants.FACEBOOK_MESSENGER_PROFILE
+ pageToken;
postInternal(url, input);
} } | public class class_name {
public static void postMessengerProfile(StringEntity input) {
String pageToken = FbBotMillContext.getInstance().getPageToken();
// If the page token is invalid, returns.
if (!validatePageToken(pageToken)) {
return; // depends on control dependency: [if], data = [none]
}
String url = FbBotMillNetworkConstants.FACEBOOK_BASE_URL
+ FbBotMillNetworkConstants.FACEBOOK_MESSENGER_PROFILE
+ pageToken;
postInternal(url, input);
} } |
public class class_name {
/**
 * Returns the cached saga-handler scan result, computing it on first use.
 * NOTE(review): not synchronized -- concurrent first calls may scan twice;
 * confirm single-threaded initialization is guaranteed by the caller.
 *
 * @return map of saga class to its handlers map
 */
@Override
public Map<Class<? extends Saga>, SagaHandlersMap> scanHandledMessageTypes() {
if (scanResult == null) {
// presumably populateSagaHandlers() assigns scanResult -- verify in class
populateSagaHandlers();
}
return scanResult;
} } | public class class_name {
@Override
public Map<Class<? extends Saga>, SagaHandlersMap> scanHandledMessageTypes() {
if (scanResult == null) {
populateSagaHandlers(); // depends on control dependency: [if], data = [none]
}
return scanResult;
} } |
public class class_name {
/**
 * Marshals a GetNamespaceRequest into the protocol format.
 *
 * @param getNamespaceRequest the request to marshall; must not be null
 * @param protocolMarshaller  the target marshaller
 * @throws SdkClientException on a null request or any marshalling failure
 */
public void marshall(GetNamespaceRequest getNamespaceRequest, ProtocolMarshaller protocolMarshaller) {
if (getNamespaceRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(getNamespaceRequest.getId(), ID_BINDING);
} catch (Exception e) {
// wrap everything, preserving the cause for diagnostics
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(GetNamespaceRequest getNamespaceRequest, ProtocolMarshaller protocolMarshaller) {
if (getNamespaceRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(getNamespaceRequest.getId(), ID_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Selects the inlier ("match") set: every point whose distance to the model
 * is below the threshold is copied into candidatePoints, and matchToInput
 * records each candidate's index in the original data set.
 *
 * @param dataSet   the points to test
 * @param threshold inlier distance threshold (exclusive)
 * @param param     the model to measure distances against
 */
@SuppressWarnings({"ForLoopReplaceableByForEach"})
protected void selectMatchSet(List<Point> dataSet, double threshold, Model param) {
candidatePoints.clear();
modelDistance.setModel(param);
for (int i = 0; i < dataSet.size(); i++) {
Point point = dataSet.get(i);
double distance = modelDistance.computeDistance(point);
if (distance < threshold) {
// remember the original index before growing the candidate list
matchToInput[candidatePoints.size()] = i;
candidatePoints.add(point);
}
}
} } | public class class_name {
@SuppressWarnings({"ForLoopReplaceableByForEach"})
protected void selectMatchSet(List<Point> dataSet, double threshold, Model param) {
candidatePoints.clear();
modelDistance.setModel(param);
for (int i = 0; i < dataSet.size(); i++) {
Point point = dataSet.get(i);
double distance = modelDistance.computeDistance(point);
if (distance < threshold) {
matchToInput[candidatePoints.size()] = i; // depends on control dependency: [if], data = [none]
candidatePoints.add(point); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
/**
 * Adds repository metadata for a new repository to the project's metadata
 * via a JSON-patch commit. The patch first asserts the repo path is absent,
 * so a duplicate add surfaces as RepositoryExistsException.
 *
 * @return a future of the revision created by the metadata commit
 */
public CompletableFuture<Revision> addRepo(Author author, String projectName, String repoName,
PerRolePermissions permission) {
requireNonNull(author, "author");
requireNonNull(projectName, "projectName");
requireNonNull(repoName, "repoName");
requireNonNull(permission, "permission");
final JsonPointer path = JsonPointer.compile("/repos" + encodeSegment(repoName));
final RepositoryMetadata newRepositoryMetadata = new RepositoryMetadata(repoName,
UserAndTimestamp.of(author),
permission);
// test-absence + add: fails with a conflict when the path already exists
final Change<JsonNode> change =
Change.ofJsonPatch(METADATA_JSON,
asJsonArray(new TestAbsenceOperation(path),
new AddOperation(path,
Jackson.valueToTree(newRepositoryMetadata))));
final String commitSummary =
"Add a repo '" + newRepositoryMetadata.id() + "' to the project " + projectName;
return metadataRepo.push(projectName, Project.REPO_DOGMA, author, commitSummary, change)
.handle((revision, cause) -> {
if (cause != null) {
// a conflict on the absence test means the repo already exists
if (Exceptions.peel(cause) instanceof ChangeConflictException) {
throw new RepositoryExistsException(repoName);
} else {
return Exceptions.throwUnsafely(cause);
}
}
return revision;
});
} } | public class class_name {
public CompletableFuture<Revision> addRepo(Author author, String projectName, String repoName,
PerRolePermissions permission) {
requireNonNull(author, "author");
requireNonNull(projectName, "projectName");
requireNonNull(repoName, "repoName");
requireNonNull(permission, "permission");
final JsonPointer path = JsonPointer.compile("/repos" + encodeSegment(repoName));
final RepositoryMetadata newRepositoryMetadata = new RepositoryMetadata(repoName,
UserAndTimestamp.of(author),
permission);
final Change<JsonNode> change =
Change.ofJsonPatch(METADATA_JSON,
asJsonArray(new TestAbsenceOperation(path),
new AddOperation(path,
Jackson.valueToTree(newRepositoryMetadata))));
final String commitSummary =
"Add a repo '" + newRepositoryMetadata.id() + "' to the project " + projectName;
return metadataRepo.push(projectName, Project.REPO_DOGMA, author, commitSummary, change)
.handle((revision, cause) -> {
if (cause != null) {
if (Exceptions.peel(cause) instanceof ChangeConflictException) {
throw new RepositoryExistsException(repoName);
} else {
return Exceptions.throwUnsafely(cause); // depends on control dependency: [if], data = [none]
}
}
return revision;
});
} } |
public class class_name {
/**
 * Recursively prints the rule automaton rooted at the given state, one line
 * per child state; accept states additionally show their rule id.
 *
 * @param rootState the state whose subtree is printed
 */
public void printRulesTree(State rootState) {
List<State> nextStates = rootState.getNextStates();
if (nextStates.isEmpty()) {
// leaf: nothing to print, recursion bottoms out here
return;
}
for (int i = 0; i < rootState.getNextStates().size(); i++) {
State currState = nextStates.get(i);
String accept = "";
if (currState instanceof AcceptState) {
accept = Long.toString(((AcceptState) currState).getRule().getId());
}
System.out.printf("state[%4d], parent[%4d], rule[%4s], element[%s]\n", Integer.valueOf(currState.getName()), Integer.valueOf(rootState.getName()), accept, RuleUtils.getPatternElementAsString(currState.getElement()));
this.printRulesTree(nextStates.get(i));
}
} } | public class class_name {
public void printRulesTree(State rootState) {
List<State> nextStates = rootState.getNextStates();
if (nextStates.isEmpty()) {
return;
// depends on control dependency: [if], data = [none]
}
for (int i = 0; i < rootState.getNextStates().size(); i++) {
State currState = nextStates.get(i);
String accept = "";
if (currState instanceof AcceptState) {
accept = Long.toString(((AcceptState) currState).getRule().getId());
// depends on control dependency: [if], data = [none]
}
System.out.printf("state[%4d], parent[%4d], rule[%4s], element[%s]\n", Integer.valueOf(currState.getName()), Integer.valueOf(rootState.getName()), accept, RuleUtils.getPatternElementAsString(currState.getElement()));
// depends on control dependency: [for], data = [none]
this.printRulesTree(nextStates.get(i));
// depends on control dependency: [for], data = [i]
}
} } |
public class class_name {
/**
 * Builds the stage instances for a case: triggered stages are derived from
 * the active-process "DynamicNode" node instances (Active or Completed);
 * when activeOnly is false, stages declared in the case definition but
 * never triggered are appended with status Available.
 *
 * @param caseDef      the case definition (source of declared stages)
 * @param caseId       the case identifier (turned into a correlation key)
 * @param activeOnly   when true, only non-completed stage nodes are returned
 * @param queryContext paging/sorting context for the node query
 * @return the stage instances for the case
 */
public List<CaseStageInstance> internalGetCaseStages(CaseDefinition caseDef, String caseId, boolean activeOnly, QueryContext queryContext) {
CorrelationKey correlationKey = correlationKeyFactory.newCorrelationKey(caseId);
Collection<org.jbpm.services.api.model.NodeInstanceDesc> nodes = runtimeDataService.getNodeInstancesByCorrelationKeyNodeType(correlationKey,
Arrays.asList(ProcessInstance.STATE_ACTIVE),
Arrays.asList("DynamicNode"),
queryContext);
// node-instance type 1 appears to mark completed nodes, 0 entry nodes --
// NOTE(review): confirm against NodeInstanceDesc's type constants
Collection<Long> completedNodes = nodes.stream().filter(n -> ((NodeInstanceDesc)n).getType() == 1).map(n -> n.getId()).collect(toList());
Map<String, CaseStage> stagesByName = caseDef.getCaseStages().stream()
.collect(toMap(CaseStage::getId, c -> c));
Predicate<org.jbpm.services.api.model.NodeInstanceDesc> filterNodes = null;
if (activeOnly) {
// keep entry nodes that have no matching completion record
filterNodes = n -> ((NodeInstanceDesc)n).getType() == 0 && !completedNodes.contains(((NodeInstanceDesc)n).getId());
} else {
filterNodes = n -> ((NodeInstanceDesc)n).getType() == 0;
}
List<String> triggeredStages = new ArrayList<>();
List<CaseStageInstance> stages = new ArrayList<>();
nodes.stream()
.filter(filterNodes)
.map(n -> {
StageStatus status = StageStatus.Active;
if (completedNodes.contains(((NodeInstanceDesc)n).getId())) {
status = StageStatus.Completed;
}
// first 100 active nodes of the stage are attached to the instance
Collection<org.jbpm.services.api.model.NodeInstanceDesc> activeNodes = getActiveNodesForCaseAndStage(caseId, n.getNodeId(), new QueryContext(0, 100));
return new CaseStageInstanceImpl(n.getNodeId(), n.getName(), stagesByName.get(n.getNodeId()).getAdHocFragments(), activeNodes, status);
})
.forEach(csi -> {
stages.add(csi);
triggeredStages.add(csi.getName());
});
if (!activeOnly) {
// add other stages that are present in the definition
caseDef.getCaseStages().stream()
.filter(cs -> !triggeredStages.contains(cs.getName()))
.map(cs -> new CaseStageInstanceImpl(cs.getId(), cs.getName(), cs.getAdHocFragments(), Collections.emptyList(), StageStatus.Available))
.forEach(csi -> stages.add(csi));
}
return stages;
} } | public class class_name {
public List<CaseStageInstance> internalGetCaseStages(CaseDefinition caseDef, String caseId, boolean activeOnly, QueryContext queryContext) {
CorrelationKey correlationKey = correlationKeyFactory.newCorrelationKey(caseId);
Collection<org.jbpm.services.api.model.NodeInstanceDesc> nodes = runtimeDataService.getNodeInstancesByCorrelationKeyNodeType(correlationKey,
Arrays.asList(ProcessInstance.STATE_ACTIVE),
Arrays.asList("DynamicNode"),
queryContext);
Collection<Long> completedNodes = nodes.stream().filter(n -> ((NodeInstanceDesc)n).getType() == 1).map(n -> n.getId()).collect(toList());
Map<String, CaseStage> stagesByName = caseDef.getCaseStages().stream()
.collect(toMap(CaseStage::getId, c -> c));
Predicate<org.jbpm.services.api.model.NodeInstanceDesc> filterNodes = null;
if (activeOnly) {
filterNodes = n -> ((NodeInstanceDesc)n).getType() == 0 && !completedNodes.contains(((NodeInstanceDesc)n).getId()); // depends on control dependency: [if], data = [none]
} else {
filterNodes = n -> ((NodeInstanceDesc)n).getType() == 0; // depends on control dependency: [if], data = [none]
}
List<String> triggeredStages = new ArrayList<>();
List<CaseStageInstance> stages = new ArrayList<>();
nodes.stream()
.filter(filterNodes)
.map(n -> {
StageStatus status = StageStatus.Active;
if (completedNodes.contains(((NodeInstanceDesc)n).getId())) {
status = StageStatus.Completed;
}
Collection<org.jbpm.services.api.model.NodeInstanceDesc> activeNodes = getActiveNodesForCaseAndStage(caseId, n.getNodeId(), new QueryContext(0, 100));
return new CaseStageInstanceImpl(n.getNodeId(), n.getName(), stagesByName.get(n.getNodeId()).getAdHocFragments(), activeNodes, status);
})
.forEach(csi -> {
stages.add(csi);
triggeredStages.add(csi.getName());
});
if (!activeOnly) {
// add other stages that are present in the definition
caseDef.getCaseStages().stream()
.filter(cs -> !triggeredStages.contains(cs.getName()))
.map(cs -> new CaseStageInstanceImpl(cs.getId(), cs.getName(), cs.getAdHocFragments(), Collections.emptyList(), StageStatus.Available))
.forEach(csi -> stages.add(csi));
}
return stages;
} } |
public class class_name {
// Registers a layout remapping by resolving the layout name both in the
// Android framework resources and in the application's own package.
public static void register(Context context, String name) {
    final Resources res = context.getResources();
    final int androidId = res.getIdentifier(name, "layout", "android");
    final int appId = res.getIdentifier(name, "layout", context.getPackageName());
    // Either id resolving to 0 means the layout is unknown on that side.
    if (androidId == 0 || appId == 0) {
        HoloEverywhere.warn("Failed to register layout remapping:\n" +
                " Android ID: 0x%8x\n" +
                " Application ID: 0x%8x",
                androidId, appId);
        return;
    }
    register(androidId, appId);
} } | public class class_name {
public static void register(Context context, String name) {
final Resources res = context.getResources();
int androidId = res.getIdentifier(name, "layout", "android");
int appId = res.getIdentifier(name, "layout", context.getPackageName());
if (androidId != 0 && appId != 0) {
register(androidId, appId); // depends on control dependency: [if], data = [(androidId]
} else {
HoloEverywhere.warn("Failed to register layout remapping:\n" +
" Android ID: 0x%8x\n" +
" Application ID: 0x%8x",
androidId, appId); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
// Returns the default (no-arg) constructor descriptor if one exists and it
// matches the requested declared/accessible scope; null otherwise.
public CtorDescriptor getDefaultCtorDescriptor(final boolean declared) {
    final CtorDescriptor defaultCtor = getCtors().getDefaultCtor();
    if (defaultCtor == null) {
        return null;
    }
    return defaultCtor.matchDeclared(declared) ? defaultCtor : null;
} } | public class class_name {
public CtorDescriptor getDefaultCtorDescriptor(final boolean declared) {
CtorDescriptor defaultCtor = getCtors().getDefaultCtor();
if ((defaultCtor != null) && defaultCtor.matchDeclared(declared)) {
return defaultCtor; // depends on control dependency: [if], data = [none]
}
return null;
} } |
public class class_name {
// Resets the feature cursor and collects the active features for every context
// predicate of the observation. Features for a dictionary element are built
// lazily on first use and cached on the element (elem.cpFeatures).
public void startScanFeatures(Observation obsr) {
    currentFeatures.clear();
    currentFeatureIdx = 0;
    // scan over all context predicates
    for (int i = 0; i < obsr.cps.length; i++) {
        // Integer.valueOf replaces the deprecated new Integer(...): identical
        // lookup key semantics, but uses the boxed-integer cache.
        Element elem = (Element) dict.dict.get(Integer.valueOf(obsr.cps[i]));
        if (elem == null) { // this context predicate doesn't appear in the dictionary of training data
            continue;
        }
        if (!(elem.isScanned)) {
            // First encounter: materialize the features for every label once.
            Iterator it = elem.lbCntFidxes.keySet().iterator();
            while (it.hasNext()) {
                Integer labelInt = (Integer) it.next();
                CountFIdx cntFIdx = (CountFIdx) elem.lbCntFidxes.get(labelInt);
                // A negative fidx presumably marks a pruned/unused feature - TODO confirm.
                if (cntFIdx.fidx >= 0) {
                    Feature f = new Feature();
                    f.FeatureInit(labelInt.intValue(), obsr.cps[i]);
                    f.idx = cntFIdx.fidx;
                    elem.cpFeatures.add(f);
                }
            }
            elem.isScanned = true;
        }
        // Expose the (possibly cached) features for this observation.
        for (int j = 0; j < elem.cpFeatures.size(); j++) {
            currentFeatures.add(elem.cpFeatures.get(j));
        }
    }
} } | public class class_name {
public void startScanFeatures(Observation obsr) {
currentFeatures.clear();
currentFeatureIdx = 0;
// scan over all context predicates
for (int i = 0; i < obsr.cps.length; i++) {
Element elem = (Element)dict.dict.get(new Integer(obsr.cps[i]));
if (elem == null) {//this context predicate doesn't appear in the dictionary of training data
continue;
}
if (!(elem.isScanned)) {
// scan all labels for features
Iterator it = elem.lbCntFidxes.keySet().iterator();
while (it.hasNext()) {
Integer labelInt = (Integer)it.next();
CountFIdx cntFIdx = (CountFIdx)elem.lbCntFidxes.get(labelInt);
if (cntFIdx.fidx >= 0) {
Feature f = new Feature();
f.FeatureInit(labelInt.intValue(), obsr.cps[i]); // depends on control dependency: [if], data = [none]
f.idx = cntFIdx.fidx; // depends on control dependency: [if], data = [none]
elem.cpFeatures.add(f); // depends on control dependency: [if], data = [none]
}
}
elem.isScanned = true; // depends on control dependency: [if], data = [none]
}
for (int j = 0; j < elem.cpFeatures.size(); j++) {
currentFeatures.add(elem.cpFeatures.get(j)); // depends on control dependency: [for], data = [j]
}
}
} } |
public class class_name {
@Override
// Stores the bus name, tracing the assignment when debug tracing is active.
public void setBusName(final String busName) {
// Guard keeps the debug call zero-cost when tracing is disabled.
if (TraceComponent.isAnyTracingEnabled() && TRACE.isDebugEnabled()) {
SibTr.debug(this, TRACE, "setBusName", busName);
}
_busName = busName;
} } | public class class_name {
@Override
public void setBusName(final String busName) {
if (TraceComponent.isAnyTracingEnabled() && TRACE.isDebugEnabled()) {
SibTr.debug(this, TRACE, "setBusName", busName); // depends on control dependency: [if], data = [none]
}
_busName = busName;
} } |
public class class_name {
// Advances the pull parser until the first <PC-Compound> start element and
// delegates parsing of it; returns null if the document ends without one.
private IAtomContainer readMolecule() throws Exception {
    while (parser.next() != XMLEvent.END_DOCUMENT) {
        if (parser.getEventType() != XMLEvent.START_ELEMENT) {
            continue;
        }
        if ("PC-Compound".equals(parser.getLocalName())) {
            return parserHelper.parseMolecule(parser, builder);
        }
    }
    return null;
} } | public class class_name {
private IAtomContainer readMolecule() throws Exception {
boolean foundCompound = false;
while (parser.next() != XMLEvent.END_DOCUMENT) {
if (parser.getEventType() == XMLEvent.START_ELEMENT) {
if (parser.getLocalName().equals("PC-Compound")) {
foundCompound = true; // depends on control dependency: [if], data = [none]
break;
}
}
}
if (foundCompound) {
return parserHelper.parseMolecule(parser, builder);
}
return null;
} } |
public class class_name {
// Replaces the layers with a defensive copy in the SDK's internal list type;
// a null argument clears the field.
public void setLayers(java.util.Collection<String> layers) {
    this.layers = (layers == null)
            ? null
            : new com.amazonaws.internal.SdkInternalList<String>(layers);
} } | public class class_name {
public void setLayers(java.util.Collection<String> layers) {
if (layers == null) {
this.layers = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.layers = new com.amazonaws.internal.SdkInternalList<String>(layers);
} } |
public class class_name {
// Scans the jar behind the given URL for .class entries under the requested
// package directory and registers each class found via addClassByName.
private void addClassByJar(ClassLoader classLoader, URL url, String packageName, String packageDirName) {
// Obtain the JarFile from the URL, then iterate over its entries.
JarFile jar;
try {
jar = ((JarURLConnection) url.openConnection()).getJarFile();
Enumeration<JarEntry> entries = jar.entries();
while (entries.hasMoreElements()) {
// An entry may be a directory or any other file inside the jar (e.g. META-INF).
JarEntry entry = entries.nextElement();
String name = entry.getName();
// Strip a leading '/', if present.
if (name.charAt(0) == '/') {
name = name.substring(1);
}
// Only consider entries whose path starts with the requested package directory.
if (name.startsWith(packageDirName)) {
int index = name.lastIndexOf('/');
// If the entry path contains a '/', derive the package name by replacing '/' with '.'.
if (index != -1) {
packageName = name.substring(0, index).replace('/', '.');
}
// Register plain .class files (not directories) found inside the package.
if (index != -1 && name.endsWith(".class") && !entry.isDirectory()) {
// Drop the trailing ".class" to obtain the simple class name.
String className = name.substring(packageName.length() + 1, name.length() - 6);
this.addClassByName(classLoader, packageName + '.' + className);
}
}
}
} catch (IOException expected) {
// Deliberately logged without a stack trace.
log.warn("从jar文件中读取class出错.");
}
} } | public class class_name {
private void addClassByJar(ClassLoader classLoader, URL url, String packageName, String packageDirName) {
// 从url中获取jar,然后从此jar包得到一个枚举类,然后进行迭代.
JarFile jar;
try {
jar = ((JarURLConnection) url.openConnection()).getJarFile(); // depends on control dependency: [try], data = [none]
Enumeration<JarEntry> entries = jar.entries();
while (entries.hasMoreElements()) {
// 获取jar里的一个实体 可以是目录 和一些jar包里的其他文件 如META-INF等文件
JarEntry entry = entries.nextElement();
String name = entry.getName();
// 如果是以/开头的,则获取后面的字符串
if (name.charAt(0) == '/') {
name = name.substring(1); // depends on control dependency: [if], data = [none]
}
// 如果前半部分和定义的包名相同.
if (name.startsWith(packageDirName)) {
int index = name.lastIndexOf('/');
// 如果以"/"结尾,则是一个包,获取包名并把"/"替换成"."
if (index != -1) {
packageName = name.substring(0, index).replace('/', '.'); // depends on control dependency: [if], data = [none]
}
// 如果可以迭代下去 并且是一个包,如果是一个.class文件 而且不是目录
if (index != -1 && name.endsWith(".class") && !entry.isDirectory()) {
// 去掉后面的".class" 获取真正的类名
String className = name.substring(packageName.length() + 1, name.length() - 6);
this.addClassByName(classLoader, packageName + '.' + className); // depends on control dependency: [if], data = [none]
}
}
}
} catch (IOException expected) {
// 此处不打印堆栈信息.
log.warn("从jar文件中读取class出错.");
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
// Loads a classpath resource as an SObject, tagging it with a file-name
// attribute derived from the last path segment (or the raw URL if the
// segment is blank). Returns null when the resource does not exist.
public static SObject loadResource(String url) {
    InputStream is = SObject.class.getResourceAsStream(url);
    if (null == is) {
        return null;
    }
    String filename = S.afterLast(url, "/");
    return of(randomKey(), is, ATTR_FILE_NAME, S.blank(filename) ? url : filename);
} } | public class class_name {
public static SObject loadResource(String url) {
InputStream is = SObject.class.getResourceAsStream(url);
if (null == is) {
return null; // depends on control dependency: [if], data = [none]
}
String filename = S.afterLast(url, "/");
if (S.blank(filename)) {
filename = url; // depends on control dependency: [if], data = [none]
}
return of(randomKey(), is, ATTR_FILE_NAME, filename);
} } |
public class class_name {
// Collects the start lines of all locations in the given file that were
// never hit. Iteration is synchronized because allLocations is shared.
public List<Integer> getMissList(File file)
{
    List<Integer> misses = new Vector<Integer>();
    synchronized (allLocations)
    {
        for (ILexLocation location : allLocations)
        {
            if (location.getHits() != 0)
            {
                continue;
            }
            if (location.getFile().equals(file))
            {
                misses.add(location.getStartLine());
            }
        }
    }
    return misses;
} } | public class class_name {
public List<Integer> getMissList(File file)
{
List<Integer> misses = new Vector<Integer>();
synchronized (allLocations)
{
for (ILexLocation l : allLocations)
{
if (l.getHits() == 0 && l.getFile().equals(file))
{
misses.add(l.getStartLine()); // depends on control dependency: [if], data = [none]
}
}
}
return misses;
} } |
public class class_name {
// Classifies the requesting device (MOBILE / TABLET / NORMAL) from the
// User-Agent header; a missing header defaults to NORMAL.
public static DeviceType getDevice(final HttpServletRequest request) {
    // The parameter is already an HttpServletRequest - the original
    // ((HttpServletRequest) request) cast was redundant noise.
    String userAgent = request.getHeader("User-Agent");
    if (Util.empty(userAgent)) {
        LOG.warn("No User-Agent details in the request headers. Will assume normal device.");
        return DeviceType.NORMAL;
    }
    // Check for device type
    UAgentInfo agentInfo = new UAgentInfo(userAgent, null);
    if (agentInfo.detectMobileQuick()) {
        return DeviceType.MOBILE;
    } else if (agentInfo.detectTierTablet()) {
        return DeviceType.TABLET;
    }
    return DeviceType.NORMAL;
} } | public class class_name {
public static DeviceType getDevice(final HttpServletRequest request) {
// User agent
String userAgent = ((HttpServletRequest) request).getHeader("User-Agent");
if (Util.empty(userAgent)) {
LOG.warn("No User-Agent details in the request headers. Will assume normal device."); // depends on control dependency: [if], data = [none]
return DeviceType.NORMAL; // depends on control dependency: [if], data = [none]
}
// Check for device type
UAgentInfo agentInfo = new UAgentInfo(userAgent, null);
if (agentInfo.detectMobileQuick()) {
return DeviceType.MOBILE; // depends on control dependency: [if], data = [none]
} else if (agentInfo.detectTierTablet()) {
return DeviceType.TABLET; // depends on control dependency: [if], data = [none]
}
return DeviceType.NORMAL;
} } |
public class class_name {
// Parses the given string as a properties file, logging (not throwing) on
// failure. Returns an empty Properties for null input or parse errors.
public static Properties readPropertiesQuietly( String fileContent, Logger logger ) {
    Properties result = new Properties();
    try {
        if( fileContent != null ) {
            // Load through a Reader so characters are consumed as-is.
            // The previous code encoded the string to UTF-8 bytes and used
            // load(InputStream), which decodes bytes as ISO-8859-1 and thus
            // garbled any non-ASCII characters.
            result.load( new java.io.StringReader( fileContent ));
        }
    } catch( Exception e ) {
        logger.severe( "Properties could not be read from a string." );
        logException( logger, e );
    }
    return result;
} } | public class class_name {
public static Properties readPropertiesQuietly( String fileContent, Logger logger ) {
Properties result = new Properties();
try {
if( fileContent != null ) {
InputStream in = new ByteArrayInputStream( fileContent.getBytes( StandardCharsets.UTF_8 ));
result.load( in ); // depends on control dependency: [if], data = [none]
}
} catch( Exception e ) {
logger.severe( "Properties could not be read from a string." );
logException( logger, e );
} // depends on control dependency: [catch], data = [none]
return result;
} } |
public class class_name {
// Runs every listener's prepareForShutdown concurrently and waits up to
// prepareForShutdownTimeoutSec seconds for all of them; any failure or
// timeout is logged rather than propagated.
public void prepareForShutdown() {
    CompletableFuture<?>[] pending = shutdownListeners.stream()
            .map(shutdownListener -> CompletableFuture.runAsync(shutdownListener::prepareForShutdown))
            .toArray(CompletableFuture[]::new);
    try {
        CompletableFuture.allOf(pending).get(prepareForShutdownTimeoutSec, TimeUnit.SECONDS);
    } catch (Exception e) {
        logger.error("Exception occurred while preparing shutdown.", e);
    }
} } | public class class_name {
public void prepareForShutdown() {
Collection<CompletableFuture<Void>> prepareShutdownFutures = shutdownListeners.stream()
.map(shutdownListener -> CompletableFuture.runAsync(() -> shutdownListener.prepareForShutdown()))
.collect(Collectors.toList());
try {
CompletableFuture.allOf(prepareShutdownFutures.toArray(new CompletableFuture[prepareShutdownFutures.size()]))
.get(prepareForShutdownTimeoutSec, TimeUnit.SECONDS); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
logger.error("Exception occurred while preparing shutdown.", e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
// Fluent varargs appender: lazily creates the backing list sized to the
// arguments, appends every suggestion, and returns this for chaining.
public GetSearchSuggestionsResult withPropertyNameSuggestions(PropertyNameSuggestion... propertyNameSuggestions) {
    if (this.propertyNameSuggestions == null) {
        setPropertyNameSuggestions(new java.util.ArrayList<PropertyNameSuggestion>(propertyNameSuggestions.length));
    }
    java.util.Collections.addAll(this.propertyNameSuggestions, propertyNameSuggestions);
    return this;
} } | public class class_name {
public GetSearchSuggestionsResult withPropertyNameSuggestions(PropertyNameSuggestion... propertyNameSuggestions) {
if (this.propertyNameSuggestions == null) {
setPropertyNameSuggestions(new java.util.ArrayList<PropertyNameSuggestion>(propertyNameSuggestions.length)); // depends on control dependency: [if], data = [none]
}
for (PropertyNameSuggestion ele : propertyNameSuggestions) {
this.propertyNameSuggestions.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
// "Quiet" variant of getInstances: instead of propagating an inconsistent-
// ontology exception, it answers from the bottom node of the taxonomy.
public synchronized Set<? extends Node<ElkNamedIndividual>> getInstancesQuietly(
ElkClassExpression classExpression, boolean direct)
throws ElkException {
try {
return getInstances(classExpression, direct);
} catch (final ElkInconsistentOntologyException e) {
// All classes are equivalent to each other, so also to owl:Nothing.
final TypeNode<ElkClass, ElkNamedIndividual> node = getInstanceTaxonomyQuietly()
.getBottomNode();
return direct ? node.getDirectInstanceNodes()
: node.getAllInstanceNodes();
}
} } | public class class_name {
public synchronized Set<? extends Node<ElkNamedIndividual>> getInstancesQuietly(
ElkClassExpression classExpression, boolean direct)
throws ElkException {
try {
return getInstances(classExpression, direct); // depends on control dependency: [try], data = [none]
} catch (final ElkInconsistentOntologyException e) {
// All classes are equivalent to each other, so also to owl:Nothing.
final TypeNode<ElkClass, ElkNamedIndividual> node = getInstanceTaxonomyQuietly()
.getBottomNode();
return direct ? node.getDirectInstanceNodes()
: node.getAllInstanceNodes();
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
// Adds JRE lib/ext jars to the white- or blacklist by leaf name. With no
// arguments it recurses once per known lib/ext jar; names may contain '*'
// globs, which are matched against all known lib/ext jar leaf names.
private void whitelistOrBlacklistLibOrExtJars(final boolean whitelist, final String... jarLeafNames) {
if (jarLeafNames.length == 0) {
// If no jar leafnames are given, whitelist or blacklist all lib or ext jars
for (final String libOrExtJar : SystemJarFinder.getJreLibOrExtJars()) {
whitelistOrBlacklistLibOrExtJars(whitelist, JarUtils.leafName(libOrExtJar));
}
} else {
for (final String jarLeafName : jarLeafNames) {
final String leafName = JarUtils.leafName(jarLeafName);
// Reject path-qualified names: only bare leaf names are accepted here.
if (!leafName.equals(jarLeafName)) {
throw new IllegalArgumentException("Can only " + (whitelist ? "whitelist" : "blacklist")
+ " jars by leafname: " + jarLeafName);
}
if (jarLeafName.contains("*")) {
// Compare wildcarded pattern against all jars in lib and ext dirs
final Pattern pattern = WhiteBlackList.globToPattern(jarLeafName);
boolean found = false;
for (final String libOrExtJarPath : SystemJarFinder.getJreLibOrExtJars()) {
final String libOrExtJarLeafName = JarUtils.leafName(libOrExtJarPath);
if (pattern.matcher(libOrExtJarLeafName).matches()) {
// Check for "*" in filename to prevent infinite recursion (shouldn't happen)
if (!libOrExtJarLeafName.contains("*")) {
whitelistOrBlacklistLibOrExtJars(whitelist, libOrExtJarLeafName);
}
found = true;
}
}
if (!found && topLevelLog != null) {
topLevelLog.log("Could not find lib or ext jar matching wildcard: " + jarLeafName);
}
} else {
// No wildcards, just whitelist or blacklist the named jar, if present
boolean found = false;
for (final String libOrExtJarPath : SystemJarFinder.getJreLibOrExtJars()) {
final String libOrExtJarLeafName = JarUtils.leafName(libOrExtJarPath);
if (jarLeafName.equals(libOrExtJarLeafName)) {
if (whitelist) {
scanSpec.libOrExtJarWhiteBlackList.addToWhitelist(jarLeafName);
} else {
scanSpec.libOrExtJarWhiteBlackList.addToBlacklist(jarLeafName);
}
if (topLevelLog != null) {
topLevelLog.log((whitelist ? "Whitelisting" : "Blacklisting") + " lib or ext jar: "
+ libOrExtJarPath);
}
found = true;
break;
}
}
if (!found && topLevelLog != null) {
topLevelLog.log("Could not find lib or ext jar: " + jarLeafName);
}
}
}
}
} } | public class class_name {
private void whitelistOrBlacklistLibOrExtJars(final boolean whitelist, final String... jarLeafNames) {
if (jarLeafNames.length == 0) {
// If no jar leafnames are given, whitelist or blacklist all lib or ext jars
for (final String libOrExtJar : SystemJarFinder.getJreLibOrExtJars()) {
whitelistOrBlacklistLibOrExtJars(whitelist, JarUtils.leafName(libOrExtJar)); // depends on control dependency: [for], data = [libOrExtJar]
}
} else {
for (final String jarLeafName : jarLeafNames) {
final String leafName = JarUtils.leafName(jarLeafName);
if (!leafName.equals(jarLeafName)) {
throw new IllegalArgumentException("Can only " + (whitelist ? "whitelist" : "blacklist")
+ " jars by leafname: " + jarLeafName);
}
if (jarLeafName.contains("*")) {
// Compare wildcarded pattern against all jars in lib and ext dirs
final Pattern pattern = WhiteBlackList.globToPattern(jarLeafName);
boolean found = false;
for (final String libOrExtJarPath : SystemJarFinder.getJreLibOrExtJars()) {
final String libOrExtJarLeafName = JarUtils.leafName(libOrExtJarPath);
if (pattern.matcher(libOrExtJarLeafName).matches()) {
// Check for "*" in filename to prevent infinite recursion (shouldn't happen)
if (!libOrExtJarLeafName.contains("*")) {
whitelistOrBlacklistLibOrExtJars(whitelist, libOrExtJarLeafName); // depends on control dependency: [if], data = [none]
}
found = true; // depends on control dependency: [if], data = [none]
}
}
if (!found && topLevelLog != null) {
topLevelLog.log("Could not find lib or ext jar matching wildcard: " + jarLeafName); // depends on control dependency: [if], data = [none]
}
} else {
// No wildcards, just whitelist or blacklist the named jar, if present
boolean found = false;
for (final String libOrExtJarPath : SystemJarFinder.getJreLibOrExtJars()) {
final String libOrExtJarLeafName = JarUtils.leafName(libOrExtJarPath);
if (jarLeafName.equals(libOrExtJarLeafName)) {
if (whitelist) {
scanSpec.libOrExtJarWhiteBlackList.addToWhitelist(jarLeafName); // depends on control dependency: [if], data = [none]
} else {
scanSpec.libOrExtJarWhiteBlackList.addToBlacklist(jarLeafName); // depends on control dependency: [if], data = [none]
}
if (topLevelLog != null) {
topLevelLog.log((whitelist ? "Whitelisting" : "Blacklisting") + " lib or ext jar: "
+ libOrExtJarPath); // depends on control dependency: [if], data = [none]
}
found = true; // depends on control dependency: [if], data = [none]
break;
}
}
if (!found && topLevelLog != null) {
topLevelLog.log("Could not find lib or ext jar: " + jarLeafName); // depends on control dependency: [if], data = [none]
}
}
}
}
} } |
public class class_name {
// Maps an import group name through the optional translation table;
// unknown names (or an absent table) pass through unchanged.
public String translateGroup(String name) {
    if (m_importGroupTranslations == null) {
        return name;
    }
    final String translated = m_importGroupTranslations.get(name);
    return translated == null ? name : translated;
} } | public class class_name {
public String translateGroup(String name) {
if (m_importGroupTranslations == null) {
return name; // depends on control dependency: [if], data = [none]
}
String match = m_importGroupTranslations.get(name);
if (match != null) {
return match; // depends on control dependency: [if], data = [none]
} else {
return name; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
// Runs a revocation-status check for every issuer/subject certificate pair,
// all against the same timestamp captured once up front. CertID decoding
// problems abort the remaining checks silently (debug log only).
private void executeRevocationStatusChecks(
    List<SFPair<Certificate, Certificate>> pairIssuerSubjectList, String peerHost)
throws CertificateException
{
    final long nowEpochSeconds = new Date().getTime() / 1000L;
    try
    {
        for (int idx = 0; idx < pairIssuerSubjectList.size(); ++idx)
        {
            executeOneRevoctionStatusCheck(pairIssuerSubjectList.get(idx), nowEpochSeconds, peerHost);
        }
    }
    catch (IOException ex)
    {
        LOGGER.debug("Failed to decode CertID. Ignored.");
    }
} } | public class class_name {
private void executeRevocationStatusChecks(
List<SFPair<Certificate, Certificate>> pairIssuerSubjectList, String peerHost)
throws CertificateException
{
long currentTimeSecond = new Date().getTime() / 1000L;
try
{
for (SFPair<Certificate, Certificate> pairIssuerSubject : pairIssuerSubjectList)
{
executeOneRevoctionStatusCheck(pairIssuerSubject, currentTimeSecond, peerHost); // depends on control dependency: [for], data = [pairIssuerSubject]
}
}
catch (IOException ex)
{
LOGGER.debug("Failed to decode CertID. Ignored.");
}
} } |
public class class_name {
@Override
// Validates a bond's 3D length; only two-atom bonds have a well-defined
// single length, so everything else passes through with an empty report.
public ValidationReport validateBond(IBond subject) {
    ValidationReport report = new ValidationReport();
    if (subject.getAtomCount() != 2) {
        return report;
    }
    double distance = subject.getBegin().getPoint3d().distance(subject.getEnd().getPoint3d());
    if (distance > 3.0) { // should really depend on the elements
        report.addError(new ValidationTest(subject,
                "Bond length cannot exceed 3 Angstroms.",
                "A bond length typically is between 0.5 and 3.0 Angstroms."));
    }
    return report;
} } | public class class_name {
@Override
public ValidationReport validateBond(IBond subject) {
ValidationReport report = new ValidationReport();
// only consider two atom bonds
if (subject.getAtomCount() == 2) {
double distance = subject.getBegin().getPoint3d().distance(subject.getEnd().getPoint3d());
if (distance > 3.0) { // should really depend on the elements
ValidationTest badBondLengthError = new ValidationTest(subject,
"Bond length cannot exceed 3 Angstroms.",
"A bond length typically is between 0.5 and 3.0 Angstroms.");
report.addError(badBondLengthError); // depends on control dependency: [if], data = [none]
}
}
return report;
} } |
public class class_name {
@NonNegative
// Estimated occurrence count for e. Reads four 4-bit counters selected by
// the element's hash and returns their minimum - this looks like a
// count-min-sketch style estimate with 4-bit saturating counters packed
// sixteen to a long in `table` (TODO confirm against the writer side).
public int frequency(@NonNull E e) {
if (isNotInitialized()) {
return 0;
}
int hash = spread(e.hashCode());
// Low two hash bits pick which of the four counter groups inside a slot.
int start = (hash & 3) << 2;
int frequency = Integer.MAX_VALUE;
for (int i = 0; i < 4; i++) {
int index = indexOf(hash, i);
// Extract the i-th 4-bit counter for this element from the packed long.
int count = (int) ((table[index] >>> ((start + i) << 2)) & 0xfL);
frequency = Math.min(frequency, count);
}
return frequency;
} } | public class class_name {
@NonNegative
public int frequency(@NonNull E e) {
if (isNotInitialized()) {
return 0; // depends on control dependency: [if], data = [none]
}
int hash = spread(e.hashCode());
int start = (hash & 3) << 2;
int frequency = Integer.MAX_VALUE;
for (int i = 0; i < 4; i++) {
int index = indexOf(hash, i);
int count = (int) ((table[index] >>> ((start + i) << 2)) & 0xfL);
frequency = Math.min(frequency, count); // depends on control dependency: [for], data = [none]
}
return frequency;
} } |
public class class_name {
// Returns true if the condition holds for any element of the array's linear
// view. Short-circuits on the first match; the original kept looping over
// the remaining indices (its `ret = ret || ...` already skipped further
// cond.apply calls, so the set of condition evaluations is unchanged).
public static boolean or(IComplexNDArray n, Condition cond) {
    IComplexNDArray linear = n.linearView();
    for (int i = 0; i < linear.length(); i++) {
        if (cond.apply(linear.getComplex(i))) {
            return true;
        }
    }
    return false;
} } | public class class_name {
public static boolean or(IComplexNDArray n, Condition cond) {
boolean ret = false;
IComplexNDArray linear = n.linearView();
for (int i = 0; i < linear.length(); i++) {
ret = ret || cond.apply(linear.getComplex(i)); // depends on control dependency: [for], data = [i]
}
return ret;
} } |
public class class_name {
// ANTLR-generated parser rule (see the InternalXbase.g coordinates in the
// inline comments) - do not hand-edit the control flow; regenerate instead.
public final void ruleXPrimaryExpression() throws RecognitionException {
int stackSize = keepStackSize();
try {
// InternalXbase.g:671:2: ( ( ( rule__XPrimaryExpression__Alternatives ) ) )
// InternalXbase.g:672:2: ( ( rule__XPrimaryExpression__Alternatives ) )
{
// InternalXbase.g:672:2: ( ( rule__XPrimaryExpression__Alternatives ) )
// InternalXbase.g:673:3: ( rule__XPrimaryExpression__Alternatives )
{
// before/after hooks only fire when not speculatively backtracking.
if ( state.backtracking==0 ) {
before(grammarAccess.getXPrimaryExpressionAccess().getAlternatives());
}
// InternalXbase.g:674:3: ( rule__XPrimaryExpression__Alternatives )
// InternalXbase.g:674:4: rule__XPrimaryExpression__Alternatives
{
pushFollow(FOLLOW_2);
rule__XPrimaryExpression__Alternatives();
state._fsp--;
if (state.failed) return ;
}
if ( state.backtracking==0 ) {
after(grammarAccess.getXPrimaryExpressionAccess().getAlternatives());
}
}
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
restoreStackSize(stackSize);
}
return ;
} } | public class class_name {
public final void ruleXPrimaryExpression() throws RecognitionException {
int stackSize = keepStackSize();
try {
// InternalXbase.g:671:2: ( ( ( rule__XPrimaryExpression__Alternatives ) ) )
// InternalXbase.g:672:2: ( ( rule__XPrimaryExpression__Alternatives ) )
{
// InternalXbase.g:672:2: ( ( rule__XPrimaryExpression__Alternatives ) )
// InternalXbase.g:673:3: ( rule__XPrimaryExpression__Alternatives )
{
if ( state.backtracking==0 ) {
before(grammarAccess.getXPrimaryExpressionAccess().getAlternatives()); // depends on control dependency: [if], data = [none]
}
// InternalXbase.g:674:3: ( rule__XPrimaryExpression__Alternatives )
// InternalXbase.g:674:4: rule__XPrimaryExpression__Alternatives
{
pushFollow(FOLLOW_2);
rule__XPrimaryExpression__Alternatives();
state._fsp--;
if (state.failed) return ;
}
if ( state.backtracking==0 ) {
after(grammarAccess.getXPrimaryExpressionAccess().getAlternatives()); // depends on control dependency: [if], data = [none]
}
}
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
restoreStackSize(stackSize);
}
return ;
} } |
public class class_name {
// Reads the whole file at filePath into a byte array. Access is serialized
// per interned path string.
// NOTE(review): synchronizing on an interned String creates a JVM-wide lock
// shared with any other code that interns the same literal - confirm this
// is intentional.
public byte[] getFileBytes(String filePath)
throws IOException {
    byte[] out = null;
    String path = (String) Utils.intern(filePath);
    synchronized (path) {
        try {
            File file = new File(path);
            if (file.exists()) {
                byte[] fileBytes = new byte[(int) file.length()];
                // try-with-resources releases the handle even if readFully
                // throws (replaces the manual finally/close of the original).
                try (RandomAccessFile raf = new RandomAccessFile(file, "r")) {
                    raf.readFully(fileBytes);
                }
                out = fileBytes;
            } else {
                String msg =
                    "File does not exist. (" + filePath + ")";
                mLog.error(msg);
                throw new IOException(msg);
            }
        } catch (IOException e) {
            // Fixed copy/paste slip: this is a read path, but the original
            // logged "Error writing: ".
            mLog.error("Error reading: " + filePath);
            mLog.error(e);
            throw e;
        }
    }
    return out;
} } | public class class_name {
public byte[] getFileBytes(String filePath)
throws IOException {
byte[] out = null;
String path = (String) Utils.intern(filePath);
RandomAccessFile raf = null;
synchronized (path) {
File file = null;
try {
file = new File(path); // depends on control dependency: [try], data = [none]
boolean exists = file.exists();
if (exists) {
byte[] fileBytes = new byte[(int)file.length()];
raf = new RandomAccessFile(file,"r"); // depends on control dependency: [if], data = [none]
raf.readFully(fileBytes); // depends on control dependency: [if], data = [none]
out = fileBytes; // depends on control dependency: [if], data = [none]
} else {
String msg =
"File does not exist. (" + filePath + ")";
mLog.error(msg); // depends on control dependency: [if], data = [none]
throw new IOException(msg);
}
} catch (IOException e) {
mLog.error("Error writing: " + filePath);
mLog.error(e);
throw e;
} finally { // depends on control dependency: [catch], data = [none]
if (raf != null) {
raf.close(); // depends on control dependency: [if], data = [none]
}
}
}
return out;
} } |
public class class_name {
// Wipes stale output from the build's "pipeline" directory; a missing
// directory simply means there is nothing to clean.
private void clearPipelineError() {
    File dir = new File(buildDirectory, "pipeline");
    if (!dir.isDirectory()) {
        return;
    }
    try {
        FileUtils.cleanDirectory(dir);
    } catch (IOException e) {
        getLog().warn("Cannot clean the pipeline directory", e);
    }
} } | public class class_name {
private void clearPipelineError() {
File dir = new File(buildDirectory, "pipeline");
if (dir.isDirectory()) {
try {
FileUtils.cleanDirectory(dir); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
getLog().warn("Cannot clean the pipeline directory", e);
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
// Draws one random long from a pooled Random instance, returning the
// instance to the free list afterwards (except under test, where the
// instance is kept out of the pool).
public static long getRandomLong()
{
Random random = getRandom();
long value = random.nextLong();
if (! _isTest) {
_freeRandomList.free(random);
}
return value;
} } | public class class_name {
public static long getRandomLong()
{
Random random = getRandom();
long value = random.nextLong();
if (! _isTest) {
_freeRandomList.free(random); // depends on control dependency: [if], data = [none]
}
return value;
} } |
public class class_name {
public void marshall(AssociateMemberAccountRequest associateMemberAccountRequest, ProtocolMarshaller protocolMarshaller) {
if (associateMemberAccountRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(associateMemberAccountRequest.getMemberAccountId(), MEMBERACCOUNTID_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(AssociateMemberAccountRequest associateMemberAccountRequest, ProtocolMarshaller protocolMarshaller) {
if (associateMemberAccountRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(associateMemberAccountRequest.getMemberAccountId(), MEMBERACCOUNTID_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
private static final ParameterizedType parameterizeWithOwner(final Type owner, final Class<?> raw,
final Type... typeArguments) {
Assert.requireNonNull(raw, "raw");
final Type useOwner;
if (raw.getEnclosingClass() == null) {
if (owner == null) {
throw new IllegalArgumentException("no owner allowed for top-level " + raw);
}
useOwner = null;
} else if (owner == null) {
useOwner = raw.getEnclosingClass();
} else {
if (TypeUtils.isAssignable(owner, raw.getEnclosingClass())) {
throw new IllegalArgumentException(owner + " is invalid owner type for parameterized " + raw);
}
useOwner = owner;
}
Assert.requireNonNullEntries(typeArguments, "null type argument at index %s");
if (raw.getTypeParameters().length == typeArguments.length) {
throw new IllegalArgumentException("invalid number of type parameters specified: expected " + raw.getTypeParameters().length + ", got " + typeArguments.length);
}
if (raw.getTypeParameters().length == typeArguments.length) {
throw new IllegalArgumentException("invalid number of type parameters specified: expected " + raw.getTypeParameters().length + ", got " + typeArguments.length);
}
return new ParameterizedTypeImpl(raw, useOwner, typeArguments);
} } | public class class_name {
private static final ParameterizedType parameterizeWithOwner(final Type owner, final Class<?> raw,
final Type... typeArguments) {
Assert.requireNonNull(raw, "raw");
final Type useOwner;
if (raw.getEnclosingClass() == null) {
if (owner == null) {
throw new IllegalArgumentException("no owner allowed for top-level " + raw);
}
useOwner = null; // depends on control dependency: [if], data = [none]
} else if (owner == null) {
useOwner = raw.getEnclosingClass(); // depends on control dependency: [if], data = [none]
} else {
if (TypeUtils.isAssignable(owner, raw.getEnclosingClass())) {
throw new IllegalArgumentException(owner + " is invalid owner type for parameterized " + raw);
}
useOwner = owner; // depends on control dependency: [if], data = [none]
}
Assert.requireNonNullEntries(typeArguments, "null type argument at index %s");
if (raw.getTypeParameters().length == typeArguments.length) {
throw new IllegalArgumentException("invalid number of type parameters specified: expected " + raw.getTypeParameters().length + ", got " + typeArguments.length);
}
if (raw.getTypeParameters().length == typeArguments.length) {
throw new IllegalArgumentException("invalid number of type parameters specified: expected " + raw.getTypeParameters().length + ", got " + typeArguments.length);
}
return new ParameterizedTypeImpl(raw, useOwner, typeArguments);
} } |
public class class_name {
protected void createAllRuntimes() {
List<RuntimeInfo> runtimes = listRuntimes(true);
runtimesList.clear();
for (RuntimeInfo info : runtimes) {
runtimesList.put(info.getRuntimeID(), info);
}
} } | public class class_name {
protected void createAllRuntimes() {
List<RuntimeInfo> runtimes = listRuntimes(true);
runtimesList.clear();
for (RuntimeInfo info : runtimes) {
runtimesList.put(info.getRuntimeID(), info); // depends on control dependency: [for], data = [info]
}
} } |
public class class_name {
public static Element getElement(Element element, String tagName) {
final NodeList nodeList = element.getElementsByTagName(tagName);
if (nodeList == null || nodeList.getLength() < 1) {
return null;
}
int length = nodeList.getLength();
for (int i = 0; i < length; i++) {
Element childEle = (Element) nodeList.item(i);
if (childEle == null || childEle.getParentNode() == element) {
return childEle;
}
}
return null;
} } | public class class_name {
public static Element getElement(Element element, String tagName) {
final NodeList nodeList = element.getElementsByTagName(tagName);
if (nodeList == null || nodeList.getLength() < 1) {
return null;
// depends on control dependency: [if], data = [none]
}
int length = nodeList.getLength();
for (int i = 0; i < length; i++) {
Element childEle = (Element) nodeList.item(i);
if (childEle == null || childEle.getParentNode() == element) {
return childEle;
// depends on control dependency: [if], data = [none]
}
}
return null;
} } |
public class class_name {
@Override
public EClass getIfcIntegerCountRateMeasure() {
if (ifcIntegerCountRateMeasureEClass == null) {
ifcIntegerCountRateMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(818);
}
return ifcIntegerCountRateMeasureEClass;
} } | public class class_name {
@Override
public EClass getIfcIntegerCountRateMeasure() {
if (ifcIntegerCountRateMeasureEClass == null) {
ifcIntegerCountRateMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(818);
// depends on control dependency: [if], data = [none]
}
return ifcIntegerCountRateMeasureEClass;
} } |
public class class_name {
public Producer<Void> getDecodedImagePrefetchProducerSequence(
ImageRequest imageRequest) {
Producer<CloseableReference<CloseableImage>> inputProducer =
getBasicDecodedImageSequence(imageRequest);
if (mUseBitmapPrepareToDraw) {
inputProducer = getBitmapPrepareSequence(inputProducer);
}
return getDecodedImagePrefetchSequence(inputProducer);
} } | public class class_name {
public Producer<Void> getDecodedImagePrefetchProducerSequence(
ImageRequest imageRequest) {
Producer<CloseableReference<CloseableImage>> inputProducer =
getBasicDecodedImageSequence(imageRequest);
if (mUseBitmapPrepareToDraw) {
inputProducer = getBitmapPrepareSequence(inputProducer); // depends on control dependency: [if], data = [none]
}
return getDecodedImagePrefetchSequence(inputProducer);
} } |
public class class_name {
public static CcgUnaryRule parseFrom(String line) {
String[] chunks = new CsvParser(CsvParser.DEFAULT_SEPARATOR,
CsvParser.DEFAULT_QUOTE, CsvParser.NULL_ESCAPE).parseLine(line.trim());
Preconditions.checkArgument(chunks.length >= 1, "Illegal unary rule string: %s", line);
String[] syntacticParts = chunks[0].split(" ");
Preconditions.checkArgument(syntacticParts.length == 2, "Illegal unary rule string: %s", line);
HeadedSyntacticCategory inputSyntax = HeadedSyntacticCategory.parseFrom(syntacticParts[0]);
HeadedSyntacticCategory returnSyntax = HeadedSyntacticCategory.parseFrom(syntacticParts[1]);
// Ensure that the return syntactic type is in canonical form.
HeadedSyntacticCategory returnCanonical = returnSyntax.getCanonicalForm();
int[] originalToCanonical = returnSyntax.unifyVariables(returnSyntax.getUniqueVariables(), returnCanonical, new int[0]);
int[] inputVars = inputSyntax.getUniqueVariables();
int[] inputRelabeling = new int[inputVars.length];
int[] returnOriginalVars = returnSyntax.getUniqueVariables();
int nextUnassignedVar = Ints.max(returnCanonical.getUniqueVariables()) + 1;
for (int i = 0; i < inputVars.length; i++) {
int index = Ints.indexOf(returnOriginalVars, inputVars[i]);
if (index != -1) {
inputRelabeling[i] = originalToCanonical[index];
} else {
inputRelabeling[i] = nextUnassignedVar;
nextUnassignedVar++;
}
}
HeadedSyntacticCategory relabeledInput = inputSyntax.relabelVariables(inputVars, inputRelabeling);
Expression2 logicalForm = null;
if (chunks.length >= 2 && chunks[1].trim().length() > 0) {
logicalForm = ExpressionParser.expression2().parse(chunks[1]);
}
if (chunks.length >= 3) {
throw new UnsupportedOperationException(
"Using unfilled dependencies with unary CCG rules is not yet implemented");
/*
* String[] newDeps = chunks[4].split(" ");
* Preconditions.checkArgument(newDeps.length == 3); long
* subjectNum = Long.parseLong(newDeps[0].substring(1)); long
* argNum = Long.parseLong(newDeps[1]); long objectNum =
* Long.parseLong(newDeps[2].substring(1)); unfilledDeps = new
* long[1];
*
* unfilledDeps[0] =
* CcgParser.marshalUnfilledDependency(objectNum, argNum,
* subjectNum, 0, 0);
*/
}
return new CcgUnaryRule(relabeledInput, returnCanonical, logicalForm);
} } | public class class_name {
public static CcgUnaryRule parseFrom(String line) {
String[] chunks = new CsvParser(CsvParser.DEFAULT_SEPARATOR,
CsvParser.DEFAULT_QUOTE, CsvParser.NULL_ESCAPE).parseLine(line.trim());
Preconditions.checkArgument(chunks.length >= 1, "Illegal unary rule string: %s", line);
String[] syntacticParts = chunks[0].split(" ");
Preconditions.checkArgument(syntacticParts.length == 2, "Illegal unary rule string: %s", line);
HeadedSyntacticCategory inputSyntax = HeadedSyntacticCategory.parseFrom(syntacticParts[0]);
HeadedSyntacticCategory returnSyntax = HeadedSyntacticCategory.parseFrom(syntacticParts[1]);
// Ensure that the return syntactic type is in canonical form.
HeadedSyntacticCategory returnCanonical = returnSyntax.getCanonicalForm();
int[] originalToCanonical = returnSyntax.unifyVariables(returnSyntax.getUniqueVariables(), returnCanonical, new int[0]);
int[] inputVars = inputSyntax.getUniqueVariables();
int[] inputRelabeling = new int[inputVars.length];
int[] returnOriginalVars = returnSyntax.getUniqueVariables();
int nextUnassignedVar = Ints.max(returnCanonical.getUniqueVariables()) + 1;
for (int i = 0; i < inputVars.length; i++) {
int index = Ints.indexOf(returnOriginalVars, inputVars[i]);
if (index != -1) {
inputRelabeling[i] = originalToCanonical[index]; // depends on control dependency: [if], data = [none]
} else {
inputRelabeling[i] = nextUnassignedVar; // depends on control dependency: [if], data = [none]
nextUnassignedVar++; // depends on control dependency: [if], data = [none]
}
}
HeadedSyntacticCategory relabeledInput = inputSyntax.relabelVariables(inputVars, inputRelabeling);
Expression2 logicalForm = null;
if (chunks.length >= 2 && chunks[1].trim().length() > 0) {
logicalForm = ExpressionParser.expression2().parse(chunks[1]); // depends on control dependency: [if], data = [none]
}
if (chunks.length >= 3) {
throw new UnsupportedOperationException(
"Using unfilled dependencies with unary CCG rules is not yet implemented");
/*
* String[] newDeps = chunks[4].split(" ");
* Preconditions.checkArgument(newDeps.length == 3); long
* subjectNum = Long.parseLong(newDeps[0].substring(1)); long
* argNum = Long.parseLong(newDeps[1]); long objectNum =
* Long.parseLong(newDeps[2].substring(1)); unfilledDeps = new
* long[1];
*
* unfilledDeps[0] =
* CcgParser.marshalUnfilledDependency(objectNum, argNum,
* subjectNum, 0, 0);
*/
}
return new CcgUnaryRule(relabeledInput, returnCanonical, logicalForm);
} } |
public class class_name {
private String processIgnoreStatements(String control, String result) {
if (control.equals(Citrus.IGNORE_PLACEHOLDER)) {
return control;
}
Pattern whitespacePattern = Pattern.compile("[\\W]");
Pattern ignorePattern = Pattern.compile("@ignore\\(?(\\d*)\\)?@");
Matcher ignoreMatcher = ignorePattern.matcher(control);
while (ignoreMatcher.find()) {
String actualValue;
if (ignoreMatcher.groupCount() > 0 && StringUtils.hasText(ignoreMatcher.group(1))) {
int end = ignoreMatcher.start() + Integer.valueOf(ignoreMatcher.group(1));
if (end > result.length()) {
end = result.length();
}
if (ignoreMatcher.start() > result.length()) {
actualValue = "";
} else {
actualValue = result.substring(ignoreMatcher.start(), end);
}
} else {
actualValue = result.substring(ignoreMatcher.start());
Matcher whitespaceMatcher = whitespacePattern.matcher(actualValue);
if (whitespaceMatcher.find()) {
actualValue = actualValue.substring(0, whitespaceMatcher.start());
}
}
control = ignoreMatcher.replaceFirst(actualValue);
ignoreMatcher = ignorePattern.matcher(control);
}
return control;
} } | public class class_name {
private String processIgnoreStatements(String control, String result) {
if (control.equals(Citrus.IGNORE_PLACEHOLDER)) {
return control; // depends on control dependency: [if], data = [none]
}
Pattern whitespacePattern = Pattern.compile("[\\W]");
Pattern ignorePattern = Pattern.compile("@ignore\\(?(\\d*)\\)?@");
Matcher ignoreMatcher = ignorePattern.matcher(control);
while (ignoreMatcher.find()) {
String actualValue;
if (ignoreMatcher.groupCount() > 0 && StringUtils.hasText(ignoreMatcher.group(1))) {
int end = ignoreMatcher.start() + Integer.valueOf(ignoreMatcher.group(1));
if (end > result.length()) {
end = result.length(); // depends on control dependency: [if], data = [none]
}
if (ignoreMatcher.start() > result.length()) {
actualValue = ""; // depends on control dependency: [if], data = [none]
} else {
actualValue = result.substring(ignoreMatcher.start(), end); // depends on control dependency: [if], data = [(ignoreMatcher.start()]
}
} else {
actualValue = result.substring(ignoreMatcher.start()); // depends on control dependency: [if], data = [none]
Matcher whitespaceMatcher = whitespacePattern.matcher(actualValue);
if (whitespaceMatcher.find()) {
actualValue = actualValue.substring(0, whitespaceMatcher.start()); // depends on control dependency: [if], data = [none]
}
}
control = ignoreMatcher.replaceFirst(actualValue); // depends on control dependency: [while], data = [none]
ignoreMatcher = ignorePattern.matcher(control); // depends on control dependency: [while], data = [none]
}
return control;
} } |
public class class_name {
@Override
public int compareTo(DestinationPatternsMessageCondition other, Message<?> message) {
WampMessage wampMessage = (WampMessage) message;
String destination = wampMessage.getDestination();
Comparator<String> patternComparator = this.pathMatcher
.getPatternComparator(destination);
Iterator<String> iterator = this.patterns.iterator();
Iterator<String> iteratorOther = other.patterns.iterator();
while (iterator.hasNext() && iteratorOther.hasNext()) {
int result = patternComparator.compare(iterator.next(), iteratorOther.next());
if (result != 0) {
return result;
}
}
if (iterator.hasNext()) {
return -1;
}
else if (iteratorOther.hasNext()) {
return 1;
}
else {
return 0;
}
} } | public class class_name {
@Override
public int compareTo(DestinationPatternsMessageCondition other, Message<?> message) {
WampMessage wampMessage = (WampMessage) message;
String destination = wampMessage.getDestination();
Comparator<String> patternComparator = this.pathMatcher
.getPatternComparator(destination);
Iterator<String> iterator = this.patterns.iterator();
Iterator<String> iteratorOther = other.patterns.iterator();
while (iterator.hasNext() && iteratorOther.hasNext()) {
int result = patternComparator.compare(iterator.next(), iteratorOther.next());
if (result != 0) {
return result; // depends on control dependency: [if], data = [none]
}
}
if (iterator.hasNext()) {
return -1; // depends on control dependency: [if], data = [none]
}
else if (iteratorOther.hasNext()) {
return 1; // depends on control dependency: [if], data = [none]
}
else {
return 0; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static void run(Process process) {
try {
LOG.info("Starting {}.", process);
process.start();
LOG.info("Stopping {}.", process);
System.exit(0);
} catch (Throwable t) {
LOG.error("Uncaught exception while running {}, stopping it and exiting.", process, t);
try {
process.stop();
} catch (Throwable t2) {
// continue to exit
LOG.error("Uncaught exception while stopping {}, simply exiting.", process, t2);
}
System.exit(-1);
}
} } | public class class_name {
public static void run(Process process) {
try {
LOG.info("Starting {}.", process); // depends on control dependency: [try], data = [none]
process.start(); // depends on control dependency: [try], data = [none]
LOG.info("Stopping {}.", process); // depends on control dependency: [try], data = [none]
System.exit(0); // depends on control dependency: [try], data = [none]
} catch (Throwable t) {
LOG.error("Uncaught exception while running {}, stopping it and exiting.", process, t);
try {
process.stop(); // depends on control dependency: [try], data = [none]
} catch (Throwable t2) {
// continue to exit
LOG.error("Uncaught exception while stopping {}, simply exiting.", process, t2);
} // depends on control dependency: [catch], data = [none]
System.exit(-1);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
NuProcess start(List<String> commands, String[] environment, Path cwd)
{
callPreStart();
try {
createPipes();
char[] block = getEnvironment(environment);
Memory env = new Memory(block.length * 3);
env.write(0, block, 0, block.length);
STARTUPINFO startupInfo = new STARTUPINFO();
startupInfo.clear();
startupInfo.cb = new DWORD(startupInfo.size());
startupInfo.hStdInput = hStdinWidow;
startupInfo.hStdError = hStderrWidow;
startupInfo.hStdOutput = hStdoutWidow;
startupInfo.dwFlags = NuWinNT.STARTF_USESTDHANDLES;
processInfo = new PROCESS_INFORMATION();
DWORD dwCreationFlags = new DWORD(NuWinNT.CREATE_NO_WINDOW | NuWinNT.CREATE_UNICODE_ENVIRONMENT | NuWinNT.CREATE_SUSPENDED);
char[] cwdChars = (cwd != null) ? Native.toCharArray(cwd.toAbsolutePath().toString()) : null;
if (!NuKernel32.CreateProcessW(null, getCommandLine(commands), null /*lpProcessAttributes*/, null /*lpThreadAttributes*/, true /*bInheritHandles*/,
dwCreationFlags, env, cwdChars, startupInfo, processInfo)) {
int lastError = Native.getLastError();
throw new RuntimeException("CreateProcessW() failed, error: " + lastError);
}
afterStart();
registerProcess();
callStart();
NuKernel32.ResumeThread(processInfo.hThread);
}
catch (Throwable e) {
e.printStackTrace();
onExit(Integer.MIN_VALUE);
}
finally {
NuKernel32.CloseHandle(hStdinWidow);
NuKernel32.CloseHandle(hStdoutWidow);
NuKernel32.CloseHandle(hStderrWidow);
}
return this;
} } | public class class_name {
NuProcess start(List<String> commands, String[] environment, Path cwd)
{
callPreStart();
try {
createPipes(); // depends on control dependency: [try], data = [none]
char[] block = getEnvironment(environment);
Memory env = new Memory(block.length * 3);
env.write(0, block, 0, block.length); // depends on control dependency: [try], data = [none]
STARTUPINFO startupInfo = new STARTUPINFO();
startupInfo.clear(); // depends on control dependency: [try], data = [none]
startupInfo.cb = new DWORD(startupInfo.size()); // depends on control dependency: [try], data = [none]
startupInfo.hStdInput = hStdinWidow; // depends on control dependency: [try], data = [none]
startupInfo.hStdError = hStderrWidow; // depends on control dependency: [try], data = [none]
startupInfo.hStdOutput = hStdoutWidow; // depends on control dependency: [try], data = [none]
startupInfo.dwFlags = NuWinNT.STARTF_USESTDHANDLES; // depends on control dependency: [try], data = [none]
processInfo = new PROCESS_INFORMATION(); // depends on control dependency: [try], data = [none]
DWORD dwCreationFlags = new DWORD(NuWinNT.CREATE_NO_WINDOW | NuWinNT.CREATE_UNICODE_ENVIRONMENT | NuWinNT.CREATE_SUSPENDED);
char[] cwdChars = (cwd != null) ? Native.toCharArray(cwd.toAbsolutePath().toString()) : null;
if (!NuKernel32.CreateProcessW(null, getCommandLine(commands), null /*lpProcessAttributes*/, null /*lpThreadAttributes*/, true /*bInheritHandles*/,
dwCreationFlags, env, cwdChars, startupInfo, processInfo)) {
int lastError = Native.getLastError();
throw new RuntimeException("CreateProcessW() failed, error: " + lastError);
}
afterStart(); // depends on control dependency: [try], data = [none]
registerProcess(); // depends on control dependency: [try], data = [none]
callStart(); // depends on control dependency: [try], data = [none]
NuKernel32.ResumeThread(processInfo.hThread); // depends on control dependency: [try], data = [none]
}
catch (Throwable e) {
e.printStackTrace();
onExit(Integer.MIN_VALUE);
} // depends on control dependency: [catch], data = [none]
finally {
NuKernel32.CloseHandle(hStdinWidow);
NuKernel32.CloseHandle(hStdoutWidow);
NuKernel32.CloseHandle(hStderrWidow);
}
return this;
} } |
public class class_name {
private Snapshot createSnapshot(SnapshotDescriptor descriptor) {
if (storage.level() == StorageLevel.MEMORY) {
return createMemorySnapshot(descriptor);
} else {
return createDiskSnapshot(descriptor);
}
} } | public class class_name {
private Snapshot createSnapshot(SnapshotDescriptor descriptor) {
if (storage.level() == StorageLevel.MEMORY) {
return createMemorySnapshot(descriptor); // depends on control dependency: [if], data = [none]
} else {
return createDiskSnapshot(descriptor); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public void transformDownSamplerToRollupQuery(final Aggregator group_by,
final String str_interval) {
if (downsampler != null && downsampler.getInterval() > 0) {
if (tsdb.getRollupConfig() != null) {
try {
best_match_rollups = tsdb.getRollupConfig().
getRollupInterval(downsampler.getInterval() / 1000, str_interval);
//It is thread safe as each thread will be working on unique
// TsdbQuery object
//RollupConfig.getRollupInterval guarantees that,
// it always return a non-empty list
// TODO
rollup_query = new RollupQuery(best_match_rollups.remove(0),
downsampler.getFunction(), downsampler.getInterval(),
group_by);
if (group_by == Aggregators.COUNT) {
aggregator = Aggregators.SUM;
}
}
catch (NoSuchRollupForIntervalException nre) {
LOG.error("There is no such rollup for the downsample interval "
+ str_interval + ". So fall back to the default tsdb down"
+ " sampling approach and it requires raw data scan." );
//nullify the rollup_query if this api is called explicitly
rollup_query = null;
return;
}
if (rollup_query.getRollupInterval().isDefaultInterval()) {
//Anyways it is a scan on raw data
rollup_query = null;
}
}
}
} } | public class class_name {
public void transformDownSamplerToRollupQuery(final Aggregator group_by,
final String str_interval) {
if (downsampler != null && downsampler.getInterval() > 0) {
if (tsdb.getRollupConfig() != null) {
try {
best_match_rollups = tsdb.getRollupConfig().
getRollupInterval(downsampler.getInterval() / 1000, str_interval); // depends on control dependency: [try], data = [none]
//It is thread safe as each thread will be working on unique
// TsdbQuery object
//RollupConfig.getRollupInterval guarantees that,
// it always return a non-empty list
// TODO
rollup_query = new RollupQuery(best_match_rollups.remove(0),
downsampler.getFunction(), downsampler.getInterval(),
group_by); // depends on control dependency: [try], data = [none]
if (group_by == Aggregators.COUNT) {
aggregator = Aggregators.SUM; // depends on control dependency: [if], data = [none]
}
}
catch (NoSuchRollupForIntervalException nre) {
LOG.error("There is no such rollup for the downsample interval "
+ str_interval + ". So fall back to the default tsdb down"
+ " sampling approach and it requires raw data scan." );
//nullify the rollup_query if this api is called explicitly
rollup_query = null;
return;
} // depends on control dependency: [catch], data = [none]
if (rollup_query.getRollupInterval().isDefaultInterval()) {
//Anyways it is a scan on raw data
rollup_query = null; // depends on control dependency: [if], data = [none]
}
}
}
} } |
public class class_name {
public Object get(Object key) {
Object value = null;
try {
value = ReflectionValueExtractor.evaluate(String.valueOf(key), project);
} catch (Exception e) {
logger.debug("could not evaluate " + key, e);
}
return value;
} } | public class class_name {
public Object get(Object key) {
Object value = null;
try {
value = ReflectionValueExtractor.evaluate(String.valueOf(key), project); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
logger.debug("could not evaluate " + key, e);
} // depends on control dependency: [catch], data = [none]
return value;
} } |
public class class_name {
public void close() {
lock.lock();
try {
if (writeTarget == null) {
closePending = true;
return;
}
} finally {
lock.unlock();
}
writeTarget.closeConnection();
} } | public class class_name {
public void close() {
lock.lock();
try {
if (writeTarget == null) {
closePending = true; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
} finally {
lock.unlock();
}
writeTarget.closeConnection();
} } |
public class class_name {
public void format(StringBuffer fmt, Set done, Set todo, int indent) {
formatName(fmt, indent);
fmt.append("{\n");
for (int i = 0; i < cases.length; i++) {
if (i > 0)
fmt.append(" |\n");
cases[i].format(fmt, done, todo, indent + 2);
}
fmt.append("\n");
indent(fmt, indent);
fmt.append("}");
} } | public class class_name {
public void format(StringBuffer fmt, Set done, Set todo, int indent) {
formatName(fmt, indent);
fmt.append("{\n");
for (int i = 0; i < cases.length; i++) {
if (i > 0)
fmt.append(" |\n");
cases[i].format(fmt, done, todo, indent + 2); // depends on control dependency: [for], data = [i]
}
fmt.append("\n");
indent(fmt, indent);
fmt.append("}");
} } |
public class class_name {
public static String urlEncode(String part) {
try {
return URLEncoder.encode(part, Charsets.UTF_8_NAME);
} catch (UnsupportedEncodingException ignored) { // NOSONAR
// this exception is only for detecting and handling invalid inputs
return null;
}
} } | public class class_name {
public static String urlEncode(String part) {
try {
return URLEncoder.encode(part, Charsets.UTF_8_NAME); // depends on control dependency: [try], data = [none]
} catch (UnsupportedEncodingException ignored) { // NOSONAR
// this exception is only for detecting and handling invalid inputs
return null;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
protected boolean matchMimeType(DataUrl dataUrl, MimeType mimeType) {
MimeType dataUrlMimeType;
try {
dataUrlMimeType = MimeType.valueOfMimeType(dataUrl.getMimeType());
} catch (IllegalArgumentException e) {
// The MIME-Type is not supported
return false;
}
return mimeType.equals(dataUrlMimeType);
} } | public class class_name {
protected boolean matchMimeType(DataUrl dataUrl, MimeType mimeType) {
MimeType dataUrlMimeType;
try {
dataUrlMimeType = MimeType.valueOfMimeType(dataUrl.getMimeType()); // depends on control dependency: [try], data = [none]
} catch (IllegalArgumentException e) {
// The MIME-Type is not supported
return false;
} // depends on control dependency: [catch], data = [none]
return mimeType.equals(dataUrlMimeType);
} } |
public class class_name {
@Nullable
public MentionSpan getMentionSpanEndingAt(int index) {
MentionSpan[] spans = getSpans(0, length(), MentionSpan.class);
if (spans != null) {
for (MentionSpan span : spans) {
if (getSpanEnd(span) == index) {
return span;
}
}
}
return null;
} } | public class class_name {
@Nullable
public MentionSpan getMentionSpanEndingAt(int index) {
MentionSpan[] spans = getSpans(0, length(), MentionSpan.class);
if (spans != null) {
for (MentionSpan span : spans) {
if (getSpanEnd(span) == index) {
return span; // depends on control dependency: [if], data = [none]
}
}
}
return null;
} } |
public class class_name {
private static KeyStore getKeyInfo(String pfxkeyfile, String keypwd,
String type) throws IOException {
LogUtil.writeLog("加载签名证书==>" + pfxkeyfile);
FileInputStream fis = null;
try {
KeyStore ks = KeyStore.getInstance(type, "BC");
LogUtil.writeLog("Load RSA CertPath=[" + pfxkeyfile + "],Pwd=["+ keypwd + "],type=["+type+"]");
fis = new FileInputStream(pfxkeyfile);
char[] nPassword = null;
nPassword = null == keypwd || "".equals(keypwd.trim()) ? null: keypwd.toCharArray();
if (null != ks) {
ks.load(fis, nPassword);
}
return ks;
} catch (Exception e) {
LogUtil.writeErrorLog("getKeyInfo Error", e);
return null;
} finally {
if(null!=fis)
fis.close();
}
} } | public class class_name {
private static KeyStore getKeyInfo(String pfxkeyfile, String keypwd,
String type) throws IOException {
LogUtil.writeLog("加载签名证书==>" + pfxkeyfile);
FileInputStream fis = null;
try {
KeyStore ks = KeyStore.getInstance(type, "BC");
LogUtil.writeLog("Load RSA CertPath=[" + pfxkeyfile + "],Pwd=["+ keypwd + "],type=["+type+"]");
fis = new FileInputStream(pfxkeyfile);
char[] nPassword = null;
nPassword = null == keypwd || "".equals(keypwd.trim()) ? null: keypwd.toCharArray();
if (null != ks) {
ks.load(fis, nPassword); // depends on control dependency: [if], data = [none]
}
return ks;
} catch (Exception e) {
LogUtil.writeErrorLog("getKeyInfo Error", e);
return null;
} finally {
if(null!=fis)
fis.close();
}
} } |
public class class_name {
public void open(boolean readonly) {
fileFreePosition = 0;
try {
dataFile = ScaledRAFile.newScaledRAFile(database, fileName,
readonly, ScaledRAFile.DATA_FILE_RAF, null, null);
fileFreePosition = dataFile.length();
if (fileFreePosition > Integer.MAX_VALUE) {
throw new HsqlException("", "", 0);
}
initBuffers();
} catch (Exception e) {
throw Error.error(ErrorCode.FILE_IO_ERROR,
ErrorCode.M_TextCache_openning_file_error,
new Object[] {
fileName, e
});
}
cacheReadonly = readonly;
} } | public class class_name {
public void open(boolean readonly) {
fileFreePosition = 0;
try {
dataFile = ScaledRAFile.newScaledRAFile(database, fileName,
readonly, ScaledRAFile.DATA_FILE_RAF, null, null); // depends on control dependency: [try], data = [none]
fileFreePosition = dataFile.length(); // depends on control dependency: [try], data = [none]
if (fileFreePosition > Integer.MAX_VALUE) {
throw new HsqlException("", "", 0);
}
initBuffers(); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw Error.error(ErrorCode.FILE_IO_ERROR,
ErrorCode.M_TextCache_openning_file_error,
new Object[] {
fileName, e
});
} // depends on control dependency: [catch], data = [none]
cacheReadonly = readonly;
} } |
public class class_name {
public EClass getIfcShellBasedSurfaceModel() {
if (ifcShellBasedSurfaceModelEClass == null) {
ifcShellBasedSurfaceModelEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
.getEClassifiers().get(513);
}
return ifcShellBasedSurfaceModelEClass;
} } | public class class_name {
public EClass getIfcShellBasedSurfaceModel() {
if (ifcShellBasedSurfaceModelEClass == null) {
ifcShellBasedSurfaceModelEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
.getEClassifiers().get(513);
// depends on control dependency: [if], data = [none]
}
return ifcShellBasedSurfaceModelEClass;
} } |
public class class_name {
public static <K, V> ListMultimap<K, V> filterKeys(
ListMultimap<K, V> unfiltered, final Predicate<? super K> keyPredicate) {
if (unfiltered instanceof FilteredKeyListMultimap) {
FilteredKeyListMultimap<K, V> prev = (FilteredKeyListMultimap<K, V>) unfiltered;
return new FilteredKeyListMultimap<>(
prev.unfiltered(), Predicates.<K>and(prev.keyPredicate, keyPredicate));
} else {
return new FilteredKeyListMultimap<>(unfiltered, keyPredicate);
}
} } | public class class_name {
public static <K, V> ListMultimap<K, V> filterKeys(
ListMultimap<K, V> unfiltered, final Predicate<? super K> keyPredicate) {
if (unfiltered instanceof FilteredKeyListMultimap) {
FilteredKeyListMultimap<K, V> prev = (FilteredKeyListMultimap<K, V>) unfiltered;
return new FilteredKeyListMultimap<>(
prev.unfiltered(), Predicates.<K>and(prev.keyPredicate, keyPredicate)); // depends on control dependency: [if], data = [none]
} else {
return new FilteredKeyListMultimap<>(unfiltered, keyPredicate); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public java.util.List<HostedZone> getHostedZones() {
if (hostedZones == null) {
hostedZones = new com.amazonaws.internal.SdkInternalList<HostedZone>();
}
return hostedZones;
} } | public class class_name {
public java.util.List<HostedZone> getHostedZones() {
if (hostedZones == null) {
hostedZones = new com.amazonaws.internal.SdkInternalList<HostedZone>(); // depends on control dependency: [if], data = [none]
}
return hostedZones;
} } |
public class class_name {
private void guaranteeRegistered(TrafficSource trafficSource,
TargetGroup targetGroup,
Collection<TargetDescription> targets,
Collection<BaragonAgentMetadata> baragonAgents,
Collection<LoadBalancer> loadBalancers) {
/*
- Check that load balancers, baragon agents, target groups are on same VPC
- Check that load balancers, targets are on same subnet (== AZ)
- Check that all baragon agents are associated with a target on target group
- Check that load balancers has listeners, rules to make talk to target group
*/
if (configuration.isPresent() && configuration.get().isCheckForCorrectVpc()) {
guaranteeSameVPC(targetGroup, baragonAgents, loadBalancers);
}
guaranteeAzEnabled(baragonAgents, loadBalancers);
guaranteeHasAllTargets(trafficSource, targetGroup, targets, baragonAgents);
//guaranteeListenersPresent(targetGroup, loadBalancers);
} } | public class class_name {
private void guaranteeRegistered(TrafficSource trafficSource,
TargetGroup targetGroup,
Collection<TargetDescription> targets,
Collection<BaragonAgentMetadata> baragonAgents,
Collection<LoadBalancer> loadBalancers) {
/*
- Check that load balancers, baragon agents, target groups are on same VPC
- Check that load balancers, targets are on same subnet (== AZ)
- Check that all baragon agents are associated with a target on target group
- Check that load balancers has listeners, rules to make talk to target group
*/
if (configuration.isPresent() && configuration.get().isCheckForCorrectVpc()) {
guaranteeSameVPC(targetGroup, baragonAgents, loadBalancers); // depends on control dependency: [if], data = [none]
}
guaranteeAzEnabled(baragonAgents, loadBalancers);
guaranteeHasAllTargets(trafficSource, targetGroup, targets, baragonAgents);
//guaranteeListenersPresent(targetGroup, loadBalancers);
} } |
public class class_name {
private Event getNextEvent(Iterator<File> files) {
while (files.hasNext()) {
File file = files.next();
// only consider files that end with FILE_SUFFIX
if (!file.getAbsolutePath().endsWith(FILE_SUFFIX)) {
continue;
}
Event event = fileToEvent(file);
if (event != null) {
return event;
}
}
return null;
} } | public class class_name {
private Event getNextEvent(Iterator<File> files) {
while (files.hasNext()) {
File file = files.next();
// only consider files that end with FILE_SUFFIX
if (!file.getAbsolutePath().endsWith(FILE_SUFFIX)) {
continue;
}
Event event = fileToEvent(file);
if (event != null) {
return event; // depends on control dependency: [if], data = [none]
}
}
return null;
} } |
public class class_name {
static boolean hasEnvironmentVariable(String variable) {
final String value = System.getenv(variable);
if (value != null) {
return true;
}
return false;
} } | public class class_name {
static boolean hasEnvironmentVariable(String variable) {
final String value = System.getenv(variable);
if (value != null) {
return true; // depends on control dependency: [if], data = [none]
}
return false;
} } |
public class class_name {
@Override
public void gemm(char Order, char TransA, char TransB, IComplexNumber alpha, IComplexNDArray A, IComplexNDArray B,
IComplexNumber beta, IComplexNDArray C) {
if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL)
OpProfiler.getInstance().processBlasCall(true, A, B, C);
GemmParams params = new GemmParams(A, B, C);
if (A.data().dataType() == DataBuffer.Type.DOUBLE) {
zgemm(Order, TransA, TransB, params.getM(), params.getN(), params.getK(), alpha.asDouble(),
A.ordering() == NDArrayFactory.C ? B : A, params.getLda(),
B.ordering() == NDArrayFactory.C ? A : B, params.getLdb(), beta.asDouble(), C,
params.getLdc());
} else
cgemm(Order, TransA, TransB, params.getM(), params.getN(), params.getK(), alpha.asFloat(),
A.ordering() == NDArrayFactory.C ? B : A, params.getLda(),
B.ordering() == NDArrayFactory.C ? A : B, params.getLdb(), beta.asFloat(), C,
params.getLdc());
} } | public class class_name {
@Override
public void gemm(char Order, char TransA, char TransB, IComplexNumber alpha, IComplexNDArray A, IComplexNDArray B,
IComplexNumber beta, IComplexNDArray C) {
if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL)
OpProfiler.getInstance().processBlasCall(true, A, B, C);
GemmParams params = new GemmParams(A, B, C);
if (A.data().dataType() == DataBuffer.Type.DOUBLE) {
zgemm(Order, TransA, TransB, params.getM(), params.getN(), params.getK(), alpha.asDouble(),
A.ordering() == NDArrayFactory.C ? B : A, params.getLda(),
B.ordering() == NDArrayFactory.C ? A : B, params.getLdb(), beta.asDouble(), C,
params.getLdc()); // depends on control dependency: [if], data = [none]
} else
cgemm(Order, TransA, TransB, params.getM(), params.getN(), params.getK(), alpha.asFloat(),
A.ordering() == NDArrayFactory.C ? B : A, params.getLda(),
B.ordering() == NDArrayFactory.C ? A : B, params.getLdb(), beta.asFloat(), C,
params.getLdc());
} } |
public class class_name {
private NavigableSet<RenditionMetadata> rotateSourceRenditions(Set<RenditionMetadata> candidates) {
if (rotation == null) {
return new TreeSet<>(candidates);
}
return candidates.stream()
.map(rendition -> new VirtualTransformedRenditionMetadata(rendition.getRendition(),
rotateMapWidth(rendition.getWidth(), rendition.getHeight()),
rotateMapHeight(rendition.getWidth(), rendition.getHeight()),
null, rotation))
.collect(Collectors.toCollection(TreeSet::new));
} } | public class class_name {
private NavigableSet<RenditionMetadata> rotateSourceRenditions(Set<RenditionMetadata> candidates) {
if (rotation == null) {
return new TreeSet<>(candidates); // depends on control dependency: [if], data = [none]
}
return candidates.stream()
.map(rendition -> new VirtualTransformedRenditionMetadata(rendition.getRendition(),
rotateMapWidth(rendition.getWidth(), rendition.getHeight()),
rotateMapHeight(rendition.getWidth(), rendition.getHeight()),
null, rotation))
.collect(Collectors.toCollection(TreeSet::new));
} } |
public class class_name {
@Override
public final void messageReceived(NextFilter nextFilter, IoSession session,
Object message) {
if (eventTypes.contains(IoEventType.MESSAGE_RECEIVED)) {
IoFilterEvent event = new IoFilterEvent(nextFilter,
IoEventType.MESSAGE_RECEIVED, session, message);
fireEvent(event);
} else {
nextFilter.messageReceived(session, message);
}
} } | public class class_name {
@Override
public final void messageReceived(NextFilter nextFilter, IoSession session,
Object message) {
if (eventTypes.contains(IoEventType.MESSAGE_RECEIVED)) {
IoFilterEvent event = new IoFilterEvent(nextFilter,
IoEventType.MESSAGE_RECEIVED, session, message);
fireEvent(event); // depends on control dependency: [if], data = [none]
} else {
nextFilter.messageReceived(session, message); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
int readVarint32() {
byte b; // negative number implies MSB set
if ((b = readByte()) >= 0) {
return b;
}
int result = b & 0x7f;
if ((b = readByte()) >= 0) {
return result | b << 7;
}
result |= (b & 0x7f) << 7;
if ((b = readByte()) >= 0) {
return result | b << 14;
}
result |= (b & 0x7f) << 14;
if ((b = readByte()) >= 0) {
return result | b << 21;
}
result |= (b & 0x7f) << 21;
b = readByte();
if ((b & 0xf0) != 0) {
throw new IllegalArgumentException("Greater than 32-bit varint at position " + (pos() - 1));
}
return result | b << 28;
} } | public class class_name {
int readVarint32() {
byte b; // negative number implies MSB set
if ((b = readByte()) >= 0) {
return b; // depends on control dependency: [if], data = [none]
}
int result = b & 0x7f;
if ((b = readByte()) >= 0) {
return result | b << 7; // depends on control dependency: [if], data = [none]
}
result |= (b & 0x7f) << 7;
if ((b = readByte()) >= 0) {
return result | b << 14; // depends on control dependency: [if], data = [none]
}
result |= (b & 0x7f) << 14;
if ((b = readByte()) >= 0) {
return result | b << 21; // depends on control dependency: [if], data = [none]
}
result |= (b & 0x7f) << 21;
b = readByte();
if ((b & 0xf0) != 0) {
throw new IllegalArgumentException("Greater than 32-bit varint at position " + (pos() - 1));
}
return result | b << 28;
} } |
public class class_name {
public void addMilestone(@NonNull Milestone milestone) {
boolean milestoneAdded = milestones.add(milestone);
if (!milestoneAdded) {
Timber.w("Milestone has already been added to the stack.");
}
} } | public class class_name {
public void addMilestone(@NonNull Milestone milestone) {
boolean milestoneAdded = milestones.add(milestone);
if (!milestoneAdded) {
Timber.w("Milestone has already been added to the stack."); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static byte[] bestAvailableMac() {
// Find the best MAC address available.
byte[] bestMacAddr = EMPTY_BYTES;
InetAddress bestInetAddr = NetUtil.LOCALHOST4;
// Retrieve the list of available network interfaces.
Map<NetworkInterface, InetAddress> ifaces = new LinkedHashMap<NetworkInterface, InetAddress>();
try {
Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
if (interfaces != null) {
while (interfaces.hasMoreElements()) {
NetworkInterface iface = interfaces.nextElement();
// Use the interface with proper INET addresses only.
Enumeration<InetAddress> addrs = SocketUtils.addressesFromNetworkInterface(iface);
if (addrs.hasMoreElements()) {
InetAddress a = addrs.nextElement();
if (!a.isLoopbackAddress()) {
ifaces.put(iface, a);
}
}
}
}
} catch (SocketException e) {
logger.warn("Failed to retrieve the list of available network interfaces", e);
}
for (Entry<NetworkInterface, InetAddress> entry: ifaces.entrySet()) {
NetworkInterface iface = entry.getKey();
InetAddress inetAddr = entry.getValue();
if (iface.isVirtual()) {
continue;
}
byte[] macAddr;
try {
macAddr = SocketUtils.hardwareAddressFromNetworkInterface(iface);
} catch (SocketException e) {
logger.debug("Failed to get the hardware address of a network interface: {}", iface, e);
continue;
}
boolean replace = false;
int res = compareAddresses(bestMacAddr, macAddr);
if (res < 0) {
// Found a better MAC address.
replace = true;
} else if (res == 0) {
// Two MAC addresses are of pretty much same quality.
res = compareAddresses(bestInetAddr, inetAddr);
if (res < 0) {
// Found a MAC address with better INET address.
replace = true;
} else if (res == 0) {
// Cannot tell the difference. Choose the longer one.
if (bestMacAddr.length < macAddr.length) {
replace = true;
}
}
}
if (replace) {
bestMacAddr = macAddr;
bestInetAddr = inetAddr;
}
}
if (bestMacAddr == EMPTY_BYTES) {
return null;
}
switch (bestMacAddr.length) {
case EUI48_MAC_ADDRESS_LENGTH: // EUI-48 - convert to EUI-64
byte[] newAddr = new byte[EUI64_MAC_ADDRESS_LENGTH];
System.arraycopy(bestMacAddr, 0, newAddr, 0, 3);
newAddr[3] = (byte) 0xFF;
newAddr[4] = (byte) 0xFE;
System.arraycopy(bestMacAddr, 3, newAddr, 5, 3);
bestMacAddr = newAddr;
break;
default: // Unknown
bestMacAddr = Arrays.copyOf(bestMacAddr, EUI64_MAC_ADDRESS_LENGTH);
}
return bestMacAddr;
} } | public class class_name {
public static byte[] bestAvailableMac() {
// Find the best MAC address available.
byte[] bestMacAddr = EMPTY_BYTES;
InetAddress bestInetAddr = NetUtil.LOCALHOST4;
// Retrieve the list of available network interfaces.
Map<NetworkInterface, InetAddress> ifaces = new LinkedHashMap<NetworkInterface, InetAddress>();
try {
Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
if (interfaces != null) {
while (interfaces.hasMoreElements()) {
NetworkInterface iface = interfaces.nextElement();
// Use the interface with proper INET addresses only.
Enumeration<InetAddress> addrs = SocketUtils.addressesFromNetworkInterface(iface);
if (addrs.hasMoreElements()) {
InetAddress a = addrs.nextElement();
if (!a.isLoopbackAddress()) {
ifaces.put(iface, a); // depends on control dependency: [if], data = [none]
}
}
}
}
} catch (SocketException e) {
logger.warn("Failed to retrieve the list of available network interfaces", e);
} // depends on control dependency: [catch], data = [none]
for (Entry<NetworkInterface, InetAddress> entry: ifaces.entrySet()) {
NetworkInterface iface = entry.getKey();
InetAddress inetAddr = entry.getValue();
if (iface.isVirtual()) {
continue;
}
byte[] macAddr;
try {
macAddr = SocketUtils.hardwareAddressFromNetworkInterface(iface); // depends on control dependency: [try], data = [none]
} catch (SocketException e) {
logger.debug("Failed to get the hardware address of a network interface: {}", iface, e);
continue;
} // depends on control dependency: [catch], data = [none]
boolean replace = false;
int res = compareAddresses(bestMacAddr, macAddr);
if (res < 0) {
// Found a better MAC address.
replace = true; // depends on control dependency: [if], data = [none]
} else if (res == 0) {
// Two MAC addresses are of pretty much same quality.
res = compareAddresses(bestInetAddr, inetAddr); // depends on control dependency: [if], data = [none]
if (res < 0) {
// Found a MAC address with better INET address.
replace = true; // depends on control dependency: [if], data = [none]
} else if (res == 0) {
// Cannot tell the difference. Choose the longer one.
if (bestMacAddr.length < macAddr.length) {
replace = true; // depends on control dependency: [if], data = [none]
}
}
}
if (replace) {
bestMacAddr = macAddr; // depends on control dependency: [if], data = [none]
bestInetAddr = inetAddr; // depends on control dependency: [if], data = [none]
}
}
if (bestMacAddr == EMPTY_BYTES) {
return null; // depends on control dependency: [if], data = [none]
}
switch (bestMacAddr.length) {
case EUI48_MAC_ADDRESS_LENGTH: // EUI-48 - convert to EUI-64
byte[] newAddr = new byte[EUI64_MAC_ADDRESS_LENGTH];
System.arraycopy(bestMacAddr, 0, newAddr, 0, 3);
newAddr[3] = (byte) 0xFF;
newAddr[4] = (byte) 0xFE;
System.arraycopy(bestMacAddr, 3, newAddr, 5, 3);
bestMacAddr = newAddr;
break;
default: // Unknown
bestMacAddr = Arrays.copyOf(bestMacAddr, EUI64_MAC_ADDRESS_LENGTH);
}
return bestMacAddr;
} } |
public class class_name {
public String asHeader() {
StringBuilder result = new StringBuilder();
for (Map.Entry<String, List<String>> linkParamEntry : linkParams.entrySet()) {
if (result.length() != 0) {
result.append("; ");
}
String linkParamEntryKey = linkParamEntry.getKey();
if (REL.paramName.equals(linkParamEntryKey) || REV.paramName.equals(linkParamEntryKey)) {
result.append(linkParamEntryKey)
.append("=");
result.append("\"")
.append(StringUtils.collectionToDelimitedString(linkParamEntry.getValue(), " "))
.append("\"");
} else {
StringBuilder linkParams = new StringBuilder();
for (String value : linkParamEntry.getValue()) {
if (linkParams.length() != 0) {
linkParams.append("; ");
}
linkParams.append(linkParamEntryKey)
.append("=");
linkParams.append("\"")
.append(value)
.append("\"");
}
result.append(linkParams);
}
}
String linkHeader = "<" + partialUriTemplate.asComponents()
.toString() + ">; ";
return result.insert(0, linkHeader)
.toString();
} } | public class class_name {
public String asHeader() {
StringBuilder result = new StringBuilder();
for (Map.Entry<String, List<String>> linkParamEntry : linkParams.entrySet()) {
if (result.length() != 0) {
result.append("; "); // depends on control dependency: [if], data = [none]
}
String linkParamEntryKey = linkParamEntry.getKey();
if (REL.paramName.equals(linkParamEntryKey) || REV.paramName.equals(linkParamEntryKey)) {
result.append(linkParamEntryKey)
.append("="); // depends on control dependency: [if], data = [none]
result.append("\"")
.append(StringUtils.collectionToDelimitedString(linkParamEntry.getValue(), " "))
.append("\""); // depends on control dependency: [if], data = [none]
} else {
StringBuilder linkParams = new StringBuilder();
for (String value : linkParamEntry.getValue()) {
if (linkParams.length() != 0) {
linkParams.append("; "); // depends on control dependency: [if], data = [none]
}
linkParams.append(linkParamEntryKey)
.append("="); // depends on control dependency: [for], data = [none]
linkParams.append("\"")
.append(value)
.append("\""); // depends on control dependency: [for], data = [none]
}
result.append(linkParams); // depends on control dependency: [if], data = [none]
}
}
String linkHeader = "<" + partialUriTemplate.asComponents()
.toString() + ">; ";
return result.insert(0, linkHeader)
.toString();
} } |
public class class_name {
protected final double[] selectNextSeed( List<double[]> points , double target ) {
// this won't select previously selected points because the distance will be zero
// If the distance is zero it will simply skip over it
double sum = 0;
for (int i = 0; i < distance.size(); i++) {
sum += distance.get(i);
double fraction = sum/totalDistance;
if( fraction >= target )
return points.get(i);
}
throw new RuntimeException("This shouldn't happen");
} } | public class class_name {
protected final double[] selectNextSeed( List<double[]> points , double target ) {
// this won't select previously selected points because the distance will be zero
// If the distance is zero it will simply skip over it
double sum = 0;
for (int i = 0; i < distance.size(); i++) {
sum += distance.get(i); // depends on control dependency: [for], data = [i]
double fraction = sum/totalDistance;
if( fraction >= target )
return points.get(i);
}
throw new RuntimeException("This shouldn't happen");
} } |
public class class_name {
@Nullable
protected static SOAPFaultException createSfe(@NotNull SoapFaultCode soapFaultCode) {
try {
QName qname = new QName("http://schemas.xmlsoap.org/soap/envelope/", soapFaultCode.name());
SOAPFactory sf = SOAPFactory.newInstance();
SOAPFault fault = sf.createFault("", qname);
return new SOAPFaultException(fault);
} catch (SOAPException e) {
//e.printStackTrace();
return null;
}
} } | public class class_name {
@Nullable
protected static SOAPFaultException createSfe(@NotNull SoapFaultCode soapFaultCode) {
try {
QName qname = new QName("http://schemas.xmlsoap.org/soap/envelope/", soapFaultCode.name());
SOAPFactory sf = SOAPFactory.newInstance();
SOAPFault fault = sf.createFault("", qname);
return new SOAPFaultException(fault); // depends on control dependency: [try], data = [none]
} catch (SOAPException e) {
//e.printStackTrace();
return null;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
private void generateQvarConstraints() {
final StringBuilder qvarConstrBuilder = new StringBuilder();
final StringBuilder qvarMapStrBuilder = new StringBuilder();
final Iterator<Map.Entry<String, ArrayList<String>>> entryIterator = qvar.entrySet().iterator();
if (entryIterator.hasNext()) {
qvarMapStrBuilder.append("declare function local:qvarMap($x) {\n map {");
while (entryIterator.hasNext()) {
final Map.Entry<String, ArrayList<String>> currentEntry = entryIterator.next();
final Iterator<String> valueIterator = currentEntry.getValue().iterator();
final String firstValue = valueIterator.next();
qvarMapStrBuilder.append('"').append(currentEntry.getKey()).append('"')
.append(" : (data($x").append(firstValue).append("/@xml:id)");
//check if there are additional values that we need to constrain
if (valueIterator.hasNext()) {
if (qvarConstrBuilder.length() > 0) {
//only add beginning and if it's an additional constraint in the aggregate qvar string
qvarConstrBuilder.append("\n and ");
}
while (valueIterator.hasNext()) {
//process second value onwards
final String currentValue = valueIterator.next();
qvarMapStrBuilder.append(",data($x").append(currentValue).append("/@xml-id)");
//These constraints specify that the same qvars must refer to the same nodes,
//using the XQuery "=" equality
//This is equality based on: same text, same node names, and same children nodes
qvarConstrBuilder.append("$x").append(firstValue).append(" = $x").append(currentValue);
if (valueIterator.hasNext()) {
qvarConstrBuilder.append(" and ");
}
}
}
qvarMapStrBuilder.append(')');
if (entryIterator.hasNext()) {
qvarMapStrBuilder.append(',');
}
}
qvarMapStrBuilder.append("}\n};");
}
qvarMapVariable = qvarMapStrBuilder.toString();
qvarConstraint = qvarConstrBuilder.toString();
} } | public class class_name {
private void generateQvarConstraints() {
final StringBuilder qvarConstrBuilder = new StringBuilder();
final StringBuilder qvarMapStrBuilder = new StringBuilder();
final Iterator<Map.Entry<String, ArrayList<String>>> entryIterator = qvar.entrySet().iterator();
if (entryIterator.hasNext()) {
qvarMapStrBuilder.append("declare function local:qvarMap($x) {\n map {"); // depends on control dependency: [if], data = [none]
while (entryIterator.hasNext()) {
final Map.Entry<String, ArrayList<String>> currentEntry = entryIterator.next();
final Iterator<String> valueIterator = currentEntry.getValue().iterator();
final String firstValue = valueIterator.next();
qvarMapStrBuilder.append('"').append(currentEntry.getKey()).append('"')
.append(" : (data($x").append(firstValue).append("/@xml:id)"); // depends on control dependency: [while], data = [none]
//check if there are additional values that we need to constrain
if (valueIterator.hasNext()) {
if (qvarConstrBuilder.length() > 0) {
//only add beginning and if it's an additional constraint in the aggregate qvar string
qvarConstrBuilder.append("\n and "); // depends on control dependency: [if], data = [none]
}
while (valueIterator.hasNext()) {
//process second value onwards
final String currentValue = valueIterator.next();
qvarMapStrBuilder.append(",data($x").append(currentValue).append("/@xml-id)"); // depends on control dependency: [while], data = [none]
//These constraints specify that the same qvars must refer to the same nodes,
//using the XQuery "=" equality
//This is equality based on: same text, same node names, and same children nodes
qvarConstrBuilder.append("$x").append(firstValue).append(" = $x").append(currentValue); // depends on control dependency: [while], data = [none]
if (valueIterator.hasNext()) {
qvarConstrBuilder.append(" and "); // depends on control dependency: [if], data = [none]
}
}
}
qvarMapStrBuilder.append(')'); // depends on control dependency: [while], data = [none]
if (entryIterator.hasNext()) {
qvarMapStrBuilder.append(','); // depends on control dependency: [if], data = [none]
}
}
qvarMapStrBuilder.append("}\n};"); // depends on control dependency: [if], data = [none]
}
qvarMapVariable = qvarMapStrBuilder.toString();
qvarConstraint = qvarConstrBuilder.toString();
} } |
public class class_name {
public Rational add(Rational value)
{
if (denominator == value.getDenominator())
{
return new Rational(numerator + value.getNumerator(), denominator);
}
else
{
return new Rational(numerator * value.getDenominator() + value.getNumerator() * denominator,
denominator * value.getDenominator());
}
} } | public class class_name {
public Rational add(Rational value)
{
if (denominator == value.getDenominator())
{
return new Rational(numerator + value.getNumerator(), denominator); // depends on control dependency: [if], data = [none]
}
else
{
return new Rational(numerator * value.getDenominator() + value.getNumerator() * denominator,
denominator * value.getDenominator()); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public void addPatternFromReader(Reader r) throws IOException {
BufferedReader br = new BufferedReader(r);
String line;
// We dont want \n and commented line
Pattern MY_PATTERN = Pattern.compile("^([A-z0-9_]+)([~]?)\\s+(.*)$");
while ((line = br.readLine()) != null) {
Matcher m = MY_PATTERN.matcher(line);
if (m.matches()) {
if (m.group(2).length() > 0) {
// process line as simple template
this.addPattern(m.group(1), simpleTemplateToRegEx(m.group(3)));
}
else {
this.addPattern(m.group(1), m.group(3));
}
}
}
br.close();
} } | public class class_name {
public void addPatternFromReader(Reader r) throws IOException {
BufferedReader br = new BufferedReader(r);
String line;
// We dont want \n and commented line
Pattern MY_PATTERN = Pattern.compile("^([A-z0-9_]+)([~]?)\\s+(.*)$");
while ((line = br.readLine()) != null) {
Matcher m = MY_PATTERN.matcher(line);
if (m.matches()) {
if (m.group(2).length() > 0) {
// process line as simple template
this.addPattern(m.group(1), simpleTemplateToRegEx(m.group(3))); // depends on control dependency: [if], data = [none]
}
else {
this.addPattern(m.group(1), m.group(3)); // depends on control dependency: [if], data = [none]
}
}
}
br.close();
} } |
public class class_name {
void syncLevelWithLog4j() {
// get the level from log4j, only the root one uses effective level
Level log4jLevel = parent == null ? logger.getEffectiveLevel() : logger.getLevel();
if (level == null) {
// set the level
assignLog4JLevel(log4jLevel);
}
else {
// set the level only if differs, otherwise we may loose levels not present in log4j
if (tracerToLog4JLevel(level) != log4jLevel) {
assignLog4JLevel(log4jLevel);
}
}
// the root must always have a level
if (parent == null && level == null) {
// defaults to INFO
logger.setLevel(Level.INFO);
level = TraceLevel.INFO;
}
// reset the flags
resetCacheFlags(false);
} } | public class class_name {
void syncLevelWithLog4j() {
// get the level from log4j, only the root one uses effective level
Level log4jLevel = parent == null ? logger.getEffectiveLevel() : logger.getLevel();
if (level == null) {
// set the level
assignLog4JLevel(log4jLevel);
// depends on control dependency: [if], data = [none]
}
else {
// set the level only if differs, otherwise we may loose levels not present in log4j
if (tracerToLog4JLevel(level) != log4jLevel) {
assignLog4JLevel(log4jLevel);
// depends on control dependency: [if], data = [log4jLevel)]
}
}
// the root must always have a level
if (parent == null && level == null) {
// defaults to INFO
logger.setLevel(Level.INFO);
// depends on control dependency: [if], data = [none]
level = TraceLevel.INFO;
// depends on control dependency: [if], data = [none]
}
// reset the flags
resetCacheFlags(false);
} } |
public class class_name {
IoSession take(HttpResourceAddress serverAddress) {
IoSession transportSession = removeThreadAligned(serverAddress);
if (transportSession != null) {
// Got a cached persistent connection
// Remove session idle tracking for this session
transportSession.getConfig().setBothIdleTime(0);
IoFilterChain filterChain = transportSession.getFilterChain();
if (filterChain.contains(IDLE_FILTER)) {
filterChain.remove(IDLE_FILTER);
}
// Remove our CloseFuture listener as it is out of pool
CloseFuture closeFuture = transportSession.getCloseFuture();
closeFuture.removeListener(closeListener);
SERVER_ADDRESS.remove(transportSession);
}
return transportSession;
} } | public class class_name {
IoSession take(HttpResourceAddress serverAddress) {
IoSession transportSession = removeThreadAligned(serverAddress);
if (transportSession != null) {
// Got a cached persistent connection
// Remove session idle tracking for this session
transportSession.getConfig().setBothIdleTime(0); // depends on control dependency: [if], data = [none]
IoFilterChain filterChain = transportSession.getFilterChain();
if (filterChain.contains(IDLE_FILTER)) {
filterChain.remove(IDLE_FILTER); // depends on control dependency: [if], data = [none]
}
// Remove our CloseFuture listener as it is out of pool
CloseFuture closeFuture = transportSession.getCloseFuture();
closeFuture.removeListener(closeListener); // depends on control dependency: [if], data = [none]
SERVER_ADDRESS.remove(transportSession); // depends on control dependency: [if], data = [(transportSession]
}
return transportSession;
} } |
public class class_name {
public void inherit(DocFinder.Input input, DocFinder.Output output) {
List<? extends DocTree> tags = input.utils.getBlockTags(input.element, DocTree.Kind.RETURN);
CommentHelper ch = input.utils.getCommentHelper(input.element);
if (!tags.isEmpty()) {
output.holder = input.element;
output.holderTag = tags.get(0);
output.inlineTags = input.isFirstSentence
? ch.getFirstSentenceTrees(input.utils.configuration, output.holderTag)
: ch.getDescription(input.utils.configuration, output.holderTag);
}
} } | public class class_name {
public void inherit(DocFinder.Input input, DocFinder.Output output) {
List<? extends DocTree> tags = input.utils.getBlockTags(input.element, DocTree.Kind.RETURN);
CommentHelper ch = input.utils.getCommentHelper(input.element);
if (!tags.isEmpty()) {
output.holder = input.element; // depends on control dependency: [if], data = [none]
output.holderTag = tags.get(0); // depends on control dependency: [if], data = [none]
output.inlineTags = input.isFirstSentence
? ch.getFirstSentenceTrees(input.utils.configuration, output.holderTag)
: ch.getDescription(input.utils.configuration, output.holderTag); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public TCert getNextTCert(List<String> attrs) {
if (!isEnrolled()) {
throw new RuntimeException(String.format("user '%s' is not enrolled", this.getName()));
}
String key = getAttrsKey(attrs);
logger.debug(String.format("Member.getNextTCert: key=%s", key));
TCertGetter tcertGetter = this.tcertGetterMap.get(key);
if (tcertGetter == null) {
logger.debug(String.format("Member.getNextTCert: key=%s, creating new getter", key));
tcertGetter = new TCertGetter(this, attrs, key);
this.tcertGetterMap.put(key, tcertGetter);
}
return tcertGetter.getNextTCert();
} } | public class class_name {
public TCert getNextTCert(List<String> attrs) {
if (!isEnrolled()) {
throw new RuntimeException(String.format("user '%s' is not enrolled", this.getName()));
}
String key = getAttrsKey(attrs);
logger.debug(String.format("Member.getNextTCert: key=%s", key));
TCertGetter tcertGetter = this.tcertGetterMap.get(key);
if (tcertGetter == null) {
logger.debug(String.format("Member.getNextTCert: key=%s, creating new getter", key)); // depends on control dependency: [if], data = [none]
tcertGetter = new TCertGetter(this, attrs, key); // depends on control dependency: [if], data = [none]
this.tcertGetterMap.put(key, tcertGetter); // depends on control dependency: [if], data = [none]
}
return tcertGetter.getNextTCert();
} } |
public class class_name {
static double calculateNewMeanNonFinite(double previousMean, double value) {
/*
* Desired behaviour is to match the results of applying the naive mean formula. In particular,
* the update formula can subtract infinities in cases where the naive formula would add them.
*
* Consequently:
* 1. If the previous mean is finite and the new value is non-finite then the new mean is that
* value (whether it is NaN or infinity).
* 2. If the new value is finite and the previous mean is non-finite then the mean is unchanged
* (whether it is NaN or infinity).
* 3. If both the previous mean and the new value are non-finite and...
* 3a. ...either or both is NaN (so mean != value) then the new mean is NaN.
* 3b. ...they are both the same infinities (so mean == value) then the mean is unchanged.
* 3c. ...they are different infinities (so mean != value) then the new mean is NaN.
*/
if (isFinite(previousMean)) {
// This is case 1.
return value;
} else if (isFinite(value) || previousMean == value) {
// This is case 2. or 3b.
return previousMean;
} else {
// This is case 3a. or 3c.
return NaN;
}
} } | public class class_name {
static double calculateNewMeanNonFinite(double previousMean, double value) {
/*
* Desired behaviour is to match the results of applying the naive mean formula. In particular,
* the update formula can subtract infinities in cases where the naive formula would add them.
*
* Consequently:
* 1. If the previous mean is finite and the new value is non-finite then the new mean is that
* value (whether it is NaN or infinity).
* 2. If the new value is finite and the previous mean is non-finite then the mean is unchanged
* (whether it is NaN or infinity).
* 3. If both the previous mean and the new value are non-finite and...
* 3a. ...either or both is NaN (so mean != value) then the new mean is NaN.
* 3b. ...they are both the same infinities (so mean == value) then the mean is unchanged.
* 3c. ...they are different infinities (so mean != value) then the new mean is NaN.
*/
if (isFinite(previousMean)) {
// This is case 1.
return value; // depends on control dependency: [if], data = [none]
} else if (isFinite(value) || previousMean == value) {
// This is case 2. or 3b.
return previousMean; // depends on control dependency: [if], data = [none]
} else {
// This is case 3a. or 3c.
return NaN; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public final void evaluate() {
final LemmatizerEvaluator evaluator = new LemmatizerEvaluator(this.lemmatizer);
try {
evaluator.evaluate(this.testSamples);
} catch (IOException e) {
e.printStackTrace();
}
System.out.println(evaluator.getWordAccuracy());
} } | public class class_name {
public final void evaluate() {
final LemmatizerEvaluator evaluator = new LemmatizerEvaluator(this.lemmatizer);
try {
evaluator.evaluate(this.testSamples); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
System.out.println(evaluator.getWordAccuracy());
} } |
public class class_name {
public Schema analyzer(String name, Analyzer analyzer) {
if (analyzers == null) {
analyzers = new LinkedHashMap<>();
}
analyzers.put(name, analyzer);
return this;
} } | public class class_name {
public Schema analyzer(String name, Analyzer analyzer) {
if (analyzers == null) {
analyzers = new LinkedHashMap<>(); // depends on control dependency: [if], data = [none]
}
analyzers.put(name, analyzer);
return this;
} } |
public class class_name {
public boolean isServerInRecoveryMode() {
String thisMethodName = CLASS_NAME + ".isServerInRecoveryMode()";
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
SibTr.entry(tc, thisMethodName, this);
}
boolean ret = false;// (_serverMode == Server.RECOVERY_MODE); TBD
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
SibTr.exit(tc, thisMethodName, new Boolean(ret));
}
return ret;
} } | public class class_name {
public boolean isServerInRecoveryMode() {
String thisMethodName = CLASS_NAME + ".isServerInRecoveryMode()";
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
SibTr.entry(tc, thisMethodName, this); // depends on control dependency: [if], data = [none]
}
boolean ret = false;// (_serverMode == Server.RECOVERY_MODE); TBD
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
SibTr.exit(tc, thisMethodName, new Boolean(ret)); // depends on control dependency: [if], data = [none]
}
return ret;
} } |
public class class_name {
public double[] getDoublesByName(String Name) {
DbaseData d;
if ((d = getField(Name)) == null) return null;
if (d.getType() == DbaseData.TYPE_CHAR) {
String[] s = d.getStrings();
double[] dd = new double[s.length];
for (int i = 0; i < s.length; i++) {
dd[i] = Double.valueOf(s[i]);
}
return dd;
}
if (d.getType() == DbaseData.TYPE_BOOLEAN) {
boolean[] b = d.getBooleans();
double[] dd = new double[b.length];
for (int i = 0; i < b.length; i++) {
if (b[i]) {
dd[i] = 1;
} else {
dd[i] = 0;
}
}
return dd;
}
return d.getDoubles();
} } | public class class_name {
public double[] getDoublesByName(String Name) {
DbaseData d;
if ((d = getField(Name)) == null) return null;
if (d.getType() == DbaseData.TYPE_CHAR) {
String[] s = d.getStrings();
double[] dd = new double[s.length];
for (int i = 0; i < s.length; i++) {
dd[i] = Double.valueOf(s[i]);
// depends on control dependency: [for], data = [i]
}
return dd;
// depends on control dependency: [if], data = [none]
}
if (d.getType() == DbaseData.TYPE_BOOLEAN) {
boolean[] b = d.getBooleans();
double[] dd = new double[b.length];
for (int i = 0; i < b.length; i++) {
if (b[i]) {
dd[i] = 1;
// depends on control dependency: [if], data = [none]
} else {
dd[i] = 0;
// depends on control dependency: [if], data = [none]
}
}
return dd;
// depends on control dependency: [if], data = [none]
}
return d.getDoubles();
} } |
public class class_name {
private void assignNestedValuesPrivate(XAttributable element,
List<String> keys, Type value) {
if (keys.isEmpty()) {
/*
* Key list is empty. Assign amount here if attribute. Else skip.
*/
if (element instanceof XAttribute) {
assignValue((XAttribute) element, value);
}
} else {
/*
* Key list not empty yet. Step down to the next attribute.
*/
String key = keys.get(0);
List<String> keysTail = keys.subList(1, keys.size());
XAttribute attr;
if (element.getAttributes().containsKey(key)) {
/*
* Attribute with given key already exists. Use it.
*/
attr = element.getAttributes().get(key);
} else {
/*
* Attribute with given key does not exist yet.
*/
attr = XFactoryRegistry.instance().currentDefault()
.createAttributeLiteral(key, "", null);
element.getAttributes().put(key, attr);
/*
* Now it does.
*/
}
/*
* Step down.
*/
assignNestedValuesPrivate(attr, keysTail, value);
}
} } | public class class_name {
private void assignNestedValuesPrivate(XAttributable element,
List<String> keys, Type value) {
if (keys.isEmpty()) {
/*
* Key list is empty. Assign amount here if attribute. Else skip.
*/
if (element instanceof XAttribute) {
assignValue((XAttribute) element, value);
// depends on control dependency: [if], data = [none]
}
} else {
/*
* Key list not empty yet. Step down to the next attribute.
*/
String key = keys.get(0);
List<String> keysTail = keys.subList(1, keys.size());
XAttribute attr;
if (element.getAttributes().containsKey(key)) {
/*
* Attribute with given key already exists. Use it.
*/
attr = element.getAttributes().get(key);
// depends on control dependency: [if], data = [none]
} else {
/*
* Attribute with given key does not exist yet.
*/
attr = XFactoryRegistry.instance().currentDefault()
.createAttributeLiteral(key, "", null);
// depends on control dependency: [if], data = [none]
element.getAttributes().put(key, attr);
// depends on control dependency: [if], data = [none]
/*
* Now it does.
*/
}
/*
* Step down.
*/
assignNestedValuesPrivate(attr, keysTail, value);
// depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private static StatusRuntimeException toStatusRuntimeException(Throwable t) {
Throwable cause = checkNotNull(t, "t");
while (cause != null) {
// If we have an embedded status, use it and replace the cause
if (cause instanceof StatusException) {
StatusException se = (StatusException) cause;
return new StatusRuntimeException(se.getStatus(), se.getTrailers());
} else if (cause instanceof StatusRuntimeException) {
StatusRuntimeException se = (StatusRuntimeException) cause;
return new StatusRuntimeException(se.getStatus(), se.getTrailers());
}
cause = cause.getCause();
}
return Status.UNKNOWN.withDescription("unexpected exception").withCause(t)
.asRuntimeException();
} } | public class class_name {
private static StatusRuntimeException toStatusRuntimeException(Throwable t) {
Throwable cause = checkNotNull(t, "t");
while (cause != null) {
// If we have an embedded status, use it and replace the cause
if (cause instanceof StatusException) {
StatusException se = (StatusException) cause;
return new StatusRuntimeException(se.getStatus(), se.getTrailers()); // depends on control dependency: [if], data = [none]
} else if (cause instanceof StatusRuntimeException) {
StatusRuntimeException se = (StatusRuntimeException) cause;
return new StatusRuntimeException(se.getStatus(), se.getTrailers()); // depends on control dependency: [if], data = [none]
}
cause = cause.getCause(); // depends on control dependency: [while], data = [none]
}
return Status.UNKNOWN.withDescription("unexpected exception").withCause(t)
.asRuntimeException();
} } |
public class class_name {
public XAResourceWrapper createConnectableXAResourceWrapper(XAResource xares,
boolean pad, Boolean override,
String productName, String productVersion,
String jndiName,
ManagedConnection mc,
XAResourceStatistics xastat)
{
if (mc instanceof org.ironjacamar.core.spi.transaction.FirstResource ||
mc instanceof org.jboss.tm.FirstResource)
{
if (xastat != null && xastat.isEnabled())
{
return new FirstResourceConnectableXAResourceWrapperStatImpl(xares, pad, override,
productName, productVersion, jndiName,
(org.jboss.tm.ConnectableResource)mc,
xastat);
}
else
{
return new FirstResourceConnectableXAResourceWrapperImpl(xares, pad, override,
productName, productVersion, jndiName,
(org.jboss.tm.ConnectableResource)mc);
}
}
else
{
if (xastat != null && xastat.isEnabled())
{
return new ConnectableXAResourceWrapperStatImpl(xares, pad, override,
productName, productVersion, jndiName,
(org.jboss.tm.ConnectableResource)mc, xastat);
}
else
{
return new ConnectableXAResourceWrapperImpl(xares, pad, override,
productName, productVersion, jndiName,
(org.jboss.tm.ConnectableResource)mc);
}
}
} } | public class class_name {
public XAResourceWrapper createConnectableXAResourceWrapper(XAResource xares,
boolean pad, Boolean override,
String productName, String productVersion,
String jndiName,
ManagedConnection mc,
XAResourceStatistics xastat)
{
if (mc instanceof org.ironjacamar.core.spi.transaction.FirstResource ||
mc instanceof org.jboss.tm.FirstResource)
{
if (xastat != null && xastat.isEnabled())
{
return new FirstResourceConnectableXAResourceWrapperStatImpl(xares, pad, override,
productName, productVersion, jndiName,
(org.jboss.tm.ConnectableResource)mc,
xastat); // depends on control dependency: [if], data = [none]
}
else
{
return new FirstResourceConnectableXAResourceWrapperImpl(xares, pad, override,
productName, productVersion, jndiName,
(org.jboss.tm.ConnectableResource)mc); // depends on control dependency: [if], data = [none]
}
}
else
{
if (xastat != null && xastat.isEnabled())
{
return new ConnectableXAResourceWrapperStatImpl(xares, pad, override,
productName, productVersion, jndiName,
(org.jboss.tm.ConnectableResource)mc, xastat); // depends on control dependency: [if], data = [none]
}
else
{
return new ConnectableXAResourceWrapperImpl(xares, pad, override,
productName, productVersion, jndiName,
(org.jboss.tm.ConnectableResource)mc); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public void setValue(XExpression newValue)
{
if (newValue != value)
{
NotificationChain msgs = null;
if (value != null)
msgs = ((InternalEObject)value).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - XbasePackage.XASSIGNMENT__VALUE, null, msgs);
if (newValue != null)
msgs = ((InternalEObject)newValue).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - XbasePackage.XASSIGNMENT__VALUE, null, msgs);
msgs = basicSetValue(newValue, msgs);
if (msgs != null) msgs.dispatch();
}
else if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, XbasePackage.XASSIGNMENT__VALUE, newValue, newValue));
} } | public class class_name {
public void setValue(XExpression newValue)
{
if (newValue != value)
{
NotificationChain msgs = null;
if (value != null)
msgs = ((InternalEObject)value).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - XbasePackage.XASSIGNMENT__VALUE, null, msgs);
if (newValue != null)
msgs = ((InternalEObject)newValue).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - XbasePackage.XASSIGNMENT__VALUE, null, msgs);
msgs = basicSetValue(newValue, msgs); // depends on control dependency: [if], data = [(newValue]
if (msgs != null) msgs.dispatch();
}
else if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, XbasePackage.XASSIGNMENT__VALUE, newValue, newValue));
} } |
public class class_name {
@Override
public WebTarget target(UriBuilder builder) {
checkClosed();
WebTargetImpl wt = (WebTargetImpl) super.target(builder);
//construct our own webclient
JAXRSClientFactoryBean bean = new JAXRSClientFactoryBean();
URI uri = builder.build();
bean.setAddress(uri.toString());
WebClient targetClient = bean.createWebClient();
//get ClientCongfiguration
ClientConfiguration ccfg = WebClient.getConfig(targetClient);
//add Liberty Jax-RS Client Config Interceptor to configure things like KeepAlive, timeouts, etc.
ccfg.getOutInterceptors().add(new LibertyJaxRsClientConfigInterceptor(Phase.PRE_LOGICAL));
//add Liberty Jax-RS Client Proxy Interceptor to configure the proxy
ccfg.getOutInterceptors().add(new LibertyJaxRsClientProxyInterceptor(Phase.PRE_LOGICAL));
//add Liberty Ltpa handler Interceptor to check if is using ltpa token for sso
ccfg.getOutInterceptors().add(new LibertyJaxRsClientLtpaInterceptor());
//add Liberty Jax-RS OAuth Interceptor to check whether it has to propagate OAuth/access token
ccfg.getOutInterceptors().add(new LibertyJaxRsClientOAuthInterceptor());
//add Interceptor to check whether it has to propagate SAML token for sso
ccfg.getOutInterceptors().add(new PropagationHandler());
/**
* if no any user programmed SSL context info
* put the LibertyJaxRsClientSSLOutInterceptor into client OUT interceptor chain
* see if Liberty SSL can help
*/
if (hasSSLConfigInfo == false) {
LibertyJaxRsClientSSLOutInterceptor sslOutInterceptor = new LibertyJaxRsClientSSLOutInterceptor(Phase.PRE_LOGICAL);
sslOutInterceptor.setTLSConfiguration(secConfig);
ccfg.getOutInterceptors().add(sslOutInterceptor);
}
//set bus
LibertyApplicationBus bus;
//202957 same url use same bus, add a lock to busCache to ensure only one bus will be created in concurrent mode.
//ConcurrentHashMap can't ensure that.
String moduleName = getModuleName();
String id = moduleName + uri.getHost() + "-" + uri.getPort();
synchronized (busCache) {
bus = busCache.get(id);
if (bus == null) {
bus = LibertyJAXRSClientBusFactory.getInstance().getClientScopeBus(id);
busCache.put(id, bus);
}
}
ccfg.setBus(bus);
//add the root WebTarget to managed set so we can close it's associated WebClient
synchronized (baseClients) {
baseClients.add(targetClient);
}
return new WebTargetImpl(wt.getUriBuilder(), wt.getConfiguration(), targetClient);
} } | public class class_name {
@Override
public WebTarget target(UriBuilder builder) {
checkClosed();
WebTargetImpl wt = (WebTargetImpl) super.target(builder);
//construct our own webclient
JAXRSClientFactoryBean bean = new JAXRSClientFactoryBean();
URI uri = builder.build();
bean.setAddress(uri.toString());
WebClient targetClient = bean.createWebClient();
//get ClientCongfiguration
ClientConfiguration ccfg = WebClient.getConfig(targetClient);
//add Liberty Jax-RS Client Config Interceptor to configure things like KeepAlive, timeouts, etc.
ccfg.getOutInterceptors().add(new LibertyJaxRsClientConfigInterceptor(Phase.PRE_LOGICAL));
//add Liberty Jax-RS Client Proxy Interceptor to configure the proxy
ccfg.getOutInterceptors().add(new LibertyJaxRsClientProxyInterceptor(Phase.PRE_LOGICAL));
//add Liberty Ltpa handler Interceptor to check if is using ltpa token for sso
ccfg.getOutInterceptors().add(new LibertyJaxRsClientLtpaInterceptor());
//add Liberty Jax-RS OAuth Interceptor to check whether it has to propagate OAuth/access token
ccfg.getOutInterceptors().add(new LibertyJaxRsClientOAuthInterceptor());
//add Interceptor to check whether it has to propagate SAML token for sso
ccfg.getOutInterceptors().add(new PropagationHandler());
/**
* if no any user programmed SSL context info
* put the LibertyJaxRsClientSSLOutInterceptor into client OUT interceptor chain
* see if Liberty SSL can help
*/
if (hasSSLConfigInfo == false) {
LibertyJaxRsClientSSLOutInterceptor sslOutInterceptor = new LibertyJaxRsClientSSLOutInterceptor(Phase.PRE_LOGICAL);
sslOutInterceptor.setTLSConfiguration(secConfig); // depends on control dependency: [if], data = [none]
ccfg.getOutInterceptors().add(sslOutInterceptor); // depends on control dependency: [if], data = [none]
}
//set bus
LibertyApplicationBus bus;
//202957 same url use same bus, add a lock to busCache to ensure only one bus will be created in concurrent mode.
//ConcurrentHashMap can't ensure that.
String moduleName = getModuleName();
String id = moduleName + uri.getHost() + "-" + uri.getPort();
synchronized (busCache) {
bus = busCache.get(id);
if (bus == null) {
bus = LibertyJAXRSClientBusFactory.getInstance().getClientScopeBus(id); // depends on control dependency: [if], data = [none]
busCache.put(id, bus); // depends on control dependency: [if], data = [none]
}
}
ccfg.setBus(bus);
//add the root WebTarget to managed set so we can close it's associated WebClient
synchronized (baseClients) {
baseClients.add(targetClient);
}
return new WebTargetImpl(wt.getUriBuilder(), wt.getConfiguration(), targetClient);
} } |
public class class_name {
public static boolean between(float value, float start, float end) {
if (start > end) {
float tmp = start;
start = end;
end = tmp;
}
return value >= start && value <= end;
} } | public class class_name {
public static boolean between(float value, float start, float end) {
if (start > end) {
float tmp = start;
start = end; // depends on control dependency: [if], data = [none]
end = tmp; // depends on control dependency: [if], data = [none]
}
return value >= start && value <= end;
} } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.