code stringlengths 130 281k | code_dependency stringlengths 182 306k |
|---|---|
public class class_name {
// Finishes the upload: sends the final request and interprets the HTTP response.
// @return the deserialized response body on HTTP 200
// @throws X on an HTTP 409 route-specific API error (built via newException)
// @throws DbxException on network errors, malformed JSON, or unexpected status codes
// Side effect: 'finished' is always set, even on failure, so the uploader
// cannot be reused afterwards.
public R finish() throws X, DbxException {
assertOpenAndUnfinished();
HttpRequestor.Response response = null;
try {
response = httpUploader.finish();
try {
if (response.getStatusCode() == 200) {
return responseSerializer.deserialize(response.getBody());
}
else if (response.getStatusCode() == 409) {
// 409 carries a structured API error body; unwrap it into the
// caller-visible exception type for this route.
DbxWrappedException wrapped = DbxWrappedException.fromResponse(errorSerializer, response, this.userId);
throw newException(wrapped);
}
else {
throw DbxRequestUtil.unexpectedStatus(response);
}
} catch (JsonProcessingException ex) {
// Body was not valid JSON; include the request id to aid debugging.
String requestId = DbxRequestUtil.getRequestId(response);
throw new BadResponseException(requestId, "Bad JSON in response: " + ex, ex);
}
} catch (IOException ex) {
throw new NetworkIOException(ex);
} finally {
// Make sure input stream is closed
if (response != null) {
IOUtil.closeQuietly(response.getBody());
}
// Mark finished unconditionally so the uploader is single-use.
finished = true;
}
} } | public class class_name {
public R finish() throws X, DbxException {
assertOpenAndUnfinished();
HttpRequestor.Response response = null;
try {
response = httpUploader.finish();
try {
if (response.getStatusCode() == 200) {
return responseSerializer.deserialize(response.getBody()); // depends on control dependency: [if], data = [none]
}
else if (response.getStatusCode() == 409) {
DbxWrappedException wrapped = DbxWrappedException.fromResponse(errorSerializer, response, this.userId);
throw newException(wrapped);
}
else {
throw DbxRequestUtil.unexpectedStatus(response);
}
} catch (JsonProcessingException ex) {
String requestId = DbxRequestUtil.getRequestId(response);
throw new BadResponseException(requestId, "Bad JSON in response: " + ex, ex);
} // depends on control dependency: [catch], data = [none]
} catch (IOException ex) {
throw new NetworkIOException(ex);
} finally {
// Make sure input stream is closed
if (response != null) {
IOUtil.closeQuietly(response.getBody()); // depends on control dependency: [if], data = [(response]
}
finished = true;
}
} } |
public class class_name {
// Counts the alert entries covered by the current selection in the Alerts tree.
// - no selection: 0
// - single selection: 1 for a leaf node (or when multi-select is disabled),
//   otherwise the selected node's child count
// - multiple selections: sums child counts per selected path; a selected leaf
//   counts as 1 unless its parent is also selected (its parent's child count
//   already covers it — avoids double counting).
private int getNumberOfSelectedAlerts() {
JTree treeAlert = this.extAlert.getAlertPanel().getTreeAlert();
int count = treeAlert.getSelectionCount();
if (count == 0) {
return 0;
}
if (count == 1) {
DefaultMutableTreeNode alertNode = (DefaultMutableTreeNode) treeAlert.getSelectionPath().getLastPathComponent();
if (alertNode.getChildCount() == 0 || !isMultiSelect()) {
return 1;
}
return alertNode.getChildCount();
}
// Multiple paths selected: re-count from scratch across all selected paths.
count = 0;
TreePath[] paths = treeAlert.getSelectionPaths();
for (int i = 0; i < paths.length; i++) {
TreePath nodePath = paths[i];
int childCount = ((DefaultMutableTreeNode) nodePath.getLastPathComponent()).getChildCount();
// Leaf: counts once, unless its parent is selected too (already counted).
count += childCount != 0 ? childCount : (treeAlert.isPathSelected(nodePath.getParentPath()) ? 0 : 1);
}
return count;
} } | public class class_name {
private int getNumberOfSelectedAlerts() {
JTree treeAlert = this.extAlert.getAlertPanel().getTreeAlert();
int count = treeAlert.getSelectionCount();
if (count == 0) {
return 0; // depends on control dependency: [if], data = [none]
}
if (count == 1) {
DefaultMutableTreeNode alertNode = (DefaultMutableTreeNode) treeAlert.getSelectionPath().getLastPathComponent();
if (alertNode.getChildCount() == 0 || !isMultiSelect()) {
return 1; // depends on control dependency: [if], data = [none]
}
return alertNode.getChildCount(); // depends on control dependency: [if], data = [none]
}
count = 0;
TreePath[] paths = treeAlert.getSelectionPaths();
for (int i = 0; i < paths.length; i++) {
TreePath nodePath = paths[i];
int childCount = ((DefaultMutableTreeNode) nodePath.getLastPathComponent()).getChildCount();
count += childCount != 0 ? childCount : (treeAlert.isPathSelected(nodePath.getParentPath()) ? 0 : 1); // depends on control dependency: [for], data = [none]
}
return count;
} } |
public class class_name {
// Builds a URI from scheme, host, path and a map of query parameters.
// A URISyntaxException is converted into a runtime exception by the project's
// exception manager, which also logs the failing arguments.
public static URI buildUri(String httpOrHttps, String host, String path,
Map<String, String> paramMap) {
try {
return new URIBuilder().setScheme(httpOrHttps).setHost(host)
.setPath(path)
.setParameters(buildNameValuePareList(paramMap)).build();
} catch (URISyntaxException e) {
// NOTE(review): the label "getResponseAsString" looks copy-pasted from a
// different method — confirm whether it should read "buildUri".
throw JMExceptionManager.handleExceptionAndReturnRuntimeEx(log, e,
"getResponseAsString", httpOrHttps, host, path, paramMap);
}
} } | public class class_name {
public static URI buildUri(String httpOrHttps, String host, String path,
Map<String, String> paramMap) {
try {
return new URIBuilder().setScheme(httpOrHttps).setHost(host)
.setPath(path)
.setParameters(buildNameValuePareList(paramMap)).build(); // depends on control dependency: [try], data = [none]
} catch (URISyntaxException e) {
throw JMExceptionManager.handleExceptionAndReturnRuntimeEx(log, e,
"getResponseAsString", httpOrHttps, host, path, paramMap);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * On {@code PostAddToViewEvent}, registers the sticky-footer stylesheet when
 * the navbar is explicitly "non-sticky", or positioned at the bottom without
 * being sticky. Always delegates to the superclass afterwards.
 *
 * @param event the component system event being processed.
 * @throws AbortProcessingException propagated from the superclass handler.
 */
@Override
public void processEvent(ComponentSystemEvent event) throws AbortProcessingException {
    if (event instanceof PostAddToViewEvent) {
        if ("non-sticky".equals(getFixed()) || ("bottom".equals(getPosition()) && (!isSticky()))) {
            // Registers css/sticky-footer-navbar.css as an external resource.
            // (Removed a commented-out manual UIOutput-resource block that
            // duplicated what addExtCSSResource does.)
            AddResourcesListener.addExtCSSResource("sticky-footer-navbar.css");
        }
    }
    super.processEvent(event);
} } | public class class_name {
@Override
public void processEvent(ComponentSystemEvent event) throws AbortProcessingException {
if (event instanceof PostAddToViewEvent) {
if ("non-sticky".equals(getFixed()) || ("bottom".equals(getPosition()) && (!isSticky()))) {
AddResourcesListener.addExtCSSResource("sticky-footer-navbar.css"); // depends on control dependency: [if], data = [none]
/*UIOutput resource = new UIOutput();
resource.getAttributes().put("name", "css/sticky-footer-navbar.css");
resource.getAttributes().put("library", C.BSF_LIBRARY);
resource.getAttributes().put("target", "head");
resource.setRendererType("javax.faces.resource.Stylesheet");
FacesContext.getCurrentInstance().getViewRoot().addComponentResource(FacesContext.getCurrentInstance(),
resource);*/
}
}
super.processEvent(event);
} } |
public class class_name {
/**
 * Sends every queued request (prepended + pending + appended) as a single
 * multipart "payload" POST.
 *
 * @return true when the request was handed to the transport and the callback
 *         notified via {@code sent}; false when sending failed, in which case
 *         {@code callback.error} has been invoked with ERROR_REQUEST_SEND.
 */
public boolean send()
{
assert sentRequests == null;
sentRequests = new ArrayList<RequestCallInfo>();
// add prepended requests,
addPrependedRequests( sentRequests );
// add requests to send: drain the pending queue in order.
// (Replaces a remove(0) loop, which is O(n^2) on an ArrayList; addAll +
// clear preserves ordering and the "queue emptied" end state exactly.)
sentRequests.addAll( requestsToSend );
requestsToSend.clear();
// add appended requests
addAppendedRequests( sentRequests );
// prepare payload
JSONArray payload = createPayload();
RequestBuilder builderPost = buildMultipart( "payload", payload.toString() );
nbSentBytes += builderPost.getRequestData().length();
try
{
sentRequest = builderPost.send();
}
catch( RequestException e )
{
callback.error( RPCErrorCodes.ERROR_REQUEST_SEND, e, this );
return false;
}
callback.sent( this );
return true;
} } | public class class_name {
public boolean send()
{
assert sentRequests == null;
sentRequests = new ArrayList<RequestCallInfo>();
// add prepended requests,
addPrependedRequests( sentRequests );
// add requests to send
while( !requestsToSend.isEmpty() )
sentRequests.add( requestsToSend.remove( 0 ) );
// add appended requests
addAppendedRequests( sentRequests );
// prepare payload
JSONArray payload = createPayload();
RequestBuilder builderPost = buildMultipart( "payload", payload.toString() );
nbSentBytes += builderPost.getRequestData().length();
try
{
sentRequest = builderPost.send(); // depends on control dependency: [try], data = [none]
}
catch( RequestException e )
{
callback.error( RPCErrorCodes.ERROR_REQUEST_SEND, e, this );
return false;
} // depends on control dependency: [catch], data = [none]
callback.sent( this );
return true;
} } |
public class class_name {
/**
 * Builds the JSON form of this blob: a copy of its properties plus a blob
 * type marker, and either the digest (when one exists) or the raw content.
 */
private Map<String, Object> jsonRepresentation() {
    final Map<String, Object> result = new HashMap<>(getProperties());
    result.put(META_PROP_TYPE, TYPE_BLOB);
    if (blobDigest == null) {
        // No digest yet: embed the content directly.
        result.put(META_PROP_DATA, getContent());
    } else {
        result.put(PROP_DIGEST, blobDigest);
    }
    return result;
} } | public class class_name {
private Map<String, Object> jsonRepresentation() {
final Map<String, Object> json = new HashMap<>(getProperties());
json.put(META_PROP_TYPE, TYPE_BLOB);
if (blobDigest != null) {
json.put(PROP_DIGEST, blobDigest); // depends on control dependency: [if], data = [none]
}
else {
json.put(META_PROP_DATA, getContent()); // depends on control dependency: [if], data = [none]
}
return json;
} } |
public class class_name {
/**
 * Asks every registered adapter extension to save its state into the given
 * bundle, then returns that same bundle for call chaining.
 */
public Bundle saveInstanceState(@Nullable Bundle savedInstanceState, String prefix) {
    // Delegate state saving to each extension in registration order.
    for (final IAdapterExtension<Item> extension : mExtensions.values()) {
        extension.saveInstanceState(savedInstanceState, prefix);
    }
    return savedInstanceState;
} } | public class class_name {
public Bundle saveInstanceState(@Nullable Bundle savedInstanceState, String prefix) {
// handle our extensions
for (IAdapterExtension<Item> ext : mExtensions.values()) {
ext.saveInstanceState(savedInstanceState, prefix); // depends on control dependency: [for], data = [ext]
}
return savedInstanceState;
} } |
public class class_name {
// Lazily resolves the IfcGeometricCurveSet EClass from the registered IFC4
// EPackage (classifier index 302 is fixed by the generated model).
// NOTE(review): the lazy init is unsynchronized — presumably benign if raced
// since the lookup is idempotent; confirm threading assumptions.
@Override
public EClass getIfcGeometricCurveSet() {
if (ifcGeometricCurveSetEClass == null) {
ifcGeometricCurveSetEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(302);
}
return ifcGeometricCurveSetEClass;
} } | public class class_name {
@Override
public EClass getIfcGeometricCurveSet() {
if (ifcGeometricCurveSetEClass == null) {
ifcGeometricCurveSetEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(302);
// depends on control dependency: [if], data = [none]
}
return ifcGeometricCurveSetEClass;
} } |
public class class_name {
// Writes the given file channel as the HTTP response body.
// Falls back to convertFile() when the channel path is not file-channel
// capable. Any failure is recorded in this.error before being rethrown, and
// the file-channel buffer is always released.
@Override
@FFDCIgnore({ IOException.class })
public void writeFile(FileChannel fc) throws IOException {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "writeFile: " + fc);
}
if (cannotWriteFC()) {
// not file channel capable
convertFile(fc);
return;
}
// make sure the headers are written separately from the file buffer
flushHeaders();
WsByteBuffer fb = HttpDispatcher.getBufferManager().allocateFileChannelBuffer(fc);
try {
// TODO should adjust write timeout based on file size. Large files
// can only be written so fast so a 1Gb file should have larger
// timeout than a 100K file
this.isc.sendResponseBody(new WsByteBuffer[] { fb });
this.bytesWritten += fc.size();
} catch (MessageSentException mse) {
// The message was already marked sent: invalid state, report via FFDC
// and surface as an IOException to the caller.
FFDCFilter.processException(mse, getClass().getName(),
"writeFile", new Object[] { this, this.isc });
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
Tr.event(tc, "Invalid state, message-sent-exception received; " + this.isc);
}
this.error = new IOException("Invalid state");
throw this.error;
} catch (IOException ioe) {
// no FFDC required
this.error = ioe;
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Received exception during write: " + ioe);
}
throw ioe;
} finally {
// Always release the buffer, success or failure.
fb.release();
}
} } | public class class_name {
@Override
@FFDCIgnore({ IOException.class })
public void writeFile(FileChannel fc) throws IOException {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "writeFile: " + fc);
}
if (cannotWriteFC()) {
// not file channel capable
convertFile(fc);
return;
}
// make sure the headers are written separately from the file buffer
flushHeaders();
WsByteBuffer fb = HttpDispatcher.getBufferManager().allocateFileChannelBuffer(fc);
try {
// TODO should adjust write timeout based on file size. Large files
// can only be written so fast so a 1Gb file should have larger
// timeout than a 100K file
this.isc.sendResponseBody(new WsByteBuffer[] { fb });
this.bytesWritten += fc.size();
} catch (MessageSentException mse) {
FFDCFilter.processException(mse, getClass().getName(),
"writeFile", new Object[] { this, this.isc });
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
Tr.event(tc, "Invalid state, message-sent-exception received; " + this.isc); // depends on control dependency: [if], data = [none]
}
this.error = new IOException("Invalid state");
throw this.error;
} catch (IOException ioe) {
// no FFDC required
this.error = ioe;
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Received exception during write: " + ioe); // depends on control dependency: [if], data = [none]
}
throw ioe;
} finally {
fb.release();
}
} } |
public class class_name {
// Computes the 20-byte address hash RIPEMD160(SHA-256(pubkeyBytes)).
// Declared synchronized because the shared static 'ripeMD160' digest instance
// is stateful across update/doFinal calls.
public static synchronized byte[] addressHash(byte[] pubkeyBytes) {
try {
byte[] sha256 = MessageDigest.getInstance(SHA256).digest(pubkeyBytes);
byte[] out = new byte[20];
ripeMD160.update(sha256, 0, sha256.length);
ripeMD160.doFinal(out, 0); // This also resets the hash function for
// next use
return out;
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e); // Cannot happen.
}
} } | public class class_name {
public static synchronized byte[] addressHash(byte[] pubkeyBytes) {
try {
byte[] sha256 = MessageDigest.getInstance(SHA256).digest(pubkeyBytes);
byte[] out = new byte[20];
ripeMD160.update(sha256, 0, sha256.length); // depends on control dependency: [try], data = [none]
ripeMD160.doFinal(out, 0); // This also resets the hash function for // depends on control dependency: [try], data = [none]
// next use
return out; // depends on control dependency: [try], data = [none]
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e); // Cannot happen.
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
// Releases and evicts idle transports not used within idleTransportTimeout.
// Returns the total number of transports (idle + leased) still in the pool.
// synchronized: guards the idle/leased maps against concurrent access.
synchronized int removeOldConnections() {
long now = System.currentTimeMillis();
long expirationTime = now - idleTransportTimeout.millis();
// Collect expired entries first, then remove, so 'idle' is not mutated
// while being iterated.
List<PooledTransport> removeFromIdle = new ArrayList<PooledTransport>();
for (Map.Entry<PooledTransport, Long> idleEntry : idle.entrySet()) {
long lastUsed = idleEntry.getValue();
if (lastUsed < expirationTime) {
PooledTransport removed = idleEntry.getKey();
if (log.isTraceEnabled()) {
log.trace("Expiring idle transport for job [" + jobPoolingKey + "], transport: ["
+ removed.toString() + "]. Last used [" + new TimeValue(now-lastUsed) + "] ago. Expired ["
+ idleTransportTimeout + "] ago.");
}
release(removed);
removeFromIdle.add(removed);
}
}
for (PooledTransport toRemove : removeFromIdle) {
idle.remove(toRemove);
}
return idle.size() + leased.size();
} } | public class class_name {
synchronized int removeOldConnections() {
long now = System.currentTimeMillis();
long expirationTime = now - idleTransportTimeout.millis();
List<PooledTransport> removeFromIdle = new ArrayList<PooledTransport>();
for (Map.Entry<PooledTransport, Long> idleEntry : idle.entrySet()) {
long lastUsed = idleEntry.getValue();
if (lastUsed < expirationTime) {
PooledTransport removed = idleEntry.getKey();
if (log.isTraceEnabled()) {
log.trace("Expiring idle transport for job [" + jobPoolingKey + "], transport: ["
+ removed.toString() + "]. Last used [" + new TimeValue(now-lastUsed) + "] ago. Expired ["
+ idleTransportTimeout + "] ago."); // depends on control dependency: [if], data = [none]
}
release(removed); // depends on control dependency: [if], data = [none]
removeFromIdle.add(removed); // depends on control dependency: [if], data = [none]
}
}
for (PooledTransport toRemove : removeFromIdle) {
idle.remove(toRemove); // depends on control dependency: [for], data = [toRemove]
}
return idle.size() + leased.size();
} } |
public class class_name {
/**
 * Handles a newly created HTTP session: updates the current/total session
 * counters under the counter lock, logs counters at info level, and at debug
 * level additionally logs the session id, originating request URL (when
 * available) and the creating stack trace.
 *
 * @param event the session event carrying the new session.
 */
protected void sessionCreated(HttpSessionEvent event) {
HttpServletRequest request = OpenCmsServlet.currentRequest.get();
String tid = "[" + Thread.currentThread().getId() + "] ";
synchronized (m_lockSessionCount) {
    // The current count can never be below 1 once a session is created.
    m_sessionCountCurrent = (m_sessionCountCurrent <= 0) ? 1 : (m_sessionCountCurrent + 1);
    m_sessionCountTotal++;
    if (LOG.isInfoEnabled()) {
        // FIX: use Integer.valueOf instead of the deprecated Integer(int)
        // constructor (same boxed values, allows caching).
        LOG.info(
            tid
                + Messages.get().getBundle().key(
                    Messages.LOG_SESSION_CREATED_2,
                    Integer.valueOf(m_sessionCountTotal),
                    Integer.valueOf(m_sessionCountCurrent)));
    }
}
if (LOG.isDebugEnabled()) {
    LOG.debug(tid + Messages.get().getBundle().key(Messages.LOG_SESSION_CREATED_1, event.getSession().getId()));
    if (request != null) {
        LOG.debug(tid + "Session created in request: " + request.getRequestURL());
    }
    // Capture the current stack so the origin of the session creation can
    // be traced from the debug log.
    StringWriter sw = new StringWriter();
    new Throwable("").printStackTrace(new PrintWriter(sw));
    String stackTrace = sw.toString();
    LOG.debug(tid + "Stack = \n" + stackTrace);
}
} } | public class class_name {
protected void sessionCreated(HttpSessionEvent event) {
HttpServletRequest request = OpenCmsServlet.currentRequest.get();
String tid = "[" + Thread.currentThread().getId() + "] ";
synchronized (m_lockSessionCount) {
m_sessionCountCurrent = (m_sessionCountCurrent <= 0) ? 1 : (m_sessionCountCurrent + 1);
m_sessionCountTotal++;
if (LOG.isInfoEnabled()) {
LOG.info(
tid
+ Messages.get().getBundle().key(
Messages.LOG_SESSION_CREATED_2,
new Integer(m_sessionCountTotal),
new Integer(m_sessionCountCurrent))); // depends on control dependency: [if], data = [none]
}
}
if (LOG.isDebugEnabled()) {
LOG.debug(tid + Messages.get().getBundle().key(Messages.LOG_SESSION_CREATED_1, event.getSession().getId())); // depends on control dependency: [if], data = [none]
if (request != null) {
LOG.debug(tid + "Session created in request: " + request.getRequestURL()); // depends on control dependency: [if], data = [none]
}
StringWriter sw = new StringWriter();
new Throwable("").printStackTrace(new PrintWriter(sw)); // depends on control dependency: [if], data = [none]
String stackTrace = sw.toString();
LOG.debug(tid + "Stack = \n" + stackTrace); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
// Shell command "send drools start": attaches the given stream to a Drools
// group, optionally routing results to a CEP output stream and/or a Kafka
// topic. API failures are wrapped in a shell-friendly runtime exception.
@CliCommand(value = "send drools start", help = "start send to drools action")
public String sendToDroolsStart(
@CliOption(key = {"stream"}, help = "The stream name", mandatory = true, optionContext = "stream") final
String streamName,
@CliOption(key = {"group"}, help = "Drools Group Configuration", mandatory = true, optionContext = "group")
final String groupName,
@CliOption(key = {"output_stream"}, help = "CEP Stream to insert the results", mandatory = false) final
String outputStream,
@CliOption(key = {"kafkaTopic"}, help = "Name of a Kafka Topic to insert the results", mandatory = false)
final
String kafkaTopic ) {
try {
ssaw.api().startSendToDrools(streamName, groupName, outputStream, kafkaTopic);
return "Stream ".concat(streamName).concat(" attached to drools correctly");
} catch (StratioStreamingException e) {
throw new StreamingShellException(e);
}
} } | public class class_name {
@CliCommand(value = "send drools start", help = "start send to drools action")
public String sendToDroolsStart(
@CliOption(key = {"stream"}, help = "The stream name", mandatory = true, optionContext = "stream") final
String streamName,
@CliOption(key = {"group"}, help = "Drools Group Configuration", mandatory = true, optionContext = "group")
final String groupName,
@CliOption(key = {"output_stream"}, help = "CEP Stream to insert the results", mandatory = false) final
String outputStream,
@CliOption(key = {"kafkaTopic"}, help = "Name of a Kafka Topic to insert the results", mandatory = false)
final
String kafkaTopic ) {
try {
ssaw.api().startSendToDrools(streamName, groupName, outputStream, kafkaTopic); // depends on control dependency: [try], data = [none]
return "Stream ".concat(streamName).concat(" attached to drools correctly"); // depends on control dependency: [try], data = [none]
} catch (StratioStreamingException e) {
throw new StreamingShellException(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Creates and configures the marker-language parser for the given input file.
 * Only Markdown files are currently supported. The parser's script executor
 * is wired with the project's class path, boot class path, Java source
 * version and temp folder, and the internal documentation parser receives
 * the various property providers (project, session, generator, defaults).
 *
 * @param inputFile the documentation file to parse.
 * @return the configured parser.
 * @throws MojoExecutionException if the file extension is not supported.
 * @throws IOException on input/output problems.
 */
@SuppressWarnings("checkstyle:npathcomplexity")
protected AbstractMarkerLanguageParser createLanguageParser(File inputFile) throws MojoExecutionException, IOException {
final AbstractMarkerLanguageParser parser;
if (isFileExtension(inputFile, MarkdownParser.MARKDOWN_FILE_EXTENSIONS)) {
parser = this.injector.getInstance(MarkdownParser.class);
} else {
throw new MojoExecutionException(MessageFormat.format(Messages.AbstractDocumentationMojo_3, inputFile));
}
parser.setGithubExtensionEnable(this.githubExtension);
final SarlDocumentationParser internalParser = parser.getDocumentParser();
if (this.isLineContinuationEnable) {
internalParser.setLineContinuation(SarlDocumentationParser.DEFAULT_LINE_CONTINUATION);
} else {
internalParser.addLowPropertyProvider(createProjectProperties());
}
final ScriptExecutor scriptExecutor = internalParser.getScriptExecutor();
final StringBuilder cp = new StringBuilder();
for (final File cpElement : getClassPath()) {
if (cp.length() > 0) {
// FIX: use the platform path-list separator (':' on Unix, ';' on
// Windows) instead of a hard-coded ':'.
cp.append(File.pathSeparator);
}
cp.append(cpElement.getAbsolutePath());
}
scriptExecutor.setClassPath(cp.toString());
final String bootPath = getBootClassPath();
if (!Strings.isEmpty(bootPath)) {
scriptExecutor.setBootClassPath(bootPath);
}
// Resolve the Java source version; default to Java 8 when unset or unknown.
JavaVersion version = null;
if (!Strings.isEmpty(this.source)) {
version = JavaVersion.fromQualifier(this.source);
}
if (version == null) {
version = JavaVersion.JAVA8;
}
scriptExecutor.setJavaSourceVersion(version.getQualifier());
scriptExecutor.setTempFolder(this.tempDirectory.getAbsoluteFile());
// NOTE(review): createProjectProperties() is also registered in the
// else-branch above — confirm the double registration is intentional.
internalParser.addLowPropertyProvider(createProjectProperties());
internalParser.addLowPropertyProvider(this.session.getCurrentProject().getProperties());
internalParser.addLowPropertyProvider(this.session.getUserProperties());
internalParser.addLowPropertyProvider(this.session.getSystemProperties());
internalParser.addLowPropertyProvider(createGeneratorProperties());
final Properties defaultValues = createDefaultValueProperties();
if (defaultValues != null) {
internalParser.addLowPropertyProvider(defaultValues);
}
return parser;
} } | public class class_name {
@SuppressWarnings("checkstyle:npathcomplexity")
protected AbstractMarkerLanguageParser createLanguageParser(File inputFile) throws MojoExecutionException, IOException {
final AbstractMarkerLanguageParser parser;
if (isFileExtension(inputFile, MarkdownParser.MARKDOWN_FILE_EXTENSIONS)) {
parser = this.injector.getInstance(MarkdownParser.class);
} else {
throw new MojoExecutionException(MessageFormat.format(Messages.AbstractDocumentationMojo_3, inputFile));
}
parser.setGithubExtensionEnable(this.githubExtension);
final SarlDocumentationParser internalParser = parser.getDocumentParser();
if (this.isLineContinuationEnable) {
internalParser.setLineContinuation(SarlDocumentationParser.DEFAULT_LINE_CONTINUATION);
} else {
internalParser.addLowPropertyProvider(createProjectProperties());
}
final ScriptExecutor scriptExecutor = internalParser.getScriptExecutor();
final StringBuilder cp = new StringBuilder();
for (final File cpElement : getClassPath()) {
if (cp.length() > 0) {
cp.append(":"); //$NON-NLS-1$ // depends on control dependency: [if], data = [none]
}
cp.append(cpElement.getAbsolutePath());
}
scriptExecutor.setClassPath(cp.toString());
final String bootPath = getBootClassPath();
if (!Strings.isEmpty(bootPath)) {
scriptExecutor.setBootClassPath(bootPath);
}
JavaVersion version = null;
if (!Strings.isEmpty(this.source)) {
version = JavaVersion.fromQualifier(this.source);
}
if (version == null) {
version = JavaVersion.JAVA8;
}
scriptExecutor.setJavaSourceVersion(version.getQualifier());
scriptExecutor.setTempFolder(this.tempDirectory.getAbsoluteFile());
internalParser.addLowPropertyProvider(createProjectProperties());
internalParser.addLowPropertyProvider(this.session.getCurrentProject().getProperties());
internalParser.addLowPropertyProvider(this.session.getUserProperties());
internalParser.addLowPropertyProvider(this.session.getSystemProperties());
internalParser.addLowPropertyProvider(createGeneratorProperties());
final Properties defaultValues = createDefaultValueProperties();
if (defaultValues != null) {
internalParser.addLowPropertyProvider(defaultValues);
}
return parser;
} } |
public class class_name {
/**
 * Collects the extracted names of all methods of the given gRPC service,
 * sorted case-insensitively.
 */
protected List<String> collectMethodNamesForService(final ServiceDescriptor serviceDescriptor) {
    final List<String> methodNames = new ArrayList<>();
    for (final MethodDescriptor<?, ?> method : serviceDescriptor.getMethods()) {
        methodNames.add(extractMethodName(method));
    }
    // Stable, case-insensitive ordering for display/lookup.
    methodNames.sort(String.CASE_INSENSITIVE_ORDER);
    return methodNames;
} } | public class class_name {
protected List<String> collectMethodNamesForService(final ServiceDescriptor serviceDescriptor) {
final List<String> methods = new ArrayList<>();
for (final MethodDescriptor<?, ?> grpcMethod : serviceDescriptor.getMethods()) {
methods.add(extractMethodName(grpcMethod)); // depends on control dependency: [for], data = [grpcMethod]
}
methods.sort(String.CASE_INSENSITIVE_ORDER);
return methods;
} } |
public class class_name {
// Returns true if source[start..limit) contains strictly more than 'number'
// Unicode code points. Cheap bounds are checked first; only when the answer
// depends on how many surrogate pairs occur does it scan the text.
// Throws IndexOutOfBoundsException for negative indices or start > limit.
public static boolean hasMoreCodePointsThan(char source[], int start, int limit, int number) {
int length = limit - start;
if (length < 0 || start < 0 || limit < 0) {
throw new IndexOutOfBoundsException(
"Start and limit indexes should be non-negative and start <= limit");
}
// Any non-negative code point count exceeds a negative threshold.
if (number < 0) {
return true;
}
if (source == null) {
return false;
}
// length >= 0 known
// source contains at least (length + 1) / 2 code points: <= 2
// chars per cp
if (((length + 1) >> 1) > number) {
return true;
}
// check if source does not even contain enough chars
int maxsupplementary = length - number;
if (maxsupplementary <= 0) {
return false;
}
// there are maxsupplementary = length - number more chars than
// asked-for code points
// count code points until they exceed and also check that there are
// no more than maxsupplementary supplementary code points (char pairs)
// (Loop terminates in-bounds: at most 'number' iterations, consuming at
// most number + maxsupplementary - 1 = length - 1 chars before a return.)
while (true) {
if (length == 0) {
return false;
}
if (number == 0) {
return true;
}
// A lead+trail surrogate pair is a single code point but two chars.
if (isLeadSurrogate(source[start++]) && start != limit
&& isTrailSurrogate(source[start])) {
start++;
if (--maxsupplementary <= 0) {
// too many pairs - too few code points
return false;
}
}
--number;
}
} } | public class class_name {
public static boolean hasMoreCodePointsThan(char source[], int start, int limit, int number) {
int length = limit - start;
if (length < 0 || start < 0 || limit < 0) {
throw new IndexOutOfBoundsException(
"Start and limit indexes should be non-negative and start <= limit");
}
if (number < 0) {
return true; // depends on control dependency: [if], data = [none]
}
if (source == null) {
return false; // depends on control dependency: [if], data = [none]
}
// length >= 0 known
// source contains at least (length + 1) / 2 code points: <= 2
// chars per cp
if (((length + 1) >> 1) > number) {
return true; // depends on control dependency: [if], data = [none]
}
// check if source does not even contain enough chars
int maxsupplementary = length - number;
if (maxsupplementary <= 0) {
return false; // depends on control dependency: [if], data = [none]
}
// there are maxsupplementary = length - number more chars than
// asked-for code points
// count code points until they exceed and also check that there are
// no more than maxsupplementary supplementary code points (char pairs)
while (true) {
if (length == 0) {
return false; // depends on control dependency: [if], data = [none]
}
if (number == 0) {
return true; // depends on control dependency: [if], data = [none]
}
if (isLeadSurrogate(source[start++]) && start != limit
&& isTrailSurrogate(source[start])) {
start++; // depends on control dependency: [if], data = [sta]
if (--maxsupplementary <= 0) {
// too many pairs - too few code points
return false; // depends on control dependency: [if], data = [none]
}
}
--number; // depends on control dependency: [while], data = [none]
}
} } |
public class class_name {
// Commits a transactional offer previously reserved under itemId.
// Primary (backup == false): requires a reserve in txMap, enqueues the item,
// cancels any pending eviction, and persists via the queue store if enabled.
// Backup (backup == true): a missing reserve is tolerated — the item is
// recreated from the replicated data and stored in the backup map.
// Always returns true.
public boolean txnCommitOffer(long itemId, Data data, boolean backup) {
QueueItem item = txMap.remove(itemId);
if (item == null && !backup) {
throw new TransactionException("No reserve: " + itemId);
} else if (item == null) {
// Backup without a reserve: rebuild the item from the replicated data.
item = new QueueItem(this, itemId, data);
}
item.setData(data);
if (!backup) {
getItemQueue().offer(item);
cancelEvictionIfExists();
} else {
getBackupMap().put(itemId, item);
}
if (store.isEnabled() && !backup) {
try {
store.store(item.getItemId(), data);
} catch (Exception e) {
// Store failure is logged but does not roll back the commit.
logger.warning("Exception during store", e);
}
}
return true;
} } | public class class_name {
public boolean txnCommitOffer(long itemId, Data data, boolean backup) {
QueueItem item = txMap.remove(itemId);
if (item == null && !backup) {
throw new TransactionException("No reserve: " + itemId);
} else if (item == null) {
item = new QueueItem(this, itemId, data); // depends on control dependency: [if], data = [none]
}
item.setData(data);
if (!backup) {
getItemQueue().offer(item); // depends on control dependency: [if], data = [none]
cancelEvictionIfExists(); // depends on control dependency: [if], data = [none]
} else {
getBackupMap().put(itemId, item); // depends on control dependency: [if], data = [none]
}
if (store.isEnabled() && !backup) {
try {
store.store(item.getItemId(), data); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
logger.warning("Exception during store", e);
} // depends on control dependency: [catch], data = [none]
}
return true;
} } |
public class class_name {
// Wires the back reference named 'referenceName' on the given value so it
// points at 'reference'. Delegates to the value's concrete deserializer when
// it differs from this one; the class check guards against infinite
// recursion. Unknown reference names fail via ctx.traceError.
@Override
public void setBackReference( String referenceName, Object reference, T value, JsonDeserializationContext ctx ) {
if ( null == value ) {
// Nothing to wire up.
return;
}
JsonDeserializer<T> deserializer = getDeserializer( null, ctx, value.getClass() ).getDeserializer();
if ( deserializer.getClass() != getClass() ) {
// we test if it's not this deserializer to avoid an infinite loop
deserializer.setBackReference( referenceName, reference, value, ctx );
return;
}
BackReferenceProperty backReferenceProperty = backReferenceDeserializers.get( referenceName );
if ( null == backReferenceProperty ) {
throw ctx.traceError( "The back reference '" + referenceName + "' does not exist" );
}
backReferenceProperty.setBackReference( value, reference, ctx );
} } | public class class_name {
@Override
public void setBackReference( String referenceName, Object reference, T value, JsonDeserializationContext ctx ) {
if ( null == value ) {
return; // depends on control dependency: [if], data = [none]
}
JsonDeserializer<T> deserializer = getDeserializer( null, ctx, value.getClass() ).getDeserializer();
if ( deserializer.getClass() != getClass() ) {
// we test if it's not this deserializer to avoid an infinite loop
deserializer.setBackReference( referenceName, reference, value, ctx ); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
BackReferenceProperty backReferenceProperty = backReferenceDeserializers.get( referenceName );
if ( null == backReferenceProperty ) {
throw ctx.traceError( "The back reference '" + referenceName + "' does not exist" );
}
backReferenceProperty.setBackReference( value, reference, ctx );
} } |
public class class_name {
/**
 * Setter for unitOfMeasure. The value is also mirrored into the composite
 * REST id, which is created lazily on first use.
 */
public final void setUnitOfMeasure(final UnitOfMeasure pUnitOfMeasure) {
    this.unitOfMeasure = pUnitOfMeasure;
    if (this.itsId == null) {
        // Lazily create the composite key holder.
        this.itsId = new WarehouseRestId();
    }
    this.itsId.setUnitOfMeasure(pUnitOfMeasure);
} } | public class class_name {
public final void setUnitOfMeasure(final UnitOfMeasure pUnitOfMeasure) {
this.unitOfMeasure = pUnitOfMeasure;
if (this.itsId == null) {
this.itsId = new WarehouseRestId(); // depends on control dependency: [if], data = [none]
}
this.itsId.setUnitOfMeasure(this.unitOfMeasure);
} } |
public class class_name {
    /**
     * Returns the index of the first element equal to {@code item} — by
     * reference or by {@code equals} — or -1 when no element matches.
     * A null {@code item} matches null elements.
     */
    public static <T> int firstIndexOf(T[] items, T item) {
        int index = 0;
        for (T candidate : items) {
            boolean sameReference = candidate == item;
            if (sameReference || (candidate != null && candidate.equals(item))) {
                return index;
            }
            index++;
        }
        return -1;
    }
}
public static <T> int firstIndexOf(T[] items, T item) {
for (int i = 0; i != items.length; ++i) {
T ith = items[i];
if (ith == item || (ith != null && ith.equals(item))) {
return i; // depends on control dependency: [if], data = [none]
}
}
return -1;
} } |
public class class_name {
@Override
public void triggered(Calendar calendar) {
timesTriggered++;
previousFireTime = nextFireTime;
nextFireTime = getFireTimeAfter(nextFireTime);
while (nextFireTime != null
&& calendar != null
&& !calendar.isTimeIncluded(nextFireTime.getTime())) {
nextFireTime = getFireTimeAfter(nextFireTime);
if (nextFireTime == null) {
break;
}
// avoid infinite loop
java.util.Calendar c = java.util.Calendar.getInstance();
c.setTime(nextFireTime);
if (c.get(java.util.Calendar.YEAR) > YEAR_TO_GIVEUP_SCHEDULING_AT) {
nextFireTime = null;
}
}
} } | public class class_name {
@Override
public void triggered(Calendar calendar) {
timesTriggered++;
previousFireTime = nextFireTime;
nextFireTime = getFireTimeAfter(nextFireTime);
while (nextFireTime != null
&& calendar != null
&& !calendar.isTimeIncluded(nextFireTime.getTime())) {
nextFireTime = getFireTimeAfter(nextFireTime); // depends on control dependency: [while], data = [(nextFireTime]
if (nextFireTime == null) {
break;
}
// avoid infinite loop
java.util.Calendar c = java.util.Calendar.getInstance();
c.setTime(nextFireTime); // depends on control dependency: [while], data = [(nextFireTime]
if (c.get(java.util.Calendar.YEAR) > YEAR_TO_GIVEUP_SCHEDULING_AT) {
nextFireTime = null; // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
@Nullable
public Date timestamp() {
// It's unclear if this will ever be null. So we're being safe.
String timestamp = getString(TIMESTAMP_KEY);
if (isNullOrEmpty(timestamp)) {
return null;
}
return parseISO8601Date(timestamp);
} } | public class class_name {
@Nullable
public Date timestamp() {
// It's unclear if this will ever be null. So we're being safe.
String timestamp = getString(TIMESTAMP_KEY);
if (isNullOrEmpty(timestamp)) {
return null; // depends on control dependency: [if], data = [none]
}
return parseISO8601Date(timestamp);
} } |
public class class_name {
public void marshall(DeleteMetricFilterRequest deleteMetricFilterRequest, ProtocolMarshaller protocolMarshaller) {
if (deleteMetricFilterRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(deleteMetricFilterRequest.getLogGroupName(), LOGGROUPNAME_BINDING);
protocolMarshaller.marshall(deleteMetricFilterRequest.getFilterName(), FILTERNAME_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(DeleteMetricFilterRequest deleteMetricFilterRequest, ProtocolMarshaller protocolMarshaller) {
if (deleteMetricFilterRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(deleteMetricFilterRequest.getLogGroupName(), LOGGROUPNAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(deleteMetricFilterRequest.getFilterName(), FILTERNAME_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public InputPreProcessor getInputPreprocessor(InputType... inputType) throws
InvalidKerasConfigurationException {
if (inputType.length > 1)
throw new InvalidKerasConfigurationException(
"Keras Permute layer accepts only one input (received " + inputType.length + ")");
InputPreProcessor preprocessor = null;
if (inputType[0] instanceof InputType.InputTypeConvolutional) {
switch (this.getDimOrder()) {
case THEANO:
preprocessor = new PermutePreprocessor(permutationIndices);
break;
case NONE: // TF by default
case TENSORFLOW:
// account for channels last
permutationIndices = new int[] {permutationIndices[2], permutationIndices[0], permutationIndices[1]};
preprocessor = new PermutePreprocessor(new int[]{1, 3, 2});
}
} else if (inputType[0] instanceof InputType.InputTypeRecurrent) {
if (Arrays.equals(permutationIndices, new int[] {2, 1}))
preprocessor = new PermutePreprocessor(permutationIndices);
else
throw new InvalidKerasConfigurationException("For RNN type input data, permutation dims have to be" +
"(2, 1) in Permute layer, got " + Arrays.toString(permutationIndices));
} else if (inputType[0] instanceof InputType.InputTypeFeedForward) {
preprocessor = null;
} else {
throw new InvalidKerasConfigurationException("Input type not supported: " + inputType[0]);
}
return preprocessor;
} } | public class class_name {
@Override
public InputPreProcessor getInputPreprocessor(InputType... inputType) throws
InvalidKerasConfigurationException {
if (inputType.length > 1)
throw new InvalidKerasConfigurationException(
"Keras Permute layer accepts only one input (received " + inputType.length + ")");
InputPreProcessor preprocessor = null;
if (inputType[0] instanceof InputType.InputTypeConvolutional) {
switch (this.getDimOrder()) {
case THEANO:
preprocessor = new PermutePreprocessor(permutationIndices);
break;
case NONE: // TF by default
case TENSORFLOW:
// account for channels last
permutationIndices = new int[] {permutationIndices[2], permutationIndices[0], permutationIndices[1]};
preprocessor = new PermutePreprocessor(new int[]{1, 3, 2});
}
} else if (inputType[0] instanceof InputType.InputTypeRecurrent) {
if (Arrays.equals(permutationIndices, new int[] {2, 1}))
preprocessor = new PermutePreprocessor(permutationIndices);
else
throw new InvalidKerasConfigurationException("For RNN type input data, permutation dims have to be" +
"(2, 1) in Permute layer, got " + Arrays.toString(permutationIndices));
} else if (inputType[0] instanceof InputType.InputTypeFeedForward) {
preprocessor = null; // depends on control dependency: [if], data = [none]
} else {
throw new InvalidKerasConfigurationException("Input type not supported: " + inputType[0]);
}
return preprocessor;
} } |
public class class_name {
public static boolean is_yayadi(String str)
{
// System.out.print("Entered is_yayadi, returning: ");
String s1 = VarnaUtil.getAdiVarna(str);
if (is_yay(s1))
{
// Log.logInfo("true");
return true;
}
// Log.logInfo("false");
return false;
} } | public class class_name {
public static boolean is_yayadi(String str)
{
// System.out.print("Entered is_yayadi, returning: ");
String s1 = VarnaUtil.getAdiVarna(str);
if (is_yay(s1))
{
// Log.logInfo("true");
return true; // depends on control dependency: [if], data = [none]
}
// Log.logInfo("false");
return false;
} } |
public class class_name {
protected boolean setupWorkingDirectory() {
if (!FileUtils.isDirectoryExists(schedulerWorkingDirectory)) {
FileUtils.createDirectory(schedulerWorkingDirectory);
}
String topologyPackageURL = String.format("file://%s", Context.topologyPackageFile(config));
String topologyPackageDestination = Paths.get(
schedulerWorkingDirectory, "topology.tar.gz").toString();
return SchedulerUtils.curlAndExtractPackage(
schedulerWorkingDirectory, topologyPackageURL,
topologyPackageDestination, true, Context.verbose(config));
} } | public class class_name {
protected boolean setupWorkingDirectory() {
if (!FileUtils.isDirectoryExists(schedulerWorkingDirectory)) {
FileUtils.createDirectory(schedulerWorkingDirectory); // depends on control dependency: [if], data = [none]
}
String topologyPackageURL = String.format("file://%s", Context.topologyPackageFile(config));
String topologyPackageDestination = Paths.get(
schedulerWorkingDirectory, "topology.tar.gz").toString();
return SchedulerUtils.curlAndExtractPackage(
schedulerWorkingDirectory, topologyPackageURL,
topologyPackageDestination, true, Context.verbose(config));
} } |
public class class_name {
public static String longToHexBytes(final long v) {
final long mask = 0XFFL;
final StringBuilder sb = new StringBuilder();
for (int i = 8; i-- > 0; ) {
final String s = Long.toHexString((v >>> (i * 8)) & mask);
sb.append(zeroPad(s, 2)).append(" ");
}
return sb.toString();
} } | public class class_name {
public static String longToHexBytes(final long v) {
final long mask = 0XFFL;
final StringBuilder sb = new StringBuilder();
for (int i = 8; i-- > 0; ) {
final String s = Long.toHexString((v >>> (i * 8)) & mask);
sb.append(zeroPad(s, 2)).append(" "); // depends on control dependency: [for], data = [none]
}
return sb.toString();
} } |
public class class_name {
private Object createAndActivate(final Object key) throws ContainerException {
Object instance = null;
try {
instance = prototype.newInstance();
} catch(final DempsyException e) {
if(e.userCaused()) {
LOGGER.warn("The message processor prototype " + SafeString.valueOf(prototype)
+ " threw an exception when trying to create a new message processor for they key " + SafeString.objectDescription(key));
statCollector.messageFailed(true);
instance = null;
} else
throw new ContainerException("the container for " + clusterId + " failed to create a new instance of " +
SafeString.valueOf(prototype) + " for the key " + SafeString.objectDescription(key) +
" because the clone method threw an exception.", e);
} catch(final RuntimeException e) {
throw new ContainerException("the container for " + clusterId + " failed to create a new instance of " +
SafeString.valueOf(prototype) + " for the key " + SafeString.objectDescription(key) +
" because the clone invocation resulted in an unknown exception.", e);
}
// activate
boolean activateSuccessful = false;
try {
if(instance != null) {
if(LOGGER.isTraceEnabled())
LOGGER.trace("the container for " + clusterId + " is activating instance " + String.valueOf(instance)
+ " via " + SafeString.valueOf(prototype) + " for " + SafeString.valueOf(key));
prototype.activate(instance, key);
activateSuccessful = true;
}
} catch(final DempsyException e) {
if(e.userCaused()) {
LOGGER.warn("The message processor " + SafeString.objectDescription(instance) + " activate call threw an exception.");
statCollector.messageFailed(true);
instance = null;
} else
throw new ContainerException(
"the container for " + clusterId + " failed to invoke the activate method of " + SafeString.valueOf(prototype)
+ ". Is the active method accessible - the class is public and the method is public?",
e);
} catch(final RuntimeException e) {
throw new ContainerException(
"the container for " + clusterId + " failed to invoke the activate method of " + SafeString.valueOf(prototype) +
" because of an unknown exception.",
e);
}
if(activateSuccessful) {
// we only want to create a wrapper and place the instance into the container
// if the instance activated correctly. If we got here then the above try block
// must have been successful.
if(instances.putIfAbsent(key, instance) != null) // once it goes into the map, we can remove it from the 'being worked' set
throw new IllegalStateException("WTF?");
// the newly added one.
statCollector.messageProcessorCreated(key);
}
return instance;
} } | public class class_name {
private Object createAndActivate(final Object key) throws ContainerException {
Object instance = null;
try {
instance = prototype.newInstance();
} catch(final DempsyException e) {
if(e.userCaused()) {
LOGGER.warn("The message processor prototype " + SafeString.valueOf(prototype)
+ " threw an exception when trying to create a new message processor for they key " + SafeString.objectDescription(key)); // depends on control dependency: [if], data = [none]
statCollector.messageFailed(true); // depends on control dependency: [if], data = [none]
instance = null; // depends on control dependency: [if], data = [none]
} else
throw new ContainerException("the container for " + clusterId + " failed to create a new instance of " +
SafeString.valueOf(prototype) + " for the key " + SafeString.objectDescription(key) +
" because the clone method threw an exception.", e);
} catch(final RuntimeException e) {
throw new ContainerException("the container for " + clusterId + " failed to create a new instance of " +
SafeString.valueOf(prototype) + " for the key " + SafeString.objectDescription(key) +
" because the clone invocation resulted in an unknown exception.", e);
}
// activate
boolean activateSuccessful = false;
try {
if(instance != null) {
if(LOGGER.isTraceEnabled())
LOGGER.trace("the container for " + clusterId + " is activating instance " + String.valueOf(instance)
+ " via " + SafeString.valueOf(prototype) + " for " + SafeString.valueOf(key));
prototype.activate(instance, key); // depends on control dependency: [if], data = [(instance]
activateSuccessful = true; // depends on control dependency: [if], data = [none]
}
} catch(final DempsyException e) {
if(e.userCaused()) {
LOGGER.warn("The message processor " + SafeString.objectDescription(instance) + " activate call threw an exception."); // depends on control dependency: [if], data = [none]
statCollector.messageFailed(true); // depends on control dependency: [if], data = [none]
instance = null; // depends on control dependency: [if], data = [none]
} else
throw new ContainerException(
"the container for " + clusterId + " failed to invoke the activate method of " + SafeString.valueOf(prototype)
+ ". Is the active method accessible - the class is public and the method is public?",
e);
} catch(final RuntimeException e) {
throw new ContainerException(
"the container for " + clusterId + " failed to invoke the activate method of " + SafeString.valueOf(prototype) +
" because of an unknown exception.",
e);
}
if(activateSuccessful) {
// we only want to create a wrapper and place the instance into the container
// if the instance activated correctly. If we got here then the above try block
// must have been successful.
if(instances.putIfAbsent(key, instance) != null) // once it goes into the map, we can remove it from the 'being worked' set
throw new IllegalStateException("WTF?");
// the newly added one.
statCollector.messageProcessorCreated(key);
}
return instance;
} } |
public class class_name {
@Transactional
public List<Role> authenticate(DataBinder parameters) throws AuthorizationException {
try {
long now = System.currentTimeMillis();
Credential credential = collectCredential(parameters);
if (credential != null) {
_logger.log(Level.FINE, "credential: {0}", org.xillium.base.beans.Beans.toString(credential));
List<Role> roles = _persistence.getResults(RetrieveRolesByCredential, credential);
_logger.log(Level.FINE, "# of roles under this credential: {0}", roles.size());
if (roles.size() > 0) {
_logger.log(Level.FINE, "session timeout: {0}", _timeout);
if (_timeout > 0) { // is session authentication enabled?
Session session = new Session(credential.id, createSecureToken(), now);
_logger.log(Level.FINE, "updating session {0}", org.xillium.base.beans.Beans.toString(session));
_persistence.executeUpdate(UpdateSecureSession, session);
String ticket = URLEncoder.encode(session.id + Session.AT + session.token, "UTF-8");
parameters.put(AUTHCODE, ticket);
// place the new authcode in a session cookie for the client
_logger.log(Level.FINE, "Sending ticket in cookie: {0}", ticket);
Multimap<String, String> headers = parameters.mul(Service.SERVICE_HTTP_HEADER, String.class, String.class);
if (parameters.get(Service.REQUEST_HTTP_SECURE) != null) {
headers.add("Set-Cookie", AUTHCODE + "=" + ticket + ";path=/;secure");
} else {
headers.add("Set-Cookie", AUTHCODE + "=" + ticket + ";path=/");
}
}
return roles;
//authorize(deployment, roles);
} else {
throw new AuthorizationException("InvalidCredential");
}
} else if (_timeout > 0) {
Session session = collectSession(parameters);
if (session != null) {
session.clock = now;
session.maxAge = _timeout;
_logger.log(Level.FINE, "Attempt to authorized with session {0}", org.xillium.base.beans.Beans.toString(session));
List<Role> roles = _persistence.getResults(RetrieveRolesBySession, session);
if (roles.size() > 0) {
_persistence.executeUpdate(UpdateSecureSession, session);
return roles;
//authorize(deployment, roles);
} else {
_logger.log(Level.WARNING, "merchant:"+session.id+",token:"+session.token);
parameters.remove(AUTHCODE);
throw new AuthorizationException("InvalidSession");
}
} else {
throw new AuthenticationRequiredException("AuthenticationRequired");
}
} else {
throw new AuthenticationRequiredException("AuthenticationRequired");
}
} catch (AuthorizationException x) {
redirectToAuthenticationPage(parameters);
throw x;
} catch (Exception x) {
redirectToAuthenticationPage(parameters);
throw new AuthorizationException(x.getMessage(), x);
}
} } | public class class_name {
@Transactional
public List<Role> authenticate(DataBinder parameters) throws AuthorizationException {
try {
long now = System.currentTimeMillis();
Credential credential = collectCredential(parameters);
if (credential != null) {
_logger.log(Level.FINE, "credential: {0}", org.xillium.base.beans.Beans.toString(credential));
List<Role> roles = _persistence.getResults(RetrieveRolesByCredential, credential);
_logger.log(Level.FINE, "# of roles under this credential: {0}", roles.size());
if (roles.size() > 0) {
_logger.log(Level.FINE, "session timeout: {0}", _timeout);
if (_timeout > 0) { // is session authentication enabled?
Session session = new Session(credential.id, createSecureToken(), now);
_logger.log(Level.FINE, "updating session {0}", org.xillium.base.beans.Beans.toString(session)); // depends on control dependency: [if], data = [none]
_persistence.executeUpdate(UpdateSecureSession, session); // depends on control dependency: [if], data = [none]
String ticket = URLEncoder.encode(session.id + Session.AT + session.token, "UTF-8");
parameters.put(AUTHCODE, ticket); // depends on control dependency: [if], data = [none]
// place the new authcode in a session cookie for the client
_logger.log(Level.FINE, "Sending ticket in cookie: {0}", ticket); // depends on control dependency: [if], data = [none]
Multimap<String, String> headers = parameters.mul(Service.SERVICE_HTTP_HEADER, String.class, String.class);
if (parameters.get(Service.REQUEST_HTTP_SECURE) != null) {
headers.add("Set-Cookie", AUTHCODE + "=" + ticket + ";path=/;secure"); // depends on control dependency: [if], data = [none]
} else {
headers.add("Set-Cookie", AUTHCODE + "=" + ticket + ";path=/"); // depends on control dependency: [if], data = [none]
}
}
return roles;
//authorize(deployment, roles);
} else {
throw new AuthorizationException("InvalidCredential");
}
} else if (_timeout > 0) {
Session session = collectSession(parameters);
if (session != null) {
session.clock = now;
session.maxAge = _timeout;
_logger.log(Level.FINE, "Attempt to authorized with session {0}", org.xillium.base.beans.Beans.toString(session));
List<Role> roles = _persistence.getResults(RetrieveRolesBySession, session);
if (roles.size() > 0) {
_persistence.executeUpdate(UpdateSecureSession, session);
return roles;
//authorize(deployment, roles);
} else {
_logger.log(Level.WARNING, "merchant:"+session.id+",token:"+session.token);
parameters.remove(AUTHCODE);
throw new AuthorizationException("InvalidSession");
}
} else {
throw new AuthenticationRequiredException("AuthenticationRequired");
}
} else {
throw new AuthenticationRequiredException("AuthenticationRequired");
}
} catch (AuthorizationException x) {
redirectToAuthenticationPage(parameters);
throw x;
} catch (Exception x) {
redirectToAuthenticationPage(parameters);
throw new AuthorizationException(x.getMessage(), x);
}
} } |
public class class_name {
public void resume(String queueName) {
try {
getQueueControl(queueName).resume();
} catch (Exception e) {
throw new AsyncException(e);
}
} } | public class class_name {
public void resume(String queueName) {
try {
getQueueControl(queueName).resume(); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new AsyncException(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
HandshakeMessage getKickstartMessage() throws SSLException {
// session ID of the ClientHello message
SessionId sessionId = SSLSessionImpl.nullSession.getSessionId();
// a list of cipher suites sent by the client
CipherSuiteList cipherSuites = getActiveCipherSuites();
// set the max protocol version this client is supporting.
maxProtocolVersion = protocolVersion;
//
// Try to resume an existing session. This might be mandatory,
// given certain API options.
//
session = ((SSLSessionContextImpl)sslContext
.engineGetClientSessionContext())
.get(getHostSE(), getPortSE());
if (debug != null && Debug.isOn("session")) {
if (session != null) {
System.out.println("%% Client cached "
+ session
+ (session.isRejoinable() ? "" : " (not rejoinable)"));
} else {
System.out.println("%% No cached client session");
}
}
if (session != null) {
// If unsafe server certificate change is not allowed, reserve
// current server certificates if the preious handshake is a
// session-resumption abbreviated initial handshake.
if (!allowUnsafeServerCertChange && session.isSessionResumption()) {
try {
// If existing, peer certificate chain cannot be null.
reservedServerCerts =
(X509Certificate[])session.getPeerCertificates();
} catch (SSLPeerUnverifiedException puve) {
// Maybe not certificate-based, ignore the exception.
}
}
if (!session.isRejoinable()) {
session = null;
}
}
if (session != null) {
CipherSuite sessionSuite = session.getSuite();
ProtocolVersion sessionVersion = session.getProtocolVersion();
if (isNegotiable(sessionSuite) == false) {
if (debug != null && Debug.isOn("session")) {
System.out.println("%% can't resume, unavailable cipher");
}
session = null;
}
if ((session != null) && !isNegotiable(sessionVersion)) {
if (debug != null && Debug.isOn("session")) {
System.out.println("%% can't resume, protocol disabled");
}
session = null;
}
if (session != null) {
if (debug != null) {
if (Debug.isOn("handshake") || Debug.isOn("session")) {
System.out.println("%% Try resuming " + session
+ " from port " + getLocalPortSE());
}
}
sessionId = session.getSessionId();
maxProtocolVersion = sessionVersion;
// Update SSL version number in underlying SSL socket and
// handshake output stream, so that the output records (at the
// record layer) have the correct version
setVersion(sessionVersion);
}
/*
* Force use of the previous session ciphersuite, and
* add the SCSV if enabled.
*/
if (!enableNewSession) {
if (session == null) {
throw new SSLHandshakeException(
"Can't reuse existing SSL client session");
}
Collection<CipherSuite> cipherList = new ArrayList<>(2);
cipherList.add(sessionSuite);
if (!secureRenegotiation &&
cipherSuites.contains(CipherSuite.C_SCSV)) {
cipherList.add(CipherSuite.C_SCSV);
} // otherwise, renegotiation_info extension will be used
cipherSuites = new CipherSuiteList(cipherList);
}
}
if (session == null && !enableNewSession) {
throw new SSLHandshakeException("No existing session to resume");
}
// exclude SCSV for secure renegotiation
if (secureRenegotiation && cipherSuites.contains(CipherSuite.C_SCSV)) {
Collection<CipherSuite> cipherList =
new ArrayList<>(cipherSuites.size() - 1);
for (CipherSuite suite : cipherSuites.collection()) {
if (suite != CipherSuite.C_SCSV) {
cipherList.add(suite);
}
}
cipherSuites = new CipherSuiteList(cipherList);
}
// make sure there is a negotiable cipher suite.
boolean negotiable = false;
for (CipherSuite suite : cipherSuites.collection()) {
if (isNegotiable(suite)) {
negotiable = true;
break;
}
}
if (!negotiable) {
throw new SSLHandshakeException("No negotiable cipher suite");
}
// Not a TLS1.2+ handshake
// For SSLv2Hello, HandshakeHash.reset() will be called, so we
// cannot call HandshakeHash.protocolDetermined() here. As it does
// not follow the spec that HandshakeHash.reset() can be only be
// called before protocolDetermined.
// if (maxProtocolVersion.v < ProtocolVersion.TLS12.v) {
// handshakeHash.protocolDetermined(maxProtocolVersion);
// }
// create the ClientHello message
ClientHello clientHelloMessage = new ClientHello(
sslContext.getSecureRandom(), maxProtocolVersion,
sessionId, cipherSuites);
// add signature_algorithm extension
if (maxProtocolVersion.v >= ProtocolVersion.TLS12.v) {
// we will always send the signature_algorithm extension
Collection<SignatureAndHashAlgorithm> localSignAlgs =
getLocalSupportedSignAlgs();
if (localSignAlgs.isEmpty()) {
throw new SSLHandshakeException(
"No supported signature algorithm");
}
clientHelloMessage.addSignatureAlgorithmsExtension(localSignAlgs);
}
// add server_name extension
if (enableSNIExtension) {
// We cannot use the hostname resolved from name services. For
// virtual hosting, multiple hostnames may be bound to the same IP
// address, so the hostname resolved from name services is not
// reliable.
String hostname = getRawHostnameSE();
// we only allow FQDN
if (hostname != null && hostname.indexOf('.') > 0 &&
!IPAddressUtil.isIPv4LiteralAddress(hostname) &&
!IPAddressUtil.isIPv6LiteralAddress(hostname)) {
clientHelloMessage.addServerNameIndicationExtension(hostname);
}
}
// reset the client random cookie
clnt_random = clientHelloMessage.clnt_random;
/*
* need to set the renegotiation_info extension for:
* 1: secure renegotiation
* 2: initial handshake and no SCSV in the ClientHello
* 3: insecure renegotiation and no SCSV in the ClientHello
*/
if (secureRenegotiation ||
!cipherSuites.contains(CipherSuite.C_SCSV)) {
clientHelloMessage.addRenegotiationInfoExtension(clientVerifyData);
}
// NPN_CHANGES_BEGIN
if (isInitialHandshake)
{
NextProtoNego.ClientProvider provider = conn != null ?
(NextProtoNego.ClientProvider)NextProtoNego.get(conn) :
(NextProtoNego.ClientProvider)NextProtoNego.get(engine);
if (provider != null)
{
if (provider.supports())
{
if (NextProtoNego.debug)
System.err.println(new StringBuilder("[C] NPN supported for ").append(conn != null ? conn : engine));
clientHelloMessage.extensions.add(new NextProtoNegoExtension());
}
else
{
if (NextProtoNego.debug)
System.err.println(new StringBuilder("[C] NPN not supported for ").append(conn != null ? conn : engine));
}
}
else
{
if (NextProtoNego.debug)
System.err.println(new StringBuilder("[C] NPN client provider not present for ").append(conn != null ? conn : engine));
}
}
// NPN_CHANGES_END
return clientHelloMessage;
} } | public class class_name {
HandshakeMessage getKickstartMessage() throws SSLException {
// session ID of the ClientHello message
SessionId sessionId = SSLSessionImpl.nullSession.getSessionId();
// a list of cipher suites sent by the client
CipherSuiteList cipherSuites = getActiveCipherSuites();
// set the max protocol version this client is supporting.
maxProtocolVersion = protocolVersion;
//
// Try to resume an existing session. This might be mandatory,
// given certain API options.
//
session = ((SSLSessionContextImpl)sslContext
.engineGetClientSessionContext())
.get(getHostSE(), getPortSE());
if (debug != null && Debug.isOn("session")) {
if (session != null) {
System.out.println("%% Client cached "
+ session
+ (session.isRejoinable() ? "" : " (not rejoinable)")); // depends on control dependency: [if], data = [none]
} else {
System.out.println("%% No cached client session"); // depends on control dependency: [if], data = [none]
}
}
if (session != null) {
// If unsafe server certificate change is not allowed, reserve
// current server certificates if the preious handshake is a
// session-resumption abbreviated initial handshake.
if (!allowUnsafeServerCertChange && session.isSessionResumption()) {
try {
// If existing, peer certificate chain cannot be null.
reservedServerCerts =
(X509Certificate[])session.getPeerCertificates(); // depends on control dependency: [try], data = [none]
} catch (SSLPeerUnverifiedException puve) {
// Maybe not certificate-based, ignore the exception.
} // depends on control dependency: [catch], data = [none]
}
if (!session.isRejoinable()) {
session = null;
}
}
if (session != null) {
CipherSuite sessionSuite = session.getSuite();
ProtocolVersion sessionVersion = session.getProtocolVersion();
if (isNegotiable(sessionSuite) == false) {
if (debug != null && Debug.isOn("session")) {
System.out.println("%% can't resume, unavailable cipher");
}
session = null;
}
if ((session != null) && !isNegotiable(sessionVersion)) {
if (debug != null && Debug.isOn("session")) {
System.out.println("%% can't resume, protocol disabled"); // depends on control dependency: [if], data = [none]
}
session = null; // depends on control dependency: [if], data = [none]
}
if (session != null) {
if (debug != null) {
if (Debug.isOn("handshake") || Debug.isOn("session")) {
System.out.println("%% Try resuming " + session
+ " from port " + getLocalPortSE()); // depends on control dependency: [if], data = [none]
}
}
sessionId = session.getSessionId(); // depends on control dependency: [if], data = [none]
maxProtocolVersion = sessionVersion; // depends on control dependency: [if], data = [none]
// Update SSL version number in underlying SSL socket and
// handshake output stream, so that the output records (at the
// record layer) have the correct version
setVersion(sessionVersion); // depends on control dependency: [if], data = [(session]
}
/*
* Force use of the previous session ciphersuite, and
* add the SCSV if enabled.
*/
if (!enableNewSession) {
if (session == null) {
throw new SSLHandshakeException(
"Can't reuse existing SSL client session");
}
Collection<CipherSuite> cipherList = new ArrayList<>(2);
cipherList.add(sessionSuite);
if (!secureRenegotiation &&
cipherSuites.contains(CipherSuite.C_SCSV)) {
cipherList.add(CipherSuite.C_SCSV);
} // otherwise, renegotiation_info extension will be used
cipherSuites = new CipherSuiteList(cipherList);
}
}
if (session == null && !enableNewSession) {
throw new SSLHandshakeException("No existing session to resume");
}
// exclude SCSV for secure renegotiation
if (secureRenegotiation && cipherSuites.contains(CipherSuite.C_SCSV)) {
Collection<CipherSuite> cipherList =
new ArrayList<>(cipherSuites.size() - 1);
for (CipherSuite suite : cipherSuites.collection()) {
if (suite != CipherSuite.C_SCSV) {
cipherList.add(suite);
}
}
cipherSuites = new CipherSuiteList(cipherList);
}
// make sure there is a negotiable cipher suite.
boolean negotiable = false;
for (CipherSuite suite : cipherSuites.collection()) {
if (isNegotiable(suite)) {
negotiable = true;
break;
}
}
if (!negotiable) {
throw new SSLHandshakeException("No negotiable cipher suite");
}
// Not a TLS1.2+ handshake
// For SSLv2Hello, HandshakeHash.reset() will be called, so we
// cannot call HandshakeHash.protocolDetermined() here. As it does
// not follow the spec that HandshakeHash.reset() can be only be
// called before protocolDetermined.
// if (maxProtocolVersion.v < ProtocolVersion.TLS12.v) {
// handshakeHash.protocolDetermined(maxProtocolVersion);
// }
// create the ClientHello message
ClientHello clientHelloMessage = new ClientHello(
sslContext.getSecureRandom(), maxProtocolVersion,
sessionId, cipherSuites);
// add signature_algorithm extension
if (maxProtocolVersion.v >= ProtocolVersion.TLS12.v) {
// we will always send the signature_algorithm extension
Collection<SignatureAndHashAlgorithm> localSignAlgs =
getLocalSupportedSignAlgs();
if (localSignAlgs.isEmpty()) {
throw new SSLHandshakeException(
"No supported signature algorithm");
}
clientHelloMessage.addSignatureAlgorithmsExtension(localSignAlgs);
}
// add server_name extension
if (enableSNIExtension) {
// We cannot use the hostname resolved from name services. For
// virtual hosting, multiple hostnames may be bound to the same IP
// address, so the hostname resolved from name services is not
// reliable.
String hostname = getRawHostnameSE();
// we only allow FQDN
if (hostname != null && hostname.indexOf('.') > 0 &&
!IPAddressUtil.isIPv4LiteralAddress(hostname) &&
!IPAddressUtil.isIPv6LiteralAddress(hostname)) {
clientHelloMessage.addServerNameIndicationExtension(hostname);
}
}
// reset the client random cookie
clnt_random = clientHelloMessage.clnt_random;
/*
* need to set the renegotiation_info extension for:
* 1: secure renegotiation
* 2: initial handshake and no SCSV in the ClientHello
* 3: insecure renegotiation and no SCSV in the ClientHello
*/
if (secureRenegotiation ||
!cipherSuites.contains(CipherSuite.C_SCSV)) {
clientHelloMessage.addRenegotiationInfoExtension(clientVerifyData); // depends on control dependency: [if], data = [none]
}
// NPN_CHANGES_BEGIN
if (isInitialHandshake)
{
NextProtoNego.ClientProvider provider = conn != null ?
(NextProtoNego.ClientProvider)NextProtoNego.get(conn) :
(NextProtoNego.ClientProvider)NextProtoNego.get(engine);
if (provider != null)
{
if (provider.supports())
{
if (NextProtoNego.debug)
System.err.println(new StringBuilder("[C] NPN supported for ").append(conn != null ? conn : engine));
clientHelloMessage.extensions.add(new NextProtoNegoExtension()); // depends on control dependency: [if], data = [none]
}
else
{
if (NextProtoNego.debug)
System.err.println(new StringBuilder("[C] NPN not supported for ").append(conn != null ? conn : engine));
}
}
else
{
if (NextProtoNego.debug)
System.err.println(new StringBuilder("[C] NPN client provider not present for ").append(conn != null ? conn : engine));
}
}
// NPN_CHANGES_END
return clientHelloMessage;
} } |
public class class_name {
public static JSONArray fromObject( Object object, JsonConfig jsonConfig ) {
if( object instanceof JSONString ){
return _fromJSONString( (JSONString) object, jsonConfig );
}else if( object instanceof JSONArray ){
return _fromJSONArray( (JSONArray) object, jsonConfig );
}else if( object instanceof Collection ){
return _fromCollection( (Collection) object, jsonConfig );
}else if( object instanceof JSONTokener ){
return _fromJSONTokener( (JSONTokener) object, jsonConfig );
}else if( object instanceof String ){
return _fromString( (String) object, jsonConfig );
}else if( object != null && object.getClass()
.isArray() ){
Class type = object.getClass()
.getComponentType();
if( !type.isPrimitive() ){
return _fromArray( (Object[]) object, jsonConfig );
}else{
if( type == Boolean.TYPE ){
return _fromArray( (boolean[]) object, jsonConfig );
}else if( type == Byte.TYPE ){
return _fromArray( (byte[]) object, jsonConfig );
}else if( type == Short.TYPE ){
return _fromArray( (short[]) object, jsonConfig );
}else if( type == Integer.TYPE ){
return _fromArray( (int[]) object, jsonConfig );
}else if( type == Long.TYPE ){
return _fromArray( (long[]) object, jsonConfig );
}else if( type == Float.TYPE ){
return _fromArray( (float[]) object, jsonConfig );
}else if( type == Double.TYPE ){
return _fromArray( (double[]) object, jsonConfig );
}else if( type == Character.TYPE ){
return _fromArray( (char[]) object, jsonConfig );
}else{
throw new JSONException( "Unsupported type" );
}
}
}else if( JSONUtils.isBoolean( object ) || JSONUtils.isFunction( object )
|| JSONUtils.isNumber( object ) || JSONUtils.isNull( object )
|| JSONUtils.isString( object ) || object instanceof JSON ){
fireArrayStartEvent( jsonConfig );
JSONArray jsonArray = new JSONArray().element( object, jsonConfig );
fireElementAddedEvent( 0, jsonArray.get( 0 ), jsonConfig );
fireArrayStartEvent( jsonConfig );
return jsonArray;
}else if( JSONUtils.isObject( object ) ){
fireArrayStartEvent( jsonConfig );
JSONArray jsonArray = new JSONArray().element( JSONObject.fromObject( object, jsonConfig ) );
fireElementAddedEvent( 0, jsonArray.get( 0 ), jsonConfig );
fireArrayStartEvent( jsonConfig );
return jsonArray;
}else{
throw new JSONException( "Unsupported type" );
}
} } | public class class_name {
public static JSONArray fromObject( Object object, JsonConfig jsonConfig ) {
if( object instanceof JSONString ){
return _fromJSONString( (JSONString) object, jsonConfig ); // depends on control dependency: [if], data = [none]
}else if( object instanceof JSONArray ){
return _fromJSONArray( (JSONArray) object, jsonConfig ); // depends on control dependency: [if], data = [none]
}else if( object instanceof Collection ){
return _fromCollection( (Collection) object, jsonConfig ); // depends on control dependency: [if], data = [none]
}else if( object instanceof JSONTokener ){
return _fromJSONTokener( (JSONTokener) object, jsonConfig ); // depends on control dependency: [if], data = [none]
}else if( object instanceof String ){
return _fromString( (String) object, jsonConfig ); // depends on control dependency: [if], data = [none]
}else if( object != null && object.getClass()
.isArray() ){
Class type = object.getClass()
.getComponentType();
if( !type.isPrimitive() ){
return _fromArray( (Object[]) object, jsonConfig ); // depends on control dependency: [if], data = [none]
}else{
if( type == Boolean.TYPE ){
return _fromArray( (boolean[]) object, jsonConfig ); // depends on control dependency: [if], data = [none]
}else if( type == Byte.TYPE ){
return _fromArray( (byte[]) object, jsonConfig ); // depends on control dependency: [if], data = [none]
}else if( type == Short.TYPE ){
return _fromArray( (short[]) object, jsonConfig ); // depends on control dependency: [if], data = [none]
}else if( type == Integer.TYPE ){
return _fromArray( (int[]) object, jsonConfig ); // depends on control dependency: [if], data = [none]
}else if( type == Long.TYPE ){
return _fromArray( (long[]) object, jsonConfig ); // depends on control dependency: [if], data = [none]
}else if( type == Float.TYPE ){
return _fromArray( (float[]) object, jsonConfig ); // depends on control dependency: [if], data = [none]
}else if( type == Double.TYPE ){
return _fromArray( (double[]) object, jsonConfig ); // depends on control dependency: [if], data = [none]
}else if( type == Character.TYPE ){
return _fromArray( (char[]) object, jsonConfig ); // depends on control dependency: [if], data = [none]
}else{
throw new JSONException( "Unsupported type" );
}
}
}else if( JSONUtils.isBoolean( object ) || JSONUtils.isFunction( object )
|| JSONUtils.isNumber( object ) || JSONUtils.isNull( object )
|| JSONUtils.isString( object ) || object instanceof JSON ){
fireArrayStartEvent( jsonConfig ); // depends on control dependency: [if], data = [none]
JSONArray jsonArray = new JSONArray().element( object, jsonConfig );
fireElementAddedEvent( 0, jsonArray.get( 0 ), jsonConfig ); // depends on control dependency: [if], data = [none]
fireArrayStartEvent( jsonConfig ); // depends on control dependency: [if], data = [none]
return jsonArray; // depends on control dependency: [if], data = [none]
}else if( JSONUtils.isObject( object ) ){
fireArrayStartEvent( jsonConfig ); // depends on control dependency: [if], data = [none]
JSONArray jsonArray = new JSONArray().element( JSONObject.fromObject( object, jsonConfig ) );
fireElementAddedEvent( 0, jsonArray.get( 0 ), jsonConfig ); // depends on control dependency: [if], data = [none]
fireArrayStartEvent( jsonConfig ); // depends on control dependency: [if], data = [none]
return jsonArray; // depends on control dependency: [if], data = [none]
}else{
throw new JSONException( "Unsupported type" );
}
} } |
public class class_name {
public void setListeners(java.util.Collection<Listener> listeners) {
if (listeners == null) {
this.listeners = null;
return;
}
this.listeners = new java.util.ArrayList<Listener>(listeners);
} } | public class class_name {
public void setListeners(java.util.Collection<Listener> listeners) {
if (listeners == null) {
this.listeners = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.listeners = new java.util.ArrayList<Listener>(listeners);
} } |
public class class_name {
private int[] getHashes(int length) {
int i = length - MIN_PATTERN_LENGTH;
int[] subHashes = this.hashes[i];
if (subHashes == null) {
subHashes = this.hasher.getHashCodes(this.normalizedWord, length);
this.hashes[i] = subHashes;
}
return subHashes;
} } | public class class_name {
private int[] getHashes(int length) {
int i = length - MIN_PATTERN_LENGTH;
int[] subHashes = this.hashes[i];
if (subHashes == null) {
subHashes = this.hasher.getHashCodes(this.normalizedWord, length);
// depends on control dependency: [if], data = [none]
this.hashes[i] = subHashes;
// depends on control dependency: [if], data = [none]
}
return subHashes;
} } |
public class class_name {
TreeNode freeze(int index) {
TreeNode node = this;
node.setEndIndex(index);
while (node.parent != null) {
node.parent.latestChild = node;
node = node.parent;
node.setEndIndex(index);
}
return node;
} } | public class class_name {
TreeNode freeze(int index) {
TreeNode node = this;
node.setEndIndex(index);
while (node.parent != null) {
node.parent.latestChild = node; // depends on control dependency: [while], data = [none]
node = node.parent; // depends on control dependency: [while], data = [none]
node.setEndIndex(index); // depends on control dependency: [while], data = [none]
}
return node;
} } |
public class class_name {
public void serializeParameter(String name, Map< ? , ? > map, boolean type, ContentHandler contentHandler)
{
Attributes attributes;
if (type) {
AttributesImpl attributesImpl = new AttributesImpl();
attributesImpl.addAttribute(null, null, "type", null, "stringmap");
attributes = attributesImpl;
} else {
attributes = EMPTY_ATTRIBUTES;
}
startElement(name, attributes, contentHandler);
for (Map.Entry< ? , ? > entry : map.entrySet()) {
if (entry.getValue() != null && entry.getKey() != null) {
serializeParameter(entry.getKey().toString(), entry.getValue().toString(), null, contentHandler);
}
}
endElement(name, contentHandler);
} } | public class class_name {
public void serializeParameter(String name, Map< ? , ? > map, boolean type, ContentHandler contentHandler)
{
Attributes attributes;
if (type) {
AttributesImpl attributesImpl = new AttributesImpl();
attributesImpl.addAttribute(null, null, "type", null, "stringmap"); // depends on control dependency: [if], data = [none]
attributes = attributesImpl; // depends on control dependency: [if], data = [none]
} else {
attributes = EMPTY_ATTRIBUTES; // depends on control dependency: [if], data = [none]
}
startElement(name, attributes, contentHandler);
for (Map.Entry< ? , ? > entry : map.entrySet()) {
if (entry.getValue() != null && entry.getKey() != null) {
serializeParameter(entry.getKey().toString(), entry.getValue().toString(), null, contentHandler); // depends on control dependency: [if], data = [none]
}
}
endElement(name, contentHandler);
} } |
public class class_name {
public static String getRedirectDestination(String pageText) {
String redirectString = null;
try {
String regex = "\\[\\[\\s*(.+?)\\s*]]";
Pattern pattern = Pattern.compile(regex);
Matcher matcher = pattern.matcher(pageText);
// group 0 is the whole match
if (matcher.find()) {
redirectString = matcher.group(1);
}
if (redirectString == null) {
return null;
}
// remove anchor (case: "#Redirect [[Article #Anchor]])
String[] anchorSplitValues = redirectString.split("#");
redirectString = anchorSplitValues[0];
// remove whitespace (case: "Article " - when splitting the example
// above)
redirectString = redirectString.trim();
// remove direct|redirect alternatives
String[] directSplitValues = redirectString.split("\\|");
redirectString = directSplitValues[0];
// remove whitespace (case: "Article " - when splitting the example
// above)
redirectString = redirectString.trim();
// remove whitespace (case: "Article " - when splitting the example
// above)
redirectString = redirectString.trim();
// remove namespace string (case:
// "#REDIRECT [[Portal:Recht/Liste der Rechtsthemen]]")
// but there are names with colons in it => consider only cases
// where
// there are no spaces around the colon
String regexNamespace = ":([^\\s].+)";
Pattern patternNamespace = Pattern.compile(regexNamespace);
Matcher matcherNamespace = patternNamespace.matcher(redirectString);
// group 0 is the whole match
if (matcherNamespace.find()) {
redirectString = matcherNamespace.group(1);
}
// replace spaces with underscores (spaces are represented as
// underscores in page titles)
// e.g. "Englische Grammatik"
redirectString = redirectString.replace(" ", "_");
// page titles always start with a capital letter
if (redirectString.length() > 0) {
redirectString = redirectString.substring(0, 1).toUpperCase()
+ redirectString.substring(1, redirectString.length());
}
} catch (Exception e) {
redirectString = null;
logger.debug("Error in Redirects ignored");
}
return redirectString;
} } | public class class_name {
public static String getRedirectDestination(String pageText) {
String redirectString = null;
try {
String regex = "\\[\\[\\s*(.+?)\\s*]]";
Pattern pattern = Pattern.compile(regex);
Matcher matcher = pattern.matcher(pageText);
// group 0 is the whole match
if (matcher.find()) {
redirectString = matcher.group(1); // depends on control dependency: [if], data = [none]
}
if (redirectString == null) {
return null; // depends on control dependency: [if], data = [none]
}
// remove anchor (case: "#Redirect [[Article #Anchor]])
String[] anchorSplitValues = redirectString.split("#");
redirectString = anchorSplitValues[0]; // depends on control dependency: [try], data = [none]
// remove whitespace (case: "Article " - when splitting the example
// above)
redirectString = redirectString.trim(); // depends on control dependency: [try], data = [none]
// remove direct|redirect alternatives
String[] directSplitValues = redirectString.split("\\|");
redirectString = directSplitValues[0]; // depends on control dependency: [try], data = [none]
// remove whitespace (case: "Article " - when splitting the example
// above)
redirectString = redirectString.trim(); // depends on control dependency: [try], data = [none]
// remove whitespace (case: "Article " - when splitting the example
// above)
redirectString = redirectString.trim(); // depends on control dependency: [try], data = [none]
// remove namespace string (case:
// "#REDIRECT [[Portal:Recht/Liste der Rechtsthemen]]")
// but there are names with colons in it => consider only cases
// where
// there are no spaces around the colon
String regexNamespace = ":([^\\s].+)";
Pattern patternNamespace = Pattern.compile(regexNamespace);
Matcher matcherNamespace = patternNamespace.matcher(redirectString);
// group 0 is the whole match
if (matcherNamespace.find()) {
redirectString = matcherNamespace.group(1); // depends on control dependency: [if], data = [none]
}
// replace spaces with underscores (spaces are represented as
// underscores in page titles)
// e.g. "Englische Grammatik"
redirectString = redirectString.replace(" ", "_"); // depends on control dependency: [try], data = [none]
// page titles always start with a capital letter
if (redirectString.length() > 0) {
redirectString = redirectString.substring(0, 1).toUpperCase()
+ redirectString.substring(1, redirectString.length()); // depends on control dependency: [if], data = [none]
}
} catch (Exception e) {
redirectString = null;
logger.debug("Error in Redirects ignored");
} // depends on control dependency: [catch], data = [none]
return redirectString;
} } |
public class class_name {
public static void pauseTimer(final String type) {
TransactionLogger instance = getInstance();
if (instance == null) {
return;
}
instance.components.get(type).pauseTimer();
} } | public class class_name {
public static void pauseTimer(final String type) {
TransactionLogger instance = getInstance();
if (instance == null) {
return; // depends on control dependency: [if], data = [none]
}
instance.components.get(type).pauseTimer();
} } |
public class class_name {
public String getValue() {
if (paramValue == null) {
if (paramNameEnd == paramEnd) {
return null;
}
try {
paramValue = URLDecoder.decode(queryString.substring(paramNameEnd + 1, paramEnd), "UTF-8");
} catch (UnsupportedEncodingException ex) {
throw new Error(ex);
}
}
return paramValue;
} } | public class class_name {
public String getValue() {
if (paramValue == null) {
if (paramNameEnd == paramEnd) {
return null; // depends on control dependency: [if], data = [none]
}
try {
paramValue = URLDecoder.decode(queryString.substring(paramNameEnd + 1, paramEnd), "UTF-8"); // depends on control dependency: [try], data = [none]
} catch (UnsupportedEncodingException ex) {
throw new Error(ex);
} // depends on control dependency: [catch], data = [none]
}
return paramValue;
} } |
public class class_name {
public Map<String, PropertyWrapper> extractProperties(Class<?> clazz) {
Map<String, PropertyInfo> map = new LinkedHashMap<>();
Method[] methods = clazz.getMethods();
for(Method method: methods){
// ignore java.lang.Object methods
if(method.getDeclaringClass() == Object.class){
continue;
}
String methodName = method.getName();
if((methodName.startsWith("get") || methodName.startsWith("is")) && method.getParameterTypes().length == 0){
String propertyName = getPropertyName(methodName);
PropertyInfo info = map.get(propertyName);
if(info == null){
info = new PropertyInfo();
info.name = propertyName;
info.getterMethod = method;
info.type = method.getReturnType();
map.put(propertyName, info);
} else if(info.type == method.getReturnType()){
info.getterMethod = method;
}
}
if(methodName.startsWith("set") && method.getParameterTypes().length == 1){
String propertyName = getPropertyName(methodName);
PropertyInfo info = map.get(propertyName);
if(info == null){
info = new PropertyInfo();
info.name = propertyName;
info.setterMethod = method;
info.type = method.getParameterTypes()[0];
map.put(propertyName, info);
} else if(info.type == method.getParameterTypes()[0]){
info.setterMethod = method;
}
}
}
Field[] fields = clazz.getFields();
for(Field field: fields){
int modifiers = field.getModifiers();
if (map.containsKey(field.getName()) == false
&& Modifier.isStatic(modifiers) == false
&& Modifier.isFinal(modifiers) == false) {
PropertyInfo info = new PropertyInfo();
info.name = field.getName();
info.field = field;
info.type = field.getType();
map.put(field.getName(), info);
}
}
Map<String, PropertyWrapper> result = new LinkedHashMap<>();
for(Map.Entry<String, PropertyInfo> e: map.entrySet()){
PropertyInfo info = e.getValue();
if(info.field != null){
result.put(e.getKey(), new PropertyWrapperImpl(info.name, null, null, info.field));
} else {
Field field = null;
try {
field = clazz.getDeclaredField(info.name);
} catch(Exception ex){
// ignore
}
result.put(e.getKey(), new PropertyWrapperImpl(info.name, info.getterMethod, info.setterMethod, field));
}
}
return result;
} } | public class class_name {
public Map<String, PropertyWrapper> extractProperties(Class<?> clazz) {
Map<String, PropertyInfo> map = new LinkedHashMap<>();
Method[] methods = clazz.getMethods();
for(Method method: methods){
// ignore java.lang.Object methods
if(method.getDeclaringClass() == Object.class){
continue;
}
String methodName = method.getName();
if((methodName.startsWith("get") || methodName.startsWith("is")) && method.getParameterTypes().length == 0){
String propertyName = getPropertyName(methodName);
PropertyInfo info = map.get(propertyName);
if(info == null){
info = new PropertyInfo();
// depends on control dependency: [if], data = [none]
info.name = propertyName;
// depends on control dependency: [if], data = [none]
info.getterMethod = method;
// depends on control dependency: [if], data = [none]
info.type = method.getReturnType();
// depends on control dependency: [if], data = [none]
map.put(propertyName, info);
// depends on control dependency: [if], data = [none]
} else if(info.type == method.getReturnType()){
info.getterMethod = method;
// depends on control dependency: [if], data = [none]
}
}
if(methodName.startsWith("set") && method.getParameterTypes().length == 1){
String propertyName = getPropertyName(methodName);
PropertyInfo info = map.get(propertyName);
if(info == null){
info = new PropertyInfo();
// depends on control dependency: [if], data = [none]
info.name = propertyName;
// depends on control dependency: [if], data = [none]
info.setterMethod = method;
// depends on control dependency: [if], data = [none]
info.type = method.getParameterTypes()[0];
// depends on control dependency: [if], data = [none]
map.put(propertyName, info);
// depends on control dependency: [if], data = [none]
} else if(info.type == method.getParameterTypes()[0]){
info.setterMethod = method;
// depends on control dependency: [if], data = [none]
}
}
}
Field[] fields = clazz.getFields();
for(Field field: fields){
int modifiers = field.getModifiers();
if (map.containsKey(field.getName()) == false
&& Modifier.isStatic(modifiers) == false
&& Modifier.isFinal(modifiers) == false) {
PropertyInfo info = new PropertyInfo();
info.name = field.getName();
// depends on control dependency: [if], data = [none]
info.field = field;
// depends on control dependency: [if], data = [none]
info.type = field.getType();
// depends on control dependency: [if], data = [none]
map.put(field.getName(), info);
// depends on control dependency: [if], data = [none]
}
}
Map<String, PropertyWrapper> result = new LinkedHashMap<>();
for(Map.Entry<String, PropertyInfo> e: map.entrySet()){
PropertyInfo info = e.getValue();
if(info.field != null){
result.put(e.getKey(), new PropertyWrapperImpl(info.name, null, null, info.field));
// depends on control dependency: [if], data = [none]
} else {
Field field = null;
try {
field = clazz.getDeclaredField(info.name);
// depends on control dependency: [try], data = [none]
} catch(Exception ex){
// ignore
}
// depends on control dependency: [catch], data = [none]
result.put(e.getKey(), new PropertyWrapperImpl(info.name, info.getterMethod, info.setterMethod, field));
// depends on control dependency: [if], data = [none]
}
}
return result;
} } |
public class class_name {
@Override
public final void set(String providerName, ThreadContext context) {
int index = providerNames.indexOf(providerName);
if (index >= 0)
threadContext.set(index, context);
else {
providerNames.add(providerName);
threadContext.add(context);
}
} } | public class class_name {
@Override
public final void set(String providerName, ThreadContext context) {
int index = providerNames.indexOf(providerName);
if (index >= 0)
threadContext.set(index, context);
else {
providerNames.add(providerName); // depends on control dependency: [if], data = [none]
threadContext.add(context); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public void marshall(UpdateConfigurationRequest updateConfigurationRequest, ProtocolMarshaller protocolMarshaller) {
if (updateConfigurationRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(updateConfigurationRequest.getConfigurationId(), CONFIGURATIONID_BINDING);
protocolMarshaller.marshall(updateConfigurationRequest.getData(), DATA_BINDING);
protocolMarshaller.marshall(updateConfigurationRequest.getDescription(), DESCRIPTION_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(UpdateConfigurationRequest updateConfigurationRequest, ProtocolMarshaller protocolMarshaller) {
if (updateConfigurationRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(updateConfigurationRequest.getConfigurationId(), CONFIGURATIONID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateConfigurationRequest.getData(), DATA_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateConfigurationRequest.getDescription(), DESCRIPTION_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void filter(CaptureSearchResults results) {
Iterator<CaptureSearchResult> itr = results.iterator();
while(itr.hasNext()) {
CaptureSearchResult result = itr.next();
String captureDate = result.getCaptureTimestamp();
if((captureDate.compareTo(startDateStr) >= 0)
&& (captureDate.compareTo(endDateStr) < 0)) {
matches.add(result);
}
}
} } | public class class_name {
public void filter(CaptureSearchResults results) {
Iterator<CaptureSearchResult> itr = results.iterator();
while(itr.hasNext()) {
CaptureSearchResult result = itr.next();
String captureDate = result.getCaptureTimestamp();
if((captureDate.compareTo(startDateStr) >= 0)
&& (captureDate.compareTo(endDateStr) < 0)) {
matches.add(result); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
private Transliterator getTransliterator(int source) {
if (source == targetScript || source == UScript.INVALID_CODE) {
if (isWide(targetScript)) {
return null;
} else {
return widthFix;
}
}
Integer key = Integer.valueOf(source);
Transliterator t = cache.get(key);
if (t == null) {
String sourceName = UScript.getName(source);
String id = sourceName + TARGET_SEP + target;
try {
t = Transliterator.getInstance(id, FORWARD);
} catch (RuntimeException e) { }
if (t == null) {
// Try to pivot around Latin, our most common script
id = sourceName + LATIN_PIVOT + target;
try {
t = Transliterator.getInstance(id, FORWARD);
} catch (RuntimeException e) { }
}
if (t != null) {
if (!isWide(targetScript)) {
List<Transliterator> v = new ArrayList<Transliterator>();
v.add(widthFix);
v.add(t);
t = new CompoundTransliterator(v);
}
Transliterator prevCachedT = cache.putIfAbsent(key, t);
if (prevCachedT != null) {
t = prevCachedT;
}
} else if (!isWide(targetScript)) {
return widthFix;
}
}
return t;
} } | public class class_name {
private Transliterator getTransliterator(int source) {
if (source == targetScript || source == UScript.INVALID_CODE) {
if (isWide(targetScript)) {
return null; // depends on control dependency: [if], data = [none]
} else {
return widthFix; // depends on control dependency: [if], data = [none]
}
}
Integer key = Integer.valueOf(source);
Transliterator t = cache.get(key);
if (t == null) {
String sourceName = UScript.getName(source);
String id = sourceName + TARGET_SEP + target;
try {
t = Transliterator.getInstance(id, FORWARD); // depends on control dependency: [try], data = [none]
} catch (RuntimeException e) { } // depends on control dependency: [catch], data = [none]
if (t == null) {
// Try to pivot around Latin, our most common script
id = sourceName + LATIN_PIVOT + target; // depends on control dependency: [if], data = [none]
try {
t = Transliterator.getInstance(id, FORWARD); // depends on control dependency: [try], data = [none]
} catch (RuntimeException e) { } // depends on control dependency: [catch], data = [none]
}
if (t != null) {
if (!isWide(targetScript)) {
List<Transliterator> v = new ArrayList<Transliterator>();
v.add(widthFix); // depends on control dependency: [if], data = [none]
v.add(t); // depends on control dependency: [if], data = [none]
t = new CompoundTransliterator(v); // depends on control dependency: [if], data = [none]
}
Transliterator prevCachedT = cache.putIfAbsent(key, t);
if (prevCachedT != null) {
t = prevCachedT; // depends on control dependency: [if], data = [none]
}
} else if (!isWide(targetScript)) {
return widthFix; // depends on control dependency: [if], data = [none]
}
}
return t;
} } |
public class class_name {
public void addQuartzContext(JobExecutionContext jobExecutionContext) {
for (Object mapKey : jobExecutionContext.getMergedJobDataMap().keySet()) {
// logger.debug("added key: " + (String) mapKey);
// logger.debug("added value: " + (String)
// jobExecutionContext.getMergedJobDataMap().get(mapKey));
map.put((String) mapKey, jobExecutionContext.getMergedJobDataMap().get(mapKey));
}
map.put(KEY_JOB_NAME, jobExecutionContext.getJobDetail().getName());
map.put(KEY_TRIGGER_NAME, (jobExecutionContext.getTrigger().getName()));
if (jobExecutionContext.getTrigger() instanceof CronTrigger) {
map.put(
KEY_TRIGGER_CRON_EXPRESSION,
((CronTrigger) jobExecutionContext.getTrigger()).getCronExpression());
}
} } | public class class_name {
public void addQuartzContext(JobExecutionContext jobExecutionContext) {
for (Object mapKey : jobExecutionContext.getMergedJobDataMap().keySet()) {
// logger.debug("added key: " + (String) mapKey);
// logger.debug("added value: " + (String)
// jobExecutionContext.getMergedJobDataMap().get(mapKey));
map.put((String) mapKey, jobExecutionContext.getMergedJobDataMap().get(mapKey));
// depends on control dependency: [for], data = [mapKey]
}
map.put(KEY_JOB_NAME, jobExecutionContext.getJobDetail().getName());
map.put(KEY_TRIGGER_NAME, (jobExecutionContext.getTrigger().getName()));
if (jobExecutionContext.getTrigger() instanceof CronTrigger) {
map.put(
KEY_TRIGGER_CRON_EXPRESSION,
((CronTrigger) jobExecutionContext.getTrigger()).getCronExpression());
// depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@XmlTransient
public Type getTypeAsEnum()
{
if(this.getFieldType() == null || this.getFieldType().trim().isEmpty())
{
return null;
}
return Type.valueOf(this.getFieldType());
} } | public class class_name {
@XmlTransient
public Type getTypeAsEnum()
{
if(this.getFieldType() == null || this.getFieldType().trim().isEmpty())
{
return null; // depends on control dependency: [if], data = [none]
}
return Type.valueOf(this.getFieldType());
} } |
public class class_name {
private static long getBlockSize(LocatedBlocks lbs) throws IOException {
List<LocatedBlock> locatedBlocks = lbs.getLocatedBlocks();
long bs = -1;
for (LocatedBlock lb: locatedBlocks) {
if (lb.getBlockSize() > bs) {
bs = lb.getBlockSize();
}
}
return bs;
} } | public class class_name {
private static long getBlockSize(LocatedBlocks lbs) throws IOException {
List<LocatedBlock> locatedBlocks = lbs.getLocatedBlocks();
long bs = -1;
for (LocatedBlock lb: locatedBlocks) {
if (lb.getBlockSize() > bs) {
bs = lb.getBlockSize(); // depends on control dependency: [if], data = [none]
}
}
return bs;
} } |
public class class_name {
public void startServer() {
EventLoopGroup bossGroup = new NioEventLoopGroup();
EventLoopGroup workerGroup = new NioEventLoopGroup();
try {
synchronized (HTTP_SERVER_START_STOP_LOCK) {
ServerBootstrap b = new ServerBootstrap();
b.group(bossGroup, workerGroup)
.channel(NioServerSocketChannel.class)
.childHandler(new DefaultInitializer())
.option(ChannelOption.SO_BACKLOG, Config.getBacklog())
.option(ChannelOption.SO_REUSEADDR, true);
LOG.info("[Netty httpServer] About to bind and start to accept incoming connections on port " + Config.getPort());
parentChannel = b.bind(Config.getPort()).sync().channel();
LOG.info("[Netty httpServer] Started on port " + Config.getPort());
}
parentChannel.closeFuture().addListener(future -> {
LOG.debug("until the server socket is closed.");
LOG.info("[Netty httpServer] 准备shutdown netty server");
bossGroup.shutdownGracefully();
workerGroup.shutdownGracefully();
LOG.info("[Netty httpServer] netty server shutdown完毕!");
});
} catch (Throwable e) {
LOG.error("http netty server 启动失败!", e);
}
} } | public class class_name {
public void startServer() {
EventLoopGroup bossGroup = new NioEventLoopGroup();
EventLoopGroup workerGroup = new NioEventLoopGroup();
try {
synchronized (HTTP_SERVER_START_STOP_LOCK) { // depends on control dependency: [try], data = [none]
ServerBootstrap b = new ServerBootstrap();
b.group(bossGroup, workerGroup)
.channel(NioServerSocketChannel.class)
.childHandler(new DefaultInitializer())
.option(ChannelOption.SO_BACKLOG, Config.getBacklog())
.option(ChannelOption.SO_REUSEADDR, true);
LOG.info("[Netty httpServer] About to bind and start to accept incoming connections on port " + Config.getPort());
parentChannel = b.bind(Config.getPort()).sync().channel();
LOG.info("[Netty httpServer] Started on port " + Config.getPort());
}
parentChannel.closeFuture().addListener(future -> {
LOG.debug("until the server socket is closed.");
LOG.info("[Netty httpServer] 准备shutdown netty server"); // depends on control dependency: [try], data = [none]
bossGroup.shutdownGracefully(); // depends on control dependency: [try], data = [none]
workerGroup.shutdownGracefully(); // depends on control dependency: [try], data = [none]
LOG.info("[Netty httpServer] netty server shutdown完毕!"); // depends on control dependency: [try], data = [none]
});
} catch (Throwable e) {
LOG.error("http netty server 启动失败!", e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public synchronized ControlPoint getControlPoint(final String deploymentName, final String entryPointName) {
ControlPointIdentifier id = new ControlPointIdentifier(deploymentName, entryPointName);
ControlPoint ep = entryPoints.get(id);
if (ep == null) {
ep = new ControlPoint(this, deploymentName, entryPointName, trackIndividualControlPoints);
entryPoints.put(id, ep);
}
ep.increaseReferenceCount();
return ep;
} } | public class class_name {
public synchronized ControlPoint getControlPoint(final String deploymentName, final String entryPointName) {
ControlPointIdentifier id = new ControlPointIdentifier(deploymentName, entryPointName);
ControlPoint ep = entryPoints.get(id);
if (ep == null) {
ep = new ControlPoint(this, deploymentName, entryPointName, trackIndividualControlPoints); // depends on control dependency: [if], data = [none]
entryPoints.put(id, ep); // depends on control dependency: [if], data = [none]
}
ep.increaseReferenceCount();
return ep;
} } |
public class class_name {
public void addJarFile(String jarFile)
{
if (jarFiles == null)
{
jarFiles = new HashSet<String>();
}
this.jarFiles.add(jarFile);
addJarFileUrl(jarFile);
} } | public class class_name {
public void addJarFile(String jarFile)
{
if (jarFiles == null)
{
jarFiles = new HashSet<String>();
// depends on control dependency: [if], data = [none]
}
this.jarFiles.add(jarFile);
addJarFileUrl(jarFile);
} } |
public class class_name {
public JDBCAppendTableSinkBuilder setParameterTypes(TypeInformation<?>... types) {
int[] ty = new int[types.length];
for (int i = 0; i < types.length; ++i) {
ty[i] = JDBCTypeUtil.typeInformationToSqlType(types[i]);
}
this.parameterTypes = ty;
return this;
} } | public class class_name {
public JDBCAppendTableSinkBuilder setParameterTypes(TypeInformation<?>... types) {
int[] ty = new int[types.length];
for (int i = 0; i < types.length; ++i) {
ty[i] = JDBCTypeUtil.typeInformationToSqlType(types[i]); // depends on control dependency: [for], data = [i]
}
this.parameterTypes = ty;
return this;
} } |
public class class_name {
public ActivityImpl parseBusinessRuleTask(Element businessRuleTaskElement, ScopeImpl scope) {
String decisionRef = businessRuleTaskElement.attributeNS(CAMUNDA_BPMN_EXTENSIONS_NS, "decisionRef");
if (decisionRef != null) {
return parseDmnBusinessRuleTask(businessRuleTaskElement, scope);
}
else {
return parseServiceTaskLike("businessRuleTask", businessRuleTaskElement, scope);
}
} } | public class class_name {
public ActivityImpl parseBusinessRuleTask(Element businessRuleTaskElement, ScopeImpl scope) {
String decisionRef = businessRuleTaskElement.attributeNS(CAMUNDA_BPMN_EXTENSIONS_NS, "decisionRef");
if (decisionRef != null) {
return parseDmnBusinessRuleTask(businessRuleTaskElement, scope); // depends on control dependency: [if], data = [none]
}
else {
return parseServiceTaskLike("businessRuleTask", businessRuleTaskElement, scope); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
public final void setItem(final SeGoods pSeGoods) {
this.item = pSeGoods;
if (this.itsId == null) {
this.itsId = new SeGoodsPriceId();
}
this.itsId.setItem(this.item);
} } | public class class_name {
@Override
public final void setItem(final SeGoods pSeGoods) {
this.item = pSeGoods;
if (this.itsId == null) {
this.itsId = new SeGoodsPriceId(); // depends on control dependency: [if], data = [none]
}
this.itsId.setItem(this.item);
} } |
public class class_name {
public Observable<ServiceResponse<Page<AppServicePlanInner>>> listAppServicePlansNextWithServiceResponseAsync(final String nextPageLink) {
return listAppServicePlansNextSinglePageAsync(nextPageLink)
.concatMap(new Func1<ServiceResponse<Page<AppServicePlanInner>>, Observable<ServiceResponse<Page<AppServicePlanInner>>>>() {
@Override
public Observable<ServiceResponse<Page<AppServicePlanInner>>> call(ServiceResponse<Page<AppServicePlanInner>> page) {
String nextPageLink = page.body().nextPageLink();
if (nextPageLink == null) {
return Observable.just(page);
}
return Observable.just(page).concatWith(listAppServicePlansNextWithServiceResponseAsync(nextPageLink));
}
});
} } | public class class_name {
public Observable<ServiceResponse<Page<AppServicePlanInner>>> listAppServicePlansNextWithServiceResponseAsync(final String nextPageLink) {
return listAppServicePlansNextSinglePageAsync(nextPageLink)
.concatMap(new Func1<ServiceResponse<Page<AppServicePlanInner>>, Observable<ServiceResponse<Page<AppServicePlanInner>>>>() {
@Override
public Observable<ServiceResponse<Page<AppServicePlanInner>>> call(ServiceResponse<Page<AppServicePlanInner>> page) {
String nextPageLink = page.body().nextPageLink();
if (nextPageLink == null) {
return Observable.just(page); // depends on control dependency: [if], data = [none]
}
return Observable.just(page).concatWith(listAppServicePlansNextWithServiceResponseAsync(nextPageLink));
}
});
} } |
public class class_name {
@Override
public String getServerName() {
try {
collaborator.preInvoke(componentMetaData);
return request.getServerName();
} finally {
collaborator.postInvoke();
}
} } | public class class_name {
@Override
public String getServerName() {
try {
collaborator.preInvoke(componentMetaData); // depends on control dependency: [try], data = [none]
return request.getServerName(); // depends on control dependency: [try], data = [none]
} finally {
collaborator.postInvoke();
}
} } |
public class class_name {
private Future triggerLoading() {
if (keyLoadFinished.isDone()) {
keyLoadFinished = new LoadFinishedFuture();
// side effect -> just trigger load on SENDER_BACKUP ID SENDER died
execService.execute(MAP_LOAD_ALL_KEYS_EXECUTOR, () -> {
// checks if loading has finished and triggers loading in case SENDER died and SENDER_BACKUP took over.
Operation op = new TriggerLoadIfNeededOperation(mapName);
opService.<Boolean>invokeOnPartition(SERVICE_NAME, op, mapNamePartition)
// required since loading may be triggered after migration
// and in this case the callback is the only way to get to know if the key load finished or not.
.andThen(loadingFinishedCallback());
});
}
return keyLoadFinished;
} } | public class class_name {
private Future triggerLoading() {
if (keyLoadFinished.isDone()) {
keyLoadFinished = new LoadFinishedFuture(); // depends on control dependency: [if], data = [none]
// side effect -> just trigger load on SENDER_BACKUP ID SENDER died
execService.execute(MAP_LOAD_ALL_KEYS_EXECUTOR, () -> {
// checks if loading has finished and triggers loading in case SENDER died and SENDER_BACKUP took over.
Operation op = new TriggerLoadIfNeededOperation(mapName);
opService.<Boolean>invokeOnPartition(SERVICE_NAME, op, mapNamePartition)
// required since loading may be triggered after migration
// and in this case the callback is the only way to get to know if the key load finished or not.
.andThen(loadingFinishedCallback());
}); // depends on control dependency: [if], data = [none]
}
return keyLoadFinished;
} } |
public class class_name {
@RequiresApi(Build.VERSION_CODES.KITKAT)
public void removePauseListener(@NonNull IconicsAnimationPauseListener listener) {
if (mPauseListeners == null) {
return;
}
mPauseListeners.remove(listener);
if (mPauseListeners.size() == 0) {
mPauseListeners = null;
mAnimator.removePauseListener((Animator.AnimatorPauseListener) mProxyPauseListener);
}
} } | public class class_name {
@RequiresApi(Build.VERSION_CODES.KITKAT)
public void removePauseListener(@NonNull IconicsAnimationPauseListener listener) {
if (mPauseListeners == null) {
return; // depends on control dependency: [if], data = [none]
}
mPauseListeners.remove(listener);
if (mPauseListeners.size() == 0) {
mPauseListeners = null; // depends on control dependency: [if], data = [none]
mAnimator.removePauseListener((Animator.AnimatorPauseListener) mProxyPauseListener); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public Observable<ServiceResponse<StorageBundle>> getStorageAccountWithServiceResponseAsync(String vaultBaseUrl, String storageAccountName) {
if (vaultBaseUrl == null) {
throw new IllegalArgumentException("Parameter vaultBaseUrl is required and cannot be null.");
}
if (storageAccountName == null) {
throw new IllegalArgumentException("Parameter storageAccountName is required and cannot be null.");
}
if (this.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.apiVersion() is required and cannot be null.");
}
String parameterizedHost = Joiner.on(", ").join("{vaultBaseUrl}", vaultBaseUrl);
return service.getStorageAccount(storageAccountName, this.apiVersion(), this.acceptLanguage(), parameterizedHost, this.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<StorageBundle>>>() {
@Override
public Observable<ServiceResponse<StorageBundle>> call(Response<ResponseBody> response) {
try {
ServiceResponse<StorageBundle> clientResponse = getStorageAccountDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
} } | public class class_name {
public Observable<ServiceResponse<StorageBundle>> getStorageAccountWithServiceResponseAsync(String vaultBaseUrl, String storageAccountName) {
if (vaultBaseUrl == null) {
throw new IllegalArgumentException("Parameter vaultBaseUrl is required and cannot be null.");
}
if (storageAccountName == null) {
throw new IllegalArgumentException("Parameter storageAccountName is required and cannot be null.");
}
if (this.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.apiVersion() is required and cannot be null.");
}
String parameterizedHost = Joiner.on(", ").join("{vaultBaseUrl}", vaultBaseUrl);
return service.getStorageAccount(storageAccountName, this.apiVersion(), this.acceptLanguage(), parameterizedHost, this.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<StorageBundle>>>() {
@Override
public Observable<ServiceResponse<StorageBundle>> call(Response<ResponseBody> response) {
try {
ServiceResponse<StorageBundle> clientResponse = getStorageAccountDelegate(response);
return Observable.just(clientResponse); // depends on control dependency: [try], data = [none]
} catch (Throwable t) {
return Observable.error(t);
} // depends on control dependency: [catch], data = [none]
}
});
} } |
public class class_name {
public static long getSize(byte[] byteSize) {
if (byteSize.length!=4) {
return 0;
}
ByteBuffer converterBuffer = ByteBuffer.wrap(byteSize);
converterBuffer.order(ByteOrder.LITTLE_ENDIAN);
return convertSignedIntToUnsigned(converterBuffer.getInt());
} } | public class class_name {
public static long getSize(byte[] byteSize) {
if (byteSize.length!=4) {
return 0; // depends on control dependency: [if], data = [none]
}
ByteBuffer converterBuffer = ByteBuffer.wrap(byteSize);
converterBuffer.order(ByteOrder.LITTLE_ENDIAN);
return convertSignedIntToUnsigned(converterBuffer.getInt());
} } |
public class class_name {
public synchronized void generatePluginConfig(String root, String serverName,boolean utilityRequest, File writeDirectory) {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
Tr.entry(tc, "generatePluginConfig", "server is stopping = " + serverIsStopping);
}
try {
// Method is synchronized so only one generate can be in progress at a time.
generateInProgress = true;
if (!serverIsStopping) {
PluginGenerator generator = pluginGenerator;
if ( generator == null ) {
// Process the updated configuration
generator = pluginGenerator = new PluginGenerator(this.config, locMgr, bundleContext);
}
generator.generateXML(root, serverName, (WebContainer) webContainer, smgr, dynVhostMgr, locMgr,utilityRequest, writeDirectory);
}
} catch (Throwable t) {
FFDCFilter.processException(t, getClass().getName(), "generatePluginConfig");
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
Tr.event(tc, "Error generate plugin xml: " + t.getMessage());
}
} finally {
generateInProgress = false;
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
Tr.exit(tc, "generatePluginConfig", "server is stopping = " + serverIsStopping);
}
} } | public class class_name {
public synchronized void generatePluginConfig(String root, String serverName,boolean utilityRequest, File writeDirectory) {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
Tr.entry(tc, "generatePluginConfig", "server is stopping = " + serverIsStopping); // depends on control dependency: [if], data = [none]
}
try {
// Method is synchronized so only one generate can be in progress at a time.
generateInProgress = true; // depends on control dependency: [try], data = [none]
if (!serverIsStopping) {
PluginGenerator generator = pluginGenerator;
if ( generator == null ) {
// Process the updated configuration
generator = pluginGenerator = new PluginGenerator(this.config, locMgr, bundleContext); // depends on control dependency: [if], data = [none]
}
generator.generateXML(root, serverName, (WebContainer) webContainer, smgr, dynVhostMgr, locMgr,utilityRequest, writeDirectory); // depends on control dependency: [if], data = [none]
}
} catch (Throwable t) {
FFDCFilter.processException(t, getClass().getName(), "generatePluginConfig");
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
Tr.event(tc, "Error generate plugin xml: " + t.getMessage()); // depends on control dependency: [if], data = [none]
}
} finally { // depends on control dependency: [catch], data = [none]
generateInProgress = false;
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
Tr.exit(tc, "generatePluginConfig", "server is stopping = " + serverIsStopping); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static EJBEndpoint getWebserviceMetadataEJBEndpoint(final JAXWSDeployment jaxwsDeployment,
final String className) {
java.util.List<EJBEndpoint> ejbEndpointList = jaxwsDeployment.getEjbEndpoints();
for (EJBEndpoint ejbEndpoint : ejbEndpointList) {
if (className.equals(ejbEndpoint.getClassName())) {
return ejbEndpoint;
}
}
return null;
} } | public class class_name {
public static EJBEndpoint getWebserviceMetadataEJBEndpoint(final JAXWSDeployment jaxwsDeployment,
final String className) {
java.util.List<EJBEndpoint> ejbEndpointList = jaxwsDeployment.getEjbEndpoints();
for (EJBEndpoint ejbEndpoint : ejbEndpointList) {
if (className.equals(ejbEndpoint.getClassName())) {
return ejbEndpoint; // depends on control dependency: [if], data = [none]
}
}
return null;
} } |
public class class_name {
public void marshall(BatchDetachTypedLinkResponse batchDetachTypedLinkResponse, ProtocolMarshaller protocolMarshaller) {
if (batchDetachTypedLinkResponse == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(BatchDetachTypedLinkResponse batchDetachTypedLinkResponse, ProtocolMarshaller protocolMarshaller) {
if (batchDetachTypedLinkResponse == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public static AtomContactSet getAtomsInContact(Chain chain1, Chain chain2,
String[] atomNames, double cutoff, boolean hetAtoms) {
Grid grid = new Grid(cutoff);
Atom[] atoms1 = null;
Atom[] atoms2 = null;
if (atomNames == null) {
atoms1 = getAllNonHAtomArray(chain1, hetAtoms);
atoms2 = getAllNonHAtomArray(chain2, hetAtoms);
} else {
atoms1 = getAtomArray(chain1, atomNames);
atoms2 = getAtomArray(chain2, atomNames);
}
grid.addAtoms(atoms1, atoms2);
return grid.getAtomContacts();
} } | public class class_name {
public static AtomContactSet getAtomsInContact(Chain chain1, Chain chain2,
String[] atomNames, double cutoff, boolean hetAtoms) {
Grid grid = new Grid(cutoff);
Atom[] atoms1 = null;
Atom[] atoms2 = null;
if (atomNames == null) {
atoms1 = getAllNonHAtomArray(chain1, hetAtoms); // depends on control dependency: [if], data = [none]
atoms2 = getAllNonHAtomArray(chain2, hetAtoms); // depends on control dependency: [if], data = [none]
} else {
atoms1 = getAtomArray(chain1, atomNames); // depends on control dependency: [if], data = [none]
atoms2 = getAtomArray(chain2, atomNames); // depends on control dependency: [if], data = [none]
}
grid.addAtoms(atoms1, atoms2);
return grid.getAtomContacts();
} } |
public class class_name {
public String instanceName(TypeMirror tm) {
if (tm == null) {
return "null";
}
String result = valueToName.get(tm);
if (result != null) {
return result;
}
String className = unqualifiedClassName(tm).toLowerCase();
Integer size = nameToCount.get(className);
int instanceNumber = (size == null) ? 0 : (size).intValue() + 1;
nameToCount.put(className, new Integer(instanceNumber));
result = className + instanceNumber;
valueToName.put(tm, result);
return result;
} } | public class class_name {
public String instanceName(TypeMirror tm) {
if (tm == null) {
return "null"; // depends on control dependency: [if], data = [none]
}
String result = valueToName.get(tm);
if (result != null) {
return result; // depends on control dependency: [if], data = [none]
}
String className = unqualifiedClassName(tm).toLowerCase();
Integer size = nameToCount.get(className);
int instanceNumber = (size == null) ? 0 : (size).intValue() + 1;
nameToCount.put(className, new Integer(instanceNumber));
result = className + instanceNumber;
valueToName.put(tm, result);
return result;
} } |
public class class_name {
public IntersectionType intersectionType (Box box) {
// exit quickly in cases where the bounding boxes don't overlap (equivalent to a separating
// axis test using the axes of the box)
if (!_bounds.intersects(box)) {
return IntersectionType.NONE;
}
// consider each side of the frustum as a potential separating axis
int ccount = 0;
for (int ii = 0; ii < 6; ii++) {
// determine how many vertices fall inside/outside the plane
int inside = 0;
Plane plane = _planes[ii];
for (int jj = 0; jj < 8; jj++) {
if (plane.distance(box.vertex(jj, _vertex)) <= 0f) {
inside++;
}
}
if (inside == 0) {
return IntersectionType.NONE;
} else if (inside == 8) {
ccount++;
}
}
return (ccount == 6) ? IntersectionType.CONTAINS : IntersectionType.INTERSECTS;
} } | public class class_name {
public IntersectionType intersectionType (Box box) {
// exit quickly in cases where the bounding boxes don't overlap (equivalent to a separating
// axis test using the axes of the box)
if (!_bounds.intersects(box)) {
return IntersectionType.NONE; // depends on control dependency: [if], data = [none]
}
// consider each side of the frustum as a potential separating axis
int ccount = 0;
for (int ii = 0; ii < 6; ii++) {
// determine how many vertices fall inside/outside the plane
int inside = 0;
Plane plane = _planes[ii];
for (int jj = 0; jj < 8; jj++) {
if (plane.distance(box.vertex(jj, _vertex)) <= 0f) {
inside++; // depends on control dependency: [if], data = [none]
}
}
if (inside == 0) {
return IntersectionType.NONE; // depends on control dependency: [if], data = [none]
} else if (inside == 8) {
ccount++; // depends on control dependency: [if], data = [none]
}
}
return (ccount == 6) ? IntersectionType.CONTAINS : IntersectionType.INTERSECTS;
} } |
public class class_name {
public synchronized void setEndpoint(URI endpoint) {
checkUpdatable();
int port = endpoint.getPort();
if (port==80||port==443) {
if (MwsUtl.usesStandardPort(endpoint)) {
try {
//some versions of apache http client cause signature errors when
//standard port is explicitly used, so remove that case.
endpoint = new URI(endpoint.getScheme(), endpoint.getHost(),
endpoint.getPath(), endpoint.getFragment());
} catch (URISyntaxException e) {
throw new IllegalArgumentException(e);
}
}
}
this.endpoint = endpoint;
} } | public class class_name {
public synchronized void setEndpoint(URI endpoint) {
checkUpdatable();
int port = endpoint.getPort();
if (port==80||port==443) {
if (MwsUtl.usesStandardPort(endpoint)) {
try {
//some versions of apache http client cause signature errors when
//standard port is explicitly used, so remove that case.
endpoint = new URI(endpoint.getScheme(), endpoint.getHost(),
endpoint.getPath(), endpoint.getFragment()); // depends on control dependency: [try], data = [none]
} catch (URISyntaxException e) {
throw new IllegalArgumentException(e);
} // depends on control dependency: [catch], data = [none]
}
}
this.endpoint = endpoint;
} } |
public class class_name {
public long countOf(DomainObjectMatch<?> match) {
long ret;
Object so = InternalAccess.getQueryExecutor(this.domainQuery).getMappingInfo()
.getInternalDomainAccess().getSyncObject();
if (so != null) {
synchronized (so) {
ret = intCountOf(match);
}
} else
ret = intCountOf(match);
return ret;
} } | public class class_name {
public long countOf(DomainObjectMatch<?> match) {
long ret;
Object so = InternalAccess.getQueryExecutor(this.domainQuery).getMappingInfo()
.getInternalDomainAccess().getSyncObject();
if (so != null) {
synchronized (so) { // depends on control dependency: [if], data = [(so]
ret = intCountOf(match);
}
} else
ret = intCountOf(match);
return ret;
} } |
public class class_name {
protected PollItem filter(final Object socketOrChannel, int events)
{
if (socketOrChannel == null) {
return null;
}
CompositePollItem item = items.get(socketOrChannel);
if (item == null) {
return null;
}
PollItem pollItem = item.item();
if (pollItem == null) {
return null;
}
if (pollItem.hasEvent(events)) {
return pollItem;
}
return null;
} } | public class class_name {
protected PollItem filter(final Object socketOrChannel, int events)
{
if (socketOrChannel == null) {
return null; // depends on control dependency: [if], data = [none]
}
CompositePollItem item = items.get(socketOrChannel);
if (item == null) {
return null; // depends on control dependency: [if], data = [none]
}
PollItem pollItem = item.item();
if (pollItem == null) {
return null; // depends on control dependency: [if], data = [none]
}
if (pollItem.hasEvent(events)) {
return pollItem; // depends on control dependency: [if], data = [none]
}
return null;
} } |
public class class_name {
private void block(ClassInference inference, ClassConclusion conclusion) {
List<ClassInference> blockedForConclusion = blocked_.get(conclusion);
if (blockedForConclusion == null) {
blockedForConclusion = new ArrayList<ClassInference>();
blocked_.put(conclusion, blockedForConclusion);
}
blockedForConclusion.add(inference);
} } | public class class_name {
private void block(ClassInference inference, ClassConclusion conclusion) {
List<ClassInference> blockedForConclusion = blocked_.get(conclusion);
if (blockedForConclusion == null) {
blockedForConclusion = new ArrayList<ClassInference>(); // depends on control dependency: [if], data = [none]
blocked_.put(conclusion, blockedForConclusion); // depends on control dependency: [if], data = [none]
}
blockedForConclusion.add(inference);
} } |
public class class_name {
public AsyncBulkheadState createAsyncBulkheadState(ScheduledExecutorService executorService, BulkheadPolicy policy, MetricRecorder metricRecorder) {
if (policy == null) {
return new AsyncBulkheadStateNullImpl(executorService);
} else {
return new AsyncBulkheadStateImpl(executorService, policy, metricRecorder);
}
} } | public class class_name {
public AsyncBulkheadState createAsyncBulkheadState(ScheduledExecutorService executorService, BulkheadPolicy policy, MetricRecorder metricRecorder) {
if (policy == null) {
return new AsyncBulkheadStateNullImpl(executorService); // depends on control dependency: [if], data = [none]
} else {
return new AsyncBulkheadStateImpl(executorService, policy, metricRecorder); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private void computeDandH( DenseMatrix64F param , DenseMatrix64F x , DenseMatrix64F y )
{
func.compute(param,x, tempDH);
subtractEquals(tempDH, y);
if (jacobianFactory != null)
{
jacobianFactory.computeJacobian(param, x, jacobian);
}
else
{
computeNumericalJacobian(param,x,jacobian);
}
int numParam = param.getNumElements();
int length = y.getNumElements();
// d = average{ (f(x_i;p) - y_i) * jacobian(:,i) }
for( int i = 0; i < numParam; i++ ) {
double total = 0;
for( int j = 0; j < length; j++ ) {
total += tempDH.get(j,0)*jacobian.get(i,j);
}
d.set(i,0,total/length);
}
// compute the approximation of the hessian
multTransB(jacobian,jacobian,H);
scale(1.0/length,H);
} } | public class class_name {
private void computeDandH( DenseMatrix64F param , DenseMatrix64F x , DenseMatrix64F y )
{
func.compute(param,x, tempDH);
subtractEquals(tempDH, y);
if (jacobianFactory != null)
{
jacobianFactory.computeJacobian(param, x, jacobian);
// depends on control dependency: [if], data = [none]
}
else
{
computeNumericalJacobian(param,x,jacobian);
// depends on control dependency: [if], data = [none]
}
int numParam = param.getNumElements();
int length = y.getNumElements();
// d = average{ (f(x_i;p) - y_i) * jacobian(:,i) }
for( int i = 0; i < numParam; i++ ) {
double total = 0;
for( int j = 0; j < length; j++ ) {
total += tempDH.get(j,0)*jacobian.get(i,j);
// depends on control dependency: [for], data = [j]
}
d.set(i,0,total/length);
// depends on control dependency: [for], data = [i]
}
// compute the approximation of the hessian
multTransB(jacobian,jacobian,H);
scale(1.0/length,H);
} } |
public class class_name {
public PropertyDescriptor[] getPropertyDescriptors() {
try {
return new PropertyDescriptor[] {
new PropertyDescriptor("name", JMSReceiver.class),
new PropertyDescriptor("topicFactoryName", JMSReceiver.class),
new PropertyDescriptor("topicName", JMSReceiver.class),
new PropertyDescriptor("threshold", JMSReceiver.class),
new PropertyDescriptor("jndiPath", JMSReceiver.class),
new PropertyDescriptor("userId",
JMSReceiver.class),
};
} catch (Exception e) {
}
return null;
} } | public class class_name {
public PropertyDescriptor[] getPropertyDescriptors() {
try {
return new PropertyDescriptor[] {
new PropertyDescriptor("name", JMSReceiver.class),
new PropertyDescriptor("topicFactoryName", JMSReceiver.class),
new PropertyDescriptor("topicName", JMSReceiver.class),
new PropertyDescriptor("threshold", JMSReceiver.class),
new PropertyDescriptor("jndiPath", JMSReceiver.class),
new PropertyDescriptor("userId",
JMSReceiver.class),
}; // depends on control dependency: [try], data = [none]
} catch (Exception e) {
} // depends on control dependency: [catch], data = [none]
return null;
} } |
public class class_name {
public static synchronized String rot13( String input ) {
StringBuffer output = new StringBuffer();
if ( input != null ) {
for ( int i = 0; i < input.length(); i++ ) {
char inChar = input.charAt( i );
if ( ( inChar >= 'A' ) & ( inChar <= 'Z' ) ) {
inChar += 13;
if ( inChar > 'Z' ) {
inChar -= 26;
}
}
if ( ( inChar >= 'a' ) & ( inChar <= 'z' ) ) {
inChar += 13;
if ( inChar > 'z' ) {
inChar -= 26;
}
}
output.append( inChar );
}
}
return output.toString();
} } | public class class_name {
public static synchronized String rot13( String input ) {
StringBuffer output = new StringBuffer();
if ( input != null ) {
for ( int i = 0; i < input.length(); i++ ) {
char inChar = input.charAt( i );
if ( ( inChar >= 'A' ) & ( inChar <= 'Z' ) ) {
inChar += 13; // depends on control dependency: [if], data = [none]
if ( inChar > 'Z' ) {
inChar -= 26; // depends on control dependency: [if], data = [none]
}
}
if ( ( inChar >= 'a' ) & ( inChar <= 'z' ) ) {
inChar += 13; // depends on control dependency: [if], data = [none]
if ( inChar > 'z' ) {
inChar -= 26; // depends on control dependency: [if], data = [none]
}
}
output.append( inChar ); // depends on control dependency: [for], data = [none]
}
}
return output.toString();
} } |
public class class_name {
public void addExtraCapabilities(DesiredCapabilities extraCapabilities) {
if (extraCapabilities != null && browser.getName() != BrowserName.NONE) {
desiredCapabilities = desiredCapabilities.merge(extraCapabilities);
}
} } | public class class_name {
public void addExtraCapabilities(DesiredCapabilities extraCapabilities) {
if (extraCapabilities != null && browser.getName() != BrowserName.NONE) {
desiredCapabilities = desiredCapabilities.merge(extraCapabilities); // depends on control dependency: [if], data = [(extraCapabilities]
}
} } |
public class class_name {
public EClass getIfcStackTerminalType() {
if (ifcStackTerminalTypeEClass == null) {
ifcStackTerminalTypeEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
.getEClassifiers().get(530);
}
return ifcStackTerminalTypeEClass;
} } | public class class_name {
public EClass getIfcStackTerminalType() {
if (ifcStackTerminalTypeEClass == null) {
ifcStackTerminalTypeEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
.getEClassifiers().get(530);
// depends on control dependency: [if], data = [none]
}
return ifcStackTerminalTypeEClass;
} } |
public class class_name {
public DocumentFragment rtree(XPathContext support)
{
DocumentFragment docFrag = null;
int result = rtf();
if (DTM.NULL == result)
{
DTM frag = support.createDocumentFragment();
// %OPT%
frag.appendTextChild(str());
docFrag = (DocumentFragment)frag.getNode(frag.getDocument());
}
else
{
DTM frag = support.getDTM(result);
docFrag = (DocumentFragment)frag.getNode(frag.getDocument());
}
return docFrag;
} } | public class class_name {
public DocumentFragment rtree(XPathContext support)
{
DocumentFragment docFrag = null;
int result = rtf();
if (DTM.NULL == result)
{
DTM frag = support.createDocumentFragment();
// %OPT%
frag.appendTextChild(str()); // depends on control dependency: [if], data = [none]
docFrag = (DocumentFragment)frag.getNode(frag.getDocument()); // depends on control dependency: [if], data = [none]
}
else
{
DTM frag = support.getDTM(result);
docFrag = (DocumentFragment)frag.getNode(frag.getDocument()); // depends on control dependency: [if], data = [none]
}
return docFrag;
} } |
public class class_name {
// Declarative Services activation callback for the transaction component.
// Decides between an RDBMS-backed transaction recovery log (dataSourceRef present
// in server.xml) and a filesystem log, then kicks off recovery via the transaction
// manager service when its prerequisites are already satisfied.
// NOTE(review): ordering here is deliberate — _cc must be stored before
// dataSourceFactoryRef.activate(_cc) is driven; do not reorder.
protected void activate(ComponentContext cc) {
_runtimeMetaDataProvider = new LibertyRuntimeMetaDataProvider(this);
activateHasBeenCalled = true;
_transactionSettingsProviders.activate(cc);
_cc = cc;
// Irrespective of the logtype we need to get the properties
_props = _cc.getProperties();
if (tc.isDebugEnabled())
Tr.debug(tc, "activate properties set to " + _props);
// There is additional work to do if we are storing transaction log in an RDBMS. The key
// determinant that we are using an RDBMS is the specification of the dataSourceRef
// attribute of the transaction stanza in the server.xml. So start by checking this
// attribute. If it is present, set the _isSQLRecoveryLog flag and set the logDir
// to "custom" <- this will allow compatibility with tWAS code.
//
// Drive the getTransactionLogDirectory() method if we're working against the filesys.
checkDataSourceRef();
if (_isSQLRecoveryLog) {
if (tc.isDebugEnabled())
Tr.debug(tc, "activate working with Tran Log in an RDBMS");
ServiceReference<ResourceFactory> serviceRef = dataSourceFactoryRef.getReference();
if (tc.isDebugEnabled())
Tr.debug(tc, "pre-activate datasourceFactory ref " + dataSourceFactoryRef +
", underlying reference: " + serviceRef);
dataSourceFactoryRef.activate(_cc);
if (tc.isDebugEnabled())
Tr.debug(tc, "post-activate datasourceFactory ref " + dataSourceFactoryRef);
// If we already have a dataSourceFactory then we can startup (and drive recovery) now.
if (tc.isDebugEnabled())
Tr.debug(tc, "retrieved datasourceFactory service ref " + serviceRef);
if (serviceRef != null) {
// The DataSource is available, which means that we are able to drive recovery
// processing. This is driven through the reference to the TransactionManagerService,
// assuming that it is available
if (tmsRef != null)
tmsRef.doStartup(this, _isSQLRecoveryLog);
}
} else {
// Filesystem-based log: resolve the log directory, then drive startup/recovery.
getTransactionLogDirectory();
if (tmsRef != null)
tmsRef.doStartup(this, _isSQLRecoveryLog);
}
if (tc.isDebugEnabled())
Tr.debug(tc, "activate retrieved datasourceFactory is " + _theDataSourceFactory);
} }
protected void activate(ComponentContext cc) {
_runtimeMetaDataProvider = new LibertyRuntimeMetaDataProvider(this);
activateHasBeenCalled = true;
_transactionSettingsProviders.activate(cc);
_cc = cc;
// Irrespective of the logtype we need to get the properties
_props = _cc.getProperties();
if (tc.isDebugEnabled())
Tr.debug(tc, "activate properties set to " + _props);
// There is additional work to do if we are storing transaction log in an RDBMS. The key
// determinant that we are using an RDBMS is the specification of the dataSourceRef
// attribute of the transaction stanza in the server.xml. So start by checking this
// attribute. If it is present, set the _isSQLRecoveryLog flag and set the logDir
// to "custom" <- this will allow compatibility with tWAS code.
//
// Drive the getTransactionLogDirectory() method if we're working against the filesys.
checkDataSourceRef();
if (_isSQLRecoveryLog) {
if (tc.isDebugEnabled())
Tr.debug(tc, "activate working with Tran Log in an RDBMS");
ServiceReference<ResourceFactory> serviceRef = dataSourceFactoryRef.getReference();
if (tc.isDebugEnabled())
Tr.debug(tc, "pre-activate datasourceFactory ref " + dataSourceFactoryRef +
", underlying reference: " + serviceRef);
dataSourceFactoryRef.activate(_cc); // depends on control dependency: [if], data = [none]
if (tc.isDebugEnabled())
Tr.debug(tc, "post-activate datasourceFactory ref " + dataSourceFactoryRef);
// If we already have a dataSourceFactory then we can startup (and drive recovery) now.
if (tc.isDebugEnabled())
Tr.debug(tc, "retrieved datasourceFactory service ref " + serviceRef);
if (serviceRef != null) {
// The DataSource is available, which means that we are able to drive recovery
// processing. This is driven through the reference to the TransactionManagerService,
// assuming that it is available
if (tmsRef != null)
tmsRef.doStartup(this, _isSQLRecoveryLog);
}
} else {
getTransactionLogDirectory(); // depends on control dependency: [if], data = [none]
if (tmsRef != null)
tmsRef.doStartup(this, _isSQLRecoveryLog);
}
if (tc.isDebugEnabled())
Tr.debug(tc, "activate retrieved datasourceFactory is " + _theDataSourceFactory);
} } |
public class class_name {
protected void printMacroValue(String macro) throws IOException {
if (macro.startsWith(KEY_CMS)) {
Object result = getMacroBeanValue(m_contextBean, macro.substring(KEY_CMS.length()));
if (result != null) {
m_context.getOut().print(result);
}
} else if (macro.startsWith(KEY_ELEMENT)) {
Object result = getMacroBeanValue(m_contextBean.getElement(), macro.substring(KEY_ELEMENT.length()));
if (result != null) {
m_context.getOut().print(result);
}
} else if (macro.startsWith(KEY_PARENT)) {
Object result = getMacroBeanValue(
m_contextBean.getParentElement(m_element),
macro.substring(KEY_PARENT.length()));
if (result != null) {
m_context.getOut().print(result);
}
} else if (macro.startsWith(KEY_SETTINGS)) {
String settingValue = m_element.getSettings().get(macro.substring(KEY_SETTINGS.length()));
if (settingValue != null) {
m_context.getOut().print(settingValue);
}
} else {
I_CmsFormatterBean formatter = getFormatterForMacro(macro);
if (formatter != null) {
try {
CmsJspTagDisplay.displayAction(
CmsContainerElementBean.cloneWithFormatter(m_element, formatter.getJspStructureId()),
formatter,
m_context,
m_request,
m_response);
} catch (Exception e) {
LOG.error("Failed to display formatted content.", e);
}
}
}
} } | public class class_name {
protected void printMacroValue(String macro) throws IOException {
if (macro.startsWith(KEY_CMS)) {
Object result = getMacroBeanValue(m_contextBean, macro.substring(KEY_CMS.length()));
if (result != null) {
m_context.getOut().print(result); // depends on control dependency: [if], data = [(result]
}
} else if (macro.startsWith(KEY_ELEMENT)) {
Object result = getMacroBeanValue(m_contextBean.getElement(), macro.substring(KEY_ELEMENT.length()));
if (result != null) {
m_context.getOut().print(result); // depends on control dependency: [if], data = [(result]
}
} else if (macro.startsWith(KEY_PARENT)) {
Object result = getMacroBeanValue(
m_contextBean.getParentElement(m_element),
macro.substring(KEY_PARENT.length()));
if (result != null) {
m_context.getOut().print(result); // depends on control dependency: [if], data = [(result]
}
} else if (macro.startsWith(KEY_SETTINGS)) {
String settingValue = m_element.getSettings().get(macro.substring(KEY_SETTINGS.length()));
if (settingValue != null) {
m_context.getOut().print(settingValue); // depends on control dependency: [if], data = [(settingValue]
}
} else {
I_CmsFormatterBean formatter = getFormatterForMacro(macro);
if (formatter != null) {
try {
CmsJspTagDisplay.displayAction(
CmsContainerElementBean.cloneWithFormatter(m_element, formatter.getJspStructureId()),
formatter,
m_context,
m_request,
m_response); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
LOG.error("Failed to display formatted content.", e);
} // depends on control dependency: [catch], data = [none]
}
}
} } |
public class class_name {
public String getFullName(TypeElement element) {
element = typeUtil.getObjcClass(element);
String fullName = fullNameCache.get(element);
if (fullName == null) {
fullName = getFullNameImpl(element);
fullNameCache.put(element, fullName);
}
return fullName;
} } | public class class_name {
public String getFullName(TypeElement element) {
element = typeUtil.getObjcClass(element);
String fullName = fullNameCache.get(element);
if (fullName == null) {
fullName = getFullNameImpl(element); // depends on control dependency: [if], data = [none]
fullNameCache.put(element, fullName); // depends on control dependency: [if], data = [none]
}
return fullName;
} } |
public class class_name {
Coordinate getSnappingPoint(Coordinate original, double threshold) {
minimumDistance = Double.MAX_VALUE;
Coordinate snappingPoint = null;
double currThreshold = threshold;
List<Coordinate> coordinates = getPossibleCoordinates(original);
for (Coordinate coordinate : coordinates) {
double distance = Mathlib.distance(original, coordinate);
if (distance < currThreshold && distance < ruleDistance) {
currThreshold = distance;
minimumDistance = distance;
snappingPoint = coordinate;
}
}
return snappingPoint;
} } | public class class_name {
Coordinate getSnappingPoint(Coordinate original, double threshold) {
minimumDistance = Double.MAX_VALUE;
Coordinate snappingPoint = null;
double currThreshold = threshold;
List<Coordinate> coordinates = getPossibleCoordinates(original);
for (Coordinate coordinate : coordinates) {
double distance = Mathlib.distance(original, coordinate);
if (distance < currThreshold && distance < ruleDistance) {
currThreshold = distance; // depends on control dependency: [if], data = [none]
minimumDistance = distance; // depends on control dependency: [if], data = [none]
snappingPoint = coordinate; // depends on control dependency: [if], data = [none]
}
}
return snappingPoint;
} } |
public class class_name {
/**
 * Resolves the given page object to a container page bean and initializes each
 * contained element (resource + settings) against the ADE configuration for
 * the page's root path. Group/inherited containers are skipped.
 * Any failure is logged and results in a null (or partially initialized) return.
 *
 * @param page the page, in any form convertible to a raw resource
 * @param locale the locale, in any form convertible to a Locale
 * @return the container page bean, or null if unavailable or on error
 */
public CmsContainerPageBean getPage(Object page, Object locale) {
CmsResource pageResource = null;
CmsContainerPageBean result = null;
if (m_cms != null) {
try {
pageResource = CmsJspElFunctions.convertRawResource(m_cms, page);
Locale l = CmsJspElFunctions.convertLocale(locale);
result = getPage(pageResource);
if (result != null) {
CmsADEConfigData adeConfig = OpenCms.getADEManager().lookupConfiguration(
m_cms,
pageResource.getRootPath());
for (CmsContainerBean container : result.getContainers().values()) {
for (CmsContainerElementBean element : container.getElements()) {
boolean isGroupContainer = element.isGroupContainer(m_cms);
boolean isInheritedContainer = element.isInheritedContainer(m_cms);
I_CmsFormatterBean formatterConfig = null;
if (!isGroupContainer && !isInheritedContainer) {
element.initResource(m_cms);
// ensure that the formatter configuration id is added to the element settings, so it will be persisted on save
formatterConfig = CmsJspTagContainer.getFormatterConfigurationForElement(
m_cms,
element,
adeConfig,
container.getName(),
"",
0);
if (formatterConfig != null) {
element.initSettings(m_cms, formatterConfig, l, m_request, null);
}
}
}
}
}
} catch (Exception e) {
// Best-effort: page rendering helpers must not throw; log and fall through.
LOG.warn(e.getLocalizedMessage(), e);
}
}
return result;
} }
public CmsContainerPageBean getPage(Object page, Object locale) {
CmsResource pageResource = null;
CmsContainerPageBean result = null;
if (m_cms != null) {
try {
pageResource = CmsJspElFunctions.convertRawResource(m_cms, page); // depends on control dependency: [try], data = [none]
Locale l = CmsJspElFunctions.convertLocale(locale);
result = getPage(pageResource); // depends on control dependency: [try], data = [none]
if (result != null) {
CmsADEConfigData adeConfig = OpenCms.getADEManager().lookupConfiguration(
m_cms,
pageResource.getRootPath());
for (CmsContainerBean container : result.getContainers().values()) {
for (CmsContainerElementBean element : container.getElements()) {
boolean isGroupContainer = element.isGroupContainer(m_cms);
boolean isInheritedContainer = element.isInheritedContainer(m_cms);
I_CmsFormatterBean formatterConfig = null;
if (!isGroupContainer && !isInheritedContainer) {
element.initResource(m_cms); // depends on control dependency: [if], data = [none]
// ensure that the formatter configuration id is added to the element settings, so it will be persisted on save
formatterConfig = CmsJspTagContainer.getFormatterConfigurationForElement(
m_cms,
element,
adeConfig,
container.getName(),
"",
0); // depends on control dependency: [if], data = [none]
if (formatterConfig != null) {
element.initSettings(m_cms, formatterConfig, l, m_request, null); // depends on control dependency: [if], data = [null)]
}
}
}
}
}
} catch (Exception e) {
LOG.warn(e.getLocalizedMessage(), e);
} // depends on control dependency: [catch], data = [none]
}
return result;
} } |
public class class_name {
/**
 * Hands out granted resources to their sessions and notifies each session of
 * its grants. A stale/invalid session handle is logged and skipped — note that
 * the notifier is still invoked afterwards in that case (existing behavior;
 * TODO confirm this is intentional).
 *
 * @param sessionIdToGranted map from session id to the grants assigned to it
 */
private void dispatchGrantedResource(
Map<String, List<ResourceGrant>> sessionIdToGranted) {
for (Map.Entry<String, List<ResourceGrant>> entry :
sessionIdToGranted.entrySet()) {
LOG.info("Assigning " + entry.getValue().size() + " " +
type + " requests to Session: " + entry.getKey());
try {
Session session = sessionManager.getSession(entry.getKey());
session.setResourceGrant(entry.getValue());
} catch (InvalidSessionHandle e) {
LOG.warn("Trying to add call for invalid session: " + entry.getKey());
}
if (LOG.isDebugEnabled()) {
LOG.debug(Arrays.toString(entry.getValue().toArray()));
}
sessionNotifier.notifyGrantResource(entry.getKey(), entry.getValue());
}
} }
private void dispatchGrantedResource(
Map<String, List<ResourceGrant>> sessionIdToGranted) {
for (Map.Entry<String, List<ResourceGrant>> entry :
sessionIdToGranted.entrySet()) {
LOG.info("Assigning " + entry.getValue().size() + " " +
type + " requests to Session: " + entry.getKey()); // depends on control dependency: [for], data = [entry]
try {
Session session = sessionManager.getSession(entry.getKey());
session.setResourceGrant(entry.getValue()); // depends on control dependency: [try], data = [none]
} catch (InvalidSessionHandle e) {
LOG.warn("Trying to add call for invalid session: " + entry.getKey());
} // depends on control dependency: [catch], data = [none]
if (LOG.isDebugEnabled()) {
LOG.debug(Arrays.toString(entry.getValue().toArray())); // depends on control dependency: [if], data = [none]
}
sessionNotifier.notifyGrantResource(entry.getKey(), entry.getValue()); // depends on control dependency: [for], data = [entry]
}
} } |
public class class_name {
public static ServerSocket createSocketFromPorts(Iterator<Integer> portsIterator, SocketFactory factory) {
while (portsIterator.hasNext()) {
int port = portsIterator.next();
LOG.debug("Trying to open socket on port {}", port);
try {
return factory.createSocket(port);
} catch (IOException | IllegalArgumentException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("Unable to allocate socket on port", e);
} else {
LOG.info("Unable to allocate on port {}, due to error: {}", port, e.getMessage());
}
}
}
return null;
} } | public class class_name {
public static ServerSocket createSocketFromPorts(Iterator<Integer> portsIterator, SocketFactory factory) {
while (portsIterator.hasNext()) {
int port = portsIterator.next();
LOG.debug("Trying to open socket on port {}", port); // depends on control dependency: [while], data = [none]
try {
return factory.createSocket(port); // depends on control dependency: [try], data = [none]
} catch (IOException | IllegalArgumentException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("Unable to allocate socket on port", e); // depends on control dependency: [if], data = [none]
} else {
LOG.info("Unable to allocate on port {}, due to error: {}", port, e.getMessage()); // depends on control dependency: [if], data = [none]
}
} // depends on control dependency: [catch], data = [none]
}
return null;
} } |
public class class_name {
/**
 * Implements the {@link Set#equals(Object)} contract: two sets are equal when
 * the other object is also a {@code Set} of the same size containing all of
 * this set's elements.
 *
 * @param s the set being compared (never null)
 * @param object the candidate, may be null (returns false)
 * @return true if {@code object} is a set equal to {@code s}
 */
static boolean equalsImpl(Set<?> s, Object object) {
if (s == object) {
return true;
}
if (object instanceof Set) {
Set<?> o = (Set<?>) object;
try {
return s.size() == o.size() && s.containsAll(o);
} catch (NullPointerException | ClassCastException ignored) {
// A set that rejects null or foreign element types cannot contain
// the other set's elements, so the sets are not equal.
return false;
}
}
return false;
} }
static boolean equalsImpl(Set<?> s, @Nullable Object object) {
if (s == object) {
return true; // depends on control dependency: [if], data = [none]
}
if (object instanceof Set) {
Set<?> o = (Set<?>) object;
try {
return s.size() == o.size() && s.containsAll(o); // depends on control dependency: [try], data = [none]
} catch (NullPointerException ignored) {
return false;
} catch (ClassCastException ignored) { // depends on control dependency: [catch], data = [none]
return false;
} // depends on control dependency: [catch], data = [none]
}
return false;
} } |
public class class_name {
/**
 * Returns true if this node declares a static method named {@code name} that
 * could plausibly accept the given call arguments. The argument count is
 * estimated from the expression shape (tuple size, or 1 for a map/named-arg
 * call); a method matches on an exact count, a compatible varargs tail, or a
 * combination of supplied and defaulted parameters.
 */
public boolean hasPossibleStaticMethod(String name, Expression arguments) {
int count = 0;
if (arguments instanceof TupleExpression) {
TupleExpression tuple = (TupleExpression) arguments;
// TODO this won't strictly be true when using list expansion in argument calls
count = tuple.getExpressions().size();
} else if (arguments instanceof MapExpression) {
// Named arguments collapse into a single Map parameter.
count = 1;
}
for (MethodNode method : getMethods(name)) {
if(method.isStatic()) {
Parameter[] parameters = method.getParameters();
if (parameters.length == count) return true;
// handle varargs case
if (parameters.length > 0 && parameters[parameters.length - 1].getType().isArray()) {
if (count >= parameters.length - 1) return true;
}
// handle parameters with default values
int nonDefaultParameters = 0;
for (Parameter parameter : parameters) {
if (!parameter.hasInitialExpression()) {
nonDefaultParameters++;
}
}
// Fewer args than parameters is fine if the remainder have defaults.
if (count < parameters.length && nonDefaultParameters <= count) {
return true;
}
}
}
return false;
} }
public boolean hasPossibleStaticMethod(String name, Expression arguments) {
int count = 0;
if (arguments instanceof TupleExpression) {
TupleExpression tuple = (TupleExpression) arguments;
// TODO this won't strictly be true when using list expansion in argument calls
count = tuple.getExpressions().size(); // depends on control dependency: [if], data = [none]
} else if (arguments instanceof MapExpression) {
count = 1; // depends on control dependency: [if], data = [none]
}
for (MethodNode method : getMethods(name)) {
if(method.isStatic()) {
Parameter[] parameters = method.getParameters();
if (parameters.length == count) return true;
// handle varargs case
if (parameters.length > 0 && parameters[parameters.length - 1].getType().isArray()) {
if (count >= parameters.length - 1) return true;
}
// handle parameters with default values
int nonDefaultParameters = 0;
for (Parameter parameter : parameters) {
if (!parameter.hasInitialExpression()) {
nonDefaultParameters++; // depends on control dependency: [if], data = [none]
}
}
if (count < parameters.length && nonDefaultParameters <= count) {
return true; // depends on control dependency: [if], data = [none]
}
}
}
return false;
} } |
public class class_name {
public DescribeGameSessionQueuesResult withGameSessionQueues(GameSessionQueue... gameSessionQueues) {
if (this.gameSessionQueues == null) {
setGameSessionQueues(new java.util.ArrayList<GameSessionQueue>(gameSessionQueues.length));
}
for (GameSessionQueue ele : gameSessionQueues) {
this.gameSessionQueues.add(ele);
}
return this;
} } | public class class_name {
public DescribeGameSessionQueuesResult withGameSessionQueues(GameSessionQueue... gameSessionQueues) {
if (this.gameSessionQueues == null) {
setGameSessionQueues(new java.util.ArrayList<GameSessionQueue>(gameSessionQueues.length)); // depends on control dependency: [if], data = [none]
}
for (GameSessionQueue ele : gameSessionQueues) {
this.gameSessionQueues.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
public EEnum getIfcElectricDistributionPointFunctionEnum() {
if (ifcElectricDistributionPointFunctionEnumEEnum == null) {
ifcElectricDistributionPointFunctionEnumEEnum = (EEnum) EPackage.Registry.INSTANCE
.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(824);
}
return ifcElectricDistributionPointFunctionEnumEEnum;
} } | public class class_name {
public EEnum getIfcElectricDistributionPointFunctionEnum() {
if (ifcElectricDistributionPointFunctionEnumEEnum == null) {
ifcElectricDistributionPointFunctionEnumEEnum = (EEnum) EPackage.Registry.INSTANCE
.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(824);
// depends on control dependency: [if], data = [none]
}
return ifcElectricDistributionPointFunctionEnumEEnum;
} } |
public class class_name {
public CMASpaceMembership update(CMASpaceMembership membership) {
assertNotNull(membership, "membership");
final String id = getResourceIdOrThrow(membership, "membership");
final String spaceId = getSpaceIdOrThrow(membership, "membership");
final Integer version = getVersionOrThrow(membership, "update");
final CMASystem sys = membership.getSystem();
membership.setSystem(null);
try {
return service.update(spaceId, id, membership, version).blockingFirst();
} finally {
membership.setSystem(sys);
}
} } | public class class_name {
public CMASpaceMembership update(CMASpaceMembership membership) {
assertNotNull(membership, "membership");
final String id = getResourceIdOrThrow(membership, "membership");
final String spaceId = getSpaceIdOrThrow(membership, "membership");
final Integer version = getVersionOrThrow(membership, "update");
final CMASystem sys = membership.getSystem();
membership.setSystem(null);
try {
return service.update(spaceId, id, membership, version).blockingFirst(); // depends on control dependency: [try], data = [none]
} finally {
membership.setSystem(sys);
}
} } |
public class class_name {
/**
 * Converts every document of a Salt project, writing either an adjacency
 * matrix (when {@code nodeCount} is given) or plain text output per document.
 * Virtual tokenization is removed first, driven by the corpus configuration:
 * either namespace-based, or via an explicit "anno=order" mapping list.
 * NOTE(review): only the configuration of the first top-level corpus is
 * consulted — TODO confirm this is intended for multi-corpus projects.
 *
 * @param p the Salt project to convert (may be null; then nothing happens)
 * @param offset record number to start counting documents from
 * @param nodeCount when non-null, emit adjacency matrices instead of text
 */
private void convertSaltProject(SaltProject p, List<String> annoKeys, Map<String, String> args, boolean alignmc, int offset,
Map<String, CorpusConfig> corpusConfigs, Writer out, Integer nodeCount) throws IOException, IllegalArgumentException
{
int recordNumber = offset;
if(p != null && p.getCorpusGraphs() != null)
{
Map<String, String> spanAnno2order = null;
boolean virtualTokenizationFromNamespace = false;
Set<String> corpusNames = CommonHelper.getToplevelCorpusNames(p);
if(!corpusNames.isEmpty())
{
CorpusConfig config = corpusConfigs.get(corpusNames.iterator().next());
if(config != null)
{
if("true".equalsIgnoreCase(config.getConfig("virtual_tokenization_from_namespace")))
{
virtualTokenizationFromNamespace = true;
}
else
{
// Parse "spanAnno=order,spanAnno=order,..." into a lookup map.
String mappingRaw = config.getConfig("virtual_tokenization_mapping");
if(mappingRaw != null)
{
spanAnno2order = new HashMap<>();
for(String singleMapping : Splitter.on(',').split(mappingRaw))
{
List<String> mappingParts = Splitter.on('=').splitToList(singleMapping);
if(mappingParts.size() >= 2)
{
spanAnno2order.put(mappingParts.get(0), mappingParts.get(1));
}
}
}
}
}
}
for(SCorpusGraph corpusGraph : p.getCorpusGraphs())
{
if(corpusGraph.getDocuments() != null)
{
for(SDocument doc : corpusGraph.getDocuments())
{
if(virtualTokenizationFromNamespace)
{
TimelineReconstructor.removeVirtualTokenizationUsingNamespace(doc.getDocumentGraph());
}
else if(spanAnno2order != null)
{
// there is a definition how to map the virtual tokenization to a real one
TimelineReconstructor.removeVirtualTokenization(doc.getDocumentGraph(), spanAnno2order);
}
if (nodeCount != null){
createAdjacencyMatrix(doc.getDocumentGraph(), args, recordNumber++, nodeCount);
}
else{
outputText(doc.getDocumentGraph(), alignmc, recordNumber++, out);
}
}
}
}
}
} }
private void convertSaltProject(SaltProject p, List<String> annoKeys, Map<String, String> args, boolean alignmc, int offset,
Map<String, CorpusConfig> corpusConfigs, Writer out, Integer nodeCount) throws IOException, IllegalArgumentException
{
int recordNumber = offset;
if(p != null && p.getCorpusGraphs() != null)
{
Map<String, String> spanAnno2order = null;
boolean virtualTokenizationFromNamespace = false;
Set<String> corpusNames = CommonHelper.getToplevelCorpusNames(p);
if(!corpusNames.isEmpty())
{
CorpusConfig config = corpusConfigs.get(corpusNames.iterator().next());
if(config != null)
{
if("true".equalsIgnoreCase(config.getConfig("virtual_tokenization_from_namespace")))
{
virtualTokenizationFromNamespace = true; // depends on control dependency: [if], data = [none]
}
else
{
String mappingRaw = config.getConfig("virtual_tokenization_mapping");
if(mappingRaw != null)
{
spanAnno2order = new HashMap<>(); // depends on control dependency: [if], data = [none]
for(String singleMapping : Splitter.on(',').split(mappingRaw))
{
List<String> mappingParts = Splitter.on('=').splitToList(singleMapping);
if(mappingParts.size() >= 2)
{
spanAnno2order.put(mappingParts.get(0), mappingParts.get(1)); // depends on control dependency: [if], data = [none]
}
}
}
}
}
}
for(SCorpusGraph corpusGraph : p.getCorpusGraphs())
{
if(corpusGraph.getDocuments() != null)
{
for(SDocument doc : corpusGraph.getDocuments())
{
if(virtualTokenizationFromNamespace)
{
TimelineReconstructor.removeVirtualTokenizationUsingNamespace(doc.getDocumentGraph()); // depends on control dependency: [if], data = [none]
}
else if(spanAnno2order != null)
{
// there is a definition how to map the virtual tokenization to a real one
TimelineReconstructor.removeVirtualTokenization(doc.getDocumentGraph(), spanAnno2order); // depends on control dependency: [if], data = [none]
}
if (nodeCount != null){
createAdjacencyMatrix(doc.getDocumentGraph(), args, recordNumber++, nodeCount); // depends on control dependency: [if], data = [none]
}
else{
outputText(doc.getDocumentGraph(), alignmc, recordNumber++, out); // depends on control dependency: [if], data = [none]
}
}
}
}
}
} } |
public class class_name {
static public byte[] expandByteArray(byte[] src, byte[] dst, int srcPos, int srcLength, int dstPos, int dstLength) {
byte[] rc = null;
int totalLen = 0;
if (null != src) {
totalLen += srcLength;
}
if (null != dst) {
totalLen += dstLength;
}
if (0 < totalLen) {
rc = new byte[totalLen];
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Created byte[] of size " + totalLen);
}
try {
if (null != src) {
System.arraycopy(src, srcPos, rc, 0, srcLength);
}
if (null != dst) {
System.arraycopy(dst, dstPos, rc, srcLength, dstLength);
}
} catch (Exception e) {
// no FFDC required
// any error from arraycopy, we'll just return null
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Exception " + e + " while copying.");
}
rc = null;
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "expandByteArray returning: [" + getEnglishString(rc) + "]");
}
return rc;
} } | public class class_name {
static public byte[] expandByteArray(byte[] src, byte[] dst, int srcPos, int srcLength, int dstPos, int dstLength) {
byte[] rc = null;
int totalLen = 0;
if (null != src) {
totalLen += srcLength; // depends on control dependency: [if], data = [none]
}
if (null != dst) {
totalLen += dstLength; // depends on control dependency: [if], data = [none]
}
if (0 < totalLen) {
rc = new byte[totalLen]; // depends on control dependency: [if], data = [none]
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Created byte[] of size " + totalLen); // depends on control dependency: [if], data = [none]
}
try {
if (null != src) {
System.arraycopy(src, srcPos, rc, 0, srcLength); // depends on control dependency: [if], data = [none]
}
if (null != dst) {
System.arraycopy(dst, dstPos, rc, srcLength, dstLength); // depends on control dependency: [if], data = [none]
}
} catch (Exception e) {
// no FFDC required
// any error from arraycopy, we'll just return null
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Exception " + e + " while copying."); // depends on control dependency: [if], data = [none]
}
rc = null;
} // depends on control dependency: [catch], data = [none]
}
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "expandByteArray returning: [" + getEnglishString(rc) + "]"); // depends on control dependency: [if], data = [none]
}
return rc;
} } |
public class class_name {
public String[] toStringArray() {
String key;
String attrs[] = new String[hashAttributes.size() * 2];
String[] keys = hashAttributes.keySet().toArray(new String[0]);
for (int x = 0; x < keys.length; x++) {
attrs[(x * 2)] = keys[x];
attrs[(x * 2) + 1] = "" + hashAttributes.get(keys[x]);
}
return attrs;
} } | public class class_name {
public String[] toStringArray() {
String key;
String attrs[] = new String[hashAttributes.size() * 2];
String[] keys = hashAttributes.keySet().toArray(new String[0]);
for (int x = 0; x < keys.length; x++) {
attrs[(x * 2)] = keys[x]; // depends on control dependency: [for], data = [x]
attrs[(x * 2) + 1] = "" + hashAttributes.get(keys[x]); // depends on control dependency: [for], data = [x]
}
return attrs;
} } |
public class class_name {
private void setChildDrawableAtIndex(int index, @Nullable Drawable drawable) {
if (drawable == null) {
mFadeDrawable.setDrawable(index, null);
return;
}
drawable = WrappingUtils.maybeApplyLeafRounding(drawable, mRoundingParams, mResources);
getParentDrawableAtIndex(index).setDrawable(drawable);
} } | public class class_name {
private void setChildDrawableAtIndex(int index, @Nullable Drawable drawable) {
if (drawable == null) {
mFadeDrawable.setDrawable(index, null); // depends on control dependency: [if], data = [null)]
return; // depends on control dependency: [if], data = [none]
}
drawable = WrappingUtils.maybeApplyLeafRounding(drawable, mRoundingParams, mResources);
getParentDrawableAtIndex(index).setDrawable(drawable);
} } |
public class class_name {
/**
 * Counts warehouse items matching the given CProduct id and CPInstance UUID,
 * consulting the finder cache first and populating it on a miss. Null and
 * empty UUIDs use dedicated SQL fragments (IS NULL vs. empty-string match).
 *
 * @return the number of matching rows
 */
@Override
public int countByCPI_CPIU(long CProductId, String CPInstanceUuid) {
FinderPath finderPath = FINDER_PATH_COUNT_BY_CPI_CPIU;
Object[] finderArgs = new Object[] { CProductId, CPInstanceUuid };
Long count = (Long)finderCache.getResult(finderPath, finderArgs, this);
if (count == null) {
// Cache miss: build the COUNT query, picking the UUID predicate variant.
StringBundler query = new StringBundler(3);
query.append(_SQL_COUNT_COMMERCEWAREHOUSEITEM_WHERE);
query.append(_FINDER_COLUMN_CPI_CPIU_CPRODUCTID_2);
boolean bindCPInstanceUuid = false;
if (CPInstanceUuid == null) {
query.append(_FINDER_COLUMN_CPI_CPIU_CPINSTANCEUUID_1);
}
else if (CPInstanceUuid.equals("")) {
query.append(_FINDER_COLUMN_CPI_CPIU_CPINSTANCEUUID_3);
}
else {
bindCPInstanceUuid = true;
query.append(_FINDER_COLUMN_CPI_CPIU_CPINSTANCEUUID_2);
}
String sql = query.toString();
Session session = null;
try {
session = openSession();
Query q = session.createQuery(sql);
QueryPos qPos = QueryPos.getInstance(q);
qPos.add(CProductId);
if (bindCPInstanceUuid) {
qPos.add(CPInstanceUuid);
}
count = (Long)q.uniqueResult();
finderCache.putResult(finderPath, finderArgs, count);
}
catch (Exception e) {
// Drop any stale cache entry before propagating the wrapped exception.
finderCache.removeResult(finderPath, finderArgs);
throw processException(e);
}
finally {
closeSession(session);
}
}
return count.intValue();
} }
@Override
public int countByCPI_CPIU(long CProductId, String CPInstanceUuid) {
FinderPath finderPath = FINDER_PATH_COUNT_BY_CPI_CPIU;
Object[] finderArgs = new Object[] { CProductId, CPInstanceUuid };
Long count = (Long)finderCache.getResult(finderPath, finderArgs, this);
if (count == null) {
StringBundler query = new StringBundler(3);
query.append(_SQL_COUNT_COMMERCEWAREHOUSEITEM_WHERE); // depends on control dependency: [if], data = [none]
query.append(_FINDER_COLUMN_CPI_CPIU_CPRODUCTID_2); // depends on control dependency: [if], data = [none]
boolean bindCPInstanceUuid = false;
if (CPInstanceUuid == null) {
query.append(_FINDER_COLUMN_CPI_CPIU_CPINSTANCEUUID_1); // depends on control dependency: [if], data = [none]
}
else if (CPInstanceUuid.equals("")) {
query.append(_FINDER_COLUMN_CPI_CPIU_CPINSTANCEUUID_3); // depends on control dependency: [if], data = [none]
}
else {
bindCPInstanceUuid = true; // depends on control dependency: [if], data = [none]
query.append(_FINDER_COLUMN_CPI_CPIU_CPINSTANCEUUID_2); // depends on control dependency: [if], data = [none]
}
String sql = query.toString();
Session session = null;
try {
session = openSession(); // depends on control dependency: [try], data = [none]
Query q = session.createQuery(sql);
QueryPos qPos = QueryPos.getInstance(q);
qPos.add(CProductId); // depends on control dependency: [try], data = [none]
if (bindCPInstanceUuid) {
qPos.add(CPInstanceUuid); // depends on control dependency: [if], data = [none]
}
count = (Long)q.uniqueResult(); // depends on control dependency: [try], data = [none]
finderCache.putResult(finderPath, finderArgs, count); // depends on control dependency: [try], data = [none]
}
catch (Exception e) {
finderCache.removeResult(finderPath, finderArgs);
throw processException(e);
} // depends on control dependency: [catch], data = [none]
finally {
closeSession(session);
}
}
return count.intValue();
} } |
public class class_name {
/**
 * Parses the string as a signed long in the given radix, returning null
 * instead of throwing on any malformed input (empty string, bare "-",
 * invalid digit, or overflow).
 *
 * Implementation note: the value is accumulated as a NEGATIVE number so that
 * Long.MIN_VALUE (whose magnitude exceeds Long.MAX_VALUE) can be represented
 * during parsing; the sign is flipped at the end for positive inputs.
 *
 * @throws IllegalArgumentException if radix is outside [MIN_RADIX, MAX_RADIX]
 */
@Beta
@Nullable
@CheckForNull
public static Long tryParse(String string, int radix) {
if (checkNotNull(string).isEmpty()) {
return null;
}
if (radix < Character.MIN_RADIX || radix > Character.MAX_RADIX) {
throw new IllegalArgumentException(
"radix must be between MIN_RADIX and MAX_RADIX but was " + radix);
}
boolean negative = string.charAt(0) == '-';
int index = negative ? 1 : 0;
if (index == string.length()) {
// String was just "-" with no digits.
return null;
}
int digit = digit(string.charAt(index++));
if (digit < 0 || digit >= radix) {
return null;
}
long accum = -digit;
// Any accumulator below this would overflow when multiplied by radix.
long cap = Long.MIN_VALUE / radix;
while (index < string.length()) {
digit = digit(string.charAt(index++));
if (digit < 0 || digit >= radix || accum < cap) {
return null;
}
accum *= radix;
if (accum < Long.MIN_VALUE + digit) {
// Subtracting this digit would underflow past Long.MIN_VALUE.
return null;
}
accum -= digit;
}
if (negative) {
return accum;
} else if (accum == Long.MIN_VALUE) {
// +9223372036854775808 is out of range; only the negative form fits.
return null;
} else {
return -accum;
}
} }
@Beta
@Nullable
@CheckForNull
public static Long tryParse(String string, int radix) {
if (checkNotNull(string).isEmpty()) {
return null; // depends on control dependency: [if], data = [none]
}
if (radix < Character.MIN_RADIX || radix > Character.MAX_RADIX) {
throw new IllegalArgumentException(
"radix must be between MIN_RADIX and MAX_RADIX but was " + radix);
}
boolean negative = string.charAt(0) == '-';
int index = negative ? 1 : 0;
if (index == string.length()) {
return null; // depends on control dependency: [if], data = [none]
}
int digit = digit(string.charAt(index++));
if (digit < 0 || digit >= radix) {
return null; // depends on control dependency: [if], data = [none]
}
long accum = -digit;
long cap = Long.MIN_VALUE / radix;
while (index < string.length()) {
digit = digit(string.charAt(index++)); // depends on control dependency: [while], data = [(index]
if (digit < 0 || digit >= radix || accum < cap) {
return null; // depends on control dependency: [if], data = [none]
}
accum *= radix; // depends on control dependency: [while], data = [none]
if (accum < Long.MIN_VALUE + digit) {
return null; // depends on control dependency: [if], data = [none]
}
accum -= digit; // depends on control dependency: [while], data = [none]
}
if (negative) {
return accum; // depends on control dependency: [if], data = [none]
} else if (accum == Long.MIN_VALUE) {
return null; // depends on control dependency: [if], data = [none]
} else {
return -accum; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
// Lazily builds and caches the tensor of time-integrated LIBOR covariances,
// indexed as [simulationTimeStep][liborIndex1][liborIndex2]. Built under a
// dedicated lock; subsequent calls return the cached array.
// NOTE(review): callers receive the cached instance directly — they must not
// mutate it. The return below happens after the lock is released; the writing
// thread sees its own writes, and other threads take the lock first.
@Override
public double[][][] getIntegratedLIBORCovariance() {
synchronized (integratedLIBORCovarianceLazyInitLock) {
if(integratedLIBORCovariance == null) {
TimeDiscretizationInterface liborPeriodDiscretization = getLiborPeriodDiscretization();
TimeDiscretizationInterface simulationTimeDiscretization = getCovarianceModel().getTimeDiscretization();
integratedLIBORCovariance = new double[simulationTimeDiscretization.getNumberOfTimeSteps()][liborPeriodDiscretization.getNumberOfTimeSteps()][liborPeriodDiscretization.getNumberOfTimeSteps()];
// First pass: per-time-step covariance increments (factor-loading dot products times dt).
for(int timeIndex = 0; timeIndex < simulationTimeDiscretization.getNumberOfTimeSteps(); timeIndex++) {
double dt = simulationTimeDiscretization.getTime(timeIndex+1) - simulationTimeDiscretization.getTime(timeIndex);
RandomVariableInterface[][] factorLoadings = new RandomVariableInterface[liborPeriodDiscretization.getNumberOfTimeSteps()][];
// Prefetch factor loadings
for(int componentIndex = 0; componentIndex < liborPeriodDiscretization.getNumberOfTimeSteps(); componentIndex++) {
factorLoadings[componentIndex] = getCovarianceModel().getFactorLoading(timeIndex, componentIndex, null);
}
for(int componentIndex1 = 0; componentIndex1 < liborPeriodDiscretization.getNumberOfTimeSteps(); componentIndex1++) {
RandomVariableInterface[] factorLoadingOfComponent1 = factorLoadings[componentIndex1];
// Sum the libor cross terms (use symmetry)
for(int componentIndex2 = componentIndex1; componentIndex2 < liborPeriodDiscretization.getNumberOfTimeSteps(); componentIndex2++) {
double integratedLIBORCovarianceValue = 0.0;
// NOTE(review): guard uses componentIndex1 only — presumably the entry is left 0
// once the earlier LIBOR's period has started (rate already fixed). TODO confirm.
if(getLiborPeriod(componentIndex1) > getTime(timeIndex)) {
RandomVariableInterface[] factorLoadingOfComponent2 = factorLoadings[componentIndex2];
for(int factorIndex = 0; factorIndex < getNumberOfFactors(); factorIndex++) {
integratedLIBORCovarianceValue += factorLoadingOfComponent1[factorIndex].get(0) * factorLoadingOfComponent2[factorIndex].get(0) * dt;
}
}
// Only the upper triangle is filled here; the second pass mirrors it.
integratedLIBORCovariance[timeIndex][componentIndex1][componentIndex2] = integratedLIBORCovarianceValue;
}
}
}
// Integrate over time (i.e. sum up).
// Second pass: running sum over time steps, and symmetrization of each slice.
for(int timeIndex = 1; timeIndex < simulationTimeDiscretization.getNumberOfTimeSteps(); timeIndex++) {
double[][] prevIntegratedLIBORCovariance = integratedLIBORCovariance[timeIndex-1];
double[][] thisIntegratedLIBORCovariance = integratedLIBORCovariance[timeIndex];
for(int componentIndex1 = 0; componentIndex1 < liborPeriodDiscretization.getNumberOfTimeSteps(); componentIndex1++) {
for(int componentIndex2 = componentIndex1; componentIndex2 < liborPeriodDiscretization.getNumberOfTimeSteps(); componentIndex2++) {
thisIntegratedLIBORCovariance[componentIndex1][componentIndex2] = prevIntegratedLIBORCovariance[componentIndex1][componentIndex2] + thisIntegratedLIBORCovariance[componentIndex1][componentIndex2];
thisIntegratedLIBORCovariance[componentIndex2][componentIndex1] = thisIntegratedLIBORCovariance[componentIndex1][componentIndex2];
}
}
}
}
}
return integratedLIBORCovariance;
} } | public class class_name {
// NOTE(review): dependency-annotated duplicate of getIntegratedLIBORCovariance
// above; the "// depends on ..." suffixes are dataset labels. Code tokens kept
// byte-identical.
@Override
public double[][][] getIntegratedLIBORCovariance() {
synchronized (integratedLIBORCovarianceLazyInitLock) {
if(integratedLIBORCovariance == null) {
TimeDiscretizationInterface liborPeriodDiscretization = getLiborPeriodDiscretization();
TimeDiscretizationInterface simulationTimeDiscretization = getCovarianceModel().getTimeDiscretization();
integratedLIBORCovariance = new double[simulationTimeDiscretization.getNumberOfTimeSteps()][liborPeriodDiscretization.getNumberOfTimeSteps()][liborPeriodDiscretization.getNumberOfTimeSteps()]; // depends on control dependency: [if], data = [none]
for(int timeIndex = 0; timeIndex < simulationTimeDiscretization.getNumberOfTimeSteps(); timeIndex++) {
double dt = simulationTimeDiscretization.getTime(timeIndex+1) - simulationTimeDiscretization.getTime(timeIndex);
RandomVariableInterface[][] factorLoadings = new RandomVariableInterface[liborPeriodDiscretization.getNumberOfTimeSteps()][];
// Prefetch factor loadings
for(int componentIndex = 0; componentIndex < liborPeriodDiscretization.getNumberOfTimeSteps(); componentIndex++) {
factorLoadings[componentIndex] = getCovarianceModel().getFactorLoading(timeIndex, componentIndex, null); // depends on control dependency: [for], data = [componentIndex]
}
for(int componentIndex1 = 0; componentIndex1 < liborPeriodDiscretization.getNumberOfTimeSteps(); componentIndex1++) {
RandomVariableInterface[] factorLoadingOfComponent1 = factorLoadings[componentIndex1];
// Sum the libor cross terms (use symmetry)
for(int componentIndex2 = componentIndex1; componentIndex2 < liborPeriodDiscretization.getNumberOfTimeSteps(); componentIndex2++) {
double integratedLIBORCovarianceValue = 0.0;
if(getLiborPeriod(componentIndex1) > getTime(timeIndex)) {
RandomVariableInterface[] factorLoadingOfComponent2 = factorLoadings[componentIndex2];
for(int factorIndex = 0; factorIndex < getNumberOfFactors(); factorIndex++) {
integratedLIBORCovarianceValue += factorLoadingOfComponent1[factorIndex].get(0) * factorLoadingOfComponent2[factorIndex].get(0) * dt; // depends on control dependency: [for], data = [factorIndex]
}
}
integratedLIBORCovariance[timeIndex][componentIndex1][componentIndex2] = integratedLIBORCovarianceValue; // depends on control dependency: [for], data = [componentIndex2]
}
}
}
// Integrate over time (i.e. sum up).
for(int timeIndex = 1; timeIndex < simulationTimeDiscretization.getNumberOfTimeSteps(); timeIndex++) {
double[][] prevIntegratedLIBORCovariance = integratedLIBORCovariance[timeIndex-1];
double[][] thisIntegratedLIBORCovariance = integratedLIBORCovariance[timeIndex];
for(int componentIndex1 = 0; componentIndex1 < liborPeriodDiscretization.getNumberOfTimeSteps(); componentIndex1++) {
for(int componentIndex2 = componentIndex1; componentIndex2 < liborPeriodDiscretization.getNumberOfTimeSteps(); componentIndex2++) {
thisIntegratedLIBORCovariance[componentIndex1][componentIndex2] = prevIntegratedLIBORCovariance[componentIndex1][componentIndex2] + thisIntegratedLIBORCovariance[componentIndex1][componentIndex2]; // depends on control dependency: [for], data = [componentIndex2]
thisIntegratedLIBORCovariance[componentIndex2][componentIndex1] = thisIntegratedLIBORCovariance[componentIndex1][componentIndex2]; // depends on control dependency: [for], data = [componentIndex2]
}
}
}
}
}
return integratedLIBORCovariance;
} } |
public class class_name {
/**
 * Invokes the given proxy script against the HTTP message, dispatching to its
 * request or response hook. Script failures are reported via the script's
 * writer and never abort proxying.
 */
public boolean invokeProxyScript(ScriptWrapper script, HttpMessage msg, boolean request) {
	validateScriptType(script, TYPE_PROXY);
	Writer writer = getWriters(script);
	try {
		// No enabled-check needed here: proxy scripts can only be invoked manually.
		ProxyScript proxyScript = this.getInterface(script, ProxyScript.class);
		if (proxyScript == null) {
			handleUnspecifiedScriptError(script, writer, Constant.messages.getString("script.interface.proxy.error"));
		} else if (request) {
			return proxyScript.proxyRequest(msg);
		} else {
			return proxyScript.proxyResponse(msg);
		}
	} catch (Exception e) {
		handleScriptException(script, writer, e);
	}
	// Return true so the message is still forwarded; returning false on script
	// errors would break all proxying.
	return true;
} } | public class class_name {
// NOTE(review): dependency-annotated duplicate of invokeProxyScript above; the
// "// depends on ..." lines are dataset labels. Code tokens kept byte-identical.
public boolean invokeProxyScript(ScriptWrapper script, HttpMessage msg, boolean request) {
validateScriptType(script, TYPE_PROXY);
Writer writer = getWriters(script);
try {
// Dont need to check if enabled as it can only be invoked manually
ProxyScript s = this.getInterface(script, ProxyScript.class);
if (s != null) {
if (request) {
return s.proxyRequest(msg);
// depends on control dependency: [if], data = [none]
} else {
return s.proxyResponse(msg);
// depends on control dependency: [if], data = [none]
}
} else {
handleUnspecifiedScriptError(script, writer, Constant.messages.getString("script.interface.proxy.error"));
}
} catch (Exception e) {
handleScriptException(script, writer, e);
}
// Return true so that the request is submitted - if we returned false all proxying would fail on script errors
return true;
} } |
public class class_name {
/**
 * Performs the relational many-to-one mapping for {@code source} without
 * control checks; any failure is handed to {@code logAndReturnNull}, so the
 * caller receives {@code null} instead of an exception.
 */
public <S> T manyToOneWithoutControl(final S source) {
	try {
		return this.<T, S>getJMapper(relationalManyToOneMapper, source).getDestinationWithoutControl(source);
	} catch (Exception e) {
		// Best-effort contract: report the failure and yield null.
		return logAndReturnNull(e);
	}
} } | public class class_name {
// NOTE(review): dependency-annotated duplicate of manyToOneWithoutControl above;
// the "// depends on ..." lines are dataset labels. Code tokens kept byte-identical.
public <S> T manyToOneWithoutControl(final S source) {
try{ return this.<T,S>getJMapper(relationalManyToOneMapper,source).getDestinationWithoutControl(source); }
// depends on control dependency: [try], data = [none]
catch (Exception e) { return logAndReturnNull(e); }
// depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Marshals the given {@code StopTrainingDocumentClassifierRequest} onto the
 * supplied {@code ProtocolMarshaller}.
 *
 * @throws SdkClientException if the request is {@code null} or marshalling fails
 */
public void marshall(StopTrainingDocumentClassifierRequest stopTrainingDocumentClassifierRequest, ProtocolMarshaller protocolMarshaller) {
	// Fail fast on a null request rather than NPE-ing inside the marshaller.
	if (stopTrainingDocumentClassifierRequest == null) {
		throw new SdkClientException("Invalid argument passed to marshall(...)");
	}
	try {
		// The only bound member of this request is the document classifier ARN.
		protocolMarshaller.marshall(stopTrainingDocumentClassifierRequest.getDocumentClassifierArn(), DOCUMENTCLASSIFIERARN_BINDING);
	} catch (Exception e) {
		// Wrap any marshalling failure in the SDK client exception, keeping the cause.
		throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
	}
} } | public class class_name {
// NOTE(review): dependency-annotated duplicate of marshall above; the
// "// depends on ..." suffixes are dataset labels. Code tokens kept byte-identical.
public void marshall(StopTrainingDocumentClassifierRequest stopTrainingDocumentClassifierRequest, ProtocolMarshaller protocolMarshaller) {
if (stopTrainingDocumentClassifierRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(stopTrainingDocumentClassifierRequest.getDocumentClassifierArn(), DOCUMENTCLASSIFIERARN_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Creates (or returns the previously created) RawFrameGrabber for this device.
 *
 * @param w      requested capture width
 * @param h      requested capture height
 * @param input  video input index
 * @param std    video standard
 * @param format the image format; must not be null
 * @return the device's RawFrameGrabber
 * @throws V4L4JException if the grabber cannot be initialised
 * @throws StateException if a different kind of FrameGrabber already exists
 */
public RawFrameGrabber getRawFrameGrabber(int w, int h, int input, int std,
		ImageFormat format) throws V4L4JException{
	if(format==null)
		throw new ImageFormatException("The image format can not be null");
	synchronized(this){
		if(fg==null) {
			state.get();
			fg = new RawFrameGrabber(deviceInfo, v4l4jObject, w, h, input,
					std, findTuner(input), format, threadFactory);
			try {
				fg.init();
			} catch (V4L4JException ve){
				// Roll back the partially created grabber before rethrowing.
				fg = null;
				state.put();
				throw ve;
			} catch (StateException se){
				fg = null;
				state.put();
				throw se;
			} catch (Throwable t){
				fg = null;
				state.put();
				throw new V4L4JException("Error", t);
			}
			return (RawFrameGrabber) fg;
		} else {
			// BUGFIX: this previously read fg.getClass().isInstance(RawFrameGrabber.class),
			// which asks whether the Class object RawFrameGrabber.class is an instance of
			// fg's runtime class — always false here — so a second call always threw
			// StateException even when a RawFrameGrabber already existed. Test fg itself.
			if(fg instanceof RawFrameGrabber)
				return (RawFrameGrabber) fg;
			else {
				// NOTE(review): put() without a matching get() on this path mirrors the
				// original code — verify the state accounting is intended.
				state.put();
				throw new StateException("Another FrameGrabber object already "
						+"exists");
			}
		}
	}
} } | public class class_name {
// NOTE(review): dependency-annotated duplicate of getRawFrameGrabber above; the
// "// depends on ..." suffixes are dataset labels, so code tokens are preserved
// byte-identical. Beware: fg.getClass().isInstance(RawFrameGrabber.class) below
// asks whether the Class object is an instance of fg's class and is always false
// here — it looks like it should be `fg instanceof RawFrameGrabber`.
public RawFrameGrabber getRawFrameGrabber(int w, int h, int input, int std,
ImageFormat format) throws V4L4JException{
if(format==null)
throw new ImageFormatException("The image format can not be null");
synchronized(this){
if(fg==null) {
state.get(); // depends on control dependency: [if], data = [none]
fg = new RawFrameGrabber(deviceInfo, v4l4jObject, w, h, input,
std, findTuner(input), format, threadFactory); // depends on control dependency: [if], data = [none]
try {
fg.init(); // depends on control dependency: [try], data = [none]
} catch (V4L4JException ve){
fg = null;
state.put();
throw ve;
} catch (StateException se){ // depends on control dependency: [catch], data = [none]
fg = null;
state.put();
throw se;
} catch (Throwable t){ // depends on control dependency: [catch], data = [none]
fg = null;
state.put();
throw new V4L4JException("Error", t);
} // depends on control dependency: [catch], data = [none]
return (RawFrameGrabber) fg; // depends on control dependency: [if], data = [none]
} else {
if(fg.getClass().isInstance(RawFrameGrabber.class))
return (RawFrameGrabber) fg;
else {
state.put(); // depends on control dependency: [if], data = [none]
throw new StateException("Another FrameGrabber object already "
+"exists");
}
}
}
} } |
public class class_name {
/**
 * Computes the outer product of this factor with {@code other}. The two
 * factors must not share any variables. A fast tensor-based product is used
 * when this factor's variable numbers all precede the other's; otherwise all
 * outcome pairs are enumerated explicitly.
 */
public DiscreteFactor outerProduct(Factor other) {
	Preconditions.checkArgument(getVars().intersection(other.getVars()).size() == 0);
	DiscreteFactor otherDiscrete = other.coerceToDiscrete();
	// Fast path: usable when either side has no variables, or every variable
	// number of this factor is smaller than the other's first variable number.
	int[] thisVarNums = getVars().getVariableNumsArray();
	int[] otherVarNums = other.getVars().getVariableNumsArray();
	boolean tensorProductApplies = thisVarNums.length == 0 || otherVarNums.length == 0
			|| thisVarNums[thisVarNums.length - 1] < otherVarNums[0];
	if (tensorProductApplies) {
		return new TableFactor(getVars().union(other.getVars()),
				getWeights().outerProduct(otherDiscrete.getWeights()));
	}
	// Slow path: enumerate every pair of outcomes and multiply probabilities.
	// (Tensors currently don't support all outer products.)
	TableFactorBuilder builder = new TableFactorBuilder(getVars().union(other.getVars()),
			SparseTensorBuilder.getFactory());
	for (Iterator<Outcome> it = outcomeIterator(); it.hasNext(); ) {
		Outcome mine = it.next();
		// A fresh iterator over the other factor's outcomes per outer outcome.
		for (Iterator<Outcome> otherIt = otherDiscrete.outcomeIterator(); otherIt.hasNext(); ) {
			Outcome theirs = otherIt.next();
			builder.setWeight(mine.getAssignment().union(theirs.getAssignment()),
					mine.getProbability() * theirs.getProbability());
		}
	}
	return builder.build();
} } | public class class_name {
// NOTE(review): dependency-annotated duplicate of outerProduct above; the
// "// depends on ..." suffixes are dataset labels. Code tokens kept byte-identical.
public DiscreteFactor outerProduct(Factor other) {
Preconditions.checkArgument(getVars().intersection(other.getVars()).size() == 0);
DiscreteFactor otherAsDiscrete = other.coerceToDiscrete();
// See if the fast, tensor outer product implementation is usable.
int[] myDims = getVars().getVariableNumsArray();
int[] otherDims = other.getVars().getVariableNumsArray();
if (myDims.length == 0 || otherDims.length == 0 || myDims[myDims.length - 1] < otherDims[0]) {
return new TableFactor(getVars().union(other.getVars()),
getWeights().outerProduct(otherAsDiscrete.getWeights())); // depends on control dependency: [if], data = [none]
}
// This implementation is slow, but Tensors currently don't support all
// outer products.
TableFactorBuilder builder = new TableFactorBuilder(getVars().union(other.getVars()),
SparseTensorBuilder.getFactory());
Iterator<Outcome> myIter = outcomeIterator();
while (myIter.hasNext()) {
Outcome myOutcome = myIter.next();
Iterator<Outcome> otherIter = otherAsDiscrete.outcomeIterator();
while (otherIter.hasNext()) {
Outcome otherOutcome = otherIter.next();
builder.setWeight(myOutcome.getAssignment().union(otherOutcome.getAssignment()),
myOutcome.getProbability() * otherOutcome.getProbability()); // depends on control dependency: [while], data = [none]
}
}
return builder.build();
} } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.