id
stringlengths 17
20
| content
stringlengths 45
12.2k
| max_stars_repo_path
stringlengths 55
167
|
|---|---|---|
logbench-o_data_1
|
/**
 * Prepares the test fixture: creates a "REPLY." destination (topic or queue,
 * depending on the {@code topic} flag) and a producer on it.
 */
protected void setUp() throws Exception {
    super.setUp();
    final Destination replyDestination = topic
            ? receiveSession.createTopic("REPLY." + getSubject())
            : receiveSession.createQueue("REPLY." + getSubject());
    replyProducer = receiveSession.createProducer(replyDestination);
    LOG.info("Created replyProducer: " + replyProducer);
}
|
LogBench/LogBench-O_prefix_1point/activemq_PublishOnTopicConsumedMessageTest_setUp.java
|
logbench-o_data_2
|
@Override
public void closeImpl() throws SerialPortException {
    // Close the underlying socket, translating any IOException into the
    // serial-port abstraction's exception type.
    try {
        this.socket.close();
    } catch (IOException e) {
        LOG.error(e.getMessage(), e);
        // NOTE(review): only the message is propagated; the IOException cause is
        // dropped — confirm SerialPortException has no (String, Throwable) ctor.
        throw new SerialPortException(e.getMessage());
    }
}
|
LogBench/LogBench-O_prefix_1point/ma-core-public_SerialSocketBridge_closeImpl.java
|
logbench-o_data_3
|
@Override
public boolean canSubscribe(Node node, JID owner, JID subscriber) {
    // Roster access model: grant subscription to admins/owners of the node, or
    // to local users who can see at least one of the node's allowed roster groups.
    // Let node owners and sysadmins always subscribe to the node
    if (node.isAdmin(owner)) {
        return true;
    }
    for (JID nodeOwner : node.getOwners()) {
        if (nodeOwner.equals(owner)) {
            return true;
        }
    }
    // Check that the subscriber is a local user
    XMPPServer server = XMPPServer.getInstance();
    if (server.isLocal(owner)) {
        GroupManager gMgr = GroupManager.getInstance();
        Collection<String> nodeGroups = node.getRosterGroupsAllowed();
        for (String groupName : nodeGroups) {
            try {
                Group group = gMgr.getGroup(groupName);
                // access allowed if the node group is visible to the subscriber
                if (server.getRosterManager().isGroupVisible(group, owner)) {
                    return true;
                }
            } catch (GroupNotFoundException gnfe) {
                // ignore: a configured group that no longer exists grants no access
            }
        }
    } else {
        // Subscriber is a remote user. This should never happen.
        Log.warn("Node with access model Roster has a remote user as subscriber: {}", node.getUniqueIdentifier());
    }
    // No matching rule: deny the subscription.
    return false;
}
|
LogBench/LogBench-O_prefix_1point/openfire_RosterAccess_canSubscribe.java
|
logbench-o_data_4
|
/**
 * Method executed after test run is finished.
 *
 * It runs receiving result parameter, which is the result from finished
 * test run. If the string "Caused by:" is found inside result it means
 * that some exception was triggered in some test execution, and
 * {@link #testRunFinished(Result)} throws an {@link IllegalStateException}
 * to make explicit that some exceptions occurred during the tests.
 *
 * Sometimes {@link #testRunFinished(Result)} throws the
 * {@link IllegalStateException} but the test results show no failures or
 * exception stack traces. That means that some exceptions were swallowed
 * or handled in test execution but their log output was added to the result
 * anyway. See {@link https://github.com/zerocracy/farm/issues/1227
 * #1227} for more information about this.
 *
 * @param result Test run results
 * @throws Exception When threads are still alive or when are exceptions
 *  in the test log
 */
@Override
public void testRunFinished(final Result result) throws Exception {
    super.testRunFinished(result);
    // Collect worker threads (by name prefix) that should have terminated by now.
    final Collection<Thread> alive = new Filtered<>(thread -> thread.getName().startsWith("Terminator-") || thread.getName().startsWith("AsyncFlush-") || thread.getName().startsWith("RvFarm-"), Thread.getAllStackTraces().keySet());
    if (!alive.isEmpty()) {
        for (final Thread thread : alive) {
            Logger.warn(this, "Thread is still alive: %d/%s (%b)", thread.getId(), thread.getName(), thread.isDaemon());
        }
        throw new IllegalStateException(String.format("%d threads are still alive, it's a bug, see above", alive.size()));
    }
    // Scan the captured stdout for exception markers.
    final String stdout = new String(this.log.toByteArray(), StandardCharsets.UTF_8);
    if (stdout.contains("Caused by: ")) {
        throw new IllegalStateException("There were some exceptions in the log above");
    }
}
|
LogBench/LogBench-O_prefix_1point/farm_TestListener_testRunFinished.java
|
logbench-o_data_5
|
@Test(timeout = 60000)
public void testCRLF11() throws Exception {
    // Verify every transport connector accepts a STOMP 1.1 CONNECT frame
    // that uses CRLF ("\r\n") line endings instead of bare LF.
    for (TransportConnector connector : brokerService.getTransportConnectors()) {
        LOG.info("try: " + connector.getConnectUri());
        int port = connector.getConnectUri().getPort();
        StompConnection stompConnection = new StompConnection();
        stompConnection.open(createSocket(port));
        String frame = "CONNECT\r\naccept-version:1.1\r\n\r\n" + Stomp.NULL;
        stompConnection.sendFrame(frame);
        frame = stompConnection.receiveFrame();
        LOG.info("response from: " + connector.getConnectUri() + ", " + frame);
        // A well-behaved broker must answer with a CONNECTED frame.
        assertTrue(frame.startsWith("CONNECTED"));
        stompConnection.close();
    }
}
|
LogBench/LogBench-O_prefix_1point/activemq_StompTelnetTest_testCRLF11.java
|
logbench-o_data_6
|
@Override
public boolean isActive(FeatureState featureState, FeatureUser user) {
    // Evaluate a user-supplied script (language + source taken from the feature's
    // parameters) to decide whether the feature is active for the given user.
    String lang = featureState.getParameter(PARAM_LANG);
    String script = featureState.getParameter(PARAM_SCRIPT);
    ScriptEngine engine = engineManager.getEngineByName(lang);
    if (engine == null) {
        log.error("Could not find script engine for: " + lang);
        return false;
    }
    // GraalVM-JS compatibility flags: allow the script host access (so it can call
    // methods on the bound "user"/"date" objects) in Nashorn-compatible mode.
    Bindings bindings = engine.getBindings(ScriptContext.ENGINE_SCOPE);
    bindings.put("polyglot.js.allowHostAccess", true);
    bindings.put("polyglot.js.allowHostClassLookup", (Predicate<String>) s -> true);
    bindings.put("polyglot.js.nashorn-compat", true);
    engine.put("user", user);
    engine.put("date", new Date());
    try {
        Object result = engine.eval(script);
        if (result instanceof Boolean) {
            return (Boolean) result;
        }
    } catch (ScriptException e) {
        // FIX: pass the exception itself so the stack trace is preserved in the
        // log (previously only e.getMessage() was appended to the message).
        log.error("Could not evaluate script for feature " + featureState.getFeature().name() + ": " + e.getMessage(), e);
    }
    // Non-Boolean results and script errors are treated as "feature inactive".
    return false;
}
|
LogBench/LogBench-O_prefix_1point/togglz_ScriptEngineActivationStrategy_isActive.java
|
logbench-o_data_7
|
/**
 * Copies the pipe payload into the wire buffer.
 *
 * @return the number of bytes written, or 0 if the remaining buffer space
 *         cannot hold the payload (nothing is written in that case)
 */
@Override
protected int writeDataWireFormat(byte[] dst, int dstIndex) {
    final int remaining = dst.length - dstIndex;
    if (remaining < this.pipeDataLen) {
        log.debug("TransCallNamedPipe data too long for buffer");
        return 0;
    }
    System.arraycopy(this.pipeData, this.pipeDataOff, dst, dstIndex, this.pipeDataLen);
    return this.pipeDataLen;
}
|
LogBench/LogBench-O_prefix_1point/jcifs-ng_TransCallNamedPipe_writeDataWireFormat.java
|
logbench-o_data_8
|
/**
 * Serializes the notification to JSON bytes.
 *
 * @return the JSON payload, or {@code null} when serialization fails
 *         (the failure is logged)
 */
private byte[] toJsonAsBytes(Notification notification) {
    byte[] payload;
    try {
        payload = mapper.writeValueAsBytes(notification);
    } catch (JsonProcessingException e) {
        log.error(e.toString(), e);
        payload = null;
    }
    return payload;
}
|
LogBench/LogBench-O_prefix_1point/togglz_Notifier_toJsonAsBytes.java
|
logbench-o_data_9
|
// GBean lifecycle hook: nothing to tear down, just record that we stopped.
public void doStop() throws Exception {
    log.debug("Stopped StaticMember gbean.");
}
|
LogBench/LogBench-O_prefix_1point/geronimo_StaticMemberGBean_doStop.java
|
logbench-o_data_10
|
@FXML
public void proceed() {
    // Let the user pick a masterkey file; completes the pending result only
    // when a file was actually chosen (dialog not cancelled).
    LOG.trace("proceed()");
    FileChooser fileChooser = new FileChooser();
    fileChooser.setTitle(resourceBundle.getString("unlock.chooseMasterkey.filePickerTitle"));
    fileChooser.getExtensionFilters().add(new FileChooser.ExtensionFilter(resourceBundle.getString("unlock.chooseMasterkey.filePickerMimeDesc"), CRYPTOMATOR_FILENAME_GLOB));
    File masterkeyFile = fileChooser.showOpenDialog(window);
    if (masterkeyFile != null) {
        LOG.debug("Chose masterkey file: {}", masterkeyFile);
        result.complete(masterkeyFile.toPath());
    }
}
|
LogBench/LogBench-O_prefix_1point/cryptomator_ChooseMasterkeyFileController_proceed.java
|
logbench-o_data_11
|
@Override
public List<JATETerm> execute(Collection<String> candidates) throws JATEException {
    // Compute RAKE scores for the candidate terms using word frequencies,
    // term frequencies and the term-component index features.
    AbstractFeature feature = features.get(FrequencyTermBased.class.getName() + SUFFIX_WORD);
    validateFeature(feature, FrequencyTermBased.class);
    FrequencyTermBased fFeatureWords = (FrequencyTermBased) feature;
    AbstractFeature feature2 = features.get(FrequencyTermBased.class.getName() + SUFFIX_TERM);
    validateFeature(feature2, FrequencyTermBased.class);
    FrequencyTermBased fFeatureTerms = (FrequencyTermBased) feature2;
    AbstractFeature tciFeature = features.get(TermComponentIndex.class.getName());
    validateFeature(tciFeature, TermComponentIndex.class);
    TermComponentIndex fFeatureTermCompIndex = (TermComponentIndex) tciFeature;
    int cores = Runtime.getRuntime().availableProcessors();
    int maxPerWorker = candidates.size() / cores;
    if (maxPerWorker == 0)
        maxPerWorker = 50;
    StringBuilder msg = new StringBuilder("Beginning computing RAKE values, cores=");
    msg.append(cores).append(" total terms=" + candidates.size()).append(",").append(" max terms per worker thread=").append(maxPerWorker);
    LOG.info(msg.toString());
    ForkJoinPool forkJoinPool = new ForkJoinPool(cores);
    // NOTE(review): maxPerWorker is computed and logged above, but the worker is
    // constructed with Integer.MAX_VALUE, so the task is never actually split —
    // confirm whether Integer.MAX_VALUE is intentional here.
    RAKEWorker worker = new RAKEWorker(new ArrayList<>(candidates), Integer.MAX_VALUE, fFeatureWords, fFeatureTerms, fFeatureTermCompIndex);
    List<JATETerm> result = forkJoinPool.invoke(worker);
    Collections.sort(result);
    LOG.info("Complete");
    return result;
}
|
LogBench/LogBench-O_prefix_1point/jate_RAKE_execute.java
|
logbench-o_data_12
|
/**
 * Churn test: repeatedly creates and starts connections, closing the
 * accumulated batch every 100 iterations and once more at the end.
 */
public void testPerformance() throws Exception {
    final ConnectionFactory factory = createConnectionFactory();
    final List<Connection> connections = new ArrayList<Connection>();
    for (int count = 0; count < CONNECTION_COUNT; count++) {
        final Connection connection = factory.createConnection();
        connection.start();
        connections.add(connection);
        LOG.info("Created " + count);
        if (count % 100 == 0) {
            closeConnections(connections);
        }
    }
    closeConnections(connections);
}
|
LogBench/LogBench-O_prefix_1point/activemq_ConnectionChurnTest_testPerformance.java
|
logbench-o_data_13
|
@Test
void testEdgeMac() {
    // Resolve a pinned Edge driver version for macOS and verify that both the
    // driver binary and its companion dylib are present after setup.
    String libName = "libc++.dylib";
    String driverVersion = "87.0.664.75";
    WebDriverManager wdm = WebDriverManager.edgedriver().driverVersion(driverVersion).mac();
    wdm.setup();
    String downloadedDriverPath = wdm.getDownloadedDriverPath();
    log.debug("The downloaded driver path is: {}", downloadedDriverPath);
    File driver = new File(downloadedDriverPath);
    assertThat(driver).exists();
    // The dylib is expected alongside the driver binary.
    File lib = new File(driver.getParent(), libName);
    assertThat(lib).exists();
}
|
LogBench/LogBench-O_prefix_1point/webdrivermanager_EdgeMacTest_testEdgeMac.java
|
logbench-o_data_14
|
// Builds the multicast command channel: configures the socket (loopback, TTL,
// read timeout), joins the multicast group, and wraps it in a datagram channel.
protected CommandChannel createCommandChannel() throws IOException {
    socket = new MulticastSocket(mcastPort);
    socket.setLoopbackMode(loopBackMode);
    socket.setTimeToLive(timeToLive);
    LOG.debug("Joining multicast address: " + getMulticastAddress());
    socket.joinGroup(getMulticastAddress());
    // Use the keep-alive interval as the socket read timeout.
    socket.setSoTimeout((int) keepAliveInterval);
    return new CommandDatagramSocket(this, getWireFormat(), getDatagramSize(), getTargetAddress(), createDatagramHeaderMarshaller(), getSocket());
}
|
LogBench/LogBench-O_prefix_1point/activemq_MulticastTransport_createCommandChannel.java
|
logbench-o_data_15
|
/**
 * A Windows short-style (8.3) path containing '~' must be stored and
 * resolved without the tilde being expanded or mangled.
 */
@Test
void testCachePathContainsTilde() {
    final String requestedPath = "C:\\user\\abcdef~1\\path";
    wdm.config().setCachePath(requestedPath);
    final String resolvedPath = wdm.config().getCachePath();
    log.info("Using {} got {}", requestedPath, resolvedPath);
    assertThat(resolvedPath).startsWith(requestedPath);
}
|
LogBench/LogBench-O_prefix_1point/webdrivermanager_CustomCacheTest_testCachePathContainsTilde.java
|
logbench-o_data_16
|
@Test(groups = { "wso2.dss" }, description = "insert timestamp in America/New_York timezone and UTC timezone, retrieve all and compare whether they are different", alwaysRun = true)
public void insertAndTestTimeStampValuesInDbTest() throws Exception {
    // Fetch all stored timestamps; the first one must match the expected UTC
    // instant, and every later one must differ from the first (different zones).
    OMElement payload = fac.createOMElement("getTimeStamps", omNs);
    OMElement result = new AxisServiceClient().sendReceive(payload, backendUrl + serviceName, "getTimeStamps");
    Iterator iterator = result.getChildrenWithLocalName("timeStamp");
    String timeStampString = null;
    while (iterator.hasNext()) {
        OMElement timeStamp = (OMElement) iterator.next();
        if (timeStampString == null) {
            // First timestamp: remember it and pin its expected UTC rendering.
            timeStampString = timeStamp.getChildrenWithLocalName("testTimeStamp").next().toString();
            log.info("TimeStamp Recv:" + timeStampString);
            Assert.assertTrue(timeStampString.contains("1970-01-02T05:00:00.000+00:00"));
        } else {
            // Subsequent timestamps must differ from the first one.
            String tempTimeStamp = timeStamp.getChildrenWithLocalName("testTimeStamp").next().toString();
            log.info("Timestamp Comapre:" + timeStampString + "|" + tempTimeStamp);
            Assert.assertFalse(timeStampString.equals(tempTimeStamp));
        }
    }
    Assert.assertNotNull(result, "Response message null ");
    log.debug(result);
    log.info("data service insert different timestamp to the database when the server is in different timezones");
}
|
LogBench/LogBench-O_prefix_1point/micro-integrator_DS1189LeagyTimeStampModeTestCase_insertAndTestTimeStampValuesInDbTest.java
|
logbench-o_data_17
|
/**
 * RTMP status callback: closes the connection once playback reports
 * "NetStream.Play.Stop".
 */
public void onStatus(IConnection conn, ObjectMap<String, Object> status) {
    log.debug("onStatus: {}", status);
    final Object code = status.get("code");
    if ("NetStream.Play.Stop".equals(code.toString())) {
        log.debug("Playback stopped");
        conn.close();
    }
}
|
LogBench/LogBench-O_prefix_1point/red5-server_ClientMethodHander_onStatus.java
|
logbench-o_data_18
|
/**
 * Create the Print Writer output.
 *
 * Appends to the configured log file; if the file cannot be opened, falls
 * back to a no-op writer so callers can keep writing without null checks.
 */
protected void createOut() {
    try {
        out = new PrintWriter(new FileWriter(file, true));
    } catch (IOException e) {
        // Fall back to a sink that discards output; log the failure once.
        out = new PrintWriter(new NullWriter());
        LOG.error("Error while creating process log", e);
    }
}
|
LogBench/LogBench-O_prefix_1point/ma-core-public_BaseIOLog_createOut.java
|
logbench-o_data_19
|
@Override
protected void tearDown() throws Exception {
    // Close the test's connection before the base class tears down the broker.
    connection.close();
    super.tearDown();
    LOG.info("### closed down the test case: " + getName());
}
|
LogBench/LogBench-O_prefix_1point/activemq_MultipleTestsWithEmbeddedBrokerTest_tearDown.java
|
logbench-o_data_20
|
@Override
public void onExecutionStart() throws AutomationFrameworkException {
    // Start the embedded ActiveMQ broker if it is not already running.
    if (!JMSBrokerController.isBrokerStarted()) {
        // FIX: corrected log message typo — was "String JMS Broker...".
        log.info("Starting JMS Broker...");
        activeMqBroker.start();
    }
}
|
LogBench/LogBench-O_prefix_1point/micro-integrator_ActiveMQServerExtension_onExecutionStart.java
|
logbench-o_data_21
|
// Regression test for AMQ-2645: a consumer's onMessage that produces messages
// during failover reconnection must not deadlock on transport-interruption
// processing. Statement order is load-bearing: latches gate each phase.
public void testWaitForTransportInterruptionProcessingHang() throws Exception {
    final ConnectionFactory fac = new ActiveMQConnectionFactory("failover:(" + this.bindAddress + ")");
    final Connection connection = fac.createConnection();
    try {
        final Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        final Queue queue = session.createQueue(QUEUE_NAME);
        final MessageProducer producer = session.createProducer(queue);
        producer.setDeliveryMode(DeliveryMode.PERSISTENT);
        connection.start();
        producer.send(session.createTextMessage("test"));
        // Latches coordinating the listener with the broker restart below.
        final CountDownLatch afterRestart = new CountDownLatch(1);
        final CountDownLatch twoNewMessages = new CountDownLatch(1);
        final CountDownLatch thirdMessageReceived = new CountDownLatch(1);
        final MessageConsumer consumer = session.createConsumer(session.createQueue(QUEUE_NAME));
        consumer.setMessageListener(new MessageListener() {
            public void onMessage(Message message) {
                try {
                    // Hold message processing until the broker has been restarted.
                    afterRestart.await();
                    final TextMessage txtMsg = (TextMessage) message;
                    if (txtMsg.getText().equals("test")) {
                        producer.send(session.createTextMessage("test 1"));
                        TimeUnit.SECONDS.sleep(5);
                        // THIS SECOND send() WILL CAUSE CONSUMER DEADLOCK
                        producer.send(session.createTextMessage("test 2"));
                        LOG.info("Two new messages produced.");
                        twoNewMessages.countDown();
                    } else if (txtMsg.getText().equals("test 3")) {
                        thirdMessageReceived.countDown();
                    }
                } catch (Exception e) {
                    LOG.error(e.toString());
                    throw new RuntimeException(e);
                }
            }
        });
        // Restart the broker to force failover reconnection on the client.
        LOG.info("Stopping broker....");
        broker.stop();
        LOG.info("Creating new broker...");
        broker = createBroker();
        startBroker();
        broker.waitUntilStarted();
        // Release the listener; if it deadlocks, the await below times out.
        afterRestart.countDown();
        assertTrue("Consumer is deadlocked!", twoNewMessages.await(60, TimeUnit.SECONDS));
        producer.send(session.createTextMessage("test 3"));
        assertTrue("Consumer got third message after block", thirdMessageReceived.await(60, TimeUnit.SECONDS));
    } finally {
        broker.stop();
    }
}
|
LogBench/LogBench-O_prefix_1point/activemq_AMQ2645Test_testWaitForTransportInterruptionProcessingHang.java
|
logbench-o_data_22
|
@Test(groups = { "wso2.dss" }, invocationCount = 5, enabled = false)
public void selectOperation() throws AxisFault, XPathExpressionException {
    // Invoke getCustomers on the GSpread-backed data service and sanity-check
    // the XML response shape. (Test is currently disabled via enabled = false.)
    OMFactory fac = OMAbstractFactory.getOMFactory();
    OMNamespace omNs = fac.createOMNamespace("http://ws.wso2.org/dataservice/samples/gspread_sample_service", "ns1");
    OMElement payload = fac.createOMElement("getCustomers", omNs);
    OMElement result = new AxisServiceClient().sendReceive(payload, getServiceUrlHttp(serviceName), "getcustomers");
    log.info("Response : " + result);
    // NOTE(review): indexOf("Customers") == 1 pins "Customers" at string index 1
    // (i.e. immediately after '<'); confirm this is intended rather than >= 0.
    Assert.assertTrue((result.toString().indexOf("Customers") == 1), "Expected Result not found on response message");
    Assert.assertTrue(result.toString().contains("<customerNumber>"), "Expected Result not found on response message");
    Assert.assertTrue(result.toString().contains("</Customer>"), "Expected Result not found on response message");
    log.info("Service Invocation success");
}
|
LogBench/LogBench-O_prefix_1point/micro-integrator_AGSpreadDataServiceTestCase_selectOperation.java
|
logbench-o_data_23
|
/**
 * Consumes text messages from the JNDI-resolved destination until a
 * non-text message arrives, then exits.
 *
 * @param args the queue used by the example
 */
public static void main(String[] args) {
    String destinationName = null;
    Context jndiContext = null;
    ConnectionFactory connectionFactory = null;
    Connection connection = null;
    Session session = null;
    Destination destination = null;
    MessageConsumer consumer = null;
    /*
     * Read destination name from command line and display it.
     */
    if (args.length != 1) {
        LOG.info("Usage: java SimpleConsumer <destination-name>");
        System.exit(1);
    }
    destinationName = args[0];
    LOG.info("Destination name is " + destinationName);
    /*
     * Create a JNDI API InitialContext object
     */
    try {
        jndiContext = new InitialContext();
    } catch (NamingException e) {
        LOG.info("Could not create JNDI API " + "context: " + e.toString());
        System.exit(1);
    }
    /*
     * Look up connection factory and destination.
     */
    try {
        connectionFactory = (ConnectionFactory) jndiContext.lookup("ConnectionFactory");
        destination = (Destination) jndiContext.lookup(destinationName);
    } catch (NamingException e) {
        LOG.info("JNDI API lookup failed: " + e.toString());
        System.exit(1);
    }
    /*
     * Create connection. Create session from connection; false means
     * session is not transacted. Create receiver, then start message
     * delivery. Receive all text messages from destination until a non-text
     * message is received indicating end of message stream. Close
     * connection.
     */
    try {
        connection = connectionFactory.createConnection();
        session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        consumer = session.createConsumer(destination);
        connection.start();
        while (true) {
            // Poll with a 1 ms timeout; null just means "nothing yet", keep looping.
            Message m = consumer.receive(1);
            if (m != null) {
                if (m instanceof TextMessage) {
                    TextMessage message = (TextMessage) m;
                    LOG.info("Reading message: " + message.getText());
                } else {
                    // First non-text message marks the end of the stream.
                    break;
                }
            }
        }
    } catch (JMSException e) {
        LOG.info("Exception occurred: " + e);
    } finally {
        if (connection != null) {
            try {
                connection.close();
            } catch (JMSException e) {
                // best-effort close; nothing useful to do on failure here
            }
        }
    }
}
|
LogBench/LogBench-O_prefix_1point/activemq_SimpleConsumer_main.java
|
logbench-o_data_24
|
// Line-oriented RDF parser: trims each line manually, skips comments and
// too-short lines, parses the rest into triples and hands them to the callback.
// Any failure aborts the parse and is rethrown as a ParserException.
private void doParse(BufferedReader reader, String baseUri, RDFNotation notation, boolean keepBNode, RDFCallback callback) throws ParserException {
    try {
        String line;
        long numLine = 1;
        // Reused across lines; TripleString.read() overwrites its fields.
        TripleString triple = new TripleString();
        while ((line = reader.readLine()) != null) {
            // trim, find start
            int start = 0;
            while (start < line.length()) {
                char c = line.charAt(start);
                if (c != ' ' && c != '\t') {
                    break;
                }
                start++;
            }
            // trim, find end (index of last non-whitespace char)
            int end = line.length() - 1;
            while (end >= 0) {
                char c = line.charAt(end);
                if (c != ' ' && c != '\t') {
                    break;
                }
                end--;
            }
            // check that we have at least one element and this line isn't a comment
            // NOTE(review): "start + 1 < end" requires at least 3 non-blank chars;
            // confirm that 1–2 character lines are intentionally skipped.
            if (start + 1 < end && line.charAt(start) != '#') {
                triple.read(line, start, end);
                if (!triple.hasEmpty()) {
                    // System.out.println(triple);
                    callback.processTriple(triple, 0);
                } else {
                    System.err.println("Warning: Could not parse triple at line " + numLine + ", ignored and not processed.\n" + line);
                }
            }
            numLine++;
        }
        reader.close();
    } catch (Exception e) {
        // Wrap any failure (I/O or parse) preserving the cause.
        log.error("Unexpected exception.", e);
        throw new ParserException(e);
    }
}
|
LogBench/LogBench-O_prefix_1point/hdt-java_RDFParserSimple_doParse.java
|
logbench-o_data_25
|
@Override
public void onAccept(Transport localTransport) {
    // Bridge a newly-accepted local transport to the remote endpoint; on any
    // failure, report the error and best-effort stop the half-built connection.
    ProxyConnection connection = null;
    try {
        Transport remoteTransport = createRemoteTransport(localTransport);
        connection = new ProxyConnection(localTransport, remoteTransport);
        connection.start();
        connections.add(connection);
    } catch (Exception e) {
        onAcceptError(e);
        try {
            if (connection != null) {
                connection.stop();
            }
        } catch (Exception eoc) {
            // Clean-up failure is logged but not propagated.
            LOG.error("Could not close broken connection: ", eoc);
        }
    }
}
|
LogBench/LogBench-O_prefix_1point/activemq_ProxyConnector_onAccept.java
|
logbench-o_data_26
|
// Delegates to the external python policy script to obtain the claims-gathering
// script name. On any failure, the error is recorded against the script and an
// empty string is returned so callers never see an exception.
public String getClaimsGatheringScriptName(CustomScriptConfiguration script, UmaAuthorizationContext context) {
    try {
        log.debug("Executing python 'getClaimsGatheringScriptName' method, script: " + script.getName());
        String result = policyScript(script).getClaimsGatheringScriptName(context);
        log.debug("python 'getClaimsGatheringScriptName' result: " + result);
        return result;
    } catch (Exception ex) {
        log.error("Failed to execute python 'getClaimsGatheringScriptName' method, script: " + script.getName() + ", message: " + ex.getMessage(), ex);
        // Persist the failure on the script entry for later inspection.
        saveScriptError(script.getCustomScript(), ex);
        return "";
    }
}
|
LogBench/LogBench-O_prefix_1point/oxauth_ExternalUmaRptPolicyService_getClaimsGatheringScriptName.java
|
logbench-o_data_27
|
// Creates the GCS bucket described by the given properties (name only).
private void createBucket(BucketProperties bucket) {
    storage.create(BucketInfo.newBuilder(bucket.name).build());
    log.info("bucket {} created", bucket.name);
}
|
LogBench/LogBench-O_prefix_1point/testcontainers-spring-boot_StorageResourcesGenerator_createBucket.java
|
logbench-o_data_28
|
// Builds a broker from an XBean XML configuration resource on the classpath.
protected BrokerService createBroker(String uri) throws Exception {
    LOG.info("Loading broker configuration from the classpath with URI: " + uri);
    return BrokerFactory.createBroker(new URI("xbean:" + uri));
}
|
LogBench/LogBench-O_prefix_1point/activemq_SimpleAuthenticationPluginTest_createBroker.java
|
logbench-o_data_29
|
// Lifecycle-monitor hook: only traces that the module is being unloaded.
public void unloading(Artifact configurationId) {
    log.debug("unloading module: {}", configurationId);
}
|
LogBench/LogBench-O_prefix_1point/geronimo_DebugLoggingLifecycleMonitor_unloading.java
|
logbench-o_data_30
|
/**
 * Reads (and optionally updates) the maps metastore kept in ZooKeeper.
 *
 * Usage: Metastores zkQuorum path [tileTable pointTable] — supplying the two
 * table names issues an update after the read.
 */
public static void main(String[] args) throws Exception {
    if (args.length == 2 || args.length == 4) {
        String zkQuorum = args[0];
        String zkPath = args[1];
        // FIX: the format string was concatenated with zkQuorum ("...{}]" + zkQuorum),
        // so the first {} placeholder was never filled and zkQuorum was glued onto
        // the message. Pass both values as parameters instead.
        LOG.info("Reading ZK[{}], path[{}]", zkQuorum, zkPath);
        MapMetastore meta = newZookeeperMapsMeta(zkQuorum, 1000, zkPath);
        LOG.info("{}", meta.read());
        if (args.length == 4) {
            String tileTable = args[2];
            String pointTable = args[3];
            LOG.info("Update tileTable[{}], pointTable[{}]", tileTable, pointTable);
            meta.update(new MapTables(tileTable, pointTable));
            LOG.info("Done.");
        }
    } else {
        LOG.error("Usage (supply table names to issue update): Metastores zkQuorum path [pointTable tileTable]");
    }
}
|
LogBench/LogBench-O_prefix_1point/maps_Metastores_main.java
|
logbench-o_data_31
|
@Override
public void startup() {
    // Debug-delegate: trace the call (indented per nesting depth), then forward.
    // NOTE(review): warn level looks high for trace output — confirm intended.
    log.warn(buildTab() + "startup");
    delegate.startup();
}
|
LogBench/LogBench-O_prefix_1point/pronghorn_StreamingReadVisitorDebugDelegate_startup.java
|
logbench-o_data_32
|
@AfterThrowing(value = "this(io.splitet.core.api.CommandHandler) && @annotation(command)", throwing = "exception")
public void afterThrowing(Command command, Exception exception) {
    // When a command handler throws, mark the operation as failed in Kafka;
    // always clear the per-thread command context, even if that fails too.
    try {
        log.info("afterThrowing Command: " + exception);
        kafkaOperationRepository.failOperation(operationContext.getCommandContext(), event -> event.setEventState(EventState.TXN_FAILED));
    } finally {
        operationContext.clearCommandContext();
    }
}
|
LogBench/LogBench-O_prefix_1point/splitet_CommandExecutionInterceptor_afterThrowing.java
|
logbench-o_data_33
|
/**
 * Publisher loop: waits until publishing is enabled, then drains the queue,
 * pushing each RTMP message to the publish stream; unpublishes when done.
 */
public void run() {
    // Spin-wait (100 ms polls) until the publishing flag flips.
    while (!publishing) {
        try {
            Thread.sleep(100L);
        } catch (InterruptedException e) {
            // ignored: loop re-checks the flag
        }
    }
    do {
        try {
            RTMPMessage message = que.poll();
            if (message != null && client != null) {
                client.publishStreamData(streamId, message);
            } else {
                // Nothing queued yet — brief pause before polling again.
                Thread.sleep(3L);
            }
        } catch (Exception e1) {
            // FIX: pass the throwable as the second argument instead of a "{}"
            // parameter so SLF4J records the full stack trace.
            log.warn("streaming error", e1);
        }
    } while (!que.isEmpty());
    client.unpublish(streamId);
}
|
LogBench/LogBench-O_prefix_1point/red5-server_FBLiveConnectTest_run.java
|
logbench-o_data_34
|
/**
 * Sets up a test where the producer and consumer have their own connection.
 *
 * @see junit.framework.TestCase#setUp()
 */
protected void setUp() throws Exception {
    // Start from a clean journal directory so earlier runs don't interfere.
    File journalFile = new File(JOURNAL_ROOT);
    recursiveDelete(journalFile);
    // Create broker from resource
    LOG.info("Creating broker... ");
    broker = createBroker("org/apache/activemq/usecases/activemq.xml");
    LOG.info("Success");
    super.setUp();
}
|
LogBench/LogBench-O_prefix_1point/activemq_PublishOnTopicConsumerMessageUsingActivemqXMLTest_setUp.java
|
logbench-o_data_35
|
// Completes a U2F device registration: validates client data and signature,
// builds the DeviceRegistration entry, and persists it either under the user
// (two-step flow) or as a one-step registration. Returns CANCELED when the
// client data type indicates the user aborted the ceremony.
public DeviceRegistrationResult finishRegistration(RegisterRequestMessage requestMessage, RegisterResponse response, String userInum, Set<String> facets) throws BadInputException {
    RegisterRequest request = requestMessage.getRegisterRequest();
    String appId = request.getAppId();
    ClientData clientData = response.getClientData();
    // Validate the client data (type, challenge, facet) and the registration signature.
    clientDataValidationService.checkContent(clientData, RawRegistrationService.SUPPORTED_REGISTER_TYPES, request.getChallenge(), facets);
    RawRegisterResponse rawRegisterResponse = rawRegistrationService.parseRawRegisterResponse(response.getRegistrationData());
    rawRegistrationService.checkSignature(appId, clientData, rawRegisterResponse);
    Date now = new GregorianCalendar(TimeZone.getTimeZone("UTC")).getTime();
    DeviceRegistration deviceRegistration = rawRegistrationService.createDevice(userInum, rawRegisterResponse);
    deviceRegistration.setStatus(DeviceRegistrationStatus.ACTIVE);
    deviceRegistration.setApplication(appId);
    deviceRegistration.setCreationDate(now);
    int keyHandleHashCode = deviceRegistrationService.getKeyHandleHashCode(rawRegisterResponse.getKeyHandle());
    deviceRegistration.setKeyHandleHashCode(keyHandleHashCode);
    // NOTE(review): the id is a millisecond timestamp — confirm collisions under
    // concurrent registrations are acceptable.
    final String deviceRegistrationId = String.valueOf(System.currentTimeMillis());
    deviceRegistration.setId(deviceRegistrationId);
    // Optional device metadata arrives base64url-encoded JSON.
    String responseDeviceData = response.getDeviceData();
    if (StringHelper.isNotEmpty(responseDeviceData)) {
        try {
            String responseDeviceDataDecoded = new String(Base64Util.base64urldecode(responseDeviceData));
            DeviceData deviceData = ServerUtil.jsonMapperWithWrapRoot().readValue(responseDeviceDataDecoded, DeviceData.class);
            deviceRegistration.setDeviceData(deviceData);
        } catch (Exception ex) {
            throw new BadInputException(String.format("Device data is invalid: %s", responseDeviceData), ex);
        }
    }
    // The ceremony is approved only when the client data type is the finish type.
    boolean approved = StringHelper.equals(RawRegistrationService.REGISTER_FINISH_TYPE, response.getClientData().getTyp());
    if (!approved) {
        log.debug("Registratio request with keyHandle '{}' was canceled", rawRegisterResponse.getKeyHandle());
        return new DeviceRegistrationResult(deviceRegistration, DeviceRegistrationResult.Status.CANCELED);
    }
    // Two-step flow: the registration is stored under the known user.
    boolean twoStep = StringHelper.isNotEmpty(userInum);
    if (twoStep) {
        deviceRegistration.setDn(deviceRegistrationService.getDnForU2fDevice(userInum, deviceRegistrationId));
        // Check if there is device registration with keyHandle in LDAP already
        List<DeviceRegistration> foundDeviceRegistrations = deviceRegistrationService.findDeviceRegistrationsByKeyHandle(appId, deviceRegistration.getKeyHandle(), "oxId");
        if (foundDeviceRegistrations.size() != 0) {
            // A reused key handle indicates a cloned/compromised authenticator.
            throw new BadInputException(String.format("KeyHandle %s was compromised", deviceRegistration.getKeyHandle()));
        }
        deviceRegistrationService.addUserDeviceRegistration(userInum, deviceRegistration);
    } else {
        deviceRegistration.setDn(deviceRegistrationService.getDnForOneStepU2fDevice(deviceRegistrationId));
        deviceRegistrationService.addOneStepDeviceRegistration(deviceRegistration);
    }
    return new DeviceRegistrationResult(deviceRegistration, DeviceRegistrationResult.Status.APPROVED);
}
|
LogBench/LogBench-O_prefix_1point/oxauth_RegistrationService_finishRegistration.java
|
logbench-o_data_36
|
/**
 * Inspects a finished scheduled task. Completed-with-exception tasks are
 * reported to the handler, which may ask for the task to be resubmitted at
 * its original fixed rate.
 */
@Override
protected void afterExecute(Runnable runnable, Throwable throwable) {
    ScheduledFuture future = (ScheduledFuture) runnable;
    if (future.isDone()) {
        try {
            future.get();
            log.info("Task is completed");
        } catch (CancellationException ce) {
            // FIX: include the exception so the log carries its stack trace
            // (previously only the bare message was logged).
            log.error("Task is cancelled!", ce);
        } catch (ExecutionException e) {
            // FIX: likewise, keep the failure's stack trace in the log.
            log.error("Task is completed with exception!", e);
            Throwable t = e.getCause();
            SchedulerParams schedulerParams = runnables.remove(runnable);
            if (t != null && schedulerParams != null) {
                // The handler decides whether the failed task should run again.
                boolean resubmit = handler.exceptionOccurred(t);
                if (resubmit) {
                    log.info("Resubmitting the runnable task");
                    scheduleAtFixedRate(schedulerParams.runnable, schedulerParams.period, schedulerParams.period, schedulerParams.unit);
                }
            }
        } catch (InterruptedException e) {
            log.error("Scheduler thread is interrupted!", e);
            // Restore the interrupt flag for the pool.
            Thread.currentThread().interrupt();
        }
    }
}
|
LogBench/LogBench-O_prefix_1point/sherlock_RecoverableThreadScheduler_afterExecute.java
|
logbench-o_data_37
|
// doc comment inherited from LogWriter
// Emits the received exception with a "$$ RECEIVED_EXCEPTION:" marker at debug level.
public void logReceivedException(Logger log, IOException error) {
    log.debug("$$ RECEIVED_EXCEPTION: " + error, error);
}
|
LogBench/LogBench-O_prefix_1point/activemq_CustomLogWriter_logReceivedException.java
|
logbench-o_data_38
|
// Opens a durable topic subscription over the in-VM transport and blocks for a
// single message. NOTE(review): the name says "one minute" but the receive
// timeout is 120000 ms (two minutes) — confirm which is intended.
private Message collectMessagesFromDurableSubscriptionForOneMinute() throws Exception {
    ActiveMQConnectionFactory connectionFactory = new ActiveMQConnectionFactory("vm://" + brokerName);
    TopicConnection connection = connectionFactory.createTopicConnection();
    // The client id must match the one used when the durable sub was created.
    connection.setClientID(clientID);
    TopicSession topicSession = connection.createTopicSession(false, Session.AUTO_ACKNOWLEDGE);
    Topic topic = topicSession.createTopic(topicName);
    connection.start();
    TopicSubscriber subscriber = topicSession.createDurableSubscriber(topic, durableSubName);
    LOG.info("About to receive messages");
    Message message = subscriber.receive(120000);
    subscriber.close();
    connection.close();
    LOG.info("collectMessagesFromDurableSubscriptionForOneMinute done");
    return message;
}
|
LogBench/LogBench-O_prefix_1point/activemq_DurableSubscriptionHangTestCase_collectMessagesFromDurableSubscriptionForOneMinute.java
|
logbench-o_data_39
|
// Reports, per topic, how long each MR job waited between submission and start,
// limited to the top maxToReport entries.
public void reportDurationFromMRSubmittedToMRStarted(int maxToReport) {
    LOG.info("Time from MR submitted to MR started running By Topic:");
    Map<String, Long> durations = getDurations(this.mrSubmitTimeByTopic, this.mrStartRunningTimeByTopic);
    reportDurations(durations, maxToReport);
}
|
LogBench/LogBench-O_prefix_1point/camus_CamusSweeperMetrics_reportDurationFromMRSubmittedToMRStarted.java
|
logbench-o_data_40
|
@Override
public boolean canSubscribe(Node node, JID owner, JID subscriber) {
    // Presence access model: allow admins/owners, or subscribers to whom a local
    // node owner's presence is visible (SUB_BOTH or SUB_FROM roster state).
    // Let node owners and sysadmins always subscribe to the node
    if (node.isAdmin(owner)) {
        return true;
    }
    XMPPServer server = XMPPServer.getInstance();
    for (JID nodeOwner : node.getOwners()) {
        // Give access to the owner of the roster :)
        if (nodeOwner.equals(owner)) {
            return true;
        }
        // Check that the node owner is a local user
        if (server.isLocal(nodeOwner)) {
            try {
                Roster roster = server.getRosterManager().getRoster(nodeOwner.getNode());
                RosterItem item = roster.getRosterItem(owner);
                // Check that the subscriber is subscribed to the node owner's presence.
                // NOTE(review): this returns on the FIRST local owner found, so later
                // owners are never consulted — confirm this short-circuit is intended.
                return item != null && (RosterItem.SUB_BOTH == item.getSubStatus() || RosterItem.SUB_FROM == item.getSubStatus());
            } catch (UserNotFoundException e) {
                // Do nothing
            }
        } else {
            // Owner of the node is a remote user. This should never happen.
            Log.warn("Node with access model Presence has a remote user as owner: {}", node.getUniqueIdentifier());
        }
    }
    return false;
}
|
LogBench/LogBench-O_prefix_1point/openfire_PresenceAccess_canSubscribe.java
|
logbench-o_data_41
|
/**
 * Sends an HTTP request with an optional JSON payload and returns the response
 * body as a string. Returns an empty string when an I/O error occurs (the error
 * is logged).
 *
 * @param endpoint      the URL to call
 * @param requestMethod HTTP method (e.g. "GET", "POST")
 * @param payload       JSON request body, or null for no body
 * @return the response body, empty on failure
 */
private String getHttpResponse(String endpoint, String requestMethod, String payload) {
    StringBuilder jsonString = new StringBuilder();
    HttpURLConnection connection = null;
    try {
        URL url = new URL(endpoint);
        connection = (HttpURLConnection) url.openConnection();
        connection.setDoInput(true);
        connection.setDoOutput(true);
        connection.setRequestProperty("charset", "UTF-8");
        connection.setReadTimeout(10000);
        connection.setRequestMethod(requestMethod);
        connection.setRequestProperty("Accept", "application/json");
        if (null != payload) {
            connection.setRequestProperty("Content-Type", "application/json");
            connection.setRequestProperty("Content-Length", String.valueOf(payload.length()));
            // Fix: try-with-resources guarantees the writer is closed even when
            // write() fails (the original leaked it on exception).
            try (OutputStreamWriter writer = new OutputStreamWriter(connection.getOutputStream(), "UTF-8")) {
                writer.write(payload);
            }
        }
        // Fix: read with an explicit UTF-8 charset (the request advertises UTF-8;
        // the original used the platform default) and close the reader via
        // try-with-resources.
        try (BufferedReader br = new BufferedReader(new InputStreamReader(connection.getInputStream(), "UTF-8"))) {
            String line;
            while (null != (line = br.readLine())) {
                jsonString.append(line);
            }
        }
    } catch (IOException e) {
        log.error("IO exception occurred, " + e.getMessage(), e);
    } finally {
        // Fix: disconnect unconditionally. The original only disconnected when the
        // reader had been opened, leaking the connection when getInputStream() threw.
        if (null != connection) {
            connection.disconnect();
        }
    }
    return jsonString.toString();
}
|
LogBench/LogBench-O_prefix_1point/micro-integrator_RequestBoxJsonTestCase_getHttpResponse.java
|
logbench-o_data_42
|
/**
 * Decide if the given Sensor should be executed.
 *
 * Each guard logs the specific reason a sensor is skipped, at DEBUG level.
 */
public boolean shouldExecute(DefaultSensorDescriptor descriptor) {
    final boolean hasRelatedFiles = fsCondition(descriptor);
    if (!hasRelatedFiles) {
        LOG.debug("'{}' skipped because there is no related files in the current project", descriptor.name());
        return false;
    }
    final boolean hasActiveRules = activeRulesCondition(descriptor);
    if (!hasActiveRules) {
        LOG.debug("'{}' skipped because there is no related rules activated", descriptor.name());
        return false;
    }
    final boolean hasRequiredSettings = settingsCondition(descriptor);
    if (!hasRequiredSettings) {
        LOG.debug("'{}' skipped because one of the required properties is missing", descriptor.name());
        return false;
    }
    return true;
}
|
LogBench/LogBench-O_prefix_1point/sonarlint-core_SensorOptimizer_shouldExecute.java
|
logbench-o_data_43
|
@Override
public void write(int i) throws IOException {
    // Buffers bytes from Derby's log stream in a per-thread StringBuilder and
    // emits one log line each time a LF byte arrives.
    StringBuilder sb = local.get();
    if (sb == null) {
        sb = new StringBuilder();
    }
    // look for LF
    if (i == 10) {
        log.info("Derby log: {}", sb.toString());
        // Fix: fully reset the buffer. The original delete(0, sb.length() - 1)
        // left the final character behind (delete's end index is exclusive), so
        // each logged line leaked its last character into the next one, and it
        // threw StringIndexOutOfBoundsException on an empty buffer.
        sb.setLength(0);
    } else {
        log.trace("Derby log: {}", i);
        sb.append(new String(intToDWord(i)));
    }
    local.set(sb);
}
|
LogBench/LogBench-O_prefix_1point/red5-server_DerbyLogInterceptor_write.java
|
logbench-o_data_44
|
protected void spawnConsumer() {
    // Consumes firstBatch messages, then signals the test thread (via the shared
    // closeBroker flag) that it may bounce the broker, pauses, and consumes the
    // remaining messages after reconnection. Received counts accumulate in the
    // shared messagesReceived counter.
    Thread thread = new Thread(new Runnable() {
        public void run() {
            try {
                Connection consumerConnection = createConsumerConnection();
                MessageConsumer consumer = createConsumer(consumerConnection);
                // consume some messages
                for (int i = 0; i < firstBatch; i++) {
                    Message msg = consumer.receive(RECEIVE_TIMEOUT);
                    if (msg != null) {
                        // log.info("GOT: " + msg);
                        messagesReceived.incrementAndGet();
                    }
                }
                // Tell the waiting test thread it may now stop/restart the broker.
                synchronized (closeBroker) {
                    closeBroker.set(true);
                    closeBroker.notify();
                }
                // Give the broker time to go down before resuming consumption.
                Thread.sleep(2000);
                for (int i = firstBatch; i < MESSAGE_COUNT; i++) {
                    Message msg = consumer.receive(RECEIVE_TIMEOUT);
                    // log.info("GOT: " + msg);
                    if (msg != null) {
                        messagesReceived.incrementAndGet();
                    }
                }
                consumerConnection.close();
                // Wake the test thread waiting for the consumer to finish.
                synchronized (messagesReceived) {
                    messagesReceived.notify();
                }
            } catch (Throwable e) {
                e.printStackTrace();
            }
        }
    });
    thread.start();
}
|
LogBench/LogBench-O_prefix_1point/activemq_ReliableReconnectTest_spawnConsumer.java
|
logbench-o_data_45
|
@Override
public void stop() throws Exception {
    // Stops the proxy service and deregisters this connector's entry from the
    // shared connections set.
    LOG.info("Stopping proxy.");
    super.stop();
    // A throwaway connection that compares equal to the live one is used purely
    // as a removal key.
    ProxyConnection dummy = new ProxyConnection(local, this);
    // Fix: pass the object itself so toString() is only evaluated when DEBUG
    // logging is actually enabled.
    LOG.debug("Removing proxyConnection {}", dummy);
    connections.remove(dummy);
}
|
LogBench/LogBench-O_prefix_1point/activemq_ProxyConnector_stop.java
|
logbench-o_data_46
|
@Override
public List<String> groupsFromRoles(List<String> roles) {
    // Pass the input through untouched when no template is configured or the
    // role list itself is absent.
    if (template == null || roles == null) {
        return roles;
    }
    final List<String> mappedGroups = new ArrayList<>(roles.size());
    for (String role : roles) {
        final String groupName = template.replace(ROLE_MAPPING_PLACEHOLDER, role);
        LOG.debug("Mapped role {} to {}", role, groupName);
        mappedGroups.add(groupName);
    }
    return mappedGroups;
}
|
LogBench/LogBench-O_prefix_1point/cxf-fediz_DefaultRoleToGroupMapper_groupsFromRoles.java
|
logbench-o_data_47
|
public void testRebalance() throws Exception {
    // Verifies that a failover client keeps working across a broker network as
    // brokers are added and the originally connected brokers are stopped one by
    // one: messages must still round-trip after each topology change.
    createBroker(new ClassPathResource("org/apache/activemq/usecases/rebalance-broker1.xml"));
    createBroker(new ClassPathResource("org/apache/activemq/usecases/rebalance-broker2.xml"));
    startAllBrokers();
    brokers.get("b1").broker.waitUntilStarted();
    LOG.info("Starting connection");
    // The failover URL lists only b1/b2; b3 must be picked up dynamically.
    ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory("failover:(tcp://localhost:61616,tcp://localhost:61617)?randomize=false");
    Connection conn = factory.createConnection();
    conn.start();
    Session session = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);
    Queue theQueue = session.createQueue(QUEUE_NAME);
    MessageProducer producer = session.createProducer(theQueue);
    MessageConsumer consumer = session.createConsumer(theQueue);
    Message message = session.createTextMessage("Test message");
    producer.send(message);
    Message msg = consumer.receive(2000);
    assertNotNull(msg);
    // introduce third broker
    createBroker(new ClassPathResource("org/apache/activemq/usecases/rebalance-broker3.xml"));
    brokers.get("b3").broker.waitUntilStarted();
    Thread.sleep(3000);
    LOG.info("Stopping broker 1");
    brokers.get("b1").broker.stop();
    brokers.get("b1").broker.waitUntilStopped();
    Thread.sleep(3000);
    // should reconnect to some of the remaining brokers
    producer.send(message);
    msg = consumer.receive(2000);
    assertNotNull(msg);
    LOG.info("Stopping broker 2");
    brokers.get("b2").broker.stop();
    brokers.get("b2").broker.waitUntilStopped();
    // should reconnect to broker3
    producer.send(message);
    msg = consumer.receive(2000);
    assertNotNull(msg);
}
|
LogBench/LogBench-O_prefix_1point/activemq_ClientRebalanceTest_testRebalance.java
|
logbench-o_data_48
|
public void startServersWithDepSync(boolean mountRegistry, TestServerManager... serverManagers) throws AutomationFrameworkException {
    // Starts each server in turn. The first server started captures its
    // deployment (and optionally registry) directory, which every subsequent
    // server then mounts, emulating deployment synchronization across nodes.
    int noOfServers = serverManagers.length;
    for (int index = 0; index < noOfServers; ++index) {
        log.info("============================== Configuring server " + (servers.size() + 1) + " ==============================");
        TestServerManager testServerManager = serverManagers[index];
        try {
            String carbonHome;
            if (deploymentDirectory == null) {
                // First server: record its directories for the others to share.
                carbonHome = testServerManager.startServer();
                deploymentDirectory = String.join(File.separator, carbonHome, "repository", "deployment");
                if (mountRegistry) {
                    registryDirectory = String.join(File.separator, carbonHome, "registry");
                }
            } else {
                // Subsequent servers reuse the shared directories.
                carbonHome = testServerManager.startServer(deploymentDirectory, registryDirectory);
            }
            servers.put(carbonHome, testServerManager);
        } catch (Exception ex) {
            throw new AutomationFrameworkException(ex);
        }
    }
}
|
LogBench/LogBench-O_prefix_1point/micro-integrator_MultipleServersManager_startServersWithDepSync.java
|
logbench-o_data_49
|
public void info(String s, Throwable t) {
    // Log with passwords masked. Note: the unfiltered message is what gets
    // appended to infoString, matching the original behavior.
    LOG.info(filterPasswords(s), t);
    if (infoString != null) {
        // Accumulation format: "<message>, <throwable>;"
        infoString = infoString + s + ", " + t + ";";
    }
}
|
LogBench/LogBench-O_prefix_1point/activemq_AbstractRuntimeConfigurationBroker_info.java
|
logbench-o_data_50
|
public Future<ValidationResult> validateMessages(HttpServerRequest request, List<KafkaProducerRecord<String, String>> kafkaProducerRecords) {
    // Validates every Kafka record of the batch against the schema configured for
    // this request. The batch is positive only if every record validates; the
    // first non-positive result is returned as the batch result.
    if (kafkaProducerRecords.isEmpty()) {
        return Future.succeededFuture(new ValidationResult(ValidationStatus.VALIDATED_POSITIV));
    }
    // No validation rule matches this request: validation passes by default.
    Map<String, String> entry = ValidationUtil.matchingValidationResourceEntry(validationResourceManager.getValidationResource(), request, log);
    if (entry == null) {
        return Future.succeededFuture(new ValidationResult(ValidationStatus.VALIDATED_POSITIV));
    }
    Optional<SchemaLocation> optionalSchemaLocation = ValidationUtil.matchingSchemaLocation(validationResourceManager.getValidationResource(), request, log);
    if (optionalSchemaLocation.isEmpty()) {
        // A rule exists but no schema to check against: report "could not validate".
        log.warn("No schema location found for {}. Could not validate kafka message", request.uri());
        return Future.succeededFuture(new ValidationResult(ValidationStatus.COULD_NOT_VALIDATE));
    }
    SchemaLocation schemaLocation = optionalSchemaLocation.get();
    // The raw Future list is forced by CompositeFuture's signature, see:
    // https://github.com/eclipse-vertx/vert.x/issues/2627
    @SuppressWarnings("rawtypes")
    List<Future> futures = kafkaProducerRecords.stream().map(message -> validator.validateWithSchemaLocation(schemaLocation, Buffer.buffer(message.value()), log)).collect(toList());
    return CompositeFuture.all(futures).compose(compositeFuture -> {
        // First non-positive result wins; otherwise the whole batch is positive.
        for (Object o : compositeFuture.list()) {
            if (((ValidationResult) o).getValidationStatus() != ValidationStatus.VALIDATED_POSITIV) {
                return Future.succeededFuture((ValidationResult) o);
            }
        }
        return Future.succeededFuture(new ValidationResult(ValidationStatus.VALIDATED_POSITIV));
    });
}
|
LogBench/LogBench-O_prefix_1point/gateleen_KafkaMessageValidator_validateMessages.java
|
logbench-o_data_51
|
/**
 * Run the job. The job will perform the EGADS
 * anomaly detection on its data and keep a
 * reference to its reports.
 */
@Override
public void run() {
    List<Anomaly> anomalies;
    List<AnomalyReport> reports = new ArrayList<>();
    // Reconstruct DetectorConfig from file-backed properties, then overlay the
    // job's own model/framework choices.
    DetectorConfig config = DetectorConfig.fromProperties(DetectorConfig.fromFile());
    config.setTsModel(this.proxyJob.getTimeseriesModel());
    config.setAdModel(this.proxyJob.getAnomalyDetectionModel());
    if (this.proxyJob.getTimeseriesFramework().equals(DetectorConfig.Framework.Prophet.toString())) {
        // Prophet carries its own growth/seasonality settings.
        config.setTsFramework(DetectorConfig.Framework.Prophet.toString());
        config.setProphetGrowthModel(this.proxyJob.getProphetGrowthModel());
        config.setProphetYearlySeasonality(this.proxyJob.getProphetYearlySeasonality());
        config.setProphetWeeklySeasonality(this.proxyJob.getProphetWeeklySeasonality());
        config.setProphetDailySeasonality(this.proxyJob.getProphetDailySeasonality());
    } else {
        config.setTsFramework(DetectorConfig.Framework.Egads.toString());
    }
    try {
        proxyJob.setJobStatus(JobStatus.RUNNING.getValue());
        proxyJob.setEffectiveQueryTime(effectiveQueryEndTime);
        // Drop any stale reports for this job/time window before re-detecting.
        executionService.getAnomalyReportAccessor().deleteAnomalyReportsForJobAtTime(proxyJob.getJobId().toString(), proxyJob.getReportNominalTime().toString(), proxyJob.getFrequency());
        Granularity granularity = Granularity.getValue(proxyJob.getGranularity());
        anomalies = detectorService.runDetection(timeSeriesList, proxyJob.getSigmaThreshold(), config, proxyJob.getReportNominalTime(), proxyJob.getFrequency(), granularity, proxyJob.getGranularityRange());
        reports = executionService.getReports(anomalies, proxyJob);
    } catch (Exception e) {
        // Fix: a failed detection run is an error, not routine information;
        // the original logged it at INFO level.
        log.error("Error in egads job!", e);
    }
    // Always record at least a singleton (no-anomaly) report for this run.
    if (reports.isEmpty()) {
        reports.add(executionService.getSingletonReport(proxyJob));
    }
    this.reports = reports;
}
|
LogBench/LogBench-O_prefix_1point/sherlock_DetectionTask_run.java
|
logbench-o_data_52
|
@Override
public void start() throws IllegalStateException {
    // Advertises the XMPP client and component ports over multicast DNS
    // (Bonjour/Zeroconf), if the feature is enabled.
    // If the service isn't enabled, return.
    if (!JiveGlobals.getBooleanProperty("multicastDNS.enabled", false)) {
        return;
    }
    TimerTask startService = new TimerTask() {
        @Override
        public void run() {
            int clientPortNum = -1;
            int componentPortNum = -1;
            final ConnectionManager connectionManager = XMPPServer.getInstance().getConnectionManager();
            if (connectionManager != null) {
                clientPortNum = connectionManager.getPort(ConnectionType.SOCKET_C2S, false);
                componentPortNum = connectionManager.getPort(ConnectionType.COMPONENT, false);
            }
            try {
                // Lazily create the shared JmDNS instance on first run.
                if (jmdns == null) {
                    jmdns = new JmDNS();
                }
                String serverName = XMPPServer.getInstance().getServerInfo().getXMPPDomain();
                // Only register services whose port could actually be resolved.
                if (clientPortNum != -1) {
                    ServiceInfo clientService = new ServiceInfo("_xmpp-client._tcp.local.", serverName + "._xmpp-client._tcp.local.", clientPortNum, "XMPP Server");
                    jmdns.registerService(clientService);
                }
                if (componentPortNum != -1) {
                    ServiceInfo componentService = new ServiceInfo("_xmpp-component._tcp.local.", serverName + "._xmpp-component._tcp.local.", componentPortNum, "XMPP Component Server");
                    jmdns.registerService(componentService);
                }
            } catch (IOException ioe) {
                Log.error(ioe.getMessage(), ioe);
            }
        }
    };
    // Schedule the task to run in 5 seconds, to give Openfire time to start the ports.
    TaskEngine.getInstance().schedule(startService, Duration.ofSeconds(5));
}
|
LogBench/LogBench-O_prefix_1point/openfire_MulticastDNSService_start.java
|
logbench-o_data_53
|
@Override
public void onFinish(ITestContext ctx) {
    // Logs JVM memory statistics (in MB) and the process name when a test
    // context finishes. GC first so the numbers reflect live memory more closely.
    System.gc();
    Runtime rt = Runtime.getRuntime();
    long inMb = 1024 * 1024;
    // Fix: use parameterized logging throughout instead of string concatenation,
    // consistent with the final statement's existing style.
    log.info("Total memory : {}", rt.totalMemory() / inMb);
    log.info("Free memory : {}", rt.freeMemory() / inMb);
    log.info("Memory usage : {}", (rt.totalMemory() - rt.freeMemory()) / inMb);
    log.info("Process : {}", ManagementFactory.getRuntimeMXBean().getName());
    log.info("<<<<<<<<<<<<<<<<<<<< {} finished >>>>>>>>>>>>>>>>>>>>", ctx.getName());
}
|
LogBench/LogBench-O_prefix_1point/find-sec-bugs_VerboseTestListener_onFinish.java
|
logbench-o_data_54
|
@FXML
public void finish() {
    // Re-encrypts the vault masterkey under the new passphrase: read the current
    // masterkey file, derive the new bytes, back up the old file, then write the
    // new key. CREATE_NEW guards against clobbering a concurrently created file.
    try {
        CharSequence oldPassphrase = oldPasswordField.getCharacters();
        CharSequence newPassphrase = newPasswordController.passwordField.getCharacters();
        Path masterkeyPath = vault.getPath().resolve(MASTERKEY_FILENAME);
        byte[] oldMasterkeyBytes = Files.readAllBytes(masterkeyPath);
        byte[] newMasterkeyBytes = masterkeyFileAccess.changePassphrase(oldMasterkeyBytes, oldPassphrase, newPassphrase);
        // Keep a backup of the old masterkey, suffixed with a content-derived id.
        Path backupKeyPath = vault.getPath().resolve(MASTERKEY_FILENAME + BackupHelper.generateFileIdSuffix(oldMasterkeyBytes) + MASTERKEY_BACKUP_SUFFIX);
        Files.move(masterkeyPath, backupKeyPath, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE);
        Files.write(masterkeyPath, newMasterkeyBytes, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE);
        LOG.info("Successfully changed password for {}", vault.getDisplayName());
        updatePasswordInSystemkeychain();
        window.close();
    } catch (InvalidPassphraseException e) {
        // Wrong old password: shake the window and let the user retry.
        Animations.createShakeWindowAnimation(window).play();
        oldPasswordField.selectAll();
        oldPasswordField.requestFocus();
    } catch (IOException | CryptoException e) {
        LOG.error("Password change failed. Unable to perform operation.", e);
        appWindows.showErrorWindow(e, window, window.getScene());
    }
}
|
LogBench/LogBench-O_prefix_1point/cryptomator_ChangePasswordController_finish.java
|
logbench-o_data_55
|
@Override
public AbstractFeature build() throws JATEException {
    // Computes the Containment feature over all candidate terms in parallel
    // using a fork/join pool sized to the configured number of CPU cores.
    Containment feature = new Containment();
    // start workers
    int cores = properties.getMaxCPUCores();
    cores = cores == 0 ? 1 : cores;
    int maxPerThread = getMaxPerThread(cores);
    // Fix: plain concatenation replaces the manual StringBuilder assembly; the
    // logged text is unchanged.
    LOG.info("Building features using cpu cores=" + cores + ", total terms=" + uniqueCandidateTerms.size() + ", max per worker=" + maxPerThread);
    ContainmentFBWorker worker = new ContainmentFBWorker(new ArrayList<>(uniqueCandidateTerms), maxPerThread, feature, termComponentIndex);
    ForkJoinPool forkJoinPool = new ForkJoinPool(cores);
    // total[0] = successes, total[1] = overall count (per the log line below).
    int[] total = forkJoinPool.invoke(worker);
    LOG.info("Complete building features. Total=" + total[1] + " success=" + total[0]);
    return feature;
}
|
LogBench/LogBench-O_prefix_1point/jate_ContainmentFBMaster_build.java
|
logbench-o_data_56
|
private synchronized MessageInterceptorFilter getFilter() {
    // Lazily splices a MessageInterceptorFilter into the broker's filter chain.
    // synchronized so concurrent callers cannot install the filter twice.
    if (filter == null) {
        try {
            MutableBrokerFilter mutableBrokerFilter = (MutableBrokerFilter) brokerService.getBroker().getAdaptor(MutableBrokerFilter.class);
            Broker next = mutableBrokerFilter.getNext();
            // Insert our filter between the mutable filter and its current successor.
            filter = new MessageInterceptorFilter(next);
            mutableBrokerFilter.setNext(filter);
        } catch (Exception e) {
            LOG.error("Failed to create MessageInterceptorFilter", e);
            // NOTE(review): on failure this method returns null; callers must
            // tolerate a null filter — confirm they do.
        }
    }
    return filter;
}
|
LogBench/LogBench-O_prefix_1point/activemq_MessageInterceptorRegistry_getFilter.java
|
logbench-o_data_57
|
@AfterClass(alwaysRun = true)
public void deleteService() throws Exception {
    // Tear down both generated services created for this test class.
    for (String serviceName : new String[] { SERVICE_NAME_1, SERVICE_NAME_2 }) {
        deleteService(serviceName);
    }
    log.info(SCHEMA_NAME + " deleted");
}
|
LogBench/LogBench-O_prefix_1point/micro-integrator_MultipleServicesGeneratorTestCase_deleteService.java
|
logbench-o_data_58
|
/**
 * Parses the provided router configuration resource and returns a {@link RouterConfiguration}.
 *
 * @param configurationResourceBuffer the resource to parse
 * @return a {@link RouterConfiguration}, or empty when wildcard replacement or
 *         JSON parsing fails
 */
public static Optional<RouterConfiguration> parse(Buffer configurationResourceBuffer, Map<String, Object> properties) {
    String replacedConfig;
    JsonObject config;
    try {
        replacedConfig = StringUtils.replaceWildcardConfigs(configurationResourceBuffer.toString(UTF_8), properties);
        config = new JsonObject(Buffer.buffer(replacedConfig));
    } catch (Exception e) {
        log.warn("Could not replace wildcards with environment properties for the router configuration " + "due to following reason: {}", e.getMessage());
        return Optional.empty();
    }
    Integer requestHopsLimit = config.getInteger(REQUEST_HOPS_LIMIT_PROPERTY);
    JsonObject authConfigs = config.getJsonObject(AUTH_CONFIGS_PROPERTY);
    if (authConfigs == null) {
        // No OAuth section: a configuration with only the hops limit is still valid.
        return Optional.of(new RouterConfiguration(requestHopsLimit, Collections.emptyMap()));
    }
    Map<OAuthId, OAuthConfiguration> oAuthConfigurationsMap = new HashMap<>();
    for (String authConfigId : authConfigs.fieldNames()) {
        JsonObject authConfig = authConfigs.getJsonObject(authConfigId);
        String flowTypeStr = authConfig.getString(FLOWTYPE_PROPERTY);
        OAuth2FlowType flowType = flowTypeFromStr(flowTypeStr);
        if (flowType == null) {
            // An auth entry without a valid flow type is skipped, not fatal.
            log.warn("No valid OAuth2FlowType configured for auth configuration '{}'. " + "Unable to use this configuration", authConfigId);
            continue;
        }
        String clientId = authConfig.getString(CLIENT_ID_PROPERTY);
        String clientSecret = authConfig.getString(CLIENT_SECRET_PROPERTY);
        String site = authConfig.getString(SITE_PROPERTY);
        String tokenPath = authConfig.getString(TOKENPATH_PROPERTY);
        String authPath = authConfig.getString(AUTHPATH_PROPERTY);
        // Decomposition: the two array-to-list conversions previously duplicated
        // identical loop logic; extracted into toStringList().
        List<String> scopeList = toStringList(authConfig.getJsonArray(SCOPES_PROPERTY));
        List<String> supportedGrantTypesList = toStringList(authConfig.getJsonArray(SUPPORTED_GRANTTYPES_PROPERTY));
        OAuthConfiguration oAuthConfiguration = new OAuthConfiguration(flowType, clientId, clientSecret, site, tokenPath, authPath, scopeList, supportedGrantTypesList);
        oAuthConfigurationsMap.put(OAuthId.of(authConfigId), oAuthConfiguration);
    }
    return Optional.of(new RouterConfiguration(requestHopsLimit, oAuthConfigurationsMap));
}

/**
 * Copies the string elements of a {@link JsonArray} into a new list.
 *
 * @param array the array to convert, may be null
 * @return a list of the array's elements, or null when the array is null
 *         (preserving the original "absent" semantics)
 */
private static List<String> toStringList(JsonArray array) {
    if (array == null) {
        return null;
    }
    List<String> result = new ArrayList<>();
    for (Object item : array) {
        result.add((String) item);
    }
    return result;
}
|
LogBench/LogBench-O_prefix_1point/gateleen_RouterConfigurationParser_parse.java
|
logbench-o_data_59
|
protected void processRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    // Serves the redirect URIs of a sector identifier as a pretty-printed JSON
    // array; the oxId is taken from the last path segment of the request URL.
    final HttpServletRequest httpRequest = request;
    final HttpServletResponse httpResponse = response;
    httpResponse.setContentType("application/json");
    PrintWriter out = httpResponse.getWriter();
    try {
        String urlPath = httpRequest.getPathInfo();
        // Last path segment is the sector identifier id.
        String oxId = urlPath.substring(urlPath.lastIndexOf("/") + 1, urlPath.length());
        org.oxauth.persistence.model.SectorIdentifier sectorIdentifier = sectorIdentifierService.getSectorIdentifierById(oxId);
        JSONArray jsonArray = new JSONArray();
        for (String redirectUri : sectorIdentifier.getRedirectUris()) {
            jsonArray.put(redirectUri);
        }
        // JSONArray escapes '/' as '\/'; undo that for readable URIs.
        out.println(jsonArray.toString(4).replace("\\/", "/"));
    } catch (Exception e) {
        // NOTE(review): an unknown oxId appears to surface here (e.g. as an NPE)
        // and yields an empty 200 response — confirm whether a 404 would be more
        // appropriate.
        log.error(e.getMessage(), e);
    } finally {
        out.close();
    }
}
|
LogBench/LogBench-O_prefix_1point/oxauth_SectorIdentifier_processRequest.java
|
logbench-o_data_60
|
@Override
public void process(PublishingFlow publishingFlow, int tenantId) {
    // Assemble the meta/event payloads for this flow and forward them to the
    // analytics agent, tracing the payload sections when DEBUG is enabled.
    final Object[] metaData = new Object[2];
    final Object[] eventData = new Object[2];
    addMetaData(metaData, tenantId);
    addEventData(eventData, publishingFlow);
    if (log.isDebugEnabled()) {
        log.debug("Before sending to analytic server ------");
        // Dumps each payload section; enable via log4j.properties.
        int section = 0;
        for (Object data : eventData) {
            log.debug("Section-" + section + " -> " + data);
            section++;
        }
    }
    publishToAgent(eventData, metaData);
    if (log.isDebugEnabled()) {
        log.debug("------ After sending to analytic server");
    }
}
|
LogBench/LogBench-O_prefix_1point/micro-integrator_EIStatisticsPublisher_process.java
|
logbench-o_data_61
|
public void produceMessages() throws Exception {
    // Sends MESSAGE_COUNT_OF_ONE_GROUP persistent messages per priority in
    // PRIORITIES (10 -> "high", 5 -> "mid", otherwise "low"), each tagged with a
    // "priority" int property and a "<group>_<n>" text body.
    ConnectionFactory connectionFactory = new ActiveMQConnectionFactory(brokerService.getTransportConnectors().get(0).getConnectUri().toString());
    Connection connection = connectionFactory.createConnection();
    Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
    Destination destination = session.createQueue(QUEUE_NAME);
    MessageProducer producer = session.createProducer(destination);
    producer.setDeliveryMode(DeliveryMode.PERSISTENT);
    long start = System.currentTimeMillis();
    for (int priority : PRIORITIES) {
        String name = null;
        if (priority == 10) {
            name = "high";
        } else if (priority == 5) {
            name = "mid";
        } else {
            name = "low";
        }
        for (int i = 1; i <= MESSAGE_COUNT_OF_ONE_GROUP; i++) {
            TextMessage message = session.createTextMessage(name + "_" + i);
            message.setIntProperty("priority", priority);
            producer.send(message);
        }
    }
    long end = System.currentTimeMillis();
    log.info("sent " + (MESSAGE_COUNT_OF_ONE_GROUP * 3) + " messages in " + (end - start) + " ms");
    // NOTE(review): the JMS resources are not closed if an exception occurs
    // above; acceptable for test code, but a try/finally would be safer.
    producer.close();
    session.close();
    connection.close();
}
|
LogBench/LogBench-O_prefix_1point/activemq_AMQ1893Test_produceMessages.java
|
logbench-o_data_62
|
@Test
public void testFLVReaderFileGenerateMetaData() {
    // Reads a known-problematic FLV fixture with metadata generation enabled and
    // walks every tag; primarily a smoke test that the reader does not choke.
    log.info("\n testFLVReaderFileGenerateMetaData");
    String[] paths = new String[] { "target/test-classes/fixtures/stray.flv" };
    try {
        for (String path : paths) {
            File file = Paths.get(path).toFile();
            if (file.exists() && file.canRead()) {
                log.info("Reading: {}", file.getName());
                FLVReader reader = new FLVReader(file, true);
                ITag tag = null;
                while (reader.hasMoreTags()) {
                    tag = reader.readTag();
                    // Only tags with data type > 9 (non-A/V tags) are traced.
                    if (tag != null && tag.getDataType() > 9) {
                        log.debug("Tag: {}", tag);
                    }
                }
                reader.close();
                log.info("Finished reading: {}\n", file.getName());
            } else {
                log.info("File couldn't be accessed or doesnt exist: {}", file.getName());
            }
        }
    } catch (Exception e) {
        // NOTE(review): swallowing the exception means a failing read never fails
        // this test — confirm that is intended.
        e.printStackTrace();
    }
}
|
LogBench/LogBench-O_prefix_1point/red5-server_FLVReaderTest_testFLVReaderFileGenerateMetaData.java
|
logbench-o_data_63
|
private static <T extends Hook> Completable executeHooks(final String componentId, final List<T> hooks, final HookPhase phase, final ExecutionContext ctx, final ExecutionPhase executionPhase, final Throwable throwable, final ExecutionFailure executionFailure) {
    // Runs the given hooks one at a time (maxConcurrency = 1), dispatching the
    // call appropriate to the hook phase. A failing hook is logged and swallowed
    // (onErrorComplete) so hook errors never break the surrounding flow.
    return Flowable.fromIterable(hooks).flatMapCompletable(hook -> {
        switch(phase) {
            case PRE:
                return hook.pre(componentId, ctx, executionPhase);
            case POST:
                return hook.post(componentId, ctx, executionPhase);
            case INTERRUPT:
                return hook.interrupt(componentId, ctx, executionPhase);
            case INTERRUPT_WITH:
                return hook.interruptWith(componentId, ctx, executionPhase, executionFailure);
            case ERROR:
                return hook.error(componentId, ctx, executionPhase, throwable);
            default:
                // Defensive: a new HookPhase constant must be handled explicitly.
                return Completable.error(new RuntimeException(String.format("Unknown hook phase %s while executing hook", phase)));
        }
    }, false, 1).doOnError(error -> log.warn("Unable to execute '{}' hook on flow '{}'", phase.name(), componentId, error)).onErrorComplete();
}
|
LogBench/LogBench-O_prefix_1point/gravitee-api-management_HookHelper_executeHooks.java
|
logbench-o_data_64
|
@Test(groups = { "wso2.esb" }, description = "Test different content types", dataProvider = "contentTypeProvider")
public void testReturnContentType(String dataProviderContentType) throws Exception {
    // Spins up a local backend that responds with the given content type, calls
    // the proxy, and asserts the Content-Type header survives the round trip.
    contentType = dataProviderContentType;
    HttpServer server = HttpServer.create(new InetSocketAddress(PORT), 0);
    server.createContext("/gettest", new MyHandler());
    // creates a default executor
    server.setExecutor(null);
    server.start();
    DefaultHttpClient httpclient = new DefaultHttpClient();
    String url = "http://localhost:8480/serviceTest/test";
    HttpGet httpGet = new HttpGet(url);
    HttpResponse response = null;
    try {
        response = httpclient.execute(httpGet);
    } catch (IOException e) {
        log.error("Error Occurred while sending http get request. ", e);
    }
    // NOTE(review): if the GET failed, response is still null here and the next
    // line throws an NPE instead of a clean assertion failure — consider failing
    // fast after the catch.
    log.info(response.getEntity().getContentType());
    log.info(response.getStatusLine().getStatusCode());
    assertEquals(response.getFirstHeader("Content-Type").getValue(), contentType, "Expected content type doesn't match");
    assertEquals(response.getStatusLine().getStatusCode(), HTTP_STATUS_OK, "response code doesn't match");
    // Stop with a short delay so in-flight exchanges can finish.
    server.stop(5);
}
|
LogBench/LogBench-O_prefix_1point/micro-integrator_ContentTypeTestCase_testReturnContentType.java
|
logbench-o_data_65
|
@Override
public void onPartitionsAssigned(Map<TopicPartition, Long> assignments, ConsumerSeekAware.ConsumerSeekCallback callback) {
    // Records the end offset of every newly assigned partition on its topic's
    // entry in the distributed topicsMap.
    for (Map.Entry<TopicPartition, Long> entry : assignments.entrySet()) {
        String topic = entry.getKey().topic();
        topicsMap.putIfAbsent(topic, new Topic());
        topicsMap.submitToKey(topic, new EndOffsetSetter(entry.getKey().partition(), entry.getValue()));
    }
    // Fix: string concatenation already invokes toString(); the explicit call was
    // redundant (and would NPE on a null map, unlike implicit conversion).
    // NOTE(review): WARN level for a routine rebalance callback looks high —
    // consider INFO; kept as-is to preserve behavior.
    log.warn("onPartitionsAssigned:" + assignments);
}
|
LogBench/LogBench-O_prefix_1point/splitet_EventListener_onPartitionsAssigned.java
|
logbench-o_data_66
|
public void doStart() throws Exception {
    // GBean lifecycle callback; the channel needs no explicit initialization,
    // so only the transition is recorded.
    log.debug("Started channel gbean.");
}
|
LogBench/LogBench-O_prefix_1point/geronimo_ChannelGBean_doStart.java
|
logbench-o_data_67
|
@Bean(name = BEAN_NAME_EMBEDDED_CASSANDRA, destroyMethod = "stop")
public CassandraContainer cassandra(ConfigurableEnvironment environment, CassandraProperties properties, Optional<Network> network) throws Exception {
    // Starts an embedded Cassandra testcontainer, creates the configured
    // keyspace and publishes the connection details into the Spring environment.
    // The container is stopped automatically via the bean's destroyMethod.
    CassandraContainer cassandra = new CassandraContainer<>(ContainerUtils.getDockerImageName(properties)).withExposedPorts(properties.getPort());
    // Attach to the shared docker network when one is configured.
    network.ifPresent(cassandra::withNetwork);
    cassandra = (CassandraContainer) configureCommonsAndStart(cassandra, properties, log);
    initKeyspace(properties, cassandra);
    Map<String, Object> cassandraEnv = registerCassandraEnvironment(environment, cassandra, properties);
    log.info("Started Cassandra. Connection details: {}", cassandraEnv);
    return cassandra;
}
|
LogBench/LogBench-O_prefix_1point/testcontainers-spring-boot_EmbeddedCassandraBootstrapConfiguration_cassandra.java
|
logbench-o_data_68
|
/**
 * {@inheritDoc}
 *
 * Flushes and releases the writer once the client stops providing data.
 */
public void stopListening() {
    log.debug("stopListening, client is finished providing data");
    // Guard clause: nothing to do when no writer was ever opened.
    if (writer == null) {
        return;
    }
    writer.close();
    log.debug("Bytes written: {}", writer.getBytesWritten());
    writer = null;
}
|
LogBench/LogBench-O_prefix_1point/red5-server_Writer_stopListening.java
|
logbench-o_data_69
|
@Override
public void filterWrite(NextFilter nextFilter, IoSession session, WriteRequest writeRequest) throws Exception {
    // Kills sessions whose outbound queue has grown past bytesCap, guarding
    // against stalled clients exhausting server memory.
    if (!session.isClosing()) {
        // Get number of pending requests
        long pendingBytes = session.getScheduledWriteBytes();
        if (pendingBytes > bytesCap) {
            // Get last time we were able to send something to the connected client
            long writeTime = session.getLastWriteTime();
            int pendingRequests = session.getScheduledWriteMessages();
            // Fix: parameterized logging (formatted only when DEBUG is enabled),
            // and the missing space before "session:" in the original
            // concatenated message.
            Log.debug("About to kill session with pendingBytes: {} pendingWrites: {} lastWrite: {} session: {}", pendingBytes, pendingRequests, new Date(writeTime), session);
            // Close the session and throw an exception
            session.close(false);
            throw new IOException("Closing session that seems to be stalled. Preventing OOM");
        }
    }
    // Call next filter (everything is fine)
    super.filterWrite(nextFilter, session, writeRequest);
}
|
LogBench/LogBench-O_prefix_1point/openfire_StalledSessionsFilter_filterWrite.java
|
logbench-o_data_70
|
/**
 * This will clone the bootstrap realm config and creates a tenant specific realm.
 * Here the user search base of the particular tenant is changes to the tenant specific user
 * store.
 *
 * @param bootStrapConfig the bootstrap realm configuration to clone
 * @param tenantMgtConfiguration tenant management configuration (currently unused here)
 * @param tenantInfo the tenant whose realm is being created
 * @param tenantId the tenant's numeric id
 * @return the tenant-specific realm configuration
 * @throws UserStoreException when the realm configuration cannot be created
 */
public RealmConfiguration getRealmConfigForTenantToPersist(RealmConfiguration bootStrapConfig, TenantMgtConfiguration tenantMgtConfiguration, Tenant tenantInfo, int tenantId) throws UserStoreException {
    RealmConfiguration ldapRealmConfig = null;
    try {
        ldapRealmConfig = bootStrapConfig.cloneRealmConfiguration();
        // TODO: Random password generation.
        ldapRealmConfig.setAdminPassword(UIDGenerator.generateUID());
        ldapRealmConfig.setAdminUserName(tenantInfo.getAdminName());
        ldapRealmConfig.setTenantId(tenantId);
        Map<String, String> authz = ldapRealmConfig.getAuthzProperties();
        authz.put(UserCoreConstants.RealmConfig.PROPERTY_ADMINROLE_AUTHORIZATION, UserCoreConstants.UI_ADMIN_PERMISSION_COLLECTION);
        Map<String, String> userStoreProperties = ldapRealmConfig.getUserStoreProperties();
        // If the domain is wso2.com, partition dn is composed as dc=wso2,dc=com.
        // Fix: split the domain once instead of twice.
        // NOTE(review): this assumes the tenant domain has at least two
        // dot-separated labels and ignores any label after the second (e.g.
        // "a.co.uk" becomes dc=a,dc=co); a single-label domain would throw
        // ArrayIndexOutOfBoundsException — confirm whether that is acceptable.
        String[] domainLabels = tenantInfo.getDomain().split("\\.");
        String partitionDN = "dc=" + domainLabels[0] + ",dc=" + domainLabels[1];
        /*according to carbon/components/apacheds-server, users are stored under "ou=Users"
        context. So that is hard coded as the default in userSearchBase.*/
        String userSearchBase = "ou=Users," + partitionDN;
        // replace the tenant specific user search base.
        userStoreProperties.put(LDAPConstants.USER_SEARCH_BASE, userSearchBase);
        return ldapRealmConfig;
    } catch (Exception e) {
        String errorMessage = "Tenant specific realm config could not be created.";
        if (log.isDebugEnabled()) {
            log.debug(errorMessage, e);
        }
        throw new UserStoreException(errorMessage, e);
    }
}
|
LogBench/LogBench-O_prefix_1point/micro-integrator_LDAPRealmConfigBuilder_getRealmConfigForTenantToPersist.java
|
logbench-o_data_71
|
@Override
public void run() {
    // Producer worker: sends jobs * 10 empty messages, cycling across
    // destinationCount queues named "Q.<n>", then signals completion via the
    // shared allDone latch. Failures are collected in the shared exceptions
    // list so the test thread can assert on them.
    try {
        final Connection connection = factory.createConnection();
        connection.start();
        final Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        for (int j = 0; j < jobs * 10; j++) {
            final MessageProducer producer = session.createProducer(new ActiveMQQueue("Q." + (j % destinationCount)));
            producer.send(session.createMessage());
        }
        connection.close();
        allDone.countDown();
        LOG.info("Producers done!");
    } catch (Exception ignored) {
        LOG.error("unexpected ", ignored);
        exceptions.add(ignored);
    }
}
|
LogBench/LogBench-O_prefix_1point/activemq_ConcurrentDestinationCreationTest_run.java
|
logbench-o_data_72
|
@Before
public void setUp() throws Exception {
    // Mark the start of each test in the log for easier log correlation.
    final String testName = getTestName();
    LOG.info("========== start {} ==========", testName);
}
|
LogBench/LogBench-O_prefix_1point/activemq_JmsPoolTestSupport_setUp.java
|
logbench-o_data_73
|
/**
 * Overwrites the identity-related vCard fields with the authoritative values
 * from Crowd before delegating the actual update to the default provider.
 *
 * @see org.jivesoftware.openfire.vcard.DefaultVCardProvider#updateVCard(java.lang.String, org.dom4j.Element)
 */
@Override
public Element updateVCard(String username, Element vCard) throws NotFoundException {
    // make sure some properties have not been overridden
    Element nickNameNode = vCard.element("NICKNAME");
    Element displayNameNode = vCard.element("FN");
    Element nameNode = vCard.element("N");
    Element lastNameNode = nameNode.element("FAMILY");
    Element firstnameNode = nameNode.element("GIVEN");
    Element emailNode = vCard.element("EMAIL").element("USERID");
    // NOTE(review): the lookups above assume the incoming vCard always contains
    // NICKNAME/FN/N/EMAIL nodes; a missing node would cause an NPE — confirm the
    // caller guarantees this structure.
    CrowdUserProvider userProvider = (CrowdUserProvider) UserManager.getUserProvider();
    try {
        User user = userProvider.getCrowdUser(username);
        nickNameNode.setText(username);
        displayNameNode.setText(user.displayName);
        lastNameNode.setText(user.lastName);
        firstnameNode.setText(user.firstName);
        emailNode.setText(user.email);
    } catch (UserNotFoundException unfe) {
        // Leave the submitted values untouched when the user is unknown to Crowd.
        LOG.error("Unable to find user:" + username + " for updating its vcard", unfe);
    }
    LOG.debug("updatevcard: {}", vCard.asXML());
    return super.updateVCard(username, vCard);
}
|
LogBench/LogBench-O_prefix_1point/openfire_CrowdVCardProvider_updateVCard.java
|
logbench-o_data_74
|
/**
 * Sets up an asynchronous JMS queue consumer via JNDI. Each received text
 * message is logged and counts down the {@code qreceived} latch; all messages
 * are acknowledged individually.
 *
 * @param queueName name of the queue to listen on (used for the JNDI binding)
 * @param selectors JMS message-selector expression applied to the receiver
 */
public void listenQueue(final String queueName, final String selectors) {
    try {
        Properties props = new Properties();
        props.put("java.naming.factory.initial", "org.apache.activemq.jndi.ActiveMQInitialContextFactory");
        props.put("java.naming.provider.url", connectionUri);
        props.put("queue.queueName", queueName);
        javax.naming.Context ctx = new InitialContext(props);
        QueueConnectionFactory factory = (QueueConnectionFactory) ctx.lookup("ConnectionFactory");
        QueueConnection conn = factory.createQueueConnection();
        final Queue queue = (Queue) ctx.lookup("queueName");
        QueueSession session = conn.createQueueSession(false, Session.AUTO_ACKNOWLEDGE);
        QueueReceiver receiver = session.createReceiver(queue, selectors);
        // Route diagnostics through the logger instead of System.out.
        LOG.info("Message Selector: {}", receiver.getMessageSelector());
        receiver.setMessageListener(new MessageListener() {
            public void onMessage(Message message) {
                try {
                    if (message instanceof TextMessage) {
                        TextMessage txtMsg = (TextMessage) message;
                        String msg = txtMsg.getText();
                        LOG.info("Queue Message Received: {} - {}", queueName, msg);
                        qreceived.countDown();
                    }
                    message.acknowledge();
                } catch (Throwable e) {
                    // was e.printStackTrace(): capture failures in the test log instead
                    LOG.error("Error handling received message", e);
                }
            }
        });
        conn.start();
    } catch (Exception e) {
        // was e.printStackTrace(): capture setup failures in the test log instead
        LOG.error("Failed to set up queue listener for " + queueName, e);
    }
}
|
LogBench/LogBench-O_prefix_1point/activemq_AMQ2084Test_listenQueue.java
|
logbench-o_data_75
|
/**
 * Resolves the default exchange-rate provider chain. When the
 * {@code conversion.default-chain} config entry is present, only its entries
 * that match a registered provider name are used; otherwise all bootstrapped
 * {@link ExchangeRateProvider}s are returned in registration order.
 *
 * @return the ordered list of default provider names (possibly empty, never null)
 */
@Override
public List<String> getDefaultProviderChain() {
    List<String> provList = new ArrayList<>();
    String defaultChain = MonetaryConfig.getString("conversion.default-chain").orElse(null);
    if (defaultChain != null) {
        String[] items = defaultChain.split(",");
        for (String item : items) {
            String name = item.trim();
            if (getProviderNames().contains(name)) {
                // Bug fix: add the trimmed name that was actually validated,
                // not the raw (possibly whitespace-padded) config token.
                provList.add(name);
            } else {
                LOG.warning("Ignoring non existing default provider: " + item);
            }
        }
    } else {
        Bootstrap.getServices(ExchangeRateProvider.class).forEach(p -> provList.add(p.getContext().getProviderName()));
    }
    return provList;
}
|
LogBench/LogBench-O_prefix_1point/jsr354-ri_DefaultMonetaryConversionsSingletonSpi_getDefaultProviderChain.java
|
logbench-o_data_76
|
/**
 * Health-check implementation: answers every request with HTTP 200 and a small
 * JSON body indicating the server has started; on payload-building failure it
 * answers HTTP 500 with no body.
 *
 * @param synCtx the Synapse message context of the incoming request
 * @return always {@code true}; returning {@code false} would be treated as 404
 */
@Override
public boolean invoke(MessageContext synCtx) {
    org.apache.axis2.context.MessageContext axisCtx = ((Axis2MessageContext) synCtx).getAxis2MessageContext();
    axisCtx.setProperty(Constants.Configuration.MESSAGE_TYPE, "application/json");
    axisCtx.setProperty(Constants.Configuration.CONTENT_TYPE, "application/json");
    // A GET normally carries no body; clear the flag so the payload is written.
    axisCtx.removeProperty(NO_ENTITY_BODY);
    try {
        JsonUtil.getNewJsonPayload(axisCtx, "{\"server\" : \"started\"}", true, true);
        axisCtx.setProperty(HTTP_SC, 200);
    } catch (AxisFault axisFault) {
        log.error("Error occurred while generating server started health-check response", axisFault);
        // Fall back to a bare 500 without a response payload.
        axisCtx.setProperty(NO_ENTITY_BODY, true);
        axisCtx.setProperty(HTTP_SC, 500);
    }
    // return true always since return false = return 404
    return true;
}
|
LogBench/LogBench-O_prefix_1point/micro-integrator_LivenessResource_invoke.java
|
logbench-o_data_77
|
/**
 * Stops this listener: cancels any active poll-guard timer first so it cannot
 * fire during teardown, then delegates to the superclass shutdown. Clearing
 * the field makes repeated close() calls safe.
 */
@Override
public void close() {
    if (pollGuardTimer != null) {
        log.debug("Cancelling previous poll-guard timer while stopping SpringJmsConnector");
        pollGuardTimer.cancel();
        pollGuardTimer = null;
    }
    super.close();
}
|
LogBench/LogBench-O_prefix_1point/iaf_SlowListenerWithPollGuard_close.java
|
logbench-o_data_78
|
/**
 * Copies every property of the given message onto a fresh object message,
 * exercising the property conversion round trip used by the test.
 */
private void roundTripProperties(ActiveMQObjectMessage message) throws IOException, JMSException {
    ActiveMQObjectMessage duplicate = new ActiveMQObjectMessage();
    for (Map.Entry<String, Object> entry : message.getProperties().entrySet()) {
        String key = entry.getKey();
        Object value = entry.getValue();
        LOG.debug("{} -> {}", key, value.getClass());
        duplicate.setObjectProperty(key, value);
    }
}
|
LogBench/LogBench-O_prefix_1point/activemq_AMQ4893Test_roundTripProperties.java
|
logbench-o_data_79
|
/**
 * Launches the JavaFX application, blocking the main thread until it shuts down.
 *
 * @return 0 on clean shutdown, 1 if the UI terminated with an error
 */
private int runGuiApplication() {
    int exitCode;
    try {
        Application.launch(MainApp.class);
        LOG.info("UI shut down");
        exitCode = 0;
    } catch (Exception e) {
        LOG.error("Terminating due to error", e);
        exitCode = 1;
    }
    return exitCode;
}
|
LogBench/LogBench-O_prefix_1point/cryptomator_Cryptomator_runGuiApplication.java
|
logbench-o_data_80
|
/**
 * Sends a batch of records to Kafka and completes the returned future once
 * every individual send has finished; fails it with the first failure cause.
 *
 * @param kafkaProducer producer used for all sends in this batch
 * @param messages records to send
 * @return a future completed when the whole batch has been processed
 */
Future<Void> sendMessages(KafkaProducer<String, String> kafkaProducer, List<KafkaProducerRecord<String, String>> messages) {
    final Promise<Void> completion = Promise.promise();
    log.debug("Start processing {} messages for kafka", messages.size());
    // https://github.com/eclipse-vertx/vert.x/issues/2627
    @SuppressWarnings("rawtypes")
    List<Future> sendOperations = messages.stream()
            .map(record -> sendMessage(kafkaProducer, record))
            .collect(toList());
    CompositeFuture.all(sendOperations).<Void>mapEmpty().onComplete(outcome -> {
        if (outcome.succeeded()) {
            completion.complete();
            log.debug("Batch messages successfully sent to Kafka.");
        } else {
            completion.fail(outcome.cause());
        }
    });
    return completion.future();
}
|
LogBench/LogBench-O_prefix_1point/gateleen_KafkaMessageSender_sendMessages.java
|
logbench-o_data_81
|
/**
 * Builds a single {@code failover:} URI from the configured master/slave
 * service URIs and installs it on the parent discovery agent.
 *
 * @throws IllegalArgumentException if fewer than two service URIs are configured
 */
protected void configureServices() {
    if ((msServices == null) || (msServices.length < 2)) {
        LOG.error("masterSlave requires at least 2 URIs");
        msServices = new String[] {};
        throw new IllegalArgumentException("Expecting at least 2 arguments");
    }
    // StringBuilder instead of StringBuffer: local buffer needs no synchronization.
    StringBuilder buf = new StringBuilder("failover:(");
    for (int i = 0; i < msServices.length - 1; i++) {
        buf.append(msServices[i]).append(',');
    }
    buf.append(msServices[msServices.length - 1]);
    // randomize=false preserves the configured URI order (master first).
    buf.append(")?randomize=false&maxReconnectAttempts=0");
    super.setServices(new String[] { buf.toString() });
}
|
LogBench/LogBench-O_prefix_1point/activemq_MasterSlaveDiscoveryAgent_configureServices.java
|
logbench-o_data_82
|
/**
 * Builds the Chrome capabilities, first trying the standard {@link ChromeOptions}
 * and, if adding the default Docker arguments fails, retrying with a generic
 * options class that sets the same {@code goog:chromeOptions} capability.
 *
 * @return capabilities with the default Docker arguments applied when possible
 */
@Override
protected Capabilities getCapabilities() {
    Capabilities options = new ChromeOptions();
    try {
        addDefaultArgumentsForDocker(options);
    } catch (Exception e) {
        // Include the cause so the reason for the fallback is visible.
        log.error("Exception adding default arguments for Docker, retrying with custom class", e);
        options = new OptionsWithArguments("chrome", "goog:chromeOptions");
        try {
            addDefaultArgumentsForDocker(options);
        } catch (Exception e1) {
            // Bug fix: log the fallback failure (e1), not the original exception (e).
            log.error("Exception getting default capabilities", e1);
        }
    }
    return options;
}
|
LogBench/LogBench-O_prefix_1point/webdrivermanager_ChromeDriverManager_getCapabilities.java
|
logbench-o_data_83
|
/**
 * Runs the ComboBasic term-extraction algorithm over the given Solr core:
 * builds frequency and containment features, scores the candidate terms,
 * applies the configured cut-offs and enriches the surviving terms with
 * index information.
 *
 * @param core Solr core holding the indexed corpus
 * @param properties JATE configuration (field names, thresholds)
 * @return the ranked, cut-off list of extracted terms
 * @throws JATEException if feature building or ranking fails
 */
public List<JATETerm> extract(SolrCore core, JATEProperties properties) throws JATEException {
    SolrIndexSearcher searcher = core.getSearcher().get();
    try {
        // Total-term-frequency feature over all candidate terms in the index.
        this.freqFeatureBuilder = new FrequencyTermBasedFBMaster(searcher, properties, 0);
        this.freqFeature = (FrequencyTermBased) freqFeatureBuilder.build();
        Set<String> uniqueCandidateTerms = freqFeature.getMapTerm2TTF().keySet();
        // Index candidate terms by their word components for containment lookups.
        TermComponentIndexFBMaster termCompIndexFeatureBuilder = new TermComponentIndexFBMaster(properties, new ArrayList<>(uniqueCandidateTerms));
        TermComponentIndex termComponentIndexFeature = (TermComponentIndex) termCompIndexFeatureBuilder.build();
        // Containment features: cf = terms containing a candidate (parents),
        // crf = the reverse relation (children).
        ContainmentFBMaster cb = new ContainmentFBMaster(searcher, properties, termComponentIndexFeature, uniqueCandidateTerms);
        Containment cf = (Containment) cb.build();
        Containment crf = (Containment) new ContainmentReverseBuilder(searcher, properties, cf).build();
        // Register all features with the ComboBasic ranking algorithm.
        ComboBasic cbasic = new ComboBasic();
        cbasic.registerFeature(FrequencyTermBased.class.getName(), this.freqFeature);
        cbasic.registerFeature(ComboBasic.CONTAINMENT_PARENT + Containment.class.getName(), cf);
        cbasic.registerFeature(ComboBasic.CONTAINMENT_CHILD + Containment.class.getName(), crf);
        List<String> candidates = new ArrayList<>(this.freqFeature.getMapTerm2TTF().keySet());
        // Drop low-frequency candidates before scoring, then rank and cut off.
        filterByTTF(candidates);
        List<JATETerm> terms = cbasic.execute(candidates);
        terms = cutoff(terms);
        addAdditionalTermInfo(terms, searcher, properties.getSolrFieldNameJATENGramInfo(), properties.getSolrFieldNameID());
        LOG.info("Complete ComboBasic term extraction.");
        return terms;
    } finally {
        // Always release the searcher, even when ranking throws.
        try {
            searcher.close();
        } catch (IOException e) {
            LOG.error(e.toString());
        }
    }
}
|
LogBench/LogBench-O_prefix_1point/jate_AppComboBasic_extract.java
|
logbench-o_data_84
|
/**
 * Polling predicate: reports whether the proxy service has been un-deployed.
 *
 * @return {@code true} once the proxy service no longer exists; {@code false}
 *         while it still exists or when the existence check itself fails
 */
@Override
public Boolean call() {
    log.info("Waiting until the proxy, " + proxyName + " is un-deployed.");
    try {
        return !esbUtils.isProxyServiceExist(backEndUrl, sessionCookie, proxyName);
    } catch (RemoteException e) {
        // Best-effort polling: record the failure instead of swallowing it
        // silently, then report "not yet un-deployed" so the poller retries.
        log.warn("Error while checking existence of proxy service " + proxyName + ", will retry.", e);
        return false;
    }
}
|
LogBench/LogBench-O_prefix_1point/micro-integrator_AvailabilityPollingUtils_call.java
|
logbench-o_data_85
|
/**
 * Executes the Firebase Cloud Messaging request: builds an HTTP POST with the
 * configured content-type/accept headers (plus a {@code key=} Authorization
 * header when an API key is set), sends the JSON parameters and stores the
 * wrapped HTTP response.
 *
 * @return the stored response wrapper; on failure the previously stored
 *         response (possibly {@code null}) is returned after logging the error
 */
private FirebaseCloudMessagingResponse _exec() {
    try {
        // Prepare request parameters
        // clientRequest.setHttpMethod(getHttpMethod());
        Builder clientRequest = webTarget.request();
        applyCookies(clientRequest);
        clientRequest.header("Content-Type", getRequest().getContentType());
        clientRequest.accept(getRequest().getMediaType());
        if (StringUtils.isNotBlank(getRequest().getKey())) {
            // FCM legacy HTTP API expects "Authorization: key=<server key>".
            clientRequest.header("Authorization", "key=" + getRequest().getKey());
        }
        JSONObject requestBody = getRequest().getJSONParameters();
        // Call REST Service and handle response
        clientResponse = clientRequest.buildPost(Entity.json(requestBody.toString(4))).invoke();
        setResponse(new FirebaseCloudMessagingResponse(clientResponse));
    } catch (Exception e) {
        // Failures are logged and surfaced only via the stored response.
        LOG.error(e.getMessage(), e);
    } finally {
        // Always release the underlying connection.
        closeConnection();
    }
    return getResponse();
}
|
LogBench/LogBench-O_prefix_1point/oxauth_FirebaseCloudMessagingClient__exec.java
|
logbench-o_data_86
|
/**
 * Called when the underlying transport has resumed. Suspends the discovery
 * agent (when it supports suspension) before notifying the superclass — with
 * the transport healthy again, active discovery is no longer needed.
 */
@Override
public void transportResumed() {
    if (discoveryAgent instanceof Suspendable) {
        try {
            ((Suspendable) discoveryAgent).suspend();
        } catch (Exception e) {
            // Pass the exception so the failure cause and stack trace are logged.
            LOG.warn("Exception suspending discoverAgent: {}", discoveryAgent, e);
        }
    }
    super.transportResumed();
}
|
LogBench/LogBench-O_prefix_1point/activemq_DiscoveryTransport_transportResumed.java
|
logbench-o_data_87
|
/**
 * Removes the given cluster id from the Redis set indexing all clusters.
 *
 * @param clusterId id of the cluster to remove from the "all clusters" index
 */
@Override
public void removeFromClusterIdIndex(String clusterId) {
    log.info("Removing cluster Id {} from clusters list index", clusterId);
    // NOTE(review): the connection obtained here is not explicitly closed —
    // presumably connect() returns a shared/pooled connection; verify lifecycle.
    RedisConnection<String> conn = connect();
    // SREM returns the number of members actually removed (0 if absent).
    Long redisResponse = conn.sync().srem(index(clusterIdName, "all"), clusterId);
    log.info("Removed cluster Id {} with redis response {}", clusterId, redisResponse);
}
|
LogBench/LogBench-O_prefix_1point/sherlock_LettuceDruidClusterAccessor_removeFromClusterIdIndex.java
|
logbench-o_data_88
|
/**
 * Verifies mutual-SSL: configures the client key store and trust store, sends
 * a SOAP request over HTTPS to the test proxy and asserts that the service saw
 * the client's SSL certificate (response contains "certs-true").
 */
@SetEnvironment(executionEnvironments = { ExecutionEnvironment.STANDALONE })
@Test(groups = { "wso2.esb" }, description = "Send Soap Message to test client's SSL certificate")
public void testMutualSSLClientCertificate() throws Exception {
    String trustStoreName = "client-truststore.jks";
    String keyStoreName = "wso2carbon.jks";
    String keyStorePassword = "wso2carbon";
    String trustStorePassword = "wso2carbon";
    String soapMessage = "<soapenv:Envelope " + "xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\" " + "xmlns:echo=\"http://echo.services.core.carbon.wso2.org\">\n" + "   <soapenv:Header/>\n" + "   <soapenv:Body>\n" + "      <echo:echoString>\n" + "         <in>WSO2 Stock</in>\n" + "      </echo:echoString>\n" + "   </soapenv:Body>\n" + "</soapenv:Envelope>";
    // load key store file
    MutualSSLClient.loadKeyStore(CERT_CONFIG_LOCATION + keyStoreName, keyStorePassword);
    // load trust store file
    MutualSSLClient.loadTrustStore(CERT_CONFIG_LOCATION + trustStoreName, trustStorePassword);
    // create ssl socket factory instance with given key/trust stores
    // (must happen after both stores are loaded)
    MutualSSLClient.initMutualSSLConnection();
    Map<String, String> reqProps = new HashMap<>();
    reqProps.put("Content-type", "text/xml; charset=utf-8");
    reqProps.put("SOAPAction", "urn:echoString");
    String response;
    try {
        String PROXY_SERVICE = "ClientSSLCertTestProxy";
        response = MutualSSLClient.sendPostRequest(getProxyServiceURLHttps(PROXY_SERVICE), soapMessage, reqProps);
        log.info("Response received : " + response);
    } catch (IOException ioException) {
        // An I/O failure is treated as "no certificate seen" and fails the assert below.
        log.error("Error sending Post request to proxy service", ioException);
        response = "";
    }
    Assert.assertTrue(response.contains("certs-true"), "Client SSL certificate is not found!");
}
|
LogBench/LogBench-O_prefix_1point/micro-integrator_ClientSSLCertificateTestCase_testMutualSSLClientCertificate.java
|
logbench-o_data_89
|
// visible for testing
/**
 * Interprets launch arguments as file-system paths and enqueues a single
 * OPEN_FILE request for all arguments that parse as valid paths; invalid
 * arguments are skipped with a trace log entry.
 */
void handleLaunchArgs(FileSystem fs, List<String> args) {
    Collection<Path> pathsToOpen = args.stream()
            .map(arg -> toPathOrNull(fs, arg))
            .filter(Objects::nonNull)
            .toList();
    if (!pathsToOpen.isEmpty()) {
        tryToEnqueueFileOpenRequest(new AppLaunchEvent(AppLaunchEvent.EventType.OPEN_FILE, pathsToOpen));
    }
}

/** Parses {@code raw} as a path on {@code fs}, returning {@code null} when invalid. */
private Path toPathOrNull(FileSystem fs, String raw) {
    try {
        return fs.getPath(raw);
    } catch (InvalidPathException e) {
        LOG.trace("Argument not a valid path: {}", raw);
        return null;
    }
}
|
LogBench/LogBench-O_prefix_1point/cryptomator_FileOpenRequestHandler_handleLaunchArgs.java
|
logbench-o_data_90
|
/**
 * Invokes the {@code getCustomers} operation of the GSpread data service and
 * asserts that the response carries the expected Customers payload elements.
 */
@Test(groups = { "wso2.dss" }, invocationCount = 5, enabled = true)
public void selectOperation() throws AxisFault, XPathExpressionException {
    OMFactory fac = OMAbstractFactory.getOMFactory();
    OMNamespace omNs = fac.createOMNamespace("http://ws.wso2.org/dataservice/samples/gspread_sample_service", "ns1");
    OMElement payload = fac.createOMElement("getCustomers", omNs);
    OMElement result = new AxisServiceClient().sendReceive(payload, getServiceUrlHttp(serviceName), "getcustomers");
    log.info("Response : " + result);
    // NOTE(review): indexOf(...) == 1 requires "Customers" at offset 1, i.e. the
    // response starting exactly with "<Customers". If a namespace prefix were
    // ever emitted this would fail — confirm contains() was not intended.
    Assert.assertTrue((result.toString().indexOf("Customers") == 1), "Expected Result not found on response message");
    Assert.assertTrue(result.toString().contains("<customerNumber>"), "Expected Result not found on response message");
    Assert.assertTrue(result.toString().contains("</Customer>"), "Expected Result not found on response message");
    log.info("Service Invocation success");
}
|
LogBench/LogBench-O_prefix_1point/micro-integrator_GSpreadDataServiceTestCase_selectOperation.java
|
logbench-o_data_91
|
/**
 * Logs each new websocket connection; this default listener keeps no state.
 *
 * @param conn the newly established websocket connection
 */
@Override
public void onWSConnect(WebSocketConnection conn) {
    log.info("Connect: {}", conn);
}
|
LogBench/LogBench-O_prefix_1point/red5-server_DefaultWebSocketDataListener_onWSConnect.java
|
logbench-o_data_92
|
/**
 * Adds {@code methodName} to the Access-Control-Allow-Methods response header
 * when it names a known HTTP method; unknown methods are logged and dropped.
 *
 * @param methodName the method name taken from the CORS preflight request
 * @param response the response to attach the allow-methods header to
 */
private void addAllowedMethodIfValid(String methodName, HttpServletResponse response) {
    HttpMethod method = HttpMethod.valueOfOrNull(methodName);
    if (method == null) {
        log.warn("Unknown HTTP method specified in " + ACCESS_CONTROL_REQUEST_METHOD + " '" + methodName + "'. " + "It will be ignored and not attached to the " + ACCESS_CONTROL_ALLOW_METHODS + " response header");
        return;
    }
    response.addHeader(ACCESS_CONTROL_ALLOW_METHODS, methodName);
}
|
LogBench/LogBench-O_prefix_1point/neo4j_CorsFilter_addAllowedMethodIfValid.java
|
logbench-o_data_93
|
/**
 * Verifies link stealing: a second connection using the same client id on a
 * stealing-enabled URI must succeed (taking over the link), and the broker
 * must eventually record a remove-exception for the displaced connection.
 */
@Test(timeout = 60000)
public void testStealLinkSuccess() throws Exception {
    final String clientID = "ThisIsAClientId";
    ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory(stealableConnectionURI);
    Connection connection1 = factory.createConnection();
    connection1.setClientID(clientID);
    connection1.start();
    try {
        // Second connection with the same client id: with link stealing
        // enabled this must NOT raise InvalidClientIDException.
        Connection connection2 = factory.createConnection();
        connection2.setClientID(clientID);
        connection2.start();
    } catch (InvalidClientIDException e) {
        LOG.info("Should not have failed while stealing the link: {}", e.getMessage());
        LOG.info("Error details: ", e);
        fail("Shouldn't have failed when stealing the link");
    } catch (Throwable error) {
        LOG.info("Unexpected exception ", error);
        fail("Unexcpected exception causes test failure");
    }
    // Need to wait because removeConnection might not be called yet
    assertTrue(Wait.waitFor(new Condition() {
        @Override
        public boolean isSatisified() throws Exception {
            return removeException.get() != null;
        }
    }, 5000, 100));
    LOG.info("removeException: {}", removeException.get().getMessage());
}
|
LogBench/LogBench-O_prefix_1point/activemq_LinkStealingTest_testStealLinkSuccess.java
|
logbench-o_data_94
|
/**
 * Creates the DynamoDB table for {@link User} with minimal throughput if it
 * does not exist yet, then blocks until the table is active.
 */
void setupTable() throws InterruptedException {
    CreateTableRequest createRequest = mapper.generateCreateTableRequest(User.class)
            .withProvisionedThroughput(new ProvisionedThroughput(1L, 1L));
    String tableName = createRequest.getTableName();
    if (TableUtils.createTableIfNotExists(amazonDynamoDB, createRequest)) {
        log.info("Created table {}", tableName);
    }
    TableUtils.waitUntilActive(amazonDynamoDB, tableName);
    log.info("Table {} is active", tableName);
}
|
LogBench/LogBench-O_prefix_1point/testcontainers-spring-boot_EmbeddedDynamoDBBootstrapConfigurationTest_setupTable.java
|
logbench-o_data_95
|
/**
 * Parses the command-line arguments and, when key-store details are supplied,
 * exports the referenced private key in PEM format to the given file. Prints
 * usage help on parse failure, export failure, or missing arguments.
 */
public void parse() {
    CommandLineParser parser = new BasicParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
        if (cmd.hasOption(HELP)) {
            help();
        }
        if (cmd.hasOption(KEY_STORE_FILE) && cmd.hasOption(KEY_STORE_PASSWORD) && cmd.hasOption(KEY_ALIAS) && cmd.hasOption(EXPORT_FILE)) {
            String keyStore = cmd.getOptionValue(KEY_STORE_FILE);
            String keyStorePasswd = cmd.getOptionValue(KEY_STORE_PASSWORD);
            String keyAlias = cmd.getOptionValue(KEY_ALIAS);
            String exportFile = cmd.getOptionValue(EXPORT_FILE);
            try {
                SecurityProviderUtility.installBCProvider(true);
                OxAuthCryptoProvider cryptoProvider = new OxAuthCryptoProvider(keyStore, keyStorePasswd, "CN=oxAuth CA Certificates");
                PrivateKey privateKey = cryptoProvider.getPrivateKey(keyAlias);
                // Base64-encode the key and wrap at 64 columns per PEM convention.
                String base64EncodedKey = WordUtils.wrap(new String(Base64.encode(privateKey.getEncoded())), 64, "\n", true);
                StringBuilder sb = new StringBuilder();
                // RSA keys get the RSA-specific PEM delimiters; others the generic ones.
                SignatureAlgorithm signatureAlgorithm = cryptoProvider.getSignatureAlgorithm(keyAlias);
                if (AlgorithmFamily.RSA.equals(signatureAlgorithm.getFamily())) {
                    sb.append("-----BEGIN RSA PRIVATE KEY-----\n");
                    sb.append(base64EncodedKey);
                    sb.append("\n");
                    sb.append("-----END RSA PRIVATE KEY-----\n");
                } else {
                    sb.append("-----BEGIN PRIVATE KEY-----\n");
                    sb.append(base64EncodedKey);
                    sb.append("\n");
                    sb.append("-----END PRIVATE KEY-----\n");
                }
                FileUtils.writeStringToFile(new File(exportFile), sb.toString());
            } catch (Exception e) {
                log.error("Failed to export key", e);
                help();
            }
        } else {
            help();
        }
    } catch (ParseException e) {
        // Bug fix: this branch handles argument-parsing failures, so do not
        // report it with the misleading "Failed to export key" message.
        log.error("Failed to parse command line arguments", e);
        help();
    }
}
|
LogBench/LogBench-O_prefix_1point/oxauth_KeyExporter_parse.java
|
logbench-o_data_96
|
/**
 * Decrements the reference count for the given event, dropping it from the
 * tracking map once no references remain.
 *
 * @param event
 *            Event
 */
protected void release(BaseEvent event) {
    Info info = events.get(event);
    if (info == null) {
        // A release without a matching record indicates a double-release.
        log.warn("Release called on already released event.");
        return;
    }
    if (info.refcount.decrementAndGet() == 0) {
        events.remove(event);
    }
}
|
LogBench/LogBench-O_prefix_1point/red5-server_AllocationDebugger_release.java
|
logbench-o_data_97
|
/**
 * Asserts that the driver binary resolved by the given manager exists on disk.
 */
private void assertDriver(WebDriverManager wdm) {
    File downloadedDriver = new File(wdm.getDownloadedDriverPath());
    log.debug("Driver path {}", downloadedDriver);
    assertThat(downloadedDriver).exists();
}
|
LogBench/LogBench-O_prefix_1point/webdrivermanager_BrowserVersionTest_assertDriver.java
|
logbench-o_data_98
|
/**
 * Load the get script into redis and store the sha in the class member sha.
 *
 * @param redisCommand the redis command that should be executed, after the script is loaded.
 * @param executionCounter a counter to control recursion depth
 */
public void loadLuaScript(final RedisCommand redisCommand, int executionCounter) {
    final int executionCounterIncr = ++executionCounter;
    // check first if the lua script already exists in the store
    redisAPI.script(Arrays.asList("exists", this.sha), resultArray -> {
        if (resultArray.failed()) {
            log.error("Error checking whether lua script exists", resultArray.cause());
            return;
        }
        // SCRIPT EXISTS returns an array of 0/1 flags, one per queried sha.
        Long exists = resultArray.result().get(0).toLong();
        // if script already cached, run the command directly against the known sha
        if (Long.valueOf(1).equals(exists)) {
            log.debug("RedisStorage script already exists in redis cache: {}", luaScriptType);
            redisCommand.exec(executionCounterIncr);
        } else {
            log.info("load lua script for script type: {} logutput: {}", luaScriptType, logoutput);
            redisAPI.script(Arrays.asList("load", script), stringAsyncResult -> {
                // NOTE(review): stringAsyncResult.result() is read without checking
                // stringAsyncResult.failed() — a failed SCRIPT LOAD would NPE here; verify.
                String newSha = stringAsyncResult.result().toString();
                log.info("got sha from redis for lua script: {}: {}", luaScriptType, newSha);
                if (!newSha.equals(sha)) {
                    log.warn("the sha calculated by myself: {} doesn't match with the sha from redis: {}. " + "We use the sha from redis", sha, newSha);
                }
                // Trust the server-computed sha for subsequent EVALSHA calls.
                sha = newSha;
                log.info("execute redis command for script type: {} with new sha: {}", luaScriptType, sha);
                redisCommand.exec(executionCounterIncr);
            });
        }
    });
}
|
LogBench/LogBench-O_prefix_1point/gateleen_LuaScriptState_loadLuaScript.java
|
logbench-o_data_99
|
/**
 * Producer task: sends {@code messageCount} text messages to the destination,
 * counting down {@code sentOne} after each send, then closes its resources.
 */
@Override
public void run() {
    try {
        Session session = pc.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageProducer producer = session.createProducer(dest);
        for (int i = 0; i < messageCount; i++) {
            producer.send(session.createTextMessage(messageTextPrefix + i));
            sentOne.countDown();
        }
        producer.close();
        session.close();
        LOG.info("Done with send of: {}", messageCount);
    } catch (Exception e) {
        // was e.printStackTrace(): route failures through the logger so they
        // appear in the test output instead of being lost on stderr.
        LOG.error("Unexpected exception during send", e);
    }
}
|
LogBench/LogBench-O_prefix_1point/activemq_FailoverReadInactivityBlockWriteTimeoutClientTest_run.java
|
logbench-o_data_100
|
/**
 * Returns the host address to report with published statistics. The value is
 * resolved once from (in order) the flow-statistic host-name property, the
 * generic host-name property, the local network address, and finally the
 * literal {@code "localhost"}.
 *
 * NOTE(review): as in the original, only the non-configured path caches the
 * value in {@code PublisherUtil.hostAddress}; the configured path returns a
 * trimmed value without caching — confirm whether caching was intended there.
 *
 * @return the resolved host address, never {@code null}
 */
public static String getHostAddress() {
    if (hostAddress != null) {
        return hostAddress;
    }
    // Bug fix (readability/correctness hazard): use a distinctly named local
    // instead of shadowing the static cache field "hostAddress".
    String resolvedAddress = CarbonServerConfigurationService.getInstance().getFirstProperty(AnalyticsDataPublisherConstants.FLOW_STATISTIC_NODE_HOST_NAME);
    if (resolvedAddress == null) {
        resolvedAddress = CarbonServerConfigurationService.getInstance().getFirstProperty(HOST_NAME);
    }
    if (resolvedAddress == null) {
        InetAddress localAddress = getLocalAddress();
        if (localAddress != null) {
            resolvedAddress = localAddress.getHostAddress();
        } else {
            // Defaults to localhost
            resolvedAddress = "localhost";
            log.warn("Unable to get the ip address, hence using hostname as localhost");
        }
        PublisherUtil.hostAddress = resolvedAddress;
        return PublisherUtil.hostAddress;
    } else {
        return resolvedAddress.trim();
    }
}
|
LogBench/LogBench-O_prefix_1point/micro-integrator_PublisherUtil_getHostAddress.java
|
End of preview. Expand in Data Studio.
README.md exists but content is empty.
- Downloads last month
- 2