language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | playframework__playframework | documentation/manual/working/commonGuide/pekko/code/javaguide/pekko/typed/oo/ConfiguredActor.java | {
"start": 584,
"end": 683
} | class ____ extends AbstractBehavior<ConfiguredActor.GetConfig> {
public static final | ConfiguredActor |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/common/extension/activate/impl/ActivateOnClassExt1Impl.java | {
"start": 1108,
"end": 1244
} | class ____ implements ActivateExt1 {
@Override
public String echo(String msg) {
return null;
}
}
| ActivateOnClassExt1Impl |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/jndi/JndiContext.java | {
"start": 13778,
"end": 14224
} | class ____ extends LocalNamingEnumeration {
ListEnumeration() {
}
@Override
public Object next() throws NamingException {
return nextElement();
}
@Override
public Object nextElement() {
Map.Entry<String, Object> entry = getNext();
return new NameClassPair(entry.getKey(), entry.getValue().getClass().getName());
}
}
private | ListEnumeration |
java | apache__rocketmq | store/src/test/java/org/apache/rocketmq/store/DefaultMessageStoreTest.java | {
"start": 2990,
"end": 42596
} | class ____ {
private final String storeMessage = "Once, there was a chance for me!";
private final String messageTopic = "FooBar";
private int queueTotal = 100;
private AtomicInteger queueId = new AtomicInteger(0);
private SocketAddress bornHost;
private SocketAddress storeHost;
private byte[] messageBody;
private MessageStore messageStore;
@Before
public void init() throws Exception {
storeHost = new InetSocketAddress(InetAddress.getLocalHost(), 8123);
bornHost = new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 0);
messageStore = buildMessageStore();
boolean load = messageStore.load();
assertTrue(load);
messageStore.start();
}
@Test(expected = OverlappingFileLockException.class)
public void test_repeat_restart() throws Exception {
queueTotal = 1;
messageBody = storeMessage.getBytes();
MessageStoreConfig messageStoreConfig = new MessageStoreConfig();
messageStoreConfig.setMappedFileSizeCommitLog(1024 * 8);
messageStoreConfig.setMappedFileSizeConsumeQueue(1024 * 4);
messageStoreConfig.setMaxHashSlotNum(100);
messageStoreConfig.setMaxIndexNum(100 * 10);
messageStoreConfig.setStorePathRootDir(System.getProperty("java.io.tmpdir") + File.separator + "store");
messageStoreConfig.setHaListenPort(0);
MessageStore master = new DefaultMessageStore(messageStoreConfig, null, new MyMessageArrivingListener(), new BrokerConfig(), new ConcurrentHashMap<>());
boolean load = master.load();
assertTrue(load);
try {
master.start();
master.start();
} finally {
master.shutdown();
master.destroy();
}
}
@After
public void destroy() {
messageStore.shutdown();
messageStore.destroy();
MessageStoreConfig messageStoreConfig = new MessageStoreConfig();
File file = new File(messageStoreConfig.getStorePathRootDir());
UtilAll.deleteFile(file);
}
private MessageStore buildMessageStore() throws Exception {
return buildMessageStore(null);
}
private MessageStore buildMessageStore(String storePathRootDir) throws Exception {
MessageStoreConfig messageStoreConfig = new MessageStoreConfig();
messageStoreConfig.setMappedFileSizeCommitLog(1024 * 1024 * 10);
messageStoreConfig.setMappedFileSizeConsumeQueue(1024 * 1024 * 10);
messageStoreConfig.setMaxHashSlotNum(10000);
messageStoreConfig.setMaxIndexNum(100 * 100);
messageStoreConfig.setFlushDiskType(FlushDiskType.SYNC_FLUSH);
messageStoreConfig.setFlushIntervalConsumeQueue(1);
messageStoreConfig.setHaListenPort(0);
if (Strings.isNullOrEmpty(storePathRootDir)) {
UUID uuid = UUID.randomUUID();
storePathRootDir = System.getProperty("java.io.tmpdir") + File.separator + "store-" + uuid.toString();
}
messageStoreConfig.setStorePathRootDir(storePathRootDir);
return new DefaultMessageStore(messageStoreConfig,
new BrokerStatsManager("simpleTest", true),
new MyMessageArrivingListener(),
new BrokerConfig(), new ConcurrentHashMap<>());
}
@Test
public void testWriteAndRead() {
long ipv4HostMsgs = 10;
long ipv6HostMsgs = 10;
long totalMsgs = ipv4HostMsgs + ipv6HostMsgs;
queueTotal = 1;
messageBody = storeMessage.getBytes();
for (long i = 0; i < ipv4HostMsgs; i++) {
messageStore.putMessage(buildMessage());
}
for (long i = 0; i < ipv6HostMsgs; i++) {
messageStore.putMessage(buildIPv6HostMessage());
}
StoreTestUtil.waitCommitLogReput((DefaultMessageStore) messageStore);
for (long i = 0; i < totalMsgs; i++) {
GetMessageResult result = messageStore.getMessage("GROUP_A", "FooBar", 0, i, 1024 * 1024, null);
assertThat(result).isNotNull();
result.release();
}
verifyThatMasterIsFunctional(totalMsgs, messageStore);
}
@Test
public void testLookMessageByOffset_OffsetIsFirst() {
final int totalCount = 10;
int queueId = new Random().nextInt(10);
String topic = "FooBar";
int firstOffset = 0;
AppendMessageResult[] appendMessageResultArray = putMessages(totalCount, topic, queueId);
AppendMessageResult firstResult = appendMessageResultArray[0];
MessageExt messageExt = messageStore.lookMessageByOffset(firstResult.getWroteOffset());
MessageExt messageExt1 = getDefaultMessageStore().lookMessageByOffset(firstResult.getWroteOffset(), firstResult.getWroteBytes());
assertThat(new String(messageExt.getBody())).isEqualTo(buildMessageBodyByOffset(storeMessage, firstOffset));
assertThat(new String(messageExt1.getBody())).isEqualTo(buildMessageBodyByOffset(storeMessage, firstOffset));
}
@Test
public void testLookMessageByOffset_OffsetIsLast() {
final int totalCount = 10;
int queueId = new Random().nextInt(10);
String topic = "FooBar";
AppendMessageResult[] appendMessageResultArray = putMessages(totalCount, topic, queueId);
int lastIndex = totalCount - 1;
AppendMessageResult lastResult = appendMessageResultArray[lastIndex];
MessageExt messageExt = getDefaultMessageStore().lookMessageByOffset(lastResult.getWroteOffset(), lastResult.getWroteBytes());
assertThat(new String(messageExt.getBody())).isEqualTo(buildMessageBodyByOffset(storeMessage, lastIndex));
}
@Test
public void testLookMessageByOffset_OffsetIsOutOfBound() {
final int totalCount = 10;
int queueId = new Random().nextInt(10);
String topic = "FooBar";
AppendMessageResult[] appendMessageResultArray = putMessages(totalCount, topic, queueId);
long lastOffset = getMaxOffset(appendMessageResultArray);
MessageExt messageExt = getDefaultMessageStore().lookMessageByOffset(lastOffset);
assertThat(messageExt).isNull();
}
@Test
public void testGetOffsetInQueueByTime() {
final int totalCount = 10;
int queueId = 0;
String topic = "FooBar";
AppendMessageResult[] appendMessageResults = putMessages(totalCount, topic, queueId, true);
//Thread.sleep(10);
StoreTestUtil.waitCommitLogReput((DefaultMessageStore) messageStore);
ConsumeQueueInterface consumeQueue = getDefaultMessageStore().findConsumeQueue(topic, queueId);
for (AppendMessageResult appendMessageResult : appendMessageResults) {
long offset = messageStore.getOffsetInQueueByTime(topic, queueId, appendMessageResult.getStoreTimestamp());
CqUnit cqUnit = consumeQueue.get(offset);
assertThat(cqUnit.getPos()).isEqualTo(appendMessageResult.getWroteOffset());
assertThat(cqUnit.getSize()).isEqualTo(appendMessageResult.getWroteBytes());
}
}
@Test
public void testGetOffsetInQueueByTime_TimestampIsSkewing() {
final int totalCount = 10;
int queueId = 0;
String topic = "FooBar";
AppendMessageResult[] appendMessageResults = putMessages(totalCount, topic, queueId, true);
//Thread.sleep(10);
StoreTestUtil.waitCommitLogReput((DefaultMessageStore) messageStore);
int skewing = 2;
ConsumeQueueInterface consumeQueue = getDefaultMessageStore().findConsumeQueue(topic, queueId);
for (AppendMessageResult appendMessageResult : appendMessageResults) {
long offset = messageStore.getOffsetInQueueByTime(topic, queueId, appendMessageResult.getStoreTimestamp() - skewing);
CqUnit cqUnit = consumeQueue.get(offset);
assertThat(cqUnit.getPos()).isEqualTo(appendMessageResult.getWroteOffset());
assertThat(cqUnit.getSize()).isEqualTo(appendMessageResult.getWroteBytes());
}
}
@Test
public void testGetOffsetInQueueByTime_TimestampSkewingIsLarge() {
final int totalCount = 10;
int queueId = 0;
String topic = "FooBar";
AppendMessageResult[] appendMessageResults = putMessages(totalCount, topic, queueId, true);
//Thread.sleep(10);
StoreTestUtil.waitCommitLogReput((DefaultMessageStore) messageStore);
int skewing = 20000;
ConsumeQueueInterface consumeQueue = getDefaultMessageStore().findConsumeQueue(topic, queueId);
for (AppendMessageResult appendMessageResult : appendMessageResults) {
long offset = messageStore.getOffsetInQueueByTime(topic, queueId, appendMessageResult.getStoreTimestamp() - skewing);
CqUnit cqUnit = consumeQueue.get(offset);
assertThat(cqUnit.getPos()).isEqualTo(appendMessageResults[0].getWroteOffset());
assertThat(cqUnit.getSize()).isEqualTo(appendMessageResults[0].getWroteBytes());
}
}
@Test
public void testGetOffsetInQueueByTime_ConsumeQueueNotFound1() {
final int totalCount = 10;
int queueId = 0;
int wrongQueueId = 1;
String topic = "FooBar";
AppendMessageResult[] appendMessageResults = putMessages(totalCount, topic, queueId, false);
//Thread.sleep(10);
StoreTestUtil.waitCommitLogReput((DefaultMessageStore) messageStore);
long offset = messageStore.getOffsetInQueueByTime(topic, wrongQueueId, appendMessageResults[0].getStoreTimestamp());
assertThat(offset).isEqualTo(0);
}
@Test
public void testGetOffsetInQueueByTime_ConsumeQueueNotFound2() {
final int totalCount = 10;
int queueId = 0;
int wrongQueueId = 1;
String topic = "FooBar";
putMessages(totalCount, topic, queueId, false);
//Thread.sleep(10);
StoreTestUtil.waitCommitLogReput((DefaultMessageStore) messageStore);
long messageStoreTimeStamp = messageStore.getMessageStoreTimeStamp(topic, wrongQueueId, 0);
assertThat(messageStoreTimeStamp).isEqualTo(-1);
}
@Test
public void testGetOffsetInQueueByTime_ConsumeQueueOffsetNotExist() {
final int totalCount = 10;
int queueId = 0;
int wrongQueueId = 1;
String topic = "FooBar";
putMessages(totalCount, topic, queueId, true);
//Thread.sleep(10);
StoreTestUtil.waitCommitLogReput((DefaultMessageStore) messageStore);
long messageStoreTimeStamp = messageStore.getMessageStoreTimeStamp(topic, wrongQueueId, -1);
assertThat(messageStoreTimeStamp).isEqualTo(-1);
}
@Test
public void testGetMessageStoreTimeStamp() {
final int totalCount = 10;
int queueId = 0;
String topic = "FooBar";
AppendMessageResult[] appendMessageResults = putMessages(totalCount, topic, queueId, false);
//Thread.sleep(10);
StoreTestUtil.waitCommitLogReput((DefaultMessageStore) messageStore);
ConsumeQueueInterface consumeQueue = getDefaultMessageStore().findConsumeQueue(topic, queueId);
int minOffsetInQueue = (int) consumeQueue.getMinOffsetInQueue();
for (int i = minOffsetInQueue; i < consumeQueue.getMaxOffsetInQueue(); i++) {
long messageStoreTimeStamp = messageStore.getMessageStoreTimeStamp(topic, queueId, i);
assertThat(messageStoreTimeStamp).isEqualTo(appendMessageResults[i].getStoreTimestamp());
}
}
@Test
public void testGetStoreTime_ParamIsNull() {
long storeTime = getStoreTime(null);
assertThat(storeTime).isEqualTo(-1);
}
@Test
public void testGetStoreTime_EverythingIsOk() {
final int totalCount = 10;
int queueId = 0;
String topic = "FooBar";
AppendMessageResult[] appendMessageResults = putMessages(totalCount, topic, queueId, false);
//Thread.sleep(10);
StoreTestUtil.waitCommitLogReput((DefaultMessageStore) messageStore);
ConsumeQueueInterface consumeQueue = messageStore.getConsumeQueue(topic, queueId);
for (int i = 0; i < totalCount; i++) {
CqUnit cqUnit = consumeQueue.get(i);
long storeTime = getStoreTime(cqUnit);
assertThat(storeTime).isEqualTo(appendMessageResults[i].getStoreTimestamp());
}
}
@Test
public void testGetStoreTime_PhyOffsetIsLessThanCommitLogMinOffset() {
long phyOffset = -10;
int size = 138;
CqUnit cqUnit = new CqUnit(0, phyOffset, size, 0);
long storeTime = getStoreTime(cqUnit);
assertThat(storeTime).isEqualTo(-1);
}
@Test
public void testPutMessage_whenMessagePropertyIsTooLong() throws ConsumeQueueException {
String topicName = "messagePropertyIsTooLongTest";
MessageExtBrokerInner illegalMessage = buildSpecifyLengthPropertyMessage("123".getBytes(StandardCharsets.UTF_8), topicName, Short.MAX_VALUE + 1);
assertEquals(messageStore.putMessage(illegalMessage).getPutMessageStatus(), PutMessageStatus.PROPERTIES_SIZE_EXCEEDED);
assertEquals(0L, messageStore.getQueueStore().getMaxOffset(topicName, 0).longValue());
MessageExtBrokerInner normalMessage = buildSpecifyLengthPropertyMessage("123".getBytes(StandardCharsets.UTF_8), topicName, 100);
assertEquals(messageStore.putMessage(normalMessage).getPutMessageStatus(), PutMessageStatus.PUT_OK);
assertEquals(1L, messageStore.getQueueStore().getMaxOffset(topicName, 0).longValue());
}
private DefaultMessageStore getDefaultMessageStore() {
return (DefaultMessageStore) this.messageStore;
}
private AppendMessageResult[] putMessages(int totalCount, String topic, int queueId) {
return putMessages(totalCount, topic, queueId, false);
}
private AppendMessageResult[] putMessages(int totalCount, String topic, int queueId, boolean interval) {
AppendMessageResult[] appendMessageResultArray = new AppendMessageResult[totalCount];
for (int i = 0; i < totalCount; i++) {
String messageBody = buildMessageBodyByOffset(storeMessage, i);
MessageExtBrokerInner msgInner =
i < totalCount / 2 ? buildMessage(messageBody.getBytes(), topic) : buildIPv6HostMessage(messageBody.getBytes(), topic);
msgInner.setQueueId(queueId);
PutMessageResult result = messageStore.putMessage(msgInner);
appendMessageResultArray[i] = result.getAppendMessageResult();
assertThat(result.getPutMessageStatus()).isEqualTo(PutMessageStatus.PUT_OK);
if (interval) {
try {
Thread.sleep(10);
} catch (InterruptedException e) {
throw new RuntimeException("Thread sleep ERROR");
}
}
}
return appendMessageResultArray;
}
private long getMaxOffset(AppendMessageResult[] appendMessageResultArray) {
if (appendMessageResultArray == null) {
return 0;
}
AppendMessageResult last = appendMessageResultArray[appendMessageResultArray.length - 1];
return last.getWroteOffset() + last.getWroteBytes();
}
private String buildMessageBodyByOffset(String message, long i) {
return String.format("%s offset %d", message, i);
}
private long getStoreTime(CqUnit cqUnit) {
try {
Class abstractConsumeQueueStore = getDefaultMessageStore().getQueueStore().getClass().getSuperclass();
Method getStoreTime = abstractConsumeQueueStore.getDeclaredMethod("getStoreTime", CqUnit.class);
getStoreTime.setAccessible(true);
return (long) getStoreTime.invoke(getDefaultMessageStore().getQueueStore(), cqUnit);
} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
private MessageExtBrokerInner buildMessage(byte[] messageBody, String topic) {
MessageExtBrokerInner msg = new MessageExtBrokerInner();
msg.setTopic(topic);
msg.setTags("TAG1");
msg.setKeys("Hello");
msg.setBody(messageBody);
msg.setKeys(String.valueOf(System.currentTimeMillis()));
msg.setQueueId(Math.abs(queueId.getAndIncrement()) % queueTotal);
msg.setSysFlag(0);
msg.setBornTimestamp(System.currentTimeMillis());
msg.setStoreHost(storeHost);
msg.setBornHost(bornHost);
msg.setPropertiesString(MessageDecoder.messageProperties2String(msg.getProperties()));
return msg;
}
private MessageExtBrokerInner buildSpecifyLengthPropertyMessage(byte[] messageBody, String topic, int length) {
StringBuilder stringBuilder = new StringBuilder();
Random random = new Random();
for (int i = 0; i < length; i++) {
stringBuilder.append(random.nextInt(10));
}
MessageExtBrokerInner msg = new MessageExtBrokerInner();
msg.putUserProperty("test", stringBuilder.toString());
msg.setTopic(topic);
msg.setTags("TAG1");
msg.setKeys("Hello");
msg.setBody(messageBody);
msg.setQueueId(0);
msg.setBornTimestamp(System.currentTimeMillis());
msg.setStoreHost(storeHost);
msg.setBornHost(bornHost);
msg.setPropertiesString(MessageDecoder.messageProperties2String(msg.getProperties()));
return msg;
}
private MessageExtBrokerInner buildIPv6HostMessage(byte[] messageBody, String topic) {
MessageExtBrokerInner msg = new MessageExtBrokerInner();
msg.setTopic(topic);
msg.setTags("TAG1");
msg.setKeys("Hello");
msg.setBody(messageBody);
msg.setMsgId("24084004018081003FAA1DDE2B3F898A00002A9F0000000000000CA0");
msg.setKeys(String.valueOf(System.currentTimeMillis()));
msg.setQueueId(Math.abs(queueId.getAndIncrement()) % queueTotal);
msg.setSysFlag(0);
msg.setBornHostV6Flag();
msg.setStoreHostAddressV6Flag();
msg.setBornTimestamp(System.currentTimeMillis());
try {
msg.setBornHost(new InetSocketAddress(InetAddress.getByName("1050:0000:0000:0000:0005:0600:300c:326b"), 0));
} catch (UnknownHostException e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
try {
msg.setStoreHost(new InetSocketAddress(InetAddress.getByName("::1"), 0));
} catch (UnknownHostException e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
return msg;
}
private MessageExtBrokerInner buildMessage() {
return buildMessage(messageBody, messageTopic);
}
public MessageExtBatch buildMessageBatch(MessageBatch msgBatch) {
MessageExtBatch msgExtBatch = new MessageExtBatch();
msgExtBatch.setTopic(messageTopic);
msgExtBatch.setTags("TAG1");
msgExtBatch.setKeys("Hello");
msgExtBatch.setBody(msgBatch.getBody());
msgExtBatch.setKeys(String.valueOf(System.currentTimeMillis()));
msgExtBatch.setQueueId(Math.abs(queueId.getAndIncrement()) % queueTotal);
msgExtBatch.setSysFlag(0);
msgExtBatch.setBornTimestamp(System.currentTimeMillis());
msgExtBatch.setStoreHost(storeHost);
msgExtBatch.setBornHost(bornHost);
return msgExtBatch;
}
@Test
public void testGroupCommit() throws Exception {
long totalMsgs = 10;
queueTotal = 1;
messageBody = storeMessage.getBytes();
for (long i = 0; i < totalMsgs; i++) {
messageStore.putMessage(buildMessage());
}
for (long i = 0; i < totalMsgs; i++) {
GetMessageResult result = messageStore.getMessage("GROUP_A", "TOPIC_A", 0, i, 1024 * 1024, null);
assertThat(result).isNotNull();
result.release();
}
verifyThatMasterIsFunctional(totalMsgs, messageStore);
}
@Test
public void testMaxOffset() throws InterruptedException, ConsumeQueueException {
int firstBatchMessages = 3;
int queueId = 0;
messageBody = storeMessage.getBytes();
assertThat(messageStore.getMaxOffsetInQueue(messageTopic, queueId)).isEqualTo(0);
for (int i = 0; i < firstBatchMessages; i++) {
final MessageExtBrokerInner msg = buildMessage();
msg.setQueueId(queueId);
messageStore.putMessage(msg);
}
while (messageStore.dispatchBehindBytes() != 0) {
TimeUnit.MILLISECONDS.sleep(1);
}
assertThat(messageStore.getMaxOffsetInQueue(messageTopic, queueId)).isEqualTo(firstBatchMessages);
// Disable the dispatcher
messageStore.getDispatcherList().clear();
int secondBatchMessages = 2;
for (int i = 0; i < secondBatchMessages; i++) {
final MessageExtBrokerInner msg = buildMessage();
msg.setQueueId(queueId);
messageStore.putMessage(msg);
}
assertThat(messageStore.getMaxOffsetInQueue(messageTopic, queueId)).isEqualTo(firstBatchMessages);
assertThat(messageStore.getMaxOffsetInQueue(messageTopic, queueId, true)).isEqualTo(firstBatchMessages);
assertThat(messageStore.getMaxOffsetInQueue(messageTopic, queueId, false)).isEqualTo(firstBatchMessages + secondBatchMessages);
}
private MessageExtBrokerInner buildIPv6HostMessage() {
return buildIPv6HostMessage(messageBody, "FooBar");
}
private void verifyThatMasterIsFunctional(long totalMsgs, MessageStore master) {
for (long i = 0; i < totalMsgs; i++) {
master.putMessage(buildMessage());
}
StoreTestUtil.waitCommitLogReput((DefaultMessageStore) messageStore);
for (long i = 0; i < totalMsgs; i++) {
GetMessageResult result = master.getMessage("GROUP_A", "FooBar", 0, i, 1024 * 1024, null);
assertThat(result).isNotNull();
result.release();
}
}
@Test
public void testPullSize() throws Exception {
String topic = "pullSizeTopic";
for (int i = 0; i < 32; i++) {
MessageExtBrokerInner messageExtBrokerInner = buildMessage();
messageExtBrokerInner.setTopic(topic);
messageExtBrokerInner.setQueueId(0);
messageStore.putMessage(messageExtBrokerInner);
}
// wait for consume queue build
// the sleep time should be great than consume queue flush interval
//Thread.sleep(100);
StoreTestUtil.waitCommitLogReput((DefaultMessageStore) messageStore);
String group = "simple";
GetMessageResult getMessageResult32 = messageStore.getMessage(group, topic, 0, 0, 32, null);
assertThat(getMessageResult32.getMessageBufferList().size()).isEqualTo(32);
getMessageResult32.release();
GetMessageResult getMessageResult20 = messageStore.getMessage(group, topic, 0, 0, 20, null);
assertThat(getMessageResult20.getMessageBufferList().size()).isEqualTo(20);
getMessageResult20.release();
GetMessageResult getMessageResult45 = messageStore.getMessage(group, topic, 0, 0, 10, null);
assertThat(getMessageResult45.getMessageBufferList().size()).isEqualTo(10);
getMessageResult45.release();
}
@Test
public void testRecover() throws Exception {
String topic = "recoverTopic";
messageBody = storeMessage.getBytes();
for (int i = 0; i < 100; i++) {
MessageExtBrokerInner messageExtBrokerInner = buildMessage();
messageExtBrokerInner.setTopic(topic);
messageExtBrokerInner.setQueueId(0);
messageStore.putMessage(messageExtBrokerInner);
}
// Thread.sleep(100);//wait for build consumer queue
StoreTestUtil.waitCommitLogReput((DefaultMessageStore) messageStore);
long maxPhyOffset = messageStore.getMaxPhyOffset();
long maxCqOffset = messageStore.getMaxOffsetInQueue(topic, 0);
//1.just reboot
messageStore.shutdown();
String storeRootDir = ((DefaultMessageStore) messageStore).getMessageStoreConfig().getStorePathRootDir();
messageStore = buildMessageStore(storeRootDir);
boolean load = messageStore.load();
assertTrue(load);
messageStore.start();
assertTrue(maxPhyOffset == messageStore.getMaxPhyOffset());
assertTrue(maxCqOffset == messageStore.getMaxOffsetInQueue(topic, 0));
//2.damage commit-log and reboot normal
for (int i = 0; i < 100; i++) {
MessageExtBrokerInner messageExtBrokerInner = buildMessage();
messageExtBrokerInner.setTopic(topic);
messageExtBrokerInner.setQueueId(0);
messageStore.putMessage(messageExtBrokerInner);
}
//Thread.sleep(100);
StoreTestUtil.waitCommitLogReput((DefaultMessageStore) messageStore);
long secondLastPhyOffset = messageStore.getMaxPhyOffset();
long secondLastCqOffset = messageStore.getMaxOffsetInQueue(topic, 0);
MessageExtBrokerInner messageExtBrokerInner = buildMessage();
messageExtBrokerInner.setTopic(topic);
messageExtBrokerInner.setQueueId(0);
messageStore.putMessage(messageExtBrokerInner);
messageStore.shutdown();
//damage last message
damageCommitLog((DefaultMessageStore) messageStore, secondLastPhyOffset);
//reboot
messageStore = buildMessageStore(storeRootDir);
load = messageStore.load();
assertTrue(load);
messageStore.start();
assertTrue(secondLastPhyOffset == messageStore.getMaxPhyOffset());
assertTrue(secondLastCqOffset == messageStore.getMaxOffsetInQueue(topic, 0));
//3.damage commitlog and reboot abnormal
for (int i = 0; i < 100; i++) {
messageExtBrokerInner = buildMessage();
messageExtBrokerInner.setTopic(topic);
messageExtBrokerInner.setQueueId(0);
messageStore.putMessage(messageExtBrokerInner);
}
//Thread.sleep(100);
StoreTestUtil.waitCommitLogReput((DefaultMessageStore) messageStore);
secondLastPhyOffset = messageStore.getMaxPhyOffset();
secondLastCqOffset = messageStore.getMaxOffsetInQueue(topic, 0);
messageExtBrokerInner = buildMessage();
messageExtBrokerInner.setTopic(topic);
messageExtBrokerInner.setQueueId(0);
messageStore.putMessage(messageExtBrokerInner);
messageStore.shutdown();
//damage last message
damageCommitLog((DefaultMessageStore) messageStore, secondLastPhyOffset);
//add abort file
String fileName = StorePathConfigHelper.getAbortFile(((DefaultMessageStore) messageStore).getMessageStoreConfig().getStorePathRootDir());
File file = new File(fileName);
UtilAll.ensureDirOK(file.getParent());
file.createNewFile();
messageStore = buildMessageStore(storeRootDir);
load = messageStore.load();
assertTrue(load);
messageStore.start();
assertTrue(secondLastPhyOffset == messageStore.getMaxPhyOffset());
assertTrue(secondLastCqOffset == messageStore.getMaxOffsetInQueue(topic, 0));
//message write again
for (int i = 0; i < 100; i++) {
messageExtBrokerInner = buildMessage();
messageExtBrokerInner.setTopic(topic);
messageExtBrokerInner.setQueueId(0);
messageStore.putMessage(messageExtBrokerInner);
}
}
@Test
public void testStorePathOK() {
if (messageStore instanceof DefaultMessageStore) {
assertTrue(fileExists(((DefaultMessageStore) messageStore).getStorePathPhysic()));
assertTrue(fileExists(((DefaultMessageStore) messageStore).getStorePathLogic()));
}
}
private boolean fileExists(String path) {
if (path != null) {
File f = new File(path);
return f.exists();
}
return false;
}
private void damageCommitLog(DefaultMessageStore store, long offset) throws Exception {
assertThat(store).isNotNull();
MessageStoreConfig messageStoreConfig = store.getMessageStoreConfig();
File file = new File(messageStoreConfig.getStorePathCommitLog() + File.separator + "00000000000000000000");
try (RandomAccessFile raf = new RandomAccessFile(file, "rw");
FileChannel fileChannel = raf.getChannel()) {
MappedByteBuffer mappedByteBuffer = fileChannel.map(FileChannel.MapMode.READ_WRITE, 0, 1024 * 1024 * 10);
int bodyLen = mappedByteBuffer.getInt((int) offset + 84);
int topicLenIndex = (int) offset + 84 + bodyLen + 4;
mappedByteBuffer.position(topicLenIndex);
mappedByteBuffer.putInt(0);
mappedByteBuffer.putInt(0);
mappedByteBuffer.putInt(0);
mappedByteBuffer.putInt(0);
mappedByteBuffer.force();
fileChannel.force(true);
}
}
@Test
public void testPutMsgExceedsMaxLength() {
messageBody = new byte[4 * 1024 * 1024 + 1];
MessageExtBrokerInner msg = buildMessage();
PutMessageResult result = messageStore.putMessage(msg);
assertThat(result.getPutMessageStatus()).isEqualTo(PutMessageStatus.MESSAGE_ILLEGAL);
}
@Test
public void testPutMsgBatchExceedsMaxLength() {
messageBody = new byte[4 * 1024 * 1024 + 1];
MessageExtBrokerInner msg1 = buildMessage();
MessageExtBrokerInner msg2 = buildMessage();
MessageExtBrokerInner msg3 = buildMessage();
MessageBatch msgBatch = MessageBatch.generateFromList(Arrays.asList(msg1, msg2, msg3));
msgBatch.setBody(msgBatch.encode());
MessageExtBatch msgExtBatch = buildMessageBatch(msgBatch);
try {
PutMessageResult result = this.messageStore.putMessages(msgExtBatch);
} catch (Exception e) {
assertThat(e.getMessage()).contains("message body size exceeded");
}
}
@Test
public void testPutMsgWhenReplicasNotEnough() {
MessageStoreConfig messageStoreConfig = ((DefaultMessageStore) this.messageStore).getMessageStoreConfig();
messageStoreConfig.setBrokerRole(BrokerRole.SYNC_MASTER);
messageStoreConfig.setTotalReplicas(2);
messageStoreConfig.setInSyncReplicas(2);
messageStoreConfig.setEnableAutoInSyncReplicas(false);
((DefaultMessageStore) this.messageStore).getBrokerConfig().setEnableSlaveActingMaster(true);
this.messageStore.setAliveReplicaNumInGroup(1);
MessageExtBrokerInner msg = buildMessage();
PutMessageResult result = this.messageStore.putMessage(msg);
assertThat(result.getPutMessageStatus()).isEqualTo(PutMessageStatus.IN_SYNC_REPLICAS_NOT_ENOUGH);
((DefaultMessageStore) this.messageStore).getBrokerConfig().setEnableSlaveActingMaster(false);
}
@Test
public void testPutMsgWhenAdaptiveDegradation() {
MessageStoreConfig messageStoreConfig = ((DefaultMessageStore) this.messageStore).getMessageStoreConfig();
messageStoreConfig.setBrokerRole(BrokerRole.SYNC_MASTER);
messageStoreConfig.setTotalReplicas(2);
messageStoreConfig.setInSyncReplicas(2);
messageStoreConfig.setEnableAutoInSyncReplicas(true);
((DefaultMessageStore) this.messageStore).getBrokerConfig().setEnableSlaveActingMaster(true);
this.messageStore.setAliveReplicaNumInGroup(1);
MessageExtBrokerInner msg = buildMessage();
PutMessageResult result = this.messageStore.putMessage(msg);
assertThat(result.getPutMessageStatus()).isEqualTo(PutMessageStatus.PUT_OK);
((DefaultMessageStore) this.messageStore).getBrokerConfig().setEnableSlaveActingMaster(false);
messageStoreConfig.setEnableAutoInSyncReplicas(false);
}
@Test
public void testGetBulkCommitLogData() {
DefaultMessageStore defaultMessageStore = (DefaultMessageStore) messageStore;
messageBody = new byte[2 * 1024 * 1024];
for (int i = 0; i < 10; i++) {
MessageExtBrokerInner msg1 = buildMessage();
messageStore.putMessage(msg1);
}
System.out.printf("%d%n", defaultMessageStore.getMaxPhyOffset());
List<SelectMappedBufferResult> bufferResultList = defaultMessageStore.getBulkCommitLogData(0, (int) defaultMessageStore.getMaxPhyOffset());
List<MessageExt> msgList = new ArrayList<>();
for (SelectMappedBufferResult bufferResult : bufferResultList) {
msgList.addAll(MessageDecoder.decodesBatch(bufferResult.getByteBuffer(), true, false, false));
bufferResult.release();
}
assertThat(msgList.size()).isEqualTo(10);
}
@Test
public void testPutLongMessage() throws Exception {
MessageExtBrokerInner messageExtBrokerInner = buildMessage();
CommitLog commitLog = ((DefaultMessageStore) messageStore).getCommitLog();
MessageStoreConfig messageStoreConfig = ((DefaultMessageStore) messageStore).getMessageStoreConfig();
MessageExtEncoder.PutMessageThreadLocal putMessageThreadLocal = commitLog.getPutMessageThreadLocal().get();
//body size, topic size, properties size exactly equal to max size
messageExtBrokerInner.setBody(new byte[messageStoreConfig.getMaxMessageSize()]);
messageExtBrokerInner.setTopic(new String(new byte[127]));
messageExtBrokerInner.setPropertiesString(new String(new byte[Short.MAX_VALUE]));
PutMessageResult encodeResult1 = putMessageThreadLocal.getEncoder().encode(messageExtBrokerInner);
assertTrue(encodeResult1 == null);
//body size exactly more than max message body size
messageExtBrokerInner.setBody(new byte[messageStoreConfig.getMaxMessageSize() + 1]);
PutMessageResult encodeResult2 = putMessageThreadLocal.getEncoder().encode(messageExtBrokerInner);
assertTrue(encodeResult2.getPutMessageStatus() == PutMessageStatus.MESSAGE_ILLEGAL);
//body size exactly equal to max message size
messageExtBrokerInner.setBody(new byte[messageStoreConfig.getMaxMessageSize() + 64 * 1024]);
PutMessageResult encodeResult3 = putMessageThreadLocal.getEncoder().encode(messageExtBrokerInner);
assertTrue(encodeResult3.getPutMessageStatus() == PutMessageStatus.MESSAGE_ILLEGAL);
//message properties length more than properties maxSize
messageExtBrokerInner.setBody(new byte[messageStoreConfig.getMaxMessageSize()]);
messageExtBrokerInner.setPropertiesString(new String(new byte[Short.MAX_VALUE + 1]));
PutMessageResult encodeResult4 = putMessageThreadLocal.getEncoder().encode(messageExtBrokerInner);
assertTrue(encodeResult4.getPutMessageStatus() == PutMessageStatus.PROPERTIES_SIZE_EXCEEDED);
//message length more than buffer length capacity
messageExtBrokerInner.setBody(new byte[messageStoreConfig.getMaxMessageSize()]);
messageExtBrokerInner.setTopic(new String(new byte[Short.MAX_VALUE]));
messageExtBrokerInner.setPropertiesString(new String(new byte[Short.MAX_VALUE]));
PutMessageResult encodeResult5 = putMessageThreadLocal.getEncoder().encode(messageExtBrokerInner);
assertTrue(encodeResult5.getPutMessageStatus() == PutMessageStatus.MESSAGE_ILLEGAL);
}
@Test
public void testDynamicMaxMessageSize() {
MessageExtBrokerInner messageExtBrokerInner = buildMessage();
MessageStoreConfig messageStoreConfig = ((DefaultMessageStore) messageStore).getMessageStoreConfig();
int originMaxMessageSize = messageStoreConfig.getMaxMessageSize();
messageExtBrokerInner.setBody(new byte[originMaxMessageSize + 10]);
PutMessageResult putMessageResult = messageStore.putMessage(messageExtBrokerInner);
assertTrue(putMessageResult.getPutMessageStatus() == PutMessageStatus.MESSAGE_ILLEGAL);
int newMaxMessageSize = originMaxMessageSize + 10;
messageStoreConfig.setMaxMessageSize(newMaxMessageSize);
putMessageResult = messageStore.putMessage(messageExtBrokerInner);
assertTrue(putMessageResult.getPutMessageStatus() == PutMessageStatus.PUT_OK);
messageStoreConfig.setMaxMessageSize(10);
putMessageResult = messageStore.putMessage(messageExtBrokerInner);
assertTrue(putMessageResult.getPutMessageStatus() == PutMessageStatus.MESSAGE_ILLEGAL);
messageStoreConfig.setMaxMessageSize(originMaxMessageSize);
}
@Test
public void testDeleteTopics() {
MessageStoreConfig messageStoreConfig = messageStore.getMessageStoreConfig();
ConcurrentMap<String, ConcurrentMap<Integer, ConsumeQueueInterface>> consumeQueueTable =
((DefaultMessageStore) messageStore).getConsumeQueueTable();
for (int i = 0; i < 10; i++) {
ConcurrentMap<Integer, ConsumeQueueInterface> cqTable = new ConcurrentHashMap<>();
String topicName = "topic-" + i;
for (int j = 0; j < 4; j++) {
ConsumeQueue consumeQueue = new ConsumeQueue(topicName, j, messageStoreConfig.getStorePathRootDir(),
messageStoreConfig.getMappedFileSizeConsumeQueue(), (DefaultMessageStore) messageStore);
cqTable.put(j, consumeQueue);
}
consumeQueueTable.put(topicName, cqTable);
}
Assert.assertEquals(consumeQueueTable.size(), 10);
HashSet<String> resultSet = Sets.newHashSet("topic-3", "topic-5");
messageStore.deleteTopics(Sets.difference(consumeQueueTable.keySet(), resultSet));
Assert.assertEquals(consumeQueueTable.size(), 2);
Assert.assertEquals(resultSet, consumeQueueTable.keySet());
}
@Test
public void testCleanUnusedTopic() {
MessageStoreConfig messageStoreConfig = messageStore.getMessageStoreConfig();
ConcurrentMap<String, ConcurrentMap<Integer, ConsumeQueueInterface>> consumeQueueTable =
((DefaultMessageStore) messageStore).getConsumeQueueTable();
for (int i = 0; i < 10; i++) {
ConcurrentMap<Integer, ConsumeQueueInterface> cqTable = new ConcurrentHashMap<>();
String topicName = "topic-" + i;
for (int j = 0; j < 4; j++) {
ConsumeQueue consumeQueue = new ConsumeQueue(topicName, j, messageStoreConfig.getStorePathRootDir(),
messageStoreConfig.getMappedFileSizeConsumeQueue(), (DefaultMessageStore) messageStore);
cqTable.put(j, consumeQueue);
}
consumeQueueTable.put(topicName, cqTable);
}
Assert.assertEquals(consumeQueueTable.size(), 10);
HashSet<String> resultSet = Sets.newHashSet("topic-3", "topic-5");
messageStore.cleanUnusedTopic(resultSet);
Assert.assertEquals(consumeQueueTable.size(), 2);
Assert.assertEquals(resultSet, consumeQueueTable.keySet());
}
@Test
public void testChangeStoreConfig() {
Properties properties = new Properties();
properties.setProperty("enableBatchPush", "true");
MessageStoreConfig messageStoreConfig = new MessageStoreConfig();
MixAll.properties2Object(properties, messageStoreConfig);
assertThat(messageStoreConfig.isEnableBatchPush()).isTrue();
}
private | DefaultMessageStoreTest |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/fs/contract/router/TestRouterHDFSContractDelegationToken.java | {
"start": 1780,
"end": 4520
} | class ____
extends AbstractFSContractTestBase {
@BeforeAll
public static void createCluster() throws Exception {
RouterHDFSContract.createCluster(false, 1, true);
}
@AfterAll
public static void teardownCluster() throws IOException {
RouterHDFSContract.destroyCluster();
}
@Override
protected AbstractFSContract createContract(Configuration conf) {
return new RouterHDFSContract(conf);
}
@Test
public void testRouterDelegationToken() throws Exception {
RouterMBean bean = FederationTestUtils.getBean(
ROUTER_BEAN, RouterMBean.class);
// Initially there is no token in memory
assertEquals(0, bean.getCurrentTokensCount());
// Generate delegation token
Token<DelegationTokenIdentifier> token =
(Token<DelegationTokenIdentifier>) getFileSystem()
.getDelegationToken("router");
assertNotNull(token);
// Verify properties of the token
assertEquals("HDFS_DELEGATION_TOKEN", token.getKind().toString());
DelegationTokenIdentifier identifier = token.decodeIdentifier();
assertNotNull(identifier);
String owner = identifier.getOwner().toString();
// Windows will not reverse name lookup "127.0.0.1" to "localhost".
String host = Path.WINDOWS ? "127.0.0.1" : "localhost";
String expectedOwner = "router/"+ host + "@EXAMPLE.COM";
assertEquals(expectedOwner, owner);
assertEquals("router", identifier.getRenewer().toString());
int masterKeyId = identifier.getMasterKeyId();
assertTrue(masterKeyId > 0);
int sequenceNumber = identifier.getSequenceNumber();
assertTrue(sequenceNumber > 0);
long existingMaxTime = token.decodeIdentifier().getMaxDate();
assertTrue(identifier.getMaxDate() >= identifier.getIssueDate());
// one token is expected after the generation
assertEquals(1, bean.getCurrentTokensCount());
// Renew delegation token
long expiryTime = token.renew(initSecurity());
assertNotNull(token);
assertEquals(existingMaxTime, token.decodeIdentifier().getMaxDate());
// Expiry time after renewal should never exceed max time of the token.
assertTrue(expiryTime <= existingMaxTime);
// Renewal should retain old master key id and sequence number
identifier = token.decodeIdentifier();
assertEquals(identifier.getMasterKeyId(), masterKeyId);
assertEquals(identifier.getSequenceNumber(), sequenceNumber);
assertEquals(1, bean.getCurrentTokensCount());
// Cancel delegation token
token.cancel(initSecurity());
assertEquals(0, bean.getCurrentTokensCount());
// Renew a cancelled token
assertThrows(SecretManager.InvalidToken.class, () -> {
token.renew(initSecurity());
});
}
}
| TestRouterHDFSContractDelegationToken |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityExtension.java | {
"start": 1969,
"end": 7120
} | interface ____ {
/** Global settings for the current node */
Settings settings();
/** Provides access to key filesystem paths */
Environment environment();
/** An internal client for retrieving information/data from this cluster */
Client client();
/** The Elasticsearch thread pools */
ThreadPool threadPool();
/** Provides the ability to monitor files for changes */
ResourceWatcherService resourceWatcherService();
/** Access to listen to changes in cluster state and settings */
ClusterService clusterService();
/** Provides support for mapping users' roles from groups and metadata */
UserRoleMapper roleMapper();
/** Provides the ability to access project-scoped data from the global scope **/
ProjectResolver projectResolver();
/** Provides the ability to access the APM tracer and meter registry **/
TelemetryProvider telemetryProvider();
}
/**
* Returns authentication realm implementations added by this extension.
*
* The key of the returned {@link Map} is the type name of the realm, and the value
* is a {@link Realm.Factory} which will construct
* that realm for use in authentication when that realm type is configured.
*
* @param components Access to components that may be used to build realms
*/
default Map<String, Realm.Factory> getRealms(SecurityComponents components) {
return Collections.emptyMap();
}
/**
* Returns a handler for authentication failures, or null to use the default handler.
*
* Only one installed extension may have an authentication failure handler. If more than
* one extension returns a non-null handler, an error is raised.
*
* @param components Access to components that may be used to build the handler
*/
default AuthenticationFailureHandler getAuthenticationFailureHandler(SecurityComponents components) {
return null;
}
/**
* Returns an ordered list of role providers that are used to resolve role names
* to {@link RoleDescriptor} objects. Each provider is invoked in order to
* resolve any role names not resolved by the reserved or native roles stores.
*
* Each role provider is represented as a {@link BiConsumer} which takes a set
* of roles to resolve as the first parameter to consume and an {@link ActionListener}
* as the second parameter to consume. The implementation of the role provider
* should be asynchronous if the computation is lengthy or any disk and/or network
* I/O is involved. The implementation is responsible for resolving whatever roles
* it can into a set of {@link RoleDescriptor} instances. If successful, the
* implementation must wrap the set of {@link RoleDescriptor} instances in a
* {@link RoleRetrievalResult} using {@link RoleRetrievalResult#success(Set)} and then invoke
* {@link ActionListener#onResponse(Object)}. If a failure was encountered, the
* implementation should wrap the failure in a {@link RoleRetrievalResult} using
* {@link RoleRetrievalResult#failure(Exception)} and then invoke
* {@link ActionListener#onResponse(Object)} unless the failure needs to terminate the request,
* in which case the implementation should invoke {@link ActionListener#onFailure(Exception)}.
*
* By default, an empty list is returned.
*
* @param components Access to components that may be used to build roles
*/
default List<BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>>> getRolesProviders(SecurityComponents components) {
return Collections.emptyList();
}
/**
* Returns a {@link NodeLocalServiceAccountTokenStore} used to authenticate service account tokens.
* If {@code null} is returned, the default service account token stores will be used.
*
* Providing a custom {@link NodeLocalServiceAccountTokenStore} here overrides the default implementation.
*
* @param components Access to components that can be used to authenticate service account tokens
*/
default ServiceAccountTokenStore getServiceAccountTokenStore(SecurityComponents components) {
return null;
}
default List<CustomAuthenticator> getCustomAuthenticators(SecurityComponents components) {
return null;
}
/**
* Returns a authorization engine for authorizing requests, or null to use the default authorization mechanism.
*
* Only one installed extension may have an authorization engine. If more than
* one extension returns a non-null authorization engine, an error is raised.
*
* @param settings The configured settings for the node
*/
default AuthorizationEngine getAuthorizationEngine(Settings settings) {
return null;
}
default String extensionName() {
return getClass().getName();
}
default AuthorizedProjectsResolver getAuthorizedProjectsResolver(SecurityComponents components) {
return null;
}
}
| SecurityComponents |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/jobgraph/topology/DefaultLogicalResultTest.java | {
"start": 2017,
"end": 4385
} | class ____ extends TestLogger {
private IntermediateDataSet result;
private DefaultLogicalResult logicalResult;
private Map<JobVertexID, JobVertex> vertexMap;
private JobVertex producerVertex;
@Before
public void setUp() throws Exception {
buildVerticesAndResults();
logicalResult =
new DefaultLogicalResult(
result, vid -> new DefaultLogicalVertex(vertexMap.get(vid), rid -> null));
}
@Test
public void testConstructor() {
assertResultInfoEquals(result, logicalResult);
}
@Test
public void testGetProducer() {
assertVertexInfoEquals(producerVertex, logicalResult.getProducer());
}
private void buildVerticesAndResults() {
vertexMap = new HashMap<>();
final int parallelism = 3;
producerVertex = createNoOpVertex(parallelism);
vertexMap.put(producerVertex.getID(), producerVertex);
final JobVertex consumerVertex = createNoOpVertex(parallelism);
final JobEdge edge =
connectNewDataSetAsInput(consumerVertex, producerVertex, ALL_TO_ALL, PIPELINED);
vertexMap.put(consumerVertex.getID(), consumerVertex);
result = edge.getSource();
}
static void assertResultsEquals(
final Iterable<IntermediateDataSet> results,
final Iterable<DefaultLogicalResult> logicalResults) {
final Map<IntermediateDataSetID, DefaultLogicalResult> logicalResultMap =
IterableUtils.toStream(logicalResults)
.collect(
Collectors.toMap(DefaultLogicalResult::getId, Function.identity()));
for (IntermediateDataSet result : results) {
final DefaultLogicalResult logicalResult = logicalResultMap.remove(result.getId());
assertNotNull(logicalResult);
assertResultInfoEquals(result, logicalResult);
}
// this ensures the two collections exactly matches
assertEquals(0, logicalResultMap.size());
}
static void assertResultInfoEquals(
final IntermediateDataSet result, final DefaultLogicalResult logicalResult) {
assertEquals(result.getId(), logicalResult.getId());
assertEquals(result.getResultType(), logicalResult.getResultType());
}
}
| DefaultLogicalResultTest |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/writeClassName/WriteClassNameTest_Set.java | {
"start": 337,
"end": 1198
} | class ____ extends TestCase {
protected void setUp() throws Exception {
ParserConfig.global.addAccept("com.alibaba.json.bvt.writeClassName.WriteClassNameTest_Set");
}
public void test_list() throws Exception {
A a = new A();
Set<B> set = new LinkedHashSet<B>();
set.add(new B());
set.add(new B1());
a.setList(set);
String text = JSON.toJSONString(a, SerializerFeature.WriteClassName);
System.out.println(text);
Assert.assertEquals("{\"@type\":\"com.alibaba.json.bvt.writeClassName.WriteClassNameTest_Set$A\",\"list\":Set[{},{\"@type\":\"com.alibaba.json.bvt.writeClassName.WriteClassNameTest_Set$B1\"}]}",
text);
A a1 = (A) JSON.parse(text);
Assert.assertEquals(2, a1.getList().size());
}
public static | WriteClassNameTest_Set |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/foreignkeys/UserSetting.java | {
"start": 525,
"end": 731
} | class ____ {
@Id
@GeneratedValue
@Column(name = "USER_SETTING_ID")
public long id;
@OneToOne
@JoinColumn(name = "USER_ID", foreignKey = @ForeignKey(name = "FK_TO_USER"))
private User user;
}
| UserSetting |
java | google__guava | android/guava-tests/test/com/google/common/math/BigIntegerMathTest.java | {
"start": 18608,
"end": 27724
} | class ____ {
private final BigInteger input;
private final Map<RoundingMode, Double> expectedValues = new EnumMap<>(RoundingMode.class);
private boolean unnecessaryShouldThrow = false;
RoundToDoubleTester(BigInteger input) {
this.input = input;
}
@CanIgnoreReturnValue
RoundToDoubleTester setExpectation(double expectedValue, RoundingMode... modes) {
for (RoundingMode mode : modes) {
Double previous = expectedValues.put(mode, expectedValue);
if (previous != null) {
throw new AssertionError();
}
}
return this;
}
@CanIgnoreReturnValue
RoundToDoubleTester roundUnnecessaryShouldThrow() {
unnecessaryShouldThrow = true;
return this;
}
void test() {
assertThat(expectedValues.keySet())
.containsAtLeastElementsIn(EnumSet.complementOf(EnumSet.of(UNNECESSARY)));
for (Map.Entry<RoundingMode, Double> entry : expectedValues.entrySet()) {
RoundingMode mode = entry.getKey();
Double expectation = entry.getValue();
assertWithMessage("roundToDouble(%s, %s)", input, mode)
.that(BigIntegerMath.roundToDouble(input, mode))
.isEqualTo(expectation);
}
if (!expectedValues.containsKey(UNNECESSARY)) {
assertWithMessage("Expected roundUnnecessaryShouldThrow call")
.that(unnecessaryShouldThrow)
.isTrue();
assertThrows(
ArithmeticException.class, () -> BigIntegerMath.roundToDouble(input, UNNECESSARY));
}
}
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_zero() {
new RoundToDoubleTester(BigInteger.ZERO).setExpectation(0.0, RoundingMode.values()).test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_smallPositive() {
new RoundToDoubleTester(BigInteger.valueOf(16))
.setExpectation(16.0, RoundingMode.values())
.test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_maxPreciselyRepresentable() {
new RoundToDoubleTester(BigInteger.valueOf(1L << 53))
.setExpectation(Math.pow(2, 53), RoundingMode.values())
.test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_maxPreciselyRepresentablePlusOne() {
double twoToThe53 = Math.pow(2, 53);
// the representable doubles are 2^53 and 2^53 + 2.
// 2^53+1 is halfway between, so HALF_UP will go up and HALF_DOWN will go down.
new RoundToDoubleTester(BigInteger.valueOf((1L << 53) + 1))
.setExpectation(twoToThe53, DOWN, FLOOR, HALF_DOWN, HALF_EVEN)
.setExpectation(Math.nextUp(twoToThe53), CEILING, UP, HALF_UP)
.roundUnnecessaryShouldThrow()
.test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_twoToThe54PlusOne() {
double twoToThe54 = Math.pow(2, 54);
// the representable doubles are 2^54 and 2^54 + 4
// 2^54+1 is less than halfway between, so HALF_DOWN and HALF_UP will both go down.
new RoundToDoubleTester(BigInteger.valueOf((1L << 54) + 1))
.setExpectation(twoToThe54, DOWN, FLOOR, HALF_DOWN, HALF_UP, HALF_EVEN)
.setExpectation(Math.nextUp(twoToThe54), CEILING, UP)
.roundUnnecessaryShouldThrow()
.test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_twoToThe54PlusThree() {
double twoToThe54 = Math.pow(2, 54);
// the representable doubles are 2^54 and 2^54 + 4
// 2^54+3 is more than halfway between, so HALF_DOWN and HALF_UP will both go up.
new RoundToDoubleTester(BigInteger.valueOf((1L << 54) + 3))
.setExpectation(twoToThe54, DOWN, FLOOR)
.setExpectation(Math.nextUp(twoToThe54), CEILING, UP, HALF_DOWN, HALF_UP, HALF_EVEN)
.roundUnnecessaryShouldThrow()
.test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_twoToThe54PlusFour() {
new RoundToDoubleTester(BigInteger.valueOf((1L << 54) + 4))
.setExpectation(Math.pow(2, 54) + 4, RoundingMode.values())
.test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_maxDouble() {
BigInteger maxDoubleAsBigInteger = DoubleMath.roundToBigInteger(Double.MAX_VALUE, UNNECESSARY);
new RoundToDoubleTester(maxDoubleAsBigInteger)
.setExpectation(Double.MAX_VALUE, RoundingMode.values())
.test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_maxDoublePlusOne() {
BigInteger maxDoubleAsBigInteger =
DoubleMath.roundToBigInteger(Double.MAX_VALUE, UNNECESSARY).add(BigInteger.ONE);
new RoundToDoubleTester(maxDoubleAsBigInteger)
.setExpectation(Double.MAX_VALUE, DOWN, FLOOR, HALF_EVEN, HALF_UP, HALF_DOWN)
.setExpectation(Double.POSITIVE_INFINITY, UP, CEILING)
.roundUnnecessaryShouldThrow()
.test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_wayTooBig() {
BigInteger bi = BigInteger.ONE.shiftLeft(2 * Double.MAX_EXPONENT);
new RoundToDoubleTester(bi)
.setExpectation(Double.MAX_VALUE, DOWN, FLOOR, HALF_EVEN, HALF_UP, HALF_DOWN)
.setExpectation(Double.POSITIVE_INFINITY, UP, CEILING)
.roundUnnecessaryShouldThrow()
.test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_smallNegative() {
new RoundToDoubleTester(BigInteger.valueOf(-16))
.setExpectation(-16.0, RoundingMode.values())
.test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_minPreciselyRepresentable() {
new RoundToDoubleTester(BigInteger.valueOf(-1L << 53))
.setExpectation(-Math.pow(2, 53), RoundingMode.values())
.test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_minPreciselyRepresentableMinusOne() {
// the representable doubles are -2^53 and -2^53 - 2.
// -2^53-1 is halfway between, so HALF_UP will go up and HALF_DOWN will go down.
new RoundToDoubleTester(BigInteger.valueOf((-1L << 53) - 1))
.setExpectation(-Math.pow(2, 53), DOWN, CEILING, HALF_DOWN, HALF_EVEN)
.setExpectation(DoubleUtils.nextDown(-Math.pow(2, 53)), FLOOR, UP, HALF_UP)
.roundUnnecessaryShouldThrow()
.test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_negativeTwoToThe54MinusOne() {
new RoundToDoubleTester(BigInteger.valueOf((-1L << 54) - 1))
.setExpectation(-Math.pow(2, 54), DOWN, CEILING, HALF_DOWN, HALF_UP, HALF_EVEN)
.setExpectation(DoubleUtils.nextDown(-Math.pow(2, 54)), FLOOR, UP)
.roundUnnecessaryShouldThrow()
.test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_negativeTwoToThe54MinusThree() {
new RoundToDoubleTester(BigInteger.valueOf((-1L << 54) - 3))
.setExpectation(-Math.pow(2, 54), DOWN, CEILING)
.setExpectation(
DoubleUtils.nextDown(-Math.pow(2, 54)), FLOOR, UP, HALF_DOWN, HALF_UP, HALF_EVEN)
.roundUnnecessaryShouldThrow()
.test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_negativeTwoToThe54MinusFour() {
new RoundToDoubleTester(BigInteger.valueOf((-1L << 54) - 4))
.setExpectation(-Math.pow(2, 54) - 4, RoundingMode.values())
.test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_minDouble() {
BigInteger minDoubleAsBigInteger = DoubleMath.roundToBigInteger(-Double.MAX_VALUE, UNNECESSARY);
new RoundToDoubleTester(minDoubleAsBigInteger)
.setExpectation(-Double.MAX_VALUE, RoundingMode.values())
.test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_minDoubleMinusOne() {
BigInteger minDoubleAsBigInteger =
DoubleMath.roundToBigInteger(-Double.MAX_VALUE, UNNECESSARY).subtract(BigInteger.ONE);
new RoundToDoubleTester(minDoubleAsBigInteger)
.setExpectation(-Double.MAX_VALUE, DOWN, CEILING, HALF_EVEN, HALF_UP, HALF_DOWN)
.setExpectation(Double.NEGATIVE_INFINITY, UP, FLOOR)
.roundUnnecessaryShouldThrow()
.test();
}
@J2ktIncompatible
@GwtIncompatible
public void testRoundToDouble_negativeWayTooBig() {
BigInteger bi = BigInteger.ONE.shiftLeft(2 * Double.MAX_EXPONENT).negate();
new RoundToDoubleTester(bi)
.setExpectation(-Double.MAX_VALUE, DOWN, CEILING, HALF_EVEN, HALF_UP, HALF_DOWN)
.setExpectation(Double.NEGATIVE_INFINITY, UP, FLOOR)
.roundUnnecessaryShouldThrow()
.test();
}
@J2ktIncompatible
@GwtIncompatible // NullPointerTester
public void testNullPointers() {
NullPointerTester tester = new NullPointerTester();
tester.setDefault(BigInteger.class, ONE);
tester.setDefault(int.class, 1);
tester.setDefault(long.class, 1L);
tester.testAllPublicStaticMethods(BigIntegerMath.class);
}
@GwtIncompatible // String.format
@FormatMethod
private static void failFormat(String template, Object... args) {
fail(String.format(template, args));
}
}
| RoundToDoubleTester |
java | apache__camel | components/camel-twilio/src/generated/java/org/apache/camel/component/twilio/ValidationRequestEndpointConfigurationConfigurer.java | {
"start": 740,
"end": 3829
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("ApiName", org.apache.camel.component.twilio.internal.TwilioApiName.class);
map.put("MethodName", java.lang.String.class);
map.put("PathAccountSid", java.lang.String.class);
map.put("PhoneNumber", com.twilio.type.PhoneNumber.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.component.twilio.ValidationRequestEndpointConfiguration target = (org.apache.camel.component.twilio.ValidationRequestEndpointConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "apiname":
case "apiName": target.setApiName(property(camelContext, org.apache.camel.component.twilio.internal.TwilioApiName.class, value)); return true;
case "methodname":
case "methodName": target.setMethodName(property(camelContext, java.lang.String.class, value)); return true;
case "pathaccountsid":
case "pathAccountSid": target.setPathAccountSid(property(camelContext, java.lang.String.class, value)); return true;
case "phonenumber":
case "phoneNumber": target.setPhoneNumber(property(camelContext, com.twilio.type.PhoneNumber.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "apiname":
case "apiName": return org.apache.camel.component.twilio.internal.TwilioApiName.class;
case "methodname":
case "methodName": return java.lang.String.class;
case "pathaccountsid":
case "pathAccountSid": return java.lang.String.class;
case "phonenumber":
case "phoneNumber": return com.twilio.type.PhoneNumber.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.component.twilio.ValidationRequestEndpointConfiguration target = (org.apache.camel.component.twilio.ValidationRequestEndpointConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "apiname":
case "apiName": return target.getApiName();
case "methodname":
case "methodName": return target.getMethodName();
case "pathaccountsid":
case "pathAccountSid": return target.getPathAccountSid();
case "phonenumber":
case "phoneNumber": return target.getPhoneNumber();
default: return null;
}
}
}
| ValidationRequestEndpointConfigurationConfigurer |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/guice/OverridesJavaxInjectableMethodTest.java | {
"start": 2197,
"end": 2696
} | class ____ extends TestClass2 {
// BUG: Diagnostic contains: @Inject
public void foo() {}
}
}\
""")
.doTest();
}
@Test
public void negativeCase() {
compilationHelper
.addSourceLines(
"OverridesJavaxInjectableMethodNegativeCases.java",
"""
package com.google.errorprone.bugpatterns.inject.guice.testdata;
/**
* @author sgoldfeder@google.com (Steven Goldfeder)
*/
public | TestClass3 |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/UnknownEntityTypeException.java | {
"start": 430,
"end": 803
} | class ____ extends MappingException {
public UnknownEntityTypeException(String message, Throwable cause) {
super( message, cause );
}
public UnknownEntityTypeException(String entityName) {
super( "Unknown entity type '" + entityName + "'" );
}
public UnknownEntityTypeException(Class<?> entityClass) {
this( entityClass.getName() );
}
}
| UnknownEntityTypeException |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/junit/ExceptionFactory.java | {
"start": 231,
"end": 311
} | class ____ {
private ExceptionFactory() {}
private static | ExceptionFactory |
java | apache__flink | flink-formats/flink-parquet/src/main/java/org/apache/flink/formats/parquet/row/ParquetRowDataWriter.java | {
"start": 7970,
"end": 8424
} | class ____ implements FieldWriter {
@Override
public void write(RowData row, int ordinal) {
writeBoolean(row.getBoolean(ordinal));
}
@Override
public void write(ArrayData arrayData, int ordinal) {
writeBoolean(arrayData.getBoolean(ordinal));
}
private void writeBoolean(boolean value) {
recordConsumer.addBoolean(value);
}
}
private | BooleanWriter |
java | spring-projects__spring-boot | documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/data/nosql/couchbase/repositories/MyBean.java | {
"start": 834,
"end": 1081
} | class ____ {
private final CouchbaseTemplate template;
public MyBean(CouchbaseTemplate template) {
this.template = template;
}
// @fold:on // ...
public String someMethod() {
return this.template.getBucketName();
}
// @fold:off
}
| MyBean |
java | apache__camel | dsl/camel-yaml-dsl/camel-yaml-dsl-maven-plugin/src/main/java/org/apache/camel/maven/dsl/yaml/support/TypeSpecHolder.java | {
"start": 1023,
"end": 1578
} | class ____ {
public final TypeSpec type;
public final Map<String, Set<String>> attributes;
public TypeSpecHolder(TypeSpec type, Map<String, Set<String>> attributes) {
Objects.requireNonNull(type, "type");
Objects.requireNonNull(attributes, "attributes");
this.type = type;
this.attributes = new HashMap<>(attributes);
}
public static void put(Map<String, Set<String>> attributes, String key, String value) {
attributes.computeIfAbsent(key, k -> new TreeSet<>()).add(value);
}
}
| TypeSpecHolder |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/util/AccessPattern.java | {
"start": 326,
"end": 976
} | enum ____ {
/**
* Value that indicates that provider never returns anything other than
* Java `null`.
*/
ALWAYS_NULL,
/**
* Value that indicates that provider will always return a constant
* value, regardless of when it is called; and also that it never
* uses `context` argument (which may then be passed as `null`)
*/
CONSTANT,
/**
* Value that indicates that provider may return different values
* at different times (often a freshly constructed empty container),
* and thus must be called every time "null replacement" value is
* needed.
*/
DYNAMIC
}
| AccessPattern |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/MockIllegalThrowsTest.java | {
"start": 1195,
"end": 1681
} | class ____ {
abstract Object foo();
void test(Test t) {
// BUG: Diagnostic contains: only unchecked
when(t.foo()).thenThrow(new Exception());
}
}
""")
.doTest();
}
@Test
public void positive_multipleThrows() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import static org.mockito.Mockito.when;
abstract | Test |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestMergeManager.java | {
"start": 2004,
"end": 5123
} | class ____ {
@Test
@Timeout(value = 10)
public void testMemoryMerge() throws Exception {
final int TOTAL_MEM_BYTES = 10000;
final int OUTPUT_SIZE = 7950;
JobConf conf = new JobConf();
conf.setFloat(MRJobConfig.SHUFFLE_INPUT_BUFFER_PERCENT, 1.0f);
conf.setLong(MRJobConfig.REDUCE_MEMORY_TOTAL_BYTES, TOTAL_MEM_BYTES);
conf.setFloat(MRJobConfig.SHUFFLE_MEMORY_LIMIT_PERCENT, 0.8f);
conf.setFloat(MRJobConfig.SHUFFLE_MERGE_PERCENT, 0.9f);
TestExceptionReporter reporter = new TestExceptionReporter();
CyclicBarrier mergeStart = new CyclicBarrier(2);
CyclicBarrier mergeComplete = new CyclicBarrier(2);
StubbedMergeManager mgr = new StubbedMergeManager(conf, reporter,
mergeStart, mergeComplete);
// reserve enough map output to cause a merge when it is committed
MapOutput<Text, Text> out1 = mgr.reserve(null, OUTPUT_SIZE, 0);
assertTrue((out1 instanceof InMemoryMapOutput), "Should be a memory merge");
InMemoryMapOutput<Text, Text> mout1 = (InMemoryMapOutput<Text, Text>)out1;
fillOutput(mout1);
MapOutput<Text, Text> out2 = mgr.reserve(null, OUTPUT_SIZE, 0);
assertTrue((out2 instanceof InMemoryMapOutput), "Should be a memory merge");
InMemoryMapOutput<Text, Text> mout2 = (InMemoryMapOutput<Text, Text>)out2;
fillOutput(mout2);
// next reservation should be a WAIT
MapOutput<Text, Text> out3 = mgr.reserve(null, OUTPUT_SIZE, 0);
assertThat(out3).withFailMessage("Should be told to wait").isNull();
// trigger the first merge and wait for merge thread to start merging
// and free enough output to reserve more
mout1.commit();
mout2.commit();
mergeStart.await();
assertEquals(1, mgr.getNumMerges());
// reserve enough map output to cause another merge when committed
out1 = mgr.reserve(null, OUTPUT_SIZE, 0);
assertTrue((out1 instanceof InMemoryMapOutput), "Should be a memory merge");
mout1 = (InMemoryMapOutput<Text, Text>)out1;
fillOutput(mout1);
out2 = mgr.reserve(null, OUTPUT_SIZE, 0);
assertTrue((out2 instanceof InMemoryMapOutput), "Should be a memory merge");
mout2 = (InMemoryMapOutput<Text, Text>)out2;
fillOutput(mout2);
// next reservation should be null
out3 = mgr.reserve(null, OUTPUT_SIZE, 0);
assertThat(out3).withFailMessage("Should be told to wait").isNull();
// commit output *before* merge thread completes
mout1.commit();
mout2.commit();
// allow the first merge to complete
mergeComplete.await();
// start the second merge and verify
mergeStart.await();
assertEquals(2, mgr.getNumMerges());
// trigger the end of the second merge
mergeComplete.await();
assertEquals(2, mgr.getNumMerges());
assertEquals(0, reporter.getNumExceptions(), "exception reporter invoked");
}
private void fillOutput(InMemoryMapOutput<Text, Text> output) throws IOException {
BoundedByteArrayOutputStream stream = output.getArrayStream();
int count = stream.getLimit();
for (int i=0; i < count; ++i) {
stream.write(i);
}
}
private static | TestMergeManager |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/hql/QueryComparingAssociationToNullTest.java | {
"start": 2309,
"end": 2644
} | class ____ {
// Surrogate primary key; assigned explicitly by the fixtures (no generator declared).
@Id
private Integer id;
// Persisted into the explicitly named COLUMN_DATE column.
@Column(name = "COLUMN_DATE")
private LocalDate date;
// Optional to-one association; may be null — TODO confirm against the queries under test.
@ManyToOne
private Child child;
// No-argument constructor required by JPA.
public Parent() {
}
// Convenience constructor for building fixture rows.
public Parent(Integer id, LocalDate date, Child child) {
this.id = id;
this.date = date;
this.child = child;
}
}
@Entity(name = "Child")
public static | Parent |
java | quarkusio__quarkus | test-framework/junit5-component/src/main/java/io/quarkus/test/component/ComponentContainer.java | {
"start": 8564,
"end": 31787
} | class ____ in a test method parameter
if (b.getTarget().isPresent()
&& beanDeployment.get().hasAnnotation(b.getTarget().get(), DotNames.UNREMOVABLE)) {
return true;
}
for (Field injectionPoint : injectFields) {
if (injectionPointMatchesBean(injectionPoint.getGenericType(), injectionPoint, qualifiers,
beanResolver.get(), b)) {
return true;
}
}
for (Parameter param : injectParams) {
if (injectionPointMatchesBean(param.getParameterizedType(), param, qualifiers, beanResolver.get(),
b)) {
return true;
}
}
return false;
})
.setImmutableBeanArchiveIndex(index)
.setComputingBeanArchiveIndex(computingIndex)
.setRemoveUnusedBeans(true)
.setTransformUnproxyableClasses(true);
Path generatedClassesDirectory;
if (isContinuousTesting) {
generatedClassesDirectory = null;
} else {
File testOutputDirectory = getTestOutputDirectory(testClass);
generatedClassesDirectory = testOutputDirectory.getParentFile()
.toPath()
.resolve("generated-classes")
.resolve(beanProcessorName);
Files.createDirectories(generatedClassesDirectory);
}
builder.setOutput(new ResourceOutput() {
@Override
public void writeResource(Resource resource) throws IOException {
switch (resource.getType()) {
case JAVA_CLASS:
generatedClasses.put(resource.getFullyQualifiedName(), resource.getData());
if (generatedClassesDirectory != null) {
// debug generated bytecode
resource.writeTo(generatedClassesDirectory.toFile());
}
break;
case SERVICE_PROVIDER:
if (resource.getName()
.equals(ComponentsProvider.class.getName())) {
componentsProvider.set(resource.getData());
}
break;
default:
throw new IllegalArgumentException("Unsupported resource type: " + resource.getType());
}
}
});
builder.addAnnotationTransformation(AnnotationsTransformer.appliedToField().whenContainsAny(qualifiers)
.whenContainsNone(DotName.createSimple(Inject.class)).thenTransform(t -> t.add(Inject.class)));
builder.addAnnotationTransformation(new JaxrsSingletonTransformer());
for (AnnotationTransformation transformation : annotationTransformations) {
builder.addAnnotationTransformation(transformation);
}
// Register:
// 1) Dummy mock beans for all unsatisfied injection points
// 2) Synthetic beans for Config and @ConfigProperty injection points
builder.addBeanRegistrar(new BeanRegistrar() {
@Override
public void register(RegistrationContext registrationContext) {
long start = System.nanoTime();
List<BeanInfo> beans = registrationContext.beans().collect();
BeanDeployment beanDeployment = registrationContext.get(Key.DEPLOYMENT);
Set<TypeAndQualifiers> unsatisfiedInjectionPoints = new HashSet<>();
boolean configInjectionPoint = false;
Set<TypeAndQualifiers> configPropertyInjectionPoints = new HashSet<>();
DotName configDotName = DotName.createSimple(Config.class);
DotName configPropertyDotName = DotName.createSimple(ConfigProperty.class);
DotName configMappingDotName = DotName.createSimple(ConfigMapping.class);
// We need to analyze all injection points in order to find
// Config, @ConfigProperty and config mappings injection points
// and all unsatisfied injection points
// to register appropriate synthetic beans
for (InjectionPointInfo injectionPoint : registrationContext.getInjectionPoints()) {
if (injectionPoint.getRequiredType().name().equals(configDotName)
&& injectionPoint.hasDefaultedQualifier()) {
configInjectionPoint = true;
continue;
}
if (injectionPoint.getRequiredQualifier(configPropertyDotName) != null) {
configPropertyInjectionPoints.add(new TypeAndQualifiers(injectionPoint.getRequiredType(),
injectionPoint.getRequiredQualifiers()));
continue;
}
BuiltinBean builtin = BuiltinBean.resolve(injectionPoint);
if (builtin != null && builtin != BuiltinBean.INSTANCE && builtin != BuiltinBean.LIST) {
continue;
}
Type requiredType = injectionPoint.getRequiredType();
Set<AnnotationInstance> requiredQualifiers = injectionPoint.getRequiredQualifiers();
if (builtin == BuiltinBean.LIST) {
// @All List<Delta> -> Delta
requiredType = requiredType.asParameterizedType().arguments().get(0);
requiredQualifiers = new HashSet<>(requiredQualifiers);
requiredQualifiers.removeIf(q -> q.name().equals(DotNames.ALL));
if (requiredQualifiers.isEmpty()) {
requiredQualifiers.add(AnnotationInstance.builder(DotNames.DEFAULT).build());
}
}
if (requiredType.kind() == Kind.CLASS) {
ClassInfo clazz = computingIndex.getClassByName(requiredType.name());
if (clazz != null && clazz.isInterface()) {
AnnotationInstance configMapping = clazz.declaredAnnotation(configMappingDotName);
if (configMapping != null) {
AnnotationValue prefixValue = configMapping.value("prefix");
String prefix = prefixValue == null ? "" : prefixValue.asString();
Set<String> mappingClasses = configMappings.computeIfAbsent(prefix,
k -> new HashSet<>());
mappingClasses.add(clazz.name().toString());
}
}
}
if (isSatisfied(requiredType, requiredQualifiers, injectionPoint, beans, beanDeployment,
configuration)) {
continue;
}
if (requiredType.kind() == Kind.PRIMITIVE || requiredType.kind() == Kind.ARRAY) {
throw new IllegalStateException(
"Found an unmockable unsatisfied injection point: " + injectionPoint.getTargetInfo());
}
unsatisfiedInjectionPoints.add(new TypeAndQualifiers(requiredType, requiredQualifiers));
LOG.debugf("Unsatisfied injection point found: %s", injectionPoint.getTargetInfo());
}
// Make sure that all @InjectMock injection points are also considered unsatisfied dependencies
// This means that a mock is created even if no component declares this dependency
for (Field field : findFields(testClass, List.of(InjectMock.class))) {
Set<AnnotationInstance> requiredQualifiers = getQualifiers(field, qualifiers);
if (requiredQualifiers.isEmpty()) {
requiredQualifiers = Set.of(AnnotationInstance.builder(DotNames.DEFAULT).build());
}
unsatisfiedInjectionPoints
.add(new TypeAndQualifiers(Types.jandexType(field.getGenericType()), requiredQualifiers));
}
for (Parameter param : findInjectMockParams(testClass)) {
Set<AnnotationInstance> requiredQualifiers = getQualifiers(param, qualifiers);
if (requiredQualifiers.isEmpty()) {
requiredQualifiers = Set.of(AnnotationInstance.builder(DotNames.DEFAULT).build());
}
unsatisfiedInjectionPoints
.add(new TypeAndQualifiers(Types.jandexType(param.getParameterizedType()), requiredQualifiers));
}
for (TypeAndQualifiers unsatisfied : unsatisfiedInjectionPoints) {
ClassInfo implementationClass = computingIndex.getClassByName(unsatisfied.type.name());
BeanConfigurator<Object> configurator = registrationContext.configure(implementationClass.name())
.scope(Singleton.class)
.addType(unsatisfied.type);
unsatisfied.qualifiers.forEach(configurator::addQualifier);
configurator.param("implementationClass", implementationClass)
.creator(MockBeanCreator.class)
.defaultBean()
.identifier("dummy")
.done();
}
if (configInjectionPoint) {
registrationContext.configure(Config.class)
.addType(Config.class)
.creator(ConfigBeanCreator.class)
.done();
}
if (!configPropertyInjectionPoints.isEmpty()) {
BeanConfigurator<Object> configPropertyConfigurator = registrationContext.configure(Object.class)
.identifier("configProperty")
.addQualifier(ConfigProperty.class)
.param("useDefaultConfigProperties", configuration.useDefaultConfigProperties)
.addInjectionPoint(ClassType.create(InjectionPoint.class))
.creator(ConfigPropertyBeanCreator.class);
for (TypeAndQualifiers configPropertyInjectionPoint : configPropertyInjectionPoints) {
configPropertyConfigurator.addType(configPropertyInjectionPoint.type);
}
configPropertyConfigurator.done();
}
if (!configMappings.isEmpty()) {
for (Entry<String, Set<String>> e : configMappings.entrySet()) {
for (String mapping : e.getValue()) {
DotName mappingName = DotName.createSimple(mapping);
registrationContext.configure(mappingName)
.addType(mappingName)
.creator(ConfigMappingBeanCreator.class)
.param("mappingClass", mapping)
.param("prefix", e.getKey())
.done();
}
}
}
LOG.debugf("Test injection points analyzed in %s ms [found: %s, mocked: %s]",
TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start),
registrationContext.getInjectionPoints().size(),
unsatisfiedInjectionPoints.size());
// Find all methods annotated with interceptor annotations and register them as synthetic interceptors
processTestInterceptorMethods(testClass, registrationContext, interceptorBindings, interceptorMethods);
}
});
// Register mock beans
for (MockBeanConfiguratorImpl<?> mockConfigurator : configuration.mockConfigurators) {
builder.addBeanRegistrar(registrarForMock(testClass, mockConfigurator));
}
// Synthetic beans from callbacks
for (BeanRegistrar beanRegistrar : beanRegistrars) {
builder.addBeanRegistrar(beanRegistrar);
}
// Process the deployment
BeanProcessor beanProcessor = builder.build();
beanDeployment.set(beanProcessor.getBeanDeployment());
try {
Consumer<BytecodeTransformer> bytecodeTransformerConsumer = bytecodeTransformers::add;
// Populate the list of qualifiers used to simulate quarkus auto injection
ContextRegistrar.RegistrationContext registrationContext = beanProcessor.registerCustomContexts();
qualifiers.addAll(registrationContext.get(Key.QUALIFIERS).keySet());
for (DotName binding : registrationContext.get(Key.INTERCEPTOR_BINDINGS).keySet()) {
interceptorBindings.add(binding.toString());
}
beanResolver.set(registrationContext.get(Key.DEPLOYMENT).getBeanResolver());
beanProcessor.registerScopes();
beanProcessor.registerBeans();
beanProcessor.getBeanDeployment().initBeanByTypeMap();
beanProcessor.registerSyntheticObservers();
beanProcessor.initialize(bytecodeTransformerConsumer, Collections.emptyList());
ValidationContext validationContext = beanProcessor.validate(bytecodeTransformerConsumer);
beanProcessor.processValidationErrors(validationContext);
// Generate resources in parallel
ExecutorService executor = Executors.newCachedThreadPool();
beanProcessor.generateResources(null, new HashSet<>(), bytecodeTransformerConsumer, true, executor);
executor.shutdown();
Map<String, byte[]> transformedClasses = new HashMap<>();
Path transformedClassesDirectory = null;
if (!isContinuousTesting) {
File testOutputDirectory = getTestOutputDirectory(testClass);
transformedClassesDirectory = testOutputDirectory.getParentFile().toPath()
.resolve("transformed-classes").resolve(beanProcessorName);
Files.createDirectories(transformedClassesDirectory);
}
// Make sure the traced classes are transformed in continuous testing
for (String tracedClass : tracedClasses) {
if (tracedClass.startsWith("io.quarkus.test.component")) {
continue;
}
bytecodeTransformers.add(new BytecodeTransformer(tracedClass, (cn, cv) -> new TracingClassVisitor(cv, cn)));
}
if (!bytecodeTransformers.isEmpty()) {
Map<String, List<BytecodeTransformer>> map = bytecodeTransformers.stream()
.collect(Collectors.groupingBy(BytecodeTransformer::getClassToTransform,
Collectors.mapping(Function.identity(), Collectors.toList())));
for (Map.Entry<String, List<BytecodeTransformer>> entry : map.entrySet()) {
String className = entry.getKey();
List<BytecodeTransformer> transformers = entry.getValue();
String classFileName = className.replace('.', '/') + ".class";
byte[] bytecode;
try (InputStream in = testClassLoader.getResourceAsStream(classFileName)) {
if (in == null) {
throw new IOException("Resource not found: " + classFileName);
}
bytecode = in.readAllBytes();
}
// Apply input transformers first
for (BytecodeTransformer t : transformers) {
if (t.getInputTransformer() != null) {
bytecode = t.getInputTransformer().apply(className, bytecode);
}
}
// Then apply ASM visitors
ClassReader reader = new ClassReader(bytecode);
ClassWriter writer = new ClassWriter(reader, ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS);
ClassVisitor visitor = writer;
for (BytecodeTransformer t : transformers) {
if (t.getVisitorFunction() != null) {
visitor = t.getVisitorFunction().apply(className, visitor);
}
}
reader.accept(visitor, 0);
bytecode = writer.toByteArray();
transformedClasses.put(className, bytecode);
if (transformedClassesDirectory != null) {
// debug generated bytecode
Path classFile = transformedClassesDirectory.resolve(
classFileName.replace('/', '_').replace('$', '_'));
Files.write(classFile, bytecode);
}
}
}
generatedClasses.putAll(transformedClasses);
} catch (IOException e) {
throw new IllegalStateException("Error generating resources", e);
}
} catch (Throwable e) {
if (buildShouldFail) {
buildFailure = e;
} else {
if (e instanceof RuntimeException) {
throw (RuntimeException) e;
} else {
throw new RuntimeException(e);
}
}
} finally {
if (buildShouldFail && buildFailure == null) {
throw new AssertionError("The container build was expected to fail!");
}
}
LOG.debugf("Component container for %s built in %s ms, using CL: %s", testClass.getSimpleName(),
TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start),
ComponentContainer.class.getClassLoader().getClass().getSimpleName());
return new BuildResult(generatedClasses, componentsProvider.get(), configMappings, interceptorMethods,
buildFailure);
}
private static BeanRegistrar registrarForMock(Class<?> testClass, MockBeanConfiguratorImpl<?> mock) {
return new BeanRegistrar() {
@Override
public void register(RegistrationContext context) {
BeanConfigurator<Object> configurator = context.configure(mock.beanClass);
configurator.scope(mock.scope);
mock.jandexTypes().forEach(configurator::addType);
mock.jandexQualifiers().forEach(configurator::addQualifier);
if (mock.name != null) {
configurator.name(mock.name);
}
configurator.alternative(mock.alternative);
if (mock.priority != null) {
configurator.priority(mock.priority);
}
if (mock.defaultBean) {
configurator.defaultBean();
}
String key = MockBeanCreator.registerCreate(testClass.getName(), cast(mock.create));
configurator.creator(MockBeanCreator.class).param(MockBeanCreator.CREATE_KEY, key).done();
}
};
}
private static void indexComponentClass(Indexer indexer, Class<?> componentClass) {
try {
while (componentClass != null) {
indexer.indexClass(componentClass);
for (Annotation annotation : componentClass.getAnnotations()) {
indexer.indexClass(annotation.annotationType());
}
for (Field field : componentClass.getDeclaredFields()) {
indexAnnotatedElement(indexer, field);
}
for (Method method : componentClass.getDeclaredMethods()) {
indexAnnotatedElement(indexer, method);
for (Parameter param : method.getParameters()) {
indexAnnotatedElement(indexer, param);
}
}
for (Class<?> iface : componentClass.getInterfaces()) {
indexComponentClass(indexer, iface);
}
componentClass = componentClass.getSuperclass();
}
} catch (IOException e) {
throw new IllegalStateException("Failed to index:" + componentClass, e);
}
}
private static void indexAnnotatedElement(Indexer indexer, AnnotatedElement element) throws IOException {
for (Annotation annotation : element.getAnnotations()) {
indexer.indexClass(annotation.annotationType());
}
}
private static List<Field> findInjectFields(Class<?> testClass, boolean scanEnclosingClasses) {
List<Class<? extends Annotation>> injectAnnotations;
Class<? extends Annotation> injectSpy = loadInjectSpy();
if (injectSpy != null) {
injectAnnotations = List.of(Inject.class, InjectMock.class, injectSpy);
} else {
injectAnnotations = List.of(Inject.class, InjectMock.class);
}
List<Field> found = findFields(testClass, injectAnnotations);
if (scanEnclosingClasses) {
Class<?> enclosing = testClass.getEnclosingClass();
while (enclosing != null) {
// @Nested test | or |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/annotation/EnumNaming.java | {
"start": 521,
"end": 876
} | interface ____ {
/**
* @return Type of {@link EnumNamingStrategy} to use, if any. Default value
* of <code>EnumNamingStrategy.class</code> means "no strategy specified"
* (and may also be used for overriding to remove otherwise applicable
* naming strategy)
*/
public Class<? extends EnumNamingStrategy> value();
}
| EnumNaming |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/mapper/blockloader/docvalues/fn/MvMaxBooleansFromDocValuesBlockLoaderTests.java | {
"start": 997,
"end": 3417
} | class ____ extends AbstractBooleansFromDocValuesBlockLoaderTests {
public MvMaxBooleansFromDocValuesBlockLoaderTests(boolean blockAtATime, boolean multiValues, boolean missingValues) {
super(blockAtATime, multiValues, missingValues);
}
/**
 * Reads the field with both a plain booleans loader and an mv_max loader —
 * first over every document in one block, then with fresh readers in batches
 * of at most 10 docs — and checks that the mv_max block holds the
 * per-document maximum of the plain block.
 */
@Override
protected void innerTest(LeafReaderContext ctx, int mvCount) throws IOException {
    var booleansLoader = new BooleansBlockLoader("field");
    var mvMaxBooleansLoader = new MvMaxBooleansBlockLoader("field");
    var booleansReader = booleansLoader.reader(ctx);
    var mvMaxBooleansReader = mvMaxBooleansLoader.reader(ctx);
    assertThat(mvMaxBooleansReader, readerMatcher());
    // Pass 1: all documents at once.
    BlockLoader.Docs docs = TestBlock.docs(ctx);
    try (
        // Locals renamed from "doubles"/"maxDoubles" — misleading copy-paste from the doubles test.
        TestBlock booleans = read(booleansLoader, booleansReader, ctx, docs);
        TestBlock maxBooleans = read(mvMaxBooleansLoader, mvMaxBooleansReader, ctx, docs);
    ) {
        checkBlocks(booleans, maxBooleans);
    }
    // Pass 2: fresh readers, batches of up to 10 consecutive doc ids.
    booleansReader = booleansLoader.reader(ctx);
    mvMaxBooleansReader = mvMaxBooleansLoader.reader(ctx);
    for (int i = 0; i < ctx.reader().numDocs(); i += 10) {
        int[] docsArray = new int[Math.min(10, ctx.reader().numDocs() - i)];
        for (int d = 0; d < docsArray.length; d++) {
            docsArray[d] = i + d;
        }
        docs = TestBlock.docs(docsArray);
        try (
            TestBlock booleans = read(booleansLoader, booleansReader, ctx, docs);
            TestBlock maxBooleans = read(mvMaxBooleansLoader, mvMaxBooleansReader, ctx, docs);
        ) {
            checkBlocks(booleans, maxBooleans);
        }
    }
}
/**
 * Expected {@code toString()} of the reader under test: the sorted
 * multi-value wrapper when multi-values are generated, otherwise the
 * singleton doc-values reader.
 */
private Matcher<Object> readerMatcher() {
    String expected = multiValues
        ? "MvMaxBooleansFromDocValues.Sorted"
        : "BooleansFromDocValues.Singleton";
    return hasToString(expected);
}
/**
 * Asserts that every position of {@code mvMax} holds the maximum of the
 * corresponding (possibly multi-valued) entry of {@code booleans}, and that
 * missing values stay missing.
 */
private void checkBlocks(TestBlock booleans, TestBlock mvMax) {
    final int size = booleans.size();
    for (int pos = 0; pos < size; pos++) {
        Object raw = booleans.get(pos);
        if (raw == null) {
            assertThat(mvMax.get(pos), nullValue());
        } else {
            Boolean expected;
            if (raw instanceof List<?> values) {
                // max over booleans == "at least one value is true"
                expected = values.stream().anyMatch(b -> (Boolean) b);
            } else {
                expected = (Boolean) raw;
            }
            assertThat(mvMax.get(pos), equalTo(expected));
        }
    }
}
}
| MvMaxBooleansFromDocValuesBlockLoaderTests |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/RuncContainerExecutorConfig.java | {
"start": 27323,
"end": 28726
} | class ____ {
// CFS quota forwarded to the runtime config; 0 means "not set".
// NOTE(review): units are presumably microseconds per the OCI runtime spec — confirm.
final private long quota;
// CFS scheduling period; 0 means "not set".
final private long period;
// Realtime runtime budget; 0 means "not set".
final private long realtimeRuntime;
// Realtime scheduling period; 0 means "not set".
final private long realtimePeriod;
// cpuset CPUs the container may run on; null means "not set".
final private String cpus;
// cpuset memory nodes; null means "not set".
final private String mems;
public long getShares() {
return shares;
}
public long getQuota() {
return quota;
}
public long getPeriod() {
return period;
}
public long getRealtimeRuntime() {
return realtimeRuntime;
}
public long getRealtimePeriod() {
return realtimePeriod;
}
public String getCpus() {
return cpus;
}
public String getMems() {
return mems;
}
// Relative CPU shares weight; 0 means "not set".
final private long shares;
// Fully specified CPU resource section.
public CPU(long shares, long quota, long period,
long realtimeRuntime, long realtimePeriod,
String cpus, String mems) {
this.shares = shares;
this.quota = quota;
this.period = period;
this.realtimeRuntime = realtimeRuntime;
this.realtimePeriod = realtimePeriod;
this.cpus = cpus;
this.mems = mems;
}
// Empty section: all numeric fields 0, both cpuset strings null.
public CPU() {
this(0, 0, 0, 0, 0, null, null);
}
}
/**
* This | CPU |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/SortOperationFactory.java | {
"start": 1725,
"end": 5371
} | class ____ {
/**
* Creates a valid {@link SortQueryOperation}.
*
* <p><b>NOTE:</b> If the collation is not explicitly specified for an expression, the
* expression is wrapped in a default ascending order. If no expression is specified, the result
* is not sorted but only limited.
*
* @param orders expressions describing order
* @param child relational expression on top of which to apply the sort operation
* @param postResolverFactory factory for creating resolved expressions
* @return valid sort operation
*/
QueryOperation createSort(
        List<ResolvedExpression> orders,
        QueryOperation child,
        PostResolverFactory postResolverFactory) {
    // Every order expression gets an explicit collation; expressions without
    // one are defaulted to ascending by OrderWrapper.
    final OrderWrapper wrapper = new OrderWrapper(postResolverFactory);
    final List<ResolvedExpression> wrappedOrders =
            orders.stream()
                    .map(expression -> expression.accept(wrapper))
                    .collect(Collectors.toList());
    return new SortQueryOperation(wrappedOrders, child);
}
/**
* Creates a valid {@link SortQueryOperation} with offset (possibly merged into a preceding
* {@link SortQueryOperation}).
*
* @param offset offset to start from
* @param child relational expression on top of which to apply the sort operation
* @param postResolverFactory factory for creating resolved expressions
* @return valid sort operation with applied offset
*/
QueryOperation createLimitWithOffset(
        int offset, QueryOperation child, PostResolverFactory postResolverFactory) {
    // Reuse (or synthesize) the child sort so the offset can be merged into it.
    final SortQueryOperation sort = validateAndGetChildSort(child, postResolverFactory);
    if (offset < 0) {
        throw new ValidationException("Offset should be greater or equal 0");
    }
    if (sort.getOffset() != -1) {
        throw new ValidationException("OFFSET already defined");
    }
    // Fetch stays unlimited (-1); it can be attached later via createLimitWithFetch.
    return new SortQueryOperation(sort.getOrder(), sort.getChild(), offset, -1);
}
/**
* Creates a valid {@link SortQueryOperation} with fetch (possibly merged into a preceding
* {@link SortQueryOperation}).
*
* @param fetch fetch to limit
* @param child relational expression on top of which to apply the sort operation
* @param postResolverFactory factory for creating resolved expressions
* @return valid sort operation with applied offset
*/
QueryOperation createLimitWithFetch(
        int fetch, QueryOperation child, PostResolverFactory postResolverFactory) {
    final SortQueryOperation sort = validateAndGetChildSort(child, postResolverFactory);
    if (fetch < 0) {
        throw new ValidationException("Fetch should be greater or equal 0");
    }
    // Keep a previously applied offset; an unset offset (-1) becomes 0.
    final int effectiveOffset = sort.getOffset() < 0 ? 0 : sort.getOffset();
    return new SortQueryOperation(sort.getOrder(), sort.getChild(), effectiveOffset, fetch);
}
private SortQueryOperation validateAndGetChildSort(
        QueryOperation child, PostResolverFactory postResolverFactory) {
    // If the child is not already a sort, wrap it in an order-less sort so
    // offset/fetch can be attached uniformly.
    final SortQueryOperation sort =
            child instanceof SortQueryOperation
                    ? (SortQueryOperation) child
                    : (SortQueryOperation)
                            createSort(Collections.emptyList(), child, postResolverFactory);
    if (sort.getFetch() != -1) {
        throw new ValidationException("FETCH is already defined.");
    }
    return sort;
}
private static | SortOperationFactory |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/datageneration/matchers/source/FieldSpecificMatcher.java | {
"start": 33895,
"end": 34680
} | class ____ extends GenericMappingAwareMatcher {
WildcardMatcher(
XContentBuilder actualMappings,
Settings.Builder actualSettings,
XContentBuilder expectedMappings,
Settings.Builder expectedSettings
) {
super("wildcard", actualMappings, actualSettings, expectedMappings, expectedSettings);
}
@Override
Object convert(Object value, Object nullValue) {
    // A present value passes through untouched; an absent one falls back to
    // the mapping's null_value (which may itself be null).
    return value != null ? value : nullValue;
}
}
/**
* Generic matcher that supports common matching logic like null values.
*/
abstract | WildcardMatcher |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/security/JakartaRestResourceHttpPermissionTest.java | {
"start": 5845,
"end": 5988
} | class ____ {
@GET
public String get() {
return "root";
}
}
@Path("/")
public static | RootResource |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/vld/BasicPTVWithArraysTest.java | {
"start": 720,
"end": 893
} | class ____ extends Base2534 {
protected Good2534() { }
public Good2534(int x) {
super();
this.x = x;
}
}
static | Good2534 |
java | alibaba__fastjson | src/main/java/com/alibaba/fastjson/JSONPath.java | {
"start": 99570,
"end": 99958
} | class ____ extends PropertyFilter {
public NotNullSegement(String propertyName, boolean function){
super(propertyName, function);
}
public boolean apply(JSONPath path, Object rootObject, Object currentObject, Object item) {
return path.getPropertyValue(item, propertyName, propertyNameHash) != null;
}
}
static | NotNullSegement |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/errors/InvalidSessionTimeoutException.java | {
"start": 847,
"end": 1168
} | class ____ extends ApiException {
private static final long serialVersionUID = 1L;
public InvalidSessionTimeoutException(String message, Throwable cause) {
super(message, cause);
}
public InvalidSessionTimeoutException(String message) {
super(message);
}
}
| InvalidSessionTimeoutException |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java | {
"start": 852,
"end": 2153
} | class ____ extends StringFieldScript {
// Per-leaf ordinal doc values for the target field, loaded once in the constructor.
private final SortedSetDocValues sortedSetDocValues;
// Whether the document selected via setDocument has at least one value.
boolean hasValue = false;
/**
 * Loads the sorted-set doc values for {@code fieldName} from the given leaf
 * reader so string values can be emitted directly, without running a script.
 */
public SortedSetDocValuesStringFieldScript(String fieldName, SearchLookup searchLookup, LeafReaderContext ctx) {
super(fieldName, Map.of(), searchLookup, OnScriptError.FAIL, ctx);
try {
sortedSetDocValues = DocValues.getSortedSet(ctx.reader(), fieldName);
} catch (IOException e) {
throw new IllegalStateException("Cannot load doc values", e);
}
}
// Positions the doc-values iterator on docID and records whether values exist there.
@Override
public void setDocument(int docID) {
try {
hasValue = sortedSetDocValues.advanceExact(docID);
} catch (IOException e) {
throw new IllegalStateException("Cannot load doc values", e);
}
}
// Emits every value of the current document, decoding each ordinal to a UTF-8 string.
@Override
public void execute() {
try {
if (hasValue) {
for (int i = 0; i < sortedSetDocValues.docValueCount(); i++) {
BytesRef bytesRef = sortedSetDocValues.lookupOrd(sortedSetDocValues.nextOrd());
emit(bytesRef.utf8ToString());
}
}
} catch (IOException e) {
throw new IllegalStateException("Cannot load doc values", e);
}
}
}
| SortedSetDocValuesStringFieldScript |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/BugPatternNaming.java | {
"start": 1681,
"end": 1743
} | class ____ be confusing",
severity = WARNING)
public final | can |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/SaveFederationQueuePolicyRequest.java | {
"start": 1360,
"end": 2385
} | class ____ {
/**
 * Creates a request to persist a federation queue policy.
 *
 * @param queue the queue the policy applies to
 * @param federationQueueWeight the queue's weight configuration
 * @param policyManagerClassName fully qualified policy-manager class name
 * @return a new, fully populated request record
 */
@Private
@Unstable
public static SaveFederationQueuePolicyRequest newInstance(
String queue, FederationQueueWeight federationQueueWeight, String policyManagerClassName) {
SaveFederationQueuePolicyRequest request =
Records.newRecord(SaveFederationQueuePolicyRequest.class);
request.setQueue(queue);
request.setFederationQueueWeight(federationQueueWeight);
request.setPolicyManagerClassName(policyManagerClassName);
return request;
}
@Public
@Unstable
public abstract FederationQueueWeight getFederationQueueWeight();
@Private
@Unstable
public abstract void setFederationQueueWeight(FederationQueueWeight federationQueueWeight);
@Public
@Unstable
public abstract String getQueue();
@Public
@Unstable
public abstract void setQueue(String queue);
@Public
@Unstable
public abstract String getPolicyManagerClassName();
@Public
@Unstable
public abstract void setPolicyManagerClassName(String className);
}
| SaveFederationQueuePolicyRequest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java | {
"start": 3230,
"end": 8582
} | class ____ {
// Shard-scoped logger for progress/debug output.
private final Logger logger;
// The commit under analysis, re-wrapped so reads go through the tracking directory.
private final IndexCommit commit;
// Directory wrapper that counts bytes read, used to attribute disk usage.
private final TrackingReadBytesDirectory directory;
// Invoked periodically so a cancelled task stops the (potentially long) analysis.
private final CancellationChecker cancellationChecker;
private IndexDiskUsageAnalyzer(ShardId shardId, IndexCommit commit, Runnable checkForCancellation) {
this.logger = Loggers.getLogger(IndexDiskUsageAnalyzer.class, shardId);
this.directory = new TrackingReadBytesDirectory(commit.getDirectory());
// Route all reads of the commit through the tracking directory.
this.commit = new FilterIndexCommit(commit) {
@Override
public Directory getDirectory() {
return directory;
}
};
this.cancellationChecker = new CancellationChecker(checkForCancellation);
}
// Entry point: analyzes the commit and returns per-field disk-usage statistics.
static IndexDiskUsageStats analyze(ShardId shardId, IndexCommit commit, Runnable checkForCancellation) throws IOException {
final IndexDiskUsageAnalyzer analyzer = new IndexDiskUsageAnalyzer(shardId, commit, checkForCancellation);
final IndexDiskUsageStats stats = new IndexDiskUsageStats(getIndexSize(commit));
analyzer.doAnalyze(stats);
return stats;
}
/**
 * Opens the commit and, segment by segment, attributes tracked disk reads to
 * each Lucene data structure (inverted index, stored fields, doc values,
 * points, norms, term vectors, kNN vectors), folding results into {@code stats}
 * and recording per-structure wall time.
 */
void doAnalyze(IndexDiskUsageStats stats) throws IOException {
long startTimeInNanos;
final ExecutionTime executionTime = new ExecutionTime();
try (DirectoryReader directoryReader = DirectoryReader.open(commit)) {
// Start byte accounting from zero so opening the reader is not attributed to any structure.
directory.resetBytesRead();
for (LeafReaderContext leaf : directoryReader.leaves()) {
// The analysis may be cancelled externally; check between segments.
cancellationChecker.checkForCancellation();
final SegmentReader reader = Lucene.segmentReader(leaf.reader());
startTimeInNanos = System.nanoTime();
analyzeInvertedIndex(reader, stats);
executionTime.invertedIndexTimeInNanos += System.nanoTime() - startTimeInNanos;
startTimeInNanos = System.nanoTime();
analyzeStoredFields(reader, stats);
executionTime.storedFieldsTimeInNanos += System.nanoTime() - startTimeInNanos;
startTimeInNanos = System.nanoTime();
analyzeDocValues(reader, stats);
executionTime.docValuesTimeInNanos += System.nanoTime() - startTimeInNanos;
startTimeInNanos = System.nanoTime();
analyzePoints(reader, stats);
executionTime.pointsTimeInNanos += System.nanoTime() - startTimeInNanos;
startTimeInNanos = System.nanoTime();
analyzeNorms(reader, stats);
executionTime.normsTimeInNanos += System.nanoTime() - startTimeInNanos;
startTimeInNanos = System.nanoTime();
analyzeTermVectors(reader, stats);
executionTime.termVectorsTimeInNanos += System.nanoTime() - startTimeInNanos;
startTimeInNanos = System.nanoTime();
analyzeKnnVectors(reader, stats);
executionTime.knnVectorsTimeInNanos += System.nanoTime() - startTimeInNanos;
}
}
logger.debug("analyzing the disk usage took {} stats: {}", executionTime, stats);
}
/**
 * Estimates per-field stored-field sizes. Only a sample of documents is
 * visited (roughly 1 in 11, always including both ends of the doc range);
 * the visited bytes are then scaled by the ratio of actual bytes read from
 * disk to the visitor's accumulated logical size.
 */
void analyzeStoredFields(SegmentReader reader, IndexDiskUsageStats stats) throws IOException {
final StoredFieldsReader storedFieldsReader = reader.getFieldsReader().getMergeInstance();
// Reset so only stored-field reads count toward the ratio below.
directory.resetBytesRead();
final TrackingSizeStoredFieldVisitor visitor = new TrackingSizeStoredFieldVisitor();
int docID = 0;
final int skipMask = 0x1FF; // 511
while (docID < reader.maxDoc()) {
cancellationChecker.logEvent();
storedFieldsReader.document(docID, visitor);
// As we already estimate the size of stored fields, we can trade off the accuracy for the speed of the estimate.
// Here we only visit 1/11 documents instead of all documents. Ideally, we should visit 1 doc then skip 10 docs
// to avoid missing many skew documents. But, documents are stored in chunks in compressed format and a chunk can
// have up to 4096 docs, we need to skip a large number of docs to avoid loading/decompressing some chunks.
if ((docID & skipMask) == skipMask && docID < reader.maxDoc() - 512) {
docID = Math.toIntExact(Math.min(docID + 5120L, reader.maxDoc() - 512L)); // always visit both ends
} else {
docID++;
}
}
if (visitor.fields.isEmpty() == false) {
// Computing the compression ratio for each chunk would provide a better estimate for each field individually.
// But it's okay to do this entire segment because source and _id are the only two stored fields in ES most the cases.
final long totalBytes = visitor.fields.values().stream().mapToLong(v -> v).sum();
final double ratio = (double) directory.getBytesRead() / (double) totalBytes;
final FieldInfos fieldInfos = reader.getFieldInfos();
for (Map.Entry<Integer, Long> field : visitor.fields.entrySet()) {
final String fieldName = fieldInfos.fieldInfo(field.getKey()).name;
final long fieldSize = (long) Math.ceil(field.getValue() * ratio);
stats.addStoredField(fieldName, fieldSize);
}
}
}
private static | IndexDiskUsageAnalyzer |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportGetTaskActionTests.java | {
"start": 2613,
"end": 9125
} | class ____ extends ESTestCase {
private ThreadPool threadPool;
private ProjectResolver projectResolver;
@Before
public void setupThreadPool() {
threadPool = new TestThreadPool(TransportGetTaskActionTests.class.getSimpleName());
projectResolver = TestProjectResolvers.usingRequestHeader(threadPool.getThreadContext());
}
@After
public final void shutdownTestNodes() throws Exception {
ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
}
public void testGetTaskActionWithMultiProjectEnabled() {
var transportService = mock(TransportService.class);
var clusterService = mock(ClusterService.class);
var nodeId = "node1";
NodeClient client = new NodeClient(Settings.EMPTY, threadPool, TestProjectResolvers.alwaysThrow()) {
@Override
@SuppressWarnings("unchecked")
public <Request extends ActionRequest, Response extends ActionResponse> void doExecute(
ActionType<Response> action,
Request request,
ActionListener<Response> listener
) {
if (TransportGetAction.TYPE.equals(action)) {
var getRequest = (GetRequest) request;
var getResponse = new GetResponse(
new GetResult(
TaskResultsService.TASK_INDEX,
getRequest.id(),
SequenceNumbers.UNASSIGNED_SEQ_NO,
SequenceNumbers.UNASSIGNED_PRIMARY_TERM,
1,
false,
null,
null,
null
)
);
((ActionListener<GetResponse>) listener).onResponse(getResponse);
} else {
fail(new AssertionError("Unexpected call"));
}
}
};
when(clusterService.localNode()).thenReturn(DiscoveryNodeUtils.create(nodeId));
var taskManager = new TaskManager(Settings.EMPTY, threadPool, Task.HEADERS_TO_COPY);
when(transportService.getTaskManager()).thenReturn(taskManager);
TransportGetTaskAction getTaskAction = new TransportGetTaskAction(
threadPool,
transportService,
new ActionFilters(emptySet()),
clusterService,
client,
NamedXContentRegistry.EMPTY,
projectResolver
);
var project1 = randomUniqueProjectId();
var project2 = randomUniqueProjectId();
var project1Task = registerRandomTaskWithProjectId(taskManager, project1);
var project2Task = registerRandomTaskWithProjectId(taskManager, project2);
var taskWithNoProjectIdHeader = registerRandomTaskWithProjectId(taskManager, null);
{
var taskId = new TaskId(nodeId, project1Task.getId());
var result = executeGetTaskWithProjectId(taskManager, getTaskAction, taskId, project1);
assertNull(result.exception);
assertNotNull(result.response);
assertThat(result.response.getTask().getTask().taskId(), equalTo(taskId));
}
{
var taskId = new TaskId(nodeId, project2Task.getId());
var result = executeGetTaskWithProjectId(taskManager, getTaskAction, taskId, project2);
assertNull(result.exception);
assertNotNull(result.response);
assertThat(result.response.getTask().getTask().taskId(), equalTo(taskId));
}
{
var taskToGet = randomFrom(project1Task, taskWithNoProjectIdHeader);
var result = executeGetTaskWithProjectId(taskManager, getTaskAction, new TaskId(nodeId, taskToGet.getId()), project2);
assertNull(result.response);
assertNotNull(result.exception);
var exception = ExceptionsHelper.unwrap(result.exception, ResourceNotFoundException.class);
assertNotNull(result.exception.toString(), exception);
}
{
var taskToGet = randomFrom(project2Task, taskWithNoProjectIdHeader);
var result = executeGetTaskWithProjectId(taskManager, getTaskAction, new TaskId(nodeId, taskToGet.getId()), project1);
assertNull(result.response);
assertNotNull(result.exception);
var exception = ExceptionsHelper.unwrap(result.exception, ResourceNotFoundException.class);
assertNotNull(result.exception.toString(), exception);
}
{
var result = executeGetTaskWithProjectId(
taskManager,
getTaskAction,
new TaskId(nodeId, taskWithNoProjectIdHeader.getId()),
randomFrom(project1, project2)
);
assertNull(result.response);
assertNotNull(result.exception);
var exception = ExceptionsHelper.unwrap(result.exception, ResourceNotFoundException.class);
assertNotNull(result.exception.toString(), exception);
}
}
private Task registerRandomTaskWithProjectId(TaskManager taskManager, ProjectId projectId) {
if (projectId == null) {
try (var ignore = threadPool.getThreadContext().newStoredContext()) {
return taskManager.register("task", "action", new BulkRequest());
}
}
AtomicReference<Task> task = new AtomicReference<>();
projectResolver.executeOnProject(projectId, () -> task.set(taskManager.register("task", "action", new BulkRequest())));
return task.get();
}
record GetTaskResult(GetTaskResponse response, Exception exception) {}
private GetTaskResult executeGetTaskWithProjectId(
TaskManager taskManager,
TransportGetTaskAction getTaskAction,
TaskId taskId,
ProjectId projectId
) {
var future = new TestPlainActionFuture<GetTaskResponse>();
projectResolver.executeOnProject(
projectId,
() -> taskManager.registerAndExecute("transport", getTaskAction, new GetTaskRequest().setTaskId(taskId), null, future)
);
try {
var resp = future.get(10, TimeUnit.SECONDS);
assertNotNull(resp);
return new GetTaskResult(resp, null);
} catch (Exception e) {
return new GetTaskResult(null, e);
}
}
}
| TransportGetTaskActionTests |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/filter/factory/RequestHeaderToRequestUriGatewayFilterFactoryIntegrationTests.java | {
"start": 2912,
"end": 3470
} | class ____ {
@Bean
public RouteLocator routeLocator(RouteLocatorBuilder builder) {
return builder.routes()
.route(r -> r.host("**.changeuri.org")
.and()
.header("X-Next-Url")
.filters(f -> f.requestHeaderToRequestUri("X-Next-Url"))
.uri("https://example.com"))
.route(r -> r.host("**.changeuri.org")
.and()
.query("url")
.filters(f -> f.changeRequestUri(
e -> Optional.of(URI.create(e.getRequest().getQueryParams().getFirst("url")))))
.uri("https://example.com"))
.build();
}
}
}
| TestConfig |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/util/ArrayBuilders.java | {
"start": 2695,
"end": 2925
} | class ____
extends PrimitiveArrayBuilder<byte[]>
{
public ByteBuilder() { }
@Override
public final byte[] _constructArray(int len) { return new byte[len]; }
}
public final static | ByteBuilder |
java | google__guice | extensions/persist/test/com/google/inject/persist/jpa/EntityManagerFactoryProvisionTest.java | {
"start": 984,
"end": 1798
} | class ____ extends TestCase {
private Injector injector;
@Override
public void setUp() {
injector = Guice.createInjector(new JpaPersistModule("testUnit"));
}
@Override
public final void tearDown() {
injector.getInstance(UnitOfWork.class).end();
injector.getInstance(EntityManagerFactory.class).close();
}
public void testSessionCreateOnInjection() {
assertEquals(
"SINGLETON VIOLATION " + UnitOfWork.class.getName(),
injector.getInstance(UnitOfWork.class),
injector.getInstance(UnitOfWork.class));
//startup persistence
injector.getInstance(PersistService.class).start();
injector.getInstance(UnitOfWork.class).begin();
//obtain em
assertTrue(injector.getInstance(EntityManager.class).isOpen());
}
}
| EntityManagerFactoryProvisionTest |
java | quarkusio__quarkus | independent-projects/tools/codestarts/src/test/java/io/quarkus/devtools/codestarts/CodestartProjectGenerationTest.java | {
"start": 462,
"end": 6968
} | class ____ {
private static final TestCodestartResourceLoader RESOURCE_LOADER = new TestCodestartResourceLoader();
private final Path projectPath = Paths.get("target/codestarts-test");
@BeforeEach
void setUp() {
FileUtils.deleteQuietly(projectPath.toFile());
assertThat(projectPath).doesNotExist();
}
@Test
void checkStaticConflictFail() throws IOException {
final CodestartProjectInput input = CodestartProjectInput.builder()
.build();
final CodestartCatalog<CodestartProjectInput> catalog = loadSpecific("static-conflicting-file");
final CodestartProjectDefinition projectDefinition = catalog.createProject(input);
Assertions.assertThatExceptionOfType(CodestartStructureException.class)
.isThrownBy(() -> projectDefinition.generate(Files.createTempDirectory("checkStaticConflictFail")))
.withMessageContaining("Multiple files found for path with 'fail-on-duplicate' FileStrategy:")
.withMessageContaining(".tooling-t");
}
@Test
void checkConflictFail() throws IOException {
final CodestartProjectInput input = CodestartProjectInput.builder()
.build();
final CodestartCatalog<CodestartProjectInput> catalog = loadSpecific("conflicting-file");
final CodestartProjectDefinition projectDefinition = catalog.createProject(input);
Assertions.assertThatExceptionOfType(CodestartStructureException.class)
.isThrownBy(() -> projectDefinition.generate(Files.createTempDirectory("checkConflictFail")))
.withMessageContaining("Multiple files found for path with 'fail-on-duplicate' FileStrategy:")
.withMessageContaining(".tooling-t");
}
@Test
void checkConflictingFallbackProjectFail() throws IOException {
final CodestartProjectInput input = CodestartProjectInput.builder()
.build();
final CodestartCatalog<CodestartProjectInput> catalog = loadSpecific("conflicting-fallback-project");
Assertions.assertThatExceptionOfType(CodestartStructureException.class)
.isThrownBy(() -> catalog.createProject(input))
.withMessageContaining("Multiple fallback found for a base codestart of type: 'PROJECT'");
}
@Test
void checkDefaultProject() throws IOException {
final CodestartProjectInput input = CodestartProjectInput.builder()
.build();
final CodestartProjectDefinition projectDefinition = load().createProject(input);
assertThat(projectDefinition.getRequiredCodestart(CodestartType.PROJECT).getName()).isEqualTo("foo");
assertThat(projectDefinition.getRequiredCodestart(CodestartType.LANGUAGE).getName()).isEqualTo("a");
assertThat(projectDefinition.getRequiredCodestart(CodestartType.BUILDTOOL).getName()).isEqualTo("y");
assertThat(projectDefinition.getRequiredCodestart(CodestartType.CONFIG).getName()).isEqualTo("config-properties");
assertThat(projectDefinition.getExtraCodestarts()).extracting(Codestart::getName)
.containsExactlyInAnyOrder("t");
final Path targetDirectory = projectPath.resolve("default-project");
projectDefinition.generate(targetDirectory);
assertThat(targetDirectory.resolve("README.md")).hasContent("Base readme world y");
assertThat(targetDirectory.resolve("config.properties")).hasContent("foo.bar=baz\nfoo.foo=bar\n");
assertThat(targetDirectory.resolve(".gitignore")).hasContent("base-ignore1\nbase-ignore2\n");
assertThat(targetDirectory.resolve("a/.tooling-t")).hasContent("a/.tooling-t");
assertThat(targetDirectory.resolve(".tooling-t")).hasContent(".tooling-t");
assertThat(targetDirectory.resolve("ybuild.build")).hasContent("fooa\n\nbara\n\nappend test");
}
@Test
void checkSpecificProject() throws IOException {
final CodestartProjectInput input = CodestartProjectInput.builder()
.addCodestart("b")
.addCodestart("example-with-b")
.addCodestart("maven")
.addCodestart("config-yaml")
.putData("project.version", "1.2.3")
.putData("prop1", "prop-1-nonamespace")
.putData("maven.prop2", "prop-2-namespaced")
.putData("example-with-b.my-file-name", "my-dynamic-file-from-input")
.build();
final CodestartProjectDefinition projectDefinition = load().createProject(input);
assertThat(projectDefinition.getRequiredCodestart(CodestartType.PROJECT).getName()).isEqualTo("foo");
assertThat(projectDefinition.getRequiredCodestart(CodestartType.LANGUAGE).getName()).isEqualTo("b");
assertThat(projectDefinition.getRequiredCodestart(CodestartType.BUILDTOOL).getName()).isEqualTo("maven");
assertThat(projectDefinition.getRequiredCodestart(CodestartType.CONFIG).getName()).isEqualTo("config-yaml");
assertThat(projectDefinition.getExtraCodestarts()).extracting(Codestart::getName)
.containsExactlyInAnyOrder("example-with-b");
final Path targetDirectory = projectPath.resolve("specific-project");
projectDefinition.generate(targetDirectory);
assertThat(targetDirectory.resolve("README.md")).hasContent("Base readme world maven");
assertThat(targetDirectory.resolve("config.yml")).hasContent("example: \"code\"");
assertThat(targetDirectory.resolve(".gitignore")).hasContent("base-ignore1\nbase-ignore2\n");
assertThat(targetDirectory.resolve("b/example-code")).hasContent("example-code");
assertThat(targetDirectory.resolve("my-dynamic-dir/so-cool/my-dynamic-file-from-input.test"))
.hasContent("hello my-dynamic-file-from-input");
assertThat(targetDirectory.resolve("pom.xml"))
.hasSameTextualContentAs(getResource("expected-pom-maven-merge.xml"));
}
public static CodestartCatalog<CodestartProjectInput> load() throws IOException {
return CodestartCatalogLoader.loadDefaultCatalog(RESOURCE_LOADER, "codestarts/core", "codestarts/examples");
}
public static CodestartCatalog<CodestartProjectInput> loadSpecific(String s) throws IOException {
return CodestartCatalogLoader.loadDefaultCatalog(RESOURCE_LOADER, "codestarts/core", "codestarts/examples",
"codestarts/" + s);
}
}
| CodestartProjectGenerationTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/lock/AbstractSelectLockingStrategy.java | {
"start": 1111,
"end": 5384
} | class ____ implements LockingStrategy {
private final EntityPersister lockable;
private final LockMode lockMode;
private final String waitForeverSql;
protected AbstractSelectLockingStrategy(EntityPersister lockable, LockMode lockMode) {
this.lockable = lockable;
this.lockMode = lockMode;
this.waitForeverSql = generateLockString( WAIT_FOREVER );
}
protected EntityPersister getLockable() {
return lockable;
}
protected LockMode getLockMode() {
return lockMode;
}
protected String generateLockString(Timeout lockTimeout) {
// for now, use the deprecated form passing the milliseconds to avoid copy/paste.
// move that logic here when we can remove that overload.
return generateLockString( lockTimeout.milliseconds() );
}
/**
* @deprecated Use {@linkplain #generateLockString(Timeout)} instead.
*/
@Deprecated
protected String generateLockString(int lockTimeout) {
final var factory = lockable.getFactory();
final var lockOptions = new LockOptions( lockMode );
lockOptions.setTimeOut( lockTimeout );
final var select =
new SimpleSelect( factory )
.setLockOptions( lockOptions )
.setTableName( lockable.getRootTableName() )
.addColumn( lockable.getRootTableIdentifierColumnNames()[0] )
.addRestriction( lockable.getRootTableIdentifierColumnNames() );
if ( lockable.isVersioned() ) {
select.addRestriction( lockable.getVersionColumnName() );
}
if ( factory.getSessionFactoryOptions().isCommentsEnabled() ) {
select.setComment( lockMode + " lock " + lockable.getEntityName() );
}
return select.toStatementString();
}
@Override
public void lock(Object id, Object version, Object object, int timeout, SharedSessionContractImplementor session)
throws StaleObjectStateException, JDBCException {
final String sql = determineSql( timeout );
final var factory = session.getFactory();
final var lockable = getLockable();
try {
final var jdbcCoordinator = session.getJdbcCoordinator();
final var preparedStatement = jdbcCoordinator.getStatementPreparer().prepareStatement( sql );
try {
lockable.getIdentifierType().nullSafeSet( preparedStatement, id, 1, session );
if ( lockable.isVersioned() ) {
lockable.getVersionType().nullSafeSet(
preparedStatement,
version,
lockable.getIdentifierType().getColumnSpan( factory.getRuntimeMetamodels() ) + 1,
session
);
}
final var resultSet = jdbcCoordinator.getResultSetReturn().extract( preparedStatement, sql );
try {
if ( !resultSet.next() ) {
final var statistics = factory.getStatistics();
if ( statistics.isStatisticsEnabled() ) {
statistics.optimisticFailure( lockable.getEntityName() );
}
throw new StaleObjectStateException( lockable.getEntityName(), id );
}
}
finally {
jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( resultSet, preparedStatement );
}
}
finally {
jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( preparedStatement );
jdbcCoordinator.afterStatementExecution();
}
}
catch ( SQLException sqle ) {
throw convertException( object, jdbcException( id, session, sqle, sql ) );
}
}
private JDBCException jdbcException(Object id, SharedSessionContractImplementor session, SQLException sqle, String sql) {
return session.getJdbcServices().getSqlExceptionHelper()
.convert( sqle, "could not lock: " + infoString( lockable, id, session.getFactory() ), sql );
}
protected HibernateException convertException(Object entity, JDBCException ex) {
return ex;
}
protected String determineSql(int timeout) {
return switch ( timeout ) {
case WAIT_FOREVER_MILLI -> waitForeverSql;
case NO_WAIT_MILLI -> getNoWaitSql();
case SKIP_LOCKED_MILLI -> getSkipLockedSql();
default -> generateLockString( timeout );
};
}
private String noWaitSql;
protected String getNoWaitSql() {
if ( noWaitSql == null ) {
noWaitSql = generateLockString( NO_WAIT_MILLI );
}
return noWaitSql;
}
private String skipLockedSql;
protected String getSkipLockedSql() {
if ( skipLockedSql == null ) {
skipLockedSql = generateLockString( SKIP_LOCKED_MILLI );
}
return skipLockedSql;
}
}
| AbstractSelectLockingStrategy |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryLongToIntConversionTest.java | {
"start": 8589,
"end": 9099
} | class ____ {
void acceptsLong(long value) {}
void foo() {
long x = 1L;
acceptsLong(Longs.constrainToRange(x, Integer.MIN_VALUE, Integer.MAX_VALUE));
}
}
""")
.setFixChooser(SECOND)
.doTest(TEXT_MATCH);
}
@Test
public void suggestReplacingTypeCastWithoutSpacingWithConstrainToRange() {
refactoringHelper
.addInputLines(
"in/A.java",
"""
public | A |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/metrics/dump/MetricDumpSerialization.java | {
"start": 3225,
"end": 5219
} | class ____ implements Serializable {
private static final long serialVersionUID = 6928770855951536906L;
public final byte[] serializedCounters;
public final byte[] serializedGauges;
public final byte[] serializedMeters;
public final byte[] serializedHistograms;
public final int numCounters;
public final int numGauges;
public final int numMeters;
public final int numHistograms;
public MetricSerializationResult(
byte[] serializedCounters,
byte[] serializedGauges,
byte[] serializedMeters,
byte[] serializedHistograms,
int numCounters,
int numGauges,
int numMeters,
int numHistograms) {
Preconditions.checkNotNull(serializedCounters);
Preconditions.checkNotNull(serializedGauges);
Preconditions.checkNotNull(serializedMeters);
Preconditions.checkNotNull(serializedHistograms);
Preconditions.checkArgument(numCounters >= 0);
Preconditions.checkArgument(numGauges >= 0);
Preconditions.checkArgument(numMeters >= 0);
Preconditions.checkArgument(numHistograms >= 0);
this.serializedCounters = serializedCounters;
this.serializedGauges = serializedGauges;
this.serializedMeters = serializedMeters;
this.serializedHistograms = serializedHistograms;
this.numCounters = numCounters;
this.numGauges = numGauges;
this.numMeters = numMeters;
this.numHistograms = numHistograms;
}
}
// -------------------------------------------------------------------------
// Serialization
// -------------------------------------------------------------------------
/** Serializes a set of metrics into a {@link MetricSerializationResult}. */
public static | MetricSerializationResult |
java | apache__kafka | shell/src/main/java/org/apache/kafka/shell/command/LsCommandHandler.java | {
"start": 8354,
"end": 10713
} | class ____ {
private final int[] columnWidths;
private final int entriesPerColumn;
ColumnSchema(int numColumns, int entriesPerColumn) {
this.columnWidths = new int[numColumns];
this.entriesPerColumn = entriesPerColumn;
}
ColumnSchema setColumnWidths(Integer... widths) {
for (int i = 0; i < widths.length; i++) {
columnWidths[i] = widths[i];
}
return this;
}
void process(int entryIndex, String output) {
int columnIndex = entryIndex / entriesPerColumn;
columnWidths[columnIndex] = Math.max(
columnWidths[columnIndex], output.length() + 2);
}
int totalWidth() {
int total = 0;
for (int columnWidth : columnWidths) {
total += columnWidth;
}
return total;
}
int numColumns() {
return columnWidths.length;
}
int columnWidth(int columnIndex) {
return columnWidths[columnIndex];
}
int entriesPerColumn() {
return entriesPerColumn;
}
@Override
public int hashCode() {
return Objects.hash(Arrays.hashCode(columnWidths), entriesPerColumn);
}
@Override
public boolean equals(Object o) {
if (!(o instanceof ColumnSchema other)) return false;
if (entriesPerColumn != other.entriesPerColumn) return false;
return Arrays.equals(columnWidths, other.columnWidths);
}
@Override
public String toString() {
StringBuilder bld = new StringBuilder("ColumnSchema(columnWidths=[");
String prefix = "";
for (int columnWidth : columnWidths) {
bld.append(prefix);
bld.append(columnWidth);
prefix = ", ";
}
bld.append("], entriesPerColumn=").append(entriesPerColumn).append(")");
return bld.toString();
}
}
@Override
public int hashCode() {
return Objects.hashCode(targets);
}
@Override
public boolean equals(Object other) {
if (!(other instanceof LsCommandHandler o)) return false;
return Objects.equals(o.targets, targets);
}
}
| ColumnSchema |
java | apache__camel | components/camel-xmlsecurity/src/test/java/org/apache/camel/component/xmlsecurity/XAdESSignaturePropertiesTest.java | {
"start": 50549,
"end": 51622
} | class ____ extends XAdESSignatureProperties {
private KeyStore keystore = getKeystore();
private String alias = "bob";
CertChainXAdESSignatureProperties() {
setAddSigningTime(false);
}
@Override
protected X509Certificate getSigningCertificate() {
return null;
}
@Override
protected X509Certificate[] getSigningCertificateChain() throws Exception {
Certificate[] certs = keystore.getCertificateChain(alias);
X509Certificate[] result = new X509Certificate[certs.length];
int counter = 0;
for (Certificate cert : certs) {
result[counter] = (X509Certificate) cert;
counter++;
}
return result;
}
private static KeyStore getKeystore() {
try {
return TestKeystore.getKeyStore();
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
}
}
| CertChainXAdESSignatureProperties |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/SpringXPathFilterWithNamespaceTest.java | {
"start": 1077,
"end": 1546
} | class ____ extends XPathFilterTest {
@Override
@BeforeEach
public void setUp() throws Exception {
matchingBody = "<person name='James' city='London' xmlns='http://example.com/person'/>";
super.setUp();
}
@Override
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this, "org/apache/camel/spring/processor/xpathFilterWithNamespace.xml");
}
}
| SpringXPathFilterWithNamespaceTest |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/legacy/RecursiveComparisonAssert_isEqualTo_Test.java | {
"start": 12623,
"end": 18175
} | class ____ {
public String color;
public LightString(String value) {
this.color = value;
}
}
@Test
void should_not_treat_Path_as_Iterable_to_avoid_infinite_recursion() {
final Container container1 = new Container("/tmp/example");
final Container container2 = new Container("/tmp/example");
assertThat(container1).usingRecursiveComparison(recursiveComparisonConfiguration)
.isEqualTo(container2)
.ignoringAllOverriddenEquals()
.isEqualTo(container2);
}
// issue #2434
@Test
void should_treat_class_cast_exception_as_comparison_difference_when_comparing_lists() {
// GIVEN
Wrapper a = new Wrapper(Double.MAX_VALUE);
Wrapper b = new Wrapper(Integer.MAX_VALUE);
Wrappers actual = new Wrappers(a, b);
Wrappers expected = new Wrappers(b, a);
// WHEN/THEN
then(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
.ignoringCollectionOrderInFields("values")
.isEqualTo(expected);
}
@Test
void should_report_class_cast_exception_as_comparison_difference() {
// GIVEN
Wrapper actual = new Wrapper(1.0);
Wrapper expected = new Wrapper(5);
// WHEN/THEN
compareRecursivelyFailsWithDifferences(actual, expected, diff("value", 1.0, 5));
}
@Test
void should_treat_class_cast_exception_as_comparison_difference_when_comparing_lists_with_specific_equals() {
// GIVEN
Wrapper a = new Wrapper(1.001);
Wrapper b = new Wrapper(1);
Wrappers actual = new Wrappers(a, b);
Wrappers expected = new Wrappers(b, a);
// WHEN/THEN
then(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
.ignoringCollectionOrderInFields("values")
.withEqualsForType((x, y) -> Math.abs(x - y) <= 0.05, Double.class)
.isEqualTo(expected);
}
@Test
void should_treat_class_cast_exception_as_comparison_difference() {
// GIVEN
Wrapper a = new Wrapper(Double.MAX_VALUE);
Wrapper b = new Wrapper(Integer.MAX_VALUE);
Wrappers actual = new Wrappers(a, b);
Wrappers expected = new Wrappers(b, a);
// WHEN/THEN
then(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
.withComparatorForFields(new DoubleComparator(0.01), "values.value")
.ignoringCollectionOrderInFields("values")
.isEqualTo(expected);
}
@Test
void should_not_handle_value_node_as_iterable() throws IOException {
// GIVEN
ObjectMapper om = new ObjectMapper();
JsonNode actual = om.readTree("{\"someNotImportantValue\":1,\"importantValue\":\"10\"}");
JsonNode expected = om.readTree("{\"someNotImportantValue\":10,\"importantValue\":\"1\"}");
// WHEN/THEN
ComparisonDifference difference1 = javaTypeDiff("_children.importantValue._value", "10", "1");
ComparisonDifference difference2 = javaTypeDiff("_children.someNotImportantValue._value", 1, 10);
compareRecursivelyFailsWithDifferences(actual, expected, difference1, difference2);
}
// issue #2459
@Test
void should_not_handle_object_node_as_iterable() throws IOException {
// GIVEN
ObjectMapper om = new ObjectMapper();
JsonNode actual = om.readTree("{\"someNotImportantValue\":1,\"importantValue\":\"10\"}");
JsonNode expected = om.readTree("{\"foo\":1,\"bar\":\"10\"}");
// WHEN/THEN
ComparisonDifference difference = diff("_children",
mapOf(entry("importantValue", "10"), entry("someNotImportantValue", 1)),
mapOf(entry("bar", "10"), entry("foo", 1)),
("The following keys were not found in the actual map value:%n [\"foo\", \"bar\"]" +
"%nThe following keys were present in the actual map value, but not in the expected map value:%n [\"someNotImportantValue\", \"importantValue\"]").formatted());
compareRecursivelyFailsWithDifferences(actual, expected, difference);
}
@Test
void issue_2475_example_should_succeed() {
then(issue2475Map()).usingRecursiveComparison(recursiveComparisonConfiguration)
.isEqualTo(issue2475Map());
}
private static Map<String, List<String>> issue2475Map() {
Map<String, List<String>> map = newHashMap("VMP", list("OztNUFPcnceerHAppabgHT",
"IW",
"AfBSmPEYfOBwGzWHzQveOi",
"dSalYEgeHNTe",
"mXjwEZBxeimMiWrmRVePVAwWHtRXfqQyD",
"TGgLRwnPQUbZWFr",
"pQWceZdDmTXdyQXcJdB",
"ProMMnAnRXg"));
map.put("Uko", list("pUisdBNIy",
"rfX",
"BagGdILqDLrNRfotwKqjCVNOJxSNoYKtSgBLMEJEJymhZjZvDuwvsqBiJuJpmvWOkiuSobCjRkeWVenaqIdlltsiUMPNtKcDMOAKiRRHHfikxUnOotnJFzNjwyYrcbkNBjxlvici",
"AR",
"dDvIHrhSxskuTvDSdUZwoUDdxFxxaxBWkTiprWPqSPZumdoHkvwPRrecqCLagzeeOjCuSufGwLoKATVaXfIPmjYsVfGuwlyEysXwWbVfPLgbVkaPaQdcVFQfADfDKEJeuQZlKKSsfuXICYWrmOGILeuqXKZyfEXHLnGILUcWmaVRRjrSjXXnHiTXYgdkrDeLEXZnAlbIEUYSblPqOaxuvpmOS"));
return map;
}
static | LightString |
java | spring-projects__spring-security | buildSrc/src/test/resources/samples/showcase/sgbcs-core/src/main/java/core/CoreClass.java | {
"start": 57,
"end": 103
} | class ____ {
public void run() {
}
}
| CoreClass |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/support/monitor/MonitorServlet.java | {
"start": 800,
"end": 1599
} | class ____ extends ResourceServlet {
private String mappingPath = "support/http/resources";
private Set<String> mapping = new HashSet<String>();
public MonitorServlet() {
super("support/monitor/resources");
mapping.add("/css/bootstrap.min.css");
mapping.add("/js/bootstrap.min.js");
mapping.add("/js/jquery.min.js");
}
protected String getFilePath(String fileName) {
if (mapping.contains(fileName)) {
return mappingPath + fileName;
}
return super.getFilePath(fileName);
}
@Override
protected String process(String url) {
// data.json?type=dataSource
// data.json?type=sql
// data.json?type=webapp
// data.json?type=weburi
return null;
}
}
| MonitorServlet |
java | google__gson | gson/src/test/java/com/google/gson/functional/NullObjectAndFieldTest.java | {
"start": 6854,
"end": 6962
} | class ____ {
String str;
int[] array;
Collection<String> col;
}
private static | ClassWithMembers |
java | grpc__grpc-java | core/src/main/java/io/grpc/internal/SerializingExecutor.java | {
"start": 5677,
"end": 6316
} | class ____ extends AtomicHelper {
private final AtomicIntegerFieldUpdater<SerializingExecutor> runStateUpdater;
private FieldUpdaterAtomicHelper(
AtomicIntegerFieldUpdater<SerializingExecutor> runStateUpdater) {
this.runStateUpdater = runStateUpdater;
}
@Override
public boolean runStateCompareAndSet(SerializingExecutor obj, int expect, int update) {
return runStateUpdater.compareAndSet(obj, expect, update);
}
@Override
public void runStateSet(SerializingExecutor obj, int newValue) {
runStateUpdater.set(obj, newValue);
}
}
private static final | FieldUpdaterAtomicHelper |
java | quarkusio__quarkus | extensions/devui/runtime/src/main/java/io/quarkus/devui/runtime/continuoustesting/ContinuousTestingJsonRPCState.java | {
"start": 1321,
"end": 2718
} | class ____ {
private Counts counts;
private long totalTime;
private String[] tags;
private Item[] passed;
private Item[] failed;
private Item[] skipped;
public Counts getCounts() {
return counts;
}
public Result setCounts(Counts counts) {
this.counts = counts;
return this;
}
public long getTotalTime() {
return totalTime;
}
public Result setTotalTime(long totalTime) {
this.totalTime = totalTime;
return this;
}
public String[] getTags() {
return tags;
}
public Result setTags(String[] tags) {
this.tags = tags;
return this;
}
public Item[] getPassed() {
return passed;
}
public Result setPassed(Item[] passed) {
this.passed = passed;
return this;
}
public Item[] getFailed() {
return failed;
}
public Result setFailed(Item[] failed) {
this.failed = failed;
return this;
}
public Item[] getSkipped() {
return skipped;
}
public Result setSkipped(Item[] skipped) {
this.skipped = skipped;
return this;
}
public static | Result |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/cluster/selector/WeightedRoundRobinSelectorTest.java | {
"start": 1065,
"end": 3069
} | class ____ {
@Parameters
public static Collection<Object[]> data() {
List<Object[]> objects = new ArrayList<>();
Map<String, Weight> map = new HashMap<>();
map.put("foo", new Weight(38));
map.put("bar", new Weight(13));
objects.add(new Object[]{map});
map = new HashMap<>();
map.put("foo", new Weight(91));
map.put("bar", new Weight(22));
map.put("baz", new Weight(115));
objects.add(new Object[]{map});
map = new HashMap<>();
map.put("foo", new Weight(28));
map.put("bar", new Weight(91));
map.put("baz", new Weight(28));
map.put("qux", new Weight(13));
map.put("quux", new Weight(28));
objects.add(new Object[]{map});
return objects;
}
private final Map<String, Weight> weights;
private final int totalWeight;
private final WeightedRoundRobinSelector selector;
public WeightedRoundRobinSelectorTest(Map<String, Weight> weights) {
this.weights = weights;
totalWeight = weights.values().stream().mapToInt(Weight::value).sum();
selector = new WeightedRoundRobinSelector(weights);
}
@Test
public void testSelectForSend() {
List<String> list = IntStream.range(0, totalWeight * 10)
.mapToObj(i -> selector.selectForSend())
.collect(toList());
Map<String, Integer> counts = new HashMap<>();
for (String nodeId : list) {
assertTrue(weights.containsKey(nodeId));
counts.merge(nodeId, 1, Math::addExact);
}
for (Map.Entry<String, Integer> count : counts.entrySet()) {
assertEquals(10 * weights.get(count.getKey()).value(), count.getValue().intValue());
}
}
@Test
public void testSelectForPublish() {
for (int i = 0; i < 10; i++) {
Iterable<String> iterable = selector.selectForPublish();
List<String> list = StreamSupport.stream(iterable.spliterator(), false).collect(toList());
assertTrue(list.containsAll(weights.keySet()));
assertTrue(weights.keySet().containsAll(list));
}
}
}
| WeightedRoundRobinSelectorTest |
java | apache__flink | flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/pekko/RobustActorSystem.java | {
"start": 1630,
"end": 4396
} | class ____ extends ActorSystemImpl {
public RobustActorSystem(
String name,
Config applicationConfig,
ClassLoader classLoader,
Option<ExecutionContext> defaultExecutionContext,
ActorSystemSetup setup) {
super(name, applicationConfig, classLoader, defaultExecutionContext, Option.empty(), setup);
}
public static RobustActorSystem create(String name, Config applicationConfig) {
return create(name, applicationConfig, FatalExitExceptionHandler.INSTANCE);
}
@VisibleForTesting
static RobustActorSystem create(
String name,
Config applicationConfig,
Thread.UncaughtExceptionHandler uncaughtExceptionHandler) {
return create(
name,
ActorSystemSetup.create(
BootstrapSetup.create(
Optional.empty(),
Optional.of(applicationConfig),
Optional.empty())),
uncaughtExceptionHandler);
}
private static RobustActorSystem create(
String name,
ActorSystemSetup setup,
Thread.UncaughtExceptionHandler uncaughtExceptionHandler) {
final Optional<BootstrapSetup> bootstrapSettings = setup.get(BootstrapSetup.class);
final ClassLoader classLoader = RobustActorSystem.class.getClassLoader();
final Config appConfig =
bootstrapSettings
.map(BootstrapSetup::config)
.flatMap(RobustActorSystem::toJavaOptional)
.orElseGet(() -> ConfigFactory.load(classLoader));
final Option<ExecutionContext> defaultEC =
toScalaOption(
bootstrapSettings
.map(BootstrapSetup::defaultExecutionContext)
.flatMap(RobustActorSystem::toJavaOptional));
final PostShutdownClassLoadingErrorFilter postShutdownClassLoadingErrorFilter =
new PostShutdownClassLoadingErrorFilter(uncaughtExceptionHandler);
final RobustActorSystem robustActorSystem =
new RobustActorSystem(name, appConfig, classLoader, defaultEC, setup) {
@Override
public Thread.UncaughtExceptionHandler uncaughtExceptionHandler() {
return postShutdownClassLoadingErrorFilter;
}
};
robustActorSystem.registerOnTermination(
postShutdownClassLoadingErrorFilter::notifyShutdownComplete);
robustActorSystem.start();
return robustActorSystem;
}
private static | RobustActorSystem |
java | elastic__elasticsearch | modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java | {
"start": 1811,
"end": 9143
} | class ____ extends ESTestCase {
APMAgentSettings apmAgentSettings = spy(new APMAgentSettings());
APMTelemetryProvider apmTelemetryProvider = mock(Mockito.RETURNS_DEEP_STUBS);
/**
* Check that when the tracer is enabled, it also sets the APM agent's recording system property to true.
*/
public void testEnableTracing() {
for (boolean metricsEnabled : List.of(true, false)) {
clearInvocations(apmAgentSettings, apmTelemetryProvider.getTracer());
Settings update = Settings.builder()
.put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true)
.put(TELEMETRY_METRICS_ENABLED_SETTING.getKey(), metricsEnabled)
.build();
apmAgentSettings.initAgentSystemProperties(update);
verify(apmAgentSettings).setAgentSetting("recording", "true");
clearInvocations(apmAgentSettings);
Settings initial = Settings.builder().put(update).put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), false).build();
triggerUpdateConsumer(initial, update);
verify(apmAgentSettings).setAgentSetting("recording", "true");
verify(apmTelemetryProvider.getTracer()).setEnabled(true);
}
}
public void testEnableMetrics() {
for (boolean tracingEnabled : List.of(true, false)) {
clearInvocations(apmAgentSettings, apmTelemetryProvider.getMeterService());
Settings update = Settings.builder()
.put(TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true)
.put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), tracingEnabled)
.build();
apmAgentSettings.initAgentSystemProperties(update);
verify(apmAgentSettings).setAgentSetting("recording", "true");
clearInvocations(apmAgentSettings);
Settings initial = Settings.builder().put(update).put(TELEMETRY_METRICS_ENABLED_SETTING.getKey(), false).build();
triggerUpdateConsumer(initial, update);
verify(apmAgentSettings).setAgentSetting("recording", "true");
verify(apmTelemetryProvider.getMeterService()).setEnabled(true);
}
}
/**
* Check that when the tracer is disabled, it also sets the APM agent's recording system property to false unless metrics are enabled.
*/
public void testDisableTracing() {
for (boolean metricsEnabled : List.of(true, false)) {
clearInvocations(apmAgentSettings, apmTelemetryProvider.getTracer());
Settings update = Settings.builder()
.put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), false)
.put(TELEMETRY_METRICS_ENABLED_SETTING.getKey(), metricsEnabled)
.build();
apmAgentSettings.initAgentSystemProperties(update);
verify(apmAgentSettings).setAgentSetting("recording", Boolean.toString(metricsEnabled));
clearInvocations(apmAgentSettings);
Settings initial = Settings.builder().put(update).put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true).build();
triggerUpdateConsumer(initial, update);
verify(apmAgentSettings).setAgentSetting("recording", Boolean.toString(metricsEnabled));
verify(apmTelemetryProvider.getTracer()).setEnabled(false);
}
}
public void testDisableMetrics() {
for (boolean tracingEnabled : List.of(true, false)) {
clearInvocations(apmAgentSettings, apmTelemetryProvider.getMeterService());
Settings update = Settings.builder()
.put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), tracingEnabled)
.put(TELEMETRY_METRICS_ENABLED_SETTING.getKey(), false)
.build();
apmAgentSettings.initAgentSystemProperties(update);
verify(apmAgentSettings).setAgentSetting("recording", Boolean.toString(tracingEnabled));
clearInvocations(apmAgentSettings);
Settings initial = Settings.builder().put(update).put(TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build();
triggerUpdateConsumer(initial, update);
verify(apmAgentSettings).setAgentSetting("recording", Boolean.toString(tracingEnabled));
verify(apmTelemetryProvider.getMeterService()).setEnabled(false);
}
}
private void triggerUpdateConsumer(Settings initial, Settings update) {
ClusterService clusterService = mock();
ClusterSettings clusterSettings = new ClusterSettings(
initial,
Set.of(
TELEMETRY_TRACING_ENABLED_SETTING,
TELEMETRY_METRICS_ENABLED_SETTING,
TELEMETRY_TRACING_NAMES_INCLUDE_SETTING,
TELEMETRY_TRACING_NAMES_EXCLUDE_SETTING,
TELEMETRY_TRACING_SANITIZE_FIELD_NAMES,
APM_AGENT_SETTINGS
)
);
when(clusterService.getClusterSettings()).thenReturn(clusterSettings);
apmAgentSettings.addClusterSettingsListeners(clusterService, apmTelemetryProvider);
clusterSettings.applySettings(update);
}
/**
* Check that when cluster settings are synchronised with the system properties, agent settings are set.
*/
public void testSetAgentSettings() {
Settings settings = Settings.builder()
.put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true)
.put(APM_AGENT_SETTINGS.getKey() + "span_compression_enabled", "true")
.build();
apmAgentSettings.initAgentSystemProperties(settings);
verify(apmAgentSettings).setAgentSetting("recording", "true");
verify(apmAgentSettings).setAgentSetting("span_compression_enabled", "true");
}
/**
* Check that invalid or forbidden APM agent settings are rejected.
*/
public void testRejectForbiddenOrUnknownAgentSettings() {
String prefix = APM_AGENT_SETTINGS.getKey();
Settings settings = Settings.builder().put(prefix + "unknown", "true").build();
Exception exception = expectThrows(IllegalArgumentException.class, () -> APM_AGENT_SETTINGS.getAsMap(settings));
assertThat(exception.getMessage(), containsString("[" + prefix + "unknown]"));
// though, accept / ignore nested global_labels
var map = APMAgentSettings.APM_AGENT_SETTINGS.getAsMap(Settings.builder().put(prefix + "global_labels.abc", "123").build());
assertThat(map, hasEntry("global_labels.abc", "123"));
}
public void testTelemetryTracingSanitizeFieldNamesFallbackDefault() {
List<String> included = TELEMETRY_TRACING_SANITIZE_FIELD_NAMES.get(Settings.EMPTY);
assertThat(included, hasItem("password")); // and more defaults
}
/**
* Check that invalid or forbidden APM agent settings are rejected if their last part resembles an allowed setting.
*/
public void testRejectUnknownSettingResemblingAnAllowedOne() {
Settings settings = Settings.builder().put(APM_AGENT_SETTINGS.getKey() + "unknown.service_name", "true").build();
Exception exception = expectThrows(IllegalArgumentException.class, () -> APM_AGENT_SETTINGS.getAsMap(settings));
assertThat(exception.getMessage(), containsString("[telemetry.agent.unknown.service_name]"));
}
}
| APMAgentSettingsTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/proxy/BytecodeEnhancedLazyLoadingOnDeletedEntityTest.java | {
"start": 1675,
"end": 3428
} | class ____ {
@AfterEach
public void tearDown(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
public void accessUnloadedLazyAssociationOnDeletedOwner(SessionFactoryScope scope) {
scope.inTransaction( s -> {
AssociationOwner owner = new AssociationOwner();
owner.setId( 1 );
for ( int i = 0; i < 2; i++ ) {
AssociationNonOwner nonOwner = new AssociationNonOwner();
nonOwner.setId( i );
s.persist( nonOwner );
nonOwner.getOwners().add( owner );
owner.getNonOwners().add( nonOwner );
}
s.persist( owner );
} );
assertThatThrownBy( () -> scope.inTransaction( session -> {
AssociationOwner owner = session.getReference( AssociationOwner.class, 1 );
session.remove( owner );
session.flush();
owner.getNonOwners().size();
} ) )
.isInstanceOf( LazyInitializationException.class )
.hasMessageContaining(
"Could not locate EntityEntry for the collection owner in the PersistenceContext" );
}
@Test
public void accessUnloadedLazyAssociationOnDeletedNonOwner(SessionFactoryScope scope) {
scope.inTransaction( s -> {
AssociationNonOwner nonOwner = new AssociationNonOwner();
nonOwner.setId( 1 );
s.persist( nonOwner );
} );
assertThatThrownBy( () -> scope.inTransaction( session -> {
AssociationNonOwner nonOwner = session.getReference( AssociationNonOwner.class, 1 );
session.remove( nonOwner );
session.flush();
nonOwner.getOwners().size();
} ) )
.isInstanceOf( LazyInitializationException.class )
.hasMessageContaining(
"Could not locate EntityEntry for the collection owner in the PersistenceContext" );
}
@Entity(name = "AOwner")
@Table
static | BytecodeEnhancedLazyLoadingOnDeletedEntityTest |
java | elastic__elasticsearch | modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java | {
"start": 1500,
"end": 3843
} | class ____ extends AbstractIndexAnalyzerProvider<StopwordAnalyzerBase> {
private final StopwordAnalyzerBase analyzer;
PersianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(name);
if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)) {
// since Lucene 10 this analyzer contains stemming by default
analyzer = new PersianAnalyzer(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet()));
} else {
// for older index versions we need the old analyzer behaviour without stemming
analyzer = new StopwordAnalyzerBase(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet())) {
protected Analyzer.TokenStreamComponents createComponents(String fieldName) {
final Tokenizer source = new StandardTokenizer();
TokenStream result = new LowerCaseFilter(source);
result = new DecimalDigitFilter(result);
result = new ArabicNormalizationFilter(result);
/* additional persian-specific normalization */
result = new PersianNormalizationFilter(result);
/*
* the order here is important: the stopword list is normalized with the
* above!
*/
return new TokenStreamComponents(source, new StopFilter(result, stopwords));
}
protected TokenStream normalize(String fieldName, TokenStream in) {
TokenStream result = new LowerCaseFilter(in);
result = new DecimalDigitFilter(result);
result = new ArabicNormalizationFilter(result);
/* additional persian-specific normalization */
result = new PersianNormalizationFilter(result);
return result;
}
protected Reader initReader(String fieldName, Reader reader) {
return new PersianCharFilter(reader);
}
};
}
}
@Override
public StopwordAnalyzerBase get() {
return this.analyzer;
}
}
| PersianAnalyzerProvider |
java | quarkusio__quarkus | extensions/spring-security/deployment/src/test/java/io/quarkus/spring/security/deployment/springapp/BeanWithBeanMethodChecks.java | {
"start": 187,
"end": 1155
} | class ____ {
@PreAuthorize("@personChecker.isTrue()")
public String noParamsAlwaysPasses() {
return "noParamsAlwaysPasses";
}
@PreAuthorize("@personChecker.isFalse()")
public String noParamsNeverPasses() {
return "noParamsNeverPasses";
}
@PreAuthorize("@personChecker.check(#person, #input)")
public String withParams(String input, Person person) {
return "withParams";
}
@PreAuthorize("@personChecker.check(#person, 'geo')")
public String withParamAndConstant(Person person) {
return "withParamAndConstant";
}
@PreAuthorize("@personChecker.check(#person, #input)")
public String anotherWithParams(String input, Person person) {
return "anotherWithParams";
}
@PreAuthorize("@principalChecker.isSame(#input, authentication.principal.username)")
public String principalChecker(String input) {
return "principalChecker";
}
}
| BeanWithBeanMethodChecks |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/results/graph/internal/ImmutableFetchList.java | {
"start": 2471,
"end": 2929
} | class ____ {
private final Fetch[] fetches;
public Builder(FetchableContainer container) {
this.fetches = new Fetch[container.getNumberOfFetchableKeys()];
}
public void add(Fetch fetch) {
fetches[fetch.getFetchedMapping().getFetchableKey()] = fetch;
}
public ImmutableFetchList build() {
for ( Fetch fetch : fetches ) {
if ( fetch != null ) {
return new ImmutableFetchList( fetches );
}
}
return EMPTY;
}
}
}
| Builder |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/benchmarks/JsonEncodeBenchmark.java | {
"start": 1427,
"end": 4002
} | class ____ extends BenchmarkBase {
private JsonObject tiny;
private JsonObject small;
private JsonObject wide;
private JsonObject deep;
private JsonCodec jacksonCodec;
private JsonCodec databindCodec;
@Setup
public void setup() {
ClassLoader classLoader = getClass().getClassLoader();
tiny = new JsonObject(Map.of("message", "Hello, World!"));
small = loadJson(classLoader.getResource("small_bench.json"));
wide = loadJson(classLoader.getResource("wide_bench.json"));
deep = loadJson(classLoader.getResource("deep_bench.json"));
jacksonCodec = new JacksonCodec();
databindCodec = new DatabindCodec();
}
private JsonObject loadJson(URL url) {
try {
return new JsonObject(new ObjectMapper().readValue(url, Map.class));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Benchmark
public String smallStringJackson() {
return stringJackson(small);
}
@Benchmark
public String smallStringDatabind() {
return stringDatabind(small);
}
@Benchmark
public String wideStringJackson() {
return stringJackson(wide);
}
@Benchmark
public String wideStringDatabind() {
return stringDatabind(wide);
}
@Benchmark
public String deepStringJackson() {
return stringJackson(deep);
}
@Benchmark
public String deepStringDatabind() {
return stringDatabind(deep);
}
@CompilerControl(INLINE)
private String stringJackson(JsonObject jsonObject) {
return jacksonCodec.toString(jsonObject);
}
@CompilerControl(INLINE)
private String stringDatabind(JsonObject jsonObject) {
return databindCodec.toString(jsonObject);
}
@Benchmark
public Buffer tinyBufferJackson() {
return bufferJackson(tiny);
}
@Benchmark
public Buffer smallBufferJackson() {
return bufferJackson(small);
}
@Benchmark
public Buffer smallBufferDatabind() {
return bufferDatabind(small);
}
@Benchmark
public Buffer deepBufferJackson() {
return bufferJackson(deep);
}
@Benchmark
public Buffer deepBufferDatabind() {
return bufferDatabind(deep);
}
@Benchmark
public Buffer wideBufferJackson() {
return bufferJackson(wide);
}
@Benchmark
public Buffer wideBufferDatabind() {
return bufferDatabind(wide);
}
@CompilerControl(INLINE)
private Buffer bufferJackson(JsonObject jsonObject) {
return jacksonCodec.toBuffer(jsonObject);
}
@CompilerControl(INLINE)
private Buffer bufferDatabind(JsonObject jsonObject) {
return databindCodec.toBuffer(jsonObject);
}
}
| JsonEncodeBenchmark |
java | apache__kafka | clients/src/test/java/org/apache/kafka/clients/admin/AdminClientConfigTest.java | {
"start": 1282,
"end": 2091
} | class ____ {
@Test
public void testDefaultMetadataRecoveryStrategy() {
Map<String, Object> configs = new HashMap<>();
final AdminClientConfig adminClientConfig = new AdminClientConfig(configs);
assertEquals(MetadataRecoveryStrategy.REBOOTSTRAP.name, adminClientConfig.getString(CommonClientConfigs.METADATA_RECOVERY_STRATEGY_CONFIG));
}
@Test
public void testInvalidMetadataRecoveryStrategy() {
Map<String, Object> configs = new HashMap<>();
configs.put(CommonClientConfigs.METADATA_RECOVERY_STRATEGY_CONFIG, "abc");
ConfigException ce = assertThrows(ConfigException.class, () -> new AdminClientConfig(configs));
assertTrue(ce.getMessage().contains(CommonClientConfigs.METADATA_RECOVERY_STRATEGY_CONFIG));
}
}
| AdminClientConfigTest |
java | apache__hadoop | hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIR3Response.java | {
"start": 1245,
"end": 1425
} | class ____ extends NFS3Response {
private final Nfs3FileAttributes postOpDirAttr;
private final long cookieVerf;
private final DirList3 dirList;
public static | READDIR3Response |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/ContainerAllocationExpirer.java | {
"start": 1289,
"end": 2193
} | class ____ extends
AbstractLivelinessMonitor<AllocationExpirationInfo> {
private EventHandler dispatcher;
public ContainerAllocationExpirer(Dispatcher d) {
super(ContainerAllocationExpirer.class.getName());
this.dispatcher = d.getEventHandler();
}
public void serviceInit(Configuration conf) throws Exception {
int expireIntvl = conf.getInt(
YarnConfiguration.RM_CONTAINER_ALLOC_EXPIRY_INTERVAL_MS,
YarnConfiguration.DEFAULT_RM_CONTAINER_ALLOC_EXPIRY_INTERVAL_MS);
setExpireInterval(expireIntvl);
setMonitorInterval(expireIntvl/3);
super.serviceInit(conf);
}
@Override
protected void expire(AllocationExpirationInfo allocationExpirationInfo) {
dispatcher.handle(new ContainerExpiredSchedulerEvent(
allocationExpirationInfo.getContainerId(),
allocationExpirationInfo.isIncrease()));
}
}
| ContainerAllocationExpirer |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorQueryContext.java | {
"start": 497,
"end": 1509
} | class ____ {
public final String[] indices;
public final QueryBuilder query;
public final String timeField;
public final long start;
public final long end;
public final Map<String, String> headers;
public final IndicesOptions indicesOptions;
public final Map<String, Object> runtimeMappings;
public DataExtractorQueryContext(
List<String> indices,
QueryBuilder query,
String timeField,
long start,
long end,
Map<String, String> headers,
IndicesOptions indicesOptions,
Map<String, Object> runtimeMappings
) {
this.indices = indices.toArray(new String[0]);
this.query = Objects.requireNonNull(query);
this.timeField = timeField;
this.start = start;
this.end = end;
this.headers = headers;
this.indicesOptions = Objects.requireNonNull(indicesOptions);
this.runtimeMappings = Objects.requireNonNull(runtimeMappings);
}
}
| DataExtractorQueryContext |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_containsAll_Test.java | {
"start": 1029,
"end": 1428
} | class ____ extends IterableAssertBaseTest {
private List<Object> list = new ArrayList<>();
@Override
protected ConcreteIterableAssert<Object> invoke_api_method() {
return assertions.containsAll(list);
}
@Override
protected void verify_internal_effects() {
verify(iterables).assertContainsAll(getInfo(assertions), getActual(assertions), list);
}
}
| IterableAssert_containsAll_Test |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/ReturnMissingNullableTest.java | {
"start": 69483,
"end": 70030
} | class ____ {
public <T> T getMessage(boolean b, T t) {
// BUG: Diagnostic contains: @Nullable
return b ? null : t;
}
}
""")
.doTest();
}
@Test
public void aggressive_voidTypedMethod() {
createAggressiveCompilationTestHelper()
.addSourceLines(
"com/google/errorprone/bugpatterns/nullness/VoidTypeTest.java",
"""
package com.google.errorprone.bugpatterns.nullness;
public | LiteralNullReturnTest |
java | netty__netty | resolver-dns/src/main/java/io/netty/resolver/dns/Cache.java | {
"start": 1381,
"end": 5339
} | class ____<E> {
private static final AtomicReferenceFieldUpdater<Cache.Entries, ScheduledFuture> FUTURE_UPDATER =
AtomicReferenceFieldUpdater.newUpdater(Cache.Entries.class, ScheduledFuture.class, "expirationFuture");
private static final ScheduledFuture<?> CANCELLED = new ScheduledFuture<Object>() {
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
return false;
}
@Override
public long getDelay(TimeUnit unit) {
// We ignore unit and always return the minimum value to ensure the TTL of the cancelled marker is
// the smallest.
return Long.MIN_VALUE;
}
@Override
public int compareTo(Delayed o) {
throw new UnsupportedOperationException();
}
@Override
public boolean isCancelled() {
return true;
}
@Override
public boolean isDone() {
return true;
}
@Override
public Object get() {
throw new UnsupportedOperationException();
}
@Override
public Object get(long timeout, TimeUnit unit) {
throw new UnsupportedOperationException();
}
};
// Two years are supported by all our EventLoop implementations and so safe to use as maximum.
// See also: https://github.com/netty/netty/commit/b47fb817991b42ec8808c7d26538f3f2464e1fa6
static final int MAX_SUPPORTED_TTL_SECS = (int) TimeUnit.DAYS.toSeconds(365 * 2);
private final ConcurrentMap<String, Entries> resolveCache = new ConcurrentHashMap<>();
/**
* Remove everything from the cache.
*/
final void clear() {
while (!resolveCache.isEmpty()) {
for (Iterator<Entry<String, Entries>> i = resolveCache.entrySet().iterator(); i.hasNext();) {
Map.Entry<String, Entries> e = i.next();
i.remove();
e.getValue().clearAndCancel();
}
}
}
/**
* Clear all entries (if anything exists) for the given hostname and return {@code true} if anything was removed.
*/
final boolean clear(String hostname) {
Entries entries = resolveCache.remove(hostname);
return entries != null && entries.clearAndCancel();
}
/**
* Returns all caches entries for the given hostname.
*/
final List<? extends E> get(String hostname) {
Entries entries = resolveCache.get(hostname);
return entries == null ? null : entries.get();
}
/**
* Cache a value for the given hostname that will automatically expire once the TTL is reached.
*/
final void cache(String hostname, E value, int ttl, EventLoop loop) {
Entries entries = resolveCache.get(hostname);
if (entries == null) {
entries = new Entries(hostname);
Entries oldEntries = resolveCache.putIfAbsent(hostname, entries);
if (oldEntries != null) {
entries = oldEntries;
}
}
entries.add(value, ttl, loop);
}
/**
* Return the number of hostnames for which we have cached something.
*/
final int size() {
return resolveCache.size();
}
/**
* Returns {@code true} if this entry should replace all other entries that are already cached for the hostname.
*/
protected abstract boolean shouldReplaceAll(E entry);
/**
* Sort the {@link List} for a {@code hostname} before caching these.
*/
protected void sortEntries(
@SuppressWarnings("unused") String hostname, @SuppressWarnings("unused") List<E> entries) {
// NOOP.
}
/**
* Returns {@code true} if both entries are equal.
*/
protected abstract boolean equals(E entry, E otherEntry);
// Directly extend AtomicReference for intrinsics and also to keep memory overhead low.
private final | Cache |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/idmanytoone/StoreCustomer.java | {
"start": 500,
"end": 921
} | class ____ implements Serializable {
StoreCustomer() {}
@Id
@ManyToOne(optional = false)
@JoinColumn(name = "idA")
public Store store;
@Id
@ManyToOne(optional = false)
@JoinColumn(name = "idB")
public Customer customer;
public StoreCustomer(Store store, Customer customer) {
this.store = store;
this.customer = customer;
}
private static final long serialVersionUID = -8295955012787627232L;
}
| StoreCustomer |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/beanvalidation/BeanValidationTest.java | {
"start": 1022,
"end": 2987
} | class ____ {
@AfterEach
public void tearDown(EntityManagerFactoryScope scope){
scope.getEntityManagerFactory().getSchemaManager().truncate();
}
@Test
public void testBeanValidationIntegrationOnFlush(EntityManagerFactoryScope scope) {
scope.inTransaction(
entityManager -> {
CupHolder ch = new CupHolder();
ch.setRadius( new BigDecimal( "12" ) );
ch.setTitle( "foo" );
try {
entityManager.persist(ch);
entityManager.flush();
fail( "invalid object should not be persisted" );
}
catch (ConstraintViolationException e) {
assertEquals( 1, e.getConstraintViolations().size() );
}
assertTrue(
entityManager.getTransaction().getRollbackOnly(),
"A constraint violation exception should mark the transaction for rollback"
);
}
);
}
@Test
public void testBeanValidationIntegrationOnCommit(EntityManagerFactoryScope scope) {
try {
scope.inTransaction(
entityManager -> {
CupHolder ch = new CupHolder();
ch.setRadius( new BigDecimal( "9" ) );
ch.setTitle( "foo" );
entityManager.persist( ch );
entityManager.flush();
ch.setRadius( new BigDecimal( "12" ) );
}
);
fail( "invalid object should not be persisted" );
}
catch (RollbackException e) {
final Throwable cve = e.getCause();
assertTrue( cve instanceof ConstraintViolationException );
assertEquals( 1, ( (ConstraintViolationException) cve ).getConstraintViolations().size() );
}
}
@Test
@RequiresDialect(H2Dialect.class)
public void testTitleColumnHasExpectedLength(EntityManagerFactoryScope scope) {
scope.inTransaction(
entityManager -> {
Number len = (Number) entityManager.createNativeQuery(
"select CHARACTER_MAXIMUM_LENGTH from INFORMATION_SCHEMA.COLUMNS c where c.TABLE_NAME = 'CUPHOLDER' and c.COLUMN_NAME = 'TITLE'"
).getSingleResult();
assertEquals(64, len.intValue());
}
);
}
}
| BeanValidationTest |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/hashtable/BinaryHashPartition.java | {
"start": 19692,
"end": 23995
} | class ____ extends AbstractPagedOutputView {
private final ArrayList<MemorySegment> targetList;
/** Segments of in memory partition, include the current segment. Cleared after spilled. */
private final ArrayList<MemorySegment> buildStageSegments;
private final RandomAccessInputView buildStageInputView;
private final MemorySegmentSource memSource;
private final int sizeBits;
private BlockChannelWriter<MemorySegment> writer;
private int currentBlockNumber;
private BuildSideBuffer(MemorySegment initialSegment, MemorySegmentSource memSource) {
super(initialSegment, initialSegment.size(), 0);
this.memSource = memSource;
this.sizeBits = MathUtils.log2strict(initialSegment.size());
this.targetList = new ArrayList<>();
this.buildStageSegments = new ArrayList<>();
this.buildStageSegments.add(initialSegment);
this.buildStageInputView =
new RandomAccessInputView(buildStageSegments, initialSegment.size());
}
@Override
protected MemorySegment nextSegment(MemorySegment current, int bytesUsed)
throws IOException {
final MemorySegment next;
if (this.writer == null) {
// Must first add current segment:
// This may happen when you need to spill:
// A partition called nextSegment, can not get memory, need to spill, the result
// give itself to the spill, Since it is switching currentSeg, it is necessary
// to give the previous currSeg to spill.
this.targetList.add(current);
next = this.memSource.nextSegment();
buildStageSegments.add(next);
} else {
this.writer.writeBlock(current);
try {
next = this.writer.getReturnQueue().take();
} catch (InterruptedException iex) {
throw new IOException(
"Hash Join Partition was interrupted while "
+ "grabbing a new write-behind buffer.");
}
}
this.currentBlockNumber++;
return next;
}
RandomAccessInputView getBuildStageInputView() {
return buildStageInputView;
}
long getCurrentPointer() {
return (((long) this.currentBlockNumber) << this.sizeBits)
+ getCurrentPositionInSegment();
}
int getBlockCount() {
return this.currentBlockNumber + 1;
}
int getNumOccupiedMemorySegments() {
// return the current segment + all filled segments
return this.targetList.size() + 1;
}
int spill(BlockChannelWriter<MemorySegment> writer) throws IOException {
this.writer = writer;
final int numSegments = this.targetList.size();
for (MemorySegment segment : this.targetList) {
this.writer.writeBlock(segment);
}
this.targetList.clear();
return numSegments;
}
MemorySegment[] close() throws IOException {
final MemorySegment current = getCurrentSegment();
if (current == null) {
throw new IllegalStateException(
"Illegal State in HashPartition: "
+ "No current buffer when finalizing build side.");
}
clear();
if (this.writer == null) {
this.targetList.add(current);
MemorySegment[] buffers =
this.targetList.toArray(new MemorySegment[this.targetList.size()]);
this.targetList.clear();
this.buildStageSegments.clear();
return buffers;
} else {
writer.writeBlock(current);
return null;
}
}
}
/**
* For spilled partition to rebuild index and hashcode when memory can store all the build side
* data. (After bulk load to memory, see {@link BulkBlockChannelReader}).
*/
final | BuildSideBuffer |
java | quarkusio__quarkus | extensions/elytron-security-jdbc/deployment/src/test/java/io/quarkus/elytron/security/jdbc/CustomRoleDecoder.java | {
"start": 403,
"end": 926
} | class ____ implements RoleDecoder {
@Override
public Roles decodeRoles(AuthorizationIdentity authorizationIdentity) {
Attributes.Entry groupsEntry = authorizationIdentity.getAttributes().get("groups");
Set<String> roles = new HashSet<>();
StreamSupport.stream(groupsEntry.spliterator(), false).forEach(groups -> {
for (String role : groups.split(",")) {
roles.add(role.trim());
}
});
return Roles.fromSet(roles);
}
}
| CustomRoleDecoder |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java | {
"start": 3684,
"end": 5255
} | class ____ extends ActionResponse implements ToXContentObject {
private final List<ModelConfigurations> endpoints;
public Response(List<ModelConfigurations> endpoints) {
this.endpoints = endpoints;
}
public Response(StreamInput in) throws IOException {
endpoints = in.readCollectionAsList(ModelConfigurations::new);
}
public List<ModelConfigurations> getEndpoints() {
return endpoints;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeCollection(endpoints);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.startArray("endpoints");
for (var endpoint : endpoints) {
if (endpoint != null) {
endpoint.toFilteredXContent(builder, params);
}
}
builder.endArray();
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GetInferenceModelAction.Response response = (GetInferenceModelAction.Response) o;
return Objects.equals(endpoints, response.endpoints);
}
@Override
public int hashCode() {
return Objects.hash(endpoints);
}
}
}
| Response |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/TypeParameterShadowingTest.java | {
"start": 4633,
"end": 4991
} | class ____<T> {
<T2 extends Comparable<T2>> void something(T2 t) {
T2 other = t;
}
}
""")
.doTest();
}
@Test
public void refactorUnderneathStuff() {
refactoring
.addInputLines(
"in/Test.java",
"""
package foo.bar;
| Test |
java | quarkusio__quarkus | integration-tests/jpa/src/test/java/io/quarkus/it/jpa/proxy/ProxyTest.java | {
"start": 2387,
"end": 2519
} | class ____ non-final.")
// Importantly, we don't expect any other warning about proxies!
);
}
}
| was |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/bind/binders/TypedRequestArgumentBinder.java | {
"start": 977,
"end": 1395
} | interface ____<T> extends RequestArgumentBinder<T>, TypeArgumentBinder<T, HttpRequest<?>> {
/**
* Returns additional super types.
*
* @return Additional supers types
*/
default @NonNull List<Class<?>> superTypes() {
return Collections.emptyList();
}
/**
* Check if this typed argument binder matches the provided class.
* @param aClass The | TypedRequestArgumentBinder |
java | apache__camel | components/camel-hazelcast/src/main/java/org/apache/camel/component/hazelcast/topic/HazelcastTopicComponent.java | {
"start": 1192,
"end": 1878
} | class ____ extends HazelcastDefaultComponent {
public HazelcastTopicComponent() {
}
public HazelcastTopicComponent(final CamelContext context) {
super(context);
}
@Override
protected HazelcastDefaultEndpoint doCreateEndpoint(
String uri, String remaining, Map<String, Object> parameters, HazelcastInstance hzInstance)
throws Exception {
final HazelcastTopicConfiguration config = new HazelcastTopicConfiguration();
HazelcastTopicEndpoint answer = new HazelcastTopicEndpoint(hzInstance, uri, this, remaining, config);
setProperties(answer, parameters);
return answer;
}
}
| HazelcastTopicComponent |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/condition/DisabledOnOs.java | {
"start": 768,
"end": 1046
} | class ____
* test method is <em>disabled</em> on one or more specified
* {@linkplain #value operating systems} or on one or more specified
* {@linkplain #architectures architectures}
*
* <p>If operating systems <em>and</em> architectures are specified, the annotated
* test | or |
java | apache__flink | flink-runtime-web/src/test/java/org/apache/flink/runtime/webmonitor/testutils/HttpUtils.java | {
"start": 1070,
"end": 1892
} | class ____ {
public static Tuple2<Integer, String> getFromHTTP(String url) throws Exception {
URL u = new URL(url);
HttpURLConnection connection = (HttpURLConnection) u.openConnection();
connection.setConnectTimeout(100000);
connection.connect();
InputStream is;
if (connection.getResponseCode() >= 400) {
// error!
is = connection.getErrorStream();
} else {
is = connection.getInputStream();
}
return Tuple2.of(
connection.getResponseCode(),
IOUtils.toString(
is,
connection.getContentEncoding() != null
? connection.getContentEncoding()
: "UTF-8"));
}
}
| HttpUtils |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RBinaryStreamRx.java | {
"start": 856,
"end": 1606
} | interface ____ extends RBucketRx<byte[]> {
/**
* Returns current channel's position
*
* @return current position
*/
long position();
/**
* Sets channel's position
*
* @param newPosition - new position
*/
void position(long newPosition);
/**
* Reads a sequence of bytes into defined buffer.
*
* @param buf buffer object into which bytes are read
* @return amount of read bytes
*/
Single<Integer> read(ByteBuffer buf);
/**
* Writes a sequence of bytes from defined buffer.
*
* @param buf buffer object from which bytes are transferred
* @return amount of written bytes
*/
Single<Integer> write(ByteBuffer buf);
}
| RBinaryStreamRx |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/consumer/LocalInputChannelTest.java | {
"start": 30940,
"end": 32264
} | class ____ implements TestProducerSource {
private final int bufferSize;
private final List<Byte> channelIndexes;
TestPartitionProducerBufferSource(
int parallelism, int bufferSize, int numberOfBuffersToProduce) {
this.bufferSize = bufferSize;
this.channelIndexes =
Lists.newArrayListWithCapacity(parallelism * numberOfBuffersToProduce);
// Array of channel indexes to produce buffers for
for (byte i = 0; i < parallelism; i++) {
for (int j = 0; j < numberOfBuffersToProduce; j++) {
channelIndexes.add(i);
}
}
// Random buffer to channel ordering
Collections.shuffle(channelIndexes);
}
@Override
public BufferAndChannel getNextBuffer() throws Exception {
if (channelIndexes.size() > 0) {
final int channelIndex = channelIndexes.remove(0);
return new BufferAndChannel(new byte[bufferSize], channelIndex);
}
return null;
}
}
/**
* Consumed the configured result partitions and verifies that each channel receives the
* expected number of buffers.
*/
private static | TestPartitionProducerBufferSource |
java | elastic__elasticsearch | modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamRestActionCancellationIT.java | {
"start": 2018,
"end": 7573
} | class ____ extends ESIntegTestCase {
@Override
protected boolean addMockHttpTransport() {
return false; // enable http
}
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal, otherSettings))
.put(NetworkModule.TRANSPORT_TYPE_KEY, Netty4Plugin.NETTY_TRANSPORT_NAME)
.put(NetworkModule.HTTP_TYPE_KEY, Netty4Plugin.NETTY_HTTP_TRANSPORT_NAME)
.build();
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(getTestTransportPlugin(), MainRestPlugin.class, CancellableActionTestPlugin.class, DataStreamsPlugin.class);
}
public void testGetDataStreamCancellation() {
runRestActionCancellationTest(new Request(HttpGet.METHOD_NAME, "/_data_stream"), GetDataStreamAction.NAME);
runRestActionCancellationTest(new Request(HttpGet.METHOD_NAME, "/_data_stream?verbose"), GetDataStreamAction.NAME);
}
public void testGetDataStreamLifecycleCancellation() {
runRestActionCancellationTest(
new Request(HttpGet.METHOD_NAME, "/_data_stream/test/_lifecycle"),
GetDataStreamLifecycleAction.INSTANCE.name()
);
}
public void testGetDataStreamOptionsCancellation() {
runRestActionCancellationTest(
new Request(HttpGet.METHOD_NAME, "/_data_stream/test/_options"),
GetDataStreamOptionsAction.INSTANCE.name()
);
}
private void runRestActionCancellationTest(Request request, String actionName) {
final var node = usually() ? internalCluster().getRandomNodeName() : internalCluster().startCoordinatingOnlyNode(Settings.EMPTY);
try (
var restClient = createRestClient(node);
var capturingAction = CancellableActionTestPlugin.capturingActionOnNode(actionName, node)
) {
final var responseFuture = new PlainActionFuture<Response>();
final var restInvocation = restClient.performRequestAsync(request, wrapAsRestResponseListener(responseFuture));
if (randomBoolean()) {
// cancel by aborting the REST request
capturingAction.captureAndCancel(restInvocation::cancel);
expectThrows(ExecutionException.class, CancellationException.class, () -> responseFuture.get(10, TimeUnit.SECONDS));
} else {
// cancel via the task management API
final var cancelFuture = new PlainActionFuture<Void>();
capturingAction.captureAndCancel(
() -> SubscribableListener
.<ObjectPath>newForked(
l -> restClient.performRequestAsync(
getListTasksRequest(node, actionName),
wrapAsRestResponseListener(l.map(ObjectPath::createFromResponse))
)
)
.<Void>andThen((l, listTasksResponse) -> {
final var taskCount = listTasksResponse.evaluateArraySize("tasks");
assertThat(taskCount, greaterThan(0));
try (var listeners = new RefCountingListener(l)) {
for (int i = 0; i < taskCount; i++) {
final var taskPrefix = "tasks." + i + ".";
assertTrue(listTasksResponse.evaluate(taskPrefix + "cancellable"));
assertFalse(listTasksResponse.evaluate(taskPrefix + "cancelled"));
restClient.performRequestAsync(
getCancelTaskRequest(
listTasksResponse.evaluate(taskPrefix + "node"),
listTasksResponse.evaluate(taskPrefix + "id")
),
wrapAsRestResponseListener(listeners.acquire(DataStreamRestActionCancellationIT::assertOK))
);
}
}
})
.addListener(cancelFuture)
);
cancelFuture.get(10, TimeUnit.SECONDS);
expectThrows(Exception.class, () -> responseFuture.get(10, TimeUnit.SECONDS));
}
assertAllTasksHaveFinished(actionName);
} catch (Exception e) {
fail(e);
}
}
private static Request getListTasksRequest(String taskNode, String actionName) {
final var listTasksRequest = new Request(HttpGet.METHOD_NAME, "/_tasks");
listTasksRequest.addParameter("nodes", taskNode);
listTasksRequest.addParameter("actions", actionName);
listTasksRequest.addParameter("group_by", "none");
return listTasksRequest;
}
private static Request getCancelTaskRequest(String taskNode, int taskId) {
final var cancelTaskRequest = new Request(HttpPost.METHOD_NAME, Strings.format("/_tasks/%s:%d/_cancel", taskNode, taskId));
cancelTaskRequest.addParameter("wait_for_completion", null);
return cancelTaskRequest;
}
public static void assertOK(Response response) {
assertThat(response.getStatusLine().getStatusCode(), oneOf(200, 201));
}
}
| DataStreamRestActionCancellationIT |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/async/TestRouterAsyncRpcWhenNamenodeFailover.java | {
"start": 1703,
"end": 3066
} | class ____ {
private StateStoreDFSCluster cluster;
private void setupCluster(boolean ha)
throws Exception {
// Build and start a federated cluster.
cluster = new StateStoreDFSCluster(ha, 2);
Configuration routerConf = new RouterConfigBuilder()
.stateStore()
.metrics()
.admin()
.rpc()
.heartbeat()
.build();
routerConf.setBoolean(RBFConfigKeys.DFS_ROUTER_ASYNC_RPC_ENABLE_KEY, true);
cluster.addRouterOverrides(routerConf);
cluster.startCluster();
cluster.startRouters();
cluster.waitClusterUp();
}
@Test
public void testGetFileInfoWhenNsFailover() throws Exception {
setupCluster(true);
Configuration conf = cluster.getRouterClientConf();
conf.setInt("dfs.client.retry.max.attempts", 2);
DFSClient routerClient = new DFSClient(new URI("hdfs://fed"), conf);
transitionClusterNSToActive(cluster, 0);
String basePath = "/ARR/testGetFileInfo";
routerClient.mkdirs(basePath);
DirectoryListing directoryListing = routerClient.listPaths("/ARR", new byte[0]);
assertEquals(1, directoryListing.getPartialListing().length);
transitionClusterNSToStandby(cluster);
assertThrows(IOException.class, () -> {
HdfsFileStatus fileInfo = routerClient.getFileInfo(basePath + 1);
});
}
}
| TestRouterAsyncRpcWhenNamenodeFailover |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/enums/EnumDeserializationTest.java | {
"start": 14605,
"end": 16075
} | enum ____-turned null");
} catch (InvalidNullException jex) {
verifyException(jex, "Invalid `null` value encountered");
}
}
@Test
public void testAllowUnknownEnumValuesAsMapKeysReadAsNull() throws Exception
{
ClassWithEnumMapKey result = MAPPER.reader(EnumFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL)
.forType(ClassWithEnumMapKey.class)
.readValue("{\"map\":{\"NO-SUCH-VALUE\":\"val\"}}");
// 25-Jan-2018, tatu: as per [databind#1883], we upgrade it to `EnumMap`, which won't accept nulls...
assertEquals(0, result.map.size());
}
// Ability to ignore unknown Enum values as a defined default:
// [databind#4979]
@Test
public void testAllowUnknownEnumValuesReadAsDefaultWithCreatorMethod4979() throws Exception
{
ObjectReader reader = MAPPER.reader(
EnumFeature.READ_UNKNOWN_ENUM_VALUES_USING_DEFAULT_VALUE);
assertEquals(
StrictEnumCreator.UNKNOWN,
reader.forType(StrictEnumCreator.class).readValue("\"NO-SUCH-VALUE\""));
}
@Test
public void testDoNotAllowUnknownEnumValuesAsMapKeysWhenReadAsNullDisabled() throws Exception
{
assertFalse(MAPPER.isEnabled(EnumFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL));
try {
MAPPER.readValue("{\"map\":{\"NO-SUCH-VALUE\":\"val\"}}", ClassWithEnumMapKey.class);
fail("Expected an exception for bogus | value |
java | resilience4j__resilience4j | resilience4j-core/src/main/java/io/github/resilience4j/core/ClassUtils.java | {
"start": 873,
"end": 3509
} | class ____ {
private static final String INSTANTIATION_ERROR_PREFIX = "Unable to create instance of class: ";
private ClassUtils() {
// utils
}
public static <T> IntervalBiFunction<T> instantiateIntervalBiFunctionClass(
Class<? extends IntervalBiFunction<T>> clazz) {
try {
Constructor<? extends IntervalBiFunction<T>> c = clazz.getConstructor();
if (c != null) {
return c.newInstance();
} else {
throw new InstantiationException(INSTANTIATION_ERROR_PREFIX + clazz.getName());
}
} catch (Exception e) {
throw new InstantiationException(INSTANTIATION_ERROR_PREFIX + clazz.getName(), e);
}
}
public static <T> Predicate<T> instantiatePredicateClass(Class<? extends Predicate<T>> clazz) {
try {
Constructor<? extends Predicate<T>> c = clazz.getConstructor();
if (c != null) {
return c.newInstance();
} else {
throw new InstantiationException(INSTANTIATION_ERROR_PREFIX + clazz.getName());
}
} catch (Exception e) {
throw new InstantiationException(INSTANTIATION_ERROR_PREFIX + clazz.getName(), e);
}
}
public static <T> BiConsumer<Integer, T> instantiateBiConsumer(Class<? extends BiConsumer<Integer, T>> clazz) {
try {
Constructor<? extends BiConsumer<Integer, T>> c = clazz.getConstructor();
if (c != null) {
return c.newInstance();
} else {
throw new InstantiationException(INSTANTIATION_ERROR_PREFIX + clazz.getName());
}
} catch (Exception e) {
throw new InstantiationException(INSTANTIATION_ERROR_PREFIX + clazz.getName(), e);
}
}
public static <T, R> Function<T, R> instantiateFunction(Class<? extends Function<T, R>> clazz) {
try {
Constructor<? extends Function<T, R>> c = clazz.getConstructor();
if (c != null) {
return c.newInstance();
} else {
throw new InstantiationException(INSTANTIATION_ERROR_PREFIX + clazz.getName());
}
} catch (Exception e) {
throw new InstantiationException(INSTANTIATION_ERROR_PREFIX + clazz.getName(), e);
}
}
public static <T> T instantiateClassDefConstructor(Class<T> clazz) {
//if constructor present then it should have a no arg constructor
//if not present then default constructor is already their
Objects.requireNonNull(clazz, " | ClassUtils |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/VertxHttpComponentBuilderFactory.java | {
"start": 1882,
"end": 13476
} | interface ____ extends ComponentBuilder<VertxHttpComponent> {
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the response body should be byte or as
* io.vertx.core.buffer.Buffer.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: producer
*
* @param responsePayloadAsByteArray the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder responsePayloadAsByteArray(boolean responsePayloadAsByteArray) {
doSetProperty("responsePayloadAsByteArray", responsePayloadAsByteArray);
return this;
}
/**
* Whether to allow java serialization when a request has the
* Content-Type application/x-java-serialized-object This is disabled by
* default. If you enable this, be aware that Java will deserialize the
* incoming data from the request. This can be a potential security
* risk.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param allowJavaSerializedObject the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder allowJavaSerializedObject(boolean allowJavaSerializedObject) {
doSetProperty("allowJavaSerializedObject", allowJavaSerializedObject);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
/**
* To use an existing vertx instead of creating a new instance.
*
* The option is a: <code>io.vertx.core.Vertx</code> type.
*
* Group: advanced
*
* @param vertx the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder vertx(io.vertx.core.Vertx vertx) {
doSetProperty("vertx", vertx);
return this;
}
/**
* A custom VertxHttpBinding which can control how to bind between
* Vert.x and Camel.
*
* The option is a:
* <code>org.apache.camel.component.vertx.http.VertxHttpBinding</code> type.
*
* Group: advanced
*
* @param vertxHttpBinding the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder vertxHttpBinding(org.apache.camel.component.vertx.http.VertxHttpBinding vertxHttpBinding) {
doSetProperty("vertxHttpBinding", vertxHttpBinding);
return this;
}
/**
* To provide a custom set of vertx options for configuring vertx.
*
* The option is a: <code>io.vertx.core.VertxOptions</code>
* type.
*
* Group: advanced
*
* @param vertxOptions the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder vertxOptions(io.vertx.core.VertxOptions vertxOptions) {
doSetProperty("vertxOptions", vertxOptions);
return this;
}
/**
* To provide a custom set of options for configuring vertx web client.
*
* The option is a:
* <code>io.vertx.ext.web.client.WebClientOptions</code>
* type.
*
* Group: advanced
*
* @param webClientOptions the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder webClientOptions(io.vertx.ext.web.client.WebClientOptions webClientOptions) {
doSetProperty("webClientOptions", webClientOptions);
return this;
}
/**
* To use a custom org.apache.camel.spi.HeaderFilterStrategy to filter
* header to and from Camel message.
*
* The option is a:
* <code>org.apache.camel.spi.HeaderFilterStrategy</code>
* type.
*
* Group: filter
*
* @param headerFilterStrategy the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder headerFilterStrategy(org.apache.camel.spi.HeaderFilterStrategy headerFilterStrategy) {
doSetProperty("headerFilterStrategy", headerFilterStrategy);
return this;
}
/**
* The proxy server host address.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: proxy
*
* @param proxyHost the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder proxyHost(java.lang.String proxyHost) {
doSetProperty("proxyHost", proxyHost);
return this;
}
/**
* The proxy server password if authentication is required.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: proxy
*
* @param proxyPassword the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder proxyPassword(java.lang.String proxyPassword) {
doSetProperty("proxyPassword", proxyPassword);
return this;
}
/**
* The proxy server port.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Group: proxy
*
* @param proxyPort the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder proxyPort(java.lang.Integer proxyPort) {
doSetProperty("proxyPort", proxyPort);
return this;
}
/**
* The proxy server type.
*
* The option is a: <code>io.vertx.core.net.ProxyType</code>
* type.
*
* Group: proxy
*
* @param proxyType the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder proxyType(io.vertx.core.net.ProxyType proxyType) {
doSetProperty("proxyType", proxyType);
return this;
}
/**
* The proxy server username if authentication is required.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: proxy
*
* @param proxyUsername the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder proxyUsername(java.lang.String proxyUsername) {
doSetProperty("proxyUsername", proxyUsername);
return this;
}
/**
* The password to use for basic authentication.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param basicAuthPassword the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder basicAuthPassword(java.lang.String basicAuthPassword) {
doSetProperty("basicAuthPassword", basicAuthPassword);
return this;
}
/**
* The user name to use for basic authentication.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param basicAuthUsername the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder basicAuthUsername(java.lang.String basicAuthUsername) {
doSetProperty("basicAuthUsername", basicAuthUsername);
return this;
}
/**
* The bearer token to use for bearer token authentication.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param bearerToken the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder bearerToken(java.lang.String bearerToken) {
doSetProperty("bearerToken", bearerToken);
return this;
}
/**
* To configure security using SSLContextParameters.
*
* The option is a:
* <code>org.apache.camel.support.jsse.SSLContextParameters</code> type.
*
* Group: security
*
* @param sslContextParameters the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder sslContextParameters(org.apache.camel.support.jsse.SSLContextParameters sslContextParameters) {
doSetProperty("sslContextParameters", sslContextParameters);
return this;
}
/**
* Enable usage of global SSL context parameters.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param useGlobalSslContextParameters the value to set
* @return the dsl builder
*/
default VertxHttpComponentBuilder useGlobalSslContextParameters(boolean useGlobalSslContextParameters) {
doSetProperty("useGlobalSslContextParameters", useGlobalSslContextParameters);
return this;
}
}
| VertxHttpComponentBuilder |
java | spring-projects__spring-security | taglibs/src/main/java/org/springframework/security/taglibs/authz/AuthenticationTag.java | {
"start": 1911,
"end": 5263
} | class ____ extends TagSupport {
private SecurityContextHolderStrategy securityContextHolderStrategy = SecurityContextHolder
.getContextHolderStrategy();
private @Nullable String var;
private @Nullable String property;
private int scope;
private boolean scopeSpecified;
private boolean htmlEscape = true;
public AuthenticationTag() {
init();
}
// resets local state
private void init() {
this.var = null;
this.scopeSpecified = false;
this.scope = PageContext.PAGE_SCOPE;
}
public void setVar(String var) {
this.var = var;
}
public void setProperty(String operation) {
this.property = operation;
}
public void setScope(String scope) {
this.scope = TagUtils.getScope(scope);
this.scopeSpecified = true;
}
public void setPageContext(PageContext pageContext) {
super.setPageContext(pageContext);
ServletContext servletContext = pageContext.getServletContext();
ApplicationContext context = SecurityWebApplicationContextUtils
.findRequiredWebApplicationContext(servletContext);
String[] names = context.getBeanNamesForType(SecurityContextHolderStrategy.class);
if (names.length == 1) {
SecurityContextHolderStrategy strategy = context.getBean(SecurityContextHolderStrategy.class);
this.securityContextHolderStrategy = strategy;
}
}
@Override
public int doStartTag() throws JspException {
return super.doStartTag();
}
@Override
public int doEndTag() throws JspException {
Object result = null;
// determine the value by...
if (this.property != null) {
SecurityContext context = this.securityContextHolderStrategy.getContext();
if ((context == null) || !(context instanceof SecurityContext) || (context.getAuthentication() == null)) {
return Tag.EVAL_PAGE;
}
Authentication auth = context.getAuthentication();
if (auth.getPrincipal() == null) {
return Tag.EVAL_PAGE;
}
try {
BeanWrapperImpl wrapper = new BeanWrapperImpl(auth);
result = wrapper.getPropertyValue(this.property);
}
catch (BeansException ex) {
throw new JspException(ex);
}
}
if (this.var != null) {
/*
* Store the result, letting an IllegalArgumentException propagate back if the
* scope is invalid (e.g., if an attempt is made to store something in the
* session without any HttpSession existing).
*/
if (result != null) {
this.pageContext.setAttribute(this.var, result, this.scope);
}
else {
if (this.scopeSpecified) {
this.pageContext.removeAttribute(this.var, this.scope);
}
else {
this.pageContext.removeAttribute(this.var);
}
}
}
else {
if (this.htmlEscape) {
writeMessage(TextEscapeUtils.escapeEntities(String.valueOf(result)));
}
else {
writeMessage(String.valueOf(result));
}
}
return EVAL_PAGE;
}
protected void writeMessage(String msg) throws JspException {
try {
this.pageContext.getOut().write(String.valueOf(msg));
}
catch (IOException ioe) {
throw new JspException(ioe);
}
}
/**
* Set HTML escaping for this tag, as boolean value.
*/
public void setHtmlEscape(String htmlEscape) {
this.htmlEscape = Boolean.parseBoolean(htmlEscape);
}
/**
* Return the HTML escaping setting for this tag, or the default setting if not
* overridden.
*/
protected boolean isHtmlEscape() {
return this.htmlEscape;
}
}
| AuthenticationTag |
java | apache__kafka | clients/src/test/java/org/apache/kafka/test/MockSelector.java | {
"start": 1565,
"end": 7501
} | class ____ implements Selectable {
private final Time time;
private final List<NetworkSend> initiatedSends = new ArrayList<>();
private final List<NetworkSend> completedSends = new ArrayList<>();
private final List<ByteBufferChannel> completedSendBuffers = new ArrayList<>();
private final List<NetworkReceive> completedReceives = new ArrayList<>();
private final Map<String, ChannelState> disconnected = new HashMap<>();
private final List<String> connected = new ArrayList<>();
private final List<DelayedReceive> delayedReceives = new ArrayList<>();
private final Predicate<InetSocketAddress> canConnect;
private final Set<String> ready = new HashSet<>();
public MockSelector(Time time) {
this(time, null);
}
public MockSelector(Time time, Predicate<InetSocketAddress> canConnect) {
this.time = time;
this.canConnect = canConnect;
}
@Override
public void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize) throws IOException {
if (canConnect == null || canConnect.test(address)) {
this.connected.add(id);
this.ready.add(id);
}
}
@Override
public void wakeup() {
}
@Override
public void close() {
}
@Override
public void close(String id) {
// Note that there are no notifications for client-side disconnects
removeSendsForNode(id, completedSends);
removeSendsForNode(id, initiatedSends);
ready.remove(id);
for (int i = 0; i < this.connected.size(); i++) {
if (this.connected.get(i).equals(id)) {
this.connected.remove(i);
break;
}
}
}
/**
* Since MockSelector.connect will always succeed and add the
* connection id to the Set connected, we can only simulate
* that the connection is still pending by removing the connection
* id from the Set connected.
*
* @param id connection id
*/
public void serverConnectionBlocked(String id) {
this.connected.remove(id);
}
/**
* Simulate a server disconnect. This id will be present in {@link #disconnected()} on
* the next {@link #poll(long)}.
*/
public void serverDisconnect(String id) {
this.disconnected.put(id, ChannelState.READY);
close(id);
}
public void serverAuthenticationFailed(String id) {
ChannelState authFailed = new ChannelState(ChannelState.State.AUTHENTICATION_FAILED,
new AuthenticationException("Authentication failed"), null);
this.disconnected.put(id, authFailed);
close(id);
}
private void removeSendsForNode(String id, Collection<NetworkSend> sends) {
sends.removeIf(send -> id.equals(send.destinationId()));
}
public void clear() {
this.completedSends.clear();
this.completedReceives.clear();
this.completedSendBuffers.clear();
this.disconnected.clear();
this.connected.clear();
}
@Override
public void send(NetworkSend send) {
this.initiatedSends.add(send);
}
@Override
public void poll(long timeout) throws IOException {
completeInitiatedSends();
completeDelayedReceives();
time.sleep(timeout);
}
private void completeInitiatedSends() throws IOException {
for (NetworkSend send : initiatedSends) {
completeSend(send);
}
this.initiatedSends.clear();
}
private void completeSend(NetworkSend send) throws IOException {
// Consume the send so that we will be able to send more requests to the destination
try (ByteBufferChannel discardChannel = new ByteBufferChannel(send.size())) {
while (!send.completed()) {
send.writeTo(discardChannel);
}
completedSends.add(send);
completedSendBuffers.add(discardChannel);
}
}
private void completeDelayedReceives() {
for (NetworkSend completedSend : completedSends) {
Iterator<DelayedReceive> delayedReceiveIterator = delayedReceives.iterator();
while (delayedReceiveIterator.hasNext()) {
DelayedReceive delayedReceive = delayedReceiveIterator.next();
if (delayedReceive.source().equals(completedSend.destinationId())) {
completedReceives.add(delayedReceive.receive());
delayedReceiveIterator.remove();
}
}
}
}
@Override
public List<NetworkSend> completedSends() {
return completedSends;
}
public List<ByteBufferChannel> completedSendBuffers() {
return completedSendBuffers;
}
@Override
public List<NetworkReceive> completedReceives() {
return completedReceives;
}
public void completeReceive(NetworkReceive receive) {
this.completedReceives.add(receive);
}
public void delayedReceive(DelayedReceive receive) {
this.delayedReceives.add(receive);
}
@Override
public Map<String, ChannelState> disconnected() {
return disconnected;
}
@Override
public List<String> connected() {
List<String> currentConnected = new ArrayList<>(connected);
connected.clear();
return currentConnected;
}
@Override
public void mute(String id) {
}
@Override
public void unmute(String id) {
}
@Override
public void muteAll() {
}
@Override
public void unmuteAll() {
}
public void channelNotReady(String id) {
ready.remove(id);
}
@Override
public boolean isChannelReady(String id) {
return ready.contains(id);
}
public void reset() {
clear();
initiatedSends.clear();
delayedReceives.clear();
}
}
| MockSelector |
java | apache__dubbo | dubbo-plugin/dubbo-mcp/src/main/java/org/apache/dubbo/mcp/tool/DubboServiceToolRegistry.java | {
"start": 2262,
"end": 19438
} | class ____ {
private static final ErrorTypeAwareLogger logger =
LoggerFactory.getErrorTypeAwareLogger(DubboServiceToolRegistry.class);
private final McpAsyncServer mcpServer;
private final DubboOpenApiToolConverter toolConverter;
private final DubboMcpGenericCaller genericCaller;
private final McpServiceFilter mcpServiceFilter;
private final Map<String, McpServerFeatures.AsyncToolSpecification> registeredTools = new ConcurrentHashMap<>();
private final Map<String, Set<String>> serviceToToolsMapping = new ConcurrentHashMap<>();
private final ObjectMapper objectMapper;
public DubboServiceToolRegistry(
McpAsyncServer mcpServer,
DubboOpenApiToolConverter toolConverter,
DubboMcpGenericCaller genericCaller,
McpServiceFilter mcpServiceFilter) {
this.mcpServer = mcpServer;
this.toolConverter = toolConverter;
this.genericCaller = genericCaller;
this.mcpServiceFilter = mcpServiceFilter;
this.objectMapper = new ObjectMapper();
}
public int registerService(ProviderModel providerModel) {
ServiceDescriptor serviceDescriptor = providerModel.getServiceModel();
List<URL> statedURLs = providerModel.getServiceUrls();
if (statedURLs == null || statedURLs.isEmpty()) {
return 0;
}
try {
URL url = statedURLs.get(0);
int registeredCount = 0;
String serviceKey = getServiceKey(providerModel);
Set<String> toolNames = new HashSet<>();
Class<?> serviceInterface = serviceDescriptor.getServiceInterfaceClass();
if (serviceInterface == null) {
return 0;
}
Method[] methods = serviceInterface.getDeclaredMethods();
boolean shouldRegisterServiceLevel = mcpServiceFilter.shouldExposeAsMcpTool(providerModel);
for (Method method : methods) {
if (mcpServiceFilter.shouldExposeMethodAsMcpTool(providerModel, method)) {
McpServiceFilter.McpToolConfig toolConfig =
mcpServiceFilter.getMcpToolConfig(providerModel, method);
String toolName = registerMethodAsTool(providerModel, method, url, toolConfig);
if (toolName != null) {
toolNames.add(toolName);
registeredCount++;
}
}
}
if (registeredCount == 0 && shouldRegisterServiceLevel) {
Set<String> serviceToolNames = registerServiceLevelTools(providerModel, url);
toolNames.addAll(serviceToolNames);
registeredCount = serviceToolNames.size();
}
if (registeredCount > 0) {
serviceToToolsMapping.put(serviceKey, toolNames);
logger.info(
"Registered {} MCP tools for service: {}",
registeredCount,
serviceDescriptor.getInterfaceName());
}
return registeredCount;
} catch (Exception e) {
logger.error(
LoggerCodeConstants.COMMON_UNEXPECTED_EXCEPTION,
"",
"",
"Failed to register service as MCP tools: " + serviceDescriptor.getInterfaceName(),
e);
return 0;
}
}
public void unregisterService(ProviderModel providerModel) {
String serviceKey = getServiceKey(providerModel);
Set<String> toolNames = serviceToToolsMapping.remove(serviceKey);
if (toolNames == null || toolNames.isEmpty()) {
return;
}
int unregisteredCount = 0;
for (String toolName : toolNames) {
try {
McpServerFeatures.AsyncToolSpecification toolSpec = registeredTools.remove(toolName);
if (toolSpec != null) {
mcpServer.removeTool(toolName).block();
unregisteredCount++;
}
} catch (Exception e) {
logger.error(
LoggerCodeConstants.COMMON_UNEXPECTED_EXCEPTION,
"",
"",
"Failed to unregister MCP tool: " + toolName,
e);
}
}
if (unregisteredCount > 0) {
logger.info(
"Unregistered {} MCP tools for service: {}",
unregisteredCount,
providerModel.getServiceModel().getInterfaceName());
}
}
private String getServiceKey(ProviderModel providerModel) {
return providerModel.getServiceKey();
}
private String registerMethodAsTool(
ProviderModel providerModel, Method method, URL url, McpServiceFilter.McpToolConfig toolConfig) {
try {
String toolName = toolConfig.getToolName();
if (toolName == null || toolName.isEmpty()) {
toolName = method.getName();
}
if (registeredTools.containsKey(toolName)) {
return null;
}
String description = toolConfig.getDescription();
if (description == null || description.isEmpty()) {
description = generateDefaultDescription(method, providerModel);
}
McpSchema.Tool mcpTool = new McpSchema.Tool(toolName, description, generateToolSchema(method));
McpServerFeatures.AsyncToolSpecification toolSpec =
createMethodToolSpecification(mcpTool, providerModel, method, url);
mcpServer.addTool(toolSpec).block();
registeredTools.put(toolName, toolSpec);
return toolName;
} catch (Exception e) {
logger.error(
LoggerCodeConstants.COMMON_UNEXPECTED_EXCEPTION,
"",
"",
"Failed to register method as MCP tool: " + method.getName(),
e);
return null;
}
}
private Set<String> registerServiceLevelTools(ProviderModel providerModel, URL url) {
ServiceDescriptor serviceDescriptor = providerModel.getServiceModel();
Set<String> toolNames = new HashSet<>();
McpServiceFilter.McpToolConfig serviceConfig = mcpServiceFilter.getMcpToolConfig(providerModel);
Map<String, McpSchema.Tool> tools = toolConverter.convertToTools(serviceDescriptor, url, serviceConfig);
if (tools.isEmpty()) {
return toolNames;
}
for (Map.Entry<String, McpSchema.Tool> entry : tools.entrySet()) {
McpSchema.Tool tool = entry.getValue();
String toolId = tool.name();
if (registeredTools.containsKey(toolId)) {
continue;
}
try {
Operation operation = toolConverter.getOperationByToolName(toolId);
if (operation == null) {
logger.error(
LoggerCodeConstants.COMMON_UNEXPECTED_EXCEPTION,
"",
"",
"Could not find Operation metadata for tool: " + tool + ". Skipping registration");
continue;
}
McpServerFeatures.AsyncToolSpecification toolSpec =
createServiceToolSpecification(tool, operation, url);
mcpServer.addTool(toolSpec).block();
registeredTools.put(toolId, toolSpec);
toolNames.add(toolId);
} catch (Exception e) {
logger.error(
LoggerCodeConstants.COMMON_UNEXPECTED_EXCEPTION,
"",
"",
"Failed to register MCP tool: " + toolId,
e);
}
}
return toolNames;
}
private McpServerFeatures.AsyncToolSpecification createMethodToolSpecification(
McpSchema.Tool mcpTool, ProviderModel providerModel, Method method, URL url) {
final String interfaceName = providerModel.getServiceModel().getInterfaceName();
final String methodName = method.getName();
final Class<?>[] parameterClasses = method.getParameterTypes();
final List<String> orderedJavaParameterNames = getStrings(method);
final String group = url.getGroup();
final String version = url.getVersion();
return getAsyncToolSpecification(
mcpTool, interfaceName, methodName, parameterClasses, orderedJavaParameterNames, group, version);
}
private McpServerFeatures.AsyncToolSpecification getAsyncToolSpecification(
McpSchema.Tool mcpTool,
String interfaceName,
String methodName,
Class<?>[] parameterClasses,
List<String> orderedJavaParameterNames,
String group,
String version) {
BiFunction<McpAsyncServerExchange, Map<String, Object>, Mono<McpSchema.CallToolResult>> callFunction =
(exchange, mcpProvidedParameters) -> {
try {
Object result = genericCaller.execute(
interfaceName,
methodName,
orderedJavaParameterNames,
parameterClasses,
mcpProvidedParameters,
group,
version);
String resultJson = (result != null) ? result.toString() : "null";
return Mono.just(new McpSchema.CallToolResult(resultJson, true));
} catch (Exception e) {
logger.error(
LoggerCodeConstants.COMMON_UNEXPECTED_EXCEPTION,
"",
"",
String.format(
"Error executing tool %s (interface: %s, method: %s): %s",
mcpTool.name(), interfaceName, methodName, e.getMessage()),
e);
return Mono.just(
new McpSchema.CallToolResult("Tool execution failed: " + e.getMessage(), false));
}
};
return new McpServerFeatures.AsyncToolSpecification(mcpTool, callFunction);
}
private static List<String> getStrings(Method method) {
final List<String> orderedJavaParameterNames = new ArrayList<>();
java.lang.reflect.Parameter[] parameters = method.getParameters();
for (int i = 0; i < parameters.length; i++) {
java.lang.reflect.Parameter parameter = parameters[i];
String paramName;
McpToolParam mcpToolParam = parameter.getAnnotation(McpToolParam.class);
if (mcpToolParam != null && !mcpToolParam.name().isEmpty()) {
paramName = mcpToolParam.name();
} else if (parameter.isNamePresent()) {
paramName = parameter.getName();
} else {
paramName = McpConstant.DEFAULT_TOOL_NAME_PREFIX + i;
}
orderedJavaParameterNames.add(paramName);
}
return orderedJavaParameterNames;
}
private McpServerFeatures.AsyncToolSpecification createServiceToolSpecification(
McpSchema.Tool mcpTool, Operation operation, URL url) {
final MethodMeta methodMeta = operation.getMeta();
if (methodMeta == null) {
throw new IllegalStateException("MethodMeta not found in Operation for tool: " + mcpTool.name());
}
final ServiceMeta serviceMeta = methodMeta.getServiceMeta();
final String interfaceName = serviceMeta.getServiceInterface();
final String methodName = methodMeta.getMethod().getName();
final Class<?>[] parameterClasses = methodMeta.getMethod().getParameterTypes();
final List<String> orderedJavaParameterNames = new ArrayList<>();
if (methodMeta.getParameters() != null) {
for (ParameterMeta javaParamMeta : methodMeta.getParameters()) {
orderedJavaParameterNames.add(javaParamMeta.getName());
}
}
final String group =
serviceMeta.getUrl() != null ? serviceMeta.getUrl().getGroup() : (url != null ? url.getGroup() : null);
final String version = serviceMeta.getUrl() != null
? serviceMeta.getUrl().getVersion()
: (url != null ? url.getVersion() : null);
return getAsyncToolSpecification(
mcpTool, interfaceName, methodName, parameterClasses, orderedJavaParameterNames, group, version);
}
private String generateDefaultDescription(Method method, ProviderModel providerModel) {
return String.format(
McpConstant.DEFAULT_TOOL_DESCRIPTION_TEMPLATE,
method.getName(),
providerModel.getServiceModel().getInterfaceName());
}
private String generateToolSchema(Method method) {
Map<String, Object> schemaMap = new HashMap<>();
schemaMap.put(McpConstant.SCHEMA_PROPERTY_TYPE, JsonSchemaType.OBJECT_SCHEMA.getJsonSchemaType());
Map<String, Object> properties = new HashMap<>();
List<String> requiredParams = new ArrayList<>();
generateSchemaFromMethodSignature(method, properties, requiredParams);
schemaMap.put(McpConstant.SCHEMA_PROPERTY_PROPERTIES, properties);
if (!requiredParams.isEmpty()) {
schemaMap.put(McpConstant.SCHEMA_PROPERTY_REQUIRED, requiredParams);
}
try {
return objectMapper.writeValueAsString(schemaMap);
} catch (Exception e) {
logger.error(
LoggerCodeConstants.COMMON_UNEXPECTED_EXCEPTION,
"",
"",
"Failed to generate tool schema for method " + method.getName() + ": " + e.getMessage(),
e);
return "{\"type\":\"object\",\"properties\":{}}";
}
}
private void generateSchemaFromMethodSignature(
Method method, Map<String, Object> properties, List<String> requiredParams) {
Class<?>[] paramTypes = method.getParameterTypes();
java.lang.reflect.Type[] genericTypes = method.getGenericParameterTypes();
java.lang.annotation.Annotation[][] parameterAnnotations = method.getParameterAnnotations();
for (int i = 0; i < paramTypes.length; i++) {
String paramName = null;
String paramDescription = null;
boolean isRequired = false;
for (java.lang.annotation.Annotation annotation : parameterAnnotations[i]) {
if (annotation instanceof McpToolParam mcpToolParam) {
if (!mcpToolParam.name().isEmpty()) {
paramName = mcpToolParam.name();
}
if (!mcpToolParam.description().isEmpty()) {
paramDescription = mcpToolParam.description();
}
isRequired = mcpToolParam.required();
break;
}
}
if (paramName == null) {
paramName = getParameterName(method, i);
if (paramName == null || paramName.isEmpty()) {
paramName = McpConstant.DEFAULT_TOOL_NAME_PREFIX + i;
}
}
if (paramDescription == null) {
paramDescription = String.format(
McpConstant.DEFAULT_PARAMETER_DESCRIPTION_TEMPLATE, i, paramTypes[i].getSimpleName());
}
TypeSchemaUtils.TypeSchemaInfo schemaInfo =
TypeSchemaUtils.resolveTypeSchema(paramTypes[i], genericTypes[i], paramDescription);
properties.put(paramName, TypeSchemaUtils.toSchemaMap(schemaInfo));
if (isRequired) {
requiredParams.add(paramName);
}
}
}
private String getParameterName(Method method, int index) {
if (method.getParameters().length > index) {
return method.getParameters()[index].getName();
}
return null;
}
public void clearRegistry() {
for (String toolId : registeredTools.keySet()) {
try {
mcpServer.removeTool(toolId).block();
} catch (Exception e) {
logger.error(
LoggerCodeConstants.COMMON_UNEXPECTED_EXCEPTION,
"",
"",
"Failed to unregister MCP tool: " + toolId,
e);
}
}
registeredTools.clear();
}
}
| DubboServiceToolRegistry |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/attribute/Attribute.java | {
"start": 1546,
"end": 1932
} | class ____ {
private boolean isNoOutputUntilEndOfInput = false;
public Builder setNoOutputUntilEndOfInput(boolean isNoOutputUntilEndOfInput) {
this.isNoOutputUntilEndOfInput = isNoOutputUntilEndOfInput;
return this;
}
public Attribute build() {
return new Attribute(isNoOutputUntilEndOfInput);
}
}
}
| Builder |
java | apache__rocketmq | store/src/main/java/org/apache/rocketmq/store/ha/HAConnection.java | {
"start": 890,
"end": 1885
} | interface ____ {
/**
* Start HA Connection
*/
void start();
/**
* Shutdown HA Connection
*/
void shutdown();
/**
* Close HA Connection
*/
void close();
/**
* Get socket channel
*/
SocketChannel getSocketChannel();
/**
* Get current state for ha connection
*
* @return HAConnectionState
*/
HAConnectionState getCurrentState();
/**
* Get client address for ha connection
*
* @return client ip address
*/
String getClientAddress();
/**
* Get the transfer rate per second
*
* @return transfer bytes in second
*/
long getTransferredByteInSecond();
/**
* Get the current transfer offset to the slave
*
* @return the current transfer offset to the slave
*/
long getTransferFromWhere();
/**
* Get slave ack offset
*
* @return slave ack offset
*/
long getSlaveAckOffset();
}
| HAConnection |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/StatementSwitchToExpressionSwitchTest.java | {
"start": 123212,
"end": 123868
} | class ____ {
public int foo(Suit suit) {
int x = 0;
switch (suit) {
case HEART:
// Heart comment
// Fall through
case DIAMOND:
x = (((x + 1) * (x * x)) << 2);
break;
case SPADE:
throw new RuntimeException();
case CLUB:
throw new NullPointerException();
}
return x;
}
}
""")
.addOutputLines(
"Test.java",
"""
| Test |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_comment.java | {
"start": 826,
"end": 1208
} | class ____
extends MysqlTest {
public void test_0() throws Exception {
String sql = "select xxx comment, xxx as comment from t";
SQLStatement stmt = SQLUtils
.parseSingleStatement(sql, DbType.mysql);
assertEquals("SELECT xxx AS comment, xxx AS comment\n" +
"FROM t", stmt.toString());
}
}
| MySqlSelectTest_comment |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/serde/ContextResolvedTableSerdeTest.java | {
"start": 26647,
"end": 27866
} | class ____ {
private final SerdeContext ctx =
serdeContext(
TableConfigOptions.CatalogPlanCompilation.ALL,
TableConfigOptions.CatalogPlanRestore.IDENTIFIER);
@Test
void withPermanentTable() throws Exception {
final Tuple2<JsonNode, ContextResolvedTable> result =
serDe(ctx, PERMANENT_PLAN_CONTEXT_RESOLVED_TABLE);
assertThatJsonContains(result.f0, FIELD_NAME_IDENTIFIER);
assertThatJsonContains(result.f0, FIELD_NAME_CATALOG_TABLE);
assertThatJsonContains(
result.f0,
FIELD_NAME_CATALOG_TABLE,
ResolvedCatalogTableJsonSerializer.OPTIONS);
assertThatJsonContains(
result.f0,
FIELD_NAME_CATALOG_TABLE,
ResolvedCatalogTableJsonSerializer.COMMENT);
assertThat(result.f1).isEqualTo(PERMANENT_CATALOG_CONTEXT_RESOLVED_TABLE);
}
}
@Nested
@DisplayName("and CatalogPlanRestore == ALL")
| TestRestoreIdentifier |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsController.java | {
"start": 1352,
"end": 1435
} | class ____ the various pages that the History Server WebApp supports
*/
public | renders |
java | apache__avro | lang/java/protobuf/src/test/java/org/apache/avro/protobuf/multiplefiles/A.java | {
"start": 1442,
"end": 2993
} | enum ____ with the given numeric wire value.
*/
public static A forNumber(int value) {
switch (value) {
case 1:
return X;
case 2:
return Y;
case 3:
return Z;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<A> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<A> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<A>() {
public A findValueByNumber(int number) {
return A.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return org.apache.avro.protobuf.multiplefiles.TestMultipleFiles.getDescriptor().getEnumTypes().get(0);
}
private static final A[] VALUES = values();
public static A valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private A(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:org.apache.avro.protobuf.multiplefiles.A)
}
| associated |
java | apache__camel | test-infra/camel-test-infra-ollama/src/test/java/org/apache/camel/test/infra/ollama/services/OllamaRemoteService.java | {
"start": 864,
"end": 1136
} | class ____ extends OllamaRemoteInfraService implements OllamaService {
public OllamaRemoteService() {
super();
}
public OllamaRemoteService(OllamaServiceConfiguration serviceConfiguration) {
super(serviceConfiguration);
}
}
| OllamaRemoteService |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/records/impl/pb/MembershipStatePBImpl.java | {
"start": 1949,
"end": 10569
} | class ____ extends MembershipState implements PBRecord {
private FederationProtocolPBTranslator<NamenodeMembershipRecordProto, Builder,
NamenodeMembershipRecordProtoOrBuilder> translator =
new FederationProtocolPBTranslator<NamenodeMembershipRecordProto,
Builder, NamenodeMembershipRecordProtoOrBuilder>(
NamenodeMembershipRecordProto.class);
public MembershipStatePBImpl() {
}
public MembershipStatePBImpl(NamenodeMembershipRecordProto proto) {
this.translator.setProto(proto);
}
@Override
public NamenodeMembershipRecordProto getProto() {
return this.translator.build();
}
@Override
public void setProto(Message proto) {
this.translator.setProto(proto);
}
@Override
public void readInstance(String base64String) throws IOException {
this.translator.readInstance(base64String);
}
@Override
public void setRouterId(String routerId) {
Builder builder = this.translator.getBuilder();
if (routerId == null) {
builder.clearRouterId();
} else {
builder.setRouterId(routerId);
}
}
@Override
public void setNameserviceId(String nameserviceId) {
Builder builder = this.translator.getBuilder();
if (nameserviceId == null) {
builder.clearNameserviceId();
} else {
builder.setNameserviceId(nameserviceId);
}
}
@Override
public void setNamenodeId(String namenodeId) {
Builder builder = this.translator.getBuilder();
if (namenodeId == null) {
builder.clearNamenodeId();
} else {
builder.setNamenodeId(namenodeId);
}
}
@Override
public void setWebAddress(String webAddress) {
Builder builder = this.translator.getBuilder();
if (webAddress == null) {
builder.clearWebAddress();
} else {
builder.setWebAddress(webAddress);
}
}
@Override
public void setRpcAddress(String rpcAddress) {
Builder builder = this.translator.getBuilder();
if (rpcAddress == null) {
builder.clearRpcAddress();
} else {
builder.setRpcAddress(rpcAddress);
}
}
@Override
public void setServiceAddress(String serviceAddress) {
this.translator.getBuilder().setServiceAddress(serviceAddress);
}
@Override
public void setLifelineAddress(String lifelineAddress) {
Builder builder = this.translator.getBuilder();
if (lifelineAddress == null) {
builder.clearLifelineAddress();
} else {
builder.setLifelineAddress(lifelineAddress);
}
}
@Override
public void setIsSafeMode(boolean isSafeMode) {
Builder builder = this.translator.getBuilder();
builder.setIsSafeMode(isSafeMode);
}
@Override
public void setClusterId(String clusterId) {
Builder builder = this.translator.getBuilder();
if (clusterId == null) {
builder.clearClusterId();
} else {
builder.setClusterId(clusterId);
}
}
@Override
public void setBlockPoolId(String blockPoolId) {
Builder builder = this.translator.getBuilder();
if (blockPoolId == null) {
builder.clearBlockPoolId();
} else {
builder.setBlockPoolId(blockPoolId);
}
}
@Override
public void setState(FederationNamenodeServiceState state) {
Builder builder = this.translator.getBuilder();
if (state == null) {
builder.clearState();
} else {
builder.setState(state.toString());
}
}
@Override
public void setWebScheme(String webScheme) {
Builder builder = this.translator.getBuilder();
if (webScheme == null) {
builder.clearWebScheme();
} else {
builder.setWebScheme(webScheme);
}
}
@Override
public String getRouterId() {
NamenodeMembershipRecordProtoOrBuilder proto =
this.translator.getProtoOrBuilder();
if (!proto.hasRouterId()) {
return null;
}
return proto.getRouterId();
}
@Override
public String getNameserviceId() {
NamenodeMembershipRecordProtoOrBuilder proto =
this.translator.getProtoOrBuilder();
if (!proto.hasNameserviceId()) {
return null;
}
return this.translator.getProtoOrBuilder().getNameserviceId();
}
@Override
public String getNamenodeId() {
NamenodeMembershipRecordProtoOrBuilder proto =
this.translator.getProtoOrBuilder();
if (!proto.hasNamenodeId()) {
return null;
}
return this.translator.getProtoOrBuilder().getNamenodeId();
}
@Override
public String getClusterId() {
NamenodeMembershipRecordProtoOrBuilder proto =
this.translator.getProtoOrBuilder();
if (!proto.hasClusterId()) {
return null;
}
return this.translator.getProtoOrBuilder().getClusterId();
}
@Override
public String getBlockPoolId() {
NamenodeMembershipRecordProtoOrBuilder proto =
this.translator.getProtoOrBuilder();
if (!proto.hasBlockPoolId()) {
return null;
}
return this.translator.getProtoOrBuilder().getBlockPoolId();
}
@Override
public String getRpcAddress() {
NamenodeMembershipRecordProtoOrBuilder proto =
this.translator.getProtoOrBuilder();
if (!proto.hasRpcAddress()) {
return null;
}
return this.translator.getProtoOrBuilder().getRpcAddress();
}
@Override
public String getServiceAddress() {
NamenodeMembershipRecordProtoOrBuilder proto =
this.translator.getProtoOrBuilder();
if (!proto.hasServiceAddress()) {
return null;
}
return this.translator.getProtoOrBuilder().getServiceAddress();
}
@Override
public String getWebAddress() {
NamenodeMembershipRecordProtoOrBuilder proto =
this.translator.getProtoOrBuilder();
if (!proto.hasWebAddress()) {
return null;
}
return this.translator.getProtoOrBuilder().getWebAddress();
}
@Override
public String getLifelineAddress() {
NamenodeMembershipRecordProtoOrBuilder proto =
this.translator.getProtoOrBuilder();
if (!proto.hasLifelineAddress()) {
return null;
}
return this.translator.getProtoOrBuilder().getLifelineAddress();
}
@Override
public boolean getIsSafeMode() {
return this.translator.getProtoOrBuilder().getIsSafeMode();
}
@Override
public FederationNamenodeServiceState getState() {
FederationNamenodeServiceState ret =
FederationNamenodeServiceState.UNAVAILABLE;
NamenodeMembershipRecordProtoOrBuilder proto =
this.translator.getProtoOrBuilder();
if (!proto.hasState()) {
return null;
}
try {
ret = FederationNamenodeServiceState.valueOf(proto.getState());
} catch (IllegalArgumentException e) {
// Ignore this error
}
return ret;
}
@Override
public String getWebScheme() {
NamenodeMembershipRecordProtoOrBuilder proto =
this.translator.getProtoOrBuilder();
if (!proto.hasWebScheme()) {
return null;
}
return this.translator.getProtoOrBuilder().getWebScheme();
}
@Override
public void setStats(MembershipStats stats) {
if (stats instanceof MembershipStatsPBImpl) {
MembershipStatsPBImpl statsPB = (MembershipStatsPBImpl)stats;
NamenodeMembershipStatsRecordProto statsProto =
(NamenodeMembershipStatsRecordProto)statsPB.getProto();
this.translator.getBuilder().setStats(statsProto);
}
}
@Override
public MembershipStats getStats() {
NamenodeMembershipStatsRecordProto statsProto =
this.translator.getProtoOrBuilder().getStats();
MembershipStats stats =
StateStoreSerializer.newRecord(MembershipStats.class);
if (stats instanceof MembershipStatsPBImpl) {
MembershipStatsPBImpl statsPB = (MembershipStatsPBImpl)stats;
statsPB.setProto(statsProto);
return statsPB;
} else {
throw new IllegalArgumentException(
"Cannot get stats for the membership");
}
}
@Override
public void setLastContact(long contact) {
this.translator.getBuilder().setLastContact(contact);
}
@Override
public long getLastContact() {
return this.translator.getProtoOrBuilder().getLastContact();
}
@Override
public void setDateModified(long time) {
if (getState() != FederationNamenodeServiceState.EXPIRED) {
this.translator.getBuilder().setDateModified(time);
}
}
@Override
public long getDateModified() {
return this.translator.getProtoOrBuilder().getDateModified();
}
@Override
public void setDateCreated(long time) {
this.translator.getBuilder().setDateCreated(time);
}
@Override
public long getDateCreated() {
return this.translator.getProtoOrBuilder().getDateCreated();
}
} | MembershipStatePBImpl |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/onexception/OnExceptionUseOriginalMessageStreamTest.java | {
"start": 6256,
"end": 6772
} | class ____ extends ServiceSupport implements DataFormat {
@Override
public void marshal(Exchange exchange, Object graph, OutputStream stream) {
// noop
}
@Override
public Object unmarshal(Exchange exchange, InputStream stream) throws Exception {
// simulate reading the entire stream so its not re-readable later
String s = IOConverter.toString(stream, exchange);
throw new MyDataFormatException(s);
}
}
}
| MyDataFormat |
java | quarkusio__quarkus | extensions/oidc-common/runtime/src/main/java/io/quarkus/oidc/common/runtime/config/OidcClientCommonConfig.java | {
"start": 5145,
"end": 10217
} | enum ____ {
/**
* JWT token is generated by the OIDC provider client to support
* `client_secret_jwt` and `private_key_jwt` authentication methods.
*/
CLIENT,
/**
* JWT bearer token is used as a client assertion: https://www.rfc-editor.org/rfc/rfc7523#section-2.2.
*/
BEARER
}
/**
* JWT token source: OIDC provider client or an existing JWT bearer token.
*/
@WithDefault("client")
Source source();
/**
* Path to a file with a JWT bearer token that should be used as a client assertion.
* This path can only be set when JWT source ({@link #source()}) is set to {@link Source#BEARER}.
*/
Optional<Path> tokenPath();
/**
* If provided, indicates that JWT is signed using a secret key.
* It is mutually exclusive with {@link #key}, {@link #keyFile} and {@link #keyStore} properties.
*/
Optional<String> secret();
/**
* If provided, indicates that JWT is signed using a secret key provided by Secret CredentialsProvider.
*/
Provider secretProvider();
/**
* String representation of a private key. If provided, indicates that JWT is signed using a private key in PEM or
* JWK format.
* It is mutually exclusive with {@link #secret}, {@link #keyFile} and {@link #keyStore} properties.
* You can use the {@link #signatureAlgorithm} property to override the default key algorithm, `RS256`.
*/
Optional<String> key();
/**
* If provided, indicates that JWT is signed using a private key in PEM or JWK format.
* It is mutually exclusive with {@link #secret}, {@link #key} and {@link #keyStore} properties.
* You can use the {@link #signatureAlgorithm} property to override the default key algorithm, `RS256`.
*/
Optional<String> keyFile();
/**
* If provided, indicates that JWT is signed using a private key from a keystore.
* It is mutually exclusive with {@link #secret}, {@link #key} and {@link #keyFile} properties.
*/
Optional<String> keyStoreFile();
/**
* A parameter to specify the password of the keystore file.
*/
Optional<String> keyStorePassword();
/**
* The private key id or alias.
*/
Optional<String> keyId();
/**
* The private key password.
*/
Optional<String> keyPassword();
/**
* The JWT audience (`aud`) claim value.
* By default, the audience is set to the address of the OpenId Connect Provider's token endpoint.
*/
Optional<String> audience();
/**
* Whether to keep a trailing slash `/` in the {@link #audience()} value.
*/
@WithDefault("false")
boolean keepAudienceTrailingSlash();
/**
* The key identifier of the signing key added as a JWT `kid` header.
*/
Optional<String> tokenKeyId();
/**
* The issuer of the signing key added as a JWT `iss` claim. The default value is the client id.
*/
Optional<String> issuer();
/**
* Subject of the signing key added as a JWT `sub` claim The default value is the client id.
*/
Optional<String> subject();
/**
* Additional claims.
*/
@ConfigDocMapKey("claim-name")
Map<String, String> claims();
/**
* The signature algorithm used for the {@link #keyFile} property.
* Supported values: `RS256` (default), `RS384`, `RS512`, `PS256`, `PS384`, `PS512`, `ES256`, `ES384`, `ES512`,
* `HS256`, `HS384`, `HS512`.
*/
Optional<String> signatureAlgorithm();
/**
* The JWT lifespan in seconds. This value is added to the time at which the JWT was issued to calculate the
* expiration time.
*/
@WithDefault("10")
int lifespan();
/**
* If true then the client authentication token is a JWT bearer grant assertion. Instead of producing
* 'client_assertion'
* and 'client_assertion_type' form properties, only 'assertion' is produced.
* This option is only supported by the OIDC client extension.
*/
@WithDefault("false")
boolean assertion();
}
/**
* CredentialsProvider, which provides a client secret.
*/
| Source |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java | {
"start": 8651,
"end": 9379
} | class ____ extends TestBlock.Builder implements BlockLoader.IntBuilder {
private IntsBuilder() {
super(expectedCount);
}
@Override
public IntsBuilder appendInt(int value) {
add(value);
return this;
}
}
return new IntsBuilder();
}
@Override
public BlockLoader.LongBuilder longsFromDocValues(int expectedCount) {
return longs(expectedCount);
}
@Override
public BlockLoader.LongBuilder longs(int expectedCount) {
| IntsBuilder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.