language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__rocketmq | broker/src/test/java/org/apache/rocketmq/broker/pop/PopConsumerServiceTest.java | {
"start": 3179,
"end": 23471
} | class ____ {
private final String clientHost = "127.0.0.1:8888";
private final String groupId = "groupId";
private final String topicId = "topicId";
private final int queueId = 2;
private final String attemptId = UUID.randomUUID().toString().toUpperCase();
private final String filePath = PopConsumerRocksdbStoreTest.getRandomStorePath();
private BrokerController brokerController;
private PopConsumerService consumerService;
@Before
public void init() throws IOException {
BrokerConfig brokerConfig = new BrokerConfig();
brokerConfig.setEnablePopLog(true);
brokerConfig.setEnablePopBufferMerge(true);
brokerConfig.setEnablePopMessageThreshold(true);
brokerConfig.setPopInflightMessageThreshold(100);
brokerConfig.setPopConsumerKVServiceLog(true);
brokerConfig.setEnableRetryTopicV2(true);
MessageStoreConfig messageStoreConfig = new MessageStoreConfig();
messageStoreConfig.setStorePathRootDir(filePath);
TopicConfigManager topicConfigManager = Mockito.mock(TopicConfigManager.class);
ConsumerOffsetManager consumerOffsetManager = Mockito.mock(ConsumerOffsetManager.class);
PopMessageProcessor popMessageProcessor = Mockito.mock(PopMessageProcessor.class);
PopLongPollingService popLongPollingService = Mockito.mock(PopLongPollingService.class);
ConsumerOrderInfoManager consumerOrderInfoManager = Mockito.mock(ConsumerOrderInfoManager.class);
brokerController = Mockito.mock(BrokerController.class);
Mockito.when(brokerController.getBrokerConfig()).thenReturn(brokerConfig);
Mockito.when(brokerController.getTopicConfigManager()).thenReturn(topicConfigManager);
Mockito.when(brokerController.getMessageStoreConfig()).thenReturn(messageStoreConfig);
Mockito.when(brokerController.getConsumerOffsetManager()).thenReturn(consumerOffsetManager);
Mockito.when(brokerController.getPopMessageProcessor()).thenReturn(popMessageProcessor);
Mockito.when(popMessageProcessor.getPopLongPollingService()).thenReturn(popLongPollingService);
Mockito.when(brokerController.getConsumerOrderInfoManager()).thenReturn(consumerOrderInfoManager);
consumerService = new PopConsumerService(brokerController);
}
@After
public void shutdown() throws IOException {
FileUtils.deleteDirectory(new File(filePath));
}
public PopConsumerRecord getConsumerTestRecord() {
PopConsumerRecord popConsumerRecord = new PopConsumerRecord();
popConsumerRecord.setPopTime(System.currentTimeMillis());
popConsumerRecord.setGroupId(groupId);
popConsumerRecord.setTopicId(topicId);
popConsumerRecord.setQueueId(queueId);
popConsumerRecord.setRetryFlag(PopConsumerRecord.RetryType.NORMAL_TOPIC.getCode());
popConsumerRecord.setAttemptTimes(0);
popConsumerRecord.setInvisibleTime(TimeUnit.SECONDS.toMillis(20));
popConsumerRecord.setAttemptId(UUID.randomUUID().toString().toUpperCase());
return popConsumerRecord;
}
@Test
public void isPopShouldStopTest() throws IllegalAccessException {
Assert.assertFalse(consumerService.isPopShouldStop(groupId, topicId, queueId));
PopConsumerCache consumerCache = (PopConsumerCache) FieldUtils.readField(
consumerService, "popConsumerCache", true);
for (int i = 0; i < 100; i++) {
PopConsumerRecord record = getConsumerTestRecord();
record.setOffset(i);
consumerCache.writeRecords(Collections.singletonList(record));
}
Assert.assertTrue(consumerService.isPopShouldStop(groupId, topicId, queueId));
}
@Test
public void pendingFilterCountTest() throws ConsumeQueueException {
MessageStore messageStore = Mockito.mock(MessageStore.class);
Mockito.when(messageStore.getMaxOffsetInQueue(topicId, queueId)).thenReturn(100L);
Mockito.when(brokerController.getMessageStore()).thenReturn(messageStore);
ConsumerOffsetManager consumerOffsetManager = brokerController.getConsumerOffsetManager();
Mockito.when(consumerOffsetManager.queryOffset(groupId, topicId, queueId)).thenReturn(20L);
Assert.assertEquals(consumerService.getPendingFilterCount(groupId, topicId, queueId), 80L);
}
private MessageExt getMessageExt() {
MessageExt messageExt = new MessageExt();
messageExt.setTopic(topicId);
messageExt.setQueueId(queueId);
messageExt.setBody(new byte[128]);
messageExt.setBornHost(new InetSocketAddress("127.0.0.1", 8080));
messageExt.setStoreHost(new InetSocketAddress("127.0.0.1", 8080));
messageExt.putUserProperty("Key", "Value");
return messageExt;
}
@Test
public void recodeRetryMessageTest() throws Exception {
GetMessageResult getMessageResult = new GetMessageResult();
getMessageResult.setStatus(GetMessageStatus.FOUND);
// result is empty
SelectMappedBufferResult bufferResult = new SelectMappedBufferResult(
0, ByteBuffer.allocate(10), 10, null);
getMessageResult.addMessage(bufferResult);
getMessageResult.getMessageMapedList().clear();
GetMessageResult result = consumerService.recodeRetryMessage(
getMessageResult, topicId, 0, 100, 200);
Assert.assertEquals(0, result.getMessageMapedList().size());
ByteBuffer buffer = ByteBuffer.wrap(
MessageDecoder.encode(getMessageExt(), false));
getMessageResult = new GetMessageResult();
getMessageResult.setStatus(GetMessageStatus.FOUND);
getMessageResult.addMessage(new SelectMappedBufferResult(
0, buffer, buffer.remaining(), null));
result = consumerService.recodeRetryMessage(
getMessageResult, topicId, 0, 100, 200);
Assert.assertNotNull(result);
Assert.assertEquals(1, result.getMessageMapedList().size());
}
@Test
public void addGetMessageResultTest() {
PopConsumerContext context = new PopConsumerContext(
clientHost, System.currentTimeMillis(), 20000, groupId, false, ConsumeInitMode.MIN, attemptId);
GetMessageResult result = new GetMessageResult();
result.setStatus(GetMessageStatus.FOUND);
result.getMessageQueueOffset().add(100L);
consumerService.handleGetMessageResult(
context, result, topicId, queueId, PopConsumerRecord.RetryType.NORMAL_TOPIC, 100);
Assert.assertEquals(1, context.getGetMessageResultList().size());
}
@Test
public void getMessageAsyncTest() throws Exception {
MessageStore messageStore = Mockito.mock(MessageStore.class);
Mockito.when(brokerController.getMessageStore()).thenReturn(messageStore);
Mockito.when(messageStore.getMessageAsync(groupId, topicId, queueId, 0, 10, null))
.thenReturn(CompletableFuture.completedFuture(null));
GetMessageResult getMessageResult = consumerService.getMessageAsync(
"127.0.0.1:8888", groupId, topicId, queueId, 0, 10, null).join();
Assert.assertNull(getMessageResult);
// success when first get message
GetMessageResult firstGetMessageResult = new GetMessageResult();
firstGetMessageResult.setStatus(GetMessageStatus.FOUND);
Mockito.when(messageStore.getMessageAsync(groupId, topicId, queueId, 0, 10, null))
.thenReturn(CompletableFuture.completedFuture(firstGetMessageResult));
getMessageResult = consumerService.getMessageAsync(
"127.0.0.1:8888", groupId, topicId, queueId, 0, 10, null).join();
Assert.assertEquals(GetMessageStatus.FOUND, getMessageResult.getStatus());
// reset offset from server
firstGetMessageResult.setStatus(GetMessageStatus.OFFSET_FOUND_NULL);
firstGetMessageResult.setNextBeginOffset(25);
GetMessageResult resetGetMessageResult = new GetMessageResult();
resetGetMessageResult.setStatus(GetMessageStatus.FOUND);
Mockito.when(messageStore.getMessageAsync(groupId, topicId, queueId, 25, 10, null))
.thenReturn(CompletableFuture.completedFuture(resetGetMessageResult));
getMessageResult = consumerService.getMessageAsync(
"127.0.0.1:8888", groupId, topicId, queueId, 0, 10, null).join();
Assert.assertEquals(GetMessageStatus.FOUND, getMessageResult.getStatus());
// fifo block
PopConsumerContext context = new PopConsumerContext(
clientHost, System.currentTimeMillis(), 20000, groupId, false, ConsumeInitMode.MIN, attemptId);
consumerService.setFifoBlocked(context, groupId, topicId, queueId, Collections.singletonList(100L));
Mockito.when(brokerController.getConsumerOrderInfoManager()
.checkBlock(anyString(), anyString(), anyString(), anyInt(), anyLong())).thenReturn(true);
Assert.assertTrue(consumerService.isFifoBlocked(context, groupId, topicId, queueId));
// get message async normal
CompletableFuture<PopConsumerContext> future = CompletableFuture.completedFuture(context);
Assert.assertEquals(0L, consumerService.getMessageAsync(future, clientHost, groupId, topicId, queueId,
10, null, PopConsumerRecord.RetryType.NORMAL_TOPIC).join().getRestCount());
// get message result full, no need get again
for (int i = 0; i < 10; i++) {
ByteBuffer buffer = ByteBuffer.wrap(MessageDecoder.encode(getMessageExt(), false));
getMessageResult.addMessage(new SelectMappedBufferResult(
0, buffer, buffer.remaining(), null), i);
}
context.addGetMessageResult(getMessageResult, topicId, queueId, PopConsumerRecord.RetryType.NORMAL_TOPIC, 0);
Mockito.when(brokerController.getMessageStore().getMaxOffsetInQueue(topicId, queueId)).thenReturn(100L);
Mockito.when(brokerController.getConsumerOffsetManager().queryOffset(groupId, topicId, queueId)).thenReturn(0L);
Assert.assertEquals(100L, consumerService.getMessageAsync(future, clientHost, groupId, topicId, queueId,
10, null, PopConsumerRecord.RetryType.NORMAL_TOPIC).join().getRestCount());
// fifo block test
context = new PopConsumerContext(
clientHost, System.currentTimeMillis(), 20000, groupId, true, ConsumeInitMode.MIN, attemptId);
future = CompletableFuture.completedFuture(context);
Assert.assertEquals(0L, consumerService.getMessageAsync(future, clientHost, groupId, topicId, queueId,
10, null, PopConsumerRecord.RetryType.NORMAL_TOPIC).join().getRestCount());
}
@Test
public void popAsyncTest() {
PopConsumerService consumerServiceSpy = Mockito.spy(consumerService);
TopicConfigManager topicConfigManager = Mockito.mock(TopicConfigManager.class);
Mockito.when(topicConfigManager.selectTopicConfig(topicId)).thenReturn(new TopicConfig(
topicId, 2, 2, PermName.PERM_READ | PermName.PERM_WRITE, 0));
Mockito.when(brokerController.getTopicConfigManager()).thenReturn(topicConfigManager);
String[] retryTopic = new String[] {
KeyBuilder.buildPopRetryTopicV1(topicId, groupId),
KeyBuilder.buildPopRetryTopicV2(topicId, groupId)
};
for (String retry : retryTopic) {
GetMessageResult getMessageResult = new GetMessageResult();
getMessageResult.setStatus(GetMessageStatus.NO_MATCHED_MESSAGE);
getMessageResult.setMinOffset(0L);
getMessageResult.setMaxOffset(1L);
getMessageResult.setNextBeginOffset(1L);
Mockito.doReturn(CompletableFuture.completedFuture(getMessageResult))
.when(consumerServiceSpy).getMessageAsync(clientHost, groupId, retry, 0, 0, 10, null);
Mockito.doReturn(CompletableFuture.completedFuture(getMessageResult))
.when(consumerServiceSpy).getMessageAsync(clientHost, groupId, retry, 0, 0, 8, null);
}
for (int i = -1; i < 2; i++) {
GetMessageResult getMessageResult = new GetMessageResult();
getMessageResult.setStatus(GetMessageStatus.FOUND);
getMessageResult.setMinOffset(0L);
getMessageResult.setMaxOffset(1L);
getMessageResult.setNextBeginOffset(1L);
getMessageResult.addMessage(Mockito.mock(SelectMappedBufferResult.class), 1L);
Mockito.doReturn(CompletableFuture.completedFuture(getMessageResult))
.when(consumerServiceSpy).getMessageAsync(clientHost, groupId, topicId, i, 0, 8, null);
Mockito.doReturn(CompletableFuture.completedFuture(getMessageResult))
.when(consumerServiceSpy).getMessageAsync(clientHost, groupId, topicId, i, 0, 9, null);
Mockito.doReturn(CompletableFuture.completedFuture(getMessageResult))
.when(consumerServiceSpy).getMessageAsync(clientHost, groupId, topicId, i, 0, 10, null);
}
// pop broker
consumerServiceSpy.popAsync(clientHost, System.currentTimeMillis(),
20000, groupId, topicId, -1, 10, false, attemptId, ConsumeInitMode.MIN, null).join();
}
@Test
public void ackAsyncTest() {
long current = System.currentTimeMillis();
consumerService.getPopConsumerStore().start();
consumerService.ackAsync(
current, 10, groupId, topicId, queueId, 100).join();
consumerService.changeInvisibilityDuration(current, 10,
current + 100, 10, groupId, topicId, queueId, 100);
consumerService.shutdown();
}
@Test
public void reviveRetryTest() {
Mockito.when(brokerController.getTopicConfigManager().selectTopicConfig(topicId)).thenReturn(null);
Mockito.when(brokerController.getConsumerOffsetManager().queryOffset(groupId, topicId, 0)).thenReturn(-1L);
consumerService.createRetryTopicIfNeeded(groupId, topicId);
consumerService.clearCache(groupId, topicId, queueId);
MessageExt messageExt = new MessageExt();
messageExt.setBody("body".getBytes());
messageExt.setBornTimestamp(System.currentTimeMillis());
messageExt.setFlag(0);
messageExt.setSysFlag(0);
messageExt.setReconsumeTimes(1);
messageExt.putUserProperty("key", "value");
PopConsumerRecord record = new PopConsumerRecord();
record.setTopicId("topic");
record.setGroupId("group");
Mockito.when(brokerController.getBrokerStatsManager()).thenReturn(Mockito.mock(BrokerStatsManager.class));
Mockito.when(brokerController.getEscapeBridge()).thenReturn(Mockito.mock(EscapeBridge.class));
Mockito.when(brokerController.getEscapeBridge().putMessageToSpecificQueue(any(MessageExtBrokerInner.class)))
.thenReturn(new PutMessageResult(
PutMessageStatus.PUT_OK, new AppendMessageResult(AppendMessageStatus.PUT_OK)));
PopConsumerService consumerServiceSpy = Mockito.spy(consumerService);
Mockito.doNothing().when(consumerServiceSpy).createRetryTopicIfNeeded(any(), any());
Assert.assertTrue(consumerServiceSpy.reviveRetry(record, messageExt));
// write message error
Mockito.when(brokerController.getEscapeBridge().putMessageToSpecificQueue(any(MessageExtBrokerInner.class)))
.thenReturn(new PutMessageResult(PutMessageStatus.UNKNOWN_ERROR,
new AppendMessageResult(AppendMessageStatus.UNKNOWN_ERROR)));
Assert.assertFalse(consumerServiceSpy.reviveRetry(record, messageExt));
// revive backoff
consumerService.getPopConsumerStore().start();
List<PopConsumerRecord> consumerRecordList = IntStream.range(0, 3)
.mapToObj(i -> {
PopConsumerRecord temp = new PopConsumerRecord();
temp.setPopTime(0);
temp.setInvisibleTime(20 * 1000);
temp.setTopicId("topic");
temp.setGroupId("group");
temp.setQueueId(2);
temp.setOffset(i);
return temp;
})
.collect(Collectors.toList());
consumerService.getPopConsumerStore().writeRecords(consumerRecordList);
Mockito.doReturn(CompletableFuture.completedFuture(null))
.when(consumerServiceSpy).getMessageAsync(any(PopConsumerRecord.class));
consumerServiceSpy.revive(new AtomicLong(20 * 1000), 1);
Mockito.doReturn(CompletableFuture.completedFuture(
Triple.of(null, "GetMessageResult is null", false)))
.when(consumerServiceSpy).getMessageAsync(any(PopConsumerRecord.class));
consumerServiceSpy.revive(new AtomicLong(20 * 1000), 1);
Mockito.doReturn(CompletableFuture.completedFuture(
Triple.of(Mockito.mock(MessageExt.class), null, false)))
.when(consumerServiceSpy).getMessageAsync(any(PopConsumerRecord.class));
consumerServiceSpy.revive(new AtomicLong(20 * 1000), 1);
consumerService.shutdown();
}
@Test
public void reviveBackoffRetryTest() {
Mockito.when(brokerController.getEscapeBridge()).thenReturn(Mockito.mock(EscapeBridge.class));
PopConsumerService consumerServiceSpy = Mockito.spy(consumerService);
consumerService.getPopConsumerStore().start();
long popTime = 1000000000L;
long invisibleTime = 60 * 1000L;
PopConsumerRecord record = new PopConsumerRecord();
record.setPopTime(popTime);
record.setInvisibleTime(invisibleTime);
record.setTopicId("topic");
record.setGroupId("group");
record.setQueueId(0);
record.setOffset(0);
consumerService.getPopConsumerStore().writeRecords(Collections.singletonList(record));
Mockito.doReturn(CompletableFuture.completedFuture(Triple.of(Mockito.mock(MessageExt.class), "", false)))
.when(consumerServiceSpy).getMessageAsync(any(PopConsumerRecord.class));
Mockito.when(brokerController.getEscapeBridge().putMessageToSpecificQueue(any(MessageExtBrokerInner.class))).thenReturn(
new PutMessageResult(PutMessageStatus.UNKNOWN_ERROR, new AppendMessageResult(AppendMessageStatus.UNKNOWN_ERROR))
);
long visibleTimestamp = popTime + invisibleTime;
// revive fails
Assert.assertEquals(1, consumerServiceSpy.revive(new AtomicLong(visibleTimestamp), 1));
// should be invisible now
Assert.assertEquals(0, consumerService.getPopConsumerStore().scanExpiredRecords(0, visibleTimestamp, 1).size());
// will be visible again in 10 seconds
Assert.assertEquals(1, consumerService.getPopConsumerStore().scanExpiredRecords(visibleTimestamp, System.currentTimeMillis() + visibleTimestamp + 10 * 1000, 1).size());
consumerService.shutdown();
}
@Test
public void transferToFsStoreTest() {
Assert.assertNotNull(consumerService.getServiceName());
List<PopConsumerRecord> consumerRecordList = IntStream.range(0, 3)
.mapToObj(i -> {
PopConsumerRecord temp = new PopConsumerRecord();
temp.setPopTime(0);
temp.setInvisibleTime(20 * 1000);
temp.setTopicId("topic");
temp.setGroupId("group");
temp.setQueueId(2);
temp.setOffset(i);
return temp;
})
.collect(Collectors.toList());
Mockito.when(brokerController.getPopMessageProcessor().buildCkMsg(any(), anyInt()))
.thenReturn(new MessageExtBrokerInner());
Mockito.when(brokerController.getMessageStore()).thenReturn(Mockito.mock(MessageStore.class));
Mockito.when(brokerController.getMessageStore().asyncPutMessage(any()))
.thenReturn(CompletableFuture.completedFuture(
new PutMessageResult(PutMessageStatus.PUT_OK, new AppendMessageResult(AppendMessageStatus.PUT_OK))));
consumerService.start();
consumerService.getPopConsumerStore().writeRecords(consumerRecordList);
consumerService.transferToFsStore();
consumerService.shutdown();
}
} | PopConsumerServiceTest |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/aot/JpaTypeFilters.java | {
"start": 1072,
"end": 1873
} | class ____ implements TypeCollector.TypeCollectorFilters {
/**
* Match for bytecode-enhanced members.
*/
private static final Predicate<Member> IS_HIBERNATE_MEMBER = member -> member.getName().startsWith("$$_hibernate");
private static final Predicate<Class<?>> CLASS_FILTER = it -> TypeUtils.type(it).isPartOf("org.hibernate",
"org.eclipse.persistence", "jakarta.persistence");
@Override
public Predicate<Class<?>> classPredicate() {
return CLASS_FILTER.negate();
}
@Override
public Predicate<Field> fieldPredicate() {
return Predicates.<Field> declaringClass(CLASS_FILTER).or(IS_HIBERNATE_MEMBER).negate();
}
@Override
public Predicate<Method> methodPredicate() {
return Predicates.<Method> declaringClass(CLASS_FILTER).or(IS_HIBERNATE_MEMBER).negate();
}
}
| JpaTypeFilters |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/defaultvalue/CountryEntity.java | {
"start": 260,
"end": 1121
} | class ____ {
private String code;
private Integer id;
private long zipcode;
private Region region;
private Continent continent;
public String getCode() {
return code;
}
public void setCode(String code) {
this.code = code;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public long getZipcode() {
return zipcode;
}
public void setZipcode(long zipcode) {
this.zipcode = zipcode;
}
public Region getRegion() {
return region;
}
public void setRegion(Region region) {
this.region = region;
}
public Continent getContinent() {
return continent;
}
public void setContinent(Continent continent) {
this.continent = continent;
}
}
| CountryEntity |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/management/MixinRequiredModelMBean.java | {
"start": 1841,
"end": 4649
} | class ____ extends RequiredModelMBean {
private static final Logger LOG = LoggerFactory.getLogger(MixinRequiredModelMBean.class);
private boolean mask;
private ModelMBeanInfo defaultMbi;
private DynamicMBean defaultObject;
public MixinRequiredModelMBean() throws MBeanException, RuntimeOperationsException {
// must have default no-arg constructor
}
public MixinRequiredModelMBean(ModelMBeanInfo mbi, boolean mask, ModelMBeanInfo defaultMbi,
DynamicMBean defaultObject) throws MBeanException, RuntimeOperationsException {
super(mbi);
this.mask = mask;
this.defaultMbi = defaultMbi;
this.defaultObject = defaultObject;
}
public boolean isMask() {
return mask;
}
@Override
public Object invoke(String opName, Object[] opArgs, String[] sig) throws MBeanException, ReflectionException {
Object answer;
if (defaultMbi != null && defaultObject != null && isDefaultOperation(opName)) {
answer = defaultObject.invoke(opName, opArgs, sig);
} else {
answer = super.invoke(opName, opArgs, sig);
}
// mask the answer if enabled and it was a String type (we cannot mask other types)
if (mask && answer instanceof String && ObjectHelper.isNotEmpty(answer) && isMaskOperation(opName)) {
answer = mask(opName, (String) answer);
}
return answer;
}
protected boolean isDefaultOperation(String opName) {
for (MBeanOperationInfo info : defaultMbi.getOperations()) {
if (info.getName().equals(opName)) {
return true;
}
}
return false;
}
protected boolean isMaskOperation(String opName) {
for (MBeanOperationInfo info : getMBeanInfo().getOperations()) {
if (info.getName().equals(opName)) {
Descriptor desc = info.getDescriptor();
if (desc != null) {
Object val = desc.getFieldValue("mask");
return "true".equals(val);
}
}
}
return false;
}
/**
* Masks the returned value from invoking the operation
*
* @param opName the operation name invoked
* @param value the current value
* @return the masked value
*/
protected String mask(String opName, String value) {
// use sanitize uri which will mask sensitive information
String answer = URISupport.sanitizeUri(value);
if (LOG.isTraceEnabled()) {
LOG.trace("Masking JMX operation: {}.{} value: {} -> {}",
getMBeanInfo().getClassName(), opName, value, answer);
}
return answer;
}
}
| MixinRequiredModelMBean |
java | apache__logging-log4j2 | log4j-perf-test/src/main/java/org/apache/logging/log4j/perf/jmh/FileAppenderThrowableBenchmark.java | {
"start": 2266,
"end": 3100
} | class ____ {
static {
// log4j2
System.setProperty("log4j2.enableThreadlocals", "true");
System.setProperty("log4j2.configurationFile", "log4j2-perf-file-throwable.xml");
// log4j 1.2
System.setProperty("log4j.configuration", "log4j12-perf-file-throwable.xml");
// logback
System.setProperty("logback.configurationFile", "logback-perf-file-throwable.xml");
}
private static final Throwable THROWABLE = getSimpleThrowable();
private static final Throwable COMPLEX_THROWABLE = getComplexThrowable();
@SuppressWarnings("unused") // Set by JMH
@Param
private LoggingConfiguration loggingConfiguration;
private static Throwable getSimpleThrowable() {
return new IllegalStateException("Test Throwable");
}
| FileAppenderThrowableBenchmark |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/aggregate/JsonArrayAggFunction.java | {
"start": 5208,
"end": 5814
} | class ____ {
public ListView<StringData> list = new ListView<>();
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final JsonArrayAggFunction.Accumulator that = (JsonArrayAggFunction.Accumulator) other;
return Objects.equals(list, that.list);
}
@Override
public int hashCode() {
return Objects.hash(list);
}
}
}
| Accumulator |
java | apache__camel | components/camel-aws/camel-aws-xray/src/main/java/org/apache/camel/component/aws/xray/decorators/internal/VmSegmentDecorator.java | {
"start": 876,
"end": 1023
} | class ____ extends AbstractInternalSegmentDecorator {
@Override
public String getComponent() {
return "vm";
}
}
| VmSegmentDecorator |
java | redisson__redisson | redisson/src/main/java/org/redisson/RedissonSortedSet.java | {
"start": 1712,
"end": 1811
} | class ____<V> extends RedissonExpirable implements RSortedSet<V> {
public static | RedissonSortedSet |
java | alibaba__nacos | client/src/main/java/com/alibaba/nacos/client/config/impl/LocalEncryptedDataKeyProcessor.java | {
"start": 1328,
"end": 6073
} | class ____ extends LocalConfigInfoProcessor {
private static final Logger LOGGER = LogUtils.logger(LocalEncryptedDataKeyProcessor.class);
private static final String FAILOVER_CHILD_1 = "encrypted-data-key";
private static final String FAILOVER_CHILD_2 = "failover";
private static final String FAILOVER_CHILD_3 = "failover-tenant";
private static final String SNAPSHOT_CHILD_1 = "encrypted-data-key";
private static final String SNAPSHOT_CHILD_2 = "snapshot";
private static final String SNAPSHOT_CHILD_3 = "snapshot-tenant";
private static final String SUFFIX = "_nacos";
/**
* Obtain the EncryptedDataKey of the disaster recovery configuration. NULL means there is no local file or an
* exception is thrown.
*/
public static String getEncryptDataKeyFailover(String envName, String dataId, String group, String tenant) {
envName = simplyEnvNameIfOverLimit(envName);
File file = getEncryptDataKeyFailoverFile(envName, dataId, group, tenant);
if (!file.exists() || !file.isFile()) {
return null;
}
try {
return readFile(file);
} catch (IOException ioe) {
LOGGER.error("[" + envName + "] get failover error, " + file, ioe);
return null;
}
}
/**
* Get the EncryptedDataKey of the locally cached file. NULL means there is no local file or an exception is
* thrown.
*/
public static String getEncryptDataKeySnapshot(String envName, String dataId, String group, String tenant) {
if (!SnapShotSwitch.getIsSnapShot()) {
return null;
}
File file = getEncryptDataKeySnapshotFile(envName, dataId, group, tenant);
if (!file.exists() || !file.isFile()) {
return null;
}
try {
return readFile(file);
} catch (IOException ioe) {
LOGGER.error("[" + envName + "] get snapshot error, " + file, ioe);
return null;
}
}
/**
* Save the snapshot of encryptDataKey. If the content is NULL, delete the snapshot.
*/
public static void saveEncryptDataKeySnapshot(String envName, String dataId, String group, String tenant,
String encryptDataKey) {
if (!SnapShotSwitch.getIsSnapShot()) {
return;
}
File file = getEncryptDataKeySnapshotFile(envName, dataId, group, tenant);
try {
if (null == encryptDataKey) {
try {
IoUtils.delete(file);
} catch (IOException ioe) {
LOGGER.error("[" + envName + "] delete snapshot error, " + file, ioe);
}
} else {
File parentFile = file.getParentFile();
if (!parentFile.exists()) {
boolean isMdOk = parentFile.mkdirs();
if (!isMdOk) {
LOGGER.error("[{}] save snapshot error", envName);
}
}
if (JvmUtil.isMultiInstance()) {
ConcurrentDiskUtil.writeFileContent(file, encryptDataKey, Constants.ENCODE);
} else {
IoUtils.writeStringToFile(file, encryptDataKey, Constants.ENCODE);
}
}
} catch (IOException ioe) {
LOGGER.error("[" + envName + "] save snapshot error, " + file, ioe);
}
}
private static File getEncryptDataKeyFailoverFile(String envName, String dataId, String group, String tenant) {
envName = simplyEnvNameIfOverLimit(envName);
File tmp = new File(LOCAL_SNAPSHOT_PATH, envName + SUFFIX);
tmp = new File(tmp, FAILOVER_CHILD_1);
if (StringUtils.isBlank(tenant)) {
tmp = new File(tmp, FAILOVER_CHILD_2);
} else {
tmp = new File(tmp, FAILOVER_CHILD_3);
tmp = new File(tmp, tenant);
}
return new File(new File(tmp, group), dataId);
}
private static File getEncryptDataKeySnapshotFile(String envName, String dataId, String group, String tenant) {
envName = simplyEnvNameIfOverLimit(envName);
File tmp = new File(LOCAL_SNAPSHOT_PATH, envName + SUFFIX);
tmp = new File(tmp, SNAPSHOT_CHILD_1);
if (StringUtils.isBlank(tenant)) {
tmp = new File(tmp, SNAPSHOT_CHILD_2);
} else {
tmp = new File(tmp, SNAPSHOT_CHILD_3);
tmp = new File(tmp, tenant);
}
return new File(new File(tmp, group), dataId);
}
}
| LocalEncryptedDataKeyProcessor |
java | apache__flink | flink-end-to-end-tests/flink-stream-sql-test/src/main/java/org/apache/flink/sql/tests/StreamSQLTestProgram.java | {
"start": 9087,
"end": 9616
} | class ____ implements BucketAssigner<Row, String> {
private static final long serialVersionUID = 987325769970523326L;
@Override
public String getBucketId(final Row element, final Context context) {
return String.valueOf(element.getField(0));
}
@Override
public SimpleVersionedSerializer<String> getSerializer() {
return SimpleVersionedStringSerializer.INSTANCE;
}
}
/** Data-generating source function. */
public static | KeyBucketAssigner |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/bugs/creation/PackagePrivateWithContextClassLoaderTest.java | {
"start": 4895,
"end": 5202
} | class ____ {
public static void attemptMock() {
PublicClass mock = mock(PublicClass.class);
mock.packagePrivateAbstractMethod();
}
}
/**
* This classloader has a parent, but doesn't always delegate to it.
*/
public static final | LoadedByCustomLoader |
java | square__retrofit | retrofit-converters/wire/src/test/java/retrofit2/converter/wire/CrashingPhone.java | {
"start": 2089,
"end": 2447
} | class ____ extends Message.Builder<CrashingPhone, Builder> {
public String number;
public Builder() {}
public Builder number(String number) {
this.number = number;
return this;
}
@Override
public CrashingPhone build() {
return new CrashingPhone(number, buildUnknownFields());
}
}
private static final | Builder |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/ref/RefTest14.java | {
"start": 1314,
"end": 1970
} | class ____ {
private String name;
private List<User> members = new ArrayList<User>();
public Group(){
}
public Group(String name){
this.name = name;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<User> getMembers() {
return members;
}
public void setMembers(List<User> members) {
this.members = members;
}
public String toString() {
return this.name;
}
}
public static | Group |
java | apache__camel | components/camel-mock/src/main/java/org/apache/camel/component/mock/AssertionClause.java | {
"start": 4966,
"end": 5403
} | class ____ implements Expression {
@Override
public <T> T evaluate(Exchange exchange, Class<T> type) {
Date answer = null;
if (currentIndex < mock.getReceivedCounter() - 1) {
answer = mock.getReceivedExchanges().get(currentIndex + 1).getProperty(Exchange.RECEIVED_TIMESTAMP, Date.class);
}
return (T) answer;
}
}
/**
* Public | NextTimestamp |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/writing/AnonymousProviderCreationExpression.java | {
"start": 1604,
"end": 2604
} | class ____
implements FrameworkInstanceCreationExpression {
private final ContributionBinding binding;
private final ComponentRequestRepresentations componentRequestRepresentations;
private final XClassName requestingClass;
@AssistedInject
AnonymousProviderCreationExpression(
@Assisted ContributionBinding binding,
ComponentRequestRepresentations componentRequestRepresentations,
ComponentImplementation componentImplementation) {
this.binding = checkNotNull(binding);
this.componentRequestRepresentations = componentRequestRepresentations;
this.requestingClass = componentImplementation.name();
}
@Override
public XCodeBlock creationExpression() {
BindingRequest instanceExpressionRequest = bindingRequest(binding.key(), RequestKind.INSTANCE);
XExpression instanceExpression =
componentRequestRepresentations.getDependencyExpression(
instanceExpressionRequest,
// Not a real | AnonymousProviderCreationExpression |
java | google__error-prone | annotations/src/main/java/com/google/errorprone/annotations/Immutable.java | {
"start": 3280,
"end": 3545
} | interface ____ {
/**
* When annotating a generic type as immutable, {@code containerOf} specifies which type
* parameters must be instantiated with immutable types for the container to be deeply immutable.
*/
String[] containerOf() default {};
}
| Immutable |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-api/src/main/java/org/apache/dubbo/rpc/proxy/InvokerInvocationHandler.java | {
"start": 1302,
"end": 3260
} | class ____ implements InvocationHandler {
private static final Logger logger = LoggerFactory.getLogger(InvokerInvocationHandler.class);
private final Invoker<?> invoker;
private final ServiceModel serviceModel;
private final String protocolServiceKey;
public InvokerInvocationHandler(Invoker<?> handler) {
this.invoker = handler;
URL url = invoker.getUrl();
this.protocolServiceKey = url.getProtocolServiceKey();
this.serviceModel = url.getServiceModel();
}
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
if (method.getDeclaringClass() == Object.class) {
return method.invoke(invoker, args);
}
String methodName = method.getName();
Class<?>[] parameterTypes = method.getParameterTypes();
if (parameterTypes.length == 0) {
if ("toString".equals(methodName)) {
return invoker.toString();
} else if ("$destroy".equals(methodName)) {
invoker.destroy();
return null;
} else if ("hashCode".equals(methodName)) {
return invoker.hashCode();
}
} else if (parameterTypes.length == 1 && "equals".equals(methodName)) {
return invoker.equals(args[0]);
}
RpcInvocation rpcInvocation = new RpcInvocation(
serviceModel,
method.getName(),
invoker.getInterface().getName(),
protocolServiceKey,
method.getParameterTypes(),
args);
if (serviceModel instanceof ConsumerModel) {
rpcInvocation.put(Constants.CONSUMER_MODEL, serviceModel);
rpcInvocation.put(Constants.METHOD_MODEL, ((ConsumerModel) serviceModel).getMethodModel(method));
}
return InvocationUtil.invoke(invoker, rpcInvocation);
}
}
| InvokerInvocationHandler |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/spi/ImmutablesBuilderProvider.java | {
"start": 1042,
"end": 4179
} | class ____ extends DefaultBuilderProvider {
private static final Pattern JAVA_JAVAX_PACKAGE = Pattern.compile( "^javax?\\..*" );
private static final String IMMUTABLE_FQN = "org.immutables.value.Value.Immutable";
@Override
protected BuilderInfo findBuilderInfo(TypeElement typeElement) {
Name name = typeElement.getQualifiedName();
if ( name.length() == 0 || JAVA_JAVAX_PACKAGE.matcher( name ).matches() ) {
return null;
}
// First look if there is a builder defined in my own type
BuilderInfo info = findBuilderInfo( typeElement, false );
if ( info != null ) {
return info;
}
// Check for a builder in the generated immutable type
BuilderInfo immutableInfo = findBuilderInfoForImmutables( typeElement );
if ( immutableInfo != null ) {
return immutableInfo;
}
return super.findBuilderInfo( typeElement.getSuperclass() );
}
protected BuilderInfo findBuilderInfoForImmutables(TypeElement typeElement) {
TypeElement immutableAnnotation = elementUtils.getTypeElement( IMMUTABLE_FQN );
if ( immutableAnnotation != null ) {
return findBuilderInfoForImmutables(
typeElement,
immutableAnnotation
);
}
return null;
}
protected BuilderInfo findBuilderInfoForImmutables(TypeElement typeElement,
TypeElement immutableAnnotation) {
for ( AnnotationMirror annotationMirror : elementUtils.getAllAnnotationMirrors( typeElement ) ) {
if ( typeUtils.isSameType( annotationMirror.getAnnotationType(), immutableAnnotation.asType() ) ) {
TypeElement immutableElement = asImmutableElement( typeElement );
if ( immutableElement != null ) {
return super.findBuilderInfo( immutableElement, false );
}
else {
// Immutables processor has not run yet. Trigger a postpone to the next round for MapStruct
throw new TypeHierarchyErroneousException( typeElement );
}
}
}
return null;
}
protected TypeElement asImmutableElement(TypeElement typeElement) {
Element enclosingElement = typeElement.getEnclosingElement();
StringBuilder builderQualifiedName = new StringBuilder( typeElement.getQualifiedName().length() + 17 );
if ( enclosingElement.getKind() == ElementKind.PACKAGE ) {
builderQualifiedName.append( ( (PackageElement) enclosingElement ).getQualifiedName().toString() );
}
else {
builderQualifiedName.append( ( (TypeElement) enclosingElement ).getQualifiedName().toString() );
}
if ( builderQualifiedName.length() > 0 ) {
builderQualifiedName.append( "." );
}
builderQualifiedName.append( "Immutable" ).append( typeElement.getSimpleName() );
return elementUtils.getTypeElement( builderQualifiedName );
}
}
| ImmutablesBuilderProvider |
java | apache__camel | components/camel-sql/src/test/java/org/apache/camel/processor/aggregate/jdbc/JdbcAggregateCompletionIntervalTest.java | {
"start": 1013,
"end": 2011
} | class ____ extends AbstractJdbcAggregationTestSupport {
@Test
public void testJdbcAggregateCompletionInterval() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:aggregated");
mock.setResultWaitTime(30 * 1000L);
mock.expectedBodiesReceived("ABCD", "E");
// wait a bit so we complete on the next poll
Thread.sleep(2000);
template.sendBodyAndHeader("direct:start", "A", "id", 123);
template.sendBodyAndHeader("direct:start", "B", "id", 123);
template.sendBodyAndHeader("direct:start", "C", "id", 123);
template.sendBodyAndHeader("direct:start", "D", "id", 123);
Thread.sleep(6000);
template.sendBodyAndHeader("direct:start", "E", "id", 123);
MockEndpoint.assertIsSatisfied(context);
// from endpoint should be preserved
assertEquals("direct://start", mock.getReceivedExchanges().get(0).getFromEndpoint().getEndpointUri());
}
}
| JdbcAggregateCompletionIntervalTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java | {
"start": 12278,
"end": 16214
} | class ____ {
private TaskType taskType;
private String inferenceEntityId;
private List<String> input;
private InputType inputType = InputType.UNSPECIFIED;
private Map<String, Object> taskSettings = Map.of();
private String query;
private Boolean returnDocuments;
private Integer topN;
private TimeValue timeout = DEFAULT_TIMEOUT;
private boolean stream = false;
private InferenceContext context;
private Builder() {}
public Builder setInferenceEntityId(String inferenceEntityId) {
this.inferenceEntityId = Objects.requireNonNull(inferenceEntityId);
return this;
}
public Builder setTaskType(TaskType taskType) {
this.taskType = taskType;
return this;
}
public Builder setInput(List<String> input) {
this.input = input;
return this;
}
public Builder setQuery(String query) {
this.query = query;
return this;
}
public Builder setReturnDocuments(Boolean returnDocuments) {
this.returnDocuments = returnDocuments;
return this;
}
public Builder setTopN(Integer topN) {
this.topN = topN;
return this;
}
public Builder setInputType(InputType inputType) {
this.inputType = inputType;
return this;
}
public Builder setInputType(String inputType) {
this.inputType = InputType.fromRestString(inputType);
return this;
}
public Builder setTaskSettings(Map<String, Object> taskSettings) {
this.taskSettings = taskSettings;
return this;
}
public Builder setInferenceTimeout(TimeValue inferenceTimeout) {
this.timeout = inferenceTimeout;
return this;
}
private Builder setInferenceTimeout(String inferenceTimeout) {
return setInferenceTimeout(TimeValue.parseTimeValue(inferenceTimeout, TIMEOUT.getPreferredName()));
}
public Builder setStream(boolean stream) {
this.stream = stream;
return this;
}
public Builder setContext(InferenceContext context) {
this.context = context;
return this;
}
public Request build() {
return new Request(
taskType,
inferenceEntityId,
query,
returnDocuments,
topN,
input,
taskSettings,
inputType,
timeout,
stream,
context
);
}
}
public String toString() {
return "InferenceAction.Request(taskType="
+ this.getTaskType()
+ ", inferenceEntityId="
+ this.getInferenceEntityId()
+ ", query="
+ this.getQuery()
+ ", returnDocuments="
+ this.getReturnDocuments()
+ ", topN="
+ this.getTopN()
+ ", input="
+ this.getInput()
+ ", taskSettings="
+ this.getTaskSettings()
+ ", inputType="
+ this.getInputType()
+ ", timeout="
+ this.getInferenceTimeout()
+ ", context="
+ this.getContext()
+ ")";
}
}
public static | Builder |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/blockloader/docvalues/DenseVectorBlockLoader.java | {
"start": 1343,
"end": 3915
} | class ____<B extends BlockLoader.Builder> extends BlockDocValuesReader.DocValuesBlockLoader {
private final String fieldName;
private final int dimensions;
private final DenseVectorFieldMapper.DenseVectorFieldType fieldType;
private final DenseVectorBlockLoaderProcessor<B> processor;
public DenseVectorBlockLoader(
String fieldName,
int dimensions,
DenseVectorFieldMapper.DenseVectorFieldType fieldType,
DenseVectorBlockLoaderProcessor<B> processor
) {
this.fieldName = fieldName;
this.dimensions = dimensions;
this.fieldType = fieldType;
this.processor = processor;
}
@Override
public Builder builder(BlockFactory factory, int expectedCount) {
return processor.createBuilder(factory, expectedCount, dimensions);
}
@Override
public AllReader reader(LeafReaderContext context) throws IOException {
switch (fieldType.getElementType()) {
case FLOAT, BFLOAT16 -> {
FloatVectorValues floatVectorValues = context.reader().getFloatVectorValues(fieldName);
if (floatVectorValues != null) {
if (fieldType.isNormalized()) {
NumericDocValues magnitudeDocValues = context.reader()
.getNumericDocValues(fieldType.name() + COSINE_MAGNITUDE_FIELD_SUFFIX);
return new FloatDenseVectorNormalizedValuesBlockReader<>(
floatVectorValues,
dimensions,
processor,
magnitudeDocValues
);
}
return new FloatDenseVectorValuesBlockReader<>(floatVectorValues, dimensions, processor);
}
}
case BYTE -> {
ByteVectorValues byteVectorValues = context.reader().getByteVectorValues(fieldName);
if (byteVectorValues != null) {
return new ByteDenseVectorValuesBlockReader<>(byteVectorValues, dimensions, processor);
}
}
case BIT -> {
ByteVectorValues byteVectorValues = context.reader().getByteVectorValues(fieldName);
if (byteVectorValues != null) {
return new BitDenseVectorValuesBlockReader<>(byteVectorValues, dimensions, processor);
}
}
}
return new ConstantNullsReader();
}
/**
* Abstract base | DenseVectorBlockLoader |
java | apache__flink | flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/catalog/FunctionCatalogTest.java | {
"start": 32630,
"end": 32807
} | class ____ extends ScalarFunction {
// missing implementation
}
/** Testing table function. */
@SuppressWarnings("unused")
public static | InvalidTestFunction |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/creation/settings/CreationSettings.java | {
"start": 1440,
"end": 6239
} | class ____ also need concurrency-safe implementation. However, no
// issue was reported about it.
// If we do it, we need to understand usage patterns and choose the right concurrent
// implementation.
protected List<StubbingLookupListener> stubbingLookupListeners = new CopyOnWriteArrayList<>();
protected List<VerificationStartedListener> verificationStartedListeners = new LinkedList<>();
protected boolean stubOnly;
protected boolean stripAnnotations;
private boolean useConstructor;
private Object outerClassInstance;
private Object[] constructorArgs;
protected Strictness strictness = null;
protected String mockMaker;
protected MockType mockType;
public CreationSettings() {}
@SuppressWarnings("unchecked")
public CreationSettings(CreationSettings copy) {
// TODO can we have a reflection test here? We had a couple of bugs here in the past.
this.typeToMock = copy.typeToMock;
this.genericTypeToMock = copy.genericTypeToMock;
this.extraInterfaces = copy.extraInterfaces;
this.name = copy.name;
this.spiedInstance = copy.spiedInstance;
this.defaultAnswer = copy.defaultAnswer;
this.mockName = copy.mockName;
this.serializableMode = copy.serializableMode;
this.invocationListeners = copy.invocationListeners;
this.stubbingLookupListeners = copy.stubbingLookupListeners;
this.verificationStartedListeners = copy.verificationStartedListeners;
this.stubOnly = copy.stubOnly;
this.useConstructor = copy.isUsingConstructor();
this.outerClassInstance = copy.getOuterClassInstance();
this.constructorArgs = copy.getConstructorArgs();
this.strictness = copy.strictness;
this.stripAnnotations = copy.stripAnnotations;
this.mockMaker = copy.mockMaker;
this.mockType = copy.mockType;
}
@Override
public Class<T> getTypeToMock() {
return typeToMock;
}
public CreationSettings<T> setTypeToMock(Class<T> typeToMock) {
this.typeToMock = typeToMock;
return this;
}
public CreationSettings<T> setGenericTypeToMock(Type genericTypeToMock) {
this.genericTypeToMock = genericTypeToMock;
return this;
}
@Override
public Set<Class<?>> getExtraInterfaces() {
return extraInterfaces;
}
public CreationSettings<T> setExtraInterfaces(Set<Class<?>> extraInterfaces) {
this.extraInterfaces = extraInterfaces;
return this;
}
public String getName() {
return name;
}
@Override
public Object getSpiedInstance() {
return spiedInstance;
}
@Override
public Answer<Object> getDefaultAnswer() {
return defaultAnswer;
}
@Override
public MockName getMockName() {
return mockName;
}
public CreationSettings<T> setMockName(MockName mockName) {
this.mockName = mockName;
return this;
}
@Override
public boolean isSerializable() {
return serializableMode != SerializableMode.NONE;
}
public CreationSettings<T> setSerializableMode(SerializableMode serializableMode) {
this.serializableMode = serializableMode;
return this;
}
@Override
public SerializableMode getSerializableMode() {
return serializableMode;
}
@Override
public List<InvocationListener> getInvocationListeners() {
return invocationListeners;
}
@Override
public List<VerificationStartedListener> getVerificationStartedListeners() {
return verificationStartedListeners;
}
@Override
public List<StubbingLookupListener> getStubbingLookupListeners() {
return stubbingLookupListeners;
}
@Override
public boolean isUsingConstructor() {
return useConstructor;
}
@Override
public boolean isStripAnnotations() {
return stripAnnotations;
}
@Override
public Object[] getConstructorArgs() {
return constructorArgs;
}
@Override
public Object getOuterClassInstance() {
return outerClassInstance;
}
@Override
public boolean isStubOnly() {
return stubOnly;
}
@Override
public boolean isLenient() {
return strictness == Strictness.LENIENT;
}
@Override
public Strictness getStrictness() {
return strictness;
}
@Override
public String getMockMaker() {
return mockMaker;
}
@Override
public Type getGenericTypeToMock() {
return genericTypeToMock;
}
@Override
public MockType getMockType() {
return mockType;
}
public void setMockType(MockType mockType) {
this.mockType = mockType;
}
}
| may |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/MappableBlockLoader.java | {
"start": 1585,
"end": 6579
} | class ____ {
/**
* Initialize a specific MappableBlockLoader.
*/
abstract CacheStats initialize(DNConf dnConf) throws IOException;
/**
* Load the block.
*
* Map the block, and then verify its checksum.
*
* @param length The current length of the block.
* @param blockIn The block input stream. Should be positioned at the
* start. The caller must close this.
* @param metaIn The meta file input stream. Should be positioned at
* the start. The caller must close this.
* @param blockFileName The block file name, for logging purposes.
* @param key The extended block ID.
*
* @throws IOException If mapping block to cache region fails or checksum
* fails.
*
* @return The Mappable block.
*/
abstract MappableBlock load(long length, FileInputStream blockIn,
FileInputStream metaIn, String blockFileName, ExtendedBlockId key)
throws IOException;
/**
* Try to reserve some given bytes.
*
* @param key The ExtendedBlockId for a block.
*
* @param bytesCount The number of bytes to add.
*
* @return The new number of usedBytes if we succeeded;
* -1 if we failed.
*/
abstract long reserve(ExtendedBlockId key, long bytesCount);
/**
* Release some bytes that we're using.
*
* @param key The ExtendedBlockId for a block.
*
* @param bytesCount The number of bytes to release.
*
* @return The new number of usedBytes.
*/
abstract long release(ExtendedBlockId key, long bytesCount);
/**
* Get the approximate amount of cache space used.
*/
abstract long getCacheUsed();
/**
* Get the maximum amount of cache bytes.
*/
abstract long getCacheCapacity();
/**
* Check whether the cache is non-volatile.
*/
abstract boolean isTransientCache();
/**
* Check whether this is a native pmem cache loader.
*/
abstract boolean isNativeLoader();
/**
* Get mappableBlock recovered from persistent memory.
*/
abstract MappableBlock getRecoveredMappableBlock(
File cacheFile, String bpid, byte volumeIndex) throws IOException;
/**
* Clean up cache, can be used during DataNode shutdown.
*/
void shutdown() {
// Do nothing.
}
/**
* Verifies the block's checksum. This is an I/O intensive operation.
*/
protected void verifyChecksum(long length, FileInputStream metaIn,
FileChannel blockChannel, String blockFileName) throws IOException {
// Verify the checksum from the block's meta file
// Get the DataChecksum from the meta file header
BlockMetadataHeader header =
BlockMetadataHeader.readHeader(new DataInputStream(
new BufferedInputStream(metaIn, BlockMetadataHeader
.getHeaderSize())));
try (FileChannel metaChannel = metaIn.getChannel()) {
if (metaChannel == null) {
throw new IOException(
"Block InputStream meta file has no FileChannel.");
}
DataChecksum checksum = header.getChecksum();
final int bytesPerChecksum = checksum.getBytesPerChecksum();
final int checksumSize = checksum.getChecksumSize();
final int numChunks = (8 * 1024 * 1024) / bytesPerChecksum;
ByteBuffer blockBuf = ByteBuffer.allocate(numChunks * bytesPerChecksum);
ByteBuffer checksumBuf = ByteBuffer.allocate(numChunks * checksumSize);
// Verify the checksum
int bytesVerified = 0;
while (bytesVerified < length) {
Preconditions.checkState(bytesVerified % bytesPerChecksum == 0,
"Unexpected partial chunk before EOF");
assert bytesVerified % bytesPerChecksum == 0;
int bytesRead = fillBuffer(blockChannel, blockBuf);
if (bytesRead == -1) {
throw new IOException("checksum verification failed: premature EOF");
}
blockBuf.flip();
// Number of read chunks, including partial chunk at end
int chunks = (bytesRead + bytesPerChecksum - 1) / bytesPerChecksum;
checksumBuf.limit(chunks * checksumSize);
fillBuffer(metaChannel, checksumBuf);
checksumBuf.flip();
checksum.verifyChunkedSums(blockBuf, checksumBuf, blockFileName,
bytesVerified);
// Success
bytesVerified += bytesRead;
blockBuf.clear();
checksumBuf.clear();
}
}
}
/**
* Reads bytes into a buffer until EOF or the buffer's limit is reached.
*/
protected int fillBuffer(FileChannel channel, ByteBuffer buf)
throws IOException {
int bytesRead = channel.read(buf);
if (bytesRead < 0) {
//EOF
return bytesRead;
}
while (buf.remaining() > 0) {
int n = channel.read(buf);
if (n < 0) {
//EOF
return bytesRead;
}
bytesRead += n;
}
return bytesRead;
}
}
| MappableBlockLoader |
java | google__dagger | javatests/dagger/hilt/processor/internal/aliasof/AliasOfProcessorTest.java | {
"start": 1489,
"end": 1840
} | interface ____{}");
Source root =
HiltCompilerTests.javaSource(
"test.MyApp",
"package test;",
"",
"import android.app.Application;",
"import dagger.hilt.android.HiltAndroidApp;",
"",
"@HiltAndroidApp(Application.class)",
"public final | AliasScope |
java | apache__flink | flink-architecture-tests/flink-architecture-tests-test/src/main/java/org/apache/flink/architecture/TestCodeArchitectureTestBase.java | {
"start": 1062,
"end": 1214
} | class ____ be extended for common
* tests that are required for all submodules.
*
* <p>Architectural tests built in submodules should include this | should |
java | apache__camel | components/camel-netty/src/test/java/org/apache/camel/component/netty/NettyUDPByteArrayProviderTest.java | {
"start": 3647,
"end": 3967
} | class ____ extends MessageToMessageDecoder<DatagramPacket> {
@Override
protected void decode(
ChannelHandlerContext channelHandlerContext, DatagramPacket datagramPacket, List<Object> objects) {
objects.add(datagramPacket.content().retain());
}
}
public | UdpHandler |
java | elastic__elasticsearch | x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/transport/SecurityServerTransportServiceTests.java | {
"start": 540,
"end": 1445
} | class ____ extends SecurityIntegTestCase {
public void testSecurityServerTransportServiceWrapsAllHandlers() {
for (TransportService transportService : internalCluster().getInstances(TransportService.class)) {
@SuppressWarnings("rawtypes")
RequestHandlerRegistry handler = transportService.transport.getRequestHandlers()
.getHandler(TransportService.HANDSHAKE_ACTION_NAME);
assertEquals(
"handler not wrapped by "
+ SecurityServerTransportInterceptor.ProfileSecuredRequestHandler.class
+ "; do all the handler registration methods have overrides?",
handler.toString(),
"ProfileSecuredRequestHandler{action='" + handler.getAction() + "', forceExecution=" + handler.isForceExecution() + "}"
);
}
}
}
| SecurityServerTransportServiceTests |
java | apache__camel | components/camel-debezium/camel-debezium-postgres/src/generated/java/org/apache/camel/component/debezium/postgres/configuration/PostgresConnectorEmbeddedDebeziumConfiguration.java | {
"start": 473,
"end": 19992
} | class ____
extends
EmbeddedDebeziumConfiguration {
private static final String LABEL_NAME = "consumer,postgres";
@UriParam(label = LABEL_NAME, defaultValue = "none")
private String snapshotLockingMode = "none";
@UriParam(label = LABEL_NAME)
private String messageKeyColumns;
@UriParam(label = LABEL_NAME, defaultValue = "io.debezium.pipeline.txmetadata.DefaultTransactionMetadataFactory")
private String transactionMetadataFactory = "io.debezium.pipeline.txmetadata.DefaultTransactionMetadataFactory";
@UriParam(label = LABEL_NAME)
private String customMetricTags;
@UriParam(label = LABEL_NAME, defaultValue = "dbz_publication")
private String publicationName = "dbz_publication";
@UriParam(label = LABEL_NAME, defaultValue = "source")
private String signalEnabledChannels = "source";
@UriParam(label = LABEL_NAME, defaultValue = "6")
private int slotMaxRetries = 6;
@UriParam(label = LABEL_NAME, defaultValue = "columns_diff")
private String schemaRefreshMode = "columns_diff";
@UriParam(label = LABEL_NAME, defaultValue = "prefer")
private String databaseSslmode = "prefer";
@UriParam(label = LABEL_NAME)
private String signalDataCollection;
@UriParam(label = LABEL_NAME)
private String databaseInitialStatements;
@UriParam(label = LABEL_NAME)
private String converters;
@UriParam(label = LABEL_NAME)
private String databaseSslfactory;
@UriParam(label = LABEL_NAME)
private int snapshotFetchSize;
@UriParam(label = LABEL_NAME)
private String openlineageIntegrationJobTags;
@UriParam(label = LABEL_NAME, defaultValue = "10s", javaType = "java.time.Duration")
private long snapshotLockTimeoutMs = 10000;
@UriParam(label = LABEL_NAME)
private String databaseDbname;
@UriParam(label = LABEL_NAME)
private String databaseSslkey;
@UriParam(label = LABEL_NAME, defaultValue = "disabled")
private String snapshotTablesOrderByRowCount = "disabled";
@UriParam(label = LABEL_NAME)
private String snapshotSelectStatementOverrides;
@UriParam(label = LABEL_NAME)
private String databaseSslpassword;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean slotFailover = false;
@UriParam(label = LABEL_NAME)
private String tableExcludeList;
@UriParam(label = LABEL_NAME)
private String databaseSslrootcert;
@UriParam(label = LABEL_NAME, defaultValue = "2048")
private int maxBatchSize = 2048;
@UriParam(label = LABEL_NAME, defaultValue = "io.debezium.schema.SchemaTopicNamingStrategy")
private String topicNamingStrategy = "io.debezium.schema.SchemaTopicNamingStrategy";
@UriParam(label = LABEL_NAME, defaultValue = "initial")
private String snapshotMode = "initial";
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean snapshotModeConfigurationBasedSnapshotData = false;
@UriParam(label = LABEL_NAME, defaultValue = "debezium")
private String slotName = "debezium";
@UriParam(label = LABEL_NAME, defaultValue = "1024")
private int incrementalSnapshotChunkSize = 1024;
@UriParam(label = LABEL_NAME)
private String openlineageIntegrationJobOwners;
@UriParam(label = LABEL_NAME, defaultValue = "./openlineage.yml")
private String openlineageIntegrationConfigFilePath = "./openlineage.yml";
@UriParam(label = LABEL_NAME, defaultValue = "10s", javaType = "java.time.Duration")
private long retriableRestartConnectorWaitMs = 10000;
@UriParam(label = LABEL_NAME, defaultValue = "0ms", javaType = "java.time.Duration")
private long snapshotDelayMs = 0;
@UriParam(label = LABEL_NAME, defaultValue = "4s", javaType = "java.time.Duration")
private long executorShutdownTimeoutMs = 4000;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean snapshotModeConfigurationBasedSnapshotOnDataError = false;
@UriParam(label = LABEL_NAME)
private String schemaHistoryInternalFileFilename;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean tombstonesOnDelete = false;
@UriParam(label = LABEL_NAME, defaultValue = "precise")
private String decimalHandlingMode = "precise";
@UriParam(label = LABEL_NAME, defaultValue = "bytes")
private String binaryHandlingMode = "bytes";
@UriParam(label = LABEL_NAME)
private String snapshotQueryModeCustomName;
@UriParam(label = LABEL_NAME)
private String openlineageIntegrationDatasetKafkaBootstrapServers;
@UriParam(label = LABEL_NAME, defaultValue = "true")
private boolean tableIgnoreBuiltin = true;
@UriParam(label = LABEL_NAME)
private String schemaExcludeList;
@UriParam(label = LABEL_NAME)
private String snapshotIncludeCollectionList;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean snapshotModeConfigurationBasedStartStream = false;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean slotDropOnStop = false;
@UriParam(label = LABEL_NAME, defaultValue = "5s", javaType = "java.time.Duration")
private long signalPollIntervalMs = 5000;
@UriParam(label = LABEL_NAME)
private String notificationEnabledChannels;
@UriParam(label = LABEL_NAME, defaultValue = "fail")
private String eventProcessingFailureHandlingMode = "fail";
@UriParam(label = LABEL_NAME, defaultValue = "serializable")
private String snapshotIsolationMode = "serializable";
@UriParam(label = LABEL_NAME, defaultValue = "1")
private int snapshotMaxThreads = 1;
@UriParam(label = LABEL_NAME)
private String notificationSinkTopicName;
@UriParam(label = LABEL_NAME)
private String snapshotModeCustomName;
@UriParam(label = LABEL_NAME, defaultValue = "none")
private String schemaNameAdjustmentMode = "none";
@UriParam(label = LABEL_NAME)
private String tableIncludeList;
@UriParam(label = LABEL_NAME)
private String slotStreamParams;
@UriParam(label = LABEL_NAME, defaultValue = "0ms", javaType = "java.time.Duration")
private long streamingDelayMs = 0;
@UriParam(label = LABEL_NAME)
private String openlineageIntegrationJobNamespace;
@UriParam(label = LABEL_NAME, defaultValue = "10m", javaType = "java.time.Duration")
private int databaseQueryTimeoutMs = 600000;
@UriParam(label = LABEL_NAME, defaultValue = "0")
private int queryFetchSize = 0;
@UriParam(label = LABEL_NAME)
private String schemaIncludeList;
@UriParam(label = LABEL_NAME, defaultValue = "30s", javaType = "java.time.Duration")
private long lsnFlushTimeoutMs = 30000;
@UriParam(label = LABEL_NAME, defaultValue = "__debezium_unavailable_value")
private String unavailableValuePlaceholder = "__debezium_unavailable_value";
@UriParam(label = LABEL_NAME)
private String heartbeatActionQuery;
@UriParam(label = LABEL_NAME)
private String replicaIdentityAutosetValues;
@UriParam(label = LABEL_NAME)
private String databaseSslcert;
@UriParam(label = LABEL_NAME, defaultValue = "500ms", javaType = "java.time.Duration")
private long pollIntervalMs = 500;
@UriParam(label = LABEL_NAME, defaultValue = "0")
private int guardrailCollectionsMax = 0;
@UriParam(label = LABEL_NAME, defaultValue = "numeric")
private String intervalHandlingMode = "numeric";
@UriParam(label = LABEL_NAME, defaultValue = "__debezium-heartbeat")
private String heartbeatTopicsPrefix = "__debezium-heartbeat";
@UriParam(label = LABEL_NAME, defaultValue = "10s", javaType = "java.time.Duration")
private int statusUpdateIntervalMs = 10000;
@UriParam(label = LABEL_NAME)
private String databaseUser;
@UriParam(label = LABEL_NAME)
private String datatypePropagateSourceType;
@UriParam(label = LABEL_NAME, defaultValue = "INSERT_INSERT")
private String incrementalSnapshotWatermarkingStrategy = "INSERT_INSERT";
@UriParam(label = LABEL_NAME, defaultValue = "0ms", javaType = "java.time.Duration")
private int heartbeatIntervalMs = 0;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean snapshotModeConfigurationBasedSnapshotOnSchemaError = false;
@UriParam(label = LABEL_NAME)
private String columnIncludeList;
@UriParam(label = LABEL_NAME, defaultValue = "decoderbufs")
private String pluginName = "decoderbufs";
@UriParam(label = LABEL_NAME)
private String columnPropagateSourceType;
@UriParam(label = LABEL_NAME, defaultValue = "-1")
private int errorsMaxRetries = -1;
@UriParam(label = LABEL_NAME)
@Metadata(required = true)
private String databasePassword;
@UriParam(label = LABEL_NAME, defaultValue = "t")
private String skippedOperations = "t";
@UriParam(label = LABEL_NAME, defaultValue = "Debezium change data capture job")
private String openlineageIntegrationJobDescription = "Debezium change data capture job";
@UriParam(label = LABEL_NAME)
private String messagePrefixIncludeList;
@UriParam(label = LABEL_NAME, defaultValue = "true")
private boolean extendedHeadersEnabled = true;
@UriParam(label = LABEL_NAME, defaultValue = "8192")
private int maxQueueSize = 8192;
@UriParam(label = LABEL_NAME, defaultValue = "warn")
private String guardrailCollectionsLimitAction = "warn";
@UriParam(label = LABEL_NAME, defaultValue = "json")
private String hstoreHandlingMode = "json";
@UriParam(label = LABEL_NAME)
private String snapshotLockingModeCustomName;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean provideTransactionMetadata = false;
@UriParam(label = LABEL_NAME, defaultValue = "select_all")
private String snapshotQueryMode = "select_all";
@UriParam(label = LABEL_NAME)
@Metadata(required = true)
private String topicPrefix;
@UriParam(label = LABEL_NAME, defaultValue = "10s", javaType = "java.time.Duration")
private long slotRetryDelayMs = 10000;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean includeSchemaComments = false;
@UriParam(label = LABEL_NAME, defaultValue = "io.debezium.connector.postgresql.PostgresSourceInfoStructMaker")
private String sourceinfoStructMaker = "io.debezium.connector.postgresql.PostgresSourceInfoStructMaker";
@UriParam(label = LABEL_NAME, defaultValue = "true")
private boolean flushLsnSource = true;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean openlineageIntegrationEnabled = false;
@UriParam(label = LABEL_NAME, defaultValue = "true")
private boolean databaseTcpkeepalive = true;
@UriParam(label = LABEL_NAME, defaultValue = "all_tables")
private String publicationAutocreateMode = "all_tables";
@UriParam(label = LABEL_NAME, defaultValue = "0")
private long maxQueueSizeInBytes = 0;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean publishViaPartitionRoot = false;
@UriParam(label = LABEL_NAME, defaultValue = "0ms", javaType = "java.time.Duration")
private long xminFetchIntervalMs = 0;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean snapshotModeConfigurationBasedSnapshotSchema = false;
@UriParam(label = LABEL_NAME, defaultValue = "adaptive")
private String timePrecisionMode = "adaptive";
@UriParam(label = LABEL_NAME)
private String messagePrefixExcludeList;
@UriParam(label = LABEL_NAME)
private String postProcessors;
@UriParam(label = LABEL_NAME, defaultValue = "fail")
private String lsnFlushTimeoutAction = "fail";
@UriParam(label = LABEL_NAME, defaultValue = "5432")
private int databasePort = 5432;
@UriParam(label = LABEL_NAME)
private String columnExcludeList;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean includeUnknownDatatypes = false;
@UriParam(label = LABEL_NAME)
private String databaseHostname;
@UriParam(label = LABEL_NAME, defaultValue = "1m", javaType = "java.time.Duration")
private long connectionValidationTimeoutMs = 60000;
/**
* Controls how the connector holds locks on tables while performing the
* schema snapshot. The 'shared' which means the connector will hold a table
* lock that prevents exclusive table access for just the initial portion of
* the snapshot while the database schemas and other metadata are being
* read. The remaining work in a snapshot involves selecting all rows from
* each table, and this is done using a flashback query that requires no
* locks. However, in some cases it may be desirable to avoid locks entirely
* which can be done by specifying 'none'. This mode is only safe to use if
* no schema changes are happening while the snapshot is taken.
*/
public void setSnapshotLockingMode(String snapshotLockingMode) {
this.snapshotLockingMode = snapshotLockingMode;
}
public String getSnapshotLockingMode() {
return snapshotLockingMode;
}
/**
* A semicolon-separated list of expressions that match fully-qualified
* tables and column(s) to be used as message key. Each expression must
* match the pattern '<fully-qualified table name>:<key columns>', where the
* table names could be defined as (DB_NAME.TABLE_NAME) or
* (SCHEMA_NAME.TABLE_NAME), depending on the specific connector, and the
* key columns are a comma-separated list of columns representing the custom
* key. For any table without an explicit key configuration the table's
* primary key column(s) will be used as message key. Example:
* dbserver1.inventory.orderlines:orderId,orderLineId;dbserver1.inventory.orders:id
*/
public void setMessageKeyColumns(String messageKeyColumns) {
this.messageKeyColumns = messageKeyColumns;
}
public String getMessageKeyColumns() {
return messageKeyColumns;
}
/**
* Class to make transaction context & transaction struct/schemas
*/
public void setTransactionMetadataFactory(String transactionMetadataFactory) {
this.transactionMetadataFactory = transactionMetadataFactory;
}
public String getTransactionMetadataFactory() {
return transactionMetadataFactory;
}
/**
* The custom metric tags will accept key-value pairs to customize the MBean
* object name which should be appended the end of regular name, each key
* would represent a tag for the MBean object name, and the corresponding
* value would be the value of that tag the key is. For example: k1=v1,k2=v2
*/
public void setCustomMetricTags(String customMetricTags) {
this.customMetricTags = customMetricTags;
}
public String getCustomMetricTags() {
return customMetricTags;
}
/**
* The name of the Postgres 10+ publication used for streaming changes from
* a plugin. Defaults to 'dbz_publication'
*/
public void setPublicationName(String publicationName) {
this.publicationName = publicationName;
}
public String getPublicationName() {
return publicationName;
}
/**
* List of channels names that are enabled. Source channel is enabled by
* default
*/
public void setSignalEnabledChannels(String signalEnabledChannels) {
this.signalEnabledChannels = signalEnabledChannels;
}
public String getSignalEnabledChannels() {
return signalEnabledChannels;
}
/**
* How many times to retry connecting to a replication slot when an attempt
* fails.
*/
public void setSlotMaxRetries(int slotMaxRetries) {
this.slotMaxRetries = slotMaxRetries;
}
public int getSlotMaxRetries() {
return slotMaxRetries;
}
/**
* Specify the conditions that trigger a refresh of the in-memory schema for
* a table. 'columns_diff' (the default) is the safest mode, ensuring the
* in-memory schema stays in-sync with the database table's schema at all
* times. 'columns_diff_exclude_unchanged_toast' instructs the connector to
* refresh the in-memory schema cache if there is a discrepancy between it
* and the schema derived from the incoming message, unless unchanged
* TOASTable data fully accounts for the discrepancy. This setting can
* improve connector performance significantly if there are
* frequently-updated tables that have TOASTed data that are rarely part of
* these updates. However, it is possible for the in-memory schema to become
* outdated if TOASTable columns are dropped from the table.
*/
public void setSchemaRefreshMode(String schemaRefreshMode) {
this.schemaRefreshMode = schemaRefreshMode;
}
public String getSchemaRefreshMode() {
return schemaRefreshMode;
}
/**
* Whether to use an encrypted connection to Postgres. Options include:
* 'disable' (the default) to use an unencrypted connection; 'allow' to try
* and use an unencrypted connection first and, failing that, a secure
* (encrypted) connection; 'prefer' (the default) to try and use a secure
* (encrypted) connection first and, failing that, an unencrypted
* connection; 'require' to use a secure (encrypted) connection, and fail if
* one cannot be established; 'verify-ca' like 'required' but additionally
* verify the server TLS certificate against the configured Certificate
* Authority (CA) certificates, or fail if no valid matching CA certificates
* are found; or 'verify-full' like 'verify-ca' but additionally verify that
* the server certificate matches the host to which the connection is
* attempted.
*/
public void setDatabaseSslmode(String databaseSslmode) {
this.databaseSslmode = databaseSslmode;
}
public String getDatabaseSslmode() {
return databaseSslmode;
}
/**
* The name of the data collection that is used to send signals/commands to
* Debezium. Signaling is disabled when not set.
*/
public void setSignalDataCollection(String signalDataCollection) {
this.signalDataCollection = signalDataCollection;
}
public String getSignalDataCollection() {
return signalDataCollection;
}
/**
* A semicolon separated list of SQL statements to be executed when a JDBC
* connection to the database is established. Note that the connector may
* establish JDBC connections at its own discretion, so this should
* typically be used for configuration of session parameters only, but not
* for executing DML statements. Use doubled semicolon (';;') to use a
* semicolon as a character and not as a delimiter.
*/
public void setDatabaseInitialStatements(String databaseInitialStatements) {
this.databaseInitialStatements = databaseInitialStatements;
}
public String getDatabaseInitialStatements() {
return databaseInitialStatements;
}
/**
* Optional list of custom converters that would be used instead of default
* ones. The converters are defined using '<converter.prefix>.type' config
* option and configured using options '<converter.prefix>.<option>'
*/
public void setConverters(String converters) {
this.converters = converters;
}
public String getConverters() {
return converters;
}
/**
* A name of | PostgresConnectorEmbeddedDebeziumConfiguration |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/ott/OneTimeTokenLoginConfigurerTests.java | {
"start": 4060,
"end": 10642
} | class ____ {
public SpringTestContext spring = new SpringTestContext(this);
@Autowired(required = false)
MockMvc mvc;
@Autowired(required = false)
private GenerateOneTimeTokenRequestResolver resolver;
@Autowired(required = false)
private OneTimeTokenService tokenService;
@Autowired(required = false)
private OneTimeTokenGenerationSuccessHandler tokenGenerationSuccessHandler;
@Test
void oneTimeTokenWhenCorrectTokenThenCanAuthenticate() throws Exception {
this.spring.register(OneTimeTokenDefaultConfig.class).autowire();
this.mvc.perform(post("/ott/generate").param("username", "user").with(csrf()))
.andExpectAll(status().isFound(), redirectedUrl("/login/ott"));
String token = getLastToken().getTokenValue();
this.mvc.perform(post("/login/ott").param("token", token).with(csrf()))
.andExpectAll(status().isFound(), redirectedUrl("/"), authenticated());
}
@Test
void oneTimeTokenWhenDifferentAuthenticationUrlsThenCanAuthenticate() throws Exception {
this.spring.register(OneTimeTokenDifferentUrlsConfig.class).autowire();
this.mvc.perform(post("/generateurl").param("username", "user").with(csrf()))
.andExpectAll(status().isFound(), redirectedUrl("/redirected"));
String token = getLastToken().getTokenValue();
this.mvc.perform(post("/loginprocessingurl").param("token", token).with(csrf()))
.andExpectAll(status().isFound(), redirectedUrl("/authenticated"), authenticated());
}
@Test
void oneTimeTokenWhenCorrectTokenUsedTwiceThenSecondTimeFails() throws Exception {
this.spring.register(OneTimeTokenDefaultConfig.class).autowire();
this.mvc.perform(post("/ott/generate").param("username", "user").with(csrf()))
.andExpectAll(status().isFound(), redirectedUrl("/login/ott"));
String token = getLastToken().getTokenValue();
this.mvc.perform(post("/login/ott").param("token", token).with(csrf()))
.andExpectAll(status().isFound(), redirectedUrl("/"), authenticated());
this.mvc.perform(post("/login/ott").param("token", token).with(csrf()))
.andExpectAll(status().isFound(), redirectedUrl("/login?error"), unauthenticated());
}
@Test
void oneTimeTokenWhenWrongTokenThenAuthenticationFail() throws Exception {
this.spring.register(OneTimeTokenDefaultConfig.class).autowire();
this.mvc.perform(post("/ott/generate").param("username", "user").with(csrf()))
.andExpectAll(status().isFound(), redirectedUrl("/login/ott"));
String token = "wrong";
this.mvc.perform(post("/login/ott").param("token", token).with(csrf()))
.andExpectAll(status().isFound(), redirectedUrl("/login?error"), unauthenticated());
}
@Test
void oneTimeTokenWhenConfiguredThenServesCss() throws Exception {
this.spring.register(OneTimeTokenDefaultConfig.class).autowire();
this.mvc.perform(get("/default-ui.css"))
.andExpect(status().isOk())
.andExpect(content().string(Matchers.containsString("body {")));
}
@Test
void oneTimeTokenWhenConfiguredThenRendersRequestTokenForm() throws Exception {
this.spring.register(OneTimeTokenDefaultConfig.class).autowire();
CsrfToken csrfToken = new DefaultCsrfToken("X-CSRF-TOKEN", "_csrf", "BaseSpringSpec_CSRFTOKEN");
String csrfAttributeName = HttpSessionCsrfTokenRepository.class.getName().concat(".CSRF_TOKEN");
//@formatter:off
this.mvc.perform(get("/login").sessionAttr(csrfAttributeName, csrfToken))
.andExpect((result) -> {
CsrfToken token = (CsrfToken) result.getRequest().getAttribute(CsrfToken.class.getName());
assertThat(result.getResponse().getContentAsString()).isEqualTo(
"""
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta name="description" content="">
<meta name="author" content="">
<title>Please sign in</title>
<link href="/default-ui.css" rel="stylesheet" />
</head>
<body>
<div class="content">
<form id="ott-form" class="login-form" method="post" action="/ott/generate">
<h2>Request a One-Time Token</h2>
<p>
<label for="ott-username" class="screenreader">Username</label>
<input type="text" id="ott-username" name="username" placeholder="Username" required>
</p>
<input name="_csrf" type="hidden" value="%s" />
<button class="primary" type="submit" form="ott-form">Send Token</button>
</form>
</div>
</body>
</html>""".formatted(token.getToken(), token.getToken()));
});
//@formatter:on
}
@Test
void oneTimeTokenWhenLoginPageConfiguredThenRedirects() throws Exception {
this.spring.register(OneTimeTokenLoginPageConfig.class).autowire();
this.mvc.perform(get("/login")).andExpect(status().isFound()).andExpect(redirectedUrl("/custom-login"));
}
@Test
void oneTimeTokenWhenNoTokenGenerationSuccessHandlerThenException() {
assertThatException()
.isThrownBy(() -> this.spring.register(OneTimeTokenNoGeneratedOttHandlerConfig.class).autowire())
.havingRootCause()
.isInstanceOf(IllegalStateException.class)
.withMessage("""
A OneTimeTokenGenerationSuccessHandler is required to enable oneTimeTokenLogin().
Please provide it as a bean or pass it to the oneTimeTokenLogin() DSL.
""");
}
@Test
void oneTimeTokenWhenCustomTokenExpirationTimeSetThenAuthenticate() throws Exception {
this.spring.register(OneTimeTokenConfigWithCustomImpls.class).autowire();
GenerateOneTimeTokenRequest expectedGenerateRequest = new GenerateOneTimeTokenRequest("username-123",
Duration.ofMinutes(10));
OneTimeToken ott = new DefaultOneTimeToken("token-123", expectedGenerateRequest.getUsername(),
Instant.now().plus(expectedGenerateRequest.getExpiresIn()));
given(this.resolver.resolve(any())).willReturn(expectedGenerateRequest);
given(this.tokenService.generate(expectedGenerateRequest)).willReturn(ott);
this.mvc.perform(post("/ott/generate").param("username", "user").with(csrf()));
verify(this.resolver).resolve(any());
verify(this.tokenService).generate(expectedGenerateRequest);
verify(this.tokenGenerationSuccessHandler).handle(any(), any(), eq(ott));
}
private OneTimeToken getLastToken() {
OneTimeToken lastToken = this.spring.getContext()
.getBean(TestOneTimeTokenGenerationSuccessHandler.class).lastToken;
return lastToken;
}
@Configuration(proxyBeanMethods = false)
@EnableWebSecurity
@Import(UserDetailsServiceConfig.class)
static | OneTimeTokenLoginConfigurerTests |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_222.java | {
"start": 856,
"end": 1656
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "/*+engine=mpp*/ (select close from linxi1 where id = 3)";
System.out.println(sql);
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
assertEquals(1, statementList.size());
SQLStatement stmt = statementList.get(0);
assertEquals("/*+engine=mpp*/\n" +
"(SELECT close\n" +
"FROM linxi1\n" +
"WHERE id = 3)", stmt.toString());
assertEquals("/*+engine=mpp*/\n" +
"(select close\n" +
"from linxi1\n" +
"where id = 3)", stmt.clone().toLowerCaseString());
}
}
| MySqlSelectTest_222 |
java | google__dagger | dagger-android/main/java/dagger/android/AndroidInjectionKey.java | {
"start": 1254,
"end": 1316
} | class ____ of the type to be injected. */
String value();
}
| name |
java | apache__camel | core/camel-main/src/test/java/org/apache/camel/main/MainSedaWildcardTest.java | {
"start": 2521,
"end": 2736
} | class ____ extends RouteBuilder {
@Override
public void configure() {
from("direct:start").to("seda:foo");
from("direct:hello").to("seda2:bar");
}
}
}
| MyRouteBuilder |
java | alibaba__nacos | ai/src/main/java/com/alibaba/nacos/ai/service/SimpleSyncEffectService.java | {
"start": 1310,
"end": 1608
} | class ____ implements SyncEffectService {
@Override
public void toSync(ConfigForm configForm, long startTimeStamp, long timeout, TimeUnit timeUnit) {
try {
Thread.sleep(timeout);
} catch (InterruptedException ignored) {
}
}
}
| SimpleSyncEffectService |
java | apache__camel | components/camel-telegram/src/main/java/org/apache/camel/component/telegram/model/EditMessageReplyMarkupMessage.java | {
"start": 1075,
"end": 2552
} | class ____ extends OutgoingMessage {
@JsonProperty("message_id")
private Integer messageId;
@JsonProperty("inline_message_id")
private String inlineMessageId;
@JsonProperty("reply_markup")
private InlineKeyboardMarkup replyMarkup;
/**
* Builds {@link EditMessageReplyMarkupMessage} instance.
*
* @param chatId Unique identifier for the target chat or username of the target channel.
* @param messageId Identifier of the message to edit. Required if inline_message_id is not specified.
* @param inlineMessageId Required if chat_id and message_id are not specified. Identifier of the inline message.
* @param replyMarkup An inline keyboard that appears right next to the message it belongs to.
*/
public EditMessageReplyMarkupMessage(String chatId, Integer messageId, String inlineMessageId,
InlineKeyboardMarkup replyMarkup) {
this.chatId = chatId;
this.messageId = messageId;
this.inlineMessageId = inlineMessageId;
this.replyMarkup = replyMarkup;
}
public Integer getMessageId() {
return messageId;
}
public String getInlineMessageId() {
return inlineMessageId;
}
public InlineKeyboardMarkup getReplyMarkup() {
return replyMarkup;
}
public static Builder builder() {
return new Builder();
}
public static final | EditMessageReplyMarkupMessage |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit4/nested/SpringRuleConfigurer.java | {
"start": 1082,
"end": 1287
} | class ____ {
@ClassRule
public static final SpringClassRule springClassRule = new SpringClassRule();
@Rule
public final SpringMethodRule springMethodRule = new SpringMethodRule();
}
| SpringRuleConfigurer |
java | apache__hadoop | hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java | {
"start": 15466,
"end": 17909
} | enum ____ 3 values, of which if the comparison
// returns NOT_COMPATIBLE, we'll try to check modtime again, else return
// the result of checksum comparison which are compatible(true or false).
//
// Note: Different object stores can have different checksum algorithms
// resulting in no checksum comparison that results in return true
// always, having the modification time enabled can help in these
// scenarios to not incorrectly skip a copy. Refer: HADOOP-18596.
if (sameLength && sameBlockSize) {
if (skipCrc) {
return maybeUseModTimeToCompare(source, target);
} else {
ChecksumComparison checksumComparison = DistCpUtils
.checksumsAreEqual(sourceFS, source.getPath(), null,
targetFS, target.getPath(), source.getLen());
LOG.debug("Result of checksum comparison between src {} and target "
+ "{} : {}", source, target, checksumComparison);
if (checksumComparison.equals(ChecksumComparison.INCOMPATIBLE)) {
return maybeUseModTimeToCompare(source, target);
}
// if skipCrc is disabled and checksumComparison is compatible we
// need not check the mod time.
return checksumComparison.equals(ChecksumComparison.TRUE);
}
}
return false;
}
/**
* If the mod time comparison is enabled, check the mod time else return
* false.
* Comparison: If the target file perceives to have greater or equal mod time
* (older) than the source file, we can assume that there has been no new
* changes that occurred in the source file, hence we should return true to
* skip the copy of the file.
*
* @param source Source fileStatus.
* @param target Target fileStatus.
* @return boolean representing result of modTime check.
*/
private boolean maybeUseModTimeToCompare(
CopyListingFileStatus source, FileStatus target) {
if (useModTimeToUpdate) {
return source.getModificationTime() <= target.getModificationTime();
}
// if we cannot check mod time, return true (skip the copy).
return true;
}
@Override
protected void cleanup(Context context)
throws IOException, InterruptedException {
super.cleanup(context);
long secs = (System.currentTimeMillis() - startEpoch) / 1000;
incrementCounter(context, Counter.BANDWIDTH_IN_BYTES,
totalBytesCopied / ((secs == 0 ? 1 : secs)));
}
}
| representing |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/aggregator/AggregateCompletionOnlyTwoTest.java | {
"start": 1434,
"end": 2772
} | class ____ extends ContextTestSupport {
private final MyRepo repo = new MyRepo();
@Test
void testOnlyTwo() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:aggregated");
mock.expectedBodiesReceived("A+B", "C+END");
template.sendBodyAndHeader("direct:start", "A", "id", "foo");
template.sendBodyAndHeader("direct:start", "B", "id", "foo");
template.sendBodyAndHeader("direct:start", "C", "id", "foo");
template.sendBodyAndHeader("direct:start", "END", "id", "foo");
assertMockEndpointsSatisfied();
assertEquals(4, repo.getGet());
assertEquals(2, repo.getAdd());
assertEquals(2, repo.getRemove());
// A second thread is involved so let's use awaitility to add more flexibility to the test
await().atMost(10, TimeUnit.SECONDS).untilAsserted(() -> assertEquals(2, repo.getConfirm()));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").aggregate(header("id"), new BodyInAggregatingStrategy()).aggregationRepository(repo)
.completionSize(2).to("mock:aggregated");
}
};
}
private static | AggregateCompletionOnlyTwoTest |
java | grpc__grpc-java | s2a/src/main/java/io/grpc/s2a/internal/handshaker/S2AProtocolNegotiatorFactory.java | {
"start": 6431,
"end": 7160
} | class ____ extends ChannelInboundHandlerAdapter {
private final List<Object> reads = new ArrayList<>();
private boolean readComplete;
public List<Object> getReads() {
return reads;
}
@Override
public void channelRead(ChannelHandlerContext unused, Object msg) {
reads.add(msg);
}
@Override
public void channelReadComplete(ChannelHandlerContext unused) {
readComplete = true;
}
@Override
public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
for (Object msg : reads) {
super.channelRead(ctx, msg);
}
if (readComplete) {
super.channelReadComplete(ctx);
}
}
}
private static final | BufferReadsHandler |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/manytoone/jointable/InverseManyToOneJoinTableSimpleIdTest.java | {
"start": 1561,
"end": 4295
} | class ____ {
@Test
public void assertModel(SessionFactoryScope scope) {
final MappingMetamodelImplementor mappingMetamodel = scope.getSessionFactory()
.getRuntimeMetamodels()
.getMappingMetamodel();
final EntityPersister entityDescriptor = mappingMetamodel.getEntityDescriptor( Author.class );
final PluralAttributeMapping books = (PluralAttributeMapping) entityDescriptor.findAttributeMapping( "books" );
final ManyToManyCollectionPart booksElementDescriptor = (ManyToManyCollectionPart) books.getElementDescriptor();
final SimpleForeignKeyDescriptor booksFk = (SimpleForeignKeyDescriptor) booksElementDescriptor.getForeignKeyDescriptor();
assertThat( booksFk.getKeyTable() ).isEqualTo( "book_authors" );
assertThat( booksFk.getKeyPart().getSelectionExpression() ).isEqualTo( "book_id" );
assertThat( booksFk.getTargetTable() ).isEqualTo( "books" );
assertThat( booksFk.getTargetPart().getSelectionExpression() ).isEqualTo( "id" );
}
@Test
public void usageSmokeTest(SessionFactoryScope scope) {
createTestData( scope );
try {
scope.inTransaction( (session) -> {
final Author stephenKing = session.find( Author.class, 1 );
verifyStephenKingBooks( stephenKing );
} );
scope.inTransaction( (session) -> {
final Author stephenKing = session
.createSelectionQuery( "from Author a join fetch a.books where a.id = 1", Author.class )
.getSingleResult();
verifyStephenKingBooks( stephenKing );
} );
}
finally {
dropTestData( scope );
}
}
private void verifyStephenKingBooks(Author author) {
final List<String> bookNames = author.books.stream().map( Book::getName ).collect( Collectors.toList() );
assertThat( bookNames ).contains( "It", "The Shining" );
}
private void createTestData(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
final Author stephenKing = new Author( 1, "Stephen King" );
final Author johnMilton = new Author( 2, "John Milton" );
session.persist( stephenKing );
session.persist( johnMilton );
session.persist( new Book( 1, "It", stephenKing ) );
session.persist( new Book( 2, "The Shining", stephenKing ) );
session.persist( new Book( 3, "Paradise Lost", johnMilton ) );
} );
}
private void dropTestData(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
session.createQuery( "from Author", Author.class ).list().forEach( session::remove );
} );
scope.inTransaction( (session) -> {
final Long bookCount = session.createSelectionQuery( "select count(1) from Book", Long.class ).uniqueResult();
assertThat( bookCount ).isEqualTo( 0L );
} );
}
@Entity( name = "Book" )
@Table( name = "books" )
public static | InverseManyToOneJoinTableSimpleIdTest |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/tools/picocli/CommandLine.java | {
"start": 219919,
"end": 221006
} | class ____ implements IOptionRenderer {
@Override
public Text[][] render(
final Option option,
final Field field,
final IParamLabelRenderer parameterLabelRenderer,
final ColorScheme scheme) {
Text optionText = scheme.optionText(option.names()[0]);
final Text paramLabelText =
parameterLabelRenderer.renderParameterLabel(field, scheme.ansi(), scheme.optionParamStyles);
optionText = optionText.append(paramLabelText);
return new Text[][] {
{optionText, scheme.ansi().new Text(option.description().length == 0 ? "" : option.description()[0])
}
};
}
}
/** The MinimalParameterRenderer converts {@link Parameters Parameters} to a single row with two columns of
* text: the parameters label and a description. If multiple description lines exist, the first value is used. */
static | MinimalOptionRenderer |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/search/VectorType.java | {
"start": 104,
"end": 259
} | enum ____ {
/**
* A 32-bit floating point number.
*/
FLOAT32,
/**
* A 64-bit floating point number.
*/
FLOAT64
}
| VectorType |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/web/context/support/SpringBeanAutowiringSupportTests.java | {
"start": 2163,
"end": 2288
} | class ____ {
@Autowired
public ITestBean testBean;
@Value("#{testBean.name}")
public String name;
}
}
| InjectionTarget |
java | elastic__elasticsearch | x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/generator/command/pipe/ChangePointGenerator.java | {
"start": 623,
"end": 2219
} | class ____ implements CommandGenerator {
public static final String CHANGE_POINT = "change_point";
public static final CommandGenerator INSTANCE = new ChangePointGenerator();
@Override
public CommandDescription generate(
List<CommandDescription> previousCommands,
List<Column> previousOutput,
QuerySchema schema,
QueryExecutor executor
) {
String timestampField = EsqlQueryGenerator.randomDateField(previousOutput);
String numericField = EsqlQueryGenerator.randomNumericField(previousOutput);
if (timestampField == null || numericField == null) {
return EMPTY_DESCRIPTION;
}
String alias1 = EsqlQueryGenerator.randomAttributeOrIdentifier(previousOutput);
String alias2 = EsqlQueryGenerator.randomAttributeOrIdentifier(previousOutput);
while (alias1.equals(alias2)) {
alias2 = EsqlQueryGenerator.randomAttributeOrIdentifier(previousOutput);
}
String cmd = " | CHANGE_POINT " + numericField + " ON " + timestampField + " AS " + alias1 + ", " + alias2;
return new CommandDescription(CHANGE_POINT, this, cmd, Map.of());
}
@Override
public ValidationResult validateOutput(
List<CommandDescription> previousCommands,
CommandDescription command,
List<Column> previousColumns,
List<List<Object>> previousOutput,
List<Column> columns,
List<List<Object>> output
) {
return CommandGenerator.expectAtLeastSameNumberOfColumns(previousColumns, columns);
}
}
| ChangePointGenerator |
java | google__auto | value/src/main/java/com/google/auto/value/extension/toprettystring/processor/ToPrettyStringExtension.java | {
"start": 3392,
"end": 5806
} | class ____ extends AutoValueExtension {
private static final ImmutableSet<Modifier> INHERITED_VISIBILITY_MODIFIERS =
ImmutableSet.of(PUBLIC, PROTECTED);
private static final String INDENT = " ";
private static final String INDENT_METHOD_NAME = "$indent";
private static final CodeBlock KEY_VALUE_SEPARATOR = CodeBlock.of("$S", ": ");
@Override
public String generateClass(
Context context, String className, String classToExtend, boolean isFinal) {
TypeSpec type =
extensionClassTypeSpecBuilder(context, className, classToExtend, isFinal)
.addMethods(toPrettyStringMethodSpecs(context))
.build();
return JavaFile.builder(context.packageName(), type)
.skipJavaLangImports(true)
.build()
.toString();
}
private ImmutableList<MethodSpec> toPrettyStringMethodSpecs(Context context) {
ExecutableElement toPrettyStringMethod = getOnlyElement(toPrettyStringMethods(context));
MethodSpec.Builder method =
methodBuilder(toPrettyStringMethod.getSimpleName().toString())
.addAnnotation(Override.class)
.returns(ClassName.get(String.class))
.addModifiers(FINAL)
.addModifiers(
intersection(toPrettyStringMethod.getModifiers(), INHERITED_VISIBILITY_MODIFIERS));
method.addCode("return $S", context.autoValueClass().getSimpleName() + " {");
ToPrettyStringImplementation implementation = ToPrettyStringImplementation.create(context);
method.addCode(implementation.toStringCodeBlock.build());
if (!context.properties().isEmpty()) {
method.addCode(" + $S", "\n");
}
method.addCode(" + $S;\n", "}");
return ImmutableList.<MethodSpec>builder()
.add(method.build())
.addAll(implementation.delegateMethods.values())
.add(indentMethod())
.build();
}
private static MethodSpec indentMethod() {
return methodBuilder(INDENT_METHOD_NAME)
.addModifiers(PRIVATE, STATIC)
.returns(ClassName.get(String.class))
.addParameter(TypeName.INT, "level")
.addStatement("$1T builder = new $1T()", StringBuilder.class)
.beginControlFlow("for (int i = 0; i < level; i++)")
.addStatement("builder.append($S)", INDENT)
.endControlFlow()
.addStatement("return builder.toString()")
.build();
}
private static | ToPrettyStringExtension |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessActionRequestTests.java | {
"start": 887,
"end": 2578
} | class ____ extends AbstractWireSerializingTestCase<UpdateProcessAction.Request> {
@Override
protected UpdateProcessAction.Request createTestInstance() {
ModelPlotConfig modelPlotConfig = null;
if (randomBoolean()) {
modelPlotConfig = ModelPlotConfigTests.createRandomized();
}
PerPartitionCategorizationConfig perPartitionCategorizationConfig = null;
if (randomBoolean()) {
perPartitionCategorizationConfig = new PerPartitionCategorizationConfig(true, randomBoolean());
}
List<JobUpdate.DetectorUpdate> updates = null;
if (randomBoolean()) {
updates = new ArrayList<>();
int detectorUpdateCount = randomIntBetween(0, 5);
for (int i = 0; i < detectorUpdateCount; i++) {
updates.add(new JobUpdate.DetectorUpdate(randomInt(), randomAlphaOfLength(10), null));
}
}
MlFilter filter = null;
if (randomBoolean()) {
filter = MlFilterTests.createTestFilter();
}
return new UpdateProcessAction.Request(
randomAlphaOfLength(10),
modelPlotConfig,
perPartitionCategorizationConfig,
updates,
filter,
randomBoolean()
);
}
@Override
protected UpdateProcessAction.Request mutateInstance(UpdateProcessAction.Request instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected Writeable.Reader<UpdateProcessAction.Request> instanceReader() {
return UpdateProcessAction.Request::new;
}
}
| UpdateProcessActionRequestTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java | {
"start": 5811,
"end": 6376
} | interface ____<T> extends NamedDiffable<T>, ChunkedToXContent {
EnumSet<XContentContext> context();
/**
* @return true if this custom could be restored from snapshot
*/
default boolean isRestorable() {
return context().contains(XContentContext.SNAPSHOT);
}
}
/**
* Cluster-level custom metadata that persists (via XContent) across restarts.
* The deserialization method for each implementation must be registered with the {@link NamedXContentRegistry}.
*/
public | MetadataCustom |
java | google__guice | extensions/dagger-adapter/test/com/google/inject/daggeradapter/DaggerAdapterTest.java | {
"start": 4696,
"end": 5887
} | class ____ extends UnsupportedAnnotationModule {}
public void testUnsupportedBindingAnnotation() {
try {
Guice.createInjector(DaggerAdapter.from(new UnsupportedAnnotationModule()));
fail();
} catch (CreationException expected) {
assertThat(expected)
.hasMessageThat()
.contains(
"noGuiceEquivalentForElementsIntoSet() is annotated with"
+ " @ElementsIntoSet which is not supported by DaggerAdapter");
}
try {
Guice.createInjector(DaggerAdapter.from(UnsupportedAnnotationStaticModule.class));
fail();
} catch (CreationException expected) {
assertThat(expected)
.hasMessageThat()
.contains(
"noGuiceEquivalentForElementsIntoSet() is annotated with"
+ " @ElementsIntoSet which is not supported by DaggerAdapter");
}
}
public void testUnsupportedBindingAnnotationFromModuleSuperclass() {
try {
Guice.createInjector(DaggerAdapter.from(new UnsupportedAnnotationSubclassModule()));
fail();
} catch (CreationException expected) {
}
}
// TODO(ronshapiro): break this | UnsupportedAnnotationSubclassModule |
java | apache__maven | compat/maven-model-builder/src/main/java/org/apache/maven/model/building/ModelProblemCollectorRequest.java | {
"start": 1249,
"end": 2602
} | class ____ {
private final ModelProblem.Severity severity;
private final ModelProblem.Version version;
private Exception exception;
private String message;
private InputLocation location;
/**
* Create a new request with mandatory parameters.
* @param severity
* @param version
*/
public ModelProblemCollectorRequest(Severity severity, Version version) {
this.severity = Objects.requireNonNull(severity, "severity cannot be null");
this.version = Objects.requireNonNull(version, "version cannot be null");
}
public Severity getSeverity() {
return severity;
}
public Version getVersion() {
return version;
}
public Exception getException() {
return exception;
}
public ModelProblemCollectorRequest setException(Exception exception) {
this.exception = exception;
return this;
}
public String getMessage() {
return message;
}
public ModelProblemCollectorRequest setMessage(String message) {
this.message = message;
return this;
}
public InputLocation getLocation() {
return location;
}
public ModelProblemCollectorRequest setLocation(InputLocation location) {
this.location = location;
return this;
}
}
| ModelProblemCollectorRequest |
java | micronaut-projects__micronaut-core | http-server-tck/src/main/java/io/micronaut/http/server/tck/tests/cors/CorsDisabledByDefaultTest.java | {
"start": 1622,
"end": 3040
} | class ____ {
private static final String SPECNAME = "CorsDisabledByDefaultTest";
/**
* By default, CORS is disabled no cors headers are present in response.
* @throws IOException may throw the try for resources
*/
@Test
void corsDisabledByDefault() throws IOException {
asserts(SPECNAME,
createRequest("https://foo.com"),
(server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.assertResponse(CorsUtils::assertCorsHeadersNotPresent)
.build());
});
}
static HttpRequest<?> createRequest(String origin) {
return HttpRequest.POST("/refresh", Collections.emptyMap())
.header("Content-Type", MediaType.APPLICATION_JSON)
.header("Origin", origin)
.header("Accept-Encoding", "gzip, deflate")
.header("Connection", "keep-alive")
.header("Accept", "*/*")
.header("User-Agent", "Mozilla / 5.0 (Macintosh; Intel Mac OS X 10_15_7)AppleWebKit / 605.1 .15 (KHTML, like Gecko)Version / 16.1 Safari / 605.1 .15")
.header("Referer", origin)
.header("Accept-Language", "en - GB, en");
}
@Requires(property = "spec.name", value = SPECNAME)
@Controller
static | CorsDisabledByDefaultTest |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/service/HealthStatusManager.java | {
"start": 941,
"end": 3171
} | class ____ {
/**
* The special "service name" that represent all services on a GRPC server. It is an empty
* string.
*/
public static final String SERVICE_NAME_ALL_SERVICES = "";
private final TriHealthImpl healthService;
public HealthStatusManager(TriHealthImpl healthService) {
this.healthService = healthService;
}
public Health getHealthService() {
return healthService;
}
/**
* Updates the status of the server.
*
* @param service the name of some aspect of the server that is associated with a health status.
* This name can have no relation with the gRPC services that the server is
* running with. It can also be an empty String {@code ""} per the gRPC
* specification.
* @param status is one of the values {@link HealthCheckResponse.ServingStatus#SERVING}, {@link
* HealthCheckResponse.ServingStatus#NOT_SERVING} and {@link
* HealthCheckResponse.ServingStatus#UNKNOWN}.
*/
public void setStatus(String service, HealthCheckResponse.ServingStatus status) {
healthService.setStatus(service, status);
}
/**
* Clears the health status record of a service. The health service will respond with NOT_FOUND
* error on checking the status of a cleared service.
*
* @param service the name of some aspect of the server that is associated with a health status.
* This name can have no relation with the gRPC services that the server is
* running with. It can also be an empty String {@code ""} per the gRPC
* specification.
*/
public void clearStatus(String service) {
healthService.clearStatus(service);
}
/**
* enterTerminalState causes the health status manager to mark all services as not serving, and
* prevents future updates to services. This method is meant to be called prior to server
* shutdown as a way to indicate that clients should redirect their traffic elsewhere.
*/
public void enterTerminalState() {
healthService.enterTerminalState();
}
}
| HealthStatusManager |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/web/authentication/OAuth2EndpointUtils.java | {
"start": 1682,
"end": 5654
} | class ____ {
static final String ACCESS_TOKEN_REQUEST_ERROR_URI = "https://datatracker.ietf.org/doc/html/rfc6749#section-5.2";
private OAuth2EndpointUtils() {
}
static MultiValueMap<String, String> getFormParameters(HttpServletRequest request) {
Map<String, String[]> parameterMap = request.getParameterMap();
MultiValueMap<String, String> parameters = new LinkedMultiValueMap<>();
parameterMap.forEach((key, values) -> {
String queryString = StringUtils.hasText(request.getQueryString()) ? request.getQueryString() : "";
// If not query parameter then it's a form parameter
if (!queryString.contains(key) && values.length > 0) {
for (String value : values) {
parameters.add(key, value);
}
}
});
return parameters;
}
static MultiValueMap<String, String> getQueryParameters(HttpServletRequest request) {
Map<String, String[]> parameterMap = request.getParameterMap();
MultiValueMap<String, String> parameters = new LinkedMultiValueMap<>();
parameterMap.forEach((key, values) -> {
String queryString = StringUtils.hasText(request.getQueryString()) ? request.getQueryString() : "";
if (queryString.contains(key) && values.length > 0) {
for (String value : values) {
parameters.add(key, value);
}
}
});
return parameters;
}
static Map<String, Object> getParametersIfMatchesAuthorizationCodeGrantRequest(HttpServletRequest request,
String... exclusions) {
if (!matchesAuthorizationCodeGrantRequest(request)) {
return Collections.emptyMap();
}
MultiValueMap<String, String> multiValueParameters = "GET".equals(request.getMethod())
? getQueryParameters(request) : getFormParameters(request);
for (String exclusion : exclusions) {
multiValueParameters.remove(exclusion);
}
Map<String, Object> parameters = new HashMap<>();
multiValueParameters.forEach(
(key, value) -> parameters.put(key, (value.size() == 1) ? value.get(0) : value.toArray(new String[0])));
return parameters;
}
static boolean matchesAuthorizationCodeGrantRequest(HttpServletRequest request) {
return AuthorizationGrantType.AUTHORIZATION_CODE.getValue()
.equals(request.getParameter(OAuth2ParameterNames.GRANT_TYPE))
&& request.getParameter(OAuth2ParameterNames.CODE) != null;
}
static boolean matchesPkceTokenRequest(HttpServletRequest request) {
return matchesAuthorizationCodeGrantRequest(request)
&& request.getParameter(PkceParameterNames.CODE_VERIFIER) != null;
}
static void validateAndAddDPoPParametersIfAvailable(HttpServletRequest request,
Map<String, Object> additionalParameters) {
final String dPoPProofHeaderName = OAuth2AccessToken.TokenType.DPOP.getValue();
String dPoPProof = request.getHeader(dPoPProofHeaderName);
if (StringUtils.hasText(dPoPProof)) {
if (Collections.list(request.getHeaders(dPoPProofHeaderName)).size() != 1) {
throwError(OAuth2ErrorCodes.INVALID_REQUEST, dPoPProofHeaderName, ACCESS_TOKEN_REQUEST_ERROR_URI);
}
else {
additionalParameters.put("dpop_proof", dPoPProof);
additionalParameters.put("dpop_method", request.getMethod());
additionalParameters.put("dpop_target_uri", request.getRequestURL().toString());
}
}
}
static void throwError(String errorCode, String parameterName, String errorUri) {
OAuth2Error error = new OAuth2Error(errorCode, "OAuth 2.0 Parameter: " + parameterName, errorUri);
throw new OAuth2AuthenticationException(error);
}
static String normalizeUserCode(String userCode) {
Assert.hasText(userCode, "userCode cannot be empty");
StringBuilder sb = new StringBuilder(userCode.toUpperCase(Locale.ENGLISH).replaceAll("[^A-Z\\d]+", ""));
Assert.isTrue(sb.length() == 8, "userCode must be exactly 8 alpha/numeric characters");
sb.insert(4, '-');
return sb.toString();
}
static boolean validateUserCode(String userCode) {
return (userCode != null && userCode.toUpperCase(Locale.ENGLISH).replaceAll("[^A-Z\\d]+", "").length() == 8);
}
}
| OAuth2EndpointUtils |
java | quarkusio__quarkus | integration-tests/picocli/src/test/java/io/quarkus/it/picocli/TestVersion.java | {
"start": 279,
"end": 771
} | class ____ {
@RegisterExtension
static final QuarkusProdModeTest config = createConfig("version-app", EntryWithVersionCommand.class,
VersionProvider.class)
.overrideConfigKey("some.version", "1.1")
.setCommandLineParameters("--version");
@Test
public void simpleTest() {
Assertions.assertThat(config.getStartupConsoleOutput()).containsOnlyOnce("1.1");
Assertions.assertThat(config.getExitCode()).isZero();
}
}
| TestVersion |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java | {
"start": 20653,
"end": 25225
} | class ____ {
final Map<Integer, Integer> uidMapping;
final Map<Integer, Integer> gidMapping;
public StaticMapping(Map<Integer, Integer> uidMapping,
Map<Integer, Integer> gidMapping) {
this.uidMapping = new PassThroughMap<Integer>(uidMapping);
this.gidMapping = new PassThroughMap<Integer>(gidMapping);
}
public void clear() {
uidMapping.clear();
gidMapping.clear();
}
public boolean isNonEmpty() {
return uidMapping.size() > 0 || gidMapping.size() > 0;
}
}
static StaticMapping parseStaticMap(File staticMapFile)
throws IOException {
Map<Integer, Integer> uidMapping = new HashMap<Integer, Integer>();
Map<Integer, Integer> gidMapping = new HashMap<Integer, Integer>();
BufferedReader in = new BufferedReader(new InputStreamReader(
Files.newInputStream(staticMapFile.toPath()), StandardCharsets.UTF_8));
try {
String line = null;
while ((line = in.readLine()) != null) {
// Skip entirely empty and comment lines.
if (EMPTY_LINE.matcher(line).matches() ||
COMMENT_LINE.matcher(line).matches()) {
continue;
}
Matcher lineMatcher = MAPPING_LINE.matcher(line);
if (!lineMatcher.matches()) {
LOG.warn("Could not parse line '" + line + "'. Lines should be of " +
"the form '[uid|gid] [remote id] [local id]'. Blank lines and " +
"everything following a '#' on a line will be ignored.");
continue;
}
// We know the line is fine to parse without error checking like this
// since it matched the regex above.
String firstComponent = lineMatcher.group(1);
Integer remoteId = parseId(lineMatcher.group(2));
Integer localId = parseId(lineMatcher.group(3));
if (firstComponent.equals("uid")) {
uidMapping.put(localId, remoteId);
} else {
gidMapping.put(localId, remoteId);
}
}
} finally {
in.close();
}
return new StaticMapping(uidMapping, gidMapping);
}
synchronized public int getUid(String user) throws IOException {
checkAndUpdateMaps();
Integer id = uidNameMap.inverse().get(user);
if (id == null) {
updateMapIncr(user, false);
id = uidNameMap.inverse().get(user);
if (id == null) {
throw new IOException("User just deleted?:" + user);
}
}
return id.intValue();
}
synchronized public int getGid(String group) throws IOException {
checkAndUpdateMaps();
Integer id = gidNameMap.inverse().get(group);
if (id == null) {
updateMapIncr(group, true);
id = gidNameMap.inverse().get(group);
if (id == null) {
throw new IOException("No such group:" + group);
}
}
return id.intValue();
}
synchronized public String getUserName(int uid, String unknown) {
checkAndUpdateMaps();
String uname = uidNameMap.get(uid);
if (uname == null) {
try {
updateMapIncr(uid, false);
} catch (Exception e) {
}
uname = uidNameMap.get(uid);
if (uname == null) {
LOG.warn("Can't find user name for uid " + uid
+ ". Use default user name " + unknown);
uname = unknown;
}
}
return uname;
}
synchronized public String getGroupName(int gid, String unknown) {
checkAndUpdateMaps();
String gname = gidNameMap.get(gid);
if (gname == null) {
try {
updateMapIncr(gid, true);
} catch (Exception e) {
}
gname = gidNameMap.get(gid);
if (gname == null) {
LOG.warn("Can't find group name for gid " + gid
+ ". Use default group name " + unknown);
gname = unknown;
}
}
return gname;
}
// When can't map user, return user name's string hashcode
public int getUidAllowingUnknown(String user) {
checkAndUpdateMaps();
int uid;
try {
uid = getUid(user);
} catch (IOException e) {
uid = user.hashCode();
LOG.info("Can't map user " + user + ". Use its string hashcode:" + uid);
}
return uid;
}
// When can't map group, return group name's string hashcode
public int getGidAllowingUnknown(String group) {
checkAndUpdateMaps();
int gid;
try {
gid = getGid(group);
} catch (IOException e) {
gid = group.hashCode();
LOG.info("Can't map group " + group + ". Use its string hashcode:" + gid);
}
return gid;
}
}
| StaticMapping |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/exceptionhandling/StaleVersionedObjectMergeTest.java | {
"start": 3089,
"end": 3260
} | class ____ {
@Id
private long id;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
}
@Entity(name = "B")
public static | A |
java | apache__camel | core/camel-management/src/test/java/org/apache/camel/management/ManagedDataFormatTest.java | {
"start": 1326,
"end": 2117
} | class ____ extends ManagementTestSupport {
@Test
public void testManageDataFormat() throws Exception {
MBeanServer mbeanServer = getMBeanServer();
// there should be 1 data format
Set<ObjectName> set = mbeanServer.queryNames(new ObjectName("*:type=dataformats,*"), null);
assertEquals(1, set.size());
ObjectName on = set.iterator().next();
assertNotNull(on);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("seda:test")
.unmarshal(new DataFormatServiceTest.MyDataFormat())
.to("mock:result");
}
};
}
}
| ManagedDataFormatTest |
java | quarkusio__quarkus | integration-tests/jpa-postgresql/src/test/java/io/quarkus/it/jpa/postgresql/PostgresTestResourceLifecycleManager.java | {
"start": 196,
"end": 984
} | class ____ implements QuarkusTestResourceLifecycleManager {
private static PostgreSQLContainer<?> postgres;
@SuppressWarnings("resource")
@Override
public Map<String, String> start() {
postgres = new PostgreSQLContainer<>("postgres:14") // the exact value doesn't really matter here
.withDatabaseName("testdb")
.withUsername("test")
.withPassword("test");
postgres.start();
return Map.of("quarkus.datasource.jdbc.url", postgres.getJdbcUrl(), "quarkus.datasource.username", "test",
"quarkus.datasource.password", "test");
}
@Override
public void stop() {
if (postgres != null) {
postgres.stop();
}
}
}
| PostgresTestResourceLifecycleManager |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/resource/ResourceUrlProviderTests.java | {
"start": 1723,
"end": 7512
} | class ____ {
private final List<Resource> locations = new ArrayList<>();
private final ResourceHttpRequestHandler handler = new ResourceHttpRequestHandler();
private final Map<String, ResourceHttpRequestHandler> handlerMap = new HashMap<>();
private final ResourceUrlProvider urlProvider = new ResourceUrlProvider();
@BeforeEach
void setUp() throws Exception {
this.locations.add(new ClassPathResource("test/", getClass()));
this.locations.add(new ClassPathResource("testalternatepath/", getClass()));
this.handler.setServletContext(new MockServletContext());
this.handler.setLocations(locations);
this.handler.afterPropertiesSet();
this.handlerMap.put("/resources/**", this.handler);
this.urlProvider.setHandlerMap(this.handlerMap);
}
@Test
void getStaticResourceUrl() {
String url = this.urlProvider.getForLookupPath("/resources/foo.css");
assertThat(url).isEqualTo("/resources/foo.css");
}
@Test // SPR-13374
void getStaticResourceUrlRequestWithQueryOrHash() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.setContextPath("/");
request.setRequestURI("/");
String url = "/resources/foo.css?foo=bar&url=https://example.org";
String resolvedUrl = this.urlProvider.getForRequestUrl(request, url);
assertThat(resolvedUrl).isEqualTo("/resources/foo.css?foo=bar&url=https://example.org");
url = "/resources/foo.css#hash";
resolvedUrl = this.urlProvider.getForRequestUrl(request, url);
assertThat(resolvedUrl).isEqualTo("/resources/foo.css#hash");
}
@Test // SPR-16526
void getStaticResourceWithMissingContextPath() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.setContextPath("/contextpath-longer-than-request-path");
request.setRequestURI("/contextpath-longer-than-request-path/style.css");
String url = "/resources/foo.css";
String resolvedUrl = this.urlProvider.getForRequestUrl(request, url);
assertThat(resolvedUrl).isNull();
}
@Test
void getFingerprintedResourceUrl() {
Map<String, VersionStrategy> versionStrategyMap = new HashMap<>();
versionStrategyMap.put("/**", new ContentVersionStrategy());
VersionResourceResolver versionResolver = new VersionResourceResolver();
versionResolver.setStrategyMap(versionStrategyMap);
List<ResourceResolver> resolvers = new ArrayList<>();
resolvers.add(versionResolver);
resolvers.add(new PathResourceResolver());
this.handler.setResourceResolvers(resolvers);
String url = this.urlProvider.getForLookupPath("/resources/foo.css");
assertThat(url).isEqualTo("/resources/foo-e36d2e05253c6c7085a91522ce43a0b4.css");
}
@Test // SPR-12647
void bestPatternMatch() {
ResourceHttpRequestHandler otherHandler = new ResourceHttpRequestHandler();
otherHandler.setLocations(this.locations);
Map<String, VersionStrategy> versionStrategyMap = new HashMap<>();
versionStrategyMap.put("/**", new ContentVersionStrategy());
VersionResourceResolver versionResolver = new VersionResourceResolver();
versionResolver.setStrategyMap(versionStrategyMap);
List<ResourceResolver> resolvers = new ArrayList<>();
resolvers.add(versionResolver);
resolvers.add(new PathResourceResolver());
otherHandler.setResourceResolvers(resolvers);
this.handlerMap.put("/resources/*.css", otherHandler);
this.urlProvider.setHandlerMap(this.handlerMap);
String url = this.urlProvider.getForLookupPath("/resources/foo.css");
assertThat(url).isEqualTo("/resources/foo-e36d2e05253c6c7085a91522ce43a0b4.css");
}
@Test // SPR-12592
void initializeOnce() {
AnnotationConfigWebApplicationContext context = new AnnotationConfigWebApplicationContext();
context.setServletContext(new MockServletContext());
context.register(HandlerMappingConfiguration.class);
context.refresh();
ResourceUrlProvider urlProviderBean = context.getBean(ResourceUrlProvider.class);
assertThat(urlProviderBean.getHandlerMap()).containsKey("/resources/**");
assertThat(urlProviderBean.isAutodetect()).isFalse();
}
@Test
void initializeOnCurrentContext() {
AnnotationConfigWebApplicationContext parentContext = new AnnotationConfigWebApplicationContext();
parentContext.setServletContext(new MockServletContext());
parentContext.register(ParentHandlerMappingConfiguration.class);
AnnotationConfigWebApplicationContext childContext = new AnnotationConfigWebApplicationContext();
childContext.setParent(parentContext);
childContext.setServletContext(new MockServletContext());
childContext.register(HandlerMappingConfiguration.class);
parentContext.refresh();
childContext.refresh();
ResourceUrlProvider parentUrlProvider = parentContext.getBean(ResourceUrlProvider.class);
assertThat(parentUrlProvider.getHandlerMap()).isEmpty();
assertThat(parentUrlProvider.isAutodetect()).isTrue();
ResourceUrlProvider childUrlProvider = childContext.getBean(ResourceUrlProvider.class);
assertThat(childUrlProvider.getHandlerMap()).containsOnlyKeys("/resources/**");
assertThat(childUrlProvider.isAutodetect()).isFalse();
}
@Test // SPR-16296
void getForLookupPathShouldNotFailIfPathContainsDoubleSlashes() {
// given
ResourceResolver mockResourceResolver = mock();
given(mockResourceResolver.resolveUrlPath(any(), any(), any())).willReturn("some-path");
ResourceHttpRequestHandler handler = new ResourceHttpRequestHandler();
handler.getResourceResolvers().add(mockResourceResolver);
ResourceUrlProvider provider = new ResourceUrlProvider();
provider.getHandlerMap().put("/some-pattern/**", handler);
// when
String lookupForPath = provider.getForLookupPath("/some-pattern/some-lib//some-resource");
// then
assertThat(lookupForPath).isEqualTo("/some-pattern/some-path");
}
@Configuration
@SuppressWarnings({"unused", "WeakerAccess"})
static | ResourceUrlProviderTests |
java | quarkusio__quarkus | integration-tests/kafka-ssl/src/test/java/io/quarkus/it/kafka/KafkaSSLTestResource.java | {
"start": 391,
"end": 2702
} | class ____ implements QuarkusTestResourceLifecycleManager {
Map<String, String> conf = new HashMap<>();
private final StrimziKafkaContainer kafka = new StrimziKafkaContainer()
.withBootstrapServers(c -> String.format("SSL://%s:%s", c.getHost(), c.getMappedPort(KAFKA_PORT)))
.withBrokerId(0)
.withKafkaConfigurationMap(Map.ofEntries(
entry("ssl.keystore.location", "/opt/kafka/config/kafka-keystore.p12"),
entry("ssl.keystore.password", "Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L"),
entry("ssl.keystore.type", "PKCS12"),
entry("ssl.key.password", "Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L"),
entry("ssl.truststore.location", "/opt/kafka/config/kafka-truststore.p12"),
entry("ssl.truststore.password", "Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L"),
entry("ssl.truststore.type", "PKCS12"),
entry("ssl.endpoint.identification.algorithm", ""),
entry("listener.security.protocol.map",
"BROKER1:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL,CONTROLLER:PLAINTEXT")))
.withCopyFileToContainer(MountableFile.forHostPath("target/certs/kafka-keystore.p12"),
"/opt/kafka/config/kafka-keystore.p12")
.withCopyFileToContainer(MountableFile.forHostPath("target/certs/kafka-truststore.p12"),
"/opt/kafka/config/kafka-truststore.p12");
private Map<String, String> initProps;
@Override
public void init(Map<String, String> initArgs) {
initProps = initArgs;
}
@Override
public Map<String, String> start() {
kafka.start();
// Used by the test
System.setProperty("bootstrap.servers", kafka.getBootstrapServers());
// Used by the application
Map<String, String> properties = new HashMap<>(initProps);
properties.put("kafka.bootstrap.servers", kafka.getBootstrapServers());
properties.put("ssl-dir", new File("target/certs").getAbsolutePath());
return properties;
}
@Override
public void stop() {
if (kafka != null) {
kafka.close();
}
System.clearProperty("boostrap.servers");
}
}
| KafkaSSLTestResource |
java | apache__camel | components/camel-infinispan/camel-infinispan/src/test/java/org/apache/camel/component/infinispan/remote/cluster/AbstractInfinispanRemoteClusteredIT.java | {
"start": 7436,
"end": 8435
} | class ____ {
public RouteBuilder getRouteBuilder(RunnerEnv runnerEnv) {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
this.getContext().addRoutePolicyFactory(ClusteredRoutePolicyFactory.forNamespace(viewName));
fromF("timer:%s?delay=1000&period=1000&repeatCount=%d", runnerEnv.id, runnerEnv.events)
.routeId("route-" + runnerEnv.id)
.routePolicy(ClusteredRoutePolicy.forNamespace(viewName))
.log("From id=${routeId} counter=${header.CamelTimerCounter}")
.process(e -> runnerEnv.latch.countDown());
}
};
}
@Timeout(value = 1, unit = TimeUnit.MINUTES)
@Test
public void test() throws Exception {
runTest(this::getRouteBuilder);
}
}
}
| InfinispanRemoteClusteredRoutePolicyTestNested |
java | spring-projects__spring-boot | module/spring-boot-webflux/src/testFixtures/java/org/springframework/boot/webflux/actuate/endpoint/web/test/WebFluxWebEndpointInfrastructureProvider.java | {
"start": 999,
"end": 1376
} | class ____ implements WebEndpointInfrastructureProvider {
@Override
public boolean supports(Infrastructure infrastructure) {
return infrastructure == Infrastructure.WEBFLUX;
}
@Override
public List<Class<?>> getInfrastructureConfiguration(Infrastructure infrastructure) {
return List.of(WebFluxEndpointConfiguration.class);
}
}
| WebFluxWebEndpointInfrastructureProvider |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/pool/ha/selector/DataSourceSelector.java | {
"start": 804,
"end": 1397
} | interface ____ {
/**
* Return a DataSource according to the implemention.
*/
DataSource get();
/**
* Set the target DataSource name to return.
* Wether to use this or not, it's decided by the implemention.
*/
void setTarget(String name);
/**
* Return the name of this DataSourceSelector.
* e.g. byName
*/
String getName();
/**
* Init the DataSourceSelector before use it.
*/
void init();
/**
* Destroy the DataSourceSelector, maybe interrupt the Thread.
*/
void destroy();
}
| DataSourceSelector |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/internal/BasicValueBinder.java | {
"start": 44300,
"end": 45365
} | class ____ not specified for basic mapping: " + memberDetails.getName() );
}
parameters.put( DynamicParameterizedType.RETURNED_CLASS, returnedClassName );
parameters.put( DynamicParameterizedType.XPROPERTY, memberDetails );
parameters.put( DynamicParameterizedType.PROPERTY, memberDetails.getName() );
parameters.put( DynamicParameterizedType.IS_DYNAMIC, Boolean.toString( true ) );
parameters.put( DynamicParameterizedType.IS_PRIMARY_KEY, Boolean.toString( kind == Kind.MAP_KEY ) );
if ( persistentClassName != null ) {
parameters.put( DynamicParameterizedType.ENTITY, persistentClassName );
}
if ( returnedClassName != null ) {
parameters.put( DynamicParameterizedType.RETURNED_CLASS, returnedClassName );
}
if ( accessType != null ) {
parameters.put( DynamicParameterizedType.ACCESS_TYPE, accessType.getType() );
}
if ( explicitLocalTypeParams != null ) {
parameters.putAll( explicitLocalTypeParams );
}
return parameters;
}
/**
* Access to detail of basic value mappings based on {@link Kind}
*/
private | name |
java | apache__camel | core/camel-xml-io/src/main/java/org/apache/camel/xml/io/MXParser.java | {
"start": 1871,
"end": 133339
} | class ____ implements XmlPullParser {
// NOTE: no interning of those strings --> by Java lang spec they MUST be
// already interned
protected static final String XML_URI = "http://www.w3.org/XML/1998/namespace";
protected static final String XMLNS_URI = "http://www.w3.org/2000/xmlns/";
protected static final String FEATURE_XML_ROUNDTRIP =
// "http://xmlpull.org/v1/doc/features.html#xml-roundtrip";
"http://xmlpull.org/v1/doc/features.html#xml-roundtrip";
protected static final String FEATURE_NAMES_INTERNED = "http://xmlpull.org/v1/doc/features.html#names-interned";
protected static final String PROPERTY_XMLDECL_VERSION = "http://xmlpull.org/v1/doc/properties.html#xmldecl-version";
protected static final String PROPERTY_XMLDECL_STANDALONE = "http://xmlpull.org/v1/doc/properties.html#xmldecl-standalone";
protected static final String PROPERTY_XMLDECL_CONTENT = "http://xmlpull.org/v1/doc/properties.html#xmldecl-content";
protected static final String PROPERTY_LOCATION = "http://xmlpull.org/v1/doc/properties.html#location";
/**
* Implementation notice: the is instance variable that controls if newString() is interning.
* <p>
* <b>NOTE:</b> newStringIntern <b>always</b> returns interned strings and newString MAY return interned String
* depending on this variable.
* <p>
* <b>NOTE:</b> by default in this minimal implementation it is false!
*/
protected boolean allStringsInterned;
protected void resetStringCache() {
// System.out.println("resetStringCache() minimum called");
}
protected String newString(char[] cbuf, int off, int len) {
return new String(cbuf, off, len);
}
protected String newStringIntern(char[] cbuf, int off, int len) {
return (new String(cbuf, off, len)).intern();
}
private static final boolean TRACE_SIZING = false;
// NOTE: features are not resettable and typically defaults to false ...
protected boolean processNamespaces;
protected boolean roundtripSupported;
// global parser state
protected String location;
protected int startLineNumber;
protected int lineNumber;
protected int columnNumber;
protected boolean seenRoot;
protected boolean reachedEnd;
protected int eventType;
protected boolean emptyElementTag;
// element stack
protected int depth;
protected char[][] elRawName;
protected int[] elRawNameEnd;
protected int[] elRawNameLine;
protected String[] elName;
protected String[] elPrefix;
protected String[] elUri;
// protected String elValue[];
protected int[] elNamespaceCount;
/**
* Make sure that we have enough space to keep element stack if passed size. It will always create one additional
* slot then current depth
*/
protected void ensureElementsCapacity() {
final int elStackSize = elName != null ? elName.length : 0;
if ((depth + 1) >= elStackSize) {
// we add at least one extra slot ...
final int newSize = (depth >= 7 ? 2 * depth : 8) + 2; // = lucky 7 +
// 1 //25
if (TRACE_SIZING) {
System.err.println("TRACE_SIZING elStackSize " + elStackSize + " ==> " + newSize);
}
final boolean needsCopying = elStackSize > 0;
String[] arr;
// reuse arr local variable slot
arr = new String[newSize];
if (needsCopying)
System.arraycopy(elName, 0, arr, 0, elStackSize);
elName = arr;
arr = new String[newSize];
if (needsCopying)
System.arraycopy(elPrefix, 0, arr, 0, elStackSize);
elPrefix = arr;
arr = new String[newSize];
if (needsCopying)
System.arraycopy(elUri, 0, arr, 0, elStackSize);
elUri = arr;
int[] iarr = new int[newSize];
if (needsCopying) {
System.arraycopy(elNamespaceCount, 0, iarr, 0, elStackSize);
} else {
// special initialization
iarr[0] = 0;
}
elNamespaceCount = iarr;
// TODO: avoid using element raw name ...
iarr = new int[newSize];
if (needsCopying) {
System.arraycopy(elRawNameEnd, 0, iarr, 0, elStackSize);
}
elRawNameEnd = iarr;
iarr = new int[newSize];
if (needsCopying) {
System.arraycopy(elRawNameLine, 0, iarr, 0, elStackSize);
}
elRawNameLine = iarr;
final char[][] carr = new char[newSize][];
if (needsCopying) {
System.arraycopy(elRawName, 0, carr, 0, elStackSize);
}
elRawName = carr;
// arr = new String[newSize];
// if(needsCopying) System.arraycopy(elLocalName, 0, arr, 0,
// elStackSize);
// elLocalName = arr;
// arr = new String[newSize];
// if(needsCopying) System.arraycopy(elDefaultNs, 0, arr, 0,
// elStackSize);
// elDefaultNs = arr;
// int[] iarr = new int[newSize];
// if(needsCopying) System.arraycopy(elNsStackPos, 0, iarr, 0,
// elStackSize);
// for (int i = elStackSize; i < iarr.length; i++)
// {
// iarr[i] = (i > 0) ? -1 : 0;
// }
// elNsStackPos = iarr;
// assert depth < elName.length;
}
}
// attribute stack
protected int attributeCount;
protected String[] attributeName;
protected int[] attributeNameHash;
// protected int attributeNameStart[];
// protected int attributeNameEnd[];
protected String[] attributePrefix;
protected String[] attributeUri;
protected String[] attributeValue;
// protected int attributeValueStart[];
// protected int attributeValueEnd[];
/**
 * Grows the parallel attribute arrays so that at least {@code size + 1}
 * entries fit; existing entries are preserved. No-op when capacity suffices.
 *
 * @param size the attribute index about to be written
 */
protected void ensureAttributesCapacity(int size) {
    final int currentCapacity = attributeName != null ? attributeName.length : 0;
    if (size < currentCapacity) {
        return;
    }
    // geometric growth with a floor of 8 slots
    final int grownCapacity = size > 7 ? 2 * size : 8;
    if (TRACE_SIZING) {
        System.err.println("TRACE_SIZING attrPosSize " + currentCapacity + " ==> " + grownCapacity);
    }
    attributeName = copyOfStrings(attributeName, currentCapacity, grownCapacity);
    attributePrefix = copyOfStrings(attributePrefix, currentCapacity, grownCapacity);
    attributeUri = copyOfStrings(attributeUri, currentCapacity, grownCapacity);
    attributeValue = copyOfStrings(attributeValue, currentCapacity, grownCapacity);
    if (!allStringsInterned) {
        final int[] grownHashes = new int[grownCapacity];
        if (currentCapacity > 0) {
            System.arraycopy(attributeNameHash, 0, grownHashes, 0, currentCapacity);
        }
        attributeNameHash = grownHashes;
    }
}

/**
 * Allocates a String array of {@code newSize} and copies the first
 * {@code copyLen} entries of {@code old} into it ({@code old} may be null
 * when {@code copyLen} is 0).
 */
private static String[] copyOfStrings(String[] old, int copyLen, int newSize) {
    final String[] grown = new String[newSize];
    if (copyLen > 0) {
        System.arraycopy(old, 0, grown, 0, copyLen);
    }
    return grown;
}
// namespace stack
// In-scope namespace declarations as parallel arrays indexed 0..namespaceEnd-1,
// innermost declaration last; grown on demand by ensureNamespacesCapacity().
protected int namespaceEnd;
protected String[] namespacePrefix;
// fast hash of each namespacePrefix, maintained only when !allStringsInterned
protected int[] namespacePrefixHash;
protected String[] namespaceUri;
/**
 * Grows the parallel namespace arrays so that at least {@code size + 1}
 * entries fit; the first {@code namespaceEnd} entries are preserved.
 *
 * @param size the namespace index about to be written
 */
protected void ensureNamespacesCapacity(int size) {
    final int capacity = namespacePrefix != null ? namespacePrefix.length : 0;
    if (size < capacity) {
        return;
    }
    // geometric growth with a floor of 8 slots
    final int grown = size > 7 ? 2 * size : 8;
    if (TRACE_SIZING) {
        System.err.println("TRACE_SIZING namespaceSize " + capacity + " ==> " + grown);
    }
    final String[] prefixes = new String[grown];
    final String[] uris = new String[grown];
    if (namespacePrefix != null) {
        System.arraycopy(namespacePrefix, 0, prefixes, 0, namespaceEnd);
        System.arraycopy(namespaceUri, 0, uris, 0, namespaceEnd);
    }
    namespacePrefix = prefixes;
    namespaceUri = uris;
    if (!allStringsInterned) {
        final int[] hashes = new int[grown];
        if (namespacePrefixHash != null) {
            System.arraycopy(namespacePrefixHash, 0, hashes, 0, namespaceEnd);
        }
        namespacePrefixHash = hashes;
    }
}
/**
 * Simplistic hash over a character range that runs in <b>constant</b> time:
 * it samples at most four characters (first, last, quarter and midpoint), so
 * hash quality degrades for long strings, but for XML name lookup it is
 * good enough. Precondition: the range is within bounds.
 *
 * @param ch character buffer
 * @param off start of the range
 * @param len length of the range; 0 yields hash 0
 * @return a non-negative hash (at most 3 shifts of 7 bits over 8-bit values,
 *         so the result stays well below 31 bits)
 */
protected static int fastHash(char[] ch, int off, int len) {
    if (len == 0) {
        return 0;
    }
    // seed with the first character, fold in the last
    int hash = (ch[off] << 7) + ch[off + len - 1];
    if (len > 16) {
        hash = (hash << 7) + ch[off + (len / 4)]; // sample 1/4 from beginning
    }
    if (len > 8) {
        hash = (hash << 7) + ch[off + (len / 2)]; // sample midpoint
    }
    return hash;
}
// entity replacement stack
// Entities registered via defineEntityReplacementText(), held as parallel
// arrays indexed 0..entityEnd-1; grown on demand by ensureEntityCapacity().
protected int entityEnd;
protected String[] entityName;
protected char[][] entityNameBuf;
protected String[] entityReplacement;
protected char[][] entityReplacementBuf;
// fast hash of each entityNameBuf, maintained only when !allStringsInterned
protected int[] entityNameHash;
/**
 * Grows the parallel entity arrays so that index {@code entityEnd} can be
 * written; the first {@code entityEnd} entries are preserved.
 */
protected void ensureEntityCapacity() {
    final int capacity = entityReplacementBuf != null ? entityReplacementBuf.length : 0;
    if (entityEnd < capacity) {
        return;
    }
    // geometric growth with a floor of 8 slots
    final int grown = entityEnd > 7 ? 2 * entityEnd : 8;
    if (TRACE_SIZING) {
        System.err.println("TRACE_SIZING entitySize " + capacity + " ==> " + grown);
    }
    final String[] names = new String[grown];
    final char[][] nameBufs = new char[grown][];
    final String[] replacements = new String[grown];
    final char[][] replacementBufs = new char[grown][];
    if (entityName != null) {
        System.arraycopy(entityName, 0, names, 0, entityEnd);
        System.arraycopy(entityNameBuf, 0, nameBufs, 0, entityEnd);
        System.arraycopy(entityReplacement, 0, replacements, 0, entityEnd);
        System.arraycopy(entityReplacementBuf, 0, replacementBufs, 0, entityEnd);
    }
    entityName = names;
    entityNameBuf = nameBufs;
    entityReplacement = replacements;
    entityReplacementBuf = replacementBufs;
    if (!allStringsInterned) {
        final int[] hashes = new int[grown];
        if (entityNameHash != null) {
            System.arraycopy(entityNameHash, 0, hashes, 0, entityEnd);
        }
        entityNameHash = hashes;
    }
}
// input buffer management
protected static final int READ_CHUNK_SIZE = 8 * 1024; // max data chars in
// one read() call
protected Reader reader;
protected String inputEncoding;
// fill percentage of buf at which the parser considers it "full"
// (see bufSoftLimit below); value is 95, i.e. 95%
protected int bufLoadFactor = 95;
// protected int bufHardLimit; // only matters when expanding
// raw input buffer; smaller initial size on very memory-constrained VMs
protected char[] buf = new char[Runtime.getRuntime().freeMemory() > 1000000L ? READ_CHUNK_SIZE : 256];
protected int bufSoftLimit = (bufLoadFactor * buf.length) / 100; // desirable
// size of
// buffer
protected boolean preventBufferCompaction;
protected int bufAbsoluteStart; // this is buf
protected int bufStart;
protected int bufEnd;
protected int pos;
protected int posStart;
protected int posEnd;
// "pc" = parsed characters: side buffer used when event text must be
// normalized/merged and can no longer be served directly from buf (usePC)
protected char[] pc = new char[Runtime.getRuntime().freeMemory() > 1000000L ? READ_CHUNK_SIZE : 64];
protected int pcStart;
protected int pcEnd;
// parsing state
// protected boolean needsMore;
// protected boolean seenMarkup;
protected boolean usePC;
// one-character look-ahead flags: a tag/entity/markup was seen but its
// event is deferred until the next call to nextImpl()
protected boolean seenStartTag;
protected boolean seenEndTag;
protected boolean pastEndTag;
protected boolean seenAmpersand;
protected boolean seenMarkup;
protected boolean seenDocdecl;
// transient variable set during each call to next/Token()
protected boolean tokenize;
protected String text;
protected String entityRefName;
// values captured from the XML declaration, if any
protected String xmlDeclVersion;
protected Boolean xmlDeclStandalone;
protected String xmlDeclContent;
/**
 * Resets all parser state back to the START_DOCUMENT condition so this
 * instance can be reused; invoked by both setInput() variants.
 */
protected void reset() {
    // System.out.println("reset() called");
    location = null;
    startLineNumber = 1;
    lineNumber = 1;
    columnNumber = 0;
    seenRoot = false;
    reachedEnd = false;
    eventType = START_DOCUMENT;
    emptyElementTag = false;
    depth = 0;
    attributeCount = 0;
    namespaceEnd = 0;
    entityEnd = 0;
    reader = null;
    inputEncoding = null;
    preventBufferCompaction = false;
    bufAbsoluteStart = 0;
    bufEnd = bufStart = 0;
    pos = posStart = posEnd = 0;
    pcEnd = pcStart = 0;
    usePC = false;
    seenStartTag = false;
    seenEndTag = false;
    pastEndTag = false;
    seenAmpersand = false;
    seenMarkup = false;
    seenDocdecl = false;
    xmlDeclVersion = null;
    xmlDeclStandalone = null;
    xmlDeclContent = null;
    resetStringCache();
}
/**
 * Creates a parser with no input attached; call one of the
 * {@code setInput(...)} methods before parsing.
 */
public MXParser() {
}
/**
 * Enables or disables a parser feature.
 *
 * @param name feature URI, must not be null
 * @param state requested state
 * @throws XmlPullParserException if the feature is unsupported, cannot take
 *         the requested state, or (for namespace processing) parsing has
 *         already started
 */
public void setFeature(String name, boolean state) throws XmlPullParserException {
    if (name == null) {
        throw new IllegalArgumentException("feature name should not be null");
    }
    if (FEATURE_PROCESS_NAMESPACES.equals(name)) {
        // may only be toggled before the first event is produced
        if (eventType != START_DOCUMENT) {
            throw new XmlPullParserException("namespace processing feature can only be changed before parsing", this, null);
        }
        processNamespaces = state;
        return;
    }
    if (FEATURE_NAMES_INTERNED.equals(name)) {
        if (state) {
            throw new XmlPullParserException("interning names in this implementation is not supported");
        }
        return;
    }
    if (FEATURE_PROCESS_DOCDECL.equals(name)) {
        if (state) {
            throw new XmlPullParserException("processing DOCDECL is not supported");
        }
        return;
    }
    if (FEATURE_XML_ROUNDTRIP.equals(name)) {
        roundtripSupported = state;
        return;
    }
    throw new XmlPullParserException("unsupported feature " + name);
}
/**
 * Reports the current state of a feature. Unknown features are
 * <strong>always</strong> returned as false.
 *
 * @param name feature URI, must not be null
 * @return current state; false for any unrecognized feature
 */
public boolean getFeature(String name) {
    if (name == null) {
        throw new IllegalArgumentException("feature name should not be null");
    }
    if (FEATURE_PROCESS_NAMESPACES.equals(name)) {
        return processNamespaces;
    }
    if (FEATURE_XML_ROUNDTRIP.equals(name)) {
        return roundtripSupported;
    }
    // name interning and DOCDECL processing are never enabled by this
    // implementation, so they (and unknown features) report false
    return false;
}
/**
 * Sets a parser property; only {@code PROPERTY_LOCATION} is supported.
 *
 * @param name property URI
 * @param value the location string to associate with this input
 * @throws XmlPullParserException for any property other than PROPERTY_LOCATION
 */
public void setProperty(String name, Object value) throws XmlPullParserException {
    if (!PROPERTY_LOCATION.equals(name)) {
        throw new XmlPullParserException("unsupported property: '" + name + "'");
    }
    location = (String) value;
}
/**
 * Returns the value of a parser property, or null if the property is
 * unknown.
 *
 * @param name property URI, must not be null
 * @return the property value, or null for unrecognized properties
 */
public Object getProperty(String name) {
    if (name == null) {
        throw new IllegalArgumentException("property name should not be null");
    }
    if (PROPERTY_XMLDECL_VERSION.equals(name)) {
        return xmlDeclVersion;
    }
    if (PROPERTY_XMLDECL_STANDALONE.equals(name)) {
        return xmlDeclStandalone;
    }
    if (PROPERTY_XMLDECL_CONTENT.equals(name)) {
        return xmlDeclContent;
    }
    if (PROPERTY_LOCATION.equals(name)) {
        return location;
    }
    return null;
}
/**
 * Attaches a character stream as parser input; all previous parser state is
 * discarded via {@link #reset()}.
 *
 * @param in reader to parse from, must not be null
 * @throws XmlPullParserException declared by the XmlPullParser contract
 */
public void setInput(Reader in) throws XmlPullParserException {
    if (in == null) {
        throw new IllegalArgumentException("input reader can not be null");
    }
    reset();
    reader = in;
}
/**
 * Attaches a byte stream as parser input. With an explicit encoding an
 * InputStreamReader is used directly; with a null encoding the stream is
 * wrapped in XmlStreamReader, which also supplies the detected encoding
 * (presumably via BOM/XML-declaration sniffing — confirm against
 * XmlStreamReader's docs).
 *
 * @param inputStream stream to parse from, must not be null
 * @param inputEncoding explicit character encoding, or null to auto-detect
 * @throws XmlPullParserException if a reader for the encoding cannot be created
 */
public void setInput(InputStream inputStream, String inputEncoding) throws XmlPullParserException {
    if (inputStream == null) {
        throw new IllegalArgumentException("input stream can not be null");
    }
    reset();
    try {
        if (inputEncoding != null) {
            this.reader = new InputStreamReader(inputStream, inputEncoding);
            this.inputEncoding = inputEncoding;
        } else {
            XmlStreamReader xr = new XmlStreamReader(inputStream);
            this.reader = xr;
            this.inputEncoding = xr.getEncoding();
        }
    } catch (IOException une) {
        throw new XmlPullParserException("could not create reader for encoding " + inputEncoding + " : " + une, this, une);
    }
}
/**
 * Returns the input encoding: the value passed to (or detected by)
 * {@code setInput(InputStream, String)}, or null if input was set from a
 * Reader or not set at all.
 */
public String getInputEncoding() {
    return inputEncoding;
}
/**
 * Registers replacement text for a custom entity so it can be resolved
 * during parsing.
 *
 * @param entityName name of the entity; NOTE this parameter shadows the
 *        {@code entityName} field, which is why the field write below uses
 *        {@code this.entityName}
 * @param replacementText text the entity expands to
 * @throws XmlPullParserException declared by the XmlPullParser contract
 */
public void defineEntityReplacementText(String entityName, String replacementText) throws XmlPullParserException {
    // throw new XmlPullParserException("not allowed");
    // protected char[] entityReplacement[];
    ensureEntityCapacity();
    // this is to make sure that if interning works we will take advantage
    // of it ...
    this.entityName[entityEnd] = newString(entityName.toCharArray(), 0, entityName.length());
    entityNameBuf[entityEnd] = entityName.toCharArray();
    entityReplacement[entityEnd] = replacementText;
    entityReplacementBuf[entityEnd] = replacementText.toCharArray();
    if (!allStringsInterned) {
        entityNameHash[entityEnd] = fastHash(entityNameBuf[entityEnd], 0, entityNameBuf[entityEnd].length);
    }
    ++entityEnd;
    // TODO disallow < or & in entity replacement text (or ]]>???)
    // TOOD keepEntityNormalizedForAttributeValue cached as well ...
}
/**
 * Returns the number of namespace declarations in scope at the given element
 * depth; always 0 when namespace processing is disabled or at depth 0.
 *
 * @param depth element depth, must be within 0..current depth
 * @throws IllegalArgumentException if depth is out of range
 */
public int getNamespaceCount(int depth) throws XmlPullParserException {
    if (!processNamespaces || depth == 0) {
        return 0;
    }
    // int maxDepth = eventType == END_TAG ? this.depth + 1 : this.depth;
    // if(depth < 0 || depth > maxDepth) throw new IllegalArgumentException(
    if (depth < 0 || depth > this.depth)
        throw new IllegalArgumentException("allowed namespace depth 0.." + this.depth + " not " + depth);
    return elNamespaceCount[depth];
}
/**
 * Returns the prefix of the namespace declaration at the given stack
 * position (0..namespaceEnd-1); null prefix denotes the default namespace.
 *
 * @throws XmlPullParserException if pos exceeds the declaration count
 */
public String getNamespacePrefix(int pos) throws XmlPullParserException {
    // int end = eventType == END_TAG ? elNamespaceCount[ depth + 1 ] :
    // namespaceEnd;
    // if(pos < end) {
    if (pos < namespaceEnd) {
        return namespacePrefix[pos];
    } else {
        throw new XmlPullParserException("position " + pos + " exceeded number of available namespaces " + namespaceEnd);
    }
}
/**
 * Returns the URI of the namespace declaration at the given stack position
 * (0..namespaceEnd-1).
 *
 * @throws XmlPullParserException if pos exceeds the declaration count
 */
public String getNamespaceUri(int pos) throws XmlPullParserException {
    // int end = eventType == END_TAG ? elNamespaceCount[ depth + 1 ] :
    // namespaceEnd;
    // if(pos < end) {
    if (pos < namespaceEnd) {
        return namespaceUri[pos];
    } else {
        throw new XmlPullParserException("position " + pos + " exceeded number of available namespaces " + namespaceEnd);
    }
}
/**
 * Resolves a namespace prefix against the declarations currently in scope,
 * searching innermost-first. A null prefix resolves the default namespace.
 * The built-in "xml" and "xmlns" prefixes resolve to their fixed URIs.
 *
 * @param prefix prefix to resolve, or null for the default namespace
 * @return the bound URI, or null if the prefix is not bound
 */
public String getNamespace(String prefix) {
    if (prefix == null) {
        // default namespace: innermost declaration whose prefix is null
        for (int i = namespaceEnd - 1; i >= 0; i--) {
            if (namespacePrefix[i] == null) {
                return namespaceUri[i];
            }
        }
        return null;
    }
    for (int i = namespaceEnd - 1; i >= 0; i--) {
        if (prefix.equals(namespacePrefix[i])) {
            return namespaceUri[i];
        }
    }
    if ("xml".equals(prefix)) {
        return XML_URI;
    }
    if ("xmlns".equals(prefix)) {
        return XMLNS_URI;
    }
    return null;
}
/**
 * Returns the current element nesting depth (0 outside the root element).
 */
public int getDepth() {
    return depth;
}
/**
 * Picks a start index in {@code b} for a short human-readable context
 * fragment ending at {@code end}, preferring to begin near a '<' and keeping
 * the fragment at most ~65 characters.
 *
 * @param bufMinPos smallest index still backed by valid buffer data
 * @param b character buffer
 * @param start requested fragment start
 * @param end exclusive fragment end
 * @return the chosen start index
 */
private static int findFragment(int bufMinPos, char[] b, int start, int end) {
    if (start < bufMinPos) {
        // requested start precedes the valid window: clamp to [bufMinPos, end]
        return Math.min(bufMinPos, end);
    }
    int scanFrom = start;
    if (end - scanFrom > 65) {
        scanFrom = end - 10; // fragment would be too long; restart scan near the end
    }
    // walk backwards looking for a good cut point
    int i = scanFrom + 1;
    while (--i > bufMinPos) {
        if (end - i > 65) {
            break; // fragment long enough
        }
        if (b[i] == '<' && scanFrom - i > 10) {
            break; // found markup start a reasonable distance back
        }
    }
    return i;
}
/**
 * Return string describing current position of parsers as text 'STATE [seen %s...] @line:column'.
 * The "seen" fragment is a short excerpt of the input ending at the current
 * position, chosen by {@link #findFragment}.
 */
public String getPositionDescription() {
    String fragment = null;
    if (posStart <= pos) {
        final int start = findFragment(0, buf, posStart, pos);
        // System.err.println("start="+start);
        if (start < pos) {
            fragment = new String(buf, start, pos - start);
        }
        // prepend ellipsis when earlier input was discarded or skipped
        if (bufAbsoluteStart > 0 || start > 0)
            fragment = "..." + fragment;
    }
    // return " at line "+tokenizerPosRow
    // +" and column "+(tokenizerPosCol-1)
    // +(fragment != null ? " seen "+printable(fragment)+"..." : "");
    return " " + TYPES[eventType] + (fragment != null ? " seen " + printable(fragment) + "..." : "") + " "
        + (location != null ? location : "") + "@" + getLineNumber() + ":"
        + getColumnNumber();
}
/**
 * Returns the line on which the current event started.
 */
public int getStartLineNumber() {
    return startLineNumber;
}
/**
 * Returns the current line number (1-based).
 */
public int getLineNumber() {
    return lineNumber;
}
/**
 * Returns the current column number.
 */
public int getColumnNumber() {
    return columnNumber;
}
/**
 * Reports whether the current TEXT or CDSECT event consists solely of
 * whitespace; IGNORABLE_WHITESPACE is whitespace by definition.
 *
 * @throws XmlPullParserException if the current event carries no text content
 */
public boolean isWhitespace() throws XmlPullParserException {
    if (eventType == IGNORABLE_WHITESPACE) {
        return true;
    }
    if (eventType != TEXT && eventType != CDSECT) {
        throw new XmlPullParserException("no content available to check for white spaces");
    }
    // text lives either in the parsed-characters side buffer or in buf
    final char[] chars = usePC ? pc : buf;
    final int from = usePC ? pcStart : posStart;
    final int to = usePC ? pcEnd : posEnd;
    for (int i = from; i < to; i++) {
        if (!isS(chars[i])) {
            return false;
        }
    }
    return true;
}
/**
 * Returns the text content of the current event, materializing and caching
 * it on first access. Returns null at document boundaries.
 */
public String getText() {
    if (eventType == START_DOCUMENT || eventType == END_DOCUMENT) {
        // no textual content at document boundaries
        return null;
    }
    if (eventType == ENTITY_REF) {
        return text;
    }
    if (text == null) {
        // lazily build the string; tags always read from the raw buffer,
        // other events use the pc side buffer when normalization occurred
        if (usePC && eventType != START_TAG && eventType != END_TAG) {
            text = new String(pc, pcStart, pcEnd - pcStart);
        } else {
            text = new String(buf, posStart, posEnd - posStart);
        }
    }
    return text;
}
/**
 * Returns the backing character array for the current event's text and fills
 * {@code holderForStartAndLength} with [start, length]. At document
 * boundaries both holder slots are set to -1 and null is returned.
 *
 * @param holderForStartAndLength two-element array receiving start and length
 * @return the backing buffer (not a copy), or null at document boundaries
 */
public char[] getTextCharacters(int[] holderForStartAndLength) {
    switch (eventType) {
        case TEXT:
            if (usePC) {
                holderForStartAndLength[0] = pcStart;
                holderForStartAndLength[1] = pcEnd - pcStart;
                return pc;
            }
            holderForStartAndLength[0] = posStart;
            holderForStartAndLength[1] = posEnd - posStart;
            return buf;
        case START_TAG:
        case END_TAG:
        case CDSECT:
        case COMMENT:
        case ENTITY_REF:
        case PROCESSING_INSTRUCTION:
        case IGNORABLE_WHITESPACE:
        case DOCDECL:
            holderForStartAndLength[0] = posStart;
            holderForStartAndLength[1] = posEnd - posStart;
            return buf;
        case START_DOCUMENT:
        case END_DOCUMENT:
            holderForStartAndLength[0] = holderForStartAndLength[1] = -1;
            return null;
        default:
            throw new IllegalArgumentException("unknown text eventType: " + eventType);
    }
}
/**
 * Returns the namespace URI of the current START_TAG or END_TAG element;
 * NO_NAMESPACE when namespace processing is disabled, null for other events.
 */
public String getNamespace() {
    if (eventType != START_TAG && eventType != END_TAG) {
        return null;
    }
    return processNamespaces ? elUri[depth] : NO_NAMESPACE;
}
/**
 * Returns the name of the current event: the element name for
 * START_TAG/END_TAG, the entity name for ENTITY_REF (built lazily and
 * cached), null otherwise.
 */
public String getName() {
    if (eventType == START_TAG || eventType == END_TAG) {
        return elName[depth];
    }
    if (eventType == ENTITY_REF) {
        if (entityRefName == null) {
            // lazily build and cache the entity name from the raw buffer
            entityRefName = newString(buf, posStart, posEnd - posStart);
        }
        return entityRefName;
    }
    return null;
}
/**
 * Returns the element-name stack. NOTE(review): this exposes the internal
 * array without copying — callers are presumably expected not to mutate it;
 * confirm against the declaring interface's contract.
 */
@Override
public String[] getNames() {
    return elName;
}
/**
 * Returns the namespace prefix of the current START_TAG or END_TAG element,
 * or null for other events (and for unprefixed elements).
 */
public String getPrefix() {
    if (eventType == START_TAG || eventType == END_TAG) {
        return elPrefix[depth];
    }
    return null;
}
/**
 * Reports whether the current START_TAG is an empty-element tag
 * ({@code <tag/>}).
 *
 * @throws XmlPullParserException if the parser is not on a START_TAG
 */
public boolean isEmptyElementTag() throws XmlPullParserException {
    if (eventType != START_TAG)
        throw new XmlPullParserException("parser must be on START_TAG to check for empty element", this, null);
    return emptyElementTag;
}
/**
 * Returns the number of attributes on the current START_TAG, or -1 for any
 * other event.
 */
public int getAttributeCount() {
    if (eventType != START_TAG)
        return -1;
    return attributeCount;
}
/**
 * Throws if the parser is not positioned on a START_TAG; only start tags
 * carry attributes. Shared precondition of all per-index attribute accessors.
 */
private void requireStartTag() {
    if (eventType != START_TAG)
        throw new IndexOutOfBoundsException("only START_TAG can have attributes");
}
/**
 * Throws if {@code index} is outside {@code [0, attributeCount)}. Shared
 * bounds check of all per-index attribute accessors.
 */
private void requireAttributeIndex(int index) {
    if (index < 0 || index >= attributeCount)
        throw new IndexOutOfBoundsException("attribute position must be 0.." + (attributeCount - 1) + " and not " + index);
}
/**
 * Returns the namespace URI of the attribute at {@code index};
 * NO_NAMESPACE when namespace processing is disabled (note: in that case the
 * index is deliberately NOT bounds-checked, matching the original behavior).
 */
public String getAttributeNamespace(int index) {
    requireStartTag();
    if (!processNamespaces)
        return NO_NAMESPACE;
    requireAttributeIndex(index);
    return attributeUri[index];
}
/**
 * Returns the (local) name of the attribute at {@code index}.
 */
public String getAttributeName(int index) {
    requireStartTag();
    requireAttributeIndex(index);
    return attributeName[index];
}
/**
 * Returns the prefix of the attribute at {@code index}; null when namespace
 * processing is disabled (index not bounds-checked in that case, matching
 * the original behavior).
 */
public String getAttributePrefix(int index) {
    requireStartTag();
    if (!processNamespaces)
        return null;
    requireAttributeIndex(index);
    return attributePrefix[index];
}
/**
 * Returns the attribute type; always "CDATA" since DOCDECL is not processed.
 */
public String getAttributeType(int index) {
    requireStartTag();
    requireAttributeIndex(index);
    return "CDATA";
}
/**
 * Reports whether the attribute was defaulted from the DTD; always false
 * since DOCDECL is not processed.
 */
public boolean isAttributeDefault(int index) {
    requireStartTag();
    requireAttributeIndex(index);
    return false;
}
/**
 * Returns the value of the attribute at {@code index}.
 */
public String getAttributeValue(int index) {
    requireStartTag();
    requireAttributeIndex(index);
    return attributeValue[index];
}
/**
 * Looks up an attribute value by namespace and name on the current
 * START_TAG.
 *
 * @param namespace attribute namespace; with namespace processing enabled a
 *        null is treated as ""; with it disabled a non-null (non-empty)
 *        namespace is rejected
 * @param name attribute local name, must not be null
 * @return the attribute value, or null if no such attribute exists
 */
public String getAttributeValue(String namespace, String name) {
    if (eventType != START_TAG)
        throw new IndexOutOfBoundsException("only START_TAG can have attributes" + getPositionDescription());
    if (name == null) {
        throw new IllegalArgumentException("attribute name can not be null");
    }
    // TODO make check if namespace is interned!!! etc. for names!!!
    if (processNamespaces) {
        if (namespace == null) {
            namespace = "";
        }
        for (int i = 0; i < attributeCount; ++i) {
            // the == test is an interning fast path before the equals() check
            if ((namespace == attributeUri[i] || namespace.equals(attributeUri[i]))
                // (namespace != null && namespace.equals(attributeUri[ i
                // ]))
                // taking advantage of String.intern()
                && name.equals(attributeName[i])) {
                return attributeValue[i];
            }
        }
    } else {
        // normalize "" to null, then reject any explicit namespace
        if (namespace != null && namespace.isEmpty()) {
            namespace = null;
        }
        if (namespace != null)
            throw new IllegalArgumentException("when namespaces processing is disabled attribute namespace must be null");
        for (int i = 0; i < attributeCount; ++i) {
            if (name.equals(attributeName[i])) {
                return attributeValue[i];
            }
        }
    }
    return null;
}
/**
 * Returns the type of the current event (START_TAG, TEXT, ...).
 */
public int getEventType() throws XmlPullParserException {
    return eventType;
}
/**
 * Verifies that the current event matches the expected type and, optionally,
 * namespace and name; any non-null argument must match.
 *
 * @param type expected event type
 * @param namespace expected namespace, or null to skip the check (requires
 *        namespace processing to be enabled when non-null)
 * @param name expected name, or null to skip the check
 * @throws XmlPullParserException if any requested check fails; the message
 *         lists every mismatching part and the current position
 */
public void require(int type, String namespace, String name) throws XmlPullParserException, IOException {
    if (!processNamespaces && namespace != null) {
        throw new XmlPullParserException(
            "processing namespaces must be enabled on parser (or factory)"
                + " to have possible namespaces declared on elements"
                + (" (position:" + getPositionDescription()) + ")");
    }
    if (type != getEventType() || (namespace != null && !namespace.equals(getNamespace()))
        || (name != null && !name.equals(getName()))) {
        // build a message that mentions only the parts that actually mismatch
        throw new XmlPullParserException(
            "expected event " + TYPES[type] + (name != null ? " with name '" + name + "'" : "")
                + (namespace != null && name != null ? " and" : "")
                + (namespace != null ? " with namespace '" + namespace + "'" : "") + " but got"
                + (type != getEventType() ? " " + TYPES[getEventType()] : "")
                + (name != null && getName() != null && !name.equals(getName())
                    ? " name '" + getName() + "'" : "")
                + (namespace != null && name != null && getName() != null
                    && !name.equals(getName()) && getNamespace() != null
                    && !namespace.equals(getNamespace()) ? " and" : "")
                + (namespace != null && getNamespace() != null && !namespace.equals(getNamespace())
                    ? " namespace '" + getNamespace() + "'" : "")
                + (" (position:" + getPositionDescription()) + ")");
    }
}
/**
 * Skip sub tree that is currently parser positioned on. <br>
 * NOTE: parser must be on START_TAG and when function returns parser will be
 * positioned on corresponding END_TAG.
 */
public void skipSubTree() throws XmlPullParserException, IOException {
    require(START_TAG, null, null);
    // count of start tags still awaiting their matching end tag
    for (int open = 1; open > 0; ) {
        final int token = next();
        if (token == START_TAG) {
            ++open;
        } else if (token == END_TAG) {
            --open;
        }
    }
}
/**
 * Reads the text content of the current element: the parser must be on
 * START_TAG; returns "" for an immediately closed element, otherwise the
 * TEXT content, leaving the parser positioned on the matching END_TAG.
 *
 * @return element text, or "" if the element is empty
 * @throws XmlPullParserException if the parser is not on START_TAG, or the
 *         element content is not plain text followed by END_TAG
 */
public String nextText() throws XmlPullParserException, IOException {
    if (getEventType() != START_TAG) {
        throw new XmlPullParserException("parser must be on START_TAG to read next text", this, null);
    }
    final int token = next();
    if (token == END_TAG) {
        // empty element: <a></a> or <a/>
        return "";
    }
    if (token != TEXT) {
        throw new XmlPullParserException("parser must be on START_TAG or TEXT to read text", this, null);
    }
    final String result = getText();
    if (next() != END_TAG) {
        throw new XmlPullParserException(
            "TEXT must be immediately followed by END_TAG and not " + TYPES[getEventType()], this, null);
    }
    return result;
}
/**
 * Advances to the next START_TAG or END_TAG, skipping one intervening
 * whitespace-only TEXT event.
 *
 * @return the event type reached (START_TAG or END_TAG)
 * @throws XmlPullParserException if something other than a tag (or
 *         whitespace before a tag) is encountered
 */
public int nextTag() throws XmlPullParserException, IOException {
    next();
    if (eventType == TEXT && isWhitespace()) {
        // tolerate ignorable whitespace between tags
        next();
    }
    if (eventType == START_TAG || eventType == END_TAG) {
        return eventType;
    }
    throw new XmlPullParserException("expected START_TAG or END_TAG not " + TYPES[getEventType()], this, null);
}
/**
 * Advances to the next high-level event; lexical events (comments, PIs,
 * CDATA, entity refs) are coalesced/resolved rather than reported.
 */
public int next() throws XmlPullParserException, IOException {
    tokenize = false;
    return nextImpl();
}
/**
 * Advances to the next low-level token; lexical events are reported
 * individually.
 */
public int nextToken() throws XmlPullParserException, IOException {
    tokenize = true;
    return nextImpl();
}
/**
 * Shared implementation behind {@link #next()} and {@link #nextToken()}; the
 * {@code tokenize} flag set by the caller decides whether lexical events
 * (COMMENT, CDSECT, PROCESSING_INSTRUCTION, ENTITY_REF, ...) are reported
 * individually or coalesced into TEXT. Inside an element it scans character
 * data and markup; outside it delegates to prolog/epilog parsing.
 */
protected int nextImpl() throws XmlPullParserException, IOException {
    text = null;
    pcEnd = pcStart = 0;
    usePC = false;
    bufStart = posEnd;
    if (pastEndTag) {
        pastEndTag = false;
        --depth;
        namespaceEnd = elNamespaceCount[depth]; // less namespaces available
    }
    if (emptyElementTag) {
        // second half of <tag/>: report the synthetic END_TAG now
        emptyElementTag = false;
        pastEndTag = true;
        return eventType = END_TAG;
    }
    // [1] document ::= prolog element Misc*
    if (depth > 0) {
        if (seenStartTag) {
            seenStartTag = false;
            return eventType = parseStartTag();
        }
        if (seenEndTag) {
            seenEndTag = false;
            return eventType = parseEndTag();
        }
        // ASSUMPTION: we are _on_ first character of content or markup!!!!
        // [43] content ::= CharData? ((element | Reference | CDSect | PI |
        // Comment) CharData?)*
        char ch;
        if (seenMarkup) { // we have read ahead ...
            seenMarkup = false;
            ch = '<';
        } else if (seenAmpersand) {
            seenAmpersand = false;
            ch = '&';
        } else {
            ch = more();
        }
        posStart = pos - 1; // VERY IMPORTANT: this is correct start of
        // event!!!
        // when true there is some potential event TEXT to return - keep
        // gathering
        boolean hadCharData = false;
        // when true TEXT data is not continual (like <![CDATA[text]]>) and
        // requires PC merging
        boolean needsMerging = false;
        MAIN_LOOP: while (true) {
            // work on MARKUP
            if (ch == '<') {
                if (hadCharData) {
                    // posEnd = pos - 1;
                    if (tokenize) {
                        seenMarkup = true;
                        return eventType = TEXT;
                    }
                }
                ch = more();
                if (ch == '/') {
                    if (!tokenize && hadCharData) {
                        seenEndTag = true;
                        // posEnd = pos - 2;
                        return eventType = TEXT;
                    }
                    return eventType = parseEndTag();
                } else if (ch == '!') {
                    ch = more();
                    if (ch == '-') {
                        // note: if(tokenize == false) posStart/End is NOT
                        // changed!!!!
                        parseComment();
                        if (tokenize)
                            return eventType = COMMENT;
                        if (!usePC && hadCharData) {
                            needsMerging = true;
                        } else {
                            posStart = pos; // completely ignore comment
                        }
                    } else if (ch == '[') {
                        // posEnd = pos - 3;
                        // must remember previous posStart/End as it merges
                        // with content of CDATA
                        // int oldStart = posStart + bufAbsoluteStart;
                        // int oldEnd = posEnd + bufAbsoluteStart;
                        parseCDSect(hadCharData);
                        if (tokenize)
                            return eventType = CDSECT;
                        final int cdStart = posStart;
                        final int cdEnd = posEnd;
                        final int cdLen = cdEnd - cdStart;
                        if (cdLen > 0) { // was there anything inside CDATA
                            // section?
                            hadCharData = true;
                            if (!usePC) {
                                needsMerging = true;
                            }
                        }
                        // posStart = oldStart;
                        // posEnd = oldEnd;
                        // if(cdLen > 0) { // was there anything inside
                        // CDATA section?
                        // if(hadCharData) {
                        // // do merging if there was anything in CDSect!!!!
                        // // if(!usePC) {
                        // // // posEnd is correct already!!!
                        // // if(posEnd > posStart) {
                        // // joinPC();
                        // // } else {
                        // // usePC = true;
                        // // pcStart = pcEnd = 0;
                        // // }
                        // // }
                        // // if(pcEnd + cdLen >= pc.length) ensurePC(pcEnd
                        // + cdLen);
                        // // // copy [cdStart..cdEnd) into PC
                        // // System.arraycopy(buf, cdStart, pc, pcEnd,
                        // cdLen);
                        // // pcEnd += cdLen;
                        // if(!usePC) {
                        // needsMerging = true;
                        // posStart = cdStart;
                        // posEnd = cdEnd;
                        // }
                        // } else {
                        // if(!usePC) {
                        // needsMerging = true;
                        // posStart = cdStart;
                        // posEnd = cdEnd;
                        // hadCharData = true;
                        // }
                        // }
                        // //hadCharData = true;
                        // } else {
                        // if( !usePC && hadCharData ) {
                        // needsMerging = true;
                        // }
                        // }
                    } else {
                        throw new XmlPullParserException("unexpected character in markup " + printable(ch), this, null);
                    }
                } else if (ch == '?') {
                    parsePI();
                    if (tokenize)
                        return eventType = PROCESSING_INSTRUCTION;
                    if (!usePC && hadCharData) {
                        needsMerging = true;
                    } else {
                        posStart = pos; // completely ignore PI
                    }
                } else if (isNameStartChar(ch)) {
                    if (!tokenize && hadCharData) {
                        seenStartTag = true;
                        // posEnd = pos - 2;
                        return eventType = TEXT;
                    }
                    return eventType = parseStartTag();
                } else {
                    throw new XmlPullParserException("unexpected character in markup " + printable(ch), this, null);
                }
                // do content compaction if it makes sense!!!!
            } else if (ch == '&') {
                // work on ENTITTY
                // posEnd = pos - 1;
                if (tokenize && hadCharData) {
                    seenAmpersand = true;
                    return eventType = TEXT;
                }
                final int oldStart = posStart + bufAbsoluteStart;
                final int oldEnd = posEnd + bufAbsoluteStart;
                final char[] resolvedEntity = parseEntityRef();
                if (tokenize)
                    return eventType = ENTITY_REF;
                // check if replacement text can be resolved !!!
                if (resolvedEntity == null) {
                    if (entityRefName == null) {
                        entityRefName = newString(buf, posStart, posEnd - posStart);
                    }
                    throw new XmlPullParserException(
                        "could not resolve entity named '" + printable(entityRefName) + "'", this, null);
                }
                // int entStart = posStart;
                // int entEnd = posEnd;
                posStart = oldStart - bufAbsoluteStart;
                posEnd = oldEnd - bufAbsoluteStart;
                if (!usePC) {
                    if (hadCharData) {
                        joinPC(); // posEnd is already set correctly!!!
                        needsMerging = false;
                    } else {
                        usePC = true;
                        pcStart = pcEnd = 0;
                    }
                }
                // assert usePC == true;
                // write into PC replacement text - do merge for replacement
                // text!!!!
                for (int i = 0; i < resolvedEntity.length; i++) {
                    if (pcEnd >= pc.length)
                        ensurePC(pcEnd);
                    pc[pcEnd++] = resolvedEntity[i];
                }
                hadCharData = true;
                // assert needsMerging == false;
            } else {
                if (needsMerging) {
                    // assert usePC == false;
                    joinPC(); // posEnd is already set correctly!!!
                    // posStart = pos - 1;
                    needsMerging = false;
                }
                // no MARKUP not ENTITIES so work on character data ...
                // [14] CharData ::= [^<&]* - ([^<&]* ']]>' [^<&]*)
                hadCharData = true;
                boolean normalizedCR = false;
                final boolean normalizeInput = !tokenize || !roundtripSupported;
                // use loop locality here!!!!
                boolean seenBracket = false;
                boolean seenBracketBracket = false;
                do {
                    // check that ]]> does not show in
                    if (ch == ']') {
                        if (seenBracket) {
                            seenBracketBracket = true;
                        } else {
                            seenBracket = true;
                        }
                    } else if (seenBracketBracket && ch == '>') {
                        throw new XmlPullParserException("characters ]]> are not allowed in content", this, null);
                    } else {
                        if (seenBracket) {
                            seenBracketBracket = seenBracket = false;
                        }
                        // assert seenTwoBrackets == seenBracket == false;
                    }
                    if (normalizeInput) {
                        // deal with normalization issues ...
                        if (ch == '\r') {
                            normalizedCR = true;
                            posEnd = pos - 1;
                            // posEnd is already is set
                            if (!usePC) {
                                if (posEnd > posStart) {
                                    joinPC();
                                } else {
                                    usePC = true;
                                    pcStart = pcEnd = 0;
                                }
                            }
                            // assert usePC == true;
                            if (pcEnd >= pc.length)
                                ensurePC(pcEnd);
                            pc[pcEnd++] = '\n';
                        } else if (ch == '\n') {
                            // if(!usePC) { joinPC(); } else { if(pcEnd >=
                            // pc.length) ensurePC(); }
                            if (!normalizedCR && usePC) {
                                if (pcEnd >= pc.length)
                                    ensurePC(pcEnd);
                                pc[pcEnd++] = '\n';
                            }
                            normalizedCR = false;
                        } else {
                            if (usePC) {
                                if (pcEnd >= pc.length)
                                    ensurePC(pcEnd);
                                pc[pcEnd++] = ch;
                            }
                            normalizedCR = false;
                        }
                    }
                    ch = more();
                } while (ch != '<' && ch != '&');
                posEnd = pos - 1;
                continue MAIN_LOOP; // skip ch = more() from below - we are
                // alreayd ahead ...
            }
            ch = more();
        } // endless while(true)
    } else {
        if (seenRoot) {
            return parseEpilog();
        } else {
            return parseProlog();
        }
    }
}
/**
 * Parses the document prolog: optional XMLDecl, Misc items (comments,
 * processing instructions, ignorable whitespace) and an optional DOCDECL,
 * up to and including the root element start tag.
 *
 * @return the next event type (IGNORABLE_WHITESPACE, PROCESSING_INSTRUCTION,
 *         DOCDECL, COMMENT when tokenizing, or START_TAG for the root element)
 * @throws XmlPullParserException on malformed prolog content
 * @throws IOException on read errors from the underlying input
 */
protected int parseProlog() throws XmlPullParserException, IOException {
    // [2] prolog: ::= XMLDecl? Misc* (doctypedecl Misc*)? and look for [39]
    // element
    char ch;
    if (seenMarkup) {
        // '<' was already consumed on a previous call - reuse it
        ch = buf[pos - 1];
    } else {
        ch = more();
    }
    if (eventType == START_DOCUMENT) {
        // bootstrap parsing with getting first character input!
        // deal with BOM
        // detect BOM and drop it (Unicode int Order Mark)
        if (ch == '\uFFFE') {
            // 0xFFFE means the input was decoded with the wrong endianness
            throw new XmlPullParserException(
                "first character in input was UNICODE noncharacter (0xFFFE)" + "- input requires int swapping", this,
                null);
        }
        if (ch == '\uFEFF') {
            // skipping UNICODE int Order Mark (so called BOM)
            ch = more();
        }
    }
    seenMarkup = false;
    boolean gotS = false;
    posStart = pos - 1;
    // CR/CRLF are normalized to LF only when tokenizing without roundtrip support
    final boolean normalizeIgnorableWS = tokenize && !roundtripSupported;
    boolean normalizedCR = false;
    while (true) {
        // deal with Misc
        // [27] Misc ::= Comment | PI | S
        // deal with docdecl --> mark it!
        // else parseStartTag seen <[^/]
        if (ch == '<') {
            if (gotS && tokenize) {
                // report accumulated whitespace before handling the markup
                posEnd = pos - 1;
                seenMarkup = true;
                return eventType = IGNORABLE_WHITESPACE;
            }
            ch = more();
            if (ch == '?') {
                // check if it is 'xml'
                // deal with XMLDecl
                if (parsePI()) { // make sure to skip XMLDecl
                    if (tokenize) {
                        return eventType = PROCESSING_INSTRUCTION;
                    }
                } else {
                    // skip over - continue tokenizing
                    posStart = pos;
                    gotS = false;
                }
            } else if (ch == '!') {
                ch = more();
                if (ch == 'D') {
                    if (seenDocdecl) {
                        throw new XmlPullParserException("only one docdecl allowed in XML document", this, null);
                    }
                    seenDocdecl = true;
                    parseDocdecl();
                    if (tokenize)
                        return eventType = DOCDECL;
                } else if (ch == '-') {
                    parseComment();
                    if (tokenize)
                        return eventType = COMMENT;
                } else {
                    throw new XmlPullParserException("unexpected markup <!" + printable(ch), this, null);
                }
            } else if (ch == '/') {
                throw new XmlPullParserException("expected start tag name and not " + printable(ch), this, null);
            } else if (isNameStartChar(ch)) {
                seenRoot = true;
                return parseStartTag();
            } else {
                throw new XmlPullParserException("expected start tag name and not " + printable(ch), this, null);
            }
        } else if (isS(ch)) {
            gotS = true;
            if (normalizeIgnorableWS) {
                if (ch == '\r') {
                    normalizedCR = true;
                    // posEnd = pos -1;
                    // joinPC();
                    // posEnd is already is set
                    if (!usePC) {
                        // switch to the pc buffer, copying chars seen so far if any
                        posEnd = pos - 1;
                        if (posEnd > posStart) {
                            joinPC();
                        } else {
                            usePC = true;
                            pcStart = pcEnd = 0;
                        }
                    }
                    // assert usePC == true;
                    if (pcEnd >= pc.length)
                        ensurePC(pcEnd);
                    pc[pcEnd++] = '\n'; // CR (and an immediately following LF) become one LF
                } else if (ch == '\n') {
                    // suppress the LF of a CRLF pair already normalized above
                    if (!normalizedCR && usePC) {
                        if (pcEnd >= pc.length)
                            ensurePC(pcEnd);
                        pc[pcEnd++] = '\n';
                    }
                    normalizedCR = false;
                } else {
                    if (usePC) {
                        if (pcEnd >= pc.length)
                            ensurePC(pcEnd);
                        pc[pcEnd++] = ch;
                    }
                    normalizedCR = false;
                }
            }
        } else {
            throw new XmlPullParserException(
                "only whitespace content allowed before start tag and not " + printable(ch), this, null);
        }
        ch = more();
    }
}
/**
 * Parses the document epilog (content after the root element's end tag):
 * only Misc items (comments, PIs, whitespace) are allowed until end of input.
 * End of input is detected either via the reachedEnd flag or an EOFException
 * from more(), both of which yield END_DOCUMENT.
 *
 * @return the next event type (IGNORABLE_WHITESPACE, PROCESSING_INSTRUCTION,
 *         DOCDECL, COMMENT when tokenizing, or END_DOCUMENT)
 * @throws XmlPullParserException if non-whitespace content or tags appear in
 *         the epilog, or if called again after END_DOCUMENT was returned
 * @throws IOException on read errors from the underlying input
 */
protected int parseEpilog() throws XmlPullParserException, IOException {
    if (eventType == END_DOCUMENT) {
        throw new XmlPullParserException("already reached end of XML input", this, null);
    }
    if (reachedEnd) {
        return eventType = END_DOCUMENT;
    }
    boolean gotS = false;
    final boolean normalizeIgnorableWS = tokenize && !roundtripSupported;
    boolean normalizedCR = false;
    try {
        // epilog: Misc*
        char ch;
        if (seenMarkup) {
            // '<' was already consumed on a previous call - reuse it
            ch = buf[pos - 1];
        } else {
            ch = more();
        }
        seenMarkup = false;
        posStart = pos - 1;
        if (!reachedEnd) {
            while (true) {
                // deal with Misc
                // [27] Misc ::= Comment | PI | S
                if (ch == '<') {
                    if (gotS && tokenize) {
                        // report accumulated whitespace before handling the markup
                        posEnd = pos - 1;
                        seenMarkup = true;
                        return eventType = IGNORABLE_WHITESPACE;
                    }
                    ch = more();
                    if (reachedEnd) {
                        break;
                    }
                    if (ch == '?') {
                        // check if it is 'xml'
                        // deal with XMLDecl
                        parsePI();
                        if (tokenize)
                            return eventType = PROCESSING_INSTRUCTION;
                    } else if (ch == '!') {
                        ch = more();
                        if (reachedEnd) {
                            break;
                        }
                        if (ch == 'D') {
                            parseDocdecl(); // FIXME
                            if (tokenize)
                                return eventType = DOCDECL;
                        } else if (ch == '-') {
                            parseComment();
                            if (tokenize)
                                return eventType = COMMENT;
                        } else {
                            throw new XmlPullParserException("unexpected markup <!" + printable(ch), this, null);
                        }
                    } else if (ch == '/') {
                        throw new XmlPullParserException(
                            "end tag not allowed in epilog but got " + printable(ch), this, null);
                    } else if (isNameStartChar(ch)) {
                        throw new XmlPullParserException(
                            "start tag not allowed in epilog but got " + printable(ch), this, null);
                    } else {
                        throw new XmlPullParserException(
                            "in epilog expected ignorable content and not " + printable(ch), this, null);
                    }
                } else if (isS(ch)) {
                    gotS = true;
                    if (normalizeIgnorableWS) {
                        if (ch == '\r') {
                            normalizedCR = true;
                            // posEnd = pos -1;
                            // joinPC();
                            // posEnd is alreadys set
                            if (!usePC) {
                                // switch to the pc buffer, copying chars seen so far if any
                                posEnd = pos - 1;
                                if (posEnd > posStart) {
                                    joinPC();
                                } else {
                                    usePC = true;
                                    pcStart = pcEnd = 0;
                                }
                            }
                            // assert usePC == true;
                            if (pcEnd >= pc.length)
                                ensurePC(pcEnd);
                            pc[pcEnd++] = '\n'; // CR (and a following LF) become one LF
                        } else if (ch == '\n') {
                            // suppress the LF of a CRLF pair already normalized above
                            if (!normalizedCR && usePC) {
                                if (pcEnd >= pc.length)
                                    ensurePC(pcEnd);
                                pc[pcEnd++] = '\n';
                            }
                            normalizedCR = false;
                        } else {
                            if (usePC) {
                                if (pcEnd >= pc.length)
                                    ensurePC(pcEnd);
                                pc[pcEnd++] = ch;
                            }
                            normalizedCR = false;
                        }
                    }
                } else {
                    throw new XmlPullParserException(
                        "in epilog non whitespace content is not allowed but got " + printable(ch), this, null);
                }
                ch = more();
                if (reachedEnd) {
                    break;
                }
            }
        }
        // throw Exception("unexpected content in epilog
        // catch EOFException return END_DOCUEMENT
        // try {
    } catch (EOFException ex) {
        // EOF in epilog is the normal way input ends
        reachedEnd = true;
    }
    if (tokenize && gotS) {
        // report trailing whitespace before END_DOCUMENT
        posEnd = pos; // well - this is LAST available character pos
        return eventType = IGNORABLE_WHITESPACE;
    }
    return eventType = END_DOCUMENT;
}
/**
 * Parses an end tag and verifies that its raw name matches the start tag
 * recorded at the current depth (elRawName[depth]). Called with input
 * positioned just past "&lt;/".
 *
 * @return END_TAG (also assigned to eventType)
 * @throws XmlPullParserException if the name is malformed, does not match the
 *         corresponding start tag, or the tag is not terminated by '&gt;'
 * @throws IOException on read errors from the underlying input
 */
public int parseEndTag() throws XmlPullParserException, IOException {
    // ASSUMPTION ch is past "</"
    // [42] ETag ::= '</' Name S? '>'
    char ch = more();
    if (!isNameStartChar(ch)) {
        throw new XmlPullParserException("expected name start and not " + printable(ch), this, null);
    }
    posStart = pos - 3;
    // nameStart is kept as an absolute offset so buffer shifts don't invalidate it
    final int nameStart = pos - 1 + bufAbsoluteStart;
    do {
        ch = more();
    } while (isNameChar(ch));
    // now we go one level down -- do checks
    // --depth; //FIXME
    // check that end tag name is the same as start tag
    // String name = new String(buf, nameStart - bufAbsoluteStart,
    // (pos - 1) - (nameStart - bufAbsoluteStart));
    // int last = pos - 1;
    int off = nameStart - bufAbsoluteStart;
    // final int len = last - off;
    final int len = (pos - 1) - off;
    final char[] cbuf = elRawName[depth];
    if (elRawNameEnd[depth] != len) {
        // length mismatch: cannot be the same name
        // construct strings for exception
        final String startname = new String(cbuf, 0, elRawNameEnd[depth]);
        final String endname = new String(buf, off, len);
        throw new XmlPullParserException(
            "end tag name </" + endname + "> must match start tag name <" + startname + ">" + " from line "
                + elRawNameLine[depth],
            this, null);
    }
    // compare char by char against the recorded start tag raw name;
    // note off is advanced inside the loop (off - i - 1 recovers the name start)
    for (int i = 0; i < len; i++) {
        if (buf[off++] != cbuf[i]) {
            // construct strings for exception
            final String startname = new String(cbuf, 0, len);
            final String endname = new String(buf, off - i - 1, len);
            throw new XmlPullParserException(
                "end tag name </" + endname + "> must be the same as start tag <" + startname + ">" + " from line "
                    + elRawNameLine[depth],
                this,
                null);
        }
    }
    while (isS(ch)) {
        ch = more();
    } // skip additional white spaces
    if (ch != '>') {
        throw new XmlPullParserException(
            "expected > to finish end tag not " + printable(ch) + " from line " + elRawNameLine[depth], this, null);
    }
    // namespaceEnd = elNamespaceCount[ depth ]; //FIXME
    posEnd = pos;
    pastEndTag = true;
    return eventType = END_TAG;
}
/**
 * Parses a start tag (or empty-element tag): element name, attributes,
 * optional namespace processing and attribute-uniqueness checks. Called with
 * input positioned just past '&lt;' plus the first name character. Records
 * the raw tag name at the current depth for later end-tag matching.
 *
 * @return START_TAG (also assigned to eventType)
 * @throws XmlPullParserException on malformed tags, unresolvable namespace
 *         prefixes, or duplicated attributes
 * @throws IOException on read errors from the underlying input
 */
public int parseStartTag() throws XmlPullParserException, IOException {
    // remember starting line number
    startLineNumber = lineNumber;
    // ASSUMPTION ch is past <T
    // [40] STag ::= '<' Name (S Attribute)* S? '>'
    // [44] EmptyElemTag ::= '<' Name (S Attribute)* S? '/>'
    ++depth; // FIXME
    posStart = pos - 2;
    emptyElementTag = false;
    attributeCount = 0;
    // retrieve name
    // nameStart/colonPos are absolute offsets, immune to buffer shifting
    final int nameStart = pos - 1 + bufAbsoluteStart;
    int colonPos = -1;
    char ch = buf[pos - 1];
    if (ch == ':' && processNamespaces)
        throw new XmlPullParserException(
            "when namespaces processing enabled colon can not be at element name start", this, null);
    while (true) {
        ch = more();
        if (!isNameChar(ch))
            break;
        if (ch == ':' && processNamespaces) {
            if (colonPos != -1)
                throw new XmlPullParserException(
                    "only one colon is allowed in name of element when namespaces are enabled", this, null);
            colonPos = pos - 1 + bufAbsoluteStart;
        }
    }
    // retrieve name
    ensureElementsCapacity();
    // TODO check for efficient interning and then use elRawNameInterned!!!!
    int elLen = (pos - 1) - (nameStart - bufAbsoluteStart);
    if (elRawName[depth] == null || elRawName[depth].length < elLen) {
        elRawName[depth] = new char[2 * elLen];
    }
    // keep raw name so parseEndTag can verify matching
    System.arraycopy(buf, nameStart - bufAbsoluteStart, elRawName[depth], 0, elLen);
    elRawNameEnd[depth] = elLen;
    elRawNameLine[depth] = lineNumber;
    // work on prefixes and namespace URI
    String prefix = null;
    if (processNamespaces) {
        if (colonPos != -1) {
            // split "prefix:localName" at the colon
            prefix = elPrefix[depth] = newString(buf, nameStart - bufAbsoluteStart, colonPos - nameStart);
            elName[depth] = newString(buf, colonPos + 1 - bufAbsoluteStart,
                // (pos -1) - (colonPos + 1));
                pos - 2 - (colonPos - bufAbsoluteStart));
        } else {
            elPrefix[depth] = null;
            elName[depth] = newString(buf, nameStart - bufAbsoluteStart, elLen);
        }
    } else {
        elName[depth] = newString(buf, nameStart - bufAbsoluteStart, elLen);
    }
    // attribute loop: runs until '>' or '/>' is seen
    while (true) {
        while (isS(ch)) {
            ch = more();
        } // skip additional white spaces
        if (ch == '>') {
            break;
        } else if (ch == '/') {
            if (emptyElementTag)
                throw new XmlPullParserException("repeated / in tag declaration", this, null);
            emptyElementTag = true;
            ch = more();
            if (ch != '>')
                throw new XmlPullParserException("expected > to end empty tag not " + printable(ch), this, null);
            break;
        } else if (isNameStartChar(ch)) {
            ch = parseAttribute();
            ch = more();
            continue;
        } else {
            throw new XmlPullParserException("start tag unexpected character " + printable(ch), this, null);
        }
        // ch = more(); // skip space
    }
    // now when namespaces were declared we can resolve them
    if (processNamespaces) {
        String uri = getNamespace(prefix);
        if (uri == null) {
            if (prefix == null) { // no prefix and no uri => use default
                // namespace
                uri = NO_NAMESPACE;
            } else {
                throw new XmlPullParserException(
                    "could not determine namespace bound to element prefix " + prefix, this, null);
            }
        }
        elUri[depth] = uri;
        // String uri = getNamespace(prefix);
        // if(uri == null && prefix == null) { // no prefix and no uri =>
        // use default namespace
        // uri = "";
        // }
        // resolve attribute namespaces
        for (int i = 0; i < attributeCount; i++) {
            final String attrPrefix = attributePrefix[i];
            if (attrPrefix != null) {
                final String attrUri = getNamespace(attrPrefix);
                if (attrUri == null) {
                    throw new XmlPullParserException(
                        "could not determine namespace bound to attribute prefix " + attrPrefix, this, null);
                }
                attributeUri[i] = attrUri;
            } else {
                // unprefixed attributes are in no namespace per the NS spec
                attributeUri[i] = NO_NAMESPACE;
            }
        }
        // TODO
        // [ WFC: Unique Att Spec ]
        // check attribute uniqueness constraint for attributes that has
        // namespace!!!
        for (int i = 1; i < attributeCount; i++) {
            for (int j = 0; j < i; j++) {
                // attributeNameHash is a cheap pre-filter when strings are not interned
                if (attributeUri[j] == attributeUri[i]
                    && (allStringsInterned && attributeName[j].equals(attributeName[i])
                        || (!allStringsInterned && attributeNameHash[j] == attributeNameHash[i]
                            && attributeName[j].equals(attributeName[i])))
                ) {
                    // prepare data for nice error message?
                    String attr1 = attributeName[j];
                    if (attributeUri[j] != null)
                        attr1 = attributeUri[j] + ":" + attr1;
                    String attr2 = attributeName[i];
                    if (attributeUri[i] != null)
                        attr2 = attributeUri[i] + ":" + attr2;
                    throw new XmlPullParserException("duplicated attributes " + attr1 + " and " + attr2, this, null);
                }
            }
        }
    } else { // ! processNamespaces
        // [ WFC: Unique Att Spec ]
        // check raw attribute uniqueness constraint!!!
        for (int i = 1; i < attributeCount; i++) {
            for (int j = 0; j < i; j++) {
                if ((allStringsInterned && attributeName[j].equals(attributeName[i])
                    || (!allStringsInterned && attributeNameHash[j] == attributeNameHash[i]
                        && attributeName[j].equals(attributeName[i])))
                ) {
                    // prepare data for nice error message?
                    final String attr1 = attributeName[j];
                    final String attr2 = attributeName[i];
                    throw new XmlPullParserException("duplicated attributes " + attr1 + " and " + attr2, this, null);
                }
            }
        }
    }
    // remember how many namespaces were in scope when this element opened
    elNamespaceCount[depth] = namespaceEnd;
    posEnd = pos;
    return eventType = START_TAG;
}
/**
 * Parses one attribute (name, '=', quoted value) inside a start tag,
 * including attribute-value normalization (tab/CR/LF to space) and entity
 * resolution. Recognizes xmlns / xmlns:prefix declarations and records them
 * as namespaces instead of regular attributes when namespace processing is on.
 *
 * @return the character following the closing quote of the attribute value
 * @throws XmlPullParserException on malformed attributes, unresolvable
 *         entities, or duplicated namespace declarations
 * @throws IOException on read errors from the underlying input
 */
protected char parseAttribute() throws XmlPullParserException, IOException {
    // parse attribute
    // [41] Attribute ::= Name Eq AttValue
    // [WFC: No External Entity References]
    // [WFC: No < in Attribute Values]
    // posStart is clobbered below; remember it (absolute) to restore on exit
    final int prevPosStart = posStart + bufAbsoluteStart;
    final int nameStart = pos - 1 + bufAbsoluteStart;
    int colonPos = -1;
    char ch = buf[pos - 1];
    if (ch == ':' && processNamespaces)
        throw new XmlPullParserException(
            "when namespaces processing enabled colon can not be at attribute name start", this, null);
    // incremental detection of the literal "xmlns" prefix while scanning the name
    boolean startsWithXmlns = processNamespaces && ch == 'x';
    int xmlnsPos = 0;
    ch = more();
    while (isNameChar(ch)) {
        if (processNamespaces) {
            if (startsWithXmlns && xmlnsPos < 5) {
                ++xmlnsPos;
                if (xmlnsPos == 1) {
                    if (ch != 'm')
                        startsWithXmlns = false;
                } else if (xmlnsPos == 2) {
                    if (ch != 'l')
                        startsWithXmlns = false;
                } else if (xmlnsPos == 3) {
                    if (ch != 'n')
                        startsWithXmlns = false;
                } else if (xmlnsPos == 4) {
                    if (ch != 's')
                        startsWithXmlns = false;
                } else {
                    if (ch != ':')
                        throw new XmlPullParserException(
                            "after xmlns in attribute name must be colon" + " when namespaces are enabled", this, null);
                    // colonPos = pos - 1 + bufAbsoluteStart;
                }
            }
            if (ch == ':') {
                if (colonPos != -1)
                    throw new XmlPullParserException(
                        "only one colon is allowed in attribute name" + " when namespaces are enabled", this, null);
                colonPos = pos - 1 + bufAbsoluteStart;
            }
        }
        ch = more();
    }
    ensureAttributesCapacity(attributeCount);
    // --- start processing attributes
    String name = null;
    String prefix = null;
    // work on prefixes and namespace URI
    if (processNamespaces) {
        if (xmlnsPos < 4)
            startsWithXmlns = false; // name was shorter than "xmlns"
        if (startsWithXmlns) {
            if (colonPos != -1) {
                // prefix = attributePrefix[ attributeCount ] = null;
                final int nameLen = pos - 2 - (colonPos - bufAbsoluteStart);
                if (nameLen == 0) {
                    throw new XmlPullParserException(
                        "namespace prefix is required after xmlns: " + " when namespaces are enabled", this, null);
                }
                // for xmlns:foo the declared prefix ("foo") becomes the name
                name = // attributeName[ attributeCount ] =
                    newString(buf, colonPos - bufAbsoluteStart + 1, nameLen);
                // pos - 1 - (colonPos + 1 - bufAbsoluteStart)
            }
        } else {
            if (colonPos != -1) {
                // regular prefixed attribute: split at the colon
                int prefixLen = colonPos - nameStart;
                prefix = attributePrefix[attributeCount] = newString(buf, nameStart - bufAbsoluteStart, prefixLen);
                // colonPos - (nameStart - bufAbsoluteStart));
                int nameLen = pos - 2 - (colonPos - bufAbsoluteStart);
                name = attributeName[attributeCount] = newString(buf, colonPos - bufAbsoluteStart + 1, nameLen);
                // pos - 1 - (colonPos + 1 - bufAbsoluteStart));
                // name.substring(0, colonPos-nameStart);
            } else {
                prefix = attributePrefix[attributeCount] = null;
                name = attributeName[attributeCount]
                    = newString(buf, nameStart - bufAbsoluteStart, pos - 1 - (nameStart - bufAbsoluteStart));
            }
            if (!allStringsInterned) {
                // cache hash for the uniqueness pre-filter in parseStartTag
                attributeNameHash[attributeCount] = name.hashCode();
            }
        }
    } else {
        // retrieve name
        name = attributeName[attributeCount]
            = newString(buf, nameStart - bufAbsoluteStart, pos - 1 - (nameStart - bufAbsoluteStart));
        //// assert name != null;
        if (!allStringsInterned) {
            attributeNameHash[attributeCount] = name.hashCode();
        }
    }
    // [25] Eq ::= S? '=' S?
    while (isS(ch)) {
        ch = more();
    } // skip additional spaces
    if (ch != '=')
        throw new XmlPullParserException("expected = after attribute name", this, null);
    ch = more();
    while (isS(ch)) {
        ch = more();
    } // skip additional spaces
    // [10] AttValue ::= '"' ([^<&"] | Reference)* '"'
    // | "'" ([^<&'] | Reference)* "'"
    final char delimit = ch;
    if (delimit != '"' && delimit != '\'')
        throw new XmlPullParserException(
            "attribute value must start with quotation or apostrophe not " + printable(delimit), this, null);
    // parse until delimit or < and resolve Reference
    // [67] Reference ::= EntityRef | CharRef
    // int valueStart = pos + bufAbsoluteStart;
    boolean normalizedCR = false;
    usePC = false;
    pcStart = pcEnd;
    posStart = pos;
    while (true) {
        ch = more();
        if (ch == delimit) {
            break;
        }
        if (ch == '<') {
            throw new XmlPullParserException("markup not allowed inside attribute value - illegal < ", this, null);
        }
        if (ch == '&') {
            // extractEntityRef
            posEnd = pos - 1;
            if (!usePC) {
                // switch to the pc buffer, copying value chars seen so far if any
                final boolean hadCharData = posEnd > posStart;
                if (hadCharData) {
                    // posEnd is already set correctly!!!
                    joinPC();
                } else {
                    usePC = true;
                    pcStart = pcEnd = 0;
                }
            }
            // assert usePC == true;
            final char[] resolvedEntity = parseEntityRef();
            // check if replacement text can be resolved !!!
            if (resolvedEntity == null) {
                if (entityRefName == null) {
                    entityRefName = newString(buf, posStart, posEnd - posStart);
                }
                throw new XmlPullParserException(
                    "could not resolve entity named '" + printable(entityRefName) + "'", this, null);
            }
            // write into PC replacement text - do merge for replacement
            // text!!!!
            for (int i = 0; i < resolvedEntity.length; i++) {
                if (pcEnd >= pc.length)
                    ensurePC(pcEnd);
                pc[pcEnd++] = resolvedEntity[i];
            }
        } else if (ch == '\t' || ch == '\n' || ch == '\r') {
            // do attribute value normalization
            // as described in http://www.w3.org/TR/REC-xml#AVNormalize
            // TODO add test for it form spec ...
            // handle EOL normalization ...
            if (!usePC) {
                posEnd = pos - 1;
                if (posEnd > posStart) {
                    joinPC();
                } else {
                    usePC = true;
                    pcEnd = pcStart = 0;
                }
            }
            // assert usePC == true;
            if (pcEnd >= pc.length)
                ensurePC(pcEnd);
            // whitespace becomes a single space; LF after a normalized CR is dropped
            if (ch != '\n' || !normalizedCR) {
                pc[pcEnd++] = ' '; // '\n';
            }
        } else {
            if (usePC) {
                if (pcEnd >= pc.length)
                    ensurePC(pcEnd);
                pc[pcEnd++] = ch;
            }
        }
        normalizedCR = ch == '\r';
    }
    if (processNamespaces && startsWithXmlns) {
        // this attribute is a namespace declaration, not a regular attribute
        String ns;
        if (!usePC) {
            ns = newStringIntern(buf, posStart, pos - 1 - posStart);
        } else {
            ns = newStringIntern(pc, pcStart, pcEnd - pcStart);
        }
        ensureNamespacesCapacity(namespaceEnd);
        int prefixHash = -1;
        if (colonPos != -1) {
            if (ns.isEmpty()) {
                throw new XmlPullParserException(
                    "non-default namespace can not be declared to be empty string", this, null);
            }
            // declare new namespace
            namespacePrefix[namespaceEnd] = name;
            if (!allStringsInterned) {
                prefixHash = namespacePrefixHash[namespaceEnd] = name.hashCode();
            }
        } else {
            // declare new default namespace ...
            namespacePrefix[namespaceEnd] = null; // ""; //null; //TODO
            // check FIXME Alek
            if (!allStringsInterned) {
                prefixHash = namespacePrefixHash[namespaceEnd] = -1;
            }
        }
        namespaceUri[namespaceEnd] = ns;
        // detect duplicate namespace declarations!!!
        // only declarations on the current element (>= startNs) are duplicates
        final int startNs = elNamespaceCount[depth - 1];
        for (int i = namespaceEnd - 1; i >= startNs; --i) {
            if (((allStringsInterned || name == null) && namespacePrefix[i] == name)
                || (!allStringsInterned && name != null && namespacePrefixHash[i] == prefixHash
                    && name.equals(namespacePrefix[i]))) {
                final String s = name == null ? "default" : "'" + name + "'";
                throw new XmlPullParserException("duplicated namespace declaration for " + s + " prefix", this, null);
            }
        }
        ++namespaceEnd;
    } else {
        // regular attribute: record its (possibly normalized) value
        if (!usePC) {
            attributeValue[attributeCount] = new String(buf, posStart, pos - 1 - posStart);
        } else {
            attributeValue[attributeCount] = new String(pc, pcStart, pcEnd - pcStart);
        }
        ++attributeCount;
    }
    // restore posStart for the enclosing start-tag parse
    posStart = prevPosStart - bufAbsoluteStart;
    return ch;
}
protected char[] charRefOneCharBuf = new char[1];
/**
 * Parses an entity reference, positioned just after '&amp;'. Handles
 * character references (&amp;#ddd; and &amp;#xhhh;), the five predefined XML
 * entities, and user-declared entities (via lookuEntityReplacement).
 *
 * @return the replacement characters, or null if the entity name is unknown
 *         (entityRefName is then left for the caller's error reporting)
 * @throws XmlPullParserException on malformed references
 * @throws IOException on read errors from the underlying input
 */
protected char[] parseEntityRef() throws XmlPullParserException, IOException {
    // entity reference
    // http://www.w3.org/TR/2000/REC-xml-20001006#NT-Reference
    // [67] Reference ::= EntityRef | CharRef
    // ASSUMPTION just after &
    entityRefName = null;
    posStart = pos;
    char ch = more();
    if (ch == '#') {
        // parse character reference
        char charRef = 0;
        ch = more();
        if (ch == 'x') {
            // encoded in hex
            while (true) {
                ch = more();
                if (ch >= '0' && ch <= '9') {
                    charRef = (char) (charRef * 16 + (ch - '0'));
                } else if (ch >= 'a' && ch <= 'f') {
                    charRef = (char) (charRef * 16 + (ch - ('a' - 10)));
                } else if (ch >= 'A' && ch <= 'F') {
                    charRef = (char) (charRef * 16 + (ch - ('A' - 10)));
                } else if (ch == ';') {
                    break;
                } else {
                    throw new XmlPullParserException(
                        "character reference (with hex value) may not contain " + printable(ch), this, null);
                }
            }
        } else {
            // encoded in decimal
            while (true) {
                if (ch >= '0' && ch <= '9') {
                    charRef = (char) (charRef * 10 + (ch - '0'));
                } else if (ch == ';') {
                    break;
                } else {
                    throw new XmlPullParserException(
                        "character reference (with decimal value) may not contain " + printable(ch), this, null);
                }
                ch = more();
            }
        }
        posEnd = pos - 1;
        charRefOneCharBuf[0] = charRef;
        if (tokenize) {
            text = newString(charRefOneCharBuf, 0, 1);
        }
        return charRefOneCharBuf;
    } else {
        // [68] EntityRef ::= '&' Name ';'
        // scan name until ;
        if (!isNameStartChar(ch)) {
            throw new XmlPullParserException(
                "entity reference names can not start with character '" + printable(ch) + "'", this, null);
        }
        while (true) {
            ch = more();
            if (ch == ';') {
                break;
            }
            if (!isNameChar(ch)) {
                throw new XmlPullParserException(
                    "entity reference name can not contain character " + printable(ch) + "'", this, null);
            }
        }
        posEnd = pos - 1;
        // determine what name maps to
        // the five predefined entities are matched inline char-by-char to avoid
        // allocating a String for the common case
        final int len = posEnd - posStart;
        if (len == 2 && buf[posStart] == 'l' && buf[posStart + 1] == 't') {
            if (tokenize) {
                text = "<";
            }
            charRefOneCharBuf[0] = '<';
            return charRefOneCharBuf;
            // if(paramPC || isParserTokenizing) {
            // if(pcEnd >= pc.length) ensurePC();
            // pc[pcEnd++] = '<';
            // }
        } else if (len == 3 && buf[posStart] == 'a' && buf[posStart + 1] == 'm' && buf[posStart + 2] == 'p') {
            if (tokenize) {
                text = "&";
            }
            charRefOneCharBuf[0] = '&';
            return charRefOneCharBuf;
        } else if (len == 2 && buf[posStart] == 'g' && buf[posStart + 1] == 't') {
            if (tokenize) {
                text = ">";
            }
            charRefOneCharBuf[0] = '>';
            return charRefOneCharBuf;
        } else if (len == 4 && buf[posStart] == 'a' && buf[posStart + 1] == 'p' && buf[posStart + 2] == 'o'
            && buf[posStart + 3] == 's') {
            if (tokenize) {
                text = "'";
            }
            charRefOneCharBuf[0] = '\'';
            return charRefOneCharBuf;
        } else if (len == 4 && buf[posStart] == 'q' && buf[posStart + 1] == 'u' && buf[posStart + 2] == 'o'
            && buf[posStart + 3] == 't') {
            if (tokenize) {
                text = "\"";
            }
            charRefOneCharBuf[0] = '"';
            return charRefOneCharBuf;
        } else {
            // fall back to entities declared in the internal DTD subset
            final char[] result = lookuEntityReplacement(len);
            if (result != null) {
                return result;
            }
        }
        if (tokenize)
            text = null;
        return null;
    }
}
/**
 * Looks up a user-declared entity whose name equals buf[posStart..posEnd)
 * among the declarations collected so far, searching the most recently
 * declared first.
 *
 * @param entitNameLen length of the entity name in buf
 * @return the entity's replacement text buffer, or null when unknown
 */
protected char[] lookuEntityReplacement(int entitNameLen) throws XmlPullParserException, IOException {
    if (allStringsInterned) {
        // interning is enforced for newString, so identity comparison suffices
        entityRefName = newString(buf, posStart, posEnd - posStart);
        for (int idx = entityEnd - 1; idx >= 0; --idx) {
            if (entityRefName == entityName[idx]) {
                if (tokenize) {
                    text = entityReplacement[idx];
                }
                return entityReplacementBuf[idx];
            }
        }
    } else {
        // cheap hash + length pre-filter, then char-by-char compare (no allocation)
        final int hash = fastHash(buf, posStart, posEnd - posStart);
        for (int idx = entityEnd - 1; idx >= 0; --idx) {
            if (hash != entityNameHash[idx] || entitNameLen != entityNameBuf[idx].length) {
                continue;
            }
            final char[] candidate = entityNameBuf[idx];
            boolean sameName = true;
            for (int k = 0; k < entitNameLen; k++) {
                if (buf[posStart + k] != candidate[k]) {
                    sameName = false;
                    break;
                }
            }
            if (sameName) {
                if (tokenize) {
                    text = entityReplacement[idx];
                }
                return entityReplacementBuf[idx];
            }
        }
    }
    return null;
}
/**
 * Parses a comment, positioned just past "&lt;!-". Scans until "--&gt;",
 * rejecting "--" inside the comment body per the XML spec, and normalizes
 * line ends when tokenizing without roundtrip support.
 *
 * @throws XmlPullParserException on "--" inside the comment or unclosed comment
 * @throws IOException on read errors from the underlying input
 */
protected void parseComment() throws XmlPullParserException, IOException {
    // implements XML 1.0 Section 2.5 Comments
    // ASSUMPTION: seen <!-
    char ch = more();
    if (ch != '-')
        throw new XmlPullParserException("expected <!-- for comment start", this, null);
    if (tokenize)
        posStart = pos;
    // remember where the comment began for the unclosed-comment error message
    final int curLine = lineNumber;
    final int curColumn = columnNumber;
    try {
        final boolean normalizeIgnorableWS = tokenize && !roundtripSupported;
        boolean normalizedCR = false;
        // small state machine tracking '-' and '--' to find the "-->" terminator
        boolean seenDash = false;
        boolean seenDashDash = false;
        while (true) {
            // scan until it hits -->
            ch = more();
            if (seenDashDash && ch != '>') {
                // [ WFC ] "--" may only appear as part of "-->"
                throw new XmlPullParserException(
                    "in comment after two dashes (--) next character must be >" + " not " + printable(ch), this, null);
            }
            if (ch == '-') {
                if (!seenDash) {
                    seenDash = true;
                } else {
                    seenDashDash = true;
                    seenDash = false;
                }
            } else if (ch == '>') {
                if (seenDashDash) {
                    break; // found end sequence!!!!
                } else {
                    seenDashDash = false;
                }
                seenDash = false;
            } else {
                seenDash = false;
            }
            if (normalizeIgnorableWS) {
                if (ch == '\r') {
                    normalizedCR = true;
                    // posEnd = pos -1;
                    // joinPC();
                    // posEnd is already set
                    if (!usePC) {
                        // switch to the pc buffer, copying chars seen so far if any
                        posEnd = pos - 1;
                        if (posEnd > posStart) {
                            joinPC();
                        } else {
                            usePC = true;
                            pcStart = pcEnd = 0;
                        }
                    }
                    // assert usePC == true;
                    if (pcEnd >= pc.length)
                        ensurePC(pcEnd);
                    pc[pcEnd++] = '\n'; // CR (and a following LF) become one LF
                } else if (ch == '\n') {
                    // suppress the LF of a CRLF pair already normalized above
                    if (!normalizedCR && usePC) {
                        if (pcEnd >= pc.length)
                            ensurePC(pcEnd);
                        pc[pcEnd++] = '\n';
                    }
                    normalizedCR = false;
                } else {
                    if (usePC) {
                        if (pcEnd >= pc.length)
                            ensurePC(pcEnd);
                        pc[pcEnd++] = ch;
                    }
                    normalizedCR = false;
                }
            }
        }
    } catch (EOFException ex) {
        // detect EOF and create meaningful error ...
        throw new XmlPullParserException(
            "comment started on line " + curLine + " and column " + curColumn + " was not closed", this, ex);
    }
    if (tokenize) {
        // exclude the trailing "-->" from the reported comment text
        posEnd = pos - 3;
        if (usePC) {
            pcEnd -= 2;
        }
    }
}
/**
 * Parses a processing instruction, positioned just past "&lt;?". If the
 * target is "xml" (lowercase, at document start) the XML declaration is
 * parsed instead and false is returned so callers skip it; otherwise true
 * is returned for a regular PI.
 *
 * @return false when the construct was the XMLDecl, true for a regular PI
 * @throws XmlPullParserException on malformed or unclosed PI, a reserved
 *         target, or a wrongly-cased XMLDecl
 * @throws IOException on read errors from the underlying input
 */
protected boolean parsePI() throws XmlPullParserException, IOException {
    // implements XML 1.0 Section 2.6 Processing Instructions
    // [16] PI ::= '<?' PITarget (S (Char* - (Char* '?>' Char*)))? '?>'
    // [17] PITarget ::= Name - (('X' | 'x') ('M' | 'm') ('L' | 'l'))
    // ASSUMPTION: seen <?
    if (tokenize)
        posStart = pos;
    // remember where the PI began for the unclosed-PI error message
    final int curLine = lineNumber;
    final int curColumn = columnNumber;
    // absolute offsets so buffer shifts during parsing don't invalidate them
    int piTargetStart = pos + bufAbsoluteStart;
    int piTargetEnd = -1;
    final boolean normalizeIgnorableWS = tokenize && !roundtripSupported;
    boolean normalizedCR = false;
    try {
        boolean seenQ = false;
        char ch = more();
        if (isS(ch)) {
            throw new XmlPullParserException(
                "processing instruction PITarget must be exactly after <? and not white space character", this, null);
        }
        while (true) {
            // scan until it hits ?>
            if (ch == '?') {
                seenQ = true;
            } else if (ch == '>') {
                if (seenQ) {
                    break; // found end sequence!!!!
                }
                seenQ = false;
            } else {
                if (piTargetEnd == -1 && isS(ch)) {
                    // first whitespace terminates the PITarget name
                    piTargetEnd = pos - 1 + bufAbsoluteStart;
                    // [17] PITarget ::= Name - (('X' | 'x') ('M' | 'm')
                    // ('L' | 'l'))
                    if ((piTargetEnd - piTargetStart) == 3) {
                        if ((buf[piTargetStart] == 'x' || buf[piTargetStart] == 'X')
                            && (buf[piTargetStart + 1] == 'm' || buf[piTargetStart + 1] == 'M')
                            && (buf[piTargetStart + 2] == 'l' || buf[piTargetStart + 2] == 'L')) {
                            if (piTargetStart > 3) { // <?xml is allowed as
                                // first characters in
                                // input ...
                                throw new XmlPullParserException(
                                    "processing instruction can not have PITarget with reserveld xml name", this, null);
                            } else {
                                // BUGFIX: was '&&' which only rejected the name when ALL
                                // three characters were non-lowercase, letting e.g. <?xMl
                                // through as a valid XMLDecl; any non-lowercase char must fail
                                if (buf[piTargetStart] != 'x' || buf[piTargetStart + 1] != 'm'
                                    || buf[piTargetStart + 2] != 'l') {
                                    throw new XmlPullParserException("XMLDecl must have xml name in lowercase", this, null);
                                }
                            }
                            parseXmlDecl(ch);
                            if (tokenize)
                                posEnd = pos - 2;
                            // capture declaration content between "<?xml" and "?>"
                            final int off = piTargetStart - bufAbsoluteStart + 3;
                            final int len = pos - 2 - off;
                            xmlDeclContent = newString(buf, off, len);
                            return false;
                        }
                    }
                }
                seenQ = false;
            }
            if (normalizeIgnorableWS) {
                if (ch == '\r') {
                    normalizedCR = true;
                    // posEnd is already set
                    if (!usePC) {
                        // switch to the pc buffer, copying chars seen so far if any
                        posEnd = pos - 1;
                        if (posEnd > posStart) {
                            joinPC();
                        } else {
                            usePC = true;
                            pcStart = pcEnd = 0;
                        }
                    }
                    // assert usePC == true;
                    if (pcEnd >= pc.length)
                        ensurePC(pcEnd);
                    pc[pcEnd++] = '\n'; // CR (and a following LF) become one LF
                } else if (ch == '\n') {
                    // suppress the LF of a CRLF pair already normalized above
                    if (!normalizedCR && usePC) {
                        if (pcEnd >= pc.length)
                            ensurePC(pcEnd);
                        pc[pcEnd++] = '\n';
                    }
                    normalizedCR = false;
                } else {
                    if (usePC) {
                        if (pcEnd >= pc.length)
                            ensurePC(pcEnd);
                        pc[pcEnd++] = ch;
                    }
                    normalizedCR = false;
                }
            }
            ch = more();
        }
    } catch (EOFException ex) {
        // detect EOF and create meaningful error ...
        throw new XmlPullParserException(
            "processing instruction started on line " + curLine + " and column " + curColumn + " was not closed", this,
            ex);
    }
    if (piTargetEnd == -1) {
        // PI with no whitespace: the whole content is the target
        piTargetEnd = pos - 2 + bufAbsoluteStart;
        // throw new XmlPullParserException(
        // "processing instruction must have PITarget name", this, null);
    }
    // convert back to buffer-relative offsets for callers
    piTargetStart -= bufAbsoluteStart;
    piTargetEnd -= bufAbsoluteStart;
    if (tokenize) {
        // exclude the trailing "?>" from the reported PI text
        posEnd = pos - 2;
        if (normalizeIgnorableWS) {
            --pcEnd;
        }
    }
    return true;
}
// Keyword tails matched char-by-char via requireInput() while parsing the
// XML declaration. NCODING and TANDALONE omit the first letter because it
// has already been consumed to dispatch on 'e' (encoding) / 's' (standalone).
protected static final char[] VERSION = "version".toCharArray();
protected static final char[] NCODING = "ncoding".toCharArray();
protected static final char[] TANDALONE = "tandalone".toCharArray();
protected static final char[] YES = "yes".toCharArray();
protected static final char[] NO = "no".toCharArray();
/**
 * Parses the XML declaration starting at the whitespace after "&lt;?xml":
 * validates and consumes the mandatory version pseudo-attribute, then
 * delegates the rest (encoding/standalone/"?&gt;") to parseXmlDeclWithVersion.
 * Buffer compaction is disabled for the duration so buffer-relative
 * positions stay stable.
 *
 * @param ch the character just after the "xml" target (expected whitespace)
 * @throws XmlPullParserException on malformed declaration syntax
 * @throws IOException on read errors from the underlying input
 */
protected void parseXmlDecl(char ch) throws XmlPullParserException, IOException {
    // [23] XMLDecl ::= '<?xml' VersionInfo EncodingDecl? SDDecl? S? '?>'
    // first make sure that relative positions will stay OK
    preventBufferCompaction = true;
    bufStart = 0; // necessary to keep pos unchanged during expansion!
    // --- parse VersionInfo
    // [24] VersionInfo ::= S 'version' Eq ("'" VersionNum "'" | '"'
    // VersionNum '"')
    // parse is positioned just on first S past <?xml
    ch = skipS(ch);
    ch = requireInput(ch, VERSION);
    // [25] Eq ::= S? '=' S?
    ch = skipS(ch);
    if (ch != '=') {
        throw new XmlPullParserException("expected equals sign (=) after version and not " + printable(ch), this, null);
    }
    ch = more();
    ch = skipS(ch);
    if (ch != '\'' && ch != '"') {
        throw new XmlPullParserException(
            "expected apostrophe (') or quotation mark (\") after version and not " + printable(ch), this, null);
    }
    // version value must be closed by the same quote character it opened with
    final char quotChar = ch;
    // int versionStart = pos + bufAbsoluteStart; // required if
    // preventBufferCompaction==false
    final int versionStart = pos;
    ch = more();
    // [26] VersionNum ::= ([a-zA-Z0-9_.:] | '-')+
    while (ch != quotChar) {
        if ((ch < 'a' || ch > 'z') && (ch < 'A' || ch > 'Z') && (ch < '0' || ch > '9') && ch != '_' && ch != '.'
            && ch != ':' && ch != '-') {
            throw new XmlPullParserException(
                "<?xml version value expected to be in ([a-zA-Z0-9_.:] | '-')" + " not " + printable(ch), this, null);
        }
        ch = more();
    }
    final int versionEnd = pos - 1;
    parseXmlDeclWithVersion(versionStart, versionEnd);
    preventBufferCompaction = false; // alow again buffer commpaction - pos
    // MAY chnage
}
// protected String xmlDeclVersion;
/**
 * Parses the remainder of the XML declaration after the version value has been
 * scanned into {@code buf[versionStart..versionEnd)}: verifies the version is
 * exactly "1.0", then consumes the optional {@code encoding} and
 * {@code standalone} pseudo-attributes and the closing {@code ?>}.
 *
 * <p>Side effects: sets {@code xmlDeclVersion} and, when the corresponding
 * pseudo-attributes are present, {@code inputEncoding} and
 * {@code xmlDeclStandalone}.
 *
 * @param versionStart index in {@code buf} of the first character of the version value
 * @param versionEnd index in {@code buf} one past the last character of the version value
 * @throws XmlPullParserException if the declaration is malformed or the version is not 1.0
 * @throws IOException on read errors from the underlying reader
 */
protected void parseXmlDeclWithVersion(int versionStart, int versionEnd) throws XmlPullParserException, IOException {
    // only version "1.0" is accepted
    if ((versionEnd - versionStart != 3) || buf[versionStart] != '1' || buf[versionStart + 1] != '.'
        || buf[versionStart + 2] != '0') {
        throw new XmlPullParserException(
            "only 1.0 is supported as <?xml version not '"
                + printable(new String(buf, versionStart, versionEnd - versionStart)) + "'",
            this,
            null);
    }
    xmlDeclVersion = newString(buf, versionStart, versionEnd - versionStart);
    // [80] EncodingDecl ::= S 'encoding' Eq ('"' EncName '"' | "'" EncName "'")
    char ch = more();
    ch = skipS(ch);
    if (ch == 'e') {
        ch = more();
        ch = requireInput(ch, NCODING);
        ch = skipS(ch);
        if (ch != '=') {
            throw new XmlPullParserException(
                "expected equals sign (=) after encoding and not " + printable(ch), this, null);
        }
        ch = more();
        ch = skipS(ch);
        if (ch != '\'' && ch != '"') {
            throw new XmlPullParserException(
                "expected apostrophe (') or quotation mark (\") after encoding and not " + printable(ch), this, null);
        }
        final char quotChar = ch; // remember which quote character opened the value
        final int encodingStart = pos;
        ch = more();
        // [81] EncName ::= [A-Za-z] ([A-Za-z0-9._] | '-')*
        if ((ch < 'a' || ch > 'z') && (ch < 'A' || ch > 'Z')) {
            throw new XmlPullParserException(
                "<?xml encoding name expected to start with [A-Za-z]" + " not " + printable(ch), this, null);
        }
        ch = more();
        while (ch != quotChar) {
            if ((ch < 'a' || ch > 'z') && (ch < 'A' || ch > 'Z') && (ch < '0' || ch > '9') && ch != '.' && ch != '_'
                && ch != '-') {
                throw new XmlPullParserException(
                    "<?xml encoding value expected to be in ([A-Za-z0-9._] | '-')" + " not " + printable(ch), this,
                    null);
            }
            ch = more();
        }
        final int encodingEnd = pos - 1;
        // TODO reconcile with setInput encodingName
        inputEncoding = newString(buf, encodingStart, encodingEnd - encodingStart);
        ch = more();
    }
    ch = skipS(ch);
    // [32] SDDecl ::= S 'standalone' Eq (("'" ('yes' | 'no') "'") | ('"' ('yes' | 'no') '"'))
    if (ch == 's') {
        ch = more();
        ch = requireInput(ch, TANDALONE);
        ch = skipS(ch);
        if (ch != '=') {
            throw new XmlPullParserException(
                "expected equals sign (=) after standalone and not " + printable(ch), this, null);
        }
        ch = more();
        ch = skipS(ch);
        if (ch != '\'' && ch != '"') {
            // BUGFIX: this message previously said "after encoding" (copy-paste from the
            // encoding branch above) although it reports an error in the standalone declaration
            throw new XmlPullParserException(
                "expected apostrophe (') or quotation mark (\") after standalone and not " + printable(ch), this, null);
        }
        final char quotChar = ch;
        ch = more();
        if (ch == 'y') {
            ch = requireInput(ch, YES);
            xmlDeclStandalone = Boolean.TRUE;
        } else if (ch == 'n') {
            ch = requireInput(ch, NO);
            xmlDeclStandalone = Boolean.FALSE;
        } else {
            throw new XmlPullParserException(
                "expected 'yes' or 'no' after standalone and not " + printable(ch), this, null);
        }
        if (ch != quotChar) {
            throw new XmlPullParserException(
                "expected " + quotChar + " after standalone value not " + printable(ch), this, null);
        }
        ch = more();
    }
    ch = skipS(ch);
    if (ch != '?') {
        throw new XmlPullParserException("expected ?> as last part of <?xml not " + printable(ch), this, null);
    }
    ch = more();
    if (ch != '>') {
        throw new XmlPullParserException("expected ?> as last part of <?xml not " + printable(ch), this, null);
    }
    // NOTE: the input stream is deliberately NOT re-wrapped in a new InputStreamReader
    // using the declared encoding: for some stream types (URLConnection, ...) wrapping
    // twice closes the underlying stream. See
    // http://www.extreme.indiana.edu/bugzilla/show_bug.cgi?id=135 for details.
}
/**
 * Parses a document type declaration. ASSUMPTION: the leading "&lt;!D" has
 * already been consumed. Performs a crude scan for the terminating '&gt;',
 * tracking '[' / ']' nesting so a '&gt;' inside the internal subset does not
 * end the declaration, and (when tokenizing without round-trip support)
 * normalizes line ends into the parsed-content buffer.
 * On return {@code posStart..posEnd} spans the declaration content.
 */
protected void parseDocdecl() throws XmlPullParserException, IOException {
    // remaining characters of the "DOCTYPE" keyword ("<!D" was already seen)
    for (final char expected : new char[] { 'O', 'C', 'T', 'Y', 'P', 'E' }) {
        if (more() != expected) {
            throw new XmlPullParserException("expected <!DOCTYPE", this, null);
        }
    }
    posStart = pos;
    // [28] doctypedecl ::= '<!DOCTYPE' S Name (S ExternalID)? S? ('['
    // (markupdecl | DeclSep)* ']' S?)? '>'
    int subsetDepth = 0;
    final boolean normalizeIgnorableWS = tokenize && !roundtripSupported;
    boolean pendingCR = false;
    while (true) {
        final char ch = more();
        if (ch == '[') {
            ++subsetDepth;
        } else if (ch == ']') {
            --subsetDepth;
        } else if (ch == '>' && subsetDepth == 0) {
            break;
        }
        if (!normalizeIgnorableWS) {
            continue;
        }
        // end-of-line normalization: "\r" and "\r\n" both become "\n"
        if (ch == '\r') {
            pendingCR = true;
            if (!usePC) {
                posEnd = pos - 1;
                if (posEnd > posStart) {
                    joinPC();
                } else {
                    usePC = true;
                    pcStart = pcEnd = 0;
                }
            }
            if (pcEnd >= pc.length) {
                ensurePC(pcEnd);
            }
            pc[pcEnd++] = '\n';
        } else if (ch == '\n') {
            // a '\n' right after '\r' was already emitted as the normalized '\n'
            if (!pendingCR && usePC) {
                if (pcEnd >= pc.length) {
                    ensurePC(pcEnd);
                }
                pc[pcEnd++] = '\n';
            }
            pendingCR = false;
        } else {
            if (usePC) {
                if (pcEnd >= pc.length) {
                    ensurePC(pcEnd);
                }
                pc[pcEnd++] = ch;
            }
            pendingCR = false;
        }
    }
    posEnd = pos - 1;
}
/**
 * Parses a CDATA section (XML 1.0 Section 2.7). ASSUMPTION: the leading
 * "&lt;![" has already been consumed; this method consumes the rest through
 * the closing "]]&gt;". When input normalization is active, line ends inside
 * the section are normalized into the parsed-content buffer. On return
 * {@code posStart..posEnd} spans the raw CDATA content (excluding markers).
 *
 * @param hadCharData true when character data preceded this section, so the
 *        already-seen text must be joined into the parsed-content buffer first
 * @throws XmlPullParserException on a malformed start marker or an unterminated section
 * @throws IOException on read errors from the underlying reader
 */
protected void parseCDSect(boolean hadCharData) throws XmlPullParserException, IOException {
    // [18] CDSect ::= CDStart CData CDEnd
    // [19] CDStart ::= '<![CDATA['
    // [20] CData ::= (Char* - (Char* ']]>' Char*))
    // [21] CDEnd ::= ']]>'
    // consume the remaining "CDATA[" of the start marker ("<![" was already seen)
    for (final char expected : new char[] { 'C', 'D', 'A', 'T', 'A', '[' }) {
        if (more() != expected) {
            // BUGFIX: most of these messages previously read "expected <[CDATA[ for
            // comment start" - missing the '!' and misnaming a CDATA section a comment
            throw new XmlPullParserException("expected <![CDATA[ for CDATA section start", this, null);
        }
    }
    final int cdStart = pos + bufAbsoluteStart;
    final int curLine = lineNumber;     // remembered for the unterminated-section error
    final int curColumn = columnNumber;
    final boolean normalizeInput = !tokenize || !roundtripSupported;
    try {
        if (normalizeInput) {
            if (hadCharData) {
                // make sure preceding character data is in the pc buffer so the
                // normalized CDATA content can be appended after it
                if (!usePC) {
                    // posEnd is correct already!!!
                    if (posEnd > posStart) {
                        joinPC();
                    } else {
                        usePC = true;
                        pcStart = pcEnd = 0;
                    }
                }
            }
        }
        boolean seenBracket = false;        // last char was ']'
        boolean seenBracketBracket = false; // last two chars were "]]"
        boolean normalizedCR = false;
        while (true) {
            // scan until the "]]>" end sequence is found
            final char ch = more();
            if (ch == ']') {
                if (!seenBracket) {
                    seenBracket = true;
                } else {
                    seenBracketBracket = true;
                }
            } else if (ch == '>') {
                if (seenBracket && seenBracketBracket) {
                    break; // found end sequence!!!!
                } else {
                    seenBracketBracket = false;
                }
                seenBracket = false;
            } else {
                if (seenBracket) {
                    seenBracket = false;
                }
            }
            if (normalizeInput) {
                // end-of-line normalization: "\r" and "\r\n" both become "\n"
                if (ch == '\r') {
                    normalizedCR = true;
                    posStart = cdStart - bufAbsoluteStart;
                    posEnd = pos - 1; // posEnd is already set
                    if (!usePC) {
                        if (posEnd > posStart) {
                            joinPC();
                        } else {
                            usePC = true;
                            pcStart = pcEnd = 0;
                        }
                    }
                    if (pcEnd >= pc.length)
                        ensurePC(pcEnd);
                    pc[pcEnd++] = '\n';
                } else if (ch == '\n') {
                    // a '\n' right after '\r' was already emitted as the normalized '\n'
                    if (!normalizedCR && usePC) {
                        if (pcEnd >= pc.length)
                            ensurePC(pcEnd);
                        pc[pcEnd++] = '\n';
                    }
                    normalizedCR = false;
                } else {
                    if (usePC) {
                        if (pcEnd >= pc.length)
                            ensurePC(pcEnd);
                        pc[pcEnd++] = ch;
                    }
                    normalizedCR = false;
                }
            }
        }
    } catch (EOFException ex) {
        // report where the unterminated CDATA section began
        throw new XmlPullParserException(
            "CDATA section started on line " + curLine + " and column " + curColumn + " was not closed", this, ex);
    }
    if (normalizeInput) {
        if (usePC) {
            pcEnd = pcEnd - 2; // drop the "]]" that was copied before '>' was seen
        }
    }
    posStart = cdStart - bufAbsoluteStart;
    posEnd = pos - 3; // exclude the "]]>" end marker
}
/**
 * Makes room for and reads more characters into {@code buf} from
 * {@link java.io.Reader#read(char[], int, int)}. When the buffer is past its
 * soft limit it is first compacted (content moved to offset 0) or doubled in
 * size, and all buffer-relative positions are rebased. On a clean end of input
 * inside the epilog it sets {@code reachedEnd}; any other end of input raises
 * an {@link EOFException} describing the still-open elements.
 *
 * @throws XmlPullParserException if no reader has been set
 * @throws IOException on read errors, premature end of input, or an invalid
 *         (negative, non -1) read count
 */
protected void fillBuf() throws IOException, XmlPullParserException {
    if (reader == null)
        throw new XmlPullParserException("reader must be set before parsing is started");
    // past the soft limit: make room by compacting or expanding
    if (bufEnd > bufSoftLimit) {
        boolean compact;
        if (preventBufferCompaction) {
            // a caller needs buffer positions to stay stable; never compact then
            compact = false;
        } else {
            compact = bufStart > bufSoftLimit;
            if (!compact) {
                // compacting is only worthwhile when at least half the buffer can be
                // reclaimed; otherwise expand instead
                if (bufStart >= buf.length / 2) {
                    compact = true;
                }
            }
        }
        if (compact) {
            // TODO: look on trashing
            // //assert bufStart > 0
            System.arraycopy(buf, bufStart, buf, 0, bufEnd - bufStart);
            if (TRACE_SIZING)
                // BUGFIX: read the compacted content from offset 0 (where it was just
                // moved), not from the stale bufStart offset
                System.out.println("TRACE_SIZING fillBuf() compacting " + bufStart + " bufEnd=" + bufEnd + " pos=" + pos
                    + " posStart=" + posStart + " posEnd=" + posEnd
                    + " buf first 100 chars:"
                    + new String(buf, 0, Math.min(bufEnd - bufStart, 100)));
        } else {
            final int newSize = 2 * buf.length;
            final char[] newBuf = new char[newSize];
            if (TRACE_SIZING)
                System.out.println("TRACE_SIZING fillBuf() " + buf.length + " => " + newSize);
            System.arraycopy(buf, bufStart, newBuf, 0, bufEnd - bufStart);
            buf = newBuf;
            if (bufLoadFactor > 0) {
                // recompute the soft limit for the larger buffer (long math avoids overflow)
                bufSoftLimit = (int) ((((long) bufLoadFactor) * buf.length) / 100);
            }
        }
        // content now starts at index 0: rebase every buffer-relative position
        bufEnd -= bufStart;
        pos -= bufStart;
        posStart -= bufStart;
        posEnd -= bufStart;
        bufAbsoluteStart += bufStart;
        bufStart = 0;
        if (TRACE_SIZING)
            System.out.println("TRACE_SIZING fillBuf() after bufEnd=" + bufEnd + " pos=" + pos + " posStart=" + posStart
                + " posEnd=" + posEnd + " buf first 100 chars:"
                + new String(buf, 0, bufEnd < 100 ? bufEnd : 100));
    }
    // at least one character must be read or it is an error
    final int len = Math.min(buf.length - bufEnd, READ_CHUNK_SIZE);
    final int ret = reader.read(buf, bufEnd, len);
    if (ret > 0) {
        bufEnd += ret;
        if (TRACE_SIZING)
            System.out.println("TRACE_SIZING fillBuf() after filling in buffer" + " buf first 100 chars:"
                + new String(buf, 0, bufEnd < 100 ? bufEnd : 100));
        return;
    }
    if (ret == -1) {
        if (bufAbsoluteStart == 0 && pos == 0) {
            throw new EOFException("input contained no data");
        } else {
            if (seenRoot && depth == 0) { // inside parsing epilog!!!
                reachedEnd = true;
                return;
            } else {
                // build a message listing the end tags still expected, innermost first
                StringBuilder expectedTagStack = new StringBuilder();
                if (depth > 0) {
                    expectedTagStack.append(" - expected end tag");
                    if (depth > 1) {
                        expectedTagStack.append("s"); // more than one open element
                    }
                    expectedTagStack.append(" ");
                    for (int i = depth; i > 0; i--) {
                        String tagName = new String(elRawName[i], 0, elRawNameEnd[i]);
                        expectedTagStack.append("</").append(tagName).append('>');
                    }
                    expectedTagStack.append(" to close");
                    for (int i = depth; i > 0; i--) {
                        if (i != depth) {
                            expectedTagStack.append(" and"); // more than one end tag
                        }
                        String tagName = new String(elRawName[i], 0, elRawNameEnd[i]);
                        expectedTagStack.append(" start tag <").append(tagName).append('>');
                        expectedTagStack.append(" from line ").append(elRawNameLine[i]);
                    }
                    expectedTagStack.append(", parser stopped on");
                }
                throw new EOFException("no more data available" + expectedTagStack.toString() + getPositionDescription());
            }
        }
    } else {
        throw new IOException("error reading input, returned " + ret);
    }
}
/**
 * Returns the next input character, refilling the buffer from the reader when
 * it is exhausted, and maintains line/column bookkeeping.
 * The returned value must be ignored by the caller when {@code reachedEnd}
 * became true (used during epilog parsing).
 */
protected char more() throws IOException, XmlPullParserException {
    if (pos >= bufEnd) {
        fillBuf();
        // this return value is ignored by callers once the end was reached in the epilog
        if (reachedEnd) {
            return (char) -1;
        }
    }
    final char next = buf[pos++];
    if (next == '\n') {
        // a newline starts a new line at column 1
        ++lineNumber;
        columnNumber = 1;
    } else {
        ++columnNumber;
    }
    return next;
}
// /**
// * This function returns position of parser in XML input stream
// * (how many <b>characters</b> were processed.
// * <p><b>NOTE:</b> this logical position and not byte offset as encodings
// * such as UTF8 may use more than one byte to encode one character.
// */
// public int getCurrentInputPosition() {
// return pos + bufAbsoluteStart;
// }
/**
 * Grows the parsed-content buffer {@code pc} so that index {@code end} becomes
 * valid, preserving the first {@code pcEnd} characters.
 */
protected void ensurePC(int end) {
    // double the larger of the requested end and the read chunk size
    final int grownSize = 2 * Math.max(end, READ_CHUNK_SIZE);
    final char[] grown = new char[grownSize];
    if (TRACE_SIZING)
        System.out.println("TRACE_SIZING ensurePC() " + pc.length + " ==> " + grownSize + " end=" + end);
    System.arraycopy(pc, 0, grown, 0, pcEnd);
    pc = grown;
}
/**
 * Copies the current raw text span {@code buf[posStart..posEnd)} into the
 * parsed-content buffer and switches the parser to using it
 * ({@code usePC = true}). Expects {@code posEnd > posStart}.
 */
protected void joinPC() {
    final int spanLength = posEnd - posStart;
    final int required = pcEnd + spanLength + 1; // +1 keeps a spare slot for one extra char
    if (required >= pc.length) {
        ensurePC(required);
    }
    System.arraycopy(buf, posStart, pc, pcEnd, spanLength);
    pcEnd += spanLength;
    usePC = true;
}
/**
 * Requires that the input continues with exactly the characters of
 * {@code input}, starting at {@code ch}.
 *
 * @param ch the character to match against {@code input[0]}
 * @param input the expected character sequence
 * @return the first character after the matched sequence
 * @throws XmlPullParserException if any character does not match
 */
protected char requireInput(char ch, char[] input) throws XmlPullParserException, IOException {
    for (final char expected : input) {
        if (ch != expected) {
            throw new XmlPullParserException(
                "expected " + printable(expected) + " in " + new String(input) + " and not " + printable(ch), this,
                null);
        }
        ch = more();
    }
    return ch;
}
/**
 * Requires that the next character is XML white space, then skips any further
 * white space and returns the first non-space character.
 */
protected char requireNextS() throws XmlPullParserException, IOException {
    final char first = more();
    if (!isS(first)) {
        throw new XmlPullParserException("white space is required and not " + printable(first), this, null);
    }
    return skipS(first);
}
/**
 * Skips white space starting at {@code ch}; returns the first non-space
 * character encountered.
 */
protected char skipS(char ch) throws XmlPullParserException, IOException {
    char current = ch;
    while (isS(current)) {
        current = more();
    }
    return current;
}
// nameStart / name lookup tables based on XML 1.1
// http://www.w3.org/TR/2001/WD-xml11-20011213/
// Characters below LOOKUP_MAX (0x400) are classified via the boolean tables below;
// higher code points are approximated by range checks in isNameStartChar()/isNameChar().
protected static final int LOOKUP_MAX = 0x400;
protected static final char LOOKUP_MAX_CHAR = (char) LOOKUP_MAX;
// protected static int lookupNameStartChar[] = new int[ LOOKUP_MAX_CHAR /
// 32 ];
// protected static int lookupNameChar[] = new int[ LOOKUP_MAX_CHAR / 32 ];
// table of characters allowed to start an XML name (index = code point)
protected static boolean[] lookupNameStartChar = new boolean[LOOKUP_MAX];
// table of characters allowed inside an XML name (superset of the start chars)
protected static boolean[] lookupNameChar = new boolean[LOOKUP_MAX];
/** Marks {@code ch} as a valid XML name character (non-initial position). */
private static void setName(char ch) {
    lookupNameChar[ch] = true;
}
/** Marks {@code ch} as valid both at the start of and inside an XML name. */
private static void setNameStart(char ch) {
    lookupNameStartChar[ch] = true;
    setName(ch);
}
static {
    // Populate the lookup tables with the low (< 0x400) name-start and name
    // characters, following the XML 1.1 working draft referenced above.
    setNameStart(':');
    // ASCII letters
    for (char ch = 'A'; ch <= 'Z'; ++ch)
        setNameStart(ch);
    setNameStart('_');
    for (char ch = 'a'; ch <= 'z'; ++ch)
        setNameStart(ch);
    // Latin-1 letters through Latin Extended and IPA extensions.
    // NOTE(review): this contiguous range also admits '\u00d7' and '\u00f7'
    // (multiplication/division signs), which the draft excludes - confirm intended.
    for (char ch = '\u00c0'; ch <= '\u02FF'; ++ch)
        setNameStart(ch);
    // Greek and Coptic (with the draft's gaps at \u037e and \u0400 respected below)
    for (char ch = '\u0370'; ch <= '\u037d'; ++ch)
        setNameStart(ch);
    for (char ch = '\u037f'; ch < '\u0400'; ++ch)
        setNameStart(ch);
    // characters valid only inside a name (not as its first character)
    setName('-');
    setName('.');
    for (char ch = '0'; ch <= '9'; ++ch)
        setName(ch);
    setName('\u00b7'); // middle dot
    // combining diacritical marks
    for (char ch = '\u0300'; ch <= '\u036f'; ++ch)
        setName(ch);
}
// private final static boolean isNameStartChar(char ch) {
/**
 * Tells whether {@code ch} may start an XML name. Code points below
 * {@code LOOKUP_MAX_CHAR} are answered from the precomputed table; higher code
 * points are approximated by broad inclusive ranges.
 */
protected boolean isNameStartChar(char ch) {
    if (ch < LOOKUP_MAX_CHAR) {
        return lookupNameStartChar[ch];
    }
    return ch <= '\u2027'
        || (ch >= '\u202A' && ch <= '\u218F')
        || (ch >= '\u2800' && ch <= '\uFFEF');
}
// private final static boolean isNameChar(char ch) {
/**
 * Tells whether {@code ch} may appear inside an XML name (after the first
 * character). Code points below {@code LOOKUP_MAX_CHAR} are answered from the
 * precomputed table; higher code points are approximated by broad inclusive
 * ranges (same ranges as {@link #isNameStartChar(char)}).
 */
protected boolean isNameChar(char ch) {
    if (ch < LOOKUP_MAX_CHAR) {
        return lookupNameChar[ch];
    }
    return ch <= '\u2027'
        || (ch >= '\u202A' && ch <= '\u218F')
        || (ch >= '\u2800' && ch <= '\uFFEF');
}
/** Tells whether {@code ch} is XML white space: space, LF, CR or tab. */
protected boolean isS(char ch) {
    switch (ch) {
        case ' ':
        case '\n':
        case '\r':
        case '\t':
            return true;
        default:
            return false;
    }
}
// protected boolean isChar(char ch) { return (ch < '\uD800' || ch >
// '\uDFFF')
// ch != '\u0000' ch < '\uFFFE'
// protected char printable(char ch) { return ch; }
/**
 * Renders a character in a form safe for error messages: common control
 * characters become two-character escapes, other characters outside the
 * printable ASCII range become {@code \\uXXXX}, anything else is returned
 * unchanged.
 */
protected String printable(char ch) {
    switch (ch) {
        case '\n':
            return "\\n";
        case '\r':
            return "\\r";
        case '\t':
            return "\\t";
        case '\'':
            return "\\'";
        default:
            if (ch > 127 || ch < 32) {
                return "\\u" + Integer.toHexString((int) ch);
            }
            return String.valueOf(ch);
    }
}
/**
 * Applies {@link #printable(char)} to every character of {@code s}.
 * Returns {@code null} when {@code s} is {@code null}.
 */
protected String printable(String s) {
    if (s == null) {
        return null;
    }
    final StringBuilder escaped = new StringBuilder(s.length() + 10);
    for (final char ch : s.toCharArray()) {
        escaped.append(printable(ch));
    }
    return escaped.toString();
}
}
/*
* Indiana University Extreme! Lab Software License, Version 1.2 Copyright (C)
* 2003 The Trustees of Indiana University. All rights reserved. Redistribution
* and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met: 1) All
* redistributions of source code must retain the above copyright notice, the
* list of authors in the original source code, this list of conditions and the
* disclaimer listed in this license; 2) All redistributions in binary form must
* reproduce the above copyright notice, this list of conditions and the
* disclaimer listed in this license in the documentation and/or other materials
* provided with the distribution; 3) Any documentation included with all
* redistributions must include the following acknowledgement: "This product
* includes software developed by the Indiana University Extreme! Lab. For
* further information please visit http://www.extreme.indiana.edu/"
* Alternatively, this acknowledgment may appear in the software itself, and
* wherever such third-party acknowledgments normally appear. 4) The name
* "Indiana University" or "Indiana University Extreme! Lab" shall not be used
* to endorse or promote products derived from this software without prior
* written permission from Indiana University. For written permission, please
* contact http://www.extreme.indiana.edu/. 5) Products derived from this
* software may not use "Indiana University" name nor may "Indiana University"
* appear in their name, without prior written permission of the Indiana
* University. Indiana University provides no reassurances that the source code
* provided does not infringe the patent or any other intellectual property
* rights of any other entity. Indiana University disclaims any liability to any
* recipient for claims brought by any other entity based on infringement of
* intellectual property rights or otherwise. LICENSEE UNDERSTANDS THAT SOFTWARE
* IS PROVIDED "AS IS" FOR WHICH NO WARRANTIES AS TO CAPABILITIES OR ACCURACY
* ARE MADE. INDIANA UNIVERSITY GIVES NO WARRANTIES AND MAKES NO REPRESENTATION
* THAT SOFTWARE IS FREE OF INFRINGEMENT OF THIRD PARTY PATENT, COPYRIGHT, OR
* OTHER PROPRIETARY RIGHTS. INDIANA UNIVERSITY MAKES NO WARRANTIES THAT
* SOFTWARE IS FREE FROM "BUGS", "VIRUSES", "TROJAN HORSES", "TRAP
* DOORS", "WORMS", OR OTHER HARMFUL CODE. LICENSEE ASSUMES THE ENTIRE RISK AS
* TO THE PERFORMANCE OF SOFTWARE AND/OR ASSOCIATED MATERIALS, AND TO THE
* PERFORMANCE AND VALIDITY OF INFORMATION GENERATED USING SOFTWARE.
*/
| MXParser |
java | apache__camel | components/camel-csv/src/main/java/org/apache/camel/dataformat/csv/CsvMarshaller.java | {
"start": 1386,
"end": 1444
} | class ____ data into a CSV format.
*/
public abstract | marshal |
java | apache__flink | flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/message/statement/CompleteStatementResponseBody.java | {
"start": 1322,
"end": 1798
} | class ____ implements ResponseBody {
private static final String FIELD_NAME_CANDIDATES = "candidates";
@JsonProperty(FIELD_NAME_CANDIDATES)
private final List<String> candidates;
@JsonCreator
public CompleteStatementResponseBody(
@JsonProperty(FIELD_NAME_CANDIDATES) List<String> candidates) {
this.candidates = candidates;
}
public List<String> getCandidates() {
return candidates;
}
}
| CompleteStatementResponseBody |
java | redisson__redisson | redisson/src/main/java/org/redisson/RedissonTransferQueue.java | {
"start": 1604,
"end": 1704
} | class ____<V> extends RedissonExpirable implements RTransferQueue<V> {
public | RedissonTransferQueue |
java | micronaut-projects__micronaut-core | jackson-databind/src/main/java/io/micronaut/jackson/serialize/ConvertibleMultiValuesSerializer.java | {
"start": 1043,
"end": 2129
} | class ____ extends ValueSerializer<ConvertibleMultiValues<?>> {
@Override
public boolean isEmpty(SerializationContext provider, ConvertibleMultiValues<?> value) {
return value.isEmpty();
}
@Override
public void serialize(ConvertibleMultiValues<?> value, JsonGenerator gen, SerializationContext serializers) {
gen.writeStartObject();
for (Map.Entry<String, ? extends List<?>> entry : value) {
String fieldName = entry.getKey();
List<?> v = entry.getValue();
int len = v.size();
if (len > 0) {
if (len == 1) {
serializers.defaultSerializeProperty(fieldName, v.get(0), gen);
} else {
gen.writeName(fieldName);
gen.writeStartArray();
for (Object o : v) {
serializers.writeValue(gen, o);
}
gen.writeEndArray();
}
}
}
gen.writeEndObject();
}
}
| ConvertibleMultiValuesSerializer |
java | apache__camel | components/camel-aws/camel-aws2-kinesis/src/main/java/org/apache/camel/component/aws2/kinesis/RecordStringConverter.java | {
"start": 1219,
"end": 1789
} | class ____ {
private RecordStringConverter() {
}
@Converter
public static String toString(Record dataRecord) {
Charset charset = StandardCharsets.UTF_8;
ByteBuffer buffer = dataRecord.data().asByteBuffer();
if (buffer.hasArray()) {
byte[] bytes = dataRecord.data().asByteArray();
return new String(bytes, charset);
} else {
byte[] bytes = new byte[buffer.remaining()];
buffer.get(bytes);
return new String(bytes, charset);
}
}
}
| RecordStringConverter |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/colstats/ColumnStatistics.java | {
"start": 1027,
"end": 1149
} | interface ____ represent column statistics, which is part of
* {@link Statistics}.
*
* @since 3.4.0
*/
@Evolving
public | to |
java | spring-projects__spring-framework | spring-webflux/src/test/java/org/springframework/web/reactive/result/method/annotation/MultipartWebClientIntegrationTests.java | {
"start": 12165,
"end": 12353
} | class ____ {
private String name;
@JsonCreator
public Person(@JsonProperty("name") String name) {
this.name = name;
}
public String getName() {
return name;
}
}
}
| Person |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-jackson2-only/src/test/java/smoketest/jackson2/only/SampleJackson2OnlyWithoutSpringWebApplicationTests.java | {
"start": 1282,
"end": 1773
} | class ____ {
@Test
@SuppressWarnings("unchecked")
void jmxEndpointsShouldWork() throws Exception {
MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
Map<String, Object> result = (Map<String, Object>) mbeanServer.invoke(
ObjectName.getInstance("org.springframework.boot:type=Endpoint,name=Configprops"),
"configurationProperties", new Object[0], null);
assertThat(result).containsOnlyKeys("contexts");
}
}
| SampleJackson2OnlyWithoutSpringWebApplicationTests |
java | google__guava | android/guava-testlib/src/com/google/common/collect/testing/testers/CollectionSerializationTester.java | {
"start": 1435,
"end": 1785
} | class ____<E> extends AbstractCollectionTester<E> {
@CollectionFeature.Require(SERIALIZABLE)
public void testReserialize() {
// For a bare Collection, the most we can guarantee is that the elements are preserved.
assertEqualIgnoringOrder(actualContents(), SerializableTester.reserialize(actualContents()));
}
}
| CollectionSerializationTester |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/generics/WildCardInject.java | {
"start": 732,
"end": 1133
} | class ____ {
// tests injecting field
@Inject
protected ConversionService conversionService;
// tests injecting constructor
public WildCardInject(ConversionService conversionService) {
}
// tests injection method
@Inject
public void setConversionService(ConversionService conversionService) {
this.conversionService = conversionService;
}
}
| WildCardInject |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/security/ssl/SslPrincipalMapper.java | {
"start": 3929,
"end": 8225
} | class ____ {
private static final Pattern BACK_REFERENCE_PATTERN = Pattern.compile("\\$(\\d+)");
private final boolean isDefault;
private final Pattern pattern;
private final String replacement;
private final boolean toLowerCase;
private final boolean toUpperCase;
Rule() {
isDefault = true;
pattern = null;
replacement = null;
toLowerCase = false;
toUpperCase = false;
}
Rule(String pattern, String replacement, boolean toLowerCase, boolean toUpperCase) {
isDefault = false;
this.pattern = pattern == null ? null : Pattern.compile(pattern);
this.replacement = replacement;
this.toLowerCase = toLowerCase;
this.toUpperCase = toUpperCase;
}
String apply(String distinguishedName) {
if (isDefault) {
return distinguishedName;
}
String result = null;
final Matcher m = pattern.matcher(distinguishedName);
if (m.matches()) {
result = distinguishedName.replaceAll(pattern.pattern(), escapeLiteralBackReferences(replacement, m.groupCount()));
}
if (toLowerCase && result != null) {
result = result.toLowerCase(Locale.ENGLISH);
} else if (toUpperCase && result != null) {
result = result.toUpperCase(Locale.ENGLISH);
}
return result;
}
//If we find a back reference that is not valid, then we will treat it as a literal string. For example, if we have 3 capturing
//groups and the Replacement Value has the value is "$1@$4", then we want to treat the $4 as a literal "$4", rather
//than attempting to use it as a back reference.
//This method was taken from Apache Nifi project : org.apache.nifi.authorization.util.IdentityMappingUtil
private String escapeLiteralBackReferences(final String unescaped, final int numCapturingGroups) {
if (numCapturingGroups == 0) {
return unescaped;
}
String value = unescaped;
final Matcher backRefMatcher = BACK_REFERENCE_PATTERN.matcher(value);
while (backRefMatcher.find()) {
final String backRefNum = backRefMatcher.group(1);
if (backRefNum.startsWith("0")) {
continue;
}
int backRefIndex = Integer.parseInt(backRefNum);
// if we have a replacement value like $123, and we have less than 123 capturing groups, then
// we want to truncate the 3 and use capturing group 12; if we have less than 12 capturing groups,
// then we want to truncate the 2 and use capturing group 1; if we don't have a capturing group then
// we want to truncate the 1 and get 0.
while (backRefIndex > numCapturingGroups && backRefIndex >= 10) {
backRefIndex /= 10;
}
if (backRefIndex > numCapturingGroups) {
final StringBuilder sb = new StringBuilder(value.length() + 1);
final int groupStart = backRefMatcher.start(1);
sb.append(value, 0, groupStart - 1);
sb.append("\\");
sb.append(value.substring(groupStart - 1));
value = sb.toString();
}
}
return value;
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder();
if (isDefault) {
buf.append("DEFAULT");
} else {
buf.append("RULE:");
if (pattern != null) {
buf.append(pattern);
}
if (replacement != null) {
buf.append("/");
buf.append(replacement);
}
if (toLowerCase) {
buf.append("/L");
} else if (toUpperCase) {
buf.append("/U");
}
}
return buf.toString();
}
}
}
| Rule |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java | {
"start": 86312,
"end": 125405
} | class ____ implements DeprecationHandler {
private final DeprecationLogger deprecationLogger;
private final String apiKeyId;
private ApiKeyLoggingDeprecationHandler(DeprecationLogger logger, String apiKeyId) {
this.deprecationLogger = logger;
this.apiKeyId = apiKeyId;
}
@Override
public void logRenamedField(String parserName, Supplier<XContentLocation> location, String oldName, String currentName) {
String prefix = parserName == null ? "" : "[" + parserName + "][" + location.get() + "] ";
deprecationLogger.warn(
DeprecationCategory.API,
"api_key_field",
"{}Deprecated field [{}] used in api key [{}], expected [{}] instead",
prefix,
oldName,
apiKeyId,
currentName
);
}
@Override
public void logReplacedField(String parserName, Supplier<XContentLocation> location, String oldName, String replacedName) {
String prefix = parserName == null ? "" : "[" + parserName + "][" + location.get() + "] ";
deprecationLogger.warn(
DeprecationCategory.API,
"api_key_field",
"{}Deprecated field [{}] used in api key [{}], replaced by [{}]",
prefix,
oldName,
apiKeyId,
replacedName
);
}
@Override
public void logRemovedField(String parserName, Supplier<XContentLocation> location, String removedName) {
String prefix = parserName == null ? "" : "[" + parserName + "][" + location.get() + "] ";
deprecationLogger.warn(
DeprecationCategory.API,
"api_key_field",
"{}Deprecated field [{}] used in api key [{}], which is unused and will be removed entirely",
prefix,
removedName,
apiKeyId
);
}
}
/**
* @return `null` if the update is a noop, i.e., if no changes to `currentApiKeyDoc` are required
*/
@Nullable
private IndexRequest maybeBuildIndexRequest(
final VersionedApiKeyDoc currentVersionedDoc,
final Authentication authentication,
final BaseUpdateApiKeyRequest request,
final Set<RoleDescriptor> userRoleDescriptors
) throws IOException {
if (logger.isTraceEnabled()) {
logger.trace(
"Building index request for update of API key doc [{}] with seqNo [{}] and primaryTerm [{}]",
currentVersionedDoc.id(),
currentVersionedDoc.seqNo(),
currentVersionedDoc.primaryTerm()
);
}
final var targetDocVersion = ApiKey.CURRENT_API_KEY_VERSION;
final var currentDocVersion = new ApiKey.Version(currentVersionedDoc.doc().version);
assert currentDocVersion.onOrBefore(targetDocVersion)
: "API key ["
+ currentVersionedDoc.id()
+ "] has version ["
+ currentDocVersion
+ " which is greater than current version ["
+ ApiKey.CURRENT_API_KEY_VERSION
+ "]";
if (logger.isDebugEnabled() && currentDocVersion.before(targetDocVersion)) {
logger.debug(
"API key update for [{}] will update version from [{}] to [{}]",
currentVersionedDoc.id(),
currentDocVersion,
targetDocVersion
);
}
final XContentBuilder builder = maybeBuildUpdatedDocument(
currentVersionedDoc.id(),
currentVersionedDoc.doc(),
targetDocVersion,
authentication,
request,
userRoleDescriptors,
clock
);
final boolean isNoop = builder == null;
return isNoop
? null
: client.prepareIndex(SECURITY_MAIN_ALIAS)
.setId(currentVersionedDoc.id())
.setSource(builder)
.setIfSeqNo(currentVersionedDoc.seqNo())
.setIfPrimaryTerm(currentVersionedDoc.primaryTerm())
.setOpType(DocWriteRequest.OpType.INDEX)
.request();
}
private static void addErrorsForNotFoundApiKeys(
final BulkUpdateApiKeyResponse.Builder responseBuilder,
final Collection<VersionedApiKeyDoc> foundDocs,
final List<String> requestedIds
) {
// Short-circuiting by size is safe: `foundDocs` only contains unique IDs of those requested. Same size here necessarily implies
// same content
if (foundDocs.size() == requestedIds.size()) {
return;
}
final Set<String> foundIds = foundDocs.stream().map(VersionedApiKeyDoc::id).collect(Collectors.toUnmodifiableSet());
for (String id : requestedIds) {
if (foundIds.contains(id) == false) {
responseBuilder.error(id, new ResourceNotFoundException("no API key owned by requesting user found for ID [" + id + "]"));
}
}
}
    /**
     * Invalidate API keys for given realm, user name, API key name and id.
     * @param realmNames realm names
     * @param username username
     * @param apiKeyName API key name
     * @param apiKeyIds API key ids
     * @param includeCrossClusterApiKeys whether to include cross-cluster api keys in the invalidation; if false any cross-cluster api keys
     *                                   will be skipped; skipped API keys will be included in the error details of the response
     * @param invalidateListener listener for {@link InvalidateApiKeyResponse}
     */
    public void invalidateApiKeys(
        String[] realmNames,
        String username,
        String apiKeyName,
        String[] apiKeyIds,
        boolean includeCrossClusterApiKeys,
        ActionListener<InvalidateApiKeyResponse> invalidateListener
    ) {
        ensureEnabled();
        // Refuse a fully unconstrained invalidation: at least one filter criterion must be supplied.
        if ((realmNames == null || realmNames.length == 0)
            && Strings.hasText(username) == false
            && Strings.hasText(apiKeyName) == false
            && (apiKeyIds == null || apiKeyIds.length == 0)) {
            logger.trace("none of the parameters [api key id, api key name, username, realm name] were specified for invalidation");
            invalidateListener.onFailure(
                new IllegalArgumentException("One of [api key id, api key name, username, realm name] must be specified")
            );
        } else {
            // Look up matching keys first (already-invalidated keys are filtered out; expired keys are kept),
            // then issue the bulk invalidation for whatever was found.
            findApiKeysForUserRealmApiKeyIdAndNameCombination(
                realmNames,
                username,
                apiKeyName,
                apiKeyIds,
                true,
                false,
                this::convertSearchHitToApiKeyInfo,
                ActionListener.wrap(apiKeys -> {
                    if (apiKeys.isEmpty()) {
                        // Nothing matched: report an empty (successful) response rather than an error.
                        logger.debug(
                            "No active api keys to invalidate for realms {}, username [{}], api key name [{}] and api key ids {}",
                            Arrays.toString(realmNames),
                            username,
                            apiKeyName,
                            Arrays.toString(apiKeyIds)
                        );
                        invalidateListener.onResponse(InvalidateApiKeyResponse.emptyResponse());
                    } else {
                        indexInvalidation(apiKeys, includeCrossClusterApiKeys, invalidateListener);
                    }
                }, invalidateListener::onFailure)
            );
        }
    }
    /**
     * Runs a scroll search over the security index for API key documents matching {@code boolQuery},
     * parses each hit with {@code hitParser}, and delivers the collected results to {@code listener}.
     *
     * @param boolQuery base query; additional filters are appended to it in place
     * @param filterOutInvalidatedKeys if true, restrict to documents with {@code api_key_invalidated == false}
     * @param filterOutExpiredKeys if true, restrict to documents that either never expire or expire in the future
     * @param hitParser converts each {@link SearchHit} into the caller's result type
     * @param listener receives all parsed results once the scroll completes
     */
    private <T> void findApiKeys(
        final BoolQueryBuilder boolQuery,
        boolean filterOutInvalidatedKeys,
        boolean filterOutExpiredKeys,
        final Function<SearchHit, T> hitParser,
        final ActionListener<Collection<T>> listener
    ) {
        if (filterOutInvalidatedKeys) {
            boolQuery.filter(QueryBuilders.termQuery("api_key_invalidated", false));
        }
        if (filterOutExpiredKeys) {
            // "Not expired" means: expiration_time lies in the future OR the key has no expiration_time field.
            final BoolQueryBuilder expiredQuery = QueryBuilders.boolQuery();
            expiredQuery.should(QueryBuilders.rangeQuery("expiration_time").gt(clock.instant().toEpochMilli()));
            expiredQuery.should(QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery("expiration_time")));
            boolQuery.filter(expiredQuery);
        }
        // Capture the caller's thread context so listener callbacks run with it restored, then switch to the
        // security origin while the search executes.
        final Supplier<ThreadContext.StoredContext> supplier = client.threadPool().getThreadContext().newRestorableContext(false);
        try (ThreadContext.StoredContext ignore = client.threadPool().getThreadContext().stashWithOrigin(SECURITY_ORIGIN)) {
            final SearchRequest request = client.prepareSearch(SECURITY_MAIN_ALIAS)
                .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings))
                .setQuery(boolQuery)
                .setVersion(false)
                .setSize(1000)
                .setFetchSource(true)
                .request();
            securityIndex.forCurrentProject()
                .checkIndexVersionThenExecute(
                    listener::onFailure,
                    () -> ScrollHelper.fetchAllByEntity(
                        client,
                        request,
                        new ContextPreservingActionListener<>(supplier, listener),
                        hitParser
                    )
                );
        }
    }
public static QueryBuilder filterForRealmNames(String[] realmNames) {
if (realmNames == null || realmNames.length == 0) {
return null;
}
if (realmNames.length == 1) {
return QueryBuilders.termQuery("creator.realm", realmNames[0]);
} else {
final BoolQueryBuilder realmsQuery = QueryBuilders.boolQuery();
for (String realmName : realmNames) {
realmsQuery.should(QueryBuilders.termQuery("creator.realm", realmName));
}
realmsQuery.minimumShouldMatch(1);
return realmsQuery;
}
}
    /**
     * Loads the versioned API key documents (doc plus seqNo/primaryTerm) for the given IDs, restricted to
     * keys owned by the authenticated subject. Invalidated and expired keys are NOT filtered out here.
     *
     * @param authentication the (non-API-key) authentication whose owned keys are looked up
     * @param apiKeyIds the document IDs to fetch
     * @param listener receives the matching versioned documents
     */
    private void findVersionedApiKeyDocsForSubject(
        final Authentication authentication,
        final String[] apiKeyIds,
        final ActionListener<Collection<VersionedApiKeyDoc>> listener
    ) {
        assert authentication.isApiKey() == false : "Authentication [" + authentication + "] is an API key, but should not be";
        findApiKeysForUserRealmApiKeyIdAndNameCombination(
            getOwnersRealmNames(authentication),
            authentication.getEffectiveSubject().getUser().principal(),
            null,
            apiKeyIds,
            false,
            false,
            ApiKeyService::convertSearchHitToVersionedApiKeyDoc,
            listener
        );
    }
    /**
     * Builds the API key lookup query from the realm/user/name/id criteria and delegates to
     * {@link #findApiKeys}. Criteria that are null/empty (or the wildcard name {@code "*"}) are ignored.
     *
     * @param realmNames restrict to keys created in any of these realms (optional)
     * @param userName restrict to keys created by this principal (optional)
     * @param apiKeyName restrict by key name; a trailing {@code *} performs a prefix match (optional)
     * @param apiKeyIds restrict to these document IDs (optional)
     * @param filterOutInvalidatedKeys whether to exclude already-invalidated keys
     * @param filterOutExpiredKeys whether to exclude expired keys
     * @param hitParser converts each search hit into the caller's result type
     * @param listener receives the parsed results; an empty list if the security index does not exist
     */
    private <T> void findApiKeysForUserRealmApiKeyIdAndNameCombination(
        String[] realmNames,
        String userName,
        String apiKeyName,
        String[] apiKeyIds,
        boolean filterOutInvalidatedKeys,
        boolean filterOutExpiredKeys,
        Function<SearchHit, T> hitParser,
        ActionListener<Collection<T>> listener
    ) {
        final IndexState projectSecurityIndex = securityIndex.forCurrentProject();
        if (projectSecurityIndex.indexExists() == false) {
            // No index means no keys; this is a successful empty result, not an error.
            listener.onResponse(Collections.emptyList());
        } else if (projectSecurityIndex.isAvailable(SEARCH_SHARDS) == false) {
            listener.onFailure(projectSecurityIndex.getUnavailableReason(SEARCH_SHARDS));
        } else {
            // Every clause below is ANDed via filter(); only non-empty criteria contribute a clause.
            final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery().filter(QueryBuilders.termQuery("doc_type", "api_key"));
            QueryBuilder realmsQuery = filterForRealmNames(realmNames);
            if (realmsQuery != null) {
                boolQuery.filter(realmsQuery);
            }
            if (Strings.hasText(userName)) {
                boolQuery.filter(QueryBuilders.termQuery("creator.principal", userName));
            }
            // "*" alone matches everything, so it is treated the same as no name filter.
            if (Strings.hasText(apiKeyName) && "*".equals(apiKeyName) == false) {
                if (apiKeyName.endsWith("*")) {
                    boolQuery.filter(QueryBuilders.prefixQuery("name", apiKeyName.substring(0, apiKeyName.length() - 1)));
                } else {
                    boolQuery.filter(QueryBuilders.termQuery("name", apiKeyName));
                }
            }
            if (apiKeyIds != null && apiKeyIds.length > 0) {
                boolQuery.filter(QueryBuilders.idsQuery().addIds(apiKeyIds));
            }
            findApiKeys(boolQuery, filterOutInvalidatedKeys, filterOutExpiredKeys, hitParser, listener);
        }
    }
    /**
     * Performs the actual invalidation of a collection of api keys
     *
     * @param apiKeys the api keys to invalidate
     * @param includeCrossClusterApiKeys whether to include cross-cluster api keys in the invalidation; if false any cross-cluster api keys
     *                                   will be skipped; skipped API keys will be included in the error details of the response
     * @param listener the listener to notify upon completion
     */
    private void indexInvalidation(
        Collection<ApiKey> apiKeys,
        boolean includeCrossClusterApiKeys,
        ActionListener<InvalidateApiKeyResponse> listener
    ) {
        maybeStartApiKeyRemover();
        if (apiKeys.isEmpty()) {
            listener.onFailure(new ElasticsearchSecurityException("No api key ids provided for invalidation"));
        } else {
            final BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
            final long invalidationTime = clock.instant().toEpochMilli();
            // Both sets deduplicate the input: each key ID is processed at most once.
            final Set<String> apiKeyIdsToInvalidate = new HashSet<>();
            final Set<String> crossClusterApiKeyIdsToSkip = new HashSet<>();
            final ArrayList<ElasticsearchException> failedRequestResponses = new ArrayList<>();
            for (ApiKey apiKey : apiKeys) {
                final String apiKeyId = apiKey.getId();
                if (apiKeyIdsToInvalidate.contains(apiKeyId) || crossClusterApiKeyIdsToSkip.contains(apiKeyId)) {
                    continue;
                }
                if (false == includeCrossClusterApiKeys && ApiKey.Type.CROSS_CLUSTER.equals(apiKey.getType())) {
                    // Cross-cluster keys are excluded: surface a per-key error instead of invalidating.
                    logger.debug("Skipping invalidation of cross cluster API key [{}]", apiKey);
                    failedRequestResponses.add(cannotInvalidateCrossClusterApiKeyException(apiKeyId));
                    crossClusterApiKeyIdsToSkip.add(apiKeyId);
                } else {
                    // Partial update: flip the invalidated flag and record when it happened.
                    final UpdateRequestBuilder updateRequestBuilder = client.prepareUpdate(SECURITY_MAIN_ALIAS, apiKeyId)
                        .setDoc(Map.of("api_key_invalidated", true, "invalidation_time", invalidationTime));
                    bulkRequestBuilder.add(updateRequestBuilder);
                    apiKeyIdsToInvalidate.add(apiKeyId);
                }
            }
            // noinspection ConstantValue
            assert false == apiKeyIdsToInvalidate.isEmpty() || false == crossClusterApiKeyIdsToSkip.isEmpty()
                : "There are no API keys but that should never happen, original=["
                    + (apiKeys.size() > 10 ? ("size=" + apiKeys.size() + " including " + apiKeys.iterator().next()) : apiKeys)
                    + "], to-invalidate=["
                    + apiKeyIdsToInvalidate
                    + "], to-skip=["
                    + crossClusterApiKeyIdsToSkip
                    + "]";
            // Everything was skipped (all cross-cluster): respond with only the per-key errors, no bulk call.
            if (apiKeyIdsToInvalidate.isEmpty()) {
                listener.onResponse(new InvalidateApiKeyResponse(Collections.emptyList(), Collections.emptyList(), failedRequestResponses));
                return;
            }
            assert bulkRequestBuilder.numberOfActions() > 0
                : "Bulk request has ["
                    + bulkRequestBuilder.numberOfActions()
                    + "] actions, but there are ["
                    + apiKeyIdsToInvalidate.size()
                    + "] api keys to invalidate";
            bulkRequestBuilder.setRefreshPolicy(defaultCreateDocRefreshPolicy(settings));
            securityIndex.forCurrentProject()
                .prepareIndexIfNeededThenExecute(
                    ex -> listener.onFailure(traceLog("prepare security index", ex)),
                    () -> executeAsyncWithOrigin(
                        client.threadPool().getThreadContext(),
                        SECURITY_ORIGIN,
                        bulkRequestBuilder.request(),
                        ActionListener.<BulkResponse>wrap(bulkResponse -> {
                            ArrayList<String> previouslyInvalidated = new ArrayList<>();
                            ArrayList<String> invalidated = new ArrayList<>();
                            // Sort each bulk item into: failed, newly invalidated (UPDATED), or already
                            // invalidated (NOOP — the doc's flag was already true).
                            for (BulkItemResponse bulkItemResponse : bulkResponse.getItems()) {
                                if (bulkItemResponse.isFailed()) {
                                    Throwable cause = bulkItemResponse.getFailure().getCause();
                                    final String failedApiKeyId = bulkItemResponse.getFailure().getId();
                                    traceLog("invalidate api key", failedApiKeyId, cause);
                                    failedRequestResponses.add(new ElasticsearchException("Error invalidating api key", cause));
                                } else {
                                    UpdateResponse updateResponse = bulkItemResponse.getResponse();
                                    if (updateResponse.getResult() == DocWriteResponse.Result.UPDATED) {
                                        logger.debug("Invalidated api key for doc [{}]", updateResponse.getId());
                                        invalidated.add(updateResponse.getId());
                                    } else if (updateResponse.getResult() == DocWriteResponse.Result.NOOP) {
                                        previouslyInvalidated.add(updateResponse.getId());
                                    }
                                }
                            }
                            InvalidateApiKeyResponse result = new InvalidateApiKeyResponse(
                                invalidated,
                                previouslyInvalidated,
                                failedRequestResponses
                            );
                            // Evict the invalidated keys from the auth cache before notifying the caller.
                            clearCache(result, listener);
                        }, e -> {
                            Throwable cause = ExceptionsHelper.unwrapCause(e);
                            traceLog("invalidate api keys", cause);
                            listener.onFailure(e);
                        }),
                        client::bulk
                    )
                );
        }
    }
private ElasticsearchException cannotInvalidateCrossClusterApiKeyException(String apiKeyId) {
return new ElasticsearchSecurityException(
"Cannot invalidate cross-cluster API key ["
+ apiKeyId
+ "]. This requires ["
+ ClusterPrivilegeResolver.MANAGE_SECURITY.name()
+ "] cluster privilege or higher"
);
}
private void buildResponseAndClearCache(
final BulkResponse bulkResponse,
final BulkUpdateApiKeyResponse.Builder responseBuilder,
final ActionListener<BulkUpdateApiKeyResponse> listener
) {
for (BulkItemResponse bulkItemResponse : bulkResponse.getItems()) {
final String apiKeyId = bulkItemResponse.getId();
if (bulkItemResponse.isFailed()) {
responseBuilder.error(
apiKeyId,
new ElasticsearchException("bulk request execution failure", bulkItemResponse.getFailure().getCause())
);
} else {
// Since we made an index request against an existing document, we can't get a NOOP or CREATED here
assert bulkItemResponse.getResponse().getResult() == DocWriteResponse.Result.UPDATED
: "Bulk Item ["
+ bulkItemResponse.getId()
+ "] is ["
+ bulkItemResponse.getResponse().getResult()
+ "] but should be ["
+ DocWriteResponse.Result.UPDATED
+ "]";
responseBuilder.updated(apiKeyId);
}
}
clearApiKeyDocCache(responseBuilder.build(), listener);
}
private static void addLimitedByRoleDescriptors(final XContentBuilder builder, final Set<RoleDescriptor> limitedByRoleDescriptors)
throws IOException {
assert limitedByRoleDescriptors != null;
builder.startObject("limited_by_role_descriptors");
for (RoleDescriptor descriptor : limitedByRoleDescriptors) {
builder.field(descriptor.getName(), (contentBuilder, params) -> descriptor.toXContent(contentBuilder, params, true));
}
builder.endObject();
}
private static void addApiKeyHash(final XContentBuilder builder, final char[] apiKeyHashChars) throws IOException {
byte[] utf8Bytes = null;
try {
utf8Bytes = CharArrays.toUtf8Bytes(apiKeyHashChars);
builder.field("api_key_hash").utf8Value(utf8Bytes, 0, utf8Bytes.length);
} finally {
if (utf8Bytes != null) {
Arrays.fill(utf8Bytes, (byte) 0);
}
}
}
private static void addCreator(final XContentBuilder builder, final Authentication authentication) throws IOException {
final var user = authentication.getEffectiveSubject().getUser();
final var sourceRealm = authentication.getEffectiveSubject().getRealm();
builder.startObject("creator")
.field("principal", user.principal())
.field("full_name", user.fullName())
.field("email", user.email())
.field("metadata", user.metadata())
.field("realm", sourceRealm.getName())
.field("realm_type", sourceRealm.getType());
if (sourceRealm.getDomain() != null) {
builder.field("realm_domain", sourceRealm.getDomain());
}
builder.endObject();
}
private static void addRoleDescriptors(final XContentBuilder builder, final List<RoleDescriptor> keyRoles) throws IOException {
builder.startObject("role_descriptors");
if (keyRoles != null && keyRoles.isEmpty() == false) {
for (RoleDescriptor descriptor : keyRoles) {
builder.field(descriptor.getName(), (contentBuilder, params) -> descriptor.toXContent(contentBuilder, params, true));
}
}
builder.endObject();
}
private void clearCache(InvalidateApiKeyResponse result, ActionListener<InvalidateApiKeyResponse> listener) {
executeClearCacheRequest(
result,
listener,
new ClearSecurityCacheRequest().cacheName("api_key").keys(result.getInvalidatedApiKeys().toArray(String[]::new))
);
}
private void clearApiKeyDocCache(final BulkUpdateApiKeyResponse result, final ActionListener<BulkUpdateApiKeyResponse> listener) {
executeClearCacheRequest(
result,
listener,
new ClearSecurityCacheRequest().cacheName("api_key_doc").keys(result.getUpdated().toArray(String[]::new))
);
}
    /**
     * Fires a clear-security-cache request under the security origin. On success, the original
     * {@code result} is passed through to the listener unchanged; on failure, the listener receives an
     * error instructing the operator to clear the caches manually.
     */
    private <T> void executeClearCacheRequest(T result, ActionListener<T> listener, ClearSecurityCacheRequest clearApiKeyCacheRequest) {
        executeAsyncWithOrigin(client, SECURITY_ORIGIN, ClearSecurityCacheAction.INSTANCE, clearApiKeyCacheRequest, new ActionListener<>() {
            @Override
            public void onResponse(ClearSecurityCacheResponse nodes) {
                // The cache-clear result itself is not interesting to the caller; forward the original result.
                listener.onResponse(result);
            }
            @Override
            public void onFailure(Exception e) {
                logger.error(() -> format("unable to clear API key cache [%s]", clearApiKeyCacheRequest.cacheName()), e);
                listener.onFailure(new ElasticsearchException("clearing the API key cache failed; please clear the caches manually", e));
            }
        });
    }
/**
* Logs an exception concerning a specific api key at TRACE level (if enabled)
*/
private static <E extends Throwable> E traceLog(String action, String identifier, E exception) {
if (logger.isTraceEnabled()) {
if (exception instanceof final ElasticsearchException esEx) {
final Object detail = esEx.getBodyHeader("error_description");
if (detail != null) {
logger.trace(() -> format("Failure in [%s] for id [%s] - [%s]", action, identifier, detail), esEx);
} else {
logger.trace(() -> format("Failure in [%s] for id [%s]", action, identifier), esEx);
}
} else {
logger.trace(() -> format("Failure in [%s] for id [%s]", action, identifier), exception);
}
}
return exception;
}
/**
* Logs an exception at TRACE level (if enabled)
*/
private static <E extends Throwable> E traceLog(String action, E exception) {
if (logger.isTraceEnabled()) {
if (exception instanceof final ElasticsearchException esEx) {
final Object detail = esEx.getBodyHeader("error_description");
if (detail != null) {
logger.trace(() -> format("Failure in [%s] - [%s]", action, detail), esEx);
} else {
logger.trace(() -> "Failure in [" + action + "]", esEx);
}
} else {
logger.trace(() -> "Failure in [" + action + "]", exception);
}
}
return exception;
}
    // pkg scoped for testing
    /** Whether the inactive-keys remover is currently running an expiration pass. */
    boolean isExpirationInProgress() {
        return inactiveApiKeysRemover.isExpirationInProgress();
    }
    // pkg scoped for testing
    /** Timestamp of the remover's most recent run, as reported by the remover itself. */
    long lastTimeWhenApiKeysRemoverWasTriggered() {
        return inactiveApiKeysRemover.getLastRunTimestamp();
    }
    /**
     * Opportunistically schedules the inactive-API-keys remover, but only when the security index's
     * primary shards are available (the remover needs to write to the index).
     */
    private void maybeStartApiKeyRemover() {
        if (securityIndex.forCurrentProject().isAvailable(PRIMARY_SHARDS)) {
            inactiveApiKeysRemover.maybeSubmit(client.threadPool());
        }
    }
    /**
     * Get API key information for given realm, user, API key name and id combination
     * @param realmNames realm names
     * @param username user name
     * @param apiKeyName API key name
     * @param apiKeyIds API key ids
     * @param withLimitedBy whether to parse and return the limited by role descriptors
     * @param activeOnly whether to restrict the result to keys that are neither invalidated nor expired
     * @param listener receives the requested collection of {@link ApiKey}s
     */
    public void getApiKeys(
        String[] realmNames,
        String username,
        String apiKeyName,
        String[] apiKeyIds,
        boolean withLimitedBy,
        boolean activeOnly,
        ActionListener<Collection<ApiKey>> listener
    ) {
        ensureEnabled();
        // activeOnly drives both filters: exclude invalidated keys AND exclude expired keys.
        findApiKeysForUserRealmApiKeyIdAndNameCombination(
            realmNames,
            username,
            apiKeyName,
            apiKeyIds,
            activeOnly,
            activeOnly,
            hit -> convertSearchHitToApiKeyInfo(hit, withLimitedBy),
            ActionListener.wrap(apiKeyInfos -> {
                if (apiKeyInfos.isEmpty() && logger.isDebugEnabled()) {
                    logger.debug(
                        "No API keys found for realms {}, user [{}], API key name [{}], API key IDs {}, and active_only flag [{}]",
                        Arrays.toString(realmNames),
                        username,
                        apiKeyName,
                        Arrays.toString(apiKeyIds),
                        activeOnly
                    );
                }
                listener.onResponse(apiKeyInfos);
            }, listener::onFailure)
        );
    }
    /**
     * Result of a {@link #queryApiKeys} call: the total hit count, the parsed API keys of the returned
     * page, the per-hit sort values (for search-after pagination), and any aggregations computed.
     */
    public record QueryApiKeysResult(
        long total,
        Collection<ApiKey> apiKeyInfos,
        Collection<Object[]> sortValues,
        @Nullable InternalAggregations aggregations
    ) {
        // Shared empty result used when the index is missing or the query matches nothing.
        static final QueryApiKeysResult EMPTY = new QueryApiKeysResult(0, List.of(), List.of(), null);
    }
    /**
     * Executes an arbitrary search request against the security index and converts the hits into
     * {@link ApiKey} objects, reporting total count, sort values and aggregations via the listener.
     *
     * @param searchRequest the prepared search (query, sort, aggs) to run
     * @param withLimitedBy whether to parse and return the limited-by role descriptors per key
     * @param listener receives the {@link QueryApiKeysResult}; an empty result if the index is missing
     */
    public void queryApiKeys(SearchRequest searchRequest, boolean withLimitedBy, ActionListener<QueryApiKeysResult> listener) {
        ensureEnabled();
        final IndexState projectSecurityIndex = securityIndex.forCurrentProject();
        if (projectSecurityIndex.indexExists() == false) {
            // No index means no keys; report an empty (successful) result.
            logger.debug("security index does not exist");
            listener.onResponse(QueryApiKeysResult.EMPTY);
        } else if (projectSecurityIndex.isAvailable(SEARCH_SHARDS) == false) {
            listener.onFailure(projectSecurityIndex.getUnavailableReason(SEARCH_SHARDS));
        } else {
            projectSecurityIndex.checkIndexVersionThenExecute(
                listener::onFailure,
                () -> executeAsyncWithOrigin(
                    client,
                    SECURITY_ORIGIN,
                    TransportSearchAction.TYPE,
                    searchRequest,
                    ActionListener.wrap(searchResponse -> {
                        long total = searchResponse.getHits().getTotalHits().value();
                        if (total == 0) {
                            logger.debug("No api keys found for query [{}]", searchRequest.source().query());
                            listener.onResponse(QueryApiKeysResult.EMPTY);
                            return;
                        }
                        SearchHit[] hits = searchResponse.getHits().getHits();
                        List<ApiKey> apiKeyInfos = Arrays.stream(hits)
                            .map(hit -> convertSearchHitToApiKeyInfo(hit, withLimitedBy))
                            .toList();
                        // Sort values are preserved per hit so callers can paginate with search_after.
                        List<Object[]> sortValues = Arrays.stream(hits).map(SearchHit::getSortValues).toList();
                        listener.onResponse(new QueryApiKeysResult(total, apiKeyInfos, sortValues, searchResponse.getAggregations()));
                    }, listener::onFailure)
                )
            );
        }
    }
    /** Convenience overload: parse a hit into an {@link ApiKey} without the limited-by role descriptors. */
    private ApiKey convertSearchHitToApiKeyInfo(SearchHit hit) {
        return convertSearchHitToApiKeyInfo(hit, false);
    }
    /**
     * Parses a search hit into an {@link ApiKey}, decoding flattened metadata and the role descriptor
     * bytes stored in the document.
     *
     * @param hit the security-index search hit for an api_key document
     * @param withLimitedBy whether to also parse the limited-by role descriptors (never done for
     *                      cross-cluster keys)
     */
    private ApiKey convertSearchHitToApiKeyInfo(SearchHit hit, boolean withLimitedBy) {
        final ApiKeyDoc apiKeyDoc = convertSearchHitToVersionedApiKeyDoc(hit).doc;
        final String apiKeyId = hit.getId();
        final Map<String, Object> metadata = apiKeyDoc.metadataFlattened != null
            ? XContentHelper.convertToMap(apiKeyDoc.metadataFlattened, false, XContentType.JSON).v2()
            : Map.of();
        final List<RoleDescriptor> roleDescriptors = parseRoleDescriptorsBytes(
            apiKeyId,
            apiKeyDoc.roleDescriptorsBytes,
            RoleReference.ApiKeyRoleType.ASSIGNED
        );
        // Cross-cluster keys have no limited-by descriptors to expose, regardless of the flag.
        final List<RoleDescriptor> limitedByRoleDescriptors = (withLimitedBy && apiKeyDoc.type != ApiKey.Type.CROSS_CLUSTER)
            ? parseRoleDescriptorsBytes(apiKeyId, apiKeyDoc.limitedByRoleDescriptorsBytes, RoleReference.ApiKeyRoleType.LIMITED_BY)
            : null;
        return new ApiKey(
            apiKeyDoc.name,
            apiKeyId,
            apiKeyDoc.type,
            Instant.ofEpochMilli(apiKeyDoc.creationTime),
            // -1 is the sentinel for "no expiration" / "never invalidated" in the stored doc.
            apiKeyDoc.expirationTime != -1 ? Instant.ofEpochMilli(apiKeyDoc.expirationTime) : null,
            apiKeyDoc.invalidated,
            apiKeyDoc.invalidation != -1 ? Instant.ofEpochMilli(apiKeyDoc.invalidation) : null,
            (String) apiKeyDoc.creator.get("principal"),
            (String) apiKeyDoc.creator.get("realm"),
            (String) apiKeyDoc.creator.get("realm_type"),
            metadata,
            roleDescriptors,
            limitedByRoleDescriptors,
            apiKeyDoc.certificateIdentity
        );
    }
    /**
     * Parses a hit's JSON source into an {@link ApiKeyDoc} and pairs it with the hit's ID, seqNo and
     * primary term (for optimistic-concurrency updates). IO failures are rethrown unchecked.
     */
    private static VersionedApiKeyDoc convertSearchHitToVersionedApiKeyDoc(SearchHit hit) {
        try (
            XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, hit.getSourceRef(), XContentType.JSON)
        ) {
            return new VersionedApiKeyDoc(ApiKeyDoc.fromXContent(parser), hit.getId(), hit.getSeqNo(), hit.getPrimaryTerm());
        } catch (IOException ex) {
            throw new UncheckedIOException(ex);
        }
    }
private record VersionedApiKeyDoc(ApiKeyDoc doc, String id, long seqNo, long primaryTerm) {}
    /**
     * Builds the removal listener installed on the API key authentication cache. It counts evictions that
     * plausibly result from the cache's size limit and, at most once per monitor interval, warns if the
     * eviction count suggests cache thrashing.
     *
     * @param maximumWeight the cache's configured maximum weight, used to distinguish size-limit evictions
     *                      from other removals
     */
    private RemovalListener<String, ListenableFuture<CachedApiKeyHashResult>> getAuthCacheRemovalListener(int maximumWeight) {
        return notification -> {
            // Only count EVICTED removals while the cache is at/above its weight limit, i.e. those
            // plausibly caused by the size limit rather than explicit invalidation.
            if (RemovalReason.EVICTED == notification.getRemovalReason() && getApiKeyAuthCache().weight() >= maximumWeight) {
                evictionCounter.increment();
                logger.trace(
                    "API key with ID [{}] was evicted from the authentication cache, " + "possibly due to cache size limit",
                    notification.getKey()
                );
                final long last = lastEvictionCheckedAt.get();
                final long now = System.nanoTime();
                // The CAS ensures at most one thread per monitor interval evaluates the eviction rate.
                if (now - last >= EVICTION_MONITOR_INTERVAL_NANOS && lastEvictionCheckedAt.compareAndSet(last, now)) {
                    final long sum = evictionCounter.sum();
                    evictionCounter.add(-sum); // reset by decrease
                    if (sum >= EVICTION_WARNING_THRESHOLD) {
                        logger.warn(
                            "Possible thrashing for API key authentication cache, "
                                + "[{}] eviction due to cache size within last [{}] seconds",
                            sum,
                            EVICTION_MONITOR_INTERVAL_SECONDS
                        );
                    }
                }
            }
        };
    }
    // package private for test
    /** Counter of size-limit evictions from the auth cache; reset each monitor interval. */
    LongAdder getEvictionCounter() {
        return evictionCounter;
    }
    // package private for test
    /** Nano-time of the most recent eviction-rate check (see {@code getAuthCacheRemovalListener}). */
    AtomicLong getLastEvictionCheckedAt() {
        return lastEvictionCheckedAt;
    }
/**
* Returns realm name of the owner user of an API key if the effective user is an API Key.
* If the effective user is not an API key, it just returns the source realm name.
*
* @param authentication {@link Authentication}
* @return realm name
*/
public static String getCreatorRealmName(final Authentication authentication) {
if (authentication.isApiKey() || authentication.isCrossClusterAccess()) {
return (String) authentication.getEffectiveSubject().getMetadata().get(AuthenticationField.API_KEY_CREATOR_REALM_NAME);
} else {
// TODO we should use the effective subject realm here but need to handle the failed lookup scenario, in which the realm may be
// `null`. Since this method is used in audit logging, this requires some care.
if (authentication.isFailedRunAs()) {
return authentication.getAuthenticatingSubject().getRealm().getName();
} else {
return authentication.getEffectiveSubject().getRealm().getName();
}
}
}
    /**
     * Returns the realm names that the username can access resources across.
     * For API key authentications this is the creator realm recorded in the key's metadata; otherwise it
     * is the effective subject's realm, expanded to all realms of its domain when one is configured.
     *
     * @throws IllegalArgumentException if a non-API-key authentication has no effective subject realm
     *         (which only happens when a run-as lookup failed)
     */
    public static String[] getOwnersRealmNames(final Authentication authentication) {
        if (authentication.isApiKey()) {
            return new String[] {
                (String) authentication.getEffectiveSubject().getMetadata().get(AuthenticationField.API_KEY_CREATOR_REALM_NAME) };
        } else {
            final Authentication.RealmRef effectiveSubjectRealm = authentication.getEffectiveSubject().getRealm();
            // The effective subject realm can only be `null` when run-as lookup fails. The owner is always the effective subject, so there
            // is no owner information to return here
            if (effectiveSubjectRealm == null) {
                final var message =
                    "Cannot determine owner realms without an effective subject realm for non-API key authentication object ["
                        + authentication
                        + "]";
                assert false : message;
                throw new IllegalArgumentException(message);
            }
            // A realm domain groups several realms; ownership spans all of them.
            final RealmDomain domain = effectiveSubjectRealm.getDomain();
            if (domain != null) {
                return domain.realms().stream().map(RealmConfig.RealmIdentifier::getName).toArray(String[]::new);
            } else {
                return new String[] { effectiveSubjectRealm.getName() };
            }
        }
    }
/**
* Returns realm type of the owner user of an API key if the effective user is an API Key.
* If the effective user is not an API key, it just returns the source realm type.
*
* @param authentication {@link Authentication}
* @return realm type
*/
public static String getCreatorRealmType(final Authentication authentication) {
if (authentication.isApiKey()) {
return (String) authentication.getEffectiveSubject().getMetadata().get(AuthenticationField.API_KEY_CREATOR_REALM_TYPE);
} else {
// TODO we should use the effective subject realm here but need to handle the failed lookup scenario, in which the realm may be
// `null`. Since this method is used in audit logging, this requires some care.
if (authentication.isFailedRunAs()) {
return authentication.getAuthenticatingSubject().getRealm().getType();
} else {
return authentication.getEffectiveSubject().getRealm().getType();
}
}
}
/**
* If the authentication has type of api_key, returns the metadata associated to the
* API key.
* @param authentication {@link Authentication}
* @return A map for the metadata or an empty map if no metadata is found.
*/
public static Map<String, Object> getApiKeyMetadata(Authentication authentication) {
if (false == authentication.isAuthenticatedAsApiKey()) {
throw new IllegalArgumentException(
"authentication realm must be ["
+ AuthenticationField.API_KEY_REALM_TYPE
+ "], got ["
+ authentication.getEffectiveSubject().getRealm().getType()
+ "]"
);
}
final Object apiKeyMetadata = authentication.getEffectiveSubject().getMetadata().get(AuthenticationField.API_KEY_METADATA_KEY);
if (apiKeyMetadata != null) {
final Tuple<XContentType, Map<String, Object>> tuple = XContentHelper.convertToMap(
(BytesReference) apiKeyMetadata,
false,
XContentType.JSON
);
return tuple.v2();
} else {
return Map.of();
}
}
final | ApiKeyLoggingDeprecationHandler |
java | spring-projects__spring-boot | module/spring-boot-mustache/src/test/java/org/springframework/boot/mustache/autoconfigure/MustacheAutoConfigurationWithoutWebMvcTests.java | {
"start": 1120,
"end": 1209
} | class ____.
*
* @author Andy Wilkinson
*/
@ClassPathExclusions("spring-webmvc-*.jar")
| path |
java | apache__flink | flink-core/src/main/java/org/apache/flink/util/ChildFirstClassLoader.java | {
"start": 1281,
"end": 2171
} | class ____ extends FlinkUserCodeClassLoader {
/**
* The classes that should always go through the parent ClassLoader. This is relevant for Flink
* classes, for example, to avoid loading Flink classes that cross the user-code/system-code
* barrier in the user-code ClassLoader.
*/
private final String[] alwaysParentFirstPatterns;
public ChildFirstClassLoader(
URL[] urls,
ClassLoader parent,
String[] alwaysParentFirstPatterns,
Consumer<Throwable> classLoadingExceptionHandler) {
super(urls, parent, classLoadingExceptionHandler);
this.alwaysParentFirstPatterns = alwaysParentFirstPatterns;
}
@Override
protected Class<?> loadClassWithoutExceptionHandling(String name, boolean resolve)
throws ClassNotFoundException {
// First, check if the | ChildFirstClassLoader |
java | quarkusio__quarkus | extensions/panache/hibernate-reactive-panache/deployment/src/test/java/io/quarkus/hibernate/reactive/panache/test/config/NoConfigTest.java | {
"start": 296,
"end": 857
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.overrideConfigKey("quarkus.datasource.devservices.enabled", "false")
.setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class)
.addAsResource("application-datasource-only.properties", "application.properties"));
@Test
public void testNoConfig() {
// we should be able to start the application, even with no (Hibernate/Panache) configuration at all
}
}
| NoConfigTest |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/interceptor/staticmethods/InterceptedStaticMethodTest.java | {
"start": 3537,
"end": 5215
} | class ____ {
static final AtomicInteger VOID_INTERCEPTIONS = new AtomicInteger();
@AroundInvoke
Object aroundInvoke(InvocationContext ctx) throws Exception {
if (!Modifier.isStatic(ctx.getMethod().getModifiers())) {
throw new AssertionFailedError("Not a static method!");
}
assertNull(ctx.getTarget());
// verify annotations can be inspected
if (ctx.getMethod().getDeclaringClass().getName().equals(Simple.class.getName())) {
assertEquals(1, ctx.getMethod().getAnnotations().length);
assertTrue(ctx.getMethod().isAnnotationPresent(InterceptMe.class));
assertFalse(ctx.getMethod().isAnnotationPresent(WithClassPolicy.class));
assertFalse(ctx.getMethod().isAnnotationPresent(NotNull.class));
if (ctx.getMethod().getName().equals("ping")) {
assertTrue(ctx.getMethod().getParameters()[0].isAnnotationPresent(NotNull.class));
}
}
Object ret = ctx.proceed();
if (ret != null) {
if (ret instanceof String) {
return "OK:" + ctx.proceed();
} else if (ret instanceof Double) {
return 42.0;
} else {
throw new AssertionFailedError("Unsupported return type: " + ret.getClass());
}
} else {
VOID_INTERCEPTIONS.incrementAndGet();
return ret;
}
}
}
@InterceptorBinding
@Target({ TYPE, METHOD })
@Retention(RUNTIME)
@ | SimpleInterceptor |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/bindings/inherited/InheritedBindingOnBeanTest.java | {
"start": 1825,
"end": 1976
} | interface ____ {
}
@Target({ TYPE, METHOD })
@Retention(RUNTIME)
@Documented
@InterceptorBinding
// not @Inherited
@ | FooBinding |
java | google__guice | core/test/com/google/inject/TypeConversionTest.java | {
"start": 6130,
"end": 6292
} | class ____)"
+ " (bound at [unknown source]).",
"Reason: RuntimeException: For input string: \"invalid\"");
}
}
public static | Integer |
java | apache__avro | lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyRecordWriter.java | {
"start": 1233,
"end": 2156
} | class ____<T> extends AvroTrevniRecordWriterBase<AvroKey<T>, NullWritable, T> {
/**
* Constructor.
*
* @param context The TaskAttempContext to supply the writer with information
* form the job configuration
*/
public AvroTrevniKeyRecordWriter(TaskAttemptContext context) throws IOException {
super(context);
}
/** {@inheritDoc} */
@Override
public void write(AvroKey<T> key, NullWritable value) throws IOException, InterruptedException {
writer.write(key.datum());
if (writer.sizeEstimate() >= blockSize) // block full
flush();
}
/** {@inheritDoc} */
@Override
protected Schema initSchema(TaskAttemptContext context) {
boolean isMapOnly = context.getNumReduceTasks() == 0;
return isMapOnly ? AvroJob.getMapOutputKeySchema(context.getConfiguration())
: AvroJob.getOutputKeySchema(context.getConfiguration());
}
}
| AvroTrevniKeyRecordWriter |
java | quarkusio__quarkus | integration-tests/compose-devservices/src/main/java/io/quarkus/it/compose/devservices/postgres/PostgresEndpoint.java | {
"start": 196,
"end": 532
} | class ____ {
@ConfigProperty(name = "postgres.db.name")
String dbName;
@ConfigProperty(name = "postgres.db.port")
String dbPort;
@GET
@Path("/name")
public String dbName() {
return dbName;
}
@GET
@Path("/port")
public String dbPort() {
return dbPort;
}
}
| PostgresEndpoint |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/filter/ForwardRoutingFilterStaticIntegrationTests.java | {
"start": 1319,
"end": 1771
} | class ____ extends BaseWebClientTests {
@Test
public void gatewayRequestsMeterFilterHasTags() {
testClient.get()
.uri("/mydocs")
.exchange()
.expectStatus()
.isOk()
.expectBody(String.class)
.consumeWith(result -> {
assertThat(result.getResponseBody()).contains("Docs 123");
});
}
@EnableAutoConfiguration
@SpringBootConfiguration
@Import(DefaultTestConfig.class)
public static | ForwardRoutingFilterStaticIntegrationTests |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/web/method/annotation/ModelAttributeMethodProcessorTests.java | {
"start": 2926,
"end": 13813
} | class ____ {
private NativeWebRequest request;
private ModelAndViewContainer container;
private ModelAttributeMethodProcessor processor;
private MethodParameter paramNamedValidModelAttr;
private MethodParameter paramErrors;
private MethodParameter paramInt;
private MethodParameter paramModelAttr;
private MethodParameter paramBindingDisabledAttr;
private MethodParameter paramNonSimpleType;
private MethodParameter beanWithConstructorArgs;
private MethodParameter returnParamNamedModelAttr;
private MethodParameter returnParamNonSimpleType;
@BeforeEach
void setup() throws Exception {
this.request = new ServletWebRequest(new MockHttpServletRequest());
this.container = new ModelAndViewContainer();
this.processor = new ModelAttributeMethodProcessor(false);
Method method = ModelAttributeHandler.class.getDeclaredMethod("modelAttribute",
TestBean.class, Errors.class, int.class, TestBean.class,
TestBean.class, TestBean.class, TestBeanWithConstructorArgs.class);
this.paramNamedValidModelAttr = new SynthesizingMethodParameter(method, 0);
this.paramErrors = new SynthesizingMethodParameter(method, 1);
this.paramInt = new SynthesizingMethodParameter(method, 2);
this.paramModelAttr = new SynthesizingMethodParameter(method, 3);
this.paramBindingDisabledAttr = new SynthesizingMethodParameter(method, 4);
this.paramNonSimpleType = new SynthesizingMethodParameter(method, 5);
this.beanWithConstructorArgs = new SynthesizingMethodParameter(method, 6);
method = getClass().getDeclaredMethod("annotatedReturnValue");
this.returnParamNamedModelAttr = new MethodParameter(method, -1);
method = getClass().getDeclaredMethod("notAnnotatedReturnValue");
this.returnParamNonSimpleType = new MethodParameter(method, -1);
}
@Test
void supportedParameters() {
assertThat(this.processor.supportsParameter(this.paramNamedValidModelAttr)).isTrue();
assertThat(this.processor.supportsParameter(this.paramModelAttr)).isTrue();
assertThat(this.processor.supportsParameter(this.paramErrors)).isFalse();
assertThat(this.processor.supportsParameter(this.paramInt)).isFalse();
assertThat(this.processor.supportsParameter(this.paramNonSimpleType)).isFalse();
}
@Test
void supportedParametersInDefaultResolutionMode() {
this.processor = new ModelAttributeMethodProcessor(true);
// Only non-simple types, even if not annotated
assertThat(this.processor.supportsParameter(this.paramNamedValidModelAttr)).isTrue();
assertThat(this.processor.supportsParameter(this.paramErrors)).isTrue();
assertThat(this.processor.supportsParameter(this.paramModelAttr)).isTrue();
assertThat(this.processor.supportsParameter(this.paramNonSimpleType)).isTrue();
assertThat(this.processor.supportsParameter(this.paramInt)).isFalse();
}
@Test
void supportedReturnTypes() {
this.processor = new ModelAttributeMethodProcessor(false);
assertThat(this.processor.supportsReturnType(returnParamNamedModelAttr)).isTrue();
assertThat(this.processor.supportsReturnType(returnParamNonSimpleType)).isFalse();
}
@Test
void supportedReturnTypesInDefaultResolutionMode() {
this.processor = new ModelAttributeMethodProcessor(true);
assertThat(this.processor.supportsReturnType(returnParamNamedModelAttr)).isTrue();
assertThat(this.processor.supportsReturnType(returnParamNonSimpleType)).isTrue();
}
@Test
void bindExceptionRequired() {
assertThat(this.processor.isBindExceptionRequired(null, this.paramNonSimpleType)).isTrue();
assertThat(this.processor.isBindExceptionRequired(null, this.paramNamedValidModelAttr)).isFalse();
}
@Test
void resolveArgumentFromModel() throws Exception {
testGetAttributeFromModel("attrName", this.paramNamedValidModelAttr);
testGetAttributeFromModel("testBean", this.paramModelAttr);
testGetAttributeFromModel("testBean", this.paramNonSimpleType);
}
@Test
void resolveArgumentViaDefaultConstructor() throws Exception {
WebDataBinder dataBinder = new WebRequestDataBinder(null);
dataBinder.setTargetType(ResolvableType.forMethodParameter(this.paramNamedValidModelAttr));
WebDataBinderFactory factory = mock();
given(factory.createBinder(any(), isNull(), eq("attrName"), any())).willReturn(dataBinder);
this.processor.resolveArgument(this.paramNamedValidModelAttr, this.container, this.request, factory);
verify(factory).createBinder(any(), isNull(), eq("attrName"), any());
}
@Test
void resolveArgumentValidation() throws Exception {
String name = "attrName";
Object target = new TestBean();
this.container.addAttribute(name, target);
StubRequestDataBinder dataBinder = new StubRequestDataBinder(target, name);
WebDataBinderFactory factory = mock();
ResolvableType type = ResolvableType.forMethodParameter(this.paramNamedValidModelAttr);
given(factory.createBinder(this.request, target, name, type)).willReturn(dataBinder);
this.processor.resolveArgument(this.paramNamedValidModelAttr, this.container, this.request, factory);
assertThat(dataBinder.isBindInvoked()).isTrue();
assertThat(dataBinder.isValidateInvoked()).isTrue();
}
@Test
void resolveArgumentBindingDisabledPreviously() throws Exception {
String name = "attrName";
Object target = new TestBean();
this.container.addAttribute(name, target);
// Declare binding disabled (for example, via @ModelAttribute method)
this.container.setBindingDisabled(name);
StubRequestDataBinder dataBinder = new StubRequestDataBinder(target, name);
WebDataBinderFactory factory = mock();
ResolvableType type = ResolvableType.forMethodParameter(this.paramNamedValidModelAttr);
given(factory.createBinder(this.request, target, name, type)).willReturn(dataBinder);
this.processor.resolveArgument(this.paramNamedValidModelAttr, this.container, this.request, factory);
assertThat(dataBinder.isBindInvoked()).isFalse();
assertThat(dataBinder.isValidateInvoked()).isTrue();
}
@Test
void resolveArgumentBindingDisabled() throws Exception {
String name = "noBindAttr";
Object target = new TestBean();
this.container.addAttribute(name, target);
StubRequestDataBinder dataBinder = new StubRequestDataBinder(target, name);
WebDataBinderFactory factory = mock();
ResolvableType type = ResolvableType.forMethodParameter(this.paramBindingDisabledAttr);
given(factory.createBinder(this.request, target, name, type)).willReturn(dataBinder);
this.processor.resolveArgument(this.paramBindingDisabledAttr, this.container, this.request, factory);
assertThat(dataBinder.isBindInvoked()).isFalse();
assertThat(dataBinder.isValidateInvoked()).isTrue();
}
@Test
void resolveArgumentBindException() throws Exception {
String name = "testBean";
Object target = new TestBean();
this.container.getModel().addAttribute(target);
StubRequestDataBinder dataBinder = new StubRequestDataBinder(target, name);
dataBinder.getBindingResult().reject("error");
WebDataBinderFactory binderFactory = mock();
ResolvableType type = ResolvableType.forMethodParameter(this.paramNonSimpleType);
given(binderFactory.createBinder(this.request, target, name, type)).willReturn(dataBinder);
assertThatExceptionOfType(MethodArgumentNotValidException.class).isThrownBy(() ->
this.processor.resolveArgument(this.paramNonSimpleType, this.container, this.request, binderFactory));
verify(binderFactory).createBinder(this.request, target, name, type);
}
@Test // SPR-9378
public void resolveArgumentOrdering() throws Exception {
String name = "testBean";
Object testBean = new TestBean(name);
this.container.addAttribute(name, testBean);
this.container.addAttribute(BindingResult.MODEL_KEY_PREFIX + name, testBean);
Object anotherTestBean = new TestBean();
this.container.addAttribute("anotherTestBean", anotherTestBean);
StubRequestDataBinder dataBinder = new StubRequestDataBinder(testBean, name);
WebDataBinderFactory binderFactory = mock();
ResolvableType type = ResolvableType.forMethodParameter(this.paramModelAttr);
given(binderFactory.createBinder(this.request, testBean, name, type)).willReturn(dataBinder);
this.processor.resolveArgument(this.paramModelAttr, this.container, this.request, binderFactory);
Object[] values = this.container.getModel().values().toArray();
assertThat(values[1]).as("Resolved attribute should be updated to be last").isSameAs(testBean);
assertThat(values[2]).as("BindingResult of resolved attr should be last").isSameAs(dataBinder.getBindingResult());
}
@Test
void handleAnnotatedReturnValue() throws Exception {
this.processor.handleReturnValue("expected", this.returnParamNamedModelAttr, this.container, this.request);
assertThat(this.container.getModel().get("modelAttrName")).isEqualTo("expected");
}
@Test
void handleNotAnnotatedReturnValue() throws Exception {
TestBean testBean = new TestBean("expected");
this.processor.handleReturnValue(testBean, this.returnParamNonSimpleType, this.container, this.request);
assertThat(this.container.getModel().get("testBean")).isSameAs(testBean);
}
@Test // gh-25182
public void resolveConstructorListArgumentFromCommaSeparatedRequestParameter() throws Exception {
MockHttpServletRequest mockRequest = new MockHttpServletRequest();
mockRequest.addParameter("listOfStrings", "1,2");
ServletWebRequest requestWithParam = new ServletWebRequest(mockRequest);
WebDataBinderFactory factory = mock();
given(factory.createBinder(any(), any(), eq("testBeanWithConstructorArgs"), any()))
.willAnswer(invocation -> {
WebRequestDataBinder binder = new WebRequestDataBinder(invocation.getArgument(1));
binder.setTargetType(ResolvableType.forMethodParameter(this.beanWithConstructorArgs));
// Add conversion service which will convert "1,2" to a list
binder.setConversionService(new DefaultFormattingConversionService());
return binder;
});
Object resolved = this.processor.resolveArgument(this.beanWithConstructorArgs, this.container, requestWithParam, factory);
assertThat(resolved).isInstanceOf(TestBeanWithConstructorArgs.class);
assertThat(((TestBeanWithConstructorArgs) resolved).listOfStrings).containsExactly("1", "2");
assertThat(((TestBeanWithConstructorArgs) resolved).file).isNull();
}
private void testGetAttributeFromModel(String expectedAttrName, MethodParameter param) throws Exception {
Object target = new TestBean();
this.container.addAttribute(expectedAttrName, target);
WebDataBinder dataBinder = new WebRequestDataBinder(target);
WebDataBinderFactory factory = mock();
ResolvableType type = ResolvableType.forMethodParameter(param);
given(factory.createBinder(this.request, target, expectedAttrName, type)).willReturn(dataBinder);
this.processor.resolveArgument(param, this.container, this.request, factory);
verify(factory).createBinder(this.request, target, expectedAttrName, type);
}
private static | ModelAttributeMethodProcessorTests |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/store/FSStoreOpHandler.java | {
"start": 1799,
"end": 2023
} | class ____ {
private static Map<StoreType, Map<Integer, Class<? extends FSNodeStoreLogOp>>>
editLogOp;
private static Map<StoreType, Class<? extends FSNodeStoreLogOp>> mirrorOp;
/**
* Store Type | FSStoreOpHandler |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/DefaultJSONParserTest_date.java | {
"start": 438,
"end": 3365
} | class ____ extends TestCase {
protected void setUp() throws Exception {
JSON.defaultTimeZone = TimeZone.getTimeZone("Asia/Shanghai");
JSON.defaultLocale = Locale.CHINA;
}
public void test_date() {
String text = "{\"date\":\"2011-01-09T13:49:53.254\"}";
char[] chars = text.toCharArray();
DefaultJSONParser parser = new DefaultJSONParser(chars, chars.length, ParserConfig.getGlobalInstance(), 0);
parser.config(Feature.AllowISO8601DateFormat, true);
JSONObject json = parser.parseObject();
Assert.assertEquals(new Date(1294552193254L), json.get("date"));
}
public void test_date2() {
String text = "{\"date\":\"xxxxx\"}";
char[] chars = text.toCharArray();
DefaultJSONParser parser = new DefaultJSONParser(chars, chars.length, ParserConfig.getGlobalInstance(), 0);
parser.config(Feature.AllowISO8601DateFormat, true);
JSONObject json = parser.parseObject();
Assert.assertEquals("xxxxx", json.get("date"));
}
public void test_date3() {
String text = "{\"1234567890abcdefghijklmnopqrst1234567890abcdefghijklmnopqrst1234567890abcdefghijklmnopqrst\\t\":\"xxxxx\"}";
char[] chars = text.toCharArray();
DefaultJSONParser parser = new DefaultJSONParser(chars, chars.length, ParserConfig.getGlobalInstance(), 0);
parser.config(Feature.AllowISO8601DateFormat, true);
JSONObject json = parser.parseObject();
Assert.assertEquals("xxxxx", json.get("1234567890abcdefghijklmnopqrst1234567890abcdefghijklmnopqrst1234567890abcdefghijklmnopqrst\t"));
}
public void test_date4() {
String text = "{\"1234567890abcdefghijklmnopqrst1234567890abcdefghijklmnopqrst1234567890abcdefghijklmnopqrst1234567890abcdefghijklmnopqrst1234567890abcdefghijklmnopqrst1234567890abcdefghijklmnopqrst\\t\":\"xxxxx\"}";
char[] chars = text.toCharArray();
DefaultJSONParser parser = new DefaultJSONParser(chars, chars.length, ParserConfig.getGlobalInstance(), 0);
parser.config(Feature.AllowISO8601DateFormat, true);
JSONObject json = parser.parseObject();
Assert.assertEquals("xxxxx", json.get("1234567890abcdefghijklmnopqrst1234567890abcdefghijklmnopqrst1234567890abcdefghijklmnopqrst1234567890abcdefghijklmnopqrst1234567890abcdefghijklmnopqrst1234567890abcdefghijklmnopqrst\t"));
}
public void test_dateFormat() throws Exception {
DefaultJSONParser parser = new DefaultJSONParser("{}");
parser.setDateFormat("yyyy-DD-mm");
SimpleDateFormat format = new SimpleDateFormat("yyyy-DD-mm", JSON.defaultLocale);
format.setTimeZone(JSON.defaultTimeZone);
parser.setDateFormat(format);
parser.getDateFomartPattern();
parser.getDateFormat();
parser.parse();
parser.close();
}
}
| DefaultJSONParserTest_date |
java | elastic__elasticsearch | test/external-modules/multi-project/src/javaRestTest/java/org/elasticsearch/multiproject/MultiProjectClusterStateActionIT.java | {
"start": 912,
"end": 2079
} | class ____ extends MultiProjectRestTestCase {
@ClassRule
public static ElasticsearchCluster CLUSTER = ElasticsearchCluster.local()
.distribution(DistributionType.INTEG_TEST)
.setting("test.multi_project.enabled", "true")
.setting("xpack.security.http.ssl.enabled", "false")
.setting("xpack.security.enabled", "false")
.build();
@Override
protected String getTestRestCluster() {
return CLUSTER.getHttpAddresses();
}
public void testMultipleProjects() throws Exception {
var response = client().performRequest(new Request("GET", "/_cluster/state?multi_project"));
var projects = ObjectPath.<List<Map<String, ?>>>eval("metadata.projects", entityAsMap(response));
assertNotNull(projects);
assertEquals(1, projects.size());
createProject("foo");
response = client().performRequest(new Request("GET", "/_cluster/state?multi_project"));
projects = ObjectPath.<List<Map<String, ?>>>eval("metadata.projects", entityAsMap(response));
assertNotNull(projects);
assertEquals(2, projects.size());
}
}
| MultiProjectClusterStateActionIT |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/maybe/MaybeContainsTest.java | {
"start": 1097,
"end": 2609
} | class ____ extends RxJavaTest {
@Test
public void doesContain() {
Maybe.just(1).contains(1).test().assertResult(true);
}
@Test
public void doesntContain() {
Maybe.just(1).contains(2).test().assertResult(false);
}
@Test
public void empty() {
Maybe.empty().contains(2).test().assertResult(false);
}
@Test
public void error() {
Maybe.error(new TestException()).contains(2).test().assertFailure(TestException.class);
}
@Test
public void dispose() {
PublishProcessor<Integer> pp = PublishProcessor.create();
TestObserver<Boolean> to = pp.singleElement().contains(1).test();
assertTrue(pp.hasSubscribers());
to.dispose();
assertFalse(pp.hasSubscribers());
}
@Test
public void isDisposed() {
PublishProcessor<Integer> pp = PublishProcessor.create();
TestHelper.checkDisposed(pp.singleElement().contains(1));
}
@Test
public void doubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeMaybeToSingle(new Function<Maybe<Object>, SingleSource<Boolean>>() {
@Override
public SingleSource<Boolean> apply(Maybe<Object> f) throws Exception {
return f.contains(1);
}
});
}
@SuppressWarnings("unchecked")
@Test
public void hasSource() {
assertSame(Maybe.empty(), ((HasUpstreamMaybeSource<Object>)(Maybe.empty().contains(0))).source());
}
}
| MaybeContainsTest |
java | apache__flink | flink-kubernetes/src/test/java/org/apache/flink/kubernetes/kubeclient/factory/KubernetesTaskManagerFactoryTest.java | {
"start": 1681,
"end": 4857
} | class ____ extends KubernetesTaskManagerTestBase {
private Pod resultPod;
@Override
protected void setupFlinkConfig() {
super.setupFlinkConfig();
flinkConfig.set(
SecurityOptions.KERBEROS_LOGIN_KEYTAB, kerberosDir.toString() + "/" + KEYTAB_FILE);
flinkConfig.set(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, "test");
flinkConfig.set(
SecurityOptions.KERBEROS_KRB5_PATH, kerberosDir.toString() + "/" + KRB5_CONF_FILE);
}
@Override
protected void onSetup() throws Exception {
super.onSetup();
KubernetesTestUtils.createTemporyFile("some data", flinkConfDir, CONFIG_FILE_LOGBACK_NAME);
KubernetesTestUtils.createTemporyFile("some data", flinkConfDir, CONFIG_FILE_LOG4J_NAME);
setHadoopConfDirEnv();
generateHadoopConfFileItems();
generateKerberosFileItems();
this.resultPod =
KubernetesTaskManagerFactory.buildTaskManagerKubernetesPod(
new FlinkPod.Builder().build(), kubernetesTaskManagerParameters)
.getInternalResource();
}
@Test
void testPod() {
assertThat(this.resultPod.getMetadata().getName()).isEqualTo(POD_NAME);
assertThat(this.resultPod.getMetadata().getLabels()).hasSize(5);
assertThat(this.resultPod.getSpec().getVolumes()).hasSize(4);
}
@Test
void testContainer() {
final List<Container> resultContainers = this.resultPod.getSpec().getContainers();
assertThat(resultContainers).hasSize(1);
final Container resultMainContainer = resultContainers.get(0);
assertThat(resultMainContainer.getName()).isEqualTo(Constants.MAIN_CONTAINER_NAME);
assertThat(resultMainContainer.getImage()).isEqualTo(CONTAINER_IMAGE);
assertThat(resultMainContainer.getImagePullPolicy())
.isEqualTo(CONTAINER_IMAGE_PULL_POLICY.name());
assertThat(resultMainContainer.getEnv()).hasSize(5);
assertThat(
resultMainContainer.getEnv().stream()
.anyMatch(envVar -> envVar.getName().equals("key1")))
.isTrue();
assertThat(resultMainContainer.getPorts()).hasSize(1);
assertThat(resultMainContainer.getCommand()).hasSize(1);
// The args list is [bash, -c, 'java -classpath $FLINK_CLASSPATH ...'].
assertThat(resultMainContainer.getArgs()).hasSize(3);
assertThat(resultMainContainer.getVolumeMounts()).hasSize(4);
}
@Test
void testHadoopDecoratorsCanBeTurnedOff() {
flinkConfig.set(
KubernetesConfigOptions.KUBERNETES_HADOOP_CONF_MOUNT_DECORATOR_ENABLED, false);
flinkConfig.set(KubernetesConfigOptions.KUBERNETES_KERBEROS_MOUNT_DECORATOR_ENABLED, false);
Pod pod =
KubernetesTaskManagerFactory.buildTaskManagerKubernetesPod(
new FlinkPod.Builder().build(), kubernetesTaskManagerParameters)
.getInternalResource();
assertThat(pod.getSpec().getVolumes()).hasSize(1);
}
}
| KubernetesTaskManagerFactoryTest |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/ImportAutoConfigurationTests.java | {
"start": 1200,
"end": 2875
} | class ____ {
@Test
void multipleAnnotationsShouldMergeCorrectly() {
assertThat(getImportedConfigBeans(Config.class)).containsExactly("ConfigA", "ConfigB", "ConfigC", "ConfigD");
assertThat(getImportedConfigBeans(AnotherConfig.class)).containsExactly("ConfigA", "ConfigB", "ConfigC",
"ConfigD");
}
@Test
void classesAsAnAlias() {
assertThat(getImportedConfigBeans(AnotherConfigUsingClasses.class)).containsExactly("ConfigA", "ConfigB",
"ConfigC", "ConfigD");
}
@Test
void excluding() {
assertThat(getImportedConfigBeans(ExcludingConfig.class)).containsExactly("ConfigA", "ConfigB", "ConfigD");
}
@Test
void excludeAppliedGlobally() {
assertThat(getImportedConfigBeans(ExcludeDConfig.class, ImportADConfig.class)).containsExactly("ConfigA");
}
@Test
void excludeWithRedundancy() {
assertThat(getImportedConfigBeans(ExcludeADConfig.class, ExcludeDConfig.class, ImportADConfig.class)).isEmpty();
}
private List<String> getImportedConfigBeans(Class<?>... config) {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(config);
String shortName = ClassUtils.getShortName(ImportAutoConfigurationTests.class);
int beginIndex = shortName.length() + 1;
List<String> orderedConfigBeans = new ArrayList<>();
for (String bean : context.getBeanDefinitionNames()) {
if (bean.contains("$Config")) {
String shortBeanName = ClassUtils.getShortName(bean);
orderedConfigBeans.add(shortBeanName.substring(beginIndex));
}
}
context.close();
return orderedConfigBeans;
}
@ImportAutoConfiguration({ ConfigD.class, ConfigB.class })
@MetaImportAutoConfiguration
static | ImportAutoConfigurationTests |
java | alibaba__nacos | naming/src/test/java/com/alibaba/nacos/naming/cluster/remote/request/AbstractClusterRequestTest.java | {
"start": 856,
"end": 1105
} | class ____ {
@Test
void getModule() {
AbstractClusterRequest request = new AbstractClusterRequest() {
};
String actual = request.getModule();
assertEquals("cluster", actual);
}
} | AbstractClusterRequestTest |
java | quarkusio__quarkus | extensions/grpc/deployment/src/test/java/io/quarkus/grpc/deployment/MutinyServiceBlockingMethodTest.java | {
"start": 453,
"end": 499
} | interface ____ considered blocking.
*/
public | are |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/streams/impl/MessagePassingQueue.java | {
"start": 15130,
"end": 17220
} | class ____<E> extends MessagePassingQueue<E> {
private static final AtomicLongFieldUpdater<MessagePassingQueue.MpSc<?>> WIP_UPDATER = (AtomicLongFieldUpdater<MessagePassingQueue.MpSc<?>>) (AtomicLongFieldUpdater)AtomicLongFieldUpdater.newUpdater(MessagePassingQueue.MpSc.class, "wip");
// Todo : check false sharing
private volatile long wip;
public MpSc(Predicate<E> consumer) {
this(consumer, DEFAULT_LOW_WATER_MARK, DEFAULT_HIGH_WATER_MARK);
}
public MpSc(Predicate<E> consumer, int lowWaterMark, int highWaterMark) {
super(PlatformDependent.newMpscQueue(), consumer, lowWaterMark, highWaterMark);
}
/**
* Let the consumer thread add the {@code element} to the queue.
*
* A set of flags is returned
* <ul>
* <li>When {@link #UNWRITABLE_MASK} is set, the queue is writable and new elements can be added to the queue,
* otherwise no elements <i>should</i> be added to the queue nor submitted but it is a soft condition</li>
* <li>When {@link #DRAIN_REQUIRED_MASK} is set, the queue contains at least one element and must be drained</li>
* </ul>
*
* @param element the element to add
* @return a bitset of [{@link #DRAIN_REQUIRED_MASK}, {@link #UNWRITABLE_MASK}, {@link #WRITABLE_MASK}] flags
*/
public int write(E element) {
int res = add(element);
if ((res & DRAIN_REQUIRED_MASK) != 0) {
return drain();
} else {
return res;
}
}
@Override
protected boolean wipCompareAndSet(long expect, long update) {
return WIP_UPDATER.compareAndSet(this, expect, update);
}
@Override
protected long wipIncrementAndGet() {
return WIP_UPDATER.incrementAndGet(this);
}
@Override
protected long wipDecrementAndGet() {
return WIP_UPDATER.decrementAndGet(this);
}
@Override
protected long wipGet() {
return WIP_UPDATER.get(this);
}
@Override
protected long wipAddAndGet(long delta) {
return WIP_UPDATER.addAndGet(this, delta);
}
}
public static | MpSc |
java | reactor__reactor-core | reactor-core-micrometer/src/main/java/reactor/core/observability/micrometer/Micrometer.java | {
"start": 1250,
"end": 3721
} | class ____ {
/**
* The default "name" to use as a prefix for meter if the instrumented sequence doesn't define a {@link reactor.core.publisher.Flux#name(String) name}.
*/
public static final String DEFAULT_METER_PREFIX = "reactor";
/**
* A {@link SignalListener} factory that will ultimately produce Micrometer metrics
* to the provided {@link MeterRegistry} (and using the registry's {@link MeterRegistry.Config#clock() configured}
* {@link Clock} in case additional timings are needed).
* To be used with either the {@link reactor.core.publisher.Flux#tap(SignalListenerFactory)} or
* {@link reactor.core.publisher.Mono#tap(SignalListenerFactory)} operator.
* <p>
* When used in a {@link reactor.core.publisher.Flux#tap(SignalListenerFactory)} operator, meter names use
* the {@link reactor.core.publisher.Flux#name(String)} set upstream of the tap as id prefix if applicable
* or default to {@link #DEFAULT_METER_PREFIX}. Similarly, upstream tags are gathered and added
* to the default set of tags for meters.
* See {@link MicrometerMeterListenerDocumentation} for a documentation of the default set of meters and tags.
* <p>
* Note that some monitoring systems like Prometheus require to have the exact same set of
* tags for each meter bearing the same name.
*
* @param <T> the type of onNext in the target publisher
* @param meterRegistry the {@link MeterRegistry} in which to register and publish metrics
* @return a {@link SignalListenerFactory} to record metrics
* @see MicrometerMeterListenerDocumentation
*/
public static <T> SignalListenerFactory<T, ?> metrics(MeterRegistry meterRegistry) {
return new MicrometerMeterListenerFactory<T>(meterRegistry);
}
/**
* A {@link SignalListener} factory that will ultimately produce a Micrometer {@link Observation}
* representing the runtime of the publisher to the provided {@link ObservationRegistry}.
* To be used with either the {@link reactor.core.publisher.Flux#tap(SignalListenerFactory)} or
* {@link reactor.core.publisher.Mono#tap(SignalListenerFactory)} operator.
* <p>
* The {@link Observation} covers the entire length of the sequence, from subscription to termination.
* Said termination can be a cancellation, a completion with or without values or an error.
* This is denoted by the low cardinality {@code status} {@link KeyValue}.
* In case of an exception, a high cardinality {@code exception} KeyValue with the exception | Micrometer |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/MethodOrderer.java | {
"start": 4374,
"end": 4983
} | class ____ in concrete implementations of the
* {@code MethodOrderer} API.
*
* @return the default {@code ExecutionMode}; never {@code null} but
* potentially empty
* @see #orderMethods(MethodOrdererContext)
*/
default Optional<ExecutionMode> getDefaultExecutionMode() {
return Optional.of(ExecutionMode.SAME_THREAD);
}
/**
* {@code MethodOrderer} that allows to explicitly specify that the default
* ordering should be applied.
*
* <p>If the {@value #DEFAULT_ORDER_PROPERTY_NAME} is set, specifying this
* {@code MethodOrderer} has the same effect as referencing the configured
* | or |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/cluster/version/CompatibilityVersionsTests.java | {
"start": 924,
"end": 8189
} | class ____ extends ESTestCase {
public void testEmptyVersionsList() {
assertThat(
CompatibilityVersions.minimumVersions(List.of()),
equalTo(new CompatibilityVersions(TransportVersion.minimumCompatible(), Map.of()))
);
}
public void testMinimumTransportVersions() {
TransportVersion version1 = TransportVersionUtils.getNextVersion(TransportVersion.minimumCompatible(), true);
TransportVersion version2 = TransportVersionUtils.randomVersionBetween(
random(),
TransportVersionUtils.getNextVersion(version1, true),
TransportVersion.current()
);
CompatibilityVersions compatibilityVersions1 = new CompatibilityVersions(version1, Map.of());
CompatibilityVersions compatibilityVersions2 = new CompatibilityVersions(version2, Map.of());
List<CompatibilityVersions> versions = List.of(compatibilityVersions1, compatibilityVersions2);
assertThat(CompatibilityVersions.minimumVersions(versions), equalTo(compatibilityVersions1));
}
public void testMinimumMappingsVersions() {
SystemIndexDescriptor.MappingsVersion v1 = new SystemIndexDescriptor.MappingsVersion(1, 1);
SystemIndexDescriptor.MappingsVersion v2 = new SystemIndexDescriptor.MappingsVersion(2, 2);
SystemIndexDescriptor.MappingsVersion v3 = new SystemIndexDescriptor.MappingsVersion(3, 3);
Map<String, SystemIndexDescriptor.MappingsVersion> mappings1 = Map.of(".system-index-1", v3, ".system-index-2", v1);
Map<String, SystemIndexDescriptor.MappingsVersion> mappings2 = Map.of(".system-index-1", v2, ".system-index-2", v2);
Map<String, SystemIndexDescriptor.MappingsVersion> mappings3 = Map.of(".system-index-3", v1);
CompatibilityVersions compatibilityVersions1 = new CompatibilityVersions(TransportVersion.current(), mappings1);
CompatibilityVersions compatibilityVersions2 = new CompatibilityVersions(TransportVersion.current(), mappings2);
CompatibilityVersions compatibilityVersions3 = new CompatibilityVersions(TransportVersion.current(), mappings3);
List<CompatibilityVersions> versions = List.of(compatibilityVersions1, compatibilityVersions2, compatibilityVersions3);
assertThat(
CompatibilityVersions.minimumVersions(versions),
equalTo(
new CompatibilityVersions(
TransportVersion.current(),
Map.of(".system-index-1", v2, ".system-index-2", v1, ".system-index-3", v1)
)
)
);
}
/**
* By design, all versions should increase monotonically through releases, so we shouldn't have a situation
* where the minimum transport version is in one CompatibilityVersions object and a minimum system
* index is in another. However, the minimumVersions method we're testing will handle that situation without
* complaint.
*/
public void testMinimumsAreMerged() {
TransportVersion version1 = TransportVersionUtils.getNextVersion(TransportVersion.minimumCompatible(), true);
TransportVersion version2 = TransportVersionUtils.randomVersionBetween(
random(),
TransportVersionUtils.getNextVersion(version1, true),
TransportVersion.current()
);
SystemIndexDescriptor.MappingsVersion v1 = new SystemIndexDescriptor.MappingsVersion(1, 1);
SystemIndexDescriptor.MappingsVersion v2 = new SystemIndexDescriptor.MappingsVersion(2, 2);
Map<String, SystemIndexDescriptor.MappingsVersion> mappings1 = Map.of(".system-index-1", v2);
Map<String, SystemIndexDescriptor.MappingsVersion> mappings2 = Map.of(".system-index-1", v1);
CompatibilityVersions compatibilityVersions1 = new CompatibilityVersions(version1, mappings1);
CompatibilityVersions compatibilityVersions2 = new CompatibilityVersions(version2, mappings2);
List<CompatibilityVersions> versions = List.of(compatibilityVersions1, compatibilityVersions2);
assertThat(CompatibilityVersions.minimumVersions(versions), equalTo(new CompatibilityVersions(version1, mappings2)));
}
public void testPreventJoinClusterWithUnsupportedTransportVersion() {
List<TransportVersion> transportVersions = IntStream.range(0, randomIntBetween(2, 10))
.mapToObj(i -> TransportVersionUtils.randomCompatibleVersion(random()))
.toList();
TransportVersion min = Collections.min(transportVersions);
List<CompatibilityVersions> compatibilityVersions = transportVersions.stream()
.map(transportVersion -> new CompatibilityVersions(transportVersion, Map.of()))
.toList();
// should not throw
CompatibilityVersions.ensureVersionsCompatibility(
new CompatibilityVersions(TransportVersionUtils.randomVersionBetween(random(), min, TransportVersion.current()), Map.of()),
compatibilityVersions
);
IllegalStateException e = expectThrows(
IllegalStateException.class,
() -> CompatibilityVersions.ensureVersionsCompatibility(
new CompatibilityVersions(
TransportVersionUtils.randomVersionBetween(
random(),
TransportVersionUtils.getFirstVersion(),
TransportVersionUtils.getPreviousVersion(min)
),
Map.of()
),
compatibilityVersions
)
);
assertThat(e.getMessage(), containsString("may not join a cluster with minimum version"));
}
public void testPreventJoinClusterWithUnsupportedMappingsVersion() {
List<CompatibilityVersions> compatibilityVersions = IntStream.range(0, randomIntBetween(2, 10))
.mapToObj(
i -> new CompatibilityVersions(
TransportVersion.current(),
Map.of(".system-index", new SystemIndexDescriptor.MappingsVersion(randomIntBetween(2, 10), -1))
)
)
.toList();
int min = compatibilityVersions.stream()
.mapToInt(v -> v.systemIndexMappingsVersion().get(".system-index").version())
.min()
.orElse(2);
// should not throw
CompatibilityVersions.ensureVersionsCompatibility(
new CompatibilityVersions(
TransportVersion.current(),
Map.of(".system-index", new SystemIndexDescriptor.MappingsVersion(min, -1))
),
compatibilityVersions
);
IllegalStateException e = expectThrows(
IllegalStateException.class,
() -> CompatibilityVersions.ensureVersionsCompatibility(
new CompatibilityVersions(
TransportVersion.current(),
Map.of(".system-index", new SystemIndexDescriptor.MappingsVersion(randomIntBetween(1, min - 1), -1))
),
compatibilityVersions
)
);
assertThat(e.getMessage(), containsString("may not join a cluster with minimum system index mappings versions"));
}
}
| CompatibilityVersionsTests |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/client/RestOperations.java | {
"start": 1386,
"end": 31357
} | interface ____ {
// GET
/**
* Retrieve a representation by doing a GET on the specified URL.
* The response (if any) is converted and returned.
* <p>URI Template variables are expanded using the given URI variables, if any.
* @param url the URL
* @param responseType the type of the return value
* @param uriVariables the variables to expand the template
* @return the converted object
*/
<T> @Nullable T getForObject(String url, Class<T> responseType, @Nullable Object... uriVariables) throws RestClientException;
/**
* Retrieve a representation by doing a GET on the URI template.
* The response (if any) is converted and returned.
* <p>URI Template variables are expanded using the given map.
* @param url the URL
* @param responseType the type of the return value
* @param uriVariables the map containing variables for the URI template
* @return the converted object
*/
<T> @Nullable T getForObject(String url, Class<T> responseType, Map<String, ? extends @Nullable Object> uriVariables) throws RestClientException;
/**
* Retrieve a representation by doing a GET on the URL.
* The response (if any) is converted and returned.
* @param url the URL
* @param responseType the type of the return value
* @return the converted object
*/
<T> @Nullable T getForObject(URI url, Class<T> responseType) throws RestClientException;
/**
* Retrieve an entity by doing a GET on the specified URL.
* The response is converted and stored in a {@link ResponseEntity}.
* <p>URI Template variables are expanded using the given URI variables, if any.
* @param url the URL
* @param responseType the type of the return value
* @param uriVariables the variables to expand the template
* @return the entity
* @since 3.0.2
*/
<T> ResponseEntity<T> getForEntity(String url, Class<T> responseType, @Nullable Object... uriVariables)
throws RestClientException;
/**
* Retrieve a representation by doing a GET on the URI template.
* The response is converted and stored in a {@link ResponseEntity}.
* <p>URI Template variables are expanded using the given map.
* @param url the URL
* @param responseType the type of the return value
* @param uriVariables the map containing variables for the URI template
* @return the converted object
* @since 3.0.2
*/
<T> ResponseEntity<T> getForEntity(String url, Class<T> responseType, Map<String, ? extends @Nullable Object> uriVariables)
throws RestClientException;
/**
* Retrieve a representation by doing a GET on the URL.
* The response is converted and stored in a {@link ResponseEntity}.
* @param url the URL
* @param responseType the type of the return value
* @return the converted object
* @since 3.0.2
*/
<T> ResponseEntity<T> getForEntity(URI url, Class<T> responseType) throws RestClientException;
// HEAD
/**
* Retrieve all headers of the resource specified by the URI template.
* <p>URI Template variables are expanded using the given URI variables, if any.
* @param url the URL
* @param uriVariables the variables to expand the template
* @return all HTTP headers of that resource
*/
HttpHeaders headForHeaders(String url, @Nullable Object... uriVariables) throws RestClientException;
/**
* Retrieve all headers of the resource specified by the URI template.
* <p>URI Template variables are expanded using the given map.
* @param url the URL
* @param uriVariables the map containing variables for the URI template
* @return all HTTP headers of that resource
*/
HttpHeaders headForHeaders(String url, Map<String, ? extends @Nullable Object> uriVariables) throws RestClientException;
/**
* Retrieve all headers of the resource specified by the URL.
* @param url the URL
* @return all HTTP headers of that resource
*/
HttpHeaders headForHeaders(URI url) throws RestClientException;
// POST
/**
* Create a new resource by POSTing the given object to the URI template, and return the value of
* the {@code Location} header. This header typically indicates where the new resource is stored.
* <p>URI Template variables are expanded using the given URI variables, if any.
* <p>The {@code request} parameter can be a {@link HttpEntity} in order to
* add additional HTTP headers to the request.
* <p>The body of the entity, or {@code request} itself, can be a
* {@link org.springframework.util.MultiValueMap MultiValueMap} to create a multipart request.
* The values in the {@code MultiValueMap} can be any Object representing the body of the part,
* or an {@link org.springframework.http.HttpEntity HttpEntity} representing a part with body
* and headers.
* @param url the URL
* @param request the Object to be POSTed (may be {@code null})
* @param uriVariables the variables to expand the template
* @return the value for the {@code Location} header
* @see HttpEntity
*/
@Nullable URI postForLocation(String url, @Nullable Object request, @Nullable Object... uriVariables) throws RestClientException;
/**
* Create a new resource by POSTing the given object to the URI template, and return the value of
* the {@code Location} header. This header typically indicates where the new resource is stored.
* <p>URI Template variables are expanded using the given map.
* <p>The {@code request} parameter can be a {@link HttpEntity} in order to
* add additional HTTP headers to the request
* <p>The body of the entity, or {@code request} itself, can be a
* {@link org.springframework.util.MultiValueMap MultiValueMap} to create a multipart request.
* The values in the {@code MultiValueMap} can be any Object representing the body of the part,
* or an {@link org.springframework.http.HttpEntity HttpEntity} representing a part with body
* and headers.
* @param url the URL
* @param request the Object to be POSTed (may be {@code null})
* @param uriVariables the variables to expand the template
* @return the value for the {@code Location} header
* @see HttpEntity
*/
@Nullable URI postForLocation(String url, @Nullable Object request, Map<String, ? extends @Nullable Object> uriVariables)
throws RestClientException;
/**
* Create a new resource by POSTing the given object to the URL, and return the value of the
* {@code Location} header. This header typically indicates where the new resource is stored.
* <p>The {@code request} parameter can be a {@link HttpEntity} in order to
* add additional HTTP headers to the request.
* <p>The body of the entity, or {@code request} itself, can be a
* {@link org.springframework.util.MultiValueMap MultiValueMap} to create a multipart request.
* The values in the {@code MultiValueMap} can be any Object representing the body of the part,
* or an {@link org.springframework.http.HttpEntity HttpEntity} representing a part with body
* and headers.
* @param url the URL
* @param request the Object to be POSTed (may be {@code null})
* @return the value for the {@code Location} header
* @see HttpEntity
*/
@Nullable URI postForLocation(URI url, @Nullable Object request) throws RestClientException;
/**
* Create a new resource by POSTing the given object to the URI template,
* and return the representation found in the response.
* <p>URI Template variables are expanded using the given URI variables, if any.
* <p>The {@code request} parameter can be a {@link HttpEntity} in order to
* add additional HTTP headers to the request.
* <p>The body of the entity, or {@code request} itself, can be a
* {@link org.springframework.util.MultiValueMap MultiValueMap} to create a multipart request.
* The values in the {@code MultiValueMap} can be any Object representing the body of the part,
* or an {@link org.springframework.http.HttpEntity HttpEntity} representing a part with body
* and headers.
* @param url the URL
* @param request the Object to be POSTed (may be {@code null})
* @param responseType the type of the return value
* @param uriVariables the variables to expand the template
* @return the converted object
* @see HttpEntity
*/
<T> @Nullable T postForObject(String url, @Nullable Object request, Class<T> responseType,
@Nullable Object... uriVariables) throws RestClientException;
/**
* Create a new resource by POSTing the given object to the URI template,
* and return the representation found in the response.
* <p>URI Template variables are expanded using the given map.
* <p>The {@code request} parameter can be a {@link HttpEntity} in order to
* add additional HTTP headers to the request.
* <p>The body of the entity, or {@code request} itself, can be a
* {@link org.springframework.util.MultiValueMap MultiValueMap} to create a multipart request.
* The values in the {@code MultiValueMap} can be any Object representing the body of the part,
* or an {@link org.springframework.http.HttpEntity HttpEntity} representing a part with body
* and headers.
* @param url the URL
* @param request the Object to be POSTed (may be {@code null})
* @param responseType the type of the return value
* @param uriVariables the variables to expand the template
* @return the converted object
* @see HttpEntity
*/
<T> @Nullable T postForObject(String url, @Nullable Object request, Class<T> responseType,
Map<String, ? extends @Nullable Object> uriVariables) throws RestClientException;
/**
* Create a new resource by POSTing the given object to the URL,
* and return the representation found in the response.
* <p>The {@code request} parameter can be a {@link HttpEntity} in order to
* add additional HTTP headers to the request.
* <p>The body of the entity, or {@code request} itself, can be a
* {@link org.springframework.util.MultiValueMap MultiValueMap} to create a multipart request.
* The values in the {@code MultiValueMap} can be any Object representing the body of the part,
* or an {@link org.springframework.http.HttpEntity HttpEntity} representing a part with body
* and headers.
* @param url the URL
* @param request the Object to be POSTed (may be {@code null})
* @param responseType the type of the return value
* @return the converted object
* @see HttpEntity
*/
<T> @Nullable T postForObject(URI url, @Nullable Object request, Class<T> responseType) throws RestClientException;
/**
* Create a new resource by POSTing the given object to the URI template,
* and return the response as {@link ResponseEntity}.
* <p>URI Template variables are expanded using the given URI variables, if any.
* <p>The {@code request} parameter can be a {@link HttpEntity} in order to
* add additional HTTP headers to the request.
* <p>The body of the entity, or {@code request} itself, can be a
* {@link org.springframework.util.MultiValueMap MultiValueMap} to create a multipart request.
* The values in the {@code MultiValueMap} can be any Object representing the body of the part,
* or an {@link org.springframework.http.HttpEntity HttpEntity} representing a part with body
* and headers.
* @param url the URL
* @param request the Object to be POSTed (may be {@code null})
* @param uriVariables the variables to expand the template
* @return the converted object
* @since 3.0.2
* @see HttpEntity
*/
<T> ResponseEntity<T> postForEntity(String url, @Nullable Object request, Class<T> responseType,
@Nullable Object... uriVariables) throws RestClientException;
/**
* Create a new resource by POSTing the given object to the URI template,
* and return the response as {@link HttpEntity}.
* <p>URI Template variables are expanded using the given map.
* <p>The {@code request} parameter can be a {@link HttpEntity} in order to
* add additional HTTP headers to the request.
* <p>The body of the entity, or {@code request} itself, can be a
* {@link org.springframework.util.MultiValueMap MultiValueMap} to create a multipart request.
* The values in the {@code MultiValueMap} can be any Object representing the body of the part,
* or an {@link org.springframework.http.HttpEntity HttpEntity} representing a part with body
* and headers.
* @param url the URL
* @param request the Object to be POSTed (may be {@code null})
* @param uriVariables the variables to expand the template
* @return the converted object
* @since 3.0.2
* @see HttpEntity
*/
<T> ResponseEntity<T> postForEntity(String url, @Nullable Object request, Class<T> responseType,
Map<String, ? extends @Nullable Object> uriVariables) throws RestClientException;
/**
* Create a new resource by POSTing the given object to the URL,
* and return the response as {@link ResponseEntity}.
* <p>The {@code request} parameter can be a {@link HttpEntity} in order to
* add additional HTTP headers to the request.
* <p>The body of the entity, or {@code request} itself, can be a
* {@link org.springframework.util.MultiValueMap MultiValueMap} to create a multipart request.
* The values in the {@code MultiValueMap} can be any Object representing the body of the part,
* or an {@link org.springframework.http.HttpEntity HttpEntity} representing a part with body
* and headers.
* @param url the URL
* @param request the Object to be POSTed (may be {@code null})
* @return the converted object
* @since 3.0.2
* @see HttpEntity
*/
<T> ResponseEntity<T> postForEntity(URI url, @Nullable Object request, Class<T> responseType)
throws RestClientException;
// PUT
/**
* Create or update a resource by PUTting the given object to the URI.
* <p>URI Template variables are expanded using the given URI variables, if any.
* <p>The {@code request} parameter can be a {@link HttpEntity} in order to
* add additional HTTP headers to the request.
* @param url the URL
* @param request the Object to be PUT (may be {@code null})
* @param uriVariables the variables to expand the template
* @see HttpEntity
*/
void put(String url, @Nullable Object request, @Nullable Object... uriVariables) throws RestClientException;
/**
* Creates a new resource by PUTting the given object to URI template.
* <p>URI Template variables are expanded using the given map.
* <p>The {@code request} parameter can be a {@link HttpEntity} in order to
* add additional HTTP headers to the request.
* @param url the URL
* @param request the Object to be PUT (may be {@code null})
* @param uriVariables the variables to expand the template
* @see HttpEntity
*/
void put(String url, @Nullable Object request, Map<String, ? extends @Nullable Object> uriVariables) throws RestClientException;
/**
* Creates a new resource by PUTting the given object to URL.
* <p>The {@code request} parameter can be a {@link HttpEntity} in order to
* add additional HTTP headers to the request.
* @param url the URL
* @param request the Object to be PUT (may be {@code null})
* @see HttpEntity
*/
void put(URI url, @Nullable Object request) throws RestClientException;
// PATCH
/**
* Update a resource by PATCHing the given object to the URI template,
* and return the representation found in the response.
* <p>URI Template variables are expanded using the given URI variables, if any.
* <p>The {@code request} parameter can be a {@link HttpEntity} in order to
* add additional HTTP headers to the request.
* <p><b>NOTE: The standard JDK HTTP library does not support HTTP PATCH.
* You need to use, for example, the Apache HttpComponents request factory.</b>
* @param url the URL
* @param request the object to be PATCHed (may be {@code null})
* @param responseType the type of the return value
* @param uriVariables the variables to expand the template
* @return the converted object
* @since 4.3.5
* @see HttpEntity
* @see RestTemplate#setRequestFactory
* @see org.springframework.http.client.HttpComponentsClientHttpRequestFactory
*/
<T> @Nullable T patchForObject(String url, @Nullable Object request, Class<T> responseType, @Nullable Object... uriVariables)
throws RestClientException;
/**
* Update a resource by PATCHing the given object to the URI template,
* and return the representation found in the response.
* <p>URI Template variables are expanded using the given map.
* <p>The {@code request} parameter can be a {@link HttpEntity} in order to
* add additional HTTP headers to the request.
* <p><b>NOTE: The standard JDK HTTP library does not support HTTP PATCH.
* You need to use, for example, the Apache HttpComponents request factory.</b>
* @param url the URL
* @param request the object to be PATCHed (may be {@code null})
* @param responseType the type of the return value
* @param uriVariables the variables to expand the template
* @return the converted object
* @since 4.3.5
* @see HttpEntity
* @see RestTemplate#setRequestFactory
* @see org.springframework.http.client.HttpComponentsClientHttpRequestFactory
*/
<T> @Nullable T patchForObject(String url, @Nullable Object request, Class<T> responseType,
Map<String, ? extends @Nullable Object> uriVariables) throws RestClientException;
/**
* Update a resource by PATCHing the given object to the URL,
* and return the representation found in the response.
* <p>The {@code request} parameter can be a {@link HttpEntity} in order to
* add additional HTTP headers to the request.
* <p><b>NOTE: The standard JDK HTTP library does not support HTTP PATCH.
* You need to use, for example, the Apache HttpComponents request factory.</b>
* @param url the URL
* @param request the object to be PATCHed (may be {@code null})
* @param responseType the type of the return value
* @return the converted object
* @since 4.3.5
* @see HttpEntity
* @see RestTemplate#setRequestFactory
* @see org.springframework.http.client.HttpComponentsClientHttpRequestFactory
*/
<T> @Nullable T patchForObject(URI url, @Nullable Object request, Class<T> responseType)
throws RestClientException;
// DELETE
/**
* Delete the resources at the specified URI.
* <p>URI Template variables are expanded using the given URI variables, if any.
* @param url the URL
* @param uriVariables the variables to expand in the template
*/
void delete(String url, @Nullable Object... uriVariables) throws RestClientException;
/**
* Delete the resources at the specified URI.
* <p>URI Template variables are expanded using the given map.
* @param url the URL
* @param uriVariables the variables to expand the template
*/
void delete(String url, Map<String, ? extends @Nullable Object> uriVariables) throws RestClientException;
/**
* Delete the resources at the specified URL.
* @param url the URL
*/
void delete(URI url) throws RestClientException;
// OPTIONS
/**
* Return the value of the {@code Allow} header for the given URI.
* <p>URI Template variables are expanded using the given URI variables, if any.
* @param url the URL
* @param uriVariables the variables to expand in the template
* @return the value of the {@code Allow} header
*/
Set<HttpMethod> optionsForAllow(String url, @Nullable Object... uriVariables) throws RestClientException;
/**
* Return the value of the {@code Allow} header for the given URI.
* <p>URI Template variables are expanded using the given map.
* @param url the URL
* @param uriVariables the variables to expand in the template
* @return the value of the {@code Allow} header
*/
Set<HttpMethod> optionsForAllow(String url, Map<String, ? extends @Nullable Object> uriVariables) throws RestClientException;
/**
* Return the value of the {@code Allow} header for the given URL.
* @param url the URL
* @return the value of the {@code Allow} header
*/
Set<HttpMethod> optionsForAllow(URI url) throws RestClientException;
// exchange
/**
* Execute the HTTP method to the given URI template, writing the given request entity to the request,
* and return the response as {@link ResponseEntity}.
* <p>URI Template variables are expanded using the given URI variables, if any.
* @param url the URL
* @param method the HTTP method (GET, POST, etc)
* @param requestEntity the entity (headers and/or body) to write to the request
* may be {@code null})
* @param responseType the type to convert the response to, or {@code Void.class} for no body
* @param uriVariables the variables to expand in the template
* @return the response as entity
* @since 3.0.2
*/
<T> ResponseEntity<T> exchange(String url, HttpMethod method, @Nullable HttpEntity<?> requestEntity,
Class<T> responseType, @Nullable Object... uriVariables) throws RestClientException;
/**
* Execute the HTTP method to the given URI template, writing the given request entity to the request,
* and return the response as {@link ResponseEntity}.
* <p>URI Template variables are expanded using the given URI variables, if any.
* @param url the URL
* @param method the HTTP method (GET, POST, etc)
* @param requestEntity the entity (headers and/or body) to write to the request
* (may be {@code null})
* @param responseType the type to convert the response to, or {@code Void.class} for no body
* @param uriVariables the variables to expand in the template
* @return the response as entity
* @since 3.0.2
*/
<T> ResponseEntity<T> exchange(String url, HttpMethod method, @Nullable HttpEntity<?> requestEntity,
Class<T> responseType, Map<String, ? extends @Nullable Object> uriVariables) throws RestClientException;
/**
* Execute the HTTP method to the given URI template, writing the given request entity to the request,
* and return the response as {@link ResponseEntity}.
* @param url the URL
* @param method the HTTP method (GET, POST, etc)
* @param requestEntity the entity (headers and/or body) to write to the request
* (may be {@code null})
* @param responseType the type to convert the response to, or {@code Void.class} for no body
* @return the response as entity
* @since 3.0.2
*/
<T> ResponseEntity<T> exchange(URI url, HttpMethod method, @Nullable HttpEntity<?> requestEntity,
Class<T> responseType) throws RestClientException;
/**
* Execute the HTTP method to the given URI template, writing the given
* request entity to the request, and return the response as {@link ResponseEntity}.
* The given {@link ParameterizedTypeReference} is used to pass generic type information:
* <pre class="code">
* ParameterizedTypeReference<List<MyBean>> myBean =
* new ParameterizedTypeReference<List<MyBean>>() {};
*
* ResponseEntity<List<MyBean>> response =
* template.exchange("https://example.com",HttpMethod.GET, null, myBean);
* </pre>
* @param url the URL
* @param method the HTTP method (GET, POST, etc)
* @param requestEntity the entity (headers and/or body) to write to the
* request (may be {@code null})
* @param responseType the type to convert the response to, or {@code Void.class} for no body
* @param uriVariables the variables to expand in the template
* @return the response as entity
* @since 3.2
*/
<T> ResponseEntity<T> exchange(String url,HttpMethod method, @Nullable HttpEntity<?> requestEntity,
ParameterizedTypeReference<T> responseType, @Nullable Object... uriVariables) throws RestClientException;
/**
* Execute the HTTP method to the given URI template, writing the given
* request entity to the request, and return the response as {@link ResponseEntity}.
* The given {@link ParameterizedTypeReference} is used to pass generic type information:
* <pre class="code">
* ParameterizedTypeReference<List<MyBean>> myBean =
* new ParameterizedTypeReference<List<MyBean>>() {};
*
* ResponseEntity<List<MyBean>> response =
* template.exchange("https://example.com",HttpMethod.GET, null, myBean);
* </pre>
* @param url the URL
* @param method the HTTP method (GET, POST, etc)
* @param requestEntity the entity (headers and/or body) to write to the request
* (may be {@code null})
* @param responseType the type to convert the response to, or {@code Void.class} for no body
* @param uriVariables the variables to expand in the template
* @return the response as entity
* @since 3.2
*/
<T> ResponseEntity<T> exchange(String url, HttpMethod method, @Nullable HttpEntity<?> requestEntity,
ParameterizedTypeReference<T> responseType, Map<String, ? extends @Nullable Object> uriVariables) throws RestClientException;
/**
* Execute the HTTP method to the given URI template, writing the given
* request entity to the request, and return the response as {@link ResponseEntity}.
* The given {@link ParameterizedTypeReference} is used to pass generic type information:
* <pre class="code">
* ParameterizedTypeReference<List<MyBean>> myBean =
* new ParameterizedTypeReference<List<MyBean>>() {};
*
* ResponseEntity<List<MyBean>> response =
* template.exchange("https://example.com",HttpMethod.GET, null, myBean);
* </pre>
* @param url the URL
* @param method the HTTP method (GET, POST, etc)
* @param requestEntity the entity (headers and/or body) to write to the request
* (may be {@code null})
* @param responseType the type to convert the response to, or {@code Void.class} for no body
* @return the response as entity
* @since 3.2
*/
<T> ResponseEntity<T> exchange(URI url, HttpMethod method, @Nullable HttpEntity<?> requestEntity,
ParameterizedTypeReference<T> responseType) throws RestClientException;
/**
* Execute the request specified in the given {@link RequestEntity} and return
* the response as {@link ResponseEntity}. Typically used in combination
* with the static builder methods on {@code RequestEntity}, for instance:
* <pre class="code">
* MyRequest body = ...
* RequestEntity request = RequestEntity
* .post(URI.create("https://example.com/foo"))
* .accept(MediaType.APPLICATION_JSON)
* .body(body);
* ResponseEntity<MyResponse> response = template.exchange(request, MyResponse.class);
* </pre>
* @param requestEntity the entity to write to the request
* @param responseType the type to convert the response to, or {@code Void.class} for no body
* @return the response as entity
* @since 4.1
*/
<T> ResponseEntity<T> exchange(RequestEntity<?> requestEntity, Class<T> responseType)
throws RestClientException;
/**
* Execute the request specified in the given {@link RequestEntity} and return
* the response as {@link ResponseEntity}. The given
* {@link ParameterizedTypeReference} is used to pass generic type information:
* <pre class="code">
* MyRequest body = ...
* RequestEntity request = RequestEntity
* .post(URI.create("https://example.com/foo"))
* .accept(MediaType.APPLICATION_JSON)
* .body(body);
* ParameterizedTypeReference<List<MyResponse>> myBean =
* new ParameterizedTypeReference<List<MyResponse>>() {};
* ResponseEntity<List<MyResponse>> response = template.exchange(request, myBean);
* </pre>
* @param requestEntity the entity to write to the request
* @param responseType the type to convert the response to, or {@code Void.class} for no body
* @return the response as entity
* @since 4.1
*/
<T> ResponseEntity<T> exchange(RequestEntity<?> requestEntity, ParameterizedTypeReference<T> responseType)
throws RestClientException;
// General execution
/**
* Execute the HTTP method to the given URI template, preparing the request with the
* {@link RequestCallback}, and reading the response with a {@link ResponseExtractor}.
* <p>URI Template variables are expanded using the given URI variables, if any.
* @param uriTemplate the URI template
* @param method the HTTP method (GET, POST, etc)
* @param requestCallback object that prepares the request
* @param responseExtractor object that extracts the return value from the response
* @param uriVariables the variables to expand in the template
* @return an arbitrary object, as returned by the {@link ResponseExtractor}
*/
<T> @Nullable T execute(String uriTemplate, HttpMethod method, @Nullable RequestCallback requestCallback,
@Nullable ResponseExtractor<T> responseExtractor, @Nullable Object... uriVariables)
throws RestClientException;
/**
* Execute the HTTP method to the given URI template, preparing the request with the
* {@link RequestCallback}, and reading the response with a {@link ResponseExtractor}.
* <p>URI Template variables are expanded using the given URI variables map.
* @param uriTemplate the URI template
* @param method the HTTP method (GET, POST, etc)
* @param requestCallback object that prepares the request
* @param responseExtractor object that extracts the return value from the response
* @param uriVariables the variables to expand in the template
* @return an arbitrary object, as returned by the {@link ResponseExtractor}
*/
<T> @Nullable T execute(String uriTemplate, HttpMethod method, @Nullable RequestCallback requestCallback,
@Nullable ResponseExtractor<T> responseExtractor, Map<String, ? extends @Nullable Object> uriVariables)
throws RestClientException;
/**
* Execute the HTTP method to the given URL, preparing the request with the
* {@link RequestCallback}, and reading the response with a {@link ResponseExtractor}.
* @param url the URL
* @param method the HTTP method (GET, POST, etc)
* @param requestCallback object that prepares the request
* @param responseExtractor object that extracts the return value from the response
* @return an arbitrary object, as returned by the {@link ResponseExtractor}
*/
<T> @Nullable T execute(URI url, HttpMethod method, @Nullable RequestCallback requestCallback,
@Nullable ResponseExtractor<T> responseExtractor) throws RestClientException;
}
| RestOperations |
java | micronaut-projects__micronaut-core | router/src/test/java/io/micronaut/web/router/uri/WhatwgParser.java | {
"start": 49196,
"end": 50180
} | enum ____ {
DOMAIN_TO_ASCII,
DOMAIN_INVALID_CODE_POINT,
DOMAIN_TO_UNICODE,
HOST_INVALID_CODE_POINT,
IPV4_EMPTY_PART,
IPV4_TOO_MANY_PARTS,
IPV4_NON_NUMERIC_PART,
IPV4_NON_DECIMAL_PART,
IPV4_OUT_OF_RANGE_PART,
IPV6_UNCLOSED,
IPV6_INVALID_COMPRESSION,
IPV6_TOO_MANY_PIECES,
IPV6_MULTIPLE_COMPRESSION,
IPV6_INVALID_CODE_POINT,
IPV6_TOO_FEW_PIECES,
IPV4_IN_IPV6_TOO_MANY_PIECES,
IPV4_IN_IPV6_INVALID_CODE_POINT,
IPV4_IN_IPV6_OUT_OF_RANGE_PART,
IPV4_IN_IPV6_TOO_FEW_PARTS,
INVALID_URL_UNIT,
SPECIAL_SCHEME_MISSING_FOLLOWING_SOLIDUS,
MISSING_SCHEME_NON_RELATIVE_URL,
INVALID_REVERSE_SOLIDUS,
INVALID_CREDENTIALS,
HOST_MISSING,
PORT_OUT_OF_RANGE,
PORT_INVALID,
FILE_INVALID_WINDOWS_DRIVE_LETTER,
FILE_INVALID_WINDOWS_DRIVE_LETTER_HOST,
}
| ValidationError |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.