language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/uris/UrisBaseTest.java | {
"start": 1022,
"end": 1112
} | class ____ {@link java.net.URI} tests.
*
* @author Alexander Bischof
*/
public abstract | for |
java | alibaba__nacos | naming/src/main/java/com/alibaba/nacos/naming/push/v2/hook/PushResultHookHolder.java | {
"start": 827,
"end": 1493
} | class ____ implements PushResultHook {
private static final PushResultHookHolder INSTANCE = new PushResultHookHolder();
private final Collection<PushResultHook> hooks;
private PushResultHookHolder() {
hooks = NacosServiceLoader.load(PushResultHook.class);
}
public static PushResultHookHolder getInstance() {
return INSTANCE;
}
@Override
public void pushSuccess(PushResult result) {
hooks.forEach(each -> each.pushSuccess(result));
}
@Override
public void pushFailed(PushResult result) {
hooks.forEach(each -> each.pushFailed(result));
}
}
| PushResultHookHolder |
java | apache__camel | components/camel-jira/src/test/java/org/apache/camel/component/jira/producer/AddIssueLinkProducerTest.java | {
"start": 3127,
"end": 9329
} | class ____ extends CamelTestSupport {
@Mock
private JiraRestClient jiraClient;
@Mock
private JiraRestClientFactory jiraRestClientFactory;
@Mock
private IssueRestClient issueRestClient;
@Produce("direct:start")
private ProducerTemplate template;
@EndpointInject("mock:result")
private MockEndpoint mockResult;
private Issue parentIssue;
private Issue childIssue;
@Override
protected void bindToRegistry(Registry registry) {
registry.bind(JIRA_REST_CLIENT_FACTORY, jiraRestClientFactory);
}
@Override
protected CamelContext createCamelContext() throws Exception {
setMocks();
CamelContext camelContext = super.createCamelContext();
camelContext.disableJMX();
JiraComponent component = new JiraComponent(camelContext);
camelContext.addComponent(JIRA, component);
return camelContext;
}
public void setMocks() {
lenient().when(jiraRestClientFactory.createWithBasicHttpAuthentication(any(), any(), any())).thenReturn(jiraClient);
lenient().when(jiraClient.getIssueClient()).thenReturn(issueRestClient);
parentIssue = createIssue(1);
childIssue = createIssue(2);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.to("jira://addIssueLink?jiraUrl=" + JIRA_CREDENTIALS)
.to(mockResult);
}
};
}
@Test
public void testAddIssueLink() throws InterruptedException {
String comment = "A new test comment " + new Date();
String linkType = "Relates";
Map<String, Object> headers = new HashMap<>();
headers.put(PARENT_ISSUE_KEY, parentIssue.getKey());
headers.put(CHILD_ISSUE_KEY, childIssue.getKey());
headers.put(LINK_TYPE, linkType);
when(issueRestClient.linkIssue(any(LinkIssuesInput.class)))
.then((Answer<Void>) inv -> {
Collection<IssueLink> links = new ArrayList<>();
links.add(newIssueLink(childIssue.getId(), 1, comment));
parentIssue = createIssueWithLinks(parentIssue.getId(), links);
return null;
});
template.sendBodyAndHeaders(comment, headers);
mockResult.expectedMessageCount(1);
mockResult.assertIsSatisfied();
verify(issueRestClient).linkIssue(any(LinkIssuesInput.class));
}
@Test
public void testAddIssueLinkNoComment() throws InterruptedException {
String linkType = "Relates";
Map<String, Object> headers = new HashMap<>();
headers.put(PARENT_ISSUE_KEY, parentIssue.getKey());
headers.put(CHILD_ISSUE_KEY, childIssue.getKey());
headers.put(LINK_TYPE, linkType);
when(issueRestClient.linkIssue(any(LinkIssuesInput.class)))
.then((Answer<Void>) inv -> {
Collection<IssueLink> links = new ArrayList<>();
links.add(newIssueLink(childIssue.getId(), 1, null));
parentIssue = createIssueWithLinks(parentIssue.getId(), links);
return null;
});
template.sendBodyAndHeaders(null, headers);
mockResult.expectedMessageCount(1);
mockResult.assertIsSatisfied();
verify(issueRestClient).linkIssue(any(LinkIssuesInput.class));
}
@Test
public void testAddIssueLinkMissingParentIssueKey() throws InterruptedException {
String comment = "A new test comment " + new Date();
String linkType = "Relates";
Map<String, Object> headers = new HashMap<>();
headers.put(CHILD_ISSUE_KEY, childIssue.getKey());
headers.put(LINK_TYPE, linkType);
try {
template.sendBodyAndHeaders(comment, headers);
fail("Should have thrown an exception");
} catch (CamelExecutionException e) {
IllegalArgumentException cause = assertIsInstanceOf(IllegalArgumentException.class, e.getCause());
assertStringContains(cause.getMessage(), PARENT_ISSUE_KEY);
}
mockResult.expectedMessageCount(0);
mockResult.assertIsSatisfied();
verify(issueRestClient, never()).linkIssue(any(LinkIssuesInput.class));
}
@Test
public void testAddIssueLinkMissingChildIssueKey() throws InterruptedException {
String comment = "A new test comment " + new Date();
String linkType = "Relates";
Map<String, Object> headers = new HashMap<>();
headers.put(PARENT_ISSUE_KEY, parentIssue.getKey());
headers.put(LINK_TYPE, linkType);
try {
template.sendBodyAndHeaders(comment, headers);
fail("Should have thrown an exception");
} catch (CamelExecutionException e) {
IllegalArgumentException cause = assertIsInstanceOf(IllegalArgumentException.class, e.getCause());
assertStringContains(cause.getMessage(), CHILD_ISSUE_KEY);
}
mockResult.expectedMessageCount(0);
mockResult.assertIsSatisfied();
verify(issueRestClient, never()).linkIssue(any(LinkIssuesInput.class));
}
@Test
public void testAddIssueLinkMissingLinkType() throws InterruptedException {
String comment = "A new test comment " + new Date();
Map<String, Object> headers = new HashMap<>();
headers.put(PARENT_ISSUE_KEY, parentIssue.getKey());
headers.put(CHILD_ISSUE_KEY, childIssue.getKey());
try {
template.sendBodyAndHeaders(comment, headers);
fail("Should have thrown an exception");
} catch (CamelExecutionException e) {
IllegalArgumentException cause = assertIsInstanceOf(IllegalArgumentException.class, e.getCause());
assertStringContains(cause.getMessage(), LINK_TYPE);
}
mockResult.expectedMessageCount(0);
mockResult.assertIsSatisfied();
verify(issueRestClient, never()).linkIssue(any(LinkIssuesInput.class));
}
}
| AddIssueLinkProducerTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/metadata/IndexReshardingState.java | {
"start": 1375,
"end": 1746
} | class ____ implements Writeable, ToXContentFragment {
/**
* @return the number of shards the index has at the start of this operation
*/
public abstract int shardCountBefore();
/**
* @return the number of shards that the index will have when resharding completes
*/
public abstract int shardCountAfter();
// This | IndexReshardingState |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/impl/health/MyFooHealthCheck.java | {
"start": 1043,
"end": 1366
} | class ____ extends AbstractHealthCheck {
public MyFooHealthCheck() {
super("acme", "myfoo");
}
@Override
protected void doCall(HealthCheckResultBuilder builder, Map<String, Object> options) {
builder.state(State.DOWN);
builder.message("Chaos Monkey was here");
}
}
| MyFooHealthCheck |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/util/ServletContextPropertyUtils.java | {
"start": 1362,
"end": 3811
} | class ____ {
private static final PropertyPlaceholderHelper strictHelper =
new PropertyPlaceholderHelper(SystemPropertyUtils.PLACEHOLDER_PREFIX,
SystemPropertyUtils.PLACEHOLDER_SUFFIX, SystemPropertyUtils.VALUE_SEPARATOR,
SystemPropertyUtils.ESCAPE_CHARACTER, false);
private static final PropertyPlaceholderHelper nonStrictHelper =
new PropertyPlaceholderHelper(SystemPropertyUtils.PLACEHOLDER_PREFIX,
SystemPropertyUtils.PLACEHOLDER_SUFFIX, SystemPropertyUtils.VALUE_SEPARATOR,
SystemPropertyUtils.ESCAPE_CHARACTER, true);
/**
* Resolve ${...} placeholders in the given text, replacing them with corresponding
* servlet context init parameter or system property values.
* @param text the String to resolve
* @param servletContext the servletContext to use for lookups.
* @return the resolved String
* @throws IllegalArgumentException if there is an unresolvable placeholder
* @see SystemPropertyUtils#PLACEHOLDER_PREFIX
* @see SystemPropertyUtils#PLACEHOLDER_SUFFIX
* @see SystemPropertyUtils#resolvePlaceholders(String, boolean)
*/
public static String resolvePlaceholders(String text, ServletContext servletContext) {
return resolvePlaceholders(text, servletContext, false);
}
/**
* Resolve ${...} placeholders in the given text, replacing them with corresponding
* servlet context init parameter or system property values. Unresolvable placeholders
* with no default value are ignored and passed through unchanged if the flag is set to true.
* @param text the String to resolve
* @param servletContext the servletContext to use for lookups.
* @param ignoreUnresolvablePlaceholders flag to determine is unresolved placeholders are ignored
* @return the resolved String
* @throws IllegalArgumentException if there is an unresolvable placeholder and the flag is {@code false}
* @see SystemPropertyUtils#PLACEHOLDER_PREFIX
* @see SystemPropertyUtils#PLACEHOLDER_SUFFIX
* @see SystemPropertyUtils#resolvePlaceholders(String, boolean)
*/
public static String resolvePlaceholders(
String text, ServletContext servletContext, boolean ignoreUnresolvablePlaceholders) {
if (text.isEmpty()) {
return text;
}
PropertyPlaceholderHelper helper = (ignoreUnresolvablePlaceholders ? nonStrictHelper : strictHelper);
return helper.replacePlaceholders(text, new ServletContextPlaceholderResolver(text, servletContext));
}
private static | ServletContextPropertyUtils |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/common/operators/SlotSharingGroupTest.java | {
"start": 1141,
"end": 3919
} | class ____ {
@Test
void testBuildSlotSharingGroupWithSpecificResource() {
final String name = "ssg";
final MemorySize heap = MemorySize.ofMebiBytes(100);
final MemorySize offHeap = MemorySize.ofMebiBytes(200);
final MemorySize managed = MemorySize.ofMebiBytes(300);
final SlotSharingGroup slotSharingGroup =
SlotSharingGroup.newBuilder(name)
.setCpuCores(1)
.setTaskHeapMemory(heap)
.setTaskOffHeapMemory(offHeap)
.setManagedMemory(managed)
.setExternalResource("gpu", 1)
.build();
assertThat(slotSharingGroup.getName()).isEqualTo(name);
assertThat(slotSharingGroup.getCpuCores()).hasValue(1.0);
assertThat(slotSharingGroup.getTaskHeapMemory()).hasValue(heap);
assertThat(slotSharingGroup.getTaskOffHeapMemory()).hasValue(offHeap);
assertThat(slotSharingGroup.getManagedMemory()).hasValue(managed);
assertThat(slotSharingGroup.getExternalResources())
.isEqualTo(Collections.singletonMap("gpu", 1.0));
}
@Test
void testBuildSlotSharingGroupWithUnknownResource() {
final String name = "ssg";
final SlotSharingGroup slotSharingGroup = SlotSharingGroup.newBuilder(name).build();
assertThat(slotSharingGroup.getName()).isEqualTo(name);
assertThat(slotSharingGroup.getCpuCores()).isNotPresent();
assertThat(slotSharingGroup.getTaskHeapMemory()).isNotPresent();
assertThat(slotSharingGroup.getManagedMemory()).isNotPresent();
assertThat(slotSharingGroup.getTaskOffHeapMemory()).isNotPresent();
assertThat(slotSharingGroup.getExternalResources()).isEmpty();
}
@Test
void testBuildSlotSharingGroupWithIllegalConfig() {
assertThatThrownBy(
() ->
SlotSharingGroup.newBuilder("ssg")
.setCpuCores(1)
.setTaskHeapMemory(MemorySize.ZERO)
.setTaskOffHeapMemoryMB(10)
.build())
.isInstanceOf(IllegalArgumentException.class);
}
@Test
void testBuildSlotSharingGroupWithoutAllRequiredConfig() {
assertThatThrownBy(
() ->
SlotSharingGroup.newBuilder("ssg")
.setCpuCores(1)
.setTaskOffHeapMemoryMB(10)
.build())
.isInstanceOf(IllegalArgumentException.class);
}
}
| SlotSharingGroupTest |
java | spring-projects__spring-boot | module/spring-boot-devtools/src/main/java/org/springframework/boot/devtools/classpath/ClassPathFileSystemWatcher.java | {
"start": 1408,
"end": 3534
} | class ____ implements InitializingBean, DisposableBean, ApplicationContextAware {
private final FileSystemWatcher fileSystemWatcher;
private final @Nullable ClassPathRestartStrategy restartStrategy;
@SuppressWarnings("NullAway.Init")
private ApplicationContext applicationContext;
private boolean stopWatcherOnRestart;
/**
* Create a new {@link ClassPathFileSystemWatcher} instance.
* @param fileSystemWatcherFactory a factory to create the underlying
* {@link FileSystemWatcher} used to monitor the local file system
* @param restartStrategy the classpath restart strategy
* @param urls the URLs to watch
*/
public ClassPathFileSystemWatcher(FileSystemWatcherFactory fileSystemWatcherFactory,
@Nullable ClassPathRestartStrategy restartStrategy, URL[] urls) {
Assert.notNull(fileSystemWatcherFactory, "'fileSystemWatcherFactory' must not be null");
Assert.notNull(urls, "'urls' must not be null");
this.fileSystemWatcher = fileSystemWatcherFactory.getFileSystemWatcher();
this.restartStrategy = restartStrategy;
this.fileSystemWatcher.addSourceDirectories(new ClassPathDirectories(urls));
}
/**
* Set if the {@link FileSystemWatcher} should be stopped when a full restart occurs.
* @param stopWatcherOnRestart if the watcher should be stopped when a restart occurs
*/
public void setStopWatcherOnRestart(boolean stopWatcherOnRestart) {
this.stopWatcherOnRestart = stopWatcherOnRestart;
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
}
@Override
public void afterPropertiesSet() throws Exception {
if (this.restartStrategy != null) {
FileSystemWatcher watcherToStop = null;
if (this.stopWatcherOnRestart) {
watcherToStop = this.fileSystemWatcher;
}
this.fileSystemWatcher.addListener(
new ClassPathFileChangeListener(this.applicationContext, this.restartStrategy, watcherToStop));
}
this.fileSystemWatcher.start();
}
@Override
public void destroy() throws Exception {
this.fileSystemWatcher.stop();
}
}
| ClassPathFileSystemWatcher |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/aggregator/AggregationStrategyBeanAdapterAllowNullOldExchangeTest.java | {
"start": 1060,
"end": 2079
} | class ____ extends ContextTestSupport {
private final MyBodyAppender appender = new MyBodyAppender();
private AggregationStrategyBeanAdapter myStrategy;
@Test
public void testAggregate() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("OldWasNullABC");
template.sendBody("direct:start", "A");
template.sendBody("direct:start", "B");
template.sendBody("direct:start", "C");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
myStrategy = new AggregationStrategyBeanAdapter(appender, "append");
myStrategy.setAllowNullOldExchange(true);
from("direct:start").aggregate(constant(true), myStrategy).completionSize(3).to("mock:result");
}
};
}
public static final | AggregationStrategyBeanAdapterAllowNullOldExchangeTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/doublearray/DoubleArrayAssert_hasSizeLessThanOrEqualTo_Test.java | {
"start": 806,
"end": 1178
} | class ____ extends DoubleArrayAssertBaseTest {
@Override
protected DoubleArrayAssert invoke_api_method() {
return assertions.hasSizeLessThanOrEqualTo(6);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertHasSizeLessThanOrEqualTo(getInfo(assertions), getActual(assertions), 6);
}
}
| DoubleArrayAssert_hasSizeLessThanOrEqualTo_Test |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/cache/support/NoOpCacheManager.java | {
"start": 1360,
"end": 1745
} | class ____ implements CacheManager {
private final ConcurrentMap<String, Cache> cacheMap = new ConcurrentHashMap<>(16);
@Override
public @Nullable Cache getCache(String name) {
return this.cacheMap.computeIfAbsent(name, NoOpCache::new);
}
@Override
public Collection<String> getCacheNames() {
return Collections.unmodifiableSet(this.cacheMap.keySet());
}
}
| NoOpCacheManager |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/discriminator/associations/OneToManyEagerDiscriminatorTest.java | {
"start": 3523,
"end": 3937
} | class ____ extends ValueBase {
private String data;
@ManyToOne(fetch = FetchType.EAGER)
@JoinColumn(name = "entity_id", nullable = false)
private User entity;
public String getData() {
return data;
}
public void setData(String data) {
this.data = data;
}
public User getEntity() {
return entity;
}
public void setEntity(User entity) {
this.entity = entity;
}
}
}
| UserValueBase |
java | alibaba__nacos | core/src/test/java/com/alibaba/nacos/core/remote/core/ServerLoaderInfoRequestHandlerTest.java | {
"start": 1466,
"end": 2336
} | class ____ {
@InjectMocks
private ServerLoaderInfoRequestHandler handler;
@Mock
private ConnectionManager connectionManager;
@Test
void testHandle() {
Mockito.when(connectionManager.currentClientsCount()).thenReturn(1);
Mockito.when(connectionManager.currentClientsCount(Mockito.any())).thenReturn(1);
ServerLoaderInfoRequest request = new ServerLoaderInfoRequest();
RequestMeta meta = new RequestMeta();
try {
ServerLoaderInfoResponse response = handler.handle(request, meta);
String sdkConCount = response.getMetricsValue("sdkConCount");
assertEquals("1", sdkConCount);
} catch (NacosException e) {
e.printStackTrace();
fail(e.getMessage());
}
}
}
| ServerLoaderInfoRequestHandlerTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLineReader.java | {
"start": 1085,
"end": 5630
} | class ____ {
/**
* TEST_1: The test scenario is the tail of the buffer equals the starting
* character/s of delimiter.
*
* The Test Data is such that,
*
* 1) we will have "</entity>" as delimiter
*
* 2) The tail of the current buffer would be "</" which matches with the
* starting character sequence of delimiter.
*
* 3) The Head of the next buffer would be "id>" which does NOT match with
* the remaining characters of delimiter.
*
* 4) Input data would be prefixed by char 'a' about
* numberOfCharToFillTheBuffer times. So that, one iteration to buffer the
* input data, would end at '</' ie equals starting 2 char of delimiter
*
* 5) For this we would take BufferSize as 64 * 1024;
*
* Check Condition In the second key value pair, the value should contain
* "</" from currentToken and "id>" from next token
*/
@Test
public void testCustomDelimiter1() throws Exception {
final String delimiter = "</entity>";
// Ending part of Input Data Buffer
// It contains '</' ie delimiter character
final String currentBufferTailToken = "</entity><entity><id>Gelesh</";
// Supposing the start of next buffer is this
final String nextBufferHeadToken = "id><name>Omathil</name></entity>";
// Expected must capture from both the buffer, excluding Delimiter
final String expected =
(currentBufferTailToken + nextBufferHeadToken).replace(delimiter, "");
final String testPartOfInput = currentBufferTailToken + nextBufferHeadToken;
final int bufferSize = 64 * 1024;
int numberOfCharToFillTheBuffer =
bufferSize - currentBufferTailToken.length();
final char[] fillBuffer = new char[numberOfCharToFillTheBuffer];
// char 'a' as a filler for the test string
Arrays.fill(fillBuffer, 'a');
final StringBuilder fillerString = new StringBuilder();
final String testData = fillerString + testPartOfInput;
final LineReader lineReader = new LineReader(
new ByteArrayInputStream(testData.getBytes(StandardCharsets.UTF_8)),
delimiter.getBytes(StandardCharsets.UTF_8));
final Text line = new Text();
lineReader.readLine(line);
lineReader.close();
assertEquals(fillerString.toString(), line.toString());
lineReader.readLine(line);
assertEquals(expected, line.toString());
}
/**
* TEST_2: The test scenario is such that, the character/s preceding the
* delimiter, equals the starting character/s of delimiter.
*/
@Test
public void testCustomDelimiter2() throws Exception {
final String delimiter = "record";
final StringBuilder testStringBuilder = new StringBuilder();
testStringBuilder.append(delimiter).append("Kerala ");
testStringBuilder.append(delimiter).append("Bangalore");
testStringBuilder.append(delimiter).append(" North Korea");
testStringBuilder.append(delimiter).append(delimiter).append("Guantanamo");
// ~EOF with 're'
testStringBuilder.append(delimiter + "ecord" + "recor" + "core");
final String testData = testStringBuilder.toString();
final LineReader lineReader = new LineReader(
new ByteArrayInputStream(testData.getBytes(StandardCharsets.UTF_8)),
delimiter.getBytes((StandardCharsets.UTF_8)));
final Text line = new Text();
lineReader.readLine(line);
assertEquals("", line.toString());
lineReader.readLine(line);
assertEquals("Kerala ", line.toString());
lineReader.readLine(line);
assertEquals("Bangalore", line.toString());
lineReader.readLine(line);
assertEquals(" North Korea", line.toString());
lineReader.readLine(line);
assertEquals("", line.toString());
lineReader.readLine(line);
assertEquals("Guantanamo", line.toString());
lineReader.readLine(line);
assertEquals(("ecord" + "recor" + "core"), line.toString());
lineReader.close();
}
/**
* Test 3: The test scenario is such that, aaabccc split by aaab.
*/
@Test
public void testCustomDelimiter3() throws Exception {
final String testData = "aaaabccc";
final String delimiter = "aaab";
final LineReader lineReader = new LineReader(
new ByteArrayInputStream(testData.getBytes(StandardCharsets.UTF_8)),
delimiter.getBytes(StandardCharsets.UTF_8));
final Text line = new Text();
lineReader.readLine(line);
assertEquals("a", line.toString());
lineReader.readLine(line);
assertEquals("ccc", line.toString());
lineReader.close();
}
}
| TestLineReader |
java | spring-projects__spring-boot | module/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/endpoint/annotation/DiscovererEndpointFilter.java | {
"start": 969,
"end": 1554
} | class ____ implements EndpointFilter<DiscoveredEndpoint<?>> {
private final Class<? extends EndpointDiscoverer<?, ?>> discoverer;
/**
* Create a new {@link DiscovererEndpointFilter} instance.
* @param discoverer the required discoverer
*/
protected DiscovererEndpointFilter(Class<? extends EndpointDiscoverer<?, ?>> discoverer) {
Assert.notNull(discoverer, "'discoverer' must not be null");
this.discoverer = discoverer;
}
@Override
public boolean match(DiscoveredEndpoint<?> endpoint) {
return endpoint.wasDiscoveredBy(this.discoverer);
}
}
| DiscovererEndpointFilter |
java | apache__camel | components/camel-jackson/src/test/java/org/apache/camel/component/jackson/JacksonMarshalUnmarshalListTest.java | {
"start": 1192,
"end": 3041
} | class ____ extends CamelTestSupport {
@Test
public void testUnmarshalListPojo() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:reversePojo");
mock.expectedMessageCount(1);
mock.message(0).body().isInstanceOf(List.class);
String json = "[{\"name\":\"Camel\"}, {\"name\":\"World\"}]";
template.sendBody("direct:backPojo", json);
MockEndpoint.assertIsSatisfied(context);
List list = mock.getReceivedExchanges().get(0).getIn().getBody(List.class);
assertNotNull(list);
assertEquals(2, list.size());
TestPojo pojo = (TestPojo) list.get(0);
assertEquals("Camel", pojo.getName());
pojo = (TestPojo) list.get(1);
assertEquals("World", pojo.getName());
}
@Test
public void testUnmarshalListPojoOneElement() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:reversePojo");
mock.expectedMessageCount(1);
mock.message(0).body().isInstanceOf(List.class);
String json = "[{\"name\":\"Camel\"}]";
template.sendBody("direct:backPojo", json);
MockEndpoint.assertIsSatisfied(context);
List list = mock.getReceivedExchanges().get(0).getIn().getBody(List.class);
assertNotNull(list);
assertEquals(1, list.size());
TestPojo pojo = (TestPojo) list.get(0);
assertEquals("Camel", pojo.getName());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
JacksonDataFormat format = new JacksonDataFormat(TestPojo.class);
format.useList();
from("direct:backPojo").unmarshal(format).to("mock:reversePojo");
}
};
}
}
| JacksonMarshalUnmarshalListTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/codec/vectors/es93/DirectIOCapableLucene99FlatVectorsFormat.java | {
"start": 3925,
"end": 5737
} | class ____ extends FlatVectorsReader {
private final Lucene99FlatVectorsReader inner;
private final SegmentReadState state;
Lucene99FlatBulkScoringVectorsReader(SegmentReadState state, Lucene99FlatVectorsReader inner, FlatVectorsScorer scorer) {
super(scorer);
this.inner = inner;
this.state = state;
}
@Override
public void close() throws IOException {
inner.close();
}
@Override
public RandomVectorScorer getRandomVectorScorer(String field, float[] target) throws IOException {
return inner.getRandomVectorScorer(field, target);
}
@Override
public RandomVectorScorer getRandomVectorScorer(String field, byte[] target) throws IOException {
return inner.getRandomVectorScorer(field, target);
}
@Override
public void checkIntegrity() throws IOException {
inner.checkIntegrity();
}
@Override
public FloatVectorValues getFloatVectorValues(String field) throws IOException {
FloatVectorValues vectorValues = inner.getFloatVectorValues(field);
if (vectorValues == null || vectorValues.size() == 0) {
return null;
}
FieldInfo info = state.fieldInfos.fieldInfo(field);
return new RescorerOffHeapVectorValues(vectorValues, info.getVectorSimilarityFunction(), vectorScorer);
}
@Override
public ByteVectorValues getByteVectorValues(String field) throws IOException {
return inner.getByteVectorValues(field);
}
@Override
public long ramBytesUsed() {
return inner.ramBytesUsed();
}
}
static | Lucene99FlatBulkScoringVectorsReader |
java | netty__netty | codec-http2/src/main/java/io/netty/handler/codec/http2/Http2ControlFrameLimitEncoder.java | {
"start": 1306,
"end": 5015
} | class ____ extends DecoratingHttp2ConnectionEncoder {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(Http2ControlFrameLimitEncoder.class);
private final int maxOutstandingControlFrames;
private final ChannelFutureListener outstandingControlFramesListener = new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) {
outstandingControlFrames--;
}
};
private Http2LifecycleManager lifecycleManager;
private int outstandingControlFrames;
private boolean limitReached;
Http2ControlFrameLimitEncoder(Http2ConnectionEncoder delegate, int maxOutstandingControlFrames) {
super(delegate);
this.maxOutstandingControlFrames = ObjectUtil.checkPositive(maxOutstandingControlFrames,
"maxOutstandingControlFrames");
}
@Override
public void lifecycleManager(Http2LifecycleManager lifecycleManager) {
this.lifecycleManager = lifecycleManager;
super.lifecycleManager(lifecycleManager);
}
@Override
public ChannelFuture writeSettingsAck(ChannelHandlerContext ctx, ChannelPromise promise) {
ChannelPromise newPromise = handleOutstandingControlFrames(ctx, promise);
if (newPromise == null) {
return promise;
}
return super.writeSettingsAck(ctx, newPromise);
}
@Override
public ChannelFuture writePing(ChannelHandlerContext ctx, boolean ack, long data, ChannelPromise promise) {
// Only apply the limit to ping acks.
if (ack) {
ChannelPromise newPromise = handleOutstandingControlFrames(ctx, promise);
if (newPromise == null) {
return promise;
}
return super.writePing(ctx, ack, data, newPromise);
}
return super.writePing(ctx, ack, data, promise);
}
@Override
public ChannelFuture writeRstStream(
ChannelHandlerContext ctx, int streamId, long errorCode, ChannelPromise promise) {
ChannelPromise newPromise = handleOutstandingControlFrames(ctx, promise);
if (newPromise == null) {
return promise;
}
return super.writeRstStream(ctx, streamId, errorCode, newPromise);
}
private ChannelPromise handleOutstandingControlFrames(ChannelHandlerContext ctx, ChannelPromise promise) {
if (!limitReached) {
if (outstandingControlFrames == maxOutstandingControlFrames) {
// Let's try to flush once as we may be able to flush some of the control frames.
ctx.flush();
}
if (outstandingControlFrames == maxOutstandingControlFrames) {
limitReached = true;
Http2Exception exception = Http2Exception.connectionError(Http2Error.ENHANCE_YOUR_CALM,
"Maximum number %d of outstanding control frames reached", maxOutstandingControlFrames);
logger.info("{} Maximum number {} of outstanding control frames reached, closing channel.",
ctx.channel(), maxOutstandingControlFrames, exception);
// First notify the Http2LifecycleManager and then close the connection.
lifecycleManager.onError(ctx, true, exception);
ctx.close();
}
outstandingControlFrames++;
// We did not reach the limit yet, add the listener to decrement the number of outstanding control frames
// once the promise was completed
return promise.unvoid().addListener(outstandingControlFramesListener);
}
return promise;
}
}
| Http2ControlFrameLimitEncoder |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/InstanceOfAssertFactoriesTest.java | {
"start": 47754,
"end": 48362
} | class ____ {
private final Object actual = new Date();
@Test
void createAssert() {
// WHEN
AbstractDateAssert<?> result = DATE.createAssert(actual);
// THEN
result.isBeforeOrEqualTo(new Date());
}
@Test
void createAssert_with_ValueProvider() {
// GIVEN
ValueProvider<?> valueProvider = mockThatDelegatesTo(type -> actual);
// WHEN
AbstractDateAssert<?> result = DATE.createAssert(valueProvider);
// THEN
result.isBeforeOrEqualTo(new Date());
verify(valueProvider).apply(Date.class);
}
}
@Nested
| Date_Factory |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/errors/MultipleAmbiguousGlobalErrorHandlersTest.java | {
"start": 840,
"end": 1036
} | class ____ {
@OnError
void onError1(IllegalStateException ise) {
}
@OnError
void onError2(IllegalStateException ise) {
}
}
}
| GlobalErrorHandlers |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/util/GlobUtil.java | {
"start": 157,
"end": 2847
} | class ____ {
private GlobUtil() {
}
/**
* @deprecated in favor of {@link io.quarkus.util.GlobUtil#toRegexPattern(String)}
*
* Transforms the given {@code glob} to a regular expression suitable for passing to
* {@link Pattern#compile(String)}.
*
* <h2>Glob syntax
* <h2>
*
* <table>
* <tr>
* <th>Construct</th>
* <th>Description</th>
* </tr>
* <tr>
* <td><code>*</code></td>
* <td>Matches a (possibly empty) sequence of characters that does not contain slash ({@code /})</td>
* </tr>
* <tr>
* <td><code>**</code></td>
* <td>Matches a (possibly empty) sequence of characters that may contain slash ({@code /})</td>
* </tr>
* <tr>
* <td><code>?</code></td>
* <td>Matches one character, but not slash</td>
* </tr>
* <tr>
* <td><code>[abc]</code></td>
* <td>Matches one character given in the bracket, but not slash</td>
* </tr>
* <tr>
* <td><code>[a-z]</code></td>
* <td>Matches one character from the range given in the bracket, but not slash</td>
* </tr>
* <tr>
* <td><code>[!abc]</code></td>
* <td>Matches one character not named in the bracket; does not match slash</td>
* </tr>
* <tr>
* <td><code>[a-z]</code></td>
* <td>Matches one character outside the range given in the bracket; does not match slash</td>
* </tr>
* <tr>
* <td><code>{one,two,three}</code></td>
* <td>Matches any of the alternating tokens separated by comma; the tokens may contain wildcards, nested
* alternations and ranges</td>
* </tr>
* <tr>
* <td><code>\</code></td>
* <td>The escape character</td>
* </tr>
* </table>
*
* @param glob the glob expression to transform
* @return a regular expression suitable for {@link Pattern}
* @throws IllegalStateException in case the {@code glob} is syntactically invalid
*/
@Deprecated
public static String toRegexPattern(String glob) {
return io.quarkus.util.GlobUtil.toRegexPattern(glob);
}
}
| GlobUtil |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/ActivateProfileAction.java | {
"start": 369,
"end": 684
} | class ____ extends ActionType<ActivateProfileResponse> {
public static final String NAME = "cluster:admin/xpack/security/profile/activate";
public static final ActivateProfileAction INSTANCE = new ActivateProfileAction();
public ActivateProfileAction() {
super(NAME);
}
}
| ActivateProfileAction |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/configuration/BeanMethodQualificationTests.java | {
"start": 10780,
"end": 11530
} | class ____ {
@Bean @Qualifier("interesting")
public static TestBean testBean1() {
return new TestBean("interesting");
}
@Bean @Qualifier("interesting") @Fallback
public static TestBean testBean1x() {
return new TestBean("");
}
@Bean @Boring
public TestBean testBean2(TestBean testBean1, TestBean[] testBeanArray,
List<TestBean> testBeanList, Map<String, TestBean> testBeanMap) {
TestBean tb = new TestBean("boring");
tb.setSpouse(testBean1);
tb.setPets(CollectionUtils.arrayToList(testBeanArray));
tb.setSomeList(testBeanList);
tb.setSomeMap(testBeanMap);
return tb;
}
@Bean @Boring @Fallback
public TestBean testBean2x() {
return new TestBean("");
}
}
@Configuration
static | FallbackConfig |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/resource/basic/resource/WiderMappingDefaultOptions.java | {
"start": 156,
"end": 266
} | class ____ {
@OPTIONS
public String options() {
return "hello";
}
}
| WiderMappingDefaultOptions |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/testsupport/TestHelper.java | {
"start": 114727,
"end": 117892
} | class ____<T> implements
ObservableConverter<T, TestObserverEx<T>>,
SingleConverter<T, TestObserverEx<T>>,
MaybeConverter<T, TestObserverEx<T>>,
CompletableConverter<TestObserverEx<Void>>,
FlowableConverter<T, TestSubscriberEx<T>> {
final boolean cancelled;
final int fusionMode;
TestConsumerExConverters(boolean cancelled, int fusionMode) {
this.cancelled = cancelled;
this.fusionMode = fusionMode;
}
@Override
public TestObserverEx<Void> apply(Completable upstream) {
TestObserverEx<Void> toe = new TestObserverEx<>();
if (cancelled) {
toe.dispose();
}
toe.setInitialFusionMode(fusionMode);
return upstream.subscribeWith(toe);
}
@Override
public TestObserverEx<T> apply(Maybe<T> upstream) {
TestObserverEx<T> toe = new TestObserverEx<>();
if (cancelled) {
toe.dispose();
}
toe.setInitialFusionMode(fusionMode);
return upstream.subscribeWith(toe);
}
@Override
public TestObserverEx<T> apply(Single<T> upstream) {
TestObserverEx<T> toe = new TestObserverEx<>();
if (cancelled) {
toe.dispose();
}
toe.setInitialFusionMode(fusionMode);
return upstream.subscribeWith(toe);
}
@Override
public TestObserverEx<T> apply(Observable<T> upstream) {
TestObserverEx<T> toe = new TestObserverEx<>();
if (cancelled) {
toe.dispose();
}
toe.setInitialFusionMode(fusionMode);
return upstream.subscribeWith(toe);
}
@Override
public TestSubscriberEx<T> apply(Flowable<T> upstream) {
TestSubscriberEx<T> tse = new TestSubscriberEx<>();
if (cancelled) {
tse.dispose();
}
tse.setInitialFusionMode(fusionMode);
return upstream.subscribeWith(tse);
}
}
@SafeVarargs
public static <T> TestSubscriberEx<T> assertValueSet(TestSubscriberEx<T> ts, T... values) {
Set<T> expectedSet = new HashSet<>(Arrays.asList(values));
for (T t : ts.values()) {
if (!expectedSet.contains(t)) {
throw ts.failWith("Item not in the set: " + BaseTestConsumer.valueAndClass(t));
}
}
return ts;
}
@SafeVarargs
public static <T> TestObserverEx<T> assertValueSet(TestObserverEx<T> to, T... values) {
Set<T> expectedSet = new HashSet<>(Arrays.asList(values));
for (T t : to.values()) {
if (!expectedSet.contains(t)) {
throw to.failWith("Item not in the set: " + BaseTestConsumer.valueAndClass(t));
}
}
return to;
}
/**
* Given a base reactive type name, try to find its source in the current runtime
* path and return a file to it or null if not found.
* @param baseClassName the | TestConsumerExConverters |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/InitializeInlineTest.java | {
"start": 1470,
"end": 1794
} | class ____ {
void test() {
int a = 1;
final int b = 1;
}
}
""")
.doTest();
}
@Test
public void multipleAssignment_noMatch() {
compilationHelper
.addInputLines(
"Test.java",
"""
| Test |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/expr/SQLBooleanExpr.java | {
"start": 2502,
"end": 2537
} | enum ____ {
ON_OFF
}
}
| Type |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableConcatWithSingle.java | {
"start": 1376,
"end": 1813
} | class ____<T> extends AbstractFlowableWithUpstream<T, T> {
final SingleSource<? extends T> other;
public FlowableConcatWithSingle(Flowable<T> source, SingleSource<? extends T> other) {
super(source);
this.other = other;
}
@Override
protected void subscribeActual(Subscriber<? super T> s) {
source.subscribe(new ConcatWithSubscriber<>(s, other));
}
static final | FlowableConcatWithSingle |
java | quarkusio__quarkus | extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/ForwardedServerRequestWrapper.java | {
"start": 1098,
"end": 10076
} | class ____ extends HttpServerRequestWrapper implements HttpServerRequest {
private final ForwardedParser forwardedParser;
private boolean modified;
private HttpMethod method;
private String path;
private String query;
private String uri;
private String absoluteURI;
public ForwardedServerRequestWrapper(HttpServerRequest request, ForwardingProxyOptions forwardingProxyOptions,
TrustedProxyCheck trustedProxyCheck) {
super((HttpServerRequestInternal) request);
forwardedParser = new ForwardedParser(delegate, forwardingProxyOptions, trustedProxyCheck);
}
void changeTo(HttpMethod method, String uri) {
modified = true;
this.method = method;
this.uri = uri;
// lazy initialization
this.path = null;
this.query = null;
this.absoluteURI = null;
// parse
int queryIndex = uri.indexOf('?');
int fragmentIndex = uri.indexOf('#');
// there's a query
if (queryIndex != -1) {
path = uri.substring(0, queryIndex);
// there's a fragment
if (fragmentIndex != -1) {
query = uri.substring(queryIndex + 1, fragmentIndex);
} else {
query = uri.substring(queryIndex + 1);
}
} else {
// there's a fragment
if (fragmentIndex != -1) {
path = uri.substring(0, fragmentIndex);
} else {
path = uri;
}
}
}
@Override
public long bytesRead() {
return delegate.bytesRead();
}
@Override
public HttpServerRequest exceptionHandler(Handler<Throwable> handler) {
delegate.exceptionHandler(handler);
return this;
}
@Override
public HttpServerRequest handler(Handler<Buffer> handler) {
delegate.handler(handler);
return this;
}
@Override
public HttpServerRequest pause() {
delegate.pause();
return this;
}
@Override
public HttpServerRequest resume() {
delegate.resume();
return this;
}
@Override
public HttpServerRequest fetch(long amount) {
delegate.fetch(amount);
return this;
}
@Override
public HttpServerRequest endHandler(Handler<Void> handler) {
delegate.endHandler(handler);
return this;
}
@Override
public HttpVersion version() {
return delegate.version();
}
@Override
public HttpMethod method() {
if (!modified) {
return delegate.method();
}
return method;
}
@Override
public String uri() {
if (!modified) {
return forwardedParser.uri();
}
return uri;
}
@Override
public String path() {
if (!modified) {
return delegate.path();
}
return path;
}
@Override
public String query() {
if (!modified) {
return delegate.query();
}
return query;
}
@Override
public HttpServerResponse response() {
return delegate.response();
}
@Override
public MultiMap headers() {
return delegate.headers();
}
@Override
public String getHeader(String s) {
return delegate.getHeader(s);
}
@Override
public String getHeader(CharSequence charSequence) {
return delegate.getHeader(charSequence);
}
@Override
public MultiMap params() {
return delegate.params();
}
@Override
public String getParam(String s) {
return delegate.getParam(s);
}
@Override
public SocketAddress remoteAddress() {
return forwardedParser.remoteAddress();
}
@Override
public HostAndPort authority() {
return forwardedParser.authority();
}
@Override
public HostAndPort authority(boolean real) {
if (real) {
return delegate.authority();
}
return this.authority();
}
@Override
public boolean isValidAuthority() {
return forwardedParser.authority() != null;
}
@Override
public SocketAddress localAddress() {
return delegate.localAddress();
}
@Override
@Deprecated
public X509Certificate[] peerCertificateChain() throws SSLPeerUnverifiedException {
return delegate.peerCertificateChain();
}
@Override
public SSLSession sslSession() {
return delegate.sslSession();
}
@Override
public String absoluteURI() {
if (!modified) {
return forwardedParser.absoluteURI();
} else {
if (absoluteURI == null) {
String scheme = forwardedParser.scheme();
String host = forwardedParser.host();
// if both are not null we can rebuild the uri
if (scheme != null && host != null) {
absoluteURI = scheme + "://" + host + uri;
} else {
absoluteURI = uri;
}
}
return absoluteURI;
}
}
@Override
public String scheme() {
return forwardedParser.scheme();
}
@Override
public String host() {
return forwardedParser.host();
}
@Override
public HttpServerRequest customFrameHandler(Handler<HttpFrame> handler) {
delegate.customFrameHandler(handler);
return this;
}
@Override
public HttpConnection connection() {
return delegate.connection();
}
@Override
public HttpServerRequest bodyHandler(Handler<Buffer> handler) {
delegate.bodyHandler(handler);
return this;
}
@Override
public HttpServerRequest setExpectMultipart(boolean b) {
delegate.setExpectMultipart(b);
return this;
}
@Override
public boolean isExpectMultipart() {
return delegate.isExpectMultipart();
}
@Override
public HttpServerRequest uploadHandler(Handler<HttpServerFileUpload> handler) {
delegate.uploadHandler(handler);
return this;
}
@Override
public MultiMap formAttributes() {
return delegate.formAttributes();
}
@Override
public String getFormAttribute(String s) {
return delegate.getFormAttribute(s);
}
@Override
public boolean isEnded() {
return delegate.isEnded();
}
@Override
public boolean isSSL() {
return forwardedParser.isSSL();
}
@Override
public HttpServerRequest streamPriorityHandler(Handler<StreamPriority> handler) {
delegate.streamPriorityHandler(handler);
return this;
}
@Override
public StreamPriority streamPriority() {
return delegate.streamPriority();
}
@Override
public Cookie getCookie(String name) {
return delegate.getCookie(name);
}
@Override
public int cookieCount() {
return delegate.cookieCount();
}
@Override
@Deprecated
public Map<String, Cookie> cookieMap() {
return delegate.cookieMap();
}
@Override
public Cookie getCookie(String name, String domain, String path) {
return delegate.getCookie(name, domain, path);
}
@Override
public Set<Cookie> cookies(String name) {
return delegate.cookies(name);
}
@Override
public Set<Cookie> cookies() {
return delegate.cookies();
}
@Override
public HttpServerRequest body(Handler<AsyncResult<Buffer>> handler) {
return delegate.body(handler);
}
@Override
public Future<Buffer> body() {
return delegate.body();
}
@Override
public void end(Handler<AsyncResult<Void>> handler) {
delegate.end(handler);
}
@Override
public Future<Void> end() {
return delegate.end();
}
@Override
public void toNetSocket(Handler<AsyncResult<NetSocket>> handler) {
delegate.toNetSocket(handler);
}
@Override
public Future<NetSocket> toNetSocket() {
return delegate.toNetSocket();
}
@Override
public void toWebSocket(Handler<AsyncResult<ServerWebSocket>> handler) {
delegate.toWebSocket(handler);
}
@Override
public Future<ServerWebSocket> toWebSocket() {
return delegate.toWebSocket();
}
@Override
public Context context() {
return delegate.context();
}
@Override
public Object metric() {
return delegate.metric();
}
@Override
public DecoderResult decoderResult() {
return delegate.decoderResult();
}
@Override
public HttpServerRequest setParamsCharset(String charset) {
delegate.setParamsCharset(charset);
return this;
}
@Override
public String getParamsCharset() {
return delegate.getParamsCharset();
}
}
| ForwardedServerRequestWrapper |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/metamodel/model/domain/internal/AnyDiscriminatorSqmPathSource.java | {
"start": 877,
"end": 2437
} | class ____<D> extends AbstractSqmPathSource<D>
implements ReturnableType<D>, SqmBindableType<D> {
private final BasicType<D> domainType;
public AnyDiscriminatorSqmPathSource(
String localPathName,
SqmPathSource<D> pathModel,
SimpleDomainType<D> domainType,
BindableType jpaBindableType) {
super( localPathName, pathModel, domainType, jpaBindableType );
this.domainType = (BasicType<D>) domainType; // TODO: don't like this cast!
}
@Override
public SqmPath<D> createSqmPath(SqmPath<?> lhs, @Nullable SqmPathSource<?> intermediatePathSource) {
final NavigablePath navigablePath =
intermediatePathSource == null
? lhs.getNavigablePath()
: lhs.getNavigablePath().append( intermediatePathSource.getPathName() );
return new AnyDiscriminatorSqmPath<>( navigablePath, pathModel, lhs, lhs.nodeBuilder() );
}
@Override
public SqmPathSource<?> findSubPathSource(String name) {
throw new IllegalStateException( "Entity discriminator cannot be de-referenced" );
}
@Override
public PersistenceType getPersistenceType() {
return BASIC;
}
@Override
public Class<D> getJavaType() {
return getExpressibleJavaType().getJavaTypeClass();
}
@Override
public @Nullable SqmDomainType<D> getSqmType() {
return getPathType();
}
@Override
public BasicType<D> getPathType() {
return domainType;
}
@Override
public String getTypeName() {
return super.getTypeName();
}
@Override
public JavaType<D> getExpressibleJavaType() {
return getPathType().getExpressibleJavaType();
}
}
| AnyDiscriminatorSqmPathSource |
java | spring-projects__spring-security | web/src/test/java/org/springframework/security/web/jackson/DefaultSavedRequestMixinTests.java | {
"start": 1383,
"end": 6938
} | class ____ extends AbstractMixinTests {
// @formatter:off
private static final String COOKIES_JSON = "[\"java.util.ArrayList\", [{"
+ "\"@class\": \"org.springframework.security.web.savedrequest.SavedCookie\", "
+ "\"name\": \"SESSION\", "
+ "\"value\": \"123456789\", "
+ "\"maxAge\": -1, "
+ "\"path\": null, "
+ "\"secure\":false, "
+ "\"domain\": null"
+ "}]]";
// @formatter:on
// @formatter:off
private static final String REQUEST_JSON = "{" +
"\"@class\": \"org.springframework.security.web.savedrequest.DefaultSavedRequest\", "
+ "\"cookies\": " + COOKIES_JSON + ","
+ "\"locales\": [\"java.util.ArrayList\", [\"en\"]], "
+ "\"headers\": {\"@class\": \"java.util.TreeMap\", \"x-auth-token\": [\"java.util.ArrayList\", [\"12\"]]}, "
+ "\"parameters\": {\"@class\": \"java.util.TreeMap\"},"
+ "\"contextPath\": \"\", "
+ "\"method\": \"\", "
+ "\"pathInfo\": null, "
+ "\"queryString\": null, "
+ "\"requestURI\": \"\", "
+ "\"requestURL\": \"http://localhost\", "
+ "\"scheme\": \"http\", "
+ "\"serverName\": \"localhost\", "
+ "\"servletPath\": \"\", "
+ "\"serverPort\": 80"
+ "}";
// @formatter:on
// @formatter:off
private static final String REQUEST_WITH_MATCHING_REQUEST_PARAM_NAME_JSON = "{" +
"\"@class\": \"org.springframework.security.web.savedrequest.DefaultSavedRequest\", "
+ "\"cookies\": " + COOKIES_JSON + ","
+ "\"locales\": [\"java.util.ArrayList\", [\"en\"]], "
+ "\"headers\": {\"@class\": \"java.util.TreeMap\", \"x-auth-token\": [\"java.util.ArrayList\", [\"12\"]]}, "
+ "\"parameters\": {\"@class\": \"java.util.TreeMap\"},"
+ "\"contextPath\": \"\", "
+ "\"method\": \"\", "
+ "\"pathInfo\": null, "
+ "\"queryString\": null, "
+ "\"requestURI\": \"\", "
+ "\"requestURL\": \"http://localhost\", "
+ "\"scheme\": \"http\", "
+ "\"serverName\": \"localhost\", "
+ "\"servletPath\": \"\", "
+ "\"serverPort\": 80, "
+ "\"matchingRequestParameterName\": \"success\""
+ "}";
// @formatter:on
@Test
public void matchRequestBuildWithConstructorAndBuilder() {
DefaultSavedRequest request = new DefaultSavedRequest.Builder()
.setCookies(Collections.singletonList(new SavedCookie(new Cookie("SESSION", "123456789"))))
.setHeaders(Collections.singletonMap("x-auth-token", Collections.singletonList("12")))
.setScheme("http")
.setRequestURL("http://localhost")
.setServerName("localhost")
.setRequestURI("")
.setLocales(Collections.singletonList(new Locale("en")))
.setContextPath("")
.setMethod("")
.setServletPath("")
.build();
MockHttpServletRequest mockRequest = new MockHttpServletRequest();
mockRequest.setCookies(new Cookie("SESSION", "123456789"));
mockRequest.addHeader("x-auth-token", "12");
String currentUrl = UrlUtils.buildFullRequestUrl(mockRequest);
assertThat(request.getRedirectUrl().equals(currentUrl)).isTrue();
}
@Test
public void serializeDefaultRequestBuildWithConstructorTest() throws IOException, JSONException {
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("x-auth-token", "12");
// Spring 5 MockHttpServletRequest automatically adds a header when the cookies
// are set. To get consistency we override the request.
HttpServletRequest requestToWrite = new HttpServletRequestWrapper(request) {
@Override
public Cookie[] getCookies() {
return new Cookie[] { new Cookie("SESSION", "123456789") };
}
};
String actualString = this.mapper.writerWithDefaultPrettyPrinter()
.writeValueAsString(new DefaultSavedRequest(requestToWrite));
JSONAssert.assertEquals(REQUEST_JSON, actualString, true);
}
@Test
public void serializeDefaultRequestBuildWithBuilderTest() throws IOException, JSONException {
DefaultSavedRequest request = new DefaultSavedRequest.Builder()
.setCookies(Collections.singletonList(new SavedCookie(new Cookie("SESSION", "123456789"))))
.setHeaders(Collections.singletonMap("x-auth-token", Collections.singletonList("12")))
.setScheme("http")
.setRequestURL("http://localhost")
.setServerName("localhost")
.setRequestURI("")
.setLocales(Collections.singletonList(new Locale("en")))
.setContextPath("")
.setMethod("")
.setServletPath("")
.build();
String actualString = this.mapper.writerWithDefaultPrettyPrinter().writeValueAsString(request);
JSONAssert.assertEquals(REQUEST_JSON, actualString, true);
}
@Test
public void deserializeDefaultSavedRequest() {
DefaultSavedRequest request = (DefaultSavedRequest) this.mapper.readValue(REQUEST_JSON, Object.class);
assertThat(request).isNotNull();
assertThat(request.getCookies()).hasSize(1);
assertThat(request.getLocales()).hasSize(1).contains(new Locale("en"));
assertThat(request.getHeaderNames()).hasSize(1).contains("x-auth-token");
assertThat(request.getHeaderValues("x-auth-token")).hasSize(1).contains("12");
}
@Test
public void deserializeWhenMatchingRequestParameterNameThenRedirectUrlContainsParam() {
DefaultSavedRequest request = (DefaultSavedRequest) this.mapper
.readValue(REQUEST_WITH_MATCHING_REQUEST_PARAM_NAME_JSON, Object.class);
assertThat(request.getRedirectUrl()).isEqualTo("http://localhost?success");
}
@Test
public void deserializeWhenNullMatchingRequestParameterNameThenRedirectUrlDoesNotContainParam() {
DefaultSavedRequest request = (DefaultSavedRequest) this.mapper.readValue(REQUEST_JSON, Object.class);
assertThat(request.getRedirectUrl()).isEqualTo("http://localhost");
}
}
| DefaultSavedRequestMixinTests |
java | elastic__elasticsearch | distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java | {
"start": 23021,
"end": 25663
} | class ____ extends ServerCli {
boolean startServerCalled = false;
@Override
protected Command loadTool(Map<String, String> sysprops, String toolname, String libs) {
if (toolname.equals("auto-configure-node")) {
assertThat(libs, equalTo("modules/x-pack-core,modules/x-pack-security,lib/tools/security-cli"));
return AUTO_CONFIG_CLI;
} else if (toolname.equals("sync-plugins")) {
assertThat(libs, equalTo("lib/tools/plugin-cli"));
return SYNC_PLUGINS_CLI;
}
throw new AssertionError("Unknown tool: " + toolname);
}
@Override
Environment autoConfigureSecurity(
Terminal terminal,
OptionSet options,
ProcessInfo processInfo,
Environment env,
SecureString keystorePassword
) throws Exception {
if (mockSecureSettingsLoader != null && mockSecureSettingsLoader.supportsSecurityAutoConfiguration() == false) {
fail("We shouldn't be calling auto configure on loaders that don't support it");
}
return super.autoConfigureSecurity(terminal, options, processInfo, env, keystorePassword);
}
@Override
void syncPlugins(Terminal terminal, Environment env, ProcessInfo processInfo) throws Exception {
if (mockSecureSettingsLoader != null && mockSecureSettingsLoader instanceof MockSecureSettingsLoader mock) {
mock.verifiedEnv = true;
// equals as a pointer, environment shouldn't be changed if autoconfigure is not supported
assertFalse(mockSecureSettingsLoader.supportsSecurityAutoConfiguration());
assertTrue(mock.environment == env);
}
super.syncPlugins(terminal, env, processInfo);
}
@Override
protected SecureSettingsLoader secureSettingsLoader(Environment env) {
if (mockSecureSettingsLoader != null) {
return mockSecureSettingsLoader;
}
return new KeystoreSecureSettingsLoader();
}
@Override
protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, ServerArgs args) throws Exception {
startServerCalled = true;
if (argsValidator != null) {
argsValidator.accept(args);
}
mockServer.reset();
return mockServer;
}
}
@Override
protected Command newCommand() {
return new TestServerCli();
}
static | TestServerCli |
java | netty__netty | codec-dns/src/main/java/io/netty/handler/codec/dns/DnsMessage.java | {
"start": 835,
"end": 5314
} | interface ____ extends ReferenceCounted {
/**
* Returns the {@code ID} of this DNS message.
*/
int id();
/**
* Sets the {@code ID} of this DNS message.
*/
DnsMessage setId(int id);
/**
* Returns the {@code opCode} of this DNS message.
*/
DnsOpCode opCode();
/**
* Sets the {@code opCode} of this DNS message.
*/
DnsMessage setOpCode(DnsOpCode opCode);
/**
* Returns the {@code RD} (recursion desired} field of this DNS message.
*/
boolean isRecursionDesired();
/**
* Sets the {@code RD} (recursion desired} field of this DNS message.
*/
DnsMessage setRecursionDesired(boolean recursionDesired);
/**
* Returns the {@code Z} (reserved for future use) field of this DNS message.
*/
int z();
/**
* Sets the {@code Z} (reserved for future use) field of this DNS message.
*/
DnsMessage setZ(int z);
/**
* Returns the number of records in the specified {@code section} of this DNS message.
*/
int count(DnsSection section);
/**
* Returns the number of records in this DNS message.
*/
int count();
/**
* Returns the first record in the specified {@code section} of this DNS message.
* When the specified {@code section} is {@link DnsSection#QUESTION}, the type of the returned record is
* always {@link DnsQuestion}.
*
* @return {@code null} if this message doesn't have any records in the specified {@code section}
*/
<T extends DnsRecord> T recordAt(DnsSection section);
/**
* Returns the record at the specified {@code index} of the specified {@code section} of this DNS message.
* When the specified {@code section} is {@link DnsSection#QUESTION}, the type of the returned record is
* always {@link DnsQuestion}.
*
* @throws IndexOutOfBoundsException if the specified {@code index} is out of bounds
*/
<T extends DnsRecord> T recordAt(DnsSection section, int index);
/**
* Sets the specified {@code section} of this DNS message to the specified {@code record},
* making it a single-record section. When the specified {@code section} is {@link DnsSection#QUESTION},
* the specified {@code record} must be a {@link DnsQuestion}.
*/
DnsMessage setRecord(DnsSection section, DnsRecord record);
/**
* Sets the specified {@code record} at the specified {@code index} of the specified {@code section}
* of this DNS message. When the specified {@code section} is {@link DnsSection#QUESTION},
* the specified {@code record} must be a {@link DnsQuestion}.
*
* @return the old record
* @throws IndexOutOfBoundsException if the specified {@code index} is out of bounds
*/
<T extends DnsRecord> T setRecord(DnsSection section, int index, DnsRecord record);
/**
* Adds the specified {@code record} at the end of the specified {@code section} of this DNS message.
* When the specified {@code section} is {@link DnsSection#QUESTION}, the specified {@code record}
* must be a {@link DnsQuestion}.
*/
DnsMessage addRecord(DnsSection section, DnsRecord record);
/**
* Adds the specified {@code record} at the specified {@code index} of the specified {@code section}
* of this DNS message. When the specified {@code section} is {@link DnsSection#QUESTION}, the specified
* {@code record} must be a {@link DnsQuestion}.
*
* @throws IndexOutOfBoundsException if the specified {@code index} is out of bounds
*/
DnsMessage addRecord(DnsSection section, int index, DnsRecord record);
/**
* Removes the record at the specified {@code index} of the specified {@code section} from this DNS message.
* When the specified {@code section} is {@link DnsSection#QUESTION}, the type of the returned record is
* always {@link DnsQuestion}.
*
* @return the removed record
*/
<T extends DnsRecord> T removeRecord(DnsSection section, int index);
/**
* Removes all the records in the specified {@code section} of this DNS message.
*/
DnsMessage clear(DnsSection section);
/**
* Removes all the records in this DNS message.
*/
DnsMessage clear();
@Override
DnsMessage touch();
@Override
DnsMessage touch(Object hint);
@Override
DnsMessage retain();
@Override
DnsMessage retain(int increment);
}
| DnsMessage |
java | google__gson | gson/src/test/java/com/google/gson/functional/ObjectTest.java | {
"start": 14094,
"end": 15031
} | class ____ {}
gson =
new GsonBuilder()
.registerTypeAdapter(
Local.class,
(JsonSerializer<Local>)
(src, typeOfSrc, context) -> new JsonPrimitive("custom-value"))
.create();
assertThat(gson.toJson(new Local())).isEqualTo("\"custom-value\"");
}
@Test
public void testAnonymousLocalClassesCustomDeserialization() {
Gson gson =
new GsonBuilder()
.registerTypeHierarchyAdapter(
ClassWithNoFields.class,
(JsonDeserializer<ClassWithNoFields>)
(json, typeOfT, context) -> new ClassWithNoFields())
.create();
assertThat(gson.fromJson("{}", ClassWithNoFields.class)).isNotNull();
Class<?> anonymousClass = new ClassWithNoFields() {}.getClass();
// Custom deserializer is ignored
assertThat(gson.fromJson("{}", anonymousClass)).isNull();
| Local |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/support/hsf/HSFJSONUtilsTest_1.java | {
"start": 8742,
"end": 9033
} | class ____ {
public void f2(String name, Model model) {
}
public void f3(String name, List<Model> models) {
}
public void f3(String name, Model[] models) {
}
public void f3(int a, long b) {
}
}
public static | Service |
java | square__retrofit | retrofit/java-test/src/test/java/retrofit2/RetrofitTest.java | {
"start": 19551,
"end": 20663
} | class ____ extends Converter.Factory {
@Override
public @Nullable Converter<?, String> stringConverter(
Type type, Annotation[] annotations, Retrofit retrofit) {
factoryCalled.set(true);
return null;
}
}
Retrofit retrofit =
new Retrofit.Builder()
.baseUrl(server.url("/"))
.addConverterFactory(new MyConverterFactory())
.build();
CallMethod service = retrofit.create(CallMethod.class);
Call<ResponseBody> call = service.queryObject(null);
assertThat(call).isNotNull();
assertThat(factoryCalled.get()).isTrue();
}
@Test
public void missingConverterThrowsOnNonRequestBody() throws IOException {
Retrofit retrofit = new Retrofit.Builder().baseUrl(server.url("/")).build();
CallMethod example = retrofit.create(CallMethod.class);
try {
example.disallowed("Hi!");
fail();
} catch (IllegalArgumentException e) {
assertThat(e)
.hasMessageThat()
.isEqualTo(
""
+ "Unable to create @Body converter for | MyConverterFactory |
java | apache__maven | impl/maven-impl/src/main/java/org/apache/maven/api/services/model/LifecycleBindingsInjector.java | {
"start": 1125,
"end": 1996
} | interface ____ {
/**
* Injects plugin executions induced by lifecycle bindings into the specified model. The model has already undergone
* injection of plugin management so any plugins that are injected by lifecycle bindings and are not already present
* in the model's plugin section need to be subjected to the model's plugin management.
*
* @param model The model into which to inject the default plugin executions for its packaging, must not be
* <code>null</code>.
* @param request The model building request that holds further settings, must not be {@code null}.
* @param problems The container used to collect problems that were encountered, must not be {@code null}.
*/
Model injectLifecycleBindings(Model model, ModelBuilderRequest request, ModelProblemCollector problems);
}
| LifecycleBindingsInjector |
java | apache__camel | components/camel-clickup/src/test/java/org/apache/camel/component/clickup/ClickUpWebhookRegistrationAlreadyExistsTest.java | {
"start": 2088,
"end": 9017
} | class ____ extends ClickUpTestSupport {
private final static Long WORKSPACE_ID = 12345L;
private final static String AUTHORIZATION_TOKEN = "mock-authorization-token";
private final static String WEBHOOK_SECRET = "mock-webhook-secret";
private final static Set<String> EVENTS = new HashSet<>(List.of("taskTimeTrackedUpdated"));
private static final ObjectMapper MAPPER = new ObjectMapper();
public static final String WEBHOOK_ALREADY_EXISTS_JSON = "messages/webhook-already-exists.json";
public static final String WEBHOOKS = "messages/webhooks.json";
@Override
public void configureTest(TestExecutionConfiguration testExecutionConfiguration) {
super.configureTest(testExecutionConfiguration);
testExecutionConfiguration.withUseRouteBuilder(false);
}
@Test
public void testAutomaticRegistrationWhenWebhookConfigurationAlreadyExists() throws Exception {
final ClickUpMockRoutes.MockProcessor<String> creationMockProcessor
= getMockRoutes().getMock("POST", "team/" + WORKSPACE_ID + "/webhook");
creationMockProcessor.clearRecordedMessages();
final ClickUpMockRoutes.MockProcessor<String> readMockProcessor
= getMockRoutes().getMock("GET", "team/" + WORKSPACE_ID + "/webhook");
readMockProcessor.clearRecordedMessages();
try (final DefaultCamelContext mockContext = new DefaultCamelContext()) {
mockContext.addRoutes(getMockRoutes());
mockContext.start();
/* Make sure the ClickUp mock API is up and running */
Awaitility.await()
.atMost(5, TimeUnit.SECONDS)
.until(() -> {
HttpClient client = HttpClient.newBuilder().build();
HttpRequest request = HttpRequest.newBuilder()
.uri(URI.create("http://localhost:" + port + "/clickup-api-mock/health")).GET().build();
final HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
return response.statusCode() == 200;
});
context().addRoutes(new RouteBuilder() {
@Override
public void configure() {
String apiMockBaseUrl = "http://localhost:" + port + "/clickup-api-mock";
from("webhook:clickup:" + WORKSPACE_ID + "?authorizationToken=" + AUTHORIZATION_TOKEN + "&webhookSecret="
+ WEBHOOK_SECRET + "&events=" + String.join(",", EVENTS) + "&webhookAutoRegister=true&baseUrl="
+ apiMockBaseUrl)
.id("webhook")
.to("mock:endpoint");
}
});
context().start();
{
final List<String> creationRecordedMessages = creationMockProcessor.awaitRecordedMessages(1, 5000);
assertEquals(1, creationRecordedMessages.size());
String webhookCreationMessage = creationRecordedMessages.get(0);
try {
WebhookCreationCommand command = MAPPER.readValue(webhookCreationMessage, WebhookCreationCommand.class);
assertInstanceOf(WebhookCreationCommand.class, command);
} catch (IOException e) {
fail(e);
}
creationMockProcessor.clearRecordedMessages();
}
{
final List<String> readRecordedMessages = readMockProcessor.awaitRecordedMessages(1, 5000);
assertEquals(1, readRecordedMessages.size());
String webhookReadMessage = readRecordedMessages.get(0);
assertEquals("", webhookReadMessage);
readMockProcessor.clearRecordedMessages();
}
context().stop();
}
}
@Override
protected ClickUpMockRoutes createMockRoutes() {
ClickUpMockRoutes clickUpMockRoutes = new ClickUpMockRoutes(port);
clickUpMockRoutes.addEndpoint(
"health",
"GET",
true,
String.class,
() -> "");
try (InputStream content = getClass().getClassLoader().getResourceAsStream(WEBHOOK_ALREADY_EXISTS_JSON)) {
assert content != null;
String responseBody = new String(content.readAllBytes());
clickUpMockRoutes.addEndpoint(
"team/" + WORKSPACE_ID + "/webhook",
"POST",
true,
String.class,
() -> responseBody);
} catch (IOException e) {
throw new RuntimeException(e);
}
clickUpMockRoutes.addEndpoint(
"team/" + WORKSPACE_ID + "/webhook",
"GET",
true,
String.class,
() -> {
String webhookExternalUrl;
try {
Optional<Endpoint> optionalEndpoint = context().getEndpoints().stream()
.filter(endpoint -> endpoint instanceof WebhookEndpoint)
.findFirst();
if (optionalEndpoint.isEmpty()) {
throw new RuntimeException("Could not find clickup webhook endpoint. This should never happen.");
}
WebhookEndpoint webhookEndpoint = (WebhookEndpoint) (optionalEndpoint.get());
WebhookConfiguration config = webhookEndpoint.getConfiguration();
webhookExternalUrl = config.computeFullExternalUrl();
} catch (UnknownHostException e) {
throw new RuntimeException(e);
}
WebhooksReadResult webhooksReadResult = getJSONResource(WEBHOOKS, WebhooksReadResult.class);
Optional<Webhook> webhook = webhooksReadResult.getWebhooks().stream().findFirst();
if (webhook.isEmpty()) {
throw new RuntimeException(
"Could not find the testing webhook. This should never happen, since its reading webhooks from a static file.");
}
webhook.get().setEndpoint(webhookExternalUrl);
String readWebhooksResponseBody;
try {
readWebhooksResponseBody = MAPPER.writeValueAsString(webhooksReadResult);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
return readWebhooksResponseBody;
});
return clickUpMockRoutes;
}
}
| ClickUpWebhookRegistrationAlreadyExistsTest |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/api/condition/DisabledForJreRangeConditionTests.java | {
"start": 3913,
"end": 6609
} | enum ____ or numeric version, but not both");
}
/**
* @see DisabledForJreRangeIntegrationTests#minGreaterThanMax()
*/
@Test
void minGreaterThanMax() {
assertPreconditionViolationFor(this::evaluateCondition)//
.withMessage(
"@DisabledForJreRange's minimum value [21] must be less than or equal to its maximum value [17]");
}
/**
* @see DisabledForJreRangeIntegrationTests#minGreaterThanMaxVersion()
*/
@Test
void minGreaterThanMaxVersion() {
minGreaterThanMax();
}
/**
* @see DisabledForJreRangeIntegrationTests#minVersionGreaterThanMaxVersion()
*/
@Test
void minVersionGreaterThanMaxVersion() {
minGreaterThanMax();
}
/**
* @see DisabledForJreRangeIntegrationTests#minVersionGreaterThanMax()
*/
@Test
void minVersionGreaterThanMax() {
minGreaterThanMax();
}
/**
* @see DisabledForJreRangeIntegrationTests#min18()
*/
@Test
void min18() {
evaluateCondition();
assertDisabledOnCurrentJreIf(!onJava17());
}
/**
* @see DisabledForJreRangeIntegrationTests#minVersion18()
*/
@Test
void minVersion18() {
min18();
}
/**
* @see DisabledForJreRangeIntegrationTests#max18()
*/
@Test
void max18() {
evaluateCondition();
assertDisabledOnCurrentJreIf(onJava17() || onJava18());
}
/**
* @see DisabledForJreRangeIntegrationTests#maxVersion18()
*/
@Test
void maxVersion18() {
max18();
}
/**
* @see DisabledForJreRangeIntegrationTests#min17Max17()
*/
@Test
void min17Max17() {
evaluateCondition();
assertDisabledOnCurrentJreIf(onJava17());
}
/**
* @see DisabledForJreRangeIntegrationTests#minVersion17MaxVersion17()
*/
@Test
void minVersion17MaxVersion17() {
min17Max17();
}
/**
* @see DisabledForJreRangeIntegrationTests#min18Max19()
*/
@Test
void min18Max19() {
evaluateCondition();
assertDisabledOnCurrentJreIf(onJava18() || onJava19());
assertCustomDisabledReasonIs("Disabled on Java 18 & 19");
}
/**
* @see DisabledForJreRangeIntegrationTests#minVersion18MaxVersion19()
*/
@Test
void minVersion18MaxVersion19() {
min18Max19();
}
/**
* @see DisabledForJreRangeIntegrationTests#minOtherMaxOther()
*/
@Test
void minOtherMaxOther() {
evaluateCondition();
assertDisabledOnCurrentJreIf(!onKnownVersion());
}
/**
* @see DisabledForJreRangeIntegrationTests#minMaxIntegerMaxMaxInteger()
*/
@Test
void minMaxIntegerMaxMaxInteger() {
minOtherMaxOther();
}
private void assertDisabledOnCurrentJreIf(boolean condition) {
if (condition) {
assertDisabled();
assertReasonContains("Disabled on JRE version: " + JAVA_VERSION);
}
else {
assertEnabled();
assertReasonContains("Enabled on JRE version: " + JAVA_VERSION);
}
}
}
| constant |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/scheduler/SchedulerRouteTest.java | {
"start": 1242,
"end": 2268
} | class ____ extends ContextTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(SchedulerRouteTest.class);
private final MyBean bean = new MyBean();
@Test
public void testSchedulerInvokesBeanMethod() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(2);
assertMockEndpointsSatisfied();
assertTrue(bean.counter.get() >= 2, "Should have fired 2 or more times was: " + bean.counter.get());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("scheduler://foo?delay=100").log("Fired scheduler").to("bean:myBean", "mock:result");
}
};
}
@Override
protected Registry createCamelRegistry() throws Exception {
Registry answer = super.createCamelRegistry();
answer.bind("myBean", bean);
return answer;
}
public static | SchedulerRouteTest |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-api/src/test/java/org/apache/dubbo/remoting/exchange/ResponseTest.java | {
"start": 1006,
"end": 1710
} | class ____ {
@Test
void test() {
Response response = new Response();
response.setStatus(Response.OK);
response.setId(1);
response.setVersion("1.0.0");
response.setResult("test");
response.setEvent(HEARTBEAT_EVENT);
response.setErrorMessage("errorMsg");
Assertions.assertTrue(response.isEvent());
Assertions.assertTrue(response.isHeartbeat());
Assertions.assertEquals(response.getVersion(), "1.0.0");
Assertions.assertEquals(response.getId(), 1);
Assertions.assertEquals(response.getResult(), HEARTBEAT_EVENT);
Assertions.assertEquals(response.getErrorMessage(), "errorMsg");
}
}
| ResponseTest |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1400/Issue1405.java | {
"start": 4301,
"end": 5882
} | class ____ {
private String appId;
private int userId;
private String idNumber;
private String realName;
private String businessLine;
private boolean ignoreIdNumberRepeat;
private boolean offline;
public String getAppId() {
return appId;
}
public void setAppId(String appId) {
this.appId = appId;
}
public int getUserId() {
return userId;
}
public void setUserId(int userId) {
this.userId = userId;
}
public String getIdNumber() {
return idNumber;
}
public void setIdNumber(String idNumber) {
this.idNumber = idNumber;
}
public String getRealName() {
return realName;
}
public void setRealName(String realName) {
this.realName = realName;
}
public String getBusinessLine() {
return businessLine;
}
public void setBusinessLine(String businessLine) {
this.businessLine = businessLine;
}
public boolean isIgnoreIdNumberRepeat() {
return ignoreIdNumberRepeat;
}
public void setIgnoreIdNumberRepeat(boolean ignoreIdNumberRepeat) {
this.ignoreIdNumberRepeat = ignoreIdNumberRepeat;
}
public boolean isOffline() {
return offline;
}
public void setOffline(boolean offline) {
this.offline = offline;
}
}
}
| AuthIdentityRequest |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/core/annotation/AuthenticationPrincipal.java | {
"start": 1390,
"end": 2040
} | interface ____ {
/**
* True if a {@link ClassCastException} should be thrown when the current
* {@link Authentication#getPrincipal()} is the incorrect type. Default is false.
* @return
*/
boolean errorOnInvalidType() default false;
/**
* If specified will use the provided SpEL expression to resolve the principal. This
* is convenient if users need to transform the result.
*
* <p>
* For example, perhaps the user wants to resolve a CustomUser object that is final
* and is leveraging a UserDetailsService. This can be handled by returning an object
* that looks like:
* </p>
*
* <pre>
* public | AuthenticationPrincipal |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/config/ServiceLocatorFactoryBeanTests.java | {
"start": 13240,
"end": 13465
} | class ____ extends NestedRuntimeException {
public CustomServiceLocatorException1(String message, Throwable cause) {
super(message, cause);
}
}
@SuppressWarnings("serial")
public static | CustomServiceLocatorException1 |
java | apache__maven | compat/maven-compat/src/main/java/org/apache/maven/project/interpolation/RegexBasedModelInterpolator.java | {
"start": 1303,
"end": 1742
} | class ____ extends AbstractStringBasedModelInterpolator {
public RegexBasedModelInterpolator() throws IOException {}
public RegexBasedModelInterpolator(PathTranslator pathTranslator) {
super(pathTranslator);
}
public RegexBasedModelInterpolator(Properties envars) {}
@Override
protected Interpolator createInterpolator() {
return new RegexBasedInterpolator(true);
}
}
| RegexBasedModelInterpolator |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KnativeEndpointBuilderFactory.java | {
"start": 38972,
"end": 39294
} | class ____ extends AbstractEndpointBuilder implements KnativeEndpointBuilder, AdvancedKnativeEndpointBuilder {
public KnativeEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new KnativeEndpointBuilderImpl(path);
}
} | KnativeEndpointBuilderImpl |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/function/SelfRenderingAggregateFunctionSqlAstExpression.java | {
"start": 768,
"end": 1596
} | class ____<T> extends SelfRenderingFunctionSqlAstExpression<T>
implements AggregateFunctionExpression {
private final Predicate filter;
public SelfRenderingAggregateFunctionSqlAstExpression(
String functionName,
FunctionRenderer renderer,
List<? extends SqlAstNode> sqlAstArguments,
Predicate filter,
ReturnableType<T> type,
JdbcMappingContainer expressible) {
super( functionName, renderer, sqlAstArguments, type, expressible );
this.filter = filter;
}
@Override
public Predicate getFilter() {
return filter;
}
@Override
public void renderToSql(
SqlAppender sqlAppender,
SqlAstTranslator<?> walker,
SessionFactoryImplementor sessionFactory) {
getFunctionRenderer().render( sqlAppender, getArguments(), filter, getType(), walker );
}
}
| SelfRenderingAggregateFunctionSqlAstExpression |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java | {
"start": 16834,
"end": 18856
} | class ____ extends AbstractBlockBuilder {
private int size = 0;
NoopBlockBuilder(BlockFactory blockFactory) {
super(blockFactory);
}
@Override
protected int valuesLength() {
return size;
}
@Override
protected void growValuesArray(int newSize) {
size = newSize;
}
@Override
protected int elementSize() {
return Long.BYTES;
}
@Override
public Block.Builder copyFrom(Block block, int beginInclusive, int endExclusive) {
throw new UnsupportedOperationException();
}
@Override
public Block.Builder mvOrdering(Block.MvOrdering mvOrdering) {
throw new UnsupportedOperationException();
}
@Override
public Block build() {
throw new UnsupportedOperationException();
}
void appendUntilBreaking() {
int maxArrayLength = ArrayUtil.MAX_ARRAY_LENGTH;
for (long i = 0; i < maxArrayLength; i++) {
ensureCapacity();
valueCount++;
}
}
}
ByteSizeValue largeHeap = ByteSizeValue.ofMb(between(4 * 1024, 8 * 1024));
BlockFactory blockFactory = blockFactory(largeHeap);
try (var builder = new NoopBlockBuilder(blockFactory)) {
expectThrows(CircuitBreakingException.class, builder::appendUntilBreaking);
} finally {
assertThat(blockFactory.breaker().getUsed(), equalTo(0L));
}
}
static Matcher<Long> between(long minInclusive, long maxInclusive) {
return allOf(greaterThanOrEqualTo(minInclusive), lessThanOrEqualTo(maxInclusive));
}
/** An accumulator that stops at BigArrays or BlockFactory. And calls ramBytesUsed on BigArray instances. */
static | NoopBlockBuilder |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/KTableKTableInnerJoin.java | {
"start": 1633,
"end": 2434
} | class ____<K, V1, V2, VOut> extends KTableKTableAbstractJoin<K, V1, V2, VOut> {
private static final Logger LOG = LoggerFactory.getLogger(KTableKTableInnerJoin.class);
KTableKTableInnerJoin(final KTableImpl<K, ?, V1> table1,
final KTableImpl<K, ?, V2> table2,
final ValueJoiner<? super V1, ? super V2, ? extends VOut> joiner) {
super(table1, table2, joiner);
}
@Override
public Processor<K, Change<V1>, K, Change<VOut>> get() {
return new KTableKTableJoinProcessor(valueGetterSupplier2.get());
}
@Override
public KTableValueGetterSupplier<K, VOut> view() {
return new KTableKTableInnerJoinValueGetterSupplier(valueGetterSupplier1, valueGetterSupplier2);
}
private | KTableKTableInnerJoin |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ContainerManagementProtocolPBClientImpl.java | {
"start": 5754,
"end": 12987
} | class ____ implements ContainerManagementProtocol,
Closeable {
// Not a documented config. Only used for tests
static final String NM_COMMAND_TIMEOUT = YarnConfiguration.YARN_PREFIX
+ "rpc.nm-command-timeout";
/**
* Maximum of 1 minute timeout for a Node to react to the command
*/
static final int DEFAULT_COMMAND_TIMEOUT = 60000;
private ContainerManagementProtocolPB proxy;
public ContainerManagementProtocolPBClientImpl(long clientVersion,
InetSocketAddress addr, Configuration conf) throws IOException {
RPC.setProtocolEngine(conf, ContainerManagementProtocolPB.class,
ProtobufRpcEngine2.class);
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
int expireInterval = conf.getInt(NM_COMMAND_TIMEOUT, DEFAULT_COMMAND_TIMEOUT);
proxy =
(ContainerManagementProtocolPB) RPC.getProxy(ContainerManagementProtocolPB.class,
clientVersion, addr, ugi, conf,
NetUtils.getDefaultSocketFactory(conf), expireInterval);
}
@Override
public void close() {
if (this.proxy != null) {
RPC.stopProxy(this.proxy);
}
}
@Override
public StartContainersResponse
startContainers(StartContainersRequest requests) throws YarnException,
IOException {
StartContainersRequestProto requestProto =
((StartContainersRequestPBImpl) requests).getProto();
try {
return new StartContainersResponsePBImpl(proxy.startContainers(null,
requestProto));
} catch (ServiceException e) {
RPCUtil.unwrapAndThrowException(e);
return null;
}
}
@Override
public StopContainersResponse stopContainers(StopContainersRequest requests)
throws YarnException, IOException {
StopContainersRequestProto requestProto =
((StopContainersRequestPBImpl) requests).getProto();
try {
return new StopContainersResponsePBImpl(proxy.stopContainers(null,
requestProto));
} catch (ServiceException e) {
RPCUtil.unwrapAndThrowException(e);
return null;
}
}
@Override
public GetContainerStatusesResponse getContainerStatuses(
GetContainerStatusesRequest request) throws YarnException, IOException {
GetContainerStatusesRequestProto requestProto =
((GetContainerStatusesRequestPBImpl) request).getProto();
try {
return new GetContainerStatusesResponsePBImpl(proxy.getContainerStatuses(
null, requestProto));
} catch (ServiceException e) {
RPCUtil.unwrapAndThrowException(e);
return null;
}
}
@Override
@Deprecated
public IncreaseContainersResourceResponse increaseContainersResource(
IncreaseContainersResourceRequest request) throws YarnException,
IOException {
try {
ContainerUpdateRequest req =
ContainerUpdateRequest.newInstance(request.getContainersToIncrease());
ContainerUpdateRequestProto reqProto =
((ContainerUpdateRequestPBImpl) req).getProto();
ContainerUpdateResponse resp = new ContainerUpdateResponsePBImpl(
proxy.updateContainer(null, reqProto));
return IncreaseContainersResourceResponse
.newInstance(resp.getSuccessfullyUpdatedContainers(),
resp.getFailedRequests());
} catch (ServiceException e) {
RPCUtil.unwrapAndThrowException(e);
return null;
}
}
@Override
public ContainerUpdateResponse updateContainer(ContainerUpdateRequest
request) throws YarnException, IOException {
ContainerUpdateRequestProto requestProto =
((ContainerUpdateRequestPBImpl)request).getProto();
try {
return new ContainerUpdateResponsePBImpl(
proxy.updateContainer(null, requestProto));
} catch (ServiceException e) {
RPCUtil.unwrapAndThrowException(e);
return null;
}
}
@Override
public SignalContainerResponse signalToContainer(
SignalContainerRequest request) throws YarnException, IOException {
SignalContainerRequestProto requestProto =
((SignalContainerRequestPBImpl) request).getProto();
try {
return new SignalContainerResponsePBImpl(
proxy.signalToContainer(null, requestProto));
} catch (ServiceException e) {
RPCUtil.unwrapAndThrowException(e);
return null;
}
}
@Override
public ResourceLocalizationResponse localize(
ResourceLocalizationRequest request) throws YarnException, IOException {
ResourceLocalizationRequestProto requestProto =
((ResourceLocalizationRequestPBImpl) request).getProto();
try {
return new ResourceLocalizationResponsePBImpl(
proxy.localize(null, requestProto));
} catch (ServiceException e) {
RPCUtil.unwrapAndThrowException(e);
return null;
}
}
@Override
public ReInitializeContainerResponse reInitializeContainer(
ReInitializeContainerRequest request) throws YarnException, IOException {
YarnServiceProtos.ReInitializeContainerRequestProto requestProto =
((ReInitializeContainerRequestPBImpl) request).getProto();
try {
return new ReInitializeContainerResponsePBImpl(
proxy.reInitializeContainer(null, requestProto));
} catch (ServiceException e) {
RPCUtil.unwrapAndThrowException(e);
return null;
}
}
@Override
public RestartContainerResponse restartContainer(ContainerId containerId)
throws YarnException, IOException {
YarnProtos.ContainerIdProto containerIdProto = ProtoUtils
.convertToProtoFormat(containerId);
try {
return new RestartContainerResponsePBImpl(
proxy.restartContainer(null, containerIdProto));
} catch (ServiceException e) {
RPCUtil.unwrapAndThrowException(e);
return null;
}
}
@Override
public RollbackResponse rollbackLastReInitialization(ContainerId containerId)
throws YarnException, IOException {
YarnProtos.ContainerIdProto containerIdProto = ProtoUtils
.convertToProtoFormat(containerId);
try {
return new RollbackResponsePBImpl(
proxy.rollbackLastReInitialization(null, containerIdProto));
} catch (ServiceException e) {
RPCUtil.unwrapAndThrowException(e);
return null;
}
}
@Override
public CommitResponse commitLastReInitialization(ContainerId containerId)
throws YarnException, IOException {
YarnProtos.ContainerIdProto containerIdProto = ProtoUtils
.convertToProtoFormat(containerId);
try {
return new CommitResponsePBImpl(
proxy.commitLastReInitialization(null, containerIdProto));
} catch (ServiceException e) {
RPCUtil.unwrapAndThrowException(e);
return null;
}
}
@Override
public GetLocalizationStatusesResponse getLocalizationStatuses(
GetLocalizationStatusesRequest request)
throws YarnException, IOException {
GetLocalizationStatusesRequestProto requestProto =
((GetLocalizationStatusesRequestPBImpl) request).getProto();
try {
return new GetLocalizationStatusesResponsePBImpl(
proxy.getLocalizationStatuses(null, requestProto));
} catch (ServiceException e) {
RPCUtil.unwrapAndThrowException(e);
return null;
}
}
}
| ContainerManagementProtocolPBClientImpl |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/connectors/DynamicSourceUtils.java | {
"start": 3770,
"end": 30229
} | class ____ {
/**
* Converts a given {@link DataStream} to a {@link RelNode}. It adds helper projections if
* necessary.
*/
public static RelNode convertDataStreamToRel(
boolean isBatchMode,
ReadableConfig config,
FlinkRelBuilder relBuilder,
ContextResolvedTable contextResolvedTable,
DataStream<?> dataStream,
DataType physicalDataType,
boolean isTopLevelRecord,
ChangelogMode changelogMode) {
final DynamicTableSource tableSource =
new ExternalDynamicSource<>(
contextResolvedTable.getIdentifier(),
dataStream,
physicalDataType,
isTopLevelRecord,
changelogMode);
final FlinkStatistic statistic =
FlinkStatistic.unknown(contextResolvedTable.getResolvedSchema()).build();
return convertSourceToRel(
isBatchMode,
config,
relBuilder,
contextResolvedTable,
statistic,
Collections.emptyList(),
tableSource);
}
/**
* Converts a given {@link DynamicTableSource} to a {@link RelNode}. It adds helper projections
* if necessary.
*/
public static RelNode convertSourceToRel(
boolean isBatchMode,
ReadableConfig config,
FlinkRelBuilder relBuilder,
ContextResolvedTable contextResolvedTable,
FlinkStatistic statistic,
List<RelHint> hints,
DynamicTableSource tableSource) {
final String tableDebugName = contextResolvedTable.getIdentifier().asSummaryString();
final ResolvedCatalogTable resolvedCatalogTable = contextResolvedTable.getResolvedTable();
final List<SourceAbilitySpec> sourceAbilities = new ArrayList<>();
// 1. prepare table source
prepareDynamicSource(
tableDebugName,
resolvedCatalogTable,
tableSource,
isBatchMode,
config,
sourceAbilities);
// 2. push table scan
pushTableScan(
isBatchMode,
relBuilder,
contextResolvedTable,
statistic,
hints,
tableSource,
sourceAbilities);
// 3. push project for non-physical columns
final ResolvedSchema schema = contextResolvedTable.getResolvedSchema();
if (!schema.getColumns().stream().allMatch(Column::isPhysical)) {
pushMetadataProjection(relBuilder, schema);
pushGeneratedProjection(relBuilder, schema);
}
// 4. push watermark assigner
if (!isBatchMode && !schema.getWatermarkSpecs().isEmpty()) {
pushWatermarkAssigner(relBuilder, schema);
}
return relBuilder.build();
}
/**
* Prepares the given {@link DynamicTableSource}. It check whether the source is compatible with
* the given schema and applies initial parameters.
*/
public static void prepareDynamicSource(
String tableDebugName,
ResolvedCatalogTable table,
DynamicTableSource source,
boolean isBatchMode,
ReadableConfig config,
List<SourceAbilitySpec> sourceAbilities) {
final ResolvedSchema schema = table.getResolvedSchema();
validateAndApplyMetadata(tableDebugName, schema, source, sourceAbilities);
if (source instanceof ScanTableSource) {
validateScanSource(
tableDebugName, schema, (ScanTableSource) source, isBatchMode, config);
prepareRowLevelModificationScan(source);
}
// lookup table source is validated in LookupJoin node
}
// TODO: isUpsertSource(), isSourceChangeEventsDuplicate()
/**
* Returns a list of required metadata columns. Ordered by the iteration order of {@link
* SupportsReadingMetadata#listReadableMetadata()}.
*
* <p>This method assumes that source and schema have been validated via {@link
* #prepareDynamicSource(String, ResolvedCatalogTable, DynamicTableSource, boolean,
* ReadableConfig, List)}.
*/
public static List<MetadataColumn> createRequiredMetadataColumns(
ResolvedSchema schema, DynamicTableSource source) {
final Map<String, MetadataColumn> metadataKeysToMetadataColumns =
createMetadataKeysToMetadataColumnsMap(schema);
final Map<String, DataType> metadataMap = extractMetadataMap(source);
// reorder the column
return metadataMap.keySet().stream()
.filter(metadataKeysToMetadataColumns::containsKey)
.map(metadataKeysToMetadataColumns::get)
.collect(Collectors.toList());
}
/**
* Returns a map record the mapping relation between metadataKeys to metadataColumns in input
* schema.
*/
public static Map<String, MetadataColumn> createMetadataKeysToMetadataColumnsMap(
ResolvedSchema schema) {
final List<MetadataColumn> metadataColumns = extractMetadataColumns(schema);
Map<String, MetadataColumn> metadataKeysToMetadataColumns = new HashMap<>();
for (MetadataColumn column : metadataColumns) {
String metadataKey = column.getMetadataKey().orElse(column.getName());
// After resolving, every metadata column has the unique metadata key.
metadataKeysToMetadataColumns.put(metadataKey, column);
}
return metadataKeysToMetadataColumns;
}
/**
* Returns the {@link DataType} that a source should produce as the input into the runtime.
*
* <p>The format looks as follows: {@code PHYSICAL COLUMNS + METADATA COLUMNS}
*
* <p>Physical columns use the table schema's name. Metadata column use the metadata key as
* name.
*/
public static RowType createProducedType(ResolvedSchema schema, DynamicTableSource source) {
final Map<String, DataType> metadataMap = extractMetadataMap(source);
final Stream<RowField> physicalFields =
((RowType) schema.toPhysicalRowDataType().getLogicalType()).getFields().stream();
final Stream<RowField> metadataFields =
createRequiredMetadataColumns(schema, source).stream()
.map(
k ->
new RowField(
// Use the alias to ensure that physical and
// metadata columns don't collide
k.getName(),
metadataMap
.get(k.getMetadataKey().orElse(k.getName()))
.getLogicalType()));
final List<RowField> rowFields =
Stream.concat(physicalFields, metadataFields).collect(Collectors.toList());
return new RowType(false, rowFields);
}
/** Returns true if the table is an upsert source. */
public static boolean isUpsertSource(
ResolvedSchema resolvedSchema, DynamicTableSource tableSource) {
if (!(tableSource instanceof ScanTableSource)) {
return false;
}
ChangelogMode mode = ((ScanTableSource) tableSource).getChangelogMode();
boolean isUpsertMode =
mode.contains(RowKind.UPDATE_AFTER) && !mode.contains(RowKind.UPDATE_BEFORE);
boolean hasPrimaryKey = resolvedSchema.getPrimaryKey().isPresent();
return isUpsertMode && hasPrimaryKey;
}
/** Returns true if the table source produces duplicate change events. */
public static boolean isSourceChangeEventsDuplicate(
ResolvedSchema resolvedSchema,
DynamicTableSource tableSource,
TableConfig tableConfig) {
if (!(tableSource instanceof ScanTableSource)) {
return false;
}
ChangelogMode mode = ((ScanTableSource) tableSource).getChangelogMode();
boolean isCDCSource =
!mode.containsOnly(RowKind.INSERT) && !isUpsertSource(resolvedSchema, tableSource);
boolean changeEventsDuplicate =
tableConfig.get(ExecutionConfigOptions.TABLE_EXEC_SOURCE_CDC_EVENTS_DUPLICATE);
boolean hasPrimaryKey = resolvedSchema.getPrimaryKey().isPresent();
return isCDCSource && changeEventsDuplicate && hasPrimaryKey;
}
/** Returns true if the changelogNormalize should be enabled. */
public static boolean changelogNormalizeEnabled(
boolean eventTimeSnapshotRequired,
ResolvedSchema resolvedSchema,
DynamicTableSource tableSource,
TableConfig tableConfig) {
return !eventTimeSnapshotRequired
&& (isUpsertSource(resolvedSchema, tableSource)
|| isSourceChangeEventsDuplicate(resolvedSchema, tableSource, tableConfig));
}
// --------------------------------------------------------------------------------------------
/** Creates a specialized node for assigning watermarks. */
private static void pushWatermarkAssigner(FlinkRelBuilder relBuilder, ResolvedSchema schema) {
final ExpressionConverter converter = new ExpressionConverter(relBuilder);
final RelDataType inputRelDataType = relBuilder.peek().getRowType();
// schema resolver has checked before that only one spec exists
final WatermarkSpec watermarkSpec = schema.getWatermarkSpecs().get(0);
final String rowtimeColumn = watermarkSpec.getRowtimeAttribute();
final int rowtimeColumnIdx = inputRelDataType.getFieldNames().indexOf(rowtimeColumn);
final RexNode watermarkRexNode = watermarkSpec.getWatermarkExpression().accept(converter);
relBuilder.watermark(rowtimeColumnIdx, watermarkRexNode);
}
/** Creates a projection that adds computed columns and finalizes the table schema. */
private static void pushGeneratedProjection(FlinkRelBuilder relBuilder, ResolvedSchema schema) {
final ExpressionConverter converter = new ExpressionConverter(relBuilder);
final List<RexNode> projection =
schema.getColumns().stream()
.map(
c -> {
if (c instanceof ComputedColumn) {
final ComputedColumn computedColumn = (ComputedColumn) c;
return computedColumn.getExpression().accept(converter);
} else {
return relBuilder.field(c.getName());
}
})
.collect(Collectors.toList());
relBuilder.projectNamed(
projection,
schema.getColumns().stream().map(Column::getName).collect(Collectors.toList()),
true);
}
/**
* Creates a projection that reorders physical and metadata columns according to the given
* schema. It casts metadata columns into the expected data type to be accessed by computed
* columns in the next step. Computed columns are ignored here.
*
* @see SupportsReadingMetadata
*/
private static void pushMetadataProjection(FlinkRelBuilder relBuilder, ResolvedSchema schema) {
final RexBuilder rexBuilder = relBuilder.getRexBuilder();
final List<String> fieldNames =
schema.getColumns().stream()
.filter(c -> !(c instanceof ComputedColumn))
.map(Column::getName)
.collect(Collectors.toList());
final List<RexNode> fieldNodes =
schema.getColumns().stream()
.filter(c -> !(c instanceof ComputedColumn))
.map(
c -> {
final RelDataType relDataType =
relBuilder
.getTypeFactory()
.createFieldTypeFromLogicalType(
c.getDataType().getLogicalType());
if (c instanceof MetadataColumn) {
final MetadataColumn metadataColumn = (MetadataColumn) c;
String columnName = metadataColumn.getName();
return rexBuilder.makeAbstractCast(
relDataType, relBuilder.field(columnName));
} else {
return relBuilder.field(c.getName());
}
})
.collect(Collectors.toList());
relBuilder.projectNamed(fieldNodes, fieldNames, true);
}
private static void pushTableScan(
boolean isBatchMode,
FlinkRelBuilder relBuilder,
ContextResolvedTable contextResolvedTable,
FlinkStatistic statistic,
List<RelHint> hints,
DynamicTableSource tableSource,
List<SourceAbilitySpec> sourceAbilities) {
final RowType producedType =
createProducedType(contextResolvedTable.getResolvedSchema(), tableSource);
final RelDataType producedRelDataType =
relBuilder.getTypeFactory().buildRelNodeRowType(producedType);
final TableSourceTable tableSourceTable =
new TableSourceTable(
relBuilder.getRelOptSchema(),
producedRelDataType,
statistic,
tableSource,
!isBatchMode,
contextResolvedTable,
ShortcutUtils.unwrapContext(relBuilder),
ShortcutUtils.unwrapTypeFactory(relBuilder),
sourceAbilities.toArray(new SourceAbilitySpec[0]));
final LogicalTableScan scan =
LogicalTableScan.create(relBuilder.getCluster(), tableSourceTable, hints);
relBuilder.push(scan);
}
public static Map<String, DataType> extractMetadataMap(DynamicTableSource source) {
if (source instanceof SupportsReadingMetadata) {
return ((SupportsReadingMetadata) source).listReadableMetadata();
}
return Collections.emptyMap();
}
public static List<MetadataColumn> extractMetadataColumns(ResolvedSchema schema) {
return schema.getColumns().stream()
.filter(MetadataColumn.class::isInstance)
.map(MetadataColumn.class::cast)
.collect(Collectors.toList());
}
public static void validateAndApplyMetadata(
String tableDebugName,
ResolvedSchema schema,
DynamicTableSource source,
List<SourceAbilitySpec> sourceAbilities) {
final List<MetadataColumn> metadataColumns = extractMetadataColumns(schema);
if (metadataColumns.isEmpty()) {
return;
}
if (!(source instanceof SupportsReadingMetadata)) {
throw new ValidationException(
String.format(
"Table '%s' declares metadata columns, but the underlying %s doesn't implement "
+ "the %s interface. Therefore, metadata cannot be read from the given source.",
source.asSummaryString(),
DynamicTableSource.class.getSimpleName(),
SupportsReadingMetadata.class.getSimpleName()));
}
final SupportsReadingMetadata metadataSource = (SupportsReadingMetadata) source;
final Map<String, DataType> metadataMap = metadataSource.listReadableMetadata();
metadataColumns.forEach(
c -> {
final String metadataKey = c.getMetadataKey().orElse(c.getName());
final LogicalType metadataType = c.getDataType().getLogicalType();
final DataType expectedMetadataDataType = metadataMap.get(metadataKey);
// check that metadata key is valid
if (expectedMetadataDataType == null) {
throw new ValidationException(
String.format(
"Invalid metadata key '%s' in column '%s' of table '%s'. "
+ "The %s class '%s' supports the following metadata keys for reading:\n%s",
metadataKey,
c.getName(),
tableDebugName,
DynamicTableSource.class.getSimpleName(),
source.getClass().getName(),
String.join("\n", metadataMap.keySet())));
}
// check that types are compatible
if (!supportsExplicitCast(
expectedMetadataDataType.getLogicalType(), metadataType)) {
if (metadataKey.equals(c.getName())) {
throw new ValidationException(
String.format(
"Invalid data type for metadata column '%s' of table '%s'. "
+ "The column cannot be declared as '%s' because the type must be "
+ "castable from metadata type '%s'.",
c.getName(),
tableDebugName,
expectedMetadataDataType.getLogicalType(),
metadataType));
} else {
throw new ValidationException(
String.format(
"Invalid data type for metadata column '%s' with metadata key '%s' of table '%s'. "
+ "The column cannot be declared as '%s' because the type must be "
+ "castable from metadata type '%s'.",
c.getName(),
metadataKey,
tableDebugName,
expectedMetadataDataType.getLogicalType(),
metadataType));
}
}
});
final List<String> metadataKeys =
createRequiredMetadataColumns(schema, source).stream()
.map(column -> column.getMetadataKey().orElse(column.getName()))
.collect(Collectors.toList());
final DataType producedDataType =
TypeConversions.fromLogicalToDataType(createProducedType(schema, source));
sourceAbilities.add(
new ReadingMetadataSpec(metadataKeys, (RowType) producedDataType.getLogicalType()));
metadataSource.applyReadableMetadata(metadataKeys, producedDataType);
}
private static void validateScanSource(
String tableDebugName,
ResolvedSchema schema,
ScanTableSource scanSource,
boolean isBatchMode,
ReadableConfig config) {
final ChangelogMode changelogMode = scanSource.getChangelogMode();
validateWatermarks(tableDebugName, schema);
if (isBatchMode) {
validateScanSourceForBatch(tableDebugName, scanSource, changelogMode);
} else {
validateScanSourceForStreaming(
tableDebugName, schema, scanSource, changelogMode, config);
}
}
private static void validateScanSourceForStreaming(
String tableDebugName,
ResolvedSchema schema,
ScanTableSource scanSource,
ChangelogMode changelogMode,
ReadableConfig config) {
// sanity check for produced ChangelogMode
final boolean hasChangelogMode = changelogMode != null;
final boolean hasUpdateBefore =
hasChangelogMode && changelogMode.contains(RowKind.UPDATE_BEFORE);
final boolean hasUpdateAfter =
hasChangelogMode && changelogMode.contains(RowKind.UPDATE_AFTER);
if (!hasUpdateBefore && hasUpdateAfter) {
// only UPDATE_AFTER
if (!schema.getPrimaryKey().isPresent()) {
throw new TableException(
String.format(
"Table '%s' produces a changelog stream that contains UPDATE_AFTER but no UPDATE_BEFORE. "
+ "This requires defining a primary key constraint on the table.",
tableDebugName));
}
} else if (hasUpdateBefore && !hasUpdateAfter) {
// only UPDATE_BEFORE
throw new ValidationException(
String.format(
"Invalid source for table '%s'. A %s doesn't support a changelog stream that contains "
+ "UPDATE_BEFORE but no UPDATE_AFTER. Please adapt the implementation of class '%s'.",
tableDebugName,
ScanTableSource.class.getSimpleName(),
scanSource.getClass().getName()));
} else if (hasChangelogMode && !changelogMode.containsOnly(RowKind.INSERT)) {
// CDC mode (non-upsert mode and non-insert-only mode)
final boolean changeEventsDuplicate =
config.get(ExecutionConfigOptions.TABLE_EXEC_SOURCE_CDC_EVENTS_DUPLICATE);
if (changeEventsDuplicate && !schema.getPrimaryKey().isPresent()) {
throw new TableException(
String.format(
"Configuration '%s' is enabled which requires the changelog sources to define a PRIMARY KEY. "
+ "However, table '%s' doesn't have a primary key.",
ExecutionConfigOptions.TABLE_EXEC_SOURCE_CDC_EVENTS_DUPLICATE.key(),
tableDebugName));
}
}
}
private static void validateScanSourceForBatch(
String tableDebugName, ScanTableSource scanSource, ChangelogMode changelogMode) {
final ScanRuntimeProvider provider =
scanSource.getScanRuntimeProvider(ScanRuntimeProviderContext.INSTANCE);
// batch only supports bounded source
if (!provider.isBounded()) {
throw new ValidationException(
String.format(
"Querying an unbounded table '%s' in batch mode is not allowed. "
+ "The table source is unbounded.",
tableDebugName));
}
// batch only supports INSERT only source
if (!changelogMode.containsOnly(RowKind.INSERT)) {
throw new TableException(
String.format(
"Querying a table in batch mode is currently only possible for INSERT-only table sources. "
+ "But the source for table '%s' produces other changelog messages than just INSERT.",
tableDebugName));
}
}
private static void validateWatermarks(String tableDebugName, ResolvedSchema schema) {
if (schema.getWatermarkSpecs().isEmpty()) {
return;
}
if (schema.getWatermarkSpecs().size() > 1) {
throw new TableException(
String.format(
"Currently only at most one WATERMARK declaration is supported for table '%s'.",
tableDebugName));
}
final String rowtimeAttribute = schema.getWatermarkSpecs().get(0).getRowtimeAttribute();
if (rowtimeAttribute.contains(".")) {
throw new TableException(
String.format(
"A nested field '%s' cannot be declared as rowtime attribute for table '%s' right now.",
rowtimeAttribute, tableDebugName));
}
}
private static void prepareRowLevelModificationScan(DynamicTableSource dynamicTableSource) {
// if the modification type has been set and the dynamic source supports row-level
// modification scan
if (RowLevelModificationContextUtils.getModificationType() != null
&& dynamicTableSource instanceof SupportsRowLevelModificationScan) {
SupportsRowLevelModificationScan modificationScan =
(SupportsRowLevelModificationScan) dynamicTableSource;
// get the previous scan context
RowLevelModificationScanContext scanContext =
RowLevelModificationContextUtils.getScanContext();
// pass the previous scan context to current table soruce and
// get a new scan context
RowLevelModificationScanContext newScanContext =
modificationScan.applyRowLevelModificationScan(
RowLevelModificationContextUtils.getModificationType(), scanContext);
// set the scan context
RowLevelModificationContextUtils.setScanContext(newScanContext);
}
}
private DynamicSourceUtils() {
// no instantiation
}
}
| DynamicSourceUtils |
java | google__dagger | java/dagger/hilt/android/plugin/main/src/test/data/java-libraryB/src/main/java/libb/LibraryBModule.java | {
"start": 828,
"end": 950
} | class ____ {
@Provides
public static LibraryBProvided provideB() {
return new LibraryBProvided();
}
}
| LibraryBModule |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/boot/model/Key.java | {
"start": 346,
"end": 1091
} | class ____ implements ColumnContainer, Bindable<JaxbHbmKeyType>, Cloneable<Key> {
private final List<Column> columns;
public Key() {
this.columns = new ArrayList<>();
}
public Key(Key key) {
this.columns = new ArrayList<>();
for ( Column column : key.columns ) {
this.columns.add( new Column( column ) );
}
}
@Override
public List<Column> getColumns() {
return columns;
}
@Override
public void addColumn(Column column) {
this.columns.add( column );
}
@Override
public Key deepCopy() {
return new Key( this );
}
@Override
public JaxbHbmKeyType build() {
final JaxbHbmKeyType key = new JaxbHbmKeyType();
for ( Column column : columns ) {
key.getColumn().add( column.build() );
}
return key;
}
}
| Key |
java | apache__flink | flink-core/src/main/java/org/apache/flink/util/clock/SystemClock.java | {
"start": 1194,
"end": 1870
} | class ____ extends Clock {
private static final SystemClock INSTANCE = new SystemClock();
public static SystemClock getInstance() {
return INSTANCE;
}
// ------------------------------------------------------------------------
@Override
public long absoluteTimeMillis() {
return System.currentTimeMillis();
}
@Override
public long relativeTimeMillis() {
return System.nanoTime() / 1_000_000;
}
@Override
public long relativeTimeNanos() {
return System.nanoTime();
}
// ------------------------------------------------------------------------
private SystemClock() {}
}
| SystemClock |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/util/H2Utils.java | {
"start": 970,
"end": 2173
} | class ____ {
private static volatile Constructor<JdbcXAConnection> constructor;
private static volatile Method method;
public static final int XA_DATA_SOURCE = 13;
public static Object createJdbcDataSourceFactory() {
return new JdbcDataSourceFactory();
}
public static XAConnection createXAConnection(Object factory, Connection physicalConn) throws SQLException {
try {
if (constructor == null) {
constructor = JdbcXAConnection.class.getDeclaredConstructor(JdbcDataSourceFactory.class, int.class,
JdbcConnection.class);
constructor.setAccessible(true);
}
int id = getNextId(XA_DATA_SOURCE);
return constructor.newInstance(factory, id, physicalConn);
} catch (Exception e) {
throw new SQLException("createXAConnection error", e);
}
}
public static int getNextId(int type) throws Exception {
if (method == null) {
method = TraceObject.class.getDeclaredMethod("getNextId", int.class);
method.setAccessible(true);
}
return (Integer) method.invoke(null, type);
}
}
| H2Utils |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/duplicate_statements/AnnotatedMapperExtended.java | {
"start": 957,
"end": 1086
} | interface ____ extends AnnotatedMapper {
@Select("select * from users")
List<User> getAllUsers(int i);
}
| AnnotatedMapperExtended |
java | apache__camel | components/camel-aws/camel-aws2-sns/src/generated/java/org/apache/camel/component/aws2/sns/Sns2ComponentConfigurer.java | {
"start": 735,
"end": 14342
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
private org.apache.camel.component.aws2.sns.Sns2Configuration getOrCreateConfiguration(Sns2Component target) {
if (target.getConfiguration() == null) {
target.setConfiguration(new org.apache.camel.component.aws2.sns.Sns2Configuration());
}
return target.getConfiguration();
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
Sns2Component target = (Sns2Component) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "accesskey":
case "accessKey": getOrCreateConfiguration(target).setAccessKey(property(camelContext, java.lang.String.class, value)); return true;
case "amazonsnsclient":
case "amazonSNSClient": getOrCreateConfiguration(target).setAmazonSNSClient(property(camelContext, software.amazon.awssdk.services.sns.SnsClient.class, value)); return true;
case "autocreatetopic":
case "autoCreateTopic": getOrCreateConfiguration(target).setAutoCreateTopic(property(camelContext, boolean.class, value)); return true;
case "autowiredenabled":
case "autowiredEnabled": target.setAutowiredEnabled(property(camelContext, boolean.class, value)); return true;
case "batchenabled":
case "batchEnabled": getOrCreateConfiguration(target).setBatchEnabled(property(camelContext, boolean.class, value)); return true;
case "configuration": target.setConfiguration(property(camelContext, org.apache.camel.component.aws2.sns.Sns2Configuration.class, value)); return true;
case "healthcheckconsumerenabled":
case "healthCheckConsumerEnabled": target.setHealthCheckConsumerEnabled(property(camelContext, boolean.class, value)); return true;
case "healthcheckproducerenabled":
case "healthCheckProducerEnabled": target.setHealthCheckProducerEnabled(property(camelContext, boolean.class, value)); return true;
case "kmsmasterkeyid":
case "kmsMasterKeyId": getOrCreateConfiguration(target).setKmsMasterKeyId(property(camelContext, java.lang.String.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "messagededuplicationidstrategy":
case "messageDeduplicationIdStrategy": getOrCreateConfiguration(target).setMessageDeduplicationIdStrategy(property(camelContext, java.lang.String.class, value)); return true;
case "messagegroupidstrategy":
case "messageGroupIdStrategy": getOrCreateConfiguration(target).setMessageGroupIdStrategy(property(camelContext, java.lang.String.class, value)); return true;
case "messagestructure":
case "messageStructure": getOrCreateConfiguration(target).setMessageStructure(property(camelContext, java.lang.String.class, value)); return true;
case "overrideendpoint":
case "overrideEndpoint": getOrCreateConfiguration(target).setOverrideEndpoint(property(camelContext, boolean.class, value)); return true;
case "policy": getOrCreateConfiguration(target).setPolicy(property(camelContext, java.lang.String.class, value)); return true;
case "profilecredentialsname":
case "profileCredentialsName": getOrCreateConfiguration(target).setProfileCredentialsName(property(camelContext, java.lang.String.class, value)); return true;
case "proxyhost":
case "proxyHost": getOrCreateConfiguration(target).setProxyHost(property(camelContext, java.lang.String.class, value)); return true;
case "proxyport":
case "proxyPort": getOrCreateConfiguration(target).setProxyPort(property(camelContext, java.lang.Integer.class, value)); return true;
case "proxyprotocol":
case "proxyProtocol": getOrCreateConfiguration(target).setProxyProtocol(property(camelContext, software.amazon.awssdk.core.Protocol.class, value)); return true;
case "queuearn":
case "queueArn": getOrCreateConfiguration(target).setQueueArn(property(camelContext, java.lang.String.class, value)); return true;
case "region": getOrCreateConfiguration(target).setRegion(property(camelContext, java.lang.String.class, value)); return true;
case "secretkey":
case "secretKey": getOrCreateConfiguration(target).setSecretKey(property(camelContext, java.lang.String.class, value)); return true;
case "serversideencryptionenabled":
case "serverSideEncryptionEnabled": getOrCreateConfiguration(target).setServerSideEncryptionEnabled(property(camelContext, boolean.class, value)); return true;
case "sessiontoken":
case "sessionToken": getOrCreateConfiguration(target).setSessionToken(property(camelContext, java.lang.String.class, value)); return true;
case "subject": getOrCreateConfiguration(target).setSubject(property(camelContext, java.lang.String.class, value)); return true;
case "subscribesnstosqs":
case "subscribeSNStoSQS": getOrCreateConfiguration(target).setSubscribeSNStoSQS(property(camelContext, boolean.class, value)); return true;
case "trustallcertificates":
case "trustAllCertificates": getOrCreateConfiguration(target).setTrustAllCertificates(property(camelContext, boolean.class, value)); return true;
case "uriendpointoverride":
case "uriEndpointOverride": getOrCreateConfiguration(target).setUriEndpointOverride(property(camelContext, java.lang.String.class, value)); return true;
case "usedefaultcredentialsprovider":
case "useDefaultCredentialsProvider": getOrCreateConfiguration(target).setUseDefaultCredentialsProvider(property(camelContext, boolean.class, value)); return true;
case "useprofilecredentialsprovider":
case "useProfileCredentialsProvider": getOrCreateConfiguration(target).setUseProfileCredentialsProvider(property(camelContext, boolean.class, value)); return true;
case "usesessioncredentials":
case "useSessionCredentials": getOrCreateConfiguration(target).setUseSessionCredentials(property(camelContext, boolean.class, value)); return true;
default: return false;
}
}
@Override
public String[] getAutowiredNames() {
return new String[]{"amazonSNSClient"};
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "accesskey":
case "accessKey": return java.lang.String.class;
case "amazonsnsclient":
case "amazonSNSClient": return software.amazon.awssdk.services.sns.SnsClient.class;
case "autocreatetopic":
case "autoCreateTopic": return boolean.class;
case "autowiredenabled":
case "autowiredEnabled": return boolean.class;
case "batchenabled":
case "batchEnabled": return boolean.class;
case "configuration": return org.apache.camel.component.aws2.sns.Sns2Configuration.class;
case "healthcheckconsumerenabled":
case "healthCheckConsumerEnabled": return boolean.class;
case "healthcheckproducerenabled":
case "healthCheckProducerEnabled": return boolean.class;
case "kmsmasterkeyid":
case "kmsMasterKeyId": return java.lang.String.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "messagededuplicationidstrategy":
case "messageDeduplicationIdStrategy": return java.lang.String.class;
case "messagegroupidstrategy":
case "messageGroupIdStrategy": return java.lang.String.class;
case "messagestructure":
case "messageStructure": return java.lang.String.class;
case "overrideendpoint":
case "overrideEndpoint": return boolean.class;
case "policy": return java.lang.String.class;
case "profilecredentialsname":
case "profileCredentialsName": return java.lang.String.class;
case "proxyhost":
case "proxyHost": return java.lang.String.class;
case "proxyport":
case "proxyPort": return java.lang.Integer.class;
case "proxyprotocol":
case "proxyProtocol": return software.amazon.awssdk.core.Protocol.class;
case "queuearn":
case "queueArn": return java.lang.String.class;
case "region": return java.lang.String.class;
case "secretkey":
case "secretKey": return java.lang.String.class;
case "serversideencryptionenabled":
case "serverSideEncryptionEnabled": return boolean.class;
case "sessiontoken":
case "sessionToken": return java.lang.String.class;
case "subject": return java.lang.String.class;
case "subscribesnstosqs":
case "subscribeSNStoSQS": return boolean.class;
case "trustallcertificates":
case "trustAllCertificates": return boolean.class;
case "uriendpointoverride":
case "uriEndpointOverride": return java.lang.String.class;
case "usedefaultcredentialsprovider":
case "useDefaultCredentialsProvider": return boolean.class;
case "useprofilecredentialsprovider":
case "useProfileCredentialsProvider": return boolean.class;
case "usesessioncredentials":
case "useSessionCredentials": return boolean.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
Sns2Component target = (Sns2Component) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "accesskey":
case "accessKey": return getOrCreateConfiguration(target).getAccessKey();
case "amazonsnsclient":
case "amazonSNSClient": return getOrCreateConfiguration(target).getAmazonSNSClient();
case "autocreatetopic":
case "autoCreateTopic": return getOrCreateConfiguration(target).isAutoCreateTopic();
case "autowiredenabled":
case "autowiredEnabled": return target.isAutowiredEnabled();
case "batchenabled":
case "batchEnabled": return getOrCreateConfiguration(target).isBatchEnabled();
case "configuration": return target.getConfiguration();
case "healthcheckconsumerenabled":
case "healthCheckConsumerEnabled": return target.isHealthCheckConsumerEnabled();
case "healthcheckproducerenabled":
case "healthCheckProducerEnabled": return target.isHealthCheckProducerEnabled();
case "kmsmasterkeyid":
case "kmsMasterKeyId": return getOrCreateConfiguration(target).getKmsMasterKeyId();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "messagededuplicationidstrategy":
case "messageDeduplicationIdStrategy": return getOrCreateConfiguration(target).getMessageDeduplicationIdStrategy();
case "messagegroupidstrategy":
case "messageGroupIdStrategy": return getOrCreateConfiguration(target).getMessageGroupIdStrategy();
case "messagestructure":
case "messageStructure": return getOrCreateConfiguration(target).getMessageStructure();
case "overrideendpoint":
case "overrideEndpoint": return getOrCreateConfiguration(target).isOverrideEndpoint();
case "policy": return getOrCreateConfiguration(target).getPolicy();
case "profilecredentialsname":
case "profileCredentialsName": return getOrCreateConfiguration(target).getProfileCredentialsName();
case "proxyhost":
case "proxyHost": return getOrCreateConfiguration(target).getProxyHost();
case "proxyport":
case "proxyPort": return getOrCreateConfiguration(target).getProxyPort();
case "proxyprotocol":
case "proxyProtocol": return getOrCreateConfiguration(target).getProxyProtocol();
case "queuearn":
case "queueArn": return getOrCreateConfiguration(target).getQueueArn();
case "region": return getOrCreateConfiguration(target).getRegion();
case "secretkey":
case "secretKey": return getOrCreateConfiguration(target).getSecretKey();
case "serversideencryptionenabled":
case "serverSideEncryptionEnabled": return getOrCreateConfiguration(target).isServerSideEncryptionEnabled();
case "sessiontoken":
case "sessionToken": return getOrCreateConfiguration(target).getSessionToken();
case "subject": return getOrCreateConfiguration(target).getSubject();
case "subscribesnstosqs":
case "subscribeSNStoSQS": return getOrCreateConfiguration(target).isSubscribeSNStoSQS();
case "trustallcertificates":
case "trustAllCertificates": return getOrCreateConfiguration(target).isTrustAllCertificates();
case "uriendpointoverride":
case "uriEndpointOverride": return getOrCreateConfiguration(target).getUriEndpointOverride();
case "usedefaultcredentialsprovider":
case "useDefaultCredentialsProvider": return getOrCreateConfiguration(target).isUseDefaultCredentialsProvider();
case "useprofilecredentialsprovider":
case "useProfileCredentialsProvider": return getOrCreateConfiguration(target).isUseProfileCredentialsProvider();
case "usesessioncredentials":
case "useSessionCredentials": return getOrCreateConfiguration(target).isUseSessionCredentials();
default: return null;
}
}
}
| Sns2ComponentConfigurer |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/customproviders/CustomContainerRequestFilter.java | {
"start": 274,
"end": 641
} | class ____ {
@ServerRequestFilter
public void whatever(UriInfo uriInfo, HttpHeaders httpHeaders, ContainerRequestContext requestContext) {
String customHeaderValue = uriInfo.getPath() + "-" + httpHeaders.getHeaderString("some-input");
requestContext.getHeaders().putSingle("custom-header", customHeaderValue);
}
}
| CustomContainerRequestFilter |
java | grpc__grpc-java | api/src/main/java/io/grpc/TlsChannelCredentials.java | {
"start": 14288,
"end": 14982
} | interface ____
* used. So generally there will just be a single entry and it implements {@link
* javax.net.ssl.X509TrustManager}.
*/
public Builder trustManager(TrustManager... trustManagers) {
List<TrustManager> trustManagerList = Collections.unmodifiableList(new ArrayList<>(
Arrays.asList(trustManagers)));
clearTrustManagers();
this.trustManagers = trustManagerList;
return this;
}
private void clearTrustManagers() {
this.rootCertificates = null;
this.trustManagers = null;
}
/** Construct the credentials. */
public ChannelCredentials build() {
return new TlsChannelCredentials(this);
}
}
}
| is |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/requests/DescribeDelegationTokenRequest.java | {
"start": 1275,
"end": 3152
} | class ____ extends AbstractRequest.Builder<DescribeDelegationTokenRequest> {
private final DescribeDelegationTokenRequestData data;
public Builder(List<KafkaPrincipal> owners) {
super(ApiKeys.DESCRIBE_DELEGATION_TOKEN);
this.data = new DescribeDelegationTokenRequestData()
.setOwners(owners == null ? null : owners
.stream()
.map(owner -> new DescribeDelegationTokenRequestData.DescribeDelegationTokenOwner()
.setPrincipalName(owner.getName())
.setPrincipalType(owner.getPrincipalType()))
.collect(Collectors.toList()));
}
@Override
public DescribeDelegationTokenRequest build(short version) {
return new DescribeDelegationTokenRequest(data, version);
}
@Override
public String toString() {
return data.toString();
}
}
private final DescribeDelegationTokenRequestData data;
public DescribeDelegationTokenRequest(DescribeDelegationTokenRequestData data, short version) {
super(ApiKeys.DESCRIBE_DELEGATION_TOKEN, version);
this.data = data;
}
@Override
public DescribeDelegationTokenRequestData data() {
return data;
}
public boolean ownersListEmpty() {
return data.owners() != null && data.owners().isEmpty();
}
@Override
public AbstractResponse getErrorResponse(int throttleTimeMs, Throwable e) {
return new DescribeDelegationTokenResponse(version(), throttleTimeMs, Errors.forException(e));
}
public static DescribeDelegationTokenRequest parse(Readable readable, short version) {
return new DescribeDelegationTokenRequest(new DescribeDelegationTokenRequestData(
readable, version), version);
}
}
| Builder |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/tofix/JsonIdentityInfoAndBackReferences3964Test.java | {
"start": 3229,
"end": 3663
} | class ____ {
public final int id;
public List<Cat> cats;
@JsonCreator
public Animal(@JsonProperty("id") int id, @JsonProperty("cats") List<Cat> cats) {
this.id = id;
this.cats = cats;
}
}
@JsonIdentityInfo(
generator = ObjectIdGenerators.PropertyGenerator.class,
property = "id",
scope = Cat.class
)
public static | Animal |
java | elastic__elasticsearch | plugins/store-smb/src/test/java/org/elasticsearch/index/store/smb/SmbNIOFSDirectoryTests.java | {
"start": 728,
"end": 1227
} | class ____ extends EsBaseDirectoryTestCase {
@Override
protected Directory getDirectory(Path file) throws IOException {
return new SmbDirectoryWrapper(new NIOFSDirectory(file));
}
@Override
public void testCreateOutputForExistingFile() throws IOException {
/**
* This test is disabled because {@link SmbDirectoryWrapper} opens existing file
* with an explicit StandardOpenOption.TRUNCATE_EXISTING option.
*/
}
}
| SmbNIOFSDirectoryTests |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/ReadOnlyMapTest.java | {
"start": 189,
"end": 582
} | class ____ extends TestCase {
public void test_readOnlyNullList() throws Exception {
String text = "{\"values\":{\"a\":{}}}";
Entity entity = JSON.parseObject(text, Entity.class);
Assert.assertNotNull(entity);
Assert.assertNotNull(entity.values.get("a"));
Assert.assertTrue(entity.values.get("a") instanceof A);
}
public static | ReadOnlyMapTest |
java | apache__camel | components/camel-servlet/src/test/java/org/apache/camel/component/servlet/ServletComponentMuteExceptionTest.java | {
"start": 1037,
"end": 2690
} | class ____ extends ServletCamelRouterTestSupport {
@Test
public void testMuteException() throws Exception {
WebRequest req = new PostMethodWebRequest(
contextUrl + "/services/mute",
new ByteArrayInputStream("".getBytes()), "text/plain");
WebResponse response = query(req, false);
assertEquals(500, response.getResponseCode());
assertEquals("text/plain", response.getContentType());
assertEquals("", response.getText());
}
@Test
public void testMuteWithTransferException() throws Exception {
WebRequest req = new PostMethodWebRequest(
contextUrl + "/services/muteWithTransfer",
new ByteArrayInputStream("".getBytes()), "text/plain");
WebResponse response = query(req, false);
assertEquals(500, response.getResponseCode());
assertEquals("text/plain", response.getContentType());
assertEquals("", response.getText());
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
ServletComponent sc = context.getComponent("servlet", ServletComponent.class);
sc.setMuteException(true);
from("servlet:mute")
.throwException(new IllegalArgumentException("Damn"));
from("servlet:muteWithTransfer?transferException=true")
.throwException(new IllegalArgumentException("Damn"));
}
};
}
}
| ServletComponentMuteExceptionTest |
java | spring-projects__spring-framework | spring-jms/src/test/java/org/springframework/jms/annotation/JmsListenerAnnotationBeanPostProcessorTests.java | {
"start": 10145,
"end": 10397
} | class ____ implements SimpleService {
@Override
public void handleIt(String value, String body) {
}
@Transactional
@JmsListener(destination = "testQueue")
@SendTo("foobar")
public void handleIt2(String body) {
}
}
}
| InvalidProxyTestBean |
java | apache__rocketmq | broker/src/test/java/org/apache/rocketmq/broker/topic/RocksdbTopicConfigManagerTest.java | {
"start": 2176,
"end": 15824
} | class ____ {
private final String basePath = Paths.get(System.getProperty("user.home"),
"unit-test-store", UUID.randomUUID().toString().substring(0, 16).toUpperCase()).toString();
private RocksDBTopicConfigManager topicConfigManager;
@Mock
private BrokerController brokerController;
@Mock
private DefaultMessageStore defaultMessageStore;
@Before
public void init() {
if (notToBeExecuted()) {
return;
}
BrokerConfig brokerConfig = new BrokerConfig();
when(brokerController.getBrokerConfig()).thenReturn(brokerConfig);
MessageStoreConfig messageStoreConfig = new MessageStoreConfig();
messageStoreConfig.setStorePathRootDir(basePath);
when(brokerController.getMessageStoreConfig()).thenReturn(messageStoreConfig);
Mockito.lenient().when(brokerController.getMessageStore()).thenReturn(defaultMessageStore);
Mockito.lenient().when(defaultMessageStore.getStateMachineVersion()).thenReturn(0L);
topicConfigManager = new RocksDBTopicConfigManager(brokerController);
topicConfigManager.load();
}
@After
public void destroy() {
if (notToBeExecuted()) {
return;
}
if (topicConfigManager != null) {
topicConfigManager.stop();
}
}
@Test
public void testAddUnsupportedKeyOnCreating() {
if (notToBeExecuted()) {
return;
}
String unsupportedKey = "key4";
String topicName = "testAddUnsupportedKeyOnCreating-" + System.currentTimeMillis();
supportAttributes(asList(
new EnumAttribute("enum.key", true, newHashSet("enum-1", "enum-2", "enum-3"), "enum-1"),
new BooleanAttribute("bool.key", false, false),
new LongRangeAttribute("long.range.key", true, 10, 20, 15)
));
Map<String, String> attributes = new HashMap<>();
attributes.put("+enum.key", "enum-2");
attributes.put("+" + unsupportedKey, "value1");
TopicConfig topicConfig = new TopicConfig();
topicConfig.setTopicName(topicName);
topicConfig.setAttributes(attributes);
RuntimeException runtimeException = Assert.assertThrows(RuntimeException.class, () -> topicConfigManager.updateTopicConfig(topicConfig));
Assert.assertEquals("unsupported key: " + unsupportedKey, runtimeException.getMessage());
}
@Test
public void testAddWrongFormatKeyOnCreating() {
if (notToBeExecuted()) {
return;
}
String topicName = "testAddWrongFormatKeyOnCreating-" + System.currentTimeMillis();
supportAttributes(asList(
new EnumAttribute("enum.key", true, newHashSet("enum-1", "enum-2", "enum-3"), "enum-1"),
new BooleanAttribute("bool.key", false, false),
new LongRangeAttribute("long.range.key", true, 10, 20, 15)
));
Map<String, String> attributes = new HashMap<>();
attributes.put("++enum.key", "value1");
TopicConfig topicConfig = new TopicConfig();
topicConfig.setTopicName(topicName);
topicConfig.setAttributes(attributes);
RuntimeException runtimeException = Assert.assertThrows(RuntimeException.class, () -> topicConfigManager.updateTopicConfig(topicConfig));
Assert.assertEquals("kv string format wrong.", runtimeException.getMessage());
}
@Test
public void testDeleteKeyOnCreating() {
if (notToBeExecuted()) {
return;
}
String topicName = "testDeleteKeyOnCreating-" + System.currentTimeMillis();
String key = "enum.key";
supportAttributes(asList(
new EnumAttribute("enum.key", true, newHashSet("enum-1", "enum-2", "enum-3"), "enum-1"),
new BooleanAttribute("bool.key", false, false),
new LongRangeAttribute("long.range.key", true, 10, 20, 15)
));
Map<String, String> attributes = new HashMap<>();
attributes.put("-" + key, "");
TopicConfig topicConfig = new TopicConfig();
topicConfig.setTopicName(topicName);
topicConfig.setAttributes(attributes);
RuntimeException runtimeException = Assert.assertThrows(RuntimeException.class, () -> topicConfigManager.updateTopicConfig(topicConfig));
Assert.assertEquals("only add attribute is supported while creating topic. key: " + key, runtimeException.getMessage());
}
@Test
public void testAddWrongValueOnCreating() {
if (notToBeExecuted()) {
return;
}
String topicName = "testAddWrongValueOnCreating-" + System.currentTimeMillis();
Map<String, String> attributes = new HashMap<>();
attributes.put("+" + TopicAttributes.QUEUE_TYPE_ATTRIBUTE.getName(), "wrong-value");
TopicConfig topicConfig = new TopicConfig();
topicConfig.setTopicName(topicName);
topicConfig.setAttributes(attributes);
RuntimeException runtimeException = Assert.assertThrows(RuntimeException.class, () -> topicConfigManager.updateTopicConfig(topicConfig));
Assert.assertEquals("value is not in set: [SimpleCQ, BatchCQ]", runtimeException.getMessage());
}
@Test
public void testNormalAddKeyOnCreating() {
if (notToBeExecuted()) {
return;
}
String topic = "testNormalAddKeyOnCreating-" + System.currentTimeMillis();
supportAttributes(asList(
new EnumAttribute("enum.key", true, newHashSet("enum-1", "enum-2", "enum-3"), "enum-1"),
new BooleanAttribute("bool.key", false, false),
new LongRangeAttribute("long.range.key", true, 10, 20, 15)
));
Map<String, String> attributes = new HashMap<>();
attributes.put("+enum.key", "enum-2");
attributes.put("+long.range.key", "16");
TopicConfig topicConfig = new TopicConfig();
topicConfig.setTopicName(topic);
topicConfig.setAttributes(attributes);
topicConfigManager.updateTopicConfig(topicConfig);
TopicConfig existingTopicConfig = topicConfigManager.getTopicConfigTable().get(topic);
Assert.assertEquals("enum-2", existingTopicConfig.getAttributes().get("enum.key"));
Assert.assertEquals("16", existingTopicConfig.getAttributes().get("long.range.key"));
}
@Test
public void testAddDuplicatedKeyOnUpdating() {
if (notToBeExecuted()) {
return;
}
String duplicatedKey = "long.range.key";
String topicName = "testAddDuplicatedKeyOnUpdating-" + System.currentTimeMillis();
supportAttributes(asList(
new EnumAttribute("enum.key", true, newHashSet("enum-1", "enum-2", "enum-3"), "enum-1"),
new BooleanAttribute("bool.key", false, false),
new LongRangeAttribute("long.range.key", true, 10, 20, 15)
));
Map<String, String> attributes = new HashMap<>();
attributes.put("+enum.key", "enum-3");
attributes.put("+bool.key", "true");
attributes.put("+long.range.key", "12");
TopicConfig topicConfig = new TopicConfig();
topicConfig.setTopicName(topicName);
topicConfig.setAttributes(attributes);
topicConfigManager.updateTopicConfig(topicConfig);
attributes = new HashMap<>();
attributes.put("+" + duplicatedKey, "11");
attributes.put("-" + duplicatedKey, "");
TopicConfig duplicateTopicConfig = new TopicConfig();
duplicateTopicConfig.setTopicName(topicName);
duplicateTopicConfig.setAttributes(attributes);
RuntimeException runtimeException = Assert.assertThrows(RuntimeException.class, () -> topicConfigManager.updateTopicConfig(duplicateTopicConfig));
Assert.assertEquals("alter duplication key. key: " + duplicatedKey, runtimeException.getMessage());
}
@Test
public void testDeleteNonexistentKeyOnUpdating() {
if (notToBeExecuted()) {
return;
}
String key = "nonexisting.key";
String topicName = "testDeleteNonexistentKeyOnUpdating-" + System.currentTimeMillis();
supportAttributes(asList(
new EnumAttribute("enum.key", true, newHashSet("enum-1", "enum-2", "enum-3"), "enum-1"),
new BooleanAttribute("bool.key", false, false),
new LongRangeAttribute("long.range.key", true, 10, 20, 15)
));
Map<String, String> attributes = new HashMap<>();
attributes.put("+enum.key", "enum-2");
attributes.put("+bool.key", "true");
TopicConfig topicConfig = new TopicConfig();
topicConfig.setTopicName(topicName);
topicConfig.setAttributes(attributes);
topicConfigManager.updateTopicConfig(topicConfig);
attributes = new HashMap<>();
attributes.clear();
attributes.put("-" + key, "");
topicConfig.setAttributes(attributes);
RuntimeException runtimeException = Assert.assertThrows(RuntimeException.class, () -> topicConfigManager.updateTopicConfig(topicConfig));
Assert.assertEquals("attempt to delete a nonexistent key: " + key, runtimeException.getMessage());
}
@Test
public void testAlterTopicWithoutChangingAttributes() {
if (notToBeExecuted()) {
return;
}
String topic = "testAlterTopicWithoutChangingAttributes-" + System.currentTimeMillis();
supportAttributes(asList(
new EnumAttribute("enum.key", true, newHashSet("enum-1", "enum-2", "enum-3"), "enum-1"),
new BooleanAttribute("bool.key", false, false),
new LongRangeAttribute("long.range.key", true, 10, 20, 15)
));
Map<String, String> attributes = new HashMap<>();
attributes.put("+enum.key", "enum-2");
attributes.put("+bool.key", "true");
TopicConfig topicConfigInit = new TopicConfig();
topicConfigInit.setTopicName(topic);
topicConfigInit.setAttributes(attributes);
topicConfigManager.updateTopicConfig(topicConfigInit);
Assert.assertEquals("enum-2", topicConfigManager.getTopicConfigTable().get(topic).getAttributes().get("enum.key"));
Assert.assertEquals("true", topicConfigManager.getTopicConfigTable().get(topic).getAttributes().get("bool.key"));
TopicConfig topicConfigAlter = new TopicConfig();
topicConfigAlter.setTopicName(topic);
topicConfigAlter.setReadQueueNums(10);
topicConfigAlter.setWriteQueueNums(10);
topicConfigManager.updateTopicConfig(topicConfigAlter);
Assert.assertEquals("enum-2", topicConfigManager.getTopicConfigTable().get(topic).getAttributes().get("enum.key"));
Assert.assertEquals("true", topicConfigManager.getTopicConfigTable().get(topic).getAttributes().get("bool.key"));
}
@Test
public void testNormalUpdateUnchangeableKeyOnUpdating() {
if (notToBeExecuted()) {
return;
}
String topic = "testNormalUpdateUnchangeableKeyOnUpdating-" + System.currentTimeMillis();
supportAttributes(asList(
new EnumAttribute("enum.key", true, newHashSet("enum-1", "enum-2", "enum-3"), "enum-1"),
new BooleanAttribute("bool.key", true, false),
new LongRangeAttribute("long.range.key", false, 10, 20, 15)
));
Map<String, String> attributes = new HashMap<>();
attributes.put("+long.range.key", "14");
TopicConfig topicConfig = new TopicConfig();
topicConfig.setTopicName(topic);
topicConfig.setAttributes(attributes);
topicConfigManager.updateTopicConfig(topicConfig);
attributes.put("+long.range.key", "16");
topicConfig.setAttributes(attributes);
RuntimeException runtimeException = Assert.assertThrows(RuntimeException.class, () -> topicConfigManager.updateTopicConfig(topicConfig));
Assert.assertEquals("attempt to update an unchangeable attribute. key: long.range.key", runtimeException.getMessage());
}
@Test
public void testNormalQueryKeyOnGetting() {
if (notToBeExecuted()) {
return;
}
String topic = "testNormalQueryKeyOnGetting-" + System.currentTimeMillis();
String unchangeable = "bool.key";
supportAttributes(asList(
new EnumAttribute("enum.key", true, newHashSet("enum-1", "enum-2", "enum-3"), "enum-1"),
new BooleanAttribute("bool.key", false, false),
new LongRangeAttribute("long.range.key", true, 10, 20, 15)
));
Map<String, String> attributes = new HashMap<>();
attributes.put("+" + unchangeable, "true");
TopicConfig topicConfig = new TopicConfig();
topicConfig.setTopicName(topic);
topicConfig.setAttributes(attributes);
topicConfigManager.updateTopicConfig(topicConfig);
TopicConfig topicConfigUpdated = topicConfigManager.getTopicConfigTable().get(topic);
Assert.assertEquals(CQType.SimpleCQ, QueueTypeUtils.getCQType(Optional.of(topicConfigUpdated)));
Assert.assertEquals("true", topicConfigUpdated.getAttributes().get(unchangeable));
}
private void supportAttributes(List<Attribute> supportAttributes) {
Map<String, Attribute> supportedAttributes = new HashMap<>();
for (Attribute supportAttribute : supportAttributes) {
supportedAttributes.put(supportAttribute.getName(), supportAttribute);
}
TopicAttributes.ALL.putAll(supportedAttributes);
}
private boolean notToBeExecuted() {
return MixAll.isMac();
}
}
| RocksdbTopicConfigManagerTest |
java | apache__camel | components/camel-netty/src/test/java/org/apache/camel/component/netty/NettyReuseConnectionTest.java | {
"start": 1008,
"end": 1645
} | class ____ extends BaseNettyTest {
private String uri = "netty:tcp://localhost:{{port}}?sync=true&disconnect=false";
@Test
public void testReuseConnection() {
for (int i = 0; i < 20; i++) {
String out = template.requestBody(uri, Integer.toString(i), String.class);
assertEquals("Reply " + i, out);
}
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from(uri).transform().simple("Reply ${body}");
}
};
}
}
| NettyReuseConnectionTest |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/web/ErrorResponseExceptionTests.java | {
"start": 3349,
"end": 17328
} | class ____ {
private final MethodParameter methodParameter =
new MethodParameter(ResolvableMethod.on(getClass()).resolveMethod("handle"), 0);
@Test
void httpMediaTypeNotSupportedException() {
List<MediaType> mediaTypes =
Arrays.asList(MediaType.APPLICATION_JSON, MediaType.APPLICATION_CBOR);
HttpMediaTypeNotSupportedException ex = new HttpMediaTypeNotSupportedException(
MediaType.APPLICATION_XML, mediaTypes, HttpMethod.PATCH, "Custom message");
assertStatus(ex, HttpStatus.UNSUPPORTED_MEDIA_TYPE);
assertDetail(ex, "Content-Type 'application/xml' is not supported.");
assertDetailMessageCode(ex, null, new Object[] {ex.getContentType(), ex.getSupportedMediaTypes()});
HttpHeaders headers = ex.getHeaders();
assertThat(headers.getAccept()).isEqualTo(mediaTypes);
assertThat(headers.getAcceptPatch()).isEqualTo(mediaTypes);
}
@Test
void httpMediaTypeNotSupportedExceptionWithParseError() {
ErrorResponse ex = new HttpMediaTypeNotSupportedException(
"Could not parse Accept header: Invalid mime type \"foo\": does not contain '/'");
assertStatus(ex, HttpStatus.UNSUPPORTED_MEDIA_TYPE);
assertDetail(ex, "Could not parse Content-Type.");
assertDetailMessageCode(ex, "parseError", null);
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void httpMediaTypeNotAcceptableException() {
List<MediaType> mediaTypes = Arrays.asList(MediaType.APPLICATION_JSON, MediaType.APPLICATION_CBOR);
HttpMediaTypeNotAcceptableException ex = new HttpMediaTypeNotAcceptableException(mediaTypes);
assertStatus(ex, HttpStatus.NOT_ACCEPTABLE);
assertDetail(ex, "Acceptable representations: [application/json, application/cbor].");
assertDetailMessageCode(ex, null, new Object[] {ex.getSupportedMediaTypes()});
assertThat(ex.getHeaders().size()).isOne();
assertThat(ex.getHeaders().getAccept()).isEqualTo(mediaTypes);
}
@Test
void httpMediaTypeNotAcceptableExceptionWithParseError() {
ErrorResponse ex = new HttpMediaTypeNotAcceptableException(
"Could not parse Accept header: Invalid mime type \"foo\": does not contain '/'");
assertStatus(ex, HttpStatus.NOT_ACCEPTABLE);
assertDetail(ex, "Could not parse Accept header.");
assertDetailMessageCode(ex, "parseError", null);
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void asyncRequestTimeoutException() {
ErrorResponse ex = new AsyncRequestTimeoutException();
assertDetailMessageCode(ex, null, null);
assertStatus(ex, HttpStatus.SERVICE_UNAVAILABLE);
assertDetail(ex, null);
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void httpRequestMethodNotSupportedException() {
HttpRequestMethodNotSupportedException ex =
new HttpRequestMethodNotSupportedException("PUT", Arrays.asList("GET", "POST"));
assertStatus(ex, HttpStatus.METHOD_NOT_ALLOWED);
assertDetail(ex, "Method 'PUT' is not supported.");
assertDetailMessageCode(ex, null, new Object[] {ex.getMethod(), ex.getSupportedHttpMethods()});
assertThat(ex.getHeaders().size()).isOne();
assertThat(ex.getHeaders().getAllow()).containsExactly(HttpMethod.GET, HttpMethod.POST);
}
@Test
void missingRequestHeaderException() {
MissingRequestHeaderException ex = new MissingRequestHeaderException("Authorization", this.methodParameter);
assertStatus(ex, HttpStatus.BAD_REQUEST);
assertDetail(ex, "Required header 'Authorization' is not present.");
assertDetailMessageCode(ex, null, new Object[] {ex.getHeaderName()});
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void missingServletRequestParameterException() {
MissingServletRequestParameterException ex = new MissingServletRequestParameterException("query", "String");
assertStatus(ex, HttpStatus.BAD_REQUEST);
assertDetail(ex, "Required parameter 'query' is not present.");
assertDetailMessageCode(ex, null, new Object[] {ex.getParameterName()});
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void missingMatrixVariableException() {
MissingMatrixVariableException ex = new MissingMatrixVariableException("region", this.methodParameter);
assertStatus(ex, HttpStatus.BAD_REQUEST);
assertDetail(ex, "Required path parameter 'region' is not present.");
assertDetailMessageCode(ex, null, new Object[] {ex.getVariableName()});
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void missingPathVariableException() {
MissingPathVariableException ex = new MissingPathVariableException("id", this.methodParameter);
assertStatus(ex, HttpStatus.INTERNAL_SERVER_ERROR);
assertDetail(ex, "Required path variable 'id' is not present.");
assertDetailMessageCode(ex, null, new Object[] {ex.getVariableName()});
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void missingPathVariableExceptionAfterConversion() {
MissingPathVariableException ex = new MissingPathVariableException("id", this.methodParameter, true);
assertStatus(ex, HttpStatus.BAD_REQUEST);
assertDetail(ex, "Required path variable 'id' is not present.");
assertDetailMessageCode(ex, null, new Object[] {ex.getVariableName()});
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void missingRequestCookieException() {
MissingRequestCookieException ex = new MissingRequestCookieException("oreo", this.methodParameter);
assertStatus(ex, HttpStatus.BAD_REQUEST);
assertDetail(ex, "Required cookie 'oreo' is not present.");
assertDetailMessageCode(ex, null, new Object[] {ex.getCookieName()});
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void unsatisfiedServletRequestParameterException() {
UnsatisfiedServletRequestParameterException ex = new UnsatisfiedServletRequestParameterException(
new String[] { "foo=bar", "bar=baz" }, Collections.singletonMap("q", new String[] {"1"}));
assertStatus(ex, HttpStatus.BAD_REQUEST);
assertDetail(ex, "Invalid request parameters.");
assertDetailMessageCode(ex, null, new Object[] {List.of("\"foo=bar, bar=baz\"")});
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void missingServletRequestPartException() {
MissingServletRequestPartException ex = new MissingServletRequestPartException("file");
assertStatus(ex, HttpStatus.BAD_REQUEST);
assertDetail(ex, "Required part 'file' is not present.");
assertDetailMessageCode(ex, null, new Object[] {ex.getRequestPartName()});
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void methodArgumentNotValidException() {
ValidationTestHelper testHelper = new ValidationTestHelper(MethodArgumentNotValidException.class);
BindingResult result = testHelper.bindingResult();
MethodArgumentNotValidException ex = new MethodArgumentNotValidException(this.methodParameter, result);
assertStatus(ex, HttpStatus.BAD_REQUEST);
assertDetail(ex, "Invalid request content.");
testHelper.assertMessages(ex, ex.getAllErrors());
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void handlerMethodValidationException() {
MethodValidationResult result = mock(MethodValidationResult.class);
when(result.isForReturnValue()).thenReturn(false);
HandlerMethodValidationException ex = new HandlerMethodValidationException(result);
assertStatus(ex, HttpStatus.BAD_REQUEST);
assertDetail(ex, "Validation failure");
reset(result);
when(result.isForReturnValue()).thenReturn(true);
ex = new HandlerMethodValidationException(result);
assertStatus(ex, HttpStatus.INTERNAL_SERVER_ERROR);
assertDetail(ex, "Validation failure");
}
@Test
void unsupportedMediaTypeStatusException() {
List<MediaType> mediaTypes =
Arrays.asList(MediaType.APPLICATION_JSON, MediaType.APPLICATION_CBOR);
UnsupportedMediaTypeStatusException ex = new UnsupportedMediaTypeStatusException(
MediaType.APPLICATION_XML, mediaTypes, HttpMethod.PATCH);
assertStatus(ex, HttpStatus.UNSUPPORTED_MEDIA_TYPE);
assertDetail(ex, "Content-Type 'application/xml' is not supported.");
assertDetailMessageCode(ex, null, new Object[] {ex.getContentType(), ex.getSupportedMediaTypes()});
HttpHeaders headers = ex.getHeaders();
assertThat(headers.getAccept()).isEqualTo(mediaTypes);
assertThat(headers.getAcceptPatch()).isEqualTo(mediaTypes);
}
@Test
void unsupportedMediaTypeStatusExceptionWithParseError() {
ErrorResponse ex = new UnsupportedMediaTypeStatusException(
"Could not parse Accept header: Invalid mime type \"foo\": does not contain '/'");
assertStatus(ex, HttpStatus.UNSUPPORTED_MEDIA_TYPE);
assertDetail(ex, "Could not parse Content-Type.");
assertDetailMessageCode(ex, "parseError", null);
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void notAcceptableStatusException() {
List<MediaType> mediaTypes = Arrays.asList(MediaType.APPLICATION_JSON, MediaType.APPLICATION_CBOR);
NotAcceptableStatusException ex = new NotAcceptableStatusException(mediaTypes);
assertStatus(ex, HttpStatus.NOT_ACCEPTABLE);
assertDetail(ex, "Acceptable representations: [application/json, application/cbor].");
assertDetailMessageCode(ex, null, new Object[] {ex.getSupportedMediaTypes()});
assertThat(ex.getHeaders().size()).isOne();
assertThat(ex.getHeaders().getAccept()).isEqualTo(mediaTypes);
}
@Test
void notAcceptableStatusExceptionWithParseError() {
ErrorResponse ex = new NotAcceptableStatusException(
"Could not parse Accept header: Invalid mime type \"foo\": does not contain '/'");
assertStatus(ex, HttpStatus.NOT_ACCEPTABLE);
assertDetail(ex, "Could not parse Accept header.");
assertDetailMessageCode(ex, "parseError", null);
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void serverErrorException() {
ServerErrorException ex = new ServerErrorException("Failure", null);
assertStatus(ex, HttpStatus.INTERNAL_SERVER_ERROR);
assertDetail(ex, "Failure");
assertDetailMessageCode(ex, null, new Object[] {ex.getReason()});
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void missingRequestValueException() {
MissingRequestValueException ex =
new MissingRequestValueException("foo", String.class, "header", this.methodParameter);
assertStatus(ex, HttpStatus.BAD_REQUEST);
assertDetail(ex, "Required header 'foo' is not present.");
assertDetailMessageCode(ex, null, new Object[] {ex.getLabel(), ex.getName()});
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void unsatisfiedRequestParameterException() {
UnsatisfiedRequestParameterException ex =
new UnsatisfiedRequestParameterException(
Arrays.asList("foo=bar", "bar=baz"),
new LinkedMultiValueMap<>(Collections.singletonMap("q", Arrays.asList("1", "2"))));
assertStatus(ex, HttpStatus.BAD_REQUEST);
assertDetail(ex, "Invalid request parameters.");
assertDetailMessageCode(ex, null, new Object[] {ex.getConditions()});
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void webExchangeBindException() {
ValidationTestHelper testHelper = new ValidationTestHelper(WebExchangeBindException.class);
BindingResult result = testHelper.bindingResult();
WebExchangeBindException ex = new WebExchangeBindException(this.methodParameter, result);
assertStatus(ex, HttpStatus.BAD_REQUEST);
assertDetail(ex, "Invalid request content.");
testHelper.assertMessages(ex, ex.getAllErrors());
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test
void methodNotAllowedException() {
List<HttpMethod> supportedMethods = Arrays.asList(HttpMethod.GET, HttpMethod.POST);
MethodNotAllowedException ex = new MethodNotAllowedException(HttpMethod.PUT, supportedMethods);
assertStatus(ex, HttpStatus.METHOD_NOT_ALLOWED);
assertDetail(ex, "Supported methods: [GET, POST]");
assertDetailMessageCode(ex, null, new Object[] {ex.getHttpMethod(), supportedMethods});
assertThat(ex.getHeaders().size()).isOne();
assertThat(ex.getHeaders().getAllow()).containsExactly(HttpMethod.GET, HttpMethod.POST);
}
@Test
void methodNotAllowedExceptionWithoutSupportedMethods() {
MethodNotAllowedException ex = new MethodNotAllowedException(HttpMethod.PUT, Collections.emptyList());
assertStatus(ex, HttpStatus.METHOD_NOT_ALLOWED);
assertDetail(ex, "Request method 'PUT' is not supported.");
assertDetailMessageCode(ex, null, new Object[] {ex.getHttpMethod(), Collections.emptyList()});
assertThat(ex.getHeaders().isEmpty()).isTrue();
}
@Test // gh-30300
void responseStatusException() {
Locale locale = Locale.UK;
LocaleContextHolder.setLocale(locale);
try {
String reason = "bad.request";
String message = "Breaking Bad Request";
StaticMessageSource messageSource = new StaticMessageSource();
messageSource.addMessage(reason, locale, message);
ResponseStatusException ex = new ResponseStatusException(HttpStatus.BAD_REQUEST, reason);
ProblemDetail problemDetail = ex.updateAndGetBody(messageSource, locale);
assertThat(problemDetail.getDetail()).isEqualTo(message);
}
finally {
LocaleContextHolder.resetLocaleContext();
}
}
private void assertStatus(ErrorResponse ex, HttpStatus status) {
ProblemDetail body = ex.getBody();
assertThat(ex.getStatusCode()).isEqualTo(status);
assertThat(body.getStatus()).isEqualTo(status.value());
assertThat(body.getTitle()).isEqualTo(status.getReasonPhrase());
}
private void assertDetail(ErrorResponse ex, @Nullable String detail) {
if (detail != null) {
assertThat(ex.getBody().getDetail()).isEqualTo(detail);
}
else {
assertThat(ex.getBody().getDetail()).isNull();
}
}
private void assertDetailMessageCode(
ErrorResponse ex, @Nullable String suffix, Object @Nullable [] arguments) {
assertThat(ex.getDetailMessageCode())
.isEqualTo(ErrorResponse.getDefaultDetailMessageCode(ex.getClass(), suffix));
if (arguments != null) {
assertThat(ex.getDetailMessageArguments()).containsExactlyElementsOf(Arrays.asList(arguments));
}
else {
assertThat(ex.getDetailMessageArguments()).isNull();
}
}
@SuppressWarnings("unused")
private void handle(String arg) {}
private static | ErrorResponseExceptionTests |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/joda/JodaTest_6_Period.java | {
"start": 231,
"end": 611
} | class ____ extends TestCase {
public void test_for_joda_0() throws Exception {
Model m = new Model();
m.period = Period.days(3);
String json = JSON.toJSONString(m);
assertEquals("{\"period\":\"P3D\"}", json);
Model m1 = JSON.parseObject(json, Model.class);
assertEquals(m.period, m1.period);
}
public static | JodaTest_6_Period |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/AzureStorageQueueComponentBuilderFactory.java | {
"start": 1398,
"end": 1980
} | interface ____ {
/**
* Azure Storage Queue Service (camel-azure-storage-queue)
* Stores and retrieves messages to/from Azure Storage Queue.
*
* Category: cloud,messaging
* Since: 3.3
* Maven coordinates: org.apache.camel:camel-azure-storage-queue
*
* @return the dsl builder
*/
static AzureStorageQueueComponentBuilder azureStorageQueue() {
return new AzureStorageQueueComponentBuilderImpl();
}
/**
* Builder for the Azure Storage Queue Service component.
*/
| AzureStorageQueueComponentBuilderFactory |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/utils/DummyStreamExecutionEnvironment.java | {
"start": 3616,
"end": 8712
} | class ____ extends StreamExecutionEnvironment {
private final StreamExecutionEnvironment realExecEnv;
public DummyStreamExecutionEnvironment(StreamExecutionEnvironment realExecEnv) {
this.realExecEnv = realExecEnv;
}
@Override
public ExecutionConfig getConfig() {
return realExecEnv.getConfig();
}
@Override
public ReadableConfig getConfiguration() {
return realExecEnv.getConfiguration();
}
@Override
public List<Tuple2<String, DistributedCache.DistributedCacheEntry>> getCachedFiles() {
return realExecEnv.getCachedFiles();
}
@Override
public StreamExecutionEnvironment setParallelism(int parallelism) {
// Please always reset the parallelism back to the original one after changed
realExecEnv.setParallelism(parallelism);
return this;
}
@Override
public StreamExecutionEnvironment setMaxParallelism(int maxParallelism) {
throw new UnsupportedOperationException(
"This is a dummy StreamExecutionEnvironment, setMaxParallelism method is unsupported.");
}
@Override
public int getParallelism() {
return realExecEnv.getParallelism();
}
@Override
public int getMaxParallelism() {
return realExecEnv.getMaxParallelism();
}
@Override
public StreamExecutionEnvironment setBufferTimeout(long timeoutMillis) {
throw new UnsupportedOperationException(
"This is a dummy StreamExecutionEnvironment, setBufferTimeout method is unsupported.");
}
@Override
public long getBufferTimeout() {
return realExecEnv.getBufferTimeout();
}
@Override
public StreamExecutionEnvironment disableOperatorChaining() {
throw new UnsupportedOperationException(
"This is a dummy StreamExecutionEnvironment, disableOperatorChaining method is unsupported.");
}
@Override
public boolean isChainingEnabled() {
return realExecEnv.isChainingEnabled();
}
@Override
public CheckpointConfig getCheckpointConfig() {
return realExecEnv.getCheckpointConfig();
}
@Override
public StreamExecutionEnvironment enableCheckpointing(long interval) {
throw new UnsupportedOperationException(
"This is a dummy StreamExecutionEnvironment, enableCheckpointing method is unsupported.");
}
@Override
public StreamExecutionEnvironment enableCheckpointing(
long interval, org.apache.flink.streaming.api.CheckpointingMode mode) {
throw new UnsupportedOperationException(
"This is a dummy StreamExecutionEnvironment, enableCheckpointing method is unsupported.");
}
@Override
public StreamExecutionEnvironment enableCheckpointing(long interval, CheckpointingMode mode) {
throw new UnsupportedOperationException(
"This is a dummy StreamExecutionEnvironment, enableCheckpointing method is unsupported.");
}
@Override
public long getCheckpointInterval() {
return realExecEnv.getCheckpointInterval();
}
@Override
public org.apache.flink.streaming.api.CheckpointingMode getCheckpointingMode() {
return realExecEnv.getCheckpointingMode();
}
@Override
public CheckpointingMode getCheckpointingConsistencyMode() {
return realExecEnv.getCheckpointingConsistencyMode();
}
@Override
public JobExecutionResult execute() throws Exception {
throw new UnsupportedOperationException(
"This is a dummy StreamExecutionEnvironment, execute method is unsupported.");
}
@Override
public JobExecutionResult execute(String jobName) throws Exception {
throw new UnsupportedOperationException(
"This is a dummy StreamExecutionEnvironment, execute method is unsupported.");
}
@Override
public JobExecutionResult execute(StreamGraph streamGraph) throws Exception {
throw new UnsupportedOperationException(
"This is a dummy StreamExecutionEnvironment, execute method is unsupported.");
}
@Override
public void registerCachedFile(String filePath, String name) {
throw new UnsupportedOperationException(
"This is a dummy StreamExecutionEnvironment, registerCachedFile method is unsupported.");
}
@Override
public void registerCachedFile(String filePath, String name, boolean executable) {
throw new UnsupportedOperationException(
"This is a dummy StreamExecutionEnvironment, registerCachedFile method is unsupported.");
}
@Override
public StreamGraph getStreamGraph() {
throw new UnsupportedOperationException(
"This is a dummy StreamExecutionEnvironment, getStreamGraph method is unsupported.");
}
@Override
public String getExecutionPlan() {
throw new UnsupportedOperationException(
"This is a dummy StreamExecutionEnvironment, getExecutionPlan method is unsupported.");
}
}
| DummyStreamExecutionEnvironment |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ProcessTableFunction.java | {
"start": 11591,
"end": 11988
} | class ____ {
* public String first;
* }
*
* public void eval(@StateHint SeenState memory, @ArgumentHint(SET_SEMANTIC_TABLE) Row input) {
* if (memory.first == null) {
* memory.first = input.toString();
* } else {
* collect("Event 1: " + memory.first + " and Event 2: " + input.toString());
* }
* }
* }
*
* // Function that uses Row for state
* | SeenState |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng3813PluginClassPathOrderingTest.java | {
"start": 2831,
"end": 2965
} | class ____ using preOrder is:
// dep-a, dep-aa, dep-ac, dep-ab, dep-ad, dep-c, dep-b, dep-d
// The correct/expected | path |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/i18n/AbstractLocaleContextResolver.java | {
"start": 1170,
"end": 1742
} | class ____ extends AbstractLocaleResolver implements LocaleContextResolver {
private @Nullable TimeZone defaultTimeZone;
/**
* Set a default {@link TimeZone} that this resolver will return if no other
* time zone is found.
*/
public void setDefaultTimeZone(@Nullable TimeZone defaultTimeZone) {
this.defaultTimeZone = defaultTimeZone;
}
/**
* Get the default {@link TimeZone} that this resolver is supposed to fall
* back to, if any.
*/
public @Nullable TimeZone getDefaultTimeZone() {
return this.defaultTimeZone;
}
}
| AbstractLocaleContextResolver |
java | apache__camel | components/camel-kafka/src/main/java/org/apache/camel/component/kafka/consumer/AbstractCommitManager.java | {
"start": 1397,
"end": 5037
} | class ____ implements CommitManager {
public static final long START_OFFSET = -1;
public static final long NON_PARTITION = -1;
private static final Logger LOG = LoggerFactory.getLogger(AbstractCommitManager.class);
protected final KafkaConsumer kafkaConsumer;
protected final String threadId;
protected final String printableTopic;
protected final KafkaConfiguration configuration;
private final Consumer<?, ?> consumer;
protected AbstractCommitManager(Consumer<?, ?> consumer, KafkaConsumer kafkaConsumer, String threadId,
String printableTopic) {
this.consumer = consumer;
this.kafkaConsumer = kafkaConsumer;
this.threadId = threadId;
this.printableTopic = printableTopic;
this.configuration = kafkaConsumer.getEndpoint().getConfiguration();
}
protected KafkaManualCommit getManualCommit(
Exchange exchange, TopicPartition partition, ConsumerRecord<Object, Object> record,
KafkaManualCommitFactory manualCommitFactory) {
StateRepository<String, String> offsetRepository = configuration.getOffsetRepository();
long commitTimeoutMs = configuration.getCommitTimeoutMs();
KafkaManualCommitFactory.CamelExchangePayload camelExchangePayload = new KafkaManualCommitFactory.CamelExchangePayload(
exchange, consumer, threadId, offsetRepository);
KafkaManualCommitFactory.KafkaRecordPayload kafkaRecordPayload = new KafkaManualCommitFactory.KafkaRecordPayload(
partition,
record.offset(), commitTimeoutMs);
return manualCommitFactory.newInstance(camelExchangePayload, kafkaRecordPayload, this);
}
@Override
public KafkaManualCommit getManualCommit(
Exchange exchange, TopicPartition partition, ConsumerRecord<Object, Object> consumerRecord) {
KafkaManualCommitFactory manualCommitFactory = kafkaConsumer.getEndpoint().getKafkaManualCommitFactory();
if (manualCommitFactory == null) {
manualCommitFactory = new DefaultKafkaManualCommitFactory();
}
return getManualCommit(exchange, partition, consumerRecord, manualCommitFactory);
}
@Override
public void forceCommit(TopicPartition partition, long partitionLastOffset) {
if (LOG.isDebugEnabled()) {
LOG.debug("Forcing commitSync {} [topic: {} partition: {} offset: {}]", threadId, partition.topic(),
partition.partition(), partitionLastOffset);
}
long timeout = configuration.getCommitTimeoutMs();
consumer.commitSync(
Collections.singletonMap(partition, new OffsetAndMetadata(partitionLastOffset + 1)),
Duration.ofMillis(timeout));
}
protected void saveStateToOffsetRepository(
TopicPartition partition, long partitionLastOffset,
StateRepository<String, String> offsetRepository) {
if (LOG.isDebugEnabled()) {
LOG.debug("Saving offset repository state {} [topic: {} partition: {} offset: {}]", threadId, partition.topic(),
partition.partition(),
partitionLastOffset);
}
offsetRepository.setState(serializeOffsetKey(partition), serializeOffsetValue(partitionLastOffset));
}
protected static String serializeOffsetKey(TopicPartition topicPartition) {
return topicPartition.topic() + '/' + topicPartition.partition();
}
protected static String serializeOffsetValue(long offset) {
return String.valueOf(offset);
}
}
| AbstractCommitManager |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/web/client/RestTemplateObservationTests.java | {
"start": 2298,
"end": 8769
} | class ____ {
private final TestObservationRegistry observationRegistry = TestObservationRegistry.create();
private final ClientHttpRequestFactory requestFactory = mock();
private final ClientHttpRequest request = mock();
private final ClientHttpResponse response = mock();
private final ResponseErrorHandler errorHandler = mock();
private final HttpMessageConverter<String> converter = mock();
private final RestTemplate template = new RestTemplate(List.of(converter));
@BeforeEach
void setupEach() {
this.template.setRequestFactory(this.requestFactory);
this.template.setErrorHandler(this.errorHandler);
this.template.setObservationRegistry(this.observationRegistry);
this.observationRegistry.observationConfig().observationHandler(new ContextAssertionObservationHandler());
}
@Test
void executeVarArgsAddsUriTemplateAsKeyValue() throws Exception {
mockSentRequest(GET, "https://example.com/hotels/42/bookings/21");
mockResponseStatus(HttpStatus.OK);
template.execute("https://example.com/hotels/{hotel}/bookings/{booking}", GET,
null, null, "42", "21");
assertThatHttpObservation().hasLowCardinalityKeyValue("uri", "/hotels/{hotel}/bookings/{booking}");
}
@Test
void executeArgsMapAddsUriTemplateAsKeyValue() throws Exception {
mockSentRequest(GET, "https://example.com/hotels/42/bookings/21");
mockResponseStatus(HttpStatus.OK);
Map<String, String> vars = Map.of("hotel", "42", "booking", "21");
template.execute("https://example.com/hotels/{hotel}/bookings/{booking}", GET,
null, null, vars);
assertThatHttpObservation().hasLowCardinalityKeyValue("uri", "/hotels/{hotel}/bookings/{booking}");
}
@Test
void executeAddsSuccessAsOutcome() throws Exception {
mockSentRequest(GET, "https://example.org");
mockResponseStatus(HttpStatus.OK);
mockResponseBody("Hello World", MediaType.TEXT_PLAIN);
template.execute("https://example.org", GET, null, null);
assertThatHttpObservation().hasLowCardinalityKeyValue("outcome", "SUCCESS");
}
@Test
void executeAddsServerErrorAsOutcome() throws Exception {
String url = "https://example.org";
mockSentRequest(GET, url);
mockResponseStatus(HttpStatus.INTERNAL_SERVER_ERROR);
willThrow(new HttpServerErrorException(HttpStatus.INTERNAL_SERVER_ERROR))
.given(errorHandler).handleError(URI.create(url), GET, response);
assertThatExceptionOfType(HttpServerErrorException.class).isThrownBy(() ->
template.execute(url, GET, null, null));
assertThatHttpObservation().hasLowCardinalityKeyValue("outcome", "SERVER_ERROR");
}
@Test
void executeAddsExceptionAsKeyValue() throws Exception {
mockSentRequest(GET, "https://example.org/resource");
mockResponseStatus(HttpStatus.OK);
given(converter.canRead(String.class, null)).willReturn(true);
MediaType supportedMediaType = new MediaType("test", "supported");
given(converter.getSupportedMediaTypes()).willReturn(List.of(supportedMediaType));
MediaType other = new MediaType("test", "other");
mockResponseBody("Test Body", other);
given(converter.canRead(String.class, other)).willReturn(false);
assertThatExceptionOfType(RestClientException.class).isThrownBy(() ->
template.getForObject("https://example.org/{p}", String.class, "resource"));
assertThatHttpObservation().hasLowCardinalityKeyValue("exception", "UnknownContentTypeException");
}
@Test
void executeWithIoExceptionAddsUnknownOutcome() throws Exception {
String url = "https://example.org/resource";
mockSentRequest(GET, url);
given(request.execute()).willThrow(new IOException("Socket failure"));
assertThatExceptionOfType(ResourceAccessException.class).isThrownBy(() ->
template.getForObject(url, String.class));
assertThatHttpObservation().hasLowCardinalityKeyValue("outcome", "UNKNOWN");
}
@Test // gh-32060
void executeShouldRecordErrorsThrownByErrorHandler() throws Exception {
mockSentRequest(GET, "https://example.org");
mockResponseStatus(HttpStatus.OK);
mockResponseBody("Hello World", MediaType.TEXT_PLAIN);
given(errorHandler.hasError(any())).willThrow(new IllegalStateException("error handler"));
assertThatIllegalStateException().isThrownBy(() ->
template.execute("https://example.org", GET, null, null));
assertThatHttpObservation().hasLowCardinalityKeyValue("exception", "IllegalStateException");
}
@Test // gh-32060
void executeShouldCreateObservationScope() throws Exception {
mockSentRequest(GET, "https://example.org");
mockResponseStatus(HttpStatus.OK);
mockResponseBody("Hello World", MediaType.TEXT_PLAIN);
ObservationErrorHandler observationErrorHandler = new ObservationErrorHandler(observationRegistry);
template.setErrorHandler(observationErrorHandler);
template.execute("https://example.org", GET, null, null);
assertThat(observationErrorHandler.currentObservation).isNotNull();
}
private void mockSentRequest(HttpMethod method, String uri) throws Exception {
mockSentRequest(method, uri, new HttpHeaders());
}
private void mockSentRequest(HttpMethod method, String uri, HttpHeaders requestHeaders) throws Exception {
given(requestFactory.createRequest(URI.create(uri), method)).willReturn(request);
given(request.getHeaders()).willReturn(requestHeaders);
given(request.getMethod()).willReturn(method);
given(request.getURI()).willReturn(URI.create(uri));
}
private void mockResponseStatus(HttpStatus responseStatus) throws Exception {
given(request.execute()).willReturn(response);
given(errorHandler.hasError(response)).willReturn(responseStatus.isError());
given(response.getStatusCode()).willReturn(responseStatus);
given(response.getStatusText()).willReturn(responseStatus.getReasonPhrase());
}
private void mockResponseBody(String expectedBody, MediaType mediaType) throws Exception {
HttpHeaders responseHeaders = new HttpHeaders();
responseHeaders.setContentType(mediaType);
responseHeaders.setContentLength(expectedBody.length());
given(response.getHeaders()).willReturn(responseHeaders);
given(response.getBody()).willReturn(new ByteArrayInputStream(expectedBody.getBytes()));
given(converter.read(eq(String.class), any(HttpInputMessage.class))).willReturn(expectedBody);
}
private TestObservationRegistryAssert.TestObservationRegistryAssertReturningObservationContextAssert assertThatHttpObservation() {
return assertThat(this.observationRegistry).hasObservationWithNameEqualTo("http.client.requests").that();
}
static | RestTemplateObservationTests |
java | micronaut-projects__micronaut-core | json-core/src/main/java/io/micronaut/json/tree/JsonNode.java | {
"start": 1009,
"end": 1214
} | class ____ a json node. Json nodes can be either scalar (string, number, boolean, null) or
* containers (object, array).
*
* @author Jonas Konrad
* @since 3.1
*/
@Experimental
public abstract | representing |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/checkpointing/NotifyCheckpointAbortedITCase.java | {
"start": 14421,
"end": 15344
} | class ____ extends StreamSink<Integer> {
private static final long serialVersionUID = 1L;
private static final OneShotLatch notifiedAbortedLatch = new OneShotLatch();
private static final AtomicInteger notifiedAbortedTimes = new AtomicInteger(0);
public DeclineSink() {
super(
new SinkFunction<Integer>() {
private static final long serialVersionUID = 1L;
});
}
@Override
public void notifyCheckpointAborted(long checkpointId) {
notifiedAbortedTimes.incrementAndGet();
notifiedAbortedLatch.trigger();
}
static void reset() {
notifiedAbortedLatch.reset();
notifiedAbortedTimes.set(0);
}
}
/** The snapshot strategy to create failing runnable future at the checkpoint to decline. */
private static | DeclineSink |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/RedshiftData2EndpointBuilderFactory.java | {
"start": 1451,
"end": 1591
} | interface ____ {
/**
* Builder for endpoint for the AWS RedshiftData component.
*/
public | RedshiftData2EndpointBuilderFactory |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/codec/vectors/es93/ES93HnswScalarQuantizedBFloat16VectorsFormatTests.java | {
"start": 1371,
"end": 3408
} | class ____ extends BaseHnswBFloat16VectorsFormatTestCase {
@Override
protected KnnVectorsFormat createFormat() {
return new ES93HnswScalarQuantizedVectorsFormat(
DEFAULT_MAX_CONN,
DEFAULT_BEAM_WIDTH,
DenseVectorFieldMapper.ElementType.BFLOAT16,
null,
7,
false,
random().nextBoolean()
);
}
@Override
protected KnnVectorsFormat createFormat(int maxConn, int beamWidth) {
return new ES93HnswScalarQuantizedVectorsFormat(
maxConn,
beamWidth,
DenseVectorFieldMapper.ElementType.BFLOAT16,
null,
7,
false,
random().nextBoolean()
);
}
@Override
protected KnnVectorsFormat createFormat(int maxConn, int beamWidth, int numMergeWorkers, ExecutorService service) {
return new ES93HnswScalarQuantizedVectorsFormat(
maxConn,
beamWidth,
DenseVectorFieldMapper.ElementType.BFLOAT16,
null,
7,
false,
random().nextBoolean(),
numMergeWorkers,
service
);
}
@Override
public void testSingleVectorCase() throws Exception {
throw new AssumptionViolatedException("Scalar quantization changes the score significantly for MAXIMUM_INNER_PRODUCT");
}
public void testSimpleOffHeapSize() throws IOException {
float[] vector = randomVector(random().nextInt(12, 500));
try (Directory dir = newDirectory()) {
testSimpleOffHeapSize(
dir,
newIndexWriterConfig(),
vector,
allOf(
aMapWithSize(3),
hasEntry("vec", (long) vector.length * BFloat16.BYTES),
hasEntry("vex", 1L),
hasEntry(equalTo("veq"), greaterThan(0L))
)
);
}
}
}
| ES93HnswScalarQuantizedBFloat16VectorsFormatTests |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/query/SimpleJpaQueryUnitTests.java | {
"start": 15035,
"end": 17066
} | interface ____ extends Repository<User, Long> {
@Query(value = "SELECT u FROM User u WHERE u.lastname = ?1", nativeQuery = true)
List<User> findNativeByLastname(String lastname);
@NativeQuery(value = "SELECT u FROM User u WHERE u.lastname = ?1")
List<User> findByLastnameNativeAnnotation(String lastname);
@Query(value = "SELECT u FROM User u WHERE u.lastname = ?1", nativeQuery = true)
List<User> findNativeByLastname(String lastname, Pageable pageable);
@Query(value = "SELECT u FROM User u WHERE u.lastname = ?", nativeQuery = true)
List<User> legalUseOfJdbcStyleParameters(String lastname);
@Query(value = "SELECT u FROM User u WHERE u.lastname = ?")
List<User> illegalUseOfJdbcStyleParameters(String lastname);
@Query(USER_QUERY)
List<User> findByAnnotatedQuery();
@Query(USER_QUERY)
Page<User> pageByAnnotatedQuery(Pageable pageable);
@Query("select u from User u")
Collection<UserProjection> projectWithExplicitQuery();
@Query("""
SELECT cd FROM CampaignDeal cd
LEFT JOIN FETCH cd.dealLibrary d
LEFT JOIN FETCH d.publisher p
WHERE cd.campaignId = :campaignId
""")
Collection<UnrelatedType> selectWithJoin();
@Query("select u.unknown from User u")
Collection<UnrelatedType> projectWithUnknownPaths();
@Query("select r.name from User u LEFT JOIN FETCH u.roles r")
Collection<UnrelatedType> projectWithJoinPaths();
@Query("select u.country from User u")
Collection<Country> justCountries();
@Query(value = "select u from #{#entityName} u", countQuery = "select count(u.id) from #{#entityName} u")
List<User> findAllWithExpressionInCountQuery(Pageable pageable);
@Query(value = "select u from User u",
countQuery = "select count(u.id) from #{#entityName} u where u.name = :#{#arg0}")
List<User> findAllWithBindingsOnlyInCountQuery(String arg0, Pageable pageable);
// Typo in named parameter
@Query("select u from User u where u.firstname = :foo")
List<User> findByAnnotatedQuery(@Param("param") String param);
}
| SampleRepository |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-http12/src/main/java/org/apache/dubbo/remoting/http12/message/codec/YamlCodecFactory.java | {
"start": 1332,
"end": 1944
} | class ____ implements HttpMessageEncoderFactory, HttpMessageDecoderFactory {
@Override
public HttpMessageCodec createCodec(URL url, FrameworkModel frameworkModel, String mediaType) {
return frameworkModel == FrameworkModel.defaultModel() ? YamlCodec.INSTANCE : new YamlCodec(frameworkModel);
}
@Override
public MediaType mediaType() {
return MediaType.APPLICATION_YAML;
}
@Override
public boolean supports(String mediaType) {
return mediaType.startsWith(mediaType().getName()) || mediaType.startsWith(MediaType.TEXT_YAML.getName());
}
}
| YamlCodecFactory |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/heap/HeapSavepointRestoreOperation.java | {
"start": 2680,
"end": 12056
} | class ____<K> implements RestoreOperation<Void> {
private final int keyGroupPrefixBytes;
private final StateSerializerProvider<K> keySerializerProvider;
private final Map<String, StateTable<K, ?, ?>> registeredKVStates;
private final Map<String, HeapPriorityQueueSnapshotRestoreWrapper<?>> registeredPQStates;
private final FullSnapshotRestoreOperation<K> savepointRestoreOperation;
private final HeapMetaInfoRestoreOperation<K> heapMetaInfoRestoreOperation;
/*
Shared wrappers for deserializing an entry in the state handle. An optimization
to reduce the number of objects created.
*/
private final DataInputDeserializer entryKeyDeserializer;
private final DataInputDeserializer entryValueDeserializer;
private final ListDelimitedSerializer listDelimitedSerializer;
HeapSavepointRestoreOperation(
@Nonnull Collection<KeyedStateHandle> restoreStateHandles,
StateSerializerProvider<K> keySerializerProvider,
ClassLoader userCodeClassLoader,
Map<String, StateTable<K, ?, ?>> registeredKVStates,
Map<String, HeapPriorityQueueSnapshotRestoreWrapper<?>> registeredPQStates,
HeapPriorityQueueSetFactory priorityQueueSetFactory,
@Nonnull KeyGroupRange keyGroupRange,
int numberOfKeyGroups,
StateTableFactory<K> stateTableFactory,
InternalKeyContext<K> keyContext) {
this.keySerializerProvider = keySerializerProvider;
this.registeredKVStates = registeredKVStates;
this.registeredPQStates = registeredPQStates;
this.savepointRestoreOperation =
new FullSnapshotRestoreOperation<>(
keyGroupRange,
userCodeClassLoader,
restoreStateHandles,
keySerializerProvider);
this.keyGroupPrefixBytes = computeRequiredBytesInKeyGroupPrefix(numberOfKeyGroups);
this.heapMetaInfoRestoreOperation =
new HeapMetaInfoRestoreOperation<>(
keySerializerProvider,
priorityQueueSetFactory,
keyGroupRange,
numberOfKeyGroups,
stateTableFactory,
keyContext);
this.entryKeyDeserializer = new DataInputDeserializer();
this.entryValueDeserializer = new DataInputDeserializer();
this.listDelimitedSerializer = new ListDelimitedSerializer();
}
@Override
public Void restore() throws Exception {
registeredKVStates.clear();
registeredPQStates.clear();
try (ThrowingIterator<SavepointRestoreResult> restore =
this.savepointRestoreOperation.restore()) {
while (restore.hasNext()) {
SavepointRestoreResult restoreResult = restore.next();
List<StateMetaInfoSnapshot> restoredMetaInfos =
restoreResult.getStateMetaInfoSnapshots();
final Map<Integer, StateMetaInfoSnapshot> kvStatesById =
this.heapMetaInfoRestoreOperation.createOrCheckStateForMetaInfo(
restoredMetaInfos, registeredKVStates, registeredPQStates);
try (ThrowingIterator<KeyGroup> keyGroups = restoreResult.getRestoredKeyGroups()) {
while (keyGroups.hasNext()) {
readKeyGroupStateData(
keyGroups.next(),
keySerializerProvider.previousSchemaSerializer(),
kvStatesById);
}
}
}
}
return null;
}
private void readKeyGroupStateData(
KeyGroup keyGroup,
TypeSerializer<K> keySerializer,
Map<Integer, StateMetaInfoSnapshot> kvStatesById)
throws Exception {
try (ThrowingIterator<KeyGroupEntry> entries = keyGroup.getKeyGroupEntries()) {
while (entries.hasNext()) {
KeyGroupEntry groupEntry = entries.next();
StateMetaInfoSnapshot infoSnapshot = kvStatesById.get(groupEntry.getKvStateId());
switch (infoSnapshot.getBackendStateType()) {
case KEY_VALUE:
readKVStateData(keySerializer, groupEntry, infoSnapshot);
break;
case PRIORITY_QUEUE:
readPriorityQueue(groupEntry, infoSnapshot);
break;
case OPERATOR:
case BROADCAST:
throw new IllegalStateException(
"Expected only keyed state. Received: "
+ infoSnapshot.getBackendStateType());
}
}
}
}
@SuppressWarnings("unchecked")
private void readPriorityQueue(KeyGroupEntry groupEntry, StateMetaInfoSnapshot infoSnapshot)
throws IOException {
entryKeyDeserializer.setBuffer(groupEntry.getKey());
entryKeyDeserializer.skipBytesToRead(keyGroupPrefixBytes);
HeapPriorityQueueSnapshotRestoreWrapper<HeapPriorityQueueElement>
priorityQueueSnapshotRestoreWrapper =
(HeapPriorityQueueSnapshotRestoreWrapper<HeapPriorityQueueElement>)
registeredPQStates.get(infoSnapshot.getName());
HeapPriorityQueueElement timer =
priorityQueueSnapshotRestoreWrapper
.getMetaInfo()
.getElementSerializer()
.deserialize(entryKeyDeserializer);
HeapPriorityQueueSet<HeapPriorityQueueElement> priorityQueue =
priorityQueueSnapshotRestoreWrapper.getPriorityQueue();
priorityQueue.add(timer);
}
@SuppressWarnings("unchecked")
private void readKVStateData(
TypeSerializer<K> keySerializer,
KeyGroupEntry groupEntry,
StateMetaInfoSnapshot infoSnapshot)
throws IOException {
StateTable<K, Object, Object> stateTable =
(StateTable<K, Object, Object>) registeredKVStates.get(infoSnapshot.getName());
RegisteredKeyValueStateBackendMetaInfo<?, ?> metaInfo = stateTable.getMetaInfo();
TypeSerializer<?> namespaceSerializer = metaInfo.getPreviousNamespaceSerializer();
TypeSerializer<?> stateSerializer = metaInfo.getPreviousStateSerializer();
boolean isAmbigousKey =
keySerializer.getLength() < 0 && namespaceSerializer.getLength() < 0;
entryKeyDeserializer.setBuffer(groupEntry.getKey());
entryValueDeserializer.setBuffer(groupEntry.getValue());
int keyGroup = readKeyGroup(keyGroupPrefixBytes, entryKeyDeserializer);
K key = readKey(keySerializer, entryKeyDeserializer, isAmbigousKey);
Object namespace = readNamespace(namespaceSerializer, entryKeyDeserializer, isAmbigousKey);
switch (metaInfo.getStateType()) {
case LIST:
stateTable.put(
key,
keyGroup,
namespace,
listDelimitedSerializer.deserializeList(
groupEntry.getValue(),
((ListSerializer<?>) stateSerializer).getElementSerializer()));
break;
case VALUE:
case REDUCING:
case FOLDING:
case AGGREGATING:
stateTable.put(
key,
keyGroup,
namespace,
stateSerializer.deserialize(entryValueDeserializer));
break;
case MAP:
deserializeMapStateEntry(
(StateTable<K, Object, Map<Object, Object>>)
(StateTable<K, ?, ?>) stateTable,
keyGroup,
key,
namespace,
(MapSerializer<Object, Object>) stateSerializer);
break;
default:
throw new IllegalStateException("Unknown state type: " + metaInfo.getStateType());
}
}
private void deserializeMapStateEntry(
StateTable<K, Object, Map<Object, Object>> stateTable,
int keyGroup,
K key,
Object namespace,
MapSerializer<Object, Object> stateSerializer)
throws IOException {
Object mapEntryKey = stateSerializer.getKeySerializer().deserialize(entryKeyDeserializer);
boolean isNull = entryValueDeserializer.readBoolean();
final Object mapEntryValue;
if (isNull) {
mapEntryValue = null;
} else {
mapEntryValue =
stateSerializer.getValueSerializer().deserialize(entryValueDeserializer);
}
Map<Object, Object> userMap = stateTable.get(key, namespace);
if (userMap == null) {
userMap = new HashMap<>();
stateTable.put(key, keyGroup, namespace, userMap);
}
userMap.put(mapEntryKey, mapEntryValue);
}
}
| HeapSavepointRestoreOperation |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/convert/Property.java | {
"start": 1847,
"end": 7715
} | class ____ {
private static final Map<Property, Annotation[]> annotationCache = new ConcurrentReferenceHashMap<>();
private final Class<?> objectType;
private final @Nullable Method readMethod;
private final @Nullable Method writeMethod;
private final String name;
private final MethodParameter methodParameter;
private Annotation @Nullable [] annotations;
public Property(Class<?> objectType, @Nullable Method readMethod, @Nullable Method writeMethod) {
this(objectType, readMethod, writeMethod, null);
}
public Property(
Class<?> objectType, @Nullable Method readMethod, @Nullable Method writeMethod, @Nullable String name) {
this.objectType = objectType;
this.readMethod = readMethod;
this.writeMethod = writeMethod;
this.methodParameter = resolveMethodParameter();
this.name = (name != null ? name : resolveName());
}
/**
* The object declaring this property, either directly or in a superclass the object extends.
*/
public Class<?> getObjectType() {
return this.objectType;
}
/**
* The name of the property: for example, 'foo'.
*/
public String getName() {
return this.name;
}
/**
* The property type: for example, {@code java.lang.String}.
*/
public Class<?> getType() {
return this.methodParameter.getParameterType();
}
/**
* The property getter method: for example, {@code getFoo()}.
*/
public @Nullable Method getReadMethod() {
return this.readMethod;
}
/**
* The property setter method: for example, {@code setFoo(String)}.
*/
public @Nullable Method getWriteMethod() {
return this.writeMethod;
}
// Package private
MethodParameter getMethodParameter() {
return this.methodParameter;
}
Annotation[] getAnnotations() {
if (this.annotations == null) {
this.annotations = resolveAnnotations();
}
return this.annotations;
}
// Internal helpers
private String resolveName() {
if (this.readMethod != null) {
int index = this.readMethod.getName().indexOf("get");
if (index != -1) {
index += 3;
}
else {
index = this.readMethod.getName().indexOf("is");
if (index != -1) {
index += 2;
}
else {
// Record-style plain accessor method, for example, name()
index = 0;
}
}
return StringUtils.uncapitalize(this.readMethod.getName().substring(index));
}
else if (this.writeMethod != null) {
int index = this.writeMethod.getName().indexOf("set");
if (index == -1) {
throw new IllegalArgumentException("Not a setter method");
}
index += 3;
return StringUtils.uncapitalize(this.writeMethod.getName().substring(index));
}
else {
throw new IllegalStateException("Property is neither readable nor writeable");
}
}
private MethodParameter resolveMethodParameter() {
MethodParameter read = resolveReadMethodParameter();
MethodParameter write = resolveWriteMethodParameter();
if (write == null) {
if (read == null) {
throw new IllegalStateException("Property is neither readable nor writeable");
}
return read;
}
if (read != null) {
Class<?> readType = read.getParameterType();
Class<?> writeType = write.getParameterType();
if (!writeType.equals(readType) && writeType.isAssignableFrom(readType)) {
return read;
}
}
return write;
}
private @Nullable MethodParameter resolveReadMethodParameter() {
if (getReadMethod() == null) {
return null;
}
return new MethodParameter(getReadMethod(), -1).withContainingClass(getObjectType());
}
private @Nullable MethodParameter resolveWriteMethodParameter() {
if (getWriteMethod() == null) {
return null;
}
return new MethodParameter(getWriteMethod(), 0).withContainingClass(getObjectType());
}
private Annotation[] resolveAnnotations() {
Annotation[] annotations = annotationCache.get(this);
if (annotations == null) {
Map<Class<? extends Annotation>, Annotation> annotationMap = new LinkedHashMap<>();
addAnnotationsToMap(annotationMap, getReadMethod());
addAnnotationsToMap(annotationMap, getWriteMethod());
addAnnotationsToMap(annotationMap, getField());
annotations = annotationMap.values().toArray(new Annotation[0]);
annotationCache.put(this, annotations);
}
return annotations;
}
private void addAnnotationsToMap(
Map<Class<? extends Annotation>, Annotation> annotationMap, @Nullable AnnotatedElement object) {
if (object != null) {
for (Annotation annotation : object.getAnnotations()) {
annotationMap.put(annotation.annotationType(), annotation);
}
}
}
private @Nullable Field getField() {
String name = getName();
if (!StringUtils.hasLength(name)) {
return null;
}
Field field = null;
Class<?> declaringClass = declaringClass();
if (declaringClass != null) {
field = ReflectionUtils.findField(declaringClass, name);
if (field == null) {
// Same lenient fallback checking as in CachedIntrospectionResults...
field = ReflectionUtils.findField(declaringClass, StringUtils.uncapitalize(name));
if (field == null) {
field = ReflectionUtils.findField(declaringClass, StringUtils.capitalize(name));
}
}
}
return field;
}
private @Nullable Class<?> declaringClass() {
if (getReadMethod() != null) {
return getReadMethod().getDeclaringClass();
}
else if (getWriteMethod() != null) {
return getWriteMethod().getDeclaringClass();
}
else {
return null;
}
}
@Override
public boolean equals(@Nullable Object other) {
return (this == other || (other instanceof Property that &&
ObjectUtils.nullSafeEquals(this.objectType, that.objectType) &&
ObjectUtils.nullSafeEquals(this.name, that.name) &&
ObjectUtils.nullSafeEquals(this.readMethod, that.readMethod) &&
ObjectUtils.nullSafeEquals(this.writeMethod, that.writeMethod)));
}
@Override
public int hashCode() {
return Objects.hash(this.objectType, this.name);
}
}
| Property |
java | apache__kafka | connect/runtime/src/test/java/org/apache/kafka/connect/integration/ConnectorHandle.java | {
"start": 1357,
"end": 15325
} | class ____ {
private static final Logger log = LoggerFactory.getLogger(ConnectorHandle.class);
private final String connectorName;
private final Map<String, TaskHandle> taskHandles = new ConcurrentHashMap<>();
private final StartAndStopCounter startAndStopCounter = new StartAndStopCounter();
private CountDownLatch recordsRemainingLatch;
private CountDownLatch recordsToCommitLatch;
private int expectedRecords = -1;
private int expectedCommits = -1;
public ConnectorHandle(String connectorName) {
this.connectorName = connectorName;
}
/**
* Get or create a task handle for a given task id. The task need not be created when this method is called. If the
* handle is called before the task is created, the task will bind to the handle once it starts (or restarts).
*
* @param taskId the task id
* @return a non-null {@link TaskHandle}
*/
public TaskHandle taskHandle(String taskId) {
return taskHandle(taskId, null);
}
/**
* Get or create a task handle for a given task id. The task need not be created when this method is called. If the
* handle is called before the task is created, the task will bind to the handle once it starts (or restarts).
*
* @param taskId the task id
* @param consumer A callback invoked when a sink task processes a record.
* @return a non-null {@link TaskHandle}
*/
public TaskHandle taskHandle(String taskId, Consumer<SinkRecord> consumer) {
return taskHandles.computeIfAbsent(taskId, k -> new TaskHandle(this, taskId, consumer));
}
/**
* Gets the start and stop counter corresponding to this handle.
*
* @return the start and stop counter
*/
public StartAndStopCounter startAndStopCounter() {
return startAndStopCounter;
}
/**
* Get the connector's name corresponding to this handle.
*
* @return the connector's name
*/
public String name() {
return connectorName;
}
/**
* Get the list of tasks handles monitored by this connector handle.
*
* @return the task handle list
*/
public Collection<TaskHandle> tasks() {
return taskHandles.values();
}
/**
* Delete all task handles for this connector.
*/
public void clearTasks() {
log.info("Clearing {} existing task handles for connector {}", taskHandles.size(), connectorName);
taskHandles.clear();
}
/**
* Set the number of expected records for this connector.
*
* @param expected number of records
*/
public void expectedRecords(int expected) {
expectedRecords = expected;
recordsRemainingLatch = new CountDownLatch(expected);
}
/**
* Set the number of expected commits performed by this connector.
*
* @param expected number of commits
*/
public void expectedCommits(int expected) {
expectedCommits = expected;
recordsToCommitLatch = new CountDownLatch(expected);
}
/**
* Record a message arrival at the connector.
*/
public void record() {
if (recordsRemainingLatch != null) {
recordsRemainingLatch.countDown();
}
}
/**
* Record arrival of a batch of messages at the connector.
*
* @param batchSize the number of messages
*/
public void record(int batchSize) {
if (recordsRemainingLatch != null) {
IntStream.range(0, batchSize).forEach(i -> recordsRemainingLatch.countDown());
}
}
/**
* Record a message commit from the connector.
*/
public void commit() {
if (recordsToCommitLatch != null) {
recordsToCommitLatch.countDown();
}
}
/**
* Record commit on a batch of messages from the connector.
*
* @param batchSize the number of messages
*/
public void commit(int batchSize) {
if (recordsToCommitLatch != null) {
IntStream.range(0, batchSize).forEach(i -> recordsToCommitLatch.countDown());
}
}
/**
* Wait for this connector to meet the expected number of records as defined by {@code
* expectedRecords}.
*
* @param timeout max duration to wait for records
* @throws InterruptedException if another threads interrupts this one while waiting for records
*/
public void awaitRecords(long timeout) throws InterruptedException {
if (recordsRemainingLatch == null || expectedRecords < 0) {
throw new IllegalStateException("expectedRecords() was not set for this connector?");
}
if (!recordsRemainingLatch.await(timeout, TimeUnit.MILLISECONDS)) {
String msg = String.format(
"Insufficient records seen by connector %s in %d millis. Records expected=%d, actual=%d",
connectorName,
timeout,
expectedRecords,
expectedRecords - recordsRemainingLatch.getCount());
throw new DataException(msg);
}
}
/**
* Wait for this connector to meet the expected number of commits as defined by {@code
* expectedCommits}.
*
* @param timeout duration to wait for commits
* @throws InterruptedException if another threads interrupts this one while waiting for commits
*/
public void awaitCommits(long timeout) throws InterruptedException {
if (recordsToCommitLatch == null || expectedCommits < 0) {
throw new IllegalStateException("expectedCommits() was not set for this connector?");
}
if (!recordsToCommitLatch.await(timeout, TimeUnit.MILLISECONDS)) {
String msg = String.format(
"Insufficient records committed by connector %s in %d millis. Records expected=%d, actual=%d",
connectorName,
timeout,
expectedCommits,
expectedCommits - recordsToCommitLatch.getCount());
throw new DataException(msg);
}
}
/**
* Record that this connector has been started. This should be called by the connector under
* test.
*
* @see #expectedStarts(int)
*/
public void recordConnectorStart() {
startAndStopCounter.recordStart();
}
/**
* Record that this connector has been stopped. This should be called by the connector under
* test.
*
* @see #expectedStarts(int)
*/
public void recordConnectorStop() {
startAndStopCounter.recordStop();
}
/**
 * Obtain a {@link StartAndStopLatch} that can be used to wait until the connector using this handle
 * and all tasks using {@link TaskHandle} have completed the expected number of
 * starts, starting the counts at the time this method is called.
 *
 * <p>A test can call this method, specifying the number of times the connector and tasks
 * will each be stopped and started from that point (typically {@code expectedStarts(1)}).
 * The test should then change the connector or otherwise cause the connector to restart one or
 * more times, and then can call {@link StartAndStopLatch#await(long, TimeUnit)} to wait up to a
 * specified duration for the connector and all tasks to be started at least the specified
 * number of times.
 *
 * <p>This method does not track the number of times the connector and tasks are stopped, and
 * only tracks the number of times the connector and tasks are <em>started</em>.
 *
 * <p>Equivalent to calling {@link #expectedStarts(int, boolean)} with
 * {@code includeTasks} set to {@code true}.
 *
 * @param expectedStarts the minimum number of starts that are expected once this method is
 *                       called
 * @return the latch that can be used to wait for the starts to complete; never null
 */
public StartAndStopLatch expectedStarts(int expectedStarts) {
    return expectedStarts(expectedStarts, true);
}
/**
 * Obtain a {@link StartAndStopLatch} that can be used to wait until the connector using this
 * handle (and, optionally, all tasks using {@link TaskHandle}) has been started at least the
 * given number of times, counting from the moment this method is called.
 *
 * <p>A test typically calls {@code expectedStarts(1, ...)}, triggers a restart of the
 * connector, and then calls {@link StartAndStopLatch#await(long, TimeUnit)} to wait for the
 * starts to occur. Only starts are tracked by the returned latch; stops are ignored.
 *
 * @param expectedStarts the minimum number of starts expected after this call
 * @param includeTasks true if the latch should also wait for each task to be started the
 *                     specified minimum number of times
 * @return the latch that can be used to wait for the starts to complete; never null
 */
public StartAndStopLatch expectedStarts(int expectedStarts, boolean includeTasks) {
    final List<StartAndStopLatch> taskLatches;
    if (includeTasks) {
        taskLatches = taskHandles.values().stream()
                .map(task -> task.expectedStarts(expectedStarts))
                .toList();
    } else {
        taskLatches = List.of();
    }
    return startAndStopCounter.expectedStarts(expectedStarts, taskLatches);
}
/**
 * Obtain a {@link StartAndStopLatch} that can be used to wait until the connector using this
 * handle and (optionally) its tasks have each completed an expected number of starts, where
 * the per-task expectation is looked up by task id in {@code expectedTasksStarts}.
 *
 * @param expectedStarts the minimum number of connector starts expected once this method is
 *                       called
 * @param expectedTasksStarts map from task id to the minimum number of starts expected for
 *                            that task; when {@code includeTasks} is true it must contain an
 *                            entry for every registered task
 * @param includeTasks true if the latch should also wait for the tasks to be started the
 *                     specified minimum number of times
 * @return the latch that can be used to wait for the starts to complete; never null
 * @throws NullPointerException if {@code includeTasks} is true and {@code expectedTasksStarts}
 *                              has no entry for one of the registered tasks
 */
public StartAndStopLatch expectedStarts(int expectedStarts, Map<String, Integer> expectedTasksStarts, boolean includeTasks) {
    List<StartAndStopLatch> taskLatches = includeTasks
            ? taskHandles.values().stream()
                    .map(task -> {
                        Integer taskStarts = expectedTasksStarts.get(task.taskId());
                        if (taskStarts == null) {
                            // Fail with a descriptive message rather than an opaque unboxing NPE
                            throw new NullPointerException(
                                    "No expected start count configured for task " + task.taskId());
                        }
                        return task.expectedStarts(taskStarts);
                    })
                    .toList()
            : List.of();
    return startAndStopCounter.expectedStarts(expectedStarts, taskLatches);
}
/**
 * Obtain a {@link StartAndStopLatch} that can be used to wait until the connector using this handle
 * and all tasks using {@link TaskHandle} have completed the minimum number of
 * stops, starting the counts at the time this method is called.
 *
 * <p>A test can call this method, specifying the number of times the connector and tasks
 * will each be stopped from that point (typically {@code expectedStops(1)}).
 * The test should then change the connector or otherwise cause the connector to stop (or
 * restart) one or more times, and then can call
 * {@link StartAndStopLatch#await(long, TimeUnit)} to wait up to a specified duration for the
 * connector and all tasks to be stopped at least the specified number of times.
 *
 * <p>This method does not track the number of times the connector and tasks are started, and
 * only tracks the number of times the connector and tasks are <em>stopped</em>.
 *
 * @param expectedStops the minimum number of stops that are expected once this method is
 *                      called
 * @return the latch that can be used to wait for the stops to complete; never null
 */
public StartAndStopLatch expectedStops(int expectedStops) {
    return expectedStops(expectedStops, true);
}
/**
 * Obtain a {@link StartAndStopLatch} that can be used to wait until the connector using this
 * handle (and, optionally, all tasks using {@link TaskHandle}) has been stopped at least the
 * given number of times, counting from the moment this method is called.
 *
 * <p>A test typically calls {@code expectedStops(1, ...)}, triggers a stop (or restart) of
 * the connector, and then calls {@link StartAndStopLatch#await(long, TimeUnit)} to wait for
 * the stops to occur. Only stops are tracked by the returned latch; starts are ignored.
 *
 * @param expectedStops the minimum number of stops expected after this call
 * @param includeTasks true if the latch should also wait for each task to be stopped the
 *                     specified minimum number of times
 * @return the latch that can be used to wait for the stops to complete; never null
 */
public StartAndStopLatch expectedStops(int expectedStops, boolean includeTasks) {
    final List<StartAndStopLatch> taskLatches;
    if (includeTasks) {
        taskLatches = taskHandles.values().stream()
                .map(task -> task.expectedStops(expectedStops))
                .toList();
    } else {
        taskLatches = List.of();
    }
    return startAndStopCounter.expectedStops(expectedStops, taskLatches);
}
/**
 * Obtain a {@link StartAndStopLatch} that can be used to wait until the connector using this
 * handle and (optionally) its tasks have each completed an expected number of stops, where
 * the per-task expectation is looked up by task id in {@code expectedTasksStops}.
 *
 * @param expectedStops the minimum number of connector stops expected once this method is
 *                      called
 * @param expectedTasksStops map from task id to the minimum number of stops expected for
 *                           that task; when {@code includeTasks} is true it must contain an
 *                           entry for every registered task
 * @param includeTasks true if the latch should also wait for the tasks to be stopped the
 *                     specified minimum number of times
 * @return the latch that can be used to wait for the stops to complete; never null
 * @throws NullPointerException if {@code includeTasks} is true and {@code expectedTasksStops}
 *                              has no entry for one of the registered tasks
 */
public StartAndStopLatch expectedStops(int expectedStops, Map<String, Integer> expectedTasksStops, boolean includeTasks) {
    List<StartAndStopLatch> taskLatches = includeTasks
            ? taskHandles.values().stream()
                    .map(task -> {
                        Integer taskStops = expectedTasksStops.get(task.taskId());
                        if (taskStops == null) {
                            // Fail with a descriptive message rather than an opaque unboxing NPE
                            throw new NullPointerException(
                                    "No expected stop count configured for task " + task.taskId());
                        }
                        return task.expectedStops(taskStops);
                    })
                    .toList()
            : List.of();
    return startAndStopCounter.expectedStops(expectedStops, taskLatches);
}
/**
 * Human-readable representation used in test logs; includes only the connector name.
 */
@Override
public String toString() {
    return String.format("ConnectorHandle{connectorName='%s'}", connectorName);
}
}
| ConnectorHandle |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/graph/GraphQueryResponseItem.java | {
"start": 1301,
"end": 1586
} | interface ____ extends GraphQueryResponseItem {
long id();
List<String> labels();
List<ScalarItem> properties();
ScalarItem get(String property);
@Override
default Kind kind() {
return Kind.NODE;
}
}
| NodeItem |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/common/operators/util/TestRichInputFormat.java | {
"start": 1074,
"end": 2207
} | class ____ extends GenericInputFormat<String> implements NonParallelInput {
private static final long serialVersionUID = 1L;
private static final int NUM = 5;
private static final String[] NAMES = TestIOData.NAMES;
private int count = 0;
private boolean openCalled = false;
private boolean closeCalled = false;
@Override
public boolean reachedEnd() {
return count >= NUM;
}
@Override
public String nextRecord(String reuse) {
count++;
return NAMES[count - 1]
+ getRuntimeContext().getTaskInfo().getIndexOfThisSubtask()
+ getRuntimeContext().getTaskInfo().getNumberOfParallelSubtasks();
}
public void reset() {
count = 0;
openCalled = false;
closeCalled = false;
}
@Override
public void openInputFormat() {
openCalled = true;
}
@Override
public void closeInputFormat() {
closeCalled = true;
}
public boolean hasBeenOpened() {
return openCalled;
}
public boolean hasBeenClosed() {
return closeCalled;
}
}
| TestRichInputFormat |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/Table.java | {
"start": 6695,
"end": 7180
} | class ____ {
public final Object value;
public final Map<String, String> attr;
public Cell(Object value, Cell other) {
this.value = value;
this.attr = other.attr;
}
public Cell(Object value) {
this.value = value;
this.attr = new HashMap<>();
}
public Cell(Object value, Map<String, String> attr) {
this.value = value;
this.attr = attr;
}
}
}
| Cell |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/MethodIntrospector.java | {
"start": 5817,
"end": 5951
} | class ____ in your configuration.",
method.getName(), method.getDeclaringClass().getSimpleName()));
}
}
/**
* A callback | mode |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/http/HttpConfigurationBuilder.java | {
"start": 47143,
"end": 47502
} | class ____ implements FactoryBean<SecurityContextHolderStrategy> {
@Override
public SecurityContextHolderStrategy getObject() throws Exception {
return SecurityContextHolder.getContextHolderStrategy();
}
@Override
public Class<?> getObjectType() {
return SecurityContextHolderStrategy.class;
}
}
static | SecurityContextHolderStrategyFactory |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSInputChecker.java | {
"start": 1777,
"end": 1835
} | class ____ if FSInputChecker works correctly.
*/
public | tests |
java | square__javapoet | src/test/java/com/squareup/javapoet/MethodSpecTest.java | {
"start": 3408,
"end": 3452
} | interface ____ {
}
abstract static | Nullable |
java | FasterXML__jackson-core | src/main/java/tools/jackson/core/filter/FilteringParserDelegate.java | {
"start": 436,
"end": 41174
} | class ____ extends JsonParserDelegate
{
/*
/**********************************************************************
/* Configuration
/**********************************************************************
*/
/**
 * Object consulted to determine which parts of the content the delegate
 * parser reads are to be exposed to the caller, and which filtered out.
 */
protected TokenFilter rootFilter;

/**
 * Flag that determines whether filtering will continue after the first
 * match is indicated or not: if `false`, output is based on just the first
 * full match (returning {@link TokenFilter#INCLUDE_ALL}) and no more
 * checks are made; if `true` then filtering will be applied as necessary
 * until end of content.
 */
protected boolean _allowMultipleMatches;

/**
 * Flag that determines whether path leading up to included content should
 * also be automatically included or not. If `false`, no path inclusion is
 * done and only explicitly included entries are output; if `true` then
 * path from main level down to match is also included as necessary.
 */
protected TokenFilter.Inclusion _inclusion;

/*
/**********************************************************************
/* State
/**********************************************************************
 */

/**
 * Last token retrieved via {@link #nextToken}, if any.
 * Null before the first call to <code>nextToken()</code>,
 * as well as if token has been explicitly cleared
 */
protected JsonToken _currToken;

/**
 * Last cleared token, if any: that is, value that was in
 * effect when {@link #clearCurrentToken} was called.
 */
protected JsonToken _lastClearedToken;

/**
 * During traversal this is the actual "open" parse tree, which sometimes
 * is the same as {@link #_exposedContext}, and at other times is ahead
 * of it. Note that this context is never null.
 */
protected TokenFilterContext _headContext;

/**
 * In cases where {@link #_headContext} is "ahead" of context exposed to
 * caller, this context points to what is currently exposed to caller.
 * When the two are in sync, this context reference will be <code>null</code>.
 */
protected TokenFilterContext _exposedContext;

/**
 * State that applies to the item within container, used where applicable.
 * Specifically used to pass inclusion state between property name and
 * property, and also used for array elements.
 */
protected TokenFilter _itemFilter;

/**
 * Number of tokens for which {@link TokenFilter#INCLUDE_ALL}
 * has been returned; consulted to enforce single-match behavior when
 * {@link #_allowMultipleMatches} is {@code false}.
 */
protected int _matchCount;
/*
/**********************************************************************
/* Construction, initialization
/**********************************************************************
*/
/**
 * Construct a filtering delegate over a blocking (synchronous) parser.
 *
 * @param p Parser to delegate calls to
 * @param f Filter to use
 * @param inclusion Definition of inclusion criteria
 * @param allowMultipleMatches Whether to allow multiple matches
 *
 * @throws IllegalArgumentException if non-blocking (async) parser
 *   (for which {@code parser.canParseAsync()} returns `true`) is
 *   used -- doing so requires use of different constructor:
 *   {@link #FilteringParserDelegate(JsonParser, TokenFilter, TokenFilter.Inclusion, boolean, boolean)}
 */
public FilteringParserDelegate(JsonParser p, TokenFilter f,
        TokenFilter.Inclusion inclusion, boolean allowMultipleMatches)
{
    super(p);
    // Reject async parsers unless the 5-argument constructor is used to opt in.
    _checkAsyncParser(p);
    initializeFilters(f, inclusion, allowMultipleMatches);
}
/**
 * Reject non-blocking (async) parsers unless explicitly permitted: with such a
 * parser all input must be fed before use, which the caller must opt into.
 *
 * @param p parser to inspect
 * @throws IllegalArgumentException if {@code p.canParseAsync()} returns {@code true}
 */
private static void _checkAsyncParser(JsonParser p) throws IllegalArgumentException {
    if (!p.canParseAsync()) {
        return;
    }
    final String msg = p.getClass().getSimpleName()
            + " is an asynchronous parser (canParseAsync() == true), "
            + "which requires explicit permission to be used: "
            + "to allow use, call constructor with `allowNonBlockingParser` passed as `true`";
    throw new IllegalArgumentException(msg);
}
/**
 * Construct an instance, optionally allowing use of a non-blocking (async)
 * parser: if so, the caller must feed all input to the underlying parser
 * before calling {@code nextToken()} on this delegate.
 *
 * @param p Parser to delegate calls to
 * @param f Filter to use
 * @param inclusion Definition of inclusion criteria
 * @param allowMultipleMatches Whether to allow multiple matches
 * @param allowNonBlockingParser If true, allows use of NonBlockingJsonParser: must
 *    also feed all input to parser before calling nextToken on this delegate
 *
 * @throws IllegalArgumentException if NonBlockingJsonParser is used without explicit permission
 */
public FilteringParserDelegate(JsonParser p, TokenFilter f,
        TokenFilter.Inclusion inclusion, boolean allowMultipleMatches,
        boolean allowNonBlockingParser) {
    super(p);
    if (!allowNonBlockingParser) {
        _checkAsyncParser(p);
    }
    initializeFilters(f, inclusion, allowMultipleMatches);
}
/**
 * Initializes filter-related fields: the root filter is also installed as the
 * initial item filter (root-level values are checked against it directly), and
 * a fresh root {@link TokenFilterContext} becomes the head context.
 *
 * @param f Filter to use
 * @param inclusion Definition of inclusion criteria
 * @param allowMultipleMatches Whether to allow multiple matches
 */
private void initializeFilters(TokenFilter f, TokenFilter.Inclusion inclusion, boolean allowMultipleMatches) {
    rootFilter = f;
    _itemFilter = f;
    _headContext = TokenFilterContext.createRootContext(f);
    _inclusion = inclusion;
    _allowMultipleMatches = allowMultipleMatches;
}
/*
/**********************************************************************
/* Extended API
/**********************************************************************
 */

/**
 * Accessor for the root {@link TokenFilter} this delegate was configured with.
 */
public TokenFilter getFilter() { return rootFilter; }

/**
 * Accessor for finding number of matches, where specific token and sub-tree
 * starting (if structured type) are passed.
 *
 * @return Number of matches
 */
public int getMatchCount() {
    return _matchCount;
}
/*
/**********************************************************************
/* Public API, token accessors
/**********************************************************************
 */

// NOTE: all accessors below report the FILTERED state (this delegate's own
// _currToken / filter context), not the underlying delegate parser's state,
// which may be ahead when tokens are buffered or skipped.

@Override public JsonToken currentToken() { return _currToken; }

@Override public final int currentTokenId() {
    final JsonToken t = _currToken;
    return (t == null) ? JsonTokenId.ID_NO_TOKEN : t.id();
}

@Override public boolean hasCurrentToken() { return _currToken != null; }
@Override public boolean hasTokenId(int id) {
    final JsonToken t = _currToken;
    if (t == null) {
        return (JsonTokenId.ID_NO_TOKEN == id);
    }
    return t.id() == id;
}

@Override public final boolean hasToken(JsonToken t) {
    return (_currToken == t);
}

@Override public boolean isExpectedStartArrayToken() { return _currToken == JsonToken.START_ARRAY; }
@Override public boolean isExpectedStartObjectToken() { return _currToken == JsonToken.START_OBJECT; }

@Override public TokenStreamLocation currentLocation() { return delegate.currentLocation(); }

@Override
public TokenStreamContext streamReadContext() {
    return _filterContext();
}

@Override
public String currentName() {
    TokenStreamContext ctxt = _filterContext();
    // For container-start tokens the name belongs to the enclosing (parent) context
    if (_currToken == JsonToken.START_OBJECT || _currToken == JsonToken.START_ARRAY) {
        TokenStreamContext parent = ctxt.getParent();
        return (parent == null) ? null : parent.currentName();
    }
    return ctxt.currentName();
}
/*
/**********************************************************************
/* Public API, token state overrides
/**********************************************************************
 */

@Override
public void clearCurrentToken() {
    // Remember the cleared token so getLastClearedToken() can report it
    if (_currToken != null) {
        _lastClearedToken = _currToken;
        _currToken = null;
    }
}

@Override
public JsonToken getLastClearedToken() { return _lastClearedToken; }

/*
@Override
public void overrideCurrentName(String name) {
    // 14-Apr-2015, tatu: Not sure whether this can be supported, and if so,
    //   what to do with it... Delegation won't work for sure, so let's for
    //   now throw an exception
    throw new UnsupportedOperationException("Cannot currently override name during filtering read");
}
*/
/*
/**********************************************************************
/* Public API, traversal
/**********************************************************************
*/
/**
 * Main filtered iteration: returns the next token that passes the configured
 * {@link TokenFilter}, or {@code null} when input is exhausted (or, in
 * single-match {@code ONLY_INCLUDE_ALL} mode, once the single included scalar
 * has been returned).
 *
 * <p>Processing order: (1) possible short-circuit for the single-match scalar
 * case; (2) replay of any buffered ("exposed") path tokens; (3) reading and
 * filtering a new token from the delegate, offloading to {@link #_nextToken2()}
 * when the token just read could not be returned immediately.
 */
@Override
public JsonToken nextToken() throws JacksonException
{
    // 23-May-2017, tatu: To be honest, code here is rather hairy and I don't like all
    //   conditionals; and it seems odd to return `null` but NOT considering input
    //   as closed... would love a rewrite to simplify/clear up logic here.

    // Check for _allowMultipleMatches - false and at least there is one token - which is _currToken
    // check for no buffered context _exposedContext - null
    // If all the conditions matches then check for scalar / non-scalar property

    if (!_allowMultipleMatches && (_currToken != null) && (_exposedContext == null)) {
        // if scalar, and scalar not present in obj/array and _inclusion == ONLY_INCLUDE_ALL
        // and INCLUDE_ALL matched once, return null
        if (_currToken.isScalarValue() && !_headContext.isStartHandled()
                && _inclusion == Inclusion.ONLY_INCLUDE_ALL
                && (_itemFilter == TokenFilter.INCLUDE_ALL)) {
            return (_currToken = null);
        }
    }
    // Anything buffered? Replay path tokens accumulated while deciding inclusion.
    TokenFilterContext ctxt = _exposedContext;

    if (ctxt != null) {
        while (true) {
            JsonToken t = ctxt.nextTokenToRead();
            if (t != null) {
                _currToken = t;
                return t;
            }
            // all done with buffered stuff?
            if (ctxt == _headContext) {
                _exposedContext = null;
                if (ctxt.inArray()) {
                    t = delegate.currentToken();
                    _currToken = t;
                    if (_currToken == JsonToken.END_ARRAY) {
                        _headContext = _headContext.getParent();
                        _itemFilter = _headContext.getFilter();
                    }
                    return t;
                }
                // 19-Jul-2021, tatu: [core#700]: following was commented out?!
                // Almost! Most likely still have the current token;
                // with the sole exception of PROPERTY_NAME
                t = delegate.currentToken();
                if (t == JsonToken.END_OBJECT) {
                    _headContext = _headContext.getParent();
                    _itemFilter = _headContext.getFilter();
                }
                if (t != JsonToken.PROPERTY_NAME) {
                    _currToken = t;
                    return t;
                }
                break;
            }
            // If not, traverse down the context chain
            ctxt = _headContext.findChildOf(ctxt);
            _exposedContext = ctxt;
            if (ctxt == null) { // should never occur
                throw _constructReadException("Unexpected problem: chain of filtered context broken, token: "+t);
            }
        }
    }

    // If not, need to read more. If we got any:
    JsonToken t = delegate.nextToken();
    if (t == null) {
        // no strict need to close, since we have no state here
        _currToken = t;
        return t;
    }

    // otherwise... to include or not?
    TokenFilter f;
    switch (t.id()) {
    case JsonTokenId.ID_NOT_AVAILABLE:
        throw _constructReadException("`JsonToken.NOT_AVAILABLE` received: ensure all input is fed to the Parser before use");
    case ID_START_ARRAY:
        f = _itemFilter;
        if (f == TokenFilter.INCLUDE_ALL) {
            _headContext = _headContext.createChildArrayContext(f, null, true);
            return (_currToken = t);
        }
        if (f == null) { // does this occur?
            delegate.skipChildren();
            break;
        }
        // Otherwise still iffy, need to check
        f = _headContext.checkValue(f);
        if (f == null) {
            delegate.skipChildren();
            break;
        }
        if (f != TokenFilter.INCLUDE_ALL) {
            f = f.filterStartArray();
        }
        _itemFilter = f;
        if (f == TokenFilter.INCLUDE_ALL) {
            _headContext = _headContext.createChildArrayContext(f, null, true);
            return (_currToken = t);
        } else if (f != null && _inclusion == Inclusion.INCLUDE_NON_NULL) {
            // TODO don't count as match?
            _headContext = _headContext.createChildArrayContext(f, null, true);
            return (_currToken = t);
        }
        _headContext = _headContext.createChildArrayContext(f, null, false);

        // Also: only need buffering if parent path to be included
        if (_inclusion == Inclusion.INCLUDE_ALL_AND_PATH) {
            t = _nextTokenWithBuffering(_headContext);
            if (t != null) {
                _currToken = t;
                return t;
            }
        }
        break;

    case ID_START_OBJECT:
        f = _itemFilter;
        if (f == TokenFilter.INCLUDE_ALL) {
            _headContext = _headContext.createChildObjectContext(f, null, true);
            return (_currToken = t);
        }
        if (f == null) { // does this occur?
            delegate.skipChildren();
            break;
        }
        // Otherwise still iffy, need to check
        f = _headContext.checkValue(f);
        if (f == null) {
            delegate.skipChildren();
            break;
        }
        if (f != TokenFilter.INCLUDE_ALL) {
            f = f.filterStartObject();
        }
        _itemFilter = f;
        if (f == TokenFilter.INCLUDE_ALL) {
            _headContext = _headContext.createChildObjectContext(f, null, true);
            return (_currToken = t);
        } else if (f != null && _inclusion == Inclusion.INCLUDE_NON_NULL) {
            // TODO don't count as match?
            _headContext = _headContext.createChildObjectContext(f, null, true);
            return (_currToken = t);
        }
        _headContext = _headContext.createChildObjectContext(f, null, false);
        // Also: only need buffering if parent path to be included
        if (_inclusion == Inclusion.INCLUDE_ALL_AND_PATH) {
            t = _nextTokenWithBuffering(_headContext);
            if (t != null) {
                _currToken = t;
                return t;
            }
        }
        // note: inclusion of surrounding Object handled separately via
        // PROPERTY_NAME
        break;

    case ID_END_ARRAY:
    case ID_END_OBJECT:
        {
            boolean returnEnd = _headContext.isStartHandled();
            f = _headContext.getFilter();
            if ((f != null) && (f != TokenFilter.INCLUDE_ALL)) {
                if (t.id() == JsonTokenId.ID_END_ARRAY) {
                    f.filterFinishArray();
                } else {
                    f.filterFinishObject();
                }
            }
            _headContext = _headContext.getParent();
            _itemFilter = _headContext.getFilter();
            // Only expose the END token if the matching START was exposed
            if (returnEnd) {
                return (_currToken = t);
            }
        }
        break;

    case ID_PROPERTY_NAME:
        {
            final String name = delegate.currentName();
            // note: this will also set 'needToHandleName'
            f = _headContext.setPropertyName(name);
            if (f == TokenFilter.INCLUDE_ALL) {
                _itemFilter = f;
                return (_currToken = t);
            }
            if (f == null) {
                delegate.nextToken();
                delegate.skipChildren();
                break;
            }
            f = f.includeProperty(name);
            if (f == null) {
                delegate.nextToken();
                delegate.skipChildren();
                break;
            }
            _itemFilter = f;
            if (f == TokenFilter.INCLUDE_ALL) {
                if (_verifyAllowedMatches()) {
                    if (_inclusion == Inclusion.INCLUDE_ALL_AND_PATH) {
                        return (_currToken = t);
                    }
                } else {
                    delegate.nextToken();
                    delegate.skipChildren();
                }
            }
            if (_inclusion != Inclusion.ONLY_INCLUDE_ALL) {
                t = _nextTokenWithBuffering(_headContext);
                if (t != null) {
                    _currToken = t;
                    return t;
                }
            }
            break;
        }

    default: // scalar value
        f = _itemFilter;
        if (f == TokenFilter.INCLUDE_ALL) {
            return (_currToken = t);
        }
        if (f != null) {
            f = _headContext.checkValue(f);
            if ((f == TokenFilter.INCLUDE_ALL)
                    || ((f != null) && f.includeValue(delegate))) {
                if (_verifyAllowedMatches()) {
                    return (_currToken = t);
                }
            }
        }
        // Otherwise not included (leaves must be explicitly included)
        break;
    }

    // We get here if token was not yet found; offlined handling
    return _nextToken2();
}
// Offlined handling for cases where there was no buffered token to
// return, and the token read next could not be returned as-is,
// at least not yet, but where we have not yet established that
// buffering is needed.
//
// Structurally mirrors the switch in nextToken(), but loops until a token
// can be exposed (or input ends), since any number of tokens may be
// filtered out along the way.
protected final JsonToken _nextToken2() throws JacksonException
{
    main_loop:
    while (true) {
        JsonToken t = delegate.nextToken();
        if (t == null) { // is this even legal?
            _currToken = t;
            return t;
        }
        TokenFilter f;

        switch (t.id()) {
        case ID_START_ARRAY:
            f = _itemFilter;
            if (f == TokenFilter.INCLUDE_ALL) {
                _headContext = _headContext.createChildArrayContext(f, null, true);
                return (_currToken = t);
            }
            if (f == null) { // does this occur?
                delegate.skipChildren();
                continue main_loop;
            }
            // Otherwise still iffy, need to check
            f = _headContext.checkValue(f);
            if (f == null) {
                delegate.skipChildren();
                continue main_loop;
            }
            if (f != TokenFilter.INCLUDE_ALL) {
                f = f.filterStartArray();
            }
            _itemFilter = f;
            if (f == TokenFilter.INCLUDE_ALL) {
                _headContext = _headContext.createChildArrayContext(f, null, true);
                return (_currToken = t);
            } else if (f != null && _inclusion == Inclusion.INCLUDE_NON_NULL) {
                _headContext = _headContext.createChildArrayContext(f, null, true);
                return (_currToken = t);
            }
            _headContext = _headContext.createChildArrayContext(f, null, false);
            // but if we didn't figure it out yet, need to buffer possible events
            if (_inclusion == Inclusion.INCLUDE_ALL_AND_PATH) {
                t = _nextTokenWithBuffering(_headContext);
                if (t != null) {
                    _currToken = t;
                    return t;
                }
            }
            continue main_loop;

        case ID_START_OBJECT:
            f = _itemFilter;
            if (f == TokenFilter.INCLUDE_ALL) {
                _headContext = _headContext.createChildObjectContext(f, null, true);
                return (_currToken = t);
            }
            if (f == null) { // does this occur?
                delegate.skipChildren();
                continue main_loop;
            }
            // Otherwise still iffy, need to check
            f = _headContext.checkValue(f);
            if (f == null) {
                delegate.skipChildren();
                continue main_loop;
            }
            if (f != TokenFilter.INCLUDE_ALL) {
                f = f.filterStartObject();
            }
            _itemFilter = f;
            if (f == TokenFilter.INCLUDE_ALL) {
                _headContext = _headContext.createChildObjectContext(f, null, true);
                return (_currToken = t);
            } else if (f != null && _inclusion == Inclusion.INCLUDE_NON_NULL) {
                _headContext = _headContext.createChildObjectContext(f, null, true);
                return (_currToken = t);
            }
            _headContext = _headContext.createChildObjectContext(f, null, false);
            if (_inclusion == Inclusion.INCLUDE_ALL_AND_PATH) {
                t = _nextTokenWithBuffering(_headContext);
                if (t != null) {
                    _currToken = t;
                    return t;
                }
            }
            continue main_loop;

        case ID_END_ARRAY:
            {
                boolean returnEnd = _headContext.isStartHandled();
                f = _headContext.getFilter();
                if ((f != null) && (f != TokenFilter.INCLUDE_ALL)) {
                    f.filterFinishArray();
                    if (!returnEnd) {
                        // Filter may ask that an empty (fully filtered) array be included anyway
                        boolean includeEmpty = f.includeEmptyArray(_headContext.hasCurrentIndex());
                        if (includeEmpty) {
                            _headContext._needToHandleName = false;
                            return _nextBuffered(_headContext);
                        }
                    }
                }
                _headContext = _headContext.getParent();
                _itemFilter = _headContext.getFilter();
                if (returnEnd) {
                    return (_currToken = t);
                }
            }
            continue main_loop;

        case ID_END_OBJECT:
            {
                boolean returnEnd = _headContext.isStartHandled();
                f = _headContext.getFilter();
                if ((f != null) && (f != TokenFilter.INCLUDE_ALL)) {
                    f.filterFinishObject();
                    if (!returnEnd) {
                        // Filter may ask that an empty (fully filtered) object be included anyway
                        boolean includeEmpty = f.includeEmptyObject(_headContext.hasCurrentName());
                        if (includeEmpty) {
                            _headContext._needToHandleName = false;
                            return _nextBuffered(_headContext);
                        }
                    }
                }
                _headContext = _headContext.getParent();
                _itemFilter = _headContext.getFilter();
                if (returnEnd) {
                    return (_currToken = t);
                }
            }
            continue main_loop;

        case ID_PROPERTY_NAME:
            {
                final String name = delegate.currentName();
                f = _headContext.setPropertyName(name);
                if (f == TokenFilter.INCLUDE_ALL) {
                    _itemFilter = f;
                    return (_currToken = t);
                }
                if (f == null) { // filter out the value
                    delegate.nextToken();
                    delegate.skipChildren();
                    continue main_loop;
                }
                f = f.includeProperty(name);
                if (f == null) { // filter out the value
                    delegate.nextToken();
                    delegate.skipChildren();
                    continue main_loop;
                }
                _itemFilter = f;
                if (f == TokenFilter.INCLUDE_ALL) {
                    if (_verifyAllowedMatches()) {
                        if (_inclusion == Inclusion.INCLUDE_ALL_AND_PATH) {
                            return (_currToken = t);
                        }
                    } else {
                        delegate.nextToken();
                        delegate.skipChildren();
                    }
                    continue main_loop;
                }
                if (_inclusion != Inclusion.ONLY_INCLUDE_ALL) {
                    t = _nextTokenWithBuffering(_headContext);
                    if (t != null) {
                        _currToken = t;
                        return t;
                    }
                }
            }
            continue main_loop;

        default: // scalar value
            f = _itemFilter;
            if (f == TokenFilter.INCLUDE_ALL) {
                return (_currToken = t);
            }
            if (f != null) {
                f = _headContext.checkValue(f);
                if ((f == TokenFilter.INCLUDE_ALL)
                        || ((f != null) && f.includeValue(delegate))) {
                    if (_verifyAllowedMatches()) {
                        return (_currToken = t);
                    }
                }
            }
            // Otherwise not included (leaves must be explicitly included)
            break;
        }
    }
}
/**
 * Called when a new potentially-included context has been entered but its
 * inclusion is not yet decided: keeps reading (and conceptually buffering
 * the path) until either a match is found — in which case the buffered path
 * is replayed via {@link #_nextBuffered} — or the potentially-included
 * sub-tree ends without a match.
 *
 * @param buffRoot context from which buffered path tokens would be replayed
 * @return next token to expose, or {@code null} if nothing within
 *    {@code buffRoot} is to be included
 */
protected final JsonToken _nextTokenWithBuffering(final TokenFilterContext buffRoot)
    throws JacksonException
{
    main_loop:
    while (true) {
        JsonToken t = delegate.nextToken();
        if (t == null) { // is this even legal?
            return t;
        }
        TokenFilter f;

        // One simplification here: we know for a fact that the item filter is
        // neither null nor 'include all', for most cases; the only exception
        // being PROPERTY_NAME handling

        switch (t.id()) {
        case ID_START_ARRAY:
            f = _headContext.checkValue(_itemFilter);
            if (f == null) {
                delegate.skipChildren();
                continue main_loop;
            }
            if (f != TokenFilter.INCLUDE_ALL) {
                f = f.filterStartArray();
            }
            _itemFilter = f;
            if (f == TokenFilter.INCLUDE_ALL) {
                _headContext = _headContext.createChildArrayContext(f, null, true);
                return _nextBuffered(buffRoot);
            } else if (f != null && _inclusion == Inclusion.INCLUDE_NON_NULL) {
                // TODO don't count as match?
                _headContext = _headContext.createChildArrayContext(f, null, true);
                return _nextBuffered(buffRoot);
            }
            _headContext = _headContext.createChildArrayContext(f, null, false);
            continue main_loop;

        case ID_START_OBJECT:
            f = _itemFilter;
            if (f == TokenFilter.INCLUDE_ALL) {
                _headContext = _headContext.createChildObjectContext(f, null, true);
                return t;
            }
            if (f == null) { // does this occur?
                delegate.skipChildren();
                continue main_loop;
            }
            // Otherwise still iffy, need to check
            f = _headContext.checkValue(f);
            if (f == null) {
                delegate.skipChildren();
                continue main_loop;
            }
            if (f != TokenFilter.INCLUDE_ALL) {
                f = f.filterStartObject();
            }
            _itemFilter = f;
            if (f == TokenFilter.INCLUDE_ALL) {
                _headContext = _headContext.createChildObjectContext(f, null, true);
                return _nextBuffered(buffRoot);
            } else if (f != null && _inclusion == Inclusion.INCLUDE_NON_NULL) {
                // TODO don't count as match?
                // FIX: was createChildArrayContext() — copy-paste from the
                // START_ARRAY branch; a START_OBJECT must create an Object
                // context so inArray()/inObject() state stays correct
                _headContext = _headContext.createChildObjectContext(f, null, true);
                return _nextBuffered(buffRoot);
            }
            _headContext = _headContext.createChildObjectContext(f, null, false);
            continue main_loop;

        case ID_END_ARRAY:
            {
                // Unlike with other loops, here we know that content was NOT
                // included (won't get this far otherwise)
                f = _headContext.getFilter();
                boolean gotEnd = (_headContext == buffRoot);
                boolean returnEnd = gotEnd && _headContext.isStartHandled();

                if ((f != null) && (f != TokenFilter.INCLUDE_ALL)) {
                    f.filterFinishArray();
                    if (!returnEnd) {
                        boolean includeEmpty = f.includeEmptyArray(_headContext.hasCurrentIndex());
                        if (includeEmpty) {
                            _headContext._needToHandleName = false;
                            return _nextBuffered(buffRoot);
                        }
                    }
                }
                _headContext = _headContext.getParent();
                _itemFilter = _headContext.getFilter();

                if (returnEnd) {
                    return t;
                }
                if (gotEnd) {
                    return null;
                }
            }
            continue main_loop;

        case ID_END_OBJECT:
            {
                // Unlike with other loops, here we know that content was NOT
                // included (won't get this far otherwise)
                f = _headContext.getFilter();
                boolean gotEnd = (_headContext == buffRoot);
                boolean returnEnd = gotEnd && _headContext.isStartHandled();

                if ((f != null) && (f != TokenFilter.INCLUDE_ALL)) {
                    f.filterFinishObject();
                    if (!returnEnd) {
                        boolean includeEmpty = f.includeEmptyObject(_headContext.hasCurrentName());
                        if (includeEmpty) {
                            _headContext._needToHandleName = false;
                            return _nextBuffered(buffRoot);
                        }
                    }
                }
                _headContext = _headContext.getParent();
                _itemFilter = _headContext.getFilter();

                if (returnEnd) {
                    return t;
                }
                if (gotEnd) {
                    return null;
                }
            }
            continue main_loop;

        case ID_PROPERTY_NAME:
            {
                final String name = delegate.currentName();
                f = _headContext.setPropertyName(name);
                if (f == TokenFilter.INCLUDE_ALL) {
                    _itemFilter = f;
                    return _nextBuffered(buffRoot);
                }
                if (f == null) { // filter out the value
                    delegate.nextToken();
                    delegate.skipChildren();
                    continue main_loop;
                }
                f = f.includeProperty(name);
                if (f == null) { // filter out the value
                    delegate.nextToken();
                    delegate.skipChildren();
                    continue main_loop;
                }
                _itemFilter = f;
                if (f == TokenFilter.INCLUDE_ALL) {
                    if (_verifyAllowedMatches()) {
                        return _nextBuffered(buffRoot);
                    } else {
                        // edge case: if no more matches allowed, reset filter
                        // to initial state to prevent missing a token in next iteration
                        _itemFilter = _headContext.setPropertyName(name);
                    }
                }
            }
            continue main_loop;

        default: // scalar value
            f = _itemFilter;
            if (f == TokenFilter.INCLUDE_ALL) {
                return _nextBuffered(buffRoot);
            }
            if (f != null) {
                f = _headContext.checkValue(f);
                if ((f == TokenFilter.INCLUDE_ALL)
                        || ((f != null) && f.includeValue(delegate))) {
                    if (_verifyAllowedMatches()) {
                        return _nextBuffered(buffRoot);
                    }
                }
            }
            // Otherwise not included (leaves must be explicitly included)
            continue main_loop;
        }
    }
}
// Start replaying buffered path tokens from the given context (the pending
// path down to the match just found), walking child contexts until a
// readable token is located.
private JsonToken _nextBuffered(TokenFilterContext buffRoot) throws JacksonException
{
    _exposedContext = buffRoot;
    TokenFilterContext ctxt = buffRoot;
    JsonToken t = ctxt.nextTokenToRead();
    if (t != null) {
        return t;
    }
    while (true) {
        // all done with buffered stuff?
        if (ctxt == _headContext) {
            // Caller guaranteed there are buffered tokens; none found is an internal error
            throw _constructReadException("Internal error: failed to locate expected buffered tokens");
            /*
            _exposedContext = null;
            break;
            */
        }
        // If not, traverse down the context chain
        ctxt = _exposedContext.findChildOf(ctxt);
        _exposedContext = ctxt;
        if (ctxt == null) { // should never occur
            throw _constructReadException("Unexpected problem: chain of filtered context broken");
        }
        t = _exposedContext.nextTokenToRead();
        if (t != null) {
            return t;
        }
    }
}
private final boolean _verifyAllowedMatches() throws JacksonException {
if (_matchCount == 0 || _allowMultipleMatches) {
++_matchCount;
return true;
}
return false;
}
@Override
public JsonToken nextValue() throws JacksonException {
// Re-implemented same as ParserMinimalBase:
JsonToken t = nextToken();
if (t == JsonToken.PROPERTY_NAME) {
t = nextToken();
}
return t;
}
/**
* Need to override, re-implement similar to how method defined in
* {@link tools.jackson.core.base.ParserMinimalBase}, to keep
* state correct here.
*/
@Override
public JsonParser skipChildren() throws JacksonException
{
if ((_currToken != JsonToken.START_OBJECT)
&& (_currToken != JsonToken.START_ARRAY)) {
return this;
}
int open = 1;
// Since proper matching of start/end markers is handled
// by nextToken(), we'll just count nesting levels here
while (true) {
JsonToken t = nextToken();
if (t == null) { // not ideal but for now, just return
return this;
}
if (t.isStructStart()) {
++open;
} else if (t.isStructEnd()) {
if (--open == 0) {
return this;
}
}
}
}
/*
/**********************************************************************
/* Public API, access to token information, text; cannot simply delegate
/* due to access via `JsonToken.PROPERTY_NAME`
/**********************************************************************
*/
// 19-Jul-2021, tatu: Cannot quite just delegate these methods due to oddity
// of property name token, which may be buffered.
@Override public String getString() throws JacksonException {
if (_currToken == JsonToken.PROPERTY_NAME) {
return currentName();
}
return delegate.getString();
}
@Override public boolean hasStringCharacters() {
if (_currToken == JsonToken.PROPERTY_NAME) {
return false;
}
return delegate.hasStringCharacters();
}
@Override public char[] getStringCharacters() throws JacksonException {
// Not optimal but is correct, unlike delegating (as underlying stream
// may point to something else due to buffering)
if (_currToken == JsonToken.PROPERTY_NAME) {
return currentName().toCharArray();
}
return delegate.getStringCharacters();
}
@Override public int getStringLength() throws JacksonException {
if (_currToken == JsonToken.PROPERTY_NAME) {
return currentName().length();
}
return delegate.getStringLength();
}
@Override public int getStringOffset() throws JacksonException {
if (_currToken == JsonToken.PROPERTY_NAME) {
return 0;
}
return delegate.getStringOffset();
}
@Override public String getValueAsString() throws JacksonException {
if (_currToken == JsonToken.PROPERTY_NAME) {
return currentName();
}
return delegate.getValueAsString();
}
@Override public String getValueAsString(String defaultValue) throws JacksonException {
if (_currToken == JsonToken.PROPERTY_NAME) {
return currentName();
}
return delegate.getValueAsString(defaultValue);
}
/*
/**********************************************************************
/* Public API, traversal methods that CANNOT just delegate
/* and where we need to override default delegation
/**********************************************************************
*/
@Override
public String nextName() throws JacksonException {
return (nextToken() == JsonToken.PROPERTY_NAME) ? currentName() : null;
}
@Override
public boolean nextName(SerializableString str) throws JacksonException {
return (nextToken() == JsonToken.PROPERTY_NAME) && str.getValue().equals(currentName());
}
@Override
public int nextNameMatch(PropertyNameMatcher matcher) throws JacksonException {
String str = nextName();
if (str != null) {
// 15-Nov-2017, tatu: We cannot rely on name being interned here
return matcher.matchName(str);
}
if (hasToken(JsonToken.END_OBJECT)) {
return PropertyNameMatcher.MATCH_END_OBJECT;
}
return PropertyNameMatcher.MATCH_ODD_TOKEN;
}
/*
/**********************************************************************
/* Internal helper methods
/**********************************************************************
*/
protected TokenStreamContext _filterContext() {
if (_exposedContext != null) {
return _exposedContext;
}
return _headContext;
}
}
| FilteringParserDelegate |
java | processing__processing4 | build/macos/appbundler/src/com/oracle/appbundler/AppBundlerTask.java | {
"start": 15012,
"end": 15116
} | class ____ entries to Java folder
copyClassPathEntries(javaDirectory);
// Copy | path |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ClassNewInstanceTest.java | {
"start": 11180,
"end": 11676
} | class ____ {
void f() throws Exception {
try {
getClass().getDeclaredConstructor().newInstance();
} catch (ReflectiveOperationException e) {
getClass().getDeclaredConstructor().newInstance();
}
}
}
""")
.doTest();
}
@Test
public void withFinally() {
testHelper
.addInputLines(
"in/Test.java",
"""
| Test |
java | quarkusio__quarkus | independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/AroundConstructInvocationContext.java | {
"start": 289,
"end": 1258
} | class ____ extends LifecycleCallbackInvocationContext {
private final Constructor<?> constructor;
private final Function<Object[], Object> forward;
AroundConstructInvocationContext(Constructor<?> constructor, Object[] parameters, Set<Annotation> interceptorBindings,
List<InterceptorInvocation> chain, Function<Object[], Object> forward) {
super(null, parameters, interceptorBindings, chain);
this.forward = forward;
this.constructor = constructor;
}
protected void interceptorChainCompleted() {
target = forward.apply(parameters);
}
@Override
public Constructor<?> getConstructor() {
return constructor;
}
@Override
public Object[] getParameters() {
return parameters;
}
@Override
public void setParameters(Object[] params) {
validateParameters(constructor, params);
this.parameters = params;
}
}
| AroundConstructInvocationContext |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestDeleteConnectorSyncJobAction.java | {
"start": 894,
"end": 1786
} | class ____ extends BaseRestHandler {
@Override
public String getName() {
return "connector_sync_job_delete_action";
}
@Override
public List<Route> routes() {
return List.of(
new Route(
RestRequest.Method.DELETE,
"/" + EnterpriseSearch.CONNECTOR_SYNC_JOB_API_ENDPOINT + "/{" + CONNECTOR_SYNC_JOB_ID_PARAM + "}"
)
);
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
DeleteConnectorSyncJobAction.Request request = new DeleteConnectorSyncJobAction.Request(
restRequest.param(CONNECTOR_SYNC_JOB_ID_PARAM)
);
return restChannel -> client.execute(DeleteConnectorSyncJobAction.INSTANCE, request, new RestToXContentListener<>(restChannel));
}
}
| RestDeleteConnectorSyncJobAction |
java | spring-projects__spring-boot | module/spring-boot-transaction/src/test/java/org/springframework/boot/transaction/jta/autoconfigure/JtaAutoConfigurationTests.java | {
"start": 5313,
"end": 6211
} | class ____ {
private final String name;
private final Class<?> type;
private final Object value;
private JndiEntry(String name, Class<?> type) {
this.name = name;
this.type = type;
this.value = mock(type);
}
private void register(InitialContext initialContext) throws NamingException {
String[] components = this.name.split("/");
String subcontextName = components[0];
String entryName = components[1];
Context javaComp = initialContext.createSubcontext(subcontextName);
JndiLoader loader = new JndiLoader(initialContext.getEnvironment());
Properties properties = new Properties();
properties.setProperty(entryName + "/type", this.type.getName());
properties.put(entryName + "/valueToConvert", this.value);
loader.load(properties, javaComp);
}
@Override
public String toString() {
return this.name;
}
}
private static final | JndiEntry |
java | spring-projects__spring-framework | spring-context/src/testFixtures/java/org/springframework/context/testfixture/cache/AbstractValueAdaptingCacheTests.java | {
"start": 922,
"end": 1508
} | class ____<T extends AbstractValueAdaptingCache>
extends AbstractCacheTests<T> {
protected static final String CACHE_NAME_NO_NULL = "testCacheNoNull";
protected abstract T getCache(boolean allowNull);
@Test
protected void testCachePutNullValueAllowNullFalse() {
T cache = getCache(false);
String key = createRandomKey();
assertThatIllegalArgumentException().isThrownBy(() ->
cache.put(key, null))
.withMessageContaining(CACHE_NAME_NO_NULL)
.withMessageContaining("is configured to not allow null values but null was provided");
}
}
| AbstractValueAdaptingCacheTests |
java | jhy__jsoup | src/test/java/org/jsoup/integration/servlets/ProxyServlet.java | {
"start": 629,
"end": 2328
} | class ____ extends AsyncProxyServlet {
public static TestServer.ProxySettings ProxySettings = TestServer.proxySettings();
public static String Via = "1.1 jsoup test proxy";
static {
System.setProperty("jdk.http.auth.tunneling.disabledSchemes", "");
// removes Basic, which is otherwise excluded from auth for CONNECT tunnels
}
public static Handler createHandler(boolean alwaysAuth) {
// ConnectHandler wraps this ProxyServlet and handles CONNECT, which sets up a tunnel for HTTPS requests and is
// opaque to the proxy. The ProxyServlet handles simple HTTP requests.
AuthFilter authFilter = new AuthFilter(alwaysAuth, true);
ConnectHandler connectHandler = new ConnectProxy(authFilter);
ServletHandler proxyHandler = new ServletHandler();
proxyHandler.addFilterWithMapping(new FilterHolder(authFilter), "/*", FilterMapping.ALL); // auth for HTTP proxy
ServletHolder proxyServletHolder = new ServletHolder(ProxyServlet.class); // Holder wraps as it requires maxThreads initialization
proxyServletHolder.setAsyncSupported(true);
proxyServletHolder.setInitParameter("maxThreads", "200");
proxyHandler.addServletWithMapping(proxyServletHolder, "/*");
connectHandler.setHandler(proxyHandler);
return connectHandler;
}
@Override
protected void onServerResponseHeaders(HttpServletRequest clientRequest, HttpServletResponse proxyResponse, Response serverResponse) {
super.onServerResponseHeaders(clientRequest, proxyResponse, serverResponse);
proxyResponse.addHeader("Via", Via);
}
/** Supports CONNECT tunnels */
static | ProxyServlet |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.