language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/io/ProtocolResolverApplicationContextInitializerIntegrationTests.java | {
"start": 2372,
"end": 2585
} | class ____ {
@Nullable Resource resource;
@Nullable Resource getResource() {
return this.resource;
}
void setResource(@Nullable Resource resource) {
this.resource = resource;
}
}
}
| TestProperties |
java | apache__flink | flink-kubernetes/src/main/java/org/apache/flink/kubernetes/kubeclient/parameters/KubernetesTaskManagerParameters.java | {
"start": 1663,
"end": 7187
} | class ____ extends AbstractKubernetesParameters {
private final String podName;
private final String dynamicProperties;
private final String jvmMemOptsEnv;
private final ContaineredTaskManagerParameters containeredTaskManagerParameters;
private final Map<String, String> taskManagerExternalResourceConfigKeys;
private final Set<String> blockedNodes;
public KubernetesTaskManagerParameters(
Configuration flinkConfig,
String podName,
String dynamicProperties,
String jvmMemOptsEnv,
ContaineredTaskManagerParameters containeredTaskManagerParameters,
Map<String, String> taskManagerExternalResourceConfigKeys,
Set<String> blockedNodes) {
super(flinkConfig);
this.podName = checkNotNull(podName);
this.dynamicProperties = checkNotNull(dynamicProperties);
this.jvmMemOptsEnv = checkNotNull(jvmMemOptsEnv);
this.containeredTaskManagerParameters = checkNotNull(containeredTaskManagerParameters);
this.taskManagerExternalResourceConfigKeys =
checkNotNull(taskManagerExternalResourceConfigKeys);
this.blockedNodes = checkNotNull(blockedNodes);
}
@Override
public Map<String, String> getLabels() {
final Map<String, String> labels = new HashMap<>();
labels.putAll(
flinkConfig
.getOptional(KubernetesConfigOptions.TASK_MANAGER_LABELS)
.orElse(Collections.emptyMap()));
labels.putAll(getSelectors());
return Collections.unmodifiableMap(labels);
}
@Override
public Map<String, String> getSelectors() {
return KubernetesUtils.getTaskManagerSelectors(getClusterId());
}
@Override
public Map<String, String> getNodeSelector() {
return Collections.unmodifiableMap(
flinkConfig
.getOptional(KubernetesConfigOptions.TASK_MANAGER_NODE_SELECTOR)
.orElse(Collections.emptyMap()));
}
@Override
public Map<String, String> getEnvironments() {
return this.containeredTaskManagerParameters.taskManagerEnv();
}
@Override
public Map<String, String> getAnnotations() {
return flinkConfig
.getOptional(KubernetesConfigOptions.TASK_MANAGER_ANNOTATIONS)
.orElse(Collections.emptyMap());
}
@Override
public List<Map<String, String>> getTolerations() {
return flinkConfig
.getOptional(KubernetesConfigOptions.TASK_MANAGER_TOLERATIONS)
.orElse(Collections.emptyList());
}
public String getPodName() {
return podName;
}
public int getTaskManagerMemoryMB() {
return containeredTaskManagerParameters
.getTaskExecutorProcessSpec()
.getTotalProcessMemorySize()
.getMebiBytes();
}
public double getTaskManagerCPU() {
return containeredTaskManagerParameters
.getTaskExecutorProcessSpec()
.getCpuCores()
.getValue()
.doubleValue();
}
public double getTaskManagerCPULimitFactor() {
final double limitFactor =
flinkConfig.get(KubernetesConfigOptions.TASK_MANAGER_CPU_LIMIT_FACTOR);
checkArgument(
limitFactor >= 1,
"%s should be greater or equal to 1.",
KubernetesConfigOptions.TASK_MANAGER_CPU_LIMIT_FACTOR.key());
return limitFactor;
}
public double getTaskManagerMemoryLimitFactor() {
final double limitFactor =
flinkConfig.get(KubernetesConfigOptions.TASK_MANAGER_MEMORY_LIMIT_FACTOR);
checkArgument(
limitFactor >= 1,
"%s should be greater or equal to 1.",
KubernetesConfigOptions.TASK_MANAGER_MEMORY_LIMIT_FACTOR.key());
return limitFactor;
}
public Map<String, ExternalResource> getTaskManagerExternalResources() {
return containeredTaskManagerParameters.getTaskExecutorProcessSpec().getExtendedResources();
}
public String getServiceAccount() {
return flinkConfig.get(KubernetesConfigOptions.TASK_MANAGER_SERVICE_ACCOUNT);
}
public Map<String, String> getTaskManagerExternalResourceConfigKeys() {
return Collections.unmodifiableMap(taskManagerExternalResourceConfigKeys);
}
public int getRPCPort() {
final int taskManagerRpcPort =
KubernetesUtils.parsePort(flinkConfig, TaskManagerOptions.RPC_PORT);
checkArgument(
taskManagerRpcPort > 0, "%s should not be 0.", TaskManagerOptions.RPC_PORT.key());
return taskManagerRpcPort;
}
public String getDynamicProperties() {
return dynamicProperties;
}
public String getJvmMemOptsEnv() {
return jvmMemOptsEnv;
}
public ContaineredTaskManagerParameters getContaineredTaskManagerParameters() {
return containeredTaskManagerParameters;
}
public Set<String> getBlockedNodes() {
return Collections.unmodifiableSet(blockedNodes);
}
public String getNodeNameLabel() {
return checkNotNull(flinkConfig.get(KubernetesConfigOptions.KUBERNETES_NODE_NAME_LABEL));
}
public String getEntrypointArgs() {
return flinkConfig.get(KubernetesConfigOptions.KUBERNETES_TASKMANAGER_ENTRYPOINT_ARGS);
}
}
| KubernetesTaskManagerParameters |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorCustomSchedule.java | {
"start": 7120,
"end": 8223
} | class ____ {
private ConfigurationOverrides configurationOverrides;
private boolean enabled;
private Cron interval;
private Instant lastSynced;
private String name;
public Builder setConfigurationOverrides(ConfigurationOverrides configurationOverrides) {
this.configurationOverrides = configurationOverrides;
return this;
}
public Builder setEnabled(boolean enabled) {
this.enabled = enabled;
return this;
}
public Builder setInterval(Cron interval) {
this.interval = interval;
return this;
}
public Builder setLastSynced(Instant lastSynced) {
this.lastSynced = lastSynced;
return this;
}
public Builder setName(String name) {
this.name = name;
return this;
}
public ConnectorCustomSchedule build() {
return new ConnectorCustomSchedule(configurationOverrides, enabled, interval, lastSynced, name);
}
}
public static | Builder |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/TestTemplateInvocationTests.java | {
"start": 4099,
"end": 21474
} | class ____ extends AbstractJupiterTestEngineTests {
@Test
void templateWithSingleRegisteredExtensionIsInvoked() {
LauncherDiscoveryRequest request = request().selectors(
selectMethod(MyTestTemplateTestCase.class, "templateWithSingleRegisteredExtension")).build();
EngineExecutionResults executionResults = executeTests(request);
executionResults.allEvents().assertEventsMatchExactly( //
wrappedInContainerEvents(MyTestTemplateTestCase.class, //
event(container("templateWithSingleRegisteredExtension"), started()), //
event(dynamicTestRegistered("test-template-invocation:#1")), //
event(test("test-template-invocation:#1"), started()), //
event(test("test-template-invocation:#1"),
finishedWithFailure(message("invocation is expected to fail"))), //
event(container("templateWithSingleRegisteredExtension"), finishedSuccessfully())));
}
@Test
void parentRelationshipIsEstablished() {
LauncherDiscoveryRequest request = request().selectors(
selectMethod(MyTestTemplateTestCase.class, "templateWithSingleRegisteredExtension")).build();
EngineExecutionResults executionResults = executeTests(request);
TestDescriptor templateMethodDescriptor = findTestDescriptor(executionResults,
container("templateWithSingleRegisteredExtension"));
TestDescriptor invocationDescriptor = findTestDescriptor(executionResults, test("test-template-invocation:#1"));
assertThat(invocationDescriptor.getParent()).hasValue(templateMethodDescriptor);
}
@Test
void beforeAndAfterEachMethodsAreExecutedAroundInvocation() {
LauncherDiscoveryRequest request = request().selectors(
selectMethod(TestTemplateTestClassWithBeforeAndAfterEach.class, "testTemplateWithTwoInvocations")).build();
executeTests(request);
assertThat(TestTemplateTestClassWithBeforeAndAfterEach.lifecycleEvents).containsExactly(
"beforeAll:TestTemplateInvocationTests$TestTemplateTestClassWithBeforeAndAfterEach", "beforeEach:[1]",
"afterEach:[1]", "beforeEach:[2]", "afterEach:[2]",
"afterAll:TestTemplateInvocationTests$TestTemplateTestClassWithBeforeAndAfterEach");
}
@Test
void templateWithTwoRegisteredExtensionsIsInvoked() {
LauncherDiscoveryRequest request = request().selectors(
selectMethod(MyTestTemplateTestCase.class, "templateWithTwoRegisteredExtensions")).build();
EngineExecutionResults executionResults = executeTests(request);
executionResults.allEvents().assertEventsMatchExactly( //
wrappedInContainerEvents(MyTestTemplateTestCase.class, //
event(container("templateWithTwoRegisteredExtensions"), started()), //
event(dynamicTestRegistered("test-template-invocation:#1"), displayName("[1]")), //
event(test("test-template-invocation:#1"), started()), //
event(test("test-template-invocation:#1"),
finishedWithFailure(message("invocation is expected to fail"))), //
event(dynamicTestRegistered("test-template-invocation:#2"), displayName("[2]")), //
event(test("test-template-invocation:#2"), started()), //
event(test("test-template-invocation:#2"),
finishedWithFailure(message("invocation is expected to fail"))), //
event(container("templateWithTwoRegisteredExtensions"), finishedSuccessfully())));
}
@Test
void legacyReportingNames() {
LauncherDiscoveryRequest request = request().selectors(
selectMethod(MyTestTemplateTestCase.class, "templateWithTwoRegisteredExtensions")).build();
EngineExecutionResults results = executeTests(request);
Events events = results.allEvents();
events.assertStatistics(stats -> stats.dynamicallyRegistered(2));
// events.dynamicallyRegistered().debug();
// results.testEvents().dynamicallyRegistered().debug();
// results.containerEvents().dynamicallyRegistered().debug();
// @formatter:off
Stream<String> legacyReportingNames = events.dynamicallyRegistered()
.map(Event::getTestDescriptor)
.map(TestDescriptor::getLegacyReportingName);
// @formatter:off
assertThat(legacyReportingNames).containsExactly("templateWithTwoRegisteredExtensions()[1]",
"templateWithTwoRegisteredExtensions()[2]");
}
@Test
void templateWithTwoInvocationsFromSingleExtensionIsInvoked() {
LauncherDiscoveryRequest request = request().selectors(
selectMethod(MyTestTemplateTestCase.class, "templateWithTwoInvocationsFromSingleExtension")).build();
EngineExecutionResults executionResults = executeTests(request);
executionResults.allEvents().assertEventsMatchExactly( //
wrappedInContainerEvents(MyTestTemplateTestCase.class, //
event(container("templateWithTwoInvocationsFromSingleExtension"), started()), //
event(dynamicTestRegistered("test-template-invocation:#1"), displayName("[1]")), //
event(test("test-template-invocation:#1"), started()), //
event(test("test-template-invocation:#1"),
finishedWithFailure(message("invocation is expected to fail"))), //
event(dynamicTestRegistered("test-template-invocation:#2"), displayName("[2]")), //
event(test("test-template-invocation:#2"), started()), //
event(test("test-template-invocation:#2"),
finishedWithFailure(message("invocation is expected to fail"))), //
event(container("templateWithTwoInvocationsFromSingleExtension"), finishedSuccessfully())));
}
@Test
void singleInvocationIsExecutedWhenDiscoveredByUniqueId() {
UniqueId uniqueId = discoverUniqueId(MyTestTemplateTestCase.class,
"templateWithTwoInvocationsFromSingleExtension") //
.append(TestTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#2");
EngineExecutionResults executionResults = executeTests(selectUniqueId(uniqueId));
executionResults.allEvents().assertEventsMatchExactly( //
wrappedInContainerEvents(MyTestTemplateTestCase.class, //
event(container("templateWithTwoInvocationsFromSingleExtension"), started()), //
event(dynamicTestRegistered("test-template-invocation:#2"), displayName("[2]")), //
event(test("test-template-invocation:#2"), started()), //
event(test("test-template-invocation:#2"),
finishedWithFailure(message("invocation is expected to fail"))), //
event(container("templateWithTwoInvocationsFromSingleExtension"), finishedSuccessfully())));
}
@Test
void templateWithDisabledInvocationsIsSkipped() {
LauncherDiscoveryRequest request = request().selectors(
selectMethod(MyTestTemplateTestCase.class, "templateWithDisabledInvocations")).build();
EngineExecutionResults executionResults = executeTests(request);
executionResults.allEvents().assertEventsMatchExactly( //
wrappedInContainerEvents(MyTestTemplateTestCase.class, //
event(container("templateWithDisabledInvocations"), started()), //
event(dynamicTestRegistered("test-template-invocation:#1")), //
event(test("test-template-invocation:#1"), skippedWithReason("always disabled")), //
event(container("templateWithDisabledInvocations"), finishedSuccessfully())));
}
@Test
void disabledTemplateIsSkipped() {
LauncherDiscoveryRequest request = request().selectors(
selectMethod(MyTestTemplateTestCase.class, "disabledTemplate")).build();
EngineExecutionResults executionResults = executeTests(request);
executionResults.allEvents().assertEventsMatchExactly( //
wrappedInContainerEvents(MyTestTemplateTestCase.class, //
event(container("disabledTemplate"), skippedWithReason("always disabled"))));
}
@Test
void templateWithCustomizedDisplayNamesIsInvoked() {
LauncherDiscoveryRequest request = request().selectors(
selectMethod(MyTestTemplateTestCase.class, "templateWithCustomizedDisplayNames")).build();
EngineExecutionResults executionResults = executeTests(request);
executionResults.allEvents().assertEventsMatchExactly( //
wrappedInContainerEvents(MyTestTemplateTestCase.class, //
event(container("templateWithCustomizedDisplayNames"), started()), //
event(dynamicTestRegistered("test-template-invocation:#1"),
displayName("1 --> templateWithCustomizedDisplayNames()")), //
event(test("test-template-invocation:#1"), started()), //
event(test("test-template-invocation:#1"),
finishedWithFailure(message("invocation is expected to fail"))), //
event(container("templateWithCustomizedDisplayNames"), finishedSuccessfully())));
}
@Test
void templateWithDynamicParameterResolverIsInvoked() {
LauncherDiscoveryRequest request = request().selectors(selectMethod(MyTestTemplateTestCase.class,
"templateWithDynamicParameterResolver", "java.lang.String")).build();
EngineExecutionResults executionResults = executeTests(request);
executionResults.allEvents().assertEventsMatchExactly( //
wrappedInContainerEvents(MyTestTemplateTestCase.class, //
event(container("templateWithDynamicParameterResolver"), started()), //
event(dynamicTestRegistered("test-template-invocation:#1"), displayName("[1] foo")), //
event(test("test-template-invocation:#1"), started()), //
event(test("test-template-invocation:#1"), finishedWithFailure(message("foo"))), //
event(dynamicTestRegistered("test-template-invocation:#2"), displayName("[2] bar")), //
event(test("test-template-invocation:#2"), started()), //
event(test("test-template-invocation:#2"), finishedWithFailure(message("bar"))), //
event(container("templateWithDynamicParameterResolver"), finishedSuccessfully())));
}
@Test
void contextParameterResolverCanResolveConstructorArguments() {
LauncherDiscoveryRequest request = request().selectors(
selectMethod(MyTestTemplateTestCaseWithConstructor.class, "template", "java.lang.String")).build();
EngineExecutionResults executionResults = executeTests(request);
executionResults.allEvents().assertEventsMatchExactly( //
wrappedInContainerEvents(MyTestTemplateTestCase.class, //
event(container("template"), started()), //
event(dynamicTestRegistered("test-template-invocation:#1"), displayName("[1] foo")), //
event(test("test-template-invocation:#1"), started()), //
event(test("test-template-invocation:#1"), finishedSuccessfully()), //
event(dynamicTestRegistered("test-template-invocation:#2"), displayName("[2] bar")), //
event(test("test-template-invocation:#2"), started()), //
event(test("test-template-invocation:#2"), finishedSuccessfully()), //
event(container("template"), finishedSuccessfully())));
}
@Test
void templateWithDynamicTestInstancePostProcessorIsInvoked() {
LauncherDiscoveryRequest request = request().selectors(
selectMethod(MyTestTemplateTestCase.class, "templateWithDynamicTestInstancePostProcessor")).build();
EngineExecutionResults executionResults = executeTests(request);
executionResults.allEvents().assertEventsMatchExactly( //
wrappedInContainerEvents(MyTestTemplateTestCase.class, //
event(container("templateWithDynamicTestInstancePostProcessor"), started()), //
event(dynamicTestRegistered("test-template-invocation:#1")), //
event(test("test-template-invocation:#1"), started()), //
event(test("test-template-invocation:#1"), finishedWithFailure(message("foo"))), //
event(dynamicTestRegistered("test-template-invocation:#2")), //
event(test("test-template-invocation:#2"), started()), //
event(test("test-template-invocation:#2"), finishedWithFailure(message("bar"))), //
event(container("templateWithDynamicTestInstancePostProcessor"), finishedSuccessfully())));
}
@Test
void lifecycleCallbacksAreExecutedForInvocation() {
LauncherDiscoveryRequest request = request().selectors(
selectClass(TestTemplateTestClassWithDynamicLifecycleCallbacks.class)).build();
executeTests(request);
// @formatter:off
assertThat(TestTemplateTestClassWithDynamicLifecycleCallbacks.lifecycleEvents).containsExactly(
"beforeEach",
"beforeTestExecution",
"testTemplate:foo",
"handleTestExecutionException",
"afterTestExecution",
"afterEach",
"beforeEach",
"beforeTestExecution",
"testTemplate:bar",
"afterTestExecution",
"afterEach");
// @formatter:on
}
@Test
void extensionIsAskedForSupportBeforeItMustProvide() {
LauncherDiscoveryRequest request = request().selectors(
selectMethod(MyTestTemplateTestCase.class, "templateWithWrongParameterType", int.class.getName())).build();
EngineExecutionResults executionResults = executeTests(request);
executionResults.allEvents().assertEventsMatchExactly( //
wrappedInContainerEvents(MyTestTemplateTestCase.class, //
event(container("templateWithWrongParameterType"), started()), //
event(container("templateWithWrongParameterType"), finishedWithFailure(message(s -> s.startsWith(
"You must register at least one TestTemplateInvocationContextProvider that supports @TestTemplate method ["))))));
}
@Test
void templateWithSupportingProviderButNoInvocationsReportsFailure() {
LauncherDiscoveryRequest request = request().selectors(
selectMethod(MyTestTemplateTestCase.class, "templateWithSupportingProviderButNoInvocations")).build();
EngineExecutionResults executionResults = executeTests(request);
executionResults.allEvents().assertEventsMatchExactly( //
wrappedInContainerEvents(MyTestTemplateTestCase.class, //
event(container("templateWithSupportingProviderButNoInvocations"), started()), //
event(container("templateWithSupportingProviderButNoInvocations"),
finishedWithFailure(message(
"Provider [%s] did not provide any invocation contexts, but was expected to do so. ".formatted(
InvocationContextProviderThatSupportsEverythingButProvidesNothing.class.getSimpleName())
+ "You may override mayReturnZeroTestTemplateInvocationContexts() to allow this.")))));
}
@Test
void templateWithSupportingProviderAllowingNoInvocationsDoesNotFail() {
LauncherDiscoveryRequest request = request().selectors(
selectMethod(MyTestTemplateTestCase.class, "templateWithSupportingProviderAllowingNoInvocations")).build();
EngineExecutionResults executionResults = executeTests(request);
executionResults.allEvents().assertEventsMatchExactly( //
wrappedInContainerEvents(MyTestTemplateTestCase.class,
event(container("templateWithSupportingProviderAllowingNoInvocations"), started()),
event(container("templateWithSupportingProviderAllowingNoInvocations"), finishedSuccessfully())));
}
@Test
void templateWithMixedProvidersNoInvocationReportsFailure() {
LauncherDiscoveryRequest request = request().selectors(selectMethod(MyTestTemplateTestCase.class,
"templateWithMultipleProvidersAllowingAndRestrictingToProvideNothing")).build();
EngineExecutionResults executionResults = executeTests(request);
executionResults.allEvents().assertEventsMatchExactly( //
wrappedInContainerEvents(MyTestTemplateTestCase.class, //
event(container("templateWithMultipleProvidersAllowingAndRestrictingToProvideNothing"), started()), //
event(container("templateWithMultipleProvidersAllowingAndRestrictingToProvideNothing"),
finishedWithFailure(message(
"Provider [%s] did not provide any invocation contexts, but was expected to do so. ".formatted(
InvocationContextProviderThatSupportsEverythingButProvidesNothing.class.getSimpleName())
+ "You may override mayReturnZeroTestTemplateInvocationContexts() to allow this.")))));
}
@Test
void templateWithCloseableStream() {
LauncherDiscoveryRequest request = request().selectors(
selectMethod(MyTestTemplateTestCase.class, "templateWithCloseableStream")).build();
EngineExecutionResults executionResults = executeTests(request);
assertThat(InvocationContextProviderWithCloseableStream.streamClosed.get()).describedAs(
"streamClosed").isTrue();
executionResults.allEvents().assertEventsMatchExactly( //
wrappedInContainerEvents(MyTestTemplateTestCase.class, //
event(container("templateWithCloseableStream"), started()), //
event(dynamicTestRegistered("test-template-invocation:#1")), //
event(test("test-template-invocation:#1"), started()), //
event(test("test-template-invocation:#1"), finishedSuccessfully()), //
event(container("templateWithCloseableStream"), finishedSuccessfully())));
}
@Test
void templateWithPreparations() {
var results = executeTestsForClass(TestTemplateWithPreparationsTestCase.class);
assertTrue(CustomCloseableResource.closed, "resource in store was closed");
results.allEvents().assertStatistics(stats -> stats.started(4).succeeded(4));
}
private TestDescriptor findTestDescriptor(EngineExecutionResults executionResults, Condition<Event> condition) {
// @formatter:off
return executionResults.allEvents()
.filter(condition::matches)
.findAny()
.map(Event::getTestDescriptor)
.orElseThrow(() -> new AssertionFailedError("Could not find event for condition: " + condition));
// @formatter:on
}
@SafeVarargs
@SuppressWarnings({ "unchecked", "varargs", "rawtypes" })
private final Condition<? super Event>[] wrappedInContainerEvents(Class<MyTestTemplateTestCase> clazz,
Condition<? super Event>... wrappedConditions) {
List<Condition<? super Event>> conditions = new ArrayList<>();
conditions.add(event(engine(), started()));
conditions.add(event(container(clazz), started()));
conditions.addAll(asList(wrappedConditions));
conditions.add(event(container(clazz), finishedSuccessfully()));
conditions.add(event(engine(), finishedSuccessfully()));
return conditions.toArray(new Condition[0]);
}
static | TestTemplateInvocationTests |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoOneOfCompilationTest.java | {
"start": 16453,
"end": 16804
} | enum ____");
}
@Test
public void enumExtraCase() {
JavaFileObject javaFileObject =
JavaFileObjects.forSourceLines(
"foo.bar.Pet",
"package foo.bar;",
"",
"import com.google.auto.value.AutoOneOf;",
"",
"@AutoOneOf(Pet.Kind.class)",
"public abstract | Kind |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/PainlessScripts.java | {
"start": 512,
"end": 1425
} | class ____ {
/**
* Template for the comparison script.
* It uses "String.valueOf" method in case the mapping types of the two fields are different.
*/
private static final MessageFormat COMPARISON_SCRIPT_TEMPLATE = new MessageFormat(
"String.valueOf(doc[''{0}''].value).equals(String.valueOf(doc[''{1}''].value))",
Locale.ROOT
);
/**
* Builds script that tests field values equality for the given actual and predicted field names.
*
* @param actualField name of the actual field
* @param predictedField name of the predicted field
* @return script that tests whether the values of actualField and predictedField are equal
*/
static Script buildIsEqualScript(String actualField, String predictedField) {
return new Script(COMPARISON_SCRIPT_TEMPLATE.format(new Object[] { actualField, predictedField }));
}
}
| PainlessScripts |
java | micronaut-projects__micronaut-core | http-netty/src/main/java/io/micronaut/http/netty/websocket/NettyServerWebSocketBroadcaster.java | {
"start": 1572,
"end": 6248
} | class ____ implements WebSocketBroadcaster {
private final WebSocketMessageEncoder webSocketMessageEncoder;
private final WebSocketSessionRepository webSocketSessionRepository;
/**
*
* @param webSocketMessageEncoder An instance of {@link io.micronaut.http.netty.websocket.WebSocketMessageEncoder} responsible for encoding WebSocket messages.
* @param webSocketSessionRepository An instance of {@link io.micronaut.http.netty.websocket.WebSocketSessionRepository}. Defines a ChannelGroup repository to handle WebSockets.
*/
public NettyServerWebSocketBroadcaster(WebSocketMessageEncoder webSocketMessageEncoder,
WebSocketSessionRepository webSocketSessionRepository) {
this.webSocketMessageEncoder = webSocketMessageEncoder;
this.webSocketSessionRepository = webSocketSessionRepository;
}
@Override
public <T> void broadcastSync(T message, MediaType mediaType, Predicate<WebSocketSession> filter) {
WebSocketFrame frame = webSocketMessageEncoder.encodeMessage(message, mediaType);
try {
webSocketSessionRepository.getChannelGroup().writeAndFlush(frame, ch -> {
Attribute<NettyWebSocketSession> attr = ch.attr(NettyWebSocketSession.WEB_SOCKET_SESSION_KEY);
NettyWebSocketSession s = attr.get();
return s != null && s.isOpen() && filter.test(s);
}).sync();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new WebSocketSessionException("Broadcast Interrupted");
}
}
@Override
public <T> Publisher<T> broadcast(T message, MediaType mediaType, Predicate<WebSocketSession> filter) {
return Flux.create(emitter -> {
try {
WebSocketFrame frame = webSocketMessageEncoder.encodeMessage(message, mediaType);
webSocketSessionRepository.getChannelGroup().writeAndFlush(frame, ch -> {
Attribute<NettyWebSocketSession> attr = ch.attr(NettyWebSocketSession.WEB_SOCKET_SESSION_KEY);
NettyWebSocketSession s = attr.get();
return s != null && s.isOpen() && filter.test(s);
}).addListener(future -> {
if (!future.isSuccess()) {
Throwable cause = extractBroadcastFailure(future.cause());
if (cause != null) {
emitter.error(new WebSocketSessionException("Broadcast Failure: " + cause.getMessage(), cause));
return;
}
}
emitter.next(message);
emitter.complete();
});
} catch (Throwable e) {
emitter.error(new WebSocketSessionException("Broadcast Failure: " + e.getMessage(), e));
}
}, FluxSink.OverflowStrategy.BUFFER);
}
/**
* Attempt to extract a single failure from a failure of {@link io.netty.channel.group.ChannelGroup#write}
* exception. {@link io.netty.channel.group.ChannelGroup} aggregates exceptions into a {@link ChannelGroupException}
* that has no useful stacktrace. If there was only one actual failure, we will just forward that instead of the
* {@link ChannelGroupException}.
*
* We also need to ignore {@link ClosedChannelException}s.
*/
@Nullable
private Throwable extractBroadcastFailure(Throwable failure) {
if (failure instanceof ChannelGroupException exception) {
Throwable singleCause = null;
for (Map.Entry<Channel, Throwable> entry : exception) {
Throwable entryCause = extractBroadcastFailure(entry.getValue());
if (entryCause != null) {
if (singleCause == null) {
singleCause = entryCause;
} else {
return failure;
}
}
}
return singleCause;
} else if (failure instanceof ClosedChannelException) {
// ClosedChannelException can happen when there is a race condition between the call to writeAndFlush and
// the closing of a channel. session.isOpen will still return true, but when to write is actually
// performed, the channel is closed. Since we would have skipped to write anyway had we known the channel
// would go away, we can safely ignore this error.
return null;
} else {
return failure;
}
}
}
| NettyServerWebSocketBroadcaster |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/time/FastDatePrinter.java | {
"start": 8647,
"end": 9061
} | interface ____ extends Rule {
/**
* Appends the specified value to the output buffer based on the rule implementation.
*
* @param buffer the output buffer.
* @param value the value to be appended.
* @throws IOException if an I/O error occurs.
*/
void appendTo(Appendable buffer, int value) throws IOException;
}
/**
* Inner | NumberRule |
java | spring-projects__spring-boot | module/spring-boot-data-commons/src/main/java/org/springframework/boot/data/autoconfigure/metrics/DataMetricsProperties.java | {
"start": 1037,
"end": 1208
} | class ____ {
private final Repository repository = new Repository();
public Repository getRepository() {
return this.repository;
}
public static | DataMetricsProperties |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/KTableSource.java | {
"start": 2036,
"end": 3819
} | class ____<KIn, VIn> implements ProcessorSupplier<KIn, VIn, KIn, Change<VIn>> {
private static final Logger LOG = LoggerFactory.getLogger(KTableSource.class);
private final String storeName;
private final StoreFactory storeFactory;
private String queryableName;
private boolean sendOldValues;
public KTableSource(
final MaterializedInternal<KIn, VIn, KeyValueStore<Bytes, byte[]>> materialized) {
this.storeName = materialized.storeName();
Objects.requireNonNull(storeName, "storeName can't be null");
this.queryableName = materialized.queryableStoreName();
this.sendOldValues = false;
this.storeFactory = new KeyValueStoreMaterializer<>(materialized);
}
public String queryableName() {
return queryableName;
}
@Override
public Processor<KIn, VIn, KIn, Change<VIn>> get() {
return new KTableSourceProcessor();
}
@Override
public Set<StoreBuilder<?>> stores() {
if (materialized()) {
return Set.of(new StoreFactory.FactoryWrappingStoreBuilder<>(storeFactory));
} else {
return null;
}
}
// when source ktable requires sending old values, we just
// need to set the queryable name as the store name to enforce materialization
public void enableSendingOldValues() {
this.sendOldValues = true;
this.queryableName = storeName;
}
// when the source ktable requires materialization from downstream, we just
// need to set the queryable name as the store name to enforce materialization
public void materialize() {
this.queryableName = storeName;
}
public boolean materialized() {
return queryableName != null;
}
private | KTableSource |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/completion/AlibabaCloudSearchCompletionTaskSettingsTests.java | {
"start": 824,
"end": 2995
} | class ____ extends AbstractWireSerializingTestCase<
AlibabaCloudSearchCompletionTaskSettings> {
public static AlibabaCloudSearchCompletionTaskSettings createRandom() {
Map<String, Object> parameters = randomBoolean() ? Map.of() : null;
return new AlibabaCloudSearchCompletionTaskSettings(parameters);
}
public void testFromMap() {
MatcherAssert.assertThat(
AlibabaCloudSearchCompletionTaskSettings.fromMap(Map.of()),
is(new AlibabaCloudSearchCompletionTaskSettings((Map<String, Object>) null))
);
}
public void testUpdatedTaskSettings() {
var initialSettings = createRandom();
var newSettings = createRandom();
Map<String, Object> newSettingsMap = new HashMap<>();
if (newSettings.getParameters() != null) {
newSettingsMap.put(PARAMETERS, newSettings.getParameters());
}
AlibabaCloudSearchCompletionTaskSettings updatedSettings = (AlibabaCloudSearchCompletionTaskSettings) initialSettings
.updatedTaskSettings(Collections.unmodifiableMap(newSettingsMap));
if (newSettings.getParameters() == null) {
assertEquals(initialSettings.getParameters(), updatedSettings.getParameters());
} else {
assertEquals(newSettings.getParameters(), updatedSettings.getParameters());
}
}
@Override
protected Writeable.Reader<AlibabaCloudSearchCompletionTaskSettings> instanceReader() {
return AlibabaCloudSearchCompletionTaskSettings::new;
}
@Override
protected AlibabaCloudSearchCompletionTaskSettings createTestInstance() {
return createRandom();
}
@Override
protected AlibabaCloudSearchCompletionTaskSettings mutateInstance(AlibabaCloudSearchCompletionTaskSettings instance)
throws IOException {
return null;
}
public static Map<String, Object> getTaskSettingsMap(@Nullable Map<String, Object> params) {
var map = new HashMap<String, Object>();
if (params != null) {
map.put(PARAMETERS, params);
}
return map;
}
}
| AlibabaCloudSearchCompletionTaskSettingsTests |
java | resilience4j__resilience4j | resilience4j-hedge/src/main/java/io/github/resilience4j/hedge/internal/HedgeDurationSupplier.java | {
"start": 950,
"end": 3336
} | interface ____ extends Supplier<Duration>{
/**
* Creates a HedgeDurationSupplier from the given HedgeConfig
*
* @param config - the given HedgeConfiguration
* @return the configured HedgeDurationSupplier
*/
static HedgeDurationSupplier fromConfig(HedgeConfig config) {
if (config.getDurationSupplier() == HedgeConfig.HedgeDurationSupplierType.PRECONFIGURED) {
return ofPreconfigured(config.getCutoff());
} else {
return ofAveragePlus(
config.isShouldUseFactorAsPercentage(),
config.getHedgeTimeFactor(),
config.isShouldMeasureErrors(),
config.getWindowSize());
}
}
/**
* Creates a tracker of average response time and uses some increment over it for computing when to hedge.
*
* @param shouldUseFactorAsPercentage whether to use factor as an integer percent of average or an absolute number
* of milliseconds to add to the average ot determine hedge start time
* @param factor the factor either as percentage or milliseconds
* @param shouldMeasureErrors whether to count errors when calculating average time
* @param windowSize only supports fixed size window, not time-based
* @return the configured HedgeDurationSupplier
*/
static HedgeDurationSupplier ofAveragePlus(boolean shouldUseFactorAsPercentage, int factor, boolean shouldMeasureErrors, int windowSize) {
return new AverageDurationSupplier(shouldUseFactorAsPercentage, factor, shouldMeasureErrors, windowSize);
}
/**
* Creates a simple Duration Supplier that returns a pre-defined delay before hedging.
*
* @param cutoff the preconfigured cutoff time as a Duration
* @return the configured HedgeDurationSupplier
*/
static HedgeDurationSupplier ofPreconfigured(Duration cutoff) {
return new PreconfiguredDurationSupplier(cutoff);
}
/**
* accepts events and uses them to perform computation, resulting in the proposed Duration.
* @param type indicates whether the result is primary or not, and successful or not
* @param duration duration it took to complete the hedged call
*/
void accept(HedgeEvent.Type type, Duration duration);
}
| HedgeDurationSupplier |
java | apache__camel | components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsTemporaryQueueEndpoint.java | {
"start": 1224,
"end": 2860
} | class ____ extends JmsQueueEndpoint implements DestinationEndpoint {
private Destination jmsDestination;
public JmsTemporaryQueueEndpoint(String uri, JmsComponent component, String destination, JmsConfiguration configuration) {
super(uri, component, destination, configuration);
setDestinationType("temp-queue");
}
public JmsTemporaryQueueEndpoint(String uri, JmsComponent component, String destination, JmsConfiguration configuration,
QueueBrowseStrategy queueBrowseStrategy) {
super(uri, component, destination, configuration, queueBrowseStrategy);
setDestinationType("temp-queue");
}
public JmsTemporaryQueueEndpoint(String endpointUri, String destination) {
super(endpointUri, destination);
setDestinationType("temp-queue");
}
/**
* This endpoint is a singleton so that the temporary destination instances are shared across all producers and
* consumers of the same endpoint URI
*
* @return true
*/
@Override
public boolean isSingleton() {
return true;
}
@Override
public Destination getJmsDestination(Session session) throws JMSException {
lock.lock();
try {
if (jmsDestination == null) {
jmsDestination = createJmsDestination(session);
}
return jmsDestination;
} finally {
lock.unlock();
}
}
protected Destination createJmsDestination(Session session) throws JMSException {
return session.createTemporaryQueue();
}
}
| JmsTemporaryQueueEndpoint |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/bean/override/mockito/MockitoBeanConfigurationErrorTests.java | {
"start": 3926,
"end": 4025
} | class ____ {
@MockitoBean(enforceOverride = true)
String example;
}
static | FailureByTypeLookup |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/ArrayListFieldTest_1.java | {
"start": 747,
"end": 1008
} | class ____<T> extends V<T> {
private ArrayList<T> value;
public ArrayList<T> getValue() {
return value;
}
public void setValue(ArrayList<T> value) {
this.value = value;
}
}
public static | V0 |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/insertordering/InsertOrderingWithSecondaryTable.java | {
"start": 2765,
"end": 3385
} | class ____ {
@Id
@SequenceGenerator(name = "SHAPE_ID_GENERATOR", sequenceName = "SHAPE_SEQ", allocationSize = 1)
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SHAPE_ID_GENERATOR")
@Column(name = "SHAPE_ID", insertable = false, updatable = false)
private Long id;
private String name;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
@Entity(name = "ShapePolygonEntity")
@DiscriminatorValue("POLYGON")
public static | ShapeEntity |
java | google__dagger | javatests/dagger/internal/codegen/MembersInjectionTest.java | {
"start": 48409,
"end": 48860
} | class ____ {",
" @Inject Set<String> multibindingStrings;",
" @Inject Bar() {}",
"}");
Source componentModule =
CompilerTests.javaSource(
"test.MyComponentModule",
"package test;",
"",
"import dagger.Module;",
"import dagger.Provides;",
"import dagger.multibindings.IntoSet;",
"",
"@Module",
" | Bar |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/AdditionalAnswers.java | {
"start": 1662,
"end": 2404
} | class ____ {
/**
* Returns the first parameter of an invocation.
*
* <p>
* This additional answer could be used at stub time using the
* <code>then|do|will{@link org.mockito.stubbing.Answer}</code> methods. For example :
*
* <pre class="code"><code class="java">
* given(carKeyFob.authenticate(carKey)).will(returnsFirstArg());
* doAnswer(returnsFirstArg()).when(carKeyFob).authenticate(carKey);
* </code></pre>
* </p>
*
* <p>
* This methods works with varargs as well, mockito will expand the vararg to return the argument
* at the given position. Suppose the following signature :
*
* <pre class="code"><code class="java">
* | AdditionalAnswers |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/MapReduceTrackingUriPlugin.java | {
"start": 1208,
"end": 2113
} | class ____ extends TrackingUriPlugin implements
Configurable {
@Override
public void setConf(Configuration conf) {
Configuration jobConf = null;
// Force loading of mapred configuration.
if (conf != null) {
jobConf = new JobConf(conf);
} else {
jobConf = new JobConf();
}
super.setConf(jobConf);
}
/**
* Gets the URI to access the given application on MapReduce history server
* @param id the ID for which a URI is returned
* @return the tracking URI
* @throws URISyntaxException
*/
@Override
public URI getTrackingUri(ApplicationId id) throws URISyntaxException {
String jobSuffix = id.toString().replaceFirst("^application_", "job_");
String historyServerAddress =
MRWebAppUtil.getJHSWebappURLWithScheme(getConf());
return new URI(historyServerAddress + "/jobhistory/job/"+ jobSuffix);
}
}
| MapReduceTrackingUriPlugin |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/manual/ReducePerformance.java | {
"start": 7912,
"end": 9143
} | class ____
implements CopyableIterator<Tuple2<String, Integer>>, Serializable {
private final int keyRange;
private Tuple2<String, Integer> reuse = new Tuple2<>();
private int rndSeed = 11;
private Random rnd;
public TupleStringIntIterator(int keyRange) {
this.keyRange = keyRange;
this.rnd = new Random(this.rndSeed);
}
public TupleStringIntIterator(int keyRange, int rndSeed) {
this.keyRange = keyRange;
this.rndSeed = rndSeed;
this.rnd = new Random(rndSeed);
}
@Override
public boolean hasNext() {
return true;
}
@Override
public Tuple2<String, Integer> next() {
reuse.f0 = String.valueOf(rnd.nextInt(keyRange));
reuse.f1 = 1;
return reuse;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
@Override
public CopyableIterator<Tuple2<String, Integer>> copy() {
return new TupleStringIntIterator(keyRange, rndSeed + rnd.nextInt(10000));
}
}
private static final | TupleStringIntIterator |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/TestDescription.java | {
"start": 784,
"end": 992
} | class ____ extends Description {
private final String value;
public TestDescription(String value) {
this.value = value;
}
@Override
public String value() {
return value;
}
}
| TestDescription |
java | qos-ch__slf4j | integration/src/test/java/org/slf4j/test_osgi/FrameworkErrorListener.java | {
"start": 1380,
"end": 2201
} | class ____ implements FrameworkListener {
public List<FrameworkEvent> errorList = new ArrayList<>();
@Override
public void frameworkEvent(FrameworkEvent fe) {
if (fe.getType() == FrameworkEvent.ERROR) {
errorList.add(fe);
}
}
private void dump(FrameworkEvent fe) {
Throwable t = fe.getThrowable();
String tString = null;
if (t != null) {
tString = t.toString();
}
System.out.println("Framework ERROR:" + ", source " + fe.getSource() + ", bundle=" + fe.getBundle() + ", ex=" + tString);
if (t != null) {
t.printStackTrace();
}
}
public void dumpAll() {
for (FrameworkEvent frameworkEvent : errorList) {
dump(frameworkEvent);
}
}
}
| FrameworkErrorListener |
java | google__dagger | javatests/dagger/functional/builder/BuilderTest.java | {
"start": 11432,
"end": 11514
} | interface ____<B> {
B subcomponentBuilder();
}
@Module
static | GenericParent |
java | resilience4j__resilience4j | resilience4j-spring-boot2/src/test/java/io/github/resilience4j/ratelimiter/monitoring/health/RateLimitersHealthIndicatorTest.java | {
"start": 842,
"end": 6297
} | class ____ {
@Test
public void health() throws Exception {
// given
RateLimiterConfig config = mock(RateLimiterConfig.class);
AtomicRateLimiter.AtomicRateLimiterMetrics metrics = mock(
AtomicRateLimiter.AtomicRateLimiterMetrics.class);
AtomicRateLimiter rateLimiter = mock(AtomicRateLimiter.class);
RateLimiterRegistry rateLimiterRegistry = mock(RateLimiterRegistry.class);
io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties instanceProperties =
mock(
io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties.class);
RateLimiterConfigurationProperties rateLimiterProperties = mock(
RateLimiterConfigurationProperties.class);
//when
when(rateLimiter.getRateLimiterConfig()).thenReturn(config);
when(rateLimiter.getName()).thenReturn("test");
when(rateLimiterProperties.findRateLimiterProperties("test"))
.thenReturn(Optional.of(instanceProperties));
when(instanceProperties.getRegisterHealthIndicator()).thenReturn(true);
when(instanceProperties.getAllowHealthIndicatorToFail()).thenReturn(true);
when(rateLimiter.getMetrics()).thenReturn(metrics);
when(rateLimiter.getDetailedMetrics()).thenReturn(metrics);
when(rateLimiterRegistry.getAllRateLimiters()).thenReturn(Set.of(rateLimiter));
when(config.getTimeoutDuration()).thenReturn(Duration.ofNanos(30L));
when(metrics.getAvailablePermissions())
.thenReturn(5, -1, -2);
when(metrics.getNumberOfWaitingThreads())
.thenReturn(0, 1, 2);
when(metrics.getNanosToWait())
.thenReturn(20L, 40L);
// then
RateLimitersHealthIndicator healthIndicator =
new RateLimitersHealthIndicator(rateLimiterRegistry, rateLimiterProperties, new SimpleStatusAggregator());
Health health = healthIndicator.health();
then(health.getStatus()).isEqualTo(Status.UP);
health = healthIndicator.health();
then(health.getStatus()).isEqualTo(Status.UNKNOWN);
health = healthIndicator.health();
then(health.getStatus()).isEqualTo(Status.DOWN);
then(health.getDetails().get("test")).isInstanceOf(Health.class);
then(((Health) health.getDetails().get("test")).getDetails())
.contains(
entry("availablePermissions", -2),
entry("numberOfWaitingThreads", 2)
);
}
@Test
public void healthIndicatorMaxImpactCanBeOverridden() throws Exception {
// given
RateLimiterConfig config = mock(RateLimiterConfig.class);
AtomicRateLimiter.AtomicRateLimiterMetrics metrics = mock(AtomicRateLimiter.AtomicRateLimiterMetrics.class);
AtomicRateLimiter rateLimiter = mock(AtomicRateLimiter.class);
RateLimiterRegistry rateLimiterRegistry = mock(RateLimiterRegistry.class);
io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties instanceProperties =
mock(io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties.class);
RateLimiterConfigurationProperties rateLimiterProperties = mock(RateLimiterConfigurationProperties.class);
//when
when(rateLimiter.getRateLimiterConfig()).thenReturn(config);
when(rateLimiter.getName()).thenReturn("test");
when(rateLimiterProperties.findRateLimiterProperties("test")).thenReturn(Optional.of(instanceProperties));
when(instanceProperties.getRegisterHealthIndicator()).thenReturn(true);
boolean allowHealthIndicatorToFail = false; // do not allow health indicator to fail
when(instanceProperties.getAllowHealthIndicatorToFail()).thenReturn(allowHealthIndicatorToFail);
when(rateLimiter.getMetrics()).thenReturn(metrics);
when(rateLimiter.getDetailedMetrics()).thenReturn(metrics);
when(rateLimiterRegistry.getAllRateLimiters()).thenReturn(Set.of(rateLimiter));
when(config.getTimeoutDuration()).thenReturn(Duration.ofNanos(30L));
when(metrics.getAvailablePermissions())
.thenReturn(-2);
when(metrics.getNumberOfWaitingThreads())
.thenReturn(2);
when(metrics.getNanosToWait())
.thenReturn(40L);
// then
RateLimitersHealthIndicator healthIndicator =
new RateLimitersHealthIndicator(rateLimiterRegistry, rateLimiterProperties, new SimpleStatusAggregator());
Health health = healthIndicator.health();
then(health.getStatus()).isEqualTo(Status.UNKNOWN);
then(((Health) health.getDetails().get("test")).getStatus()).isEqualTo(new Status("RATE_LIMITED"));
then(health.getDetails().get("test")).isInstanceOf(Health.class);
then(((Health) health.getDetails().get("test")).getDetails())
.contains(
entry("availablePermissions", -2),
entry("numberOfWaitingThreads", 2)
);
}
private SimpleEntry<String, ?> entry(String key, Object value) {
return new SimpleEntry<>(key, value);
}
}
| RateLimitersHealthIndicatorTest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/log/LogEndpointTest.java | {
"start": 1462,
"end": 1647
} | class ____ extends ContextTestSupport {
private static Exchange logged;
@AfterAll
public static void clean() {
logged = null;
}
private static | LogEndpointTest |
java | netty__netty | transport/src/main/java/io/netty/channel/ManualIoEventLoop.java | {
"start": 24505,
"end": 25386
} | class ____ implements IoHandlerContext {
long maxBlockingNanos = Long.MAX_VALUE;
@Override
public boolean canBlock() {
assert inEventLoop();
return !hasTasks() && !hasScheduledTasks();
}
@Override
public long delayNanos(long currentTimeNanos) {
assert inEventLoop();
return Math.min(maxBlockingNanos, ManualIoEventLoop.this.delayNanos(currentTimeNanos, maxBlockingNanos));
}
@Override
public long deadlineNanos() {
assert inEventLoop();
long next = nextScheduledTaskDeadlineNanos();
long maxDeadlineNanos = ticker.nanoTime() + maxBlockingNanos;
if (next == -1) {
return maxDeadlineNanos;
}
return Math.min(next, maxDeadlineNanos);
}
};
}
| BlockingIoHandlerContext |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/SpliteratorAssertBaseTest.java | {
"start": 835,
"end": 1427
} | class ____ extends BaseTestTemplate<SpliteratorAssert<String>, Spliterator<String>> {
protected Spliterators spliterators;
@Override
protected SpliteratorAssert<String> create_assertions() {
return new SpliteratorAssert<>(new StringSpliterator());
}
@Override
protected void inject_internal_objects() {
super.inject_internal_objects();
spliterators = mock(Spliterators.class);
assertions.spliterators = spliterators;
}
protected Spliterators getSpliterators(SpliteratorAssert<?> assertions) {
return assertions.spliterators;
}
}
| SpliteratorAssertBaseTest |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/interceptor/producer/ProducerWithFinalInterceptedMethodTest.java | {
"start": 1588,
"end": 1779
} | class ____ {
@AroundInvoke
Object intercept(InvocationContext ctx) throws Exception {
return "intercepted: " + ctx.proceed();
}
}
static | MyInterceptor |
java | google__error-prone | core/src/test/java/com/google/errorprone/ErrorProneCompilerIntegrationTest.java | {
"start": 19310,
"end": 20810
} | class ____ {
void f() {
return;
}
}
""")));
assertWithMessage(outputStream.toString()).that(exitCode).isEqualTo(Result.ERROR);
assertThat(diagnosticHelper.getDiagnostics()).hasSize(1);
Diagnostic<? extends JavaFileObject> diag =
Iterables.getOnlyElement(diagnosticHelper.getDiagnostics());
assertThat(diag.getLineNumber()).isEqualTo(4);
assertThat(diag.getColumnNumber()).isEqualTo(5);
assertThat(diag.getSource().toUri().toString()).endsWith("test/Test.java");
assertThat(diag.getMessage(ENGLISH))
.contains("An unhandled exception was thrown by the Error Prone static analysis plugin");
}
@Test
public void compilePolicy_bytodo() {
InvalidCommandLineOptionException e =
assertThrows(
InvalidCommandLineOptionException.class,
() ->
compiler.compile(
new String[] {"-XDcompilePolicy=bytodo"},
Collections.<JavaFileObject>emptyList()));
assertThat(e).hasMessageThat().contains("-XDcompilePolicy=bytodo is not supported");
}
@Test
public void compilePolicy_byfile() {
Result exitCode =
compiler.compile(
new String[] {"-XDcompilePolicy=byfile"},
Arrays.asList(
forSourceLines(
"Test.java",
"""
package test;
| Test |
java | spring-projects__spring-boot | core/spring-boot-test/src/main/java/org/springframework/boot/test/json/ObjectContent.java | {
"start": 1141,
"end": 2047
} | class ____<T> implements AssertProvider<ObjectContentAssert<T>> {
private final @Nullable ResolvableType type;
private final T object;
/**
* Create a new {@link ObjectContent} instance.
* @param type the type under test (or {@code null} if not known)
* @param object the actual object content
*/
public ObjectContent(@Nullable ResolvableType type, T object) {
Assert.notNull(object, "'object' must not be null");
this.type = type;
this.object = object;
}
@Override
public ObjectContentAssert<T> assertThat() {
return new ObjectContentAssert<>(this.object);
}
/**
* Return the actual object content.
* @return the object content
*/
public T getObject() {
return this.object;
}
@Override
public String toString() {
String createdFrom = (this.type != null) ? " created from " + this.type : "";
return "ObjectContent " + this.object + createdFrom;
}
}
| ObjectContent |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/util/ModuleUtils.java | {
"start": 10571,
"end": 10671
} | class ____ name '" + binaryName + "'.", e);
}
}
}
/**
* {@link ModuleReference} resource | with |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/execution/StagingSinkJobStatusHook.java | {
"start": 1310,
"end": 1911
} | class ____ implements JobStatusHook {
private final StagedTable stagedTable;
public StagingSinkJobStatusHook(StagedTable stagedTable) {
this.stagedTable = stagedTable;
}
@Override
public void onCreated(JobID jobId) {
stagedTable.begin();
}
@Override
public void onFinished(JobID jobId) {
stagedTable.commit();
}
@Override
public void onFailed(JobID jobId, Throwable throwable) {
stagedTable.abort();
}
@Override
public void onCanceled(JobID jobId) {
stagedTable.abort();
}
}
| StagingSinkJobStatusHook |
java | elastic__elasticsearch | x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/operator/comparison/NotEquals.java | {
"start": 712,
"end": 1702
} | class ____ extends BinaryComparison implements Negatable<BinaryComparison> {
public NotEquals(Source source, Expression left, Expression right, ZoneId zoneId) {
super(source, left, right, BinaryComparisonOperation.NEQ, zoneId);
}
@Override
protected NodeInfo<NotEquals> info() {
return NodeInfo.create(this, NotEquals::new, left(), right(), zoneId());
}
@Override
protected NotEquals replaceChildren(Expression newLeft, Expression newRight) {
return new NotEquals(source(), newLeft, newRight, zoneId());
}
@Override
public NotEquals swapLeftAndRight() {
return new NotEquals(source(), right(), left(), zoneId());
}
@Override
public BinaryComparison negate() {
return new Equals(source(), left(), right(), zoneId());
}
@Override
public BinaryComparison reverse() {
return this;
}
@Override
protected boolean isCommutative() {
return true;
}
}
| NotEquals |
java | quarkusio__quarkus | extensions/quartz/runtime/src/main/java/io/quarkus/quartz/runtime/QuartzSchedulerImpl.java | {
"start": 42014,
"end": 48788
} | interface ____ {
Consumer<ScheduledExecution> task();
Class<? extends Consumer<ScheduledExecution>> taskClass();
Function<ScheduledExecution, Uni<Void>> asyncTask();
Class<? extends Function<ScheduledExecution, Uni<Void>>> asyncTaskClass();
boolean isRunOnVirtualThread();
SkipPredicate skipPredicate();
Class<? extends SkipPredicate> skipPredicateClass();
boolean nonconcurrent();
}
static final String SCHEDULED_METADATA = "scheduled_metadata";
static final String EXECUTION_METADATA_TASK_CLASS = "execution_metadata_task_class";
static final String EXECUTION_METADATA_ASYNC_TASK_CLASS = "execution_metadata_async_task_class";
static final String EXECUTION_METADATA_RUN_ON_VIRTUAL_THREAD = "execution_metadata_run_on_virtual_thread";
static final String EXECUTION_METADATA_NONCONCURRENT = "execution_metadata_nonconcurrent";
static final String EXECUTION_METADATA_SKIP_PREDICATE_CLASS = "execution_metadata_skip_predicate_class";
QuartzTrigger createJobDefinitionQuartzTrigger(ExecutionMetadata executionMetadata, SyntheticScheduled scheduled,
org.quartz.Trigger oldTrigger) {
ScheduledInvoker invoker;
Consumer<ScheduledExecution> task = executionMetadata.task();
Function<ScheduledExecution, Uni<Void>> asyncTask = executionMetadata.asyncTask();
boolean runOnVirtualThread = executionMetadata.isRunOnVirtualThread();
SkipPredicate skipPredicate = executionMetadata.skipPredicate();
if (task != null) {
// Use the default invoker to make sure the CDI request context is activated
invoker = new DefaultInvoker() {
@Override
public CompletionStage<Void> invokeBean(ScheduledExecution execution) {
try {
task.accept(execution);
return CompletableFuture.completedStage(null);
} catch (Exception e) {
return CompletableFuture.failedStage(e);
}
}
@Override
public boolean isRunningOnVirtualThread() {
return runOnVirtualThread;
}
};
} else {
invoker = new DefaultInvoker() {
@Override
public CompletionStage<Void> invokeBean(ScheduledExecution execution) {
try {
return asyncTask.apply(execution).subscribeAsCompletionStage();
} catch (Exception e) {
return CompletableFuture.failedStage(e);
}
}
@Override
public boolean isBlocking() {
return false;
}
};
}
JobBuilder jobBuilder = createJobBuilder(scheduled.identity(), QuartzSchedulerImpl.class.getName(),
executionMetadata.nonconcurrent());
if (storeType.isDbStore()) {
jobBuilder.usingJobData(SCHEDULED_METADATA, scheduled.toJson())
.usingJobData(EXECUTION_METADATA_RUN_ON_VIRTUAL_THREAD, Boolean.toString(runOnVirtualThread));
if (executionMetadata.taskClass() != null) {
jobBuilder.usingJobData(EXECUTION_METADATA_TASK_CLASS, executionMetadata.taskClass().getName());
} else if (executionMetadata.asyncTaskClass() != null) {
jobBuilder.usingJobData(EXECUTION_METADATA_ASYNC_TASK_CLASS, executionMetadata.asyncTaskClass().getName());
}
if (executionMetadata.skipPredicateClass() != null) {
jobBuilder.usingJobData(EXECUTION_METADATA_SKIP_PREDICATE_CLASS,
executionMetadata.skipPredicateClass().getName());
}
}
JobDetail jobDetail = jobBuilder.requestRecovery().build();
org.quartz.Trigger trigger;
Optional<TriggerBuilder<?>> triggerBuilder = createTrigger(scheduled.identity(), scheduled, runtimeConfig,
jobDetail);
if (triggerBuilder.isPresent()) {
if (oldTrigger != null) {
trigger = triggerBuilder.get().startAt(oldTrigger.getNextFireTime()).build();
} else {
trigger = triggerBuilder.get().build();
}
} else {
if (oldTrigger != null) {
throw new IllegalStateException(
"Job [" + scheduled.identity() + "] that was previously scheduled programmatically cannot be disabled");
}
// Job is disabled
return null;
}
JobInstrumenter instrumenter = null;
if (schedulerConfig.tracingEnabled() && jobInstrumenter.isResolvable()) {
instrumenter = jobInstrumenter.get();
}
invoker = initInvoker(invoker, events, scheduled.concurrentExecution(), skipPredicate, instrumenter,
vertx, task != null && runtimeConfig.runBlockingScheduledMethodOnQuartzThread(),
SchedulerUtils.parseExecutionMaxDelayAsMillis(scheduled), blockingExecutor);
QuartzTrigger quartzTrigger = new QuartzTrigger(trigger.getKey(),
new Function<>() {
@Override
public org.quartz.Trigger apply(TriggerKey triggerKey) {
try {
return scheduler.getTrigger(triggerKey);
} catch (SchedulerException e) {
throw new IllegalStateException(e);
}
}
}, invoker,
SchedulerUtils.parseOverdueGracePeriod(scheduled, defaultOverdueGracePeriod),
runtimeConfig.runBlockingScheduledMethodOnQuartzThread(), true, null);
QuartzTrigger existing = scheduledTasks.putIfAbsent(scheduled.identity(), quartzTrigger);
if (existing != null) {
throw new IllegalStateException("A job with this identity is already scheduled: " + scheduled.identity());
}
try {
if (oldTrigger != null) {
scheduler.rescheduleJob(trigger.getKey(), trigger);
LOGGER.debugf("Rescheduled job definition with config %s", scheduled);
} else {
scheduler.scheduleJob(jobDetail, trigger);
LOGGER.debugf("Scheduled job definition with config %s", scheduled);
}
} catch (SchedulerException e) {
throw new IllegalStateException(e);
}
return quartzTrigger;
}
/**
* @see Nonconcurrent
*/
@DisallowConcurrentExecution
static | ExecutionMetadata |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/customproviders/ImpliedReadBodyRequestFilterTest.java | {
"start": 901,
"end": 2573
} | class ____ {
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(HelloResource.class);
}
});
@Test
public void testMethodWithBody() {
RestAssured.with()
.formParam("name", "Quarkus")
.post("/hello")
.then().body(Matchers.equalTo("hello Quarkus!!!!!!!"));
}
@Test
public void testMethodWithUndeclaredBody() {
RestAssured.with()
.formParam("name", "Quarkus")
.post("/hello/empty")
.then().body(Matchers.equalTo("hello !!!!!!!"));
}
@Test
public void testMethodWithStringBody() {
// make sure that a form-reading filter doesn't prevent non-form request bodies from being deserialised
RestAssured.with()
.formParam("name", "Quarkus")
.post("/hello/string")
.then().body(Matchers.equalTo("hello name=Quarkus!!!!!!!"));
RestAssured.with()
.body("Quarkus")
.post("/hello/string")
.then().body(Matchers.equalTo("hello Quarkus?"));
}
@Test
public void testMethodWithoutBody() {
RestAssured.with()
.queryParam("name", "Quarkus")
.get("/hello")
.then().body(Matchers.equalTo("hello Quarkus!"));
}
@Path("hello")
public static | ImpliedReadBodyRequestFilterTest |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/web/method/HandlerMethodTests.java | {
"start": 7784,
"end": 8054
} | class ____<C> implements GenericInterface<Long, C> {
@Override
public void processOne(Long value1) {
}
@Override
public void processOneAndTwo(Long value1, C value2) {
}
public abstract void processTwo(@Max(42) C value);
}
static | GenericAbstractSuperclass |
java | apache__kafka | storage/src/test/java/org/apache/kafka/tiered/storage/integration/FetchFromLeaderWithCorruptedCheckpointTest.java | {
"start": 1387,
"end": 4082
} | class ____ extends TieredStorageTestHarness {
@Override
public int brokerCount() {
return 2;
}
@Override
protected void writeTestSpecifications(TieredStorageTestBuilder builder) {
final int broker0 = 0;
final int broker1 = 1;
final String topicA = "topicA";
final int p0 = 0;
final int partitionCount = 1;
final int replicationFactor = 2;
final int maxBatchCountPerSegment = 1;
final boolean enableRemoteLogStorage = true;
final Map<Integer, List<Integer>> assignment = mkMap(mkEntry(p0, List.of(broker0, broker1)));
final List<String> checkpointFiles = List.of(
ReplicaManager.HighWatermarkFilename(),
LogManager.RECOVERY_POINT_CHECKPOINT_FILE,
CleanShutdownFileHandler.CLEAN_SHUTDOWN_FILE_NAME);
builder.createTopic(topicA, partitionCount, replicationFactor, maxBatchCountPerSegment, assignment,
enableRemoteLogStorage)
// send records to partition 0
.expectSegmentToBeOffloaded(broker0, topicA, p0, 0, new KeyValueSpec("k0", "v0"))
.expectSegmentToBeOffloaded(broker0, topicA, p0, 1, new KeyValueSpec("k1", "v1"))
.expectEarliestLocalOffsetInLogDirectory(topicA, p0, 2L)
.produce(topicA, p0, new KeyValueSpec("k0", "v0"), new KeyValueSpec("k1", "v1"),
new KeyValueSpec("k2", "v2"))
.expectFetchFromTieredStorage(broker0, topicA, p0, 2)
.consume(topicA, p0, 0L, 3, 2)
// shutdown the brokers
.stop(broker1)
.stop(broker0)
// delete the checkpoint files
.eraseBrokerStorage(broker0, (dir, name) -> checkpointFiles.contains(name), true)
// start the broker first whose checkpoint files were deleted.
.start(broker0)
.start(broker1)
// send some records to partition 0
// Note that the segment 2 gets offloaded for p0, but we cannot expect those events deterministically
// because the rlm-task-thread runs in background and this framework doesn't support it.
.expectSegmentToBeOffloaded(broker0, topicA, p0, 3, new KeyValueSpec("k3", "v3"))
.expectEarliestLocalOffsetInLogDirectory(topicA, p0, 4L)
.produce(topicA, p0, new KeyValueSpec("k3", "v3"), new KeyValueSpec("k4", "v4"))
.expectFetchFromTieredStorage(broker0, topicA, p0, 4)
.consume(topicA, p0, 0L, 5, 4);
}
}
| FetchFromLeaderWithCorruptedCheckpointTest |
java | apache__flink | flink-core-api/src/main/java/org/apache/flink/api/java/tuple/builder/Tuple3Builder.java | {
"start": 1420,
"end": 1802
} | class ____<T0, T1, T2> {
private List<Tuple3<T0, T1, T2>> tuples = new ArrayList<>();
public Tuple3Builder<T0, T1, T2> add(T0 f0, T1 f1, T2 f2) {
tuples.add(new Tuple3<>(f0, f1, f2));
return this;
}
@SuppressWarnings("unchecked")
public Tuple3<T0, T1, T2>[] build() {
return tuples.toArray(new Tuple3[tuples.size()]);
}
}
| Tuple3Builder |
java | apache__camel | components/camel-influxdb/src/test/java/org/apache/camel/component/influxdb/InfluxDbProducerTest.java | {
"start": 1121,
"end": 3640
} | class ____ extends AbstractInfluxDbTest {
@EndpointInject("mock:test")
MockEndpoint successEndpoint;
@EndpointInject("mock:error")
MockEndpoint errorEndpoint;
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
errorHandler(deadLetterChannel("mock:error").redeliveryDelay(0).maximumRedeliveries(0));
//test route
from("direct:test")
.to("influxdb:influxDbBean?databaseName={{influxdb.testDb}}")
.to("mock:test");
}
};
}
@BeforeEach
public void resetEndpoints() {
errorEndpoint.reset();
successEndpoint.reset();
}
@Test
public void writePointFromMapAndStaticDbName() throws InterruptedException {
errorEndpoint.expectedMessageCount(0);
successEndpoint.expectedMessageCount(1);
Map<String, Object> pointMap = createMapPoint();
sendBody("direct:test", pointMap);
errorEndpoint.assertIsSatisfied();
successEndpoint.assertIsSatisfied();
}
@Test
public void writePointFromMapAndDynamicDbName() throws InterruptedException {
errorEndpoint.expectedMessageCount(0);
successEndpoint.expectedMessageCount(1);
Map<String, Object> pointMap = createMapPoint();
Map<String, Object> header = createHeader();
sendBody("direct:test", pointMap, header);
errorEndpoint.assertIsSatisfied();
successEndpoint.assertIsSatisfied();
}
@Test
public void missingMeassurementNameFails() throws InterruptedException {
errorEndpoint.expectedMessageCount(1);
successEndpoint.expectedMessageCount(0);
Map<String, Object> pointMap = new HashMap<>();
pointMap.remove(InfluxDbConstants.MEASUREMENT_NAME);
sendBody("direct:test", pointMap);
errorEndpoint.assertIsSatisfied();
successEndpoint.assertIsSatisfied();
}
private Map<String, Object> createHeader() {
Map<String, Object> header = new HashMap<>();
header.put(InfluxDbConstants.DBNAME_HEADER, "myOtherDatabase");
return header;
}
private Map<String, Object> createMapPoint() {
Map<String, Object> pointMap = new HashMap<>();
pointMap.put(InfluxDbConstants.MEASUREMENT_NAME, "MyTestMeasurement");
pointMap.put("CPU", 1);
return pointMap;
}
}
| InfluxDbProducerTest |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/aspectj/annotation/AbstractAspectJAdvisorFactory.java | {
"start": 2142,
"end": 5365
} | class ____ implements AspectJAdvisorFactory {
private static final Class<?>[] ASPECTJ_ANNOTATION_CLASSES = new Class<?>[] {
Pointcut.class, Around.class, Before.class, After.class, AfterReturning.class, AfterThrowing.class};
private static final String AJC_MAGIC = "ajc$";
/**
* System property that instructs Spring to ignore ajc-compiled aspects
* for Spring AOP proxying, restoring traditional Spring behavior for
* scenarios where both weaving and AspectJ auto-proxying are enabled.
* <p>The default is "false". Consider switching this to "true" if you
* encounter double execution of your aspects in a given build setup.
* Note that we recommend restructuring your AspectJ configuration to
* avoid such double exposure of an AspectJ aspect to begin with.
* @since 6.1.15
*/
public static final String IGNORE_AJC_PROPERTY_NAME = "spring.aop.ajc.ignore";
private static final boolean shouldIgnoreAjcCompiledAspects =
SpringProperties.getFlag(IGNORE_AJC_PROPERTY_NAME);
/** Logger available to subclasses. */
protected final Log logger = LogFactory.getLog(getClass());
protected final ParameterNameDiscoverer parameterNameDiscoverer = new AspectJAnnotationParameterNameDiscoverer();
@Override
public boolean isAspect(Class<?> clazz) {
return (AnnotationUtils.findAnnotation(clazz, Aspect.class) != null &&
(!shouldIgnoreAjcCompiledAspects || !compiledByAjc(clazz)));
}
@Override
public void validate(Class<?> aspectClass) throws AopConfigException {
AjType<?> ajType = AjTypeSystem.getAjType(aspectClass);
if (!ajType.isAspect()) {
throw new NotAnAtAspectException(aspectClass);
}
if (ajType.getPerClause().getKind() == PerClauseKind.PERCFLOW) {
throw new AopConfigException(aspectClass.getName() + " uses percflow instantiation model: " +
"This is not supported in Spring AOP.");
}
if (ajType.getPerClause().getKind() == PerClauseKind.PERCFLOWBELOW) {
throw new AopConfigException(aspectClass.getName() + " uses percflowbelow instantiation model: " +
"This is not supported in Spring AOP.");
}
}
/**
* Find and return the first AspectJ annotation on the given method
* (there <i>should</i> only be one anyway...).
*/
@SuppressWarnings("unchecked")
protected static @Nullable AspectJAnnotation findAspectJAnnotationOnMethod(Method method) {
for (Class<?> annotationType : ASPECTJ_ANNOTATION_CLASSES) {
AspectJAnnotation annotation = findAnnotation(method, (Class<Annotation>) annotationType);
if (annotation != null) {
return annotation;
}
}
return null;
}
private static @Nullable AspectJAnnotation findAnnotation(Method method, Class<? extends Annotation> annotationType) {
Annotation annotation = AnnotationUtils.findAnnotation(method, annotationType);
if (annotation != null) {
return new AspectJAnnotation(annotation);
}
else {
return null;
}
}
private static boolean compiledByAjc(Class<?> clazz) {
for (Field field : clazz.getDeclaredFields()) {
if (field.getName().startsWith(AJC_MAGIC)) {
return true;
}
}
return false;
}
/**
* Enum for AspectJ annotation types.
* @see AspectJAnnotation#getAnnotationType()
*/
protected | AbstractAspectJAdvisorFactory |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_1558/java8/Issue1558Test.java | {
"start": 481,
"end": 656
} | class ____ {
@ProcessorTest
public void testShouldCompile() {
Car2 car = new Car2();
Car target = CarMapper.INSTANCE.toCar( car );
}
}
| Issue1558Test |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/context/properties/source/ConfigurationPropertySources.java | {
"start": 1563,
"end": 7695
} | class ____ {
/**
* The name of the {@link PropertySource} {@link #attach(Environment) adapter}.
*/
private static final String ATTACHED_PROPERTY_SOURCE_NAME = "configurationProperties";
private ConfigurationPropertySources() {
}
/**
* Create a new {@link PropertyResolver} that resolves property values against an
* underlying set of {@link PropertySources}. Provides an
* {@link ConfigurationPropertySource} aware and optimized alternative to
* {@link PropertySourcesPropertyResolver}.
* @param propertySources the set of {@link PropertySource} objects to use
* @return a {@link ConfigurablePropertyResolver} implementation
* @since 2.5.0
*/
public static ConfigurablePropertyResolver createPropertyResolver(MutablePropertySources propertySources) {
return new ConfigurationPropertySourcesPropertyResolver(propertySources);
}
/**
* Determines if the specific {@link PropertySource} is the
* {@link ConfigurationPropertySource} that was {@link #attach(Environment) attached}
* to the {@link Environment}.
* @param propertySource the property source to test
* @return {@code true} if this is the attached {@link ConfigurationPropertySource}
*/
public static boolean isAttachedConfigurationPropertySource(PropertySource<?> propertySource) {
return ATTACHED_PROPERTY_SOURCE_NAME.equals(propertySource.getName());
}
/**
* Attach a {@link ConfigurationPropertySource} support to the specified
* {@link Environment}. Adapts each {@link PropertySource} managed by the environment
* to a {@link ConfigurationPropertySource} and allows classic
* {@link PropertySourcesPropertyResolver} calls to resolve using
* {@link ConfigurationPropertyName configuration property names}.
* <p>
* The attached resolver will dynamically track any additions or removals from the
* underlying {@link Environment} property sources.
* @param environment the source environment (must be an instance of
* {@link ConfigurableEnvironment})
* @see #get(Environment)
*/
public static void attach(Environment environment) {
Assert.isInstanceOf(ConfigurableEnvironment.class, environment);
MutablePropertySources sources = ((ConfigurableEnvironment) environment).getPropertySources();
PropertySource<?> attached = getAttached(sources);
if (!isUsingSources(attached, sources)) {
attached = new ConfigurationPropertySourcesPropertySource(ATTACHED_PROPERTY_SOURCE_NAME,
new SpringConfigurationPropertySources(sources));
}
sources.remove(ATTACHED_PROPERTY_SOURCE_NAME);
sources.addFirst(attached);
}
@Contract("null, _ -> false")
private static boolean isUsingSources(@Nullable PropertySource<?> attached, MutablePropertySources sources) {
return attached instanceof ConfigurationPropertySourcesPropertySource
&& ((SpringConfigurationPropertySources) attached.getSource()).isUsingSources(sources);
}
static @Nullable PropertySource<?> getAttached(@Nullable MutablePropertySources sources) {
return (sources != null) ? sources.get(ATTACHED_PROPERTY_SOURCE_NAME) : null;
}
/**
* Return a set of {@link ConfigurationPropertySource} instances that have previously
* been {@link #attach(Environment) attached} to the {@link Environment}.
* @param environment the source environment (must be an instance of
* {@link ConfigurableEnvironment})
* @return an iterable set of configuration property sources
* @throws IllegalStateException if not configuration property sources have been
* attached
*/
public static Iterable<ConfigurationPropertySource> get(Environment environment) {
Assert.isInstanceOf(ConfigurableEnvironment.class, environment);
MutablePropertySources sources = ((ConfigurableEnvironment) environment).getPropertySources();
ConfigurationPropertySourcesPropertySource attached = (ConfigurationPropertySourcesPropertySource) sources
.get(ATTACHED_PROPERTY_SOURCE_NAME);
if (attached == null) {
return from(sources);
}
return attached.getSource();
}
/**
* Return {@link Iterable} containing a single new {@link ConfigurationPropertySource}
* adapted from the given Spring {@link PropertySource}. The single element can be
* {@code null} if the source cannot be adapted.
* @param source the Spring property source to adapt
* @return an {@link Iterable} containing a single newly adapted
* {@link SpringConfigurationPropertySource}
*/
public static Iterable<@Nullable ConfigurationPropertySource> from(PropertySource<?> source) {
return Collections.singleton(ConfigurationPropertySource.from(source));
}
/**
* Return {@link Iterable} containing new {@link ConfigurationPropertySource}
* instances adapted from the given Spring {@link PropertySource PropertySources}.
* <p>
* This method will flatten any nested property sources and will filter all
* {@link StubPropertySource stub property sources}. Updates to the underlying source,
* identified by changes in the sources returned by its iterator, will be
* automatically tracked. The underlying source should be thread safe, for example a
* {@link MutablePropertySources}
* @param sources the Spring property sources to adapt
* @return an {@link Iterable} containing newly adapted
* {@link SpringConfigurationPropertySource} instances
*/
public static Iterable<ConfigurationPropertySource> from(Iterable<PropertySource<?>> sources) {
return new SpringConfigurationPropertySources(sources);
}
private static Stream<PropertySource<?>> streamPropertySources(PropertySources sources) {
return sources.stream()
.flatMap(ConfigurationPropertySources::flatten)
.filter(ConfigurationPropertySources::isIncluded);
}
private static Stream<PropertySource<?>> flatten(PropertySource<?> source) {
if (source.getSource() instanceof ConfigurableEnvironment configurableEnvironment) {
return streamPropertySources(configurableEnvironment.getPropertySources());
}
return Stream.of(source);
}
private static boolean isIncluded(PropertySource<?> source) {
return !(source instanceof StubPropertySource)
&& !(source instanceof ConfigurationPropertySourcesPropertySource);
}
}
| ConfigurationPropertySources |
java | apache__dubbo | dubbo-cluster/src/test/java/org/apache/dubbo/rpc/cluster/router/affinity/AffinityRouteTest.java | {
"start": 1675,
"end": 10687
} | class ____ {
private static final Logger logger = LoggerFactory.getLogger(AffinityRouteTest.class);
private static BitList<Invoker<String>> invokers;
private static List<String> providerUrls;
@BeforeAll
public static void setUp() {
providerUrls = Arrays.asList(
"dubbo://127.0.0.1/com.foo.BarService",
"dubbo://127.0.0.1/com.foo.BarService",
"dubbo://127.0.0.1/com.foo.BarService?env=normal",
"dubbo://127.0.0.1/com.foo.BarService?env=normal",
"dubbo://127.0.0.1/com.foo.BarService?env=normal",
"dubbo://127.0.0.1/com.foo.BarService?region=beijing",
"dubbo://127.0.0.1/com.foo.BarService?region=beijing",
"dubbo://127.0.0.1/com.foo.BarService?region=beijing",
"dubbo://127.0.0.1/com.foo.BarService?region=beijing&env=gray",
"dubbo://127.0.0.1/com.foo.BarService?region=beijing&env=gray",
"dubbo://127.0.0.1/com.foo.BarService?region=beijing&env=gray",
"dubbo://127.0.0.1/com.foo.BarService?region=beijing&env=gray",
"dubbo://127.0.0.1/com.foo.BarService?region=beijing&env=normal",
"dubbo://127.0.0.1/com.foo.BarService?region=hangzhou",
"dubbo://127.0.0.1/com.foo.BarService?region=hangzhou",
"dubbo://127.0.0.1/com.foo.BarService?region=hangzhou&env=gray",
"dubbo://127.0.0.1/com.foo.BarService?region=hangzhou&env=gray",
"dubbo://127.0.0.1/com.foo.BarService?region=hangzhou&env=normal",
"dubbo://127.0.0.1/com.foo.BarService?region=hangzhou&env=normal",
"dubbo://127.0.0.1/com.foo.BarService?region=hangzhou&env=normal",
"dubbo://dubbo.apache.org/com.foo.BarService",
"dubbo://dubbo.apache.org/com.foo.BarService",
"dubbo://dubbo.apache.org/com.foo.BarService?env=normal",
"dubbo://dubbo.apache.org/com.foo.BarService?env=normal",
"dubbo://dubbo.apache.org/com.foo.BarService?env=normal",
"dubbo://dubbo.apache.org/com.foo.BarService?region=beijing",
"dubbo://dubbo.apache.org/com.foo.BarService?region=beijing",
"dubbo://dubbo.apache.org/com.foo.BarService?region=beijing",
"dubbo://dubbo.apache.org/com.foo.BarService?region=beijing&env=gray",
"dubbo://dubbo.apache.org/com.foo.BarService?region=beijing&env=gray",
"dubbo://dubbo.apache.org/com.foo.BarService?region=beijing&env=gray",
"dubbo://dubbo.apache.org/com.foo.BarService?region=beijing&env=gray",
"dubbo://dubbo.apache.org/com.foo.BarService?region=beijing&env=normal",
"dubbo://dubbo.apache.org/com.foo.BarService?region=hangzhou",
"dubbo://dubbo.apache.org/com.foo.BarService?region=hangzhou",
"dubbo://dubbo.apache.org/com.foo.BarService?region=hangzhou&env=gray",
"dubbo://dubbo.apache.org/com.foo.BarService?region=hangzhou&env=gray",
"dubbo://dubbo.apache.org/com.foo.BarService?region=hangzhou&env=normal",
"dubbo://dubbo.apache.org/com.foo.BarService?region=hangzhou&env=normal",
"dubbo://dubbo.apache.org/com.foo.BarService?region=hangzhou&env=normal");
List<Invoker<String>> invokerList = providerUrls.stream()
.map(url -> new MockInvoker<String>(URL.valueOf(url)))
.collect(Collectors.toList());
invokers = new BitList<>(invokerList);
}
public List<String> filtrate(List<String> invokers, String key) {
return invokers.stream().filter(invoker -> invoker.contains(key)).collect(Collectors.toList());
}
@Test
void testMetAffinityRoute() {
String config = "configVersion: v3.1\n"
+ "scope: service\n"
+ "key: service.apache.com\n"
+ "enabled: true\n"
+ "runtime: true\n"
+ "affinityAware:\n"
+ " key: region\n"
+ " ratio: 20\n";
AffinityServiceStateRouter<String> affinityRoute = new AffinityServiceStateRouter<>(
URL.valueOf("consumer://127.0.0.1/com.foo.BarService?env=gray®ion=beijing"));
affinityRoute.process(new ConfigChangedEvent("com.foo.BarService", "", config, ConfigChangeType.ADDED));
RpcInvocation invocation = new RpcInvocation();
invocation.setMethodName("getComment");
BitList<Invoker<String>> res = affinityRoute.route(
invokers.clone(),
URL.valueOf("consumer://127.0.0.1/com.foo.BarService?env=gray®ion=beijing"),
invocation,
false,
new Holder<>());
List<String> filtered = filtrate(new ArrayList<String>(providerUrls), "region=beijing");
assertEquals(filtered.size(), res.size());
logger.info("The affinity routing condition is met and the result is routed");
}
@Test
void testUnMetAffinityRoute() {
String config = "configVersion: v3.1\n"
+ "scope: service\n"
+ "key: service.apache.com\n"
+ "enabled: true\n"
+ "runtime: true\n"
+ "affinityAware:\n"
+ " key: region\n"
+ " ratio: 80\n";
AffinityServiceStateRouter<String> affinityRoute = new AffinityServiceStateRouter<>(
URL.valueOf("consumer://127.0.0.1/com.foo.BarService?env=gray®ion=beijing"));
affinityRoute.process(new ConfigChangedEvent("com.foo.BarService", "", config, ConfigChangeType.ADDED));
RpcInvocation invocation = new RpcInvocation();
invocation.setMethodName("getComment");
BitList<Invoker<String>> res = affinityRoute.route(
invokers.clone(),
URL.valueOf("consumer://127.0.0.1/com.foo.BarService?env=gray®ion=beijing"),
invocation,
false,
new Holder<>());
List<String> filtered = filtrate(new ArrayList<String>(providerUrls), "region=beijing");
assertEquals(invokers.size(), res.size());
logger.info("The affinity routing condition was not met and the result was not routed");
}
@Test
void testRatioEqualsAffinityRoute() {
String config = "configVersion: v3.1\n"
+ "scope: service\n"
+ "key: service.apache.com\n"
+ "enabled: true\n"
+ "runtime: true\n"
+ "affinityAware:\n"
+ " key: region\n"
+ " ratio: 40\n";
AffinityServiceStateRouter<String> affinityRoute = new AffinityServiceStateRouter<>(
URL.valueOf("consumer://127.0.0.1/com.foo.BarService?env=gray®ion=beijing"));
affinityRoute.process(new ConfigChangedEvent("com.foo.BarService", "", config, ConfigChangeType.ADDED));
RpcInvocation invocation = new RpcInvocation();
invocation.setMethodName("getComment");
BitList<Invoker<String>> res = affinityRoute.route(
invokers.clone(),
URL.valueOf("consumer://127.0.0.1/com.foo.BarService?env=gray®ion=beijing"),
invocation,
false,
new Holder<>());
List<String> filtered = filtrate(new ArrayList<String>(providerUrls), "region=beijing");
assertEquals(filtered.size(), res.size());
logger.info("The affinity routing condition is met and the result is routed");
}
@Test
void testRatioNotEqualsAffinityRoute() {
String config = "configVersion: v3.1\n"
+ "scope: service\n"
+ "key: service.apache.com\n"
+ "enabled: true\n"
+ "runtime: true\n"
+ "affinityAware:\n"
+ " key: region\n"
+ " ratio: 40.1\n";
AffinityServiceStateRouter<String> affinityRoute = new AffinityServiceStateRouter<>(
URL.valueOf("consumer://127.0.0.1/com.foo.BarService?env=gray®ion=beijing"));
affinityRoute.process(new ConfigChangedEvent("com.foo.BarService", "", config, ConfigChangeType.ADDED));
RpcInvocation invocation = new RpcInvocation();
invocation.setMethodName("getComment");
BitList<Invoker<String>> res = affinityRoute.route(
invokers.clone(),
URL.valueOf("consumer://127.0.0.1/com.foo.BarService?env=gray®ion=beijing"),
invocation,
false,
new Holder<>());
List<String> filtered = filtrate(new ArrayList<String>(providerUrls), "region=beijing");
assertEquals(invokers.size(), res.size());
logger.info("The affinity routing condition was not met and the result was not routed");
}
}
| AffinityRouteTest |
java | spring-projects__spring-boot | module/spring-boot-webmvc/src/main/java/org/springframework/boot/webmvc/autoconfigure/WebMvcProperties.java | {
"start": 11014,
"end": 11325
} | class ____ {
/**
* Whether RFC 9457 Problem Details support should be enabled.
*/
private boolean enabled;
public boolean isEnabled() {
return this.enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
}
/**
* API Version.
*/
public static | Problemdetails |
java | apache__maven | compat/maven-resolver-provider/src/main/java/org/apache/maven/repository/internal/VersionsMetadata.java | {
"start": 1386,
"end": 4350
} | class ____ extends MavenMetadata {
private final Artifact artifact;
VersionsMetadata(Artifact artifact, Date timestamp) {
super(createRepositoryMetadata(artifact), (Path) null, timestamp);
this.artifact = artifact;
}
VersionsMetadata(Artifact artifact, Path path, Date timestamp) {
super(createRepositoryMetadata(artifact), path, timestamp);
this.artifact = artifact;
}
private static Metadata createRepositoryMetadata(Artifact artifact) {
Metadata metadata = new Metadata();
metadata.setGroupId(artifact.getGroupId());
metadata.setArtifactId(artifact.getArtifactId());
Versioning versioning = new Versioning();
versioning.addVersion(artifact.getBaseVersion());
if (!artifact.isSnapshot()) {
versioning.setRelease(artifact.getBaseVersion());
}
if ("maven-plugin".equals(artifact.getProperty(ArtifactProperties.TYPE, ""))) {
versioning.setLatest(artifact.getBaseVersion());
}
metadata.setVersioning(versioning);
return metadata;
}
@Override
protected void merge(Metadata recessive) {
Versioning versioning = metadata.getVersioning();
versioning.setLastUpdatedTimestamp(timestamp);
if (recessive.getVersioning() != null) {
if (versioning.getLatest() == null) {
versioning.setLatest(recessive.getVersioning().getLatest());
}
if (versioning.getRelease() == null) {
versioning.setRelease(recessive.getVersioning().getRelease());
}
Collection<String> versions =
new LinkedHashSet<>(recessive.getVersioning().getVersions());
versions.addAll(versioning.getVersions());
versioning.setVersions(new ArrayList<>(versions));
}
// just carry-on as-is
if (!recessive.getPlugins().isEmpty()) {
metadata.setPlugins(new ArrayList<>(recessive.getPlugins()));
}
}
public Object getKey() {
return getGroupId() + ':' + getArtifactId();
}
public static Object getKey(Artifact artifact) {
return artifact.getGroupId() + ':' + artifact.getArtifactId();
}
@Deprecated
@Override
public MavenMetadata setFile(File file) {
return new VersionsMetadata(artifact, file.toPath(), timestamp);
}
@Override
public MavenMetadata setPath(Path path) {
return new VersionsMetadata(artifact, path, timestamp);
}
@Override
public String getGroupId() {
return artifact.getGroupId();
}
@Override
public String getArtifactId() {
return artifact.getArtifactId();
}
@Override
public String getVersion() {
return "";
}
@Override
public Nature getNature() {
return artifact.isSnapshot() ? Nature.RELEASE_OR_SNAPSHOT : Nature.RELEASE;
}
}
| VersionsMetadata |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/cache/spi/entry/StructuredMapCacheEntry.java | {
"start": 422,
"end": 1374
} | class ____ implements CacheEntryStructure {
/**
* Access to the singleton reference
*/
public static final StructuredMapCacheEntry INSTANCE = new StructuredMapCacheEntry();
@Override
public Object structure(Object item) {
final var entry = (CollectionCacheEntry) item;
final Serializable[] state = entry.getState();
final Map<Serializable,Serializable> map = mapOfSize( state.length );
for ( int i = 0; i < state.length; ) {
map.put( state[i++], state[i++] );
}
return map;
}
@Override
public Object destructure(Object structured, SessionFactoryImplementor factory) {
final var map = (Map<?,?>) structured;
final Serializable[] state = new Serializable[ map.size()*2 ];
int i = 0;
for ( var me : map.entrySet() ) {
state[i++] = (Serializable) me.getKey();
state[i++] = (Serializable) me.getValue();
}
return new CollectionCacheEntry(state);
}
private StructuredMapCacheEntry() {
}
}
| StructuredMapCacheEntry |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/transport/Lz4TransportDecompressor.java | {
"start": 1897,
"end": 1988
} | class ____ necessary as Netty is not a dependency in Elasticsearch server module.
*/
public | is |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/pkg/PackageConfig.java | {
"start": 10105,
"end": 11337
} | interface ____ {
/**
* Specify whether the `Implementation` information should be included in the runner jar's MANIFEST.MF.
*/
@WithDefault("true")
boolean addImplementationEntries();
/**
* Custom manifest attributes to be added to the main section of the MANIFEST.MF file.
* An example of the user defined property:
* quarkus.package.jar.manifest.attributes."Entry-key1"=Value1
* quarkus.package.jar.manifest.attributes."Entry-key2"=Value2
*/
@ConfigDocMapKey("attribute-name")
Map<String, String> attributes();
/**
* Custom manifest sections to be added to the MANIFEST.MF file.
* An example of the user defined property:
* quarkus.package.jar.manifest.sections."Section-Name"."Entry-Key1"=Value1
* quarkus.package.jar.manifest.sections."Section-Name"."Entry-Key2"=Value2
*/
@ConfigDocMapKey("section-name")
Map<String, Map<String, String>> sections();
}
/**
* The possible packaging options for JAR output.
*/
| ManifestConfig |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Service.java | {
"start": 922,
"end": 1028
} | interface ____ components to be managed by the {@link Server} class.
*/
@InterfaceAudience.Private
public | for |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/tls/TlsServerWithP12WithSniMatchingSanDNSTest.java | {
"start": 1377,
"end": 3005
} | class ____ {
@TestHTTPResource(value = "/tls", tls = true)
URL url;
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(MyBean.class)
.addAsResource(new File("target/certs/ssl-test-sni-keystore.p12"), "server-keystore.pkcs12"))
.overrideConfigKey("quarkus.tls.key-store.p12.path", "server-keystore.pkcs12")
.overrideConfigKey("quarkus.tls.key-store.p12.password", "secret")
.overrideConfigKey("quarkus.tls.key-store.p12.alias-password", "secret")
.overrideConfigKey("quarkus.tls.key-store.sni", "true");
@Inject
Vertx vertx;
@Test
public void testSslServerWithPkcs12() {
// Cannot use RESTAssured as it does not validate the certificate names (even when forced.)
WebClientOptions options = new WebClientOptions()
.setSsl(true)
.setTrustOptions(new io.vertx.core.net.JksOptions()
.setPath("target/certs/ssl-test-sni-truststore.jks")
.setPassword("secret"))
.setForceSni(true);
WebClient client = WebClient.create(vertx, options);
HttpResponse<Buffer> response = client.getAbs(url.toExternalForm()).send().toCompletionStage().toCompletableFuture()
.join();
Assertions.assertThat(response.statusCode()).isEqualTo(200);
Assertions.assertThat(response.bodyAsString()).isEqualTo("ssl");
}
@ApplicationScoped
static | TlsServerWithP12WithSniMatchingSanDNSTest |
java | elastic__elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceCriterion.java | {
"start": 650,
"end": 3773
} | class ____ extends Criterion<BoxedQueryRequest> {
private final int stage;
private final BoxedQueryRequest queryRequest;
private final List<HitExtractor> keys;
private final HitExtractor timestamp;
private final HitExtractor tiebreaker;
private final HitExtractor implicitTiebreaker;
private final boolean descending;
private final boolean missing;
public SequenceCriterion(
int stage,
BoxedQueryRequest queryRequest,
List<HitExtractor> keys,
HitExtractor timestamp,
HitExtractor tiebreaker,
HitExtractor implicitTiebreaker,
boolean descending,
boolean missing
) {
super(keys.size());
this.stage = stage;
this.queryRequest = queryRequest;
this.keys = keys;
this.timestamp = timestamp;
this.tiebreaker = tiebreaker;
this.implicitTiebreaker = implicitTiebreaker;
this.descending = descending;
this.missing = missing;
}
public int stage() {
return stage;
}
public boolean descending() {
return descending;
}
public BoxedQueryRequest queryRequest() {
return queryRequest;
}
public Object[] key(SearchHit hit) {
Object[] key = null;
if (keySize() > 0) {
Object[] docKeys = new Object[keySize()];
for (int i = 0; i < keySize(); i++) {
docKeys[i] = keys.get(i).extract(hit);
}
key = docKeys;
}
return key;
}
@SuppressWarnings({ "unchecked" })
public Ordinal ordinal(SearchHit hit) {
Object ts = timestamp.extract(hit);
if (ts instanceof Timestamp == false) {
throw new EqlIllegalArgumentException("Expected timestamp as a Timestamp but got {}", ts.getClass());
}
Comparable<Object> tbreaker = null;
if (tiebreaker != null) {
Object tb = tiebreaker.extract(hit);
if (tb != null && tb instanceof Comparable == false) {
throw new EqlIllegalArgumentException("Expected tiebreaker to be Comparable but got {}", tb);
}
tbreaker = (Comparable<Object>) tb;
}
Object implicitTbreaker = implicitTiebreaker.extract(hit);
if (implicitTbreaker instanceof Number == false) {
throw new EqlIllegalArgumentException("Expected _shard_doc/implicit tiebreaker as long but got [{}]", implicitTbreaker);
}
long timebreakerValue = ((Number) implicitTbreaker).longValue();
return new Ordinal((Timestamp) ts, tbreaker, timebreakerValue);
}
public boolean missing() {
return missing;
}
public Timestamp timestamp(SearchHit hit) {
Object ts = timestamp.extract(hit);
if (ts instanceof Timestamp == false) {
throw new EqlIllegalArgumentException("Expected timestamp as a Timestamp but got {}", ts.getClass());
}
return (Timestamp) ts;
}
@Override
public String toString() {
return "[" + stage + "][" + descending + "]";
}
}
| SequenceCriterion |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/LocalizerAction.java | {
"start": 870,
"end": 907
} | enum ____ {
LIVE, DIE
}
| LocalizerAction |
java | google__guava | android/guava-tests/test/com/google/common/reflect/TypeTokenResolutionTest.java | {
"start": 10610,
"end": 12030
} | class ____<B> extends Red<B>.Orange {
Yellow(Red<B> red) {
red.super();
}
Class<?> getClassB() {
return new TypeToken<B>(getClass()) {}.getRawType();
}
Red<A> getA() {
return getSelfA();
}
Red<B> getB() {
return getSelfB();
}
}
Class<?> getClassDirect() {
return new TypeToken<A>(getClass()) {}.getRawType();
}
}
public void test1() {
Red<String> redString = new Red<String>() {};
Red<Integer> redInteger = new Red<Integer>() {};
assertEquals(String.class, redString.getClassDirect());
assertEquals(Integer.class, redInteger.getClassDirect());
Red<String>.Yellow<Integer> yellowInteger = redString.new Yellow<Integer>(redInteger) {};
assertEquals(Integer.class, yellowInteger.getClassA());
assertEquals(Integer.class, yellowInteger.getClassB());
assertEquals(String.class, yellowInteger.getA().getClassDirect());
assertEquals(Integer.class, yellowInteger.getB().getClassDirect());
}
public void test2() {
Red<String> redString = new Red<>();
Red<Integer> redInteger = new Red<>();
Red<String>.Yellow<Integer> yellowInteger = redString.new Yellow<Integer>(redInteger) {};
assertEquals(Integer.class, yellowInteger.getClassA());
assertEquals(Integer.class, yellowInteger.getClassB());
}
private static <T> Type staticMethodWithLocalClass() {
| Yellow |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/inlineme/InlineMeData.java | {
"start": 5532,
"end": 6622
} | class ____ Foo.Builder.something()
static InlineMeData buildExpectedInlineMeAnnotation(
VisitorState state, ExpressionTree expression) {
ClassSymbol classSymbol = getSymbol(findEnclosingNode(state.getPath(), ClassTree.class));
// Scan the statement to collect identifiers that need to be qualified - unqualified references
// to field or instance methods, as well as collecting the imports we need to use.
ImportAndQualificationFinder qualifier = new ImportAndQualificationFinder(classSymbol, state);
qualifier.scan(TreePath.getPath(state.getPath(), expression), null);
return create(
prettyPrint(
new QualifyingTreeCopier(state, qualifier.qualifications)
.copy((JCExpression) expression)),
qualifier.imports,
qualifier.staticImports);
}
private static String prettyPrint(JCTree tree) {
StringWriter w = new StringWriter();
tree.accept(new GooglePrinter(w));
return w.toString();
}
/** Copies statements, inserting appropriate qualifiers so make it inline-ready. */
private static | like |
java | google__auto | value/src/it/functional/src/test/java/com/google/auto/value/AutoBuilderTest.java | {
"start": 13271,
"end": 13584
} | interface ____ {
void build() throws IOException;
}
@Test
public void emptyBuilderThrowsException() {
try {
throwExceptionBuilder().build();
fail();
} catch (IOException expected) {
assertThat(expected).hasMessageThat().isEqualTo("oops");
}
}
static | ThrowExceptionBuilder |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java | {
"start": 1448,
"end": 4311
} | class ____ extends AbstractCatAction {
private final Settings settings;
public RestCountAction(Settings settings) {
this.settings = settings;
}
@Override
public List<Route> routes() {
return List.of(new Route(GET, "/_cat/count"), new Route(GET, "/_cat/count/{index}"));
}
@Override
public String getName() {
return "cat_count_action";
}
@Override
protected void documentation(StringBuilder sb) {
sb.append("/_cat/count\n");
sb.append("/_cat/count/{index}\n");
}
@Override
public RestChannelConsumer doCatRequest(final RestRequest request, final NodeClient client) {
if (settings != null && settings.getAsBoolean("serverless.cross_project.enabled", false)) {
// accept but drop project_routing param until fully supported
request.param("project_routing");
}
String[] indices = Strings.splitStringByCommaToArray(request.param("index"));
SearchRequest countRequest = new SearchRequest(indices);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(0).trackTotalHits(true);
countRequest.source(searchSourceBuilder);
try {
request.withContentOrSourceParamParserOrNull(parser -> {
if (parser == null) {
QueryBuilder queryBuilder = RestActions.urlParamsToQueryBuilder(request);
if (queryBuilder != null) {
searchSourceBuilder.query(queryBuilder);
}
} else {
searchSourceBuilder.query(RestActions.getQueryContent(parser));
}
});
} catch (IOException e) {
throw new ElasticsearchException("Couldn't parse query", e);
}
return channel -> client.search(countRequest, new RestResponseListener<SearchResponse>(channel) {
@Override
public RestResponse buildResponse(SearchResponse countResponse) throws Exception {
assert countResponse.getHits().getTotalHits().relation() == TotalHits.Relation.EQUAL_TO;
return RestTable.buildResponse(buildTable(request, countResponse), channel);
}
});
}
@Override
protected Table getTableWithHeader(final RestRequest request) {
Table table = new Table();
table.startHeadersWithTimestamp();
table.addCell("count", "alias:dc,docs.count,docsCount;desc:the document count");
table.endHeaders();
return table;
}
private Table buildTable(RestRequest request, SearchResponse response) {
Table table = getTableWithHeader(request);
table.startRow();
table.addCell(response.getHits().getTotalHits().value());
table.endRow();
return table;
}
}
| RestCountAction |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/WasbRemoteCallHelper.java | {
"start": 1881,
"end": 2068
} | class ____ has constants and helper methods
* used in WASB when integrating with a remote http cred
* service. Currently, remote service will be used to generate
* SAS keys.
*/
public | the |
java | redisson__redisson | redisson/src/main/java/org/redisson/spring/session/RedissonSessionRepository.java | {
"start": 2081,
"end": 16299
} | class ____ implements Session {
private String principalName;
private final MapSession delegate;
private RMap<String, Object> map;
RedissonSession() {
this.delegate = new MapSession();
map = redisson.getMap(keyPrefix + delegate.getId(), new CompositeCodec(StringCodec.INSTANCE, redisson.getConfig().getCodec()));
Map<String, Object> newMap = new HashMap<String, Object>(3);
newMap.put("session:creationTime", delegate.getCreationTime().toEpochMilli());
newMap.put("session:lastAccessedTime", delegate.getLastAccessedTime().toEpochMilli());
newMap.put("session:maxInactiveInterval", delegate.getMaxInactiveInterval().getSeconds());
map.putAll(newMap);
updateExpiration();
String channelName = getEventsChannelName(delegate.getId());
RTopic topic = redisson.getTopic(channelName, StringCodec.INSTANCE);
topic.publish(delegate.getId());
}
private void updateExpiration() {
if (delegate.getMaxInactiveInterval().getSeconds() > 0) {
redisson.getBucket(getExpiredKey(delegate.getId())).set("", delegate.getMaxInactiveInterval().getSeconds(), TimeUnit.SECONDS);
map.expire(Duration.ofSeconds(delegate.getMaxInactiveInterval().getSeconds() + 60));
}
}
RedissonSession(MapSession session) {
this.delegate = session;
map = redisson.getMap(keyPrefix + session.getId(), new CompositeCodec(StringCodec.INSTANCE, redisson.getConfig().getCodec()));
principalName = resolvePrincipal(this);
}
@Override
public String getId() {
return delegate.getId();
}
@Override
public <T> T getAttribute(String attributeName) {
return delegate.getAttribute(attributeName);
}
@Override
public Set<String> getAttributeNames() {
return delegate.getAttributeNames();
}
@Override
public void setAttribute(String attributeName, Object attributeValue) {
if (attributeValue == null) {
removeAttribute(attributeName);
return;
}
delegate.setAttribute(attributeName, attributeValue);
if (map != null) {
map.fastPut(getSessionAttrNameKey(attributeName), attributeValue);
if (attributeName.equals(PRINCIPAL_NAME_INDEX_NAME)
|| attributeName.equals(SPRING_SECURITY_CONTEXT)) {
// remove old
if (principalName != null) {
RSet<String> set = getPrincipalSet(principalName);
set.remove(getId());
}
principalName = resolvePrincipal(this);
if (principalName != null) {
RSet<String> set = getPrincipalSet(principalName);
set.add(getId());
}
}
}
}
public void clearPrincipal() {
principalName = resolvePrincipal(this);
if (principalName != null) {
RSet<String> set = getPrincipalSet(principalName);
set.remove(getId());
}
}
@Override
public void removeAttribute(String attributeName) {
delegate.removeAttribute(attributeName);
if (map != null) {
map.fastRemove(getSessionAttrNameKey(attributeName));
}
}
@Override
public Instant getCreationTime() {
return delegate.getCreationTime();
}
@Override
public void setLastAccessedTime(Instant lastAccessedTime) {
delegate.setLastAccessedTime(lastAccessedTime);
if (map != null) {
map.fastPut("session:lastAccessedTime", lastAccessedTime.toEpochMilli());
updateExpiration();
}
}
@Override
public Instant getLastAccessedTime() {
return delegate.getLastAccessedTime();
}
@Override
public void setMaxInactiveInterval(Duration interval) {
delegate.setMaxInactiveInterval(interval);
if (map != null) {
map.fastPut("session:maxInactiveInterval", interval.getSeconds());
updateExpiration();
}
}
@Override
public Duration getMaxInactiveInterval() {
return delegate.getMaxInactiveInterval();
}
@Override
public boolean isExpired() {
return delegate.isExpired();
}
@Override
public String changeSessionId() {
String oldId = delegate.getId();
String id = delegate.changeSessionId();
RBatch batch = redisson.createBatch(BatchOptions.defaults());
batch.getBucket(getExpiredKey(oldId)).remainTimeToLiveAsync();
batch.getBucket(getExpiredKey(oldId)).deleteAsync();
batch.getMap(map.getName(), map.getCodec()).readAllMapAsync();
batch.getMap(map.getName()).deleteAsync();
BatchResult<?> res = batch.execute();
List<?> list = res.getResponses();
Long remainTTL = (Long) list.get(0);
Map<String, Object> oldState = (Map<String, Object>) list.get(2);
if (remainTTL == -2) {
// Either:
// - a parallel request also invoked changeSessionId() on this session, and the
// expiredKey for oldId had been deleted
// - sessions do not expire
remainTTL = delegate.getMaxInactiveInterval().toMillis();
}
RBatch batchNew = redisson.createBatch();
batchNew.getMap(keyPrefix + id, map.getCodec()).putAllAsync(oldState);
if (remainTTL > 0) {
batchNew.getBucket(getExpiredKey(id)).setAsync("", remainTTL, TimeUnit.MILLISECONDS);
}
batchNew.execute();
map = redisson.getMap(keyPrefix + id, map.getCodec());
return id;
}
}
private static final Logger log = LoggerFactory.getLogger(RedissonSessionRepository.class);
private static final String SPRING_SECURITY_CONTEXT = "SPRING_SECURITY_CONTEXT";
private static final SpelExpressionParser SPEL_PARSER = new SpelExpressionParser();
private RedissonClient redisson;
private ApplicationEventPublisher eventPublisher;
private RPatternTopic deletedTopic;
private RPatternTopic expiredTopic;
private RPatternTopic createdTopic;
private String keyPrefix = "spring:session:";
private Integer defaultMaxInactiveInterval;
public RedissonSessionRepository(RedissonClient redissonClient, ApplicationEventPublisher eventPublisher, String keyPrefix) {
this.redisson = redissonClient;
this.eventPublisher = eventPublisher;
if (StringUtils.hasText(keyPrefix)) {
this.keyPrefix = keyPrefix;
}
deletedTopic = this.redisson.getPatternTopic("__keyevent@*:del", StringCodec.INSTANCE);
expiredTopic = this.redisson.getPatternTopic("__keyevent@*:expired", StringCodec.INSTANCE);
createdTopic = this.redisson.getPatternTopic(getEventsChannelPrefix() + "*", StringCodec.INSTANCE);
// add listeners after all topics are created to avoid race and potential NPE if we get messages right away
deletedTopic.addListener(String.class, this);
expiredTopic.addListener(String.class, this);
createdTopic.addListener(String.class, this);
}
public RedissonSessionRepository(RedissonClient redissonClient, ApplicationEventPublisher eventPublisher) {
this(redissonClient, eventPublisher, null);
}
private MapSession loadSession(String sessionId) {
RMap<String, Object> map = redisson.getMap(keyPrefix + sessionId, new CompositeCodec(StringCodec.INSTANCE, redisson.getConfig().getCodec()));
Set<Entry<String, Object>> entrySet = map.readAllEntrySet();
if (entrySet.isEmpty()) {
return null;
}
MapSession delegate = new MapSession(sessionId);
for (Entry<String, Object> entry : entrySet) {
if ("session:creationTime".equals(entry.getKey())) {
delegate.setCreationTime(Instant.ofEpochMilli((Long) entry.getValue()));
} else if ("session:lastAccessedTime".equals(entry.getKey())) {
delegate.setLastAccessedTime(Instant.ofEpochMilli((Long) entry.getValue()));
} else if ("session:maxInactiveInterval".equals(entry.getKey())) {
delegate.setMaxInactiveInterval(Duration.ofSeconds((Long) entry.getValue()));
} else if (entry.getKey().startsWith(SESSION_ATTR_PREFIX)) {
delegate.setAttribute(entry.getKey().substring(SESSION_ATTR_PREFIX.length()), entry.getValue());
}
}
return delegate;
}
@Override
public void onMessage(CharSequence pattern, CharSequence channel, String body) {
if (createdTopic.getPatternNames().contains(pattern.toString())) {
RedissonSession session = findById(body);
if (session != null) {
publishEvent(new SessionCreatedEvent(this, session));
}
} else if (deletedTopic.getPatternNames().contains(pattern.toString())) {
if (!body.startsWith(getExpiredKeyPrefix())) {
return;
}
String id = body.split(getExpiredKeyPrefix())[1];
MapSession mapSession = loadSession(id);
if (mapSession != null) {
RedissonSession session = new RedissonSession(mapSession);
session.clearPrincipal();
publishEvent(new SessionDeletedEvent(this, session));
}
} else if (expiredTopic.getPatternNames().contains(pattern.toString())) {
if (!body.startsWith(getExpiredKeyPrefix())) {
return;
}
String id = body.split(getExpiredKeyPrefix())[1];
MapSession mapSession = loadSession(id);
if (mapSession != null) {
RedissonSession session = new RedissonSession(mapSession);
session.clearPrincipal();
publishEvent(new SessionExpiredEvent(this, session));
}
}
}
private void publishEvent(ApplicationEvent event) {
try {
eventPublisher.publishEvent(event);
} catch (Exception e) {
log.error(e.getMessage(), e);
}
}
public void setDefaultMaxInactiveInterval(int defaultMaxInactiveInterval) {
this.defaultMaxInactiveInterval = defaultMaxInactiveInterval;
}
@Override
public RedissonSession createSession() {
RedissonSession session = new RedissonSession();
if (defaultMaxInactiveInterval != null) {
session.setMaxInactiveInterval(Duration.ofSeconds(defaultMaxInactiveInterval));
}
return session;
}
@Override
public void save(RedissonSession session) {
// session changes are stored in real-time
}
@Override
public RedissonSession findById(String id) {
MapSession mapSession = loadSession(id);
if (mapSession == null || mapSession.isExpired()) {
return null;
}
return new RedissonSession(mapSession);
}
@Override
public void deleteById(String id) {
RedissonSession session = findById(id);
if (session == null) {
return;
}
redisson.getBucket(getExpiredKey(id)).delete();
session.clearPrincipal();
session.setMaxInactiveInterval(Duration.ZERO);
}
public void setKeyPrefix(String keyPrefix) {
this.keyPrefix = keyPrefix;
}
String resolvePrincipal(Session session) {
String principalName = session.getAttribute(PRINCIPAL_NAME_INDEX_NAME);
if (principalName != null) {
return principalName;
}
Object auth = session.getAttribute(SPRING_SECURITY_CONTEXT);
if (auth == null) {
return null;
}
Expression expression = SPEL_PARSER.parseExpression("authentication?.name");
return expression.getValue(auth, String.class);
}
String getEventsChannelName(String sessionId) {
return getEventsChannelPrefix() + sessionId;
}
String getExpiredKey(String sessionId) {
return getExpiredKeyPrefix() + sessionId;
}
String getExpiredKeyPrefix() {
return keyPrefix + "sessions:expires:";
}
String getEventsChannelPrefix() {
return keyPrefix + "created:event:";
}
String getPrincipalKey(String principalName) {
return keyPrefix + "index:" + FindByIndexNameSessionRepository.PRINCIPAL_NAME_INDEX_NAME + ":" + principalName;
}
String getSessionAttrNameKey(String name) {
return SESSION_ATTR_PREFIX + name;
}
@Override
public Map<String, RedissonSession> findByIndexNameAndIndexValue(String indexName, String indexValue) {
if (!PRINCIPAL_NAME_INDEX_NAME.equals(indexName)) {
return Collections.emptyMap();
}
RSet<String> set = getPrincipalSet(indexValue);
Set<String> sessionIds = set.readAll();
Map<String, RedissonSession> result = new HashMap<String, RedissonSession>();
for (String id : sessionIds) {
RedissonSession session = findById(id);
if (session != null) {
result.put(id, session);
}
}
return result;
}
private RSet<String> getPrincipalSet(String indexValue) {
String principalKey = getPrincipalKey(indexValue);
return redisson.getSet(principalKey, StringCodec.INSTANCE);
}
}
| RedissonSession |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationFields.java | {
"start": 1450,
"end": 3015
} | class ____ value
*/
private final String predictedClassField;
/**
* The field containing the predicted probability value in [0.0, 1.0]
*/
private final String predictedProbabilityField;
/**
* Whether the {@code predictedProbabilityField} should be treated as nested (e.g.: when used in exists queries).
*/
private final boolean predictedProbabilityFieldNested;
public EvaluationFields(
@Nullable String actualField,
@Nullable String predictedField,
@Nullable String topClassesField,
@Nullable String predictedClassField,
@Nullable String predictedProbabilityField,
boolean predictedProbabilityFieldNested
) {
this.actualField = actualField;
this.predictedField = predictedField;
this.topClassesField = topClassesField;
this.predictedClassField = predictedClassField;
this.predictedProbabilityField = predictedProbabilityField;
this.predictedProbabilityFieldNested = predictedProbabilityFieldNested;
}
/**
* Returns the field containing the actual value
*/
public String getActualField() {
return actualField;
}
/**
* Returns the field containing the predicted value
*/
public String getPredictedField() {
return predictedField;
}
/**
* Returns the field containing the array of top classes
*/
public String getTopClassesField() {
return topClassesField;
}
/**
* Returns the field containing the predicted | name |
java | netty__netty | buffer/src/test/java/io/netty/buffer/RetainedSlicedByteBufTest.java | {
"start": 732,
"end": 1053
} | class ____ extends SlicedByteBufTest {
@Override
protected ByteBuf newSlice(ByteBuf buffer, int offset, int length) {
ByteBuf slice = buffer.retainedSlice(offset, length);
buffer.release();
assertEquals(buffer.refCnt(), slice.refCnt());
return slice;
}
}
| RetainedSlicedByteBufTest |
java | apache__hadoop | hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/src/main/java/org/apache/hadoop/tools/dynamometer/workloadgenerator/WorkloadMapper.java | {
"start": 1204,
"end": 1501
} | class ____ a generic workload-generating mapper. By
* default, it will expect to use {@link VirtualInputFormat} as its
* {@link InputFormat}. Subclasses requiring a reducer or expecting a different
* {@link InputFormat} should override the {@link #configureJob(Job)} method.
*/
public abstract | for |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng4644StrictPomParsingRejectsMisplacedTextTest.java | {
"start": 1040,
"end": 1869
} | class ____ extends AbstractMavenIntegrationTestCase {
/**
* Verify that misplaced text inside the project element of a POM causes a parser error during reactor builds.
*
* @throws Exception in case of failure
*/
@Test
public void testit() throws Exception {
File testDir = extractResources("/mng-4644");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
try {
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
fail("Should fail to validate the POM syntax due to misplaced text in project element.");
} catch (VerificationException e) {
// expected
}
}
}
| MavenITmng4644StrictPomParsingRejectsMisplacedTextTest |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_2700/Issue2754.java | {
"start": 207,
"end": 2208
} | class ____ extends TestCase {
public void test_for_issue0() throws Exception {
String s = "{\"p1\":\"2019-09-18T20:35:00+12:45\"}";
C c = JSON.parseObject(s, C.class);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX");
sdf.setTimeZone(TimeZone.getTimeZone("Pacific/Chatham"));
assertEquals("2019-09-18T20:35:00+12:45", sdf.format(c.p1.getTime()));
}
public void test_for_issue1() throws Exception {
String s = "{\"p1\":\"2019-09-18T20:35:00+12:45\"}";
C c = JSON.parseObject(s, C.class);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX");
sdf.setTimeZone(TimeZone.getTimeZone("NZ-CHAT"));
assertEquals("2019-09-18T20:35:00+12:45", sdf.format(c.p1.getTime()));
}
public void test_for_issue2() throws Exception {
String s = "{\"p1\":\"2019-09-18T20:35:00+05:45\"}";
C c = JSON.parseObject(s, C.class);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX");
sdf.setTimeZone(TimeZone.getTimeZone("Asia/Kathmandu"));
assertEquals("2019-09-18T20:35:00+05:45", sdf.format(c.p1.getTime()));
}
public void test_for_issue3() throws Exception {
String s = "{\"p1\":\"2019-09-18T20:35:00+05:45\"}";
C c = JSON.parseObject(s, C.class);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX");
sdf.setTimeZone(TimeZone.getTimeZone("Asia/Katmandu"));
assertEquals("2019-09-18T20:35:00+05:45", sdf.format(c.p1.getTime()));
}
public void test_for_issue4() throws Exception {
String s = "{\"p1\":\"2019-09-18T20:35:00+08:45\"}";
C c = JSON.parseObject(s, C.class);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX");
sdf.setTimeZone(TimeZone.getTimeZone("Australia/Eucla"));
assertEquals("2019-09-18T20:35:00+08:45", sdf.format(c.p1.getTime()));
}
public static | Issue2754 |
java | apache__camel | components/camel-spring-parent/camel-spring-redis/src/main/java/org/apache/camel/component/redis/RedisComponent.java | {
"start": 1207,
"end": 2555
} | class ____ extends DefaultComponent {
@Metadata(autowired = true)
private RedisTemplate<?, ?> redisTemplate;
public RedisComponent() {
}
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
RedisConfiguration configuration = new RedisConfiguration();
configuration.setRedisTemplate(redisTemplate);
setHostAndPort(configuration, remaining);
RedisEndpoint endpoint = new RedisEndpoint(uri, this, configuration);
setProperties(endpoint, parameters);
return endpoint;
}
private void setHostAndPort(RedisConfiguration configuration, String remaining) {
String[] hostAndPort = remaining.split(":");
if (hostAndPort.length > 0 && hostAndPort[0].length() > 0) {
configuration.setHost(hostAndPort[0]);
}
if (hostAndPort.length > 1 && hostAndPort[1].length() > 0) {
configuration.setPort(Integer.parseInt(hostAndPort[1]));
}
}
public RedisTemplate<?, ?> getRedisTemplate() {
return redisTemplate;
}
/**
* Reference to a pre-configured RedisTemplate instance to use.
*/
public void setRedisTemplate(RedisTemplate<?, ?> redisTemplate) {
this.redisTemplate = redisTemplate;
}
}
| RedisComponent |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/engine/internal/EntityEntryImpl.java | {
"start": 21745,
"end": 21873
} | enum ____ is stored.
*/
private int getOffset() {
return offset;
}
/**
* Returns the bit mask for reading this | value |
java | spring-projects__spring-framework | integration-tests/src/test/java/org/springframework/scheduling/annotation/ScheduledAndTransactionalAnnotationIntegrationTests.java | {
"start": 4884,
"end": 4993
} | class ____ {
}
@Configuration
@EnableTransactionManagement(proxyTargetClass = true)
static | JdkProxyTxConfig |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/util/LambdaSafeTests.java | {
"start": 17213,
"end": 17337
} | interface ____<T extends CharSequence> {
void handle(Integer number, T argument, Boolean bool);
}
| GenericMultiArgCallback |
java | netty__netty | codec-marshalling/src/main/java/io/netty/handler/codec/marshalling/ThreadLocalMarshallerProvider.java | {
"start": 1212,
"end": 2238
} | class ____ implements MarshallerProvider {
private final FastThreadLocal<Marshaller> marshallers = new FastThreadLocal<Marshaller>();
private final MarshallerFactory factory;
private final MarshallingConfiguration config;
/**
* Create a new instance of the {@link ThreadLocalMarshallerProvider}
*
* @param factory the {@link MarshallerFactory} to use to create {@link Marshaller}'s if needed
* @param config the {@link MarshallingConfiguration} to use
*/
public ThreadLocalMarshallerProvider(MarshallerFactory factory, MarshallingConfiguration config) {
this.factory = factory;
this.config = config;
}
@Override
public Marshaller getMarshaller(ChannelHandlerContext ctx) throws Exception {
Marshaller marshaller = marshallers.get();
if (marshaller == null) {
marshaller = factory.createMarshaller(config);
marshallers.set(marshaller);
}
return marshaller;
}
}
| ThreadLocalMarshallerProvider |
java | apache__camel | components/camel-metrics/src/test/java/org/apache/camel/component/metrics/routepolicy/ManagedMetricsRoutePolicyTest.java | {
"start": 1575,
"end": 3987
} | class ____ extends CamelTestSupport {
private final Logger log = LoggerFactory.getLogger(getClass());
@BindToRegistry(MetricsComponent.METRIC_REGISTRY_NAME)
private MetricRegistry metricRegistry = new MetricRegistry();
@Override
protected boolean useJmx() {
return true;
}
protected MBeanServer getMBeanServer() {
return context.getManagementStrategy().getManagementAgent().getMBeanServer();
}
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
MetricsRoutePolicyFactory factory = new MetricsRoutePolicyFactory();
factory.setUseJmx(true);
factory.setPrettyPrint(true);
context.addRoutePolicyFactory(factory);
return context;
}
@Test
public void testMetricsRoutePolicy() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(10);
for (int i = 0; i < 10; i++) {
if (i % 2 == 0) {
template.sendBody("seda:foo", "Hello " + i);
} else {
template.sendBody("seda:bar", "Hello " + i);
}
}
MockEndpoint.assertIsSatisfied(context);
// there should be 3 names
assertEquals(3, metricRegistry.getNames().size());
// there should be 3 mbeans
Set<ObjectName> set = getMBeanServer().queryNames(new ObjectName("org.apache.camel.metrics:*"), null);
assertEquals(3, set.size());
String name = String.format("org.apache.camel:context=%s,type=services,name=MetricsRegistryService",
context.getManagementName());
ObjectName on = ObjectName.getInstance(name);
String json = (String) getMBeanServer().invoke(on, "dumpStatisticsAsJson", null, null);
assertNotNull(json);
log.info(json);
assertTrue(json.contains("test"));
assertTrue(json.contains("bar.responses"));
assertTrue(json.contains("foo.responses"));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("seda:foo").routeId("foo").to("metrics:counter:test").to("mock:result");
from("seda:bar").routeId("bar").to("mock:result");
}
};
}
}
| ManagedMetricsRoutePolicyTest |
java | apache__camel | components/camel-netty-http/src/test/java/org/apache/camel/component/netty/http/NettyHttpComponentMuteExceptionTest.java | {
"start": 1376,
"end": 2537
} | class ____ extends BaseNettyTest {
@Test
public void testMuteException() throws Exception {
HttpGet get = new HttpGet("http://localhost:" + getPort() + "/foo");
get.addHeader("Accept", "application/text");
try (CloseableHttpClient client = HttpClients.createDefault();
CloseableHttpResponse response = client.execute(get)) {
String body = EntityUtils.toString(response.getEntity(), "UTF-8");
assertNotNull(body);
assertEquals("", body);
assertEquals(500, response.getCode());
}
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
NettyHttpComponent nhc = context.getComponent("netty-http", NettyHttpComponent.class);
nhc.setMuteException(true);
from("netty-http:http://0.0.0.0:{{port}}/foo")
.to("mock:input")
.throwException(new IllegalArgumentException("Camel cannot do this"));
}
};
}
}
| NettyHttpComponentMuteExceptionTest |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/util/comparator/InstanceComparatorTests.java | {
"start": 2438,
"end": 2483
} | class ____ implements I2 {
}
private static | C3 |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-common/deployment/src/main/java/io/quarkus/resteasy/reactive/common/deployment/AggregatedParameterContainersBuildItem.java | {
"start": 839,
"end": 954
} | class ____
*/
public Set<DotName> getClassNames() {
return classNames;
}
/**
* All | names |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng3288SystemScopeDirTest.java | {
"start": 1098,
"end": 1851
} | class ____ extends AbstractMavenIntegrationTestCase {
/**
* Test the use of a system scoped dependency to a directory instead of a JAR which should fail early.
*
* @throws Exception in case of failure
*/
@Test
public void testitMNG3288() throws Exception {
File testDir = extractResources("/mng-3288");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.addCliArgument("validate");
assertThrows(
VerificationException.class,
verifier::execute,
"Usage of directory instead of file for system-scoped dependency did not fail dependency resolution");
}
}
| MavenITmng3288SystemScopeDirTest |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/observable/ObservableMergeTest.java | {
"start": 1395,
"end": 13290
} | class ____ extends RxJavaTest {
Observer<String> stringObserver;
int count;
@Before
public void before() {
stringObserver = TestHelper.mockObserver();
for (Thread t : Thread.getAllStackTraces().keySet()) {
if (t.getName().startsWith("RxNewThread")) {
count++;
}
}
}
@After
public void after() {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
for (Thread t : Thread.getAllStackTraces().keySet()) {
if (t.getName().startsWith("RxNewThread")) {
--count;
}
}
if (count != 0) {
throw new IllegalStateException("NewThread leak!");
}
}
@Test
public void mergeObservableOfObservables() {
final Observable<String> o1 = Observable.unsafeCreate(new TestSynchronousObservable());
final Observable<String> o2 = Observable.unsafeCreate(new TestSynchronousObservable());
Observable<Observable<String>> observableOfObservables = Observable.unsafeCreate(new ObservableSource<Observable<String>>() {
@Override
public void subscribe(Observer<? super Observable<String>> observer) {
observer.onSubscribe(Disposable.empty());
// simulate what would happen in an Observable
observer.onNext(o1);
observer.onNext(o2);
observer.onComplete();
}
});
Observable<String> m = Observable.merge(observableOfObservables);
m.subscribe(stringObserver);
verify(stringObserver, never()).onError(any(Throwable.class));
verify(stringObserver, times(1)).onComplete();
verify(stringObserver, times(2)).onNext("hello");
}
@Test
public void mergeArray() {
final Observable<String> o1 = Observable.unsafeCreate(new TestSynchronousObservable());
final Observable<String> o2 = Observable.unsafeCreate(new TestSynchronousObservable());
Observable<String> m = Observable.merge(o1, o2);
m.subscribe(stringObserver);
verify(stringObserver, never()).onError(any(Throwable.class));
verify(stringObserver, times(2)).onNext("hello");
verify(stringObserver, times(1)).onComplete();
}
@Test
public void mergeList() {
final Observable<String> o1 = Observable.unsafeCreate(new TestSynchronousObservable());
final Observable<String> o2 = Observable.unsafeCreate(new TestSynchronousObservable());
List<Observable<String>> listOfObservables = new ArrayList<>();
listOfObservables.add(o1);
listOfObservables.add(o2);
Observable<String> m = Observable.merge(listOfObservables);
m.subscribe(stringObserver);
verify(stringObserver, never()).onError(any(Throwable.class));
verify(stringObserver, times(1)).onComplete();
verify(stringObserver, times(2)).onNext("hello");
}
@Test
public void unSubscribeObservableOfObservables() throws InterruptedException {
final AtomicBoolean unsubscribed = new AtomicBoolean();
final CountDownLatch latch = new CountDownLatch(1);
Observable<Observable<Long>> source = Observable.unsafeCreate(new ObservableSource<Observable<Long>>() {
@Override
public void subscribe(final Observer<? super Observable<Long>> observer) {
// verbose on purpose so I can track the inside of it
final Disposable upstream = Disposable.fromRunnable(new Runnable() {
@Override
public void run() {
System.out.println("*** unsubscribed");
unsubscribed.set(true);
}
});
observer.onSubscribe(upstream);
new Thread(new Runnable() {
@Override
public void run() {
while (!unsubscribed.get()) {
observer.onNext(Observable.just(1L, 2L));
}
System.out.println("Done looping after unsubscribe: " + unsubscribed.get());
observer.onComplete();
// mark that the thread is finished
latch.countDown();
}
}).start();
}
});
final AtomicInteger count = new AtomicInteger();
Observable.merge(source).take(6).blockingForEach(new Consumer<Long>() {
@Override
public void accept(Long v) {
System.out.println("Value: " + v);
int c = count.incrementAndGet();
if (c > 6) {
fail("Should be only 6");
}
}
});
latch.await(1000, TimeUnit.MILLISECONDS);
System.out.println("unsubscribed: " + unsubscribed.get());
assertTrue(unsubscribed.get());
}
@Test
public void mergeArrayWithThreading() {
final TestASynchronousObservable o1 = new TestASynchronousObservable();
final TestASynchronousObservable o2 = new TestASynchronousObservable();
Observable<String> m = Observable.merge(Observable.unsafeCreate(o1), Observable.unsafeCreate(o2));
TestObserver<String> to = new TestObserver<>(stringObserver);
m.subscribe(to);
to.awaitDone(5, TimeUnit.SECONDS);
to.assertNoErrors();
verify(stringObserver, never()).onError(any(Throwable.class));
verify(stringObserver, times(2)).onNext("hello");
verify(stringObserver, times(1)).onComplete();
}
@Test
public void synchronizationOfMultipleSequencesLoop() throws Throwable {
for (int i = 0; i < 100; i++) {
System.out.println("testSynchronizationOfMultipleSequencesLoop > " + i);
synchronizationOfMultipleSequences();
}
}
@Test
public void synchronizationOfMultipleSequences() throws Throwable {
final TestASynchronousObservable o1 = new TestASynchronousObservable();
final TestASynchronousObservable o2 = new TestASynchronousObservable();
// use this latch to cause onNext to wait until we're ready to let it go
final CountDownLatch endLatch = new CountDownLatch(1);
final AtomicInteger concurrentCounter = new AtomicInteger();
final AtomicInteger totalCounter = new AtomicInteger();
Observable<String> m = Observable.merge(Observable.unsafeCreate(o1), Observable.unsafeCreate(o2));
m.subscribe(new DefaultObserver<String>() {
@Override
public void onComplete() {
}
@Override
public void onError(Throwable e) {
throw new RuntimeException("failed", e);
}
@Override
public void onNext(String v) {
totalCounter.incrementAndGet();
concurrentCounter.incrementAndGet();
try {
// avoid deadlocking the main thread
if (Thread.currentThread().getName().equals("TestASynchronousObservable")) {
// wait here until we're done asserting
endLatch.await();
}
} catch (InterruptedException e) {
e.printStackTrace();
throw new RuntimeException("failed", e);
} finally {
concurrentCounter.decrementAndGet();
}
}
});
// wait for both observables to send (one should be blocked)
o1.onNextBeingSent.await();
o2.onNextBeingSent.await();
// I can't think of a way to know for sure that both threads have or are trying to send onNext
// since I can't use a CountDownLatch for "after" onNext since I want to catch during it
// but I can't know for sure onNext is invoked
// so I'm unfortunately reverting to using a Thread.sleep to allow the process scheduler time
// to make sure after o1.onNextBeingSent and o2.onNextBeingSent are hit that the following
// onNext is invoked.
int timeout = 20;
while (timeout-- > 0 && concurrentCounter.get() != 1) {
Thread.sleep(100);
}
try { // in try/finally so threads are released via latch countDown even if assertion fails
assertEquals(1, concurrentCounter.get());
} finally {
// release so it can finish
endLatch.countDown();
}
try {
o1.t.join();
o2.t.join();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
assertEquals(2, totalCounter.get());
assertEquals(0, concurrentCounter.get());
}
/**
* Unit test from OperationMergeDelayError backported here to show how these use cases work with normal merge.
*/
@Test
public void error1() {
// we are using synchronous execution to test this exactly rather than non-deterministic concurrent behavior
final Observable<String> o1 = Observable.unsafeCreate(new TestErrorObservable("four", null, "six")); // we expect to lose "six"
final Observable<String> o2 = Observable.unsafeCreate(new TestErrorObservable("one", "two", "three")); // we expect to lose all of these since o1 is done first and fails
Observable<String> m = Observable.merge(o1, o2);
m.subscribe(stringObserver);
verify(stringObserver, times(1)).onError(any(NullPointerException.class));
verify(stringObserver, never()).onComplete();
verify(stringObserver, times(0)).onNext("one");
verify(stringObserver, times(0)).onNext("two");
verify(stringObserver, times(0)).onNext("three");
verify(stringObserver, times(1)).onNext("four");
verify(stringObserver, times(0)).onNext("five");
verify(stringObserver, times(0)).onNext("six");
}
/**
* Unit test from OperationMergeDelayError backported here to show how these use cases work with normal merge.
*/
@Test
public void error2() {
// we are using synchronous execution to test this exactly rather than non-deterministic concurrent behavior
final Observable<String> o1 = Observable.unsafeCreate(new TestErrorObservable("one", "two", "three"));
final Observable<String> o2 = Observable.unsafeCreate(new TestErrorObservable("four", null, "six")); // we expect to lose "six"
final Observable<String> o3 = Observable.unsafeCreate(new TestErrorObservable("seven", "eight", null)); // we expect to lose all of these since o2 is done first and fails
final Observable<String> o4 = Observable.unsafeCreate(new TestErrorObservable("nine")); // we expect to lose all of these since o2 is done first and fails
Observable<String> m = Observable.merge(o1, o2, o3, o4);
m.subscribe(stringObserver);
verify(stringObserver, times(1)).onError(any(NullPointerException.class));
verify(stringObserver, never()).onComplete();
verify(stringObserver, times(1)).onNext("one");
verify(stringObserver, times(1)).onNext("two");
verify(stringObserver, times(1)).onNext("three");
verify(stringObserver, times(1)).onNext("four");
verify(stringObserver, times(0)).onNext("five");
verify(stringObserver, times(0)).onNext("six");
verify(stringObserver, times(0)).onNext("seven");
verify(stringObserver, times(0)).onNext("eight");
verify(stringObserver, times(0)).onNext("nine");
}
private static | ObservableMergeTest |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/devmode/GeneratedStaticResourcesDevModeTest.java | {
"start": 301,
"end": 4000
} | class ____ {
@RegisterExtension
final static QuarkusDevModeTest devMode = new QuarkusDevModeTest()
.withApplicationRoot((jar) -> jar
.add(new StringAsset("quarkus.http.enable-compression=true\n"),
"application.properties")
.addAsResource("static-file.html", "META-INF/generated-resources-test/bytes/static-file.html")
.addAsResource("static-file.html", "META-INF/generated-resources-test/bytes/.hidden-file.html")
.addAsResource("static-file.html", "META-INF/generated-resources-test/bytes/index.html")
.addAsResource("static-file.html", "META-INF/generated-resources-test/bytes/image.svg")
.addAsResource("static-file.html", "META-INF/generated-resources-test/static-file.html")
.addAsResource("static-file.html", "META-INF/generated-resources-test/.hidden-file.html")
.addAsResource("static-file.html", "META-INF/generated-resources-test/index.html")
.addAsResource("static-file.html", "META-INF/generated-resources-test/image.svg"));
@Test
void shouldUpdateResourceIndexHtmlOnUserChange() {
RestAssured.given()
.get("/bytes/")
.then()
.statusCode(200)
.body(Matchers.containsString("This is the title of the webpage!"));
devMode.modifyResourceFile("META-INF/generated-resources-test/bytes/index.html", s -> s.replace("webpage", "Matheus"));
RestAssured.given()
.get("/bytes/")
.then()
.statusCode(200)
.body(Matchers.containsString("This is the title of the Matheus!"));
}
@Test
void shouldUpdateHiddenResourceOnUserChange() {
RestAssured.given()
.get("/bytes/.hidden-file.html")
.then()
.statusCode(200)
.body(Matchers.containsString("This is the title of the webpage!"));
devMode.modifyResourceFile("META-INF/generated-resources-test/bytes/.hidden-file.html",
s -> s.replace("webpage", "Matheus"));
RestAssured.given()
.get("/bytes/.hidden-file.html")
.then()
.statusCode(200)
.body(Matchers.containsString("This is the title of the Matheus!"));
}
@Test
void shouldUpdateFileResourceIndexHtmlOnUserChange() {
RestAssured.given()
.get("/")
.then()
.statusCode(200)
.body(Matchers.containsString("This is the title of the webpage!"));
devMode.modifyResourceFile("META-INF/generated-resources-test/index.html", s -> s.replace("webpage", "Matheus"));
RestAssured.given()
.get("/")
.then()
.statusCode(200)
.body(Matchers.containsString("This is the title of the Matheus!"));
}
@Test
void shouldUpdateHiddenFileResourceOnUserChange() {
RestAssured.given()
.get("/.hidden-file.html")
.then()
.statusCode(200)
.body(Matchers.containsString("This is the title of the webpage!"));
devMode.modifyResourceFile("META-INF/generated-resources-test/.hidden-file.html",
s -> s.replace("webpage", "Matheus"));
RestAssured.given()
.get("/.hidden-file.html")
.then()
.statusCode(200)
.body(Matchers.containsString("This is the title of the Matheus!"));
}
}
| GeneratedStaticResourcesDevModeTest |
java | elastic__elasticsearch | x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/patterntext/PatternTextIndexFieldData.java | {
"start": 1500,
"end": 4441
} | class ____ implements IndexFieldData.Builder {
final PatternTextFieldType fieldType;
Builder(PatternTextFieldType fieldType) {
this.fieldType = fieldType;
}
public PatternTextIndexFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) {
return new PatternTextIndexFieldData(fieldType);
}
}
PatternTextIndexFieldData(PatternTextFieldType fieldType) {
this.fieldType = fieldType;
}
@Override
public String getFieldName() {
return fieldType.name();
}
@Override
public ValuesSourceType getValuesSourceType() {
return null;
}
@Override
public LeafFieldData load(LeafReaderContext context) {
try {
return loadDirect(context);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
@Override
public LeafFieldData loadDirect(LeafReaderContext context) throws IOException {
LeafReader leafReader = context.reader();
PatternTextCompositeValues values = PatternTextCompositeValues.from(leafReader, fieldType);
return new LeafFieldData() {
final ToScriptFieldFactory<SortedBinaryDocValues> factory = KeywordDocValuesField::new;
@Override
public DocValuesScriptFieldFactory getScriptFieldFactory(String name) {
return factory.getScriptFieldFactory(getBytesValues(), name);
}
@Override
public SortedBinaryDocValues getBytesValues() {
return new SortedBinaryDocValues() {
@Override
public boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
@Override
public int docValueCount() {
return 1;
}
@Override
public BytesRef nextValue() throws IOException {
return values.binaryValue();
}
};
}
@Override
public long ramBytesUsed() {
return 1L;
}
};
}
@Override
public SortField sortField(Object missingValue, MultiValueMode sortMode, XFieldComparatorSource.Nested nested, boolean reverse) {
throw new IllegalArgumentException("not supported for source pattern_text field type");
}
@Override
public BucketedSort newBucketedSort(
BigArrays bigArrays,
Object missingValue,
MultiValueMode sortMode,
XFieldComparatorSource.Nested nested,
SortOrder sortOrder,
DocValueFormat format,
int bucketSize,
BucketedSort.ExtraData extra
) {
throw new IllegalArgumentException("only supported on numeric fields");
}
}
| Builder |
java | quarkusio__quarkus | extensions/spring-security/deployment/src/test/java/io/quarkus/spring/security/deployment/springapp/SpringComponent.java | {
"start": 254,
"end": 1567
} | class ____ {
@PreAuthorize("hasRole(@roles.ADMIN)")
public String accessibleForAdminOnly() {
return "accessibleForAdminOnly";
}
@PreAuthorize("hasRole(@roles.USER)")
public String accessibleForUserOnly() {
return "accessibleForUserOnly";
}
@PreAuthorize("hasRole('user')")
public String accessibleForUserOnlyString() {
return "accessibleForUserOnlyString";
}
@PreAuthorize("#username == authentication.principal.username")
public String principalNameIs(Object something, String username, Object somethingElse) {
return username;
}
@PreAuthorize("#name != authentication.principal.username")
public String principalNameIsNot(String name) {
return name;
}
@PreAuthorize("#person.name == authentication.principal.username")
public String principalNameFromObject(Person person) {
return person.getName();
}
@PreAuthorize("#person.name != authentication.principal.username")
public String principalNameFromObjectIsNot(Person person) {
return person.getName();
}
public String notSecured() {
return "notSecured";
}
@Secured("admin")
public String securedWithSecuredAnnotation() {
return "securedWithSecuredAnnotation";
}
}
| SpringComponent |
java | alibaba__fastjson | src/main/java/com/alibaba/fastjson/asm/MethodVisitor.java | {
"start": 1739,
"end": 3872
} | interface ____ {
// -------------------------------------------------------------------------
// Annotations and non standard attributes
// -------------------------------------------------------------------------
// -------------------------------------------------------------------------
// Normal instructions
// -------------------------------------------------------------------------
/**
* Visits a zero operand instruction.
*
* @param opcode the opcode of the instruction to be visited. This opcode is either NOP, ACONST_NULL, ICONST_M1,
* ICONST_0, ICONST_1, ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1, FCONST_0, FCONST_1, FCONST_2,
* DCONST_0, DCONST_1, IALOAD, LALOAD, FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, IASTORE, LASTORE, FASTORE,
* DASTORE, AASTORE, BASTORE, CASTORE, SASTORE, POP, POP2, DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP, IADD,
* LADD, FADD, DADD, ISUB, LSUB, FSUB, DSUB, IMUL, LMUL, FMUL, DMUL, IDIV, LDIV, FDIV, DDIV, IREM, LREM, FREM, DREM,
* INEG, LNEG, FNEG, DNEG, ISHL, LSHL, ISHR, LSHR, IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR, I2L, I2F, I2D,
* L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, I2B, I2C, I2S, LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN,
* FRETURN, DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW, MONITORENTER, or MONITOREXIT.
*/
void visitInsn(int opcode);
void visitIntInsn(int opcode, int operand);
/**
* Visits a local variable instruction. A local variable instruction is an instruction that loads or stores the
* value of a local variable.
*
* @param opcode the opcode of the local variable instruction to be visited. This opcode is either ILOAD, LLOAD,
* FLOAD, DLOAD, ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or RET.
* @param var the operand of the instruction to be visited. This operand is the index of a local variable.
*/
void visitVarInsn(int opcode, int var);
/**
* Visits a type instruction. A type instruction is an instruction that takes the internal name of a | MethodVisitor |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/converters/RuntimeParamConverterTest.java | {
"start": 2187,
"end": 2638
} | class ____ {
@GET
public Response greet(@QueryParam("number") Optional<Integer> numberOpt) {
if (numberOpt.isPresent()) {
return Response.ok(String.format("Hello, %s!", numberOpt.get())).build();
} else {
return Response.ok("Hello, world! No number was provided.").build();
}
}
}
@Provider
@ApplicationScoped
public static | ParamConverterEndpoint |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoAnnotationErrorsTest.java | {
"start": 6812,
"end": 7689
} | class ____ {",
" @AutoAnnotation static TestAnnotation newTestAnnotation(String value) {",
" return new AutoAnnotation_Test_newTestAnnotation(value);",
" }",
"}");
Compilation compilation =
javac().withProcessors(new AutoAnnotationProcessor()).compile(TEST_ANNOTATION, testSource);
assertThat(compilation)
.hadErrorContaining(
"method parameter 'value' has type java.lang.String "
+ "but com.example.TestAnnotation.value has type int")
.inFile(testSource)
.onLineContaining("newTestAnnotation(String value)");
}
@Test
public void testWrongTypeCollection() {
JavaFileObject testAnnotation =
JavaFileObjects.forSourceLines(
"com.example.TestAnnotation",
"package com.example;",
"",
"public @ | Test |
java | quarkusio__quarkus | extensions/elytron-security-jdbc/deployment/src/test/java/io/quarkus/elytron/security/jdbc/CustomRoleDecoderDevModeTest.java | {
"start": 832,
"end": 3902
} | class ____ {
static Class[] testClassesWithCustomRoleDecoder = Stream.concat(
Arrays.stream(testClasses),
Arrays.stream(new Class[] { CustomRoleDecoder.class })).toArray(Class[]::new);
@RegisterExtension
static final QuarkusDevModeTest config = new QuarkusDevModeTest()
.setArchiveProducer(() -> {
try (var in = CustomRoleDecoderDevModeTest.class.getClassLoader()
.getResourceAsStream("custom-role-decoder/application.properties")) {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(testClassesWithCustomRoleDecoder)
.addClasses(JdbcSecurityRealmTest.class)
.addAsResource("custom-role-decoder/import.sql")
.addAsResource(
new StringAsset(ContinuousTestingTestUtils
.appProperties(new String(FileUtil.readFileContents(in), StandardCharsets.UTF_8))),
"application.properties");
} catch (IOException e) {
throw new RuntimeException(e);
}
}).setTestArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class).addClass(CustomRoleDecoderET.class));
@Test
public void testConfigChange() {
RestAssured.given().auth().preemptive().basic("user", "user")
.when().get("/servlet-secured").then()
.statusCode(200);
//break the build time config
config.modifyResourceFile("application.properties",
s -> s.replace("quarkus.security.jdbc.principal-query.attribute-mappings.0.index=2",
"quarkus.security.jdbc.principal-query.attribute-mappings.0.index=3"));
RestAssured.given().auth().preemptive().basic("user", "user")
.when().get("/servlet-secured").then()
.statusCode(500);
//now fix it again
config.modifyResourceFile("application.properties",
s -> s.replace("quarkus.security.jdbc.principal-query.attribute-mappings.0.index=3",
"quarkus.security.jdbc.principal-query.attribute-mappings.0.index=2"));
RestAssured.given().auth().preemptive().basic("user", "user")
.when().get("/servlet-secured").then()
.statusCode(200);
}
@Test
public void testContinuousTesting() {
ContinuousTestingTestUtils utils = new ContinuousTestingTestUtils();
RestAssured.given().auth().preemptive().basic("user", "user")
.when().get("/servlet-secured").then()
.statusCode(200);
TestStatus status = utils.waitForNextCompletion();
Assertions.assertEquals(0, status.getTotalTestsFailed());
RestAssured.given().auth().preemptive().basic("user", "user")
.when().get("/servlet-secured").then()
.statusCode(200);
}
}
| CustomRoleDecoderDevModeTest |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/classrealm/ClassRealmConstituent.java | {
"start": 904,
"end": 931
} | class ____.
*
*/
public | realm |
java | alibaba__nacos | naming/src/main/java/com/alibaba/nacos/naming/cluster/transport/Serializer.java | {
"start": 771,
"end": 1307
} | interface ____ {
/**
* Serialize data with some kind of serializing protocol.
*
* @param data data to serialize
* @param <T> type of data
* @return byte array of serialized data
*/
<T> byte[] serialize(T data);
/**
* Deserialize byte array data to target type.
*
* @param data data to deserialize
* @param clazz target type
* @param <T> target type
* @return deserialized data map
*/
<T> T deserialize(byte[] data, Class<T> clazz);
}
| Serializer |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/typeutils/TypeSerializerSnapshot.java | {
"start": 3857,
"end": 8481
} | interface ____<T> {
/**
* Returns the version of the current snapshot's written binary format.
*
* @return the version of the current snapshot's written binary format.
*/
int getCurrentVersion();
/**
* Writes the serializer snapshot to the provided {@link DataOutputView}. The current version of
* the written serializer snapshot's binary format is specified by the {@link
* #getCurrentVersion()} method.
*
* @param out the {@link DataOutputView} to write the snapshot to.
* @throws IOException Thrown if the snapshot data could not be written.
* @see #writeVersionedSnapshot(DataOutputView, TypeSerializerSnapshot)
*/
void writeSnapshot(DataOutputView out) throws IOException;
/**
* Reads the serializer snapshot from the provided {@link DataInputView}. The version of the
* binary format that the serializer snapshot was written with is provided. This version can be
* used to determine how the serializer snapshot should be read.
*
* @param readVersion version of the serializer snapshot's written binary format
* @param in the {@link DataInputView} to read the snapshot from.
* @param userCodeClassLoader the user code classloader
* @throws IOException Thrown if the snapshot data could be read or parsed.
* @see #readVersionedSnapshot(DataInputView, ClassLoader)
*/
void readSnapshot(int readVersion, DataInputView in, ClassLoader userCodeClassLoader)
throws IOException;
/**
* Recreates a serializer instance from this snapshot. The returned serializer can be safely
* used to read data written by the prior serializer (i.e., the serializer that created this
* snapshot).
*
* @return a serializer instance restored from this serializer snapshot.
*/
TypeSerializer<T> restoreSerializer();
/**
* Checks current serializer's compatibility to read data written by the prior serializer.
*
* <p>When a checkpoint/savepoint is restored, this method checks whether the serialization
* format of the data in the checkpoint/savepoint is compatible for the format of the serializer
* used by the program that restores the checkpoint/savepoint. The outcome can be that the
* serialization format is compatible, that the program's serializer needs to reconfigure itself
* (meaning to incorporate some information from the TypeSerializerSnapshot to be compatible),
* that the format is outright incompatible, or that a migration needed. In the latter case, the
* TypeSerializerSnapshot produces a serializer to deserialize the data, and the restoring
* program's serializer re-serializes the data, thus converting the format during the restore
* operation.
*
* <p>This method must be implemented to clarify the compatibility. See FLIP-263 for more
* details.
*
* @param oldSerializerSnapshot the old serializer snapshot to check.
* @return the serializer compatibility result.
*/
TypeSerializerSchemaCompatibility<T> resolveSchemaCompatibility(
TypeSerializerSnapshot<T> oldSerializerSnapshot);
// ------------------------------------------------------------------------
// read / write utilities
// ------------------------------------------------------------------------
/**
* Writes the given snapshot to the out stream. One should always use this method to write
* snapshots out, rather than directly calling {@link #writeSnapshot(DataOutputView)}.
*
* <p>The snapshot written with this method can be read via {@link
* #readVersionedSnapshot(DataInputView, ClassLoader)}.
*/
static void writeVersionedSnapshot(DataOutputView out, TypeSerializerSnapshot<?> snapshot)
throws IOException {
out.writeUTF(snapshot.getClass().getName());
out.writeInt(snapshot.getCurrentVersion());
snapshot.writeSnapshot(out);
}
/**
* Reads a snapshot from the stream, performing resolving
*
* <p>This method reads snapshots written by {@link #writeVersionedSnapshot(DataOutputView,
* TypeSerializerSnapshot)}.
*/
static <T> TypeSerializerSnapshot<T> readVersionedSnapshot(DataInputView in, ClassLoader cl)
throws IOException {
final TypeSerializerSnapshot<T> snapshot =
TypeSerializerSnapshotSerializationUtil.readAndInstantiateSnapshotClass(in, cl);
int version = in.readInt();
snapshot.readSnapshot(version, in, cl);
return snapshot;
}
}
| TypeSerializerSnapshot |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/inheritance/joined/MixedInheritanceTest.java | {
"start": 8641,
"end": 8993
} | class ____ extends ForeignCustomer {
private String code;
public ItalianCustomer() {
}
public ItalianCustomer(Integer id, String name, String vat, String code) {
super( id, name, vat );
this.code = code;
}
public String getCode() {
return code;
}
public void setCode(String code) {
this.code = code;
}
}
}
| ItalianCustomer |
java | quarkusio__quarkus | extensions/reactive-routes/deployment/src/test/java/io/quarkus/vertx/web/failure/FailureHandlerPathTest.java | {
"start": 785,
"end": 1313
} | class ____ {
@Route
String fail(@Param String type) {
throw new RuntimeException("Unknown!");
}
// this path should not match
@Route(path = "/liaf", type = FAILURE, order = 1)
void onFailure(HttpServerResponse response) {
response.setStatusCode(501).end("liaf");
}
@Route(type = FAILURE, order = 2)
void onFailureNoPath(HttpServerResponse response) {
response.setStatusCode(500).end("no-path");
}
}
}
| Routes |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shortarrays/ShortArrays_assertHasSizeBetween_Test.java | {
"start": 1075,
"end": 2667
} | class ____ extends ShortArraysBaseTest {
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertHasSizeBetween(someInfo(), null, 0, 6))
.withMessage(actualIsNull());
}
@Test
void should_throw_illegal_argument_exception_if_lower_boundary_is_greater_than_higher_boundary() {
assertThatIllegalArgumentException().isThrownBy(() -> arrays.assertHasSizeBetween(someInfo(), actual, 4, 2))
.withMessage("The higher boundary <2> must be greater than the lower boundary <4>.");
}
@Test
void should_fail_if_size_of_actual_is_not_greater_than_or_equal_to_lower_boundary() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertHasSizeBetween(someInfo(), actual, 4, 6))
.withMessage(shouldHaveSizeBetween(actual, actual.length, 4, 6).create());
}
@Test
void should_fail_if_size_of_actual_is_not_less_than_higher_boundary() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertHasSizeBetween(someInfo(), actual, 1, 2))
.withMessage(shouldHaveSizeBetween(actual, actual.length, 1, 2).create());
}
@Test
void should_pass_if_size_of_actual_is_between_boundaries() {
arrays.assertHasSizeBetween(someInfo(), actual, 1, 6);
arrays.assertHasSizeBetween(someInfo(), actual, actual.length, actual.length);
}
}
| ShortArrays_assertHasSizeBetween_Test |
java | apache__maven | impl/maven-impl/src/main/java/org/apache/maven/impl/DefaultDependencyCoordinatesFactory.java | {
"start": 1375,
"end": 3342
} | class ____ implements DependencyCoordinatesFactory {
@Nonnull
@Override
public DependencyCoordinates create(@Nonnull DependencyCoordinatesFactoryRequest request) {
requireNonNull(request, "request");
InternalSession session = InternalSession.from(request.getSession());
ArtifactType type = null;
if (request.getType() != null) {
type = session.getSession().getArtifactTypeRegistry().get(request.getType());
}
if (request.getCoordinatesString() != null) {
return new DefaultDependencyCoordinates(
session,
new org.eclipse.aether.graph.Dependency(
new org.eclipse.aether.artifact.DefaultArtifact(request.getCoordinatesString()),
request.getScope(),
request.isOptional(),
map(request.getExclusions(), this::toExclusion)));
} else {
return new DefaultDependencyCoordinates(
session,
new org.eclipse.aether.graph.Dependency(
new org.eclipse.aether.artifact.DefaultArtifact(
request.getGroupId(),
request.getArtifactId(),
request.getClassifier(),
request.getExtension(),
request.getVersion(),
type),
request.getScope(),
request.isOptional(),
map(request.getExclusions(), this::toExclusion)));
}
}
private org.eclipse.aether.graph.Exclusion toExclusion(Exclusion exclusion) {
return new org.eclipse.aether.graph.Exclusion(exclusion.getGroupId(), exclusion.getArtifactId(), "*", "*");
}
}
| DefaultDependencyCoordinatesFactory |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/build/BuildpackResolvers.java | {
"start": 1077,
"end": 2694
} | class ____ {
private static final List<BuildpackResolver> resolvers = getResolvers();
private BuildpackResolvers() {
}
private static List<BuildpackResolver> getResolvers() {
List<BuildpackResolver> resolvers = new ArrayList<>();
resolvers.add(BuilderBuildpack::resolve);
resolvers.add(DirectoryBuildpack::resolve);
resolvers.add(TarGzipBuildpack::resolve);
resolvers.add(ImageBuildpack::resolve);
return Collections.unmodifiableList(resolvers);
}
/**
* Resolve a collection of {@link BuildpackReference BuildpackReferences} to a
* {@link Buildpacks} instance.
* @param context the resolver context
* @param references the references to resolve
* @return a {@link Buildpacks} instance
*/
static Buildpacks resolveAll(BuildpackResolverContext context, Collection<BuildpackReference> references) {
Assert.notNull(context, "'context' must not be null");
if (CollectionUtils.isEmpty(references)) {
return Buildpacks.EMPTY;
}
List<Buildpack> buildpacks = new ArrayList<>(references.size());
for (BuildpackReference reference : references) {
buildpacks.add(resolve(context, reference));
}
return Buildpacks.of(buildpacks);
}
private static Buildpack resolve(BuildpackResolverContext context, BuildpackReference reference) {
Assert.notNull(reference, "'reference' must not be null");
for (BuildpackResolver resolver : resolvers) {
Buildpack buildpack = resolver.resolve(context, reference);
if (buildpack != null) {
return buildpack;
}
}
throw new IllegalArgumentException("Invalid buildpack reference '" + reference + "'");
}
}
| BuildpackResolvers |
java | quarkusio__quarkus | integration-tests/opentelemetry-vertx/src/main/java/io/quarkus/it/opentelemetry/vertx/ExporterRouter.java | {
"start": 553,
"end": 1415
} | class ____ {
@Inject
Router router;
@Inject
InMemorySpanExporter exporter;
public void register(@Observes StartupEvent ev) {
router.get("/reset").handler(rc -> {
exporter.reset();
rc.response().end();
});
router.get("/export").handler(rc -> {
List<SpanData> export = exporter.getFinishedSpanItems()
.stream()
.filter(sd -> !sd.getName().contains("export") && !sd.getName().contains("reset")
&& !sd.getName().contains("bus/messages"))
.collect(Collectors.toList());
rc.response()
.putHeader("content-type", "application/json; charset=utf-8")
.end(Json.encodePrettily(export));
});
}
@ApplicationScoped
static | ExporterRouter |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/parser/SQLSelectListCache.java | {
"start": 972,
"end": 3239
} | class ____ {
private static final Log LOG = LogFactory.getLog(SQLSelectListCache.class);
private final DbType dbType;
private final List<Entry> entries = new CopyOnWriteArrayList<Entry>();
public SQLSelectListCache(DbType dbType) {
this.dbType = dbType;
}
public void add(String select) {
if (select == null || select.length() == 0) {
return;
}
SQLSelectParser selectParser = SQLParserUtils.createSQLStatementParser(select, dbType)
.createSQLSelectParser();
SQLSelectQueryBlock queryBlock = SQLParserUtils.createSelectQueryBlock(dbType);
selectParser.accept(Token.SELECT);
selectParser.parseSelectList(queryBlock);
selectParser.accept(Token.FROM);
selectParser.accept(Token.EOF);
String printSql = queryBlock.toString();
long printSqlHash = FnvHash.fnv1a_64_lower(printSql);
entries.add(
new Entry(
select.substring(6),
queryBlock,
printSql,
printSqlHash
)
);
if (entries.size() > 5) {
LOG.warn("SelectListCache is too large.");
}
}
public int getSize() {
return entries.size();
}
public void clear() {
entries.clear();
}
public boolean match(Lexer lexer, SQLSelectQueryBlock queryBlock) {
if (lexer.token != Token.SELECT) {
return false;
}
int pos = lexer.pos;
String text = lexer.text;
for (int i = 0; i < entries.size(); i++) {
Entry entry = entries.get(i);
String block = entry.sql;
if (text.startsWith(block, pos)) {
//SQLSelectQueryBlock queryBlockCached = queryBlockCache.get(i);
// queryBlockCached.cloneSelectListTo(queryBlock);
queryBlock.setCachedSelectList(entry.printSql, entry.printSqlHash);
int len = pos + block.length();
//todo fix reset
lexer.reset(len, text.charAt(len), Token.FROM);
return true;
}
}
return false;
}
private static | SQLSelectListCache |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/sealed/DependentSealedTest.java | {
"start": 600,
"end": 672
} | class ____ permits MyDependentSubclass {
}
static final | MyDependent |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/urls/Urls_assertHasProtocol_Test.java | {
"start": 1042,
"end": 2124
} | class ____ extends UrlsBaseTest {
@Test
void should_pass_if_actual_uri_has_the_given_protocol() throws MalformedURLException {
// GIVEN
URL url = new URL("http://example.com/pages/");
String expectedProtocol = "http";
// WHEN/THEN
urls.assertHasProtocol(info, url, expectedProtocol);
}
@Test
void should_fail_if_actual_is_null() {
// GIVEN
URL url = null;
String expectedProtocol = "http";
// WHEN
var assertionError = expectAssertionError(() -> urls.assertHasProtocol(info, url, expectedProtocol));
// THEN
then(assertionError).hasMessage(actualIsNull());
}
@Test
void should_fail_if_actual_protocol_is_not_the_expected_protocol() throws MalformedURLException {
// GIVEN
URL url = new URL("http://example.com/pages/");
String expectedProtocol = "ftp";
// WHEN
var assertionError = expectAssertionError(() -> urls.assertHasProtocol(info, url, expectedProtocol));
// THEN
then(assertionError).hasMessage(shouldHaveProtocol(url, expectedProtocol).create());
}
}
| Urls_assertHasProtocol_Test |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/util/ContentCachingResponseWrapper.java | {
"start": 1779,
"end": 8355
} | class ____ extends HttpServletResponseWrapper {
private final FastByteArrayOutputStream content = new FastByteArrayOutputStream(1024);
private @Nullable ServletOutputStream outputStream;
private @Nullable PrintWriter writer;
private @Nullable Integer contentLength;
/**
* Create a new ContentCachingResponseWrapper for the given servlet response.
* @param response the original servlet response
*/
public ContentCachingResponseWrapper(HttpServletResponse response) {
super(response);
}
@Override
public void sendError(int sc) throws IOException {
copyBodyToResponse(false);
try {
super.sendError(sc);
}
catch (IllegalStateException ex) {
// Possibly on Tomcat when called too late: fall back to silent setStatus
super.setStatus(sc);
}
}
@Override
public void sendError(int sc, String msg) throws IOException {
copyBodyToResponse(false);
try {
super.sendError(sc, msg);
}
catch (IllegalStateException ex) {
// Possibly on Tomcat when called too late: fall back to silent setStatus
super.setStatus(sc);
}
}
@Override
public void sendRedirect(String location) throws IOException {
copyBodyToResponse(false);
super.sendRedirect(location);
}
@Override
public ServletOutputStream getOutputStream() throws IOException {
if (this.outputStream == null) {
this.outputStream = new ResponseServletOutputStream(getResponse().getOutputStream());
}
return this.outputStream;
}
@Override
public PrintWriter getWriter() throws IOException {
if (this.writer == null) {
String characterEncoding = getCharacterEncoding();
this.writer = (characterEncoding != null ? new ResponsePrintWriter(characterEncoding) :
new ResponsePrintWriter(WebUtils.DEFAULT_CHARACTER_ENCODING));
}
return this.writer;
}
/**
* This method neither flushes content to the client nor commits the underlying
* response, since the content has not yet been copied to the response.
* <p>Invoke {@link #copyBodyToResponse()} to copy the cached body content to
* the wrapped response object and flush its buffer.
* @see jakarta.servlet.ServletResponseWrapper#flushBuffer()
*/
@Override
public void flushBuffer() throws IOException {
// no-op
}
@Override
public void setContentLength(int len) {
if (len > this.content.size()) {
this.content.resize(len);
}
this.contentLength = len;
}
@Override
public void setContentLengthLong(long len) {
setContentLength(toContentLengthInt(len));
}
private int toContentLengthInt(long contentLength) {
if (contentLength > Integer.MAX_VALUE) {
throw new IllegalArgumentException("Content-Length exceeds ContentCachingResponseWrapper's maximum (" +
Integer.MAX_VALUE + "): " + contentLength);
}
return (int) contentLength;
}
@Override
public boolean containsHeader(String name) {
if (this.contentLength != null && HttpHeaders.CONTENT_LENGTH.equalsIgnoreCase(name)) {
return true;
}
else {
return super.containsHeader(name);
}
}
@Override
public void setHeader(String name, String value) {
if (HttpHeaders.CONTENT_LENGTH.equalsIgnoreCase(name)) {
if (value != null) {
this.contentLength = toContentLengthInt(Long.parseLong(value));
}
else {
this.contentLength = null;
super.setHeader(name, null);
}
}
else {
super.setHeader(name, value);
}
}
@Override
public void addHeader(String name, String value) {
if (HttpHeaders.CONTENT_LENGTH.equalsIgnoreCase(name)) {
this.contentLength = toContentLengthInt(Long.parseLong(value));
}
else {
super.addHeader(name, value);
}
}
@Override
public void setIntHeader(String name, int value) {
if (HttpHeaders.CONTENT_LENGTH.equalsIgnoreCase(name)) {
this.contentLength = value;
}
else {
super.setIntHeader(name, value);
}
}
@Override
public void addIntHeader(String name, int value) {
if (HttpHeaders.CONTENT_LENGTH.equalsIgnoreCase(name)) {
this.contentLength = value;
}
else {
super.addIntHeader(name, value);
}
}
@Override
public @Nullable String getHeader(String name) {
if (this.contentLength != null && HttpHeaders.CONTENT_LENGTH.equalsIgnoreCase(name)) {
return this.contentLength.toString();
}
else {
return super.getHeader(name);
}
}
@Override
public Collection<String> getHeaders(String name) {
if (this.contentLength != null && HttpHeaders.CONTENT_LENGTH.equalsIgnoreCase(name)) {
return Collections.singleton(this.contentLength.toString());
}
else {
return super.getHeaders(name);
}
}
@Override
public Collection<String> getHeaderNames() {
Collection<String> headerNames = super.getHeaderNames();
if (this.contentLength != null) {
Set<String> result = new LinkedHashSet<>(headerNames);
result.add(HttpHeaders.CONTENT_LENGTH);
return result;
}
else {
return headerNames;
}
}
@Override
public void setBufferSize(int size) {
if (size > this.content.size()) {
this.content.resize(size);
}
}
@Override
public void resetBuffer() {
this.content.reset();
}
@Override
public void reset() {
super.reset();
this.content.reset();
}
/**
* Return the cached response content as a byte array.
*/
public byte[] getContentAsByteArray() {
return this.content.toByteArray();
}
/**
* Return an {@link InputStream} to the cached content.
* @since 4.2
*/
public InputStream getContentInputStream() {
return this.content.getInputStream();
}
/**
* Return the current size of the cached content.
* @since 4.2
*/
public int getContentSize() {
return this.content.size();
}
/**
* Copy the complete cached body content to the response.
* @since 4.2
*/
public void copyBodyToResponse() throws IOException {
copyBodyToResponse(true);
}
/**
* Copy the cached body content to the response.
* @param complete whether to set a corresponding content length
* for the complete cached body content
* @since 4.2
*/
protected void copyBodyToResponse(boolean complete) throws IOException {
if (this.content.size() > 0) {
HttpServletResponse rawResponse = (HttpServletResponse) getResponse();
if (!rawResponse.isCommitted()) {
if (complete || this.contentLength != null) {
if (rawResponse.getHeader(HttpHeaders.TRANSFER_ENCODING) == null) {
rawResponse.setContentLength(complete ? this.content.size() : this.contentLength);
}
this.contentLength = null;
}
}
this.content.writeTo(rawResponse.getOutputStream());
this.content.reset();
if (complete) {
super.flushBuffer();
}
}
}
private | ContentCachingResponseWrapper |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java | {
"start": 3877,
"end": 43404
} | class ____ extends ESTestCase {
private SSLService sslService;
private Environment env;
private Path testnodeCert;
private Path testnodeKey;
@Before
public void createSSLService() {
testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt");
testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem");
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("xpack.security.http.ssl.secure_key_passphrase", "testnode");
Settings settings = Settings.builder()
.put("xpack.security.http.ssl.enabled", true)
.put("xpack.security.http.ssl.key", testnodeKey)
.put("xpack.security.http.ssl.certificate", testnodeCert)
.put("path.home", createTempDir())
.setSecureSettings(secureSettings)
.build();
env = TestEnvironment.newEnvironment(settings);
sslService = new SSLService(env);
}
public void testDefaultClientAuth() throws Exception {
Settings settings = Settings.builder().put(env.settings()).put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true).build();
sslService = new SSLService(TestEnvironment.newEnvironment(settings));
Netty4HttpServerTransport transport = new Netty4HttpServerTransport(
settings,
new NetworkService(Collections.emptyList()),
mock(ThreadPool.class),
xContentRegistry(),
new AggregatingDispatcher(),
randomClusterSettings(),
new SharedGroupFactory(settings),
TelemetryProvider.NOOP,
new TLSConfig(sslService.profile(XPackSettings.HTTP_SSL_PREFIX)::engine),
null,
randomFrom((httpPreRequest, channel, listener) -> listener.onResponse(null), null)
);
ChannelHandler handler = transport.configureServerChannelHandler();
final EmbeddedChannel ch = new EmbeddedChannel(handler);
assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false));
assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(false));
}
public void testOptionalClientAuth() throws Exception {
String value = randomCapitalization(SslClientAuthenticationMode.OPTIONAL);
Settings settings = Settings.builder()
.put(env.settings())
.put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
.put("xpack.security.http.ssl.client_authentication", value)
.build();
sslService = new SSLService(TestEnvironment.newEnvironment(settings));
Netty4HttpServerTransport transport = new Netty4HttpServerTransport(
settings,
new NetworkService(Collections.emptyList()),
mock(ThreadPool.class),
xContentRegistry(),
new AggregatingDispatcher(),
randomClusterSettings(),
new SharedGroupFactory(settings),
TelemetryProvider.NOOP,
new TLSConfig(sslService.profile(XPackSettings.HTTP_SSL_PREFIX)::engine),
null,
randomFrom((httpPreRequest, channel, listener) -> listener.onResponse(null), null)
);
ChannelHandler handler = transport.configureServerChannelHandler();
final EmbeddedChannel ch = new EmbeddedChannel(handler);
assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false));
assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(true));
}
public void testRequiredClientAuth() throws Exception {
String value = randomCapitalization(SslClientAuthenticationMode.REQUIRED);
Settings settings = Settings.builder()
.put(env.settings())
.put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
.put("xpack.security.http.ssl.client_authentication", value)
.build();
sslService = new SSLService(TestEnvironment.newEnvironment(settings));
final SslProfile httpSslProfile = sslService.profile(XPackSettings.HTTP_SSL_PREFIX);
Netty4HttpServerTransport transport = new Netty4HttpServerTransport(
settings,
new NetworkService(Collections.emptyList()),
mock(ThreadPool.class),
xContentRegistry(),
new AggregatingDispatcher(),
randomClusterSettings(),
new SharedGroupFactory(settings),
TelemetryProvider.NOOP,
new TLSConfig(httpSslProfile::engine),
null,
randomFrom((httpPreRequest, channel, listener) -> listener.onResponse(null), null)
);
ChannelHandler handler = transport.configureServerChannelHandler();
final EmbeddedChannel ch = new EmbeddedChannel(handler);
assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(true));
assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(false));
}
public void testNoClientAuth() throws Exception {
String value = randomCapitalization(SslClientAuthenticationMode.NONE);
Settings settings = Settings.builder()
.put(env.settings())
.put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
.put("xpack.security.http.ssl.client_authentication", value)
.build();
sslService = new SSLService(TestEnvironment.newEnvironment(settings));
Netty4HttpServerTransport transport = new Netty4HttpServerTransport(
settings,
new NetworkService(Collections.emptyList()),
mock(ThreadPool.class),
xContentRegistry(),
new AggregatingDispatcher(),
randomClusterSettings(),
new SharedGroupFactory(settings),
TelemetryProvider.NOOP,
new TLSConfig(sslService.profile(XPackSettings.HTTP_SSL_PREFIX)::engine),
null,
randomFrom((httpPreRequest, channel, listener) -> listener.onResponse(null), null)
);
ChannelHandler handler = transport.configureServerChannelHandler();
final EmbeddedChannel ch = new EmbeddedChannel(handler);
assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false));
assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(false));
}
public void testCustomSSLConfiguration() throws Exception {
Settings settings = Settings.builder().put(env.settings()).put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true).build();
sslService = new SSLService(TestEnvironment.newEnvironment(settings));
Netty4HttpServerTransport transport = new Netty4HttpServerTransport(
settings,
new NetworkService(Collections.emptyList()),
mock(ThreadPool.class),
xContentRegistry(),
new AggregatingDispatcher(),
randomClusterSettings(),
new SharedGroupFactory(settings),
TelemetryProvider.NOOP,
new TLSConfig(sslService.profile(XPackSettings.HTTP_SSL_PREFIX)::engine),
null,
randomFrom((httpPreRequest, channel, listener) -> listener.onResponse(null), null)
);
ChannelHandler handler = transport.configureServerChannelHandler();
EmbeddedChannel ch = new EmbeddedChannel(handler);
SSLEngine defaultEngine = ch.pipeline().get(SslHandler.class).engine();
settings = Settings.builder()
.put(env.settings())
.put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
.put("xpack.security.http.ssl.supported_protocols", "TLSv1.2")
.build();
sslService = new SSLService(TestEnvironment.newEnvironment(settings));
transport = new Netty4HttpServerTransport(
settings,
new NetworkService(Collections.emptyList()),
mock(ThreadPool.class),
xContentRegistry(),
new AggregatingDispatcher(),
randomClusterSettings(),
new SharedGroupFactory(settings),
TelemetryProvider.NOOP,
new TLSConfig(sslService.profile(XPackSettings.HTTP_SSL_PREFIX)::engine),
null,
randomFrom((httpPreRequest, channel, listener) -> listener.onResponse(null), null)
);
handler = transport.configureServerChannelHandler();
ch = new EmbeddedChannel(handler);
SSLEngine customEngine = ch.pipeline().get(SslHandler.class).engine();
assertThat(customEngine.getEnabledProtocols(), arrayContaining("TLSv1.2"));
assertThat(customEngine.getEnabledProtocols(), not(equalTo(defaultEngine.getEnabledProtocols())));
}
public void testNoExceptionWhenConfiguredWithoutSslKeySSLDisabled() throws Exception {
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("xpack.security.http.ssl.secure_key_passphrase", "testnode");
Settings settings = Settings.builder()
.put("xpack.security.http.ssl.enabled", false)
.put("xpack.security.http.ssl.key", testnodeKey)
.put("xpack.security.http.ssl.certificate", testnodeCert)
.setSecureSettings(secureSettings)
.put("path.home", createTempDir())
.build();
env = TestEnvironment.newEnvironment(settings);
sslService = new SSLService(env);
Netty4HttpServerTransport transport = new Netty4HttpServerTransport(
settings,
new NetworkService(Collections.emptyList()),
mock(ThreadPool.class),
xContentRegistry(),
new AggregatingDispatcher(),
randomClusterSettings(),
new SharedGroupFactory(settings),
TelemetryProvider.NOOP,
new TLSConfig(sslService.profile(XPackSettings.HTTP_SSL_PREFIX)::engine),
null,
randomFrom((httpPreRequest, channel, listener) -> listener.onResponse(null), null)
);
assertNotNull(transport.configureServerChannelHandler());
}
public void testAuthnContextWrapping() throws Exception {
final Settings settings = Settings.builder().put(env.settings()).build();
final AtomicReference<HttpRequest> dispatchedHttpRequestReference = new AtomicReference<>();
final String header = "TEST-" + randomAlphaOfLength(8);
final String headerValue = "TEST-" + randomAlphaOfLength(8);
final String transientHeader = "TEST-" + randomAlphaOfLength(8);
final String transientHeaderValue = "TEST-" + randomAlphaOfLength(8);
final HttpServerTransport.Dispatcher dispatcher = new HttpServerTransport.Dispatcher() {
@Override
public void dispatchRequest(final RestRequest request, final RestChannel channel, final ThreadContext threadContext) {
request.getHttpRequest().release();
// STEP 2: store the dispatched request, which should be wrapping the context
dispatchedHttpRequestReference.set(request.getHttpRequest());
}
@Override
public void dispatchBadRequest(final RestChannel channel, final ThreadContext threadContext, final Throwable cause) {
logger.error(() -> "--> Unexpected bad request [" + FakeRestRequest.requestToString(channel.request()) + "]", cause);
throw new AssertionError("Unexpected bad request");
}
};
final ThreadPool testThreadPool = new TestThreadPool(TEST_MOCK_TRANSPORT_THREAD_PREFIX);
try (
Netty4HttpServerTransport transport = Security.getHttpServerTransportWithHeadersValidator(
settings,
new NetworkService(List.of()),
testThreadPool,
xContentRegistry(),
dispatcher,
randomClusterSettings(),
new SharedGroupFactory(settings),
TelemetryProvider.NOOP,
TLSConfig.noTLS(),
null,
(httpPreRequest, channel, listener) -> {
// STEP 1: amend the thread context during authentication
testThreadPool.getThreadContext().putHeader(header, headerValue);
testThreadPool.getThreadContext().putTransient(transientHeader, transientHeaderValue);
listener.onResponse(null);
}
)
) {
safeGet(testThreadPool.generic().submit(() -> {
final ChannelHandler handler = transport.configureServerChannelHandler();
final EmbeddedChannel ch = new EmbeddedChannel(handler);
// remove these pipeline handlers as they interfere in the test scenario
for (String pipelineHandlerName : ch.pipeline().names()) {
if (pipelineHandlerName.equals("decoder")
|| pipelineHandlerName.equals("encoder")
|| pipelineHandlerName.equals("encoder_compress")
|| pipelineHandlerName.equals("chunked_writer")) {
ch.pipeline().remove(pipelineHandlerName);
}
}
// STEP 0: send a "wrapped" request
ch.writeInbound(
HttpHeadersAuthenticatorUtils.wrapAsMessageWithAuthenticationContext(
new DefaultHttpRequest(HTTP_1_1, HttpMethod.GET, "/wrapped_request")
)
);
ch.writeInbound(new DefaultLastHttpContent());
ch.flushInbound();
}));
// STEP 3: assert the wrapped context
var storedAuthnContext = HttpHeadersAuthenticatorUtils.extractAuthenticationContext(dispatchedHttpRequestReference.get());
assertThat(storedAuthnContext, notNullValue());
try (var ignored = testThreadPool.getThreadContext().stashContext()) {
assertThat(testThreadPool.getThreadContext().getHeader(header), nullValue());
assertThat(testThreadPool.getThreadContext().getTransientHeaders().get(transientHeader), nullValue());
storedAuthnContext.restore();
assertThat(testThreadPool.getThreadContext().getHeader(header), is(headerValue));
assertThat(testThreadPool.getThreadContext().getTransientHeaders().get(transientHeader), is(transientHeaderValue));
}
} finally {
testThreadPool.shutdownNow();
}
}
public void testHttpHeaderAuthnBypassHeaderValidator() throws Exception {
final Settings settings = Settings.builder().put(env.settings()).build();
final ThreadPool testThreadPool = new TestThreadPool(TEST_MOCK_TRANSPORT_THREAD_PREFIX);
try (
Netty4HttpServerTransport transport = Security.getHttpServerTransportWithHeadersValidator(
settings,
new NetworkService(List.of()),
testThreadPool,
xContentRegistry(),
new HttpServerTransport.Dispatcher() {
@Override
public void dispatchRequest(RestRequest request, RestChannel channel, ThreadContext threadContext) {
fail("Request should not be dispatched");
}
@Override
public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, Throwable cause) {
try {
channel.sendResponse(new RestResponse(channel, (Exception) cause));
} catch (IOException e) {
fail(e, "Unexpected exception dispatching bad request");
}
}
},
randomClusterSettings(),
new SharedGroupFactory(settings),
TelemetryProvider.NOOP,
TLSConfig.noTLS(),
null,
(httpPreRequest, channel, listener) -> listener.onResponse(null)
)
) {
safeGet(testThreadPool.generic().submit(() -> {
final ChannelHandler handler = transport.configureServerChannelHandler();
final EmbeddedChannel ch = new EmbeddedChannel(handler);
for (String pipelineHandlerName : ch.pipeline().names()) {
// remove the decoder AND the header_validator
if (pipelineHandlerName.equals("decoder") || pipelineHandlerName.equals("header_validator")
// remove these pipeline handlers as they interfere in the test scenario
|| pipelineHandlerName.equals("encoder")
|| pipelineHandlerName.equals("encoder_compress")
|| pipelineHandlerName.equals("chunked_writer")) {
ch.pipeline().remove(pipelineHandlerName);
}
}
// this tests a request that cannot be authenticated, but somehow passed authentication
// this is the case of an erroneous internal state
ch.writeInbound(new DefaultFullHttpRequest(HTTP_1_1, HttpMethod.GET, "/unauthenticable_request"));
ch.flushInbound();
ch.flushOutbound();
Netty4FullHttpResponse response = ch.readOutbound();
assertThat(response.status(), is(HttpResponseStatus.INTERNAL_SERVER_ERROR));
String responseContentString = new String(ByteBufUtil.getBytes(response.content()), StandardCharsets.UTF_8);
assertThat(
responseContentString,
containsString("\"type\":\"security_exception\",\"reason\":\"Request is not authenticated\"")
);
// this tests a request that CAN be authenticated, but that, somehow, has not been
ch.writeInbound(
HttpHeadersAuthenticatorUtils.wrapAsMessageWithAuthenticationContext(
new DefaultHttpRequest(HTTP_1_1, HttpMethod.GET, "/_request")
)
);
ch.writeInbound(new DefaultLastHttpContent());
ch.flushInbound();
ch.flushOutbound();
response = ch.readOutbound();
assertThat(response.status(), is(HttpResponseStatus.INTERNAL_SERVER_ERROR));
responseContentString = new String(ByteBufUtil.getBytes(response.content()), StandardCharsets.UTF_8);
assertThat(
responseContentString,
containsString("\"type\":\"security_exception\",\"reason\":\"Request is not authenticated\"")
);
// this tests the case where authentication passed and the request is to be dispatched, BUT that the authentication context
// cannot be instated before dispatching the request
HttpMessage authenticableMessage = HttpHeadersAuthenticatorUtils.wrapAsMessageWithAuthenticationContext(
new DefaultHttpRequest(HTTP_1_1, HttpMethod.GET, "/unauthenticated_request")
);
((HttpHeadersWithAuthenticationContext) authenticableMessage.headers()).setAuthenticationContext(() -> {
throw new ElasticsearchException("Boom");
});
ch.writeInbound(authenticableMessage);
ch.writeInbound(new DefaultLastHttpContent());
ch.flushInbound();
ch.flushOutbound();
response = ch.readOutbound();
assertThat(response.status(), is(HttpResponseStatus.INTERNAL_SERVER_ERROR));
responseContentString = new String(ByteBufUtil.getBytes(response.content()), StandardCharsets.UTF_8);
assertThat(responseContentString, containsString("\"type\":\"exception\",\"reason\":\"Boom\""));
}));
} finally {
testThreadPool.shutdownNow();
}
}
public void testHttpHeaderAuthnBypassDecoder() throws Exception {
final Settings settings = Settings.builder().put(env.settings()).build();
final HttpServerTransport.Dispatcher dispatcher = new HttpServerTransport.Dispatcher() {
@Override
public void dispatchRequest(final RestRequest request, final RestChannel channel, final ThreadContext threadContext) {
logger.error("--> Unexpected good request dispatch [" + FakeRestRequest.requestToString(channel.request()) + "]");
throw new AssertionError("Unexpected good request dispatch");
}
@Override
public void dispatchBadRequest(final RestChannel channel, final ThreadContext threadContext, final Throwable cause) {
assertThat(cause, instanceOf(HttpHeadersValidationException.class));
try {
channel.sendResponse(new RestResponse(channel, (Exception) ((ElasticsearchWrapperException) cause).getCause()));
} catch (IOException e) {
throw new AssertionError(e);
}
}
};
final ThreadPool testThreadPool = new TestThreadPool(TEST_MOCK_TRANSPORT_THREAD_PREFIX);
try (
Netty4HttpServerTransport transport = Security.getHttpServerTransportWithHeadersValidator(
settings,
new NetworkService(List.of()),
testThreadPool,
xContentRegistry(),
dispatcher,
randomClusterSettings(),
new SharedGroupFactory(settings),
TelemetryProvider.NOOP,
TLSConfig.noTLS(),
null,
(httpPreRequest, channel, listener) -> listener.onResponse(null)
)
) {
safeGet(testThreadPool.generic().submit(() -> {
final ChannelHandler handler = transport.configureServerChannelHandler();
final EmbeddedChannel ch = new EmbeddedChannel(handler);
// replace the decoder with the vanilla one that does no wrapping and will trip the header validator
ch.pipeline().replace("decoder", "decoder", new HttpRequestDecoder());
// remove these pipeline handlers as they interfere in the test scenario
for (String pipelineHandlerName : ch.pipeline().names()) {
if (pipelineHandlerName.equals("encoder")
|| pipelineHandlerName.equals("encoder_compress")
|| pipelineHandlerName.equals("chunked_writer")) {
ch.pipeline().remove(pipelineHandlerName);
}
}
// tests requests that are not wrapped by the "decoder" and so cannot be authenticated
ch.writeInbound(new DefaultFullHttpRequest(HTTP_1_1, HttpMethod.GET, "/unwrapped_full_request"));
ch.flushInbound();
ch.flushOutbound();
Netty4FullHttpResponse response = ch.readOutbound();
assertThat(response.status(), is(HttpResponseStatus.INTERNAL_SERVER_ERROR));
var responseContentString = new String(ByteBufUtil.getBytes(response.content()), StandardCharsets.UTF_8);
assertThat(
responseContentString,
containsString("\"type\":\"illegal_state_exception\",\"reason\":\"Cannot authenticate unwrapped requests\"")
);
ch.writeInbound(new DefaultHttpRequest(HTTP_1_1, HttpMethod.GET, "/unwrapped_request"));
ch.flushInbound();
ch.flushOutbound();
response = ch.readOutbound();
assertThat(response.status(), is(HttpResponseStatus.INTERNAL_SERVER_ERROR));
responseContentString = new String(ByteBufUtil.getBytes(response.content()), StandardCharsets.UTF_8);
assertThat(
responseContentString,
containsString("\"type\":\"illegal_state_exception\",\"reason\":\"Cannot authenticate unwrapped requests\"")
);
}));
} finally {
testThreadPool.shutdownNow();
}
}
public void testMalformedRequestDispatchedNoAuthn() throws Exception {
assumeTrue(
"This test doesn't work correctly under turkish-like locale, because it uses String#toUpper() for asserted error messages",
isTurkishLocale() == false
);
final AtomicReference<Throwable> dispatchThrowableReference = new AtomicReference<>();
final AtomicInteger authnInvocationCount = new AtomicInteger();
final AtomicInteger badDispatchInvocationCount = new AtomicInteger();
final Settings settings = Settings.builder()
.put(env.settings())
.put(HttpTransportSettings.SETTING_HTTP_MAX_HEADER_SIZE.getKey(), "32b")
.put(HttpTransportSettings.SETTING_HTTP_MAX_INITIAL_LINE_LENGTH.getKey(), "32b")
.build();
final HttpServerTransport.Dispatcher dispatcher = new HttpServerTransport.Dispatcher() {
@Override
public void dispatchRequest(final RestRequest request, final RestChannel channel, final ThreadContext threadContext) {
logger.error("--> Unexpected dispatched request [" + FakeRestRequest.requestToString(channel.request()) + "]");
throw new AssertionError("Unexpected dispatched request");
}
@Override
public void dispatchBadRequest(final RestChannel channel, final ThreadContext threadContext, final Throwable cause) {
assertThat(cause, notNullValue());
dispatchThrowableReference.set(cause);
badDispatchInvocationCount.incrementAndGet();
}
};
final ThreadPool testThreadPool = new TestThreadPool(TEST_MOCK_TRANSPORT_THREAD_PREFIX);
try (
Netty4HttpServerTransport transport = Security.getHttpServerTransportWithHeadersValidator(
settings,
new NetworkService(List.of()),
testThreadPool,
xContentRegistry(),
dispatcher,
randomClusterSettings(),
new SharedGroupFactory(settings),
TelemetryProvider.NOOP,
TLSConfig.noTLS(),
null,
(httpPreRequest, channel, listener) -> {
authnInvocationCount.incrementAndGet();
throw new AssertionError("Malformed requests shouldn't be authenticated");
}
)
) {
safeGet(testThreadPool.generic().submit(() -> {
final ChannelHandler handler = transport.configureServerChannelHandler();
assertThat(authnInvocationCount.get(), is(0));
assertThat(badDispatchInvocationCount.get(), is(0));
// case 1: invalid initial line
{
EmbeddedChannel ch = new EmbeddedChannel(handler);
ByteBuf buf = ch.alloc().buffer();
appendAsciiLine("This is not a valid HTTP line", buf);
appendCrLf(buf);
ch.writeInbound(buf);
ch.flushInbound();
assertThat(dispatchThrowableReference.get().toString(), containsString("NOT A VALID HTTP LINE"));
assertThat(badDispatchInvocationCount.get(), is(1));
assertThat(authnInvocationCount.get(), is(0));
}
// case 2: too long initial line
{
EmbeddedChannel ch = new EmbeddedChannel(handler);
ByteBuf buf = ch.alloc().buffer();
appendAsciiLine("GET /this/is/a/valid/but/too/long/initial/line HTTP/1.1", buf);
appendCrLf(buf);
ch.writeInbound(buf);
ch.flushInbound();
assertThat(dispatchThrowableReference.get().toString(), containsString("HTTP line is larger than"));
assertThat(badDispatchInvocationCount.get(), is(2));
assertThat(authnInvocationCount.get(), is(0));
}
// case 3: invalid header with no colon
{
EmbeddedChannel ch = new EmbeddedChannel(handler);
ByteBuf buf = ch.alloc().buffer();
appendAsciiLine("GET /url HTTP/1.1", buf);
appendAsciiLine("Host", buf);
appendCrLf(buf);
ch.writeInbound(buf);
ch.flushInbound();
assertThat(dispatchThrowableReference.get().toString(), containsString("No colon found"));
assertThat(badDispatchInvocationCount.get(), is(3));
assertThat(authnInvocationCount.get(), is(0));
}
// case 4: invalid header longer than max allowed
{
EmbeddedChannel ch = new EmbeddedChannel(handler);
ByteBuf buf = ch.alloc().buffer();
appendAsciiLine("GET /url HTTP/1.1", buf);
appendAsciiLine("Host: this.looks.like.a.good.url.but.is.longer.than.permitted", buf);
appendCrLf(buf);
ch.writeInbound(buf);
ch.flushInbound();
assertThat(dispatchThrowableReference.get().toString(), containsString("HTTP header is larger than"));
assertThat(badDispatchInvocationCount.get(), is(4));
assertThat(authnInvocationCount.get(), is(0));
}
// case 5: invalid header format
{
EmbeddedChannel ch = new EmbeddedChannel(handler);
ByteBuf buf = ch.alloc().buffer();
appendAsciiLine("GET /url HTTP/1.1", buf);
ByteBufUtil.copy(AsciiString.of("Host: invalid header value"), buf);
buf.writeByte(0x01);
appendCrLf(buf);
appendCrLf(buf);
ch.writeInbound(buf);
ch.flushInbound();
assertThat(dispatchThrowableReference.get().toString(), containsString("Validation failed for header 'Host'"));
assertThat(badDispatchInvocationCount.get(), is(5));
assertThat(authnInvocationCount.get(), is(0));
}
// case 6: connection closed before all headers are sent
{
EmbeddedChannel ch = new EmbeddedChannel(handler);
ByteBuf buf = ch.alloc().buffer();
appendAsciiLine("GET /url HTTP/1.1", buf);
appendAsciiLine("Host: localhost", buf);
ch.writeInbound(buf);
ch.flushInbound();
safeGet(ch.close());
assertThat(authnInvocationCount.get(), is(0));
}
}));
} finally {
testThreadPool.shutdownNow();
}
}
public void testOptionsRequestsFailWith400AndNoAuthn() throws Exception {
final Settings settings = Settings.builder().put(env.settings()).build();
AtomicReference<Throwable> badRequestCauseReference = new AtomicReference<>();
final HttpServerTransport.Dispatcher dispatcher = new HttpServerTransport.Dispatcher() {
@Override
public void dispatchRequest(final RestRequest request, final RestChannel channel, final ThreadContext threadContext) {
logger.error("--> Unexpected dispatched request [" + FakeRestRequest.requestToString(channel.request()) + "]");
throw new AssertionError("Unexpected dispatched request");
}
@Override
public void dispatchBadRequest(final RestChannel channel, final ThreadContext threadContext, final Throwable cause) {
badRequestCauseReference.set(cause);
}
};
final ThreadPool testThreadPool = new TestThreadPool(TEST_MOCK_TRANSPORT_THREAD_PREFIX);
try (
Netty4HttpServerTransport transport = Security.getHttpServerTransportWithHeadersValidator(
settings,
new NetworkService(List.of()),
testThreadPool,
xContentRegistry(),
dispatcher,
randomClusterSettings(),
new SharedGroupFactory(settings),
TelemetryProvider.NOOP,
TLSConfig.noTLS(),
null,
(httpPreRequest, channel, listener) -> {
throw new AssertionError("should not be invoked for OPTIONS requests");
},
(httpPreRequest, channel, listener) -> {
throw new AssertionError("should not be invoked for OPTIONS requests with a body");
}
)
) {
safeGet(testThreadPool.generic().submit(() -> {
final ChannelHandler handler = transport.configureServerChannelHandler();
final EmbeddedChannel ch = new EmbeddedChannel(handler);
// OPTIONS request with fixed length content written in one chunk
{
ByteBuf buf = ch.alloc().buffer();
appendAsciiLine("OPTIONS /url/whatever/fixed-length-single-chunk HTTP/1.1", buf);
if (randomBoolean()) {
appendAsciiLine("Host: localhost", buf);
}
if (randomBoolean()) {
appendAsciiLine("Accept: */*", buf);
}
if (randomBoolean()) {
appendAsciiLine("Content-Encoding: gzip", buf);
}
if (randomBoolean()) {
appendAsciiLine("Content-Type: " + randomFrom("text/plain; charset=utf-8", "application/json; charset=utf-8"), buf);
}
String content = randomAlphaOfLengthBetween(4, 1024);
// having a "Content-Length" request header is what makes it "fixed length"
appendAsciiLine("Content-Length: " + content.length(), buf);
// end of headers
appendCrLf(buf);
ByteBufUtil.copy(AsciiString.of(content), buf);
// write everything in one single chunk
ch.writeInbound(buf);
ch.flushInbound();
ch.runPendingTasks();
Throwable badRequestCause = badRequestCauseReference.get();
assertThat(badRequestCause, instanceOf(HttpHeadersValidationException.class));
assertThat(badRequestCause.getCause(), instanceOf(ElasticsearchException.class));
assertThat(((ElasticsearchException) badRequestCause.getCause()).status(), is(RestStatus.BAD_REQUEST));
assertThat(
((ElasticsearchException) badRequestCause.getCause()).getDetailedMessage(),
containsString("OPTIONS requests with a payload body are not supported")
);
}
{
ByteBuf buf = ch.alloc().buffer();
appendAsciiLine("OPTIONS /url/whatever/chunked-transfer?encoding HTTP/1.1", buf);
if (randomBoolean()) {
appendAsciiLine("Host: localhost", buf);
}
if (randomBoolean()) {
appendAsciiLine("Accept: */*", buf);
}
if (randomBoolean()) {
appendAsciiLine("Content-Encoding: gzip", buf);
}
if (randomBoolean()) {
appendAsciiLine("Content-Type: " + randomFrom("text/plain; charset=utf-8", "application/json; charset=utf-8"), buf);
}
// do not write a "Content-Length" header to make the request "variable length"
if (randomBoolean()) {
appendAsciiLine("Transfer-Encoding: " + randomFrom("chunked", "gzip, chunked"), buf);
} else {
appendAsciiLine("Transfer-Encoding: chunked", buf);
}
// End of headers
appendCrLf(buf);
// maybe append some chunks as well
String[] contentParts = randomArray(0, 4, String[]::new, () -> randomAlphaOfLengthBetween(1, 64));
for (String content : contentParts) {
appendAsciiLine(Integer.toHexString(content.length()), buf);
appendAsciiLine(content, buf);
}
ch.writeInbound(buf);
ch.flushInbound();
ByteBuf buf2 = ch.alloc().buffer();
contentParts = randomArray(1, 4, String[]::new, () -> randomAlphaOfLengthBetween(1, 64));
for (String content : contentParts) {
appendAsciiLine(Integer.toHexString(content.length()), buf2);
appendAsciiLine(content, buf2);
}
// finish chunked request
appendAsciiLine("0", buf2);
appendCrLf(buf2);
ch.writeInbound(buf2);
ch.flushInbound();
ch.runPendingTasks();
Throwable badRequestCause = badRequestCauseReference.get();
assertThat(badRequestCause, instanceOf(HttpHeadersValidationException.class));
assertThat(badRequestCause.getCause(), instanceOf(ElasticsearchException.class));
assertThat(((ElasticsearchException) badRequestCause.getCause()).status(), is(RestStatus.BAD_REQUEST));
assertThat(
((ElasticsearchException) badRequestCause.getCause()).getDetailedMessage(),
containsString("OPTIONS requests with a payload body are not supported")
);
}
}));
} finally {
testThreadPool.shutdownNow();
}
}
/**
 * Writes {@code string} to {@code buf} as ASCII bytes, followed by a CR/LF line terminator.
 *
 * @param string the ASCII text to write
 * @param buf the target buffer
 */
private static void appendAsciiLine(String string, ByteBuf buf) {
    final AsciiString asciiText = AsciiString.of(string);
    ByteBufUtil.copy(asciiText, buf);
    appendCrLf(buf);
}
/**
 * Writes the two-byte CR/LF line terminator to {@code buf}.
 *
 * @param buf the target buffer
 */
private static void appendCrLf(ByteBuf buf) {
    // ByteBuf#writeByte returns the buffer itself, so the two writes chain.
    buf.writeByte(HttpConstants.CR).writeByte(HttpConstants.LF);
}
}
| SecurityNetty4HttpServerTransportTests |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/JSONSerializerTest2.java | {
"start": 309,
"end": 2933
} | class ____ extends TestCase {
// After clearing the serializer mapping, writing an enum must lazily register a
// serializer (mapping grows) and, with both WriteEnumUsingToString and
// WriteEnumUsingName disabled, the enum is emitted as its ordinal value.
public void test_0() throws Exception {
    JSONSerializer serializer = new JSONSerializer();
    serializer.getMapping().clearSerializers();
    // Snapshot the mapping size before the write triggers lazy registration.
    int size = JSONSerializerMapTest.size(serializer.getMapping());
    serializer.config(SerializerFeature.WriteEnumUsingToString, false);
    serializer.config(SerializerFeature.WriteEnumUsingName, false);
    serializer.write(Type.A);
    // The write should have added at least one serializer to the cleared mapping.
    Assert.assertTrue(size < JSONSerializerMapTest.size(serializer.getMapping()));
    Assert.assertEquals(Integer.toString(Type.A.ordinal()), serializer.getWriter().toString());
}
// With name/toString enum output disabled, a bean's enum property serializes as its
// ordinal ({"type":1}) and still parses back to the same enum constant.
public void test_1() throws Exception {
    JSONSerializer serializer = new JSONSerializer();
    serializer.config(SerializerFeature.WriteEnumUsingToString, false);
    serializer.config(SerializerFeature.WriteEnumUsingName, false);
    serializer.write(new A(Type.B));
    Assert.assertEquals("{\"type\":" + Integer.toString(Type.B.ordinal()) + "}", serializer.getWriter().toString());
    // Round trip: the ordinal form must deserialize back to Type.B.
    A a = JSON.parseObject(serializer.getWriter().toString(), A.class);
    Assert.assertEquals(a.getType(), Type.B);
}
// A bean with no serializable properties must serialize to an empty JSON object.
public void test_2() throws Exception {
    JSONSerializer serializer = new JSONSerializer();
    serializer.write(new C());
    Assert.assertEquals("{}", serializer.getWriter().toString());
}
// With WriteEnumUsingToString enabled, the enum property serializes as a string
// ({"type":"B"}) and the string form round-trips back to the enum constant.
public void test_3() throws Exception {
    JSONSerializer serializer = new JSONSerializer();
    serializer.config(SerializerFeature.WriteEnumUsingToString, true);
    serializer.write(new A(Type.B));
    Assert.assertEquals("{\"type\":\"B\"}", serializer.getWriter().toString());
    A a = JSON.parseObject(serializer.getWriter().toString(), A.class);
    Assert.assertEquals(a.getType(), Type.B);
}
// JSONSerializer.write(Writer, Object) must surface a failure when the target Writer
// throws on every operation; the serializer may wrap the underlying IOException.
public void test_error() throws Exception {
    Exception error = null;
    try {
        // Always-failing Writer: write/flush/close each throw to simulate a broken sink.
        JSONSerializer.write(new Writer() {

            @Override
            public void write(char[] cbuf, int off, int len) throws IOException {
                throw new IOException();
            }

            @Override
            public void flush() throws IOException {
                throw new IOException();
            }

            @Override
            public void close() throws IOException {
                throw new IOException();
            }
        }, (Object) "abc");
    } catch (Exception ex) {
        error = ex;
    }
    // Some exception must have propagated out of the write call.
    Assert.assertNotNull(error);
}
public static | JSONSerializerTest2 |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/collectionelement/embeddables/withcustomenumdef/TestBasicOps.java | {
"start": 679,
"end": 2355
} | class ____ {
@Test
public void testLoadAndStore(SessionFactoryScope scope) {
    // Persist a Query whose element collection holds one Location carrying the
    // custom-mapped enum value COUNTY.
    Query q = new Query( new Location( "first", Location.Type.COUNTY ) );
    scope.inTransaction(
            session ->
                    session.persist( q )
    );
    // Reload in a fresh transaction, verify the enum value survived the round trip,
    // then delete the entity to leave the database clean.
    scope.inTransaction(
            session -> {
                Query q1 = session.get( Query.class, q.getId() );
                assertEquals( 1, q1.getIncludedLocations().size() );
                Location l = q1.getIncludedLocations().iterator().next();
                assertEquals( Location.Type.COUNTY, l.getType() );
                session.remove( q1 );
            }
    );
}
// Regression test for HHH-7072: embeddable collection elements with null properties
// must survive persist/merge/remove cycles. The explicit begin/commit/clear sequence
// is deliberate — each phase runs against a cleared session.
@Test
@JiraKey(value = "HHH-7072")
public void testEmbeddableWithNullables(SessionFactoryScope scope) {
    scope.inTransaction(
            session -> {
                // Phase 1: persist a Query holding a Location with a null name.
                Query q = new Query( new Location( null, Location.Type.COMMUNE ) );
                session.persist( q );
                session.getTransaction().commit();
                session.clear();

                // Phase 2: add a second null-named Location via merge on the detached entity.
                Transaction transaction = session.beginTransaction();
                q.getIncludedLocations().add( new Location( null, Location.Type.COUNTY ) );
                session.merge( q );
                transaction.commit();
                session.clear();

                // Phase 3: reload the entity state.
                transaction = session.beginTransaction();
                q = session.get( Query.class, q.getId() );
                // NOTE(review): size assertion is commented out in the original —
                // presumably the null-name elements collapse; left as-is.
//			assertEquals( 2, q.getIncludedLocations().size() );
                transaction.commit();
                session.clear();

                // Phase 4: remove one element from the detached collection and merge again.
                transaction = session.beginTransaction();
                Iterator<Location> itr = q.getIncludedLocations().iterator();
                itr.next();
                itr.remove();
                session.merge( q );
                transaction.commit();
                session.clear();

                // Phase 5: verify exactly one element remains, then clean up.
                session.beginTransaction();
                q = session.get( Query.class, q.getId() );
                assertEquals( 1, q.getIncludedLocations().size() );
                session.remove( q );
            }
    );
}
}
| TestBasicOps |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/protocol/DecodeBufferPolicies.java | {
"start": 83,
"end": 212
} | class ____ construct commonly used {@link DecodeBufferPolicy} objects.
*
* @author Mark Paluch
* @since 6.0
*/
public abstract | to |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/selection/twosteperror/ErroneousMapperMM.java | {
"start": 1314,
"end": 1475
} | class ____ {
// Wraps the given value in the intermediate type's single field.
TypeInTheMiddleB(String t1) {
    this.test = t1;
}

// Public mutable field — presumably accessed directly by the generated mapper
// under test rather than via accessors; TODO confirm against the mapper.
public String test;
}
// CHECKSTYLE:ON
}
| TypeInTheMiddleB |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/SchedulerUtilsTest.java | {
"start": 3105,
"end": 8326
} | class ____ {
private static final Logger log = LoggerFactory.getLogger(SharedSlotTest.class);
// The store created by SchedulerUtils must honor the configured
// CheckpointingOptions.MAX_RETAINED_CHECKPOINTS value.
@Test
void testSettingMaxNumberOfCheckpointsToRetain() throws Exception {
    final int maxNumberOfCheckpointsToRetain = 10;
    final Configuration jobManagerConfig = new Configuration();
    jobManagerConfig.set(
            CheckpointingOptions.MAX_RETAINED_CHECKPOINTS, maxNumberOfCheckpointsToRetain);

    final CompletedCheckpointStore completedCheckpointStore =
            SchedulerUtils.createCompletedCheckpointStore(
                    jobManagerConfig,
                    new StandaloneCheckpointRecoveryFactory(),
                    Executors.directExecutor(),
                    log,
                    new JobID(),
                    RecoveryClaimMode.CLAIM);

    assertThat(completedCheckpointStore.getMaxNumberOfRetainedCheckpoints())
            .isEqualTo(maxNumberOfCheckpointsToRetain);
}
/**
 * Check that a {@link SharedStateRegistryFactory} used by {@link SchedulerUtils} registers
 * shared checkpoint state on restore: a placeholder handle registered under the same
 * backend id and local path must resolve to the originally recovered concrete handle.
 */
@Test
void testSharedStateRegistration() throws Exception {
    UUID backendId = UUID.randomUUID();
    String localPath = "k0";
    StreamStateHandle handle = new ByteStreamStateHandle("h0", new byte[] {1, 2, 3});
    // Recovery factory whose store "recovers" one checkpoint containing the shared handle.
    CheckpointRecoveryFactory recoveryFactory =
            buildRecoveryFactory(
                    buildCheckpoint(buildIncrementalHandle(localPath, handle, backendId)));

    CompletedCheckpointStore checkpointStore =
            SchedulerUtils.createCompletedCheckpointStore(
                    new Configuration(),
                    recoveryFactory,
                    Executors.directExecutor(),
                    log,
                    new JobID(),
                    RecoveryClaimMode.CLAIM);

    SharedStateRegistry sharedStateRegistry = checkpointStore.getSharedStateRegistry();

    // Register a new handle that references the same shared state only via a
    // placeholder; the registry must substitute the previously registered handle.
    IncrementalRemoteKeyedStateHandle newHandle =
            buildIncrementalHandle(
                    localPath,
                    new PlaceholderStreamStateHandle(
                            handle.getStreamStateHandleID(), handle.getStateSize(), false),
                    backendId);
    newHandle.registerSharedStates(sharedStateRegistry, 1L);

    assertThat(
                    newHandle.getSharedState().stream()
                            .filter(e -> e.getLocalPath().equals(localPath))
                            .findFirst()
                            .get()
                            .getHandle())
            .isEqualTo(handle);
}
// Builds a CheckpointRecoveryFactory whose store recovers exactly the given completed
// checkpoint, backed by an EmbeddedCompletedCheckpointStore and a standalone id counter.
private CheckpointRecoveryFactory buildRecoveryFactory(CompletedCheckpoint checkpoint) {
    return new CheckpointRecoveryFactory() {
        @Override
        public CompletedCheckpointStore createRecoveredCompletedCheckpointStore(
                JobID jobId,
                int maxNumberOfCheckpointsToRetain,
                SharedStateRegistryFactory sharedStateRegistryFactory,
                Executor ioExecutor,
                RecoveryClaimMode recoveryClaimMode) {
            List<CompletedCheckpoint> checkpoints = singletonList(checkpoint);
            return new EmbeddedCompletedCheckpointStore(
                    maxNumberOfCheckpointsToRetain,
                    checkpoints,
                    // The factory sees the recovered checkpoints so it can register
                    // their shared state up front.
                    sharedStateRegistryFactory.create(
                            ioExecutor, checkpoints, RecoveryClaimMode.DEFAULT));
        }

        @Override
        public CheckpointIDCounter createCheckpointIDCounter(JobID jobId) {
            return new StandaloneCheckpointIDCounter();
        }
    };
}
// Builds a minimal CompletedCheckpoint (id 1) containing a single operator whose
// subtask 0 carries the given keyed state as its managed keyed state.
private CompletedCheckpoint buildCheckpoint(KeyedStateHandle incremental) {
    OperatorID operatorID = new OperatorID();
    OperatorState operatorState = new OperatorState(null, null, operatorID, 1, 1);
    operatorState.putState(
            0, OperatorSubtaskState.builder().setManagedKeyedState(incremental).build());
    return new CompletedCheckpoint(
            new JobID(),
            1,
            1,
            1,
            singletonMap(operatorID, operatorState),
            emptyList(),
            CheckpointProperties.forCheckpoint(NEVER_RETAIN_AFTER_TERMINATION),
            new TestCompletedCheckpointStorageLocation(),
            null);
}
// Builds an IncrementalRemoteKeyedStateHandle (checkpoint id 1, empty key-group range)
// whose shared state consists of a single entry mapping localPath to the given handle.
private IncrementalRemoteKeyedStateHandle buildIncrementalHandle(
        String localPath, StreamStateHandle shared, UUID backendIdentifier) {
    StreamStateHandle meta = new ByteStreamStateHandle("meta", new byte[] {1, 2, 3});
    List<HandleAndLocalPath> sharedState = new ArrayList<>(1);
    sharedState.add(HandleAndLocalPath.of(shared, localPath));
    return new IncrementalRemoteKeyedStateHandle(
            backendIdentifier,
            KeyGroupRange.EMPTY_KEY_GROUP_RANGE,
            1,
            sharedState,
            emptyList(),
            meta);
}
}
| SchedulerUtilsTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.