language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
spring-projects__spring-security
|
oauth2/oauth2-client/src/test/java/org/springframework/security/oauth2/client/web/client/ClientRegistrationIdProcessorTests.java
|
{
"start": 4716,
"end": 4775
}
|
interface ____ {
}
@ClientRegistrationId("b")
|
ARestService
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesCapacitySchedDynamicConfigWeightModeDQC.java
|
{
"start": 4061,
"end": 5294
}
|
class ____ extends JerseyTestBase {
private boolean legacyQueueMode;
private MockRM rm;
public static Collection<Boolean> getParameters() {
return Arrays.asList(true, false);
}
private static final String EXPECTED_FILE_TMPL = "webapp/dynamic-%s-%s.json";
private Configuration conf;
@Override
public void setUp() throws Exception {
super.setUp();
}
public void initTestRMWebServicesCapacitySchedDynamicConfigWeightModeDQC(
boolean pLegacyQueueMode) throws Exception {
this.legacyQueueMode = pLegacyQueueMode;
backupSchedulerConfigFileInTarget();
setUp();
}
@Override
protected Application configure() {
ResourceConfig config = new ResourceConfig();
config.register(RMWebServices.class);
config.register(new JerseyBinder());
config.register(GenericExceptionHandler.class);
config.register(ApplicationSubmissionContextInfoWriter.class);
config.register(ApplicationSubmissionContextInfoReader.class);
config.register(TestRMWebServicesAppsModification.TestRMCustomAuthFilter.class);
config.register(new JettisonFeature()).register(JAXBContextResolver.class);
return config;
}
private
|
TestRMWebServicesCapacitySchedDynamicConfigWeightModeDQC
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java
|
{
"start": 10504,
"end": 10868
}
|
class ____ {
private final Long time;
private final OutlierMetrics latency;
LatencyWithLastReportTime(Long time, OutlierMetrics latency) {
this.time = time;
this.latency = latency;
}
public Long getTime() {
return time;
}
public OutlierMetrics getLatency() {
return latency;
}
}
}
|
LatencyWithLastReportTime
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/provider/TestProviderUtils.java
|
{
"start": 1931,
"end": 6626
}
|
class ____ {
@Test
public void testStaticFileLocalization() throws IOException {
// A bunch of mocks ...
ContainerLaunchService.ComponentLaunchContext compLaunchCtx =
mock(ContainerLaunchService.ComponentLaunchContext.class);
AbstractLauncher launcher = mock(AbstractLauncher.class);
SliderFileSystem sfs = mock(SliderFileSystem.class);
FileSystem fs = mock(FileSystem.class);
when(fs.getFileStatus(any(Path.class))).thenAnswer(
invocationOnMock -> new FileStatus(1L, false, 1, 1L, 1L,
(Path) invocationOnMock.getArguments()[0]));
when(fs.exists(any(Path.class))).thenReturn(true);
when(sfs.getFileSystem()).thenReturn(fs);
Configuration conf = mock(Configuration.class);
List<ConfigFile> configFileList = new ArrayList<>();
when(conf.getFiles()).thenReturn(configFileList);
when(compLaunchCtx.getConfiguration()).thenReturn(conf);
when(sfs.createAmResource(any(Path.class), any(LocalResourceType.class),
any(LocalResourceVisibility.class))).thenAnswer(
invocationOnMock -> new LocalResource() {
@Override
public URL getResource() {
return URL.fromPath(((Path) invocationOnMock.getArguments()[0]));
}
@Override
public void setResource(URL resource) {
}
@Override
public long getSize() {
return 0;
}
@Override
public void setSize(long size) {
}
@Override
public long getTimestamp() {
return 0;
}
@Override
public void setTimestamp(long timestamp) {
}
@Override
public LocalResourceType getType() {
return (LocalResourceType) invocationOnMock.getArguments()[1];
}
@Override
public void setType(LocalResourceType type) {
}
@Override
public LocalResourceVisibility getVisibility() {
return LocalResourceVisibility.APPLICATION;
}
@Override
public void setVisibility(LocalResourceVisibility visibility) {
}
@Override
public String getPattern() {
return null;
}
@Override
public void setPattern(String pattern) {
}
@Override
public boolean getShouldBeUploadedToSharedCache() {
return false;
}
@Override
public void setShouldBeUploadedToSharedCache(
boolean shouldBeUploadedToSharedCache) {
}
});
// Initialize list of files.
//archive
configFileList.add(new ConfigFile().srcFile("hdfs://default/sourceFile1")
.destFile("destFile1").type(ConfigFile.TypeEnum.ARCHIVE)
.visibility(LocalResourceVisibility.APPLICATION));
//static file
configFileList.add(new ConfigFile().srcFile("hdfs://default/sourceFile2")
.destFile("folder/destFile_2").type(ConfigFile.TypeEnum.STATIC)
.visibility(LocalResourceVisibility.APPLICATION));
//This will be ignored since type is JSON
configFileList.add(new ConfigFile().srcFile("hdfs://default/sourceFile3")
.destFile("destFile3").type(ConfigFile.TypeEnum.JSON)
.visibility(LocalResourceVisibility.APPLICATION));
//No destination file specified
configFileList.add(new ConfigFile().srcFile("hdfs://default/sourceFile4")
.type(ConfigFile.TypeEnum.STATIC)
.visibility(LocalResourceVisibility.APPLICATION));
ProviderService.ResolvedLaunchParams resolved =
new ProviderService.ResolvedLaunchParams();
ProviderUtils.handleStaticFilesForLocalization(launcher, sfs,
compLaunchCtx, resolved);
Mockito.verify(launcher).addLocalResource(Mockito.eq("destFile1"),
any(LocalResource.class));
Mockito.verify(launcher).addLocalResource(
Mockito.eq("destFile_2"), any(LocalResource.class));
Mockito.verify(launcher).addLocalResource(
Mockito.eq("sourceFile4"), any(LocalResource.class));
assertEquals(3, resolved.getResolvedRsrcPaths().size());
assertEquals(resolved.getResolvedRsrcPaths().get("destFile1"),
"destFile1");
}
@Test
public void testReplaceSpacesWithDelimiter() {
String command = "ls -l \" space\"";
String expected = "ls,-l, space";
String actual = ProviderUtils.replaceSpacesWithDelimiter(command, ",");
assertEquals(expected, actual,
"replaceSpaceWithDelimiter produces unexpected result.");
}
}
|
TestProviderUtils
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/model/dataformat/ASN1DataFormat.java
|
{
"start": 3956,
"end": 5443
}
|
class ____ implements DataFormatBuilder<ASN1DataFormat> {
private Class<?> unmarshalType;
private String unmarshalTypeName;
private String usingIterator;
/**
* If the asn1 file has more than one entry, the setting this option to true, allows working with the splitter
* EIP, to split the data using an iterator in a streaming mode.
*/
public Builder usingIterator(String usingIterator) {
this.usingIterator = usingIterator;
return this;
}
/**
* If the asn1 file has more than one entry, the setting this option to true, allows working with the splitter
* EIP, to split the data using an iterator in a streaming mode.
*/
public Builder usingIterator(boolean usingIterator) {
this.usingIterator = Boolean.toString(usingIterator);
return this;
}
/**
* Class to use when unmarshalling.
*/
public Builder unmarshalTypeName(String unmarshalTypeName) {
this.unmarshalTypeName = unmarshalTypeName;
return this;
}
/**
* Class to use when unmarshalling.
*/
public Builder unmarshalType(Class<?> unmarshalType) {
this.unmarshalType = unmarshalType;
return this;
}
@Override
public ASN1DataFormat end() {
return new ASN1DataFormat(this);
}
}
}
|
Builder
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/gen/pipeline/AggNameInput.java
|
{
"start": 475,
"end": 995
}
|
class ____ extends CommonNonExecutableInput<String> {
public AggNameInput(Source source, Expression expression, String context) {
super(source, expression, context);
}
@Override
protected NodeInfo<AggNameInput> info() {
return NodeInfo.create(this, AggNameInput::new, expression(), context());
}
@Override
public final boolean supportedByAggsOnlyQuery() {
return true;
}
@Override
public final boolean resolved() {
return false;
}
}
|
AggNameInput
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/CheckpointCoordinatorDeActivator.java
|
{
"start": 1205,
"end": 2095
}
|
class ____ implements JobStatusListener {
private final CheckpointCoordinator coordinator;
private final boolean allTasksOutputNonBlocking;
public CheckpointCoordinatorDeActivator(
CheckpointCoordinator coordinator, boolean allTasksOutputNonBlocking) {
this.coordinator = checkNotNull(coordinator);
this.allTasksOutputNonBlocking = allTasksOutputNonBlocking;
}
@Override
public void jobStatusChanges(JobID jobId, JobStatus newJobStatus, long timestamp) {
if (newJobStatus == JobStatus.RUNNING && allTasksOutputNonBlocking) {
// start the checkpoint scheduler if there is no blocking edge
coordinator.startCheckpointScheduler();
} else {
// anything else should stop the trigger for now
coordinator.stopCheckpointScheduler();
}
}
}
|
CheckpointCoordinatorDeActivator
|
java
|
apache__camel
|
components/camel-univocity-parsers/src/main/java/org/apache/camel/dataformat/univocity/UniVocityCsvDataFormat.java
|
{
"start": 1176,
"end": 1276
}
|
class ____ the data format that uses the CSV uniVocity parser.
*/
@Dataformat("univocityCsv")
public
|
is
|
java
|
elastic__elasticsearch
|
build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/SourceDirectoryCommandLineArgumentProvider.java
|
{
"start": 777,
"end": 1335
}
|
class ____ implements CommandLineArgumentProvider {
private final Directory sourceDirectory;
public SourceDirectoryCommandLineArgumentProvider(Directory sourceDirectory) {
this.sourceDirectory = sourceDirectory;
}
public Iterable<String> asArguments() {
return Arrays.asList("-s", sourceDirectory.getAsFile().getAbsolutePath());
}
@InputDirectory
@PathSensitive(PathSensitivity.RELATIVE)
public Directory getSourceDirectory() {
return sourceDirectory;
}
}
|
SourceDirectoryCommandLineArgumentProvider
|
java
|
hibernate__hibernate-orm
|
hibernate-testing/src/test/java/org/hibernate/testing/annotations/BasicEntityManagerFactoryScopeTests.java
|
{
"start": 580,
"end": 959
}
|
class ____ {
@Test
public void testBasicUsage(EntityManagerFactoryScope scope) {
assertThat( scope, notNullValue() );
assertThat( scope.getEntityManagerFactory(), notNullValue() );
// check we can use the EMF to create EMs
scope.inTransaction(
(session) -> session.createQuery( "select a from AnEntity a" ).getResultList()
);
}
}
|
BasicEntityManagerFactoryScopeTests
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/Assertions_assertAny_Test.java
|
{
"start": 1344,
"end": 4443
}
|
class ____ {
public static Stream<Arguments> should_throw_IllegalArgumentException_if_one_or_more_of_the_executables_are_null() {
return Stream.of(Arguments.of((Object) new Executable[] { null, () -> {}, () -> {} }),
Arguments.of((Object) new Executable[] { null, null, () -> {} }),
Arguments.of((Object) new Executable[] { null, null, null }));
}
static Stream<Arguments> should_pass_when_at_least_one_of_the_executables_does_not_fail() {
Throwable failed = new Throwable("Failed");
return Stream.of(Arguments.of((Object) new Executable[] { throwingExecutable(failed), () -> {}, () -> {} }),
Arguments.of((Object) new Executable[] { throwingExecutable(failed), throwingExecutable(failed), () -> {} }),
Arguments.of((Object) new Executable[] { () -> {}, () -> {}, () -> {} }));
}
@MethodSource
@ParameterizedTest
void should_throw_IllegalArgumentException_if_one_or_more_of_the_executables_are_null(Executable[] executables) {
// WHEN
var illegalArgumentException = catchIllegalArgumentException(() -> assertAny(executables));
// THEN
then(illegalArgumentException).hasMessage("No executable can be null");
}
@Test
@DisabledOnOs(OS.WINDOWS) // fails but the error messages match in the log :/
void should_fail_if_one_and_only_executable_provided_fails() {
// GIVEN
Throwable throwable = new Throwable("Failed");
// WHEN
var assertionError = expectAssertionError(() -> assertAny(throwingExecutable(throwable)));
// THEN
then(assertionError).hasMessage("""
None of the provided executables succeeded.
Executable #0 failed with:
%s""".formatted(getStackTrace(throwable)));
}
@Test
@DisabledOnOs(OS.WINDOWS) // fails but the error messages match in the log :/
void should_fail_if_all_executables_fail() {
// GIVEN
Throwable throwable1 = new Throwable("Failure message 1");
Throwable throwable2 = new Throwable("Failure message 2");
Throwable throwable3 = new Throwable("Failure message 3");
// WHEN
var assertionError = expectAssertionError(() -> assertAny(throwingExecutable(throwable1),
throwingExecutable(throwable2),
throwingExecutable(throwable3)));
// THEN
then(assertionError).hasMessage("""
None of the provided executables succeeded.
Executable #0 failed with:
%s
Executable #1 failed with:
%s
Executable #2 failed with:
%s""".formatted(getStackTrace(throwable1),
getStackTrace(throwable2),
getStackTrace(throwable3)));
}
@MethodSource
@ParameterizedTest
void should_pass_when_at_least_one_of_the_executables_does_not_fail(Executable[] executables) {
assertAny(executables);
}
private static Executable throwingExecutable(Throwable throwable) {
return () -> {
throw throwable;
};
}
}
|
Assertions_assertAny_Test
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProxyCombiner.java
|
{
"start": 2190,
"end": 3291
}
|
interface ____ the combined proxy.
* @param proxies The proxies which should be used as delegates.
* @param <T> The type of the proxy that will be returned.
* @return The combined proxy.
*/
@SuppressWarnings("unchecked")
public static <T> T combine(Class<T> combinedProxyInterface,
Object... proxies) {
methodLoop:
for (Method m : combinedProxyInterface.getMethods()) {
for (Object proxy : proxies) {
try {
proxy.getClass().getMethod(m.getName(), m.getParameterTypes());
continue methodLoop; // go to the next method
} catch (NoSuchMethodException nsme) {
// Continue to try the next proxy
}
}
throw new IllegalStateException("The proxies specified for "
+ combinedProxyInterface + " do not cover method " + m);
}
InvocationHandler handler =
new CombinedProxyInvocationHandler(combinedProxyInterface, proxies);
return (T) Proxy.newProxyInstance(combinedProxyInterface.getClassLoader(),
new Class[] {combinedProxyInterface}, handler);
}
private static final
|
of
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/metrics/groups/TaskManagerJobMetricGroup.java
|
{
"start": 1560,
"end": 4313
}
|
class ____ extends JobMetricGroup<TaskManagerMetricGroup> {
/** Map from execution attempt ID (task identifier) to task metrics. */
private final Map<ExecutionAttemptID, TaskMetricGroup> tasks = new HashMap<>();
// ------------------------------------------------------------------------
public TaskManagerJobMetricGroup(
MetricRegistry registry,
TaskManagerMetricGroup parent,
JobID jobId,
@Nullable String jobName) {
super(
registry,
parent,
jobId,
jobName,
registry.getScopeFormats()
.getTaskManagerJobFormat()
.formatScope(checkNotNull(parent), jobId, jobName));
}
public final TaskManagerMetricGroup parent() {
return parent;
}
// ------------------------------------------------------------------------
// adding / removing tasks
// ------------------------------------------------------------------------
public TaskMetricGroup addTask(
final ExecutionAttemptID executionAttemptID, final String taskName) {
checkNotNull(executionAttemptID);
checkNotNull(taskName);
synchronized (this) {
if (!isClosed()) {
TaskMetricGroup prior = tasks.get(executionAttemptID);
if (prior != null) {
return prior;
} else {
TaskMetricGroup task =
new TaskMetricGroup(registry, this, executionAttemptID, taskName);
tasks.put(executionAttemptID, task);
return task;
}
} else {
return null;
}
}
}
public void removeTaskMetricGroup(ExecutionAttemptID executionId) {
checkNotNull(executionId);
// this can be a call from this.close which iterates over tasks
// changing tasks here would break iteration
synchronized (this) {
if (!isClosed()) {
tasks.remove(executionId);
// keep this group open even if tasks is empty - to re-use on new task submission
// the group will be closed by TM with the release of the last job slot on this TM
}
}
}
// ------------------------------------------------------------------------
// Component Metric Group Specifics
// ------------------------------------------------------------------------
@Override
protected Iterable<? extends ComponentMetricGroup<?>> subComponents() {
checkState(holdsLock(this));
return tasks.values();
}
}
|
TaskManagerJobMetricGroup
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/common/network/NetworkUtilsTests.java
|
{
"start": 1088,
"end": 4754
}
|
class ____ extends ESTestCase {
/**
* test sort key order respects PREFER_IPV4
*/
public void testSortKey() throws Exception {
InetAddress localhostv4 = InetAddress.getByName("127.0.0.1");
InetAddress localhostv6 = InetAddress.getByName("::1");
assertTrue(NetworkUtils.sortKey(localhostv4, false) < NetworkUtils.sortKey(localhostv6, false));
assertTrue(NetworkUtils.sortKey(localhostv6, true) < NetworkUtils.sortKey(localhostv4, true));
}
/**
* test ordinary addresses sort before private addresses
*/
public void testSortKeySiteLocal() throws Exception {
InetAddress siteLocal = InetAddress.getByName("172.16.0.1");
assert siteLocal.isSiteLocalAddress();
InetAddress ordinary = InetAddress.getByName("192.192.192.192");
assertTrue(NetworkUtils.sortKey(ordinary, true) < NetworkUtils.sortKey(siteLocal, true));
assertTrue(NetworkUtils.sortKey(ordinary, false) < NetworkUtils.sortKey(siteLocal, false));
InetAddress siteLocal6 = InetAddress.getByName("fec0::1");
assert siteLocal6.isSiteLocalAddress();
InetAddress ordinary6 = InetAddress.getByName("fddd::1");
assertTrue(NetworkUtils.sortKey(ordinary6, true) < NetworkUtils.sortKey(siteLocal6, true));
assertTrue(NetworkUtils.sortKey(ordinary6, false) < NetworkUtils.sortKey(siteLocal6, false));
}
/**
* test private addresses sort before link local addresses
*/
public void testSortKeyLinkLocal() throws Exception {
InetAddress linkLocal = InetAddress.getByName("fe80::1");
assert linkLocal.isLinkLocalAddress();
InetAddress ordinary = InetAddress.getByName("fddd::1");
assertTrue(NetworkUtils.sortKey(ordinary, true) < NetworkUtils.sortKey(linkLocal, true));
assertTrue(NetworkUtils.sortKey(ordinary, false) < NetworkUtils.sortKey(linkLocal, false));
}
/**
* Test filtering out ipv4/ipv6 addresses
*/
public void testFilter() throws Exception {
InetAddress addresses[] = { InetAddress.getByName("::1"), InetAddress.getByName("127.0.0.1") };
assertArrayEquals(new InetAddress[] { InetAddress.getByName("127.0.0.1") }, NetworkUtils.filterIPV4(addresses));
assertArrayEquals(new InetAddress[] { InetAddress.getByName("::1") }, NetworkUtils.filterIPV6(addresses));
}
// test that selecting by name is possible
public void testMaybeGetInterfaceByName() throws Exception {
final List<NetworkInterface> networkInterfaces = getInterfaces();
for (NetworkInterface netIf : networkInterfaces) {
final Optional<NetworkInterface> maybeNetworkInterface = NetworkUtils.maybeGetInterfaceByName(
networkInterfaces,
netIf.getName()
);
assertThat(maybeNetworkInterface, isPresentWith(transformedMatch(NetworkInterface::getName, equalTo(netIf.getName()))));
}
}
public void testNonExistingInterface() throws Exception {
final IllegalArgumentException exception = expectThrows(
IllegalArgumentException.class,
() -> NetworkUtils.getAddressesForInterface("settingValue", ":suffix", "non-existing")
);
assertThat(exception.getMessage(), containsString("setting [settingValue] matched no network interfaces; valid values include"));
final boolean atLeastOneInterfaceIsPresentInExceptionMessage = getInterfaces().stream()
.anyMatch(anInterface -> exception.getMessage().contains(anInterface.getName() + ":suffix"));
assertThat(
"Expected to get at least one
|
NetworkUtilsTests
|
java
|
junit-team__junit5
|
junit-vintage-engine/src/test/java/org/junit/vintage/engine/discovery/VintageDiscovererTests.java
|
{
"start": 4943,
"end": 5008
}
|
class ____ {
@org.junit.Test
public void test() {
}
}
}
|
Bar
|
java
|
apache__camel
|
components/camel-salesforce/camel-salesforce-component/src/test/java/org/apache/camel/component/salesforce/api/dto/LimitsTest.java
|
{
"start": 1731,
"end": 3980
}
|
class ____ {
@Test
public void shouldBeKnownIfDefined() {
assertFalse(new Usage(1, 2).isUnknown(), "Known usage must not declare itself as unknown");
}
@Test
public void shouldDeserializeFromSalesforceGeneratedJSON() throws IOException {
final ObjectMapper mapper = JsonUtils.createObjectMapper();
final Object read = mapper.readerFor(Limits.class)
.readValue(LimitsTest.class.getResource("/org/apache/camel/component/salesforce/api/dto/limits.json"));
assertThat("Limits should be parsed from JSON", read, instanceOf(Limits.class));
final Limits limits = (Limits) read;
final Usage dailyApiRequests = limits.getDailyApiRequests();
assertFalse(dailyApiRequests.isUnknown(), "Should have some usage present");
assertFalse(dailyApiRequests.getPerApplicationUsage().isEmpty(), "Per application usage should be present");
assertNotNull(dailyApiRequests.forApplication("Camel Salesman"),
"'Camel Salesman' application usage should be present");
}
@Test
public void shouldDeserializeWithUnsupportedKeys() throws IOException {
final ObjectMapper mapper = JsonUtils.createObjectMapper();
final Limits withUnsupported
= mapper.readerFor(Limits.class).readValue("{\"Camel-NotSupportedKey\": {\"Max\": 200,\"Remaining\": 200}}");
assertNotNull(withUnsupported);
assertNotNull(withUnsupported.forOperation("Camel-NotSupportedKey"));
}
@Test
public void shouldSupportGettingAllDefinedUsages() throws IntrospectionException {
final BeanInfo beanInfo = Introspector.getBeanInfo(Limits.class);
final PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
final Set<String> found = new HashSet<>();
for (final PropertyDescriptor descriptor : propertyDescriptors) {
found.add(descriptor.getName());
}
final Set<String> defined = Arrays.stream(Limits.Operation.values()).map(Limits.Operation::name)
.map(Introspector::decapitalize).collect(Collectors.toSet());
defined.removeAll(found);
assertThat("All operations declared in Operation
|
LimitsTest
|
java
|
apache__hadoop
|
hadoop-cloud-storage-project/hadoop-tos/src/test/java/org/apache/hadoop/fs/tosfs/TestRawFSUtils.java
|
{
"start": 1007,
"end": 1666
}
|
class ____ {
@Test
public void testIsAncestor() {
assertTrue(RawFSUtils.inSubtree("/", "/"));
assertTrue(RawFSUtils.inSubtree("/", "/a"));
assertTrue(RawFSUtils.inSubtree("/a", "/a"));
assertFalse(RawFSUtils.inSubtree("/a", "/"));
assertTrue(RawFSUtils.inSubtree("/", "/a/b/c"));
assertFalse(RawFSUtils.inSubtree("/a/b/c", "/"));
assertTrue(RawFSUtils.inSubtree("/", "/a/b/c.txt"));
assertFalse(RawFSUtils.inSubtree("/a/b/c.txt", "/"));
assertTrue(RawFSUtils.inSubtree("/a/b/", "/a/b"));
assertTrue(RawFSUtils.inSubtree("/a/b/", "/a/b/c"));
assertFalse(RawFSUtils.inSubtree("/a/b/c", "/a/b"));
}
}
|
TestRawFSUtils
|
java
|
elastic__elasticsearch
|
libs/native/src/main/java/org/elasticsearch/nativeaccess/jdk/JdkNativeLibraryProvider.java
|
{
"start": 1005,
"end": 1719
}
|
class ____ extends NativeLibraryProvider {
public JdkNativeLibraryProvider() {
super(
"jdk",
Map.of(
JavaLibrary.class,
JdkJavaLibrary::new,
PosixCLibrary.class,
JdkPosixCLibrary::new,
LinuxCLibrary.class,
JdkLinuxCLibrary::new,
MacCLibrary.class,
JdkMacCLibrary::new,
Kernel32Library.class,
JdkKernel32Library::new,
ZstdLibrary.class,
JdkZstdLibrary::new,
VectorLibrary.class,
JdkVectorLibrary::new
)
);
}
}
|
JdkNativeLibraryProvider
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/stream/StreamExecMultipleInput.java
|
{
"start": 2410,
"end": 3358
}
|
class ____ extends ExecNodeBase<RowData>
implements StreamExecNode<RowData> {
private final ExecNode<?> rootNode;
public StreamExecMultipleInput(
ReadableConfig tableConfig,
List<InputProperty> inputProperties,
ExecNode<?> rootNode,
String description) {
super(
ExecNodeContext.newNodeId(),
ExecNodeContext.newContext(StreamExecMultipleInput.class),
ExecNodeContext.newPersistedConfig(StreamExecMultipleInput.class, tableConfig),
inputProperties,
rootNode.getOutputType(),
description);
this.rootNode = rootNode;
}
@Override
protected Transformation<RowData> translateToPlanInternal(
PlannerBase planner, ExecNodeConfig config) {
throw new UnsupportedOperationException("This method is not implemented yet.");
}
}
|
StreamExecMultipleInput
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/web/support/AbstractHeaderAssertions.java
|
{
"start": 1571,
"end": 9739
}
|
class ____ <E, R> {
private final E exchangeResult;
private final R responseSpec;
protected AbstractHeaderAssertions(E exchangeResult, R responseSpec) {
this.exchangeResult = exchangeResult;
this.responseSpec = responseSpec;
}
/**
* Return the exchange result.
*/
protected E getExchangeResult() {
return this.exchangeResult;
}
protected R getResponseSpec() {
return this.responseSpec;
}
/**
* Subclasses must implement this to provide access to response headers.
*/
protected abstract HttpHeaders getResponseHeaders();
/**
* Subclasses must implement this to assert with diagnostics.
*/
protected abstract void assertWithDiagnostics(Runnable assertion);
/**
* Expect a header with the given name to match the specified values.
*/
public R valueEquals(String headerName, String... values) {
return assertHeader(headerName, Arrays.asList(values), getResponseHeaders().getOrEmpty(headerName));
}
/**
* Expect a header with the given name to match the given long value.
*/
public R valueEquals(String headerName, long value) {
String actual = getResponseHeaders().getFirst(headerName);
assertWithDiagnostics(() ->
assertNotNull("Response does not contain header '" + headerName + "'", actual));
return assertHeader(headerName, value, Long.parseLong(actual));
}
/**
* Expect a header with the given name to match the specified long value
* parsed into a date using the preferred date format described in RFC 7231.
* <p>An {@link AssertionError} is thrown if the response does not contain
* the specified header, or if the supplied {@code value} does not match the
* primary header value.
*/
public R valueEqualsDate(String headerName, long value) {
assertWithDiagnostics(() -> {
String headerValue = getResponseHeaders().getFirst(headerName);
assertNotNull("Response does not contain header '" + headerName + "'", headerValue);
HttpHeaders headers = new HttpHeaders();
headers.setDate("expected", value);
headers.set("actual", headerValue);
assertEquals(getMessage(headerName) + "='" + headerValue + "' " +
"does not match expected value '" + headers.getFirst("expected") + "'",
headers.getFirstDate("expected"), headers.getFirstDate("actual"));
});
return this.responseSpec;
}
/**
* Match the first value of the response header with a regex.
* @param name the header name
* @param pattern the regex pattern
*/
public R valueMatches(String name, String pattern) {
String value = getRequiredValue(name);
String message = getMessage(name) + "=[" + value + "] does not match [" + pattern + "]";
assertWithDiagnostics(() -> assertTrue(message, value.matches(pattern)));
return this.responseSpec;
}
/**
* Match all values of the response header with the given regex
* patterns which are applied to the values of the header in the
* same order. Note that the number of patterns must match the
* number of actual values.
* @param name the header name
* @param patterns one or more regex patterns, one per expected value
*/
public R valuesMatch(String name, String... patterns) {
List<String> values = getRequiredValues(name);
assertWithDiagnostics(() -> {
assertTrue(
getMessage(name) + " has fewer or more values " + values +
" than number of patterns to match with " + Arrays.toString(patterns),
values.size() == patterns.length);
for (int i = 0; i < values.size(); i++) {
String value = values.get(i);
String pattern = patterns[i];
assertTrue(
getMessage(name) + "[" + i + "]='" + value + "' does not match '" + pattern + "'",
value.matches(pattern));
}
});
return this.responseSpec;
}
/**
* Consume the first value of the named response header.
* @param name the header name
* @param consumer the consumer to use
*/
public R value(String name, Consumer<String> consumer) {
String value = getResponseHeaders().getFirst(name);
assertWithDiagnostics(() -> consumer.accept(value));
return this.responseSpec;
}
/**
* Consume all values of the named response header.
* @param name the header name
* @param consumer the consumer to use
*/
public R values(String name, Consumer<List<String>> consumer) {
List<String> values = getResponseHeaders().get(name);
assertWithDiagnostics(() -> consumer.accept(values));
return this.responseSpec;
}
/**
* Expect that the header with the given name is present.
*/
public R exists(String name) {
if (!getResponseHeaders().containsHeader(name)) {
String message = getMessage(name) + " does not exist";
assertWithDiagnostics(() -> fail(message));
}
return this.responseSpec;
}
/**
* Expect that the header with the given name is not present.
*/
public R doesNotExist(String name) {
if (getResponseHeaders().containsHeader(name)) {
String message = getMessage(name) + " exists with value=[" + getResponseHeaders().getFirst(name) + "]";
assertWithDiagnostics(() -> fail(message));
}
return this.responseSpec;
}
/**
* Expect a "Cache-Control" header with the given value.
*/
public R cacheControl(CacheControl cacheControl) {
return assertHeader("Cache-Control", cacheControl.getHeaderValue(), getResponseHeaders().getCacheControl());
}
/**
* Expect a "Content-Disposition" header with the given value.
*/
public R contentDisposition(ContentDisposition contentDisposition) {
return assertHeader("Content-Disposition", contentDisposition, getResponseHeaders().getContentDisposition());
}
/**
* Expect a "Content-Length" header with the given value.
*/
public R contentLength(long contentLength) {
return assertHeader("Content-Length", contentLength, getResponseHeaders().getContentLength());
}
/**
* Expect a "Content-Type" header with the given value.
*/
public R contentType(MediaType mediaType) {
return assertHeader("Content-Type", mediaType, getResponseHeaders().getContentType());
}
/**
* Expect a "Content-Type" header with the given value.
*/
public R contentType(String mediaType) {
return contentType(MediaType.parseMediaType(mediaType));
}
/**
* Expect a "Content-Type" header compatible with the given value.
*/
public R contentTypeCompatibleWith(MediaType mediaType) {
MediaType actual = getResponseHeaders().getContentType();
String message = getMessage("Content-Type") + "=[" + actual + "] is not compatible with [" + mediaType + "]";
assertWithDiagnostics(() ->
assertTrue(message, (actual != null && actual.isCompatibleWith(mediaType))));
return this.responseSpec;
}
/**
* Expect a "Content-Type" header compatible with the given value.
*/
public R contentTypeCompatibleWith(String mediaType) {
return contentTypeCompatibleWith(MediaType.parseMediaType(mediaType));
}
/**
* Expect an "Expires" header with the given value.
*/
public R expires(long expires) {
return assertHeader("Expires", expires, getResponseHeaders().getExpires());
}
/**
* Expect a "Last-Modified" header with the given value.
*/
public R lastModified(long lastModified) {
return assertHeader("Last-Modified", lastModified, getResponseHeaders().getLastModified());
}
/**
* Expect a "Location" header with the given value.
*/
public R location(String location) {
return assertHeader("Location", URI.create(location), getResponseHeaders().getLocation());
}
private R assertHeader(String name, @Nullable Object expected, @Nullable Object actual) {
assertWithDiagnostics(() -> {
String message = getMessage(name);
assertEquals(message, expected, actual);
});
return this.responseSpec;
}
private String getRequiredValue(String name) {
return getRequiredValues(name).get(0);
}
private List<String> getRequiredValues(String name) {
List<String> values = getResponseHeaders().get(name);
if (!CollectionUtils.isEmpty(values)) {
return values;
}
else {
assertWithDiagnostics(() -> fail(getMessage(name) + " not found"));
}
throw new IllegalStateException("This code path should not be reachable");
}
protected String getMessage(String headerName) {
return "Response header '" + headerName + "'";
}
}
|
AbstractHeaderAssertions
|
java
|
quarkusio__quarkus
|
extensions/reactive-streams-operators/smallrye-reactive-streams-operators/deployment/src/test/java/io/quarkus/smallrye/reactivestreamoperators/deployment/ReactiveStreamsOperatorsHotReloadTest.java
|
{
"start": 286,
"end": 893
}
|
class ____ {
@RegisterExtension
static final QuarkusDevModeTest test = new QuarkusDevModeTest()
.withApplicationRoot((jar) -> jar
.addClasses(MyTestResource.class));
@Test
public void testHotReload() {
String resp = RestAssured.get("/test").asString();
Assertions.assertTrue(resp.startsWith("5"));
test.modifySourceFile(MyTestResource.class, s -> s.replace(".limit(2)", ".limit(10)"));
resp = RestAssured.get("/test").asString();
Assertions.assertTrue(resp.startsWith("9"));
}
}
|
ReactiveStreamsOperatorsHotReloadTest
|
java
|
apache__flink
|
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/rules/ResolverRules.java
|
{
"start": 1170,
"end": 2992
}
|
class ____ {
/**
* Resolves {@link UnresolvedReferenceExpression}. See {@link ReferenceResolverRule} for
* details.
*/
public static final ResolverRule FIELD_RESOLVE = new ReferenceResolverRule();
/** Resolves {@link SqlCallExpression}s. */
public static final ResolverRule RESOLVE_SQL_CALL = new ResolveSqlCallRule();
/**
* Resolves call based on argument types. See {@link ResolveCallByArgumentsRule} for details.
*/
public static final ResolverRule RESOLVE_CALL_BY_ARGUMENTS = new ResolveCallByArgumentsRule();
/** Looks up unresolved call by name. See {@link LookupCallByNameRule} for details. */
public static final ResolverRule LOOKUP_CALL_BY_NAME = new LookupCallByNameRule();
/**
* Concatenates over aggregations with corresponding over window. See {@link
* OverWindowResolverRule} for details.
*/
public static final ResolverRule OVER_WINDOWS = new OverWindowResolverRule();
/**
* Resolves '*' expressions to corresponding fields of inputs. See {@link
* StarReferenceFlatteningRule} for details.
*/
public static final ResolverRule FLATTEN_STAR_REFERENCE = new StarReferenceFlatteningRule();
/**
* Resolves column functions to corresponding fields of inputs. See {@link
* ExpandColumnFunctionsRule} for details.
*/
public static final ResolverRule EXPAND_COLUMN_FUNCTIONS = new ExpandColumnFunctionsRule();
/** Looks up unresolved calls of built-in functions to make them fully qualified. */
public static final ResolverRule QUALIFY_BUILT_IN_FUNCTIONS = new QualifyBuiltInFunctionsRule();
/** Unwraps all {@link ApiExpression}. */
public static final ResolverRule UNWRAP_API_EXPRESSION = new UnwrapApiExpressionRule();
private ResolverRules() {}
}
|
ResolverRules
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/operators/coordination/OperatorCoordinatorHolderTest.java
|
{
"start": 26786,
"end": 27798
}
|
class ____
extends ManuallyTriggeredScheduledExecutorService {
private boolean delayNewRunnables;
private final Queue<Runnable> delayedRunnables = new ArrayDeque<>();
public void setDelayNewRunnables(boolean delayNewRunnables) {
this.delayNewRunnables = delayNewRunnables;
}
@Override
public void execute(@Nonnull Runnable command) {
if (delayNewRunnables) {
delayedRunnables.add(command);
} else {
super.execute(command);
}
}
public void executeAllDelayedRunnables() {
while (!delayedRunnables.isEmpty()) {
super.execute(delayedRunnables.poll());
}
}
}
// ------------------------------------------------------------------------
// test implementations
// ------------------------------------------------------------------------
private static final
|
ReorderableManualExecutorService
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/MissingBindingValidationTest.java
|
{
"start": 30201,
"end": 30556
}
|
class ____<T> {",
" @Inject T t;",
" @Inject Generic() {}",
"}");
Source testClass =
CompilerTests.javaSource(
"test.TestClass",
"package test;",
"",
"import javax.inject.Inject;",
"import java.util.List;",
"",
"final
|
Generic
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregatorFactory.java
|
{
"start": 3379,
"end": 5552
}
|
class ____. KeywordScriptFieldType is
// the exception that we do want to support, so we need to check for that separately. (It's not a
// complete disaster if we end up analyzing an inappropriate field, for example if the user has added
// a new field type via a plugin that also creates a bespoke TextSearchInfo member - it will just get
// converted to a string and then likely the analyzer won't create any tokens, so the categorizer
// will see an empty token list.)
if (fieldType.getTextSearchInfo() == TextSearchInfo.NONE
|| (fieldType.getTextSearchInfo() == TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS
&& fieldType instanceof KeywordScriptFieldType == false)) {
throw new IllegalArgumentException(
"categorize_text agg ["
+ name
+ "] only works on text and keyword fields. Cannot aggregate field type ["
+ fieldType.name()
+ "] via ["
+ fieldType.getClass().getSimpleName()
+ "]"
);
}
TermsAggregator.BucketCountThresholds bucketCountThresholds = new TermsAggregator.BucketCountThresholds(this.bucketCountThresholds);
if (bucketCountThresholds.getShardSize() == CategorizeTextAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.shardSize()) {
// The user has not made a shardSize selection. Use default
// heuristic to avoid any wrong-ranking caused by distributed
// counting
// TODO significant text does a 2x here, should we as well?
bucketCountThresholds.setShardSize(BucketUtils.suggestShardSideQueueSize(bucketCountThresholds.getRequiredSize()));
}
bucketCountThresholds.ensureValidity();
return new CategorizeTextAggregator(
name,
factories,
context,
parent,
fieldType.name(),
fieldType,
bucketCountThresholds,
similarityThreshold,
categorizationAnalyzerConfig,
metadata
);
}
}
|
definition
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/spi/HttpResponseAware.java
|
{
"start": 907,
"end": 1283
}
|
interface ____ {
/**
* The HTTP status code
*/
int getHttpResponseCode();
/**
* Sets the HTTP status code
*/
void setHttpResponseCode(int code);
/**
* The HTTP status line
*/
String getHttpResponseStatus();
/**
* Sets the HTTP status line
*/
void setHttpResponseStatus(String status);
}
|
HttpResponseAware
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/chararray/CharArrayAssert_usingDefaultComparator_Test.java
|
{
"start": 1161,
"end": 1788
}
|
class ____ extends CharArrayAssertBaseTest {
private CharArrays arraysBefore;
@BeforeEach
void before() {
arraysBefore = getArrays(assertions);
}
@Override
protected CharArrayAssert invoke_api_method() {
return assertions.usingComparator(alwaysEqual())
.usingDefaultComparator();
}
@Override
protected void verify_internal_effects() {
assertThat(getObjects(assertions).getComparator()).isNull();
assertThat(getObjects(assertions)).isSameAs(Objects.instance());
assertThat(getArrays(assertions)).isSameAs(arraysBefore);
}
}
|
CharArrayAssert_usingDefaultComparator_Test
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java
|
{
"start": 1647,
"end": 14908
}
|
class ____ {
/*
* The transport-level handshake allows the node that opened the connection to determine the newest protocol version with which it can
* communicate with the remote node. Each node sends its maximum acceptable protocol version to the other, but the responding node
* ignores the body of the request. After the handshake, the OutboundHandler uses the min(local,remote) protocol version for all later
* messages.
*
* This version supports two handshake protocols, v7170099 and v8800000, which respectively have the same message structure as the
* transport protocols of v7.17.0, and v8.18.0. This node only sends v8800000 requests, but it can send a valid response to any v7170099
* requests that it receives.
*
* Note that these are not really TransportVersion constants as used elsewhere in ES, they're independent things that just happen to be
* stored in the same location in the message header and which roughly match the same ID numbering scheme. Older versions of ES did rely
* on them matching the real transport protocol (which itself matched the release version numbers), but these days that's no longer
* true.
*
* Here are some example messages, broken down to show their structure. See TransportHandshakerRawMessageTests for supporting tests.
*
* ## v7170099 Requests:
*
* 45 53 -- 'ES' marker
* 00 00 00 31 -- total message length
* 00 00 00 00 00 00 00 01 -- request ID
* 08 -- status flags (0b1000 == handshake request)
* 00 6d 68 33 -- handshake protocol version (0x6d6833 == 7170099)
* 00 00 00 1a -- length of variable portion of header
* 00 -- no request headers [1]
* 00 -- no response headers [1]
* 00 -- no features [2]
* 16 -- action string size
* 69 6e 74 65 72 6e 61 6c }
* 3a 74 63 70 2f 68 61 6e }- ASCII representation of HANDSHAKE_ACTION_NAME
* 64 73 68 61 6b 65 }
* 00 -- no parent task ID [3]
* 04 -- payload length
* c3 f9 eb 03 -- max acceptable protocol version (vInt: 00000011 11101011 11111001 11000011 == 8060099)
*
* ## v7170099 Responses:
*
* 45 53 -- 'ES' marker
* 00 00 00 17 -- total message length
* 00 00 00 00 00 00 00 01 -- request ID (copied from request)
* 09 -- status flags (0b1001 == handshake response)
* 00 6d 68 33 -- handshake protocol version (0x6d6833 == 7170099, copied from request)
* 00 00 00 02 -- length of following variable portion of header
* 00 -- no request headers [1]
* 00 -- no response headers [1]
* c3 f9 eb 03 -- max acceptable protocol version (vInt: 00000011 11101011 11111001 11000011 == 8060099)
*
* ## v8800000 Requests:
*
* 45 53 -- 'ES' marker
* 00 00 00 36 -- total message length
* 00 00 00 00 00 00 00 01 -- request ID
* 08 -- status flags (0b1000 == handshake request)
* 00 86 47 00 -- handshake protocol version (0x6d6833 == 7170099)
* 00 00 00 19 -- length of variable portion of header
* 00 -- no request headers [1]
* 00 -- no response headers [1]
* 16 -- action string size
* 69 6e 74 65 72 6e 61 6c }
* 3a 74 63 70 2f 68 61 6e }- ASCII representation of HANDSHAKE_ACTION_NAME
* 64 73 68 61 6b 65 }
* 00 -- no parent task ID [3]
* 0a -- payload length
* e8 8f 9b 04 -- requesting node transport version (vInt: 00000100 10011011 10001111 11101000 == 8833000)
* 05 -- requesting node release version string length
* 39 2e 30 2e 30 -- requesting node release version string "9.0.0"
*
* ## v8800000 Responses:
*
* 45 53 -- 'ES' marker
* 00 00 00 1d -- total message length
* 00 00 00 00 00 00 00 01 -- request ID (copied from request)
* 09 -- status flags (0b1001 == handshake response)
* 00 86 47 00 -- handshake protocol version (0x864700 == 8800000, copied from request)
* 00 00 00 02 -- length of following variable portion of header
* 00 -- no request headers [1]
* 00 -- no response headers [1]
* e8 8f 9b 04 -- responding node transport version (vInt: 00000100 10011011 10001111 11101000 == 8833000)
* 05 -- responding node release version string length
* 39 2e 30 2e 30 -- responding node release version string "9.0.0"
*
* [1] Thread context headers should be empty; see org.elasticsearch.common.util.concurrent.ThreadContext.ThreadContextStruct.writeTo
* for their structure.
* [2] A list of strings, which can safely be ignored
* [3] Parent task ID should be empty; see org.elasticsearch.tasks.TaskId.writeTo for its structure.
*/
private static final Logger logger = LogManager.getLogger(TransportHandshaker.class);
static final TransportVersion V8_HANDSHAKE_VERSION = TransportVersion.fromId(7_17_00_99);
static final TransportVersion V9_HANDSHAKE_VERSION = TransportVersion.fromId(8_800_00_0);
static final Set<TransportVersion> ALLOWED_HANDSHAKE_VERSIONS = Set.of(V8_HANDSHAKE_VERSION, V9_HANDSHAKE_VERSION);
static final String HANDSHAKE_ACTION_NAME = "internal:tcp/handshake";
private final ConcurrentMap<Long, HandshakeResponseHandler> pendingHandshakes = new ConcurrentHashMap<>();
private final CounterMetric numHandshakes = new CounterMetric();
private final TransportVersion version;
private final ThreadPool threadPool;
private final HandshakeRequestSender handshakeRequestSender;
private final boolean ignoreDeserializationErrors;
TransportHandshaker(
TransportVersion version,
ThreadPool threadPool,
HandshakeRequestSender handshakeRequestSender,
boolean ignoreDeserializationErrors
) {
this.version = version;
this.threadPool = threadPool;
this.handshakeRequestSender = handshakeRequestSender;
this.ignoreDeserializationErrors = ignoreDeserializationErrors;
}
void sendHandshake(
long requestId,
DiscoveryNode node,
TcpChannel channel,
TimeValue timeout,
ActionListener<TransportVersion> listener
) {
numHandshakes.inc();
final HandshakeResponseHandler handler = new HandshakeResponseHandler(requestId, channel, listener);
pendingHandshakes.put(requestId, handler);
channel.addCloseListener(
ActionListener.running(() -> handler.handleLocalException(new TransportException("handshake failed because connection reset")))
);
boolean success = false;
try {
handshakeRequestSender.sendRequest(node, channel, requestId, V9_HANDSHAKE_VERSION);
threadPool.schedule(
() -> handler.handleLocalException(new ConnectTransportException(node, "handshake_timeout[" + timeout + "]")),
timeout,
threadPool.generic()
);
success = true;
} catch (Exception e) {
handler.handleLocalException(new ConnectTransportException(node, "failure to send " + HANDSHAKE_ACTION_NAME, e));
} finally {
if (success == false) {
TransportResponseHandler<?> removed = pendingHandshakes.remove(requestId);
assert removed == null : "Handshake should not be pending if exception was thrown";
}
}
}
void handleHandshake(TransportChannel channel, long requestId, StreamInput stream) throws IOException {
final HandshakeRequest handshakeRequest;
try {
handshakeRequest = new HandshakeRequest(stream);
} catch (Exception e) {
assert ignoreDeserializationErrors : e;
throw e;
}
final int nextByte = stream.read();
if (nextByte != -1) {
final IllegalStateException exception = new IllegalStateException(
"Handshake request not fully read for requestId ["
+ requestId
+ "], action ["
+ TransportHandshaker.HANDSHAKE_ACTION_NAME
+ "], available ["
+ stream.available()
+ "]; resetting"
);
assert ignoreDeserializationErrors : exception;
throw exception;
}
channel.sendResponse(
new HandshakeResponse(
ensureCompatibleVersion(version, handshakeRequest.transportVersion, handshakeRequest.releaseVersion, channel),
Build.current().version()
)
);
}
private static TransportVersion ensureCompatibleVersion(
TransportVersion localTransportVersion,
TransportVersion remoteTransportVersion,
String releaseVersion,
Object channel
) {
if (TransportVersion.isCompatible(remoteTransportVersion)) {
if (remoteTransportVersion.onOrAfter(localTransportVersion)) {
// Remote is semantically newer than us (i.e. has a greater transport protocol version), so we propose using our current
// transport protocol version. If we're initiating the connection then that's the version we'll use; if the other end is
// initiating the connection then it's up to the other end to decide whether to use this version (if it knows it) or
// an earlier one.
return localTransportVersion;
}
final var bestKnownVersion = remoteTransportVersion.bestKnownVersion();
if (bestKnownVersion.equals(TransportVersion.zero()) == false) {
if (bestKnownVersion.equals(remoteTransportVersion) == false) {
// Remote is semantically older than us (i.e. has a lower transport protocol version), but we do not know its exact
// transport protocol version so it must be chronologically newer. We recommend not doing this, it implies an upgrade
// that goes backwards in time and therefore may regress in some way, so we emit a warning. But we carry on with the
// best known version anyway since both ends will know it.
logger.warn(
"""
Negotiating transport handshake with remote node with version [{}/{}] received on [{}] which appears to be \
from a chronologically-newer release with a numerically-older version compared to this node's version [{}/{}]. \
Upgrading to this version from a chronologically-newer release may not work reliably and is not recommended. \
Falling back to transport protocol version [{}].""",
releaseVersion,
remoteTransportVersion,
channel,
Build.current().version(),
localTransportVersion,
bestKnownVersion
);
} // else remote is semantically older and we _do_ know its version, so we just use that without further fuss.
return bestKnownVersion;
}
}
final var message = Strings.format(
"""
Rejecting unreadable transport handshake from remote node with version [%s/%s] received on [%s] since this node has \
version [%s/%s] which has an incompatible wire format.""",
releaseVersion,
remoteTransportVersion,
channel,
Build.current().version(),
localTransportVersion
);
logger.warn(message);
throw new IllegalStateException(message);
}
TransportResponseHandler<HandshakeResponse> removeHandlerForHandshake(long requestId) {
return pendingHandshakes.remove(requestId);
}
int getNumPendingHandshakes() {
return pendingHandshakes.size();
}
long getNumHandshakes() {
return numHandshakes.count();
}
private
|
TransportHandshaker
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeLifelineProtocolServerSideTranslatorPB.java
|
{
"start": 1676,
"end": 2937
}
|
class ____ implements
DatanodeLifelineProtocolPB {
private static final LifelineResponseProto VOID_LIFELINE_RESPONSE_PROTO =
LifelineResponseProto.newBuilder().build();
private final DatanodeLifelineProtocol impl;
public DatanodeLifelineProtocolServerSideTranslatorPB(
DatanodeLifelineProtocol impl) {
this.impl = impl;
}
@Override
public LifelineResponseProto sendLifeline(RpcController controller,
HeartbeatRequestProto request) throws ServiceException {
try {
final StorageReport[] report = PBHelperClient.convertStorageReports(
request.getReportsList());
VolumeFailureSummary volumeFailureSummary =
request.hasVolumeFailureSummary() ?
PBHelper.convertVolumeFailureSummary(
request.getVolumeFailureSummary()) : null;
impl.sendLifeline(PBHelper.convert(request.getRegistration()), report,
request.getCacheCapacity(), request.getCacheUsed(),
request.getXmitsInProgress(), request.getXceiverCount(),
request.getFailedVolumes(), volumeFailureSummary);
return VOID_LIFELINE_RESPONSE_PROTO;
} catch (IOException e) {
throw new ServiceException(e);
}
}
}
|
DatanodeLifelineProtocolServerSideTranslatorPB
|
java
|
quarkusio__quarkus
|
integration-tests/jpa/src/test/java/io/quarkus/it/jpa/configurationless/JPALoadScriptTestInGraalITCase.java
|
{
"start": 191,
"end": 258
}
|
class ____ extends JPALoadScriptTest {
}
|
JPALoadScriptTestInGraalITCase
|
java
|
reactor__reactor-core
|
reactor-core/src/test/java/reactor/core/publisher/FluxErrorOnRequestTest.java
|
{
"start": 846,
"end": 1689
}
|
class ____ {
@Test
public void scanOperator(){
FluxErrorOnRequest test = new FluxErrorOnRequest(new IllegalStateException());
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isNull();
}
@Test
public void scanSubscription() {
CoreSubscriber<Integer> actual = new LambdaSubscriber<>(null, e -> {}, null, sub -> sub.request(100));
IllegalStateException error = new IllegalStateException();
FluxErrorOnRequest.ErrorSubscription test = new FluxErrorOnRequest.ErrorSubscription(actual, error);
assertThat(test.scan(Scannable.Attr.ERROR)).isSameAs(error);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(actual);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
}
|
FluxErrorOnRequestTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/StatementSwitchToExpressionSwitchTest.java
|
{
"start": 23151,
"end": 24105
}
|
class ____ {
public void foo(Suit suit) {
// BUG: Diagnostic contains:
switch (suit) {
case HEART:
System.out.println("heart");
break;
case DIAMOND:
{
System.out.println("nested2a");
{
System.out.println("nested2b");
break;
}
}
case SPADE:
case CLUB:
System.out.println("everything else");
}
}
}
""")
.setArgs("-XepOpt:StatementSwitchToExpressionSwitch:EnableDirectConversion")
.doTest();
}
@Test
public void switchByEnumWithConditionalControl_noError() {
helper
.addSourceLines(
"Test.java",
"""
|
Test
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/RecipientListStopOnExceptionWithOnExceptionTest.java
|
{
"start": 1035,
"end": 2301
}
|
class ____ extends ContextTestSupport {
@Test
public void testRecipientListStopOnException() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(0);
getMockEndpoint("mock:a").expectedMessageCount(1);
getMockEndpoint("mock:b").expectedMessageCount(1);
getMockEndpoint("mock:c").expectedMessageCount(0);
String out = template.requestBodyAndHeader("direct:start", "Hello World", "foo", "direct:a,direct:b,direct:c",
String.class);
assertEquals("Damn Forced", out);
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
onException(Exception.class).handled(true).to("mock:handled").transform(simple("Damn ${exception.message}"));
from("direct:start").recipientList(header("foo")).stopOnException().to("mock:result");
from("direct:a").to("mock:a");
from("direct:b").to("mock:b").throwException(new IllegalArgumentException("Forced"));
from("direct:c").to("mock:c");
}
};
}
}
|
RecipientListStopOnExceptionWithOnExceptionTest
|
java
|
apache__flink
|
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableEnvironment.java
|
{
"start": 23408,
"end": 23670
}
|
class ____ for the format of the path.
* @param functionDescriptor The descriptor of the function to create.
*/
void createFunction(String path, FunctionDescriptor functionDescriptor);
/**
* Registers a {@link UserDefinedFunction}
|
description
|
java
|
junit-team__junit5
|
junit-jupiter-params/src/main/java/org/junit/jupiter/params/provider/AnnotationBasedArgumentsProvider.java
|
{
"start": 1531,
"end": 3594
}
|
class ____<A extends Annotation>
implements ArgumentsProvider, AnnotationConsumer<A> {
public AnnotationBasedArgumentsProvider() {
}
private final List<A> annotations = new ArrayList<>();
@Override
public final void accept(A annotation) {
Preconditions.notNull(annotation, "annotation must not be null");
annotations.add(annotation);
}
@Override
public Stream<? extends Arguments> provideArguments(ParameterDeclarations parameters, ExtensionContext context) {
return annotations.stream().flatMap(annotation -> provideArguments(parameters, context, annotation));
}
/**
* Provide a {@link Stream} of {@link Arguments} — based on metadata in the
* provided annotation — to be passed to a {@code @ParameterizedTest} method.
*
* @param context the current extension context; never {@code null}
* @param annotation the annotation to process; never {@code null}
* @return a stream of arguments; never {@code null}
* @deprecated Please implement
* {@link #provideArguments(ParameterDeclarations, ExtensionContext, Annotation)}
* instead.
*/
@Deprecated(since = "5.13")
@API(status = DEPRECATED, since = "5.13")
protected Stream<? extends Arguments> provideArguments(ExtensionContext context, A annotation) {
throw new JUnitException("""
AnnotationBasedArgumentsProvider does not override the \
provideArguments(ParameterDeclarations, ExtensionContext, Annotation) method. \
Please report this issue to the maintainers of %s.""".formatted(getClass().getName()));
}
/**
* The returned {@code Stream} will be {@link Stream#close() properly closed}
* by the default implementation of
* {@link #provideArguments(ParameterDeclarations, ExtensionContext)},
* making it safe to use a resource such as
* {@link java.nio.file.Files#lines(java.nio.file.Path) Files.lines()}.
*/
protected Stream<? extends Arguments> provideArguments(ParameterDeclarations parameters, ExtensionContext context,
A annotation) {
return provideArguments(context, annotation);
}
}
|
AnnotationBasedArgumentsProvider
|
java
|
elastic__elasticsearch
|
test/framework/src/test/java/org/elasticsearch/test/rest/ESRestTestCaseTests.java
|
{
"start": 675,
"end": 1944
}
|
class ____ extends ESTestCase {
public void testIgnoreMatchMultipleTemplatesPattern() {
String input = "index [test_index] matches multiple legacy templates [global, prevent-bwc-deprecation-template], "
+ "composable templates will only match a single template";
Matcher matcher = ESRestTestCase.CREATE_INDEX_MULTIPLE_MATCHING_TEMPLATES.matcher(input);
assertThat(matcher.matches(), is(true));
assertThat(matcher.group(1), equalTo("test_index"));
assertThat(matcher.group(2), equalTo("global, prevent-bwc-deprecation-template"));
input = "index template [1] has index patterns [logs-*] matching patterns from existing older templates [global] "
+ "with patterns (global => [*]); this template [1] will take precedence during new index creation";
matcher = ESRestTestCase.PUT_TEMPLATE_MULTIPLE_MATCHING_TEMPLATES.matcher(input);
assertThat(matcher.matches(), is(true));
assertThat(matcher.group(1), equalTo("1"));
assertThat(matcher.group(2), equalTo("logs-*"));
assertThat(matcher.group(3), equalTo("global"));
assertThat(matcher.group(4), equalTo("global => [*]"));
assertThat(matcher.group(5), equalTo("1"));
}
}
|
ESRestTestCaseTests
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/UserRepositoryTests.java
|
{
"start": 4029,
"end": 4636
}
|
class ____ provide a custom provider configuration.
*
* @author Oliver Gierke
* @author Kevin Raymond
* @author Thomas Darimont
* @author Christoph Strobl
* @author Mark Paluch
* @author Kevin Peters
* @author Jens Schauder
* @author Andrey Kovalev
* @author Sander Krabbenborg
* @author Jesse Wouters
* @author Greg Turnquist
* @author Diego Krupitza
* @author Daniel Shuy
* @author Simon Paradies
* @author Geoffrey Deremetz
* @author Krzysztof Krason
* @author Yanming Zhou
*/
@ExtendWith(SpringExtension.class)
@ContextConfiguration("classpath:application-context.xml")
@Transactional
|
and
|
java
|
apache__camel
|
components/camel-telegram/src/main/java/org/apache/camel/component/telegram/model/InlineQueryResultLocation.java
|
{
"start": 1935,
"end": 5967
}
|
class ____ {
private String id;
private InlineKeyboardMarkup replyMarkup;
private Float latitude;
private Float longitude;
private Integer livePeriod;
private String thumbUrl;
private String thumbWidth;
private String thumbHeight;
private String title;
private InputMessageContent inputMessageContext;
private Builder() {
}
public Builder id(String id) {
this.id = id;
return this;
}
public Builder replyMarkup(InlineKeyboardMarkup replyMarkup) {
this.replyMarkup = replyMarkup;
return this;
}
public Builder latitude(Float latitude) {
this.latitude = latitude;
return this;
}
public Builder longitude(Float longitude) {
this.longitude = longitude;
return this;
}
public Builder livePeriod(Integer livePeriod) {
this.livePeriod = livePeriod;
return this;
}
public Builder thumbUrl(String thumbUrl) {
this.thumbUrl = thumbUrl;
return this;
}
public Builder thumbWidth(String thumbWidth) {
this.thumbWidth = thumbWidth;
return this;
}
public Builder thumbHeight(String thumbHeight) {
this.thumbHeight = thumbHeight;
return this;
}
public Builder title(String title) {
this.title = title;
return this;
}
public Builder inputMessageContext(InputMessageContent inputMessageContext) {
this.inputMessageContext = inputMessageContext;
return this;
}
public InlineQueryResultLocation build() {
InlineQueryResultLocation inlineQueryResultLocation = new InlineQueryResultLocation();
inlineQueryResultLocation.setType(TYPE);
inlineQueryResultLocation.setId(id);
inlineQueryResultLocation.setReplyMarkup(replyMarkup);
inlineQueryResultLocation.latitude = this.latitude;
inlineQueryResultLocation.thumbHeight = this.thumbHeight;
inlineQueryResultLocation.thumbWidth = this.thumbWidth;
inlineQueryResultLocation.inputMessageContext = this.inputMessageContext;
inlineQueryResultLocation.title = this.title;
inlineQueryResultLocation.livePeriod = this.livePeriod;
inlineQueryResultLocation.longitude = this.longitude;
inlineQueryResultLocation.thumbUrl = this.thumbUrl;
return inlineQueryResultLocation;
}
}
public Float getLatitude() {
return latitude;
}
public Float getLongitude() {
return longitude;
}
public Integer getLivePeriod() {
return livePeriod;
}
public String getThumbUrl() {
return thumbUrl;
}
public String getThumbWidth() {
return thumbWidth;
}
public String getThumbHeight() {
return thumbHeight;
}
public String getTitle() {
return title;
}
public InputMessageContent getInputMessageContext() {
return inputMessageContext;
}
public void setLatitude(Float latitude) {
this.latitude = latitude;
}
public void setLongitude(Float longitude) {
this.longitude = longitude;
}
public void setLivePeriod(Integer livePeriod) {
this.livePeriod = livePeriod;
}
public void setThumbUrl(String thumbUrl) {
this.thumbUrl = thumbUrl;
}
public void setThumbWidth(String thumbWidth) {
this.thumbWidth = thumbWidth;
}
public void setThumbHeight(String thumbHeight) {
this.thumbHeight = thumbHeight;
}
public void setTitle(String title) {
this.title = title;
}
public void setInputMessageContext(InputMessageContent inputMessageContext) {
this.inputMessageContext = inputMessageContext;
}
}
|
Builder
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/SpringThreadsCoreAndMaxPoolTest.java
|
{
"start": 1046,
"end": 1325
}
|
class ____ extends ThreadsCoreAndMaxPoolTest {
@Override
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this, "org/apache/camel/spring/processor/ThreadsCoreAndMaxPoolTest.xml");
}
}
|
SpringThreadsCoreAndMaxPoolTest
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/STS2EndpointBuilderFactory.java
|
{
"start": 19517,
"end": 21881
}
|
class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final STS2HeaderNameBuilder INSTANCE = new STS2HeaderNameBuilder();
/**
* The operation we want to perform.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code AwsStsOperation}.
*/
public String awsStsOperation() {
return "CamelAwsStsOperation";
}
/**
* The Amazon Resource Name (ARN) of the role to assume.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code AwsStsRoleArn}.
*/
public String awsStsRoleArn() {
return "CamelAwsStsRoleArn";
}
/**
* An identifier for the assumed role session.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code AwsStsRoleSessionName}.
*/
public String awsStsRoleSessionName() {
return "CamelAwsStsRoleSessionName";
}
/**
* The name of the federated user.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code AwsStsFederatedName}.
*/
public String awsStsFederatedName() {
return "CamelAwsStsFederatedName";
}
/**
* The duration, in seconds, of the role session. It could go from 900
* seconds, to 1 to 12 hours (dependent on administrator settings. The
* default if not specified is 3600 seconds.
*
* The option is a: {@code Integer} type.
*
* Group: producer
*
* @return the name of the header {@code
* AwsStsAssumeRoleDurationSeconds}.
*/
public String awsStsAssumeRoleDurationSeconds() {
return "CamelAwsStsAssumeRoleDurationSeconds";
}
}
static STS2EndpointBuilder endpointBuilder(String componentName, String path) {
|
STS2HeaderNameBuilder
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/BreakingBytesRefBuilderTests.java
|
{
"start": 6852,
"end": 9383
}
|
interface ____ {
void applyToBuilder(BreakingBytesRefBuilder builder);
void applyToOracle(BytesRefBuilder oracle);
}
private void testAgainstOracle(Supplier<TestIteration> iterations) {
int limit = between(1_000, 10_000);
String label = randomAlphaOfLength(4);
CircuitBreaker breaker = new MockBigArrays.LimitedBreaker(CircuitBreaker.REQUEST, ByteSizeValue.ofBytes(limit));
assertThat(breaker.getUsed(), equalTo(0L));
try (BreakingBytesRefBuilder builder = new BreakingBytesRefBuilder(breaker, label)) {
assertThat(breaker.getUsed(), equalTo(builder.ramBytesUsed()));
BytesRefBuilder oracle = new BytesRefBuilder();
assertThat(builder.bytesRefView(), equalTo(oracle.get()));
while (true) {
TestIteration iteration = iterations.get();
int prevOracle = oracle.length();
iteration.applyToOracle(oracle);
int size = oracle.length() - prevOracle;
int targetSize = builder.length() + size;
boolean willResize = targetSize >= builder.bytes().length;
if (willResize) {
long resizeMemoryUsage = BreakingBytesRefBuilder.SHALLOW_SIZE + ramForArray(builder.bytes().length);
resizeMemoryUsage += ramForArray(ArrayUtil.oversize(targetSize, Byte.BYTES));
if (resizeMemoryUsage > limit) {
Exception e = expectThrows(CircuitBreakingException.class, () -> iteration.applyToBuilder(builder));
assertThat(e.getMessage(), equalTo("over test limit"));
break;
}
}
iteration.applyToBuilder(builder);
assertThat(builder.bytesRefView(), equalTo(oracle.get()));
assertThat(
builder.ramBytesUsed(),
// Label and breaker aren't counted in ramBytesUsed because they are usually shared with other instances.
equalTo(RamUsageTester.ramUsed(builder) - RamUsageTester.ramUsed(label) - RamUsageTester.ramUsed(breaker))
);
assertThat(builder.ramBytesUsed(), equalTo(breaker.getUsed()));
}
}
assertThat(breaker.getUsed(), equalTo(0L));
}
private long ramForArray(int length) {
return RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + length);
}
}
|
TestIteration
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/JaasConfiguration.java
|
{
"start": 1078,
"end": 2975
}
|
class ____ extends Configuration {
private final javax.security.auth.login.Configuration baseConfig =
javax.security.auth.login.Configuration.getConfiguration();
private final AppConfigurationEntry[] entry;
private final String entryName;
/**
* Add an entry to the jaas configuration with the passed in name,
* principal, and keytab. The other necessary options will be set for you.
*
* @param entryName The name of the entry (e.g. "Client")
* @param principal The principal of the user
* @param keytab The location of the keytab
*/
public JaasConfiguration(String entryName, String principal, String keytab) {
this.entryName = entryName;
Map<String, String> options = new HashMap<>();
options.put("keyTab", keytab);
options.put("principal", principal);
options.put("useKeyTab", "true");
options.put("storeKey", "true");
options.put("useTicketCache", "false");
options.put("refreshKrb5Config", "true");
String jaasEnvVar = System.getenv("HADOOP_JAAS_DEBUG");
if ("true".equalsIgnoreCase(jaasEnvVar)) {
options.put("debug", "true");
}
entry = new AppConfigurationEntry[]{
new AppConfigurationEntry(getKrb5LoginModuleName(),
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
options)};
}
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
return (entryName.equals(name)) ? entry : ((baseConfig != null)
? baseConfig.getAppConfigurationEntry(name) : null);
}
private String getKrb5LoginModuleName() {
String krb5LoginModuleName;
if (System.getProperty("java.vendor").contains("IBM")) {
krb5LoginModuleName = "com.ibm.security.auth.module.Krb5LoginModule";
} else {
krb5LoginModuleName = "com.sun.security.auth.module.Krb5LoginModule";
}
return krb5LoginModuleName;
}
}
|
JaasConfiguration
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java
|
{
"start": 4216,
"end": 65851
}
|
class ____ extends ESIntegTestCase {
private final String INDEX = RandomStrings.randomAsciiLettersOfLength(random(), 10).toLowerCase(Locale.ROOT);
private final String FIELD = RandomStrings.randomAsciiLettersOfLength(random(), 10).toLowerCase(Locale.ROOT);
private final CompletionMappingBuilder completionMappingBuilder = new CompletionMappingBuilder();
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(InternalSettingsPlugin.class);
}
public void testTieBreak() throws Exception {
final CompletionMappingBuilder mapping = new CompletionMappingBuilder();
mapping.indexAnalyzer("keyword");
createIndexAndMapping(mapping);
int numDocs = randomIntBetween(3, 50);
List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>();
String[] entries = new String[numDocs];
for (int i = 0; i < numDocs; i++) {
String value = "a" + randomAlphaOfLengthBetween(1, 10);
entries[i] = value;
indexRequestBuilders.add(
prepareIndex(INDEX).setId("" + i)
.setSource(
jsonBuilder().startObject().startObject(FIELD).field("input", value).field("weight", 10).endObject().endObject()
)
);
}
Arrays.sort(entries);
indexRandom(true, indexRequestBuilders);
for (int i = 1; i < numDocs; i++) {
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("a").size(i);
String[] topEntries = Arrays.copyOfRange(entries, 0, i);
assertSuggestions("foo", prefix, topEntries);
}
}
public void testPrefix() throws Exception {
final CompletionMappingBuilder mapping = new CompletionMappingBuilder();
createIndexAndMapping(mapping);
int numDocs = 10;
List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>();
for (int i = 1; i <= numDocs; i++) {
indexRequestBuilders.add(
prepareIndex(INDEX).setId("" + i)
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.field("input", "suggestion" + i)
.field("weight", i)
.endObject()
.endObject()
)
);
}
indexRandom(true, indexRequestBuilders);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
assertSuggestions("foo", prefix, "suggestion10", "suggestion9", "suggestion8", "suggestion7", "suggestion6");
}
/**
* test that suggestion works if prefix is either provided via {@link CompletionSuggestionBuilder#text(String)} or
* {@link SuggestBuilder#setGlobalText(String)}
*/
public void testTextAndGlobalText() throws Exception {
final CompletionMappingBuilder mapping = new CompletionMappingBuilder();
createIndexAndMapping(mapping);
int numDocs = 10;
List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>();
for (int i = 1; i <= numDocs; i++) {
indexRequestBuilders.add(
prepareIndex(INDEX).setId("" + i)
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.field("input", "suggestion" + i)
.field("weight", i)
.endObject()
.endObject()
)
);
}
indexRandom(true, indexRequestBuilders);
CompletionSuggestionBuilder noText = SuggestBuilders.completionSuggestion(FIELD);
CompletionSuggestionBuilder withText = SuggestBuilders.completionSuggestion(FIELD).text("sugg");
assertResponses(
response -> assertSuggestions(response, "foo", "suggestion10", "suggestion9", "suggestion8", "suggestion7", "suggestion6"),
prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", noText).setGlobalText("sugg")),
prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", withText)),
// test that suggestion text takes precedence over global text
prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", withText).setGlobalText("bogus"))
);
}
public void testRegex() throws Exception {
final CompletionMappingBuilder mapping = new CompletionMappingBuilder();
createIndexAndMapping(mapping);
int numDocs = 10;
List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>();
for (int i = 1; i <= numDocs; i++) {
indexRequestBuilders.add(
prepareIndex(INDEX).setId("" + i)
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.field("input", "sugg" + i + "estion")
.field("weight", i)
.endObject()
.endObject()
)
);
}
indexRandom(true, indexRequestBuilders);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).regex("sugg.*es");
assertSuggestions("foo", prefix, "sugg10estion", "sugg9estion", "sugg8estion", "sugg7estion", "sugg6estion");
}
public void testFuzzy() throws Exception {
final CompletionMappingBuilder mapping = new CompletionMappingBuilder();
createIndexAndMapping(mapping);
int numDocs = 10;
List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>();
for (int i = 1; i <= numDocs; i++) {
indexRequestBuilders.add(
prepareIndex(INDEX).setId("" + i)
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.field("input", "sugxgestion" + i)
.field("weight", i)
.endObject()
.endObject()
)
);
}
indexRandom(true, indexRequestBuilders);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg", Fuzziness.ONE);
assertSuggestions("foo", prefix, "sugxgestion10", "sugxgestion9", "sugxgestion8", "sugxgestion7", "sugxgestion6");
}
public void testEarlyTermination() throws Exception {
final CompletionMappingBuilder mapping = new CompletionMappingBuilder();
createIndexAndMapping(mapping);
int numDocs = atLeast(100);
List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>();
for (int i = 0; i < numDocs; i++) {
indexRequestBuilders.add(
prepareIndex(INDEX).setId("" + i)
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.field("input", "suggestion" + (numDocs - i))
.field("weight", numDocs - i)
.endObject()
.endObject()
)
);
}
indexRandom(true, indexRequestBuilders);
int size = randomIntBetween(3, 10);
String[] outputs = new String[size];
for (int i = 0; i < size; i++) {
outputs[i] = "suggestion" + (numDocs - i);
}
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sug").size(size);
assertSuggestions("foo", prefix, outputs);
CompletionSuggestionBuilder regex = SuggestBuilders.completionSuggestion(FIELD).regex("su[g|s]g").size(size);
assertSuggestions("foo", regex, outputs);
CompletionSuggestionBuilder fuzzyPrefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg", Fuzziness.ONE).size(size);
assertSuggestions("foo", fuzzyPrefix, outputs);
}
public void testSuggestDocument() throws Exception {
final CompletionMappingBuilder mapping = new CompletionMappingBuilder();
createIndexAndMapping(mapping);
int numDocs = randomIntBetween(10, 100);
List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>();
for (int i = 1; i <= numDocs; i++) {
indexRequestBuilders.add(
prepareIndex(INDEX).setId("" + i)
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.field("input", "suggestion" + i)
.field("weight", i)
.endObject()
.endObject()
)
);
}
indexRandom(true, indexRequestBuilders);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg").size(numDocs);
assertResponse(prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", prefix)), response -> {
CompletionSuggestion completionSuggestion = response.getSuggest().getSuggestion("foo");
CompletionSuggestion.Entry options = completionSuggestion.getEntries().get(0);
assertThat(options.getOptions().size(), equalTo(numDocs));
int id = numDocs;
for (CompletionSuggestion.Entry.Option option : options) {
assertThat(option.getText().toString(), equalTo("suggestion" + id));
assertThat(option.getHit(), hasId("" + id));
assertThat(option.getHit(), hasScore((id)));
assertNotNull(option.getHit().getSourceAsMap());
id--;
}
});
}
public void testSuggestDocumentNoSource() throws Exception {
final CompletionMappingBuilder mapping = new CompletionMappingBuilder();
createIndexAndMapping(mapping);
int numDocs = randomIntBetween(10, 100);
List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>();
for (int i = 1; i <= numDocs; i++) {
indexRequestBuilders.add(
prepareIndex(INDEX).setId("" + i)
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.field("input", "suggestion" + i)
.field("weight", i)
.endObject()
.endObject()
)
);
}
indexRandom(true, indexRequestBuilders);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg").size(numDocs);
assertResponse(prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", prefix)).setFetchSource(false), response -> {
CompletionSuggestion completionSuggestion = response.getSuggest().getSuggestion("foo");
CompletionSuggestion.Entry options = completionSuggestion.getEntries().get(0);
assertThat(options.getOptions().size(), equalTo(numDocs));
int id = numDocs;
for (CompletionSuggestion.Entry.Option option : options) {
assertThat(option.getText().toString(), equalTo("suggestion" + id));
assertThat(option.getHit(), hasId("" + id));
assertThat(option.getHit(), hasScore((id)));
assertNull(option.getHit().getSourceAsMap());
id--;
}
});
}
public void testSuggestDocumentSourceFiltering() throws Exception {
final CompletionMappingBuilder mapping = new CompletionMappingBuilder();
createIndexAndMapping(mapping);
int numDocs = randomIntBetween(10, 100);
List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>();
for (int i = 1; i <= numDocs; i++) {
indexRequestBuilders.add(
prepareIndex(INDEX).setId("" + i)
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.field("input", "suggestion" + i)
.field("weight", i)
.endObject()
.field("a", "include")
.field("b", "exclude")
.endObject()
)
);
}
indexRandom(true, indexRequestBuilders);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg").size(numDocs);
assertResponse(
prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", prefix)).setFetchSource("a", "b"),
response -> {
CompletionSuggestion completionSuggestion = response.getSuggest().getSuggestion("foo");
CompletionSuggestion.Entry options = completionSuggestion.getEntries().get(0);
assertThat(options.getOptions().size(), equalTo(numDocs));
int id = numDocs;
for (CompletionSuggestion.Entry.Option option : options) {
assertThat(option.getText().toString(), equalTo("suggestion" + id));
assertThat(option.getHit(), hasId("" + id));
assertThat(option.getHit(), hasScore((id)));
Map<String, Object> source = option.getHit().getSourceAsMap();
assertNotNull(source);
Set<String> sourceFields = source.keySet();
assertThat(sourceFields, contains("a"));
assertThat(sourceFields, not(contains("b")));
id--;
}
}
);
}
/**
* Suggestions run on an empty index should return a suggest element as part of the response. See #42473 for details.
*/
public void testSuggestEmptyIndex() throws IOException {
final CompletionMappingBuilder mapping = new CompletionMappingBuilder();
createIndexAndMapping(mapping);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("v");
assertResponse(
prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", prefix)).setFetchSource("a", "b"),
response -> {
Suggest suggest = response.getSuggest();
assertNotNull(suggest);
CompletionSuggestion completionSuggestion = suggest.getSuggestion("foo");
CompletionSuggestion.Entry options = completionSuggestion.getEntries().get(0);
assertEquals("v", options.getText().string());
assertEquals(1, options.getLength());
assertEquals(0, options.getOffset());
assertEquals(0, options.options.size());
}
);
}
public void testThatWeightsAreWorking() throws Exception {
createIndexAndMapping(completionMappingBuilder);
List<String> similarNames = Arrays.asList("the", "The Prodigy", "The Verve", "The the");
// the weight is 1000 divided by string length, so the results are easy to to check
for (String similarName : similarNames) {
prepareIndex(INDEX).setId(similarName)
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.startArray("input")
.value(similarName)
.endArray()
.field("weight", 1000 / similarName.length())
.endObject()
.endObject()
)
.get();
}
refresh();
assertSuggestions("the", "the", "The the", "The Verve", "The Prodigy");
}
public void testThatWeightMustBeAnInteger() throws Exception {
createIndexAndMapping(completionMappingBuilder);
Exception e = expectThrows(
DocumentParsingException.class,
prepareIndex(INDEX).setId("1")
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.startArray("input")
.value("sth")
.endArray()
.field("weight", 2.5)
.endObject()
.endObject()
)
);
assertThat(e.getCause().getMessage(), equalTo("weight must be an integer, but was [2.5]"));
}
public void testThatWeightCanBeAString() throws Exception {
createIndexAndMapping(completionMappingBuilder);
prepareIndex(INDEX).setId("1")
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.startArray("input")
.value("testing")
.endArray()
.field("weight", "10")
.endObject()
.endObject()
)
.get();
refresh();
assertResponse(
prepareSearch(INDEX).suggest(
new SuggestBuilder().addSuggestion("testSuggestions", new CompletionSuggestionBuilder(FIELD).text("test").size(10))
),
response -> {
assertSuggestions(response, "testSuggestions", "testing");
Suggest.Suggestion.Entry.Option option = response.getSuggest()
.getSuggestion("testSuggestions")
.getEntries()
.get(0)
.getOptions()
.get(0);
assertThat(option, is(instanceOf(CompletionSuggestion.Entry.Option.class)));
CompletionSuggestion.Entry.Option prefixOption = (CompletionSuggestion.Entry.Option) option;
assertThat(prefixOption.getText().string(), equalTo("testing"));
assertThat((long) prefixOption.getScore(), equalTo(10L));
}
);
}
public void testThatWeightMustNotBeANonNumberString() throws Exception {
createIndexAndMapping(completionMappingBuilder);
Exception e = expectThrows(
DocumentParsingException.class,
prepareIndex(INDEX).setId("1")
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.startArray("input")
.value("sth")
.endArray()
.field("weight", "thisIsNotValid")
.endObject()
.endObject()
)
);
assertThat(e.getCause().toString(), containsString("thisIsNotValid"));
}
public void testThatWeightAsStringMustBeInt() throws Exception {
createIndexAndMapping(completionMappingBuilder);
String weight = String.valueOf(Long.MAX_VALUE - 4);
Exception e = expectThrows(
DocumentParsingException.class,
prepareIndex(INDEX).setId("1")
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.startArray("input")
.value("testing")
.endArray()
.field("weight", weight)
.endObject()
.endObject()
)
);
assertThat(e.getCause().toString(), containsString(weight));
}
public void testThatInputCanBeAStringInsteadOfAnArray() throws Exception {
createIndexAndMapping(completionMappingBuilder);
prepareIndex(INDEX).setId("1")
.setSource(jsonBuilder().startObject().startObject(FIELD).field("input", "Foo Fighters").endObject().endObject())
.get();
refresh();
assertSuggestions("f", "Foo Fighters");
}
public void testDisabledPreserveSeparators() throws Exception {
completionMappingBuilder.preserveSeparators(false);
createIndexAndMapping(completionMappingBuilder);
prepareIndex(INDEX).setId("1")
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.startArray("input")
.value("Foo Fighters")
.endArray()
.field("weight", 10)
.endObject()
.endObject()
)
.get();
prepareIndex(INDEX).setId("2")
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.startArray("input")
.value("Foof")
.endArray()
.field("weight", 20)
.endObject()
.endObject()
)
.get();
refresh();
assertSuggestions("foof", "Foof", "Foo Fighters");
}
public void testEnabledPreserveSeparators() throws Exception {
completionMappingBuilder.preserveSeparators(true);
createIndexAndMapping(completionMappingBuilder);
prepareIndex(INDEX).setId("1")
.setSource(
jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Foo Fighters").endArray().endObject().endObject()
)
.get();
prepareIndex(INDEX).setId("2")
.setSource(jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Foof").endArray().endObject().endObject())
.get();
refresh();
assertSuggestions("foof", "Foof");
}
public void testThatMultipleInputsAreSupported() throws Exception {
createIndexAndMapping(completionMappingBuilder);
prepareIndex(INDEX).setId("1")
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.startArray("input")
.value("Foo Fighters")
.value("Fu Fighters")
.endArray()
.endObject()
.endObject()
)
.get();
refresh();
assertSuggestions("foo", "Foo Fighters");
assertSuggestions("fu", "Fu Fighters");
}
public void testThatShortSyntaxIsWorking() throws Exception {
createIndexAndMapping(completionMappingBuilder);
prepareIndex(INDEX).setId("1")
.setSource(
jsonBuilder().startObject().startArray(FIELD).value("The Prodigy Firestarter").value("Firestarter").endArray().endObject()
)
.get();
refresh();
assertSuggestions("t", "The Prodigy Firestarter");
assertSuggestions("f", "Firestarter");
}
public void testThatDisablingPositionIncrementsWorkForStopwords() throws Exception {
// analyzer which removes stopwords... so may not be the simple one
completionMappingBuilder.searchAnalyzer("classic").indexAnalyzer("classic").preservePositionIncrements(false);
createIndexAndMapping(completionMappingBuilder);
prepareIndex(INDEX).setId("1")
.setSource(
jsonBuilder().startObject().startObject(FIELD).startArray("input").value("The Beatles").endArray().endObject().endObject()
)
.get();
refresh();
assertSuggestions("b", "The Beatles");
}
public void testThatUpgradeToMultiFieldsWorks() throws Exception {
final XContentBuilder mapping = jsonBuilder().startObject()
.startObject("_doc")
.startObject("properties")
.startObject(FIELD)
.field("type", "text")
.endObject()
.endObject()
.endObject()
.endObject();
assertAcked(prepareCreate(INDEX).setMapping(mapping));
prepareIndex(INDEX).setId("1")
.setRefreshPolicy(IMMEDIATE)
.setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject())
.get();
ensureGreen(INDEX);
AcknowledgedResponse putMappingResponse = indicesAdmin().preparePutMapping(INDEX)
.setSource(
jsonBuilder().startObject()
.startObject("_doc")
.startObject("properties")
.startObject(FIELD)
.field("type", "text")
.startObject("fields")
.startObject("suggest")
.field("type", "completion")
.field("analyzer", "simple")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
)
.get();
assertThat(putMappingResponse.isAcknowledged(), is(true));
assertResponse(
prepareSearch(INDEX).suggest(
new SuggestBuilder().addSuggestion("suggs", SuggestBuilders.completionSuggestion(FIELD + ".suggest").text("f").size(10))
),
response -> assertSuggestions(response, "suggs")
);
prepareIndex(INDEX).setId("1")
.setRefreshPolicy(IMMEDIATE)
.setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject())
.get();
ensureGreen(INDEX);
assertResponse(
prepareSearch(INDEX).suggest(
new SuggestBuilder().addSuggestion("suggs", SuggestBuilders.completionSuggestion(FIELD + ".suggest").text("f").size(10))
),
response -> assertSuggestions(response, "suggs", "Foo Fighters")
);
}
public void testThatFuzzySuggesterWorks() throws Exception {
createIndexAndMapping(completionMappingBuilder);
prepareIndex(INDEX).setId("1")
.setSource(
jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject()
)
.get();
refresh();
assertResponse(
prepareSearch(INDEX).suggest(
new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(FIELD).prefix("Nirv").size(10))
),
response -> assertSuggestions(response, false, "foo", "Nirvana")
);
assertResponse(
prepareSearch(INDEX).suggest(
new SuggestBuilder().addSuggestion(
"foo",
SuggestBuilders.completionSuggestion(FIELD).prefix("Nirw", Fuzziness.ONE).size(10)
)
),
response -> assertSuggestions(response, false, "foo", "Nirvana")
);
}
public void testThatFuzzySuggesterSupportsEditDistances() throws Exception {
createIndexAndMapping(completionMappingBuilder);
prepareIndex(INDEX).setId("1")
.setSource(
jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject()
)
.get();
refresh();
// edit distance 1
assertResponse(
prepareSearch(INDEX).suggest(
new SuggestBuilder().addSuggestion(
"foo",
SuggestBuilders.completionSuggestion(FIELD).prefix("Norw", Fuzziness.ONE).size(10)
)
),
response -> assertSuggestions(response, false, "foo")
);
// edit distance 2
assertResponse(
prepareSearch(INDEX).suggest(
new SuggestBuilder().addSuggestion(
"foo",
SuggestBuilders.completionSuggestion(FIELD).prefix("Norw", Fuzziness.TWO).size(10)
)
),
response -> assertSuggestions(response, false, "foo", "Nirvana")
);
}
public void testThatFuzzySuggesterSupportsTranspositions() throws Exception {
createIndexAndMapping(completionMappingBuilder);
prepareIndex(INDEX).setId("1")
.setSource(
jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject()
)
.get();
refresh();
assertResponse(
prepareSearch(INDEX).suggest(
new SuggestBuilder().addSuggestion(
"foo",
SuggestBuilders.completionSuggestion(FIELD)
.prefix("Nriv", FuzzyOptions.builder().setTranspositions(false).build())
.size(10)
)
),
response -> assertSuggestions(response, false, "foo")
);
assertResponse(
prepareSearch(INDEX).suggest(
new SuggestBuilder().addSuggestion(
"foo",
SuggestBuilders.completionSuggestion(FIELD).prefix("Nriv", Fuzziness.ONE).size(10)
)
),
response -> assertSuggestions(response, false, "foo", "Nirvana")
);
}
public void testThatFuzzySuggesterSupportsMinPrefixLength() throws Exception {
createIndexAndMapping(completionMappingBuilder);
prepareIndex(INDEX).setId("1")
.setSource(
jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject()
)
.get();
refresh();
assertResponse(
prepareSearch(INDEX).suggest(
new SuggestBuilder().addSuggestion(
"foo",
SuggestBuilders.completionSuggestion(FIELD)
.prefix("Nriva", FuzzyOptions.builder().setFuzzyMinLength(6).build())
.size(10)
)
),
response -> assertSuggestions(response, false, "foo")
);
assertResponse(
prepareSearch(INDEX).suggest(
new SuggestBuilder().addSuggestion(
"foo",
SuggestBuilders.completionSuggestion(FIELD)
.prefix("Nrivan", FuzzyOptions.builder().setFuzzyMinLength(6).build())
.size(10)
)
),
response -> assertSuggestions(response, false, "foo", "Nirvana")
);
}
public void testThatFuzzySuggesterSupportsNonPrefixLength() throws Exception {
createIndexAndMapping(completionMappingBuilder);
prepareIndex(INDEX).setId("1")
.setSource(
jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject()
)
.get();
refresh();
assertResponse(
prepareSearch(INDEX).suggest(
new SuggestBuilder().addSuggestion(
"foo",
SuggestBuilders.completionSuggestion(FIELD)
.prefix("Nirw", FuzzyOptions.builder().setFuzzyPrefixLength(4).build())
.size(10)
)
),
response -> assertSuggestions(response, false, "foo")
);
assertResponse(
prepareSearch(INDEX).suggest(
new SuggestBuilder().addSuggestion(
"foo",
SuggestBuilders.completionSuggestion(FIELD)
.prefix("Nirvo", FuzzyOptions.builder().setFuzzyPrefixLength(4).build())
.size(10)
)
),
response -> assertSuggestions(response, false, "foo", "Nirvana")
);
}
public void testThatFuzzySuggesterIsUnicodeAware() throws Exception {
createIndexAndMapping(completionMappingBuilder);
prepareIndex(INDEX).setId("1")
.setSource(jsonBuilder().startObject().startObject(FIELD).startArray("input").value("ööööö").endArray().endObject().endObject())
.get();
refresh();
// suggestion with a character, which needs unicode awareness
org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder completionSuggestionBuilder = SuggestBuilders
.completionSuggestion(FIELD)
.prefix("öööи", FuzzyOptions.builder().setUnicodeAware(true).build())
.size(10);
assertResponse(
prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", completionSuggestionBuilder)),
response -> assertSuggestions(response, false, "foo", "ööööö")
);
// removing unicode awareness leads to no result
completionSuggestionBuilder = SuggestBuilders.completionSuggestion(FIELD)
.prefix("öööи", FuzzyOptions.builder().setUnicodeAware(false).build())
.size(10);
assertResponse(
prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", completionSuggestionBuilder)),
response -> assertSuggestions(response, false, "foo")
);
// increasing edit distance instead of unicode awareness works again, as this is only a single character
completionSuggestionBuilder = SuggestBuilders.completionSuggestion(FIELD)
.prefix("öööи", FuzzyOptions.builder().setUnicodeAware(false).setFuzziness(Fuzziness.TWO).build())
.size(10);
assertResponse(
prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", completionSuggestionBuilder)),
response -> assertSuggestions(response, false, "foo", "ööööö")
);
}
public void testThatStatsAreWorking() throws Exception {
String otherField = "testOtherField";
indicesAdmin().prepareCreate(INDEX).setSettings(indexSettings(2, 0)).get();
ensureGreen();
AcknowledgedResponse putMappingResponse = indicesAdmin().preparePutMapping(INDEX)
.setSource(
jsonBuilder().startObject()
.startObject("_doc")
.startObject("properties")
.startObject(FIELD)
.field("type", "completion")
.field("analyzer", "simple")
.endObject()
.startObject(otherField)
.field("type", "completion")
.field("analyzer", "simple")
.endObject()
.endObject()
.endObject()
.endObject()
)
.get();
assertThat(putMappingResponse.isAcknowledged(), is(true));
// Index two entities
prepareIndex(INDEX).setId("1")
.setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").field(otherField, "WHATEVER").endObject())
.get();
prepareIndex(INDEX).setId("2")
.setSource(jsonBuilder().startObject().field(FIELD, "Bar Fighters").field(otherField, "WHATEVER2").endObject())
.get();
refresh();
ensureGreen();
// load the fst index into ram
prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(FIELD).prefix("f")))
.get()
.decRef();
prepareSearch(INDEX).suggest(
new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(otherField).prefix("f"))
).get().decRef();
// Get all stats
IndicesStatsResponse indicesStatsResponse = indicesAdmin().prepareStats(INDEX).setIndices(INDEX).setCompletion(true).get();
CompletionStats completionStats = indicesStatsResponse.getIndex(INDEX).getPrimaries().completion;
assertThat(completionStats, notNullValue());
long totalSizeInBytes = completionStats.getSizeInBytes();
assertThat(totalSizeInBytes, is(greaterThan(0L)));
IndicesStatsResponse singleFieldStats = indicesAdmin().prepareStats(INDEX)
.setIndices(INDEX)
.setCompletion(true)
.setCompletionFields(FIELD)
.get();
long singleFieldSizeInBytes = singleFieldStats.getIndex(INDEX).getPrimaries().completion.getFields().get(FIELD);
IndicesStatsResponse otherFieldStats = indicesAdmin().prepareStats(INDEX)
.setIndices(INDEX)
.setCompletion(true)
.setCompletionFields(otherField)
.get();
long otherFieldSizeInBytes = otherFieldStats.getIndex(INDEX).getPrimaries().completion.getFields().get(otherField);
assertThat(singleFieldSizeInBytes + otherFieldSizeInBytes, is(totalSizeInBytes));
// regexes
IndicesStatsResponse regexFieldStats = indicesAdmin().prepareStats(INDEX)
.setIndices(INDEX)
.setCompletion(true)
.setCompletionFields("*")
.get();
FieldMemoryStats fields = regexFieldStats.getIndex(INDEX).getPrimaries().completion.getFields();
long regexSizeInBytes = fields.get(FIELD) + fields.get(otherField);
assertThat(regexSizeInBytes, is(totalSizeInBytes));
}
    // Sorting on a completion field is unsupported; verify the request fails fast with a
    // 400 status and an actionable message rather than an opaque shard failure.
    public void testThatSortingOnCompletionFieldReturnsUsefulException() throws Exception {
        createIndexAndMapping(completionMappingBuilder);
        prepareIndex(INDEX).setId("1")
            .setSource(
                jsonBuilder().startObject().startObject(FIELD).startArray("input").value("Nirvana").endArray().endObject().endObject()
            )
            .get();
        refresh();
        // the whole search phase fails because completion fields expose no fielddata to sort on
        SearchPhaseExecutionException e = expectThrows(
            SearchPhaseExecutionException.class,
            prepareSearch(INDEX).addSort(new FieldSortBuilder(FIELD))
        );
        assertThat(e.status().getStatus(), is(400));
        assertThat(e.toString(), containsString("Fielddata is not supported on field [" + FIELD + "] of type [completion]"));
    }
    // Exercises a search analyzer with a stop filter (remove_trailing=false): completed
    // stop words are kept at query time, while a removed stop word's position increment
    // breaks prefix suggestions (see the final assertion).
    public void testThatSuggestStopFilterWorks() throws Exception {
        // custom analyzer "stoptest" = standard tokenizer + stop filter that keeps trailing stop words
        Settings.Builder settingsBuilder = Settings.builder()
            .put("index.analysis.analyzer.stoptest.tokenizer", "standard")
            .putList("index.analysis.analyzer.stoptest.filter", "suggest_stop_filter")
            .put("index.analysis.filter.suggest_stop_filter.type", "stop")
            .put("index.analysis.filter.suggest_stop_filter.remove_trailing", false);
        CompletionMappingBuilder builder = new CompletionMappingBuilder();
        builder.preserveSeparators(true).preservePositionIncrements(true);
        // index with "simple" but search with the stop-filtered analyzer so only the
        // query side drops stop words
        builder.searchAnalyzer("stoptest");
        builder.indexAnalyzer("simple");
        createIndexAndMappingAndSettings(settingsBuilder.build(), builder);
        prepareIndex(INDEX).setId("1")
            .setSource(
                jsonBuilder().startObject()
                    .startObject(FIELD)
                    .startArray("input")
                    .value("Feed trolls")
                    .endArray()
                    .field("weight", 5)
                    .endObject()
                    .endObject()
            )
            .get();
        // Higher weight so it's ranked first:
        prepareIndex(INDEX).setId("2")
            .setSource(
                jsonBuilder().startObject()
                    .startObject(FIELD)
                    .startArray("input")
                    .value("Feed the trolls")
                    .endArray()
                    .field("weight", 10)
                    .endObject()
                    .endObject()
            )
            .get();
        refresh();
        assertSuggestions("f", "Feed the trolls", "Feed trolls");
        assertSuggestions("fe", "Feed the trolls", "Feed trolls");
        assertSuggestions("fee", "Feed the trolls", "Feed trolls");
        assertSuggestions("feed", "Feed the trolls", "Feed trolls");
        assertSuggestions("feed t", "Feed the trolls", "Feed trolls");
        assertSuggestions("feed the", "Feed the trolls");
        // stop word complete, gets ignored on query time, makes it "feed" only
        assertSuggestions("feed the ", "Feed the trolls", "Feed trolls");
        // stopword gets removed, but position increment kicks in, which doesnt work for the prefix suggester
        assertSuggestions("feed the t");
    }
public void testThatIndexingInvalidFieldsInCompletionFieldResultsInException() throws Exception {
CompletionMappingBuilder builder = new CompletionMappingBuilder();
createIndexAndMapping(builder);
try {
prepareIndex(INDEX).setId("1")
.setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.startArray("FRIGGININVALID")
.value("Nirvana")
.endArray()
.endObject()
.endObject()
)
.get();
fail("Expected Exception");
} catch (DocumentParsingException e) {
assertThat(e.getMessage(), containsString("failed to parse"));
}
}
public void testSkipDuplicates() throws Exception {
final CompletionMappingBuilder mapping = new CompletionMappingBuilder();
createIndexAndMapping(mapping);
int numDocs = randomIntBetween(10, 100);
int numUnique = randomIntBetween(1, numDocs);
List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>();
int[] weights = new int[numUnique];
Integer[] termIds = new Integer[numUnique];
for (int i = 1; i <= numDocs; i++) {
int id = i % numUnique;
termIds[id] = id;
int weight = randomIntBetween(0, 100);
weights[id] = Math.max(weight, weights[id]);
String suggestion = "suggestion-" + String.format(Locale.ENGLISH, "%03d", id);
indexRequestBuilders.add(
prepareIndex(INDEX).setSource(
jsonBuilder().startObject()
.startObject(FIELD)
.field("input", suggestion)
.field("weight", weight)
.endObject()
.endObject()
)
);
}
indexRandom(true, indexRequestBuilders);
Arrays.sort(termIds, Comparator.comparingInt(o -> weights[(int) o]).reversed().thenComparingInt(a -> (int) a));
String[] expected = new String[numUnique];
for (int i = 0; i < termIds.length; i++) {
expected[i] = "suggestion-" + String.format(Locale.ENGLISH, "%03d", termIds[i]);
}
CompletionSuggestionBuilder completionSuggestionBuilder = SuggestBuilders.completionSuggestion(FIELD)
.prefix("sugg")
.skipDuplicates(true)
.size(numUnique);
assertResponse(
prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("suggestions", completionSuggestionBuilder)),
response -> assertSuggestions(response, true, "suggestions", expected)
);
}
public void assertSuggestions(String suggestionName, SuggestionBuilder<?> suggestBuilder, String... suggestions) {
assertResponse(
prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion(suggestionName, suggestBuilder)),
response -> assertSuggestions(response, suggestionName, suggestions)
);
}
public void assertSuggestions(String suggestion, String... suggestions) {
String suggestionName = RandomStrings.randomAsciiLettersOfLength(random(), 10);
CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion(FIELD).text(suggestion).size(10);
assertSuggestions(suggestionName, suggestionBuilder, suggestions);
}
public void assertSuggestionsNotInOrder(String suggestString, String... suggestions) {
String suggestionName = RandomStrings.randomAsciiLettersOfLength(random(), 10);
assertResponse(
prepareSearch(INDEX).suggest(
new SuggestBuilder().addSuggestion(suggestionName, SuggestBuilders.completionSuggestion(FIELD).text(suggestString).size(10))
),
response -> assertSuggestions(response, false, suggestionName, suggestions)
);
}
    // Strict-order variant: delegates with suggestionOrderStrict=true.
    static void assertSuggestions(SearchResponse searchResponse, String name, String... suggestions) {
        assertSuggestions(searchResponse, true, name, suggestions);
    }
    /**
     * Core assertion: the response must contain a suggestion with the given name, whose first
     * entry has exactly {@code suggestions.length} options matching {@code suggestions} —
     * in order when {@code suggestionOrderStrict} is true, otherwise as a set.
     */
    private static void assertSuggestions(
        SearchResponse searchResponse,
        boolean suggestionOrderStrict,
        String name,
        String... suggestions
    ) {
        assertAllSuccessful(searchResponse);
        // collect all suggestion names first so a failed lookup produces a helpful message
        List<String> suggestionNames = new ArrayList<>();
        for (Suggest.Suggestion<
            ? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> suggestion : iterableAsArrayList(
                searchResponse.getSuggest()
            )) {
            suggestionNames.add(suggestion.getName());
        }
        String expectFieldInResponseMsg = String.format(
            Locale.ROOT,
            "Expected suggestion named %s in response, got %s",
            name,
            suggestionNames
        );
        assertThat(expectFieldInResponseMsg, searchResponse.getSuggest().getSuggestion(name), is(notNullValue()));
        Suggest.Suggestion<Suggest.Suggestion.Entry<Suggest.Suggestion.Entry.Option>> suggestion = searchResponse.getSuggest()
            .getSuggestion(name);
        // only the first entry is inspected; these tests issue a single suggest text
        List<String> suggestionList = getNames(suggestion.getEntries().get(0));
        List<Suggest.Suggestion.Entry.Option> options = suggestion.getEntries().get(0).getOptions();
        String assertMsg = String.format(
            Locale.ROOT,
            "Expected options %s length to be %s, but was %s",
            suggestionList,
            suggestions.length,
            options.size()
        );
        assertThat(assertMsg, options.size(), is(suggestions.length));
        if (suggestionOrderStrict) {
            // positional comparison, including the score in the failure message for debugging
            for (int i = 0; i < suggestions.length; i++) {
                String errMsg = String.format(
                    Locale.ROOT,
                    "Expected elem %s in list %s to be [%s] score: %s",
                    i,
                    suggestionList,
                    suggestions[i],
                    options.get(i).getScore()
                );
                assertThat(errMsg, options.get(i).getText().toString(), is(suggestions[i]));
            }
        } else {
            // membership-only comparison; size was already asserted above
            for (String expectedSuggestion : suggestions) {
                String errMsg = Strings.format("Expected elem %s to be in list %s", expectedSuggestion, suggestionList);
                assertThat(errMsg, suggestionList, hasItem(expectedSuggestion));
            }
        }
    }
private static List<String> getNames(Suggest.Suggestion.Entry<Suggest.Suggestion.Entry.Option> suggestEntry) {
List<String> names = new ArrayList<>();
for (Suggest.Suggestion.Entry.Option entry : suggestEntry.getOptions()) {
names.add(entry.getText().string());
}
return names;
}
    /**
     * Creates INDEX with the given settings and a mapping derived from {@code builder}:
     * two keyword fields plus a completion field configured with the builder's analyzers,
     * separator/position-increment flags and optional context mappings.
     */
    private void createIndexAndMappingAndSettings(Settings settings, CompletionMappingBuilder builder) throws IOException {
        XContentBuilder mapping = jsonBuilder().startObject()
            .startObject("_doc")
            .startObject("properties")
            .startObject("test_field")
            .field("type", "keyword")
            .endObject()
            .startObject("title")
            .field("type", "keyword")
            .endObject()
            .startObject(FIELD)
            .field("type", "completion")
            .field("analyzer", builder.indexAnalyzer)
            .field("search_analyzer", builder.searchAnalyzer)
            .field("preserve_separators", builder.preserveSeparators)
            .field("preserve_position_increments", builder.preservePositionIncrements);
        if (builder.contextMappings != null) {
            // emit one context object per mapping; GEO contexts additionally carry a precision
            mapping = mapping.startArray("contexts");
            for (Map.Entry<String, ContextMapping<?>> contextMapping : builder.contextMappings.entrySet()) {
                mapping = mapping.startObject()
                    .field("name", contextMapping.getValue().name())
                    .field("type", contextMapping.getValue().type().name());
                mapping = switch (contextMapping.getValue().type()) {
                    case CATEGORY -> mapping.field("path", ((CategoryContextMapping) contextMapping.getValue()).getFieldName());
                    case GEO -> mapping.field("path", ((GeoContextMapping) contextMapping.getValue()).getFieldName())
                        .field("precision", ((GeoContextMapping) contextMapping.getValue()).getPrecision());
                };
                mapping = mapping.endObject();
            }
            mapping = mapping.endArray();
        }
        mapping = mapping.endObject().endObject().endObject().endObject();
        assertAcked(
            indicesAdmin().prepareCreate(INDEX).setSettings(Settings.builder().put(indexSettings()).put(settings)).setMapping(mapping)
        );
    }
    // Convenience overload: create the index with default (empty) settings.
    private void createIndexAndMapping(CompletionMappingBuilder builder) throws IOException {
        createIndexAndMappingAndSettings(Settings.EMPTY, builder);
    }
    // see #3555
    // After a force-merge, the surviving segment may contain no value for the completion
    // field at all; the suggester must tolerate such segments instead of failing.
    public void testPrunedSegments() throws IOException {
        // single shard, no replicas, so force-merge reliably yields exactly one segment
        createIndexAndMappingAndSettings(indexSettings(1, 0).build(), completionMappingBuilder);
        prepareIndex(INDEX).setId("1")
            .setSource(
                jsonBuilder().startObject().startObject(FIELD).startArray("input").value("The Beatles").endArray().endObject().endObject()
            )
            .get();
        // we have 2 docs in a segment...
        prepareIndex(INDEX).setId("2").setSource(jsonBuilder().startObject().field("somefield", "somevalue").endObject()).get();
        BroadcastResponse actionGet = indicesAdmin().prepareForceMerge().setFlush(true).setMaxNumSegments(1).get();
        assertAllSuccessful(actionGet);
        refresh();
        // update the first one and then merge.. the target segment will have no value in FIELD
        prepareIndex(INDEX).setId("1").setSource(jsonBuilder().startObject().field("somefield", "somevalue").endObject()).get();
        actionGet = indicesAdmin().prepareForceMerge().setFlush(true).setMaxNumSegments(1).get();
        assertAllSuccessful(actionGet);
        refresh();
        // no suggestion should match, and the search itself must not blow up
        assertSuggestions("b");
        assertHitCount(prepareSearch(INDEX).setSize(0), 2);
        for (IndexShardSegments seg : indicesAdmin().prepareSegments().get().getIndices().get(INDEX)) {
            ShardSegments[] shards = seg.shards();
            for (ShardSegments shardSegments : shards) {
                assertThat(shardSegments.getSegments().size(), equalTo(1));
            }
        }
    }
    // see #3596
    // Indexing an extremely long suggestion input must succeed thanks to the default
    // max_input_length truncation (historically this caused a stack overflow).
    public void testVeryLongInput() throws IOException {
        assertAcked(
            indicesAdmin().prepareCreate(INDEX)
                .setMapping(
                    jsonBuilder().startObject()
                        .startObject("_doc")
                        .startObject("properties")
                        .startObject(FIELD)
                        .field("type", "completion")
                        .endObject()
                        .endObject()
                        .endObject()
                        .endObject()
                )
        );
        // can cause stack overflow without the default max_input_length
        String longString = replaceReservedChars(randomRealisticUnicodeOfLength(randomIntBetween(5000, 10000)), (char) 0x01);
        prepareIndex(INDEX).setId("1")
            .setSource(
                jsonBuilder().startObject().startObject(FIELD).startArray("input").value(longString).endArray().endObject().endObject()
            )
            .setRefreshPolicy(IMMEDIATE)
            .get();
    }
    // see #3648
    // Inputs containing characters the suggester reserves internally (e.g. NUL) must be
    // rejected with a parse failure instead of being silently indexed.
    public void testReservedChars() throws IOException {
        assertAcked(
            indicesAdmin().prepareCreate(INDEX)
                .setMapping(
                    jsonBuilder().startObject()
                        .startObject("_doc")
                        .startObject("properties")
                        .startObject(FIELD)
                        .field("type", "completion")
                        .endObject()
                        .endObject()
                        .endObject()
                        .endObject()
                )
        );
        // embed a reserved NUL (0x00) in the middle of an otherwise valid input
        String string = "foo" + (char) 0x00 + "bar";
        Exception e = expectThrows(
            DocumentParsingException.class,
            prepareIndex(INDEX).setId("1")
                .setSource(
                    jsonBuilder().startObject()
                        .startObject(FIELD)
                        .startArray("input")
                        .value(string)
                        .endArray()
                        .field("output", "foobar")
                        .endObject()
                        .endObject()
                )
        );
        assertThat(e.getMessage(), containsString("failed to parse"));
    }
    // see #5930
    // Aggregating (terms) on a completion field must fail with a clear "fielddata not
    // supported" error rather than an obscure shard exception.
    public void testIssue5930() throws IOException {
        assertAcked(
            indicesAdmin().prepareCreate(INDEX)
                .setMapping(
                    jsonBuilder().startObject()
                        .startObject("_doc")
                        .startObject("properties")
                        .startObject(FIELD)
                        .field("type", "completion")
                        .endObject()
                        .endObject()
                        .endObject()
                        .endObject()
                )
        );
        String string = "foo bar";
        prepareIndex(INDEX).setId("1")
            .setSource(jsonBuilder().startObject().field(FIELD, string).endObject())
            .setRefreshPolicy(IMMEDIATE)
            .get();
        SearchPhaseExecutionException e = expectThrows(
            SearchPhaseExecutionException.class,
            prepareSearch(INDEX).addAggregation(
                AggregationBuilders.terms("suggest_agg").field(FIELD).collectMode(randomFrom(SubAggCollectionMode.values()))
            )
        );
        assertThat(e.toString(), containsString("Fielddata is not supported on field [" + FIELD + "] of type [completion]"));
    }
    // Each doc contributes three inputs with weight == doc number; with a shard size of 15
    // the top five results must be the highest-weighted "suggester*" entries, descending.
    public void testMultiDocSuggestions() throws Exception {
        final CompletionMappingBuilder mapping = new CompletionMappingBuilder();
        createIndexAndMapping(mapping);
        int numDocs = 10;
        List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>();
        for (int i = 1; i <= numDocs; i++) {
            indexRequestBuilders.add(
                prepareIndex(INDEX).setId("" + i)
                    .setSource(
                        jsonBuilder().startObject()
                            .startObject(FIELD)
                            .array("input", "suggestion" + i, "suggestions" + i, "suggester" + i)
                            .field("weight", i)
                            .endObject()
                            .endObject()
                    )
            );
        }
        indexRandom(true, indexRequestBuilders);
        CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg").shardSize(15);
        assertSuggestions("foo", prefix, "suggester10", "suggester9", "suggester8", "suggester7", "suggester6");
    }
    // A completion suggestion issued against a field alias must resolve to the aliased
    // completion field and return its matches.
    public void testSuggestWithFieldAlias() throws Exception {
        XContentBuilder mapping = XContentFactory.jsonBuilder()
            .startObject()
            .startObject("_doc")
            .startObject("properties")
            .startObject(FIELD)
            .field("type", "completion")
            .endObject()
            .startObject("alias")
            .field("type", "alias")
            .field("path", FIELD)
            .endObject()
            .endObject()
            .endObject()
            .endObject();
        assertAcked(prepareCreate(INDEX).setMapping(mapping));
        List<IndexRequestBuilder> builders = new ArrayList<>();
        builders.add(prepareIndex(INDEX).setSource(FIELD, "apple"));
        builders.add(prepareIndex(INDEX).setSource(FIELD, "mango"));
        builders.add(prepareIndex(INDEX).setSource(FIELD, "papaya"));
        indexRandom(true, false, builders);
        // suggest via the alias, not the concrete field
        CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion("alias").text("app");
        assertSuggestions("suggestion", suggestionBuilder, "apple");
    }
    // A suggest-only request with explain=true must still return suggestions correctly
    // (explain has no hits to annotate but must not break the suggest phase).
    public void testSuggestOnlyExplain() throws Exception {
        final CompletionMappingBuilder mapping = new CompletionMappingBuilder();
        createIndexAndMapping(mapping);
        int numDocs = 10;
        List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>();
        for (int i = 1; i <= numDocs; i++) {
            indexRequestBuilders.add(
                prepareIndex(INDEX).setId("" + i)
                    .setSource(
                        jsonBuilder().startObject()
                            .startObject(FIELD)
                            .field("input", "suggestion" + i)
                            .field("weight", i)
                            .endObject()
                            .endObject()
                    )
            );
        }
        indexRandom(true, indexRequestBuilders);
        CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
        assertResponse(
            prepareSearch(INDEX).setExplain(true).suggest(new SuggestBuilder().addSuggestion("foo", prefix)),
            response -> assertSuggestions(response, "foo", "suggestion10", "suggestion9", "suggestion8", "suggestion7", "suggestion6")
        );
    }
    // Combining field collapsing with a completion suggestion: collapsing (plus from=1)
    // leaves zero hits, but the suggestion must still surface the top-weighted entry.
    public void testCompletionWithCollapse() throws Exception {
        String suggestField = "suggest_field";
        XContentBuilder mapping = jsonBuilder().startObject()
            .startObject("properties")
            .startObject("collapse_field")
            .field("type", "keyword")
            .endObject()
            .startObject(suggestField)
            .field("type", "completion")
            .field("analyzer", "whitespace")
            .endObject()
            .endObject()
            .endObject();
        String index = "test";
        assertAcked(
            indicesAdmin().prepareCreate(index).setSettings(Settings.builder().put("index.number_of_shards", 2)).setMapping(mapping)
        );
        int numDocs = 2;
        for (int i = 0; i < numDocs; i++) {
            XContentBuilder builder = jsonBuilder().startObject();
            builder.startObject(suggestField).field("input", "suggestion" + i).field("weight", i).endObject();
            builder.field("collapse_field", "collapse me").endObject(); // all docs the same value for collapsing
            prepareIndex(index).setId("" + i).setSource(builder).get();
        }
        indicesAdmin().prepareRefresh(index).get();
        CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(suggestField).prefix("sug").size(1);
        assertResponse(
            prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())
                .setFrom(1)
                .setSize(1)
                .setCollapse(new CollapseBuilder("collapse_field"))
                .suggest(new SuggestBuilder().addSuggestion("the_suggestion", prefix)),
            response -> {
                assertAllSuccessful(response);
                assertThat(response.getSuggest().getSuggestion("the_suggestion"), is(notNullValue()));
                Suggest.Suggestion<Suggest.Suggestion.Entry<Suggest.Suggestion.Entry.Option>> suggestion = response.getSuggest()
                    .getSuggestion("the_suggestion");
                List<String> suggestionList = getNames(suggestion.getEntries().get(0));
                // size(1) keeps only the highest-weighted suggestion: the last doc indexed
                assertThat(suggestionList, contains("suggestion" + (numDocs - 1)));
                // collapsing one group + from=1 skips the single collapsed hit
                assertEquals(0, response.getHits().getHits().length);
            }
        );
    }
public static boolean isReservedChar(char c) {
switch (c) {
case '\u001F':
case TokenStreamToAutomaton.HOLE:
case 0x0:
case ContextSuggestField.CONTEXT_SEPARATOR:
return true;
default:
return false;
}
}
private static String replaceReservedChars(String input, char replacement) {
char[] charArray = input.toCharArray();
for (int i = 0; i < charArray.length; i++) {
if (isReservedChar(charArray[i])) {
charArray[i] = replacement;
}
}
return new String(charArray);
}
static
|
CompletionSuggestSearchIT
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/postgresql/ast/stmt/PGStartTransactionStatement.java
|
{
"start": 890,
"end": 1553
}
|
class ____ extends SQLStatementImpl implements PGSQLStatement {
public PGStartTransactionStatement() {
super(DbType.postgresql);
}
private boolean useBegin;
public boolean isUseBegin() {
return useBegin;
}
public void setUseBegin(boolean useBegin) {
this.useBegin = useBegin;
}
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor instanceof PGASTVisitor) {
accept0((PGASTVisitor) visitor);
}
}
@Override
public void accept0(PGASTVisitor visitor) {
visitor.visit(this);
visitor.endVisit(this);
}
}
|
PGStartTransactionStatement
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/providers/serialisers/jsonp/JsonpUtil.java
|
{
"start": 467,
"end": 1122
}
|
class ____ {
private static final JsonReaderFactory jsonReaderFactory = Json.createReaderFactory(null);
private static final JsonWriterFactory jsonWriterFactory = Json.createWriterFactory(null);
public static JsonReader reader(InputStream entityStream, MediaType mediaType) {
return jsonReaderFactory.createReader(entityStream, Charset.forName(MessageReaderUtil.charsetFromMediaType(mediaType)));
}
public static JsonWriter writer(OutputStream entityStream, MediaType mediaType) {
return jsonWriterFactory.createWriter(entityStream, Charset.forName(MessageReaderUtil.charsetFromMediaType(mediaType)));
}
}
|
JsonpUtil
|
java
|
apache__flink
|
flink-examples/flink-examples-table/src/test/java/org/apache/flink/table/examples/java/functions/AdvancedFunctionsExampleITCase.java
|
{
"start": 1083,
"end": 2553
}
|
class ____ extends ExampleOutputTestBase {
@Test
void testExample() throws Exception {
AdvancedFunctionsExample.main(new String[0]);
final String consoleOutput = getOutputString();
testExecuteLastDatedValueFunction(consoleOutput);
testExecuteInternalRowMergerFunction(consoleOutput);
}
private void testExecuteLastDatedValueFunction(String consoleOutput) {
assertThat(consoleOutput)
.contains("| Guillermo Smith | (5, 2020-12-05) |")
.contains("| John Turner | (12, 2020-10-02) |")
.contains("| Brandy Sanders | (1, 2020-10-14) |")
.contains("| Valeria Mendoza | (10, 2020-06-02) |")
.contains("| Ellen Ortega | (100, 2020-06-18) |")
.contains("| Leann Holloway | (9, 2020-05-26) |");
}
private void testExecuteInternalRowMergerFunction(String consoleOutput) {
assertThat(consoleOutput)
.contains("| Guillermo Smith | (1992-12-12, New Jersey, 81... |")
.contains("| Valeria Mendoza | (1970-03-28, Los Angeles, 9... |")
.contains("| Leann Holloway | (1989-05-21, Eugene, 614-88... |");
}
}
|
AdvancedFunctionsExampleITCase
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/repositories/RepositoryException.java
|
{
"start": 747,
"end": 2553
}
|
class ____ extends ElasticsearchException {
private final String repository;
/**
* Construct a <code>RepositoryException</code> with the specified detail message.
*
* The message can be parameterized using <code>{}</code> as placeholders for the given
* arguments.
*
* @param repository the repository name
* @param msg the detail message
* @param args the arguments for the message
*/
public RepositoryException(String repository, String msg, Object... args) {
this(repository, msg, (Throwable) null, args);
}
/**
* Construct a <code>RepositoryException</code> with the specified detail message
* and nested exception.
*
* The message can be parameterized using <code>{}</code> as placeholders for the given
* arguments.
*
* @param repository the repository name
* @param msg the detail message
* @param cause the nested exception
* @param args the arguments for the message
*/
public RepositoryException(String repository, String msg, Throwable cause, Object... args) {
super("[" + (repository == null ? "_na" : repository) + "] " + msg, cause, args);
this.repository = repository;
}
/**
* Returns repository name
*
* @return repository name
*/
public String repository() {
return repository;
}
public RepositoryException(StreamInput in) throws IOException {
super(in);
repository = in.readOptionalString();
}
@Override
protected void writeTo(StreamOutput out, Writer<Throwable> nestedExceptionsWriter) throws IOException {
super.writeTo(out, nestedExceptionsWriter);
out.writeOptionalString(repository);
}
}
|
RepositoryException
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenAction.java
|
{
"start": 419,
"end": 711
}
|
class ____ extends ActionType<CreateTokenResponse> {
public static final String NAME = "cluster:admin/xpack/security/token/create";
public static final CreateTokenAction INSTANCE = new CreateTokenAction();
private CreateTokenAction() {
super(NAME);
}
}
|
CreateTokenAction
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/refaster/UStaticIdent.java
|
{
"start": 1145,
"end": 2522
}
|
class ____ extends UIdent {
public static UStaticIdent create(UClassIdent classIdent, CharSequence member, UType memberType) {
return new AutoValue_UStaticIdent(classIdent, StringName.of(member), memberType);
}
public static UStaticIdent create(String qualifiedClass, CharSequence member, UType memberType) {
return create(UClassIdent.create(qualifiedClass), member, memberType);
}
public static UStaticIdent create(ClassSymbol classSym, CharSequence member, UType memberType) {
return create(UClassIdent.create(classSym), member, memberType);
}
abstract UClassIdent classIdent();
@Override
public abstract StringName getName();
abstract UType memberType();
@Override
public JCExpression inline(Inliner inliner) throws CouldNotResolveImportException {
return inliner
.importPolicy()
.staticReference(
inliner, classIdent().getTopLevelClass(), classIdent().getName(), getName());
}
@Override
protected Choice<Unifier> defaultAction(Tree node, Unifier unifier) {
Symbol symbol = ASTHelpers.getSymbol(node);
if (symbol != null) {
return classIdent()
.unify(symbol.getEnclosingElement(), unifier)
.flatMap(unifications(getName(), symbol.getSimpleName()))
.flatMap(unifications(memberType(), symbol.asType()));
}
return Choice.none();
}
}
|
UStaticIdent
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/bind/support/SimpleSessionStatus.java
|
{
"start": 859,
"end": 1090
}
|
class ____ implements SessionStatus {
private boolean complete = false;
@Override
public void setComplete() {
this.complete = true;
}
@Override
public boolean isComplete() {
return this.complete;
}
}
|
SimpleSessionStatus
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/LiteByteStringUtf8Test.java
|
{
"start": 1287,
"end": 1789
}
|
class ____ {
void main(com.google.protobuf.MessageLite m) {
// BUG: Diagnostic contains: ByteString
String s = m.toByteString().toStringUtf8();
}
}
""")
.doTest();
}
@Test
public void negativeCase() {
compilationHelper
.addSourceLines(
"Foo.java",
"""
import com.google.protobuf.ByteString;
import com.google.protobuf.MessageLite;
|
Foo
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/cut/Transaction.java
|
{
"start": 169,
"end": 637
}
|
class ____ {
private Long id;
private String description;
private MonetoryAmount value;
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public MonetoryAmount getValue() {
return value;
}
public void setValue(MonetoryAmount value) {
this.value = value;
}
}
|
Transaction
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/support/AbstractAutowireCapableBeanFactory.java
|
{
"start": 38129,
"end": 38300
}
|
class ____ hierarchy...
return getTypeForFactoryBeanFromMethod(mbd.getBeanClass(), factoryMethodName);
}
// For regular beans, try the target type and bean
|
inheritance
|
java
|
spring-projects__spring-boot
|
configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationsample/immutable/ImmutablePrimitiveProperties.java
|
{
"start": 817,
"end": 1443
}
|
class ____ {
private final boolean flag;
private final byte octet;
private final char letter;
private final short number;
private final int counter;
private final long value;
private final float percentage;
private final double ratio;
public ImmutablePrimitiveProperties(boolean flag, byte octet, char letter, short number, int counter, long value,
float percentage, double ratio) {
this.flag = flag;
this.octet = octet;
this.letter = letter;
this.number = number;
this.counter = counter;
this.value = value;
this.percentage = percentage;
this.ratio = ratio;
}
}
|
ImmutablePrimitiveProperties
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/scheduler/TwoSchedulerConcurrentTasksTest.java
|
{
"start": 983,
"end": 1761
}
|
class ____ extends ContextTestSupport {
@Test
public void testTwoScheduler() throws Exception {
getMockEndpoint("mock:a").expectedMinimumMessageCount(4);
getMockEndpoint("mock:b").expectedMinimumMessageCount(2);
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
SchedulerComponent comp = context.getComponent("scheduler", SchedulerComponent.class);
comp.setPoolSize(2);
from("scheduler://foo?delay=100").to("log:a").to("mock:a");
from("scheduler://foo?delay=200").to("log:b").to("mock:b");
}
};
}
}
|
TwoSchedulerConcurrentTasksTest
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/async/utils/AsyncUtil.java
|
{
"start": 1358,
"end": 1636
}
|
class ____ a collection of utility methods to simplify
* the implementation of asynchronous operations using Java's CompletableFuture.
* It encapsulates common patterns such as applying functions, handling exceptions,
* and executing tasks in a non-blocking manner. This
|
provides
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-jackson/deployment/src/main/java/io/quarkus/resteasy/reactive/jackson/deployment/processor/JacksonDeserializerFactory.java
|
{
"start": 3178,
"end": 4532
}
|
class ____$quarkusjacksondeserializer extends StdDeserializer {
* public Person$quarkusjacksondeserializer() {
* super(Person.class);
* }
*
* public Object deserialize(JsonParser jsonParser, DeserializationContext context) throws IOException, JacksonException {
* Person person = new Person();
* Iterator iterator = ((JsonNode) jsonParser.getCodec().readTree(jsonParser)).fields();
*
* while (iterator.hasNext()) {
* Map.Entry entry = (Map.iterator) var3.next();
* String field = (String) entry.getKey();
* JsonNode jsonNode = (JsonNode) entry.getValue();
* switch (field) {
* case "firstName":
* person.setFirstName(jsonNode.asText());
* break;
* case "familyName":
* person.setLastName(jsonNode.asText());
* break;
* case "age":
* person.setAge(jsonNode.asInt());
* break;
* case "address":
* person.setAddress(context.readTreeAsValue(jsonNode, Address.class));
* break;
* }
* }
*
* return person;
* }
* }
* }</pre>
*
* Note that in this case also the {@code Address}
|
Person
|
java
|
grpc__grpc-java
|
xds/src/main/java/io/grpc/xds/XdsServerCredentials.java
|
{
"start": 997,
"end": 1736
}
|
class ____ {
private XdsServerCredentials() {} // prevent instantiation
/**
* Creates credentials to be configured by xDS, falling back to other credentials if no
* TLS configuration is provided by xDS.
*
* @param fallback Credentials to fall back to.
*
* @throws IllegalArgumentException if fallback is unable to be used
*/
public static ServerCredentials create(ServerCredentials fallback) {
InternalProtocolNegotiator.ServerFactory fallbackNegotiator =
InternalNettyServerCredentials.toNegotiator(checkNotNull(fallback, "fallback"));
return InternalNettyServerCredentials.create(
SecurityProtocolNegotiators.serverProtocolNegotiatorFactory(fallbackNegotiator));
}
}
|
XdsServerCredentials
|
java
|
alibaba__nacos
|
config/src/main/java/com/alibaba/nacos/config/server/service/notify/AsyncNotifyService.java
|
{
"start": 11963,
"end": 15880
}
|
class ____ implements RequestCallBack<ConfigChangeClusterSyncResponse> {
private NotifySingleRpcTask task;
AsyncNotifyService asyncNotifyService;
public AsyncRpcNotifyCallBack(AsyncNotifyService asyncNotifyService, NotifySingleRpcTask task) {
this.task = task;
this.asyncNotifyService = asyncNotifyService;
}
@Override
public Executor getExecutor() {
return ConfigExecutor.getConfigSubServiceExecutor();
}
@Override
public long getTimeout() {
return 1000L;
}
@Override
public void onResponse(ConfigChangeClusterSyncResponse response) {
String event = getNotifyEvent(task);
long delayed = System.currentTimeMillis() - task.getLastModified();
if (response.isSuccess()) {
ConfigTraceService.logNotifyEvent(task.getDataId(), task.getGroup(), task.getTenant(), null,
task.getLastModified(), InetUtils.getSelfIP(), event, ConfigTraceService.NOTIFY_TYPE_OK,
delayed, task.member.getAddress());
} else {
LOGGER.error("[notify-error] target:{} dataId:{} group:{} ts:{} code:{}", task.member.getAddress(),
task.getDataId(), task.getGroup(), task.getLastModified(), response.getErrorCode());
ConfigTraceService.logNotifyEvent(task.getDataId(), task.getGroup(), task.getTenant(), null,
task.getLastModified(), InetUtils.getSelfIP(), event, ConfigTraceService.NOTIFY_TYPE_ERROR,
delayed, task.member.getAddress());
//get delay time and set fail count to the task
asyncNotifyService.asyncTaskExecute(task);
LogUtil.NOTIFY_LOG.error("[notify-retry] target:{} dataId:{} group:{} ts:{}", task.member.getAddress(),
task.getDataId(), task.getGroup(), task.getLastModified());
MetricsMonitor.getConfigNotifyException().increment();
}
}
@Override
public void onException(Throwable ex) {
String event = getNotifyEvent(task);
long delayed = System.currentTimeMillis() - task.getLastModified();
LOGGER.error("[notify-exception] target:{} dataId:{} group:{} ts:{} ex:{}", task.member.getAddress(),
task.getDataId(), task.getGroup(), task.getLastModified(), ex);
ConfigTraceService.logNotifyEvent(task.getDataId(), task.getGroup(), task.getTenant(), null,
task.getLastModified(), InetUtils.getSelfIP(), event, ConfigTraceService.NOTIFY_TYPE_EXCEPTION,
delayed, task.member.getAddress());
//get delay time and set fail count to the task
asyncNotifyService.asyncTaskExecute(task);
LogUtil.NOTIFY_LOG.error("[notify-retry] target:{} dataId:{} group:{} ts:{}", task.member.getAddress(),
task.getDataId(), task.getGroup(), task.getLastModified());
MetricsMonitor.getConfigNotifyException().increment();
}
}
/**
* get delayTime and also set failCount to task; The failure time index increases, so as not to retry invalid tasks
* in the offline scene, which affects the normal synchronization.
*
* @param task notify task
* @return delay
*/
private static int getDelayTime(NotifySingleRpcTask task) {
int failCount = task.getFailCount();
int delay = MIN_RETRY_INTERVAL + failCount * failCount * INCREASE_STEPS;
if (failCount <= MAX_COUNT) {
task.setFailCount(failCount + 1);
}
return delay;
}
}
|
AsyncRpcNotifyCallBack
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/util/toolbox/FlexibleAggregationStrategiesTest.java
|
{
"start": 1850,
"end": 16672
}
|
class ____ extends ContextTestSupport {
private final CountDownLatch completionLatch = new CountDownLatch(1);
private final CountDownLatch timeoutLatch = new CountDownLatch(1);
@Test
@SuppressWarnings("unchecked")
public void testFlexibleAggregationStrategyNoCondition() throws Exception {
getMockEndpoint("mock:result1").expectedMessageCount(1);
getMockEndpoint("mock:result1").message(0).body().isInstanceOf(ArrayList.class);
template.sendBodyAndHeader("direct:start1", "AGGREGATE1", "id", "123");
template.sendBodyAndHeader("direct:start1", "AGGREGATE2", "id", "123");
template.sendBodyAndHeader("direct:start1", "AGGREGATE3", "id", "123");
template.sendBodyAndHeader("direct:start1", "AGGREGATE4", "id", "123");
template.sendBodyAndHeader("direct:start1", "AGGREGATE5", "id", "123");
assertMockEndpointsSatisfied();
List<String> resultList = getMockEndpoint("mock:result1").getReceivedExchanges().get(0).getIn().getBody(List.class);
for (int i = 0; i < 5; i++) {
assertEquals("AGGREGATE" + (i + 1), resultList.get(i));
}
}
@Test
@SuppressWarnings("unchecked")
public void testFlexibleAggregationStrategyCondition() throws Exception {
getMockEndpoint("mock:result1").expectedMessageCount(1);
getMockEndpoint("mock:result1").message(0).body().isInstanceOf(ArrayList.class);
template.sendBodyAndHeader("direct:start1", "AGGREGATE1", "id", "123");
template.sendBodyAndHeader("direct:start1", "DISCARD", "id", "123");
template.sendBodyAndHeader("direct:start1", "AGGREGATE2", "id", "123");
template.sendBodyAndHeader("direct:start1", "DISCARD", "id", "123");
template.sendBodyAndHeader("direct:start1", "AGGREGATE3", "id", "123");
assertMockEndpointsSatisfied();
List<String> resultList = getMockEndpoint("mock:result1").getReceivedExchanges().get(0).getIn().getBody(List.class);
for (int i = 0; i < 3; i++) {
assertEquals("AGGREGATE" + (i + 1), resultList.get(i));
}
}
@Test
@SuppressWarnings("unchecked")
public void testFlexibleAggregationStrategyStoreInPropertyHashSet() throws Exception {
getMockEndpoint("mock:result2").expectedMessageCount(1);
getMockEndpoint("mock:result2").message(0).exchangeProperty("AggregationResult").isInstanceOf(HashSet.class);
template.sendBodyAndHeader("direct:start2", "ignored body", "input", "AGGREGATE1");
template.sendBodyAndHeader("direct:start2", "ignored body", "input", "DISCARD");
template.sendBodyAndHeader("direct:start2", "ignored body", "input", "AGGREGATE2");
template.sendBodyAndHeader("direct:start2", "ignored body", "input", "DISCARD");
template.sendBodyAndHeader("direct:start2", "ignored body", "input", "AGGREGATE3");
assertMockEndpointsSatisfied();
HashSet<String> resultSet
= getMockEndpoint("mock:result2").getReceivedExchanges().get(0).getProperty("AggregationResult", HashSet.class);
assertEquals(3, resultSet.size());
assertTrue(resultSet.contains("AGGREGATE1") && resultSet.contains("AGGREGATE2") && resultSet.contains("AGGREGATE3"));
}
@Test
public void testFlexibleAggregationStrategyStoreInHeaderSingleValue() throws Exception {
getMockEndpoint("mock:result3").expectedMessageCount(1);
getMockEndpoint("mock:result3").message(0).header("AggregationResult").isInstanceOf(String.class);
getMockEndpoint("mock:result3").message(0).header("AggregationResult").isEqualTo("AGGREGATE3");
template.sendBody("direct:start3", "AGGREGATE1");
template.sendBody("direct:start3", "AGGREGATE2");
template.sendBody("direct:start3", "AGGREGATE3");
assertMockEndpointsSatisfied();
}
@Test
public void testFlexibleAggregationStrategyStoreInVariableSingleValue() throws Exception {
getMockEndpoint("mock:result7").expectedMessageCount(1);
getMockEndpoint("mock:result7").message(0).variable("AggregationResult").isInstanceOf(String.class);
getMockEndpoint("mock:result7").message(0).variable("AggregationResult").isEqualTo("AGGREGATE1");
template.sendBody("direct:start7", "AGGREGATE1");
assertMockEndpointsSatisfied();
}
@Test
@SuppressWarnings("rawtypes")
public void testFlexibleAggregationStrategyGenericArrayListWithoutNulls() throws Exception {
getMockEndpoint("mock:result4").expectedMessageCount(1);
getMockEndpoint("mock:result4").message(0).body().isInstanceOf(ArrayList.class);
template.sendBody("direct:start4", "AGGREGATE1");
template.sendBody("direct:start4", 123d);
template.sendBody("direct:start4", null);
assertMockEndpointsSatisfied();
ArrayList list = getMockEndpoint("mock:result4").getReceivedExchanges().get(0).getIn().getBody(ArrayList.class);
assertEquals(2, list.size());
assertTrue(list.contains("AGGREGATE1"));
assertTrue(list.contains(123d));
}
@Test
public void testFlexibleAggregationStrategyFailWithInvalidCast() throws Exception {
getMockEndpoint("mock:result5").expectedMessageCount(0);
Exception ex = assertThrows(Exception.class, () -> template.sendBody("direct:start5", "AGGREGATE1"),
"Type Conversion exception expected, as we are not ignoring invalid casts");
assertMockEndpointsSatisfied();
}
@Test
@SuppressWarnings("rawtypes")
public void testFlexibleAggregationStrategyFailOnInvalidCast() throws Exception {
getMockEndpoint("mock:result6").expectedMessageCount(1);
getMockEndpoint("mock:result6").message(0).body().isInstanceOf(ArrayList.class);
template.sendBody("direct:start6", "AGGREGATE1");
template.sendBody("direct:start6", "AGGREGATE2");
template.sendBody("direct:start6", "AGGREGATE3");
assertMockEndpointsSatisfied();
ArrayList list = getMockEndpoint("mock:result6").getReceivedExchanges().get(0).getIn().getBody(ArrayList.class);
assertEquals(3, list.size());
for (Object object : list) {
assertNull(object);
}
}
@Test
public void testFlexibleAggregationStrategyTimeoutCompletionMixins() throws Exception {
getMockEndpoint("mock:result.timeoutAndCompletionAware").expectedMessageCount(2);
getMockEndpoint("mock:result.timeoutAndCompletionAware").message(0).body().isEqualTo("AGGREGATE1");
getMockEndpoint("mock:result.timeoutAndCompletionAware").message(0).exchangeProperty("Timeout").isEqualTo(true);
getMockEndpoint("mock:result.timeoutAndCompletionAware").message(1).body().isEqualTo("AGGREGATE3");
template.sendBody("direct:start.timeoutAndCompletionAware", "AGGREGATE1");
assertTrue(timeoutLatch.await(2500, TimeUnit.MILLISECONDS));
template.sendBody("direct:start.timeoutAndCompletionAware", "AGGREGATE2");
template.sendBody("direct:start.timeoutAndCompletionAware", "AGGREGATE3");
assertTrue(completionLatch.await(2500, TimeUnit.MILLISECONDS));
getMockEndpoint("mock:result.timeoutAndCompletionAware").getReceivedExchanges();
assertMockEndpointsSatisfied();
}
@Test
@SuppressWarnings("unchecked")
public void testFlexibleAggregationStrategyPickXPath() throws Exception {
getMockEndpoint("mock:result.xpath1").expectedMessageCount(1);
getMockEndpoint("mock:result.xpath1").message(0).body().isInstanceOf(ArrayList.class);
template.sendBody("direct:start.xpath1", "<envelope><result>ok</result></envelope>");
template.sendBody("direct:start.xpath1", "<envelope><result>error</result></envelope>");
template.sendBody("direct:start.xpath1", "<envelope>no result</envelope>");
assertMockEndpointsSatisfied();
ArrayList<Node> list
= getMockEndpoint("mock:result.xpath1").getReceivedExchanges().get(0).getIn().getBody(ArrayList.class);
assertEquals(2, list.size());
assertEquals("ok", list.get(0).getTextContent());
assertEquals("error", list.get(1).getTextContent());
}
@Test
public void testLinkedList() {
NotifyBuilder notify = new NotifyBuilder(context).whenDone(1).and().whenExactlyFailed(0).create();
template.sendBody("direct:linkedlist", Arrays.asList("FIRST", "SECOND"));
assertTrue(notify.matches(10, TimeUnit.SECONDS));
}
@Test
public void testHashSet() {
HashSet<String> r = new HashSet<>();
r.add("FIRST");
r.add("SECOND");
NotifyBuilder notify = new NotifyBuilder(context).whenDone(1).and().whenExactlyFailed(0).create();
Set result = template.requestBody("direct:hashset", Arrays.asList("FIRST", "SECOND"), Set.class);
assertTrue(notify.matches(10, TimeUnit.SECONDS));
assertEquals(r, result);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start1")
.aggregate(AggregationStrategies.flexible(String.class).accumulateInCollection(ArrayList.class)
.pick(simple("${body}"))
.condition(simple("${body} contains 'AGGREGATE'")))
.header("id").completionSize(5).to("mock:result1");
from("direct:start2")
.aggregate(AggregationStrategies.flexible(String.class).accumulateInCollection(HashSet.class)
.pick(simple("${header.input}"))
.condition(simple("${header.input} contains 'AGGREGATE'")).storeInProperty("AggregationResult"))
.constant(true).completionSize(5).to("mock:result2");
from("direct:start3").aggregate(AggregationStrategies.flexible(String.class).storeInHeader("AggregationResult"))
.constant(true).completionSize(3)
.to("mock:result3");
from("direct:start4").aggregate(AggregationStrategies.flexible().accumulateInCollection(ArrayList.class))
.constant(true).completionSize(3).to("mock:result4");
from("direct:start5")
.aggregate(AggregationStrategies.flexible(Integer.class).accumulateInCollection(ArrayList.class))
.constant(true).completionSize(3)
.to("mock:result5");
from("direct:start6")
.aggregate(AggregationStrategies.flexible(Integer.class).ignoreInvalidCasts().storeNulls()
.accumulateInCollection(ArrayList.class))
.constant(true).completionSize(3).to("mock:result6");
from("direct:start7")
.aggregate(AggregationStrategies.flexible(String.class).storeInVariable("AggregationResult"))
.constant(true).completionSize(1)
.to("mock:result7");
AggregationStrategy timeoutCompletionStrategy
= AggregationStrategies.flexible(String.class).condition(simple("${body} contains 'AGGREGATE'"))
.timeoutAware(new TimeoutAwareMixin() {
@Override
public void timeout(Exchange exchange, int index, int total, long timeout) {
exchange.setProperty("Timeout", true);
timeoutLatch.countDown();
}
}).completionAware(new CompletionAwareMixin() {
@Override
public void onCompletion(Exchange exchange) {
completionLatch.countDown();
}
});
from("direct:start.timeoutAndCompletionAware").aggregate(timeoutCompletionStrategy).constant(true)
.completionTimeout(500).completionSize(2)
.to("mock:result.timeoutAndCompletionAware");
from("direct:start.xpath1")
.aggregate(AggregationStrategies.flexible(Node.class)
.pick(XPathBuilder.xpath("//result[1]").nodeResult()).accumulateInCollection(ArrayList.class))
.constant(true).completionSize(3).to("mock:result.xpath1");
from("direct:linkedlist")
.log(LoggingLevel.INFO, "Before the first split the body is ${body} and has class ${body.getClass()}")
.split(body(), AggregationStrategies.flexible().pick(body()).accumulateInCollection(LinkedList.class))
.log(LoggingLevel.INFO, "During the first split the body is ${body} and has class ${body.getClass()}")
.end()
.log(LoggingLevel.INFO, "Before the second split the body is ${body} and has class ${body.getClass()}")
.split(body(), AggregationStrategies.flexible().pick(body()).accumulateInCollection(LinkedList.class))
.log(LoggingLevel.INFO, "During the second split the body is ${body} and has class ${body.getClass()}")
.end()
.log(LoggingLevel.INFO, "After the second split the body is ${body} and has class ${body.getClass()}");
from("direct:hashset")
.log(LoggingLevel.INFO, "Before the first split the body is ${body} and has class ${body.getClass()}")
.split(body(), AggregationStrategies.flexible().pick(body()).accumulateInCollection(HashSet.class))
.log(LoggingLevel.INFO, "During the first split the body is ${body} and has class ${body.getClass()}")
.end()
.log(LoggingLevel.INFO, "Before the second split the body is ${body} and has class ${body.getClass()}")
.split(body(), AggregationStrategies.flexible().pick(body()).accumulateInCollection(HashSet.class))
.log(LoggingLevel.INFO, "During the second split the body is ${body} and has class ${body.getClass()}")
.end()
.log(LoggingLevel.INFO, "After the second split the body is ${body} and has class ${body.getClass()}");
}
};
}
}
|
FlexibleAggregationStrategiesTest
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/float_/FloatAssert_isStrictlyBetween_Floats_Test.java
|
{
"start": 908,
"end": 1258
}
|
class ____ extends FloatAssertBaseTest {
@Override
protected FloatAssert invoke_api_method() {
return assertions.isStrictlyBetween(6f, 8f);
}
@Override
protected void verify_internal_effects() {
verify(floats).assertIsStrictlyBetween(getInfo(assertions), getActual(assertions), 6f, 8f);
}
}
|
FloatAssert_isStrictlyBetween_Floats_Test
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/objectid/ObjectId687Test.java
|
{
"start": 1444,
"end": 1626
}
|
class ____ {
public String label = "label2";
}
@JsonIdentityInfo(generator=ObjectIdGenerators.PropertyGenerator.class, property="label")
static
|
ReferredWithNoCreator
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/manytoone/jointable/ManyToOneImplicitJoinTableTest.java
|
{
"start": 615,
"end": 1098
}
|
class ____ {
@JiraKey("HHH-19564") @Test
void test(EntityManagerFactoryScope scope) {
scope.inTransaction( s -> {
X x = new X();
Y y = new Y();
y.x = x;
s.persist( x );
s.persist( y );
} );
scope.inTransaction( s -> {
Y y = s.find( Y.class, 0L );
y.name = "Gavin";
} );
scope.inTransaction( s -> {
Y y = s.find( Y.class, 0L );
assertEquals("Gavin", y.name);
assertNotNull(y.x);
} );
}
@Entity(name="Y")
static
|
ManyToOneImplicitJoinTableTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncDateNanosEvaluator.java
|
{
"start": 1128,
"end": 4214
}
|
class ____ implements EvalOperator.ExpressionEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateTruncDateNanosEvaluator.class);
private final Source source;
private final EvalOperator.ExpressionEvaluator fieldVal;
private final Rounding.Prepared rounding;
private final DriverContext driverContext;
private Warnings warnings;
public DateTruncDateNanosEvaluator(Source source, EvalOperator.ExpressionEvaluator fieldVal,
Rounding.Prepared rounding, DriverContext driverContext) {
this.source = source;
this.fieldVal = fieldVal;
this.rounding = rounding;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
try (LongBlock fieldValBlock = (LongBlock) fieldVal.eval(page)) {
LongVector fieldValVector = fieldValBlock.asVector();
if (fieldValVector == null) {
return eval(page.getPositionCount(), fieldValBlock);
}
return eval(page.getPositionCount(), fieldValVector).asBlock();
}
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += fieldVal.baseRamBytesUsed();
return baseRamBytesUsed;
}
public LongBlock eval(int positionCount, LongBlock fieldValBlock) {
try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
switch (fieldValBlock.getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
long fieldVal = fieldValBlock.getLong(fieldValBlock.getFirstValueIndex(p));
result.appendLong(DateTrunc.processDateNanos(fieldVal, this.rounding));
}
return result.build();
}
}
public LongVector eval(int positionCount, LongVector fieldValVector) {
try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
long fieldVal = fieldValVector.getLong(p);
result.appendLong(p, DateTrunc.processDateNanos(fieldVal, this.rounding));
}
return result.build();
}
}
@Override
public String toString() {
return "DateTruncDateNanosEvaluator[" + "fieldVal=" + fieldVal + ", rounding=" + rounding + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(fieldVal);
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static
|
DateTruncDateNanosEvaluator
|
java
|
quarkusio__quarkus
|
extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/telemetry/endpoints/onbinarymessage/DtoBinaryCodec.java
|
{
"start": 303,
"end": 678
}
|
class ____ implements BinaryMessageCodec<Dto> {
@Override
public boolean supports(Type type) {
return type.equals(Dto.class);
}
@Override
public Buffer encode(Dto dto) {
return Buffer.buffer(dto.property());
}
@Override
public Dto decode(Type type, Buffer value) {
return new Dto(value.toString());
}
}
|
DtoBinaryCodec
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/postgresql/ast/stmt/PGEndTransactionStatement.java
|
{
"start": 1000,
"end": 1473
}
|
class ____ extends SQLStatementImpl implements PGSQLStatement {
public PGEndTransactionStatement() {
super(DbType.postgresql);
}
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor instanceof PGASTVisitor) {
accept0((PGASTVisitor) visitor);
}
}
@Override
public void accept0(PGASTVisitor visitor) {
visitor.visit(this);
visitor.endVisit(this);
}
}
|
PGEndTransactionStatement
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/resolver/order/TestAvailableSpaceResolver.java
|
{
"start": 2884,
"end": 9689
}
|
class ____ {
private static final int SUBCLUSTER_NUM = 10;
@Test
public void testResolverWithNoPreference() throws IOException {
MultipleDestinationMountTableResolver mountTableResolver =
mockAvailableSpaceResolver(1.0f);
// Since we don't have any preference, it will
// always chose the maximum-available-space subcluster.
PathLocation loc = mountTableResolver.getDestinationForPath("/space");
assertEquals("subcluster9",
loc.getDestinations().get(0).getNameserviceId());
loc = mountTableResolver.getDestinationForPath("/space/subdir");
assertEquals("subcluster9",
loc.getDestinations().get(0).getNameserviceId());
}
@Test
public void testResolverWithDefaultPreference() throws IOException {
MultipleDestinationMountTableResolver mountTableResolver =
mockAvailableSpaceResolver(BALANCER_PREFERENCE_DEFAULT);
int retries = 10;
int retryTimes = 0;
// There is chance we won't always chose the
// maximum-available-space subcluster.
for (retryTimes = 0; retryTimes < retries; retryTimes++) {
PathLocation loc = mountTableResolver.getDestinationForPath("/space");
if (!"subcluster9"
.equals(loc.getDestinations().get(0).getNameserviceId())) {
break;
}
}
assertNotEquals(retries, retryTimes);
}
/**
* Mock the available space based resolver.
*
* @param balancerPreference The balancer preference for the resolver.
* @throws IOException
* @return MultipleDestinationMountTableResolver instance.
*/
@SuppressWarnings("unchecked")
private MultipleDestinationMountTableResolver mockAvailableSpaceResolver(
float balancerPreference) throws IOException {
Configuration conf = new Configuration();
conf.setFloat(BALANCER_PREFERENCE_KEY, balancerPreference);
Router router = mock(Router.class);
StateStoreService stateStore = mock(StateStoreService.class);
MembershipStore membership = mock(MembershipStore.class);
when(router.getStateStore()).thenReturn(stateStore);
when(stateStore.getRegisteredRecordStore(any(Class.class)))
.thenReturn(membership);
GetNamenodeRegistrationsResponse response =
GetNamenodeRegistrationsResponse.newInstance();
// Set the mapping for each client
List<MembershipState> records = new LinkedList<>();
for (int i = 0; i < SUBCLUSTER_NUM; i++) {
records.add(newMembershipState("subcluster" + i, i));
}
response.setNamenodeMemberships(records);
when(membership
.getNamenodeRegistrations(any(GetNamenodeRegistrationsRequest.class)))
.thenReturn(response);
// construct available space resolver
AvailableSpaceResolver resolver = new AvailableSpaceResolver(conf, router);
MultipleDestinationMountTableResolver mountTableResolver =
new MultipleDestinationMountTableResolver(conf, router);
mountTableResolver.addResolver(DestinationOrder.SPACE, resolver);
// We point /space to subclusters [0,..9] with the SPACE order
Map<String, String> destinations = new HashMap<>();
for (int i = 0; i < SUBCLUSTER_NUM; i++) {
destinations.put("subcluster" + i, "/space");
}
MountTable spaceEntry = MountTable.newInstance("/space", destinations);
spaceEntry.setDestOrder(DestinationOrder.SPACE);
mountTableResolver.addEntry(spaceEntry);
return mountTableResolver;
}
public static MembershipState newMembershipState(String nameservice,
long availableSpace) {
MembershipState record = MembershipState.newInstance();
record.setNameserviceId(nameservice);
MembershipStats stats = new MembershipStatsPBImpl();
stats.setAvailableSpace(availableSpace);
record.setStats(stats);
return record;
}
@Test
public void testSubclusterSpaceComparator() {
verifyRank(0.0f, true, false);
verifyRank(1.0f, true, true);
verifyRank(0.5f, false, false);
verifyRank(BALANCER_PREFERENCE_DEFAULT, false, false);
// test for illegal cases
try {
verifyRank(2.0f, false, false);
fail("Subcluster comparison should be failed.");
} catch (IllegalArgumentException e) {
GenericTestUtils.assertExceptionContains(
"The balancer preference value should be in the range 0.0 - 1.0", e);
}
try {
verifyRank(-1.0f, false, false);
fail("Subcluster comparison should be failed.");
} catch (IllegalArgumentException e) {
GenericTestUtils.assertExceptionContains(
"The balancer preference value should be in the range 0.0 - 1.0", e);
}
}
/**
* Verify result rank with {@link SubclusterSpaceComparator}.
* @param balancerPreference The balancer preference used
* in {@link SubclusterSpaceComparator}.
* @param shouldOrdered The result rank should be ordered.
* @param isDesc If the rank result is in a descending order.
*/
private void verifyRank(float balancerPreference, boolean shouldOrdered,
boolean isDesc) {
List<SubclusterAvailableSpace> subclusters = new LinkedList<>();
for (int i = 0; i < SUBCLUSTER_NUM; i++) {
subclusters.add(new SubclusterAvailableSpace("subcluster" + i, i));
}
// shuffle the cluster list if we expect rank to be ordered
if (shouldOrdered) {
Collections.shuffle(subclusters);
}
SubclusterSpaceComparator comparator = new SubclusterSpaceComparator(
balancerPreference);
Collections.sort(subclusters, comparator);
int i = SUBCLUSTER_NUM - 1;
for (; i >= 0; i--) {
SubclusterAvailableSpace cluster = subclusters
.get(SUBCLUSTER_NUM - 1 - i);
if (shouldOrdered) {
if (isDesc) {
assertEquals("subcluster" + i, cluster.getNameserviceId());
assertEquals(i, cluster.getAvailableSpace());
} else {
assertEquals("subcluster" + (SUBCLUSTER_NUM - 1 - i),
cluster.getNameserviceId());
assertEquals(SUBCLUSTER_NUM - 1 - i, cluster.getAvailableSpace());
}
} else {
// If catch one cluster is not in ordered, that's expected behavior.
if (!cluster.getNameserviceId().equals("subcluster" + i)
&& cluster.getAvailableSpace() != i) {
break;
}
}
}
// The var i won't reach to 0 since cluster list won't be completely
// ordered.
if (!shouldOrdered) {
assertNotEquals(0, i);
}
subclusters.clear();
}
@Test
public void testChooseFirstNamespace() throws Exception {
MultipleDestinationMountTableResolver mountTableResolver =
mockAvailableSpaceResolver(1.0f);
PathLocation loc = mountTableResolver.getDestinationForPath("/space");
assertEquals("subcluster9", loc.getDefaultLocation().getNameserviceId());
}
}
|
TestAvailableSpaceResolver
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/security/oauthbearer/internals/secured/SerializedJwt.java
|
{
"start": 1162,
"end": 3124
}
|
class ____ {
private final String token;
private final String header;
private final String payload;
private final String signature;
public SerializedJwt(String token) {
if (token == null)
token = "";
else
token = token.trim();
if (token.isEmpty())
throw new JwtValidatorException("Malformed JWT provided; expected three sections (header, payload, and signature)");
String[] splits = token.split("\\.");
if (splits.length != 3)
throw new JwtValidatorException("Malformed JWT provided; expected three sections (header, payload, and signature)");
this.token = token.trim();
this.header = validateSection(splits[0]);
this.payload = validateSection(splits[1]);
this.signature = validateSection(splits[2]);
}
/**
* Returns the entire base 64-encoded JWT.
*
* @return JWT
*/
public String getToken() {
return token;
}
/**
* Returns the first section--the JWT header--in its base 64-encoded form.
*
* @return Header section of the JWT
*/
public String getHeader() {
return header;
}
/**
* Returns the second section--the JWT payload--in its base 64-encoded form.
*
* @return Payload section of the JWT
*/
public String getPayload() {
return payload;
}
/**
* Returns the third section--the JWT signature--in its base 64-encoded form.
*
* @return Signature section of the JWT
*/
public String getSignature() {
return signature;
}
private String validateSection(String section) throws JwtValidatorException {
section = section.trim();
if (section.isEmpty())
throw new JwtValidatorException("Malformed JWT provided; expected three sections (header, payload, and signature)");
return section;
}
}
|
SerializedJwt
|
java
|
quarkusio__quarkus
|
independent-projects/qute/core/src/main/java/io/quarkus/qute/IfSectionHelper.java
|
{
"start": 1575,
"end": 1702
}
|
interface ____ {
CompletionStage<ResultNode> resolve(SectionResolutionContext context);
}
static final
|
IfContext
|
java
|
spring-projects__spring-boot
|
module/spring-boot-pulsar/src/main/java/org/springframework/boot/pulsar/autoconfigure/PulsarProperties.java
|
{
"start": 22106,
"end": 22367
}
|
class ____ {
/**
* Whether transaction support is enabled.
*/
private boolean enabled;
public boolean isEnabled() {
return this.enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
}
public static
|
Transaction
|
java
|
netty__netty
|
common/src/main/java/io/netty/util/internal/NoOpTypeParameterMatcher.java
|
{
"start": 683,
"end": 827
}
|
class ____ extends TypeParameterMatcher {
@Override
public boolean match(Object msg) {
return true;
}
}
|
NoOpTypeParameterMatcher
|
java
|
apache__camel
|
core/camel-base/src/main/java/org/apache/camel/impl/converter/TypeConvertersLoader.java
|
{
"start": 983,
"end": 1080
}
|
class ____
* methods that has been annotated with {@link org.apache.camel.Converter}.
*/
public
|
for
|
java
|
apache__maven
|
api/maven-api-di/src/main/java/org/apache/maven/api/di/Typed.java
|
{
"start": 1560,
"end": 1733
}
|
class ____ implements Service {
* // Implementation
* }
* </pre>
*
* @since 4.0.0
*/
@Target({FIELD, METHOD, TYPE})
@Retention(RUNTIME)
@Documented
public @
|
ServiceImpl
|
java
|
reactor__reactor-core
|
reactor-core/src/withMicrometerTest/java/reactor/core/publisher/ContextPropagationTest.java
|
{
"start": 2169,
"end": 4296
}
|
class ____ {
private static final String KEY1 = "ContextPropagationTest.key1";
private static final String KEY2 = "ContextPropagationTest.key2";
private static final ThreadLocal<String> REF1 = ThreadLocal.withInitial(() -> "ref1_init");
private static final ThreadLocal<String> REF2 = ThreadLocal.withInitial(() -> "ref2_init");
@BeforeAll
static void initializeThreadLocalAccessors() {
ContextRegistry globalRegistry = ContextRegistry.getInstance();
globalRegistry.registerThreadLocalAccessor(KEY1, REF1);
globalRegistry.registerThreadLocalAccessor(KEY2, REF2);
}
//the cleanup of "thread locals" could be especially important if one starts relying on
//the global registry in tests: it would ensure no TL pollution.
@AfterEach
void cleanupThreadLocals() {
REF1.remove();
REF2.remove();
}
@AfterAll
static void removeThreadLocalAccessors() {
ContextRegistry globalRegistry = ContextRegistry.getInstance();
globalRegistry.removeThreadLocalAccessor(KEY1);
globalRegistry.removeThreadLocalAccessor(KEY2);
}
@Test
void isContextPropagationAvailable() {
assertThat(ContextPropagationSupport.isContextPropagationAvailable()).isTrue();
}
@Test
void contextCaptureWithNoPredicateReturnsTheConstantFunction() {
assertThat(ContextPropagation.contextCapture())
.as("no predicate nor registry")
.isSameAs(ContextPropagation.WITH_GLOBAL_REGISTRY_NO_PREDICATE);
}
@Test
void fluxApiUsesContextPropagationConstantFunction() {
Flux<Integer> source = Flux.empty();
assertThat(source.contextCapture())
.isInstanceOfSatisfying(FluxContextWrite.class, fcw ->
assertThat(fcw.doOnContext)
.as("flux's capture function")
.isSameAs(ContextPropagation.WITH_GLOBAL_REGISTRY_NO_PREDICATE)
);
}
@Test
void monoApiUsesContextPropagationConstantFunction() {
Mono<Integer> source = Mono.empty();
assertThat(source.contextCapture())
.isInstanceOfSatisfying(MonoContextWrite.class, fcw ->
assertThat(fcw.doOnContext)
.as("mono's capture function")
.isSameAs(ContextPropagation.WITH_GLOBAL_REGISTRY_NO_PREDICATE)
);
}
@Nested
|
ContextPropagationTest
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/tuple/ImmutablePair.java
|
{
"start": 1341,
"end": 6275
}
|
class ____<L, R> extends Pair<L, R> {
/**
* An empty array.
* <p>
* Consider using {@link #emptyArray()} to avoid generics warnings.
* </p>
*
* @since 3.10
*/
public static final ImmutablePair<?, ?>[] EMPTY_ARRAY = {};
/**
* An immutable pair of nulls.
*/
// This is not defined with generics to avoid warnings in call sites.
@SuppressWarnings("rawtypes")
private static final ImmutablePair NULL = new ImmutablePair<>(null, null);
/** Serialization version */
private static final long serialVersionUID = 4954918890077093841L;
/**
* Returns the empty array singleton that can be assigned without compiler warning.
*
* @param <L> the left element type
* @param <R> the right element type
* @return the empty array singleton that can be assigned without compiler warning.
* @since 3.10
*/
@SuppressWarnings("unchecked")
public static <L, R> ImmutablePair<L, R>[] emptyArray() {
return (ImmutablePair<L, R>[]) EMPTY_ARRAY;
}
/**
* Creates an immutable pair of two objects inferring the generic types.
*
* @param <L> the left element type.
* @param <R> the right element type.
* @param left the left element, may be null.
* @return an immutable formed from the two parameters, not null.
* @since 3.11
*/
public static <L, R> Pair<L, R> left(final L left) {
return of(left, null);
}
/**
* Returns an immutable pair of nulls.
*
* @param <L> the left element of this pair. Value is {@code null}.
* @param <R> the right element of this pair. Value is {@code null}.
* @return an immutable pair of nulls.
* @since 3.6
*/
@SuppressWarnings("unchecked")
public static <L, R> ImmutablePair<L, R> nullPair() {
return NULL;
}
/**
* Creates an immutable pair of two objects inferring the generic types.
*
* @param <L> the left element type.
* @param <R> the right element type.
* @param left the left element, may be null.
* @param right the right element, may be null.
* @return an immutable formed from the two parameters, not null.
*/
public static <L, R> ImmutablePair<L, R> of(final L left, final R right) {
return left != null || right != null ? new ImmutablePair<>(left, right) : nullPair();
}
/**
* Creates an immutable pair from a map entry.
*
* @param <L> the left element type.
* @param <R> the right element type.
* @param pair the existing map entry.
* @return an immutable formed from the map entry.
* @since 3.10
*/
public static <L, R> ImmutablePair<L, R> of(final Map.Entry<L, R> pair) {
return pair != null ? new ImmutablePair<>(pair.getKey(), pair.getValue()) : nullPair();
}
/**
* Creates an immutable pair of two non-null objects inferring the generic types.
*
* @param <L> the left element type.
* @param <R> the right element type.
* @param left the left element, may not be null.
* @param right the right element, may not be null.
* @return an immutable formed from the two parameters, not null.
* @throws NullPointerException if any input is null.
* @since 3.13.0
*/
public static <L, R> ImmutablePair<L, R> ofNonNull(final L left, final R right) {
return of(Objects.requireNonNull(left, "left"), Objects.requireNonNull(right, "right"));
}
/**
* Creates an immutable pair of two objects inferring the generic types.
*
* @param <L> the left element type.
* @param <R> the right element type.
* @param right the right element, may be null.
* @return an immutable formed from the two parameters, not null.
* @since 3.11
*/
public static <L, R> Pair<L, R> right(final R right) {
return of(null, right);
}
/** Left object */
public final L left;
/** Right object */
public final R right;
/**
* Create a new pair instance.
*
* @param left the left value, may be null
* @param right the right value, may be null
*/
public ImmutablePair(final L left, final R right) {
this.left = left;
this.right = right;
}
/**
* {@inheritDoc}
*/
@Override
public L getLeft() {
return left;
}
/**
* {@inheritDoc}
*/
@Override
public R getRight() {
return right;
}
/**
* Throws {@link UnsupportedOperationException}.
*
* <p>This pair is immutable, so this operation is not supported.</p>
*
* @param value the value to set
* @return never
* @throws UnsupportedOperationException as this operation is not supported
*/
@Override
public R setValue(final R value) {
throw new UnsupportedOperationException();
}
}
|
ImmutablePair
|
java
|
lettuce-io__lettuce-core
|
src/test/java/io/lettuce/core/commands/reactive/StreamReactiveCommandIntegrationTests.java
|
{
"start": 384,
"end": 649
}
|
class ____ extends StreamCommandIntegrationTests {
@Inject
StreamReactiveCommandIntegrationTests(StatefulRedisConnection<String, String> connection) {
super(ReactiveSyncInvocationHandler.sync(connection));
}
}
|
StreamReactiveCommandIntegrationTests
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/oracle/ast/stmt/OracleSelectPivotBase.java
|
{
"start": 732,
"end": 824
}
|
class ____ extends SQLPivot {
public OracleSelectPivotBase() {
}
}
|
OracleSelectPivotBase
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/config/AbstractAuditingViaJavaConfigRepositoriesTests.java
|
{
"start": 4840,
"end": 4993
}
|
class ____ {
@Bean
EvaluationContextExtension sampleEvaluationContextExtension() {
return new SampleEvaluationContextExtension();
}
}
}
|
TestConfig
|
java
|
apache__dubbo
|
dubbo-rpc/dubbo-rpc-api/src/main/java/org/apache/dubbo/rpc/protocol/AbstractProxyProtocol.java
|
{
"start": 7751,
"end": 9101
}
|
class ____ implements RemotingServer {
public abstract Object getDelegateServer();
/**
* @return
*/
@Override
public boolean isBound() {
return false;
}
@Override
public Collection<Channel> getChannels() {
return null;
}
@Override
public Channel getChannel(InetSocketAddress remoteAddress) {
return null;
}
@Override
public void reset(Parameters parameters) {}
@Override
public void reset(URL url) {}
@Override
public URL getUrl() {
return null;
}
@Override
public ChannelHandler getChannelHandler() {
return null;
}
@Override
public InetSocketAddress getLocalAddress() {
return null;
}
@Override
public void send(Object message) throws RemotingException {}
@Override
public void send(Object message, boolean sent) throws RemotingException {}
@Override
public void close() {}
@Override
public void close(int timeout) {}
@Override
public void startClose() {}
@Override
public boolean isClosed() {
return false;
}
}
}
|
RemotingServerAdapter
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_513/MappingException.java
|
{
"start": 233,
"end": 388
}
|
class ____ extends Exception {
public MappingException() {
}
public MappingException(String msg) {
super( msg );
}
}
|
MappingException
|
java
|
apache__flink
|
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/sink/filesystem/Buckets.java
|
{
"start": 1809,
"end": 2016
}
|
class ____ the lifecycle
* of the operator.
*
* @param <IN> The type of input elements.
* @param <BucketID> The type of ids for the buckets, as returned by the {@link BucketAssigner}.
*/
@Internal
public
|
to
|
java
|
google__guice
|
extensions/persist/src/com/google/inject/persist/jpa/JpaFinderProxy.java
|
{
"start": 10960,
"end": 11021
}
|
enum ____ {
PLAIN,
COLLECTION,
ARRAY
}
}
|
ReturnType
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/delegation/DefaultExecutorTest.java
|
{
"start": 1689,
"end": 4341
}
|
class ____ {
@Test
void testJobName() {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
final Executor executor = new DefaultExecutor(env);
final List<Transformation<?>> dummyTransformations =
Collections.singletonList(
env.fromData(1, 2, 3).sinkTo(new DiscardingSink<>()).getTransformation());
final Configuration configuration = new Configuration();
configuration.set(PipelineOptions.NAME, "Custom Name");
// default
testJobName(
executor.createPipeline(dummyTransformations, new Configuration(), "Default Name"),
"Default Name");
// Table API specific
testJobName(
executor.createPipeline(dummyTransformations, configuration, "Default Name"),
"Custom Name");
// DataStream API specific
env.configure(configuration);
testJobName(
executor.createPipeline(dummyTransformations, new Configuration(), "Default Name"),
"Custom Name");
}
@Test
void testDefaultBatchProperties() {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
final Executor executor = new DefaultExecutor(env);
final List<Transformation<?>> dummyTransformations =
Collections.singletonList(
env.fromData(1, 2, 3).sinkTo(new DiscardingSink<>()).getTransformation());
final Configuration configuration = new Configuration();
configuration.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);
final StreamGraph streamGraph =
(StreamGraph)
executor.createPipeline(
dummyTransformations, configuration, "Default Name");
assertThat(streamGraph.getExecutionConfig().isObjectReuseEnabled()).isTrue();
assertThat(streamGraph.getExecutionConfig().getLatencyTrackingInterval()).isEqualTo(0);
assertThat(streamGraph.isChainingEnabled()).isTrue();
assertThat(streamGraph.isAllVerticesInSameSlotSharingGroupByDefault()).isFalse();
assertThat(streamGraph.getCheckpointConfig().isCheckpointingEnabled()).isFalse();
assertThat(streamGraph.getGlobalStreamExchangeMode())
.isEqualTo(GlobalStreamExchangeMode.ALL_EDGES_BLOCKING);
}
private void testJobName(Pipeline pipeline, String expectedJobName) {
assertThat(((StreamGraph) pipeline).getJobName()).isEqualTo(expectedJobName);
}
}
|
DefaultExecutorTest
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/ConfigureOperatorLevelStateTtlJsonITCase.java
|
{
"start": 1607,
"end": 13813
}
|
class ____ extends JsonPlanTestBase {
@Test
void testDifferentStateTtlThroughCompiledPlanForDifferentOneInputStreamOperator()
throws Exception {
innerTestDeduplicateAndGroupAggregate(
"INSERT INTO OrdersStats \n"
+ "SELECT buyer, COUNT(1) AS ord_cnt, SUM(quantity) AS quantity_cnt, SUM(amount) AS total_amount FROM (\n"
+ "SELECT *, ROW_NUMBER() OVER(PARTITION BY order_id, buyer, quantity, amount ORDER BY proctime() ASC) AS rk FROM Orders) tmp\n"
+ "WHERE rk = 1\n"
+ "GROUP BY buyer",
json -> {
try {
JsonNode target = JsonTestUtils.readFromString(json);
JsonTestUtils.setExecNodeStateMetadata(
target, "stream-exec-deduplicate", 0, 6000L);
JsonTestUtils.setExecNodeStateMetadata(
target, "stream-exec-group-aggregate", 0, 9000L);
return JsonTestUtils.writeToString(target);
} catch (IOException e) {
throw new TableException("Cannot modify compiled json plan.", e);
}
});
}
@Test
void testDifferentStateTtlThroughSqlHintForDifferentOneInputStreamOperator() throws Exception {
tableEnv.getConfig().set("table.exec.mini-batch.enabled", "true");
tableEnv.getConfig().set("table.exec.mini-batch.size", "2");
tableEnv.getConfig().set("table.exec.mini-batch.allow-latency", "1");
innerTestDeduplicateAndGroupAggregate(
"INSERT INTO OrdersStats \n"
+ "SELECT /*+STATE_TTL('tmp' = '9s')*/ buyer, COUNT(1) AS ord_cnt, SUM(quantity) AS quantity_cnt, SUM(amount) AS total_amount \n"
+ "FROM (\n"
+ " SELECT *, ROW_NUMBER() OVER(PARTITION BY order_id, buyer, quantity, amount ORDER BY proctime() ASC) AS rk FROM Orders\n"
+ ") tmp\n"
+ "WHERE rk = 1\n"
+ "GROUP BY buyer",
json -> {
try {
JsonNode target = JsonTestUtils.readFromString(json);
JsonTestUtils.setExecNodeStateMetadata(
target, "stream-exec-deduplicate", 0, 6000L);
return JsonTestUtils.writeToString(target);
} catch (IOException e) {
throw new TableException("Cannot modify compiled json plan.", e);
}
});
}
@Test
void testDifferentStateTtlThroughCompiledPlanForSameTwoInputStreamOperator() throws Exception {
innerTestRegularJoin(
"INSERT INTO OrdersShipInfo \n"
+ "SELECT a.order_id, a.line_order_id, b.ship_mode FROM Orders a JOIN LineOrders b ON a.line_order_id = b.line_order_id",
json -> {
try {
JsonNode target = JsonTestUtils.readFromString(json);
JsonTestUtils.setExecNodeStateMetadata(
target, "stream-exec-join", 0, 3000L);
JsonTestUtils.setExecNodeStateMetadata(
target, "stream-exec-join", 1, 9000L);
return JsonTestUtils.writeToString(target);
} catch (IOException e) {
throw new TableException("Cannot modify compiled json plan.", e);
}
});
}
@Test
void testDifferentStateTtlThroughSqlHintForSameTwoInputStreamOperator() throws Exception {
innerTestRegularJoin(
"INSERT INTO OrdersShipInfo \n"
+ "SELECT /*+ STATE_TTL('a' = '3s', 'b' = '9s') */\n"
+ " a.order_id, a.line_order_id, b.ship_mode "
+ "FROM Orders a JOIN LineOrders b ON a.line_order_id = b.line_order_id",
json -> json);
}
private void innerTestDeduplicateAndGroupAggregate(
String sql, Function<String, String> jsonPlanTransformer) throws Exception {
String dataId =
TestValuesTableFactory.registerRowData(
Arrays.asList(
GenericRowData.of(1, StringData.fromString("Tom"), 1, 199.9d),
GenericRowData.of(2, StringData.fromString("Jerry"), 2, 99.9d),
GenericRowData.of(1, StringData.fromString("Tom"), 1, 199.9d),
GenericRowData.of(3, StringData.fromString("Tom"), 1, 29.9d),
GenericRowData.of(4, StringData.fromString("Olivia"), 1, 100d),
GenericRowData.of(4, StringData.fromString("Olivia"), 1, 100d),
GenericRowData.of(2, StringData.fromString("Jerry"), 2, 99.9d),
GenericRowData.of(5, StringData.fromString("Michael"), 3, 599.9d),
GenericRowData.of(6, StringData.fromString("Olivia"), 3, 1000d)));
createTestSourceTable(
"Orders",
new String[] {
"`order_id` INT", "`buyer` STRING", "`quantity` INT", "`amount` DOUBLE"
},
null,
getProperties(dataId, 1, "2s"));
createTestNonInsertOnlyValuesSinkTable(
"OrdersStats",
"`buyer` STRING",
"`ord_cnt` BIGINT",
"`quantity_cnt` BIGINT",
"`total_amount` DOUBLE");
compileSqlAndExecutePlan(sql, jsonPlanTransformer).await();
// with deduplicate state's TTL as 6s, record (+I,2,Jerry,2,99.9) will duplicate itself
// +-------------------+--------------------------------------+------------------+
// | data | diff(last_arriving, first_arriving) | within_time_range |
// +-------------------+-------------------------------------+-------------------+
// | 1,Tom,1,199.9 | 4s | Y |
// +-------------------+-------------------------------------+-------------------+
// | 2,Jerry,2,99.9 | 10s | N |
// +-------------------+-------------------------------------+-------------------+
// | 3,Tom,1,29.9 | 0s | Y |
// +-------------------+-------------------------------------+-------------------+
// | 4,Olivia,1,100 | 2s | Y |
// +-------------------+-------------------------------------+-------------------+
// | 5,Michael,3,599.9 | 0s | Y |
// +-------------------+-------------------------------------+-------------------+
// | 6,Olivia,3,1000 | 0s | Y |
// +-------------------+-------------------------------------+-------------------+
// with group-aggregate state's TTL as 9s, record (+I,2,Jerry,2,99.9) will be counted twice
List<String> expected =
Arrays.asList(
"+I[Tom, 2, 2, 229.8]",
"+I[Jerry, 1, 2, 99.9]",
"+I[Jerry, 1, 2, 99.9]",
"+I[Olivia, 2, 4, 1100.0]",
"+I[Michael, 1, 3, 599.9]");
assertResult(expected, TestValuesTableFactory.getResultsAsStrings("OrdersStats"));
}
private void innerTestRegularJoin(String sql, Function<String, String> jsonPlanTransformer)
throws Exception {
String leftTableDataId =
TestValuesTableFactory.registerRowData(
Arrays.asList(
GenericRowData.of(1, 1000001),
GenericRowData.of(1, 1000002),
GenericRowData.of(1, 1000003),
GenericRowData.of(1, 1000004),
GenericRowData.of(1, 1000005),
GenericRowData.of(2, 2000001)));
createTestSourceTable(
"Orders",
new String[] {"`order_id` INT", "`line_order_id` INT"},
null,
getProperties(leftTableDataId, 1, "2s"));
String rightTableDataId =
TestValuesTableFactory.registerRowData(
Arrays.asList(
GenericRowData.of(2000001, StringData.fromString("TRUCK")),
GenericRowData.of(1000005, StringData.fromString("AIR")),
GenericRowData.of(1000001, StringData.fromString("SHIP")),
GenericRowData.of(1000002, StringData.fromString("TRUCK")),
GenericRowData.of(1000003, StringData.fromString("RAIL")),
GenericRowData.of(1000004, StringData.fromString("RAIL"))));
createTestSourceTable(
"LineOrders",
new String[] {"`line_order_id` INT", "`ship_mode` STRING"},
null,
getProperties(rightTableDataId, 2, "4s"));
createTestValuesSinkTable(
"OrdersShipInfo", "`order_id` INT", "`line_order_id` INT", "`ship_mode` STRING");
compileSqlAndExecutePlan(sql, jsonPlanTransformer).await();
// with left-state TTL as 3s and right-state TTL as 9s
// +--------------+--------------+-------------------------------------+-------------------+
// | left_data | right_data | diff(left_arriving, right_arriving) | within_time_range |
// +--------------+--------------+-------------------------------------+-------------------+
// | 1,1000001 | 1000001,SHIP | 4s | N |
// +--------------+--------------+-------------------------------------+-------------------+
// | 1,1000002 | 1000002,TRUCK| 2s | Y |
// +--------------+--------------+-------------------------------------+-------------------+
// | 1,1000003 | 1000003,RAIL | 4s | N |
// +--------------+--------------+-------------------------------------+-------------------+
// | 1,1000004 | 1000004,RAIL | 2s | Y |
// +--------------+--------------+-------------------------------------+-------------------+
// | 1,1000005 | 1000005,AIR | -8s | Y |
// +--------------+--------------+-------------------------------------+-------------------+
// | 2,2000001 | 2000001,TRUCK| -10s | N |
// +--------------+--------------+-------------------------------------+-------------------+
List<String> expected =
Arrays.asList(
"+I[1, 1000002, TRUCK]", "+I[1, 1000004, RAIL]", "+I[1, 1000005, AIR]");
assertResult(expected, TestValuesTableFactory.getResultsAsStrings("OrdersShipInfo"));
}
private static Map<String, String> getProperties(
String dataId, int sleepAfterElements, String sleepTime) {
return new HashMap<String, String>() {
{
put("connector", "values");
put("bounded", "false");
put("register-internal-data", "true");
put("source.sleep-after-elements", String.valueOf(sleepAfterElements));
put("source.sleep-time", sleepTime);
put("data-id", dataId);
}
};
}
}
|
ConfigureOperatorLevelStateTtlJsonITCase
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/NamespaceHttpServerAccessDeniedHandlerTests.java
|
{
"start": 5460,
"end": 5983
}
|
class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeHttpRequests((requests) -> requests
.anyRequest().denyAll())
.exceptionHandling((handling) -> handling
.accessDeniedHandler(accessDeniedHandler()));
return http.build();
// @formatter:on
}
@Bean
AccessDeniedHandler accessDeniedHandler() {
return mock(AccessDeniedHandler.class);
}
}
@Configuration
@EnableWebSecurity
static
|
AccessDeniedHandlerRefConfig
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtUtil.java
|
{
"start": 3836,
"end": 19535
}
|
class ____ {
private static final Logger LOGGER = LogManager.getLogger(JwtUtil.class);
/**
* Get header from threadContext, look for the scheme name, and extract the value after it.
* @param threadContext Contains the request parameters.
* @param headerName Header name to look for.
* @param schemeName Scheme name to look for
* @param ignoreSchemeNameCase Ignore case of scheme name.
* @return If found, the trimmed value after the scheme name. Null if parameter not found, or scheme mismatch.
*/
public static SecureString getHeaderValue(
final ThreadContext threadContext,
final String headerName,
final String schemeName,
final boolean ignoreSchemeNameCase
) {
final String headerValue = threadContext.getHeader(headerName);
if (Strings.hasText(headerValue)) {
final String schemeValuePlusSpace = schemeName + " ";
if (headerValue.regionMatches(ignoreSchemeNameCase, 0, schemeValuePlusSpace, 0, schemeValuePlusSpace.length())) {
final String trimmedSchemeParameters = headerValue.substring(schemeValuePlusSpace.length()).trim();
return new SecureString(trimmedSchemeParameters.toCharArray());
}
}
return null;
}
// Static method for unit testing. No need to construct a complete RealmConfig with all settings.
public static void validateClientAuthenticationSettings(
final String clientAuthenticationTypeConfigKey,
final JwtRealmSettings.ClientAuthenticationType clientAuthenticationType,
final String clientAuthenticationSharedSecretConfigKey,
final RotatableSecret clientAuthenticationSharedSecret
) throws SettingsException {
switch (clientAuthenticationType) {
case SHARED_SECRET:
// If type is "SharedSecret", the shared secret value must be set
if (clientAuthenticationSharedSecret.isSet() == false) {
throw new SettingsException(
"Missing setting for ["
+ clientAuthenticationSharedSecretConfigKey
+ "]. It is required when setting ["
+ clientAuthenticationTypeConfigKey
+ "] is ["
+ JwtRealmSettings.ClientAuthenticationType.SHARED_SECRET.value()
+ "]"
);
}
break;
case NONE:
default:
// If type is "None", the shared secret value must not be set
if (clientAuthenticationSharedSecret.isSet()) {
throw new SettingsException(
"Setting ["
+ clientAuthenticationSharedSecretConfigKey
+ "] is not supported, because setting ["
+ clientAuthenticationTypeConfigKey
+ "] is ["
+ JwtRealmSettings.ClientAuthenticationType.NONE.value()
+ "]"
);
}
LOGGER.warn(
"Setting [{}] value [{}] may not be secure. Unauthorized clients may be able to submit JWTs from the same issuer.",
clientAuthenticationSharedSecretConfigKey,
JwtRealmSettings.ClientAuthenticationType.NONE.value()
);
break;
}
}
public static void validateClientAuthentication(
final JwtRealmSettings.ClientAuthenticationType type,
final RotatableSecret expectedSecret,
final SecureString actualSecret,
final String tokenPrincipal
) throws Exception {
switch (type) {
case SHARED_SECRET:
if (Strings.hasText(actualSecret) == false) {
throw new Exception("Rejected client. Authentication type is [" + type + "] and secret is missing.");
} else if (expectedSecret.matches(actualSecret) == false) {
throw new Exception("Rejected client. Authentication type is [" + type + "] and secret did not match.");
}
LOGGER.trace("Accepted client for token [{}]. Authentication type is [{}] and secret matched.", tokenPrincipal, type);
break;
case NONE:
default:
if (Strings.hasText(actualSecret)) {
LOGGER.trace(
"Accepted client for token [{}]. Authentication type [{}]. Secret is present but ignored.",
tokenPrincipal,
type
);
} else {
LOGGER.trace("Accepted client for token [{}]. Authentication type [{}].", tokenPrincipal, type);
}
break;
}
}
public static URI parseHttpsUri(final String uriString) {
if (Strings.hasText(uriString)) {
if (uriString.startsWith("https")) {
final URI uri;
try {
uri = new URI(uriString);
} catch (Exception e) {
throw new SettingsException("Failed to parse HTTPS URI [" + uriString + "].", e);
}
if (Strings.hasText(uri.getHost()) == false) {
// Example URIs w/o host: "https:/", "https://", "https://:443"
throw new SettingsException("Host is missing in HTTPS URI [" + uriString + "].");
}
return uri;
} else if (uriString.startsWith("http")) {
throw new SettingsException("Not allowed to use HTTP URI [" + uriString + "]. Only HTTPS is supported.");
} else {
LOGGER.trace("Not a HTTPS URI [{}].", uriString);
}
}
return null;
}
public static void readUriContents(
final String jwkSetConfigKeyPkc,
final URI jwkSetPathPkcUri,
final CloseableHttpAsyncClient httpClient,
final ActionListener<JwksResponse> listener
) {
JwtUtil.readResponse(
httpClient,
jwkSetPathPkcUri,
ActionListener.wrap(
listener::onResponse,
ex -> listener.onFailure(
new SettingsException(
"Can't get contents for setting [" + jwkSetConfigKeyPkc + "] value [" + jwkSetPathPkcUri + "].",
ex
)
)
)
);
}
public static byte[] readFileContents(final String jwkSetConfigKeyPkc, final String jwkSetPathPkc, final Environment environment)
throws SettingsException {
try {
final Path path = JwtUtil.resolvePath(environment, jwkSetPathPkc);
byte[] bytes = AccessController.doPrivileged((PrivilegedExceptionAction<byte[]>) () -> Files.readAllBytes(path));
return bytes;
} catch (Exception e) {
throw new SettingsException(
"Failed to read contents for setting [" + jwkSetConfigKeyPkc + "] value [" + jwkSetPathPkc + "].",
e
);
}
}
public static String serializeJwkSet(final JWKSet jwkSet, final boolean publicKeysOnly) {
if (jwkSet == null) {
return null;
}
Map<String, Object> jwkJson = jwkSet.toJSONObject(publicKeysOnly);
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
builder.map(jwkJson);
return Strings.toString(builder);
} catch (IOException e) {
throw new ElasticsearchException(e);
}
}
public static String serializeJwkHmacOidc(final JWK key) {
return new String(key.toOctetSequenceKey().toByteArray(), StandardCharsets.UTF_8);
}
/**
* Creates a {@link CloseableHttpAsyncClient} that uses a {@link PoolingNHttpClientConnectionManager}
* @param realmConfig Realm config for a JWT realm.
* @param sslService Realm config for SSL.
* @return Initialized HTTPS client.
*/
public static CloseableHttpAsyncClient createHttpClient(final RealmConfig realmConfig, final SSLService sslService) {
try {
SpecialPermission.check();
return AccessController.doPrivileged((PrivilegedExceptionAction<CloseableHttpAsyncClient>) () -> {
final ConnectingIOReactor ioReactor = new DefaultConnectingIOReactor();
final String sslKey = RealmSettings.realmSslPrefix(realmConfig.identifier());
final SslProfile sslProfile = sslService.profile(sslKey);
final SSLContext clientContext = sslProfile.sslContext();
final HostnameVerifier verifier = sslProfile.hostnameVerifier();
final Registry<SchemeIOSessionStrategy> registry = RegistryBuilder.<SchemeIOSessionStrategy>create()
.register("http", NoopIOSessionStrategy.INSTANCE)
// TODO: Should this use profile.ioSessionStrategy4 ?
.register("https", new SSLIOSessionStrategy(clientContext, verifier))
.build();
final PoolingNHttpClientConnectionManager connectionManager = new PoolingNHttpClientConnectionManager(ioReactor, registry);
connectionManager.setDefaultMaxPerRoute(realmConfig.getSetting(JwtRealmSettings.HTTP_MAX_ENDPOINT_CONNECTIONS));
connectionManager.setMaxTotal(realmConfig.getSetting(JwtRealmSettings.HTTP_MAX_CONNECTIONS));
final RequestConfig requestConfig = RequestConfig.custom()
.setConnectTimeout(Math.toIntExact(realmConfig.getSetting(JwtRealmSettings.HTTP_CONNECT_TIMEOUT).getMillis()))
.setConnectionRequestTimeout(
Math.toIntExact(realmConfig.getSetting(JwtRealmSettings.HTTP_CONNECTION_READ_TIMEOUT).getMillis())
)
.setSocketTimeout(Math.toIntExact(realmConfig.getSetting(JwtRealmSettings.HTTP_SOCKET_TIMEOUT).getMillis()))
.build();
final HttpAsyncClientBuilder httpAsyncClientBuilder = HttpAsyncClients.custom()
.setConnectionManager(connectionManager)
.setDefaultRequestConfig(requestConfig);
if (realmConfig.hasSetting(HTTP_PROXY_HOST)) {
httpAsyncClientBuilder.setProxy(
new HttpHost(
realmConfig.getSetting(HTTP_PROXY_HOST),
realmConfig.getSetting(HTTP_PROXY_PORT),
realmConfig.getSetting(HTTP_PROXY_SCHEME)
)
);
}
final CloseableHttpAsyncClient httpAsyncClient = httpAsyncClientBuilder.build();
httpAsyncClient.start();
return httpAsyncClient;
});
} catch (PrivilegedActionException e) {
throw new IllegalStateException("Unable to create a HttpAsyncClient instance", e);
}
}
/**
* Use the HTTP Client to get URL content bytes.
* @param httpClient Configured HTTP/HTTPS client.
* @param uri URI to download.
*/
public static void readResponse(final CloseableHttpAsyncClient httpClient, final URI uri, ActionListener<JwksResponse> listener) {
AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
httpClient.execute(new HttpGet(uri), new FutureCallback<>() {
@Override
public void completed(final HttpResponse result) {
final StatusLine statusLine = result.getStatusLine();
final int statusCode = statusLine.getStatusCode();
if (statusCode == 200) {
final HttpEntity entity = result.getEntity();
try (InputStream inputStream = entity.getContent()) {
listener.onResponse(
new JwksResponse(
inputStream.readAllBytes(),
firstHeaderValue(result, "Expires"),
firstHeaderValue(result, "Cache-Control")
)
);
} catch (Exception e) {
listener.onFailure(e);
}
} else {
listener.onFailure(
new ElasticsearchSecurityException(
"Get [" + uri + "] failed, status [" + statusCode + "], reason [" + statusLine.getReasonPhrase() + "]."
)
);
}
}
@Override
public void failed(Exception e) {
listener.onFailure(new ElasticsearchSecurityException("Get [" + uri + "] failed.", e));
}
@Override
public void cancelled() {
listener.onFailure(new ElasticsearchSecurityException("Get [" + uri + "] was cancelled."));
}
});
return null;
});
}
private static String firstHeaderValue(final HttpResponse response, final String headerName) {
final Header header = response.getFirstHeader(headerName);
return header != null ? header.getValue() : null;
}
public static Path resolvePath(final Environment environment, final String jwkSetPath) {
final Path directoryPath = environment.configDir();
return directoryPath.resolve(jwkSetPath);
}
/**
* Concatenate values with separator strings.
* Same method signature as {@link java.lang.String#join(CharSequence, CharSequence...)}.
*
* @param delimiter Separator string between the concatenated values.
* @param secureStrings SecureString values to concatenate.
* @return SecureString of the concatenated values with separator strings.
*/
public static SecureString join(final CharSequence delimiter, final CharSequence... secureStrings) {
final StringBuilder sb = new StringBuilder();
for (int i = 0; i < secureStrings.length; i++) {
if (i != 0) {
sb.append(delimiter);
}
sb.append(secureStrings[i]); // allow null
}
return new SecureString(sb.toString().toCharArray());
}
public static byte[] sha256(final CharSequence charSequence) {
final MessageDigest messageDigest = MessageDigests.sha256();
messageDigest.update(charSequence.toString().getBytes(StandardCharsets.UTF_8));
return messageDigest.digest();
}
public static SignedJWT parseSignedJWT(SecureString token) {
if (token == null || token.isEmpty()) {
return null;
}
// a lightweight pre-check for JWTs
if (containsAtLeastTwoDots(token) == false) {
return null;
}
try {
SignedJWT signedJWT = SignedJWT.parse(token.toString());
// trigger claim set parsing (the parsed version will be cached internally)
signedJWT.getJWTClaimsSet();
return signedJWT;
} catch (ParseException e) {
LOGGER.debug("Failed to parse JWT bearer token", e);
return null;
}
}
/**
* Helper
|
JwtUtil
|
java
|
quarkusio__quarkus
|
devtools/cli/src/main/java/io/quarkus/cli/plugin/ShellCommand.java
|
{
"start": 250,
"end": 1196
}
|
class ____ implements PluginCommand, Callable<Integer> {
private String name;
private Path command;
private OutputOptionMixin output;
private final List<String> arguments = new ArrayList<>();
public ShellCommand() {
}
public ShellCommand(String name, Path command, OutputOptionMixin output) {
this.name = name;
this.command = command;
this.output = output;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<String> getCommand() {
return List.of(command.toString());
}
public List<String> getArguments() {
return arguments;
}
@Override
public void useArguments(List<String> arguments) {
this.arguments.clear();
this.arguments.addAll(arguments);
}
public OutputOptionMixin getOutput() {
return output;
}
}
|
ShellCommand
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/cluster/api/async/BaseNodeSelectionAsyncCommands.java
|
{
"start": 1294,
"end": 7616
}
|
interface ____<K, V> {
/**
* Post a message to a channel.
*
* @param channel the channel type: key.
* @param message the message type: value.
* @return Long integer-reply the number of clients that received the message.
*/
AsyncExecutions<Long> publish(K channel, V message);
/**
* Lists the currently *active channels*.
*
* @return List<K> array-reply a list of active channels, optionally matching the specified pattern.
*/
AsyncExecutions<List<K>> pubsubChannels();
/**
* Lists the currently *active channels*.
*
* @param channel the key.
* @return List<K> array-reply a list of active channels, optionally matching the specified pattern.
*/
AsyncExecutions<List<K>> pubsubChannels(K channel);
/**
* Returns the number of subscribers (not counting clients subscribed to patterns) for the specified channels.
*
* @param channels channel keys.
* @return array-reply a list of channels and number of subscribers for every channel.
*/
AsyncExecutions<Map<K, Long>> pubsubNumsub(K... channels);
/**
* Lists the currently *active shard channels*.
*
* @return List<K> array-reply a list of active channels.
*/
AsyncExecutions<List<K>> pubsubShardChannels();
/**
* Lists the currently *active shard channels*.
*
* @param pattern the pattern type: patternkey (pattern).
* @return List<K> array-reply a list of active channels, optionally matching the specified pattern.
*/
AsyncExecutions<List<K>> pubsubShardChannels(K pattern);
/**
* Returns the number of subscribers (not counting clients subscribed to patterns) for the specified shard channels.
*
* @param shardChannels channel keys.
* @return array-reply a list of channels and number of subscribers for every channel.
* @since 6.4
*/
AsyncExecutions<Map<K, Long>> pubsubShardNumsub(K... shardChannels);
/**
* Returns the number of subscriptions to patterns.
*
* @return Long integer-reply the number of patterns all the clients are subscribed to.
*/
AsyncExecutions<Long> pubsubNumpat();
/**
* Post a message to a shard channel.
*
* @param shardChannel the shard channel type: key.
* @param message the message type: value.
* @return Long integer-reply the number of clients that received the message.
* @since 6.4
*/
AsyncExecutions<Long> spublish(K shardChannel, V message);
/**
* Echo the given string.
*
* @param msg the message type: value.
* @return V bulk-string-reply.
*/
AsyncExecutions<V> echo(V msg);
/**
* Return the role of the instance in the context of replication.
*
* @return List<Object> array-reply where the first element is one of master, slave, sentinel and the additional
* elements are role-specific.
*/
AsyncExecutions<List<Object>> role();
/**
* Ping the server.
*
* @return String simple-string-reply.
*/
AsyncExecutions<String> ping();
/**
* Instructs Redis to disconnect the connection. Note that if auto-reconnect is enabled then Lettuce will auto-reconnect if
* the connection was disconnected. Use {@link io.lettuce.core.api.StatefulConnection#close} to close connections and
* release resources.
*
* @return String simple-string-reply always OK.
*/
AsyncExecutions<String> quit();
/**
* Wait for replication.
*
* @param replicas minimum number of replicas.
* @param timeout timeout in milliseconds.
* @return number of replicas.
*/
AsyncExecutions<Long> waitForReplication(int replicas, long timeout);
/**
* Dispatch a command to the Redis Server. Please note the command output type must fit to the command response.
*
* @param type the command, must not be {@code null}.
* @param output the command output, must not be {@code null}.
* @param <T> response type.
* @return the command response.
* @deprecated since 6.2, as {@link CommandOutput} is being reused for all responses of all nodes and that leads to unwanted
* behavior. Use {@link #dispatch(ProtocolKeyword, Supplier)} instead.
*/
@Deprecated
<T> AsyncExecutions<T> dispatch(ProtocolKeyword type, CommandOutput<K, V, T> output);
/**
* Dispatch a command to the Redis Server. Please note the command output type must fit to the command response.
*
* @param type the command, must not be {@code null}.
* @param outputSupplier the command output supplier, must not be {@code null}.
* @param <T> response type.
* @return the command response.
* @since 6.2
*/
<T> AsyncExecutions<T> dispatch(ProtocolKeyword type, Supplier<CommandOutput<K, V, T>> outputSupplier);
/**
* Dispatch a command to the Redis Server. Please note the command output type must fit to the command response.
*
* @param type the command, must not be {@code null}.
* @param output the command output, must not be {@code null}.
* @param args the command arguments, must not be {@code null}.
* @param <T> response type.
* @return the command response.
* @deprecated since 6.2, as {@link CommandOutput} is being reused for all responses of all nodes and that leads to unwanted
* behavior. Use {@link #dispatch(ProtocolKeyword, Supplier, CommandArgs)} instead.
*/
@Deprecated
<T> AsyncExecutions<T> dispatch(ProtocolKeyword type, CommandOutput<K, V, T> output, CommandArgs<K, V> args);
/**
* Dispatch a command to the Redis Server. Please note the command output type must fit to the command response.
*
* @param type the command, must not be {@code null}.
* @param outputSupplier the command output supplier, must not be {@code null}.
* @param args the command arguments, must not be {@code null}.
* @param <T> response type.
* @return the command response.
* @since 6.2
*/
<T> AsyncExecutions<T> dispatch(ProtocolKeyword type, Supplier<CommandOutput<K, V, T>> outputSupplier,
CommandArgs<K, V> args);
}
|
BaseNodeSelectionAsyncCommands
|
java
|
spring-projects__spring-boot
|
module/spring-boot-graphql/src/test/java/org/springframework/boot/graphql/autoconfigure/security/GraphQlWebFluxSecurityAutoConfigurationTests.java
|
{
"start": 6807,
"end": 7600
}
|
class ____ {
@Bean
SecurityWebFilterChain springWebFilterChain(ServerHttpSecurity http) {
return http.csrf(CsrfSpec::disable)
// Demonstrate that method security works
// Best practice to use both for defense in depth
.authorizeExchange((requests) -> requests.anyExchange().permitAll())
.httpBasic(withDefaults())
.build();
}
@Bean
@SuppressWarnings("deprecation")
MapReactiveUserDetailsService userDetailsService() {
User.UserBuilder userBuilder = User.withDefaultPasswordEncoder();
UserDetails rob = userBuilder.username("rob").password("rob").roles("USER").build();
UserDetails admin = userBuilder.username("admin").password("admin").roles("USER", "ADMIN").build();
return new MapReactiveUserDetailsService(rob, admin);
}
}
}
|
SecurityConfig
|
java
|
apache__dubbo
|
dubbo-remoting/dubbo-remoting-api/src/main/java/org/apache/dubbo/remoting/transport/AbstractEndpoint.java
|
{
"start": 1553,
"end": 4212
}
|
class ____ extends AbstractPeer implements Resetable {
protected final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(getClass());
private Codec2 codec;
private int connectTimeout;
public AbstractEndpoint(URL url, ChannelHandler handler) {
super(url, handler);
this.codec = getChannelCodec(url);
this.connectTimeout =
url.getPositiveParameter(Constants.CONNECT_TIMEOUT_KEY, Constants.DEFAULT_CONNECT_TIMEOUT);
}
protected AbstractEndpoint() {}
protected static Codec2 getChannelCodec(URL url) {
String codecName = url.getParameter(Constants.CODEC_KEY);
if (StringUtils.isEmpty(codecName)) {
// codec extension name must stay the same with protocol name
codecName = url.getProtocol();
}
FrameworkModel frameworkModel = getFrameworkModel(url.getScopeModel());
if (frameworkModel.getExtensionLoader(Codec2.class).hasExtension(codecName)) {
return frameworkModel.getExtensionLoader(Codec2.class).getExtension(codecName);
} else if (frameworkModel.getExtensionLoader(Codec.class).hasExtension(codecName)) {
return new CodecAdapter(
frameworkModel.getExtensionLoader(Codec.class).getExtension(codecName));
} else {
return frameworkModel.getExtensionLoader(Codec2.class).getExtension("default");
}
}
@Override
public void reset(URL url) {
if (isClosed()) {
throw new IllegalStateException(
"Failed to reset parameters " + url + ", cause: Channel closed. channel: " + getLocalAddress());
}
try {
if (url.hasParameter(Constants.CONNECT_TIMEOUT_KEY)) {
int t = url.getParameter(Constants.CONNECT_TIMEOUT_KEY, 0);
if (t > 0) {
this.connectTimeout = t;
}
}
} catch (Throwable t) {
logger.error(INTERNAL_ERROR, "", "", t.getMessage(), t);
}
try {
if (url.hasParameter(Constants.CODEC_KEY)) {
this.codec = getChannelCodec(url);
}
} catch (Throwable t) {
logger.error(INTERNAL_ERROR, "unknown error in remoting module", "", t.getMessage(), t);
}
}
@Deprecated
public void reset(org.apache.dubbo.common.Parameters parameters) {
reset(getUrl().addParameters(parameters.getParameters()));
}
protected Codec2 getCodec() {
return codec;
}
protected int getConnectTimeout() {
return connectTimeout;
}
}
|
AbstractEndpoint
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/OptionalMapToOptionalTest.java
|
{
"start": 1224,
"end": 1681
}
|
class ____ {
public boolean test(Optional<Integer> optional) {
// BUG: Diagnostic contains:
return optional.map(i -> Optional.of(1)).isPresent();
}
}
""")
.doTest();
}
@Test
public void positiveWithGuavaOptional() {
helper
.addSourceLines(
"Test.java",
"""
import com.google.common.base.Optional;
|
Test
|
java
|
apache__camel
|
components/camel-http/src/main/java/org/apache/camel/component/http/OAuth2ClientConfigurer.java
|
{
"start": 1950,
"end": 7273
}
|
class ____ extends ServiceSupport implements HttpClientConfigurer {
private final String clientId;
private final String clientSecret;
private final String tokenEndpoint;
private final String scope;
private final boolean cacheTokens;
private final Long cachedTokensDefaultExpirySeconds;
private final Long cachedTokensExpirationMarginSeconds;
private final static ConcurrentMap<OAuth2URIAndCredentials, TokenCache> tokenCache = new ConcurrentHashMap<>();
private final boolean useBodyAuthentication;
private final String resourceIndicator;
private HttpClient httpClient;
public OAuth2ClientConfigurer(String clientId, String clientSecret, String tokenEndpoint, String resourceIndicator,
String scope, boolean cacheTokens,
long cachedTokensDefaultExpirySeconds, long cachedTokensExpirationMarginSeconds,
boolean useBodyAuthentication) {
this.clientId = clientId;
this.clientSecret = clientSecret;
this.tokenEndpoint = tokenEndpoint;
this.resourceIndicator = resourceIndicator;
this.scope = scope;
this.cacheTokens = cacheTokens;
this.cachedTokensDefaultExpirySeconds = cachedTokensDefaultExpirySeconds;
this.cachedTokensExpirationMarginSeconds = cachedTokensExpirationMarginSeconds;
this.useBodyAuthentication = useBodyAuthentication;
}
@Override
public void configureHttpClient(HttpClientBuilder clientBuilder) {
// create a new http client only used for oauth token requests
this.httpClient = clientBuilder.build();
clientBuilder.addRequestInterceptorFirst((HttpRequest request, EntityDetails entity, HttpContext context) -> {
URI requestUri = getUriFromRequest(request);
OAuth2URIAndCredentials uriAndCredentials = new OAuth2URIAndCredentials(requestUri, clientId, clientSecret);
if (cacheTokens) {
if (tokenCache.containsKey(uriAndCredentials)
&& !tokenCache.get(uriAndCredentials).isExpiredWithMargin(cachedTokensExpirationMarginSeconds)) {
request.setHeader(HttpHeaders.AUTHORIZATION, "Bearer " + tokenCache.get(uriAndCredentials).getToken());
} else {
JsonObject accessTokenResponse = getAccessTokenResponse(httpClient);
String accessToken = accessTokenResponse.getString("access_token");
String expiresIn = accessTokenResponse.getString("expires_in");
if (expiresIn != null && !expiresIn.isEmpty()) {
tokenCache.put(uriAndCredentials, new TokenCache(accessToken, expiresIn));
} else if (cachedTokensDefaultExpirySeconds > 0) {
tokenCache.put(uriAndCredentials, new TokenCache(accessToken, cachedTokensDefaultExpirySeconds));
}
request.setHeader(HttpHeaders.AUTHORIZATION, "Bearer " + accessToken);
}
} else {
JsonObject accessTokenResponse = getAccessTokenResponse(httpClient);
String accessToken = accessTokenResponse.getString("access_token");
request.setHeader(HttpHeaders.AUTHORIZATION, "Bearer " + accessToken);
}
});
}
private JsonObject getAccessTokenResponse(HttpClient httpClient) throws IOException {
String bodyStr = "grant_type=client_credentials";
if (scope != null) {
bodyStr += "&scope=" + scope;
}
final HttpPost httpPost = new HttpPost(tokenEndpoint);
if (useBodyAuthentication) {
bodyStr += "&client_id=" + clientId;
bodyStr += "&client_secret=" + clientSecret;
} else {
httpPost.addHeader(HttpHeaders.AUTHORIZATION,
HttpCredentialsHelper.generateBasicAuthHeader(clientId, clientSecret));
}
if (null != resourceIndicator) {
bodyStr = String.join(bodyStr, "&resource=" + resourceIndicator);
}
httpPost.setEntity(new StringEntity(bodyStr, ContentType.APPLICATION_FORM_URLENCODED));
AtomicReference<JsonObject> result = new AtomicReference<>();
httpClient.execute(httpPost, response -> {
try {
String responseString = EntityUtils.toString(response.getEntity());
if (response.getCode() == 200) {
result.set((JsonObject) Jsoner.deserialize(responseString));
} else {
throw new HttpException(
"Received error response from token request with Status Code: " + response.getCode());
}
} catch (DeserializationException e) {
throw new HttpException("Something went wrong when reading token request response", e);
}
return null;
});
return result.get();
}
private URI getUriFromRequest(HttpRequest request) {
URI result;
try {
result = request.getUri();
} catch (URISyntaxException e) {
throw new RuntimeException(e);
}
return result;
}
private static
|
OAuth2ClientConfigurer
|
java
|
spring-projects__spring-boot
|
module/spring-boot-session/src/main/java/org/springframework/boot/session/actuate/endpoint/SessionsEndpoint.java
|
{
"start": 1535,
"end": 2966
}
|
class ____ {
private final SessionRepository<? extends Session> sessionRepository;
private final @Nullable FindByIndexNameSessionRepository<? extends Session> indexedSessionRepository;
/**
* Create a new {@link SessionsEndpoint} instance.
* @param sessionRepository the session repository
* @param indexedSessionRepository the indexed session repository
*/
public SessionsEndpoint(SessionRepository<? extends Session> sessionRepository,
@Nullable FindByIndexNameSessionRepository<? extends Session> indexedSessionRepository) {
Assert.notNull(sessionRepository, "'sessionRepository' must not be null");
this.sessionRepository = sessionRepository;
this.indexedSessionRepository = indexedSessionRepository;
}
@ReadOperation
public @Nullable SessionsDescriptor sessionsForUsername(String username) {
if (this.indexedSessionRepository == null) {
return null;
}
Map<String, ? extends Session> sessions = this.indexedSessionRepository.findByPrincipalName(username);
return new SessionsDescriptor(sessions);
}
@ReadOperation
public @Nullable SessionDescriptor getSession(@Selector String sessionId) {
Session session = this.sessionRepository.findById(sessionId);
if (session == null) {
return null;
}
return new SessionDescriptor(session);
}
@DeleteOperation
public void deleteSession(@Selector String sessionId) {
this.sessionRepository.deleteById(sessionId);
}
}
|
SessionsEndpoint
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/common/time/EpochTimeTests.java
|
{
"start": 818,
"end": 14212
}
|
class ____ extends ESTestCase {
public void testNegativeEpochMillis() {
DateFormatter formatter = MILLIS_FORMATTER;
// validate that negative epoch millis around rounded appropriately by the parser
LongSupplier supplier = () -> 0L;
{
Instant instant = formatter.toDateMathParser().parse("0", supplier, true, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00.000999999Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-0", supplier, true, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00.000999999Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("0", supplier, false, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-0", supplier, false, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("1", supplier, true, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00.001999999Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-1", supplier, true, ZoneId.of("UTC"));
assertEquals("1969-12-31T23:59:59.999999999Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("1", supplier, false, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00.001Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-1", supplier, false, ZoneId.of("UTC"));
assertEquals("1969-12-31T23:59:59.999Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("0.999999", supplier, true, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00.000999999Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-0.999999", supplier, true, ZoneId.of("UTC"));
assertEquals("1969-12-31T23:59:59.999000001Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("0.999999", supplier, false, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00.000999999Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-0.999999", supplier, false, ZoneId.of("UTC"));
assertEquals("1969-12-31T23:59:59.999000001Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("6250000430768", supplier, true, ZoneId.of("UTC"));
assertEquals("2168-01-20T23:13:50.768999999Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-6250000430768", supplier, true, ZoneId.of("UTC"));
assertEquals("1771-12-12T00:46:09.232999999Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("6250000430768", supplier, false, ZoneId.of("UTC"));
assertEquals("2168-01-20T23:13:50.768Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-6250000430768", supplier, false, ZoneId.of("UTC"));
assertEquals("1771-12-12T00:46:09.232Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("0.123450", supplier, true, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00.000123450Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-0.123450", supplier, true, ZoneId.of("UTC"));
assertEquals("1969-12-31T23:59:59.999876550Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("0.123450", supplier, false, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00.000123450Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-0.123450", supplier, false, ZoneId.of("UTC"));
assertEquals("1969-12-31T23:59:59.999876550Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("0.123456", supplier, true, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00.000123456Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-0.123456", supplier, true, ZoneId.of("UTC"));
assertEquals("1969-12-31T23:59:59.999876544Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("0.123456", supplier, false, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00.000123456Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-0.123456", supplier, false, ZoneId.of("UTC"));
assertEquals("1969-12-31T23:59:59.999876544Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("86400000", supplier, true, ZoneId.of("UTC"));
assertEquals("1970-01-02T00:00:00.000999999Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-86400000", supplier, true, ZoneId.of("UTC"));
assertEquals("1969-12-31T00:00:00.000999999Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("86400000", supplier, false, ZoneId.of("UTC"));
assertEquals("1970-01-02T00:00:00Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-86400000", supplier, false, ZoneId.of("UTC"));
assertEquals("1969-12-31T00:00:00Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("86400000.999999", supplier, true, ZoneId.of("UTC"));
assertEquals("1970-01-02T00:00:00.000999999Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-86400000.999999", supplier, true, ZoneId.of("UTC"));
assertEquals("1969-12-30T23:59:59.999000001Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("86400000.999999", supplier, false, ZoneId.of("UTC"));
assertEquals("1970-01-02T00:00:00.000999999Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-86400000.999999", supplier, false, ZoneId.of("UTC"));
assertEquals("1969-12-30T23:59:59.999000001Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("200.89", supplier, true, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00.200890Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-200.89", supplier, true, ZoneId.of("UTC"));
assertEquals("1969-12-31T23:59:59.799110Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("200.89", supplier, false, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00.200890Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-200.89", supplier, false, ZoneId.of("UTC"));
assertEquals("1969-12-31T23:59:59.799110Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("200.", supplier, true, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00.200Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-200.", supplier, true, ZoneId.of("UTC"));
assertEquals("1969-12-31T23:59:59.800Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("200.", supplier, false, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00.200Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-200.", supplier, false, ZoneId.of("UTC"));
assertEquals("1969-12-31T23:59:59.800Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("0.200", supplier, true, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00.000200Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-0.200", supplier, true, ZoneId.of("UTC"));
assertEquals("1969-12-31T23:59:59.999800Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("0.200", supplier, false, ZoneId.of("UTC"));
assertEquals("1970-01-01T00:00:00.000200Z", instant.toString());
}
{
Instant instant = formatter.toDateMathParser().parse("-0.200", supplier, false, ZoneId.of("UTC"));
assertEquals("1969-12-31T23:59:59.999800Z", instant.toString());
}
{
ElasticsearchParseException e = expectThrows(
ElasticsearchParseException.class,
() -> formatter.toDateMathParser().parse(".200", supplier, true, ZoneId.of("UTC"))
);
assertThat(e.getMessage().split(":")[0], is("failed to parse date field [.200] with format [epoch_millis]"));
}
{
ElasticsearchParseException e = expectThrows(
ElasticsearchParseException.class,
() -> formatter.toDateMathParser().parse("-.200", supplier, true, ZoneId.of("UTC"))
);
assertThat(e.getMessage().split(":")[0], is("failed to parse date field [-.200] with format [epoch_millis]"));
}
{
ElasticsearchParseException e = expectThrows(
ElasticsearchParseException.class,
() -> formatter.toDateMathParser().parse(".200", supplier, false, ZoneId.of("UTC"))
);
assertThat(e.getMessage().split(":")[0], is("failed to parse date field [.200] with format [epoch_millis]"));
}
{
ElasticsearchParseException e = expectThrows(
ElasticsearchParseException.class,
() -> formatter.toDateMathParser().parse("-.200", supplier, false, ZoneId.of("UTC"))
);
assertThat(e.getMessage().split(":")[0], is("failed to parse date field [-.200] with format [epoch_millis]"));
}
// tilda was included in the parsers at one point for delineating negative and positive infinity rounding and we want to
// ensure it doesn't show up unexpectedly in the parser with its original "~" value
{
ElasticsearchParseException e = expectThrows(
ElasticsearchParseException.class,
() -> formatter.toDateMathParser().parse("~-0.200", supplier, false, ZoneId.of("UTC"))
);
assertThat(e.getMessage().split(":")[0], is("failed to parse date field [~-0.200] with format [epoch_millis]"));
}
{
ElasticsearchParseException e = expectThrows(
ElasticsearchParseException.class,
() -> formatter.toDateMathParser().parse("~0.200", supplier, false, ZoneId.of("UTC"))
);
assertThat(e.getMessage().split(":")[0], is("failed to parse date field [~0.200] with format [epoch_millis]"));
}
{
ElasticsearchParseException e = expectThrows(
ElasticsearchParseException.class,
() -> formatter.toDateMathParser().parse("~-1", supplier, false, ZoneId.of("UTC"))
);
assertThat(e.getMessage().split(":")[0], is("failed to parse date field [~-1] with format [epoch_millis]"));
}
{
ElasticsearchParseException e = expectThrows(
ElasticsearchParseException.class,
() -> formatter.toDateMathParser().parse("~1", supplier, false, ZoneId.of("UTC"))
);
assertThat(e.getMessage().split(":")[0], is("failed to parse date field [~1] with format [epoch_millis]"));
}
{
ElasticsearchParseException e = expectThrows(
ElasticsearchParseException.class,
() -> formatter.toDateMathParser().parse("~-1.", supplier, false, ZoneId.of("UTC"))
);
assertThat(e.getMessage().split(":")[0], is("failed to parse date field [~-1.] with format [epoch_millis]"));
}
{
ElasticsearchParseException e = expectThrows(
ElasticsearchParseException.class,
() -> formatter.toDateMathParser().parse("~1.", supplier, false, ZoneId.of("UTC"))
);
assertThat(e.getMessage().split(":")[0], is("failed to parse date field [~1.] with format [epoch_millis]"));
}
}
}
|
EpochTimeTests
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/core/annotation/AnnotationFilter.java
|
{
"start": 860,
"end": 1253
}
|
interface ____ been
* designed for) always ignores lang annotations according to the {@link #PLAIN}
* filter (for efficiency reasons). Any additional filters and even custom filter
* implementations apply within this boundary and may only narrow further from here.
*
* @author Phillip Webb
* @author Juergen Hoeller
* @since 5.2
* @see MergedAnnotations
*/
@FunctionalInterface
public
|
has
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/inheritance/ManyToOneInheritanceSubTypeTest.java
|
{
"start": 6593,
"end": 6676
}
|
class ____ extends JoinedA {
}
@Entity( name = "JoinedB" )
public static
|
SubJoinedA
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestAppendSnapshotTruncate.java
|
{
"start": 9090,
"end": 13365
}
|
class ____ extends Worker {
final Path file;
final File localFile;
FileWorker(Path dir, File localDir, String filename) throws IOException {
super(filename);
this.file = new Path(dir, filename);
this.localFile = new File(localDir, filename);
localFile.createNewFile();
dfs.create(file, false, 4096, REPLICATION, BLOCK_SIZE).close();
}
@Override
public String call() throws IOException {
final int op = ThreadLocalRandom.current().nextInt(9);
if (op == 0) {
return checkFullFile();
} else {
final int nBlocks = ThreadLocalRandom.current().nextInt(4) + 1;
final int lastBlockSize = ThreadLocalRandom.current()
.nextInt(BLOCK_SIZE) + 1;
final int nBytes = nBlocks*BLOCK_SIZE + lastBlockSize;
if (op <= 4) {
return append(nBytes);
} else if (op <= 6) {
return truncateArbitrarily(nBytes);
} else {
return truncateToBlockBoundary(nBlocks);
}
}
}
String append(int n) throws IOException {
final StringBuilder b = new StringBuilder("append ")
.append(n).append(" bytes to ").append(file.getName());
final byte[] bytes = new byte[n];
ThreadLocalRandom.current().nextBytes(bytes);
{ // write to local file
final FileOutputStream out = new FileOutputStream(localFile, true);
out.write(bytes, 0, bytes.length);
out.close();
}
{
final FSDataOutputStream out = dfs.append(file);
out.write(bytes, 0, bytes.length);
out.close();
}
return b.toString();
}
String truncateArbitrarily(int nBytes) throws IOException {
Preconditions.checkArgument(nBytes > 0);
final int length = checkLength();
final StringBuilder b = new StringBuilder("truncateArbitrarily: ")
.append(nBytes).append(" bytes from ").append(file.getName())
.append(", length=" + length);
truncate(length > nBytes? length - nBytes: 0, b);
return b.toString();
}
String truncateToBlockBoundary(int nBlocks) throws IOException {
Preconditions.checkArgument(nBlocks > 0);
final int length = checkLength();
final StringBuilder b = new StringBuilder("truncateToBlockBoundary: ")
.append(nBlocks).append(" blocks from ").append(file.getName())
.append(", length=" + length);
final int n = (nBlocks - 1)*BLOCK_SIZE + (length%BLOCK_SIZE);
Preconditions.checkState(truncate(length > n? length - n: 0, b), b);
return b.toString();
}
private boolean truncate(long newLength, StringBuilder b) throws IOException {
final RandomAccessFile raf = new RandomAccessFile(localFile, "rw");
raf.setLength(newLength);
raf.close();
final boolean isReady = dfs.truncate(file, newLength);
b.append(", newLength=").append(newLength)
.append(", isReady=").append(isReady);
if (!isReady) {
TestFileTruncate.checkBlockRecovery(file, dfs, 100, 300L);
}
return isReady;
}
int checkLength() throws IOException {
return checkLength(file, localFile);
}
static int checkLength(Path file, File localFile) throws IOException {
final long length = dfs.getFileStatus(file).getLen();
assertEquals(localFile.length(), length);
assertTrue(length <= Integer.MAX_VALUE);
return (int)length;
}
String checkFullFile() throws IOException {
return checkFullFile(file, localFile);
}
static String checkFullFile(Path file, File localFile) throws IOException {
final StringBuilder b = new StringBuilder("checkFullFile: ")
.append(file.getName()).append(" vs ").append(localFile);
final byte[] bytes = new byte[checkLength(file, localFile)];
b.append(", length=").append(bytes.length);
final FileInputStream in = new FileInputStream(localFile);
for(int n = 0; n < bytes.length; ) {
n += in.read(bytes, n, bytes.length - n);
}
in.close();
AppendTestUtil.checkFullFile(dfs, file, bytes.length, bytes,
"File content mismatch: " + b, false);
return b.toString();
}
}
static abstract
|
FileWorker
|
java
|
quarkusio__quarkus
|
extensions/grpc/deployment/src/test/java/io/quarkus/grpc/deployment/GrpcServerProcessorTest.java
|
{
"start": 4013,
"end": 4195
}
|
class ____ extends BlockingRoot {
static final Set<String> EXPECTED = ImmutableSet.of();
void method() {
}
}
@NonBlocking
static
|
ExtendsBlockingRoot
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/atomic/longadder/LongAdderAssert_hasValue_Test.java
|
{
"start": 1016,
"end": 1951
}
|
class ____ {
@Test
void should_pass_when_actual_has_the_expected_value() {
long initialValue = 123L;
LongAdder actual = new LongAdder();
actual.add(initialValue);
assertThat(actual).hasValue(initialValue);
}
@Test
void should_fail_when_actual_does_not_have_the_expected_value() {
long initialValue = 123L;
LongAdder actual = new LongAdder();
actual.add(initialValue);
long expectedValue = 1234L;
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(actual).hasValue(expectedValue))
.withMessage(shouldHaveValue(actual, expectedValue).create());
}
@Test
void should_fail_when_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> {
LongAdder actual = null;
assertThat(actual).hasValue(1234L);
}).withMessage(actualIsNull());
}
}
|
LongAdderAssert_hasValue_Test
|
java
|
spring-projects__spring-boot
|
loader/spring-boot-loader-tools/src/test/java/org/springframework/boot/loader/tools/layer/ApplicationContentFilterTests.java
|
{
"start": 991,
"end": 1936
}
|
class ____ {
@Test
@SuppressWarnings("NullAway") // Test null check
void createWhenPatternIsNullThrowsException() {
assertThatIllegalArgumentException().isThrownBy(() -> new ApplicationContentFilter(null))
.withMessage("'pattern' must not be empty");
}
@Test
void createWhenPatternIsEmptyThrowsException() {
assertThatIllegalArgumentException().isThrownBy(() -> new ApplicationContentFilter(""))
.withMessage("'pattern' must not be empty");
}
@Test
void matchesWhenWildcardPatternMatchesReturnsTrue() {
ApplicationContentFilter filter = new ApplicationContentFilter("META-INF/**");
assertThat(filter.matches("META-INF/resources/application.yml")).isTrue();
}
@Test
void matchesWhenWildcardPatternDoesNotMatchReturnsFalse() {
ApplicationContentFilter filter = new ApplicationContentFilter("META-INF/**");
assertThat(filter.matches("src/main/resources/application.yml")).isFalse();
}
}
|
ApplicationContentFilterTests
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.