language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/support/TestConstructorUtils.java | {
"start": 5910,
"end": 6412
} | class ____
* {@link TestConstructor#autowireMode() autowireMode} set to
* {@link AutowireMode#ALL ALL}.</li>
* <li>The default <em>test constructor autowire mode</em> has been set to
* {@code ALL} in {@link SpringProperties} or in the supplied fallback
* {@link PropertyProvider} (see
* {@link TestConstructor#TEST_CONSTRUCTOR_AUTOWIRE_MODE_PROPERTY_NAME}).</li>
* </ol>
* @param constructor a constructor for the test class
* @param testClass the test class, typically the declaring | with |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/CheckReturnValueTest.java | {
"start": 33479,
"end": 33971
} | class ____ {
void foo() {
makeBarOrThrow();
}
String makeBarOrThrow() {
throw new UnsupportedOperationException();
}
}
""")
.addOutputLines(
"Test.java",
"""
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.errorprone.annotations.CheckReturnValue;
@CheckReturnValue
| Test |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit4/rules/EnabledAndIgnoredSpringRuleTests.java | {
"start": 896,
"end": 1161
} | class ____ an extension of {@link EnabledAndIgnoredSpringRunnerTests}
* that has been modified to use {@link SpringClassRule} and
* {@link SpringMethodRule}.
*
* @author Sam Brannen
* @since 4.2
*/
@RunWith(JUnit4.class)
@SuppressWarnings("deprecation")
public | is |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java | {
"start": 1792,
"end": 4122
} | class ____ extends UnaryScalarFunction implements ConvertFunction {
// the numeric types convert functions need to handle; the other numeric types are converted upstream to one of these
private static final List<DataType> NUMERIC_TYPES = List.of(DataType.INTEGER, DataType.LONG, DataType.UNSIGNED_LONG, DataType.DOUBLE);
protected AbstractConvertFunction(Source source, Expression field) {
super(source, field);
}
protected AbstractConvertFunction(StreamInput in) throws IOException {
this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class));
}
/**
* Build the evaluator given the evaluator a multivalued field.
*/
protected final ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) {
DataType sourceType = field().dataType().widenSmallNumeric();
var factory = factories().get(sourceType);
if (factory == null) {
throw EsqlIllegalArgumentException.illegalDataType(sourceType);
}
return factory.build(source(), fieldEval);
}
@Override
protected TypeResolution resolveType() {
if (childrenResolved() == false) {
return new TypeResolution("Unresolved children");
}
return isTypeOrUnionType(field(), factories()::containsKey, sourceText(), null, supportedTypesNames(supportedTypes()));
}
@Override
public Set<DataType> supportedTypes() {
return factories().keySet();
}
static String supportedTypesNames(Set<DataType> types) {
List<String> supportedTypesNames = new ArrayList<>(types.size());
HashSet<DataType> supportTypes = new HashSet<>(types);
if (supportTypes.containsAll(NUMERIC_TYPES)) {
supportedTypesNames.add("numeric");
NUMERIC_TYPES.forEach(supportTypes::remove);
}
if (types.containsAll(DataType.stringTypes())) {
supportedTypesNames.add("string");
DataType.stringTypes().forEach(supportTypes::remove);
}
supportTypes.forEach(t -> supportedTypesNames.add(t.nameUpper().toLowerCase(Locale.ROOT)));
supportedTypesNames.sort(String::compareTo);
return Strings.join(supportedTypesNames, " or ");
}
@FunctionalInterface
public | AbstractConvertFunction |
java | quarkusio__quarkus | extensions/kubernetes/spi/src/main/java/io/quarkus/kubernetes/spi/ConfigurationSupplierBuildItem.java | {
"start": 281,
"end": 785
} | class ____ extends MultiBuildItem {
/**
* The configuration supplier
*/
private final Object configurationSupplier;
public ConfigurationSupplierBuildItem(Object configurationSupplier) {
this.configurationSupplier = configurationSupplier;
}
public Object getConfigurationSupplier() {
return this.configurationSupplier;
}
public boolean matches(Class type) {
return type.isInstance(configurationSupplier);
}
}
| ConfigurationSupplierBuildItem |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/common/typeutils/base/ShortComparatorTest.java | {
"start": 1074,
"end": 2336
} | class ____ extends ComparatorTestBase<Short> {
@Override
protected TypeComparator<Short> createComparator(boolean ascending) {
return new ShortComparator(ascending);
}
@Override
protected TypeSerializer<Short> createSerializer() {
return new ShortSerializer();
}
@Override
protected Short[] getSortedTestData() {
Random rnd = new Random(874597969123412338L);
short rndShort = Integer.valueOf(rnd.nextInt()).shortValue();
if (rndShort < 0) {
rndShort = Integer.valueOf(-rndShort).shortValue();
}
if (rndShort == Short.MAX_VALUE) {
rndShort -= 3;
}
if (rndShort <= 2) {
rndShort += 3;
}
return new Short[] {
Short.valueOf(Short.MIN_VALUE),
Short.valueOf(Integer.valueOf(-rndShort).shortValue()),
Short.valueOf(Integer.valueOf(-1).shortValue()),
Short.valueOf(Integer.valueOf(0).shortValue()),
Short.valueOf(Integer.valueOf(1).shortValue()),
Short.valueOf(Integer.valueOf(2).shortValue()),
Short.valueOf(Integer.valueOf(rndShort).shortValue()),
Short.valueOf(Short.MAX_VALUE)
};
}
}
| ShortComparatorTest |
java | quarkusio__quarkus | integration-tests/maven/src/test/java/io/quarkus/maven/it/PackageIT.java | {
"start": 955,
"end": 20665
} | class ____ extends MojoTestBase {
private RunningInvoker running;
private File testDir;
@Test
public void testConfigTracking() throws Exception {
testDir = initProject("projects/config-tracking");
running = new RunningInvoker(testDir, false);
var configDump = new File(new File(testDir, ".quarkus"), "quarkus-prod-config-dump");
var configCheck = new File(new File(testDir, "target"), "quarkus-prod-config-check");
// initial build that generates .quarkus/quarkus-prod-config-dump
var result = running.execute(List.of("clean package -DskipTests"), Map.of());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
assertThat(configDump).exists();
assertThat(configCheck).doesNotExist();
// rebuild and compare the files
result = running.execute(List.of("package -DskipTests"), Map.of());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
assertThat(configDump).exists();
assertThat(configCheck).exists();
assertThat(configDump).hasSameTextualContentAs(configCheck);
var props = new Properties();
try (BufferedReader reader = Files.newBufferedReader(configDump.toPath())) {
props.load(reader);
}
assertThat(props).containsEntry("quarkus.application.name", HashUtil.sha512("code-with-quarkus"));
assertThat(props).doesNotContainKey("quarkus.platform.group-id");
for (var name : props.stringPropertyNames()) {
assertThat(name).doesNotStartWith("quarkus.test.");
}
result = running.execute(List.of("package -DskipTests -Dquarkus.package.jar.type=uber-jar"), Map.of());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
assertThat(running.log())
.contains("Option quarkus.package.jar.type has changed since the last build from fast-jar to uber-jar");
}
@Test
public void testConfigTrackingCustomFile() throws Exception {
testDir = initProject("projects/config-tracking-custom-file");
running = new RunningInvoker(testDir, false);
var configDump = new File(new File(testDir, ".quarkus"), "quarkus-prod-used-config-options");
var configCheck = new File(new File(testDir, "target"), "quarkus-prod-config-check");
// initial build that generates .quarkus/quarkus-prod-config-dump
var result = running.execute(List.of("clean package -DskipTests"), Map.of());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
assertThat(configDump).exists();
assertThat(configCheck).doesNotExist();
// rebuild and compare the files
result = running.execute(List.of("clean package -DskipTests"), Map.of());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
assertThat(configDump).exists();
assertThat(configCheck).exists();
assertThat(configDump).hasSameTextualContentAs(configCheck);
var props = new Properties();
try (BufferedReader reader = Files.newBufferedReader(configDump.toPath())) {
props.load(reader);
}
assertThat(props).containsEntry("quarkus.application.name", HashUtil.sha512("code-with-quarkus"));
assertThat(props).doesNotContainKey("quarkus.platform.group-id");
for (var name : props.stringPropertyNames()) {
assertThat(name).doesNotStartWith("quarkus.test.");
}
result = running.execute(List.of("package -DskipTests -Dquarkus.package.jar.type=uber-jar"), Map.of());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
assertThat(running.log())
.contains("Option quarkus.package.jar.type has changed since the last build from fast-jar to uber-jar");
}
@Test
public void testPluginClasspathConfig() throws Exception {
testDir = initProject("projects/test-plugin-classpath-config");
running = new RunningInvoker(testDir, false);
final MavenProcessInvocationResult result = running.execute(Collections.singletonList("package"),
Collections.emptyMap());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
}
@Test
public void testExtensionRemovedResources() throws Exception {
testDir = initProject("projects/extension-removed-resources");
running = new RunningInvoker(testDir, false);
final MavenProcessInvocationResult result = running.execute(List.of("verify"), Map.of());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
}
@Test
public void testExtensionTestWithNoMain() throws Exception {
testDir = initProject("projects/extension-test-with-no-main");
running = new RunningInvoker(testDir, false);
final MavenProcessInvocationResult result = running.execute(List.of("verify"), Map.of());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
}
@Test
public void testUberJarMavenPluginConfiguration()
throws MavenInvocationException, IOException, InterruptedException {
testDir = initProject("projects/uberjar-maven-plugin-config");
running = new RunningInvoker(testDir, false);
final MavenProcessInvocationResult result = running.execute(List.of("install", "-DskipTests"), Map.of());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
final Path localRunner = getTargetDir().toPath().resolve("acme-quarkus-runner.jar");
assertThat(localRunner).exists();
// make sure the runner jar was attached, there was a bug of the runner jar not being attached when the
// finalName option in the Quarkus plugin didn't match the finalName ocnfigured in the POM's build section
final Path installedRunner = running.getLocalRepositoryDirectory().toPath().resolve("org").resolve("acme")
.resolve("acme")
.resolve("1.0-SNAPSHOT").resolve("acme-1.0-SNAPSHOT-runner.jar");
assertThat(installedRunner).exists();
verifyUberJar();
}
private void ensureManifestOfJarIsReadableByJarInputStream(File jar) throws IOException {
try (InputStream fileInputStream = new FileInputStream(jar)) {
try (JarInputStream stream = new JarInputStream(fileInputStream)) {
Manifest manifest = stream.getManifest();
assertThat(manifest).isNotNull();
}
}
}
@Test
public void testQuarkusPackageOutputDirectory()
throws MavenInvocationException, IOException, InterruptedException {
testDir = initProject("projects/quarkus.package.output-directory");
running = new RunningInvoker(testDir, false);
// we do want to run the tests too
final MavenProcessInvocationResult result = running.execute(Collections.singletonList("package"),
Collections.emptyMap());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
File targetDir = getTargetDir();
List<File> jars = getFilesEndingWith(targetDir, ".jar");
assertThat(jars).hasSize(1);
targetDir = new File(targetDir, "custom-output-dir");
assertThat(targetDir).exists();
jars = getFilesEndingWith(targetDir, ".jar");
assertThat(jars).hasSize(1);
}
/**
* POM files are often found among the project's dependencies.
* This test makes sure such projects can be built with mutable-jar format
* without choking on non-jar dependencies.
*/
@Test
public void testDependencyOnPomMutableJar()
throws MavenInvocationException, IOException, InterruptedException {
testDir = initProject("projects/dependency-on-pom");
running = new RunningInvoker(testDir, false);
// we do want to run the tests too
final MavenProcessInvocationResult result = running.execute(List.of("package"),
Collections.emptyMap());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
File targetDir = getTargetDir();
List<File> jars = getFilesEndingWith(targetDir, ".jar");
assertThat(jars).hasSize(1);
}
@Test
public void testPackageWorksWhenUberjarIsTrue()
throws MavenInvocationException, IOException, InterruptedException {
testDir = initProject("projects/uberjar-check");
createAndVerifyUberJar();
// ensure that subsequent package without clean also works
createAndVerifyUberJar();
}
private void createAndVerifyUberJar() throws IOException, MavenInvocationException, InterruptedException {
Properties p = new Properties();
p.setProperty("quarkus.package.jar.type", "uber-jar");
running = new RunningInvoker(testDir, false);
final MavenProcessInvocationResult result = running.execute(List.of("package"),
Map.of(), p);
assertThat(result.getProcess().waitFor()).isEqualTo(0);
verifyUberJar();
}
private void verifyUberJar() throws IOException {
final File targetDir = getTargetDir();
List<File> jars = getFilesEndingWith(targetDir, ".jar");
assertThat(jars).hasSize(1);
assertThat(getNumberOfFilesEndingWith(targetDir, ".original")).isEqualTo(1);
File uberJar = jars.get(0);
assertMultiReleaseJar(uberJar);
ensureManifestOfJarIsReadableByJarInputStream(uberJar);
}
protected void assertMultiReleaseJar(File uberJar) throws IOException {
try (JarFile jarFile = new JarFile(uberJar)) {
// we expect this uber jar to be a multi-release jar since one of its
// dependencies (smallrye-classloader artifact), from which we composed this uber-jar,
// is a multi-release jar
Assertions.assertTrue(jarFile.isMultiRelease(), "uber-jar " + uberJar
+ " was expected to be a multi-release jar but wasn't");
}
}
@Test
public void testUberJarWithoutRunnerSuffix()
throws Exception {
testDir = initProject("projects/uberjar-check", "projects/uberjar-runner-suffix-off");
Properties p = new Properties();
p.setProperty("quarkus.package.jar.type", "uber-jar");
p.setProperty("quarkus.package.jar.add-runner-suffix", "false");
running = new RunningInvoker(testDir, false);
final MavenProcessInvocationResult result = running.execute(List.of("-DskipTests", "package"),
Map.of(), p);
assertThat(result.getProcess().waitFor()).isEqualTo(0);
final File targetDir = getTargetDir();
List<File> jars = getFilesEndingWith(targetDir, ".jar");
assertThat(jars).hasSize(1);
File jarFile = jars.get(0);
assertThat(jarFile.getName()).isEqualTo("acme-1.0-SNAPSHOT.jar");
assertMultiReleaseJar(jarFile);
ensureManifestOfJarIsReadableByJarInputStream(jarFile);
}
@Test
public void testCustomPackaging()
throws Exception {
testDir = getTargetDir("projects/custom-packaging-plugin");
running = new RunningInvoker(testDir, false);
MavenProcessInvocationResult result = running.execute(Collections.singletonList("install"),
Collections.emptyMap());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
testDir = getTargetDir("projects/custom-packaging-app");
running = new RunningInvoker(testDir, false);
result = running.execute(Collections.singletonList("package"),
Collections.emptyMap());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
final File targetDir = getTargetDir();
final File[] files = targetDir.listFiles(f -> f.getName().endsWith(".jar"));
Set<String> jarNames = new HashSet<>(files.length);
for (File f : files) {
jarNames.add(f.getName());
}
final Path runnerJar = getTargetDir().toPath().resolve("quarkus-app").resolve("quarkus-run.jar");
Assertions.assertTrue(Files.exists(runnerJar), "Runner jar " + runnerJar + " is missing");
assertZipEntriesCanBeOpenedAndClosed(runnerJar);
}
/**
* Tests that the uber runner jar created by Quarkus has valid CRC entries. The verification
* is pretty trivial and involves opening and closing the ZipEntry entries that are part of the
* runner jar. That internally triggers the CRC checks.
*
* @throws Exception
* @see <a href="https://github.com/quarkusio/quarkus/issues/4782"/>
*/
@Test
public void testRunnerUberJarHasValidCRC() throws Exception {
testDir = initProject("projects/uberjar-check", "projects/project-uberjar-crc");
running = new RunningInvoker(testDir, false);
Properties p = new Properties();
p.setProperty("quarkus.package.jar.type", "uber-jar");
final MavenProcessInvocationResult result = running.execute(Collections.singletonList("package"),
Collections.emptyMap(), p);
assertThat(result.getProcess().waitFor()).isEqualTo(0);
final File targetDir = getTargetDir();
assertThat(getNumberOfFilesEndingWith(targetDir, ".jar")).isEqualTo(1);
assertThat(getNumberOfFilesEndingWith(targetDir, ".original")).isEqualTo(1);
final Path runnerJar = targetDir.toPath().resolve("acme-1.0-SNAPSHOT-runner.jar");
Assertions.assertTrue(Files.exists(runnerJar), "Runner jar " + runnerJar + " is missing");
assertZipEntriesCanBeOpenedAndClosed(runnerJar);
}
/**
* Tests that the runner jar created by Quarkus has valid CRC entries. The verification
* is pretty trivial and involves opening and closing the ZipEntry entries that are part of the
* runner jar. That internally triggers the CRC checks.
*
* @throws Exception
* @see <a href="https://github.com/quarkusio/quarkus/issues/4782"/>
*/
@Test
public void testLegacyJarHasValidCRC() throws Exception {
testDir = initProject("projects/uberjar-check", "projects/project-legacyjar-crc");
running = new RunningInvoker(testDir, false);
final MavenProcessInvocationResult result = running.execute(Collections.singletonList("package"),
Collections.singletonMap("QUARKUS_PACKAGE_JAR_TYPE", "legacy-jar"));
assertThat(result.getProcess().waitFor()).isEqualTo(0);
final File targetDir = getTargetDir();
assertThat(getNumberOfFilesEndingWith(targetDir, ".jar")).isEqualTo(2);
final Path runnerJar = targetDir.toPath().resolve("acme-1.0-SNAPSHOT-runner.jar");
Assertions.assertTrue(Files.exists(runnerJar), "Runner jar " + runnerJar + " is missing");
assertZipEntriesCanBeOpenedAndClosed(runnerJar);
}
/**
* Tests that the runner jar created by Quarkus has valid CRC entries. The verification
* is pretty trivial and involves opening and closing the ZipEntry entries that are part of the
* runner jar. That internally triggers the CRC checks.
*
* @throws Exception
* @see <a href="https://github.com/quarkusio/quarkus/issues/4782"/>
*/
@Test
public void testFastJarHasValidCRC() throws Exception {
testDir = initProject("projects/uberjar-check", "projects/project-fastjar-crc");
running = new RunningInvoker(testDir, false);
final MavenProcessInvocationResult result = running.execute(Collections.singletonList("package"),
Collections.emptyMap());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
final Path runnerJar = getTargetDir().toPath().resolve("quarkus-app").resolve("quarkus-run.jar");
Assertions.assertTrue(Files.exists(runnerJar), "Runner jar " + runnerJar + " is missing");
assertZipEntriesCanBeOpenedAndClosed(runnerJar);
}
/**
* Tests that quarkus.index-dependency.* can be used for modules in a multimodule project
*/
@Test
public void testQuarkusIndexDependencyOnLocalModule() throws Exception {
testDir = initProject("projects/quarkus-index-dependencies");
running = new RunningInvoker(testDir, false);
final MavenProcessInvocationResult result = running.execute(Collections.singletonList("package"),
Collections.emptyMap());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
final File targetDir = new File(testDir.getAbsoluteFile(), "runner" + File.separator + "target");
final Path runnerJar = targetDir.toPath().resolve("quarkus-app").resolve("quarkus-run.jar");
Assertions.assertTrue(Files.exists(runnerJar), "Runner jar " + runnerJar + " is missing");
assertZipEntriesCanBeOpenedAndClosed(runnerJar);
}
/**
* Tests that quarkus.index-dependency.* can be used for modules in a multimodule project. artifact-id is optional.
*/
@Test
public void testQuarkusIndexDependencyGroupIdOnLocalModule() throws Exception {
testDir = initProject("projects/quarkus-index-dependencies-groupid");
running = new RunningInvoker(testDir, false);
final MavenProcessInvocationResult result = running.execute(Collections.singletonList("package"),
Collections.emptyMap());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
final File targetDir = new File(testDir.getAbsoluteFile(), "runner" + File.separator + "target");
final Path runnerJar = targetDir.toPath().resolve("quarkus-app").resolve("quarkus-run.jar");
Assertions.assertTrue(Files.exists(runnerJar), "Runner jar " + runnerJar + " is missing");
assertZipEntriesCanBeOpenedAndClosed(runnerJar);
}
@Test
public void testNativeSourcesPackage() throws Exception {
testDir = initProject("projects/uberjar-check", "projects/project-native-sources");
running = new RunningInvoker(testDir, false);
final MavenProcessInvocationResult result = running.execute(
Arrays.asList("package", "-Dquarkus.native.enabled=true", "-Dquarkus.native.sources-only=true"),
Collections.emptyMap());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
final File targetDir = getTargetDir();
final Path nativeSourcesDir = targetDir.toPath().resolve("native-sources");
assertThat(nativeSourcesDir).exists()
.isDirectoryContaining(p -> "native-image.args".equals(p.getFileName().toString()))
.isDirectoryContaining(p -> "acme-1.0-SNAPSHOT-runner.jar".equals(p.getFileName().toString()));
}
@Test
public void testMultiJarModulesPackage() throws Exception {
testDir = initProject("projects/multijar-module", "projects/multijar-module-package");
running = new RunningInvoker(testDir, false);
final MavenProcessInvocationResult result = running.execute(
Arrays.asList("package"),
Collections.emptyMap());
assertThat(result.getProcess().waitFor()).isEqualTo(0);
}
private int getNumberOfFilesEndingWith(File dir, String suffix) {
return getFilesEndingWith(dir, suffix).size();
}
private File getTargetDir() {
return new File(testDir.getAbsoluteFile() + "/target");
}
private void assertZipEntriesCanBeOpenedAndClosed(final Path jar) throws Exception {
try (final InputStream is = Files.newInputStream(jar)) {
final ZipInputStream zis = new ZipInputStream(is);
ZipEntry e = null;
while ((e = zis.getNextEntry()) != null) {
zis.closeEntry();
}
}
}
}
| PackageIT |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissions.java | {
"start": 2949,
"end": 3030
} | class ____ the business logic for how to remove per outbound version.
*/
public | owns |
java | elastic__elasticsearch | x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineMethods.java | {
"start": 1817,
"end": 2438
} | class ____ extends SinglePassSimpleStatisticsMethod {
static final String NAME = "z-score";
private final double stdev;
ZScore(double[] values) {
super(values);
double variance = 0.0;
for (Double value : values) {
if (value.isNaN() == false) {
variance += Math.pow(value - mean, 2);
}
}
this.stdev = Math.sqrt(variance / count);
}
@Override
public double applyAsDouble(double value) {
return (value - mean) / stdev;
}
}
static | ZScore |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/jobgraph/jsonplan/JsonPlanGenerator.java | {
"start": 1876,
"end": 8831
} | class ____ {
private static final String NOT_SET = "";
private static final String EMPTY = "{}";
public static JobPlanInfo.Plan generatePlan(JobGraph jg) {
return generatePlan(
jg.getJobID(),
jg.getName(),
jg.getJobType(),
jg.getVertices(),
VertexParallelism.empty());
}
public static JobPlanInfo.Plan generatePlan(
JobID jobID,
String jobName,
JobType jobType,
Iterable<JobVertex> vertices,
VertexParallelism vertexParallelism) {
try {
Collection<JobPlanInfo.Plan.Node> nodes = new ArrayList<>();
// info per vertex
for (JobVertex vertex : vertices) {
String operator =
vertex.getOperatorName() != null ? vertex.getOperatorName() : NOT_SET;
String operatorDescr =
vertex.getOperatorDescription() != null
? vertex.getOperatorDescription()
: NOT_SET;
String optimizerProps =
vertex.getResultOptimizerProperties() != null
? vertex.getResultOptimizerProperties()
: EMPTY;
String description =
vertex.getOperatorPrettyName() != null
? vertex.getOperatorPrettyName()
: vertex.getName();
// make sure the encoding is HTML pretty
description = StringEscapeUtils.escapeHtml4(description);
description = description.replace("\n", "<br/>");
description = description.replace("\\", "\");
operatorDescr = StringEscapeUtils.escapeHtml4(operatorDescr);
operatorDescr = operatorDescr.replace("\n", "<br/>");
JobVertexID vertexID = vertex.getID();
long parallelism =
vertexParallelism
.getParallelismOptional(vertexID)
.orElse(vertex.getParallelism());
Collection<JobPlanInfo.Plan.Node.Input> inputs = new ArrayList<>();
if (!vertex.isInputVertex()) {
for (int inputNum = 0; inputNum < vertex.getInputs().size(); inputNum++) {
JobEdge edge = vertex.getInputs().get(inputNum);
if (edge.getSource() == null) {
continue;
}
JobVertex predecessor = edge.getSource().getProducer();
if (predecessor == null || predecessor.getID() == null) {
continue;
}
String inputId = predecessor.getID().toString();
if (edge.getSource().getResultType() == null
|| edge.getSource().getResultType().name() == null) {
continue;
}
String exchange = edge.getSource().getResultType().name().toLowerCase();
String shipStrategy = edge.getShipStrategyName();
String preProcessingOperation = edge.getPreProcessingOperationName();
String operatorLevelCaching = edge.getOperatorLevelCachingDescription();
inputs.add(
new JobPlanInfo.Plan.Node.Input(
inputId,
inputNum,
exchange,
shipStrategy,
preProcessingOperation,
operatorLevelCaching));
}
}
nodes.add(
new JobPlanInfo.Plan.Node(
vertexID.toString(),
operator,
parallelism,
operatorDescr,
description,
new JobPlanInfo.RawJson(optimizerProps),
inputs));
}
return new JobPlanInfo.Plan(jobID.toString(), jobName, jobType.name(), nodes);
} catch (Exception e) {
throw new RuntimeException("Failed to generate plan", e);
}
}
public static String generateStreamGraphJson(
StreamGraph sg, Map<Integer, JobVertexID> jobVertexIdMap) {
try (final StringWriter writer = new StringWriter(1024)) {
try (final JsonGenerator gen = new JsonFactory().createGenerator(writer)) {
// start of everything
gen.writeStartObject();
gen.writeArrayFieldStart("nodes");
// info per vertex
for (StreamNode node : sg.getStreamNodes()) {
gen.writeStartObject();
gen.writeStringField("id", String.valueOf(node.getId()));
gen.writeNumberField("parallelism", node.getParallelism());
gen.writeStringField("operator", node.getOperatorName());
gen.writeStringField("description", node.getOperatorDescription());
if (jobVertexIdMap.containsKey(node.getId())) {
gen.writeStringField(
"job_vertex_id", jobVertexIdMap.get(node.getId()).toString());
}
// write the input edge properties
gen.writeArrayFieldStart("inputs");
List<StreamEdge> inEdges = node.getInEdges();
for (int inputNum = 0; inputNum < inEdges.size(); inputNum++) {
StreamEdge edge = inEdges.get(inputNum);
gen.writeStartObject();
gen.writeNumberField("num", inputNum);
gen.writeStringField("id", String.valueOf(edge.getSourceId()));
gen.writeStringField("ship_strategy", edge.getPartitioner().toString());
gen.writeStringField("exchange", edge.getExchangeMode().name());
gen.writeEndObject();
}
gen.writeEndArray();
gen.writeEndObject();
}
// end of everything
gen.writeEndArray();
gen.writeEndObject();
}
return writer.toString();
} catch (Exception e) {
throw new RuntimeException("Failed to generate json stream plan", e);
}
}
}
| JsonPlanGenerator |
java | google__auto | value/src/test/java/com/google/auto/value/processor/JavaScannerTest.java | {
"start": 951,
"end": 2257
} | class ____ {
private static final ImmutableList<String> TOKENS =
ImmutableList.of(
" ",
"\"hello \\\" world\\n\"",
"'a'",
" ",
"'\\t'",
" ",
"`com.google.Foo`",
" ",
"\n ",
"/* comment * comment \" whatever\n comment continued */",
" ",
"t",
"h",
"i",
"n",
"g",
" ",
"t",
"h",
"i",
"n",
"g",
" ",
"// line comment",
"\n",
"/*/ tricky comment */",
"\n");
/**
* Tests basic scanner functionality. The test concatenates the tokens in {@link #TOKENS} and then
* retokenizes that string, checking that the same list of tokens is produced.
*/
@Test
public void testScanner() {
String input = Joiner.on("").join(TOKENS);
ImmutableList.Builder<String> tokensBuilder = ImmutableList.builder();
JavaScanner tokenizer = new JavaScanner(input);
int end;
for (int i = 0; i < input.length(); i = end) {
end = tokenizer.tokenEnd(i);
tokensBuilder.add(input.substring(i, end));
}
assertThat(tokensBuilder.build()).containsExactlyElementsIn(TOKENS).inOrder();
}
}
| JavaScannerTest |
java | spring-projects__spring-framework | spring-expression/src/main/java/org/springframework/expression/spel/ast/Indexer.java | {
"start": 18302,
"end": 24138
} | class ____ implements ValueRef {
private final TypeConverter typeConverter;
private final Object array;
private final int index;
private final TypeDescriptor typeDescriptor;
ArrayIndexingValueRef(TypeConverter typeConverter, Object array, int index, TypeDescriptor typeDescriptor) {
this.typeConverter = typeConverter;
this.array = array;
this.index = index;
this.typeDescriptor = typeDescriptor;
}
@Override
public TypedValue getValue() {
Object arrayElement = getArrayElement(this.array, this.index);
return new TypedValue(arrayElement, this.typeDescriptor.elementTypeDescriptor(arrayElement));
}
@Override
public void setValue(@Nullable Object newValue) {
TypeDescriptor elementType = this.typeDescriptor.getElementTypeDescriptor();
Assert.state(elementType != null, "No element type");
setArrayElement(this.typeConverter, this.array, this.index, newValue, elementType.getType());
}
@Override
public boolean isWritable() {
return true;
}
private Object getArrayElement(Object ctx, int idx) throws SpelEvaluationException {
Class<?> arrayComponentType = ctx.getClass().componentType();
if (arrayComponentType == boolean.class) {
boolean[] array = (boolean[]) ctx;
checkAccess(array.length, idx);
setExitTypeDescriptor("Z");
Indexer.this.arrayTypeDescriptor = "[Z";
return array[idx];
}
else if (arrayComponentType == byte.class) {
byte[] array = (byte[]) ctx;
checkAccess(array.length, idx);
setExitTypeDescriptor("B");
Indexer.this.arrayTypeDescriptor = "[B";
return array[idx];
}
else if (arrayComponentType == char.class) {
char[] array = (char[]) ctx;
checkAccess(array.length, idx);
setExitTypeDescriptor("C");
Indexer.this.arrayTypeDescriptor = "[C";
return array[idx];
}
else if (arrayComponentType == double.class) {
double[] array = (double[]) ctx;
checkAccess(array.length, idx);
setExitTypeDescriptor("D");
Indexer.this.arrayTypeDescriptor = "[D";
return array[idx];
}
else if (arrayComponentType == float.class) {
float[] array = (float[]) ctx;
checkAccess(array.length, idx);
setExitTypeDescriptor("F");
Indexer.this.arrayTypeDescriptor = "[F";
return array[idx];
}
else if (arrayComponentType == int.class) {
int[] array = (int[]) ctx;
checkAccess(array.length, idx);
setExitTypeDescriptor("I");
Indexer.this.arrayTypeDescriptor = "[I";
return array[idx];
}
else if (arrayComponentType == long.class) {
long[] array = (long[]) ctx;
checkAccess(array.length, idx);
setExitTypeDescriptor("J");
Indexer.this.arrayTypeDescriptor = "[J";
return array[idx];
}
else if (arrayComponentType == short.class) {
short[] array = (short[]) ctx;
checkAccess(array.length, idx);
setExitTypeDescriptor("S");
Indexer.this.arrayTypeDescriptor = "[S";
return array[idx];
}
else {
Object[] array = (Object[]) ctx;
checkAccess(array.length, idx);
Object retValue = array[idx];
Indexer.this.exitTypeDescriptor = CodeFlow.toDescriptor(arrayComponentType);
Indexer.this.arrayTypeDescriptor = CodeFlow.toDescriptor(array.getClass());
return retValue;
}
}
private void setArrayElement(TypeConverter converter, Object ctx, int idx, @Nullable Object newValue,
Class<?> arrayComponentType) throws EvaluationException {
if (arrayComponentType == boolean.class) {
boolean[] array = (boolean[]) ctx;
checkAccess(array.length, idx);
array[idx] = convertValue(converter, newValue, boolean.class);
}
else if (arrayComponentType == byte.class) {
byte[] array = (byte[]) ctx;
checkAccess(array.length, idx);
array[idx] = convertValue(converter, newValue, byte.class);
}
else if (arrayComponentType == char.class) {
char[] array = (char[]) ctx;
checkAccess(array.length, idx);
array[idx] = convertValue(converter, newValue, char.class);
}
else if (arrayComponentType == double.class) {
double[] array = (double[]) ctx;
checkAccess(array.length, idx);
array[idx] = convertValue(converter, newValue, double.class);
}
else if (arrayComponentType == float.class) {
float[] array = (float[]) ctx;
checkAccess(array.length, idx);
array[idx] = convertValue(converter, newValue, float.class);
}
else if (arrayComponentType == int.class) {
int[] array = (int[]) ctx;
checkAccess(array.length, idx);
array[idx] = convertValue(converter, newValue, int.class);
}
else if (arrayComponentType == long.class) {
long[] array = (long[]) ctx;
checkAccess(array.length, idx);
array[idx] = convertValue(converter, newValue, long.class);
}
else if (arrayComponentType == short.class) {
short[] array = (short[]) ctx;
checkAccess(array.length, idx);
array[idx] = convertValue(converter, newValue, short.class);
}
else {
Object[] array = (Object[]) ctx;
checkAccess(array.length, idx);
array[idx] = convertValue(converter, newValue, arrayComponentType);
}
}
private void checkAccess(int arrayLength, int index) throws SpelEvaluationException {
if (index >= arrayLength) {
throw new SpelEvaluationException(getStartPosition(), SpelMessage.ARRAY_INDEX_OUT_OF_BOUNDS,
arrayLength, index);
}
}
@SuppressWarnings("unchecked")
private static <T> T convertValue(TypeConverter converter, @Nullable Object value, Class<T> targetType) {
T result = (T) converter.convertValue(
value, TypeDescriptor.forObject(value), TypeDescriptor.valueOf(targetType));
if (result == null) {
throw new IllegalStateException("Null conversion result for index [" + value + "]");
}
return result;
}
}
@SuppressWarnings({"rawtypes", "unchecked"})
private | ArrayIndexingValueRef |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/annotation/RequestAttribute.java | {
"start": 1758,
"end": 2438
} | interface ____ {
*
* }
* </pre>
*
* <p>When declared as a binding annotation the {@code @RequestAttribute} annotation is declared on each parameter to be bound:</p>
*
* <pre class="code">
* @Get('/user')
* User get(@RequestAttribute('X-Username') String username, @RequestAttribute('X-MyParam') String myparam) {
* return new User(username, myparam);
* }
* </pre>
*
* @author Ahmed Lafta
* @since 1.0
*/
@Documented
@Retention(RUNTIME)
@Target({ElementType.PARAMETER, ElementType.TYPE, ElementType.METHOD, ElementType.ANNOTATION_TYPE}) // this can be either type or param
@Repeatable(value = RequestAttributes.class)
@Bindable
public @ | UserClient |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_issue_336.java | {
"start": 161,
"end": 1257
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
RemoteInvocation remoteInvocation = new RemoteInvocation();
remoteInvocation.setMethodName("test");
remoteInvocation.setParameterTypes(new Class[] { int.class, Date.class, String.class });
remoteInvocation.setArguments(new Object[] { 1, new Date(1460538273131L), "this is a test" });
String json = JSON.toJSONString(remoteInvocation);
Assert.assertEquals("{\"arguments\":[1,1460538273131,\"this is a test\"],\"methodName\":\"test\",\"parameterTypes\":[\"int\",\"java.util.Date\",\"java.lang.String\"]}", json);
remoteInvocation = JSON.parseObject(json, RemoteInvocation.class);
Assert.assertEquals(3, remoteInvocation.parameterTypes.length);
Assert.assertEquals(int.class, remoteInvocation.parameterTypes[0]);
Assert.assertEquals(Date.class, remoteInvocation.parameterTypes[1]);
Assert.assertEquals(String.class, remoteInvocation.parameterTypes[2]);
}
public static | Bug_for_issue_336 |
java | apache__camel | components/camel-rxjava/src/main/java/org/apache/camel/component/rxjava/engine/RxJavaStreamsService.java | {
"start": 2125,
"end": 12038
} | class ____ extends ServiceSupport implements CamelReactiveStreamsService {
private final CamelContext context;
private final Supplier<UnwrapStreamProcessor> unwrapStreamProcessorSupplier;
private final ConcurrentMap<String, RxJavaCamelProcessor> publishers;
private final ConcurrentMap<String, ReactiveStreamsCamelSubscriber> subscribers;
private final ConcurrentMap<String, String> publishedUriToStream;
private final ConcurrentMap<String, String> requestedUriToStream;
RxJavaStreamsService(CamelContext context) {
this.context = context;
this.publishers = new ConcurrentHashMap<>();
this.subscribers = new ConcurrentHashMap<>();
this.publishedUriToStream = new ConcurrentHashMap<>();
this.requestedUriToStream = new ConcurrentHashMap<>();
this.unwrapStreamProcessorSupplier = Suppliers.memorize(UnwrapStreamProcessor::new);
}
@Override
public String getId() {
return RxJavaStreamsConstants.SERVICE_NAME;
}
@Override
public CamelContext getCamelContext() {
return context;
}
// ******************************************
// Lifecycle
// ******************************************
@Override
public void doStart() throws Exception {
}
@Override
public void doStop() throws Exception {
for (RxJavaCamelProcessor processor : publishers.values()) {
processor.close();
}
for (ReactiveStreamsCamelSubscriber subscriber : subscribers.values()) {
subscriber.close();
}
}
// ******************************************
//
// ******************************************
@Override
public Publisher<Exchange> fromStream(String name) {
return getCamelProcessor(name).getPublisher();
}
@Override
public <T> Publisher<T> fromStream(String name, Class<T> type) {
final Publisher<Exchange> publisher = fromStream(name);
if (Exchange.class.isAssignableFrom(type)) {
return Publisher.class.cast(publisher);
}
return Flowable.fromPublisher(publisher).map(BodyConverter.forType(type)::apply);
}
@Override
public ReactiveStreamsCamelSubscriber streamSubscriber(String name) {
return subscribers.computeIfAbsent(name, n -> new ReactiveStreamsCamelSubscriber(name));
}
@Override
@SuppressWarnings("unchecked")
public <T> Subscriber<T> streamSubscriber(String name, Class<T> type) {
final Subscriber<Exchange> subscriber = streamSubscriber(name);
if (Exchange.class.equals(type)) {
return Subscriber.class.cast(subscriber);
}
return new ConvertingSubscriber<>(subscriber, context, type);
}
@Override
public Publisher<Exchange> toStream(String name, Object data) {
return doRequest(
name,
ReactiveStreamsHelper.convertToExchange(context, data));
}
@Override
public Function<?, ? extends Publisher<Exchange>> toStream(String name) {
return data -> toStream(name, data);
}
@Override
public <T> Publisher<T> toStream(String name, Object data, Class<T> type) {
return new ConvertingPublisher<>(toStream(name, data), type);
}
@Override
public <T> Function<Object, Publisher<T>> toStream(String name, Class<T> type) {
return data -> toStream(name, data, type);
}
@Override
public Publisher<Exchange> from(String uri) {
final String name = publishedUriToStream.computeIfAbsent(uri, camelUri -> {
try {
String uuid = context.getUuidGenerator().generateUuid();
RouteBuilder.addRoutes(context, rb -> rb.from(camelUri).to("reactive-streams:" + uuid));
return uuid;
} catch (Exception e) {
throw new IllegalStateException("Unable to create source reactive stream from direct URI: " + uri, e);
}
});
return fromStream(name);
}
@Override
public <T> Publisher<T> from(String name, Class<T> type) {
final Publisher<Exchange> publisher = from(name);
if (Exchange.class.isAssignableFrom(type)) {
return Publisher.class.cast(publisher);
}
return Flowable.fromPublisher(publisher).map(BodyConverter.forType(type)::apply);
}
@Override
public Subscriber<Exchange> subscriber(String uri) {
try {
String uuid = context.getUuidGenerator().generateUuid();
RouteBuilder.addRoutes(context, rb -> rb.from("reactive-streams:" + uuid).to(uri));
return streamSubscriber(uuid);
} catch (Exception e) {
throw new IllegalStateException("Unable to create source reactive stream towards direct URI: " + uri, e);
}
}
@Override
public <T> Subscriber<T> subscriber(String uri, Class<T> type) {
return new ConvertingSubscriber<>(subscriber(uri), context, type);
}
@Override
public Publisher<Exchange> to(String uri, Object data) {
String streamName = requestedUriToStream.computeIfAbsent(uri, camelUri -> {
try {
String uuid = context.getUuidGenerator().generateUuid();
RouteBuilder.addRoutes(context, rb -> rb.from("reactive-streams:" + uuid).to(camelUri));
return uuid;
} catch (Exception e) {
throw new IllegalStateException("Unable to create requested reactive stream from direct URI: " + uri, e);
}
});
return toStream(streamName, data);
}
@Override
public Function<Object, Publisher<Exchange>> to(String uri) {
return data -> to(uri, data);
}
@Override
public <T> Publisher<T> to(String uri, Object data, Class<T> type) {
Publisher<Exchange> publisher = to(uri, data);
return Flowable.fromPublisher(publisher).map(BodyConverter.forType(type)::apply);
}
@Override
public <T> Function<Object, Publisher<T>> to(String uri, Class<T> type) {
return data -> to(uri, data, type);
}
@Override
public void process(String uri, Function<? super Publisher<Exchange>, ?> processor) {
try {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() throws Exception {
from(uri)
.process(exchange -> {
Exchange copy = exchange.copy();
Object result = processor.apply(Flowable.just(copy));
exchange.getIn().setBody(result);
})
.process(unwrapStreamProcessorSupplier.get());
}
});
} catch (Exception e) {
throw new IllegalStateException("Unable to add reactive stream processor to the direct URI: " + uri, e);
}
}
@Override
public <T> void process(String uri, Class<T> type, Function<? super Publisher<T>, ?> processor) {
process(
uri,
publisher -> processor.apply(
Flowable.fromPublisher(publisher).map(BodyConverter.forType(type)::apply)));
}
// ******************************************
// Producer
// ******************************************
@Override
public void attachCamelProducer(String name, ReactiveStreamsProducer producer) {
getCamelProcessor(name).attach(producer);
}
@Override
public void detachCamelProducer(String name) {
getCamelProcessor(name).detach();
}
@Override
public void sendCamelExchange(String name, Exchange exchange) {
getCamelProcessor(name).send(exchange);
}
private RxJavaCamelProcessor getCamelProcessor(String name) {
return publishers.computeIfAbsent(name, key -> new RxJavaCamelProcessor(this, key));
}
// ******************************************
// Consumer
// ******************************************
@Override
public ReactiveStreamsCamelSubscriber attachCamelConsumer(String name, ReactiveStreamsConsumer consumer) {
ReactiveStreamsCamelSubscriber subscriber = streamSubscriber(name);
subscriber.attachConsumer(consumer);
return subscriber;
}
@Override
public void detachCamelConsumer(String name) {
ReactiveStreamsCamelSubscriber subscriber = streamSubscriber(name);
subscriber.detachConsumer();
}
// *******************************************
// Helpers
// *******************************************
protected Publisher<Exchange> doRequest(String name, Exchange data) {
ReactiveStreamsConsumer consumer = streamSubscriber(name).getConsumer();
if (consumer == null) {
throw new IllegalStateException("No consumers attached to the stream " + name);
}
Single<Exchange> source = Single.<Exchange> create(
emitter -> data.getExchangeExtension().addOnCompletion(new Synchronization() {
@Override
public void onComplete(Exchange exchange) {
emitter.onSuccess(exchange);
}
@Override
public void onFailure(Exchange exchange) {
Throwable throwable = exchange.getException();
if (throwable == null) {
throwable = new IllegalStateException("Unknown Exception");
}
emitter.onError(throwable);
}
})).doOnSubscribe(
subs -> consumer.process(data, RxJavaStreamsConstants.EMPTY_ASYNC_CALLBACK));
return source.toFlowable();
}
}
| RxJavaStreamsService |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/resume/OffsetKey.java | {
"start": 995,
"end": 1346
} | interface ____<K> extends Serializable {
/**
* Sets the key value
*
* @param key the key value
*/
void setValue(K key);
/**
* Gets the key value
*
* @return the key instance
*/
K getValue();
@Override
default ByteBuffer serialize() {
return serialize(getValue());
}
}
| OffsetKey |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/condition/DisabledIfSystemProperties.java | {
"start": 1471,
"end": 1666
} | interface ____ {
/**
* An array of one or more {@link DisabledIfSystemProperty @DisabledIfSystemProperty}
* declarations.
*/
DisabledIfSystemProperty[] value();
}
| DisabledIfSystemProperties |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/NettyEndpointBuilderFactory.java | {
"start": 29039,
"end": 29553
} | class ____ could be used to return an SSL Handler.
*
* The option is a: <code>io.netty.handler.ssl.SslHandler</code> type.
*
* Group: security
*
* @param sslHandler the value to set
* @return the dsl builder
*/
default NettyEndpointConsumerBuilder sslHandler(io.netty.handler.ssl.SslHandler sslHandler) {
doSetProperty("sslHandler", sslHandler);
return this;
}
/**
* Reference to a | that |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/rules/StarReferenceFlatteningRule.java | {
"start": 1549,
"end": 2076
} | class ____ implements ResolverRule {
@Override
public List<Expression> apply(List<Expression> expression, ResolutionContext context) {
final List<ColumnExpansionStrategy> strategies =
context.configuration().get(TableConfigOptions.TABLE_COLUMN_EXPANSION_STRATEGY);
return expression.stream()
.flatMap(e -> e.accept(new FieldFlatteningVisitor(context, strategies)).stream())
.collect(Collectors.toList());
}
private static | StarReferenceFlatteningRule |
java | netty__netty | microbench/src/main/java/io/netty/microbench/channel/epoll/EpollSocketChannelBenchmark.java | {
"start": 1572,
"end": 5715
} | class ____ extends AbstractMicrobenchmark {
private static final Runnable runnable = new Runnable() {
@Override
public void run() { }
};
private EventLoopGroup group;
private Channel serverChan;
private Channel chan;
private ByteBuf abyte;
private Future<?> future;
@Setup
public void setup() throws Exception {
group = new MultiThreadIoEventLoopGroup(1, EpollIoHandler.newFactory());
// add an arbitrary timeout to make the timer reschedule
future = group.schedule(new Runnable() {
@Override
public void run() {
throw new AssertionError();
}
}, 5, TimeUnit.MINUTES);
serverChan = new ServerBootstrap()
.channel(EpollServerSocketChannel.class)
.group(group)
.childHandler(new ChannelInitializer<Channel>() {
@Override
protected void initChannel(Channel ch) {
ch.pipeline().addLast(new ChannelDuplexHandler() {
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
if (msg instanceof ByteBuf) {
ctx.writeAndFlush(msg, ctx.voidPromise());
} else {
throw new AssertionError();
}
}
});
}
})
.bind(0)
.sync()
.channel();
chan = new Bootstrap()
.channel(EpollSocketChannel.class)
.handler(new ChannelInitializer<Channel>() {
@Override
protected void initChannel(Channel ch) {
ch.pipeline().addLast(new ChannelDuplexHandler() {
private ChannelPromise lastWritePromise;
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
if (msg instanceof ByteBuf) {
ByteBuf buf = (ByteBuf) msg;
try {
if (buf.readableBytes() == 1) {
lastWritePromise.trySuccess();
lastWritePromise = null;
} else {
throw new AssertionError();
}
} finally {
buf.release();
}
} else {
throw new AssertionError();
}
}
@Override
public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise)
throws Exception {
if (lastWritePromise != null) {
throw new IllegalStateException();
}
lastWritePromise = promise;
super.write(ctx, msg, ctx.voidPromise());
}
});
}
})
.group(group)
.connect(serverChan.localAddress())
.sync()
.channel();
abyte = chan.alloc().directBuffer(1);
abyte.writeByte('a');
}
@TearDown
public void tearDown() throws Exception {
chan.close().sync();
serverChan.close().sync();
future.cancel(true);
group.shutdownGracefully(0, 0, TimeUnit.SECONDS).sync();
abyte.release();
}
@Benchmark
public Object pingPong() throws Exception {
return chan.pipeline().writeAndFlush(abyte.retainedSlice()).sync();
}
@Benchmark
public Object executeSingle() throws Exception {
return chan.eventLoop().submit(runnable).get();
}
@Benchmark
@GroupThreads(3)
public Object executeMulti() throws Exception {
return chan.eventLoop().submit(runnable).get();
}
}
| EpollSocketChannelBenchmark |
java | netty__netty | codec-http/src/test/java/io/netty/handler/codec/http/websocketx/WebSocketClientHandshaker07Test.java | {
"start": 1019,
"end": 2709
} | class ____ extends WebSocketClientHandshakerTest {
@Test
public void testHostHeaderPreserved() {
URI uri = URI.create("ws://localhost:9999");
WebSocketClientHandshaker handshaker = newHandshaker(uri, null,
new DefaultHttpHeaders().set(HttpHeaderNames.HOST, "test.netty.io"), false, true);
FullHttpRequest request = handshaker.newHandshakeRequest();
try {
assertEquals("/", request.uri());
assertEquals("test.netty.io", request.headers().get(HttpHeaderNames.HOST));
} finally {
request.release();
}
}
@Override
protected WebSocketClientHandshaker newHandshaker(URI uri, String subprotocol, HttpHeaders headers,
boolean absoluteUpgradeUrl, boolean generateOriginHeader) {
return new WebSocketClientHandshaker07(uri, WebSocketVersion.V07, subprotocol, false, headers,
1024, true, false, 10000,
absoluteUpgradeUrl, generateOriginHeader);
}
@Override
protected CharSequence getOriginHeaderName() {
return HttpHeaderNames.SEC_WEBSOCKET_ORIGIN;
}
@Override
protected CharSequence getProtocolHeaderName() {
return HttpHeaderNames.SEC_WEBSOCKET_PROTOCOL;
}
@Override
protected CharSequence[] getHandshakeRequiredHeaderNames() {
return new CharSequence[] {
HttpHeaderNames.UPGRADE,
HttpHeaderNames.CONNECTION,
HttpHeaderNames.SEC_WEBSOCKET_KEY,
HttpHeaderNames.HOST,
HttpHeaderNames.SEC_WEBSOCKET_VERSION,
};
}
}
| WebSocketClientHandshaker07Test |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/SetCookieResource.java | {
"start": 237,
"end": 564
} | class ____ {
@GET
public void setCookies(@Context HttpResponse response) {
response.getOutputHeaders().add(HttpHeaders.SET_COOKIE, "c1=c1");
response.getOutputHeaders().add(HttpHeaders.SET_COOKIE, "c2=c2");
response.getOutputHeaders().add(HttpHeaders.SET_COOKIE, "c3=c3");
}
}
| SetCookieResource |
java | alibaba__fastjson | src/test/java/com/derbysoft/spitfire/fastjson/dto/SuccessDTO.java | {
"start": 105,
"end": 367
} | class ____ extends AbstractDTO {
private List<String> messages = new ArrayList<String>();
public List<String> getMessages() {
return messages;
}
public void setMessages(List<String> infos) {
this.messages = infos;
}
}
| SuccessDTO |
java | quarkusio__quarkus | test-framework/junit5-component/src/main/java/io/quarkus/test/component/ComponentContainer.java | {
"start": 4245,
"end": 7873
} | class ____ configuration.
*
* @param testClass
* @param configuration
* @param buildShouldFail
* @param tracedClasses
* @return the build result
*/
static BuildResult build(Class<?> testClass, QuarkusComponentTestConfiguration configuration, boolean buildShouldFail,
Set<String> tracedClasses) {
if (configuration.componentClasses.isEmpty()) {
throw new IllegalStateException("No component classes to test");
}
long start = System.nanoTime();
if (LOG.isDebugEnabled()) {
LOG.debugf("Tested components: \n - %s",
configuration.componentClasses.stream().map(Object::toString).collect(Collectors.joining("\n - ")));
}
// Build index
IndexView index;
try {
Indexer indexer = new Indexer();
for (Class<?> componentClass : configuration.componentClasses) {
// Make sure that component hierarchy and all annotations present are indexed
indexComponentClass(indexer, componentClass);
}
if (configuration.hasCallbacks()) {
BeforeIndexContextImpl context = new BeforeIndexContextImpl(testClass, configuration.componentClasses);
for (QuarkusComponentTestCallbacks callback : configuration.callbacks) {
callback.beforeIndex(context);
}
for (Class<?> clazz : context.additionalComponentsClasses) {
indexComponentClass(indexer, clazz);
}
}
indexer.indexClass(ConfigProperty.class);
index = BeanArchives.buildImmutableBeanArchiveIndex(indexer.complete());
} catch (IOException e) {
throw new IllegalStateException("Failed to create index", e);
}
ClassLoader testClassLoader = testClass.getClassLoader();
boolean isContinuousTesting = Conditions.isContinuousTestingDiscovery();
IndexView computingIndex = BeanArchives.buildComputingBeanArchiveIndex(testClassLoader,
new ConcurrentHashMap<>(), index);
Map<String, byte[]> generatedClasses = new HashMap<>();
AtomicReference<byte[]> componentsProvider = new AtomicReference<>();
Map<String, Set<String>> configMappings = new HashMap<>();
Map<String, String[]> interceptorMethods = new HashMap<>();
Throwable buildFailure = null;
List<BytecodeTransformer> bytecodeTransformers = new ArrayList<>();
List<AnnotationTransformation> annotationTransformations = new ArrayList<>();
for (AnnotationsTransformer transformer : configuration.annotationsTransformers) {
annotationTransformations.add(transformer);
}
List<BeanRegistrar> beanRegistrars = new ArrayList<>();
if (configuration.hasCallbacks()) {
BeforeBuildContext beforeBuildContext = new BeforeBulidContextImpl(testClass, index, computingIndex,
bytecodeTransformers, annotationTransformations, beanRegistrars);
for (QuarkusComponentTestCallbacks callback : configuration.callbacks) {
callback.beforeBuild(beforeBuildContext);
}
}
try {
// These are populated after BeanProcessor.registerCustomContexts() is called
List<DotName> qualifiers = new ArrayList<>();
Set<String> interceptorBindings = new HashSet<>();
AtomicReference<BeanResolver> beanResolver = new AtomicReference<>();
// Collect all @Inject and @InjectMock test | and |
java | hibernate__hibernate-orm | hibernate-spatial/src/test/java/org/hibernate/spatial/testing/HQLTemplate.java | {
"start": 167,
"end": 489
} | class ____ {
final private String hqlTemplate;
public HQLTemplate(String template) {
this.hqlTemplate = template;
}
public static HQLTemplate from(String s) {
return new HQLTemplate( s );
}
public String mkHQLString(Object... params) {
return String.format( Locale.ROOT, hqlTemplate, params );
}
}
| HQLTemplate |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/support/DelegatePerTargetObjectIntroductionInterceptor.java | {
"start": 2133,
"end": 2887
} | class ____ extends IntroductionInfoSupport
implements IntroductionInterceptor {
/**
* Hold weak references to keys as we don't want to interfere with garbage collection..
*/
private final Map<Object, Object> delegateMap = new WeakHashMap<>();
private final Class<?> defaultImplType;
private final Class<?> interfaceType;
public DelegatePerTargetObjectIntroductionInterceptor(Class<?> defaultImplType, Class<?> interfaceType) {
this.defaultImplType = defaultImplType;
this.interfaceType = interfaceType;
// Create a new delegate now (but don't store it in the map).
// We do this for two reasons:
// 1) to fail early if there is a problem instantiating delegates
// 2) to populate the | DelegatePerTargetObjectIntroductionInterceptor |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/util/CancellableThreads.java | {
"start": 5602,
"end": 5701
} | interface ____ {
void run() throws InterruptedException;
}
public static | Interruptible |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java | {
"start": 2319,
"end": 8863
} | class ____ extends EsqlConfigurationFunction implements OptionalArgument {
public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
Expression.class,
"DateFormat",
DateFormat::new
);
private final Expression field;
private final Expression format;
@FunctionInfo(
returnType = "keyword",
description = "Returns a string representation of a date, in the provided format.",
examples = @Example(file = "date", tag = "docsDateFormat")
)
public DateFormat(
Source source,
@Param(optional = true, name = "dateFormat", type = { "keyword", "text", "date", "date_nanos" }, description = """
Date format (optional). If no format is specified, the `yyyy-MM-dd'T'HH:mm:ss.SSSZ` format is used.
If `null`, the function returns `null`.""") Expression format,
@Param(
name = "date",
type = { "date", "date_nanos" },
description = "Date expression. If `null`, the function returns `null`."
) Expression date,
Configuration configuration
) {
super(source, date != null ? List.of(format, date) : List.of(format), configuration);
this.field = date != null ? date : format;
this.format = date != null ? format : null;
}
private DateFormat(StreamInput in) throws IOException {
this(
Source.readFrom((PlanStreamInput) in),
in.readNamedWriteable(Expression.class),
in.readOptionalNamedWriteable(Expression.class),
((PlanStreamInput) in).configuration()
);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
source().writeTo(out);
out.writeNamedWriteable(children().get(0));
out.writeOptionalNamedWriteable(children().size() == 2 ? children().get(1) : null);
}
@Override
public String getWriteableName() {
return ENTRY.name;
}
Expression field() {
return field;
}
Expression format() {
return format;
}
@Override
public DataType dataType() {
return DataType.KEYWORD;
}
@Override
protected TypeResolution resolveType() {
if (childrenResolved() == false) {
return new TypeResolution("Unresolved children");
}
TypeResolution resolution;
if (format != null) {
resolution = isStringAndExact(format, sourceText(), FIRST);
if (resolution.unresolved()) {
return resolution;
}
}
String operationName = sourceText();
TypeResolutions.ParamOrdinal paramOrd = format == null ? FIRST : SECOND;
resolution = TypeResolutions.isType(field, DataType::isDate, operationName, paramOrd, "datetime or date_nanos");
if (resolution.unresolved()) {
return resolution;
}
return TypeResolution.TYPE_RESOLVED;
}
@Override
public boolean foldable() {
return field.foldable() && (format == null || format.foldable());
}
@Evaluator(extraName = "MillisConstant")
static BytesRef processMillis(long val, @Fixed DateFormatter formatter) {
return new BytesRef(dateTimeToString(val, formatter));
}
@Evaluator(extraName = "Millis")
static BytesRef processMillis(long val, BytesRef formatter, @Fixed Locale locale) {
return new BytesRef(dateTimeToString(val, toFormatter(formatter, locale)));
}
@Evaluator(extraName = "NanosConstant")
static BytesRef processNanos(long val, @Fixed DateFormatter formatter) {
return new BytesRef(nanoTimeToString(val, formatter));
}
@Evaluator(extraName = "Nanos")
static BytesRef processNanos(long val, BytesRef formatter, @Fixed Locale locale) {
return new BytesRef(nanoTimeToString(val, toFormatter(formatter, locale)));
}
private ExpressionEvaluator.Factory getConstantEvaluator(
DataType dateType,
EvalOperator.ExpressionEvaluator.Factory fieldEvaluator,
DateFormatter formatter
) {
if (dateType == DATE_NANOS) {
return new DateFormatNanosConstantEvaluator.Factory(source(), fieldEvaluator, formatter);
}
return new DateFormatMillisConstantEvaluator.Factory(source(), fieldEvaluator, formatter);
}
private ExpressionEvaluator.Factory getEvaluator(
DataType dateType,
EvalOperator.ExpressionEvaluator.Factory fieldEvaluator,
EvalOperator.ExpressionEvaluator.Factory formatEvaluator
) {
if (dateType == DATE_NANOS) {
return new DateFormatNanosEvaluator.Factory(source(), fieldEvaluator, formatEvaluator, configuration().locale());
}
return new DateFormatMillisEvaluator.Factory(source(), fieldEvaluator, formatEvaluator, configuration().locale());
}
@Override
public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) {
var fieldEvaluator = toEvaluator.apply(field);
if (format == null) {
return getConstantEvaluator(field().dataType(), fieldEvaluator, DEFAULT_DATE_TIME_FORMATTER);
}
if (DataType.isString(format.dataType()) == false) {
throw new IllegalArgumentException("unsupported data type for format [" + format.dataType() + "]");
}
if (format.foldable()) {
DateFormatter formatter = toFormatter(format.fold(toEvaluator.foldCtx()), configuration().locale());
return getConstantEvaluator(field.dataType(), fieldEvaluator, formatter);
}
var formatEvaluator = toEvaluator.apply(format);
return getEvaluator(field().dataType(), fieldEvaluator, formatEvaluator);
}
private static DateFormatter toFormatter(Object format, Locale locale) {
DateFormatter result = format == null ? DEFAULT_DATE_TIME_FORMATTER : DateFormatter.forPattern(((BytesRef) format).utf8ToString());
return result.withLocale(locale);
}
@Override
public Expression replaceChildren(List<Expression> newChildren) {
return new DateFormat(source(), newChildren.get(0), newChildren.size() > 1 ? newChildren.get(1) : null, configuration());
}
@Override
protected NodeInfo<? extends Expression> info() {
Expression first = format != null ? format : field;
Expression second = format != null ? field : null;
return NodeInfo.create(this, DateFormat::new, first, second, configuration());
}
}
| DateFormat |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/common/utils/DubboAppender.java | {
"start": 1594,
"end": 4881
} | class ____ extends AbstractOutputStreamAppender.Builder<Builder>
implements org.apache.logging.log4j.core.util.Builder<DubboAppender> {
@PluginBuilderAttribute
private String fileName;
@PluginBuilderAttribute
private boolean append = true;
@PluginBuilderAttribute
private boolean locking;
public Builder setFileName(String fileName) {
this.fileName = fileName;
return this;
}
public Builder setAppend(boolean append) {
this.append = append;
return this;
}
public Builder setLocking(boolean locking) {
this.locking = locking;
return this;
}
@Override
public DubboAppender build() {
return new DubboAppender(getName(), buildFileAppender());
}
private <B extends FileAppender.Builder<B>> FileAppender buildFileAppender() {
FileAppender.Builder<B> builder = FileAppender.newBuilder();
builder.setIgnoreExceptions(isIgnoreExceptions());
builder.setLayout(getLayout());
builder.setName(getName() + "-File");
builder.setConfiguration(getConfiguration());
builder.setBufferedIo(isBufferedIo());
builder.setBufferSize(getBufferSize());
builder.setImmediateFlush(isImmediateFlush());
builder.withFileName(fileName == null || fileName.isEmpty() ? DEFAULT_FILE_NAME : fileName);
builder.withAppend(append);
builder.withLocking(locking);
return builder.build();
}
}
private static final String DEFAULT_FILE_NAME = "dubbo.log";
public static boolean available = false;
public static List<Log> logList = new ArrayList<>();
private final FileAppender fileAppender;
public DubboAppender() {
this("Dubbo", null);
}
private DubboAppender(String name, FileAppender fileAppender) {
super(name, null, null, true, Property.EMPTY_ARRAY);
this.fileAppender = fileAppender;
}
@PluginBuilderFactory
public static Builder newBuilder() {
return new Builder().asBuilder();
}
public static void doStart() {
available = true;
}
public static void doStop() {
available = false;
}
public static void clear() {
logList.clear();
}
@Override
public void append(LogEvent event) {
if (fileAppender != null) {
fileAppender.append(event);
}
if (available) {
logList.add(parseLog(event));
}
}
@Override
public void initialize() {
fileAppender.initialize();
super.initialize();
}
@Override
public void start() {
fileAppender.start();
super.start();
}
@Override
public void stop() {
super.stop();
fileAppender.stop();
}
private Log parseLog(LogEvent event) {
Log log = new Log();
log.setLogName(event.getLoggerName());
log.setLogLevel(Level.valueOf(event.getLevel().name()));
log.setLogThread(event.getThreadName());
log.setLogMessage(event.getMessage().getFormattedMessage());
return log;
}
}
| Builder |
java | dropwizard__dropwizard | dropwizard-jetty/src/main/java/io/dropwizard/jetty/ConnectorFactory.java | {
"start": 474,
"end": 1002
} | interface ____ extends Discoverable {
/**
* Create a new connector.
*
* @param server the application's {@link Server} instance
* @param metrics the application's metrics
* @param name the application's name
* @param threadPool the application's thread pool
* @return a {@link Connector}
*/
Connector build(Server server,
MetricRegistry metrics,
String name,
@Nullable ThreadPool threadPool);
}
| ConnectorFactory |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/OptionsTests.java | {
"start": 970,
"end": 22175
} | class ____ extends ESTestCase {
public void testNullOptions_SingleDataTypeAllowed() {
Map<String, DataType> allowedOptions = Map.of("keyword_option", DataType.KEYWORD);
Expression.TypeResolution resolution = Options.resolve(null, Source.EMPTY, TypeResolutions.ParamOrdinal.DEFAULT, allowedOptions);
assertTrue(resolution.resolved());
}
public void testSingleEntryOptions_SingleDataTypeAllowed_ShouldResolve() {
Map<String, DataType> allowedOptions = Map.of("keyword_option", DataType.KEYWORD);
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(Literal.keyword(Source.EMPTY, "keyword_option"), Literal.keyword(Source.EMPTY, randomAlphaOfLength(10)))
);
Expression.TypeResolution resolution = Options.resolve(
mapExpression,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertTrue(resolution.resolved());
}
public void testSingleEntryOptions_SingleDataTypeAllowed_UnknownOption_ShouldNotResolve() {
Map<String, DataType> allowedOptions = Map.of("keyword_option", DataType.KEYWORD);
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(Literal.keyword(Source.EMPTY, "unknown_option"), Literal.keyword(Source.EMPTY, randomAlphaOfLength(10)))
);
Expression.TypeResolution resolution = Options.resolve(
mapExpression,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertTrue(resolution.unresolved());
}
public void testMultipleEntryOptions_SingleDataTypeAllowed_ShouldResolve() {
Map<String, DataType> allowedOptions = Map.of(
"keyword_option",
DataType.KEYWORD,
"int_option",
DataType.INTEGER,
"double_option",
DataType.DOUBLE
);
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(
Literal.keyword(Source.EMPTY, "keyword_option"),
Literal.keyword(Source.EMPTY, randomAlphaOfLength(10)),
Literal.keyword(Source.EMPTY, "int_option"),
Literal.integer(Source.EMPTY, 1),
Literal.keyword(Source.EMPTY, "double_option"),
Literal.fromDouble(Source.EMPTY, 1.0)
)
);
Expression.TypeResolution resolution = Options.resolve(
mapExpression,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertTrue(resolution.resolved());
}
public void testMultipleEntryOptions_SingleDataTypeAllowed_UnknownOption_ShouldNotResolve() {
Map<String, DataType> allowedOptions = Map.of(
"keyword_option",
DataType.KEYWORD,
"int_option",
DataType.INTEGER,
"double_option",
DataType.DOUBLE
);
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(
Literal.keyword(Source.EMPTY, "unknown_option"),
Literal.keyword(Source.EMPTY, randomAlphaOfLength(10)),
Literal.keyword(Source.EMPTY, "int_option"),
Literal.integer(Source.EMPTY, 1),
Literal.keyword(Source.EMPTY, "double_option"),
Literal.fromDouble(Source.EMPTY, 1.0)
)
);
Expression.TypeResolution resolution = Options.resolve(
mapExpression,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertTrue(resolution.unresolved());
}
public void testSingleEntryOptions_NullDataType_ShouldNotResolve() {
Map<String, DataType> allowedOptions = new HashMap<>();
allowedOptions.put("keyword_option", null);
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(Literal.keyword(Source.EMPTY, "keyword_option"), Literal.keyword(Source.EMPTY, randomAlphaOfLength(10)))
);
Expression.TypeResolution resolution = Options.resolve(
mapExpression,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertTrue(resolution.unresolved());
}
public void testSingleEntryOptions_SingleDataTypeAllowed_MapExpressionAsValue_ShouldNotResolve() {
Map<String, DataType> allowedOptions = Map.of("map_option", DataType.OBJECT);
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(
Literal.keyword(Source.EMPTY, "map_option"),
new MapExpression(
Source.EMPTY,
List.of(Literal.keyword(Source.EMPTY, "some_option"), Literal.keyword(Source.EMPTY, randomAlphaOfLength(10)))
)
)
);
Expression.TypeResolution resolution = Options.resolve(
mapExpression,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertTrue(resolution.unresolved());
}
public void testNullOptions_MultipleDataTypesAllowed() {
Map<String, Collection<DataType>> allowedOptions = Map.of("keyword_text_option", List.of(DataType.KEYWORD));
Expression.TypeResolution resolution = Options.resolveWithMultipleDataTypesAllowed(
null,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertTrue(resolution.resolved());
}
public void testSingleEntryOptions_MultipleDataTypesAllowed_ShouldResolve() {
Map<String, Collection<DataType>> allowedOptions = Map.of("keyword_text_option", List.of(DataType.KEYWORD, DataType.TEXT));
// Keyword resolution
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(Literal.keyword(Source.EMPTY, "keyword_text_option"), Literal.keyword(Source.EMPTY, randomAlphaOfLength(10)))
);
Expression.TypeResolution resolution = Options.resolveWithMultipleDataTypesAllowed(
mapExpression,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertTrue(resolution.resolved());
// Text resolution
mapExpression = new MapExpression(
Source.EMPTY,
List.of(Literal.keyword(Source.EMPTY, "keyword_text_option"), Literal.text(Source.EMPTY, randomAlphaOfLength(10)))
);
resolution = Options.resolveWithMultipleDataTypesAllowed(
mapExpression,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertTrue(resolution.resolved());
}
public void testSingleEntryOptions_MultipleDataTypesAllowed_UnknownOption_ShouldNotResolve() {
Map<String, Collection<DataType>> allowedOptions = Map.of("keyword_string_option", List.of(DataType.KEYWORD, DataType.TEXT));
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(Literal.keyword(Source.EMPTY, "unknown_option"), Literal.keyword(Source.EMPTY, randomAlphaOfLength(10)))
);
Expression.TypeResolution resolution = Options.resolveWithMultipleDataTypesAllowed(
mapExpression,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertTrue(resolution.unresolved());
}
public void testMultipleEntryOptions_MultipleDataTypesAllowed_ShouldResolve() {
Map<String, Collection<DataType>> allowedOptions = Map.of(
"keyword_text_option",
List.of(DataType.KEYWORD, DataType.TEXT),
"double_int_option",
List.of(DataType.DOUBLE, DataType.INTEGER)
);
// Keyword & double resolution
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(
Literal.keyword(Source.EMPTY, "keyword_text_option"),
Literal.keyword(Source.EMPTY, randomAlphaOfLength(10)),
Literal.keyword(Source.EMPTY, "double_int_option"),
Literal.integer(Source.EMPTY, randomInt())
)
);
Expression.TypeResolution resolution = Options.resolveWithMultipleDataTypesAllowed(
mapExpression,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertTrue(resolution.resolved());
// Text & double resolution
mapExpression = new MapExpression(
Source.EMPTY,
List.of(
Literal.keyword(Source.EMPTY, "keyword_text_option"),
Literal.text(Source.EMPTY, randomAlphaOfLength(10)),
Literal.keyword(Source.EMPTY, "double_int_option"),
Literal.fromDouble(Source.EMPTY, randomDouble())
)
);
resolution = Options.resolveWithMultipleDataTypesAllowed(
mapExpression,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertTrue(resolution.resolved());
}
public void testMultipleEntryOptions_MultipleDataTypesAllowed_UnknownOption_ShouldNotResolve() {
Map<String, Collection<DataType>> allowedOptions = Map.of(
"keyword_text_option",
List.of(DataType.KEYWORD, DataType.TEXT),
"double_int_option",
List.of(DataType.DOUBLE, DataType.INTEGER)
);
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(
Literal.keyword(Source.EMPTY, "unknown_option"),
Literal.keyword(Source.EMPTY, randomAlphaOfLength(10)),
Literal.keyword(Source.EMPTY, "double_int_option"),
Literal.integer(Source.EMPTY, randomInt())
)
);
Expression.TypeResolution resolution = Options.resolveWithMultipleDataTypesAllowed(
mapExpression,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertTrue(resolution.unresolved());
}
public void testSingleEntryOptions_MultipleDataTypesAllowed_NullDataType_ShouldNotResolve() {
Collection<DataType> allowedDataTypes = new ArrayList<>();
allowedDataTypes.add(null);
Map<String, Collection<DataType>> allowedOptions = Map.of("null_option", allowedDataTypes);
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(Literal.keyword(Source.EMPTY, "null_option"), Literal.keyword(Source.EMPTY, randomAlphaOfLength(10)))
);
Expression.TypeResolution resolution = Options.resolveWithMultipleDataTypesAllowed(
mapExpression,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertTrue(resolution.unresolved());
}
public void testSingleEntryOptions_MultipleDataTypeAllowed_MapExpressionAsValue_ShouldNotResolve() {
Map<String, Collection<DataType>> allowedOptions = Map.of("map_option", List.of(DataType.OBJECT, DataType.TEXT));
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(
Literal.keyword(Source.EMPTY, "map_option"),
new MapExpression(
Source.EMPTY,
List.of(Literal.keyword(Source.EMPTY, "some_option"), Literal.keyword(Source.EMPTY, randomAlphaOfLength(10)))
)
)
);
Expression.TypeResolution resolution = Options.resolveWithMultipleDataTypesAllowed(
mapExpression,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertTrue(resolution.unresolved());
}
public void testPopulateMapWithExpressions_SingleEntry_KeywordDataType() throws InvalidArgumentException {
Map<String, Collection<DataType>> allowedOptions = Map.of("keyword_option", List.of(DataType.KEYWORD));
Map<String, Object> optionsMap = new HashMap<>();
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(Literal.keyword(Source.EMPTY, "keyword_option"), Literal.keyword(Source.EMPTY, "test_value"))
);
Options.populateMapWithExpressionsMultipleDataTypesAllowed(
mapExpression,
optionsMap,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertEquals(1, optionsMap.size());
assertTrue(optionsMap.containsKey("keyword_option"));
assertTrue(optionsMap.get("keyword_option") instanceof Literal);
Literal storedLiteral = (Literal) optionsMap.get("keyword_option");
assertEquals(DataType.KEYWORD, storedLiteral.dataType());
assertEquals("test_value", ((BytesRef) storedLiteral.value()).utf8ToString());
}
public void testPopulateMapWithExpressions_SingleEntry_MultipleAllowedDataTypes_Keyword() throws InvalidArgumentException {
Map<String, Collection<DataType>> allowedOptions = Map.of("keyword_text_option", List.of(DataType.KEYWORD, DataType.TEXT));
Map<String, Object> optionsMap = new HashMap<>();
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(Literal.keyword(Source.EMPTY, "keyword_text_option"), Literal.keyword(Source.EMPTY, "keyword_value"))
);
Options.populateMapWithExpressionsMultipleDataTypesAllowed(
mapExpression,
optionsMap,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertEquals(1, optionsMap.size());
assertTrue(optionsMap.containsKey("keyword_text_option"));
Literal storedLiteral = (Literal) optionsMap.get("keyword_text_option");
assertEquals(DataType.KEYWORD, storedLiteral.dataType());
assertEquals("keyword_value", ((BytesRef) storedLiteral.value()).utf8ToString());
}
public void testPopulateMapWithExpressions_MultipleEntries() throws InvalidArgumentException {
Map<String, Collection<DataType>> allowedOptions = Map.of(
"keyword_text_option",
List.of(DataType.KEYWORD, DataType.TEXT),
"double_int_option",
List.of(DataType.DOUBLE, DataType.INTEGER)
);
Map<String, Object> optionsMap = new HashMap<>();
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(
Literal.keyword(Source.EMPTY, "keyword_text_option"),
Literal.keyword(Source.EMPTY, "keyword_value"),
Literal.keyword(Source.EMPTY, "double_int_option"),
Literal.integer(Source.EMPTY, 42)
)
);
Options.populateMapWithExpressionsMultipleDataTypesAllowed(
mapExpression,
optionsMap,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
assertEquals(2, optionsMap.size());
// Check first option
assertTrue(optionsMap.containsKey("keyword_text_option"));
Literal firstLiteral = (Literal) optionsMap.get("keyword_text_option");
assertEquals(DataType.KEYWORD, firstLiteral.dataType());
assertEquals("keyword_value", ((BytesRef) firstLiteral.value()).utf8ToString());
// Check second option
assertTrue(optionsMap.containsKey("double_int_option"));
Literal secondLiteral = (Literal) optionsMap.get("double_int_option");
assertEquals(DataType.INTEGER, secondLiteral.dataType());
assertEquals(42, secondLiteral.value());
}
public void testPopulateMapWithExpressions_UnknownOption_ShouldThrowException() {
Map<String, Collection<DataType>> allowedOptions = Map.of("known_option", List.of(DataType.KEYWORD));
Map<String, Object> optionsMap = new HashMap<>();
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(Literal.keyword(Source.EMPTY, "unknown_option"), Literal.keyword(Source.EMPTY, "value"))
);
InvalidArgumentException exception = assertThrows(InvalidArgumentException.class, () -> {
Options.populateMapWithExpressionsMultipleDataTypesAllowed(
mapExpression,
optionsMap,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
});
assertTrue(exception.getMessage().contains("Invalid option [unknown_option]"));
assertTrue(exception.getMessage().contains("expected one of [known_option]"));
}
public void testPopulateMapWithExpressions_WrongDataType_ShouldThrowException() {
Map<String, Collection<DataType>> allowedOptions = Map.of("keyword_only_option", List.of(DataType.KEYWORD));
Map<String, Object> optionsMap = new HashMap<>();
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(Literal.keyword(Source.EMPTY, "keyword_only_option"), Literal.text(Source.EMPTY, "text_value"))
);
InvalidArgumentException exception = assertThrows(InvalidArgumentException.class, () -> {
Options.populateMapWithExpressionsMultipleDataTypesAllowed(
mapExpression,
optionsMap,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
});
assertTrue(exception.getMessage().contains("Invalid option [keyword_only_option]"));
assertTrue(exception.getMessage().contains("allowed types"));
}
public void testPopulateMapWithExpressions_EmptyAllowedDataTypes_ShouldThrowException() {
Map<String, Collection<DataType>> allowedOptions = Map.of("empty_option", List.of());
Map<String, Object> optionsMap = new HashMap<>();
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(Literal.keyword(Source.EMPTY, "empty_option"), Literal.keyword(Source.EMPTY, "value"))
);
InvalidArgumentException exception = assertThrows(InvalidArgumentException.class, () -> {
Options.populateMapWithExpressionsMultipleDataTypesAllowed(
mapExpression,
optionsMap,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
});
assertTrue(exception.getMessage().contains("Invalid option [empty_option]"));
}
public void testPopulateMapWithExpressions_NullAllowedDataTypes_ShouldThrowException() {
Map<String, Collection<DataType>> allowedOptions = new HashMap<>();
allowedOptions.put("null_option", null);
Map<String, Object> optionsMap = new HashMap<>();
MapExpression mapExpression = new MapExpression(
Source.EMPTY,
List.of(Literal.keyword(Source.EMPTY, "null_option"), Literal.keyword(Source.EMPTY, "value"))
);
InvalidArgumentException exception = assertThrows(InvalidArgumentException.class, () -> {
Options.populateMapWithExpressionsMultipleDataTypesAllowed(
mapExpression,
optionsMap,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
});
assertTrue(exception.getMessage().contains("Invalid option [null_option]"));
}
public void testPopulateMapWithExpressions_NonLiteralValue_ShouldThrowException() {
Map<String, Collection<DataType>> allowedOptions = Map.of("map_option", List.of(DataType.OBJECT));
Map<String, Object> optionsMap = new HashMap<>();
MapExpression nestedMap = new MapExpression(
Source.EMPTY,
List.of(Literal.keyword(Source.EMPTY, "nested_key"), Literal.keyword(Source.EMPTY, "nested_value"))
);
MapExpression mapExpression = new MapExpression(Source.EMPTY, List.of(Literal.keyword(Source.EMPTY, "map_option"), nestedMap));
InvalidArgumentException exception = assertThrows(InvalidArgumentException.class, () -> {
Options.populateMapWithExpressionsMultipleDataTypesAllowed(
mapExpression,
optionsMap,
Source.EMPTY,
TypeResolutions.ParamOrdinal.DEFAULT,
allowedOptions
);
});
assertTrue(exception.getMessage().contains("Invalid option [map_option]"));
}
}
| OptionsTests |
java | apache__dubbo | dubbo-cluster/src/main/java/org/apache/dubbo/rpc/cluster/support/wrapper/AbstractCluster.java | {
"start": 2053,
"end": 3579
} | class ____ implements Cluster {
private <T> Invoker<T> buildClusterInterceptors(AbstractClusterInvoker<T> clusterInvoker) {
AbstractClusterInvoker<T> last = buildInterceptorInvoker(new ClusterFilterInvoker<>(clusterInvoker));
if (Boolean.parseBoolean(ConfigurationUtils.getProperty(
clusterInvoker.getDirectory().getConsumerUrl().getScopeModel(),
CLUSTER_INTERCEPTOR_COMPATIBLE_KEY,
"false"))) {
return build27xCompatibleClusterInterceptors(clusterInvoker, last);
}
return last;
}
@Override
public <T> Invoker<T> join(Directory<T> directory, boolean buildFilterChain) throws RpcException {
if (buildFilterChain) {
return buildClusterInterceptors(doJoin(directory));
} else {
return doJoin(directory);
}
}
private <T> AbstractClusterInvoker<T> buildInterceptorInvoker(AbstractClusterInvoker<T> invoker) {
List<InvocationInterceptorBuilder> builders = ScopeModelUtil.getApplicationModel(
invoker.getUrl().getScopeModel())
.getExtensionLoader(InvocationInterceptorBuilder.class)
.getActivateExtensions();
if (CollectionUtils.isEmpty(builders)) {
return invoker;
}
return new InvocationInterceptorInvoker<>(invoker, builders);
}
protected abstract <T> AbstractClusterInvoker<T> doJoin(Directory<T> directory) throws RpcException;
static | AbstractCluster |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/asm/ClassVisitor.java | {
"start": 3072,
"end": 3676
} | class ____ to which this visitor must delegate method calls. May be
* null.
*/
protected ClassVisitor(final int api, final ClassVisitor classVisitor) {
if (api != Opcodes.ASM9
&& api != Opcodes.ASM8
&& api != Opcodes.ASM7
&& api != Opcodes.ASM6
&& api != Opcodes.ASM5
&& api != Opcodes.ASM4
&& api != Opcodes.ASM10_EXPERIMENTAL) {
throw new IllegalArgumentException("Unsupported api " + api);
}
// SPRING PATCH: no preview mode check for ASM experimental
this.api = api;
this.cv = classVisitor;
}
/**
* The | visitor |
java | apache__flink | flink-core-api/src/main/java/org/apache/flink/api/common/typeinfo/utils/TypeUtils.java | {
"start": 990,
"end": 1054
} | class ____ create objects via reflection. */
@Experimental
public | to |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/ClassUtilsTest.java | {
"start": 3235,
"end": 4051
} | class ____ {
// empty
}
}
private static final int MAX_ARRAY_DIMENSIONS = 255;
private static final String OBJECT_CANONICAL_NAME = "java.lang.Object";
private void assertGetClassReturnsClass(final Class<?> c) throws Exception {
assertEquals(c, ClassUtils.getClass(c.getName()));
}
private void assertGetClassThrowsClassNotFound(final String className) {
assertGetClassThrowsException(className, ClassNotFoundException.class);
}
private void assertGetClassThrowsException(final String className, final Class<? extends Exception> exceptionType) {
assertThrows(exceptionType, () -> ClassUtils.getClass(className),
"ClassUtils.getClass() should fail with an exception of type " + exceptionType.getName() + " when given | DeeplyNested |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplateMetadata.java | {
"start": 4620,
"end": 6088
} | class ____ implements NamedDiff<Metadata.ProjectCustom> {
final Diff<Map<String, ComponentTemplate>> componentTemplateDiff;
ComponentTemplateMetadataDiff(ComponentTemplateMetadata before, ComponentTemplateMetadata after) {
this.componentTemplateDiff = DiffableUtils.diff(
before.componentTemplates,
after.componentTemplates,
DiffableUtils.getStringKeySerializer()
);
}
ComponentTemplateMetadataDiff(StreamInput in) throws IOException {
this.componentTemplateDiff = DiffableUtils.readJdkMapDiff(
in,
DiffableUtils.getStringKeySerializer(),
ComponentTemplate::new,
ComponentTemplate::readComponentTemplateDiffFrom
);
}
@Override
public Metadata.ProjectCustom apply(Metadata.ProjectCustom part) {
return new ComponentTemplateMetadata(componentTemplateDiff.apply(((ComponentTemplateMetadata) part).componentTemplates));
}
@Override
public void writeTo(StreamOutput out) throws IOException {
componentTemplateDiff.writeTo(out);
}
@Override
public String getWriteableName() {
return TYPE;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.zero();
}
}
}
| ComponentTemplateMetadataDiff |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesResponse.java | {
"start": 1611,
"end": 8217
} | class ____ extends ActionResponse {
private final Set<String> cluster;
private final Set<ConfigurableClusterPrivilege> configurableClusterPrivileges;
private final Set<Indices> index;
private final Set<RoleDescriptor.ApplicationResourcePrivileges> application;
private final Set<String> runAs;
private final Set<RemoteIndices> remoteIndex;
private final RemoteClusterPermissions remoteClusterPermissions;
public GetUserPrivilegesResponse(StreamInput in) throws IOException {
cluster = in.readCollectionAsImmutableSet(StreamInput::readString);
configurableClusterPrivileges = in.readCollectionAsImmutableSet(ConfigurableClusterPrivileges.READER);
index = in.readCollectionAsImmutableSet(Indices::new);
application = in.readCollectionAsImmutableSet(RoleDescriptor.ApplicationResourcePrivileges::new);
runAs = in.readCollectionAsImmutableSet(StreamInput::readString);
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) {
remoteIndex = in.readCollectionAsImmutableSet(RemoteIndices::new);
} else {
remoteIndex = Set.of();
}
if (in.getTransportVersion().onOrAfter(ROLE_REMOTE_CLUSTER_PRIVS)) {
remoteClusterPermissions = new RemoteClusterPermissions(in);
} else {
remoteClusterPermissions = RemoteClusterPermissions.NONE;
}
}
public GetUserPrivilegesResponse(
Set<String> cluster,
Set<ConfigurableClusterPrivilege> conditionalCluster,
Set<Indices> index,
Set<RoleDescriptor.ApplicationResourcePrivileges> application,
Set<String> runAs,
Set<RemoteIndices> remoteIndex,
RemoteClusterPermissions remoteClusterPermissions
) {
this.cluster = Collections.unmodifiableSet(cluster);
this.configurableClusterPrivileges = Collections.unmodifiableSet(conditionalCluster);
this.index = Collections.unmodifiableSet(index);
this.application = Collections.unmodifiableSet(application);
this.runAs = Collections.unmodifiableSet(runAs);
this.remoteIndex = Collections.unmodifiableSet(remoteIndex);
this.remoteClusterPermissions = remoteClusterPermissions;
}
public Set<String> getClusterPrivileges() {
return cluster;
}
public Set<ConfigurableClusterPrivilege> getConditionalClusterPrivileges() {
return configurableClusterPrivileges;
}
public Set<Indices> getIndexPrivileges() {
return index;
}
public Set<RemoteIndices> getRemoteIndexPrivileges() {
return remoteIndex;
}
public RemoteClusterPermissions getRemoteClusterPermissions() {
return remoteClusterPermissions;
}
public Set<RoleDescriptor.ApplicationResourcePrivileges> getApplicationPrivileges() {
return application;
}
public Set<String> getRunAs() {
return runAs;
}
public boolean hasRemoteIndicesPrivileges() {
return false == remoteIndex.isEmpty();
}
public boolean hasRemoteClusterPrivileges() {
return remoteClusterPermissions.hasAnyPrivileges();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeStringCollection(cluster);
out.writeCollection(configurableClusterPrivileges, ConfigurableClusterPrivileges.WRITER);
out.writeCollection(index);
out.writeCollection(application);
out.writeStringCollection(runAs);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) {
out.writeCollection(remoteIndex);
} else if (hasRemoteIndicesPrivileges()) {
throw new IllegalArgumentException(
"versions of Elasticsearch before ["
+ TransportVersions.V_8_8_0.toReleaseVersion()
+ "] can't handle remote indices privileges and attempted to send to ["
+ out.getTransportVersion().toReleaseVersion()
+ "]"
);
}
if (out.getTransportVersion().onOrAfter(ROLE_REMOTE_CLUSTER_PRIVS)) {
remoteClusterPermissions.writeTo(out);
} else if (hasRemoteClusterPrivileges()) {
throw new IllegalArgumentException(
"versions of Elasticsearch before ["
+ ROLE_REMOTE_CLUSTER_PRIVS.toReleaseVersion()
+ "] can't handle remote cluster privileges and attempted to send to ["
+ out.getTransportVersion().toReleaseVersion()
+ "]"
);
}
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final GetUserPrivilegesResponse that = (GetUserPrivilegesResponse) other;
return Objects.equals(cluster, that.cluster)
&& Objects.equals(configurableClusterPrivileges, that.configurableClusterPrivileges)
&& Objects.equals(index, that.index)
&& Objects.equals(application, that.application)
&& Objects.equals(runAs, that.runAs)
&& Objects.equals(remoteIndex, that.remoteIndex)
&& Objects.equals(remoteClusterPermissions, that.remoteClusterPermissions);
}
@Override
public int hashCode() {
return Objects.hash(cluster, configurableClusterPrivileges, index, application, runAs, remoteIndex, remoteClusterPermissions);
}
public record RemoteIndices(Indices indices, Set<String> remoteClusters) implements ToXContentObject, Writeable {
public RemoteIndices(StreamInput in) throws IOException {
this(new Indices(in), Collections.unmodifiableSet(new TreeSet<>(in.readCollectionAsSet(StreamInput::readString))));
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
indices.innerToXContent(builder);
builder.field(RoleDescriptor.Fields.CLUSTERS.getPreferredName(), remoteClusters);
return builder.endObject();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
indices.writeTo(out);
out.writeStringCollection(remoteClusters);
}
}
/**
* This is modelled on {@link RoleDescriptor.IndicesPrivileges}, with support for multiple DLS and FLS field sets.
*/
public static | GetUserPrivilegesResponse |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/PostOptimizationPlanVerificationAware.java | {
"start": 927,
"end": 1072
} | class ____ to implement this interface. Otherwise it may implement {@link PostOptimizationVerificationAware},
* as more convenient.
*/
public | needs |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/codec/AbstractEncoder.java | {
"start": 956,
"end": 1121
} | class ____ {@link Encoder} implementations.
*
* @author Sebastien Deleuze
* @author Arjen Poutsma
* @since 5.0
* @param <T> the element type
*/
public abstract | for |
java | quarkusio__quarkus | integration-tests/virtual-threads/jms-virtual-threads/src/test/java/io/quarkus/it/vthreads/jms/WireMockExtension.java | {
"start": 407,
"end": 1311
} | class ____ implements QuarkusTestResourceLifecycleManager {
private WireMockServer wireMockServer;
@Override
public Map<String, String> start() {
wireMockServer = new WireMockServer();
wireMockServer.start();
wireMockServer.stubFor(post(urlEqualTo("/price/alert"))
.willReturn(aResponse().withBody("ok")));
wireMockServer.stubFor(post(urlEqualTo("/price/alert-message"))
.willReturn(aResponse().withBody("ok")));
return Map.of("price-alert/mp-rest/url", wireMockServer.baseUrl());
}
@Override
public void inject(TestInjector testInjector) {
testInjector.injectIntoFields(wireMockServer, f -> f.getType().isAssignableFrom(WireMockServer.class));
}
@Override
public void stop() {
if (null != wireMockServer) {
wireMockServer.stop();
}
}
}
| WireMockExtension |
java | alibaba__nacos | api/src/main/java/com/alibaba/nacos/api/ai/model/mcp/registry/Icon.java | {
"start": 2765,
"end": 4179
} | enum ____ {
/**
* PNG mime type.
*/
IMAGE_PNG("image/png"),
/**
* JPEG mime type.
*/
IMAGE_JPEG("image/jpeg"),
/**
* JPG mime type.
*/
IMAGE_JPG("image/jpg"),
/**
* SVG XML mime type.
*/
IMAGE_SVG_XML("image/svg+xml"),
/**
* WebP mime type.
*/
IMAGE_WEBP("image/webp");
private final String value;
/**
* Constructor.
*
* @param value value
*/
MimeType(String value) {
this.value = value;
}
/**
* Get value.
*
* @return value
*/
@JsonValue
public String getValue() {
return value;
}
/**
* Create from value.
*
* @param value value
* @return MimeType
*/
@JsonCreator
public static MimeType fromValue(String value) {
for (MimeType t : MimeType.values()) {
if (t.value.equalsIgnoreCase(value)) {
return t;
}
}
throw new IllegalArgumentException("Unknown mimeType: " + value);
}
}
/**
* Theme enum: light or dark.
* Serialized/deserialized as the lowercase string value.
*/
public static | MimeType |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/ResponseInfo.java | {
"start": 1247,
"end": 2837
} | class ____ {
public final String key;
public final String url;
public final Object value;
public final boolean isRaw;
Item(String key, String url, Object value, boolean isRaw) {
this.key = key;
this.url = url;
this.value = value;
this.isRaw = isRaw;
}
public static Item of(String key, Object value, boolean isRaw) {
return new Item(key, null, value, isRaw);
}
public static Item of(String key, String url, Object value) {
return new Item(key, url, value, false);
}
}
final List<Item> items = Lists.newArrayList();
String about = "Info";
// Do NOT add any constructors here, unless...
public static ResponseInfo $about(String about) {
ResponseInfo info = new ResponseInfo();
info.about = about;
return info;
}
public ResponseInfo about(String about) {
this.about = about;
return this;
}
public String about() {
return about;
}
public ResponseInfo __(String key, Object value) {
items.add(Item.of(key, value, false));
return this;
}
public ResponseInfo __(String key, String url, Object anchor) {
if (url == null) {
items.add(Item.of(key, anchor, false));
} else {
items.add(Item.of(key, url, anchor));
}
return this;
}
//Value is raw HTML and shouldn't be escaped
public ResponseInfo _r(String key, Object value) {
items.add(Item.of(key, value, true));
return this;
}
public void clear() {
items.clear();
}
@Override
public Iterator<Item> iterator() {
return items.iterator();
}
}
| Item |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/convert/DefaultMutableConversionService.java | {
"start": 3607,
"end": 59644
} | class ____ implements MutableConversionService {
private static final int CACHE_MAX = 256;
private static final int CACHE_EVICTION_BATCH = 64;
private static final TypeConverter UNCONVERTIBLE = (object, targetType, context) -> Optional.empty();
private static final Map<Class<?>, List<Class<?>>> COMMON_TYPE_HIERARCHY = CollectionUtils.newHashMap(30);
static {
// Optimize common hierarchy scenarios
COMMON_TYPE_HIERARCHY.put(String.class, List.of(String.class, CharSequence.class, Object.class));
COMMON_TYPE_HIERARCHY.put(CharSequence.class, List.of(CharSequence.class, Object.class));
COMMON_TYPE_HIERARCHY.put(Character.class, List.of(Character.class, Object.class));
COMMON_TYPE_HIERARCHY.put(Number.class, List.of(Number.class, Object.class));
COMMON_TYPE_HIERARCHY.put(Integer.class, List.of(Integer.class, Number.class, Object.class));
COMMON_TYPE_HIERARCHY.put(Double.class, List.of(Double.class, Number.class, Object.class));
COMMON_TYPE_HIERARCHY.put(Float.class, List.of(Float.class, Number.class, Object.class));
COMMON_TYPE_HIERARCHY.put(Long.class, List.of(Long.class, Number.class, Object.class));
COMMON_TYPE_HIERARCHY.put(Short.class, List.of(Short.class, Number.class, Object.class));
COMMON_TYPE_HIERARCHY.put(Byte.class, List.of(Byte.class, Number.class, Object.class));
COMMON_TYPE_HIERARCHY.put(BigInteger.class, List.of(BigInteger.class, Number.class, Object.class));
COMMON_TYPE_HIERARCHY.put(BigDecimal.class, List.of(BigDecimal.class, Number.class, Object.class));
COMMON_TYPE_HIERARCHY.put(Iterable.class, List.of(Iterable.class, Object.class));
COMMON_TYPE_HIERARCHY.put(Collection.class, List.of(Collection.class, Iterable.class, Object.class));
COMMON_TYPE_HIERARCHY.put(List.class, List.of(List.class, Collection.class, Iterable.class, Object.class));
COMMON_TYPE_HIERARCHY.put(Set.class, List.of(Set.class, Collection.class, Iterable.class, Object.class));
COMMON_TYPE_HIERARCHY.put(ArrayList.class, List.of(ArrayList.class, List.class, Collection.class, Iterable.class, Object.class));
COMMON_TYPE_HIERARCHY.put(LinkedList.class, List.of(LinkedList.class, List.class, Collection.class, Iterable.class, Object.class));
COMMON_TYPE_HIERARCHY.put(HashSet.class, List.of(HashSet.class, Set.class, Collection.class, Iterable.class, Object.class));
COMMON_TYPE_HIERARCHY.put(LinkedHashSet.class, List.of(LinkedHashSet.class, Set.class, Collection.class, Iterable.class, Object.class));
COMMON_TYPE_HIERARCHY.put(Map.class, List.of(Map.class, Object.class));
COMMON_TYPE_HIERARCHY.put(HashMap.class, List.of(HashMap.class, Map.class, Object.class));
COMMON_TYPE_HIERARCHY.put(LinkedHashMap.class, List.of(LinkedHashMap.class, Map.class, Object.class));
}
/**
* The internal converters added during the startup.
* The collection should be modified in the synchronous way only during the startup, after that it should be immutable.
*/
private final Map<ConvertiblePair, TypeConverter> internalConverters = CollectionUtils.newHashMap(300);
/**
* The custom converters added after the startup or in a way we cannot prevent concurrent access.
*/
private final Map<ConvertiblePair, TypeConverter> customConverters = new ConcurrentHashMap<>();
/**
* The additional converters which are variations of different converters.
*/
private final Map<ConvertiblePair, TypeConverter> converterCache = new ConcurrentHashMap<>();
/**
* The mutable conversion service which is adding new converters to the internal collection which is not synchronized.
*/
private final MutableConversionService internalMutableConversionService = new MutableConversionService() {
@Override
public <S, T> void addConverter(Class<S> sourceType, Class<T> targetType, Function<S, T> typeConverter) {
addInternalConverter(sourceType, targetType, typeConverter);
}
@Override
public <S, T> void addConverter(Class<S> sourceType, Class<T> targetType, TypeConverter<S, T> typeConverter) {
addInternalConverter(sourceType, targetType, typeConverter);
}
@Override
public <T> Optional<T> convert(Object object, Class<T> targetType, ConversionContext context) {
return DefaultMutableConversionService.this.convert(object, targetType, context);
}
@Override
public <S, T> Optional<T> convert(S object, Class<? super S> sourceType, Class<T> targetType, ConversionContext context) {
return DefaultMutableConversionService.this.convert(object, sourceType, targetType, context);
}
@Override
public <S, T> boolean canConvert(Class<S> sourceType, Class<T> targetType) {
return DefaultMutableConversionService.this.canConvert(sourceType, targetType);
}
@Override
public <T> Optional<T> convert(Object object, Class<T> targetType) {
return DefaultMutableConversionService.this.convert(object, targetType);
}
@Override
public <T> Optional<T> convert(Object object, Argument<T> targetType) {
return DefaultMutableConversionService.this.convert(object, targetType);
}
@Override
public <T> Optional<T> convert(Object object, ArgumentConversionContext<T> context) {
return DefaultMutableConversionService.this.convert(object, context);
}
@Override
public <T> T convertRequired(Object value, Class<T> type) {
return DefaultMutableConversionService.this.convertRequired(value, type);
}
@Override
public <T> T convertRequired(Object value, Argument<T> argument) {
return DefaultMutableConversionService.this.convertRequired(value, argument);
}
@Override
public <T> T convertRequired(Object value, ArgumentConversionContext<T> context) {
return DefaultMutableConversionService.this.convertRequired(value, context);
}
};
/**
* Constructor.
*/
public DefaultMutableConversionService() {
registerDefaultConverters();
}
@SuppressWarnings("unchecked")
@Override
public <S, T> Optional<T> convert(S object, Class<? super S> sourceType, Class<T> targetType, ConversionContext context) {
if (object == null || targetType == null || context == null) {
return Optional.empty();
}
if (targetType == Object.class) {
return Optional.of((T) object);
}
targetType = targetType.isPrimitive() ? (Class<T>) ReflectionUtils.getWrapperType(targetType) : targetType;
if (targetType.isInstance(object) && !(object instanceof Iterable) && !(object instanceof Map)) {
return Optional.of((T) object);
}
final AnnotationMetadata annotationMetadata = context.getAnnotationMetadata();
String formattingAnnotation;
if (annotationMetadata.hasStereotypeNonRepeating(Format.class)) {
formattingAnnotation = annotationMetadata.getAnnotationNameByStereotype(Format.class).orElse(null);
} else {
formattingAnnotation = null;
}
ConvertiblePair pair = new ConvertiblePair(sourceType, targetType, formattingAnnotation);
TypeConverter<Object, T> typeConverter = findConverter(pair);
if (typeConverter == null) {
typeConverter = findTypeConverter(sourceType, targetType, formattingAnnotation);
if (typeConverter == null) {
addToConverterCache(pair, UNCONVERTIBLE);
return Optional.empty();
} else {
addToConverterCache(pair, typeConverter);
}
}
if (typeConverter == UNCONVERTIBLE) {
return Optional.empty();
}
return typeConverter.convert(object, targetType, context);
}
@Override
public <S, T> boolean canConvert(Class<S> sourceType, Class<T> targetType) {
ConvertiblePair pair = new ConvertiblePair(sourceType, targetType, null);
TypeConverter<Object, T> typeConverter = findConverter(pair);
if (typeConverter == null) {
typeConverter = findTypeConverter(sourceType, targetType, null);
if (typeConverter != null) {
addToConverterCache(pair, typeConverter);
return typeConverter != UNCONVERTIBLE;
}
return false;
}
return typeConverter != UNCONVERTIBLE;
}
private <T, S> TypeConverter<T, S> findConverter(ConvertiblePair pair) {
TypeConverter typeConverter = internalConverters.get(pair);
if (typeConverter != null) {
return typeConverter;
}
return converterCache.get(pair);
}
private <T, S> TypeConverter<T, S> findRegisteredConverter(ConvertiblePair pair) {
TypeConverter typeConverter = internalConverters.get(pair);
if (typeConverter != null) {
return typeConverter;
}
return customConverters.get(pair);
}
@Override
public <S, T> void addConverter(Class<S> sourceType, Class<T> targetType, TypeConverter<S, T> typeConverter) {
addConverterAnalyzeSource(customConverters, sourceType, targetType, typeConverter);
}
/**
* Add internal converter.
*
* @param sourceType The source type
* @param targetType The target type
* @param typeConverter The converter
* @param <S> The source type
* @param <T> The target type
*/
@Internal
public <S, T> void addInternalConverter(Class<S> sourceType, Class<T> targetType, TypeConverter<S, T> typeConverter) {
addConverterAnalyzeSource(internalConverters, sourceType, targetType, typeConverter);
}
private <S, T> void addConverterAnalyzeSource(Map<ConvertiblePair, TypeConverter> typeConverters,
Class<S> sourceType,
Class<T> targetType,
TypeConverter<S, T> typeConverter) {
addConverterToMap(typeConverters, sourceType, targetType, typeConverter);
// Add variations of common representations of the source type
if (sourceType == CharSequence.class) {
TypeConverter<String, T> converter;
if (typeConverter instanceof FormattingTypeConverter<S, T, ?> formattingTypeConverter) {
converter = new FormattingTypeConverter<>() {
@Override
public Class<Annotation> annotationType() {
return (Class<Annotation>) formattingTypeConverter.annotationType();
}
@Override
public Optional<T> convert(String value, Class<T> targetType, ConversionContext context) {
return typeConverter.convert((S) value.toString(), (Class<T>) CharSequence.class, context);
}
};
} else {
converter = (value, theTarget, context) -> typeConverter.convert((S) value.toString(), theTarget, context);
}
addConverterToMap(typeConverters, String.class, targetType, converter);
} else if (sourceType == String.class) {
addConverterToMap(typeConverters, CharSequence.class, targetType, (TypeConverter) typeConverter);
} else if (sourceType == Iterable.class) {
// Recursively add implementations
addConverterAnalyzeSource(typeConverters, Collection.class, targetType, (TypeConverter) typeConverter);
} else if (sourceType == Collection.class) {
// Recursively add implementations
addConverterAnalyzeSource(typeConverters, List.class, targetType, (TypeConverter) typeConverter);
addConverterAnalyzeSource(typeConverters, Set.class, targetType, (TypeConverter) typeConverter);
addConverterAnalyzeSource(typeConverters, Queue.class, targetType, (TypeConverter) typeConverter);
addConverterAnalyzeSource(typeConverters, Deque.class, targetType, (TypeConverter) typeConverter);
} else if (sourceType == Queue.class) {
// Recursively add implementations
addConverterAnalyzeSource(typeConverters, Deque.class, targetType, (TypeConverter) typeConverter);
} else if (sourceType == List.class) {
addConverterToMap(typeConverters, ArrayList.class, targetType, (TypeConverter) typeConverter);
addConverterToMap(typeConverters, LinkedList.class, targetType, (TypeConverter) typeConverter);
} else if (sourceType == Set.class) {
addConverterToMap(typeConverters, HashSet.class, targetType, (TypeConverter) typeConverter);
addConverterToMap(typeConverters, LinkedHashSet.class, targetType, (TypeConverter) typeConverter);
} else if (sourceType == Map.class) {
addConverterToMap(typeConverters, HashMap.class, targetType, (TypeConverter) typeConverter);
addConverterToMap(typeConverters, LinkedHashMap.class, targetType, (TypeConverter) typeConverter);
addConverterToMap(typeConverters, ConcurrentHashMap.class, targetType, (TypeConverter) typeConverter);
} else if (sourceType == Deque.class) {
addConverterToMap(typeConverters, LinkedList.class, targetType, (TypeConverter) typeConverter);
addConverterToMap(typeConverters, ArrayDeque.class, targetType, (TypeConverter) typeConverter);
}
}
private <S, T> void addConverterToMap(Map<ConvertiblePair, TypeConverter> typeConverters,
Class<S> sourceType,
Class<T> targetType,
TypeConverter<S, T> typeConverter) {
ConvertiblePair pair = newPair(sourceType, targetType, typeConverter);
typeConverters.put(pair, typeConverter);
if (typeConverters != internalConverters) {
addToConverterCache(pair, typeConverter);
}
}
@Override
public <S, T> void addConverter(Class<S> sourceType, Class<T> targetType, Function<S, T> function) {
addConverter(sourceType, targetType, TypeConverter.of(sourceType, targetType, function));
}
/**
* Add internal converter.
*
* @param sourceType The source type
* @param targetType The target type
* @param function The converter function
* @param <S> The source type
* @param <T> The target type
*/
@Internal
public <S, T> void addInternalConverter(Class<S> sourceType, Class<T> targetType, Function<S, T> function) {
addInternalConverter(sourceType, targetType, TypeConverter.of(sourceType, targetType, function));
}
private void addToConverterCache(ConvertiblePair pair, TypeConverter<?, ?> typeConverter) {
converterCache.put(pair, typeConverter);
if (converterCache.size() > CACHE_MAX) {
CopyOnWriteMap.evict(converterCache, CACHE_EVICTION_BATCH);
}
}
/**
* Default Converters.
*/
@SuppressWarnings({"OptionalIsPresent", "unchecked"})
private void registerDefaultConverters() {
LinkedHashMap<Class<?>, Class<?>> primitiveArrays = new LinkedHashMap<>();
primitiveArrays.put(Boolean[].class, boolean[].class);
primitiveArrays.put(Byte[].class, byte[].class);
primitiveArrays.put(Character[].class, char[].class);
primitiveArrays.put(Double[].class, double[].class);
primitiveArrays.put(Float[].class, float[].class);
primitiveArrays.put(Integer[].class, int[].class);
primitiveArrays.put(Long[].class, long[].class);
primitiveArrays.put(Short[].class, short[].class);
// primitive array to wrapper array
@SuppressWarnings("rawtypes")
Function primitiveArrayToWrapperArray = ArrayUtils::toWrapperArray;
// wrapper to primitive array converters
Function<Object[], Object> wrapperArrayToPrimitiveArray = ArrayUtils::toPrimitiveArray;
for (Map.Entry<Class<?>, Class<?>> e : primitiveArrays.entrySet()) {
Class<?> wrapperArray = e.getKey();
Class<?> primitiveArray = e.getValue();
addInternalConverter(primitiveArray, wrapperArray, primitiveArrayToWrapperArray);
addInternalConverter(wrapperArray, primitiveArray, (Function) wrapperArrayToPrimitiveArray);
}
// Object -> List
addInternalConverter(Object.class, List.class, (object, targetType, context) -> {
Optional<Argument<?>> firstTypeVariable = context.getFirstTypeVariable();
Argument<?> argument = firstTypeVariable.orElse(Argument.OBJECT_ARGUMENT);
Optional converted = DefaultMutableConversionService.this.convert(object, context.with(argument));
if (converted.isPresent()) {
return Optional.of(Collections.singletonList(converted.get()));
}
return Optional.empty();
});
addInternalConverter(byte[].class, String.class, (bytes, targetType, context) -> Optional.of(new String(bytes, context.getCharset())));
// String -> Class
addInternalConverter(CharSequence.class, Class.class, (object, targetType, context) -> {
ClassLoader classLoader = targetType.getClassLoader();
if (classLoader == null) {
classLoader = DefaultMutableConversionService.class.getClassLoader();
}
//noinspection rawtypes
return (Optional) ClassUtils.forName(object.toString(), classLoader);
});
// AnnotationClassValue -> Class
addInternalConverter(AnnotationClassValue.class, Class.class, (object, targetType, context) -> object.getType());
addInternalConverter(AnnotationClassValue.class, Object.class, (object, targetType, context) -> {
if (targetType.equals(Class.class)) {
return object.getType();
} else {
if (CharSequence.class.isAssignableFrom(targetType)) {
return Optional.of(object.getName());
} else {
Optional i = object.getInstance();
if (i.isPresent() && targetType.isInstance(i.get())) {
return i;
}
return Optional.empty();
}
}
});
addInternalConverter(AnnotationClassValue[].class, Class.class, (object, targetType, context) -> {
if (object.length > 0) {
final AnnotationClassValue o = object[0];
if (o != null) {
return o.getType();
}
}
return Optional.empty();
});
addInternalConverter(AnnotationClassValue[].class, Class[].class, (object, targetType, context) -> {
List<Class<?>> classes = new ArrayList<>(object.length);
for (AnnotationClassValue<?> annotationClassValue : object) {
if (annotationClassValue != null) {
final Optional<? extends Class<?>> type = annotationClassValue.getType();
if (type.isPresent()) {
classes.add(type.get());
}
}
}
return Optional.of(classes.toArray(EMPTY_CLASS_ARRAY));
});
// URI -> URL
addInternalConverter(URI.class, URL.class, uri -> {
try {
return uri.toURL();
} catch (MalformedURLException e) {
return null;
}
});
// InputStream -> String
addInternalConverter(InputStream.class, String.class, (object, targetType, context) -> {
try (BufferedReader reader = new BufferedReader(new InputStreamReader(object))) {
return Optional.of(IOUtils.readText(reader));
} catch (IOException e) {
context.reject(e);
return Optional.empty();
}
});
// String -> byte[]
addInternalConverter(CharSequence.class, byte[].class, (object, targetType, context) -> Optional.of(object.toString().getBytes(context.getCharset())));
addInternalConverter(Integer.class, byte[].class, (object, targetType, context) -> Optional.of(ByteBuffer.allocate(Integer.BYTES).putInt(object).array()));
addInternalConverter(Character.class, byte[].class, (object, targetType, context) -> Optional.of(ByteBuffer.allocate(Integer.BYTES).putChar(object).array()));
addInternalConverter(Long.class, byte[].class, (object, targetType, context) -> Optional.of(ByteBuffer.allocate(Long.BYTES).putLong(object).array()));
addInternalConverter(Short.class, byte[].class, (object, targetType, context) -> Optional.of(ByteBuffer.allocate(Short.BYTES).putShort(object).array()));
addInternalConverter(Double.class, byte[].class, (object, targetType, context) -> Optional.of(ByteBuffer.allocate(Double.BYTES).putDouble(object).array()));
addInternalConverter(Float.class, byte[].class, (object, targetType, context) -> Optional.of(ByteBuffer.allocate(Float.BYTES).putFloat(object).array()));
// InputStream -> Number
addInternalConverter(InputStream.class, Number.class, (object, targetType, context) -> {
Optional<String> convert = DefaultMutableConversionService.this.convert(object, String.class, context);
if (convert.isPresent()) {
return convert.flatMap(val -> DefaultMutableConversionService.this.convert(val, targetType, context));
}
return Optional.empty();
});
// Reader -> String
addInternalConverter(Reader.class, String.class, (object, targetType, context) -> {
try (BufferedReader reader = object instanceof BufferedReader bufferedReader ? bufferedReader : new BufferedReader(object)) {
return Optional.of(IOUtils.readText(reader));
} catch (IOException e) {
context.reject(e);
return Optional.empty();
}
});
// String -> File
addInternalConverter(CharSequence.class, File.class, (object, targetType, context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
}
return Optional.of(new File(object.toString()));
});
// String[] -> Enum
addInternalConverter(String[].class, Enum.class, (object, targetType, context) -> {
if (object == null || object.length == 0) {
return Optional.empty();
}
StringJoiner joiner = new StringJoiner("");
for (String string : object) {
joiner.add(string);
}
final String val = joiner.toString();
return convert(val, targetType, context);
});
addInternalConverter(String[].class, CharSequence.class, (object, targetType, context) -> {
if (object == null || object.length == 0) {
return Optional.empty();
}
StringJoiner joiner = new StringJoiner("");
for (String string : object) {
joiner.add(string);
}
return convert(joiner.toString(), targetType, context);
});
// CharSequence -> Long for bytes
ReadableBytesTypeConverter readableBytesTypeConverter = new ReadableBytesTypeConverter();
addInternalConverter(CharSequence.class, Number.class, readableBytesTypeConverter);
addInternalConverter(CharSequence.class, Long.class, new FormattingTypeConverter<CharSequence, Long, ReadableBytes>() {
@Override
public Class<ReadableBytes> annotationType() {
return readableBytesTypeConverter.annotationType();
}
@Override
public Optional<Long> convert(CharSequence object, Class<Long> targetType, ConversionContext context) {
return readableBytesTypeConverter.convert(object, Number.class, context).map(Number::longValue);
}
});
addInternalConverter(CharSequence.class, Integer.class, new FormattingTypeConverter<CharSequence, Integer, ReadableBytes>() {
@Override
public Class<ReadableBytes> annotationType() {
return readableBytesTypeConverter.annotationType();
}
@Override
public Optional<Integer> convert(CharSequence object, Class<Integer> targetType, ConversionContext context) {
return readableBytesTypeConverter.convert(object, Number.class, context).map(Number::intValue);
}
});
// CharSequence -> Date
addInternalConverter(
CharSequence.class,
Date.class,
(object, targetType, context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
}
try {
SimpleDateFormat format = resolveFormat(context);
return Optional.of(format.parse(object.toString()));
} catch (ParseException e) {
context.reject(object, e);
return Optional.empty();
}
}
);
// Date -> CharSequence
addInternalConverter(
Date.class,
CharSequence.class,
(object, targetType, context) -> {
SimpleDateFormat format = resolveFormat(context);
return Optional.of(format.format(object));
}
);
// Number -> CharSequence
addInternalConverter(
Number.class,
CharSequence.class,
(object, targetType, context) -> {
NumberFormat format = resolveNumberFormat(context);
if (format != null) {
return Optional.of(format.format(object));
} else {
return Optional.of(object.toString());
}
}
);
// String -> Path
addInternalConverter(
CharSequence.class,
Path.class, (object, targetType, context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
}
try {
return Optional.of(Paths.get(object.toString()));
} catch (Exception e) {
context.reject("Invalid path [" + object + " ]: " + e.getMessage(), e);
return Optional.empty();
}
});
// String -> Integer
addInternalConverter(CharSequence.class, Integer.class, (CharSequence object, Class<Integer> targetType, ConversionContext context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
}
try {
Integer converted = Integer.valueOf(object.toString());
return Optional.of(converted);
} catch (NumberFormatException e) {
context.reject(object, e);
return Optional.empty();
}
});
// String -> BigInteger
addInternalConverter(CharSequence.class, BigInteger.class, (CharSequence object, Class<BigInteger> targetType, ConversionContext context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
}
try {
BigInteger converted = new BigInteger(object.toString());
return Optional.of(converted);
} catch (NumberFormatException e) {
context.reject(object, e);
return Optional.empty();
}
});
// String -> Float
addInternalConverter(CharSequence.class, Float.class, (CharSequence object, Class<Float> targetType, ConversionContext context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
}
try {
Float converted = Float.valueOf(object.toString());
return Optional.of(converted);
} catch (NumberFormatException e) {
context.reject(object, e);
return Optional.empty();
}
});
// String -> Double
addInternalConverter(CharSequence.class, Double.class, (CharSequence object, Class<Double> targetType, ConversionContext context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
}
try {
Double converted = Double.valueOf(object.toString());
return Optional.of(converted);
} catch (NumberFormatException e) {
context.reject(object, e);
return Optional.empty();
}
});
// String -> Long
addInternalConverter(CharSequence.class, Long.class, (CharSequence object, Class<Long> targetType, ConversionContext context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
}
try {
Long converted = Long.valueOf(object.toString());
return Optional.of(converted);
} catch (NumberFormatException e) {
context.reject(object, e);
return Optional.empty();
}
});
// String -> Short
addInternalConverter(CharSequence.class, Short.class, (CharSequence object, Class<Short> targetType, ConversionContext context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
}
try {
Short converted = Short.valueOf(object.toString());
return Optional.of(converted);
} catch (NumberFormatException e) {
context.reject(object, e);
return Optional.empty();
}
});
// String -> Byte
addInternalConverter(CharSequence.class, Byte.class, (CharSequence object, Class<Byte> targetType, ConversionContext context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
}
try {
Byte converted = Byte.valueOf(object.toString());
return Optional.of(converted);
} catch (NumberFormatException e) {
context.reject(object, e);
return Optional.empty();
}
});
// String -> BigDecimal
addInternalConverter(CharSequence.class, BigDecimal.class, (CharSequence object, Class<BigDecimal> targetType, ConversionContext context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
}
try {
BigDecimal converted = new BigDecimal(object.toString());
return Optional.of(converted);
} catch (NumberFormatException e) {
context.reject(object, e);
return Optional.empty();
}
});
// String -> Boolean
addInternalConverter(CharSequence.class, Boolean.class, (CharSequence object, Class<Boolean> targetType, ConversionContext context) -> {
String booleanString = object.toString().toLowerCase(Locale.ENGLISH);
return switch (booleanString) {
case "yes", "y", "on", "true" -> Optional.of(Boolean.TRUE);
default -> Optional.of(Boolean.FALSE);
};
});
// String -> URL
addInternalConverter(CharSequence.class, URL.class, (CharSequence object, Class<URL> targetType, ConversionContext context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
}
try {
String spec = object.toString();
if (!spec.contains("://")) {
spec = "http://" + spec;
}
return Optional.of(new URL(spec));
} catch (MalformedURLException e) {
context.reject(object, e);
return Optional.empty();
}
});
// String -> URI
addInternalConverter(CharSequence.class, URI.class, (CharSequence object, Class<URI> targetType, ConversionContext context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
}
try {
return Optional.of(new URI(object.toString()));
} catch (URISyntaxException e) {
context.reject(object, e);
return Optional.empty();
}
});
// String -> Locale
addInternalConverter(CharSequence.class, Locale.class, object -> StringUtils.parseLocale(object.toString()));
// String -> UUID
addInternalConverter(CharSequence.class, UUID.class, (CharSequence object, Class<UUID> targetType, ConversionContext context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
}
try {
return Optional.of(UUID.fromString(object.toString()));
} catch (IllegalArgumentException e) {
context.reject(object, e);
return Optional.empty();
}
});
// String -> Currency
addInternalConverter(CharSequence.class, Currency.class, (CharSequence object, Class<Currency> targetType, ConversionContext context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
}
try {
return Optional.of(Currency.getInstance(object.toString()));
} catch (IllegalArgumentException e) {
context.reject(object, e);
return Optional.empty();
}
});
// String -> TimeZone
addInternalConverter(CharSequence.class, TimeZone.class, (CharSequence object, Class<TimeZone> targetType, ConversionContext context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
}
return Optional.of(TimeZone.getTimeZone(object.toString()));
});
// String -> Charset
addInternalConverter(CharSequence.class, Charset.class, (CharSequence object, Class<Charset> targetType, ConversionContext context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
}
try {
return Optional.of(Charset.forName(object.toString()));
} catch (IllegalCharsetNameException | UnsupportedCharsetException e) {
context.reject(object, e);
return Optional.empty();
}
});
// String -> Character
addInternalConverter(CharSequence.class, Character.class, (CharSequence object, Class<Character> targetType, ConversionContext context) -> {
String str = object.toString();
if (str.length() == 1) {
return Optional.of(str.charAt(0));
} else {
return Optional.empty();
}
});
// String -> Array
addInternalConverter(CharSequence.class, Object[].class, (CharSequence object, Class<Object[]> targetType, ConversionContext context) -> {
if (object instanceof AnnotationClassValue<?> annotationClassValue && targetType.equals(AnnotationClassValue[].class)) {
AnnotationClassValue<?>[] array = new AnnotationClassValue<?>[1];
array[0] = annotationClassValue;
return Optional.of(array);
}
String str = object.toString();
String[] strings = str.split(",");
Class<?> componentType = ReflectionUtils.getWrapperType(targetType.getComponentType());
Object newArray = Array.newInstance(componentType, strings.length);
for (int i = 0; i < strings.length; i++) {
String string = strings[i];
Optional<?> converted = convert(string, componentType);
if (converted.isPresent()) {
Array.set(newArray, i, converted.get());
}
}
return Optional.of((Object[]) newArray);
});
// String -> Int Array
addInternalConverter(CharSequence.class, int[].class, (CharSequence object, Class<int[]> targetType, ConversionContext context) -> {
String str = object.toString();
String[] strings = str.split(",");
Object newArray = Array.newInstance(int.class, strings.length);
for (int i = 0; i < strings.length; i++) {
String string = strings[i];
Optional<?> converted = convert(string, int.class);
if (converted.isPresent()) {
Array.set(newArray, i, converted.get());
}
}
return Optional.of((int[]) newArray);
});
// String -> Char Array
addInternalConverter(String.class, char[].class, (String object, Class<char[]> targetType, ConversionContext context) -> Optional.of(object.toCharArray()));
// Object[] -> String[]
addInternalConverter(Object[].class, String[].class, (Object[] object, Class<String[]> targetType, ConversionContext context) -> {
String[] strings = new String[object.length];
for (int i = 0; i < object.length; i++) {
Object o = object[i];
if (o != null) {
strings[i] = o.toString();
}
}
return Optional.of(strings);
});
// Collection -> String[]
addInternalConverter(Collection.class, String[].class, (Collection collection, Class<String[]> targetType, ConversionContext context) -> {
String[] strings = new String[collection.size()];
int i = 0;
for (Object o : collection) {
if (o != null) {
strings[i++] = o.toString();
}
}
return Optional.of(strings);
});
// String -> Enum
addInternalConverter(CharSequence.class, Enum.class, new CharSequenceToEnumConverter<>());
// Object -> String
addInternalConverter(Object.class, String.class, (Object object, Class<String> targetType, ConversionContext context) -> Optional.of(object.toString()));
// Number -> Number
addInternalConverter(Number.class, Number.class, (Number object, Class<Number> targetType, ConversionContext context) -> {
Class<?> targetNumberType = ReflectionUtils.getWrapperType(targetType);
if (targetNumberType.isInstance(object)) {
return Optional.of(object);
}
if (targetNumberType == Integer.class) {
return Optional.of(object.intValue());
}
if (targetNumberType == Long.class) {
return Optional.of(object.longValue());
}
if (targetNumberType == Short.class) {
return Optional.of(object.shortValue());
}
if (targetNumberType == Byte.class) {
return Optional.of(object.byteValue());
}
if (targetNumberType == Float.class) {
return Optional.of(object.floatValue());
}
if (targetNumberType == Double.class) {
return Optional.of(object.doubleValue());
}
if (targetNumberType == BigInteger.class) {
if (object instanceof BigDecimal bigDecimal) {
return Optional.of(bigDecimal.toBigInteger());
}
return Optional.of(BigInteger.valueOf(object.longValue()));
}
if (targetNumberType == BigDecimal.class) {
return Optional.of(new BigDecimal(object.toString()));
}
return Optional.empty();
});
// String -> List/Iterable
addInternalConverter(CharSequence.class, Iterable.class, (CharSequence object, Class<Iterable> targetType, ConversionContext context) -> {
Optional<Argument<?>> typeVariable = context.getFirstTypeVariable();
Argument<?> componentType = typeVariable.orElse(Argument.OBJECT_ARGUMENT);
ConversionContext newContext = context.with(componentType);
Class<?> targetComponentType = ReflectionUtils.getWrapperType(componentType.getType());
String[] strings = object.toString().split(",");
List<Object> list = new ArrayList<>();
for (String string : strings) {
Optional<?> converted = convert(string, targetComponentType, newContext);
if (converted.isPresent()) {
list.add(converted.get());
}
}
return CollectionUtils.convertCollection((Class) targetType, list);
});
TypeConverter<Object, Optional> objectToOptionalConverter = (object, targetType, context) -> {
Optional<Argument<?>> typeVariable = context.getFirstTypeVariable();
Argument<?> componentType = typeVariable.orElse(Argument.OBJECT_ARGUMENT);
Class<?> targetComponentType = ReflectionUtils.getWrapperType(componentType.getType());
ConversionContext newContext = context.with(componentType).with(context.getAnnotationMetadata());
Optional converted = convert(object, targetComponentType, newContext);
if (converted.isPresent()) {
return Optional.of(converted);
}
return Optional.of(Optional.empty());
};
// Optional handling
addInternalConverter(Object.class, Optional.class, objectToOptionalConverter);
addInternalConverter(Object.class, OptionalInt.class, (object, targetType, context) -> {
Optional<Integer> converted = convert(object, Integer.class, context);
return converted.map(OptionalInt::of).or(() -> Optional.of(OptionalInt.empty()));
});
addInternalConverter(Object.class, OptionalLong.class, (object, targetType, context) -> {
Optional<Long> converted = convert(object, Long.class, context);
return converted.map(OptionalLong::of).or(() -> Optional.of(OptionalLong.empty()));
});
// Iterable -> String
addInternalConverter(Iterable.class, String.class, (object, targetType, context) -> Optional.of(CollectionUtils.toString(object)));
// Iterable -> Object
addInternalConverter(Iterable.class, Object.class, (object, targetType, context) -> {
if (Optional.class.isAssignableFrom(targetType)) {
return objectToOptionalConverter.convert(object, (Class) targetType, context);
}
Iterator<?> i = object.iterator();
int count = 0;
Object value = null;
while (i.hasNext()) {
if (count > 0) {
context.reject(object, new ConversionErrorException(Argument.of(targetType), new IllegalArgumentException("Cannot convert an iterable with more than 1 value to a non collection object")));
return Optional.empty();
}
count++;
value = i.next();
}
return convert(value, targetType, context);
});
// Iterable -> Iterable (inner type conversion)
addInternalConverter(Iterable.class, Iterable.class, (object, targetType, context) -> {
if (ConvertibleValues.class.isAssignableFrom(targetType)) {
if (object instanceof ConvertibleValues) {
return Optional.of(object);
}
return Optional.empty();
}
Optional<Argument<?>> typeVariable = context.getFirstTypeVariable();
Argument<?> componentType = typeVariable.orElse(Argument.OBJECT_ARGUMENT);
Class<?> targetComponentType = ReflectionUtils.getWrapperType(componentType.getType());
if (targetType.isInstance(object) && targetComponentType == Object.class) {
return Optional.of(object);
}
List<Object> list = new ArrayList<>();
ConversionContext newContext = context.with(componentType);
for (Object o : object) {
Optional<?> converted = convert(o, targetComponentType, newContext);
if (converted.isPresent()) {
list.add(converted.get());
}
}
return CollectionUtils.convertCollection((Class) targetType, list);
});
// Object[] -> String
addInternalConverter(Object[].class, String.class, (object, targetType, context) -> Optional.of(ArrayUtils.toString(object)));
// Object[] -> Object[] (inner type conversion)
addInternalConverter(Object[].class, Object[].class, (object, targetType, context) -> {
Class<?> targetComponentType = targetType.getComponentType();
List<Object> results = new ArrayList<>(object.length);
for (Object o : object) {
Optional<?> converted = convert(o, targetComponentType, context);
if (converted.isPresent()) {
results.add(converted.get());
}
}
return Optional.of(results.toArray((Object[]) Array.newInstance(targetComponentType, results.size())));
});
// Iterable -> Object[]
addInternalConverter(Iterable.class, Object[].class, (object, targetType, context) -> {
Class<?> targetComponentType = targetType.getComponentType();
List<Object> results = new ArrayList<>();
for (Object o : object) {
Optional<?> converted = convert(o, targetComponentType, context);
if (converted.isPresent()) {
results.add(converted.get());
}
}
return Optional.of(results.toArray((Object[]) Array.newInstance(targetComponentType, results.size())));
});
addInternalConverter(Object[].class, Iterable.class, (object, targetType, context) ->
convert(Arrays.asList(object), targetType, context)
);
addInternalConverter(Object.class, Object[].class, (object, targetType, context) -> {
Class<?> targetComponentType = targetType.getComponentType();
Optional<?> converted = convert(object, targetComponentType);
if (converted.isPresent()) {
Object[] result = (Object[]) Array.newInstance(targetComponentType, 1);
result[0] = converted.get();
return Optional.of(result);
}
return Optional.empty();
});
// Map -> Map (inner type conversion)
addInternalConverter(Map.class, Map.class, (object, targetType, context) -> {
Argument<?> keyArgument = context.getTypeVariable("K").orElse(Argument.of(String.class, "K"));
boolean isProperties = targetType.equals(Properties.class);
Argument<?> valArgument = context.getTypeVariable("V").orElseGet(() -> {
if (isProperties) {
return Argument.of(String.class, "V");
}
return Argument.of(Object.class, "V");
});
Class<?> keyType = isProperties ? Object.class : keyArgument.getType();
Class<?> valueType = isProperties ? Object.class : valArgument.getType();
ConversionContext keyContext = context.with(keyArgument);
ConversionContext valContext = context.with(valArgument);
Map<Object, Object> newMap = isProperties ? new Properties() : new LinkedHashMap<>();
for (Object o : object.entrySet()) {
Map.Entry<?, ?> entry = (Map.Entry) o;
Object key = entry.getKey();
Object value = entry.getValue();
if (!keyType.isInstance(key)) {
Optional<?> convertedKey = convert(key, keyType, keyContext);
if (convertedKey.isPresent()) {
key = convertedKey.get();
} else {
continue;
}
}
if (!valueType.isInstance(value) || value instanceof Map || value instanceof Collection) {
Optional<?> converted = convert(value, valueType, valContext);
if (converted.isPresent()) {
value = converted.get();
} else {
continue;
}
}
newMap.put(key, value);
}
return Optional.of(newMap);
});
addInternalConverter(Map.class, ConvertibleValues.class, (object, targetType, context) -> Optional.of(new ConvertibleValuesMap<Object>(object)));
// Micronaut ByteBuffer -> byte for streamed results from HTTP clients
addInternalConverter(io.micronaut.core.io.buffer.ByteBuffer.class, byte[].class, (object, targetType, context) -> {
byte[] result = object.toByteArray();
((ReferenceCounted) object).release();
return Optional.of(result);
});
// ConvertibleMultiValues -> [?]
addInternalConverter(io.micronaut.core.convert.value.ConvertibleMultiValues.class, Iterable.class,
new MultiValuesConverterFactory.MultiValuesToIterableConverter(this));
addInternalConverter(io.micronaut.core.convert.value.ConvertibleMultiValues.class, Map.class,
new MultiValuesConverterFactory.MultiValuesToMapConverter(this));
addInternalConverter(io.micronaut.core.convert.value.ConvertibleMultiValues.class, Object.class,
new MultiValuesConverterFactory.MultiValuesToObjectConverter(this));
// [?] -> ConvertibleMultiValues
addInternalConverter(Iterable.class, io.micronaut.core.convert.value.ConvertibleMultiValues.class,
new MultiValuesConverterFactory.IterableToMultiValuesConverter(this));
addInternalConverter(Map.class, io.micronaut.core.convert.value.ConvertibleMultiValues.class,
new MultiValuesConverterFactory.MapToMultiValuesConverter(this));
addInternalConverter(Object.class, io.micronaut.core.convert.value.ConvertibleMultiValues.class,
new MultiValuesConverterFactory.ObjectToMultiValuesConverter(this));
// CharSequence -> java.net.Proxy.Type
addInternalConverter(CharSequence.class, Proxy.Type.class, new CharSequenceToEnumConverter<>());
// Boolean -> String
addInternalConverter(Boolean.class, String.class, Object::toString);
Collection<TypeConverterRegistrar> registrars = new ArrayList<>();
SoftServiceLoader.load(TypeConverterRegistrar.class)
.disableFork()
.collectAll(registrars);
for (TypeConverterRegistrar registrar : registrars) {
registrar.register(internalMutableConversionService);
}
}
/**
 * Registers additional converters directly on the internal mutable
 * conversion service, bypassing the custom-converter registry.
 *
 * @param registrars the registrars whose converters should be added
 * @since 4.2.0
 */
@Internal
public void registerInternalTypeConverters(Collection<TypeConverterRegistrar> registrars) {
    registrars.forEach(registrar -> registrar.register(internalMutableConversionService));
}
/**
 * Locates a registered converter able to convert between the given types.
 * <p>
 * The search walks the full class hierarchies of both the source and the
 * target type. The first pass looks converters up under the supplied
 * formatting annotation; if one was supplied and nothing matched, a second
 * pass retries without the annotation. Successful lookups are cached.
 *
 * @param sourceType           the source type
 * @param targetType           the target type
 * @param formattingAnnotation the formatting annotation name, may be {@code null}
 * @param <T>                  the target generic type
 * @return the matching converter, or {@code UNCONVERTIBLE} if none exists
 */
protected <T> TypeConverter<Object, T> findTypeConverter(Class<?> sourceType, Class<T> targetType, String formattingAnnotation) {
    final List<Class<?>> sourceHierarchy = resolveHierarchy(sourceType);
    final List<Class<?>> targetHierarchy = resolveHierarchy(targetType);
    // Pass 0 searches with the formatting annotation (even when null, matching
    // the annotated registration key); pass 1 retries annotation-free, but only
    // when an annotation actually narrowed the first pass.
    for (int pass = 0; pass < 2; pass++) {
        final boolean annotated = pass == 0;
        if (!annotated && formattingAnnotation == null) {
            break;
        }
        for (Class<?> src : sourceHierarchy) {
            for (Class<?> tgt : targetHierarchy) {
                ConvertiblePair pair = annotated
                    ? new ConvertiblePair(src, tgt, formattingAnnotation)
                    : new ConvertiblePair(src, tgt);
                TypeConverter<Object, T> typeConverter = findRegisteredConverter(pair);
                if (typeConverter != null) {
                    addToConverterCache(pair, typeConverter);
                    return typeConverter;
                }
            }
        }
    }
    return UNCONVERTIBLE;
}
// Resolves the class hierarchy of the given type, preferring the precomputed
// table for common types over the reflective fallback.
private List<Class<?>> resolveHierarchy(Class<?> sourceType) {
    final List<Class<?>> cached = COMMON_TYPE_HIERARCHY.get(sourceType);
    return cached != null ? cached : ClassUtils.resolveHierarchy(sourceType);
}
// Builds a date format for the conversion: the @Format pattern when present,
// otherwise an RFC-1123 style default, always using the context locale.
private SimpleDateFormat resolveFormat(ConversionContext context) {
    AnnotationMetadata annotationMetadata = context.getAnnotationMetadata();
    String pattern = annotationMetadata.stringValue(Format.class)
        .orElse("EEE, dd MMM yyyy HH:mm:ss z");
    return new SimpleDateFormat(pattern, context.getLocale());
}
// Returns a DecimalFormat for the @Format pattern, or null when the value
// carries no formatting annotation (null signals "no formatting" to callers).
private NumberFormat resolveNumberFormat(ConversionContext context) {
    String pattern = context.getAnnotationMetadata().stringValue(Format.class).orElse(null);
    return pattern == null ? null : new DecimalFormat(pattern);
}
// Creates the registry key for a converter. Formatting converters are keyed
// by their annotation type name so lookups can be scoped to that annotation.
private <S, T> ConvertiblePair newPair(Class<S> sourceType, Class<T> targetType, TypeConverter<S, T> typeConverter) {
    return typeConverter instanceof FormattingTypeConverter<S, T, ?> formattingTypeConverter
        ? new ConvertiblePair(sourceType, targetType, formattingTypeConverter.annotationType().getName())
        : new ConvertiblePair(sourceType, targetType);
}
/**
 * Resets this conversion service to its initial state: all internal,
 * custom and cached converter registrations are discarded and the
 * default converter set is registered again from scratch.
 */
public final void reset() {
    internalConverters.clear();
    customConverters.clear();
    converterCache.clear();
    registerDefaultConverters();
}
/**
* Binds the source and target.
*/
private static final | DefaultMutableConversionService |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/discriminator/JoinedDiscSameAttributeNameTest.java | {
"start": 1309,
"end": 4640
} | class ____ {
@Test
void testCoalesceSameType(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final var cb = session.getCriteriaBuilder();
final var query = cb.createTupleQuery();
final var root = query.from( Ancestor.class );
final var dscCRoot = cb.treat( root, DescendantTak.class );
query.select( cb.tuple(
root.get( JoinedDiscSameAttributeNameTest_.Ancestor_.id ).alias( "id" ),
cb.coalesce(
dscCRoot.get( JoinedDiscSameAttributeNameTest_.DescendantTak_.subtitle ),
dscCRoot.get( JoinedDiscSameAttributeNameTest_.DescendantTak_.title )
).alias( "description" )
) ).orderBy( cb.asc( root.get( JoinedDiscSameAttributeNameTest_.Ancestor_.id ) ) );
final var resultList = session.createSelectionQuery( query ).getResultList();
assertResults( resultList, null, "title", null );
} );
}
@Test
void testCoalesceDifferentTypes(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final var cb = session.getCriteriaBuilder();
final var query = cb.createTupleQuery();
final var root = query.from( Ancestor.class );
final var dscARoot = cb.treat( root, DescendantA.class );
final var dscCRoot = cb.treat( root, DescendantTak.class );
final var dscDRoot = cb.treat( root, DescendantD.class );
query.select( cb.tuple(
root.get( JoinedDiscSameAttributeNameTest_.Ancestor_.id ).alias( "id" ),
cb.coalesce(
dscDRoot.get( JoinedDiscSameAttributeNameTest_.DescendantD_.subtitle ),
cb.coalesce(
cb.coalesce(
dscARoot.get( JoinedDiscSameAttributeNameTest_.DescendantA_.subtitle ),
dscARoot.get( JoinedDiscSameAttributeNameTest_.DescendantA_.title )
),
cb.coalesce(
dscCRoot.get( JoinedDiscSameAttributeNameTest_.DescendantTak_.subtitle ),
dscCRoot.get( JoinedDiscSameAttributeNameTest_.DescendantTak_.title )
)
)
).alias( "description" )
) ).orderBy( cb.asc( root.get( JoinedDiscSameAttributeNameTest_.Ancestor_.id ) ) );
final var resultList = session.createSelectionQuery( query ).getResultList();
assertResults( resultList, null, "title", "subtitle" );
} );
}
private static void assertResults(List<Tuple> resultList, String... expected) {
assertThat( resultList ).hasSize( expected.length );
for ( int i = 0; i < expected.length; i++ ) {
final var r = resultList.get( i );
assertThat( r.get( 0, Integer.class) ).isEqualTo( i + 1 );
assertThat( r.get( 1, String.class ) ).isEqualTo( expected[i] );
}
}
@BeforeEach
public void setUp(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final var descendantA = new DescendantA();
descendantA.id = 1;
session.persist( descendantA );
final var descendantTak = new DescendantTak();
descendantTak.id = 2;
descendantTak.title = "title";
session.persist( descendantTak );
final var descendantD = new DescendantD();
descendantD.id = 3;
descendantD.subtitle = "subtitle";
session.persist( descendantD );
} );
}
@AfterEach
public void tearDown(SessionFactoryScope scope) {
scope.dropData();
}
@Entity(name = "Ancestor")
@Table(name = "t_ancestor")
@Inheritance(strategy = InheritanceType.JOINED)
@DiscriminatorColumn(name = "def_type_id")
static abstract | JoinedDiscSameAttributeNameTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java | {
"start": 7862,
"end": 11323
} | class ____ extends IndexRouting {
private final boolean routingRequired;
private final IndexVersion creationVersion;
private final IndexMode indexMode;
IdAndRoutingOnly(IndexMetadata metadata) {
super(metadata);
this.creationVersion = metadata.getCreationVersion();
MappingMetadata mapping = metadata.mapping();
this.routingRequired = mapping == null ? false : mapping.routingRequired();
this.indexMode = metadata.getIndexMode();
}
protected abstract int shardId(String id, @Nullable String routing);
@Override
public void preProcess(IndexRequest indexRequest) {
// Generate id if not already provided.
// This is needed for routing, so it has to happen in pre-processing.
final String id = indexRequest.id();
if (id == null) {
if (shouldUseTimeBasedId(indexMode, creationVersion)) {
indexRequest.autoGenerateTimeBasedId();
} else {
indexRequest.autoGenerateId();
}
} else if (id.isEmpty()) {
throw new IllegalArgumentException("if _id is specified it must not be empty");
}
}
private static boolean shouldUseTimeBasedId(final IndexMode indexMode, final IndexVersion creationVersion) {
return indexMode == IndexMode.LOGSDB && isNewIndexVersion(creationVersion);
}
private static boolean isNewIndexVersion(final IndexVersion creationVersion) {
return creationVersion.between(IndexVersions.TIME_BASED_K_ORDERED_DOC_ID_BACKPORT, IndexVersions.UPGRADE_TO_LUCENE_10_0_0)
|| creationVersion.onOrAfter(IndexVersions.TIME_BASED_K_ORDERED_DOC_ID);
}
@Override
public int indexShard(IndexRequest indexRequest) {
String id = indexRequest.id();
String routing = indexRequest.routing();
if (id == null) {
throw new IllegalStateException("id is required and should have been set by process");
}
checkRoutingRequired(id, routing);
int shardId = shardId(id, routing);
return rerouteWritesIfResharding(shardId);
}
@Override
public int rerouteToTarget(IndexRequest indexRequest) {
return indexShard(indexRequest);
}
@Override
public int updateShard(String id, @Nullable String routing) {
checkRoutingRequired(id, routing);
int shardId = shardId(id, routing);
return rerouteWritesIfResharding(shardId);
}
@Override
public int deleteShard(String id, @Nullable String routing) {
checkRoutingRequired(id, routing);
int shardId = shardId(id, routing);
return rerouteWritesIfResharding(shardId);
}
@Override
public int getShard(String id, @Nullable String routing) {
checkRoutingRequired(id, routing);
return shardId(id, routing);
}
private void checkRoutingRequired(String id, @Nullable String routing) {
if (routingRequired && routing == null) {
throw new RoutingMissingException(indexName, id);
}
}
}
/**
* Strategy for indices that are not partitioned.
*/
private static | IdAndRoutingOnly |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java | {
"start": 2564,
"end": 15022
} | class ____ extends AbstractQueryBuilder<NestedQueryBuilder> {
public static final String NAME = "nested";
/**
* The default value for ignore_unmapped.
*/
public static final boolean DEFAULT_IGNORE_UNMAPPED = false;
private static final ParseField SCORE_MODE_FIELD = new ParseField("score_mode");
private static final ParseField PATH_FIELD = new ParseField("path");
private static final ParseField QUERY_FIELD = new ParseField("query");
private static final ParseField INNER_HITS_FIELD = new ParseField("inner_hits");
private static final ParseField IGNORE_UNMAPPED_FIELD = new ParseField("ignore_unmapped");
private final String path;
private final ScoreMode scoreMode;
private final QueryBuilder query;
private InnerHitBuilder innerHitBuilder;
private boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED;
public NestedQueryBuilder(String path, QueryBuilder query, ScoreMode scoreMode) {
this(path, query, scoreMode, null);
}
private NestedQueryBuilder(String path, QueryBuilder query, ScoreMode scoreMode, InnerHitBuilder innerHitBuilder) {
this.path = requireValue(path, "[" + NAME + "] requires 'path' field");
this.query = requireValue(query, "[" + NAME + "] requires 'query' field");
this.scoreMode = requireValue(scoreMode, "[" + NAME + "] requires 'score_mode' field");
this.innerHitBuilder = innerHitBuilder;
}
/**
* Read from a stream.
*/
public NestedQueryBuilder(StreamInput in) throws IOException {
super(in);
path = in.readString();
scoreMode = ScoreMode.values()[in.readVInt()];
query = in.readNamedWriteable(QueryBuilder.class);
innerHitBuilder = in.readOptionalWriteable(InnerHitBuilder::new);
ignoreUnmapped = in.readBoolean();
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(path);
out.writeVInt(scoreMode.ordinal());
out.writeNamedWriteable(query);
out.writeOptionalWriteable(innerHitBuilder);
out.writeBoolean(ignoreUnmapped);
}
/**
* Returns the nested query to execute.
*/
public QueryBuilder query() {
return query;
}
/**
* Returns path to the searched nested object.
*/
public String path() {
return path;
}
/**
* Returns inner hit definition in the scope of this query and reusing the defined type and query.
*/
public InnerHitBuilder innerHit() {
return innerHitBuilder;
}
public NestedQueryBuilder innerHit(InnerHitBuilder innerHitBuilder) {
this.innerHitBuilder = innerHitBuilder;
if (innerHitBuilder != null) {
innerHitBuilder.setIgnoreUnmapped(ignoreUnmapped);
}
return this;
}
/**
* Returns how the scores from the matching child documents are mapped into the nested parent document.
*/
public ScoreMode scoreMode() {
return scoreMode;
}
/**
* Sets whether the query builder should ignore unmapped paths (and run a
* {@link MatchNoDocsQuery} in place of this query) or throw an exception if
* the path is unmapped.
*/
public NestedQueryBuilder ignoreUnmapped(boolean ignoreUnmapped) {
this.ignoreUnmapped = ignoreUnmapped;
if (innerHitBuilder != null) {
innerHitBuilder.setIgnoreUnmapped(ignoreUnmapped);
}
return this;
}
/**
* Gets whether the query builder will ignore unmapped fields (and run a
* {@link MatchNoDocsQuery} in place of this query) or throw an exception if
* the path is unmapped.
*/
public boolean ignoreUnmapped() {
return ignoreUnmapped;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.field(QUERY_FIELD.getPreferredName());
query.toXContent(builder, params);
builder.field(PATH_FIELD.getPreferredName(), path);
builder.field(IGNORE_UNMAPPED_FIELD.getPreferredName(), ignoreUnmapped);
if (scoreMode != null) {
builder.field(SCORE_MODE_FIELD.getPreferredName(), scoreModeAsString(scoreMode));
}
printBoostAndQueryName(builder);
if (innerHitBuilder != null) {
builder.field(INNER_HITS_FIELD.getPreferredName(), innerHitBuilder, params);
}
builder.endObject();
}
public static NestedQueryBuilder fromXContent(XContentParser parser) throws IOException {
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
ScoreMode scoreMode = ScoreMode.Avg;
String queryName = null;
QueryBuilder query = null;
String path = null;
String currentFieldName = null;
InnerHitBuilder innerHitBuilder = null;
boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (QUERY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
query = parseInnerQueryBuilder(parser);
} else if (INNER_HITS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
innerHitBuilder = InnerHitBuilder.fromXContent(parser);
} else {
throw new ParsingException(parser.getTokenLocation(), "[nested] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (PATH_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
path = parser.text();
} else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
boost = parser.floatValue();
} else if (IGNORE_UNMAPPED_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
ignoreUnmapped = parser.booleanValue();
} else if (SCORE_MODE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
scoreMode = parseScoreMode(parser.text());
} else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[nested] query does not support [" + currentFieldName + "]");
}
}
}
NestedQueryBuilder queryBuilder = new NestedQueryBuilder(path, query, scoreMode, innerHitBuilder).ignoreUnmapped(ignoreUnmapped)
.queryName(queryName)
.boost(boost);
return queryBuilder;
}
public static ScoreMode parseScoreMode(String scoreModeString) {
if ("none".equals(scoreModeString)) {
return ScoreMode.None;
} else if ("min".equals(scoreModeString)) {
return ScoreMode.Min;
} else if ("max".equals(scoreModeString)) {
return ScoreMode.Max;
} else if ("avg".equals(scoreModeString)) {
return ScoreMode.Avg;
} else if ("sum".equals(scoreModeString)) {
return ScoreMode.Total;
}
throw new IllegalArgumentException("No score mode for child query [" + scoreModeString + "] found");
}
public static String scoreModeAsString(ScoreMode scoreMode) {
if (scoreMode == ScoreMode.Total) {
// Lucene uses 'total' but 'sum' is more consistent with other elasticsearch APIs
return "sum";
} else {
return scoreMode.name().toLowerCase(Locale.ROOT);
}
}
@Override
public final String getWriteableName() {
return NAME;
}
@Override
protected boolean doEquals(NestedQueryBuilder that) {
return Objects.equals(query, that.query)
&& Objects.equals(path, that.path)
&& Objects.equals(scoreMode, that.scoreMode)
&& Objects.equals(innerHitBuilder, that.innerHitBuilder)
&& Objects.equals(ignoreUnmapped, that.ignoreUnmapped);
}
@Override
protected int doHashCode() {
return Objects.hash(query, path, scoreMode, innerHitBuilder, ignoreUnmapped);
}
@Override
protected Query doToQuery(SearchExecutionContext context) throws IOException {
return toQuery((this.query::toQuery), path, scoreMode, ignoreUnmapped, context);
}
/**
* Returns the primitive Lucene query for a nested query given the primitive query to wrap
* @param <E> exception that the queryProvider may throw
* @param queryProvider Retrieves tye query to use given the SearchExecutionContext
* @param path nested path
* @param scoreMode score mode to use
* @param ignoreUnmapped whether to ignore unmapped fields
* @param context search execution context
* @return the primitive Lucene query
*/
public static <E extends Exception> Query toQuery(
CheckedFunction<SearchExecutionContext, Query, E> queryProvider,
String path,
ScoreMode scoreMode,
boolean ignoreUnmapped,
SearchExecutionContext context
) throws E {
if (context.allowExpensiveQueries() == false) {
throw new ElasticsearchException(
"[joining] queries cannot be executed when '" + ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false."
);
}
NestedObjectMapper mapper = context.nestedLookup().getNestedMappers().get(path);
if (mapper == null) {
if (ignoreUnmapped) {
return new MatchNoDocsQuery();
} else {
throw new QueryShardException(context, "[" + NAME + "] failed to find nested object under path [" + path + "]");
}
}
final BitSetProducer parentFilter;
Query innerQuery;
NestedObjectMapper objectMapper = context.nestedScope().getObjectMapper();
if (objectMapper == null) {
parentFilter = context.bitsetFilter(Queries.newNonNestedFilter(context.indexVersionCreated()));
} else {
parentFilter = context.bitsetFilter(objectMapper.nestedTypeFilter());
}
try {
context.nestedScope().nextLevel(mapper);
innerQuery = queryProvider.apply(context);
} finally {
context.nestedScope().previousLevel();
}
// ToParentBlockJoinQuery requires that the inner query only matches documents
// in its child space
if (NestedHelper.mightMatchNonNestedDocs(innerQuery, path, context)) {
innerQuery = Queries.filtered(innerQuery, mapper.nestedTypeFilter());
}
return new ESToParentBlockJoinQuery(innerQuery, parentFilter, scoreMode, objectMapper == null ? null : objectMapper.fullPath());
}
@Override
protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
QueryBuilder rewrittenQuery = query.rewrite(queryRewriteContext);
if (rewrittenQuery != query) {
NestedQueryBuilder nestedQuery = new NestedQueryBuilder(path, rewrittenQuery, scoreMode, innerHitBuilder);
nestedQuery.ignoreUnmapped(ignoreUnmapped);
return nestedQuery;
}
return this;
}
@Override
public void extractInnerHitBuilders(Map<String, InnerHitContextBuilder> innerHits) {
if (innerHitBuilder != null) {
String name = innerHitBuilder.getName() != null ? innerHitBuilder.getName() : path;
if (innerHits.containsKey(name)) {
throw new IllegalArgumentException("[inner_hits] already contains an entry for key [" + name + "]");
}
Map<String, InnerHitContextBuilder> children = new HashMap<>();
InnerHitContextBuilder.extractInnerHits(query, children);
InnerHitContextBuilder innerHitContextBuilder = new NestedInnerHitContextBuilder(path, query, innerHitBuilder, children);
innerHits.put(name, innerHitContextBuilder);
}
}
static | NestedQueryBuilder |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/util/pattern/LiteralPathElement.java | {
"start": 1046,
"end": 2717
} | class ____ extends PathElement {
private final String text;
private final int len;
private final boolean caseSensitive;
public LiteralPathElement(int pos, char[] literalText, boolean caseSensitive, char separator) {
super(pos, separator);
this.len = literalText.length;
this.caseSensitive = caseSensitive;
this.text = new String(literalText);
}
@Override
public boolean matches(int pathIndex, MatchingContext matchingContext) {
if (pathIndex >= matchingContext.pathLength) {
// no more path left to match this element
return false;
}
Element element = matchingContext.pathElements.get(pathIndex);
if (!(element instanceof PathSegment pathSegment)) {
return false;
}
String value = pathSegment.valueToMatch();
if (value.length() != this.len) {
// Not enough data to match this path element
return false;
}
if (this.caseSensitive) {
if (!this.text.equals(value)) {
return false;
}
}
else {
if (!this.text.equalsIgnoreCase(value)) {
return false;
}
}
pathIndex++;
if (isNoMorePattern()) {
if (matchingContext.determineRemainingPath) {
matchingContext.remainingPathIndex = pathIndex;
return true;
}
else {
return (pathIndex == matchingContext.pathLength);
}
}
else {
return (this.next != null && this.next.matches(pathIndex, matchingContext));
}
}
@Override
public int getNormalizedLength() {
return this.len;
}
@Override
public char[] getChars() {
return this.text.toCharArray();
}
@Override
public boolean isLiteral() {
return true;
}
@Override
public String toString() {
return "Literal(" + this.text + ")";
}
}
| LiteralPathElement |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/KTableKTableRightJoin.java | {
"start": 1633,
"end": 2439
} | class ____<K, V1, V2, VOut> extends KTableKTableAbstractJoin<K, V1, V2, VOut> {
private static final Logger LOG = LoggerFactory.getLogger(KTableKTableRightJoin.class);
KTableKTableRightJoin(final KTableImpl<K, ?, V1> table1,
final KTableImpl<K, ?, V2> table2,
final ValueJoiner<? super V1, ? super V2, ? extends VOut> joiner) {
super(table1, table2, joiner);
}
@Override
public Processor<K, Change<V1>, K, Change<VOut>> get() {
return new KTableKTableRightJoinProcessor(valueGetterSupplier2.get());
}
@Override
public KTableValueGetterSupplier<K, VOut> view() {
return new KTableKTableRightJoinValueGetterSupplier(valueGetterSupplier1, valueGetterSupplier2);
}
private | KTableKTableRightJoin |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/async/utils/AsyncRun.java | {
"start": 1146,
"end": 1321
} | interface ____ responsible for
* performing a task and completing a {@link CompletableFuture} with the
* result of the operation.
*
* <p>
* The {@code run} method of this | are |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java | {
"start": 6725,
"end": 8202
} | class ____ {
private String host = "";
private int port;
private String user = "";
ConnectionInfo(String hst, int prt, String usr) {
this.host = hst;
this.port = prt;
this.user = usr;
}
public String getHost() {
return host;
}
public void setHost(String hst) {
this.host = hst;
}
public int getPort() {
return port;
}
public void setPort(int prt) {
this.port = prt;
}
public String getUser() {
return user;
}
public void setUser(String usr) {
this.user = usr;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj instanceof ConnectionInfo) {
ConnectionInfo con = (ConnectionInfo) obj;
boolean ret = true;
if (this.host == null || !this.host.equalsIgnoreCase(con.host)) {
ret = false;
}
if (this.port >= 0 && this.port != con.port) {
ret = false;
}
if (this.user == null || !this.user.equalsIgnoreCase(con.user)) {
ret = false;
}
return ret;
} else {
return false;
}
}
@Override
public int hashCode() {
int hashCode = 0;
if (host != null) {
hashCode += host.hashCode();
}
hashCode += port;
if (user != null) {
hashCode += user.hashCode();
}
return hashCode;
}
}
}
| ConnectionInfo |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchParser.java | {
"start": 2440,
"end": 11520
} | class ____ {
private static final Logger logger = LogManager.getLogger(WatchParser.class);
private final TriggerService triggerService;
private final ActionRegistry actionRegistry;
private final InputRegistry inputRegistry;
private final CryptoService cryptoService;
private final Clock clock;
private final ExecutableInput<?, ?> defaultInput;
private final ExecutableCondition defaultCondition;
private final List<ActionWrapper> defaultActions;
public WatchParser(
TriggerService triggerService,
ActionRegistry actionRegistry,
InputRegistry inputRegistry,
@Nullable CryptoService cryptoService,
Clock clock
) {
this.triggerService = triggerService;
this.actionRegistry = actionRegistry;
this.inputRegistry = inputRegistry;
this.cryptoService = cryptoService;
this.clock = clock;
this.defaultInput = new ExecutableNoneInput();
this.defaultCondition = InternalAlwaysCondition.INSTANCE;
this.defaultActions = Collections.emptyList();
}
public Watch parse(
String name,
boolean includeStatus,
BytesReference source,
XContentType xContentType,
long sourceSeqNo,
long sourcePrimaryTerm
) throws IOException {
ZonedDateTime now = clock.instant().atZone(ZoneOffset.UTC);
return parse(name, includeStatus, false, source, now, xContentType, false, sourceSeqNo, sourcePrimaryTerm);
}
/**
* Parses the watch represented by the given source. When parsing, any sensitive data that the
* source might contain (e.g. passwords) will be converted to {@link Secret secrets}
* Such that the returned watch will potentially hide this sensitive data behind a "secret". A secret
* is an abstraction around sensitive data (text). When security is enabled, the
* {@link CryptoService} is used to encrypt the secrets.
*
* This method is only called once - when the user adds a new watch. From that moment on, all representations
* of the watch in the system will be use secrets for sensitive data.
*
*/
public Watch parseWithSecrets(
String id,
boolean includeStatus,
BytesReference source,
ZonedDateTime now,
XContentType xContentType,
boolean allowRedactedPasswords,
long sourceSeqNo,
long sourcePrimaryTerm
) throws IOException {
return parse(id, includeStatus, true, source, now, xContentType, allowRedactedPasswords, sourceSeqNo, sourcePrimaryTerm);
}
public Watch parseWithSecrets(
String id,
boolean includeStatus,
BytesReference source,
ZonedDateTime now,
XContentType xContentType,
long sourceSeqNo,
long sourcePrimaryTerm
) throws IOException {
return parse(id, includeStatus, true, source, now, xContentType, false, sourceSeqNo, sourcePrimaryTerm);
}
private Watch parse(
String id,
boolean includeStatus,
boolean withSecrets,
BytesReference source,
ZonedDateTime now,
XContentType xContentType,
boolean allowRedactedPasswords,
long sourceSeqNo,
long sourcePrimaryTerm
) throws IOException {
if (logger.isTraceEnabled()) {
logger.trace("parsing watch [{}] ", source.utf8ToString());
}
// EMPTY is safe here because we never use namedObject
try (
WatcherXContentParser parser = new WatcherXContentParser(
XContentHelper.createParserNotCompressed(LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, source, xContentType),
now,
withSecrets ? cryptoService : null,
allowRedactedPasswords
)
) {
parser.nextToken();
return parse(id, includeStatus, parser, sourceSeqNo, sourcePrimaryTerm);
} catch (IOException ioe) {
throw ioException("could not parse watch [{}]", ioe, id);
}
}
public Watch parse(String id, boolean includeStatus, WatcherXContentParser parser, long sourceSeqNo, long sourcePrimaryTerm)
throws IOException {
Trigger trigger = null;
ExecutableInput<?, ?> input = defaultInput;
ExecutableCondition condition = defaultCondition;
List<ActionWrapper> actions = defaultActions;
ExecutableTransform<?, ?> transform = null;
TimeValue throttlePeriod = null;
Map<String, Object> metatdata = null;
WatchStatus status = null;
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == null) {
throw new ElasticsearchParseException("could not parse watch [{}]. null token", id);
} else if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (currentFieldName == null) {
throw new ElasticsearchParseException("could not parse watch [{}], unexpected token [{}]", id, token);
} else if (WatchField.TRIGGER.match(currentFieldName, parser.getDeprecationHandler())) {
trigger = triggerService.parseTrigger(id, parser);
} else if (WatchField.INPUT.match(currentFieldName, parser.getDeprecationHandler())) {
input = inputRegistry.parse(id, parser);
} else if (WatchField.CONDITION.match(currentFieldName, parser.getDeprecationHandler())) {
condition = actionRegistry.getConditionRegistry().parseExecutable(id, parser);
} else if (WatchField.TRANSFORM.match(currentFieldName, parser.getDeprecationHandler())) {
transform = actionRegistry.getTransformRegistry().parse(id, parser);
} else if (WatchField.THROTTLE_PERIOD.match(currentFieldName, parser.getDeprecationHandler())) {
throttlePeriod = timeValueMillis(parser.longValue());
} else if (WatchField.THROTTLE_PERIOD_HUMAN.match(currentFieldName, parser.getDeprecationHandler())) {
// Parser for human specified and 2.x backwards compatible throttle period
try {
throttlePeriod = WatcherDateTimeUtils.parseTimeValue(parser, WatchField.THROTTLE_PERIOD_HUMAN.toString());
} catch (ElasticsearchParseException pe) {
throw new ElasticsearchParseException(
"could not parse watch [{}]. failed to parse time value for field [{}]",
pe,
id,
currentFieldName
);
}
} else if (WatchField.ACTIONS.match(currentFieldName, parser.getDeprecationHandler())) {
actions = actionRegistry.parseActions(id, parser);
} else if (WatchField.METADATA.match(currentFieldName, parser.getDeprecationHandler())) {
metatdata = parser.map();
} else if (WatchField.STATUS.match(currentFieldName, parser.getDeprecationHandler())) {
if (includeStatus) {
status = WatchStatus.parse(id, parser);
} else {
parser.skipChildren();
}
} else {
throw new ElasticsearchParseException("could not parse watch [{}]. unexpected field [{}]", id, currentFieldName);
}
}
if (trigger == null) {
throw new ElasticsearchParseException(
"could not parse watch [{}]. missing required field [{}]",
id,
WatchField.TRIGGER.getPreferredName()
);
}
if (status != null) {
// verify the status is valid (that every action indeed has a status)
for (ActionWrapper action : actions) {
if (status.actionStatus(action.id()) == null) {
throw new ElasticsearchParseException(
"could not parse watch [{}]. watch status in invalid state. action [{}] " + "status is missing",
id,
action.id()
);
}
}
} else {
// we need to create the initial statuses for the actions
Map<String, ActionStatus> actionsStatuses = new HashMap<>();
for (ActionWrapper action : actions) {
actionsStatuses.put(action.id(), new ActionStatus(parser.getParseDateTime()));
}
status = new WatchStatus(parser.getParseDateTime(), unmodifiableMap(actionsStatuses));
}
return new Watch(
id,
trigger,
input,
condition,
transform,
throttlePeriod,
actions,
metatdata,
status,
sourceSeqNo,
sourcePrimaryTerm
);
}
}
| WatchParser |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/results/graph/embeddable/internal/NonAggregatedIdentifierMappingInitializer.java | {
"start": 10892,
"end": 16395
} | class ____ this is a find by id lookup, we just use that instance
if ( data.isFindByIdLookup ) {
data.setInstance( data.getRowProcessingState().getEntityId() );
data.setState( State.INITIALIZED );
}
else {
data.setState( State.RESOLVED );
// We need to possibly wrap the processing state if the embeddable is within an aggregate
extractRowState( data );
data.setInstance( data.getState() == State.MISSING ? null : embeddableInstantiator.instantiate( data ) );
if ( parent == null ) {
data.setState( State.INITIALIZED );
}
}
}
}
@Override
public void resolveInstance(@Nullable Object instance, NonAggregatedIdentifierMappingInitializerData data) {
if ( instance == null ) {
data.setState( State.MISSING );
data.setInstance( null );
}
else if ( hasIdClass ) {
resolveKey( data );
resolveInstance( data );
}
else {
data.setState( State.INITIALIZED );
data.setInstance( instance );
final var rowProcessingState = data.getRowProcessingState();
resolveInstanceSubInitializers( instance, rowProcessingState );
if ( rowProcessingState.needsResolveState() ) {
for ( var assembler : assemblers ) {
assembler.resolveState( rowProcessingState );
}
}
}
}
private void resolveInstanceSubInitializers(Object instance, RowProcessingState rowProcessingState) {
for ( int i = 0; i < subInitializersForResolveFromInitialized.length; i++ ) {
final var initializer = subInitializersForResolveFromInitialized[i];
if ( initializer != null ) {
final Object subInstance = representationEmbeddable.getValue( instance, i );
if ( subInstance == LazyPropertyInitializer.UNFETCHED_PROPERTY ) {
// Go through the normal initializer process
initializer.resolveKey( rowProcessingState );
}
else {
initializer.resolveInstance( subInstance, rowProcessingState );
}
}
}
}
@Override
public void initializeInstance(NonAggregatedIdentifierMappingInitializerData data) {
if ( data.getState() == State.RESOLVED ) {
data.setState( State.INITIALIZED );
if ( parent != null ) {
assert parent.isEntityInitializer();
final Object parentInstance = parent.getResolvedInstance( data.getRowProcessingState() );
assert parentInstance != null;
final var lazyInitializer = HibernateProxy.extractLazyInitializer( parentInstance );
// If the composite instance has a lazy initializer attached, this means that the embeddable is actually virtual
// and the compositeInstance == entity, so we have to inject the row state into the entity when it finishes resolution
if ( lazyInitializer != null ) {
virtualIdEmbeddable.setValues( lazyInitializer.getImplementation(), data.virtualIdState );
}
else {
virtualIdEmbeddable.setValues( parentInstance, data.virtualIdState );
}
}
}
}
@Override
protected void forEachSubInitializer(BiConsumer<Initializer<?>, RowProcessingState> consumer, InitializerData data) {
final var rowProcessingState = data.getRowProcessingState();
for ( var initializer : initializers ) {
if ( initializer != null ) {
consumer.accept( initializer, rowProcessingState );
}
}
}
private void extractRowState(NonAggregatedIdentifierMappingInitializerData data) {
final var rowProcessingState = data.getRowProcessingState();
for ( int i = 0; i < assemblers.length; i++ ) {
final Object contributorValue = assemblers[i].assemble( rowProcessingState );
if ( contributorValue == null ) {
// This is a key and there is a null part, the whole thing has to be turned into null
data.setState( State.MISSING );
return;
}
if ( contributorValue == BATCH_PROPERTY ) {
data.virtualIdState[i] = null;
data.idClassState[i] = null;
}
else {
data.virtualIdState[i] = contributorValue;
data.idClassState[i] = contributorValue;
if ( hasIdClass ) {
final var virtualIdAttribute = virtualIdEmbeddable.getAttributeMapping( i );
final var mappedIdAttribute = representationEmbeddable.getAttributeMapping( i );
if ( virtualIdAttribute instanceof ToOneAttributeMapping toOneAttributeMapping
&& !( mappedIdAttribute instanceof ToOneAttributeMapping ) ) {
final Object associationKey =
toOneAttributeMapping.getForeignKeyDescriptor()
.getAssociationKeyFromSide(
data.virtualIdState[i],
toOneAttributeMapping.getSideNature().inverse(),
rowProcessingState.getSession()
);
data.idClassState[i] = associationKey;
}
}
}
}
}
@Override
public void resolveState(NonAggregatedIdentifierMappingInitializerData data) {
if ( !data.isFindByIdLookup ) {
final var rowProcessingState = data.getRowProcessingState();
for ( var assembler : assemblers ) {
assembler.resolveState( rowProcessingState );
}
}
}
@Override
public boolean isPartOfKey() {
return true;
}
@Override
public boolean isEager() {
// Embeddables are never lazy
return true;
}
@Override
public boolean isLazyCapable() {
return lazyCapable;
}
@Override
public boolean hasLazySubInitializers() {
return hasLazySubInitializer;
}
/*
* Used by Hibernate Reactive
*/
protected @Nullable Initializer<InitializerData>[] getInitializers() {
return initializers;
}
@Override
public String toString() {
return "NonAggregatedIdentifierMappingInitializer(" + navigablePath + ") : `"
+ getInitializedPart().getJavaType().getJavaTypeClass() + "`";
}
}
| and |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/hive/HiveCreateTableTest_24.java | {
"start": 970,
"end": 3512
} | class ____ extends OracleTest {
public void test_0() throws Exception {
String sql = //
"create table aliyun_cdm.test_905_table \n" +
"(col1 BIGINT,col2 STRING,col3 BOOLEAN,col4 DOUBLE,col5 DATETIME) row format delimited fields terminated by \"\\\\001\"\n";
List<SQLStatement> statementList = SQLUtils.toStatementList(sql, JdbcConstants.HIVE);
SQLStatement stmt = statementList.get(0);
System.out.println(stmt.toString());
assertEquals(1, statementList.size());
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(JdbcConstants.HIVE);
stmt.accept(visitor);
{
String text = SQLUtils.toSQLString(stmt, JdbcConstants.HIVE);
assertEquals("CREATE TABLE aliyun_cdm.test_905_table (\n" +
"\tcol1 BIGINT,\n" +
"\tcol2 STRING,\n" +
"\tcol3 BOOLEAN,\n" +
"\tcol4 DOUBLE,\n" +
"\tcol5 DATETIME\n" +
")\n" +
"ROW FORMAT DELIMITED\n" +
"\tFIELDS TERMINATED BY '\\\\001'", text);
}
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(5, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
assertEquals(0, visitor.getRelationships().size());
assertEquals(0, visitor.getOrderByColumns().size());
assertTrue(visitor.containsTable("aliyun_cdm.test_905_table"));
}
public void test_1_error() throws Exception {
String sql = //
"create table aliyun_cdm.test_905_table \n" +
"(col1 BIGINT,col2 STRING,col3 BOOLEAN,col4 DOUBLE,col5 DATETIME) row format delimited field terminated by \"\\\\001\"\n";
Exception error = null;
try {
SQLUtils.toStatementList(sql, JdbcConstants.HIVE);
} catch (ParserException ex) {
error = ex;
}
assertNotNull(error);
assertEquals("syntax error, expect FIELDS, pos 131, line 2, column 87, token IDENTIFIER field", error.getMessage());
}
}
| HiveCreateTableTest_24 |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/rsocket/service/RSocketExchangeReflectiveProcessor.java | {
"start": 1545,
"end": 2668
} | class ____ implements ReflectiveProcessor {
private final BindingReflectionHintsRegistrar bindingRegistrar = new BindingReflectionHintsRegistrar();
@Override
public void registerReflectionHints(ReflectionHints hints, AnnotatedElement element) {
if (element instanceof Method method) {
this.registerMethodHints(hints, method);
}
}
protected void registerMethodHints(ReflectionHints hints, Method method) {
hints.registerMethod(method, ExecutableMode.INVOKE);
for (Parameter parameter : method.getParameters()) {
// Also register non-annotated parameters to handle metadata
this.bindingRegistrar.registerReflectionHints(hints,
MethodParameter.forParameter(parameter).getGenericParameterType());
}
registerReturnTypeHints(hints, MethodParameter.forExecutable(method, -1));
}
protected void registerReturnTypeHints(ReflectionHints hints, MethodParameter returnTypeParameter) {
if (!void.class.equals(returnTypeParameter.getParameterType())) {
this.bindingRegistrar.registerReflectionHints(hints, returnTypeParameter.getGenericParameterType());
}
}
}
| RSocketExchangeReflectiveProcessor |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/matching/PreMatchAcceptInHeaderTest.java | {
"start": 5754,
"end": 6239
} | class ____ implements ContainerRequestFilter {
@Override
public void filter(ContainerRequestContext requestContext) {
MultivaluedMap<String, String> headers = requestContext.getHeaders();
if ("true".equals(headers.getFirst("x-set-accept-to-text"))) {
headers.putSingle(HttpHeaders.ACCEPT, MediaType.TEXT_PLAIN);
}
}
}
@Provider
@Produces(MediaType.TEXT_PLAIN)
public static | SetAcceptHeaderFilter |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/collectionelement/ElementCollectionWithEntityAndFKForElementTest.java | {
"start": 2044,
"end": 2503
} | class ____ implements Serializable {
private String element;
@Column(name = "parent_id")
private Long parentId;
public ElementEmbeddedId() {
}
public ElementEmbeddedId(String element, Long parentId) {
this.parentId = parentId;
this.element = element;
}
public String getElement() {
return element;
}
public Long getParentId() {
return parentId;
}
}
@Entity
@Table(name = "element_table")
public static | ElementEmbeddedId |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/ReadOnlyCollectionTest_final_field.java | {
"start": 459,
"end": 531
} | class ____ {
public final List<Object> list = null;
}
}
| Entity |
java | redisson__redisson | redisson-spring-data/redisson-spring-data-27/src/main/java/org/redisson/spring/data/connection/RedissonReactiveClusterServerCommands.java | {
"start": 1502,
"end": 5027
} | class ____ extends RedissonReactiveServerCommands implements ReactiveClusterServerCommands {
RedissonReactiveClusterServerCommands(CommandReactiveExecutor executorService) {
super(executorService);
}
@Override
public Mono<String> bgReWriteAof(RedisClusterNode node) {
return execute(node, BGREWRITEAOF);
}
@Override
public Mono<String> bgSave(RedisClusterNode node) {
return execute(node, BGSAVE);
}
@Override
public Mono<Long> lastSave(RedisClusterNode node) {
return execute(node, RedisCommands.LASTSAVE);
}
@Override
public Mono<String> save(RedisClusterNode node) {
return execute(node, SAVE);
}
@Override
public Mono<Long> dbSize(RedisClusterNode node) {
return execute(node, RedisCommands.DBSIZE);
}
private static final RedisStrictCommand<String> FLUSHDB = new RedisStrictCommand<String>("FLUSHDB");
@Override
public Mono<String> flushDb(RedisClusterNode node) {
return execute(node, FLUSHDB);
}
private static final RedisStrictCommand<String> FLUSHALL = new RedisStrictCommand<String>("FLUSHALL");
@Override
public Mono<String> flushAll(RedisClusterNode node) {
return execute(node, FLUSHALL);
}
@Override
public Mono<String> flushDb(RedisClusterNode node, RedisServerCommands.FlushOption option) {
if (option == RedisServerCommands.FlushOption.ASYNC) {
return execute(node, FLUSHDB, option.toString());
}
return execute(node, FLUSHDB);
}
@Override
public Mono<String> flushAll(RedisClusterNode node, RedisServerCommands.FlushOption option) {
if (option == RedisServerCommands.FlushOption.ASYNC) {
return execute(node, FLUSHALL, option.toString());
}
return execute(node, FLUSHALL);
}
@Override
public Mono<Properties> info() {
return read(null, StringCodec.INSTANCE, INFO_DEFAULT);
}
@Override
public Mono<Properties> info(String section) {
return read(null, StringCodec.INSTANCE, INFO, section);
}
@Override
public Mono<Properties> info(RedisClusterNode node) {
return execute(node, INFO_DEFAULT);
}
@Override
public Mono<Properties> info(RedisClusterNode node, String section) {
return execute(node, INFO, section);
}
@Override
public Mono<Properties> getConfig(RedisClusterNode node, String pattern) {
return execute(node, CONFIG_GET, pattern);
}
@Override
public Mono<String> setConfig(RedisClusterNode node, String param, String value) {
return execute(node, CONFIG_SET, param, value);
}
@Override
public Mono<String> resetConfigStats(RedisClusterNode node) {
return execute(node, CONFIG_RESETSTAT);
}
@Override
public Mono<Long> time(RedisClusterNode node) {
return execute(node, TIME);
}
private static final StringToRedisClientInfoConverter CONVERTER = new StringToRedisClientInfoConverter();
@Override
public Flux<RedisClientInfo> getClientList(RedisClusterNode node) {
RedisClient entry = getEntry(node);
Mono<List<String>> m = executorService.reactive(() -> {
return executorService.readAsync(entry, StringCodec.INSTANCE, RedisCommands.CLIENT_LIST);
});
return m.flatMapMany(s -> Flux.fromIterable(CONVERTER.convert(s.toArray(new String[s.size()]))));
}
}
| RedissonReactiveClusterServerCommands |
java | elastic__elasticsearch | modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java | {
"start": 3338,
"end": 3739
} | interface ____ return value */
public void testVoidReturn() {
assertEquals(
2,
exec(
"List list = new ArrayList(); "
+ "list.add(2); "
+ "List list2 = new ArrayList(); "
+ "list.forEach(x -> list2.add(x));"
+ "return list[0]"
)
);
}
/** | ignores |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/config/ServiceLocatorFactoryBean.java | {
"start": 4659,
"end": 4771
} | class ____ might then
* look something like this:
*
* <pre class="code">package a.b.c;
*
*public | implementation |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/annotations/GeneratedColumn.java | {
"start": 911,
"end": 1103
} | interface ____ {
/**
* The expression to include in the generated DDL.
*
* @return the SQL expression that is evaluated to generate the column value.
*/
String value();
}
| GeneratedColumn |
java | google__error-prone | annotations/src/main/java/com/google/errorprone/annotations/InlineMeValidationDisabled.java | {
"start": 1022,
"end": 1319
} | interface ____ {
/**
* An explanation as to why the validation is disabled (e.g.: moving from a constructor to a
* static factory method that delegates to this constructor, which is behavior-perserving, but
* isn't strictly an inlining).
*/
String value();
}
| InlineMeValidationDisabled |
java | apache__camel | components/camel-huawei/camel-huaweicloud-frs/src/test/java/org/apache/camel/component/huaweicloud/frs/mock/MockResult.java | {
"start": 1174,
"end": 2136
} | class ____ {
private MockResult() {
}
public static List<DetectFace> getFaceDetectionResult() {
BoundingBox faceBox = new BoundingBox().withWidth(170).withHeight(150).withTopLeftX(30).withTopLeftY(20);
return Collections.singletonList(new DetectFace().withBoundingBox(faceBox));
}
public static CompareFace getCompareFaceResult() {
BoundingBox faceBox = new BoundingBox().withWidth(170).withHeight(150).withTopLeftX(30).withTopLeftY(20);
return new CompareFace().withBoundingBox(faceBox);
}
public static LiveDetectRespVideoresult getLiveDetectResult() {
List<ActionsList> actions = new ArrayList<>();
actions.add(new ActionsList().withAction(1).withConfidence(0.8));
actions.add(new ActionsList().withAction(3).withConfidence(0.9));
return new LiveDetectRespVideoresult().withAlive(true).withPicture("test_face_picture_base_64").withActions(actions);
}
}
| MockResult |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/hybrid/tiered/file/SegmentPartitionFileReader.java | {
"start": 2389,
"end": 7831
} | class ____ implements PartitionFileReader {
private final ByteBuffer reusedHeaderBuffer = BufferReaderWriterUtil.allocatedHeaderBuffer();
/**
* Opened file channels and segment id of related segment files stored in map.
*
* <p>The key is partition id and subpartition id. The value is file channel and segment id.
*/
private final Map<
TieredStoragePartitionId,
Map<TieredStorageSubpartitionId, Tuple2<ReadableByteChannel, Integer>>>
openedChannelAndSegmentIds = new HashMap<>();
private final String dataFilePath;
private FileSystem fileSystem;
public SegmentPartitionFileReader(String dataFilePath) {
this.dataFilePath = dataFilePath;
try {
this.fileSystem = new Path(dataFilePath).getFileSystem();
} catch (IOException e) {
ExceptionUtils.rethrow(e, "Failed to initialize the FileSystem.");
}
}
@Override
public ReadBufferResult readBuffer(
TieredStoragePartitionId partitionId,
TieredStorageSubpartitionId subpartitionId,
int segmentId,
int bufferIndex,
MemorySegment memorySegment,
BufferRecycler recycler,
@Nullable ReadProgress readProgress,
@Nullable CompositeBuffer partialBuffer)
throws IOException {
// Get the channel of the segment file for a subpartition.
Map<TieredStorageSubpartitionId, Tuple2<ReadableByteChannel, Integer>> subpartitionInfo =
openedChannelAndSegmentIds.computeIfAbsent(partitionId, ignore -> new HashMap<>());
Tuple2<ReadableByteChannel, Integer> fileChannelAndSegmentId =
subpartitionInfo.getOrDefault(subpartitionId, Tuple2.of(null, -1));
ReadableByteChannel channel = fileChannelAndSegmentId.f0;
// Create the channel if there is a new segment file for a subpartition.
if (channel == null || fileChannelAndSegmentId.f1 != segmentId) {
if (channel != null) {
channel.close();
}
channel = openNewChannel(partitionId, subpartitionId, segmentId);
if (channel == null) {
// return null if the segment file doesn't exist.
return null;
}
subpartitionInfo.put(subpartitionId, Tuple2.of(channel, segmentId));
}
// Try to read a buffer from the channel.
reusedHeaderBuffer.clear();
int bufferHeaderResult = channel.read(reusedHeaderBuffer);
if (bufferHeaderResult == -1) {
channel.close();
openedChannelAndSegmentIds.get(partitionId).remove(subpartitionId);
return getSingletonReadResult(
new NetworkBuffer(memorySegment, recycler, Buffer.DataType.END_OF_SEGMENT));
}
reusedHeaderBuffer.flip();
BufferHeader header = parseBufferHeader(reusedHeaderBuffer);
int dataBufferResult = channel.read(memorySegment.wrap(0, header.getLength()));
if (dataBufferResult != header.getLength()) {
channel.close();
throw new IOException("The length of data buffer is illegal.");
}
Buffer.DataType dataType = header.getDataType();
return getSingletonReadResult(
new NetworkBuffer(
memorySegment,
recycler,
dataType,
header.isCompressed(),
header.getLength()));
}
@Override
public long getPriority(
TieredStoragePartitionId partitionId,
TieredStorageSubpartitionId subpartitionId,
int segmentId,
int bufferIndex,
@Nullable ReadProgress readProgress)
throws IOException {
// noop
return -1;
}
private ReadableByteChannel openNewChannel(
TieredStoragePartitionId partitionId,
TieredStorageSubpartitionId subpartitionId,
int segmentId)
throws IOException {
Path currentSegmentPath =
getSegmentPath(
dataFilePath, partitionId, subpartitionId.getSubpartitionId(), segmentId);
if (!fileSystem.exists(currentSegmentPath)) {
return null;
}
return Channels.newChannel(fileSystem.open(currentSegmentPath));
}
@Override
public void release() {
openedChannelAndSegmentIds.values().stream()
.map(Map::values)
.flatMap(
(Function<
Collection<Tuple2<ReadableByteChannel, Integer>>,
Stream<Tuple2<ReadableByteChannel, Integer>>>)
Collection::stream)
.filter(Objects::nonNull)
.forEach(
channel -> {
try {
channel.f0.close();
} catch (IOException e) {
ExceptionUtils.rethrow(e);
}
});
}
private static ReadBufferResult getSingletonReadResult(NetworkBuffer buffer) {
return new ReadBufferResult(Collections.singletonList(buffer), false, null);
}
}
| SegmentPartitionFileReader |
java | apache__logging-log4j2 | log4j-1.2-api/src/main/java/org/apache/log4j/spi/LocationInfo.java | {
"start": 1039,
"end": 1834
} | class ____ implements Serializable {
/**
* When location information is not available the constant <code>NA</code> is returned. Current value of this string
* constant is <b>?</b>.
*/
public static final String NA = "?";
static final long serialVersionUID = -1325822038990805636L;
private final StackTraceElement stackTraceElement;
public String fullInfo;
/**
* Constructs a new instance.
*/
public LocationInfo(final StackTraceElement stackTraceElement) {
this.stackTraceElement = Objects.requireNonNull(stackTraceElement, "stackTraceElement");
this.fullInfo = stackTraceElement.toString();
}
/**
* Constructs a new instance.
*
* @param file source file name
* @param declaringClass | LocationInfo |
java | spring-projects__spring-boot | module/spring-boot-rsocket/src/test/java/org/springframework/boot/rsocket/autoconfigure/RSocketStrategiesAutoConfigurationTests.java | {
"start": 5659,
"end": 5944
} | class ____ {
@Bean
RSocketStrategies customRSocketStrategies() {
return RSocketStrategies.builder()
.encoder(CharSequenceEncoder.textPlainOnly())
.decoder(StringDecoder.textPlainOnly())
.build();
}
}
@Configuration(proxyBeanMethods = false)
static | UserStrategies |
java | spring-projects__spring-boot | module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/endpoint/web/annotation/ServletEndpointDiscovererTests.java | {
"start": 8659,
"end": 8872
} | class ____ extends GenericServlet {
@Override
public void service(ServletRequest req, ServletResponse res) throws ServletException, IOException {
}
}
@ServletEndpoint(id = "testservlet")
static | TestServlet |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/access/AnyGetterAccessTest.java | {
"start": 635,
"end": 994
} | class ____ {
public int id;
protected Map<String,String> other = new HashMap<String,String>();
@JsonAnyGetter
public Map<String,String> any() {
return other;
}
@JsonAnySetter
public void set(String name, String value) {
other.put(name, value);
}
}
static | DynaBean |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java | {
"start": 36681,
"end": 37404
} | class ____ implements IndexingOperationListener {
private final AtomicReference<CheckedBiConsumer<ShardId, Engine.Index, Exception>> preIndexCheckRef;
InjectablePreIndexOperationListener(AtomicReference<CheckedBiConsumer<ShardId, Engine.Index, Exception>> preIndexCheckRef) {
this.preIndexCheckRef = preIndexCheckRef;
}
@Override
public Engine.Index preIndex(ShardId shardId, Engine.Index index) {
try {
preIndexCheckRef.get().accept(shardId, index);
} catch (Exception e) {
throw new AssertionError("unexpected error", e);
}
return index;
}
}
}
| InjectablePreIndexOperationListener |
java | quarkusio__quarkus | devtools/maven/src/main/java/io/quarkus/maven/components/Prompter.java | {
"start": 432,
"end": 2670
} | class ____ {
private final String prompt;
private final String defaultValue;
private final Consumer<String> inputConsumer;
public Prompt(String prompt, String defaultValue, Consumer<String> inputConsumer) {
this.prompt = prompt;
this.defaultValue = defaultValue;
this.inputConsumer = inputConsumer;
}
}
private final List<Prompt> prompts = new ArrayList<>();
public Prompter() throws IOException {
}
public Prompter addPrompt(String prompt, Consumer<String> inputConsumer) {
prompts.add(new Prompt(prompt, null, inputConsumer));
return this;
}
public Prompter addPrompt(String prompt, String defaultValue, Consumer<String> inputConsumer) {
prompts.add(new Prompt(prompt, defaultValue, inputConsumer));
return this;
}
public void collectInput() throws IOException {
if (prompts.isEmpty()) {
return;
}
final TerminalConnection connection = new TerminalConnection();
connection.setSignalHandler(interruptionSignalHandler());
try {
read(connection, ReadlineBuilder.builder().enableHistory(false).build(), prompts.iterator());
connection.openBlocking();
} finally {
connection.close();
}
}
private static void read(TerminalConnection connection, Readline readline, Iterator<Prompt> prompts) {
final Prompt prompt = prompts.next();
readline.readline(connection, prompt.prompt, input -> {
prompt.inputConsumer.accept(
(input == null || input.isBlank()) && prompt.defaultValue != null ? prompt.defaultValue : input);
if (!prompts.hasNext()) {
connection.close();
} else {
read(connection, readline, prompts);
}
});
}
private Consumer<Signal> interruptionSignalHandler() {
return new Consumer<Signal>() {
@Override
public void accept(Signal signal) {
if (signal == Signal.INT) {
throw new RuntimeException("Process interrupted");
}
}
};
}
}
| Prompt |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/error/ShouldStartWith_create_Test.java | {
"start": 1438,
"end": 2880
} | class ____ {
private ErrorMessageFactory factory;
@BeforeEach
public void setUp() {
factory = shouldStartWith(list("Yoda", "Luke"), list("Han", "Leia"));
}
@Test
void should_create_error_message() {
// WHEN
String message = factory.create(new TextDescription("Test"), STANDARD_REPRESENTATION);
// THEN
then(message).isEqualTo(format("[Test] %n" +
"Expecting actual:%n" +
" [\"Yoda\", \"Luke\"]%n" +
"to start with:%n" +
" [\"Han\", \"Leia\"]%n"));
}
@Test
void should_create_error_message_with_custom_comparison_strategy() {
// GIVEN
factory = shouldStartWith(list("Yoda", "Luke"), list("Han", "Leia"),
new ComparatorBasedComparisonStrategy(CaseInsensitiveStringComparator.INSTANCE));
// WHEN
String message = factory.create(new TextDescription("Test"), STANDARD_REPRESENTATION);
// THEN
then(message).isEqualTo(format("[Test] %n" +
"Expecting actual:%n" +
" [\"Yoda\", \"Luke\"]%n" +
"to start with:%n" +
" [\"Han\", \"Leia\"]%n" +
"when comparing values using CaseInsensitiveStringComparator"));
}
}
| ShouldStartWith_create_Test |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/strategies/AndArgumentTypeStrategy.java | {
"start": 2275,
"end": 4738
} | class ____ implements ArgumentTypeStrategy {
private final List<? extends ArgumentTypeStrategy> argumentStrategies;
public AndArgumentTypeStrategy(List<? extends ArgumentTypeStrategy> argumentStrategies) {
Preconditions.checkArgument(argumentStrategies.size() > 0);
this.argumentStrategies = argumentStrategies;
}
@Override
public Optional<DataType> inferArgumentType(
CallContext callContext, int argumentPos, boolean throwOnFailure) {
final DataType actualDataType = callContext.getArgumentDataTypes().get(argumentPos);
final LogicalType actualType = actualDataType.getLogicalType();
Optional<DataType> closestDataType = Optional.empty();
for (ArgumentTypeStrategy strategy : argumentStrategies) {
final Optional<DataType> inferredDataType =
strategy.inferArgumentType(callContext, argumentPos, throwOnFailure);
// argument type does not match at all
if (!inferredDataType.isPresent()) {
return Optional.empty();
}
final LogicalType inferredType = inferredDataType.get().getLogicalType();
// a more specific, casted argument type is available
if (!supportsAvoidingCast(actualType, inferredType) && !closestDataType.isPresent()) {
closestDataType = inferredDataType;
}
}
if (closestDataType.isPresent()) {
return closestDataType;
}
return Optional.of(actualDataType);
}
@Override
public Signature.Argument getExpectedArgument(
FunctionDefinition functionDefinition, int argumentPos) {
final String argument =
argumentStrategies.stream()
.map(v -> v.getExpectedArgument(functionDefinition, argumentPos).getType())
.collect(Collectors.joining(" & ", "[", "]"));
return Signature.Argument.of(argument);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
AndArgumentTypeStrategy that = (AndArgumentTypeStrategy) o;
return Objects.equals(argumentStrategies, that.argumentStrategies);
}
@Override
public int hashCode() {
return Objects.hash(argumentStrategies);
}
}
| AndArgumentTypeStrategy |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/bean/BeanMapPutTest.java | {
"start": 1127,
"end": 1946
} | class ____ extends ContextTestSupport {
private final Map<String, String> myMap = new HashMap<>();
@Override
protected Registry createCamelRegistry() throws Exception {
Registry jndi = super.createCamelRegistry();
jndi.bind("myMap", myMap);
return jndi;
}
@Test
public void testMapPut() {
assertEquals(0, myMap.size());
template.sendBody("direct:start", "Hello World");
assertEquals(1, myMap.size());
assertEquals("true", myMap.get("isMaster"));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").bean("myMap", "put('isMaster','true')");
}
};
}
}
| BeanMapPutTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestRollingLevelDBTimelineStore.java | {
"start": 2353,
"end": 2464
} | class ____ verify RollingLevelDBTimelineStore. */
@InterfaceAudience.Private
@InterfaceStability.Unstable
public | to |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/resource/beans/spi/ManagedBeanRegistry.java | {
"start": 537,
"end": 850
} | interface ____ extends Service {
/**
* Get a bean reference by class.
*/
<T> ManagedBean<T> getBean(Class<T> beanClass);
/**
* Get a bean reference by name and contract.
*/
<T> ManagedBean<? extends T> getBean(String beanName, Class<T> beanContract);
/**
* Get a bean reference by | ManagedBeanRegistry |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/http/converter/xml/MappingJackson2XmlHttpMessageConverterTests.java | {
"start": 9919,
"end": 10588
} | class ____ {
@JsonView(MyJacksonView1.class)
private String withView1;
@JsonView(MyJacksonView2.class)
private String withView2;
private String withoutView;
public String getWithView1() {
return withView1;
}
public void setWithView1(String withView1) {
this.withView1 = withView1;
}
public String getWithView2() {
return withView2;
}
public void setWithView2(String withView2) {
this.withView2 = withView2;
}
public String getWithoutView() {
return withoutView;
}
public void setWithoutView(String withoutView) {
this.withoutView = withoutView;
}
}
@SuppressWarnings("serial")
private static | JacksonViewBean |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/nulliteral/EyeColor.java | {
"start": 194,
"end": 234
} | enum ____ {
BROWN,
GREEN,
BLUE
}
| EyeColor |
java | elastic__elasticsearch | x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java | {
"start": 1298,
"end": 23880
} | class ____ extends AbstractUpgradeTestCase {
private Collection<RestClient> twoClients = null;
@Before
private void collectClientsByVersion() throws IOException {
Map<String, RestClient> clientsByVersion = getRestClientByVersion();
if (clientsByVersion.size() == 2) {
// usual case, clients have different versions
twoClients = clientsByVersion.values();
} else {
assert clientsByVersion.size() == 1
: "A rolling upgrade has a maximum of two distinct node versions, found: " + clientsByVersion.keySet();
// tests assumes exactly two clients to simplify some logic
twoClients = new ArrayList<>();
twoClients.add(clientsByVersion.values().iterator().next());
twoClients.add(clientsByVersion.values().iterator().next());
}
}
@After
protected void cleanUpClients() throws IOException {
for (RestClient client : twoClients) {
client.close();
}
twoClients = null;
}
public void testGeneratingTokensInOldCluster() throws Exception {
assumeTrue("this test should only run against the old cluster", CLUSTER_TYPE == ClusterType.OLD);
// Creates two access and refresh tokens and stores them in the token_backwards_compatibility_it index to be used for tests in the
// mixed/upgraded clusters
Map<String, Object> responseMap = createTokens(client(), "test_user", "x-pack-test-password");
String accessToken = (String) responseMap.get("access_token");
assertNotNull(accessToken);
assertAccessTokenWorks(accessToken);
String refreshToken = (String) responseMap.get("refresh_token");
assertNotNull(refreshToken);
storeTokens(client(), 1, accessToken, refreshToken);
responseMap = createTokens(client(), "test_user", "x-pack-test-password");
accessToken = (String) responseMap.get("access_token");
assertNotNull(accessToken);
assertAccessTokenWorks(accessToken);
refreshToken = (String) responseMap.get("refresh_token");
assertNotNull(refreshToken);
storeTokens(client(), 2, accessToken, refreshToken);
}
public void testRefreshingTokensInOldCluster() throws Exception {
assumeTrue("this test should only run against the old cluster", CLUSTER_TYPE == ClusterType.OLD);
// Creates access and refresh tokens and uses the refresh token. The new resulting tokens are used in different phases
Map<String, Object> responseMap = createTokens(client(), "test_user", "x-pack-test-password");
String accessToken = (String) responseMap.get("access_token");
assertNotNull(accessToken);
assertAccessTokenWorks(accessToken);
String refreshToken = (String) responseMap.get("refresh_token");
assertNotNull(refreshToken);
storeTokens(client(), 3, accessToken, refreshToken);
// refresh the token just created. The old token is invalid (tested further) and the new refresh token is tested in the upgraded
// cluster
Map<String, Object> refreshResponseMap = refreshToken(client(), refreshToken);
String refreshedAccessToken = (String) refreshResponseMap.get("access_token");
String refreshedRefreshToken = (String) refreshResponseMap.get("refresh_token");
assertNotNull(refreshedAccessToken);
assertNotNull(refreshedRefreshToken);
assertAccessTokenWorks(refreshedAccessToken);
// assert previous access token still works
assertAccessTokenWorks(accessToken);
storeTokens(client(), 4, refreshedAccessToken, refreshedRefreshToken);
}
public void testInvalidatingTokensInOldCluster() throws Exception {
assumeTrue("this test should only run against the old cluster", CLUSTER_TYPE == ClusterType.OLD);
// Creates access and refresh tokens and tries to use the access tokens several times
Map<String, Object> responseMap = createTokens(client(), "test_user", "x-pack-test-password");
String accessToken = (String) responseMap.get("access_token");
assertNotNull(accessToken);
assertAccessTokenWorks(accessToken);
String refreshToken = (String) responseMap.get("refresh_token");
assertNotNull(refreshToken);
storeTokens(client(), 5, accessToken, refreshToken);
// invalidate access token
invalidateAccessToken(client(), accessToken);
assertAccessTokenDoesNotWork(accessToken);
// invalidate refresh token
invalidateRefreshToken(client(), refreshToken);
assertRefreshTokenInvalidated(refreshToken);
}
public void testAccessTokensWorkInMixedCluster() throws Exception {
// Verify that an old token continues to work during all stages of the rolling upgrade
assumeTrue("this test should only run against the mixed cluster", CLUSTER_TYPE == ClusterType.MIXED);
extendExpirationTimeForAllTokens();
for (int tokenIdx : Arrays.asList(1, 3, 4)) { // 2 is invalidated in another mixed-cluster test, 5 is invalidated in the old cluster
Map<String, Object> source = retrieveStoredTokens(client(), tokenIdx);
assertAccessTokenWorks((String) source.get("token"));
}
}
public void testTokensStayInvalidatedInMixedCluster() throws Exception {
// Verify that an old, invalidated token remains invalidated during all stages of the rolling upgrade
assumeTrue("this test should only run against the mixed cluster", CLUSTER_TYPE == ClusterType.MIXED);
Map<String, Object> source = retrieveStoredTokens(client(), 5);
assertAccessTokenDoesNotWork((String) source.get("token"));
assertRefreshTokenInvalidated((String) source.get("refresh_token"));
}
public void testGeneratingTokensInMixedCluster() throws Exception {
assumeTrue("this test should only run against the mixed cluster", CLUSTER_TYPE == ClusterType.MIXED);
// Creates two access and refresh tokens and stores them in the token_backwards_compatibility_it index to be used for tests in the
// mixed/upgraded clusters
int generatedTokenIdxDuringMixed = 10;
for (RestClient client : twoClients) {
Map<String, Object> responseMap = createTokens(client, "test_user", "x-pack-test-password");
String accessToken = (String) responseMap.get("access_token");
assertNotNull(accessToken);
assertAccessTokenWorks(accessToken);
String refreshToken = (String) responseMap.get("refresh_token");
assertNotNull(refreshToken);
storeTokens(client(), generatedTokenIdxDuringMixed++, accessToken, refreshToken);
responseMap = createTokens(client, "test_user", "x-pack-test-password");
accessToken = (String) responseMap.get("access_token");
assertNotNull(accessToken);
assertAccessTokenWorks(accessToken);
refreshToken = (String) responseMap.get("refresh_token");
assertNotNull(refreshToken);
storeTokens(client(), generatedTokenIdxDuringMixed++, accessToken, refreshToken);
}
}
public void testRefreshingTokensInMixedCluster() throws Exception {
// verify new nodes can refresh tokens created by old nodes and vice versa
assumeTrue("this test should only run against the mixed cluster", CLUSTER_TYPE == ClusterType.MIXED);
for (RestClient client1 : twoClients) {
Map<String, Object> responseMap = createTokens(client1, "test_user", "x-pack-test-password");
String accessToken = (String) responseMap.get("access_token");
assertNotNull(accessToken);
assertAccessTokenWorks(accessToken);
String refreshToken = (String) responseMap.get("refresh_token");
assertNotNull(refreshToken);
for (RestClient client2 : twoClients) {
responseMap = refreshToken(client2, refreshToken);
accessToken = (String) responseMap.get("access_token");
assertNotNull(accessToken);
assertAccessTokenWorks(accessToken);
refreshToken = (String) responseMap.get("refresh_token");
assertNotNull(refreshToken);
}
}
}
public void testInvalidatingTokensInMixedCluster() throws Exception {
// Verify that we can invalidate an access and refresh token in a mixed cluster
assumeTrue("this test should only run against the mixed cluster", CLUSTER_TYPE == ClusterType.MIXED);
Map<String, Object> source = retrieveStoredTokens(client(), 2);
String accessToken = (String) source.get("token");
String refreshToken = (String) source.get("refresh_token");
// The token might be already invalidated by running testInvalidatingTokenInMixedCluster in a previous stage
// we don't try to assert it works before invalidating. This case is handled by testTokenWorksInMixedCluster
invalidateAccessToken(client(), accessToken);
assertAccessTokenDoesNotWork(accessToken);
// invalidate refresh token
invalidateRefreshToken(client(), refreshToken);
assertRefreshTokenInvalidated(refreshToken);
}
public void testTokensStayInvalidatedInUpgradedCluster() throws Exception {
assumeTrue("this test should only run against the upgraded cluster", CLUSTER_TYPE == ClusterType.UPGRADED);
for (int tokenIdx : Arrays.asList(2, 5)) {
Map<String, Object> source = retrieveStoredTokens(client(), tokenIdx);
assertAccessTokenDoesNotWork((String) source.get("token"));
assertRefreshTokenInvalidated((String) source.get("refresh_token"));
}
}
public void testAccessTokensWorkInUpgradedCluster() throws Exception {
assumeTrue("this test should only run against the upgraded cluster", CLUSTER_TYPE == ClusterType.UPGRADED);
extendExpirationTimeForAllTokens();
for (int tokenIdx : Arrays.asList(3, 4, 10, 12)) {
Map<String, Object> source = retrieveStoredTokens(client(), tokenIdx);
assertAccessTokenWorks((String) source.get("token"));
}
}
public void testGeneratingTokensInUpgradedCluster() throws Exception {
assumeTrue("this test should only run against the upgraded cluster", CLUSTER_TYPE == ClusterType.UPGRADED);
Map<String, Object> responseMap = createTokens(client(), "test_user", "x-pack-test-password");
String accessToken = (String) responseMap.get("access_token");
assertNotNull(accessToken);
assertAccessTokenWorks(accessToken);
String refreshToken = (String) responseMap.get("refresh_token");
assertNotNull(refreshToken);
}
public void testRefreshingTokensInUpgradedCluster() throws Exception {
assumeTrue("this test should only run against the upgraded cluster", CLUSTER_TYPE == ClusterType.UPGRADED);
for (int tokenIdx : Arrays.asList(4, 10, 12)) {
Map<String, Object> source = retrieveStoredTokens(client(), tokenIdx);
Map<String, Object> refreshedResponseMap = refreshToken(client(), (String) source.get("refresh_token"));
String accessToken = (String) refreshedResponseMap.get("access_token");
assertNotNull(accessToken);
assertAccessTokenWorks(accessToken);
String refreshToken = (String) refreshedResponseMap.get("refresh_token");
assertNotNull(refreshToken);
}
}
public void testInvalidatingTokensInUpgradedCluster() throws Exception {
assumeTrue("this test should only run against the upgraded cluster", CLUSTER_TYPE == ClusterType.UPGRADED);
for (int tokenIdx : Arrays.asList(1, 11, 13)) {
Map<String, Object> source = retrieveStoredTokens(client(), tokenIdx);
String accessToken = (String) source.get("token");
String refreshToken = (String) source.get("refresh_token");
// invalidate access token
invalidateAccessToken(client(), accessToken);
assertAccessTokenDoesNotWork(accessToken);
// invalidate refresh token
invalidateRefreshToken(client(), refreshToken);
assertRefreshTokenInvalidated(refreshToken);
}
}
private void assertAccessTokenWorks(String token) throws IOException {
for (RestClient client : twoClients) {
Request request = new Request("GET", "/_security/_authenticate");
RequestOptions.Builder options = request.getOptions().toBuilder();
options.addHeader(HttpHeaders.AUTHORIZATION, "Bearer " + token);
request.setOptions(options);
Response authenticateResponse = client.performRequest(request);
assertOK(authenticateResponse);
assertEquals("test_user", entityAsMap(authenticateResponse).get("username"));
}
}
private void assertAccessTokenDoesNotWork(String token) {
for (RestClient client : twoClients) {
Request request = new Request("GET", "/_security/_authenticate");
RequestOptions.Builder options = request.getOptions().toBuilder();
options.addHeader(HttpHeaders.AUTHORIZATION, "Bearer " + token);
request.setOptions(options);
ResponseException e = expectThrows(ResponseException.class, () -> client.performRequest(request));
assertEquals(401, e.getResponse().getStatusLine().getStatusCode());
Response response = e.getResponse();
assertEquals("""
Bearer realm="security", error="invalid_token", error_description="The access token expired"\
""", response.getHeader("WWW-Authenticate"));
}
}
private void assertRefreshTokenInvalidated(String refreshToken) throws IOException {
for (RestClient client : twoClients) {
Request refreshTokenRequest = new Request("POST", "/_security/oauth2/token");
refreshTokenRequest.setJsonEntity(Strings.format("""
{
"refresh_token": "%s",
"grant_type": "refresh_token"
}
""", refreshToken));
ResponseException e = expectThrows(ResponseException.class, () -> client.performRequest(refreshTokenRequest));
assertEquals(400, e.getResponse().getStatusLine().getStatusCode());
Response response = e.getResponse();
Map<String, Object> responseMap = entityAsMap(response);
assertEquals("invalid_grant", responseMap.get("error"));
assertEquals("token has been invalidated", responseMap.get("error_description"));
}
}
@SuppressWarnings("unchecked")
private Map<String, RestClient> getRestClientByVersion() throws IOException {
Response response = client().performRequest(new Request("GET", "_nodes"));
assertOK(response);
ObjectPath objectPath = ObjectPath.createFromResponse(response);
Map<String, Object> nodesAsMap = objectPath.evaluate("nodes");
Map<String, List<HttpHost>> hostsByVersion = new HashMap<>();
for (Map.Entry<String, Object> entry : nodesAsMap.entrySet()) {
Map<String, Object> nodeDetails = (Map<String, Object>) entry.getValue();
String version = (String) nodeDetails.get("version");
Map<String, Object> httpInfo = (Map<String, Object>) nodeDetails.get("http");
hostsByVersion.computeIfAbsent(version, k -> new ArrayList<>()).add(HttpHost.create((String) httpInfo.get("publish_address")));
}
Map<String, RestClient> clientsByVersion = new HashMap<>();
for (Map.Entry<String, List<HttpHost>> entry : hostsByVersion.entrySet()) {
clientsByVersion.put(entry.getKey(), buildClient(restClientSettings(), entry.getValue().toArray(new HttpHost[0])));
}
return clientsByVersion;
}
private Map<String, Object> createTokens(RestClient client, String username, String password) throws IOException {
final Request createTokenRequest = new Request("POST", "/_security/oauth2/token");
createTokenRequest.setJsonEntity(Strings.format("""
{
"username": "%s",
"password": "%s",
"grant_type": "password"
}""", username, password));
Response response = client.performRequest(createTokenRequest);
assertOK(response);
return entityAsMap(response);
}
private void storeTokens(RestClient client, int idx, String accessToken, String refreshToken) throws IOException {
final Request indexRequest = new Request("PUT", "token_backwards_compatibility_it/_doc/old_cluster_token" + idx);
indexRequest.setJsonEntity(Strings.format("""
{
"token": "%s",
"refresh_token": "%s"
}""", accessToken, refreshToken));
Response indexResponse1 = client.performRequest(indexRequest);
assertOK(indexResponse1);
}
@SuppressWarnings("unchecked")
private Map<String, Object> retrieveStoredTokens(RestClient client, int tokenIdx) throws IOException {
Request getRequest = new Request("GET", "token_backwards_compatibility_it/_doc/old_cluster_token" + tokenIdx);
Response getResponse = client().performRequest(getRequest);
assertOK(getResponse);
return (Map<String, Object>) entityAsMap(getResponse).get("_source");
}
private Map<String, Object> refreshToken(RestClient client, String refreshToken) throws IOException {
final Request refreshTokenRequest = new Request("POST", "/_security/oauth2/token");
refreshTokenRequest.setJsonEntity(Strings.format("""
{
"refresh_token": "%s",
"grant_type": "refresh_token"
}""", refreshToken));
Response refreshResponse = client.performRequest(refreshTokenRequest);
assertOK(refreshResponse);
return entityAsMap(refreshResponse);
}
private void invalidateAccessToken(RestClient client, String accessToken) throws IOException {
Request invalidateRequest = new Request("DELETE", "/_security/oauth2/token");
invalidateRequest.setJsonEntity("{\"token\": \"" + accessToken + "\"}");
invalidateRequest.addParameter("error_trace", "true");
Response invalidateResponse = client.performRequest(invalidateRequest);
assertOK(invalidateResponse);
}
private void invalidateRefreshToken(RestClient client, String refreshToken) throws IOException {
Request invalidateRequest = new Request("DELETE", "/_security/oauth2/token");
invalidateRequest.setJsonEntity("{\"refresh_token\": \"" + refreshToken + "\"}");
invalidateRequest.addParameter("error_trace", "true");
Response invalidateResponse = client.performRequest(invalidateRequest);
assertOK(invalidateResponse);
}
/**
* Hack to account for long-running tests. The max lifetime of a token is 1h, but sometimes our tests take longer so tokens created in
* the old cluster may be expired by the time we run tests in the mixed/upgraded clusters.
*
* This method extends the expiration time of all tokens by writing to the `.security-token` index directly.
*
* We extend the expiration time for all tokens, instead of selected ones because it requires true hackery to get a hold of a docId
* given only an access token and refresh token.
*/
private void extendExpirationTimeForAllTokens() throws Exception {
final List<String> tokensIds = getAllTokenIds();
final var bulkRequest = new Request("POST", "/.security-tokens/_bulk?refresh=true");
bulkRequest.setOptions(bulkRequest.getOptions().toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE));
final long newExpirationTime = Instant.now().plus(1, ChronoUnit.HOURS).toEpochMilli();
bulkRequest.setJsonEntity(tokensIds.stream().map(tokenId -> Strings.format("""
{"update": {"_id": "%s"}}
{"doc": {"access_token": {"user_token": {"expiration_time": %s}}}}
""", tokenId, newExpirationTime)).collect(Collectors.joining("\n")));
final Response bulkResponse = client().performRequest(bulkRequest);
assertOK(bulkResponse);
final Map<String, Object> bulkResponseMap = entityAsMap(bulkResponse);
assertEquals(false, bulkResponseMap.get("errors"));
}
private void refreshSecurityTokensIndex() throws IOException {
// Ensure all tokens are available for search (token creation and other tokens operations have a WAIT_UNTIL refresh policy)
final var refreshRequest = new Request("POST", "/.security-tokens/_refresh");
refreshRequest.setOptions(refreshRequest.getOptions().toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE));
assertOK(client().performRequest(refreshRequest));
}
private List<String> getAllTokenIds() throws IOException {
refreshSecurityTokensIndex();
final long searchSize = 100L;
final var searchRequest = new Request("POST", "/.security-tokens/_search?size=" + searchSize);
searchRequest.setOptions(searchRequest.getOptions().toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE));
searchRequest.setJsonEntity("""
{
"query": {
"term": {
"doc_type": "token"
}
}
}""");
final Response searchResponse = client().performRequest(searchRequest);
assertOK(searchResponse);
var response = SearchResponseUtils.responseAsSearchResponse(searchResponse);
try {
final SearchHits searchHits = response.getHits();
assertThat(
"Search request used with size parameter that was too small to fetch all tokens.",
searchHits.getTotalHits().value(),
lessThanOrEqualTo(searchSize)
);
final List<String> tokenIds = Arrays.stream(searchHits.getHits()).map(searchHit -> {
assertNotNull(searchHit.getId());
return searchHit.getId();
}).toList();
assertThat(tokenIds, not(empty()));
return tokenIds;
} finally {
response.decRef();
}
}
}
| TokenBackwardsCompatibilityIT |
java | quarkusio__quarkus | extensions/undertow/deployment/src/test/java/io/quarkus/undertow/test/AnnotatedServletInitParam.java | {
"start": 606,
"end": 968
} | class ____ extends HttpServlet {
public static final String SERVLET_ENDPOINT = "/annotatedInitParamServlet";
@Override
protected void doGet(final HttpServletRequest req, final HttpServletResponse resp) throws ServletException, IOException {
resp.getWriter().println(getInitParameter("AnnotatedInitParamName"));
}
}
| AnnotatedServletInitParam |
java | apache__hadoop | hadoop-tools/hadoop-gcp/src/test/java/org/apache/hadoop/fs/gs/contract/ITestGoogleContractAppend.java | {
"start": 1080,
"end": 1434
} | class ____ extends AbstractContractAppendTest {
@Override
protected AbstractFSContract createContract(Configuration conf) {
return new GoogleContract(conf);
}
@Override
public void testRenameFileBeingAppended() throws Throwable {
ContractTestUtils.skip("blobstores can not rename file that being appended");
}
}
| ITestGoogleContractAppend |
java | quarkusio__quarkus | extensions/hibernate-orm/runtime-dev/src/main/java/io/quarkus/hibernate/orm/dev/HibernateOrmDevIntegrator.java | {
"start": 577,
"end": 1800
} | class ____ implements Integrator {
@Override
public void integrate(Metadata metadata, BootstrapContext bootstrapContext, SessionFactoryImplementor sf) {
String name = (String) sf.getProperties()
.get(AvailableSettings.PERSISTENCE_UNIT_NAME);
HibernateOrmDevController.get().pushPersistenceUnit(
sf,
getPersistenceUnitDescriptor(name, sf),
name,
metadata,
sf.getServiceRegistry(),
(String) sf.getProperties().get(JAKARTA_HBM2DDL_LOAD_SCRIPT_SOURCE));
}
private static QuarkusPersistenceUnitDescriptor getPersistenceUnitDescriptor(String name, SessionFactoryImplementor sf) {
// This is not great but avoids needing to depend on reactive
boolean isReactive = sf.getClass().getPackage().getName().contains("reactive");
return PersistenceUnitsHolder.getPersistenceUnitDescriptor(name, isReactive);
}
@Override
public void disintegrate(SessionFactoryImplementor sessionFactoryImplementor,
SessionFactoryServiceRegistry sessionFactoryServiceRegistry) {
HibernateOrmDevController.get().clearData();
}
}
| HibernateOrmDevIntegrator |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/creators/TestCustomValueInstDefaults.java | {
"start": 14403,
"end": 15083
} | class ____ extends StdValueInstantiator {
protected VerifyingValueInstantiator(StdValueInstantiator src) {
super(src);
}
@Override
public Object createFromObjectWith(DeserializationContext ctxt, SettableBeanProperty[] props, PropertyValueBuffer buffer)
{
for (SettableBeanProperty prop : props) {
assertTrue(buffer.hasParameter(prop), "prop " + prop.getName() + " was expected to have buffer.hasParameter(prop) be true but was false");
}
return super.createFromObjectWith(ctxt, props, buffer);
}
}
// [databind#1432]
public static | VerifyingValueInstantiator |
java | spring-projects__spring-boot | module/spring-boot-web-server/src/main/java/org/springframework/boot/web/server/autoconfigure/ServerProperties.java | {
"start": 8593,
"end": 8879
} | enum ____ {
/**
* Use the underlying container's native support for forwarded headers.
*/
NATIVE,
/**
* Use Spring's support for handling forwarded headers.
*/
FRAMEWORK,
/**
* Ignore X-Forwarded-* headers.
*/
NONE
}
public static | ForwardHeadersStrategy |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KubernetesJobComponentBuilderFactory.java | {
"start": 1381,
"end": 1884
} | interface ____ {
/**
* Kubernetes Job (camel-kubernetes)
* Perform operations on Kubernetes Jobs.
*
* Category: container,cloud
* Since: 2.23
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesJobComponentBuilder kubernetesJob() {
return new KubernetesJobComponentBuilderImpl();
}
/**
* Builder for the Kubernetes Job component.
*/
| KubernetesJobComponentBuilderFactory |
java | google__error-prone | core/src/test/java/com/google/errorprone/matchers/MethodReturnsNonNullToStringTest.java | {
"start": 2682,
"end": 3970
} | class ____ {
public String ToString() {
return "match should be case sensitive";
}
public String testMethodWithDifferentCase() {
return ToString();
}
}
""");
assertCompiles(
methodInvocationMatches(/* shouldMatch= */ false, Matchers.methodReturnsNonNull()));
}
@Test
public void shouldNotMatchWhenMethodSignatureDiffers() {
writeFile(
"A.java",
" public String toString(int i) {",
" return \"toString method with param\";",
" }",
" public String testMethodWithParam() {",
" return toString(3);",
" }",
"}");
}
private Scanner methodInvocationMatches(boolean shouldMatch, Matcher<ExpressionTree> toMatch) {
return new Scanner() {
@Override
public Void visitMethodInvocation(MethodInvocationTree node, VisitorState visitorState) {
ExpressionTree methodSelect = node.getMethodSelect();
if (!methodSelect.toString().equals("super")) {
assertWithMessage(methodSelect.toString())
.that(!shouldMatch ^ toMatch.matches(node, visitorState))
.isTrue();
}
return super.visitMethodInvocation(node, visitorState);
}
};
}
}
| A |
java | apache__camel | components/camel-servicenow/camel-servicenow-component/src/main/java/org/apache/camel/component/servicenow/ServiceNowComponent.java | {
"start": 1450,
"end": 5503
} | class ____ extends DefaultComponent implements SSLContextParametersAware {
@Metadata(label = "advanced")
private String instanceName;
@Metadata
private ServiceNowConfiguration configuration;
@Metadata(label = "security", defaultValue = "false")
private boolean useGlobalSslContextParameters;
public ServiceNowComponent() {
this(null);
}
public ServiceNowComponent(CamelContext camelContext) {
super(camelContext);
this.configuration = new ServiceNowConfiguration();
registerExtension(ServiceNowMetaDataExtension::new);
}
// ****************************************
// Properties
// ****************************************
public String getInstanceName() {
return instanceName;
}
/**
* The ServiceNow instance name
*/
public void setInstanceName(String instanceName) {
this.instanceName = instanceName;
}
public ServiceNowConfiguration getConfiguration() {
return configuration;
}
/**
* Component configuration
*/
public void setConfiguration(ServiceNowConfiguration configuration) {
this.configuration = configuration;
}
@Override
public boolean isUseGlobalSslContextParameters() {
return this.useGlobalSslContextParameters;
}
/**
* Enable usage of global SSL context parameters.
*/
@Override
public void setUseGlobalSslContextParameters(boolean useGlobalSslContextParameters) {
this.useGlobalSslContextParameters = useGlobalSslContextParameters;
}
// ****************************************
// Component impl
// ****************************************
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
final CamelContext context = getCamelContext();
final ServiceNowConfiguration configuration = this.configuration.copy();
Map<String, Object> models = PropertiesHelper.extractProperties(parameters, "model.");
for (Map.Entry<String, Object> entry : models.entrySet()) {
configuration.addModel(
entry.getKey(),
EndpointHelper.resolveParameter(context, (String) entry.getValue(), Class.class));
}
Map<String, Object> requestModels = PropertiesHelper.extractProperties(parameters, "requestModel.");
for (Map.Entry<String, Object> entry : requestModels.entrySet()) {
configuration.addRequestModel(
entry.getKey(),
EndpointHelper.resolveParameter(context, (String) entry.getValue(), Class.class));
}
Map<String, Object> responseModels = PropertiesHelper.extractProperties(parameters, "responseModel.");
for (Map.Entry<String, Object> entry : responseModels.entrySet()) {
configuration.addResponseModel(
entry.getKey(),
EndpointHelper.resolveParameter(context, (String) entry.getValue(), Class.class));
}
if (ObjectHelper.isEmpty(remaining)) {
// If an instance is not set on the endpoint uri, use the one set on component.
remaining = instanceName;
}
String instanceName = getCamelContext().resolvePropertyPlaceholders(remaining);
ServiceNowEndpoint endpoint = new ServiceNowEndpoint(uri, this, configuration, instanceName);
setProperties(endpoint, parameters);
if (!configuration.hasApiUrl()) {
configuration.setApiUrl(String.format("https://%s.service-now.com/api", instanceName));
}
if (!configuration.hasOauthTokenUrl()) {
configuration.setOauthTokenUrl(String.format("https://%s.service-now.com/oauth_token.do", instanceName));
}
if (configuration.getSslContextParameters() == null) {
configuration.setSslContextParameters(retrieveGlobalSslContextParameters());
}
return endpoint;
}
}
| ServiceNowComponent |
java | FasterXML__jackson-core | src/test/java/tools/jackson/core/unittest/read/DataInputTest.java | {
"start": 450,
"end": 1683
} | class ____
extends JacksonCoreTestBase
{
private final JsonFactory JSON_F = new JsonFactory();
@Test
void eofAfterArray() throws Exception
{
JsonParser p = createParser(JSON_F, MODE_DATA_INPUT, "[ 1 ] ");
assertToken(JsonToken.START_ARRAY, p.nextToken());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
assertToken(JsonToken.END_ARRAY, p.nextToken());
assertNull(p.nextToken());
p.close();
}
@Test
void eofAfterObject() throws Exception
{
JsonParser p = createParser(JSON_F, MODE_DATA_INPUT, "{ \"value\" : true }");
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken());
assertToken(JsonToken.VALUE_TRUE, p.nextToken());
assertToken(JsonToken.END_OBJECT, p.nextToken());
assertNull(p.nextToken());
p.close();
}
@Test
void eofAfterScalar() throws Exception
{
JsonParser p = createParser(JSON_F, MODE_DATA_INPUT, "\"foobar\" ");
assertToken(JsonToken.VALUE_STRING, p.nextToken());
assertEquals("foobar", p.getString());
assertNull(p.nextToken());
p.close();
}
}
| DataInputTest |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/SearchInput.java | {
"start": 10376,
"end": 10726
} | interface ____ {
ParseField REQUEST = new ParseField("request");
ParseField EXTRACT = new ParseField("extract");
ParseField TIMEOUT = new ParseField("timeout_in_millis");
ParseField TIMEOUT_HUMAN = new ParseField("timeout");
ParseField DYNAMIC_NAME_TIMEZONE = new ParseField("dynamic_name_timezone");
}
}
| Field |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/Exceptions.java | {
"start": 28667,
"end": 29146
} | class ____ extends UnsupportedOperationException {
ErrorCallbackNotImplemented(Throwable cause) {
super(cause);
}
@Override
public synchronized Throwable fillInStackTrace() {
return this;
}
private static final long serialVersionUID = 2491425227432776143L;
}
/**
* An error signal from downstream subscribers consuming data when their state is
* denying any additional event.
*
* @author Stephane Maldini
*/
static final | ErrorCallbackNotImplemented |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/SimplifyJoinConditionRule.java | {
"start": 1466,
"end": 2948
} | class ____
extends RelRule<SimplifyJoinConditionRule.SimplifyJoinConditionRuleConfig> {
public static final SimplifyJoinConditionRule INSTANCE =
SimplifyJoinConditionRule.SimplifyJoinConditionRuleConfig.DEFAULT.toRule();
protected SimplifyJoinConditionRule(SimplifyJoinConditionRuleConfig config) {
super(config);
}
public void onMatch(RelOptRuleCall call) {
LogicalJoin join = call.rel(0);
RexNode condition = join.getCondition();
if (join.getCondition().isAlwaysTrue()) {
return;
}
RexNode simpleCondExp =
FlinkRexUtil.simplify(
join.getCluster().getRexBuilder(),
condition,
join.getCluster().getPlanner().getExecutor());
RexNode newCondExp = RexUtil.pullFactors(join.getCluster().getRexBuilder(), simpleCondExp);
if (newCondExp.equals(condition)) {
return;
}
LogicalJoin newJoin =
join.copy(
join.getTraitSet(),
newCondExp,
join.getLeft(),
join.getRight(),
join.getJoinType(),
join.isSemiJoinDone());
call.transformTo(newJoin);
call.getPlanner().prune(join);
}
/** Rule configuration. */
@Value.Immutable(singleton = false)
public | SimplifyJoinConditionRule |
java | quarkusio__quarkus | independent-projects/arc/runtime/src/main/java/io/quarkus/arc/LockException.java | {
"start": 56,
"end": 252
} | class ____ extends RuntimeException {
private static final long serialVersionUID = 4486284740873061615L;
public LockException(String message) {
super(message);
}
}
| LockException |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/appender/OutputStreamAppender.java | {
"start": 3834,
"end": 7052
} | class ____ implements ManagerFactory<OutputStreamManager, FactoryData> {
/**
* Creates an OutputStreamManager.
*
* @param name
* The name of the entity to manage.
* @param data
* The data required to create the entity.
* @return The OutputStreamManager
*/
@Override
public OutputStreamManager createManager(final String name, final FactoryData data) {
return new OutputStreamManager(data.os, data.name, data.layout, true);
}
}
private static OutputStreamManagerFactory factory = new OutputStreamManagerFactory();
/**
* Creates an OutputStream Appender.
*
* @param layout
* The layout to use or null to get the default layout.
* @param filter
* The Filter or null.
* @param target
* an output stream.
* @param follow
* If true will follow changes to the underlying output stream.
* Use false as the default.
* @param name
* The name of the Appender (required).
* @param ignore
* If {@code "true"} (default) exceptions encountered when
* appending events are logged; otherwise they are propagated to
* the caller. Use true as the default.
* @return The ConsoleAppender.
*/
@PluginFactory
public static OutputStreamAppender createAppender(
Layout<? extends Serializable> layout,
final Filter filter,
final OutputStream target,
final String name,
final boolean follow,
final boolean ignore) {
if (name == null) {
LOGGER.error("No name provided for OutputStreamAppender");
return null;
}
if (layout == null) {
layout = PatternLayout.createDefaultLayout();
}
return new OutputStreamAppender(name, layout, filter, getManager(target, follow, layout), ignore, null);
}
private static OutputStreamManager getManager(
final OutputStream target, final boolean follow, final Layout<? extends Serializable> layout) {
final OutputStream os = target == null ? NullOutputStream.getInstance() : new CloseShieldOutputStream(target);
final OutputStream targetRef = target == null ? os : target;
final String managerName =
targetRef.getClass().getName() + "@" + Integer.toHexString(targetRef.hashCode()) + '.' + follow;
return OutputStreamManager.getManager(managerName, new FactoryData(os, managerName, layout), factory);
}
@PluginBuilderFactory
public static <B extends Builder<B>> B newBuilder() {
return new Builder<B>().asBuilder();
}
private OutputStreamAppender(
final String name,
final Layout<? extends Serializable> layout,
final Filter filter,
final OutputStreamManager manager,
final boolean ignoreExceptions,
final Property[] properties) {
super(name, layout, filter, ignoreExceptions, true, properties, manager);
}
}
| OutputStreamManagerFactory |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/core/BeanPropertyRowMapper.java | {
"start": 4059,
"end": 4228
} | class ____<T> implements RowMapper<T> {
/** Logger available to subclasses. */
protected final Log logger = LogFactory.getLog(getClass());
/** The | BeanPropertyRowMapper |
java | apache__camel | components/camel-google/camel-google-calendar/src/generated/java/org/apache/camel/component/google/calendar/internal/GoogleCalendarApiName.java | {
"start": 277,
"end": 707
} | enum ____ implements ApiName {
ACL("acl"),
LIST("list"),
CALENDARS("calendars"),
CHANNELS("channels"),
COLORS("colors"),
FREEBUSY("freebusy"),
EVENTS("events"),
SETTINGS("settings");
private final String name;
private GoogleCalendarApiName(String name) {
this.name = name;
}
@Override
public String getName() {
return name;
}
}
| GoogleCalendarApiName |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_2200/Issue2238.java | {
"start": 707,
"end": 1833
} | class ____ {
private BigDecimal maxChargeMoney;
private BigDecimal minChargeMoney;
private BigDecimal maxWithdrawMoney;
private BigDecimal minWithdrawMoney;
public BigDecimal getMaxChargeMoney() {
return maxChargeMoney;
}
public void setMaxChargeMoney(BigDecimal maxChargeMoney) {
this.maxChargeMoney = maxChargeMoney;
}
public BigDecimal getMinChargeMoney() {
return minChargeMoney;
}
public void setMinChargeMoney(BigDecimal minChargeMoney) {
this.minChargeMoney = minChargeMoney;
}
public BigDecimal getMaxWithdrawMoney() {
return maxWithdrawMoney;
}
public void setMaxWithdrawMoney(BigDecimal maxWithdrawMoney) {
this.maxWithdrawMoney = maxWithdrawMoney;
}
public BigDecimal getMinWithdrawMoney() {
return minWithdrawMoney;
}
public void setMinWithdrawMoney(BigDecimal minWithdrawMoney) {
this.minWithdrawMoney = minWithdrawMoney;
}
}
}
| CapitalLimitMonenyDTO |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryBuilderTestCase.java | {
"start": 10004,
"end": 10418
} | class ____ extends PointTester {
public LeftTester() {
super(randomDoubleBetween(-Double.MAX_VALUE, GeoUtils.MIN_LON, true));
}
@Override
public void fillIn(double coordinate, GeoBoundingBoxQueryBuilder qb) {
qb.setCorners(qb.topLeft().getLat(), coordinate, qb.bottomRight().getLat(), qb.bottomRight().getLon());
}
}
protected static | LeftTester |
java | elastic__elasticsearch | modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/TransportGetDatabaseConfigurationAction.java | {
"start": 1938,
"end": 13391
} | class ____ extends TransportNodesAction<
GetDatabaseConfigurationAction.Request,
GetDatabaseConfigurationAction.Response,
GetDatabaseConfigurationAction.NodeRequest,
GetDatabaseConfigurationAction.NodeResponse,
List<DatabaseConfigurationMetadata>> {
private final DatabaseNodeService databaseNodeService;
private final ProjectResolver projectResolver;
@Inject
public TransportGetDatabaseConfigurationAction(
TransportService transportService,
ClusterService clusterService,
ThreadPool threadPool,
ActionFilters actionFilters,
DatabaseNodeService databaseNodeService,
ProjectResolver projectResolver
) {
super(
GetDatabaseConfigurationAction.NAME,
clusterService,
transportService,
actionFilters,
GetDatabaseConfigurationAction.NodeRequest::new,
threadPool.executor(ThreadPool.Names.MANAGEMENT)
);
this.databaseNodeService = databaseNodeService;
this.projectResolver = projectResolver;
}
protected List<DatabaseConfigurationMetadata> createActionContext(Task task, GetDatabaseConfigurationAction.Request request) {
final Set<String> ids;
if (request.getDatabaseIds().length == 0) {
// if we did not ask for a specific name, then return all databases
ids = Set.of("*");
} else {
ids = new LinkedHashSet<>(Arrays.asList(request.getDatabaseIds()));
}
if (ids.size() > 1 && ids.stream().anyMatch(Regex::isSimpleMatchPattern)) {
throw new IllegalArgumentException(
"wildcard only supports a single value, please use comma-separated values or a single wildcard value"
);
}
List<DatabaseConfigurationMetadata> results = new ArrayList<>();
ProjectMetadata projectMetadata = projectResolver.getProjectMetadata(clusterService.state());
PersistentTasksCustomMetadata tasksMetadata = PersistentTasksCustomMetadata.get(projectMetadata);
String geoIpTaskId = GeoIpDownloaderTaskExecutor.getTaskId(projectMetadata.id(), projectResolver.supportsMultipleProjects());
for (String id : ids) {
results.addAll(getWebDatabases(geoIpTaskId, tasksMetadata, id));
results.addAll(getMaxmindDatabases(projectMetadata, id));
}
return results;
}
/*
* This returns read-only database information about the databases managed by the standard downloader
*/
private static Collection<DatabaseConfigurationMetadata> getWebDatabases(
String geoIpTaskId,
PersistentTasksCustomMetadata tasksMetadata,
String id
) {
List<DatabaseConfigurationMetadata> webDatabases = new ArrayList<>();
if (tasksMetadata != null) {
PersistentTasksCustomMetadata.PersistentTask<?> maybeGeoIpTask = tasksMetadata.getTask(geoIpTaskId);
if (maybeGeoIpTask != null) {
GeoIpTaskState geoIpTaskState = (GeoIpTaskState) maybeGeoIpTask.getState();
if (geoIpTaskState != null) {
Map<String, GeoIpTaskState.Metadata> databases = geoIpTaskState.getDatabases();
for (String databaseFileName : databases.keySet()) {
String databaseName = getDatabaseNameForFileName(databaseFileName);
String databaseId = getDatabaseIdForFileName(DatabaseConfiguration.Web.NAME, databaseFileName);
if ((Regex.isSimpleMatchPattern(id) && Regex.simpleMatch(id, databaseId)) || id.equals(databaseId)) {
webDatabases.add(
new DatabaseConfigurationMetadata(
new DatabaseConfiguration(databaseId, databaseName, new DatabaseConfiguration.Web()),
-1,
databases.get(databaseFileName).lastUpdate()
)
);
}
}
}
}
}
return webDatabases;
}
private static String getDatabaseIdForFileName(String providerType, String databaseFileName) {
return "_" + providerType + "_" + Base64.getEncoder().encodeToString(databaseFileName.getBytes(StandardCharsets.UTF_8));
}
private static String getDatabaseNameForFileName(String databaseFileName) {
return databaseFileName.endsWith(".mmdb")
? databaseFileName.substring(0, databaseFileName.length() + 1 - ".mmmdb".length())
: databaseFileName;
}
/*
* This returns information about databases that are downloaded from maxmind.
*/
private static Collection<DatabaseConfigurationMetadata> getMaxmindDatabases(ProjectMetadata projectMetadata, String id) {
List<DatabaseConfigurationMetadata> maxmindDatabases = new ArrayList<>();
final IngestGeoIpMetadata geoIpMeta = projectMetadata.custom(IngestGeoIpMetadata.TYPE, IngestGeoIpMetadata.EMPTY);
if (Regex.isSimpleMatchPattern(id)) {
for (Map.Entry<String, DatabaseConfigurationMetadata> entry : geoIpMeta.getDatabases().entrySet()) {
if (Regex.simpleMatch(id, entry.getKey())) {
maxmindDatabases.add(entry.getValue());
}
}
} else {
DatabaseConfigurationMetadata meta = geoIpMeta.getDatabases().get(id);
if (meta != null) {
maxmindDatabases.add(meta);
}
}
return maxmindDatabases;
}
@Override
protected void newResponseAsync(
Task task,
GetDatabaseConfigurationAction.Request request,
List<DatabaseConfigurationMetadata> results,
List<GetDatabaseConfigurationAction.NodeResponse> responses,
List<FailedNodeException> failures,
ActionListener<GetDatabaseConfigurationAction.Response> listener
) {
ActionListener.run(listener, l -> {
List<DatabaseConfigurationMetadata> combinedResults = new ArrayList<>(results);
combinedResults.addAll(
deduplicateNodeResponses(responses, results.stream().map(result -> result.database().name()).collect(Collectors.toSet()))
);
ActionListener.respondAndRelease(
l,
new GetDatabaseConfigurationAction.Response(combinedResults, clusterService.getClusterName(), responses, failures)
);
});
}
/*
* This deduplicates the nodeResponses by name, favoring the most recent. This is because each node is reporting the local databases
* that it has, and we don't want to report duplicates to the user. It also filters out any that already exist in the set of
* preExistingNames. This is because the non-local databases take precedence, so any local database with the same name as a non-local
* one will not be used.
* Non-private for unit testing
*/
static Collection<DatabaseConfigurationMetadata> deduplicateNodeResponses(
List<GetDatabaseConfigurationAction.NodeResponse> nodeResponses,
Set<String> preExistingNames
) {
/*
* Each node reports the list of databases that are in its config/ingest-geoip directory. For the sake of this API we assume all
* local databases with the same name are the same database, and deduplicate by name and just return the newest.
*/
return nodeResponses.stream()
.flatMap(response -> response.getDatabases().stream())
.collect(
Collectors.groupingBy(
database -> database.database().name(),
Collectors.maxBy(Comparator.comparing(DatabaseConfigurationMetadata::modifiedDate))
)
)
.values()
.stream()
.filter(Optional::isPresent)
.map(Optional::get)
.filter(database -> preExistingNames.contains(database.database().name()) == false)
.toList();
}
@Override
protected GetDatabaseConfigurationAction.Response newResponse(
GetDatabaseConfigurationAction.Request request,
List<GetDatabaseConfigurationAction.NodeResponse> nodeResponses,
List<FailedNodeException> failures
) {
throw new UnsupportedOperationException("Use newResponseAsync instead");
}
@Override
protected GetDatabaseConfigurationAction.NodeRequest newNodeRequest(GetDatabaseConfigurationAction.Request request) {
return new GetDatabaseConfigurationAction.NodeRequest(request.getDatabaseIds());
}
@Override
protected GetDatabaseConfigurationAction.NodeResponse newNodeResponse(StreamInput in, DiscoveryNode node) throws IOException {
return new GetDatabaseConfigurationAction.NodeResponse(in);
}
@Override
protected GetDatabaseConfigurationAction.NodeResponse nodeOperation(GetDatabaseConfigurationAction.NodeRequest request, Task task) {
final Set<String> ids;
if (request.getDatabaseIds().length == 0) {
// if we did not ask for a specific name, then return all databases
ids = Set.of("*");
} else {
ids = new LinkedHashSet<>(Arrays.asList(request.getDatabaseIds()));
}
if (ids.size() > 1 && ids.stream().anyMatch(Regex::isSimpleMatchPattern)) {
throw new IllegalArgumentException(
"wildcard only supports a single value, please use comma-separated values or a single wildcard value"
);
}
List<DatabaseConfigurationMetadata> results = new ArrayList<>();
for (String id : ids) {
results.addAll(getLocalDatabases(databaseNodeService, id));
}
return new GetDatabaseConfigurationAction.NodeResponse(transportService.getLocalNode(), results);
}
/*
* This returns information about the databases that users have put in the config/ingest-geoip directory on the node.
*/
private static List<DatabaseConfigurationMetadata> getLocalDatabases(DatabaseNodeService databaseNodeService, String id) {
List<DatabaseConfigurationMetadata> localDatabases = new ArrayList<>();
Map<String, DatabaseNodeService.ConfigDatabaseDetail> configDatabases = databaseNodeService.getConfigDatabasesDetail();
for (DatabaseNodeService.ConfigDatabaseDetail configDatabase : configDatabases.values()) {
String databaseId = getDatabaseIdForFileName(DatabaseConfiguration.Local.NAME, configDatabase.name());
if ((Regex.isSimpleMatchPattern(id) && Regex.simpleMatch(id, databaseId)) || id.equals(databaseId)) {
localDatabases.add(
new DatabaseConfigurationMetadata(
new DatabaseConfiguration(
databaseId,
getDatabaseNameForFileName(configDatabase.name()),
new DatabaseConfiguration.Local(configDatabase.type())
),
-1,
configDatabase.buildDateInMillis() == null ? -1 : configDatabase.buildDateInMillis()
)
);
}
}
return localDatabases;
}
}
| TransportGetDatabaseConfigurationAction |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/Policy.java | {
"start": 1883,
"end": 2655
} | interface ____ {
/**
* Hook invoked before the wrap.
* <p/>
* This allows you to do any custom logic before the processor is wrapped. For example to manipulate the
* {@link org.apache.camel.model.ProcessorDefinition definiton}.
*
* @param route the route context
* @param definition the processor definition
*/
void beforeWrap(Route route, NamedNode definition);
/**
* Wraps any applicable interceptors around the given processor.
*
* @param route the route context
* @param processor the processor to be intercepted
* @return either the original processor or a processor wrapped in one or more processors
*/
Processor wrap(Route route, Processor processor);
}
| Policy |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/web/NimbusJwkSetEndpointFilter.java | {
"start": 1852,
"end": 3872
} | class ____ extends OncePerRequestFilter {
/**
* The default endpoint {@code URI} for JWK Set requests.
*/
private static final String DEFAULT_JWK_SET_ENDPOINT_URI = "/oauth2/jwks";
private final JWKSource<SecurityContext> jwkSource;
private final JWKSelector jwkSelector;
private final RequestMatcher requestMatcher;
/**
* Constructs a {@code NimbusJwkSetEndpointFilter} using the provided parameters.
* @param jwkSource the {@code com.nimbusds.jose.jwk.source.JWKSource}
*/
public NimbusJwkSetEndpointFilter(JWKSource<SecurityContext> jwkSource) {
this(jwkSource, DEFAULT_JWK_SET_ENDPOINT_URI);
}
/**
* Constructs a {@code NimbusJwkSetEndpointFilter} using the provided parameters.
* @param jwkSource the {@code com.nimbusds.jose.jwk.source.JWKSource}
* @param jwkSetEndpointUri the endpoint {@code URI} for JWK Set requests
*/
public NimbusJwkSetEndpointFilter(JWKSource<SecurityContext> jwkSource, String jwkSetEndpointUri) {
Assert.notNull(jwkSource, "jwkSource cannot be null");
Assert.hasText(jwkSetEndpointUri, "jwkSetEndpointUri cannot be empty");
this.jwkSource = jwkSource;
this.jwkSelector = new JWKSelector(new JWKMatcher.Builder().build());
this.requestMatcher = PathPatternRequestMatcher.withDefaults().matcher(HttpMethod.GET, jwkSetEndpointUri);
}
@Override
protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain)
throws ServletException, IOException {
if (!this.requestMatcher.matches(request)) {
filterChain.doFilter(request, response);
return;
}
JWKSet jwkSet;
try {
jwkSet = new JWKSet(this.jwkSource.get(this.jwkSelector, null));
}
catch (Exception ex) {
throw new IllegalStateException("Failed to select the JWK(s) -> " + ex.getMessage(), ex);
}
response.setContentType(MediaType.APPLICATION_JSON_VALUE);
try (Writer writer = response.getWriter()) {
writer.write(jwkSet.toString()); // toString() excludes private keys
}
}
}
| NimbusJwkSetEndpointFilter |
java | netty__netty | transport-native-unix-common/src/main/java/io/netty/channel/unix/PreferredDirectByteBufAllocator.java | {
"start": 852,
"end": 3717
} | class ____ implements ByteBufAllocator {
private ByteBufAllocator allocator;
public void updateAllocator(ByteBufAllocator allocator) {
this.allocator = allocator;
}
@Override
public ByteBuf buffer() {
return allocator.directBuffer();
}
@Override
public ByteBuf buffer(int initialCapacity) {
return allocator.directBuffer(initialCapacity);
}
@Override
public ByteBuf buffer(int initialCapacity, int maxCapacity) {
return allocator.directBuffer(initialCapacity, maxCapacity);
}
@Override
public ByteBuf ioBuffer() {
return allocator.directBuffer();
}
@Override
public ByteBuf ioBuffer(int initialCapacity) {
return allocator.directBuffer(initialCapacity);
}
@Override
public ByteBuf ioBuffer(int initialCapacity, int maxCapacity) {
return allocator.directBuffer(initialCapacity, maxCapacity);
}
@Override
public ByteBuf heapBuffer() {
return allocator.heapBuffer();
}
@Override
public ByteBuf heapBuffer(int initialCapacity) {
return allocator.heapBuffer(initialCapacity);
}
@Override
public ByteBuf heapBuffer(int initialCapacity, int maxCapacity) {
return allocator.heapBuffer(initialCapacity, maxCapacity);
}
@Override
public ByteBuf directBuffer() {
return allocator.directBuffer();
}
@Override
public ByteBuf directBuffer(int initialCapacity) {
return allocator.directBuffer(initialCapacity);
}
@Override
public ByteBuf directBuffer(int initialCapacity, int maxCapacity) {
return allocator.directBuffer(initialCapacity, maxCapacity);
}
@Override
public CompositeByteBuf compositeBuffer() {
return allocator.compositeDirectBuffer();
}
@Override
public CompositeByteBuf compositeBuffer(int maxNumComponents) {
return allocator.compositeDirectBuffer(maxNumComponents);
}
@Override
public CompositeByteBuf compositeHeapBuffer() {
return allocator.compositeHeapBuffer();
}
@Override
public CompositeByteBuf compositeHeapBuffer(int maxNumComponents) {
return allocator.compositeHeapBuffer(maxNumComponents);
}
@Override
public CompositeByteBuf compositeDirectBuffer() {
return allocator.compositeDirectBuffer();
}
@Override
public CompositeByteBuf compositeDirectBuffer(int maxNumComponents) {
return allocator.compositeDirectBuffer(maxNumComponents);
}
@Override
public boolean isDirectBufferPooled() {
return allocator.isDirectBufferPooled();
}
@Override
public int calculateNewCapacity(int minNewCapacity, int maxCapacity) {
return allocator.calculateNewCapacity(minNewCapacity, maxCapacity);
}
}
| PreferredDirectByteBufAllocator |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.