language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/tree/expression/SqmNamedExpression.java | {
"start": 562,
"end": 2415
} | class ____<T> extends AbstractSqmExpression<T> {
private final SqmExpression<T> expression;
private final String name;
public SqmNamedExpression(SqmExpression<T> expression, String name) {
super( expression.getExpressible(), expression.nodeBuilder() );
this.expression = expression;
this.name = name;
}
@Override
public SqmNamedExpression<T> copy(SqmCopyContext context) {
final SqmNamedExpression<T> existing = context.getCopy( this );
if ( existing != null ) {
return existing;
}
final SqmNamedExpression<T> expression = context.registerCopy(
this,
new SqmNamedExpression<>( this.expression.copy( context ), name )
);
copyTo( expression, context );
return expression;
}
public SqmExpression<T> getExpression() {
return expression;
}
public String getName() {
return name;
}
@Override
public <X> X accept(SemanticQueryWalker<X> walker) {
return walker.visitNamedExpression( this );
}
@Override
public void appendHqlString(StringBuilder hql, SqmRenderContext context) {
expression.appendHqlString( hql, context );
hql.append( " as " );
hql.append( name );
}
@Override
public boolean equals(@Nullable Object object) {
return object instanceof SqmNamedExpression<?> that
&& Objects.equals( this.name, that.name )
&& this.expression.equals( that.expression );
}
@Override
public int hashCode() {
int result = expression.hashCode();
result = 31 * result + name.hashCode();
return result;
}
@Override
public boolean isCompatible(Object object) {
return object instanceof SqmNamedExpression<?> that
&& Objects.equals( this.name, that.name )
&& this.expression.isCompatible( that.expression );
}
@Override
public int cacheHashCode() {
int result = expression.cacheHashCode();
result = 31 * result + name.hashCode();
return result;
}
}
| SqmNamedExpression |
java | grpc__grpc-java | istio-interop-testing/src/generated/main/grpc/io/istio/test/EchoTestServiceGrpc.java | {
"start": 7086,
"end": 7185
} | class ____ the server implementation of the service EchoTestService.
*/
public static abstract | for |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/engine/bitmap_recycle/SizeStrategy.java | {
"start": 3205,
"end": 3481
} | class ____ extends BaseKeyPool<Key> {
public Key get(int size) {
Key result = super.get();
result.init(size);
return result;
}
@Override
protected Key create() {
return new Key(this);
}
}
@VisibleForTesting
static final | KeyPool |
java | google__auto | common/src/test/java/com/google/auto/common/BasicAnnotationProcessorTest.java | {
"start": 32272,
"end": 33609
} | class ____ {}");
OneOverloadedMethodAtATimeProcessor oneOverloadedMethodAtATimeProcessor =
new OneOverloadedMethodAtATimeProcessor();
Compilation compilation =
javac()
.withProcessors(oneOverloadedMethodAtATimeProcessor, new GeneratesCodeProcessor())
.compile(testFileObject, generatesCodeFileObject);
assertThat(compilation).succeeded();
assertThat(oneOverloadedMethodAtATimeProcessor.rejectedRounds).isEqualTo(1);
assertThat(oneOverloadedMethodAtATimeProcessor.processArguments())
.comparingElementsUsing(setMultimapValuesByString())
.containsExactly(
ImmutableSetMultimap.of(
OneMethodAtATime.class.getCanonicalName(),
"overloadedMethod(test.SomeGeneratedClass)",
OneMethodAtATime.class.getCanonicalName(),
"overloadedMethod(int)"),
ImmutableSetMultimap.of(
OneMethodAtATime.class.getCanonicalName(), "overloadedMethod(int)"))
.inOrder();
}
@Test
public void properlySkipsMissingAnnotations_generatesClass() {
JavaFileObject source =
JavaFileObjects.forSourceLines(
"test.ValidInRound2",
"package test;",
"",
"@" + AnAnnotation.class.getCanonicalName(),
"public | ClassB |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/inlineme/InlineMeData.java | {
"start": 7968,
"end": 9347
} | class ____ extends Pretty {
private final StringWriter writer;
GooglePrinter(StringWriter writer) {
super(writer, false /* don't dump extra comments */);
this.writer = writer;
}
@Override
public void visitTypeCast(JCTypeCast jcTypeCast) {
// TODO(glorioso): we *should* use package-private open precedence methods
try {
print("(");
printExpr(jcTypeCast.clazz);
print(") ");
printExpr(jcTypeCast.expr);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
@Override
public void visitLambda(JCLambda jcLambda) {
// We manually print lambdas to match our own style
try {
boolean paramsAreExplicit = jcLambda.paramKind == ParameterKind.EXPLICIT;
boolean paramsNeedParentheses = jcLambda.params.size() != 1 || paramsAreExplicit;
if (paramsNeedParentheses) {
print("(");
}
if (paramsAreExplicit) {
printExprs(jcLambda.params);
} else {
Joiner.on(", ").appendTo(writer, jcLambda.params.map(JCVariableDecl::getName));
}
if (paramsNeedParentheses) {
print(")");
}
print(" -> ");
printExpr(jcLambda.body);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}
private static | GooglePrinter |
java | redisson__redisson | redisson-spring-data/redisson-spring-data-20/src/main/java/org/redisson/spring/data/connection/RedissonReactiveClusterGeoCommands.java | {
"start": 837,
"end": 1078
} | class ____ extends RedissonReactiveGeoCommands implements ReactiveClusterGeoCommands {
RedissonReactiveClusterGeoCommands(CommandReactiveExecutor executorService) {
super(executorService);
}
}
| RedissonReactiveClusterGeoCommands |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/internal/model/source/selector/LifecycleOverloadDeduplicateSelector.java | {
"start": 1903,
"end": 5849
} | class ____ implements MethodSelector {
@Override
public <T extends Method> List<SelectedMethod<T>> getMatchingMethods(List<SelectedMethod<T>> methods,
SelectionContext context) {
if ( !context.getSelectionCriteria().isLifecycleCallbackRequired() || methods.size() <= 1 ) {
return methods;
}
Collection<List<SelectedMethod<T>>> methodSignatureGroups =
methods.stream()
.collect( Collectors.groupingBy(
LifecycleOverloadDeduplicateSelector::buildSignatureKey,
LinkedHashMap::new,
Collectors.toList()
) )
.values();
List<SelectedMethod<T>> deduplicatedMethods = new ArrayList<>( methods.size() );
for ( List<SelectedMethod<T>> signatureGroup : methodSignatureGroups ) {
if ( signatureGroup.size() == 1 ) {
deduplicatedMethods.add( signatureGroup.get( 0 ) );
continue;
}
SelectedMethod<T> bestInheritanceMethod = signatureGroup.get( 0 );
for ( int i = 1; i < signatureGroup.size(); i++ ) {
SelectedMethod<T> candidateMethod = signatureGroup.get( i );
if ( isInheritanceBetter( candidateMethod, bestInheritanceMethod ) ) {
bestInheritanceMethod = candidateMethod;
}
}
deduplicatedMethods.add( bestInheritanceMethod );
}
return deduplicatedMethods;
}
/**
* Builds a grouping key for a method based on its defining type,
* method name, and a detailed breakdown of each parameter binding.
* <p>
* The key consists of:
* <ul>
* <li>The type that defines the method</li>
* <li>The method name</li>
* <li>parameter bindings</li>
* </ul>
* This ensures that methods are grouped together only if all these aspects match,
* except for differences in type hierarchy, which are handled separately.
*/
private static <T extends Method> List<Object> buildSignatureKey(SelectedMethod<T> method) {
List<Object> key = new ArrayList<>();
key.add( method.getMethod().getDefiningType() );
key.add( method.getMethod().getName() );
for ( ParameterBinding binding : method.getParameterBindings() ) {
key.add( binding.getType() );
key.add( binding.getVariableName() );
}
return key;
}
/**
* Compare the inheritance distance of parameters between two methods to determine if candidateMethod is better.
* Compares parameters in order, returns as soon as a better one is found.
*/
private <T extends Method> boolean isInheritanceBetter(SelectedMethod<T> candidateMethod,
SelectedMethod<T> currentBestMethod) {
List<ParameterBinding> candidateBindings = candidateMethod.getParameterBindings();
List<ParameterBinding> bestBindings = currentBestMethod.getParameterBindings();
List<Parameter> candidateParams = candidateMethod.getMethod().getParameters();
List<Parameter> bestParams = currentBestMethod.getMethod().getParameters();
int paramCount = candidateBindings.size();
for ( int i = 0; i < paramCount; i++ ) {
int candidateDistance = candidateBindings.get( i )
.getType()
.distanceTo( candidateParams.get( i ).getType() );
int bestDistance = bestBindings.get( i ).getType().distanceTo( bestParams.get( i ).getType() );
if ( candidateDistance < bestDistance ) {
return true;
}
else if ( candidateDistance > bestDistance ) {
return false;
}
}
return false;
}
}
| LifecycleOverloadDeduplicateSelector |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/impl/Log4jProvider.java | {
"start": 2938,
"end": 5261
} | class ____.
* </p>
* @see #getThreadContextMap
*/
private static final String GARBAGE_FREE_CONTEXT_MAP = "GarbageFree";
// Property keys relevant for context map selection
private static final String DISABLE_CONTEXT_MAP = "log4j2.disableThreadContextMap";
private static final String DISABLE_THREAD_CONTEXT = "log4j2.disableThreadContext";
private static final String THREAD_CONTEXT_MAP_PROPERTY = "log4j2.threadContextMap";
private static final String GC_FREE_THREAD_CONTEXT_PROPERTY = "log4j2.garbagefree.threadContextMap";
// Name of the context map implementations
private static final String WEB_APP_CLASS_NAME = "org.apache.logging.log4j.spi.DefaultThreadContextMap";
private static final String GARBAGE_FREE_CLASS_NAME =
"org.apache.logging.log4j.core.context.internal.GarbageFreeSortedArrayThreadContextMap";
private static final Logger LOGGER = StatusLogger.getLogger();
private final Lazy<LoggerContextFactory> loggerContextFactoryLazy = Lazy.lazy(Log4jContextFactory::new);
private final Lazy<ThreadContextMap> threadContextMapLazy = Lazy.lazy(this::createThreadContextMap);
public Log4jProvider() {
super(10, CURRENT_VERSION, Log4jContextFactory.class);
}
@Override
public LoggerContextFactory getLoggerContextFactory() {
return loggerContextFactoryLazy.get();
}
@Override
public ThreadContextMap getThreadContextMapInstance() {
return threadContextMapLazy.get();
}
private ThreadContextMap createThreadContextMap() {
// Properties
final PropertiesUtil props = PropertiesUtil.getProperties();
if (props.getBooleanProperty(DISABLE_CONTEXT_MAP) || props.getBooleanProperty(DISABLE_THREAD_CONTEXT)) {
return NoOpThreadContextMap.INSTANCE;
}
String threadContextMapClass = props.getStringProperty(THREAD_CONTEXT_MAP_PROPERTY);
// Default based on properties
if (threadContextMapClass == null) {
threadContextMapClass = props.getBooleanProperty(GC_FREE_THREAD_CONTEXT_PROPERTY)
? GARBAGE_FREE_CONTEXT_MAP
: WEB_APP_CONTEXT_MAP;
}
/*
* The constructors are called explicitly to improve GraalVM support.
*
* The | name |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/spr10546/Spr10546Tests.java | {
"start": 3431,
"end": 3629
} | class ____ extends ParentWithParentConfig {}
}
@Test
void importChildConfigThenChildConfig() {
assertLoadsMyBean(ImportChildConfig.class,ChildConfig.class);
}
@Configuration
static | ChildConfig |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/support/AnnotationSupport.java | {
"start": 13789,
"end": 14085
} | interface ____ are declared to be of the specified {@code fieldType}
* and are annotated or <em>meta-annotated</em> with the specified
* {@code annotationType}.
*
* <p>Consult the Javadoc for {@link Class#getFields()} for details on
* inheritance and ordering.
*
* @param clazz the | that |
java | quarkusio__quarkus | extensions/grpc/runtime/src/main/java/io/grpc/override/ContextStorageOverride.java | {
"start": 220,
"end": 1886
} | class ____ extends Context.Storage {
private static final ThreadLocal<Context> fallback = new ThreadLocal<>();
private static final String GRPC_CONTEXT = "GRPC_CONTEXT";
@Override
public Context doAttach(Context toAttach) {
Context current = current();
io.vertx.core.Context dc = Vertx.currentContext();
if (dc != null && VertxContext.isDuplicatedContext(dc)) {
dc.putLocal(GRPC_CONTEXT, toAttach);
} else {
fallback.set(toAttach);
}
return current;
}
@Override
public void detach(Context context, Context toRestore) {
io.vertx.core.Context dc = Vertx.currentContext();
if (toRestore != Context.ROOT) {
if (dc != null && VertxContext.isDuplicatedContext(dc)) {
dc.putLocal(GRPC_CONTEXT, toRestore);
} else {
fallback.set(toRestore);
}
} else {
if (dc != null && VertxContext.isDuplicatedContext(dc)) {
// Do nothing - duplicated context are not shared.
} else {
fallback.set(null);
}
}
}
@Override
public Context current() {
if (VertxContext.isOnDuplicatedContext()) {
Context current = Vertx.currentContext().getLocal(GRPC_CONTEXT);
if (current == null) {
return Context.ROOT;
}
return current;
} else {
Context current = fallback.get();
if (current == null) {
return Context.ROOT;
}
return current;
}
}
}
| ContextStorageOverride |
java | apache__camel | components/camel-quartz/src/test/java/org/apache/camel/routepolicy/quartz/SpringCronScheduledRoutePolicyTest.java | {
"start": 1613,
"end": 1809
} | class ____ extends SpringCronScheduledRoutePolicyTest {
@Test
public void testScheduledStopRoutePolicy() throws Exception {
stopTest();
}
}
| Test2SpringCronScheduledRoutePolicyTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/codec/postings/Lucene90BlockTreeTermsWriter.java | {
"start": 16796,
"end": 18205
} | class ____ extends PendingEntry {
public final byte[] termBytes;
// stats + metadata
public final BlockTermState state;
PendingTerm(BytesRef term, BlockTermState state) {
super(true);
this.termBytes = new byte[term.length];
System.arraycopy(term.bytes, term.offset, termBytes, 0, term.length);
this.state = state;
}
@Override
public String toString() {
return "TERM: " + ToStringUtils.bytesRefToString(termBytes);
}
}
/**
* Encodes long value to variable length byte[], in MSB order. Use {@link
* FieldReader readMSBVLong} to decode.
*
* <p>Package private for testing
*/
static void writeMSBVLong(long l, DataOutput scratchBytes) throws IOException {
assert l >= 0;
// Keep zero bits on most significant byte to have more chance to get prefix bytes shared.
// e.g. we expect 0x7FFF stored as [0x81, 0xFF, 0x7F] but not [0xFF, 0xFF, 0x40]
final int bytesNeeded = (Long.SIZE - Long.numberOfLeadingZeros(l) - 1) / 7 + 1;
l <<= Long.SIZE - bytesNeeded * 7;
for (int i = 1; i < bytesNeeded; i++) {
scratchBytes.writeByte((byte) (((l >>> 57) & 0x7FL) | 0x80));
l = l << 7;
}
scratchBytes.writeByte((byte) (((l >>> 57) & 0x7FL)));
}
private final | PendingTerm |
java | elastic__elasticsearch | x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/ArrowFormatIT.java | {
"start": 1345,
"end": 8574
} | class ____ extends ESRestTestCase {
private static final RootAllocator ALLOCATOR = new RootAllocator();
@AfterClass
public static void afterClass() {
ALLOCATOR.close();
}
@ClassRule
public static ElasticsearchCluster cluster = Clusters.testCluster();
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
@Before
@After
public void assertRequestBreakerEmpty() throws Exception {
EsqlSpecTestCase.assertRequestBreakerEmpty();
}
@Before
public void initIndex() throws IOException {
Request request = new Request("PUT", "/arrow-test");
request.setJsonEntity("""
{
"mappings": {
"properties": {
"value": {
"type": "integer"
},
"description": {
"type": "keyword"
},
"ip": {
"type": "ip"
},
"v": {
"type": "version"
}
}
}
}
""");
assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode());
request = new Request("POST", "/_bulk?index=arrow-test&refresh=true");
// 4 documents with a null in the middle, leading to 3 ESQL pages and 3 Arrow batches
request.setJsonEntity("""
{"index": {"_id": "1"}}
{"value": 1, "ip": "192.168.0.1", "v": "1.0.1", "description": "number one"}
{"index": {"_id": "2"}}
{"value": 2, "ip": "192.168.0.2", "v": "1.0.2", "description": "number two"}
{"index": {"_id": "3"}}
{"value": 3, "ip": "2001:db8::1:0:0:1"}
{"index": {"_id": "4"}}
{"value": 4, "ip": "::afff:4567:890a", "v": "1.0.4", "description": "number four"}
""");
assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode());
}
private VectorSchemaRoot esql(String query) throws IOException {
Request request = new Request("POST", "/_query?format=arrow");
request.setJsonEntity(query);
Response response = client().performRequest(request);
assertEquals("application/vnd.apache.arrow.stream", response.getEntity().getContentType().getValue());
return readArrow(response.getEntity().getContent());
}
public void testInteger() throws Exception {
try (VectorSchemaRoot root = esql("""
{
"query": "FROM arrow-test | SORT value | LIMIT 100 | KEEP value"
}""")) {
List<Field> fields = root.getSchema().getFields();
assertEquals(1, fields.size());
assertValues(root);
}
}
public void testString() throws Exception {
try (VectorSchemaRoot root = esql("""
{
"query": "FROM arrow-test | SORT value | LIMIT 100 | KEEP description"
}""")) {
List<Field> fields = root.getSchema().getFields();
assertEquals(1, fields.size());
assertDescription(root);
}
}
public void testIp() throws Exception {
try (VectorSchemaRoot root = esql("""
{
"query": "FROM arrow-test | SORT value | LIMIT 100 | KEEP ip"
}""")) {
List<Field> fields = root.getSchema().getFields();
assertEquals(1, fields.size());
assertIp(root);
}
}
public void testVersion() throws Exception {
try (VectorSchemaRoot root = esql("""
{
"query": "FROM arrow-test | SORT value | LIMIT 100 | KEEP v"
}""")) {
List<Field> fields = root.getSchema().getFields();
assertEquals(1, fields.size());
assertVersion(root);
}
}
public void testEverything() throws Exception {
try (VectorSchemaRoot root = esql("""
{
"query": "FROM arrow-test | SORT value | LIMIT 100"
}""")) {
List<Field> fields = root.getSchema().getFields();
assertEquals(4, fields.size());
assertDescription(root);
assertValues(root);
assertIp(root);
assertVersion(root);
}
}
private VectorSchemaRoot readArrow(InputStream input) throws IOException {
try (
ArrowStreamReader reader = new ArrowStreamReader(input, ALLOCATOR);
VectorSchemaRoot readerRoot = reader.getVectorSchemaRoot();
) {
VectorSchemaRoot root = VectorSchemaRoot.create(readerRoot.getSchema(), ALLOCATOR);
root.allocateNew();
while (reader.loadNextBatch()) {
VectorSchemaRootAppender.append(root, readerRoot);
}
return root;
}
}
private void assertValues(VectorSchemaRoot root) {
var valueVector = (IntVector) root.getVector("value");
assertEquals(1, valueVector.get(0));
assertEquals(2, valueVector.get(1));
assertEquals(3, valueVector.get(2));
assertEquals(4, valueVector.get(3));
}
private void assertDescription(VectorSchemaRoot root) {
var descVector = (VarCharVector) root.getVector("description");
assertEquals("number one", descVector.getObject(0).toString());
assertEquals("number two", descVector.getObject(1).toString());
assertTrue(descVector.isNull(2));
assertEquals("number four", descVector.getObject(3).toString());
}
private void assertIp(VectorSchemaRoot root) {
// Test data that has been transformed during output (ipV4 truncated to 32bits)
var ipVector = (VarBinaryVector) root.getVector("ip");
assertArrayEquals(new byte[] { (byte) 192, (byte) 168, 0, 1 }, ipVector.getObject(0));
assertArrayEquals(new byte[] { (byte) 192, (byte) 168, 0, 2 }, ipVector.getObject(1));
assertArrayEquals(
new byte[] { 0x20, 0x01, 0x0d, (byte) 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01 },
ipVector.getObject(2)
);
assertArrayEquals(
new byte[] {
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
(byte) 0xaf,
(byte) 0xff,
0x45,
0x67,
(byte) 0x89,
0x0A },
ipVector.getObject(3)
);
}
private void assertVersion(VectorSchemaRoot root) {
// Version is binary-encoded in ESQL vectors, turned into a string in arrow output
var versionVector = (VarCharVector) root.getVector("v");
assertEquals("1.0.1", versionVector.getObject(0).toString());
assertEquals("1.0.2", versionVector.getObject(1).toString());
assertTrue(versionVector.isNull(2));
assertEquals("1.0.4", versionVector.getObject(3).toString());
}
}
| ArrowFormatIT |
java | grpc__grpc-java | netty/src/main/java/io/grpc/netty/CancelClientStreamCommand.java | {
"start": 816,
"end": 1410
} | class ____ extends WriteQueue.AbstractQueuedCommand {
private final NettyClientStream.TransportState stream;
@Nullable private final Status reason;
CancelClientStreamCommand(NettyClientStream.TransportState stream, Status reason) {
this.stream = Preconditions.checkNotNull(stream, "stream");
Preconditions.checkArgument(
reason == null || !reason.isOk(), "Should not cancel with OK status");
this.reason = reason;
}
NettyClientStream.TransportState stream() {
return stream;
}
@Nullable
Status reason() {
return reason;
}
}
| CancelClientStreamCommand |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/TestJsonSerialize2.java | {
"start": 561,
"end": 758
} | class ____ {
protected final String key;
public SimpleKey(String str) { key = str; }
@Override public String toString() { return "toString:"+key; }
}
static | SimpleKey |
java | quarkusio__quarkus | integration-tests/devtools/src/test/java/io/quarkus/devtools/commands/CreateProjectPlatformMetadataTest.java | {
"start": 993,
"end": 4031
} | class ____ extends PlatformAwareTestBase {
private static final ObjectMapper JSON_MAPPER = new ObjectMapper();
@Test
public void create() throws Exception {
final File file = new File("target/meta-rest");
SnapshotTesting.deleteTestDirectory(file);
createProject(BuildTool.MAVEN, file, "io.quarkus", "basic-rest", "1.0.0-SNAPSHOT");
assertThat(file.toPath().resolve("pom.xml"))
.exists()
.satisfies(checkContains("<id>redhat</id>"))
.satisfies(checkContains("<url>https://maven.repository.redhat.com</url>"))
.satisfies(checkContains("<snapshots>"))
.satisfies(checkContains("<releases>"))
.satisfies(checkContains("<repositories>"))
.satisfies(checkContains("<pluginRepositories>"));
}
@Test
public void createGradle() throws Exception {
final File file = new File("target/meta-rest-gradle");
SnapshotTesting.deleteTestDirectory(file);
createProject(BuildTool.GRADLE, file, "io.quarkus", "basic-rest", "1.0.0-SNAPSHOT");
assertThat(file.toPath().resolve("build.gradle"))
.exists()
.satisfies(checkContains("maven { url \"https://maven.repository.redhat.com\" }"));
}
@Test
public void createGradleKotlin() throws Exception {
final File file = new File("target/meta-rest-gradle-kts");
SnapshotTesting.deleteTestDirectory(file);
createProject(BuildTool.GRADLE_KOTLIN_DSL, file, "io.quarkus", "basic-rest", "1.0.0-SNAPSHOT");
assertThat(file.toPath().resolve("build.gradle.kts"))
.exists()
.satisfies(checkContains("maven { url = uri(\"https://maven.repository.redhat.com\") }"));
}
private Consumer<Path> checkContains(String s) {
return (p) -> assertThat(Files.contentOf(p.toFile(), StandardCharsets.UTF_8)).contains(s);
}
private Map<String, Object> getMetadata() throws java.io.IOException {
return JSON_MAPPER.reader().readValue(CreateProjectPlatformMetadataTest.class.getResource("/platform-metadata.json"),
Map.class);
}
private void createProject(BuildTool buildTool, File file, String groupId, String artifactId, String version)
throws QuarkusCommandException, IOException {
final ExtensionCatalog platformDescriptor = getExtensionsCatalog();
final ExtensionCatalog spy = spy(platformDescriptor);
when(spy.getMetadata()).thenReturn(getMetadata());
QuarkusProject project = QuarkusProjectHelper.getProject(file.toPath(), spy, buildTool);
final QuarkusCommandOutcome result = new CreateProject(project)
.groupId(groupId)
.artifactId(artifactId)
.version(version)
.quarkusPluginVersion(buildTool == BuildTool.MAVEN ? "2.3.5" : "2.3.5-gradle")
.execute();
assertTrue(result.isSuccess());
}
}
| CreateProjectPlatformMetadataTest |
java | google__guava | guava-tests/test/com/google/common/io/SourceSinkFactories.java | {
"start": 17134,
"end": 18344
} | class ____ extends Jdk7FileFactory implements CharSinkFactory {
private final String initialString;
private PathCharSinkFactory(@Nullable String initialString) {
this.initialString = initialString;
}
@Override
public CharSink createSink() throws IOException {
Path file = createFile();
if (initialString != null) {
try (Writer writer = java.nio.file.Files.newBufferedWriter(file, UTF_8)) {
writer.write(initialString);
}
return MoreFiles.asCharSink(file, UTF_8, StandardOpenOption.APPEND);
}
return MoreFiles.asCharSink(file, UTF_8);
}
@Override
public String getExpected(String string) {
checkNotNull(string);
return initialString == null ? string : initialString + string;
}
@Override
public String getSinkContents() throws IOException {
Path file = getPath();
try (Reader reader = java.nio.file.Files.newBufferedReader(file, UTF_8)) {
StringBuilder builder = new StringBuilder();
for (int c = reader.read(); c != -1; c = reader.read()) {
builder.append((char) c);
}
return builder.toString();
}
}
}
}
| PathCharSinkFactory |
java | apache__camel | components/camel-aws/camel-aws2-sqs/src/test/java/org/apache/camel/component/aws2/sqs/integration/SqsProducerDeleteMessageLocalstackIT.java | {
"start": 1195,
"end": 2517
} | class ____ extends Aws2SQSBaseTest {
@EndpointInject("direct:start")
private ProducerTemplate template;
@EndpointInject("mock:result")
private MockEndpoint result;
@Test
public void sendInOnly() throws Exception {
result.expectedMessageCount(1);
Exchange exchange = template.send("direct:start", ExchangePattern.InOnly, new Processor() {
public void process(Exchange exchange) {
exchange.getIn().setBody("Test sqs");
}
});
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").startupOrder(2).toF("aws2-sqs://%s?autoCreateQueue=true", sharedNameGenerator.getName());
fromF("aws2-sqs://%s?deleteAfterRead=false&autoCreateQueue=true", sharedNameGenerator.getName())
.startupOrder(1).log("${body}")
.toF("aws2-sqs://%s?operation=deleteMessage", sharedNameGenerator.getName())
.log("${body}")
.log("${header.CamelAwsSqsReceiptHandle}").to("mock:result");
}
};
}
}
| SqsProducerDeleteMessageLocalstackIT |
java | google__dagger | javatests/dagger/internal/codegen/DaggerSuperficialValidationTest.java | {
"start": 23267,
"end": 23428
} | class ____<T> {}",
"}"),
CompilerTests.kotlinSource(
"test.Outer.kt",
"package test",
"",
" | Parent |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/support/xml/SqlXmlHandler.java | {
"start": 6330,
"end": 7067
} | class ____ be used
* @return the content as character stream, or {@code null} in case of SQL NULL
* @throws SQLException if thrown by JDBC methods
* @see java.sql.ResultSet#getSQLXML
* @see java.sql.SQLXML#getSource
*/
@Nullable Source getXmlAsSource(ResultSet rs, String columnName, @Nullable Class<? extends Source> sourceClass) throws SQLException;
/**
* Retrieve the given column as Source implemented using the specified source class
* from the given ResultSet.
* <p>Might work with {@code SQLXML} or database-specific classes depending
* on the database and driver.
* @param rs the ResultSet to retrieve the content from
* @param columnIndex the column index to use
* @param sourceClass the implementation | to |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/requests/ListGroupsResponse.java | {
"start": 1083,
"end": 2033
} | class ____ extends AbstractResponse {
private final ListGroupsResponseData data;
public ListGroupsResponse(ListGroupsResponseData data) {
super(ApiKeys.LIST_GROUPS);
this.data = data;
}
@Override
public ListGroupsResponseData data() {
return data;
}
@Override
public int throttleTimeMs() {
return data.throttleTimeMs();
}
@Override
public void maybeSetThrottleTimeMs(int throttleTimeMs) {
data.setThrottleTimeMs(throttleTimeMs);
}
@Override
public Map<Errors, Integer> errorCounts() {
return errorCounts(Errors.forCode(data.errorCode()));
}
public static ListGroupsResponse parse(Readable readable, short version) {
return new ListGroupsResponse(new ListGroupsResponseData(readable, version));
}
@Override
public boolean shouldClientThrottle(short version) {
return version >= 2;
}
}
| ListGroupsResponse |
java | apache__camel | core/camel-main/src/test/java/org/apache/camel/main/scan3/Foo3RouteBuilder.java | {
"start": 895,
"end": 1100
} | class ____ extends RouteBuilder {
@Override
public void configure() {
from("direct:start3").routeId("foo2") // duplicate on purpose
.process("hello3");
}
}
| Foo3RouteBuilder |
java | elastic__elasticsearch | x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/frozen/FrozenIndexShardTests.java | {
"start": 714,
"end": 2108
} | class ____ extends IndexShardTestCase {
/**
* Make sure we can recover from a frozen engine
*/
public void testRecoverFromFrozenPrimary() throws IOException {
IndexShard indexShard = newStartedShard(true);
indexDoc(indexShard, "_doc", "1");
indexDoc(indexShard, "_doc", "2");
indexDoc(indexShard, "_doc", "3");
flushAndCloseShardNoCheck(indexShard);
final ShardRouting shardRouting = indexShard.routingEntry();
IndexShard frozenShard = reinitShard(
indexShard,
ShardRoutingHelper.initWithSameId(
shardRouting,
shardRouting.primary() ? RecoverySource.ExistingStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE
),
indexShard.indexSettings().getIndexMetadata(),
config -> new FrozenEngine(config, true, randomBoolean())
);
recoverShardFromStore(frozenShard);
assertThat(frozenShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(frozenShard.seqNoStats().getMaxSeqNo()));
assertDocCount(frozenShard, 3);
IndexShard replica = newShard(false, Settings.EMPTY, config -> new FrozenEngine(config, true, randomBoolean()));
recoverReplica(replica, frozenShard, true);
assertDocCount(replica, 3);
closeShards(frozenShard, replica);
}
}
| FrozenIndexShardTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/TextParams.java | {
"start": 1233,
"end": 10813
} | class ____ {
public final Parameter<NamedAnalyzer> indexAnalyzer;
public final Parameter<NamedAnalyzer> searchAnalyzer;
public final Parameter<NamedAnalyzer> searchQuoteAnalyzer;
public final Parameter<Integer> positionIncrementGap;
public final IndexAnalyzers indexAnalyzers;
public Analyzers(
IndexAnalyzers indexAnalyzers,
Function<FieldMapper, NamedAnalyzer> analyzerInitFunction,
Function<FieldMapper, Integer> positionGapInitFunction,
IndexVersion indexCreatedVersion
) {
this.indexAnalyzer = Parameter.analyzerParam(
"analyzer",
indexCreatedVersion.isLegacyIndexVersion(),
analyzerInitFunction,
indexAnalyzers::getDefaultIndexAnalyzer,
indexCreatedVersion
)
.setSerializerCheck(
(id, ic, a) -> id
|| ic
|| Objects.equals(a, getSearchAnalyzer()) == false
|| Objects.equals(a, getSearchQuoteAnalyzer()) == false
)
.addValidator(a -> a.checkAllowedInMode(AnalysisMode.INDEX_TIME));
this.searchAnalyzer = Parameter.analyzerParam(
"search_analyzer",
true,
m -> m.fieldType().getTextSearchInfo().searchAnalyzer(),
() -> {
if (indexAnalyzer.isConfigured() == false) {
NamedAnalyzer defaultAnalyzer = indexAnalyzers.get(AnalysisRegistry.DEFAULT_SEARCH_ANALYZER_NAME);
if (defaultAnalyzer != null) {
return defaultAnalyzer;
}
}
return indexAnalyzer.get();
},
indexCreatedVersion
)
.setSerializerCheck((id, ic, a) -> id || ic || Objects.equals(a, getSearchQuoteAnalyzer()) == false)
.addValidator(a -> a.checkAllowedInMode(AnalysisMode.SEARCH_TIME));
this.searchQuoteAnalyzer = Parameter.analyzerParam(
"search_quote_analyzer",
true,
m -> m.fieldType().getTextSearchInfo().searchQuoteAnalyzer(),
() -> {
if (searchAnalyzer.isConfigured() == false && indexAnalyzer.isConfigured() == false) {
NamedAnalyzer defaultAnalyzer = indexAnalyzers.get(AnalysisRegistry.DEFAULT_SEARCH_QUOTED_ANALYZER_NAME);
if (defaultAnalyzer != null) {
return defaultAnalyzer;
}
}
return searchAnalyzer.get();
},
indexCreatedVersion
).addValidator(a -> a.checkAllowedInMode(AnalysisMode.SEARCH_TIME));
this.positionIncrementGap = Parameter.intParam(
"position_increment_gap",
false,
positionGapInitFunction,
TextFieldMapper.Defaults.POSITION_INCREMENT_GAP
).addValidator(v -> {
if (v < 0) {
throw new MapperParsingException("[position_increment_gap] must be positive, got [" + v + "]");
}
});
this.indexAnalyzers = indexAnalyzers;
}
/**
 * Returns the configured index-time analyzer, wrapped so that the
 * {@code position_increment_gap} setting (if configured) is applied.
 */
public NamedAnalyzer getIndexAnalyzer() {
    NamedAnalyzer configured = indexAnalyzer.getValue();
    return wrapAnalyzer(configured);
}
/**
 * Returns the configured search-time analyzer, wrapped so that the
 * {@code position_increment_gap} setting (if configured) is applied.
 */
public NamedAnalyzer getSearchAnalyzer() {
    NamedAnalyzer configured = searchAnalyzer.getValue();
    return wrapAnalyzer(configured);
}
/**
 * Returns the configured phrase ("quote") search analyzer, wrapped so that
 * the {@code position_increment_gap} setting (if configured) is applied.
 */
public NamedAnalyzer getSearchQuoteAnalyzer() {
    NamedAnalyzer configured = searchQuoteAnalyzer.getValue();
    return wrapAnalyzer(configured);
}
/**
 * Applies an explicitly configured {@code position_increment_gap} to the
 * given analyzer; returns the analyzer unchanged when the gap parameter
 * was left at its default.
 */
private NamedAnalyzer wrapAnalyzer(NamedAnalyzer a) {
    return positionIncrementGap.isConfigured()
        ? new NamedAnalyzer(a, positionIncrementGap.get())
        : a;
}
}
/**
 * Builds the {@code similarity} mapping parameter. The value is resolved
 * through {@code TypeParsers.resolveSimilarity}, serialized by its provider
 * name, and {@code null} is an accepted explicit value.
 *
 * @param init extracts the current similarity from an existing mapper when merging
 */
public static Parameter<SimilarityProvider> similarity(Function<FieldMapper, SimilarityProvider> init) {
    Parameter<SimilarityProvider> parameter = new Parameter<>(
        "similarity",
        false,
        () -> null,
        (n, c, o) -> TypeParsers.resolveSimilarity(c, n, o),
        init,
        (b, f, v) -> b.field(f, v == null ? null : v.name()),
        v -> v == null ? null : v.name()
    );
    return parameter.acceptsNull();
}
/**
 * Builds the {@code index_options} parameter for keyword-like fields.
 * Only {@code docs} and {@code freqs} are accepted; the default is
 * {@code docs}. Any other value fails validation.
 */
public static Parameter<String> keywordIndexOptions(Function<FieldMapper, String> initializer) {
    return Parameter.stringParam("index_options", false, initializer, "docs").addValidator(v -> {
        // v.equals(...) deliberately mirrors the NPE behavior of a string switch on null
        boolean accepted = v.equals("docs") || v.equals("freqs");
        if (accepted == false) {
            throw new MapperParsingException(
                "Unknown value [" + v + "] for field [index_options] - accepted values are [docs, freqs]"
            );
        }
    });
}
/**
 * Builds the {@code index_options} parameter for text fields. Accepted
 * values are {@code positions} (the default), {@code docs}, {@code freqs}
 * and {@code offsets}; anything else fails validation.
 */
public static Parameter<String> textIndexOptions(Function<FieldMapper, String> initializer) {
    return Parameter.stringParam("index_options", false, initializer, "positions").addValidator(v -> {
        // v.equals(...) deliberately mirrors the NPE behavior of a string switch on null
        boolean accepted = v.equals("positions") || v.equals("docs") || v.equals("freqs") || v.equals("offsets");
        if (accepted == false) {
            throw new MapperParsingException(
                "Unknown value [" + v + "] for field [index_options] - accepted values are [positions, docs, freqs, offsets]"
            );
        }
    });
}
/**
 * Assembles a Lucene {@link FieldType} from lazily supplied mapping
 * parameters: stored flag, index options (gated on {@code indexed}),
 * norms, and term-vector configuration. The result is always tokenized.
 * Suppliers are invoked in the same order as the fields are configured.
 */
public static FieldType buildFieldType(
    Supplier<Boolean> indexed,
    Supplier<Boolean> stored,
    Supplier<String> indexOptions,
    Supplier<Boolean> norms,
    Supplier<String> termVectors
) {
    final FieldType fieldType = new FieldType();
    fieldType.setStored(stored.get());
    fieldType.setTokenized(true);
    fieldType.setIndexOptions(toIndexOptions(indexed.get(), indexOptions.get()));
    // norms=true means "keep norms", which Lucene expresses as omitNorms=false
    fieldType.setOmitNorms(!norms.get());
    setTermVectorParams(termVectors.get(), fieldType);
    return fieldType;
}
/**
 * Maps the mapping-level {@code index_options} string to the corresponding
 * Lucene {@link IndexOptions}. A non-indexed field always yields
 * {@link IndexOptions#NONE}; an unrecognized string is rejected.
 *
 * @throws IllegalArgumentException if {@code indexOptions} is not a known value
 */
public static IndexOptions toIndexOptions(boolean indexed, String indexOptions) {
    if (!indexed) {
        return IndexOptions.NONE;
    }
    return switch (indexOptions) {
        case "docs" -> IndexOptions.DOCS;
        case "freqs" -> IndexOptions.DOCS_AND_FREQS;
        case "positions" -> IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
        case "offsets" -> IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
        default -> throw new IllegalArgumentException("Unknown [index_options] value: [" + indexOptions + "]");
    };
}
/**
 * Builds the {@code term_vector} parameter. The default is {@code no};
 * the validator accepts the fixed set of term-vector configurations and
 * rejects everything else.
 */
public static Parameter<String> termVectors(Function<FieldMapper, String> initializer) {
    return Parameter.stringParam("term_vector", false, initializer, "no").addValidator(v -> {
        switch (v) {
            case "no", "yes", "with_positions", "with_offsets", "with_positions_offsets",
                "with_positions_payloads", "with_positions_offsets_payloads" -> {
                // accepted value - nothing to do
            }
            default -> throw new MapperParsingException(
                "Unknown value ["
                    + v
                    + "] for field [term_vector] - accepted values are [no, yes, with_positions, with_offsets, "
                    + "with_positions_offsets, with_positions_payloads, with_positions_offsets_payloads]"
            );
        }
    });
}
/**
 * Translates a {@code term_vector} configuration string into the individual
 * store-term-vector flags on the given {@link FieldType}. Each richer
 * configuration enables term vectors plus the corresponding positions,
 * offsets and/or payloads flags.
 *
 * @throws IllegalArgumentException if {@code configuration} is not a known value
 */
public static void setTermVectorParams(String configuration, FieldType fieldType) {
    switch (configuration) {
        case "no" -> fieldType.setStoreTermVectors(false);
        case "yes" -> fieldType.setStoreTermVectors(true);
        case "with_positions" -> {
            fieldType.setStoreTermVectors(true);
            fieldType.setStoreTermVectorPositions(true);
        }
        case "with_offsets" -> {
            fieldType.setStoreTermVectors(true);
            fieldType.setStoreTermVectorOffsets(true);
        }
        case "with_positions_offsets" -> {
            fieldType.setStoreTermVectors(true);
            fieldType.setStoreTermVectorPositions(true);
            fieldType.setStoreTermVectorOffsets(true);
        }
        case "with_positions_payloads" -> {
            fieldType.setStoreTermVectors(true);
            fieldType.setStoreTermVectorPositions(true);
            fieldType.setStoreTermVectorPayloads(true);
        }
        case "with_positions_offsets_payloads" -> {
            fieldType.setStoreTermVectors(true);
            fieldType.setStoreTermVectorPositions(true);
            fieldType.setStoreTermVectorOffsets(true);
            fieldType.setStoreTermVectorPayloads(true);
        }
        default -> throw new IllegalArgumentException("Unknown [term_vector] setting: [" + configuration + "]");
    }
}
}
| Analyzers |
java | apache__kafka | jmh-benchmarks/src/main/java/org/apache/kafka/jmh/coordinator/GroupCoordinatorShardLoadingBenchmark.java | {
"start": 4816,
"end": 7128
} | class ____ extends MockLog {
private final int batchCount;
private final SimpleRecord[] batch;
public OffsetCommitLog(TopicPartition tp, int batchSize, int batchCount) throws IOException {
super(tp);
this.batchCount = batchCount;
List<SimpleRecord> batchRecords = new ArrayList<>();
for (int i = 0; i < batchSize; i++) {
String topic = "topic-" + i;
int partition = 0;
OffsetAndMetadata offsetAndMetadata = new OffsetAndMetadata(
0L,
OptionalInt.of(0),
OffsetAndMetadata.NO_METADATA,
0L,
OptionalLong.empty(),
Uuid.randomUuid()
);
CoordinatorRecord coordinatorRecord = GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
GROUP_ID, topic, partition, offsetAndMetadata
);
byte[] keyBytes = new GroupCoordinatorRecordSerde().serializeKey(coordinatorRecord);
byte[] valueBytes = new GroupCoordinatorRecordSerde().serializeValue(coordinatorRecord);
SimpleRecord simpleRecord = new SimpleRecord(keyBytes, valueBytes);
batchRecords.add(simpleRecord);
}
this.batch = batchRecords.toArray(new SimpleRecord[0]);
}
@Override
public long logStartOffset() {
return 0L;
}
@Override
public long logEndOffset() {
if (batch == null) {
return 0L;
}
return (long) batchCount * (long) batch.length;
}
@Override
public FetchDataInfo read(long startOffset, int maxLength, FetchIsolation isolation, boolean minOneMessage) {
if (startOffset < 0 || startOffset >= logEndOffset()) {
return new FetchDataInfo(new LogOffsetMetadata(startOffset), MemoryRecords.EMPTY);
}
MemoryRecords records = MemoryRecords.withRecords(
startOffset,
Compression.NONE,
batch
);
return new FetchDataInfo(new LogOffsetMetadata(startOffset), records);
}
}
static | OffsetCommitLog |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/requests/LeaveGroupRequest.java | {
"start": 1442,
"end": 5046
} | class ____ extends AbstractRequest.Builder<LeaveGroupRequest> {
private final String groupId;
private final List<MemberIdentity> members;
public Builder(String groupId, List<MemberIdentity> members) {
this(groupId, members, ApiKeys.LEAVE_GROUP.oldestVersion(), ApiKeys.LEAVE_GROUP.latestVersion());
}
Builder(String groupId, List<MemberIdentity> members, short oldestVersion, short latestVersion) {
super(ApiKeys.LEAVE_GROUP, oldestVersion, latestVersion);
this.groupId = groupId;
this.members = members;
if (members.isEmpty()) {
throw new IllegalArgumentException("leaving members should not be empty");
}
}
/**
* Based on the request version to choose fields.
*/
@Override
public LeaveGroupRequest build(short version) {
final LeaveGroupRequestData data;
// Starting from version 3, all the leave group request will be in batch.
if (version >= 3) {
data = new LeaveGroupRequestData()
.setGroupId(groupId)
.setMembers(members);
} else {
if (members.size() != 1) {
throw new UnsupportedVersionException("Version " + version + " leave group request only " +
"supports single member instance than " + members.size() + " members");
}
data = new LeaveGroupRequestData()
.setGroupId(groupId)
.setMemberId(members.get(0).memberId());
}
return new LeaveGroupRequest(data, version);
}
@Override
public String toString() {
return "(type=LeaveGroupRequest" +
", groupId=" + groupId +
", members=" + MessageUtil.deepToString(members.iterator()) +
")";
}
}
private final LeaveGroupRequestData data;
private LeaveGroupRequest(LeaveGroupRequestData data, short version) {
super(ApiKeys.LEAVE_GROUP, version);
this.data = data;
}
@Override
public LeaveGroupRequestData data() {
return data;
}
public LeaveGroupRequestData normalizedData() {
if (version() >= 3) {
return data;
} else {
return new LeaveGroupRequestData()
.setGroupId(data.groupId())
.setMembers(Collections.singletonList(
new MemberIdentity().setMemberId(data.memberId())));
}
}
public List<MemberIdentity> members() {
// Before version 3, leave group request is still in single mode
return version() <= 2 ? Collections.singletonList(
new MemberIdentity()
.setMemberId(data.memberId())) : data.members();
}
@Override
public AbstractResponse getErrorResponse(int throttleTimeMs, Throwable e) {
LeaveGroupResponseData responseData = new LeaveGroupResponseData()
.setErrorCode(Errors.forException(e).code());
if (version() >= 1) {
responseData.setThrottleTimeMs(throttleTimeMs);
}
return new LeaveGroupResponse(responseData);
}
public static LeaveGroupRequest parse(Readable readable, short version) {
return new LeaveGroupRequest(new LeaveGroupRequestData(readable, version), version);
}
}
| Builder |
java | google__dagger | javatests/dagger/internal/codegen/ComponentProcessorTest.java | {
"start": 7298,
"end": 8224
} | interface ____ {",
" SomeInjectableType someInjectableType();",
" Lazy<SomeInjectableType> lazySomeInjectableType();",
" Provider<SomeInjectableType> someInjectableTypeProvider();",
"}");
CompilerTests.daggerCompiler(injectableTypeFile, componentFile)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(0);
subject.generatedSource(goldenFileRule.goldenSource("test/DaggerSimpleComponent"));
});
}
@Test
public void componentWithScope() throws Exception {
Source injectableTypeFile = CompilerTests.javaSource("test.SomeInjectableType",
"package test;",
"",
"import javax.inject.Inject;",
"import javax.inject.Singleton;",
"",
"@Singleton",
"final | SimpleComponent |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/exception/ExceptionUtils.java | {
"start": 38484,
"end": 38900
} | class ____ not match - see
* {@link #throwableOfType(Throwable, Class)} for the opposite.
*
* <p>A {@code null} throwable returns {@code null}.
* A {@code null} type returns {@code null}.
* No match in the chain returns {@code null}.</p>
*
* @param <T> the type of Throwable you are searching.
* @param throwable the throwable to inspect, may be null.
* @param clazz the | do |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1600/Issue1662_1.java | {
"start": 550,
"end": 838
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
String json = "{\"value\":123}";
Model model = JSON.parseObject(json, Model.class);
assertEquals("{\"value\":\"12300元\"}",JSON.toJSONString(model));
}
public static | Issue1662_1 |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_2400/Issue2447.java | {
"start": 1164,
"end": 1348
} | class ____ {
public int id;
@JSONField(unwrapped = true)
public Map<String, Object> properties = new LinkedHashMap<String, Object>();
}
public static | VO2 |
java | quarkusio__quarkus | extensions/oidc-client-filter/deployment/src/test/java/io/quarkus/oidc/client/filter/ExtendedOidcClientRequestFilterResource.java | {
"start": 233,
"end": 928
} | class ____ {
@Inject
@RestClient
ProtectedResourceServiceConfigPropertyOidcClient protectedResourceServiceConfigPropertyOidcClient;
@Inject
@RestClient
ProtectedResourceServiceExtendedOidcClientRequestFilter protectedResourceServiceExtendedOidcClientRequestFilter;
@GET
@Path("/annotation/user-name")
public String userName() {
return protectedResourceServiceConfigPropertyOidcClient.getUserName();
}
@GET
@Path("/extended-provider/user-name")
public String extendedOidcClientRequestFilterUserName() {
return protectedResourceServiceExtendedOidcClientRequestFilter.getUserName();
}
}
| ExtendedOidcClientRequestFilterResource |
java | quarkusio__quarkus | extensions/smallrye-graphql-client/runtime/src/main/java/io/quarkus/smallrye/graphql/client/runtime/GraphQLClientsConfig.java | {
"start": 860,
"end": 1091
} | class ____ that annotation, in which case
* it is possible to use the short name, as well as fully qualified.
*/
@ConfigDocMapKey("config-key")
@WithParentName
Map<String, GraphQLClientConfig> clients();
}
| bearing |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_58_alias_dot.java | {
"start": 924,
"end": 5784
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "SELECT m.*, m.icon AS micon, md.uid as md.uid, md.lastmsg,md.postnum,md.rvrc,md.money,md.credit,md.currency,md.lastvisit,md.thisvisit,md.onlinetime,md.lastpost,md.todaypost, md.monthpost,md.onlineip,md.uploadtime,md.uploadnum,md.starttime,md.pwdctime,md.monoltime,md.digests,md.f_num,md.creditpop, md.jobnum,md.lastgrab,md.follows,md.fans,md.newfans,md.newreferto,md.newcomment,md.postcheck,md.punch, mi.customdata " +
"FROM pw_members m LEFT JOIN pw_memberdata md ON m.uid=md.uid LEFT JOIN pw_memberinfo mi ON mi.uid=m.uid WHERE m.uid IN (?)";
System.out.println(sql);
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL, true);
SQLStatement stmt = statementList.get(0);
assertEquals(1, statementList.size());
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(JdbcConstants.MYSQL);
stmt.accept(visitor);
// System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
// assertEquals(1, visitor.getTables().size());
// assertEquals(1, visitor.getColumns().size());
// assertEquals(0, visitor.getConditions().size());
// assertEquals(0, visitor.getOrderByColumns().size());
{
String output = SQLUtils.toMySqlString(stmt);
assertEquals("SELECT m.*, m.icon AS micon, md.uid AS `md.uid`, md.lastmsg, md.postnum\n" +
"\t, md.rvrc, md.money, md.credit, md.currency, md.lastvisit\n" +
"\t, md.thisvisit, md.onlinetime, md.lastpost, md.todaypost, md.monthpost\n" +
"\t, md.onlineip, md.uploadtime, md.uploadnum, md.starttime, md.pwdctime\n" +
"\t, md.monoltime, md.digests, md.f_num, md.creditpop, md.jobnum\n" +
"\t, md.lastgrab, md.follows, md.fans, md.newfans, md.newreferto\n" +
"\t, md.newcomment, md.postcheck, md.punch, mi.customdata\n" +
"FROM pw_members m\n" +
"\tLEFT JOIN pw_memberdata md ON m.uid = md.uid\n" +
"\tLEFT JOIN pw_memberinfo mi ON mi.uid = m.uid\n" +
"WHERE m.uid IN (?)", //
output);
}
{
String output = SQLUtils.toMySqlString(stmt, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION);
assertEquals("select m.*, m.icon as micon, md.uid as `md.uid`, md.lastmsg, md.postnum\n" +
"\t, md.rvrc, md.money, md.credit, md.currency, md.lastvisit\n" +
"\t, md.thisvisit, md.onlinetime, md.lastpost, md.todaypost, md.monthpost\n" +
"\t, md.onlineip, md.uploadtime, md.uploadnum, md.starttime, md.pwdctime\n" +
"\t, md.monoltime, md.digests, md.f_num, md.creditpop, md.jobnum\n" +
"\t, md.lastgrab, md.follows, md.fans, md.newfans, md.newreferto\n" +
"\t, md.newcomment, md.postcheck, md.punch, mi.customdata\n" +
"from pw_members m\n" +
"\tleft join pw_memberdata md on m.uid = md.uid\n" +
"\tleft join pw_memberinfo mi on mi.uid = m.uid\n" +
"where m.uid in (?)", //
output);
}
{
String output = SQLUtils.toMySqlString(stmt, new SQLUtils.FormatOption(true, true, true));
assertEquals("SELECT m.*, m.icon AS micon, md.uid AS `md.uid`, md.lastmsg, md.postnum\n" +
"\t, md.rvrc, md.money, md.credit, md.currency, md.lastvisit\n" +
"\t, md.thisvisit, md.onlinetime, md.lastpost, md.todaypost, md.monthpost\n" +
"\t, md.onlineip, md.uploadtime, md.uploadnum, md.starttime, md.pwdctime\n" +
"\t, md.monoltime, md.digests, md.f_num, md.creditpop, md.jobnum\n" +
"\t, md.lastgrab, md.follows, md.fans, md.newfans, md.newreferto\n" +
"\t, md.newcomment, md.postcheck, md.punch, mi.customdata\n" +
"FROM pw_members m\n" +
"\tLEFT JOIN pw_memberdata md ON m.uid = md.uid\n" +
"\tLEFT JOIN pw_memberinfo mi ON mi.uid = m.uid\n" +
"WHERE m.uid IN (?)", //
output);
}
}
}
| MySqlSelectTest_58_alias_dot |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/Dialect.java | {
"start": 51290,
"end": 68945
} | enum ____ as String to their string value
functionRegistry.register( "string",
new StringFunction( typeConfiguration ) );
//format() function for datetimes, emulated on many databases using the
//Oracle-style to_char() function, and on others using their native
//formatting functions
functionFactory.format_toChar();
//timestampadd()/timestampdiff() delegated back to the Dialect itself
//since there is a great variety of different ways to emulate them
//by default, we don't allow plain parameters for the timestamp argument as most database don't support this
functionFactory.timestampaddAndDiff( this, SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
functionRegistry.registerAlternateKey( "dateadd", "timestampadd" );
functionRegistry.registerAlternateKey( "datediff", "timestampdiff" );
//ANSI SQL (and JPA) current date/time/timestamp functions, supported
//natively on almost every database, delegated back to the Dialect
functionRegistry.register(
"current_date",
new CurrentFunction(
"current_date",
currentDate(),
dateType
)
);
functionRegistry.register(
"current_time",
new CurrentFunction(
"current_time",
currentTime(),
timeType
)
);
functionRegistry.register(
"current_timestamp",
new CurrentFunction(
"current_timestamp",
currentTimestamp(),
timestampType
)
);
functionRegistry.registerAlternateKey( "current date", "current_date" );
functionRegistry.registerAlternateKey( "current time", "current_time" );
functionRegistry.registerAlternateKey( "current timestamp", "current_timestamp" );
//HQL current instant/date/time/datetime functions, delegated back to the Dialect
functionRegistry.register(
"local_date",
new CurrentFunction(
"local_date",
currentDate(),
localDateType
)
);
functionRegistry.register(
"local_time",
new CurrentFunction(
"local_time",
currentLocalTime(),
localTimeType
)
);
functionRegistry.register(
"local_datetime",
new CurrentFunction(
"local_datetime",
currentLocalTimestamp(),
localDateTimeType
)
);
functionRegistry.register(
"offset_datetime",
new CurrentFunction(
"offset_datetime",
currentTimestampWithTimeZone(),
offsetDateTimeType
)
);
functionRegistry.registerAlternateKey( "local date", "local_date" );
functionRegistry.registerAlternateKey( "local time", "local_time" );
functionRegistry.registerAlternateKey( "local datetime", "local_datetime" );
functionRegistry.registerAlternateKey( "offset datetime", "offset_datetime" );
functionRegistry.register(
"instant",
new CurrentFunction(
"instant",
currentTimestampWithTimeZone(),
instantType
)
);
functionRegistry.registerAlternateKey( "current_instant", "instant" ); //deprecated legacy!
functionRegistry.register( "sql", new SqlFunction() );
}
/**
* Translation of the HQL/JPQL {@code current_date} function, which
* maps to the Java type {@link java.sql.Date}, and of the HQL
* {@code local_date} function which maps to the Java type
* {@link java.time.LocalDate}.
*/
public String currentDate() {
return "current_date";
}
/**
* Translation of the HQL/JPQL {@code current_time} function, which
* maps to the Java type {@link java.sql.Time} which is a time with
* no time zone. This contradicts ANSI SQL where {@code current_time}
* has the type {@code TIME WITH TIME ZONE}.
* <p>
* It is recommended to override this in dialects for databases which
* support {@code localtime} or {@code time at local}.
*/
public String currentTime() {
return "current_time";
}
/**
* Translation of the HQL/JPQL {@code current_timestamp} function,
* which maps to the Java type {@link java.sql.Timestamp} which is
* a datetime with no time zone. This contradicts ANSI SQL where
* {@code current_timestamp} has the type
* {@code TIMESTAMP WITH TIME ZONE}.
* <p>
* It is recommended to override this in dialects for databases which
* support {@code localtimestamp} or {@code timestamp at local}.
*/
public String currentTimestamp() {
return "current_timestamp";
}
/**
* Translation of the HQL {@code local_time} function, which maps to
* the Java type {@link java.time.LocalTime} which is a time with no
* time zone. It should usually be the same SQL function as for
* {@link #currentTime()}.
* <p>
* It is recommended to override this in dialects for databases which
* support {@code localtime} or {@code current_time at local}.
*/
public String currentLocalTime() {
return currentTime();
}
/**
* Translation of the HQL {@code local_datetime} function, which maps
* to the Java type {@link java.time.LocalDateTime} which is a datetime
* with no time zone. It should usually be the same SQL function as for
* {@link #currentTimestamp()}.
* <p>
* It is recommended to override this in dialects for databases which
* support {@code localtimestamp} or {@code current_timestamp at local}.
*/
public String currentLocalTimestamp() {
return currentTimestamp();
}
/**
* Translation of the HQL {@code offset_datetime} function, which maps
* to the Java type {@link java.time.OffsetDateTime} which is a datetime
* with a time zone. This in principle correctly maps to the ANSI SQL
* {@code current_timestamp} which has the type
* {@code TIMESTAMP WITH TIME ZONE}.
*/
public String currentTimestampWithTimeZone() {
return currentTimestamp();
}
/**
* Obtain a pattern for the SQL equivalent to an
* {@code extract()} function call. The resulting
* pattern must contain ?1 and ?2 placeholders
* for the arguments.
* <p>
* This method does not need to handle
* {@link TemporalUnit#NANOSECOND},
* {@link TemporalUnit#NATIVE},
* {@link TemporalUnit#OFFSET},
* {@link TemporalUnit#DATE},
* {@link TemporalUnit#TIME},
* {@link TemporalUnit#WEEK_OF_YEAR}, or
* {@link TemporalUnit#WEEK_OF_MONTH},
* which are already desugared by
* {@link ExtractFunction}.
*
* @param unit the first argument
*/
public String extractPattern(TemporalUnit unit) {
return "extract(?1 from ?2)";
}
/**
* Obtain a pattern for the SQL equivalent to a
* {@code cast()} function call. The resulting
* pattern must contain ?1 and ?2 placeholders
* for the arguments.
*
* @param from a {@link CastType} indicating the
* type of the value argument
* @param to a {@link CastType} indicating the
* type the value argument is cast to
*/
public String castPattern(CastType from, CastType to) {
switch ( to ) {
case STRING:
switch ( from ) {
case INTEGER_BOOLEAN:
return "case ?1 when 1 then 'true' when 0 then 'false' else null end";
case YN_BOOLEAN:
return "case ?1 when 'Y' then 'true' when 'N' then 'false' else null end";
case TF_BOOLEAN:
return "case ?1 when 'T' then 'true' when 'F' then 'false' else null end";
}
break;
case INTEGER:
case LONG:
switch ( from ) {
case YN_BOOLEAN:
return "case ?1 when 'Y' then 1 when 'N' then 0 else null end";
case TF_BOOLEAN:
return "case ?1 when 'T' then 1 when 'F' then 0 else null end";
case BOOLEAN:
return "case ?1 when true then 1 when false then 0 else null end";
}
break;
case INTEGER_BOOLEAN:
switch ( from ) {
case STRING:
return buildStringToBooleanCast( "1", "0" );
case INTEGER:
case LONG:
return "abs(sign(?1))";
case YN_BOOLEAN:
return "case ?1 when 'Y' then 1 when 'N' then 0 else null end";
case TF_BOOLEAN:
return "case ?1 when 'T' then 1 when 'F' then 0 else null end";
case BOOLEAN:
return "case ?1 when true then 1 when false then 0 else null end";
}
break;
case YN_BOOLEAN:
switch ( from ) {
case STRING:
return buildStringToBooleanCast( "'Y'", "'N'" );
case INTEGER_BOOLEAN:
return "case ?1 when 1 then 'Y' when 0 then 'N' else null end";
case INTEGER:
case LONG:
return "case abs(sign(?1)) when 1 then 'Y' when 0 then 'N' else null end";
case TF_BOOLEAN:
return "case ?1 when 'T' then 'Y' when 'F' then 'N' else null end";
case BOOLEAN:
return "case ?1 when true then 'Y' when false then 'N' else null end";
}
break;
case TF_BOOLEAN:
switch ( from ) {
case STRING:
return buildStringToBooleanCast( "'T'", "'F'" );
case INTEGER_BOOLEAN:
return "case ?1 when 1 then 'T' when 0 then 'F' else null end";
case INTEGER:
case LONG:
return "case abs(sign(?1)) when 1 then 'T' when 0 then 'F' else null end";
case YN_BOOLEAN:
return "case ?1 when 'Y' then 'T' when 'N' then 'F' else null end";
case BOOLEAN:
return "case ?1 when true then 'T' when false then 'F' else null end";
}
break;
case BOOLEAN:
switch ( from ) {
case STRING:
return buildStringToBooleanCast( "true", "false" );
case INTEGER_BOOLEAN:
case INTEGER:
case LONG:
return "(?1<>0)";
case YN_BOOLEAN:
return "(?1<>'N')";
case TF_BOOLEAN:
return "(?1<>'F')";
}
break;
}
return "cast(?1 as ?2)";
}
protected static final String[] TRUE_STRING_VALUES = new String[] { "t", "true", "y", "1" };
protected static final String[] FALSE_STRING_VALUES = new String[] { "f", "false", "n", "0" };
protected String buildStringToBooleanCast(String trueValue, String falseValue) {
final boolean supportsValuesList = supportsValuesList();
final StringBuilder sb = new StringBuilder();
sb.append( "(select v.x from (" );
if ( supportsValuesList ) {
sb.append( "values (" );
sb.append( trueValue );
sb.append( "),(" );
sb.append( falseValue );
sb.append( ")) v(x)" );
}
else {
sb.append( "select " );
sb.append( trueValue );
sb.append( " x");
sb.append( getFromDualForSelectOnly() );
sb.append(" union all select " );
sb.append( falseValue );
sb.append( getFromDualForSelectOnly() );
sb.append( ") v" );
}
sb.append( " left join (" );
if ( supportsValuesList ) {
sb.append( "values" );
char separator = ' ';
for ( String trueStringValue : Dialect.TRUE_STRING_VALUES ) {
sb.append( separator );
sb.append( "('" );
sb.append( trueStringValue );
sb.append( "'," );
sb.append( trueValue );
sb.append( ')' );
separator = ',';
}
for ( String falseStringValue : Dialect.FALSE_STRING_VALUES ) {
sb.append( ",('" );
sb.append( falseStringValue );
sb.append( "'," );
sb.append( falseValue );
sb.append( ')' );
}
sb.append( ") t(k,v)" );
}
else {
sb.append( "select '" );
sb.append( Dialect.TRUE_STRING_VALUES[0] );
sb.append( "' k," );
sb.append( trueValue );
sb.append( " v" );
sb.append( getFromDualForSelectOnly() );
for ( int i = 1; i < Dialect.TRUE_STRING_VALUES.length; i++ ) {
sb.append( " union all select '" );
sb.append( Dialect.TRUE_STRING_VALUES[i] );
sb.append( "'," );
sb.append( trueValue );
sb.append( getFromDualForSelectOnly() );
}
for ( String falseStringValue : Dialect.FALSE_STRING_VALUES ) {
sb.append( " union all select '" );
sb.append( falseStringValue );
sb.append( "'," );
sb.append( falseValue );
sb.append( getFromDualForSelectOnly() );
}
sb.append( ") t" );
}
sb.append( " on " );
sb.append( getLowercaseFunction() );
sb.append( "(?1)=t.k where t.v is null or v.x=t.v)" );
return sb.toString();
}
protected String buildStringToBooleanCastDecode(String trueValue, String falseValue) {
final boolean supportsValuesList = supportsValuesList();
final StringBuilder sb = new StringBuilder();
sb.append( "(select v.x from (" );
if ( supportsValuesList ) {
sb.append( "values (" );
sb.append( trueValue );
sb.append( "),(" );
sb.append( falseValue );
sb.append( ")) v(x)" );
}
else {
sb.append( "select " );
sb.append( trueValue );
sb.append( " x");
sb.append( getFromDualForSelectOnly() );
sb.append(" union all select " );
sb.append( falseValue );
sb.append( getFromDualForSelectOnly() );
sb.append( ") v" );
}
sb.append( ", (" );
if ( supportsValuesList ) {
sb.append( "values (" );
sb.append( buildStringToBooleanDecode( trueValue, falseValue ) );
sb.append( ")) t(v)" );
}
else {
sb.append( "select " );
sb.append( buildStringToBooleanDecode( trueValue, falseValue ) );
sb.append( " v");
sb.append( getFromDualForSelectOnly() );
sb.append(") t" );
}
sb.append( " where t.v is null or v.x=t.v)" );
return sb.toString();
}
protected String buildStringToBooleanDecode(String trueValue, String falseValue) {
final StringBuilder sb = new StringBuilder();
sb.append( "decode(" );
sb.append( getLowercaseFunction() );
sb.append( "(?1)" );
for ( String trueStringValue : TRUE_STRING_VALUES ) {
sb.append( ",'" );
sb.append( trueStringValue );
sb.append( "'," );
sb.append( trueValue );
}
for ( String falseStringValue : FALSE_STRING_VALUES ) {
sb.append( ",'" );
sb.append( falseStringValue );
sb.append( "'," );
sb.append( falseValue );
}
sb.append( ",null)" );
return sb.toString();
}
/**
* Returns a table expression that has one row.
*
* @return the SQL equivalent to Oracle's {@code dual}.
*/
public String getDual() {
return "(values(0))";
}
public String getFromDualForSelectOnly() {
return "";
}
/**
* Obtain a pattern for the SQL equivalent to a
* {@code trim()} function call. The resulting
* pattern must contain a ?1 placeholder for the
* argument of type {@link String} and a ?2 placeholder
* for the trim character if {@code isWhitespace}
* was false.
*
* @param specification
* {@linkplain TrimSpec#LEADING leading},
* {@linkplain TrimSpec#TRAILING trailing},
* or {@linkplain TrimSpec#BOTH both}
*
* @param isWhitespace
* {@code true} if trimming whitespace, and the ?2
* placeholder for the trim character should be omitted,
* {@code false} if the trim character is explicit and
* the ?2 placeholder must be included in the pattern
*/
public String trimPattern(TrimSpec specification, boolean isWhitespace) {
return "trim(" + specification + ( isWhitespace ? "" : " ?2" ) + " from ?1)";
}
/**
* Whether the database supports adding a fractional interval
* to a timestamp, for example {@code timestamp + 0.5 second}.
*/
public boolean supportsFractionalTimestampArithmetic() {
return true;
}
/**
* Obtain a pattern for the SQL equivalent to a
* {@code timestampdiff()} function call. The resulting
* pattern must contain ?1, ?2, and ?3 placeholders
* for the arguments.
*
* @param unit the first argument
* @param fromTemporalType true if the first argument is
* a timestamp, false if a date
* @param toTemporalType true if the second argument is
*/
@SuppressWarnings("deprecation")
public String timestampdiffPattern(TemporalUnit unit, TemporalType fromTemporalType, TemporalType toTemporalType) {
throw new UnsupportedOperationException( "`" + getClass().getName() + "` does not yet support #timestampdiffPattern" );
}
/**
* Obtain a pattern for the SQL equivalent to a
* {@code timestampadd()} function call. The resulting
* pattern must contain ?1, ?2, and ?3 placeholders
* for the arguments.
*
* @param unit The unit to add to the temporal
* @param temporalType The type of the temporal
* @param intervalType The type of interval to add or null if it's not a native interval
*/
@SuppressWarnings("deprecation")
public String timestampaddPattern(TemporalUnit unit, TemporalType temporalType, IntervalType intervalType) {
throw new UnsupportedOperationException( "`" + getClass().getName() + "` does not yet support #timestampaddPattern" );
}
/**
* Do the given JDBC type codes, as defined in {@link Types} represent
* essentially the same type in this dialect of SQL?
* <p>
* The default implementation treats {@link Types#NUMERIC NUMERIC} and
* {@link Types#DECIMAL DECIMAL} as the same type, and
* {@link Types#FLOAT FLOAT}, {@link Types#REAL REAL}, and
* {@link Types#DOUBLE DOUBLE} as essentially the same type, since the
* ANSI SQL specification fails to meaningfully distinguish them.
* <p>
* The default implementation also treats {@link Types#VARCHAR VARCHAR},
* {@link Types#NVARCHAR NVARCHAR}, {@link Types#LONGVARCHAR LONGVARCHAR},
* and {@link Types#LONGNVARCHAR LONGNVARCHAR} as the same type, and
* {@link Types#VARBINARY BINARY} and
* {@link Types#LONGVARBINARY LONGVARBINARY} as the same type, since
* Hibernate doesn't really differentiate these types.
* <p>
* On the other hand, integral types are not treated as equivalent,
* instead, {@link #isCompatibleIntegralType(int, int)} is responsible
* for determining if the types are compatible.
*
* @param typeCode1 the first column type info
* @param typeCode2 the second column type info
*
* @return {@code true} if the two type codes are equivalent
*/
public boolean equivalentTypes(int typeCode1, int typeCode2) {
return typeCode1==typeCode2
|| isNumericOrDecimal(typeCode1) && isNumericOrDecimal(typeCode2)
|| isFloatOrRealOrDouble(typeCode1) && isFloatOrRealOrDouble(typeCode2)
|| isVarcharType(typeCode1) && isVarcharType(typeCode2)
|| isVarbinaryType(typeCode1) && isVarbinaryType(typeCode2)
|| isCompatibleIntegralType(typeCode1, typeCode2)
// HHH-17908: Since the runtime can cope with | mapped |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/CombinedHostFileManager.java | {
"start": 2397,
"end": 2658
} | class ____ extends HostConfigManager {
private static final Logger LOG = LoggerFactory.getLogger(
CombinedHostFileManager.class);
private Configuration conf;
private HostProperties hostProperties = new HostProperties();
static | CombinedHostFileManager |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/filter/JsonIncludeTest.java | {
"start": 4754,
"end": 5065
} | class ____
{
public final String first;
public final double second;
public Issue1351Bean(String first, double second) {
this.first = first;
this.second = second;
}
}
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
static abstract | Issue1351Bean |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/mapping/ColumnWithExplicitReferenceToPrimaryTableTest.java | {
"start": 846,
"end": 1207
} | class ____ {
@Test
@JiraKey( value = "HHH-8539" )
public void testColumnAnnotationWithExplicitReferenceToPrimaryTable(EntityManagerFactoryScope scope) {
Assertions.assertNotNull( scope.getEntityManagerFactory() );
scope.getEntityManagerFactory().close();
}
@Entity
@Table( name = "THE_TABLE" )
public static | ColumnWithExplicitReferenceToPrimaryTableTest |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng7443ConsistencyOfOptionalProjectsAndProfilesTest.java | {
"start": 1016,
"end": 2527
} | class ____ extends AbstractMavenIntegrationTestCase {
@Test
public void testConsistentLoggingOfOptionalProfilesAndProjects() throws IOException, VerificationException {
File testDir = extractResources("/mng-7443-consistency-of-optional-profiles-and-projects");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.addCliArgument("-pl");
verifier.addCliArgument("?:does-not-exist");
verifier.addCliArgument("-P");
verifier.addCliArgument("?does-not-exist-either");
verifier.addCliArguments("clean", "verify");
verifier.execute();
final List<String> logLines = verifier.loadLogLines();
int projectSelectorMissingCounter = 0;
int profileSelectorMissingCounter = 0;
for (String logLine : logLines) {
if (logLine.contains("The requested optional projects")
&& logLine.contains(":does-not-exist")
&& logLine.contains("do not exist")) {
projectSelectorMissingCounter++;
}
if (logLine.contains("The requested optional profiles")
&& logLine.contains("does-not-exist-either")
&& logLine.contains("do not exist")) {
profileSelectorMissingCounter++;
}
}
assertEquals(2, profileSelectorMissingCounter);
assertEquals(2, projectSelectorMissingCounter);
}
}
| MavenITmng7443ConsistencyOfOptionalProjectsAndProfilesTest |
java | apache__camel | components/camel-rest-openapi/src/test/java/org/apache/camel/component/rest/openapi/RestOpenApiRequestValidationTest.java | {
"start": 3194,
"end": 23242
} | class ____ extends CamelTestSupport {
public static WireMockServer wireMockServer = new WireMockServer(wireMockConfig().dynamicPort());
@BeforeAll
public static void startWireMockServer() throws Exception {
wireMockServer.start();
setUpPetStoreStubs("/openapi-v3.json", "/api/v3/pet");
setUpPetStoreStubs("/petstore-3.1.yaml", "/api/v31/pet");
setUpFruitsApiStubs("/fruits-3.0.yaml");
}
@AfterAll
public static void stopWireMockServer() {
wireMockServer.stop();
}
@BeforeEach
public void resetWireMock() {
wireMockServer.resetRequests();
}
static void setUpPetStoreStubs(String specificationPath, String urlBasePath) throws Exception {
wireMockServer.stubFor(get(urlEqualTo(specificationPath)).willReturn(aResponse().withBody(
Files.readAllBytes(Paths.get(
Objects.requireNonNull(RestOpenApiComponentV3Test.class.getResource(specificationPath)).toURI())))));
String validationEnabledPetJson
= "{\"id\":10,\"name\":\"doggie\",\"photoUrls\":[\"https://test.photos.org/doggie.gif\"]}";
wireMockServer.stubFor(post(urlEqualTo(urlBasePath))
.withHeader("Content-Type", equalTo("application/json; charset=UTF-8"))
.withRequestBody(equalTo(
validationEnabledPetJson))
.willReturn(aResponse().withStatus(HttpURLConnection.HTTP_CREATED)
.withBody(validationEnabledPetJson)));
String validationEnabledPetXml
= "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n" +
"<Pet>\n" +
" <id>10</id>\n" +
" <name>doggie</name>\n" +
" <photoUrls>https://test.photos.org/doggie.gif</photoUrls>\n" +
"</Pet>\n";
wireMockServer.stubFor(post(urlEqualTo(urlBasePath))
.withHeader("Content-Type", equalTo("application/xml"))
.withRequestBody(equalTo(
validationEnabledPetXml))
.willReturn(aResponse().withStatus(HttpURLConnection.HTTP_CREATED)
.withBody(validationEnabledPetXml)));
String validationDisabledPetJson = "{\"id\":10,\"name\":\"doggie\"}";
wireMockServer.stubFor(post(urlEqualTo(urlBasePath))
.withRequestBody(equalTo(
validationDisabledPetJson))
.willReturn(aResponse().withStatus(HttpURLConnection.HTTP_CREATED)
.withBody(validationDisabledPetJson)));
String petsJson = "[{\"id\":1,\"name\":\"doggie\", \"id\":2,\"name\":\"doggie2\"}]";
wireMockServer.stubFor(get(urlPathEqualTo(urlBasePath + "/findByStatus"))
.withQueryParam("status", equalTo("available"))
.willReturn(aResponse().withStatus(HttpURLConnection.HTTP_OK)
.withBody(petsJson)));
wireMockServer.stubFor(delete(urlPathEqualTo(urlBasePath + "/10"))
.willReturn(aResponse().withStatus(HttpURLConnection.HTTP_OK)
.withBody("Pet deleted")));
String uploadImageJson
= "{\"id\":1,\"category\":{\"id\":1,\"name\":\"Pet\"},\"name\":\"Test\",\"photoUrls\":[\"image.jpg\"],\"tags\":[],\"status\":\"available\"}";
wireMockServer.stubFor(post(urlPathEqualTo(urlBasePath + "/1/uploadImage"))
.withRequestBody(binaryEqualTo(createUploadImage()))
.willReturn(aResponse().withStatus(HttpURLConnection.HTTP_OK)
.withBody(uploadImageJson)));
}
static void setUpFruitsApiStubs(String specificationPath) throws Exception {
String urlBasePath = "/api/v1/fruit";
wireMockServer.stubFor(get(urlEqualTo(specificationPath)).willReturn(aResponse().withBody(
Files.readAllBytes(Paths.get(
Objects.requireNonNull(RestOpenApiComponentV3Test.class.getResource(specificationPath)).toURI())))));
wireMockServer.stubFor(post(urlPathEqualTo(urlBasePath + "/form"))
.withHeader("Content-Type", equalTo("application/x-www-form-urlencoded"))
.willReturn(aResponse().withStatus(HttpURLConnection.HTTP_OK)
.withBody("{\"name\":\"Lemon\",\"color\":\"Yellow\"}")));
wireMockServer.stubFor(delete(urlPathEqualTo(urlBasePath + "/1"))
.withHeader("deletionReason", matching("Test deletion reason"))
.willReturn(aResponse().withStatus(HttpURLConnection.HTTP_OK)
.withBody("Fruit deleted")));
wireMockServer.stubFor(delete(urlPathEqualTo(urlBasePath + "/1"))
.withHeader("deletionReason", containing("Test deletion reason 1"))
.withHeader("deletionReason", containing("Test deletion reason 2"))
.willReturn(aResponse().withStatus(HttpURLConnection.HTTP_OK)
.withBody("Fruit deleted")));
wireMockServer.stubFor(delete(urlPathEqualTo(urlBasePath))
.withQueryParam("id", containing("1"))
.withQueryParam("id", containing("2"))
.willReturn(aResponse().withStatus(HttpURLConnection.HTTP_OK)
.withBody("Fruits deleted")));
}
@ParameterizedTest
@MethodSource("petStoreVersions")
void requestValidationDisabled(String petStoreVersion) {
Pet pet = new Pet();
pet.setId(10);
pet.setName("doggie");
Pet createdPet = template.requestBodyAndHeader("direct:validationDisabled", pet, "petStoreVersion", petStoreVersion,
Pet.class);
assertEquals(10, createdPet.getId());
}
@ParameterizedTest
@MethodSource("petStoreVersions")
void requestValidationWithValidJsonBody(String petStoreVersion) {
Pet pet = new Pet();
pet.setId(10);
pet.setName("doggie");
pet.setPhotoUrls(List.of("https://test.photos.org/doggie.gif"));
Pet createdPet
= template.requestBodyAndHeader("direct:validateJsonBody", pet, "petStoreVersion", petStoreVersion, Pet.class);
assertEquals(10, createdPet.getId());
}
@ParameterizedTest
@MethodSource("petStoreVersions")
void requestValidationWithBadlyFormedJsonBody(String petStoreVersion) {
Exchange exchange = template.request("direct:validateInvalidJsonBody", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getMessage().setHeader("petStoreVersion", petStoreVersion);
exchange.getMessage().setBody("invalid JSON string");
}
});
Exception exception = exchange.getException();
assertNotNull(exception);
assertInstanceOf(RestOpenApiValidationException.class, exception);
RestOpenApiValidationException validationException = (RestOpenApiValidationException) exception;
Set<String> errors = validationException.getValidationErrors();
assertEquals(1, errors.size());
String errorMessage = errors.iterator().next();
assertTrue(errorMessage.startsWith("Unable to parse JSON"));
}
@ParameterizedTest
@MethodSource("petStoreVersions")
void requestValidationWithValidXmlBody(String petStoreVersion) {
Pet pet = new Pet();
pet.setId(10);
pet.setName("doggie");
pet.setPhotoUrls(List.of("https://test.photos.org/doggie.gif"));
Pet createdPet
= template.requestBodyAndHeader("direct:validateXmlBody", pet, "petStoreVersion", petStoreVersion, Pet.class);
assertEquals(10, createdPet.getId());
}
@ParameterizedTest
@MethodSource("petStoreVersions")
void requestValidationWithNullBody(String petStoreVersion) {
Exchange exchange = template.request("direct:validateNullBody", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getMessage().setHeader("petStoreVersion", petStoreVersion);
exchange.getMessage().setBody(null);
}
});
Exception exception = exchange.getException();
assertNotNull(exception);
assertInstanceOf(RestOpenApiValidationException.class, exception);
RestOpenApiValidationException validationException = (RestOpenApiValidationException) exception;
Set<String> errors = validationException.getValidationErrors();
assertEquals(1, errors.size());
assertEquals("A request body is required but none found.", errors.iterator().next());
}
@ParameterizedTest
@MethodSource("petStoreVersions")
void requestValidationWithInvalidContentType(String petStoreVersion) {
Pet pet = new Pet();
pet.setId(10);
pet.setName("doggie");
pet.setPhotoUrls(List.of("https://test.photos.org/doggie.gif"));
Exchange exchange = template.request("direct:validateContentType", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getMessage().setHeader("petStoreVersion", petStoreVersion);
exchange.getMessage().setBody(pet);
}
});
Exception exception = exchange.getException();
assertNotNull(exception);
assertInstanceOf(RestOpenApiValidationException.class, exception);
RestOpenApiValidationException validationException = (RestOpenApiValidationException) exception;
Set<String> errors = validationException.getValidationErrors();
assertEquals(1, errors.size());
String errorMessage = errors.iterator().next();
assertTrue(
errorMessage.startsWith("Request Content-Type header 'application/camel' does not match any allowed types"));
}
@ParameterizedTest
@MethodSource("petStoreVersions")
void requestValidationWithRequiredPathAndQueryParameter(String petStoreVersion) {
Map<String, Object> headers = Map.of(
"petStoreVersion", petStoreVersion,
"petId", 10,
"api_key", "foo");
String result = template.requestBodyAndHeaders("direct:validateDelete", null, headers, String.class);
assertEquals("Pet deleted", result);
}
@ParameterizedTest
@MethodSource("petStoreVersions")
@SuppressWarnings("unchecked")
void requestValidationWithRequiredQueryParameter(String petStoreVersion) {
Map<String, Object> headers = Map.of(
"status", "available",
"petStoreVersion", petStoreVersion);
List<Pet> pets = template.requestBodyAndHeaders("direct:validateOperationForQueryParams", null, headers, List.class);
assertFalse(pets.isEmpty());
}
@ParameterizedTest
@ValueSource(strings = { "petStoreV3" })
void requestValidationWithBinaryBody(String petStoreVersion) throws IOException {
Map<String, Object> headers = Map.of(
"petId", 1,
"petStoreVersion", petStoreVersion);
Pet pet = template.requestBodyAndHeaders("direct:binaryContent", createUploadImage(), headers, Pet.class);
assertNotNull(pet);
assertEquals(1, pet.getPhotoUrls().size());
}
@ParameterizedTest
@MethodSource("petStoreVersions")
void restOpenApiEndpointDefaultOptions(String petStoreVersion) throws Exception {
RestOpenApiEndpoint endpoint = context.getEndpoint(petStoreVersion + ":#addPet", RestOpenApiEndpoint.class);
endpoint.createProducer();
assertFalse(endpoint.isRequestValidationEnabled());
}
@ParameterizedTest
@MethodSource("fruitsApiVersions")
void requestValidationRequiredHeaderParamsNotPresent(String fruitsApiVersion) {
Exchange exchange = template.request("direct:headerParam", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getMessage().setHeader("fruitsApiVersion", fruitsApiVersion);
}
});
Exception exception = exchange.getException();
assertNotNull(exception);
assertInstanceOf(RestOpenApiValidationException.class, exception);
RestOpenApiValidationException validationException = (RestOpenApiValidationException) exception;
Set<String> errors = validationException.getValidationErrors();
assertEquals(1, errors.size());
assertTrue(errors.iterator().next().startsWith("Header parameter 'deletionReason' is required"));
}
@ParameterizedTest
@MethodSource("fruitsApiVersions")
void requestValidationRequiredHeaderParamsPresent(String fruitsApiVersion) {
Map<String, Object> headers = Map.of(
"fruitsApiVersion", fruitsApiVersion,
"id", 1,
"deletionReason", "Test deletion reason");
String result = template.requestBodyAndHeaders("direct:headerParam", null, headers, String.class);
assertEquals("Fruit deleted", result);
}
@ParameterizedTest
@MethodSource("fruitsApiVersions")
void requestValidationRequiredHeaderParamsPresentAsList(String fruitsApiVersion) {
Map<String, Object> headers = Map.of(
"fruitsApiVersion", fruitsApiVersion,
"id", 1,
"deletionReason", List.of("Test deletion reason 1", "Test deletion reason 2"));
String result = template.requestBodyAndHeaders("direct:headerParam", null, headers, String.class);
assertEquals("Fruit deleted", result);
}
@ParameterizedTest
@MethodSource("fruitsApiVersions")
void requestValidationRequiredFormParamsPresent(String fruitsApiVersion) {
String result = template.requestBodyAndHeader("direct:formParam", "name=Lemon&color=Yellow", "fruitsApiVersion",
fruitsApiVersion, String.class);
assertEquals("{\"name\":\"Lemon\",\"color\":\"Yellow\"}", result);
}
@ParameterizedTest
@MethodSource("fruitsApiVersions")
void requestValidationRequiredQueryParamsPresentAsList(String fruitsApiVersion) {
Map<String, Object> headers = Map.of(
"fruitsApiVersion", fruitsApiVersion,
"id", List.of("1", "2"));
String result = template.requestBodyAndHeaders("direct:queryParam", null, headers, String.class);
assertEquals("Fruits deleted", result);
}
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext camelContext = super.createCamelContext();
camelContext.addComponent("petStoreV3", createRestOpenApiComponent("openapi-v3.json"));
camelContext.addComponent("petStoreV31", createRestOpenApiComponent("petstore-3.1.yaml"));
camelContext.addComponent("fruitsV3", createRestOpenApiComponent("fruits-3.0.yaml"));
camelContext.getGlobalOptions().put("CamelJacksonEnableTypeConverter", "true");
camelContext.getGlobalOptions().put("CamelJacksonTypeConverterToPojo", "true");
return camelContext;
}
public static Iterable<String> petStoreVersions() {
return List.of("petStoreV3", "petStoreV31");
}
public static Iterable<String> fruitsApiVersions() {
return List.of("fruitsV3");
}
@Override
protected RoutesBuilder createRouteBuilder() throws Exception {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(EnumFeature.WRITE_ENUMS_TO_LOWERCASE, true);
JacksonDataFormat jacksonDataFormat = new JacksonDataFormat();
jacksonDataFormat.setObjectMapper(mapper);
JAXBContext jaxbContext = JAXBContext.newInstance(Pet.class);
JaxbDataFormat jaxbDataFormat = new JaxbDataFormat(jaxbContext);
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:validationDisabled")
.marshal(jacksonDataFormat)
.toD("${header.petStoreVersion}:#addPet")
.unmarshal(jacksonDataFormat);
from("direct:validateJsonBody")
.marshal(jacksonDataFormat)
// Append charset to verify the validator internals can handle it
.setHeader(Exchange.CONTENT_TYPE).constant("application/json; charset=UTF-8")
.toD("${header.petStoreVersion}:#addPet?requestValidationEnabled=true")
.unmarshal(jacksonDataFormat);
from("direct:validateDelete")
.toD("${header.petStoreVersion}:#deletePet?requestValidationEnabled=true");
from("direct:validateXmlBody")
.marshal(jaxbDataFormat)
.setHeader(Exchange.CONTENT_TYPE).constant("application/xml")
.toD("${header.petStoreVersion}:#addPet?requestValidationEnabled=true&consumes=application/xml&produces=application/xml")
.unmarshal(jaxbDataFormat);
from("direct:validateContentType")
.marshal(jacksonDataFormat)
.setHeader(Exchange.CONTENT_TYPE).constant("application/camel")
.toD("${header.petStoreVersion}:#addPet?requestValidationEnabled=true")
.unmarshal(jaxbDataFormat);
from("direct:validateNullBody")
.toD("${header.petStoreVersion}:#addPet?requestValidationEnabled=true");
from("direct:validateInvalidJsonBody")
.setHeader(Exchange.CONTENT_TYPE).constant("application/json")
.toD("${header.petStoreVersion}:#addPet?requestValidationEnabled=true");
from("direct:validateOperationForQueryParams")
.toD("${header.petStoreVersion}:findPetsByStatus?requestValidationEnabled=true")
.unmarshal(jacksonDataFormat);
from("direct:binaryContent")
.toD("${header.petStoreVersion}:uploadFile?requestValidationEnabled=true&produces=application/octet-stream")
.unmarshal(jacksonDataFormat);
from("direct:headerParam")
.toD("${header.fruitsApiVersion}:#deleteFruit?requestValidationEnabled=true");
from("direct:formParam")
.setHeader(Exchange.CONTENT_TYPE).constant("application/x-www-form-urlencoded")
.toD("${header.fruitsApiVersion}:#addFruitFromForm?requestValidationEnabled=true");
from("direct:queryParam")
.toD("${header.fruitsApiVersion}:#deleteFruits?requestValidationEnabled=true");
}
};
}
private RestOpenApiComponent createRestOpenApiComponent(String specificationUri) {
RestOpenApiComponent component = new RestOpenApiComponent();
component.setComponentName("http");
component.setSpecificationUri("classpath:" + specificationUri);
component.setConsumes("application/json");
component.setProduces("application/json");
component.setHost("http://localhost:" + wireMockServer.port());
return component;
}
private static byte[] createUploadImage() throws IOException {
// Creates a 50x50 square filled with white
BufferedImage image = new BufferedImage(50, 50, BufferedImage.TYPE_INT_RGB);
Graphics2D graphics = image.createGraphics();
graphics.setColor(Color.WHITE);
graphics.fillRect(0, 0, 50, 50);
graphics.dispose();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ImageIO.write(image, "jpg", baos);
return baos.toByteArray();
}
}
| RestOpenApiRequestValidationTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/support/UnsupportedSelectorException.java | {
"start": 584,
"end": 857
} | class ____ extends IllegalArgumentException {
public UnsupportedSelectorException(String expression) {
super("Index component selectors are not supported in this context but found selector in expression [" + expression + "]");
}
}
| UnsupportedSelectorException |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/StateLogger.java | {
"start": 779,
"end": 2947
} | class ____ {
final Logger logger;
StateLogger(Logger logger) {
this.logger = logger;
}
void log(String instance, String action, long initialState, long committedState) {
log(instance, action, initialState, committedState, false);
}
void log(String instance,
String action,
long initialState,
long committedState,
boolean logStackTrace) {
if (logStackTrace) {
this.logger.trace(String.format("[%s][%s][%s][%s-%s]",
instance,
action,
action,
Thread.currentThread()
.getId(),
formatState(initialState, 64),
formatState(committedState, 64)), new RuntimeException());
}
else {
this.logger.trace(String.format("[%s][%s][%s][\n\t%s\n\t%s]",
instance,
action,
Thread.currentThread()
.getId(),
formatState(initialState, 64),
formatState(committedState, 64)));
}
}
void log(String instance, String action, int initialState, int committedState) {
log(instance, action, initialState, committedState, false);
}
void log(String instance,
String action,
int initialState,
int committedState,
boolean logStackTrace) {
if (logStackTrace) {
this.logger.trace(String.format("[%s][%s][%s][%s-%s]",
instance,
action,
action,
Thread.currentThread()
.getId(),
formatState(initialState, 32),
formatState(committedState, 32)), new RuntimeException());
}
else {
this.logger.trace(String.format("[%s][%s][%s][%s-%s]",
instance,
action,
Thread.currentThread()
.getId(),
formatState(initialState, 32),
formatState(committedState, 32)));
}
}
static String formatState(long state, int size) {
final String defaultFormat = Long.toBinaryString(state);
final StringBuilder formatted = new StringBuilder();
final int toPrepend = size - defaultFormat.length();
for (int i = 0; i < size; i++) {
if (i != 0 && i % 4 == 0) {
formatted.append("_");
}
if (i < toPrepend) {
formatted.append("0");
}
else {
formatted.append(defaultFormat.charAt(i - toPrepend));
}
}
formatted.insert(0, "0b");
return formatted.toString();
}
}
| StateLogger |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/transport/ClusterConnectionManager.java | {
"start": 1843,
"end": 19568
} | class ____ implements ConnectionManager {
private static final Logger logger = LogManager.getLogger(ClusterConnectionManager.class);
private final ConcurrentMap<DiscoveryNode, Transport.Connection> connectedNodes = ConcurrentCollections.newConcurrentMap();
private final ConcurrentMap<DiscoveryNode, SubscribableListener<Transport.Connection>> pendingConnections = ConcurrentCollections
.newConcurrentMap();
private final AbstractRefCounted connectingRefCounter = AbstractRefCounted.of(this::pendingConnectionsComplete);
private final Transport transport;
private final ThreadContext threadContext;
private final ConnectionProfile defaultProfile;
private final AtomicBoolean closing = new AtomicBoolean(false);
private final CountDownLatch closeLatch = new CountDownLatch(1);
private final DelegatingNodeConnectionListener connectionListener = new DelegatingNodeConnectionListener();
public ClusterConnectionManager(Settings settings, Transport transport, ThreadContext threadContext) {
this(ConnectionProfile.buildDefaultConnectionProfile(settings), transport, threadContext);
}
public ClusterConnectionManager(ConnectionProfile connectionProfile, Transport transport, ThreadContext threadContext) {
this.transport = transport;
this.defaultProfile = connectionProfile;
this.threadContext = threadContext;
}
@Override
public void addListener(TransportConnectionListener listener) {
this.connectionListener.addListener(listener);
}
@Override
public void removeListener(TransportConnectionListener listener) {
this.connectionListener.removeListener(listener);
}
@Override
public void openConnection(DiscoveryNode node, ConnectionProfile connectionProfile, ActionListener<Transport.Connection> listener) {
ConnectionProfile resolvedProfile = ConnectionProfile.resolveConnectionProfile(connectionProfile, defaultProfile);
if (acquireConnectingRef()) {
var success = false;
final var release = new RunOnce(connectingRefCounter::decRef);
try {
internalOpenConnection(node, resolvedProfile, ActionListener.runBefore(listener, release::run));
success = true;
} finally {
if (success == false) {
release.run();
}
}
} else {
listener.onFailure(new ConnectTransportException(node, "connection manager is closed"));
}
}
/**
* Connects to the given node, or acquires another reference to an existing connection to the given node if a connection already exists.
*
* @param connectionProfile the profile to use if opening a new connection. Only used in tests, this is {@code null} in production.
* @param connectionValidator a callback to validate the connection before it is exposed (e.g. to {@link #nodeConnected}).
* @param listener completed on the calling thread or by the {@link ConnectionValidator}; in production the
* {@link ConnectionValidator} will complete the listener on the generic thread pool (see
* {@link TransportService#connectionValidator}). If successful, completed with a {@link Releasable} which
* will release this connection (and close it if no other references to it are held).
*/
@Override
public void connectToNode(
DiscoveryNode node,
@Nullable ConnectionProfile connectionProfile,
ConnectionValidator connectionValidator,
ActionListener<Releasable> listener
) throws ConnectTransportException {
connectToNodeOrRetry(
node,
connectionProfile,
connectionValidator,
0,
ContextPreservingActionListener.wrapPreservingContext(listener, threadContext)
);
}
/**
* Connects to the given node, or acquires another reference to an existing connection to the given node if a connection already exists.
* If a connection already exists but has been completely released (so it's in the process of closing) then this method will wait for
* the close to complete and then try again (up to 10 times).
*/
private void connectToNodeOrRetry(
DiscoveryNode node,
@Nullable ConnectionProfile connectionProfile,
ConnectionValidator connectionValidator,
int previousFailureCount,
ActionListener<Releasable> listener
) throws ConnectTransportException {
ConnectionProfile resolvedProfile = ConnectionProfile.resolveConnectionProfile(connectionProfile, defaultProfile);
if (node == null) {
listener.onFailure(new ConnectTransportException(null, "can't connect to a null node"));
return;
}
if (acquireConnectingRef() == false) {
listener.onFailure(new ConnectTransportException(node, "connection manager is closed"));
return;
}
final ActionListener<Transport.Connection> acquiringListener = listener.delegateFailure((delegate, connection) -> {
if (connection.tryIncRef()) {
delegate.onResponse(Releasables.releaseOnce(connection::decRef));
return;
}
// We found a connection that's registered but already fully released, so it'll be removed soon by its close listener. Bad luck,
// let's wait for it to be removed and then try again.
final int failureCount = previousFailureCount + 1;
if (failureCount < 10) {
logger.trace("concurrent connect/disconnect for [{}] ([{}] failures), will try again", node, failureCount);
connection.addRemovedListener(
delegate.delegateFailure(
(retryDelegate, ignored) -> connectToNodeOrRetry(
node,
connectionProfile,
connectionValidator,
failureCount,
retryDelegate
)
)
);
} else {
// A run of bad luck this long is probably not bad luck after all: something's broken, just give up.
logger.warn("failed to connect to [{}] after [{}] attempts, giving up", node.descriptionWithoutAttributes(), failureCount);
delegate.onFailure(
new ConnectTransportException(
node,
"concurrently connecting and disconnecting even after [" + failureCount + "] attempts"
)
);
}
});
final Transport.Connection existingConnection = connectedNodes.get(node);
if (existingConnection != null) {
connectingRefCounter.decRef();
acquiringListener.onResponse(existingConnection);
return;
}
final SubscribableListener<Transport.Connection> currentListener = new SubscribableListener<>();
final SubscribableListener<Transport.Connection> existingListener = pendingConnections.putIfAbsent(node, currentListener);
if (existingListener != null) {
try {
// wait on previous entry to complete connection attempt
existingListener.addListener(acquiringListener);
} finally {
connectingRefCounter.decRef();
}
return;
}
currentListener.addListener(acquiringListener);
// It's possible that a connection completed, and the pendingConnections entry was removed, between the calls to
// connectedNodes.containsKey and pendingConnections.putIfAbsent above, so we check again to make sure we don't open a redundant
// extra connection to the node. We could _just_ check here, but checking up front skips the work to mark the connection as pending.
final Transport.Connection existingConnectionRecheck = connectedNodes.get(node);
if (existingConnectionRecheck != null) {
var future = pendingConnections.remove(node);
assert future == currentListener : "Listener in pending map is different than the expected listener";
connectingRefCounter.decRef();
future.onResponse(existingConnectionRecheck);
return;
}
final RunOnce releaseOnce = new RunOnce(connectingRefCounter::decRef);
internalOpenConnection(
node,
resolvedProfile,
ActionListener.wrap(
conn -> connectionValidator.validate(conn, resolvedProfile, ActionListener.runAfter(ActionListener.wrap(ignored -> {
assert Transports.assertNotTransportThread("connection validator success");
final var managerRefs = AbstractRefCounted.of(conn::onRemoved);
try {
if (connectedNodes.putIfAbsent(node, conn) != null) {
assert false : "redundant connection to " + node;
logger.warn("existing connection to node [{}], closing new redundant connection", node);
IOUtils.closeWhileHandlingException(conn);
} else {
logger.debug("connected to node [{}]", node);
managerRefs.mustIncRef();
try {
connectionListener.onNodeConnected(node, conn);
} finally {
conn.addCloseListener(new ActionListener<Void>() {
@Override
public void onResponse(Void ignored) {
handleClose(null);
}
@Override
public void onFailure(Exception e) {
handleClose(e);
}
void handleClose(@Nullable Exception e) {
connectedNodes.remove(node, conn);
try {
connectionListener.onNodeDisconnected(node, e);
} finally {
managerRefs.decRef();
}
}
});
conn.addCloseListener(ActionListener.running(() -> {
if (connectingRefCounter.hasReferences() == false) {
logger.trace("connection manager shut down, closing transport connection to [{}]", node);
} else if (conn.hasReferences()) {
logger.info(
"""
transport connection to [{}] closed by remote; \
if unexpected, see [{}] for troubleshooting guidance""",
node.descriptionWithoutAttributes(),
ReferenceDocs.NETWORK_DISCONNECT_TROUBLESHOOTING
);
// In production code we only close connections via ref-counting, so this message confirms that a
// 'node-left ... reason: disconnected' event was caused by external factors. Put differently, if a
// node leaves the cluster with "reason: disconnected" but without this message being logged then
// that's a bug.
} else {
logger.debug("closing unused transport connection to [{}]", node);
}
}));
}
}
} finally {
var future = pendingConnections.remove(node);
assert future == currentListener : "Listener in pending map is different than the expected listener";
managerRefs.decRef();
releaseOnce.run();
future.onResponse(conn);
}
}, e -> {
assert Transports.assertNotTransportThread("connection validator failure");
IOUtils.closeWhileHandlingException(conn);
failConnectionListener(node, releaseOnce, e, currentListener);
}), conn::decRef)),
e -> {
assert Transports.assertNotTransportThread("internalOpenConnection failure");
failConnectionListener(node, releaseOnce, e, currentListener);
}
)
);
}
/**
* Returns a connection for the given node if the node is connected.
* Connections returned from this method must not be closed. The lifecycle of this connection is
* maintained by this connection manager
*
* @throws NodeNotConnectedException if the node is not connected
* @see #connectToNode(DiscoveryNode, ConnectionProfile, ConnectionValidator, ActionListener)
*/
@Override
public Transport.Connection getConnection(DiscoveryNode node) {
Transport.Connection connection = connectedNodes.get(node);
if (connection == null) {
throw new NodeNotConnectedException(node, "Node not connected");
}
return connection;
}
/**
* Returns {@code true} if the node is connected.
*/
@Override
public boolean nodeConnected(DiscoveryNode node) {
return connectedNodes.containsKey(node);
}
/**
* Disconnected from the given node, if not connected, will do nothing.
*/
@Override
public void disconnectFromNode(DiscoveryNode node) {
Transport.Connection nodeChannels = connectedNodes.remove(node);
if (nodeChannels != null) {
// if we found it and removed it we close
nodeChannels.close();
}
}
/**
* Returns the number of nodes this manager is connected to.
*/
@Override
public int size() {
return connectedNodes.size();
}
@Override
public Set<DiscoveryNode> getAllConnectedNodes() {
return Collections.unmodifiableSet(connectedNodes.keySet());
}
@Override
public void close() {
internalClose(true);
}
@Override
public void closeNoBlock() {
internalClose(false);
}
private void internalClose(boolean waitForPendingConnections) {
assert Transports.assertNotTransportThread("Closing ConnectionManager");
if (closing.compareAndSet(false, true)) {
connectingRefCounter.decRef();
if (waitForPendingConnections) {
try {
closeLatch.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IllegalStateException(e);
}
}
}
}
private void pendingConnectionsComplete() {
final Iterator<Map.Entry<DiscoveryNode, Transport.Connection>> iterator = connectedNodes.entrySet().iterator();
while (iterator.hasNext()) {
final Map.Entry<DiscoveryNode, Transport.Connection> next = iterator.next();
try {
IOUtils.closeWhileHandlingException(next.getValue());
} finally {
iterator.remove();
}
}
closeLatch.countDown();
}
private void internalOpenConnection(
DiscoveryNode node,
ConnectionProfile connectionProfile,
ActionListener<Transport.Connection> listener
) {
transport.openConnection(node, connectionProfile, listener.map(connection -> {
assert Transports.assertNotTransportThread("internalOpenConnection success");
try {
connectionListener.onConnectionOpened(connection);
} finally {
connection.addCloseListener(ActionListener.running(() -> connectionListener.onConnectionClosed(connection)));
}
if (connection.isClosed()) {
throw new ConnectTransportException(node, "a channel closed while connecting");
}
return connection;
}));
}
private void failConnectionListener(
DiscoveryNode node,
RunOnce releaseOnce,
Exception e,
SubscribableListener<Transport.Connection> expectedListener
) {
final var future = pendingConnections.remove(node);
releaseOnce.run();
if (future != null) {
assert future == expectedListener : "Listener in pending map is different than the expected listener";
future.onFailure(e);
}
}
@Override
public ConnectionProfile getConnectionProfile() {
return defaultProfile;
}
private boolean acquireConnectingRef() {
return closing.get() == false && connectingRefCounter.tryIncRef();
}
}
| ClusterConnectionManager |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/calcite/rel/logical/LogicalSnapshot.java | {
"start": 2021,
"end": 5344
} | class ____ extends Snapshot {
// ~ Constructors -----------------------------------------------------------
/** Creates a LogicalSnapshot by parsing serialized output. */
public LogicalSnapshot(RelInput input) {
super(input);
}
/**
* Creates a LogicalSnapshot.
*
* <p>Use {@link #create} unless you know what you're doing.
*
* @param cluster Cluster that this relational expression belongs to
* @param traitSet The traits of this relational expression
* @param hints Hints for this node
* @param input Input relational expression
* @param period Timestamp expression which as the table was at the given time in the past
*/
public LogicalSnapshot(
RelOptCluster cluster,
RelTraitSet traitSet,
List<RelHint> hints,
RelNode input,
RexNode period) {
super(cluster, traitSet, hints, input, period);
}
/**
* Creates a LogicalSnapshot.
*
* <p>Use {@link #create} unless you know what you're doing.
*
* @param cluster Cluster that this relational expression belongs to
* @param traitSet The traits of this relational expression
* @param input Input relational expression
* @param period Timestamp expression which as the table was at the given time in the past
*/
public LogicalSnapshot(
RelOptCluster cluster, RelTraitSet traitSet, RelNode input, RexNode period) {
super(cluster, traitSet, ImmutableList.of(), input, period);
}
@Override
public Snapshot copy(RelTraitSet traitSet, RelNode input, RexNode period) {
return new LogicalSnapshot(getCluster(), traitSet, hints, input, period);
}
/** Creates a LogicalSnapshot. */
public static LogicalSnapshot create(RelNode input, RexNode period) {
final RelOptCluster cluster = input.getCluster();
final RelMetadataQuery mq = cluster.getMetadataQuery();
final RelTraitSet traitSet =
cluster.traitSet()
.replace(Convention.NONE)
.replaceIfs(
RelCollationTraitDef.INSTANCE,
() -> RelMdCollation.snapshot(mq, input))
.replaceIf(
RelDistributionTraitDef.INSTANCE,
() -> RelMdDistribution.snapshot(mq, input));
return new LogicalSnapshot(cluster, traitSet, input, period);
}
@Override
public boolean isValid(Litmus litmus, Context context) {
SqlTypeName periodTypeName = getPeriod().getType().getSqlTypeName();
if (!(periodTypeName == SqlTypeName.TIMESTAMP
|| periodTypeName == SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE)) {
return litmus.fail(
"The system time period specification expects TIMESTAMP or TIMESTAMP"
+ " WITH LOCAL TIME ZONE type but is '"
+ periodTypeName
+ "'");
}
return litmus.succeed();
}
@Override
public RelNode withHints(final List<RelHint> hintList) {
return new LogicalSnapshot(getCluster(), traitSet, hintList, input, getPeriod());
}
}
| LogicalSnapshot |
java | apache__maven | api/maven-api-core/src/main/java/org/apache/maven/api/services/Result.java | {
"start": 1715,
"end": 1941
} | interface ____<REQ extends Request<?>> {
/**
* Returns the request that produced this result.
*
* @return the originating request instance, never {@code null}
*/
@Nonnull
REQ getRequest();
}
| Result |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/resource/PromiseAdapter.java | {
"start": 172,
"end": 1210
} | class ____ {
/**
* Create a promise that emits a {@code Boolean} value on completion of the {@code future}
*
* @param future the future.
* @return Promise emitting a {@code Boolean} value. {@code true} if the {@code future} completed successfully, otherwise
* the cause wil be transported.
*/
static Promise<Boolean> toBooleanPromise(Future<?> future) {
DefaultPromise<Boolean> result = new DefaultPromise<>(GlobalEventExecutor.INSTANCE);
if (future.isDone() || future.isCancelled()) {
if (future.isSuccess()) {
result.setSuccess(true);
} else {
result.setFailure(future.cause());
}
return result;
}
future.addListener((GenericFutureListener<Future<Object>>) f -> {
if (f.isSuccess()) {
result.setSuccess(true);
} else {
result.setFailure(f.cause());
}
});
return result;
}
}
| PromiseAdapter |
java | elastic__elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/optimizer/Optimizer.java | {
"start": 13503,
"end": 15512
} | class ____ extends OptimizerRule<LimitWithOffset> {
CombineLimits() {
super(TransformDirection.UP);
}
@Override
protected LogicalPlan rule(LimitWithOffset limit) {
// bail out early
if (limit.child() instanceof LimitWithOffset == false) {
return limit;
}
LimitWithOffset primary = (LimitWithOffset) limit.child();
int primaryLimit = (Integer) primary.limit().fold();
int primaryOffset = primary.offset();
// +1 means ASC, -1 descending and 0 if there are no results
int sign = Integer.signum(primaryLimit);
int secondaryLimit = (Integer) limit.limit().fold();
if (limit.offset() != 0) {
throw new EqlIllegalArgumentException("Limits with different offset not implemented yet");
}
// for the same direction
if (primaryLimit > 0 && secondaryLimit > 0) {
// consider the minimum
primaryLimit = Math.min(primaryLimit, secondaryLimit);
} else if (primaryLimit < 0 && secondaryLimit < 0) {
primaryLimit = Math.max(primaryLimit, secondaryLimit);
} else {
// the secondary limit cannot go beyond the primary - if it does it gets ignored
if (MathUtils.abs(secondaryLimit) < MathUtils.abs(primaryLimit)) {
primaryOffset += MathUtils.abs(primaryLimit + secondaryLimit);
// preserve order
primaryLimit = MathUtils.abs(secondaryLimit) * sign;
}
}
Literal literal = new Literal(primary.limit().source(), primaryLimit, DataTypes.INTEGER);
return new LimitWithOffset(primary.source(), literal, primaryOffset, primary.child());
}
}
/**
* Any condition applied on a join/sequence/sample key, gets propagated to all rules.
*/
static | CombineLimits |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/annotation/ConfigurationClassEnhancer.java | {
"start": 9707,
"end": 9987
} | interface ____ extends Callback {
boolean isMatch(Method candidateMethod);
}
/**
* A {@link CallbackFilter} that works by interrogating {@link Callback Callbacks} in the order
* that they are defined via {@link ConditionalCallback}.
*/
private static | ConditionalCallback |
java | quarkusio__quarkus | test-framework/arquillian/src/test/java/io/quarkus/arquillian/test/SimpleTest.java | {
"start": 1834,
"end": 1904
} | class ____ {
@Inject
Config config;
}
}
| SimpleClass |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractEtag.java | {
"start": 1140,
"end": 1323
} | class ____ extends AbstractContractEtagTest {
@Override
protected AbstractFSContract createContract(Configuration conf) {
return new S3AContract(conf);
}
}
| ITestS3AContractEtag |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/InvokerBuilder.java | {
"start": 15852,
"end": 16972
} | class ____ declares the transformer
* @param methodName transformer method name
* @return this builder
* @throws IllegalArgumentException if {@code position} is less than zero or greater than
* or equal to the number of parameters declared by the target method
* @throws IllegalStateException if this method is called more than once with the same {@code position}
*/
public InvokerBuilder withArgumentTransformer(int position, Class<?> clazz, String methodName) {
if (position < 0 || position >= argumentTransformers.length) {
throw new IllegalArgumentException("Position must be >= 0 && < " + argumentTransformers.length);
}
if (argumentTransformers[position] != null) {
throw new IllegalStateException("Argument transformer already set for position " + position);
}
this.argumentTransformers[position] = new InvocationTransformer(InvocationTransformerKind.ARGUMENT, clazz, methodName);
return this;
}
/**
* Configures an output transformer for the return value.
*
* @param clazz | that |
java | apache__spark | launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java | {
"start": 20117,
"end": 23712
} | class ____ extends SparkSubmitOptionParser {
boolean isSpecialCommand = false;
private final boolean errorOnUnknownArgs;
OptionParser(boolean errorOnUnknownArgs) {
this.errorOnUnknownArgs = errorOnUnknownArgs;
}
@Override
protected boolean handle(String opt, String value) {
switch (opt) {
case MASTER -> master = value;
case REMOTE -> {
isRemote = true;
remote = value;
}
case DEPLOY_MODE -> deployMode = value;
case PROPERTIES_FILE -> propertiesFile = value;
case LOAD_SPARK_DEFAULTS -> loadSparkDefaults = true;
case DRIVER_MEMORY -> conf.put(SparkLauncher.DRIVER_MEMORY, value);
case DRIVER_JAVA_OPTIONS -> conf.put(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, value);
case DRIVER_LIBRARY_PATH -> conf.put(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH, value);
case DRIVER_DEFAULT_CLASS_PATH ->
conf.put(SparkLauncher.DRIVER_DEFAULT_EXTRA_CLASS_PATH, value);
case DRIVER_CLASS_PATH -> conf.put(SparkLauncher.DRIVER_EXTRA_CLASSPATH, value);
case CONF -> {
checkArgument(value != null, "Missing argument to %s", CONF);
String[] setConf = value.split("=", 2);
checkArgument(setConf.length == 2, "Invalid argument to %s: %s", CONF, value);
conf.put(setConf[0], setConf[1]);
}
case CLASS -> {
// The special classes require some special command line handling, since they allow
// mixing spark-submit arguments with arguments that should be propagated to the shell
// itself. Note that for this to work, the "--class" argument must come before any
// non-spark-submit arguments.
mainClass = value;
if (specialClasses.containsKey(value)) {
allowsMixedArguments = true;
appResource = specialClasses.get(value);
}
}
case KILL_SUBMISSION, STATUS -> {
isSpecialCommand = true;
parsedArgs.add(opt);
parsedArgs.add(value);
}
case HELP, USAGE_ERROR, VERSION -> {
isSpecialCommand = true;
parsedArgs.add(opt);
}
default -> {
parsedArgs.add(opt);
if (value != null) {
parsedArgs.add(value);
}
}
}
return true;
}
@Override
protected boolean handleUnknown(String opt) {
// When mixing arguments, add unrecognized parameters directly to the user arguments list. In
// normal mode, any unrecognized parameter triggers the end of command line parsing, and the
// parameter itself will be interpreted by SparkSubmit as the application resource. The
// remaining params will be appended to the list of SparkSubmit arguments.
if (allowsMixedArguments) {
appArgs.add(opt);
return true;
} else if (isExample) {
String className = opt;
if (!className.startsWith(EXAMPLE_CLASS_PREFIX)) {
className = EXAMPLE_CLASS_PREFIX + className;
}
mainClass = className;
appResource = findExamplesAppJar();
return false;
} else if (errorOnUnknownArgs) {
checkArgument(!opt.startsWith("-"), "Unrecognized option: %s", opt);
checkState(appResource == null, "Found unrecognized argument but resource is already set.");
appResource = opt;
return false;
}
return true;
}
@Override
protected void handleExtraArgs(List<String> extra) {
appArgs.addAll(extra);
}
}
}
| OptionParser |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/basic/RegisterUserEventListenersTest.java | {
"start": 1267,
"end": 2105
} | class ____ {
@Test
@JiraKey(value = "HHH-7478")
public void testTransactionProcessSynchronization(SessionFactoryScope scope) {
final EventListenerRegistry registry = scope.getSessionFactory().getEventEngine().getListenerRegistry();
final CountingPostInsertTransactionBoundaryListener listener = new CountingPostInsertTransactionBoundaryListener();
registry.getEventListenerGroup( EventType.POST_INSERT ).appendListener( listener );
scope.inTransaction( session -> {
StrTestEntity entity = new StrTestEntity( "str1" );
session.persist( entity );
} );
// Post insert listener invoked three times - before/after insertion of original data,
// revision entity and audit row.
assertEquals( 3, listener.getBeforeCount() );
assertEquals( 3, listener.getAfterCount() );
}
private static | RegisterUserEventListenersTest |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/appender/mom/JmsManager.java | {
"start": 2119,
"end": 4599
} | class ____ {
private final Properties jndiProperties;
private final String connectionFactoryName;
private final String destinationName;
private final String userName;
private final char[] password;
private final boolean immediateFail;
private final boolean retry;
private final long reconnectIntervalMillis;
JmsManagerConfiguration(
final Properties jndiProperties,
final String connectionFactoryName,
final String destinationName,
final String userName,
final char[] password,
final boolean immediateFail,
final long reconnectIntervalMillis) {
this.jndiProperties = jndiProperties;
this.connectionFactoryName = connectionFactoryName;
this.destinationName = destinationName;
this.userName = userName;
this.password = password;
this.immediateFail = immediateFail;
this.reconnectIntervalMillis = reconnectIntervalMillis;
this.retry = reconnectIntervalMillis > 0;
}
public String getConnectionFactoryName() {
return connectionFactoryName;
}
public String getDestinationName() {
return destinationName;
}
public JndiManager getJndiManager() {
return JndiManager.getJndiManager(getJndiProperties());
}
public Properties getJndiProperties() {
return jndiProperties;
}
public char[] getPassword() {
return password;
}
public long getReconnectIntervalMillis() {
return reconnectIntervalMillis;
}
public String getUserName() {
return userName;
}
public boolean isImmediateFail() {
return immediateFail;
}
public boolean isRetry() {
return retry;
}
@Override
public String toString() {
return "JmsManagerConfiguration [jndiProperties=" + jndiProperties + ", connectionFactoryName="
+ connectionFactoryName + ", destinationName=" + destinationName + ", userName=" + userName
+ ", immediateFail=" + immediateFail + ", retry=" + retry + ", reconnectIntervalMillis="
+ reconnectIntervalMillis + "]";
}
}
private static | JmsManagerConfiguration |
java | apache__camel | components/camel-bindy/src/main/java/org/apache/camel/dataformat/bindy/annotation/Link.java | {
"start": 1175,
"end": 1359
} | interface ____ {
/**
* Type of link identifying the relation between the classes
*
* @return LinkType
*/
LinkType linkType() default LinkType.OneToOne;
}
| Link |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/RefreshQueuesRequest.java | {
"start": 1199,
"end": 1811
} | class ____ {
@Public
@Stable
public static RefreshQueuesRequest newInstance() {
RefreshQueuesRequest request =
Records.newRecord(RefreshQueuesRequest.class);
return request;
}
@Public
@Stable
public static RefreshQueuesRequest newInstance(String subClusterId) {
RefreshQueuesRequest request = Records.newRecord(RefreshQueuesRequest.class);
request.setSubClusterId(subClusterId);
return request;
}
@Public
@Unstable
public abstract String getSubClusterId();
@Private
@Unstable
public abstract void setSubClusterId(String subClusterId);
}
| RefreshQueuesRequest |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/columnar/vector/writable/WritableBytesVector.java | {
"start": 1042,
"end": 1444
} | interface ____ extends WritableColumnVector, BytesColumnVector {
/**
* Append byte[] at rowId with the provided value. Note: Must append values according to the
* order of rowId, can not random append.
*/
void appendBytes(int rowId, byte[] value, int offset, int length);
/** Fill the column vector with the provided value. */
void fill(byte[] value);
}
| WritableBytesVector |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/AntPathMatcher.java | {
"start": 27310,
"end": 29542
} | class ____ implements Comparator<String> {
private final String path;
private final String pathSeparator;
public AntPatternComparator(String path) {
this(path, DEFAULT_PATH_SEPARATOR);
}
public AntPatternComparator(String path, String pathSeparator) {
this.path = path;
this.pathSeparator = pathSeparator;
}
/**
* Compare two patterns to determine which should match first, i.e. which
* is the most specific regarding the current path.
* @return a negative integer, zero, or a positive integer as pattern1 is
* more specific, equally specific, or less specific than pattern2.
*/
@Override
public int compare(String pattern1, String pattern2) {
PatternInfo info1 = new PatternInfo(pattern1, this.pathSeparator);
PatternInfo info2 = new PatternInfo(pattern2, this.pathSeparator);
if (info1.isLeastSpecific() && info2.isLeastSpecific()) {
return 0;
}
else if (info1.isLeastSpecific()) {
return 1;
}
else if (info2.isLeastSpecific()) {
return -1;
}
boolean pattern1EqualsPath = pattern1.equals(this.path);
boolean pattern2EqualsPath = pattern2.equals(this.path);
if (pattern1EqualsPath && pattern2EqualsPath) {
return 0;
}
else if (pattern1EqualsPath) {
return -1;
}
else if (pattern2EqualsPath) {
return 1;
}
if (info1.isPrefixPattern() && info2.isPrefixPattern()) {
return info2.getLength() - info1.getLength();
}
else if (info1.isPrefixPattern() && info2.getDoubleWildcards() == 0) {
return 1;
}
else if (info2.isPrefixPattern() && info1.getDoubleWildcards() == 0) {
return -1;
}
if (info1.getTotalCount() != info2.getTotalCount()) {
return info1.getTotalCount() - info2.getTotalCount();
}
if (info1.getLength() != info2.getLength()) {
return info2.getLength() - info1.getLength();
}
if (info1.getSingleWildcards() < info2.getSingleWildcards()) {
return -1;
}
else if (info2.getSingleWildcards() < info1.getSingleWildcards()) {
return 1;
}
if (info1.getUriVars() < info2.getUriVars()) {
return -1;
}
else if (info2.getUriVars() < info1.getUriVars()) {
return 1;
}
return 0;
}
/**
* Value | AntPatternComparator |
java | apache__dubbo | dubbo-cluster/src/main/java/org/apache/dubbo/rpc/cluster/support/wrapper/AbstractCluster.java | {
"start": 3579,
"end": 5655
} | class ____<T> extends AbstractClusterInvoker<T> {
private final ClusterInvoker<T> filterInvoker;
public ClusterFilterInvoker(AbstractClusterInvoker<T> invoker) {
List<FilterChainBuilder> builders = ScopeModelUtil.getApplicationModel(
invoker.getUrl().getScopeModel())
.getExtensionLoader(FilterChainBuilder.class)
.getActivateExtensions();
if (CollectionUtils.isEmpty(builders)) {
filterInvoker = invoker;
} else {
ClusterInvoker<T> tmpInvoker = invoker;
for (FilterChainBuilder builder : builders) {
tmpInvoker = builder.buildClusterInvokerChain(
tmpInvoker, REFERENCE_FILTER_KEY, CommonConstants.CONSUMER);
}
filterInvoker = tmpInvoker;
}
}
@Override
public Result invoke(Invocation invocation) throws RpcException {
return filterInvoker.invoke(invocation);
}
@Override
public Directory<T> getDirectory() {
return filterInvoker.getDirectory();
}
@Override
public URL getRegistryUrl() {
return filterInvoker.getRegistryUrl();
}
@Override
public boolean isDestroyed() {
return filterInvoker.isDestroyed();
}
@Override
public URL getUrl() {
return filterInvoker.getUrl();
}
/**
* The only purpose is to build a interceptor chain, so the cluster related logic doesn't matter.
* Use ClusterInvoker<T> to replace AbstractClusterInvoker<T> in the future.
*/
@Override
protected Result doInvoke(Invocation invocation, List<Invoker<T>> invokers, LoadBalance loadbalance)
throws RpcException {
return null;
}
public ClusterInvoker<T> getFilterInvoker() {
return filterInvoker;
}
}
static | ClusterFilterInvoker |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/TableAggregateFunction.java | {
"start": 1356,
"end": 2952
} | class ____ a user-defined table aggregate function. A user-defined table aggregate function
* maps scalar values of multiple rows to zero, one, or multiple rows (or structured types). If an
* output record consists of only one field, the structured record can be omitted, and a scalar
* value can be emitted that will be implicitly wrapped into a row by the runtime.
*
* <p>Similar to an {@link AggregateFunction}, the behavior of a {@link TableAggregateFunction} is
* centered around the concept of an accumulator. The accumulator is an intermediate data structure
* that stores the aggregated values until a final aggregation result is computed.
*
* <p>For each set of rows that needs to be aggregated, the runtime will create an empty accumulator
* by calling {@link #createAccumulator()}. Subsequently, the {@code accumulate()} method of the
* function is called for each input row to update the accumulator. Once all rows have been
* processed, the {@code emitValue()} or {@code emitUpdateWithRetract()} method of the function is
* called to compute and return the final result.
*
* <p>The main behavior of an {@link TableAggregateFunction} can be defined by implementing a custom
* accumulate method. An accumulate method must be declared publicly, not static, and named <code>
* accumulate</code>. Accumulate methods can also be overloaded by implementing multiple methods
* named <code>accumulate</code>.
*
* <p>By default, input, accumulator, and output data types are automatically extracted using
* reflection. This includes the generic argument {@code ACC} of the | for |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/LocalExec.java | {
"start": 789,
"end": 1944
} | class ____ extends LeafExec {
private final Executable executable;
public LocalExec(Source source, Executable executable) {
super(source);
this.executable = executable;
}
@Override
protected NodeInfo<LocalExec> info() {
return NodeInfo.create(this, LocalExec::new, executable);
}
public Executable executable() {
return executable;
}
@Override
public List<Attribute> output() {
return executable.output();
}
public boolean isEmpty() {
return executable instanceof EmptyExecutable;
}
@Override
public void execute(SqlSession session, ActionListener<Page> listener) {
executable.execute(session, listener);
}
@Override
public int hashCode() {
return Objects.hash(executable);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
LocalExec other = (LocalExec) obj;
return Objects.equals(executable, other.executable);
}
}
| LocalExec |
java | spring-projects__spring-boot | module/spring-boot-cassandra/src/main/java/org/springframework/boot/cassandra/autoconfigure/DriverConfigLoaderBuilderCustomizer.java | {
"start": 855,
"end": 1114
} | interface ____ can be implemented by beans wishing to customize the
* {@link DriverConfigLoaderBuilderCustomizer} to fine-tune its auto-configuration before
* it creates a {@link DriverConfigLoader}.
*
* @author Stephane Nicoll
* @since 4.0.0
*/
public | that |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/framework/ResteasyReactiveTestClassLoader.java | {
"start": 481,
"end": 3229
} | class ____ extends URLClassLoader {
final Map<String, byte[]> generatedClasses;
final Map<String, List<BiFunction<String, ClassVisitor, ClassVisitor>>> transformers;
public ResteasyReactiveTestClassLoader(URL[] urls, ClassLoader parent, List<GeneratedClass> generatedClasses,
Map<String, List<BiFunction<String, ClassVisitor, ClassVisitor>>> transformers) {
super(urls, parent);
this.transformers = transformers;
this.generatedClasses = new HashMap<>();
for (var i : generatedClasses) {
this.generatedClasses.put(i.getName(), i.getData());
}
}
@Override
protected Class<?> findClass(String name) throws ClassNotFoundException {
var loaded = findLoadedClass(name);
if (loaded != null) {
return loaded;
}
if (generatedClasses.containsKey(name)) {
byte[] data = generatedClasses.get(name);
return defineClass(name, data, 0, data.length);
}
if (transformers.containsKey(name)) {
try (InputStream resource = super.getResourceAsStream(name.replace(".", "/") + ".class")) {
if (resource != null) {
byte[] data;
ClassReader cr = new ClassReader(resource.readAllBytes());
ClassWriter writer = new ClassWriter(cr,
ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS);
ClassVisitor visitor = writer;
for (BiFunction<String, ClassVisitor, ClassVisitor> i : transformers.get(name)) {
visitor = i.apply(name, visitor);
}
cr.accept(visitor, 0);
data = writer.toByteArray();
return defineClass(name, data, 0, data.length);
} else {
throw new RuntimeException(
"Could not find " + name + " to transform, make sure it is added to the test archive");
}
} catch (IOException e) {
return super.findClass(name);
}
}
return super.findClass(name);
}
@Override
public Class<?> loadClass(String name) throws ClassNotFoundException {
try {
return findClass(name);
} catch (ClassNotFoundException e) {
return super.loadClass(name);
}
}
@Override
protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
try {
return findClass(name);
} catch (ClassNotFoundException e) {
return super.loadClass(name, resolve);
}
}
}
| ResteasyReactiveTestClassLoader |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/builder/ParameterExpression.java | {
"start": 1169,
"end": 4170
} | class ____ extends HashMap<String, String> {
private static final long serialVersionUID = -2417552199605158680L;
public ParameterExpression(String expression) {
parse(expression);
}
private void parse(String expression) {
int p = skipWS(expression, 0);
if (expression.charAt(p) == '(') {
expression(expression, p + 1);
} else {
property(expression, p);
}
}
private void expression(String expression, int left) {
int match = 1;
int right = left + 1;
while (match > 0) {
if (expression.charAt(right) == ')') {
match--;
} else if (expression.charAt(right) == '(') {
match++;
}
right++;
}
put("expression", expression.substring(left, right - 1));
jdbcTypeOpt(expression, right);
}
private void property(String expression, int left) {
if (left < expression.length()) {
int right = skipUntil(expression, left, ",:");
put("property", trimmedStr(expression, left, right));
jdbcTypeOpt(expression, right);
}
}
private int skipWS(String expression, int p) {
for (int i = p; i < expression.length(); i++) {
if (expression.charAt(i) > 0x20) {
return i;
}
}
return expression.length();
}
private int skipUntil(String expression, int p, final String endChars) {
for (int i = p; i < expression.length(); i++) {
char c = expression.charAt(i);
if (endChars.indexOf(c) > -1) {
return i;
}
}
return expression.length();
}
private void jdbcTypeOpt(String expression, int p) {
p = skipWS(expression, p);
if (p < expression.length()) {
if (expression.charAt(p) == ':') {
jdbcType(expression, p + 1);
} else if (expression.charAt(p) == ',') {
option(expression, p + 1);
} else {
throw new BuilderException("Parsing error in {" + expression + "} in position " + p);
}
}
}
private void jdbcType(String expression, int p) {
int left = skipWS(expression, p);
int right = skipUntil(expression, left, ",");
if (right <= left) {
throw new BuilderException("Parsing error in {" + expression + "} in position " + p);
}
put("jdbcType", trimmedStr(expression, left, right));
option(expression, right + 1);
}
private void option(String expression, int p) {
int left = skipWS(expression, p);
if (left < expression.length()) {
int right = skipUntil(expression, left, "=");
String name = trimmedStr(expression, left, right);
left = right + 1;
right = skipUntil(expression, left, ",");
String value = trimmedStr(expression, left, right);
put(name, value);
option(expression, right + 1);
}
}
private String trimmedStr(String str, int start, int end) {
while (str.charAt(start) <= 0x20) {
start++;
}
while (str.charAt(end - 1) <= 0x20) {
end--;
}
return start >= end ? "" : str.substring(start, end);
}
}
| ParameterExpression |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/FileUploadHandler.java | {
"start": 3489,
"end": 14461
} | class ____ extends SimpleChannelInboundHandler<HttpObject> {
private static final Logger LOG = LoggerFactory.getLogger(FileUploadHandler.class);
public static final String HTTP_ATTRIBUTE_REQUEST = "request";
private static final AttributeKey<FileUploads> UPLOADED_FILES =
AttributeKey.valueOf("UPLOADED_FILES");
private static final HttpDataFactory DATA_FACTORY = new DefaultHttpDataFactory(true);
private final Path uploadDir;
private final MultipartRoutes multipartRoutes;
private HttpPostRequestDecoder currentHttpPostRequestDecoder;
private HttpRequest currentHttpRequest;
private byte[] currentJsonPayload;
private Path currentUploadDir;
public FileUploadHandler(final Path uploadDir, final MultipartRoutes multipartRoutes) {
super(true);
// the clean up of temp files when jvm exits is handled by
// org.apache.flink.util.ShutdownHookUtil; thus,
// it's no need to register those files (post chunks and upload file chunks) to
// java.io.DeleteOnExitHook
// which may lead to memory leak.
DiskAttribute.deleteOnExitTemporaryFile = false;
DiskFileUpload.deleteOnExitTemporaryFile = false;
DiskFileUpload.baseDirectory = uploadDir.normalize().toAbsolutePath().toString();
// share the same directory with file upload for post chunks storage.
DiskAttribute.baseDirectory = DiskFileUpload.baseDirectory;
this.uploadDir = requireNonNull(uploadDir);
this.multipartRoutes = requireNonNull(multipartRoutes);
}
@Override
protected void channelRead0(final ChannelHandlerContext ctx, final HttpObject msg)
throws Exception {
try {
if (msg instanceof HttpRequest) {
final HttpRequest httpRequest = (HttpRequest) msg;
LOG.trace(
"Received request. URL:{} Method:{}",
httpRequest.uri(),
httpRequest.method());
if (httpRequest.method().equals(HttpMethod.POST)) {
if (HttpPostRequestDecoder.isMultipart(httpRequest)) {
LOG.trace("Initializing multipart file upload.");
checkState(currentHttpPostRequestDecoder == null);
checkState(currentHttpRequest == null);
checkState(currentUploadDir == null);
currentHttpPostRequestDecoder =
new HttpPostRequestDecoder(DATA_FACTORY, httpRequest);
currentHttpRequest = ReferenceCountUtil.retain(httpRequest);
// We check this after initializing the multipart file upload in order for
// handleError to work correctly.
if (!multipartRoutes.isPostRoute(httpRequest.uri())) {
LOG.trace("POST request not allowed for {}.", httpRequest.uri());
handleError(
ctx,
"POST request not allowed",
HttpResponseStatus.BAD_REQUEST,
null);
return;
}
// make sure that we still have a upload dir in case that it got deleted in
// the meanwhile
RestServerEndpoint.createUploadDir(uploadDir, LOG, false);
currentUploadDir =
Files.createDirectory(
uploadDir.resolve(UUID.randomUUID().toString()));
} else {
ctx.fireChannelRead(ReferenceCountUtil.retain(msg));
}
} else {
ctx.fireChannelRead(ReferenceCountUtil.retain(msg));
}
} else if (msg instanceof HttpContent && currentHttpPostRequestDecoder != null) {
LOG.trace("Received http content.");
// make sure that we still have a upload dir in case that it got deleted in the
// meanwhile
RestServerEndpoint.createUploadDir(uploadDir, LOG, false);
final HttpContent httpContent = (HttpContent) msg;
currentHttpPostRequestDecoder.offer(httpContent);
while (httpContent != LastHttpContent.EMPTY_LAST_CONTENT
&& hasNext(currentHttpPostRequestDecoder)) {
final InterfaceHttpData data = currentHttpPostRequestDecoder.next();
if (data.getHttpDataType() == InterfaceHttpData.HttpDataType.FileUpload) {
HttpRequest httpRequest = currentHttpRequest;
if (!multipartRoutes.isFileUploadRoute(httpRequest.uri())) {
LOG.trace("File upload not allowed for {}.", httpRequest.uri());
handleError(
ctx,
"File upload not allowed",
HttpResponseStatus.BAD_REQUEST,
null);
return;
}
final DiskFileUpload fileUpload = (DiskFileUpload) data;
checkState(fileUpload.isCompleted());
// wrapping around another File instantiation is a simple way to remove any
// path information - we're
// solely interested in the filename
final Path dest =
currentUploadDir.resolve(
new File(fileUpload.getFilename()).getName());
fileUpload.renameTo(dest.toFile());
LOG.trace(
"Upload of file {} into destination {} complete.",
fileUpload.getFilename(),
dest.toString());
} else if (data.getHttpDataType() == InterfaceHttpData.HttpDataType.Attribute) {
final Attribute request = (Attribute) data;
// this could also be implemented by using the first found Attribute as the
// payload
LOG.trace("Upload of attribute {} complete.", request.getName());
if (data.getName().equals(HTTP_ATTRIBUTE_REQUEST)) {
currentJsonPayload = request.get();
} else {
handleError(
ctx,
"Received unknown attribute " + data.getName() + '.',
HttpResponseStatus.BAD_REQUEST,
null);
return;
}
}
}
if (httpContent instanceof LastHttpContent) {
LOG.trace("Finalizing multipart file upload.");
ctx.channel().attr(UPLOADED_FILES).set(new FileUploads(currentUploadDir));
if (currentJsonPayload != null) {
currentHttpRequest
.headers()
.set(HttpHeaderNames.CONTENT_LENGTH, currentJsonPayload.length);
currentHttpRequest
.headers()
.set(HttpHeaderNames.CONTENT_TYPE, RestConstants.REST_CONTENT_TYPE);
ctx.fireChannelRead(currentHttpRequest);
ctx.fireChannelRead(
httpContent.replace(Unpooled.wrappedBuffer(currentJsonPayload)));
} else {
currentHttpRequest.headers().set(HttpHeaderNames.CONTENT_LENGTH, 0);
currentHttpRequest.headers().remove(HttpHeaderNames.CONTENT_TYPE);
ctx.fireChannelRead(currentHttpRequest);
ctx.fireChannelRead(LastHttpContent.EMPTY_LAST_CONTENT);
}
reset();
}
} else {
ctx.fireChannelRead(ReferenceCountUtil.retain(msg));
}
} catch (Exception e) {
handleError(ctx, "File upload failed.", HttpResponseStatus.INTERNAL_SERVER_ERROR, e);
}
}
private static boolean hasNext(HttpPostRequestDecoder decoder) {
try {
return decoder.hasNext();
} catch (HttpPostRequestDecoder.EndOfDataDecoderException e) {
// this can occur if the final chuck wasn't empty, but didn't contain any attribute data
// unfortunately the Netty APIs don't give us any way to check this
return false;
}
}
private void handleError(
ChannelHandlerContext ctx,
String errorMessage,
HttpResponseStatus responseStatus,
@Nullable Throwable e) {
HttpRequest tmpRequest = currentHttpRequest;
deleteUploadedFiles();
reset();
LOG.warn(errorMessage, e);
HandlerUtils.sendErrorResponse(
ctx,
tmpRequest,
new ErrorResponseBody(errorMessage),
responseStatus,
Collections.emptyMap());
ReferenceCountUtil.release(tmpRequest);
}
private void deleteUploadedFiles() {
if (currentUploadDir != null) {
try {
FileUtils.deleteDirectory(currentUploadDir.toFile());
} catch (IOException e) {
LOG.warn("Could not cleanup uploaded files.", e);
}
}
}
private void reset() {
// destroy() can fail because some data is stored multiple times in the decoder causing an
// IllegalReferenceCountException
// see https://github.com/netty/netty/issues/7814
try {
currentHttpPostRequestDecoder.getBodyHttpDatas().clear();
} catch (HttpPostRequestDecoder.NotEnoughDataDecoderException ned) {
// this method always fails if not all chunks were offered to the decoder yet
LOG.debug("Error while resetting handler.", ned);
}
currentHttpPostRequestDecoder.destroy();
currentHttpPostRequestDecoder = null;
currentHttpRequest = null;
currentUploadDir = null;
currentJsonPayload = null;
}
public static FileUploads getMultipartFileUploads(ChannelHandlerContext ctx) {
return Optional.ofNullable(ctx.channel().attr(UPLOADED_FILES).getAndSet(null))
.orElse(FileUploads.EMPTY);
}
}
| FileUploadHandler |
java | spring-projects__spring-framework | spring-websocket/src/test/java/org/springframework/web/socket/adapter/jetty/JettyWebSocketSessionTests.java | {
"start": 1296,
"end": 3604
} | class ____ {
private final Map<String, Object> attributes = Map.of();
private final UpgradeRequest request = mock();
private final UpgradeResponse response = mock();
private final Session nativeSession = mock();
@Test
@SuppressWarnings("resource")
void getPrincipalWithConstructorArg() {
TestPrincipal user = new TestPrincipal("joe");
JettyWebSocketSession session = new JettyWebSocketSession(attributes, user);
assertThat(session.getPrincipal()).isSameAs(user);
}
@Test
@SuppressWarnings("resource")
void getPrincipalFromNativeSession() {
TestPrincipal user = new TestPrincipal("joe");
given(request.getUserPrincipal()).willReturn(user);
given(response.getAcceptedSubProtocol()).willReturn(null);
given(nativeSession.getUpgradeRequest()).willReturn(request);
given(nativeSession.getUpgradeResponse()).willReturn(response);
JettyWebSocketSession session = new JettyWebSocketSession(attributes);
session.initializeNativeSession(nativeSession);
reset(nativeSession);
assertThat(session.getPrincipal()).isSameAs(user);
verifyNoMoreInteractions(nativeSession);
}
@Test
@SuppressWarnings("resource")
void getPrincipalNotAvailable() {
given(request.getUserPrincipal()).willReturn(null);
given(response.getAcceptedSubProtocol()).willReturn(null);
given(nativeSession.getUpgradeRequest()).willReturn(request);
given(nativeSession.getUpgradeResponse()).willReturn(response);
JettyWebSocketSession session = new JettyWebSocketSession(attributes);
session.initializeNativeSession(nativeSession);
reset(nativeSession);
assertThat(session.getPrincipal()).isNull();
verifyNoMoreInteractions(nativeSession);
}
@Test
@SuppressWarnings("resource")
void getAcceptedProtocol() {
String protocol = "foo";
given(request.getUserPrincipal()).willReturn(null);
given(response.getAcceptedSubProtocol()).willReturn(protocol);
given(nativeSession.getUpgradeRequest()).willReturn(request);
given(nativeSession.getUpgradeResponse()).willReturn(response);
JettyWebSocketSession session = new JettyWebSocketSession(attributes);
session.initializeNativeSession(nativeSession);
reset(nativeSession);
assertThat(session.getAcceptedProtocol()).isSameAs(protocol);
verifyNoMoreInteractions(nativeSession);
}
}
| JettyWebSocketSessionTests |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_hifor_issue_511.java | {
"start": 4523,
"end": 4699
} | class ____ {
String glide;
String name;
String office;
String mark;
String duty;
byte[] pic;
}
public static | VW_NRE_Doctor |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-jackson/deployment/src/test/java/io/quarkus/resteasy/reactive/jackson/deployment/test/ContextResolverTest.java | {
"start": 2468,
"end": 2859
} | class ____ implements ContextResolver<ObjectMapper> {
@Override
public ObjectMapper getContext(Class<?> type) {
if (!type.isAssignableFrom(Type.class)) {
return null;
}
ObjectMapper result = new ObjectMapper();
result.enable(WRITE_ENUMS_USING_INDEX);
return result;
}
}
}
| TypeContextResolver |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/api/condition/DisabledIfSystemPropertyConditionTests.java | {
"start": 853,
"end": 3700
} | class ____ extends AbstractExecutionConditionTests {
@Override
protected ExecutionCondition getExecutionCondition() {
return new DisabledIfSystemPropertyCondition();
}
@Override
protected Class<?> getTestClass() {
return DisabledIfSystemPropertyIntegrationTests.class;
}
@BeforeAll
static void setSystemProperties() {
DisabledIfSystemPropertyIntegrationTests.setSystemProperties();
}
@AfterAll
static void clearSystemProperties() {
DisabledIfSystemPropertyIntegrationTests.clearSystemProperties();
}
/**
* @see DisabledIfSystemPropertyIntegrationTests#enabledBecauseAnnotationIsNotPresent()
*/
@Test
void enabledBecauseAnnotationIsNotPresent() {
evaluateCondition();
assertEnabled();
assertReasonContains("No @DisabledIfSystemProperty conditions resulting in 'disabled' execution encountered");
}
/**
* @see DisabledIfSystemPropertyIntegrationTests#blankNamedAttribute()
*/
@Test
void blankNamedAttribute() {
assertPreconditionViolationNotBlankFor("The 'named' attribute", this::evaluateCondition);
}
/**
* @see DisabledIfSystemPropertyIntegrationTests#blankMatchesAttribute()
*/
@Test
void blankMatchesAttribute() {
assertPreconditionViolationNotBlankFor("The 'matches' attribute", this::evaluateCondition);
}
/**
* @see DisabledIfSystemPropertyIntegrationTests#disabledBecauseSystemPropertyMatchesExactly()
*/
@Test
void disabledBecauseSystemPropertyMatchesExactly() {
evaluateCondition();
assertDisabled();
assertReasonContains("matches regular expression");
assertCustomDisabledReasonIs("That's an enigma");
}
/**
* @see DisabledIfSystemPropertyIntegrationTests#disabledBecauseSystemPropertyForComposedAnnotationMatchesExactly()
*/
@Test
void disabledBecauseSystemPropertyForComposedAnnotationMatchesExactly() {
evaluateCondition();
assertDisabled();
assertReasonContains("matches regular expression");
}
/**
* @see DisabledIfSystemPropertyIntegrationTests#disabledBecauseSystemPropertyMatchesPattern()
*/
@Test
void disabledBecauseSystemPropertyMatchesPattern() {
evaluateCondition();
assertDisabled();
assertReasonContains("matches regular expression");
}
/**
* @see DisabledIfSystemPropertyIntegrationTests#enabledBecauseSystemPropertyDoesNotMatch()
*/
@Test
void enabledBecauseSystemPropertyDoesNotMatch() {
evaluateCondition();
assertEnabled();
assertReasonContains("No @DisabledIfSystemProperty conditions resulting in 'disabled' execution encountered");
}
/**
* @see DisabledIfSystemPropertyIntegrationTests#enabledBecauseSystemPropertyDoesNotExist()
*/
@Test
void enabledBecauseSystemPropertyDoesNotExist() {
evaluateCondition();
assertEnabled();
assertReasonContains("No @DisabledIfSystemProperty conditions resulting in 'disabled' execution encountered");
}
}
| DisabledIfSystemPropertyConditionTests |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtStringClaimValidator.java | {
"start": 1502,
"end": 5291
} | class ____ implements JwtFieldValidator {
// Allows any non-null value for the sub claim
public static final JwtStringClaimValidator ALLOW_ALL_SUBJECTS = new JwtStringClaimValidator("sub", true, List.of(), List.of("*"));
private final String claimName;
// Whether the claim should be a single string
private final boolean singleValuedClaim;
@Nullable
private final Map<String, String> fallbackClaimNames;
private final Predicate<String> allowedClaimValuesPredicate;
public JwtStringClaimValidator(
String claimName,
boolean singleValuedClaim,
Collection<String> allowedClaimValues,
Collection<String> allowedClaimValuePatterns
) {
this(claimName, singleValuedClaim, null, allowedClaimValues, allowedClaimValuePatterns);
}
public JwtStringClaimValidator(
String claimName,
boolean singleValuedClaim,
Map<String, String> fallbackClaimNames,
Collection<String> allowedClaimValues,
Collection<String> allowedClaimValuePatterns
) {
assert allowedClaimValues != null : "allowed claim values should be empty rather than null";
assert allowedClaimValuePatterns != null : "allowed claim value patterns should be empty rather than null";
this.claimName = claimName;
this.singleValuedClaim = singleValuedClaim;
this.fallbackClaimNames = fallbackClaimNames;
this.allowedClaimValuesPredicate = new Predicate<>() {
private final Set<String> allowedClaimsSet = new HashSet<>(allowedClaimValues);
private final Predicate<String> allowedClaimPatternsPredicate = predicateFromPatterns(claimName, allowedClaimValuePatterns);
@Override
public boolean test(String s) {
return allowedClaimsSet.contains(s) || allowedClaimPatternsPredicate.test(s);
}
@Override
public String toString() {
return "[" + Strings.collectionToCommaDelimitedString(allowedClaimsSet) + "] || [" + allowedClaimPatternsPredicate + "]";
}
};
}
@Override
public void validate(JWSHeader jwsHeader, JWTClaimsSet jwtClaimsSet) {
final FallbackableClaim fallbackableClaim = new FallbackableClaim(claimName, fallbackClaimNames, jwtClaimsSet);
final List<String> claimValues = getStringClaimValues(fallbackableClaim);
if (claimValues == null) {
throw new IllegalArgumentException("missing required string claim [" + fallbackableClaim + "]");
}
for (String claimValue : claimValues) {
if (allowedClaimValuesPredicate.test(claimValue)) {
return;
}
}
throw new IllegalArgumentException(
"string claim ["
+ fallbackableClaim
+ "] has value ["
+ Strings.collectionToCommaDelimitedString(claimValues)
+ "] which does not match allowed claim values "
+ allowedClaimValuesPredicate
);
}
private List<String> getStringClaimValues(FallbackableClaim fallbackableClaim) {
if (singleValuedClaim) {
final String claimValue = fallbackableClaim.getStringClaimValue();
return claimValue != null ? List.of(claimValue) : null;
} else {
return fallbackableClaim.getStringListClaimValue();
}
}
private static Predicate<String> predicateFromPatterns(String claimName, Collection<String> patterns) {
try {
return Automatons.predicate(patterns);
} catch (Exception e) {
throw new SettingsException("Invalid patterns for allowed claim values for [" + claimName + "].", e);
}
}
}
| JwtStringClaimValidator |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/statistics/TestMeanStatistic.java | {
"start": 1160,
"end": 6184
} | class ____ extends AbstractHadoopTestBase {
private static final Logger LOG =
LoggerFactory.getLogger(TestMeanStatistic.class);
private static final int TEN = 10;
private static final double ZEROD = 0.0d;
private static final double TEND = 10.0d;
private final MeanStatistic empty = new MeanStatistic(0, 0);
private final MeanStatistic tenFromOne = new MeanStatistic(1, TEN);
private final MeanStatistic tenFromTen = new MeanStatistic(TEN, TEN);
@Test
public void testEmptiness() throws Throwable {
assertThat(empty)
.matches(MeanStatistic::isEmpty, "is empty")
.isEqualTo(new MeanStatistic(0, TEN))
.isEqualTo(new MeanStatistic())
.isNotEqualTo(tenFromOne);
assertThat(empty.mean())
.isEqualTo(ZEROD);
assertThat(empty.toString())
.contains("0.0");
}
@Test
public void testTenFromOne() throws Throwable {
assertThat(tenFromOne)
.matches(p -> !p.isEmpty(), "is not empty")
.isEqualTo(tenFromOne)
.isNotEqualTo(tenFromTen);
assertThat(tenFromOne.mean())
.isEqualTo(TEND);
}
@Test
public void testNegativeSamplesAreEmpty() throws Throwable {
MeanStatistic stat = new MeanStatistic(-10, 1);
assertThat(stat)
.describedAs("stat with negative samples")
.matches(MeanStatistic::isEmpty, "is empty")
.isEqualTo(empty)
.extracting(MeanStatistic::mean)
.isEqualTo(ZEROD);
assertThat(stat.toString())
.contains("0.0");
}
@Test
public void testCopyNonEmpty() throws Throwable {
MeanStatistic stat = tenFromOne.copy();
assertThat(stat)
.describedAs("copy of " + tenFromOne)
.isEqualTo(tenFromOne)
.isNotSameAs(tenFromOne);
}
@Test
public void testCopyEmpty() throws Throwable {
MeanStatistic stat = empty.copy();
assertThat(stat)
.describedAs("copy of " + empty)
.isEqualTo(empty)
.isNotSameAs(empty);
}
@Test
public void testDoubleSamples() throws Throwable {
MeanStatistic stat = tenFromOne.copy();
assertThat(stat.add(tenFromOne))
.isEqualTo(new MeanStatistic(2, 20))
.extracting(MeanStatistic::mean)
.isEqualTo(TEND);
}
@Test
public void testAddEmptyR() throws Throwable {
MeanStatistic stat = tenFromOne.copy();
assertThat(stat.add(empty))
.isEqualTo(tenFromOne);
}
@Test
public void testAddEmptyL() throws Throwable {
MeanStatistic stat = empty.copy();
assertThat(stat.add(tenFromOne))
.isEqualTo(tenFromOne);
}
@Test
public void testAddEmptyLR() throws Throwable {
MeanStatistic stat = empty.copy();
assertThat(stat.add(empty))
.isEqualTo(empty);
}
@Test
public void testAddSampleToEmpty() throws Throwable {
MeanStatistic stat = empty.copy();
stat.addSample(TEN);
assertThat(stat)
.isEqualTo(tenFromOne);
}
@Test
public void testAddZeroValueSamples() throws Throwable {
MeanStatistic stat = tenFromOne.copy();
for (int i = 0; i < 9; i++) {
stat.addSample(0);
}
assertThat(stat)
.isEqualTo(tenFromTen);
}
@Test
public void testSetSamples() throws Throwable {
MeanStatistic stat = tenFromOne.copy();
stat.setSamples(10);
assertThat(stat)
.isEqualTo(tenFromTen);
}
@Test
public void testSetSums() throws Throwable {
MeanStatistic stat = tenFromOne.copy();
stat.setSum(100);
stat.setSamples(20);
assertThat(stat)
.isEqualTo(new MeanStatistic(20, 100))
.extracting(MeanStatistic::mean)
.isEqualTo(5.0d);
}
@Test
public void testSetNegativeSamplesMakesEmpty() throws Throwable {
MeanStatistic stat = tenFromOne.copy();
stat.setSamples(-3);
assertThat(stat)
.isEqualTo(empty);
}
@Test
public void testJsonRoundTrip() throws Throwable {
JsonSerialization<MeanStatistic> serializer = serializer();
String json = serializer.toJson(tenFromTen);
LOG.info("serialized form\n{}", json);
assertThat(json)
.describedAs("JSON form of %s", tenFromTen)
.doesNotContain("empty")
.doesNotContain("mean");
MeanStatistic deser = serializer.fromJson(json);
LOG.info("deserialized {}", deser);
assertThat(deser)
.isEqualTo(tenFromTen);
}
/**
* negative sample counts in the json convert the stat to being empty.
*/
@Test
public void testHandleMaliciousStat() throws Throwable {
String json = "{\n"
+ " \"sum\" : 10,\n"
+ " \"samples\" : -10\n"
+ "}";
JsonSerialization<MeanStatistic> serializer = serializer();
MeanStatistic deser = serializer.fromJson(json);
LOG.info("deserialized {}", deser);
assertThat(deser)
.isEqualTo(empty);
}
/**
* Get a JSON serializer.
* @return a serializer.
*/
public static JsonSerialization<MeanStatistic> serializer() {
return new JsonSerialization<>(MeanStatistic.class, true, true);
}
}
| TestMeanStatistic |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/request/JinaAIRerankRequestEntityTests.java | {
"start": 860,
"end": 4915
} | class ____ extends ESTestCase {
public void testXContent_SingleRequest_WritesAllFieldsIfDefined() throws IOException {
var entity = new JinaAIRerankRequestEntity(
"query",
List.of("abc"),
Boolean.TRUE,
12,
new JinaAIRerankTaskSettings(8, Boolean.FALSE),
"model"
);
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
entity.toXContent(builder, null);
String xContentResult = Strings.toString(builder);
assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString("""
{
"model": "model",
"query": "query",
"documents": [
"abc"
],
"top_n": 12,
"return_documents": true
}
"""));
}
public void testXContent_SingleRequest_WritesMinimalFields() throws IOException {
var entity = new JinaAIRerankRequestEntity("query", List.of("abc"), null, null, new JinaAIRerankTaskSettings(null, null), "model");
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
entity.toXContent(builder, null);
String xContentResult = Strings.toString(builder);
assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString("""
{
"model": "model",
"query": "query",
"documents": [
"abc"
]
}
"""));
}
public void testXContent_MultipleRequests_WritesAllFieldsIfDefined() throws IOException {
var entity = new JinaAIRerankRequestEntity(
"query",
List.of("abc", "def"),
Boolean.FALSE,
12,
new JinaAIRerankTaskSettings(8, Boolean.TRUE),
"model"
);
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
entity.toXContent(builder, null);
String xContentResult = Strings.toString(builder);
assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString("""
{
"model": "model",
"query": "query",
"documents": [
"abc",
"def"
],
"top_n": 12,
"return_documents": false
}
"""));
}
public void testXContent_MultipleRequests_WritesMinimalFields() throws IOException {
var entity = new JinaAIRerankRequestEntity("query", List.of("abc", "def"), null, null, null, "model");
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
entity.toXContent(builder, null);
String xContentResult = Strings.toString(builder);
assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString("""
{
"model": "model",
"query": "query",
"documents": [
"abc",
"def"
]
}
"""));
}
public void testXContent_SingleRequest_UsesTaskSettingsTopNIfRootIsNotDefined() throws IOException {
var entity = new JinaAIRerankRequestEntity(
"query",
List.of("abc"),
null,
null,
new JinaAIRerankTaskSettings(8, Boolean.FALSE),
"model"
);
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
entity.toXContent(builder, null);
String xContentResult = Strings.toString(builder);
assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString("""
{
"model": "model",
"query": "query",
"documents": [
"abc"
],
"top_n": 8,
"return_documents": false
}
"""));
}
}
| JinaAIRerankRequestEntityTests |
java | alibaba__fastjson | src/test/java/com/alibaba/json/test/IntArrayFieldTest_primitive.java | {
"start": 247,
"end": 1300
} | class ____ extends TestCase {
public void test_array() throws Exception {
Assert.assertEquals("[1]", JSON.toJSONString(new int[] { 1 }));
}
public void test_codec_null() throws Exception {
V0 v = new V0();
SerializeConfig mapping = new SerializeConfig();
mapping.setAsmEnable(false);
String text = JSON.toJSONString(v, mapping, SerializerFeature.WriteMapNullValue);
Assert.assertEquals("{\"value\":null}", text);
V0 v1 = JSON.parseObject(text, V0.class);
Assert.assertEquals(v1.getValue(), v.getValue());
}
public void test_codec_null_1() throws Exception {
V0 v = new V0();
SerializeConfig mapping = new SerializeConfig();
mapping.setAsmEnable(false);
Assert.assertTrue(!mapping.isAsmEnable());
String text = JSON.toJSONString(v, mapping, SerializerFeature.WriteMapNullValue, SerializerFeature.WriteNullListAsEmpty);
Assert.assertEquals("{\"value\":[]}", text);
}
public static | IntArrayFieldTest_primitive |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java | {
"start": 751,
"end": 8715
} | interface ____ extends Block permits IntArrayBlock, IntVectorBlock, ConstantNullBlock, IntBigArrayBlock {
/**
* Retrieves the int value stored at the given value index.
*
* <p> Values for a given position are between getFirstValueIndex(position) (inclusive) and
* getFirstValueIndex(position) + getValueCount(position) (exclusive).
*
* @param valueIndex the value index
* @return the data value (as a int)
*/
int getInt(int valueIndex);
/**
* Checks if this block has the given value at position. If at this index we have a
* multivalue, then it returns true if any values match.
*
* @param position the index at which we should check the value(s)
* @param value the value to check against
*/
default boolean hasValue(int position, int value) {
final var count = getValueCount(position);
final var startIndex = getFirstValueIndex(position);
for (int index = startIndex; index < startIndex + count; index++) {
if (value == getInt(index)) {
return true;
}
}
return false;
}
@Override
IntVector asVector();
@Override
IntBlock filter(int... positions);
/**
* Make a deep copy of this {@link Block} using the provided {@link BlockFactory},
* likely copying all data.
*/
@Override
default IntBlock deepCopy(BlockFactory blockFactory) {
try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(getPositionCount())) {
builder.copyFrom(this, 0, getPositionCount());
builder.mvOrdering(mvOrdering());
return builder.build();
}
}
@Override
IntBlock keepMask(BooleanVector mask);
@Override
ReleasableIterator<? extends IntBlock> lookup(IntBlock positions, ByteSizeValue targetBlockSize);
@Override
IntBlock expand();
static IntBlock readFrom(BlockStreamInput in) throws IOException {
final byte serializationType = in.readByte();
return switch (serializationType) {
case SERIALIZE_BLOCK_VALUES -> IntBlock.readValues(in);
case SERIALIZE_BLOCK_VECTOR -> IntVector.readFrom(in.blockFactory(), in).asBlock();
case SERIALIZE_BLOCK_ARRAY -> IntArrayBlock.readArrayBlock(in.blockFactory(), in);
case SERIALIZE_BLOCK_BIG_ARRAY -> IntBigArrayBlock.readArrayBlock(in.blockFactory(), in);
default -> {
assert false : "invalid block serialization type " + serializationType;
throw new IllegalStateException("invalid serialization type " + serializationType);
}
};
}
private static IntBlock readValues(BlockStreamInput in) throws IOException {
final int positions = in.readVInt();
try (IntBlock.Builder builder = in.blockFactory().newIntBlockBuilder(positions)) {
for (int i = 0; i < positions; i++) {
if (in.readBoolean()) {
builder.appendNull();
} else {
final int valueCount = in.readVInt();
builder.beginPositionEntry();
for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) {
builder.appendInt(in.readInt());
}
builder.endPositionEntry();
}
}
return builder.build();
}
}
@Override
default void writeTo(StreamOutput out) throws IOException {
IntVector vector = asVector();
final var version = out.getTransportVersion();
if (vector != null) {
out.writeByte(SERIALIZE_BLOCK_VECTOR);
vector.writeTo(out);
} else if (this instanceof IntArrayBlock b) {
out.writeByte(SERIALIZE_BLOCK_ARRAY);
b.writeArrayBlock(out);
} else if (this instanceof IntBigArrayBlock b) {
out.writeByte(SERIALIZE_BLOCK_BIG_ARRAY);
b.writeArrayBlock(out);
} else {
out.writeByte(SERIALIZE_BLOCK_VALUES);
IntBlock.writeValues(this, out);
}
}
private static void writeValues(IntBlock block, StreamOutput out) throws IOException {
final int positions = block.getPositionCount();
out.writeVInt(positions);
for (int pos = 0; pos < positions; pos++) {
if (block.isNull(pos)) {
out.writeBoolean(true);
} else {
out.writeBoolean(false);
final int valueCount = block.getValueCount(pos);
out.writeVInt(valueCount);
for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) {
out.writeInt(block.getInt(block.getFirstValueIndex(pos) + valueIndex));
}
}
}
}
/**
* Compares the given object with this block for equality. Returns {@code true} if and only if the
* given object is a IntBlock, and both blocks are {@link #equals(IntBlock, IntBlock) equal}.
*/
@Override
boolean equals(Object obj);
/** Returns the hash code of this block, as defined by {@link #hash(IntBlock)}. */
@Override
int hashCode();
/**
* Returns {@code true} if the given blocks are equal to each other, otherwise {@code false}.
* Two blocks are considered equal if they have the same position count, and contain the same
* values (including absent null values) in the same order. This definition ensures that the
* equals method works properly across different implementations of the IntBlock interface.
*/
static boolean equals(IntBlock block1, IntBlock block2) {
if (block1 == block2) {
return true;
}
final int positions = block1.getPositionCount();
if (positions != block2.getPositionCount()) {
return false;
}
for (int pos = 0; pos < positions; pos++) {
if (block1.isNull(pos) || block2.isNull(pos)) {
if (block1.isNull(pos) != block2.isNull(pos)) {
return false;
}
} else {
final int valueCount = block1.getValueCount(pos);
if (valueCount != block2.getValueCount(pos)) {
return false;
}
final int b1ValueIdx = block1.getFirstValueIndex(pos);
final int b2ValueIdx = block2.getFirstValueIndex(pos);
for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) {
if (block1.getInt(b1ValueIdx + valueIndex) != block2.getInt(b2ValueIdx + valueIndex)) {
return false;
}
}
}
}
return true;
}
/**
* Generates the hash code for the given block. The hash code is computed from the block's values.
* This ensures that {@code block1.equals(block2)} implies that {@code block1.hashCode()==block2.hashCode()}
* for any two blocks, {@code block1} and {@code block2}, as required by the general contract of
* {@link Object#hashCode}.
*/
static int hash(IntBlock block) {
final int positions = block.getPositionCount();
int result = 1;
for (int pos = 0; pos < positions; pos++) {
if (block.isNull(pos)) {
result = 31 * result - 1;
} else {
final int valueCount = block.getValueCount(pos);
result = 31 * result + valueCount;
final int firstValueIdx = block.getFirstValueIndex(pos);
for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) {
result = 31 * result + block.getInt(firstValueIdx + valueIndex);
}
}
}
return result;
}
/**
* Builder for {@link IntBlock}
*/
sealed | IntBlock |
java | mockito__mockito | mockito-extensions/mockito-errorprone/src/main/java/org/mockito/errorprone/bugpatterns/MockitoAnyClassWithPrimitiveType.java | {
"start": 1858,
"end": 2707
} | class ____ extends AbstractMockitoAnyForPrimitiveType {
private static final String[] CLASS_NAMES = {
"org.mockito.Mockito", "org.mockito.ArgumentMatchers", "org.mockito.Matchers"
};
// Match against the any() or any(Class) methods.
private static final Matcher<ExpressionTree> GENERIC_ANY =
Matchers.staticMethod().onClassAny(CLASS_NAMES).named("any");
@Override
protected Matcher<? super MethodInvocationTree> matcher() {
return GENERIC_ANY;
}
@Override
protected String formatMessage(
String expectedTypeAsString, Type matcherType, String replacementName) {
return String.format(
"Matcher mismatch: use %s() matcher to match primitive %s arguments",
replacementName, expectedTypeAsString);
}
}
| MockitoAnyClassWithPrimitiveType |
java | resilience4j__resilience4j | resilience4j-reactor/src/test/java/io/github/resilience4j/reactor/micrometer/operator/MonoTimerTest.java | {
"start": 1296,
"end": 2555
} | class ____ {
@Test
public void shouldTimeSuccessfulMono() {
String message = "Hello!";
MeterRegistry registry = new SimpleMeterRegistry();
Timer timer = Timer.of("timer 1", registry);
String result = Mono.just(message)
.transformDeferred(TimerOperator.of(timer))
.block(ofSeconds(1));
then(result).isEqualTo(message);
thenSuccessTimed(registry, timer);
}
@Test
public void shouldTimeFailedMono() {
IllegalStateException exception = new IllegalStateException();
MeterRegistry registry = new SimpleMeterRegistry();
TimerConfig config = TimerConfig.custom()
.onFailureTagResolver(ex -> {
then(ex).isEqualTo(exception);
return ex.toString();
})
.build();
Timer timer = Timer.of("timer 1", registry, config);
try {
Mono.error(exception)
.transformDeferred(TimerOperator.of(timer))
.block(ofSeconds(1));
failBecauseExceptionWasNotThrown(exception.getClass());
} catch (Exception e) {
thenFailureTimed(registry, timer, e);
}
}
}
| MonoTimerTest |
java | google__error-prone | core/src/main/java/com/google/errorprone/refaster/UPlaceholderExpression.java | {
"start": 2475,
"end": 2762
} | class ____ extends JCIdent {
final UVariableDecl param;
PlaceholderParamIdent(UVariableDecl param, Context context) {
super(Names.instance(context).fromString(param.getName().contents()), null);
this.param = checkNotNull(param);
}
}
static | PlaceholderParamIdent |
java | apache__maven | impl/maven-impl/src/main/java/org/apache/maven/impl/cache/CacheSelector.java | {
"start": 4926,
"end": 5070
} | interface ____ the given name
return matchesAnyInterface(parentReq.getClass(), parentRequestType);
}
/**
* Gets the short | with |
java | apache__camel | components/camel-fop/src/main/java/org/apache/camel/component/fop/FopComponent.java | {
"start": 1098,
"end": 1673
} | class ____ extends DefaultComponent {
public FopComponent() {
}
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
FopOutputType type = FopOutputType.asFooOutputType(remaining);
if (type == null) {
type = getCamelContext().getTypeConverter().mandatoryConvertTo(FopOutputType.class, remaining);
}
FopEndpoint endpoint = new FopEndpoint(uri, this, type);
setProperties(endpoint, parameters);
return endpoint;
}
}
| FopComponent |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java | {
"start": 18746,
"end": 23667
} | class ____ implements Pipe.AttributeResolver {
private QueryContainer container;
private QueryAttributeResolver(QueryContainer container) {
this.container = container;
}
@Override
public FieldExtraction resolve(Attribute attribute) {
Tuple<QueryContainer, FieldExtraction> ref = container.asFieldExtraction(attribute);
container = ref.v1();
return ref.v2();
}
}
QueryAttributeResolver resolver = new QueryAttributeResolver(this);
proc = proc.resolveAttributes(resolver);
QueryContainer qContainer = resolver.container;
// update proc (if needed)
if (qContainer.scalarFunctions().size() != scalarFunctions.size()) {
qContainer = qContainer.withScalarProcessors(AttributeMap.builder(qContainer.scalarFunctions).put(attr, proc).build());
}
return new Tuple<>(qContainer, new ComputedRef(proc));
}
public QueryContainer addColumn(Attribute attr) {
Expression expression = aliases.resolve(attr, attr);
Tuple<QueryContainer, FieldExtraction> tuple = asFieldExtraction(attr);
return tuple.v1().addColumn(tuple.v2(), Expressions.id(expression), attr);
}
private Tuple<QueryContainer, FieldExtraction> asFieldExtraction(Attribute attr) {
// resolve it Expression
Expression expression = aliases.resolve(attr, attr);
if (expression instanceof FieldAttribute fa) {
if (fa.isNested()) {
return nestedHitFieldRef(fa);
} else {
return new Tuple<>(this, topHitFieldRef(fa));
}
}
if (expression == null) {
throw new SqlIllegalArgumentException("Unknown output attribute {}", attr);
}
if (expression.foldable()) {
return new Tuple<>(this, new ComputedRef(new ConstantInput(expression.source(), expression, expression.fold())));
}
if (expression instanceof Score) {
return new Tuple<>(this, new ComputedRef(new ScorePipe(expression.source(), expression)));
}
if (expression instanceof ScalarFunction) {
return resolvedTreeComputingRef((ScalarFunction) expression, attr);
}
throw new SqlIllegalArgumentException("Unknown output attribute {}", attr);
}
public QueryContainer addColumn(FieldExtraction ref, String id, Attribute attribute) {
return new QueryContainer(
query,
aggs,
combine(fields, new FieldInfo(ref, id, attribute)),
aliases,
pseudoFunctions,
scalarFunctions,
sort,
limit,
trackHits,
includeFrozen,
minPageSize,
allowPartialSearchResults
);
}
public AttributeMap<Pipe> scalarFunctions() {
return scalarFunctions;
}
//
// agg methods
//
public QueryContainer addAgg(String groupId, LeafAgg agg) {
return with(aggs.addAgg(agg));
}
public QueryContainer addGroups(Collection<GroupByKey> values) {
return with(aggs.addGroups(values));
}
public GroupByKey findGroupForAgg(String aggId) {
return aggs.findGroupForAgg(aggId);
}
public QueryContainer updateGroup(GroupByKey group) {
return with(aggs.updateGroup(group));
}
//
// boiler plate
//
@Override
public int hashCode() {
return Objects.hash(query, aggs, fields, aliases, sort, limit, trackHits, includeFrozen, allowPartialSearchResults);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
QueryContainer other = (QueryContainer) obj;
return Objects.equals(query, other.query)
&& Objects.equals(aggs, other.aggs)
&& Objects.equals(fields, other.fields)
&& Objects.equals(aliases, other.aliases)
&& Objects.equals(sort, other.sort)
&& Objects.equals(limit, other.limit)
&& Objects.equals(trackHits, other.trackHits)
&& Objects.equals(includeFrozen, other.includeFrozen)
&& Objects.equals(allowPartialSearchResults, other.allowPartialSearchResults);
}
@Override
public String toString() {
try (XContentBuilder builder = JsonXContent.contentBuilder()) {
builder.humanReadable(true).prettyPrint();
SourceGenerator.sourceBuilder(this, null, null).toXContent(builder, ToXContent.EMPTY_PARAMS);
return Strings.toString(builder);
} catch (IOException e) {
throw new RuntimeException("error rendering", e);
}
}
}
| QueryAttributeResolver |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeFunction.java | {
"start": 1133,
"end": 2111
} | class ____ extends BaseDateTimeFunction {
private final NonIsoDateTimeExtractor extractor;
NonIsoDateTimeFunction(Source source, Expression field, ZoneId zoneId, NonIsoDateTimeExtractor extractor) {
super(source, field, zoneId);
this.extractor = extractor;
}
@Override
public ScriptTemplate asScript() {
ScriptTemplate script = super.asScript();
String template = formatTemplate(
"{sql}." + StringUtils.underscoreToLowerCamelCase(extractor.name()) + "(" + script.template() + ", {})"
);
ParamsBuilder params = paramsBuilder().script(script.params()).variable(zoneId().getId());
return new ScriptTemplate(template, params.build(), dataType());
}
@Override
protected Processor makeProcessor() {
return new NonIsoDateTimeProcessor(extractor, zoneId());
}
@Override
public DataType dataType() {
return DataTypes.INTEGER;
}
}
| NonIsoDateTimeFunction |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/xcontent/XContentParserUtils.java | {
"start": 6851,
"end": 10536
} | class ____ the object to parse
* @param consumer something to consume the parsed object
* @param <T> the type of the object to parse
* @throws IOException if anything went wrong during parsing or if the type or name cannot be derived
* from the field's name
* @throws ParsingException if the parser isn't positioned on either START_OBJECT or START_ARRAY at the beginning
*/
public static <T> void parseTypedKeysObject(XContentParser parser, String delimiter, Class<T> objectClass, Consumer<T> consumer)
throws IOException {
if (parser.currentToken() != Token.START_OBJECT && parser.currentToken() != Token.START_ARRAY) {
throwUnknownToken(parser.currentToken(), parser);
}
String currentFieldName = parser.currentName();
if (Strings.hasLength(currentFieldName)) {
int position = currentFieldName.indexOf(delimiter);
if (position > 0) {
String type = currentFieldName.substring(0, position);
String name = currentFieldName.substring(position + 1);
consumer.accept(parser.namedObject(objectClass, type, name));
return;
}
// if we didn't find a delimiter we ignore the object or array for forward compatibility instead of throwing an error
parser.skipChildren();
} else {
throw new ParsingException(parser.getTokenLocation(), "Failed to parse object: empty key");
}
}
/**
* Parses a list of a given type from the given {@code parser}. Assumes that the parser is currently positioned on a
* {@link Token#START_ARRAY} token and will fail if it is not. The returned list may or may not be mutable.
*
* @param parser x-content parser
* @param valueParser parser for expected list value type
* @return list parsed from parser
*/
public static <T> List<T> parseList(XContentParser parser, CheckedFunction<XContentParser, T, IOException> valueParser)
throws IOException {
XContentParserUtils.ensureExpectedToken(Token.START_ARRAY, parser.currentToken(), parser);
if (parser.nextToken() == Token.END_ARRAY) {
return List.of();
}
final ArrayList<T> list = new ArrayList<>();
do {
list.add(valueParser.apply(parser));
} while (parser.nextToken() != Token.END_ARRAY);
return list;
}
/**
* This is the same as {@link #parseList(XContentParser, CheckedFunction)}
* except that it passes the array index while parsing the array. Parses a list of a given type from the given {@code parser}
* while passing the valueParser the current array index.
* Assumes that the parser is currently positioned on a {@link Token#START_ARRAY} token and will fail if it is not.
* The returned list may or may not be mutable.
*
* @param parser x-content parser
* @param valueParser parser for expected list value type
* @return list parsed from parser
*/
public static <T> List<T> parseList(XContentParser parser, CheckedBiFunction<XContentParser, Integer, T, IOException> valueParser)
throws IOException {
XContentParserUtils.ensureExpectedToken(Token.START_ARRAY, parser.currentToken(), parser);
if (parser.nextToken() == Token.END_ARRAY) {
return List.of();
}
final ArrayList<T> list = new ArrayList<>();
int index = 0;
do {
list.add(valueParser.apply(parser, index++));
} while (parser.nextToken() != Token.END_ARRAY);
return list;
}
}
| of |
java | micronaut-projects__micronaut-core | websocket/src/main/java/io/micronaut/websocket/WebSocketPongMessage.java | {
"start": 967,
"end": 1484
} | class ____ {
private final ByteBuffer<?> content;
/**
* @param content The content of the pong message.
*/
public WebSocketPongMessage(@NonNull ByteBuffer<?> content) {
Objects.requireNonNull(content, "content");
this.content = content;
}
/**
* @return The content of the pong message. This buffer may be released after the message handler has completed.
*/
@NonNull
public ByteBuffer<?> getContent() {
return content;
}
}
| WebSocketPongMessage |
java | apache__camel | components/camel-milo/src/main/java/org/apache/camel/component/milo/client/MiloClientConnection.java | {
"start": 1774,
"end": 3167
} | class ____ implements AutoCloseable {
private final MiloClientConfiguration configuration;
private SubscriptionManager manager;
private volatile boolean initialized;
private MonitorFilterConfiguration monitorFilterConfiguration;
public MiloClientConnection(final MiloClientConfiguration configuration,
final MonitorFilterConfiguration monitorFilterConfiguration) {
requireNonNull(configuration);
// make a copy since the configuration is mutable
this.configuration = configuration.clone();
this.monitorFilterConfiguration = monitorFilterConfiguration;
}
public MiloClientConfiguration getConfiguration() {
return configuration;
}
protected void init() {
this.manager = new SubscriptionManager(this.configuration, Stack.sharedScheduledExecutor(), 10_000);
}
@Override
public void close() throws Exception {
if (this.manager != null) {
this.manager.dispose();
this.manager = null;
}
}
protected synchronized void checkInit() {
if (this.initialized) {
return;
}
try {
init();
} catch (final Exception e) {
throw new RuntimeCamelException(e);
}
this.initialized = true;
}
@FunctionalInterface
public | MiloClientConnection |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/simp/stomp/StompEncoder.java | {
"start": 7086,
"end": 7703
} | class ____ extends ArrayList<Object> implements Result {
private int size;
@Override
public void add(byte[] bytes) {
this.size += bytes.length;
super.add(bytes);
}
@Override
public void add(byte b) {
this.size++;
super.add(b);
}
@Override
public byte[] toByteArray() {
byte[] result = new byte[this.size];
int position = 0;
for (Object o : this) {
if (o instanceof byte[] src) {
System.arraycopy(src, 0, result, position, src.length);
position += src.length;
}
else {
result[position++] = (Byte) o;
}
}
return result;
}
}
}
| DefaultResult |
java | apache__camel | components/camel-graphql/src/main/java/org/apache/camel/component/graphql/GraphqlProducer.java | {
"start": 2349,
"end": 13851
} | class ____ extends DefaultAsyncProducer {
private static final Logger LOG = LoggerFactory.getLogger(GraphqlProducer.class);
private static final Integer OK_RESPONSE_CODE = 200;
private static final String OK_STATUS_RANGE = "200-299";
private HttpClient httpClient;
private boolean closeHttpClient;
public GraphqlProducer(GraphqlEndpoint endpoint) {
super(endpoint);
}
@Override
protected void doStart() throws Exception {
super.doStart();
httpClient = getEndpoint().getHttpClient();
if (httpClient == null) {
httpClient = getEndpoint().createHttpClient();
closeHttpClient = true;
}
}
@Override
protected void doStop() throws Exception {
if (closeHttpClient && httpClient instanceof CloseableHttpClient chc) {
IOHelper.close(chc);
}
}
@Override
public GraphqlEndpoint getEndpoint() {
return (GraphqlEndpoint) super.getEndpoint();
}
@Override
public boolean process(Exchange exchange, AsyncCallback callback) {
try {
URI httpUri = getEndpoint().getHttpUri();
String requestBody = buildRequestBody(getQuery(exchange), getEndpoint().getOperationName(),
getVariables(exchange));
try (HttpEntity requestEntity = new StringEntity(requestBody, ContentType.APPLICATION_JSON)) {
HttpPost httpPost = new HttpPost(httpUri);
httpPost.setHeader(HttpHeaders.CONTENT_TYPE, "application/json");
httpPost.setHeader(HttpHeaders.ACCEPT, "application/json");
httpPost.setHeader(HttpHeaders.ACCEPT_ENCODING, "gzip");
httpPost.setEntity(requestEntity);
populateRequestHeaders(exchange, httpPost);
httpClient.execute(httpPost, httpResponse -> {
if (LOG.isDebugEnabled()) {
LOG.debug("Finished executing http: {} method: {}", httpUri, HttpPost.METHOD_NAME);
}
int responseCode = httpResponse.getCode();
if (LOG.isDebugEnabled()) {
LOG.debug("Http responseCode: {}", responseCode);
}
if (!getEndpoint().isThrowExceptionOnFailure()) {
// if we do not use failed exception then populate response for all response codes
populateResponse(exchange, httpResponse, getEndpoint().getHeaderFilterStrategy(), responseCode);
} else {
boolean ok = HttpHelper.isStatusCodeOk(responseCode, OK_STATUS_RANGE);
if (ok) {
// only populate response for OK response
populateResponse(exchange, httpResponse, getEndpoint().getHeaderFilterStrategy(), responseCode);
} else {
// also store response code when throwing exception
populateResponseCode(exchange.getMessage(), httpResponse, responseCode);
// operation failed so populate exception to throw
exchange.setException(populateHttpOperationFailedException(exchange, httpResponse, responseCode));
}
}
return null;
});
}
} catch (Exception e) {
exchange.setException(e);
}
callback.done(true);
return true;
}
private void populateRequestHeaders(Exchange exchange, HttpPost httpRequest) {
HeaderFilterStrategy strategy = getEndpoint().getHeaderFilterStrategy();
final TypeConverter tc = exchange.getContext().getTypeConverter();
for (Map.Entry<String, Object> entry : exchange.getMessage().getHeaders().entrySet()) {
String key = entry.getKey();
// we should not add known headers
// skip known headers from graphql
boolean skip = getEndpoint().getQueryHeader() != null && key.equalsIgnoreCase(getEndpoint().getQueryHeader())
|| getEndpoint().getVariablesHeader() != null && key.equalsIgnoreCase(getEndpoint().getVariablesHeader());
if (skip) {
continue;
}
Object headerValue = entry.getValue();
if (headerValue != null) {
if (headerValue instanceof String || headerValue instanceof Integer || headerValue instanceof Long
|| headerValue instanceof Boolean || headerValue instanceof Date) {
// optimise for common types
String value = headerValue.toString();
if (!strategy.applyFilterToCamelHeaders(key, value, exchange)) {
httpRequest.addHeader(key, value);
}
continue;
}
// use an iterator as there can be multiple values. (must not use a delimiter, and allow empty values)
final Iterator<?> it = ObjectHelper.createIterator(headerValue, null, true);
HttpUtil.applyHeader(strategy, exchange, it, tc, key,
(multiValues, prev) -> applyHeader(httpRequest, key, multiValues, prev));
}
}
}
private static void applyHeader(HttpUriRequest httpRequest, String key, List<String> multiValues, String prev) {
// add the value(s) as a http request header
if (multiValues != null) {
// use the default toString of a ArrayList to create in the form [xxx, yyy]
// if multi valued, for a single value, then just output the value as is
String s = multiValues.size() > 1 ? multiValues.toString() : multiValues.get(0);
httpRequest.addHeader(key, s);
} else if (prev != null) {
httpRequest.addHeader(key, prev);
}
}
private static void populateResponseCode(Message message, ClassicHttpResponse httpResponse, int responseCode) {
// optimize for 200 response code as the boxing is outside the cached integers
if (responseCode == 200) {
message.setHeader(Exchange.HTTP_RESPONSE_CODE, OK_RESPONSE_CODE);
} else {
message.setHeader(Exchange.HTTP_RESPONSE_CODE, responseCode);
}
if (httpResponse.getReasonPhrase() != null) {
message.setHeader(Exchange.HTTP_RESPONSE_TEXT, httpResponse.getReasonPhrase());
}
}
protected Exception populateHttpOperationFailedException(
Exchange exchange, ClassicHttpResponse httpResponse, int responseCode)
throws IOException, ParseException {
Exception answer;
String statusText = httpResponse.getReasonPhrase() != null ? httpResponse.getReasonPhrase() : null;
Map<String, String> headers = extractResponseHeaders(httpResponse.getHeaders());
Object responseBody = EntityUtils.toString(httpResponse.getEntity());
// make a defensive copy of the response body in the exception so its detached from the cache
String copy = null;
if (responseBody != null) {
copy = exchange.getContext().getTypeConverter().convertTo(String.class, exchange, responseBody);
}
Header locationHeader = httpResponse.getFirstHeader("location");
String uri = getEndpoint().getHttpUri().toString();
if (locationHeader != null && responseCode >= 300 && responseCode < 400) {
answer = new HttpOperationFailedException(
uri, responseCode, statusText, locationHeader.getValue(), headers, copy);
} else {
answer = new HttpOperationFailedException(uri, responseCode, statusText, null, headers, copy);
}
return answer;
}
protected void populateResponse(
Exchange exchange, ClassicHttpResponse httpResponse,
HeaderFilterStrategy strategy, int responseCode)
throws IOException, ParseException {
Message answer = exchange.getMessage();
populateResponseCode(answer, httpResponse, responseCode);
// We just make the out message is not create when extractResponseBody throws exception
Object responseBody = EntityUtils.toString(httpResponse.getEntity());
answer.setBody(responseBody);
// optimize to walk headers with an iterator which does not create a new array as getAllHeaders does
boolean found = false;
Iterator<Header> it = httpResponse.headerIterator();
while (it.hasNext()) {
Header header = it.next();
String name = header.getName();
String value = header.getValue();
if (!found && name.equalsIgnoreCase("content-type")) {
name = Exchange.CONTENT_TYPE;
exchange.setProperty(ExchangePropertyKey.CHARSET_NAME, IOHelper.getCharsetNameFromContentType(value));
found = true;
}
// use http helper to extract parameter value as it may contain multiple values
Object extracted = HttpHelper.extractHttpParameterValue(value);
if (strategy != null && !strategy.applyFilterToExternalHeaders(name, extracted, exchange)) {
HttpHelper.appendHeader(answer.getHeaders(), name, extracted);
}
}
}
/**
* Extracts the response headers
*
* @param responseHeaders the headers
* @return the extracted headers or an empty map if no headers existed
*/
protected static Map<String, String> extractResponseHeaders(Header[] responseHeaders) {
if (responseHeaders == null || responseHeaders.length == 0) {
return Map.of();
}
Map<String, String> answer = new HashMap<>();
for (Header header : responseHeaders) {
answer.put(header.getName(), header.getValue());
}
return answer;
}
protected static String buildRequestBody(String query, String operationName, JsonObject variables) {
JsonObject jsonObject = new JsonObject();
jsonObject.put("query", query);
jsonObject.put("operationName", operationName);
jsonObject.put("variables", variables != null ? variables : new JsonObject());
return jsonObject.toJson();
}
private String getQuery(Exchange exchange) throws InvalidPayloadException {
String query = null;
if (getEndpoint().getQuery() != null) {
query = getEndpoint().getQuery();
} else if (getEndpoint().getQueryHeader() != null) {
query = exchange.getIn().getHeader(getEndpoint().getQueryHeader(), String.class);
} else {
query = exchange.getIn().getMandatoryBody(String.class);
}
return query;
}
private JsonObject getVariables(Exchange exchange) {
JsonObject variables = null;
if (getEndpoint().getVariables() != null) {
variables = getEndpoint().getVariables();
} else if (getEndpoint().getVariablesHeader() != null) {
variables = exchange.getIn().getHeader(getEndpoint().getVariablesHeader(), JsonObject.class);
} else if (exchange.getIn().getBody() instanceof JsonObject) {
variables = exchange.getIn().getBody(JsonObject.class);
}
return variables;
}
}
| GraphqlProducer |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/collections/semantics/UniqueListType.java | {
"start": 630,
"end": 2031
} | class ____ implements UserCollectionType {
@Override
public CollectionClassification getClassification() {
return CollectionClassification.LIST;
}
@Override
public Class<?> getCollectionClass() {
return List.class;
}
@Override
public PersistentCollection instantiate(
SharedSessionContractImplementor session,
CollectionPersister persister) {
return new UniqueListWrapper( session );
}
@Override
public PersistentCollection wrap(
SharedSessionContractImplementor session,
Object collection) {
return new UniqueListWrapper( session, (List) collection );
}
@Override
public Iterator getElementsIterator(Object collection) {
return ( (List) collection ).iterator();
}
@Override
public boolean contains(Object collection, Object entity) {
return ( (List) collection ).contains( entity );
}
@Override
public Object indexOf(Object collection, Object entity) {
return ( (List) collection ).indexOf( entity );
}
@Override
public Object replaceElements(
Object original,
Object target,
CollectionPersister persister,
Object owner,
Map copyCache,
SharedSessionContractImplementor session) {
List result = (List) target;
result.clear();
result.addAll( (List) original );
return result;
}
@Override
public Object instantiate(int anticipatedSize) {
return new ArrayList<>();
}
}
//end::collections-custom-type-ex[]
| UniqueListType |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/deployment/VerticleFactoryTest.java | {
"start": 12231,
"end": 13300
} | class ____ implements VerticleFactory {
String prefix;
Verticle verticle;
String identifier;
int order;
boolean failInCreate;
Context createContext;
boolean createWorkerThread;
TestVerticleFactory(String prefix) {
this.prefix = prefix;
}
TestVerticleFactory(String prefix, Verticle verticle) {
this.prefix = prefix;
this.verticle = verticle;
}
TestVerticleFactory(String prefix, Verticle verticle, int order) {
this.prefix = prefix;
this.verticle = verticle;
this.order = order;
}
TestVerticleFactory(String prefix, Verticle verticle, int order, boolean failInCreate) {
this.prefix = prefix;
this.verticle = verticle;
this.order = order;
this.failInCreate = failInCreate;
}
@Override
public int order() {
return order;
}
@Override
public void init(Vertx vertx) {
}
@Override
public String prefix() {
return prefix;
}
@Override
public void close() {
}
static | TestVerticleFactory |
java | apache__flink | flink-python/src/main/java/org/apache/flink/python/env/AbstractPythonEnvironmentManager.java | {
"start": 2032,
"end": 2188
} | class ____ python environment manager which is used to create the PythonEnvironment object
* used to execute Python functions.
*/
@Internal
public abstract | of |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/ConfigurationClassPostProcessorTests.java | {
"start": 74590,
"end": 74829
} | class ____ {
@Bean
public ServiceBean serviceBean() {
return provider().getServiceBean();
}
@Bean
public ServiceBeanProvider provider() {
return new ServiceBeanProvider();
}
}
@Configuration
public static | AbstractConfig |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/merge/MergeDetachedCascadedCollectionInEmbeddableTest.java | {
"start": 3296,
"end": 3454
} | class ____ {
private long id;
@Id
@GeneratedValue
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
}
}
| Thing |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/StaticResourcesProcessor.java | {
"start": 1484,
"end": 6649
} | class ____ {
@BuildStep(onlyIf = IsDevelopment.class)
HotDeploymentWatchedFileBuildItem indexHtmlFile() {
String staticRoot = StaticResourcesRecorder.META_INF_RESOURCES + "/index.html";
return new HotDeploymentWatchedFileBuildItem(staticRoot, !QuarkusClassLoader.isResourcePresentAtRuntime(staticRoot));
}
@BuildStep
void collectStaticResources(Capabilities capabilities,
List<AdditionalStaticResourceBuildItem> additionalStaticResources,
BuildProducer<StaticResourcesBuildItem> staticResources,
LaunchModeBuildItem launchModeBuildItem) {
if (capabilities.isPresent(Capability.SERVLET)) {
// Servlet container handles static resources
return;
}
Set<StaticResourcesBuildItem.Entry> paths = getClasspathResources();
// We shouldn't add them in test and dev-mode (as they are handled by the GeneratedStaticResourcesProcessor), but for backward compatibility we keep it for now
for (AdditionalStaticResourceBuildItem bi : additionalStaticResources) {
paths.add(new StaticResourcesBuildItem.Entry(bi.getPath(), bi.isDirectory()));
}
if (!paths.isEmpty()) {
staticResources.produce(new StaticResourcesBuildItem(paths));
}
}
@BuildStep
@Record(RUNTIME_INIT)
public void runtimeInit(Optional<StaticResourcesBuildItem> staticResources, StaticResourcesRecorder recorder,
CoreVertxBuildItem vertx, BeanContainerBuildItem beanContainer,
BuildProducer<DefaultRouteBuildItem> defaultRoutes) {
if (staticResources.isPresent()) {
defaultRoutes.produce(new DefaultRouteBuildItem(recorder.start(staticResources.get().getPaths())));
}
}
@BuildStep(onlyIf = NativeOrNativeSourcesBuild.class)
public void nativeImageResource(Optional<StaticResourcesBuildItem> staticResources,
BuildProducer<NativeImageResourceBuildItem> producer) {
if (staticResources.isPresent()) {
Set<StaticResourcesBuildItem.Entry> entries = staticResources.get().getEntries();
List<String> metaInfResources = new ArrayList<>(entries.size());
for (StaticResourcesBuildItem.Entry entry : entries) {
if (entry.isDirectory()) {
// TODO: do we perhaps want to register the whole directory?
continue;
}
String metaInfResourcesPath = StaticResourcesRecorder.META_INF_RESOURCES + entry.getPath();
metaInfResources.add(metaInfResourcesPath);
}
producer.produce(new NativeImageResourceBuildItem(metaInfResources));
// register all directories under META-INF/resources for reflection in order to enable
// the serving of index.html in arbitrarily nested directories
final Set<String> collectedDirs = new HashSet<>();
visitRuntimeMetaInfResources(visit -> {
if (Files.isDirectory(visit.getPath())) {
final String relativePath = visit.getRelativePath();
if (collectedDirs.add(relativePath)) {
producer.produce(new NativeImageResourceBuildItem(relativePath));
}
}
});
}
}
/**
* Find all static file resources that are available from classpath.
*
* @return the set of static resources
*/
private Set<StaticResourcesBuildItem.Entry> getClasspathResources() {
Set<StaticResourcesBuildItem.Entry> knownPaths = new HashSet<>();
final String prefix = StaticResourcesRecorder.META_INF_RESOURCES;
visitRuntimeMetaInfResources(visit -> {
Path visitPath = visit.getPath();
if (!Files.isDirectory(visitPath)) {
String rel = visit.getRelativePath();
// Ensure that the relative path starts with the prefix before calling substring
if (rel.startsWith(prefix)) {
// Strip the "META-INF/resources/" prefix and add the remainder
String subPath = rel.substring(prefix.length());
knownPaths.add(new StaticResourcesBuildItem.Entry(subPath, false));
}
}
});
return knownPaths;
}
/**
* Visits all {@code META-INF/resources} directories and their content found on the runtime classpath
*
* @param visitor visitor implementation
*/
private static void visitRuntimeMetaInfResources(PathVisitor visitor) {
final List<ClassPathElement> elements = QuarkusClassLoader.getElements(StaticResourcesRecorder.META_INF_RESOURCES,
false);
if (!elements.isEmpty()) {
for (var element : elements) {
if (element.isRuntime()) {
element.apply(tree -> {
tree.walkIfContains(StaticResourcesRecorder.META_INF_RESOURCES, visitor);
return null;
});
}
}
}
}
}
| StaticResourcesProcessor |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/TimingStats.java | {
"start": 1418,
"end": 13870
} | class ____ implements ToXContentObject, Writeable {
public static final ParseField BUCKET_COUNT = new ParseField("bucket_count");
public static final ParseField TOTAL_BUCKET_PROCESSING_TIME_MS = new ParseField("total_bucket_processing_time_ms");
public static final ParseField MIN_BUCKET_PROCESSING_TIME_MS = new ParseField("minimum_bucket_processing_time_ms");
public static final ParseField MAX_BUCKET_PROCESSING_TIME_MS = new ParseField("maximum_bucket_processing_time_ms");
public static final ParseField AVG_BUCKET_PROCESSING_TIME_MS = new ParseField("average_bucket_processing_time_ms");
public static final ParseField EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_MS = new ParseField(
"exponential_average_bucket_processing_time_ms"
);
public static final ParseField EXPONENTIAL_AVG_CALCULATION_CONTEXT = new ParseField("exponential_average_calculation_context");
public static final ParseField EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_PER_HOUR_MS = new ParseField(
"exponential_average_bucket_processing_time_per_hour_ms"
);
public static final ParseField TYPE = new ParseField("timing_stats");
public static final ConstructingObjectParser<TimingStats, Void> PARSER = new ConstructingObjectParser<>(
TYPE.getPreferredName(),
true,
args -> {
String jobId = (String) args[0];
long bucketCount = (long) args[1];
Double minBucketProcessingTimeMs = (Double) args[2];
Double maxBucketProcessingTimeMs = (Double) args[3];
Double avgBucketProcessingTimeMs = (Double) args[4];
Double exponentialAvgBucketProcessingTimeMs = (Double) args[5];
ExponentialAverageCalculationContext exponentialAvgCalculationContext = (ExponentialAverageCalculationContext) args[6];
return new TimingStats(
jobId,
bucketCount,
minBucketProcessingTimeMs,
maxBucketProcessingTimeMs,
avgBucketProcessingTimeMs,
exponentialAvgBucketProcessingTimeMs,
getOrDefault(exponentialAvgCalculationContext, new ExponentialAverageCalculationContext())
);
}
);
static {
PARSER.declareString(constructorArg(), Job.ID);
PARSER.declareLong(constructorArg(), BUCKET_COUNT);
PARSER.declareDouble(optionalConstructorArg(), MIN_BUCKET_PROCESSING_TIME_MS);
PARSER.declareDouble(optionalConstructorArg(), MAX_BUCKET_PROCESSING_TIME_MS);
PARSER.declareDouble(optionalConstructorArg(), AVG_BUCKET_PROCESSING_TIME_MS);
PARSER.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_MS);
PARSER.declareObject(optionalConstructorArg(), ExponentialAverageCalculationContext.PARSER, EXPONENTIAL_AVG_CALCULATION_CONTEXT);
}
public static String documentId(String jobId) {
return jobId + "_timing_stats";
}
private final String jobId;
private long bucketCount;
private Double minBucketProcessingTimeMs;
private Double maxBucketProcessingTimeMs;
private Double avgBucketProcessingTimeMs;
private Double exponentialAvgBucketProcessingTimeMs;
private final ExponentialAverageCalculationContext exponentialAvgCalculationContext;
public TimingStats(
String jobId,
long bucketCount,
@Nullable Double minBucketProcessingTimeMs,
@Nullable Double maxBucketProcessingTimeMs,
@Nullable Double avgBucketProcessingTimeMs,
@Nullable Double exponentialAvgBucketProcessingTimeMs,
ExponentialAverageCalculationContext exponentialAvgCalculationContext
) {
this.jobId = Objects.requireNonNull(jobId);
this.bucketCount = bucketCount;
this.minBucketProcessingTimeMs = minBucketProcessingTimeMs;
this.maxBucketProcessingTimeMs = maxBucketProcessingTimeMs;
this.avgBucketProcessingTimeMs = avgBucketProcessingTimeMs;
this.exponentialAvgBucketProcessingTimeMs = exponentialAvgBucketProcessingTimeMs;
this.exponentialAvgCalculationContext = Objects.requireNonNull(exponentialAvgCalculationContext);
}
public TimingStats(String jobId) {
this(jobId, 0, null, null, null, null, new ExponentialAverageCalculationContext());
}
public TimingStats(TimingStats lhs) {
this(
lhs.jobId,
lhs.bucketCount,
lhs.minBucketProcessingTimeMs,
lhs.maxBucketProcessingTimeMs,
lhs.avgBucketProcessingTimeMs,
lhs.exponentialAvgBucketProcessingTimeMs,
new ExponentialAverageCalculationContext(lhs.exponentialAvgCalculationContext)
);
}
public TimingStats(StreamInput in) throws IOException {
this.jobId = in.readString();
this.bucketCount = in.readLong();
this.minBucketProcessingTimeMs = in.readOptionalDouble();
this.maxBucketProcessingTimeMs = in.readOptionalDouble();
this.avgBucketProcessingTimeMs = in.readOptionalDouble();
this.exponentialAvgBucketProcessingTimeMs = in.readOptionalDouble();
this.exponentialAvgCalculationContext = in.readOptionalWriteable(ExponentialAverageCalculationContext::new);
}
public String getJobId() {
return jobId;
}
public long getBucketCount() {
return bucketCount;
}
/** Calculates total bucket processing time as a product of the all-time average bucket processing time and the number of buckets. */
public double getTotalBucketProcessingTimeMs() {
return avgBucketProcessingTimeMs != null ? bucketCount * avgBucketProcessingTimeMs : 0.0;
}
public Double getMinBucketProcessingTimeMs() {
return minBucketProcessingTimeMs;
}
public Double getMaxBucketProcessingTimeMs() {
return maxBucketProcessingTimeMs;
}
public Double getAvgBucketProcessingTimeMs() {
return avgBucketProcessingTimeMs;
}
public Double getExponentialAvgBucketProcessingTimeMs() {
return exponentialAvgBucketProcessingTimeMs;
}
public Double getExponentialAvgBucketProcessingTimePerHourMs() {
return exponentialAvgCalculationContext.getCurrentExponentialAverageMs();
}
// Visible for testing
ExponentialAverageCalculationContext getExponentialAvgCalculationContext() {
return exponentialAvgCalculationContext;
}
/**
* Updates the statistics (min, max, avg, exponential avg) for the given data point (bucket processing time).
*/
public void updateStats(double bucketProcessingTimeMs) {
if (bucketProcessingTimeMs < 0.0) {
throw new IllegalArgumentException("bucketProcessingTimeMs must be non-negative, was: " + bucketProcessingTimeMs);
}
if (minBucketProcessingTimeMs == null || bucketProcessingTimeMs < minBucketProcessingTimeMs) {
minBucketProcessingTimeMs = bucketProcessingTimeMs;
}
if (maxBucketProcessingTimeMs == null || bucketProcessingTimeMs > maxBucketProcessingTimeMs) {
maxBucketProcessingTimeMs = bucketProcessingTimeMs;
}
if (avgBucketProcessingTimeMs == null) {
avgBucketProcessingTimeMs = bucketProcessingTimeMs;
} else {
// Calculate the cumulative moving average (see https://en.wikipedia.org/wiki/Moving_average#Cumulative_moving_average) of
// bucket processing times.
avgBucketProcessingTimeMs = (bucketCount * avgBucketProcessingTimeMs + bucketProcessingTimeMs) / (bucketCount + 1);
}
if (exponentialAvgBucketProcessingTimeMs == null) {
exponentialAvgBucketProcessingTimeMs = bucketProcessingTimeMs;
} else {
// Calculate the exponential moving average (see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average) of
// bucket processing times.
exponentialAvgBucketProcessingTimeMs = (1 - ALPHA) * exponentialAvgBucketProcessingTimeMs + ALPHA * bucketProcessingTimeMs;
}
bucketCount++;
exponentialAvgCalculationContext.increment(bucketProcessingTimeMs);
}
public void setLatestRecordTimestamp(Instant latestRecordTimestamp) {
exponentialAvgCalculationContext.setLatestTimestamp(latestRecordTimestamp);
}
/**
* Constant smoothing factor used for calculating exponential moving average. Represents the degree of weighting decrease.
*/
private static double ALPHA = 0.01;
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(jobId);
out.writeLong(bucketCount);
out.writeOptionalDouble(minBucketProcessingTimeMs);
out.writeOptionalDouble(maxBucketProcessingTimeMs);
out.writeOptionalDouble(avgBucketProcessingTimeMs);
out.writeOptionalDouble(exponentialAvgBucketProcessingTimeMs);
out.writeOptionalWriteable(exponentialAvgCalculationContext);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (params.paramAsBoolean(ToXContentParams.FOR_INTERNAL_STORAGE, false)) {
builder.field(Result.RESULT_TYPE.getPreferredName(), TYPE.getPreferredName());
}
builder.field(Job.ID.getPreferredName(), jobId);
builder.field(BUCKET_COUNT.getPreferredName(), bucketCount);
if (params.paramAsBoolean(ToXContentParams.INCLUDE_CALCULATED_FIELDS, false)) {
builder.field(TOTAL_BUCKET_PROCESSING_TIME_MS.getPreferredName(), getTotalBucketProcessingTimeMs());
}
if (minBucketProcessingTimeMs != null) {
builder.field(MIN_BUCKET_PROCESSING_TIME_MS.getPreferredName(), minBucketProcessingTimeMs);
}
if (maxBucketProcessingTimeMs != null) {
builder.field(MAX_BUCKET_PROCESSING_TIME_MS.getPreferredName(), maxBucketProcessingTimeMs);
}
if (avgBucketProcessingTimeMs != null) {
builder.field(AVG_BUCKET_PROCESSING_TIME_MS.getPreferredName(), avgBucketProcessingTimeMs);
}
if (exponentialAvgBucketProcessingTimeMs != null) {
builder.field(EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_MS.getPreferredName(), exponentialAvgBucketProcessingTimeMs);
}
if (params.paramAsBoolean(ToXContentParams.INCLUDE_CALCULATED_FIELDS, false)) {
Double expAvgBucketProcessingTimePerHourMs = getExponentialAvgBucketProcessingTimePerHourMs();
if (expAvgBucketProcessingTimePerHourMs != null) {
builder.field(EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_PER_HOUR_MS.getPreferredName(), expAvgBucketProcessingTimePerHourMs);
}
}
if (params.paramAsBoolean(ToXContentParams.FOR_INTERNAL_STORAGE, false)) {
builder.field(EXPONENTIAL_AVG_CALCULATION_CONTEXT.getPreferredName(), exponentialAvgCalculationContext);
}
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (o == this) return true;
if (o == null || getClass() != o.getClass()) return false;
TimingStats that = (TimingStats) o;
return Objects.equals(this.jobId, that.jobId)
&& this.bucketCount == that.bucketCount
&& Objects.equals(this.minBucketProcessingTimeMs, that.minBucketProcessingTimeMs)
&& Objects.equals(this.maxBucketProcessingTimeMs, that.maxBucketProcessingTimeMs)
&& Objects.equals(this.avgBucketProcessingTimeMs, that.avgBucketProcessingTimeMs)
&& Objects.equals(this.exponentialAvgBucketProcessingTimeMs, that.exponentialAvgBucketProcessingTimeMs)
&& Objects.equals(this.exponentialAvgCalculationContext, that.exponentialAvgCalculationContext);
}
@Override
public int hashCode() {
return Objects.hash(
jobId,
bucketCount,
minBucketProcessingTimeMs,
maxBucketProcessingTimeMs,
avgBucketProcessingTimeMs,
exponentialAvgBucketProcessingTimeMs,
exponentialAvgCalculationContext
);
}
@Override
public String toString() {
return Strings.toString(this);
}
private static <T> T getOrDefault(@Nullable T value, T defaultValue) {
return value != null ? value : defaultValue;
}
}
| TimingStats |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/detached/reference/DetachedReferenceInitializationDelayedFetchTest.java | {
"start": 1155,
"end": 6256
} | class ____ {
@Test
public void testDetachedAndPersistentEntity(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final var entityB = session.find( EntityB.class, 1L );
session.clear();
// put a different instance of EntityB in the persistence context
final var ignored = session.find( EntityB.class, 1L );
fetchQuery( entityB, session );
} );
}
@Test
public void testDetachedEntityAndPersistentInitializedProxy(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final var entityB = session.find( EntityB.class, 1L );
session.clear();
// put a different instance of EntityB in the persistence context
final var ignored = session.getReference( EntityB.class, 1L );
Hibernate.initialize( ignored );
fetchQuery( entityB, session );
} );
}
@Test
public void testDetachedEntityAndPersistentProxy(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final var entityB = session.find( EntityB.class, 1L );
session.clear();
// put a different instance of EntityB in the persistence context
final var ignored = session.getReference( EntityB.class, 1L );
fetchQuery( entityB, session );
} );
}
@Test
public void testDetachedProxyAndPersistentEntity(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final var entityB = session.getReference( EntityB.class, 1L );
session.clear();
// put a different instance of EntityB in the persistence context
final var ignored = session.find( EntityB.class, 1L );
fetchQuery( entityB, session );
} );
}
@Test
public void testDetachedProxyAndPersistentInitializedProxy(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final var entityB = session.getReference( EntityB.class, 1L );
session.clear();
// put a different instance of EntityB in the persistence context
final var ignored = session.getReference( EntityB.class, 1L );
Hibernate.initialize( ignored );
fetchQuery( entityB, session );
} );
}
@Test
public void testDetachedAndPersistentProxy(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final var entityB = session.getReference( EntityB.class, 1L );
session.clear();
// put a different instance of EntityB in the persistence context
final var ignored = session.getReference( EntityB.class, 1L );
fetchQuery( entityB, session );
} );
}
@Test
public void testDetachedInitializedProxyAndPersistentEntity(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final var entityB = session.getReference( EntityB.class, 1L );
Hibernate.initialize( entityB );
session.clear();
// put a different instance of EntityB in the persistence context
final var ignored = session.find( EntityB.class, 1L );
fetchQuery( entityB, session );
} );
}
@Test
public void testDetachedAndPersistentInitializedProxy(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final var entityB = session.getReference( EntityB.class, 1L );
Hibernate.initialize( entityB );
session.clear();
// put a different instance of EntityB in the persistence context
final var ignored = session.getReference( EntityB.class, 1L );
Hibernate.initialize( ignored );
fetchQuery( entityB, session );
} );
}
@Test
public void testDetachedInitializedProxyAndPersistentProxy(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final var entityB = session.getReference( EntityB.class, 1L );
Hibernate.initialize( entityB );
session.clear();
// put a different instance of EntityB in the persistence context
final var ignored = session.getReference( EntityB.class, 1L );
fetchQuery( entityB, session );
} );
}
@BeforeAll
public void setUp(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final var entityB = new EntityB();
entityB.id = 1L;
entityB.name = "b_1";
session.persist( entityB );
} );
}
@AfterEach
public void tearDown(SessionFactoryScope scope) {
scope.inTransaction( session -> {
session.createMutationQuery( "delete from EntityA" ).executeUpdate();
} );
}
private void fetchQuery(EntityB entityB, SessionImplementor session) {
final var entityA = new EntityA();
entityA.id = 1L;
entityA.b = entityB;
session.persist( entityA );
final var wasDetachedInitialized = Hibernate.isInitialized( entityB );
final var id = session.getSessionFactory().getPersistenceUnitUtil().getIdentifier( entityB );
final var reference = session.getReference( EntityB.class, id );
final var wasManagedInitialized = Hibernate.isInitialized( reference );
final var result = session.createQuery(
"from EntityA a",
EntityA.class
).getSingleResult();
assertThat( Hibernate.isInitialized( entityB ) ).isEqualTo( wasDetachedInitialized );
assertThat( result.b ).isSameAs( entityB );
assertThat( Hibernate.isInitialized( reference ) ).isEqualTo( wasManagedInitialized );
assertThat( reference ).isNotSameAs( entityB );
}
@Entity(name = "EntityA")
static | DetachedReferenceInitializationDelayedFetchTest |
java | spring-projects__spring-boot | core/spring-boot-test/src/test/java/org/springframework/boot/test/context/AnnotationsPropertySourceTests.java | {
"start": 13718,
"end": 13816
} | interface ____ {
Level1 level1();
@Retention(RetentionPolicy.RUNTIME)
@ | DeeplyNestedAnnotations |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/streaming/api/datastream/StatefulDataStreamV2ITCase.java | {
"start": 11310,
"end": 12537
} | class ____
implements OneInputStreamProcessFunction<Long, String> {
private final ValueStateDeclaration<Long> stateDeclaration;
public MockSumProcessFunction() {
this.stateDeclaration =
StateDeclarations.valueState("value-state", TypeDescriptors.LONG);
}
@Override
public void processRecord(
Long record, Collector<String> output, PartitionedContext<String> ctx)
throws Exception {
Optional<ValueState<Long>> maybeState =
ctx.getStateManager().getStateOptional(stateDeclaration);
if (!maybeState.isPresent()) {
throw new FlinkRuntimeException("State not found: " + stateDeclaration);
}
Long currentValue = maybeState.get().value();
currentValue = currentValue == null ? 0 : currentValue;
maybeState.get().update(currentValue + record);
output.collect(Long.toString(maybeState.get().value()));
}
}
/**
* {@link OneInputStreamProcessFunction} that verifies the result. If verification fails, it
* throws an exception.
*/
private static | MockSumProcessFunction |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/annotation/AnnotationValueResolver.java | {
"start": 1140,
"end": 1281
} | enum ____
*/
<E extends Enum> Optional<E> enumValue(@NonNull String member, @NonNull Class<E> enumType);
/**
* Return the | type |
java | quarkusio__quarkus | extensions/assistant/deployment-spi/src/main/java/io/quarkus/assistant/deployment/spi/AssistantPageBuildItem.java | {
"start": 153,
"end": 723
} | class ____ extends MultiBuildItem {
private final PageBuilder pageBuilder;
private final boolean alwaysVisible;
public AssistantPageBuildItem(PageBuilder pageBuilder) {
this(pageBuilder, false);
}
public AssistantPageBuildItem(PageBuilder pageBuilder, boolean alwaysVisible) {
this.pageBuilder = pageBuilder;
this.alwaysVisible = alwaysVisible;
}
public PageBuilder getPageBuilder() {
return pageBuilder;
}
public boolean isAlwaysVisible() {
return alwaysVisible;
}
}
| AssistantPageBuildItem |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java | {
"start": 1693,
"end": 9081
} | class ____ extends AbstractQueryTestCase<QueryStringQueryBuilder> {
private static final String INTEGER_RANGE_FIELD_NAME = "mapped_int_range";
private static final String LONG_RANGE_FIELD_NAME = "mapped_long_range";
private static final String FLOAT_RANGE_FIELD_NAME = "mapped_float_range";
private static final String DOUBLE_RANGE_FIELD_NAME = "mapped_double_range";
private static final String DATE_RANGE_FIELD_NAME = "mapped_date_range";
private static final String IP_RANGE_FIELD_NAME = "mapped_ip_range";
@Override
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
mapperService.merge(
"_doc",
new CompressedXContent(
Strings.toString(
PutMappingRequest.simpleMapping(
INTEGER_RANGE_FIELD_NAME,
"type=integer_range",
LONG_RANGE_FIELD_NAME,
"type=long_range",
FLOAT_RANGE_FIELD_NAME,
"type=float_range",
DOUBLE_RANGE_FIELD_NAME,
"type=double_range",
DATE_RANGE_FIELD_NAME,
"type=date_range",
IP_RANGE_FIELD_NAME,
"type=ip_range"
)
)
),
MapperService.MergeReason.MAPPING_UPDATE
);
}
public void testIntegerRangeQuery() throws Exception {
Query query = new QueryStringQueryBuilder(INTEGER_RANGE_FIELD_NAME + ":[-450 TO 45000]").toQuery(createSearchExecutionContext());
Query range = IntRange.newIntersectsQuery(INTEGER_RANGE_FIELD_NAME, new int[] { -450 }, new int[] { 45000 });
Query dv = RangeType.INTEGER.dvRangeQuery(
INTEGER_RANGE_FIELD_NAME,
BinaryDocValuesRangeQuery.QueryType.INTERSECTS,
-450,
45000,
true,
true
);
assertEquals(new IndexOrDocValuesQuery(range, dv), query);
}
public void testLongRangeQuery() throws Exception {
Query query = new QueryStringQueryBuilder(LONG_RANGE_FIELD_NAME + ":[-450 TO 45000]").toQuery(createSearchExecutionContext());
Query range = LongRange.newIntersectsQuery(LONG_RANGE_FIELD_NAME, new long[] { -450 }, new long[] { 45000 });
Query dv = RangeType.LONG.dvRangeQuery(
LONG_RANGE_FIELD_NAME,
BinaryDocValuesRangeQuery.QueryType.INTERSECTS,
-450,
45000,
true,
true
);
assertEquals(new IndexOrDocValuesQuery(range, dv), query);
}
public void testFloatRangeQuery() throws Exception {
Query query = new QueryStringQueryBuilder(FLOAT_RANGE_FIELD_NAME + ":[-450 TO 45000]").toQuery(createSearchExecutionContext());
Query range = FloatRange.newIntersectsQuery(FLOAT_RANGE_FIELD_NAME, new float[] { -450 }, new float[] { 45000 });
Query dv = RangeType.FLOAT.dvRangeQuery(
FLOAT_RANGE_FIELD_NAME,
BinaryDocValuesRangeQuery.QueryType.INTERSECTS,
-450.0f,
45000.0f,
true,
true
);
assertEquals(new IndexOrDocValuesQuery(range, dv), query);
}
public void testDoubleRangeQuery() throws Exception {
Query query = new QueryStringQueryBuilder(DOUBLE_RANGE_FIELD_NAME + ":[-450 TO 45000]").toQuery(createSearchExecutionContext());
Query range = DoubleRange.newIntersectsQuery(DOUBLE_RANGE_FIELD_NAME, new double[] { -450 }, new double[] { 45000 });
Query dv = RangeType.DOUBLE.dvRangeQuery(
DOUBLE_RANGE_FIELD_NAME,
BinaryDocValuesRangeQuery.QueryType.INTERSECTS,
-450.0,
45000.0,
true,
true
);
assertEquals(new IndexOrDocValuesQuery(range, dv), query);
}
public void testDateRangeQuery() throws Exception {
SearchExecutionContext context = createSearchExecutionContext();
RangeFieldMapper.RangeFieldType type = (RangeFieldMapper.RangeFieldType) context.getFieldType(DATE_RANGE_FIELD_NAME);
DateMathParser parser = type.dateMathParser;
Query query = new QueryStringQueryBuilder(DATE_RANGE_FIELD_NAME + ":[2010-01-01 TO 2018-01-01]").toQuery(
createSearchExecutionContext()
);
String lowerBoundExact = "2010-01-01T00:00:00.000";
String upperBoundExact = "2018-01-01T23:59:59.999";
Query range = LongRange.newIntersectsQuery(
DATE_RANGE_FIELD_NAME,
new long[] { parser.parse(lowerBoundExact, () -> 0).toEpochMilli() },
new long[] { parser.parse(upperBoundExact, () -> 0).toEpochMilli() }
);
Query dv = RangeType.DATE.dvRangeQuery(
DATE_RANGE_FIELD_NAME,
BinaryDocValuesRangeQuery.QueryType.INTERSECTS,
parser.parse(lowerBoundExact, () -> 0).toEpochMilli(),
parser.parse(upperBoundExact, () -> 0).toEpochMilli(),
true,
true
);
assertEquals(new IndexOrDocValuesQuery(range, dv), query);
// also make sure the produced bounds are the same as on a regular `date` field
DateFieldMapper.DateFieldType dateType = (DateFieldMapper.DateFieldType) context.getFieldType(DATE_FIELD_NAME);
parser = dateType.dateMathParser;
Query queryOnDateField = new QueryStringQueryBuilder(DATE_FIELD_NAME + ":[2010-01-01 TO 2018-01-01]").toQuery(
createSearchExecutionContext()
);
Query controlQuery = LongPoint.newRangeQuery(
DATE_FIELD_NAME,
new long[] { parser.parse(lowerBoundExact, () -> 0).toEpochMilli() },
new long[] { parser.parse(upperBoundExact, () -> 0).toEpochMilli() }
);
Query controlDv = SortedNumericDocValuesField.newSlowRangeQuery(
DATE_FIELD_NAME,
parser.parse(lowerBoundExact, () -> 0).toEpochMilli(),
parser.parse(upperBoundExact, () -> 0).toEpochMilli()
);
assertEquals(new IndexOrDocValuesQuery(controlQuery, controlDv), queryOnDateField);
}
public void testIPRangeQuery() throws Exception {
InetAddress lower = InetAddresses.forString("192.168.0.1");
InetAddress upper = InetAddresses.forString("192.168.0.5");
Query query = new QueryStringQueryBuilder(IP_RANGE_FIELD_NAME + ":[192.168.0.1 TO 192.168.0.5]").toQuery(
createSearchExecutionContext()
);
Query range = InetAddressRange.newIntersectsQuery(IP_RANGE_FIELD_NAME, lower, upper);
Query dv = RangeType.IP.dvRangeQuery(IP_RANGE_FIELD_NAME, BinaryDocValuesRangeQuery.QueryType.INTERSECTS, lower, upper, true, true);
assertEquals(new IndexOrDocValuesQuery(range, dv), query);
}
@Override
protected QueryStringQueryBuilder doCreateTestQueryBuilder() {
return new QueryStringQueryBuilder(INTEGER_RANGE_FIELD_NAME + ":[-450 TO 450]");
}
@Override
protected void doAssertLuceneQuery(QueryStringQueryBuilder queryBuilder, Query query, SearchExecutionContext context) {
assertThat(query, either(instanceOf(PointRangeQuery.class)).or(instanceOf(IndexOrDocValuesQuery.class)));
}
}
| RangeFieldQueryStringQueryBuilderTests |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine2.java | {
"start": 15480,
"end": 15869
} | class ____ {
private final RPC.Server server;
private final String methodName;
CallInfo(RPC.Server server, String methodName) {
this.server = server;
this.methodName = methodName;
}
public RPC.Server getServer() {
return server;
}
public String getMethodName() {
return methodName;
}
}
static | CallInfo |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.