language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/internal/log/SubSystemLogging.java | {
"start": 473,
"end": 779
} | class ____ package names
* <p>
* This is helpful to find such classes and is used to generate report
* (as a release artifact) describing logger names for logging configuration
* by the application.
* <p>
* At the moment Hibernate uses a mix sub-system logging and the more traditional
* package and | and |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/create/OracleCreatePackageTest1.java | {
"start": 1021,
"end": 3138
} | class ____ extends OracleTest {
public void test_types() throws Exception {
String sql = //
"CREATE OR REPLACE PACKAGE ACHIEVE_CONTRACT_SPECIMEN IS\n" +
" -- Author : 榫氬己\n" +
" -- Created : 2011-08-07\n" +
" -- Purpose : 涓氱哗鍒掑垎鏍锋湰鍚堝悓寮傚父鍚堝悓鏇存柊\n" +
" PROCEDURE STARTUPDATE(MODULUS_ID_VALUE NUMBER);\n" +
"END ACHIEVE_CONTRACT_SPECIMEN;";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
assertEquals("CREATE OR REPLACE PACKAGE ACHIEVE_CONTRACT_SPECIMEN\n" +
"\tPROCEDURE STARTUPDATE (\n" +
"\t\tMODULUS_ID_VALUE NUMBER\n" +
"\t)\n" +
"\t;\n" +
"END ACHIEVE_CONTRACT_SPECIMEN;",
SQLUtils.toSQLString(stmt, JdbcConstants.ORACLE));
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
// assertEquals(2, visitor.getTables().size());
//
// assertEquals(5, visitor.getColumns().size());
//
// assertTrue(visitor.containsColumn("employees", "employee_id"));
// assertTrue(visitor.containsColumn("employees", "*"));
// assertTrue(visitor.containsColumn("departments", "department_id"));
// assertTrue(visitor.containsColumn("employees", "salary"));
// assertTrue(visitor.containsColumn("employees", "commission_pct"));
}
}
| OracleCreatePackageTest1 |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java | {
"start": 1047,
"end": 1357
} | class ____ extends ActionType<AcknowledgedResponse> {
public static final ResumeFollowAction INSTANCE = new ResumeFollowAction();
public static final String NAME = "cluster:admin/xpack/ccr/resume_follow";
private ResumeFollowAction() {
super(NAME);
}
public static | ResumeFollowAction |
java | ReactiveX__RxJava | src/jmh/java/io/reactivex/rxjava3/parallel/ParallelPerf.java | {
"start": 1265,
"end": 3350
} | class ____ implements Function<Integer, Integer> {
@Param({"10000"})
public int count;
@Param({"1", "10", "100", "1000", "10000"})
public int compute;
@Param({"1", "2", "3", "4"})
public int parallelism;
Flowable<Integer> flatMap;
Flowable<Integer> groupBy;
Flowable<Integer> parallel;
@Override
public Integer apply(Integer t) {
Blackhole.consumeCPU(compute);
return t;
}
@Setup
public void setup() {
final int cpu = parallelism;
Integer[] ints = new Integer[count];
Arrays.fill(ints, 777);
Flowable<Integer> source = Flowable.fromArray(ints);
flatMap = source.flatMap(new Function<Integer, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Integer v) {
return Flowable.just(v).subscribeOn(Schedulers.computation())
.map(ParallelPerf.this);
}
}, cpu);
groupBy = source.groupBy(new Function<Integer, Integer>() {
int i;
@Override
public Integer apply(Integer v) {
return (i++) % cpu;
}
})
.flatMap(new Function<GroupedFlowable<Integer, Integer>, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(GroupedFlowable<Integer, Integer> g) {
return g.observeOn(Schedulers.computation()).map(ParallelPerf.this);
}
});
parallel = source.parallel(cpu).runOn(Schedulers.computation()).map(this).sequential();
}
void subscribe(Flowable<Integer> f, Blackhole bh) {
PerfAsyncConsumer consumer = new PerfAsyncConsumer(bh);
f.subscribe(consumer);
consumer.await(count);
}
@Benchmark
public void flatMap(Blackhole bh) {
subscribe(flatMap, bh);
}
@Benchmark
public void groupBy(Blackhole bh) {
subscribe(groupBy, bh);
}
@Benchmark
public void parallel(Blackhole bh) {
subscribe(parallel, bh);
}
}
| ParallelPerf |
java | quarkusio__quarkus | integration-tests/gradle/src/test/java/io/quarkus/gradle/BuildForkOptionsAreIncludedInQuarkusBuildTaskTest.java | {
"start": 130,
"end": 661
} | class ____ extends QuarkusGradleWrapperTestBase {
@Test
public void testBuildForkOptionsAreProcessed() throws Exception {
var projectDir = getProjectDir("basic-java-application-with-fork-options");
var buildResult = runGradleWrapper(projectDir, "clean", "quarkusBuild");
assertThat(BuildResult.isSuccessful(buildResult.getTasks().get(":quarkusGenerateCode"))).isTrue();
assertThat(buildResult.getOutput().contains("message!")).isTrue();
}
}
| BuildForkOptionsAreIncludedInQuarkusBuildTaskTest |
java | quarkusio__quarkus | extensions/jaxb/deployment/src/test/java/io/quarkus/jaxb/deployment/UserProvidedJaxbContextTest.java | {
"start": 2657,
"end": 2717
} | class ____ known to this context.");
}
public static | is |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/ConnectionMetadata.java | {
"start": 57,
"end": 1535
} | class ____ {
private volatile String clientName;
private volatile String libraryName;
private volatile String libraryVersion;
private volatile boolean sslEnabled;
public ConnectionMetadata() {
}
public ConnectionMetadata(RedisURI uri) {
apply(uri);
}
public void apply(RedisURI redisURI) {
setClientName(redisURI.getClientName());
setLibraryName(redisURI.getLibraryName());
setLibraryVersion(redisURI.getLibraryVersion());
setSslEnabled(redisURI.isSsl());
}
public void apply(ConnectionMetadata metadata) {
setClientName(metadata.getClientName());
setLibraryName(metadata.getLibraryName());
setLibraryVersion(metadata.getLibraryVersion());
setSslEnabled(metadata.isSslEnabled());
}
protected void setClientName(String clientName) {
this.clientName = clientName;
}
String getClientName() {
return clientName;
}
void setLibraryName(String libraryName) {
this.libraryName = libraryName;
}
String getLibraryName() {
return libraryName;
}
void setLibraryVersion(String libraryVersion) {
this.libraryVersion = libraryVersion;
}
String getLibraryVersion() {
return libraryVersion;
}
boolean isSslEnabled() {
return sslEnabled;
}
void setSslEnabled(boolean sslEnabled) {
this.sslEnabled = sslEnabled;
}
}
| ConnectionMetadata |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestHostRestrictingAuthorizationFilterHandler.java | {
"start": 1797,
"end": 6887
} | class ____ {
final static String CONFNAME =
HostRestrictingAuthorizationFilter.HDFS_CONFIG_PREFIX +
HostRestrictingAuthorizationFilter.RESTRICTION_CONFIG;
/*
* Test running in with no ACL rules (restrict all)
*/
@Test
public void testRejectAll() {
EmbeddedChannel channel = new CustomEmbeddedChannel("127.0.0.1", 1006,
new HostRestrictingAuthorizationFilterHandler());
FullHttpRequest httpRequest =
new DefaultFullHttpRequest(HttpVersion.HTTP_1_1,
HttpMethod.GET,
WebHdfsFileSystem.PATH_PREFIX + "/user/myName/fooFile?op=OPEN");
// we will send back an error so ensure our write returns false
assertFalse(channel.writeInbound(httpRequest),
"Should get error back from handler for rejected request");
DefaultHttpResponse channelResponse =
(DefaultHttpResponse) channel.outboundMessages().poll();
assertNotNull(channelResponse, "Expected response to exist.");
assertEquals(HttpResponseStatus.FORBIDDEN, channelResponse.status());
assertFalse(channel.isOpen());
}
/*
* Test accepting multiple allowed GET requests to ensure channel can be
* reused
*/
@Test
public void testMultipleAcceptedGETsOneChannel() {
Configuration conf = new Configuration();
conf.set(CONFNAME, "*,*,/allowed");
HostRestrictingAuthorizationFilter filter =
HostRestrictingAuthorizationFilterHandler.initializeState(conf);
EmbeddedChannel channel = new CustomEmbeddedChannel("127.0.0.1", 1006,
new HostRestrictingAuthorizationFilterHandler(filter));
FullHttpRequest allowedHttpRequest =
new DefaultFullHttpRequest(HttpVersion.HTTP_1_1,
HttpMethod.GET,
WebHdfsFileSystem.PATH_PREFIX + "/allowed/file_one?op=OPEN");
FullHttpRequest allowedHttpRequest2 =
new DefaultFullHttpRequest(HttpVersion.HTTP_1_1,
HttpMethod.GET,
WebHdfsFileSystem.PATH_PREFIX + "/allowed/file_two?op=OPEN");
FullHttpRequest allowedHttpRequest3 =
new DefaultFullHttpRequest(HttpVersion.HTTP_1_1,
HttpMethod.GET,
WebHdfsFileSystem.PATH_PREFIX + "/allowed/file_three?op=OPEN");
assertTrue(channel.writeInbound(allowedHttpRequest), "Should successfully accept request");
assertTrue(channel.writeInbound(allowedHttpRequest2),
"Should successfully accept request, second time");
assertTrue(channel.writeInbound(allowedHttpRequest3),
"Should successfully accept request, third time");
}
/*
* Test accepting multiple allowed GET requests in different channels to a
* single filter instance
*/
@Test
public void testMultipleChannels() {
Configuration conf = new Configuration();
conf.set(CONFNAME, "*,*,/allowed");
HostRestrictingAuthorizationFilter filter =
HostRestrictingAuthorizationFilterHandler.initializeState(conf);
EmbeddedChannel channel1 = new CustomEmbeddedChannel("127.0.0.1", 1006,
new HostRestrictingAuthorizationFilterHandler(filter));
EmbeddedChannel channel2 = new CustomEmbeddedChannel("127.0.0.2", 1006,
new HostRestrictingAuthorizationFilterHandler(filter));
EmbeddedChannel channel3 = new CustomEmbeddedChannel("127.0.0.3", 1006,
new HostRestrictingAuthorizationFilterHandler(filter));
FullHttpRequest allowedHttpRequest =
new DefaultFullHttpRequest(HttpVersion.HTTP_1_1,
HttpMethod.GET,
WebHdfsFileSystem.PATH_PREFIX + "/allowed/file_one?op=OPEN");
FullHttpRequest allowedHttpRequest2 =
new DefaultFullHttpRequest(HttpVersion.HTTP_1_1,
HttpMethod.GET,
WebHdfsFileSystem.PATH_PREFIX + "/allowed/file_two?op=OPEN");
FullHttpRequest allowedHttpRequest3 =
new DefaultFullHttpRequest(HttpVersion.HTTP_1_1,
HttpMethod.GET,
WebHdfsFileSystem.PATH_PREFIX + "/allowed/file_three?op=OPEN");
assertTrue(channel1.writeInbound(allowedHttpRequest), "Should successfully accept request");
assertTrue(channel2.writeInbound(allowedHttpRequest2),
"Should successfully accept request, second time");
// verify closing one channel does not affect remaining channels
channel1.close();
assertTrue(channel3.writeInbound(allowedHttpRequest3),
"Should successfully accept request, third time");
}
/*
* Test accepting a GET request for the file checksum
*/
@Test
public void testAcceptGETFILECHECKSUM() {
EmbeddedChannel channel = new CustomEmbeddedChannel("127.0.0.1", 1006,
new HostRestrictingAuthorizationFilterHandler());
FullHttpRequest httpRequest =
new DefaultFullHttpRequest(HttpVersion.HTTP_1_1,
HttpMethod.GET,
WebHdfsFileSystem.PATH_PREFIX + "/user/myName/fooFile?op" +
"=GETFILECHECKSUM");
assertTrue(channel.writeInbound(httpRequest), "Should successfully accept request");
}
/*
* Custom channel implementation which allows for mocking a client's remote
* address
*/
protected static | TestHostRestrictingAuthorizationFilterHandler |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/StreamsConfigTest.java | {
"start": 46780,
"end": 47461
} | class ____.apache.kafka.streams.StreamsConfigTest$MisconfiguredSerde",
e.getMessage()
);
}
}
@SuppressWarnings("resource")
@Test
public void shouldSpecifyCorrectValueSerdeClassOnError() {
final Properties props = getStreamsConfig();
props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, MisconfiguredSerde.class);
final StreamsConfig config = new StreamsConfig(props);
try {
config.defaultValueSerde();
fail("Test should throw a StreamsException");
} catch (final StreamsException e) {
assertEquals(
"Failed to configure value serde | org |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/testutil/FiveMinuteUser.java | {
"start": 154,
"end": 203
} | enum ____ { MALE, FEMALE };
public static | Gender |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/webmonitor/threadinfo/VertexThreadInfoTrackerTest.java | {
"start": 33593,
"end": 33966
} | class ____<K, V> implements RemovalListener<K, V> {
private final CountDownLatch latch;
private LatchRemovalListener(CountDownLatch latch) {
this.latch = latch;
}
@Override
public void onRemoval(@Nonnull RemovalNotification<K, V> removalNotification) {
latch.countDown();
}
}
}
| LatchRemovalListener |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/plugin/version/DefaultPluginVersionRequest.java | {
"start": 1279,
"end": 4503
} | class ____ implements PluginVersionRequest {
private String groupId;
private String artifactId;
private Model pom;
private List<RemoteRepository> repositories = Collections.emptyList();
private RepositorySystemSession session;
/**
* Creates an empty request.
*/
public DefaultPluginVersionRequest() {}
/**
* Creates a request for the specified plugin by copying settings from the specified build session. If the session
* has a current project, its plugin repositories will be used as well.
*
* @param plugin The plugin for which to resolve a version, must not be {@code null}.
* @param session The Maven session to use, must not be {@code null}.
*/
public DefaultPluginVersionRequest(Plugin plugin, MavenSession session) {
setGroupId(plugin.getGroupId());
setArtifactId(plugin.getArtifactId());
setRepositorySession(session.getRepositorySession());
MavenProject project = session.getCurrentProject();
if (project != null) {
setRepositories(project.getRemotePluginRepositories());
}
}
/**
* Creates a request for the specified plugin using the given repository session and plugin repositories.
*
* @param plugin The plugin for which to resolve a version, must not be {@code null}.
* @param session The repository session to use, must not be {@code null}.
* @param repositories The plugin repositories to query, may be {@code null}.
*/
public DefaultPluginVersionRequest(
Plugin plugin, RepositorySystemSession session, List<RemoteRepository> repositories) {
setGroupId(plugin.getGroupId());
setArtifactId(plugin.getArtifactId());
setRepositorySession(session);
setRepositories(repositories);
}
@Override
public String getGroupId() {
return groupId;
}
@Override
public DefaultPluginVersionRequest setGroupId(String groupId) {
this.groupId = groupId;
return this;
}
@Override
public String getArtifactId() {
return artifactId;
}
@Override
public DefaultPluginVersionRequest setArtifactId(String artifactId) {
this.artifactId = artifactId;
return this;
}
@Override
public Model getPom() {
return pom;
}
@Override
public DefaultPluginVersionRequest setPom(Model pom) {
this.pom = pom;
return this;
}
@Override
public List<RemoteRepository> getRepositories() {
return repositories;
}
@Override
public DefaultPluginVersionRequest setRepositories(List<RemoteRepository> repositories) {
if (repositories != null) {
this.repositories = Collections.unmodifiableList(repositories);
} else {
this.repositories = Collections.emptyList();
}
return this;
}
@Override
public RepositorySystemSession getRepositorySession() {
return session;
}
@Override
public DefaultPluginVersionRequest setRepositorySession(RepositorySystemSession session) {
this.session = session;
return this;
}
}
| DefaultPluginVersionRequest |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/streaming/runtime/CoStreamITCase.java | {
"start": 1679,
"end": 5536
} | class ____ extends AbstractTestBaseJUnit4 {
@Test
public void test() throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);
TestListResultSink<String> resultSink = new TestListResultSink<String>();
DataStream<Integer> src = env.fromData(1, 3, 5);
DataStream<Integer> filter1 =
src.filter(
new FilterFunction<Integer>() {
@Override
public boolean filter(Integer value) throws Exception {
return true;
}
})
.keyBy(
new KeySelector<Integer, Integer>() {
@Override
public Integer getKey(Integer value) throws Exception {
return value;
}
});
DataStream<Tuple2<Integer, Integer>> filter2 =
src.map(
new MapFunction<Integer, Tuple2<Integer, Integer>>() {
@Override
public Tuple2<Integer, Integer> map(Integer value)
throws Exception {
return new Tuple2<>(value, value + 1);
}
})
.rebalance()
.filter(
new FilterFunction<Tuple2<Integer, Integer>>() {
@Override
public boolean filter(Tuple2<Integer, Integer> value)
throws Exception {
return true;
}
})
.disableChaining()
.keyBy(
new KeySelector<Tuple2<Integer, Integer>, Integer>() {
@Override
public Integer getKey(Tuple2<Integer, Integer> value)
throws Exception {
return value.f0;
}
});
DataStream<String> connected =
filter1.connect(filter2)
.flatMap(
new CoFlatMapFunction<Integer, Tuple2<Integer, Integer>, String>() {
@Override
public void flatMap1(Integer value, Collector<String> out)
throws Exception {
out.collect(value.toString());
}
@Override
public void flatMap2(
Tuple2<Integer, Integer> value, Collector<String> out)
throws Exception {
out.collect(value.toString());
}
});
connected.addSink(resultSink);
env.execute();
List<String> expected = Arrays.asList("(1,2)", "(3,4)", "(5,6)", "1", "3", "5");
List<String> result = resultSink.getResult();
Collections.sort(result);
assertEquals(expected, result);
}
}
| CoStreamITCase |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/introspect/DefaultAccessorNamingStrategy.java | {
"start": 21390,
"end": 22940
} | class ____
extends DefaultAccessorNamingStrategy
{
/**
* Names of actual Record fields from definition; auto-detected.
*/
protected final Set<String> _fieldNames;
public RecordNaming(MapperConfig<?> config, AnnotatedClass forClass) {
super(config, forClass,
// no setters for (immutable) Records:
null,
// trickier: regular fields are ok (handled differently), but should
// we also allow getter discovery? For now let's do so
"get", "is", null);
String[] recordFieldNames = RecordUtil.getRecordFieldNames(forClass.getRawType());
// 01-May-2022, tatu: Due to [databind#3417] may return null when no info available
_fieldNames = recordFieldNames == null ?
Collections.emptySet() :
new HashSet<>(Arrays.asList(recordFieldNames));
}
@Override
public String findNameForRegularGetter(AnnotatedMethod am, String name)
{
// By default, field names are un-prefixed, but verify so that we will not
// include "toString()" or additional custom methods (unless latter are
// annotated for inclusion)
if (_fieldNames.contains(name)) {
return name;
}
// but also allow auto-detecting additional getters, if any?
return super.findNameForRegularGetter(am, name);
}
}
}
| RecordNaming |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/security/AbstractPermissionCheckerRestMultiTest.java | {
"start": 2486,
"end": 3912
} | class ____ {
@GET
@Path("public-multi")
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public Multi<Byte> publicMethod(@QueryParam("user") String user) {
return RestMulti.fromMultiData(Multi.createFrom().<Byte> empty())
.status(201)
.header("header1", "value header 1")
.build();
}
@PermissionsAllowed("secured")
@GET
@Path("secured-multi")
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public Multi<Byte> securedMethod(@QueryParam("user") String user) {
return RestMulti.fromMultiData(Multi.createFrom().<Byte> empty())
.status(201)
.header("header1", "value header 1")
.build();
}
@PermissionsAllowed("secured")
@GET
@Path("secured-rest-multi")
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public RestMulti<Byte> securedMethodRestMulti(@QueryParam("user") String user) {
return RestMulti.fromMultiData(Multi.createFrom().<Byte> empty())
.status(201)
.header("header1", "value header 1")
.build();
}
@PermissionChecker(value = "secured")
public boolean canCallSecured(String user) {
return "Georgios".equals(user);
}
}
}
| TestResource |
java | google__auto | value/src/it/functional/src/test/java/com/google/auto/value/AutoValueTest.java | {
"start": 93605,
"end": 94521
} | class ____ {
abstract Builder setBar(int x);
abstract InnerWithBuilder build();
}
}
@Test
public void testBuilderWithinBuilder() {
OuterWithBuilder x =
OuterWithBuilder.builder()
.inner(InnerWithBuilder.builder().setBar(23).build())
.foo("yes")
.build();
String expectedStringX =
omitIdentifiers
? "{yes, {23}}"
: "OuterWithBuilder{foo=yes, inner=InnerWithBuilder{bar=23}}";
assertThat(x.toString()).isEqualTo(expectedStringX);
OuterWithBuilder.Builder xBuilder = x.toBuilder();
xBuilder.innerBuilder().setBar(17);
OuterWithBuilder y = xBuilder.build();
String expectedStringY =
omitIdentifiers
? "{yes, {17}}"
: "OuterWithBuilder{foo=yes, inner=InnerWithBuilder{bar=17}}";
assertThat(y.toString()).isEqualTo(expectedStringY);
}
public static | Builder |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/async/AsyncEndpointDualTopicLoadBalanceTest.java | {
"start": 979,
"end": 1848
} | class ____ extends ContextTestSupport {
@Test
public void testAsyncEndpoint() throws Exception {
getMockEndpoint("mock:before").expectedBodiesReceived("Hello Camel");
getMockEndpoint("mock:result").expectedBodiesReceived("Hello Camel");
template.sendBody("direct:start", "Hello Camel");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
context.addComponent("async", new MyAsyncComponent());
from("direct:start").to("mock:before").to("log:before").loadBalance().topic()
.to("async:bye:camel", "async:bye:world").end().to("log:after").to("mock:result");
}
};
}
}
| AsyncEndpointDualTopicLoadBalanceTest |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/common/geo/GeometryIndexerTests.java | {
"start": 1272,
"end": 12460
} | class ____ extends ESTestCase {
GeoShapeIndexer indexer = new GeoShapeIndexer(Orientation.CCW, "test");
public void testRectangle() {
Rectangle indexed = new Rectangle(-179, -178, 10, -10);
Geometry processed = GeometryNormalizer.apply(Orientation.CCW, indexed);
assertEquals(indexed, processed);
// a rectangle is broken into two triangles
List<IndexableField> fields = indexer.indexShape(indexed);
assertEquals(fields.size(), 2);
indexed = new Rectangle(179, -179, 10, -10);
processed = GeometryNormalizer.apply(Orientation.CCW, indexed);
assertEquals(indexed, processed);
// a rectangle crossing the dateline is broken into 4 triangles
fields = indexer.indexShape(indexed);
assertEquals(fields.size(), 4);
}
public void testDegeneratedRectangles() {
Rectangle indexed = new Rectangle(-179, -179, 10, -10);
Geometry processed = GeometryNormalizer.apply(Orientation.CCW, indexed);
assertEquals(indexed, processed);
// Rectangle is a line
List<IndexableField> fields = indexer.indexShape(indexed);
assertEquals(fields.size(), 1);
indexed = new Rectangle(-179, -178, 10, 10);
processed = GeometryNormalizer.apply(Orientation.CCW, indexed);
assertEquals(indexed, processed);
// Rectangle is a line
fields = indexer.indexShape(indexed);
assertEquals(fields.size(), 1);
indexed = new Rectangle(-179, -179, 10, 10);
processed = GeometryNormalizer.apply(Orientation.CCW, indexed);
assertEquals(indexed, processed);
// Rectangle is a point
fields = indexer.indexShape(indexed);
assertEquals(fields.size(), 1);
indexed = new Rectangle(180, -179, 10, -10);
processed = GeometryNormalizer.apply(Orientation.CCW, indexed);
assertEquals(indexed, processed);
// Rectangle crossing the dateline, one side is a line
fields = indexer.indexShape(indexed);
assertEquals(fields.size(), 3);
indexed = new Rectangle(180, -179, 10, 10);
processed = GeometryNormalizer.apply(Orientation.CCW, indexed);
assertEquals(indexed, processed);
// Rectangle crossing the dateline, one side is a point,
// other side a line
fields = indexer.indexShape(indexed);
assertEquals(fields.size(), 2);
indexed = new Rectangle(-178, -180, 10, -10);
processed = GeometryNormalizer.apply(Orientation.CCW, indexed);
assertEquals(indexed, processed);
// Rectangle crossing the dateline, one side is a line
fields = indexer.indexShape(indexed);
assertEquals(fields.size(), 3);
indexed = new Rectangle(-178, -180, 10, 10);
processed = GeometryNormalizer.apply(Orientation.CCW, indexed);
assertEquals(indexed, processed);
// Rectangle crossing the dateline, one side is a point,
// other side a line
fields = indexer.indexShape(indexed);
assertEquals(fields.size(), 2);
indexed = new Rectangle(0.0, 1.0819389717881644E-299, 1.401298464324817E-45, 0.0);
processed = GeometryNormalizer.apply(Orientation.CCW, indexed);
assertEquals(indexed, processed);
// Rectangle is a point
fields = indexer.indexShape(processed);
assertEquals(fields.size(), 1);
indexed = new Rectangle(-1.4017117476654298E-170, 0.0, 0.0, -2.415012082648633E-174);
processed = GeometryNormalizer.apply(Orientation.CCW, indexed);
assertEquals(indexed, processed);
// Rectangle is a triangle but needs to be computed quantize
fields = indexer.indexShape(processed);
assertEquals(fields.size(), 2);
}
public void testPolygon() {
Polygon polygon = new Polygon(new LinearRing(new double[] { 160, 200, 200, 160, 160 }, new double[] { 10, 10, 20, 20, 10 }));
Geometry indexed = new MultiPolygon(
Arrays.asList(
new Polygon(new LinearRing(new double[] { 180, 180, 160, 160, 180 }, new double[] { 10, 20, 20, 10, 10 })),
new Polygon(new LinearRing(new double[] { -180, -180, -160, -160, -180 }, new double[] { 20, 10, 10, 20, 20 }))
)
);
assertEquals(indexed, GeometryNormalizer.apply(Orientation.CCW, polygon));
polygon = new Polygon(
new LinearRing(new double[] { 160, 200, 200, 160, 160 }, new double[] { 10, 10, 20, 20, 10 }),
Collections.singletonList(new LinearRing(new double[] { 165, 165, 195, 195, 165 }, new double[] { 12, 18, 18, 12, 12 }))
);
indexed = new MultiPolygon(
Arrays.asList(
new Polygon(
new LinearRing(
new double[] { 180, 180, 165, 165, 180, 180, 160, 160, 180 },
new double[] { 10, 12, 12, 18, 18, 20, 20, 10, 10 }
)
),
new Polygon(
new LinearRing(
new double[] { -180, -180, -160, -160, -180, -180, -165, -165, -180 },
new double[] { 12, 10, 10, 20, 20, 18, 18, 12, 12 }
)
)
)
);
assertEquals(indexed, GeometryNormalizer.apply(Orientation.CCW, polygon));
}
public void testPolygonOrientation() throws IOException, ParseException {
assertEquals(
expected("POLYGON ((160 10, -160 10, -160 0, 160 0, 160 10))"), // current algorithm shifts edges to left
actual("POLYGON ((160 0, 160 10, -160 10, -160 0, 160 0))", randomBoolean())
); // In WKT the orientation is ignored
assertEquals(
expected("POLYGON ((20 10, -20 10, -20 0, 20 0, 20 10)))"),
actual("POLYGON ((20 0, 20 10, -20 10, -20 0, 20 0))", randomBoolean())
);
assertEquals(
expected("POLYGON ((160 10, -160 10, -160 0, 160 0, 160 10))"),
actual(polygon(null, 160, 0, 160, 10, -160, 10, -160, 0, 160, 0), true)
);
assertEquals(
expected("MULTIPOLYGON (((180 0, 180 10, 160 10, 160 0, 180 0)), ((-180 10, -180 0, -160 0, -160 10, -180 10)))"),
actual(polygon(randomBoolean() ? null : false, 160, 0, 160, 10, -160, 10, -160, 0, 160, 0), false)
);
assertEquals(
expected("MULTIPOLYGON (((180 0, 180 10, 160 10, 160 0, 180 0)), ((-180 10, -180 0, -160 0, -160 10, -180 10)))"),
actual(polygon(false, 160, 0, 160, 10, -160, 10, -160, 0, 160, 0), true)
);
assertEquals(
expected("POLYGON ((20 10, -20 10, -20 0, 20 0, 20 10)))"),
actual(polygon(randomBoolean() ? null : randomBoolean(), 20, 0, 20, 10, -20, 10, -20, 0, 20, 0), randomBoolean())
);
assertEquals(
expected("POLYGON ((180 29, 180 38, 180 56, 180 53, 178 47, 177 23, 180 29))"),
actual("POLYGON ((180 38, 180.0 56, 180.0 53, 178 47, 177 23, 180 29, 180 36, 180 37, 180 38))", randomBoolean())
);
assertEquals(
expected("POLYGON ((-135 85, 135 85, 45 85, -45 85, -135 85))"),
actual("POLYGON ((-45 85, -135 85, 135 85, 45 85, -45 85))", randomBoolean())
);
}
public void testInvalidSelfCrossingPolygon() {
Polygon polygon = new Polygon(
new LinearRing(new double[] { 0, 0, 1, 0.5, 1.5, 1, 2, 2, 0 }, new double[] { 0, 2, 1.9, 1.8, 1.8, 1.9, 2, 0, 0 })
);
Exception e = expectThrows(IllegalArgumentException.class, () -> GeometryNormalizer.apply(Orientation.CCW, polygon));
assertThat(e.getMessage(), containsString("Self-intersection at or near point ["));
assertThat(e.getMessage(), not(containsString("NaN")));
}
public void testCrossingDateline() {
Polygon polygon = new Polygon(new LinearRing(new double[] { 170, -170, -170, 170, 170 }, new double[] { -10, -10, 10, 10, -10 }));
Geometry geometry = GeometryNormalizer.apply(Orientation.CCW, polygon);
assertTrue(geometry instanceof MultiPolygon);
polygon = new Polygon(new LinearRing(new double[] { 180, -170, -170, 170, 180 }, new double[] { -10, -5, 15, -15, -10 }));
geometry = GeometryNormalizer.apply(Orientation.CCW, polygon);
assertTrue(geometry instanceof MultiPolygon);
}
public void testPolygonAllCollinearPoints() {
Polygon polygon = new Polygon(new LinearRing(new double[] { 0, 1, -1, 0 }, new double[] { 0, 1, -1, 0 }));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> indexer.indexShape(polygon));
assertEquals("at least three non-collinear points required", e.getMessage());
}
private XContentBuilder polygon(Boolean orientation, double... val) throws IOException {
XContentBuilder pointGeoJson = XContentFactory.jsonBuilder().startObject();
{
pointGeoJson.field("type", "polygon");
if (orientation != null) {
pointGeoJson.field("orientation", orientation ? "right" : "left");
}
pointGeoJson.startArray("coordinates").startArray();
{
assertEquals(0, val.length % 2);
for (int i = 0; i < val.length; i += 2) {
pointGeoJson.startArray().value(val[i]).value(val[i + 1]).endArray();
}
}
pointGeoJson.endArray().endArray();
}
pointGeoJson.endObject();
return pointGeoJson;
}
private Geometry expected(String wkt) throws IOException, ParseException {
return parseGeometry(wkt, true);
}
private Geometry actual(String wkt, boolean rightOrientation) throws IOException, ParseException {
Geometry shape = parseGeometry(wkt, rightOrientation);
return GeometryNormalizer.apply(Orientation.CCW, shape);
}
private Geometry actual(XContentBuilder geoJson, boolean rightOrientation) throws IOException, ParseException {
Geometry shape = parseGeometry(geoJson, rightOrientation);
return GeometryNormalizer.apply(Orientation.CCW, shape);
}
private Geometry parseGeometry(String wkt, boolean rightOrientation) throws IOException, ParseException {
XContentBuilder json = XContentFactory.jsonBuilder().startObject().field("value", wkt).endObject();
try (XContentParser parser = createParser(json)) {
parser.nextToken();
parser.nextToken();
parser.nextToken();
GeometryParser geometryParser = new GeometryParser(rightOrientation, true, true);
return geometryParser.parse(parser);
}
}
private Geometry parseGeometry(XContentBuilder geoJson, boolean rightOrientation) throws IOException, ParseException {
try (XContentParser parser = createParser(geoJson)) {
parser.nextToken();
GeometryParser geometryParser = new GeometryParser(rightOrientation, true, true);
return geometryParser.parse(parser);
}
}
}
| GeometryIndexerTests |
java | quarkusio__quarkus | integration-tests/hibernate-search-orm-elasticsearch-tenancy/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/multitenancy/book/BookResource.java | {
"start": 742,
"end": 1584
} | class ____ {
@Inject
@PersistenceUnit("books")
EntityManager entityManager;
@Inject
@PersistenceUnit("books")
SearchSession searchSession;
@POST
@Path("/")
@Transactional
public Response create(@NotNull Book book) {
searchSession.indexingPlanFilter(context -> context.exclude(Book.class));
entityManager.persist(book);
return Response.ok(book).status(Response.Status.CREATED).build();
}
@GET
@Path("/search")
@Transactional
public Response search(@NotNull @QueryParam("terms") String terms) {
List<Book> list = searchSession.search(Book.class)
.where(f -> f.simpleQueryString().field("name").matching(terms))
.fetchAllHits();
return Response.status(Response.Status.OK).entity(list).build();
}
}
| BookResource |
java | apache__logging-log4j2 | log4j-osgi-test/src/test/java/org/apache/logging/log4j/osgi/tests/CustomConfigurationFactory.java | {
"start": 1391,
"end": 2316
} | class ____ extends ConfigurationFactory {
/**
* Valid file extensions for XML files.
*/
public static final String[] SUFFIXES = new String[] {".custom"};
/**
* Returns the Configuration.
* @param source The InputSource.
* @return The Configuration.
*/
@Override
public Configuration getConfiguration(final LoggerContext loggerContext, final ConfigurationSource source) {
return new CustomConfiguration(loggerContext, source);
}
@Override
public Configuration getConfiguration(
final LoggerContext loggerContext, final String name, final URI configLocation) {
return new CustomConfiguration(loggerContext);
}
/**
* Returns the file suffixes for XML files.
* @return An array of File extensions.
*/
@Override
public String[] getSupportedTypes() {
return SUFFIXES;
}
}
| CustomConfigurationFactory |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncResultsServiceTests.java | {
"start": 2102,
"end": 2330
} | class ____ extends ESSingleNodeTestCase {
private ClusterService clusterService;
private TaskManager taskManager;
private AsyncTaskIndexService<TestAsyncResponse> indexService;
public static | AsyncResultsServiceTests |
java | apache__flink | flink-filesystems/flink-gs-fs-hadoop/src/main/java/org/apache/flink/fs/gs/utils/ConfigUtils.java | {
"start": 8227,
"end": 9036
} | interface ____ {
/**
* Returns a named environment variable.
*
* @param name Name of variable
* @return Value of variable
*/
Optional<String> getenv(String name);
/**
* Loads the Hadoop configuration from a directory.
*
* @param configDir The Hadoop config directory.
* @return The Hadoop configuration.
*/
org.apache.hadoop.conf.Configuration loadHadoopConfigFromDir(String configDir);
/**
* Loads the Google credentials from a file.
*
* @param credentialsPath The path of the credentials file.
* @return The Google credentials.
*/
GoogleCredentials loadStorageCredentialsFromFile(String credentialsPath);
}
}
| ConfigContext |
java | apache__camel | components/camel-jcache/src/main/java/org/apache/camel/component/jcache/JCacheComponent.java | {
"start": 2980,
"end": 4587
} | class ____ of the {@link javax.cache.spi.CachingProvider}
*/
public String getCachingProvider() {
return cachingProvider;
}
public void setCachingProvider(String cachingProvider) {
this.cachingProvider = cachingProvider;
}
/**
* A {@link Configuration} for the {@link Cache}
*/
public Configuration getCacheConfiguration() {
return cacheConfiguration;
}
public void setCacheConfiguration(Configuration cacheConfiguration) {
this.cacheConfiguration = cacheConfiguration;
}
/**
* Properties to configure jcache
*/
public Map getCacheConfigurationProperties() {
return cacheConfigurationProperties;
}
public void setCacheConfigurationProperties(Map cacheConfigurationProperties) {
this.cacheConfigurationProperties = cacheConfigurationProperties;
}
public String getCacheConfigurationPropertiesRef() {
return cacheConfigurationPropertiesRef;
}
/**
* References to an existing {@link Properties} or {@link Map} to lookup in the registry to use for configuring
* jcache.
*/
public void setCacheConfigurationPropertiesRef(String cacheConfigurationPropertiesRef) {
this.cacheConfigurationPropertiesRef = cacheConfigurationPropertiesRef;
}
/**
* An implementation specific URI for the {@link CacheManager}
*/
public String getConfigurationUri() {
return configurationUri;
}
public void setConfigurationUri(String configurationUri) {
this.configurationUri = configurationUri;
}
}
| name |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/FloatingPointAssertionWithinEpsilonTest.java | {
"start": 1092,
"end": 1858
} | class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(FloatingPointAssertionWithinEpsilon.class, getClass());
@Test
public void positiveCase() {
compilationHelper
.addSourceLines(
"FloatingPointAssertionWithinEpsilonPositiveCases.java",
"""
package com.google.errorprone.bugpatterns.testdata;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertEquals;
/**
* Positive test cases for FloatingPointAssertionWithinEpsilon check.
*
* @author ghm@google.com (Graeme Morgan)
*/
final | FloatingPointAssertionWithinEpsilonTest |
java | elastic__elasticsearch | test/framework/src/test/java/org/elasticsearch/bootstrap/TestScopeResolverTests.java | {
"start": 685,
"end": 2040
} | class ____ extends ESTestCase {
public void testScopeResolverServerClass() {
var testBuildInfo = new TestBuildInfo(
"server",
List.of(new TestBuildInfoLocation("org/elasticsearch/Build.class", "org.elasticsearch.server"))
);
var resolver = TestScopeResolver.createScopeResolver(testBuildInfo, List.of(), Set.of());
var scope = resolver.apply(Plugin.class);
assertThat(scope.componentName(), is("(server)"));
assertThat(scope.moduleName(), is("org.elasticsearch.server"));
}
public void testScopeResolverInternalClass() {
var testBuildInfo = new TestBuildInfo(
"server",
List.of(new TestBuildInfoLocation("org/elasticsearch/Build.class", "org.elasticsearch.server"))
);
var testOwnBuildInfo = new TestBuildInfo(
"test-component",
List.of(new TestBuildInfoLocation("org/elasticsearch/bootstrap/TestBuildInfoParserTests.class", "test-module-name"))
);
var resolver = TestScopeResolver.createScopeResolver(testBuildInfo, List.of(testOwnBuildInfo), Set.of("test-component"));
var scope = resolver.apply(this.getClass());
assertThat(scope.componentName(), is("test-component"));
assertThat(scope.moduleName(), is("test-module-name"));
}
}
| TestScopeResolverTests |
java | elastic__elasticsearch | x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportEnrichStatsAction.java | {
"start": 1606,
"end": 5479
} | class ____ extends TransportLocalClusterStateAction<EnrichStatsAction.Request, EnrichStatsAction.Response> {
private final Client client;
/**
* NB prior to 9.0 this was a TransportMasterNodeAction so for BwC it must be registered with the TransportService until
* we no longer need to support calling this action remotely.
*/
@UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT)
@SuppressWarnings("this-escape")
@Inject
public TransportEnrichStatsAction(
TransportService transportService,
ClusterService clusterService,
ActionFilters actionFilters,
Client client
) {
super(
EnrichStatsAction.NAME,
actionFilters,
transportService.getTaskManager(),
clusterService,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
this.client = client;
transportService.registerRequestHandler(
actionName,
executor,
false,
true,
EnrichStatsAction.Request::new,
(request, channel, task) -> executeDirect(task, request, new ChannelActionListener<>(channel))
);
}
@Override
protected void localClusterStateOperation(
Task task,
EnrichStatsAction.Request request,
ClusterState state,
ActionListener<EnrichStatsAction.Response> listener
) {
EnrichCoordinatorStatsAction.Request statsRequest = new EnrichCoordinatorStatsAction.Request();
ActionListener<EnrichCoordinatorStatsAction.Response> statsListener = listener.delegateFailureAndWrap((delegate, response) -> {
if (response.hasFailures()) {
// Report failures even if some node level requests succeed:
Exception failure = null;
for (FailedNodeException nodeFailure : response.failures()) {
if (failure == null) {
failure = nodeFailure;
} else {
failure.addSuppressed(nodeFailure);
}
}
delegate.onFailure(failure);
return;
}
List<CoordinatorStats> coordinatorStats = response.getNodes()
.stream()
.map(EnrichCoordinatorStatsAction.NodeResponse::getCoordinatorStats)
.sorted(Comparator.comparing(CoordinatorStats::nodeId))
.collect(Collectors.toList());
List<ExecutingPolicy> policyExecutionTasks = taskManager.getTasks()
.values()
.stream()
.filter(t -> t.getAction().equals(EnrichPolicyExecutor.TASK_ACTION))
.map(t -> t.taskInfo(clusterService.localNode().getId(), true))
.map(t -> new ExecutingPolicy(t.description(), t))
.sorted(Comparator.comparing(ExecutingPolicy::name))
.collect(Collectors.toList());
List<EnrichStatsAction.Response.CacheStats> cacheStats = response.getNodes()
.stream()
.map(EnrichCoordinatorStatsAction.NodeResponse::getCacheStats)
.filter(Objects::nonNull)
.sorted(Comparator.comparing(EnrichStatsAction.Response.CacheStats::nodeId))
.collect(Collectors.toList());
delegate.onResponse(new EnrichStatsAction.Response(policyExecutionTasks, coordinatorStats, cacheStats));
});
((CancellableTask) task).ensureNotCancelled();
client.execute(EnrichCoordinatorStatsAction.INSTANCE, statsRequest, statsListener);
}
@Override
protected ClusterBlockException checkBlock(EnrichStatsAction.Request request, ClusterState state) {
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
}
}
| TransportEnrichStatsAction |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/domain/userguide/CreditCardPayment.java | {
"start": 309,
"end": 589
} | class ____ extends Payment {
@Column(name = "card_number")
String cardNumber;
public void setCardNumber(String cardNumber) {
this.cardNumber = cardNumber;
}
public String getCardNumber() {
return cardNumber;
}
}
//end::hql-examples-domain-model-example[]
| CreditCardPayment |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/AnnotationUtilsTests.java | {
"start": 48199,
"end": 48260
} | class ____ {
}
@Meta2
static | ClassWithInheritedMetaAnnotation |
java | apache__camel | components/camel-telegram/src/generated/java/org/apache/camel/component/telegram/TelegramEndpointUriFactory.java | {
"start": 518,
"end": 3395
} | class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":type";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(31);
props.add("authorizationToken");
props.add("backoffErrorThreshold");
props.add("backoffIdleThreshold");
props.add("backoffMultiplier");
props.add("baseUri");
props.add("bridgeErrorHandler");
props.add("bufferSize");
props.add("chatId");
props.add("client");
props.add("delay");
props.add("exceptionHandler");
props.add("exchangePattern");
props.add("greedy");
props.add("initialDelay");
props.add("lazyStartProducer");
props.add("limit");
props.add("pollStrategy");
props.add("proxyHost");
props.add("proxyPort");
props.add("proxyType");
props.add("repeatCount");
props.add("runLoggingLevel");
props.add("scheduledExecutorService");
props.add("scheduler");
props.add("schedulerProperties");
props.add("sendEmptyMessageWhenIdle");
props.add("startScheduler");
props.add("timeUnit");
props.add("timeout");
props.add("type");
props.add("useFixedDelay");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
Set<String> secretProps = new HashSet<>(1);
secretProps.add("authorizationToken");
SECRET_PROPERTY_NAMES = Collections.unmodifiableSet(secretProps);
Map<String, String> prefixes = new HashMap<>(1);
prefixes.put("schedulerProperties", "scheduler.");
MULTI_VALUE_PREFIXES = Collections.unmodifiableMap(prefixes);
}
@Override
public boolean isEnabled(String scheme) {
return "telegram".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "type", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
| TelegramEndpointUriFactory |
java | apache__avro | lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueInput.java | {
"start": 3248,
"end": 3749
} | class ____ extends Mapper<AvroKey<Integer>, AvroValue<CharSequence>, Text, IntWritable> {
@Override
protected void map(AvroKey<Integer> docid, AvroValue<CharSequence> body, Context context)
throws IOException, InterruptedException {
for (String token : body.datum().toString().split(" ")) {
context.write(new Text(token), new IntWritable(docid.datum()));
}
}
}
/** A reducer for aggregating token to docid mapping into a hitlist. */
public static | IndexMapper |
java | apache__kafka | clients/src/test/java/org/apache/kafka/clients/consumer/internals/AcknowledgementCommitCallbackHandlerTest.java | {
"start": 1752,
"end": 6941
} | class ____ {
private AcknowledgementCommitCallbackHandler acknowledgementCommitCallbackHandler;
private Map<TopicPartitionAndOffset, Exception> exceptionMap;
private final TopicPartition tp0 = new TopicPartition("test-topic", 0);
private final TopicIdPartition tip0 = new TopicIdPartition(Uuid.randomUuid(), tp0);
private final TopicPartitionAndOffset tpo00 = new TopicPartitionAndOffset(tip0, 0L);
private final TopicPartitionAndOffset tpo01 = new TopicPartitionAndOffset(tip0, 1L);
private final TopicPartition tp1 = new TopicPartition("test-topic-2", 0);
private final TopicIdPartition tip1 = new TopicIdPartition(Uuid.randomUuid(), tp1);
private final TopicPartitionAndOffset tpo10 = new TopicPartitionAndOffset(tip1, 0L);
private final TopicPartition tp2 = new TopicPartition("test-topic-2", 1);
private final TopicIdPartition tip2 = new TopicIdPartition(Uuid.randomUuid(), tp2);
private final TopicPartitionAndOffset tpo20 = new TopicPartitionAndOffset(tip2, 0L);
private Map<TopicIdPartition, Acknowledgements> acknowledgementsMap;
@BeforeEach
public void setup() {
acknowledgementsMap = new HashMap<>();
exceptionMap = new LinkedHashMap<>();
TestableAcknowledgeCommitCallback callback = new TestableAcknowledgeCommitCallback();
acknowledgementCommitCallbackHandler = new AcknowledgementCommitCallbackHandler(callback);
}
@Test
public void testNoException() throws Exception {
Acknowledgements acknowledgements = Acknowledgements.empty();
acknowledgements.add(0L, AcknowledgeType.ACCEPT);
acknowledgements.add(1L, AcknowledgeType.REJECT);
acknowledgementsMap.put(tip0, acknowledgements);
acknowledgementCommitCallbackHandler.onComplete(Collections.singletonList(acknowledgementsMap));
TestUtils.retryOnExceptionWithTimeout(() -> {
assertNull(exceptionMap.get(tpo00));
assertNull(exceptionMap.get(tpo01));
});
}
@Test
public void testInvalidRecord() throws Exception {
Acknowledgements acknowledgements = Acknowledgements.empty();
acknowledgements.add(0L, AcknowledgeType.ACCEPT);
acknowledgements.add(1L, AcknowledgeType.REJECT);
acknowledgements.complete(Errors.INVALID_RECORD_STATE.exception());
acknowledgementsMap.put(tip0, acknowledgements);
acknowledgementCommitCallbackHandler.onComplete(Collections.singletonList(acknowledgementsMap));
TestUtils.retryOnExceptionWithTimeout(() -> {
assertInstanceOf(InvalidRecordStateException.class, exceptionMap.get(tpo00));
assertInstanceOf(InvalidRecordStateException.class, exceptionMap.get(tpo01));
});
}
@Test
public void testUnauthorizedTopic() throws Exception {
Acknowledgements acknowledgements = Acknowledgements.empty();
acknowledgements.add(0L, AcknowledgeType.ACCEPT);
acknowledgements.add(1L, AcknowledgeType.REJECT);
acknowledgements.complete(Errors.TOPIC_AUTHORIZATION_FAILED.exception());
acknowledgementsMap.put(tip0, acknowledgements);
acknowledgementCommitCallbackHandler.onComplete(Collections.singletonList(acknowledgementsMap));
TestUtils.retryOnExceptionWithTimeout(() -> {
assertInstanceOf(TopicAuthorizationException.class, exceptionMap.get(tpo00));
assertInstanceOf(TopicAuthorizationException.class, exceptionMap.get(tpo01));
});
}
@Test
public void testMultiplePartitions() throws Exception {
Acknowledgements acknowledgements = Acknowledgements.empty();
acknowledgements.add(0L, AcknowledgeType.ACCEPT);
acknowledgements.add(1L, AcknowledgeType.REJECT);
acknowledgements.complete(Errors.TOPIC_AUTHORIZATION_FAILED.exception());
acknowledgementsMap.put(tip0, acknowledgements);
Acknowledgements acknowledgements1 = Acknowledgements.empty();
acknowledgements1.add(0L, AcknowledgeType.RELEASE);
acknowledgements1.complete(Errors.INVALID_RECORD_STATE.exception());
acknowledgementsMap.put(tip1, acknowledgements1);
Map<TopicIdPartition, Acknowledgements> acknowledgementsMap2 = new HashMap<>();
Acknowledgements acknowledgements2 = Acknowledgements.empty();
acknowledgements2.add(0L, AcknowledgeType.ACCEPT);
acknowledgementsMap2.put(tip2, acknowledgements2);
List<Map<TopicIdPartition, Acknowledgements>> acknowledgementsMapList = new LinkedList<>();
acknowledgementsMapList.add(acknowledgementsMap);
acknowledgementsMapList.add(acknowledgementsMap2);
acknowledgementCommitCallbackHandler.onComplete(acknowledgementsMapList);
TestUtils.retryOnExceptionWithTimeout(() -> {
assertInstanceOf(TopicAuthorizationException.class, exceptionMap.get(tpo00));
assertInstanceOf(TopicAuthorizationException.class, exceptionMap.get(tpo01));
assertInstanceOf(InvalidRecordStateException.class, exceptionMap.get(tpo10));
assertNull(exceptionMap.get(tpo20));
});
}
private | AcknowledgementCommitCallbackHandlerTest |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-api/src/test/java/org/apache/dubbo/remoting/telnet/support/ExitTelnetHandlerTest.java | {
"start": 1119,
"end": 1405
} | class ____ {
@Test
void test() {
Channel channel = Mockito.mock(Channel.class);
ExitTelnetHandler exitTelnetHandler = new ExitTelnetHandler();
exitTelnetHandler.telnet(channel, null);
verify(channel, times(1)).close();
}
}
| ExitTelnetHandlerTest |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/jobgraph/jsonplan/StreamGraphJsonSchema.java | {
"start": 1251,
"end": 2107
} | class ____ {
public static final String FIELD_NAME_NODES = "nodes";
@JsonProperty(FIELD_NAME_NODES)
private final List<JsonStreamNodeSchema> nodes;
@JsonCreator
public StreamGraphJsonSchema(@JsonProperty(FIELD_NAME_NODES) List<JsonStreamNodeSchema> nodes) {
this.nodes = nodes;
}
@JsonIgnore
public List<JsonStreamNodeSchema> getNodes() {
return nodes;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
StreamGraphJsonSchema that = (StreamGraphJsonSchema) o;
return Objects.equals(nodes, that.nodes);
}
@Override
public int hashCode() {
return Objects.hash(nodes);
}
public static | StreamGraphJsonSchema |
java | spring-projects__spring-boot | loader/spring-boot-loader/src/main/java/org/springframework/boot/loader/net/protocol/Handlers.java | {
"start": 916,
"end": 1961
} | class ____ {
private static final String PROTOCOL_HANDLER_PACKAGES = "java.protocol.handler.pkgs";
private static final String PACKAGE = Handlers.class.getPackageName();
private Handlers() {
}
/**
* Register a {@literal 'java.protocol.handler.pkgs'} property so that a
* {@link URLStreamHandler} will be located to deal with jar URLs.
*/
public static void register() {
String packages = System.getProperty(PROTOCOL_HANDLER_PACKAGES, "");
packages = (!packages.isEmpty() && !packages.contains(PACKAGE)) ? packages + "|" + PACKAGE : PACKAGE;
System.setProperty(PROTOCOL_HANDLER_PACKAGES, packages);
resetCachedUrlHandlers();
}
/**
* Reset any cached handlers just in case a jar protocol has already been used. We
* reset the handler by trying to set a null {@link URLStreamHandlerFactory} which
* should have no effect other than clearing the handlers cache.
*/
private static void resetCachedUrlHandlers() {
try {
URL.setURLStreamHandlerFactory(null);
}
catch (Error ex) {
// Ignore
}
}
}
| Handlers |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/ServiceTestUtils.java | {
"start": 4097,
"end": 14774
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(ServiceTestUtils.class);
private MiniYARNCluster yarnCluster = null;
private MiniDFSCluster hdfsCluster = null;
private TestingCluster zkCluster;
private CuratorService curatorService;
private FileSystem fs = null;
private Configuration conf = null;
public static final int NUM_NMS = 1;
private File basedir;
public static final JsonSerDeser<Service> JSON_SER_DESER =
new JsonSerDeser<>(Service.class,
PropertyNamingStrategies.SNAKE_CASE);
// Example service definition
// 2 components, each of which has 2 containers.
public static Service createExampleApplication() {
Service exampleApp = new Service();
exampleApp.setName("example-app");
exampleApp.setVersion("v1");
exampleApp.addComponent(createComponent("compa"));
exampleApp.addComponent(createComponent("compb"));
return exampleApp;
}
// Example service definition
// 2 components, each of which has 2 containers.
public static Service createTerminatingJobExample(String serviceName) {
Service exampleApp = new Service();
exampleApp.setName(serviceName);
exampleApp.setVersion("v1");
exampleApp.addComponent(
createComponent("terminating-comp1", 2, "sleep 1000",
Component.RestartPolicyEnum.NEVER, null));
exampleApp.addComponent(
createComponent("terminating-comp2", 2, "sleep 1000",
Component.RestartPolicyEnum.ON_FAILURE, null));
exampleApp.addComponent(
createComponent("terminating-comp3", 2, "sleep 1000",
Component.RestartPolicyEnum.ON_FAILURE, null));
return exampleApp;
}
public static Service createTerminatingDominantComponentJobExample(
String serviceName) {
Service exampleApp = new Service();
exampleApp.setName(serviceName);
exampleApp.setVersion("v1");
Component serviceStateComponent = createComponent("terminating-comp1", 2,
"sleep 1000", Component.RestartPolicyEnum.NEVER, null);
serviceStateComponent.getConfiguration().setProperty(
CONTAINER_STATE_REPORT_AS_SERVICE_STATE, "true");
exampleApp.addComponent(serviceStateComponent);
exampleApp.addComponent(
createComponent("terminating-comp2", 2, "sleep 60000",
Component.RestartPolicyEnum.ON_FAILURE, null));
return exampleApp;
}
public static Component createComponent(String name) {
return createComponent(name, 2L, "sleep 1000",
Component.RestartPolicyEnum.ALWAYS, null);
}
protected static Component createComponent(String name, long numContainers,
String command) {
Component comp1 = new Component();
comp1.setNumberOfContainers(numContainers);
comp1.setLaunchCommand(command);
comp1.setName(name);
Resource resource = new Resource();
comp1.setResource(resource);
resource.setMemory("128");
resource.setCpus(1);
return comp1;
}
protected static Component createComponent(String name, long numContainers,
String command, Component.RestartPolicyEnum restartPolicyEnum,
List<String> dependencies) {
Component comp = createComponent(name, numContainers, command);
comp.setRestartPolicy(restartPolicyEnum);
if (dependencies != null) {
comp.dependencies(dependencies);
}
return comp;
}
public static SliderFileSystem initMockFs() throws IOException {
return initMockFs(null);
}
public static SliderFileSystem initMockFs(Service ext) throws IOException {
SliderFileSystem sfs = mock(SliderFileSystem.class);
FileSystem mockFs = mock(FileSystem.class);
JsonSerDeser<Service> jsonSerDeser = mock(JsonSerDeser.class);
when(sfs.getFileSystem()).thenReturn(mockFs);
when(sfs.buildClusterDirPath(any())).thenReturn(
new Path("cluster_dir_path"));
if (ext != null) {
when(jsonSerDeser.load(any(), any())).thenReturn(ext);
}
ServiceApiUtil.setJsonSerDeser(jsonSerDeser);
return sfs;
}
protected void setConf(YarnConfiguration conf) {
this.conf = conf;
}
protected Configuration getConf() {
return conf;
}
protected FileSystem getFS() {
return fs;
}
protected MiniYARNCluster getYarnCluster() {
return yarnCluster;
}
protected void setupInternal(int numNodeManager)
throws Exception {
LOG.info("Starting up YARN cluster");
if (conf == null) {
setConf(new YarnConfiguration());
conf.setBoolean(YarnConfiguration.YARN_MINICLUSTER_FIXED_PORTS, false);
conf.setBoolean(YarnConfiguration.YARN_MINICLUSTER_USE_RPC, false);
conf.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS,
YarnConfiguration.DEFAULT_RM_MAX_COMPLETED_APPLICATIONS);
}
conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 128);
// reduce the teardown waiting time
conf.setLong(YarnConfiguration.DISPATCHER_DRAIN_EVENTS_TIMEOUT, 1000);
conf.set("yarn.log.dir", "target");
// mark if we need to launch the v1 timeline server
// disable aux-service based timeline aggregators
conf.set(YarnConfiguration.NM_AUX_SERVICES, "");
conf.set(YarnConfiguration.NM_VMEM_PMEM_RATIO, "8");
// Enable ContainersMonitorImpl
conf.set(YarnConfiguration.NM_CONTAINER_MON_RESOURCE_CALCULATOR,
LinuxResourceCalculatorPlugin.class.getName());
conf.set(YarnConfiguration.NM_CONTAINER_MON_PROCESS_TREE,
ProcfsBasedProcessTree.class.getName());
conf.setBoolean(
YarnConfiguration.YARN_MINICLUSTER_CONTROL_RESOURCE_MONITORING, true);
conf.setBoolean(TIMELINE_SERVICE_ENABLED, false);
conf.setInt(YarnConfiguration.NM_MAX_PER_DISK_UTILIZATION_PERCENTAGE, 100);
conf.setLong(DEBUG_NM_DELETE_DELAY_SEC, 60000);
conf.setLong(AM_RESOURCE_MEM, 526);
conf.setLong(YarnServiceConf.READINESS_CHECK_INTERVAL, 5);
// Disable vmem check to disallow NM killing the container
conf.setBoolean(NM_VMEM_CHECK_ENABLED, false);
conf.setBoolean(NM_PMEM_CHECK_ENABLED, false);
// set auth filters
conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
"org.apache.hadoop.security.AuthenticationFilterInitializer,"
+ "org.apache.hadoop.security.HttpCrossOriginFilterInitializer");
// setup zk cluster
zkCluster = new TestingCluster(1);
zkCluster.start();
conf.set(YarnConfiguration.RM_ZK_ADDRESS, zkCluster.getConnectString());
conf.set(KEY_REGISTRY_ZK_QUORUM, zkCluster.getConnectString());
LOG.info("ZK cluster: {}.", zkCluster.getConnectString());
curatorService = new CuratorService("testCuratorService");
curatorService.init(conf);
curatorService.start();
fs = FileSystem.get(conf);
basedir = new File("target", "apps");
if (basedir.exists()) {
FileUtils.deleteDirectory(basedir);
} else {
basedir.mkdirs();
}
conf.set(YARN_SERVICE_BASE_PATH, basedir.getAbsolutePath());
if (yarnCluster == null) {
yarnCluster = new MiniYARNCluster(this.getClass().getSimpleName(), 1,
numNodeManager, 1, 1);
yarnCluster.init(conf);
yarnCluster.start();
waitForNMsToRegister();
URL url = Thread.currentThread().getContextClassLoader()
.getResource("yarn-site.xml");
if (url == null) {
throw new RuntimeException(
"Could not find 'yarn-site.xml' dummy file in classpath");
}
Configuration yarnClusterConfig = yarnCluster.getConfig();
yarnClusterConfig.set(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
new File(url.getPath()).getParent());
//write the document to a buffer (not directly to the file, as that
//can cause the file being written to get read -which will then fail.
ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
yarnClusterConfig.writeXml(bytesOut);
bytesOut.close();
//write the bytes to the file in the classpath
OutputStream os = Files.newOutputStream(new File(url.getPath()).toPath());
os.write(bytesOut.toByteArray());
os.close();
LOG.info("Write yarn-site.xml configs to: {}.", url);
}
if (hdfsCluster == null) {
HdfsConfiguration hdfsConfig = new HdfsConfiguration();
hdfsCluster = new MiniDFSCluster.Builder(hdfsConfig)
.numDataNodes(1).build();
}
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
LOG.error("setup thread sleep interrupted.", e);
}
}
public void shutdown() throws IOException {
if (yarnCluster != null) {
try {
yarnCluster.stop();
} finally {
yarnCluster = null;
}
}
if (hdfsCluster != null) {
try {
hdfsCluster.shutdown();
} finally {
hdfsCluster = null;
}
}
if (curatorService != null) {
ServiceOperations.stop(curatorService);
}
if (zkCluster != null) {
zkCluster.stop();
}
if (basedir != null) {
FileUtils.deleteDirectory(basedir);
}
SliderFileSystem sfs = new SliderFileSystem(conf);
Path appDir = sfs.getBaseApplicationPath();
sfs.getFileSystem().delete(appDir, true);
}
private void waitForNMsToRegister() throws Exception {
int sec = 60;
while (sec >= 0) {
if (yarnCluster.getResourceManager().getRMContext().getRMNodes().size()
>= NUM_NMS) {
break;
}
Thread.sleep(1000);
sec--;
}
}
/**
* Creates a {@link ServiceClient} for test purposes.
*/
public static ServiceClient createClient(Configuration conf)
throws Exception {
ServiceClient client = new ServiceClient() {
@Override
protected Path addJarResource(String appName,
Map<String, LocalResource> localResources)
throws IOException, SliderException {
// do nothing, the Unit test will use local jars
return null;
}
};
client.init(conf);
client.start();
return client;
}
public static ServiceManager createServiceManager(ServiceContext context) {
ServiceManager serviceManager = new ServiceManager(context);
context.setServiceManager(serviceManager);
return serviceManager;
}
/**
* Creates a YarnClient for test purposes.
*/
public static YarnClient createYarnClient(Configuration conf) {
YarnClient client = YarnClient.createYarnClient();
client.init(conf);
client.start();
return client;
}
protected CuratorService getCuratorService() throws IOException {
return curatorService;
}
/**
* Watcher to initialize yarn service base path under target and deletes the
* the test directory when finishes.
*/
public static | ServiceTestUtils |
java | google__guice | core/src/com/google/inject/matcher/Matchers.java | {
"start": 8530,
"end": 9859
} | class ____ extends AbstractMatcher<Class> implements Serializable {
private final transient Package targetPackage;
private final String packageName;
public InPackage(Package targetPackage) {
this.targetPackage = checkNotNull(targetPackage, "package");
this.packageName = targetPackage.getName();
}
@Override
public boolean matches(Class c) {
return c.getPackage().equals(targetPackage);
}
@Override
public boolean equals(Object other) {
return other instanceof InPackage && ((InPackage) other).targetPackage.equals(targetPackage);
}
@Override
public int hashCode() {
return 37 * targetPackage.hashCode();
}
@Override
public String toString() {
return "inPackage(" + targetPackage.getName() + ")";
}
public Object readResolve() {
return inPackage(Package.getPackage(packageName));
}
private static final long serialVersionUID = 0;
}
/**
* Returns a matcher which matches classes in the given package and its subpackages. Unlike {@link
* #inPackage(Package) inPackage()}, this matches classes from any classloader.
*
* @since 2.0
*/
public static Matcher<Class> inSubpackage(final String targetPackageName) {
return new InSubpackage(targetPackageName);
}
private static | InPackage |
java | FasterXML__jackson-databind | attic/ImmutableBitSet.java | {
"start": 79,
"end": 1680
} | class ____ extends BitSet
{
private static final long serialVersionUID = 1L;
private ImmutableBitSet(BitSet bits) {
super();
_parentOr(bits);
}
public static ImmutableBitSet of(BitSet bits) {
return new ImmutableBitSet(bits);
}
private void _parentOr(BitSet set) {
super.or(set);
}
@Override
public void and(BitSet set) {
_failMutableOperation();
}
@Override
public void andNot(BitSet set) {
_failMutableOperation();
}
@Override
public void or(BitSet set) {
_failMutableOperation();
}
@Override
public void xor(BitSet set) {
_failMutableOperation();
}
@Override
public void clear() {
_failMutableOperation();
}
@Override
public void clear(int ix) {
_failMutableOperation();
}
@Override
public void clear(int from, int to) {
_failMutableOperation();
}
@Override
public void flip(int bitIndex) {
_failMutableOperation();
}
@Override
public void flip(int from, int to) {
_failMutableOperation();
}
@Override
public void set(int bitIndex) {
_failMutableOperation();
}
@Override
public void set(int bitIndex, boolean state) {
_failMutableOperation();
}
@Override
public void set(int from, int to) {
_failMutableOperation();
}
private void _failMutableOperation() {
throw new UnsupportedOperationException("ImmutableBitSet does not support modification");
}
}
| ImmutableBitSet |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/SmooksComponentBuilderFactory.java | {
"start": 1955,
"end": 4573
} | interface ____ extends ComponentBuilder<SmooksComponent> {
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default SmooksComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default SmooksComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
/**
* To use a custom factory for creating Smooks.
*
* The option is a: <code>org.smooks.SmooksFactory</code>
* type.
*
* Group: advanced
*
* @param smooksFactory the value to set
* @return the dsl builder
*/
default SmooksComponentBuilder smooksFactory(org.smooks.SmooksFactory smooksFactory) {
doSetProperty("smooksFactory", smooksFactory);
return this;
}
}
| SmooksComponentBuilder |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/compress/GzipCompression.java | {
"start": 3926,
"end": 4503
} | class ____ implements Compression.Builder<GzipCompression> {
private int level = GZIP.defaultLevel();
public Builder level(int level) {
if ((level < GZIP.minLevel() || GZIP.maxLevel() < level) && level != GZIP.defaultLevel()) {
throw new IllegalArgumentException("gzip doesn't support given compression level: " + level);
}
this.level = level;
return this;
}
@Override
public GzipCompression build() {
return new GzipCompression(level);
}
}
}
| Builder |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/processor/internals/RepartitionTopics.java | {
"start": 1987,
"end": 15949
} | class ____ {
private final InternalTopicManager internalTopicManager;
private final TopologyMetadata topologyMetadata;
private final Cluster clusterMetadata;
private final CopartitionedTopicsEnforcer copartitionedTopicsEnforcer;
private final Logger log;
private final Map<TopicPartition, PartitionInfo> topicPartitionInfos = new HashMap<>();
private final Map<Subtopology, Set<String>> missingInputTopicsBySubtopology = new HashMap<>();
public RepartitionTopics(final TopologyMetadata topologyMetadata,
final InternalTopicManager internalTopicManager,
final CopartitionedTopicsEnforcer copartitionedTopicsEnforcer,
final Cluster clusterMetadata,
final String logPrefix) {
this.topologyMetadata = topologyMetadata;
this.internalTopicManager = internalTopicManager;
this.clusterMetadata = clusterMetadata;
this.copartitionedTopicsEnforcer = copartitionedTopicsEnforcer;
final LogContext logContext = new LogContext(logPrefix);
log = logContext.logger(getClass());
}
public void setup() {
final Map<String, InternalTopicConfig> repartitionTopicMetadata = computeRepartitionTopicConfig(clusterMetadata);
if (repartitionTopicMetadata.isEmpty()) {
if (missingInputTopicsBySubtopology.isEmpty()) {
log.info("Skipping the repartition topic validation since there are no repartition topics.");
} else {
log.info("Skipping the repartition topic validation since all topologies containing repartition"
+ "topics are missing external user source topics and cannot be processed.");
}
} else {
// ensure the co-partitioning topics within the group have the same number of partitions,
// and enforce the number of partitions for those repartition topics to be the same if they
// are co-partitioned as well.
ensureCopartitioning(topologyMetadata.copartitionGroups(), repartitionTopicMetadata, clusterMetadata);
// make sure the repartition source topics exist with the right number of partitions,
// create these topics if necessary
internalTopicManager.makeReady(repartitionTopicMetadata);
// augment the metadata with the newly computed number of partitions for all the
// repartition source topics
for (final Map.Entry<String, InternalTopicConfig> entry : repartitionTopicMetadata.entrySet()) {
final String topic = entry.getKey();
final int numPartitions = entry.getValue().numberOfPartitions().orElse(-1);
for (int partition = 0; partition < numPartitions; partition++) {
topicPartitionInfos.put(
new TopicPartition(topic, partition),
new PartitionInfo(topic, partition, null, new Node[0], new Node[0])
);
}
}
}
}
public Set<String> topologiesWithMissingInputTopics() {
return missingInputTopicsBySubtopology.keySet()
.stream()
.map(s -> getTopologyNameOrElseUnnamed(s.namedTopology))
.collect(Collectors.toSet());
}
public Set<String> missingSourceTopics() {
return missingInputTopicsBySubtopology.entrySet().stream()
.map(entry -> entry.getValue())
.flatMap(missingTopicSet -> missingTopicSet.stream())
.collect(Collectors.toSet());
}
public Queue<StreamsException> missingSourceTopicExceptions() {
return missingInputTopicsBySubtopology.entrySet().stream().map(entry -> {
final Set<String> missingSourceTopics = entry.getValue();
final int subtopologyId = entry.getKey().nodeGroupId;
final String topologyName = entry.getKey().namedTopology;
return new StreamsException(
new MissingSourceTopicException(String.format(
"Missing source topics %s for subtopology %d of topology %s",
missingSourceTopics, subtopologyId, topologyName)),
new TaskId(subtopologyId, 0, topologyName));
}).collect(Collectors.toCollection(LinkedList::new));
}
public Map<TopicPartition, PartitionInfo> topicPartitionsInfo() {
return Collections.unmodifiableMap(topicPartitionInfos);
}
/**
* @param clusterMetadata cluster metadata, eg which topics exist on the brokers
*/
private Map<String, InternalTopicConfig> computeRepartitionTopicConfig(final Cluster clusterMetadata) {
final Set<TopicsInfo> allTopicsInfo = new HashSet<>();
final Map<String, InternalTopicConfig> allRepartitionTopicConfigs = new HashMap<>();
for (final Map.Entry<String, Map<Subtopology, TopicsInfo>> topologyEntry : topologyMetadata.topologyToSubtopologyTopicsInfoMap().entrySet()) {
final String topologyName = topologyMetadata.hasNamedTopologies() ? topologyEntry.getKey() : null;
final Set<TopicsInfo> topicsInfoForTopology = new HashSet<>();
final Set<String> missingSourceTopicsForTopology = new HashSet<>();
final Map<String, InternalTopicConfig> repartitionTopicConfigsForTopology = new HashMap<>();
for (final Map.Entry<Subtopology, TopicsInfo> subtopologyEntry : topologyEntry.getValue().entrySet()) {
final TopicsInfo topicsInfo = subtopologyEntry.getValue();
topicsInfoForTopology.add(topicsInfo);
repartitionTopicConfigsForTopology.putAll(
topicsInfo.repartitionSourceTopics
.values()
.stream()
.collect(Collectors.toMap(InternalTopicConfig::name, topicConfig -> topicConfig)));
final Set<String> missingSourceTopicsForSubtopology = computeMissingExternalSourceTopics(topicsInfo, clusterMetadata);
missingSourceTopicsForTopology.addAll(missingSourceTopicsForSubtopology);
if (!missingSourceTopicsForSubtopology.isEmpty()) {
final Subtopology subtopology = subtopologyEntry.getKey();
missingInputTopicsBySubtopology.put(subtopology, missingSourceTopicsForSubtopology);
log.error("Subtopology {} has missing source topics {} and will be excluded from the current assignment, "
+ "this can be due to the consumer client's metadata being stale or because they have "
+ "not been created yet. Please verify that you have created all input topics; if they "
+ "do exist, you just need to wait for the metadata to be updated, at which time a new "
+ "rebalance will be kicked off automatically and the topology will be retried at that time.",
subtopology.nodeGroupId, missingSourceTopicsForSubtopology);
}
}
if (missingSourceTopicsForTopology.isEmpty()) {
allRepartitionTopicConfigs.putAll(repartitionTopicConfigsForTopology);
allTopicsInfo.addAll(topicsInfoForTopology);
} else {
log.debug("Skipping repartition topic validation for entire topology {} due to missing source topics {}",
topologyName, missingSourceTopicsForTopology);
}
}
setRepartitionSourceTopicPartitionCount(allRepartitionTopicConfigs, allTopicsInfo, clusterMetadata);
return allRepartitionTopicConfigs;
}
private void ensureCopartitioning(final Collection<Set<String>> copartitionGroups,
final Map<String, InternalTopicConfig> repartitionTopicMetadata,
final Cluster clusterMetadata) {
for (final Set<String> copartitionGroup : copartitionGroups) {
copartitionedTopicsEnforcer.enforce(copartitionGroup, repartitionTopicMetadata, clusterMetadata);
}
}
private Set<String> computeMissingExternalSourceTopics(final TopicsInfo topicsInfo,
final Cluster clusterMetadata) {
final Set<String> missingExternalSourceTopics = new HashSet<>(topicsInfo.sourceTopics);
missingExternalSourceTopics.removeAll(topicsInfo.repartitionSourceTopics.keySet());
missingExternalSourceTopics.removeAll(clusterMetadata.topics());
return missingExternalSourceTopics;
}
/**
* Computes the number of partitions and sets it for each repartition topic in repartitionTopicMetadata
*/
private void setRepartitionSourceTopicPartitionCount(final Map<String, InternalTopicConfig> repartitionTopicMetadata,
final Collection<TopicsInfo> topicGroups,
final Cluster clusterMetadata) {
boolean partitionCountNeeded;
do {
partitionCountNeeded = false;
boolean progressMadeThisIteration = false; // avoid infinitely looping without making any progress on unknown repartitions
for (final TopicsInfo topicsInfo : topicGroups) {
for (final String repartitionSourceTopic : topicsInfo.repartitionSourceTopics.keySet()) {
final Optional<Integer> repartitionSourceTopicPartitionCount =
repartitionTopicMetadata.get(repartitionSourceTopic).numberOfPartitions();
if (repartitionSourceTopicPartitionCount.isEmpty()) {
final Integer numPartitions = computePartitionCount(
repartitionTopicMetadata,
topicGroups,
clusterMetadata,
repartitionSourceTopic
);
if (numPartitions == null) {
partitionCountNeeded = true;
log.trace("Unable to determine number of partitions for {}, another iteration is needed",
repartitionSourceTopic);
} else {
log.trace("Determined number of partitions for {} to be {}", repartitionSourceTopic, numPartitions);
repartitionTopicMetadata.get(repartitionSourceTopic).setNumberOfPartitions(numPartitions);
progressMadeThisIteration = true;
}
}
}
}
if (!progressMadeThisIteration && partitionCountNeeded) {
log.error("Unable to determine the number of partitions of all repartition topics, most likely a source topic is missing or pattern doesn't match any topics\n" +
"topic groups: {}\n" +
"cluster topics: {}.", topicGroups, clusterMetadata.topics());
throw new TaskAssignmentException("Failed to compute number of partitions for all repartition topics, " +
"make sure all user input topics are created and all Pattern subscriptions match at least one topic in the cluster");
}
} while (partitionCountNeeded);
}
private Integer computePartitionCount(final Map<String, InternalTopicConfig> repartitionTopicMetadata,
final Collection<TopicsInfo> topicGroups,
final Cluster clusterMetadata,
final String repartitionSourceTopic) {
Integer partitionCount = null;
// try set the number of partitions for this repartition topic if it is not set yet
for (final TopicsInfo topicsInfo : topicGroups) {
final Set<String> sinkTopics = topicsInfo.sinkTopics;
if (sinkTopics.contains(repartitionSourceTopic)) {
// if this topic is one of the sink topics of this topology,
// use the maximum of all its source topic partitions as the number of partitions
for (final String upstreamSourceTopic : topicsInfo.sourceTopics) {
Integer numPartitionsCandidate = null;
// It is possible the sourceTopic is another internal topic, i.e,
// map().join().join(map())
if (repartitionTopicMetadata.containsKey(upstreamSourceTopic)) {
if (repartitionTopicMetadata.get(upstreamSourceTopic).numberOfPartitions().isPresent()) {
numPartitionsCandidate =
repartitionTopicMetadata.get(upstreamSourceTopic).numberOfPartitions().get();
}
} else {
final Integer count = clusterMetadata.partitionCountForTopic(upstreamSourceTopic);
if (count == null) {
throw new TaskAssignmentException(
"No partition count found for source topic "
+ upstreamSourceTopic
+ ", but it should have been."
);
}
numPartitionsCandidate = count;
}
if (numPartitionsCandidate != null) {
if (partitionCount == null || numPartitionsCandidate > partitionCount) {
partitionCount = numPartitionsCandidate;
}
}
}
}
}
return partitionCount;
}
}
| RepartitionTopics |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_1797/Issue1797Mapper.java | {
"start": 308,
"end": 455
} | interface ____ {
Issue1797Mapper INSTANCE = Mappers.getMapper( Issue1797Mapper.class );
CustomerDto map(Customer customer);
}
| Issue1797Mapper |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/NonCanonicalStaticImportTest.java | {
"start": 1671,
"end": 1903
} | class ____ {}
""")
.doTest();
}
@Test
public void negativeStaticMethod() {
compilationHelper
.addSourceLines(
"a/A.java",
"""
package a;
public | Test |
java | apache__camel | components/camel-mock/src/main/java/org/apache/camel/component/mock/AssertionClauseTask.java | {
"start": 957,
"end": 1119
} | class ____ extends AssertionClause implements AssertionTask {
protected AssertionClauseTask(MockEndpoint mock) {
super(mock);
}
}
| AssertionClauseTask |
java | apache__flink | flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/socket/SocketWindowWordCountITCase.java | {
"start": 3770,
"end": 4706
} | class ____ extends Thread {
private final ServerSocket serverSocket;
private volatile Throwable error;
public ServerThread(ServerSocket serverSocket) {
super("Socket Server Thread");
this.serverSocket = serverSocket;
}
@Override
public void run() {
try {
try (Socket socket = NetUtils.acceptWithoutTimeout(serverSocket);
PrintWriter writer = new PrintWriter(socket.getOutputStream(), true)) {
writer.println(WordCountData.TEXT);
}
} catch (Throwable t) {
this.error = t;
}
}
public void checkError() throws IOException {
if (error != null) {
throw new IOException("Error in server thread: " + error.getMessage(), error);
}
}
}
private static final | ServerThread |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/jsontype/impl/SimpleNameIdResolver.java | {
"start": 752,
"end": 931
} | class ____
extends TypeIdResolverBase
implements java.io.Serializable
{
private static final long serialVersionUID = 3L;
/**
* Mappings from | SimpleNameIdResolver |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/dates/Dates_assertIsAfterYear_Test.java | {
"start": 1409,
"end": 4217
} | class ____ extends DatesBaseTest {
@Test
void should_fail_if_actual_is_not_strictly_after_given_year() {
AssertionInfo info = someInfo();
int year = 2020;
Throwable error = catchThrowable(() -> dates.assertIsAfterYear(info, actual, year));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeAfterYear(actual, year));
}
@Test
void should_fail_if_actual_year_is_equals_to_given_year() {
AssertionInfo info = someInfo();
parseDate("2011-01-01");
int year = 2011;
Throwable error = catchThrowable(() -> dates.assertIsAfterYear(info, actual, year));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeAfterYear(actual, year));
}
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> dates.assertIsAfterYear(someInfo(), null, 2010))
.withMessage(actualIsNull());
}
@Test
void should_pass_if_actual_is_strictly_after_given_year() {
dates.assertIsAfterYear(someInfo(), actual, 2010);
}
@Test
void should_fail_if_actual_is_not_strictly_after_given_year_whatever_custom_comparison_strategy_is() {
AssertionInfo info = someInfo();
int year = 2020;
Throwable error = catchThrowable(() -> datesWithCustomComparisonStrategy.assertIsAfterYear(info, actual, year));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeAfterYear(actual, year));
}
@Test
void should_fail_if_actual_year_is_equals_to_given_year_whatever_custom_comparison_strategy_is() {
AssertionInfo info = someInfo();
parseDate("2011-01-01");
int year = 2011;
Throwable error = catchThrowable(() -> datesWithCustomComparisonStrategy.assertIsAfterYear(info, actual, year));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeAfterYear(actual, year));
}
@Test
void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> datesWithCustomComparisonStrategy.assertIsAfterYear(someInfo(),
null,
2010))
.withMessage(actualIsNull());
}
@Test
void should_pass_if_actual_is_strictly_after_given_year_whatever_custom_comparison_strategy_is() {
datesWithCustomComparisonStrategy.assertIsAfterYear(someInfo(), actual, 2000);
}
}
| Dates_assertIsAfterYear_Test |
java | spring-projects__spring-boot | module/spring-boot-http-client/src/test/java/org/springframework/boot/http/client/autoconfigure/PropertiesApiVersionInserterTests.java | {
"start": 1206,
"end": 2646
} | class ____ {
@Test
@SuppressWarnings("NullAway") // Test null check
void getWhenPropertiesIsNullThrowsException() {
assertThatIllegalArgumentException().isThrownBy(() -> PropertiesApiVersionInserter.get(null))
.withMessage("'properties' must not be null");
}
@Test
void getReturnsInserterBasedOnProperties() throws Exception {
Insert properties = new ApiversionProperties().getInsert();
properties.setHeader("x-test");
properties.setQueryParameter("v");
properties.setPathSegment(1);
properties.setMediaTypeParameter("mtp");
ApiVersionInserter inserter = PropertiesApiVersionInserter.get(properties);
URI uri = new URI("https://example.com/foo/bar");
assertThat(inserter.insertVersion("123", uri)).hasToString("https://example.com/foo/123/bar?v=123");
HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.APPLICATION_JSON);
inserter.insertVersion("123", headers);
assertThat(headers.get("x-test")).containsExactly("123");
MediaType contentType = headers.getContentType();
assertThat(contentType).isNotNull();
assertThat(contentType.getParameters()).containsEntry("mtp", "123");
}
@Test
void getWhenNoPropertiesReturnsEmpty() {
Insert properties = new ApiversionProperties().getInsert();
ApiVersionInserter inserter = PropertiesApiVersionInserter.get(properties);
assertThat(inserter).isEqualTo(PropertiesApiVersionInserter.EMPTY);
}
}
| PropertiesApiVersionInserterTests |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePart.java | {
"start": 1405,
"end": 5417
} | enum ____ implements DateTimeField {
YEAR(DateTimeExtractor.YEAR::extract, "years", "yyyy", "yy"),
QUARTER(QuarterProcessor::quarter, "quarters", "qq", "q"),
MONTH(DateTimeExtractor.MONTH_OF_YEAR::extract, "months", "mm", "m"),
DAYOFYEAR(DateTimeExtractor.DAY_OF_YEAR::extract, "dy", "y"),
DAY(DateTimeExtractor.DAY_OF_MONTH::extract, "days", "dd", "d"),
WEEK(NonIsoDateTimeExtractor.WEEK_OF_YEAR::extract, "weeks", "wk", "ww"),
WEEKDAY(NonIsoDateTimeExtractor.DAY_OF_WEEK::extract, "weekdays", "dw"),
HOUR(DateTimeExtractor.HOUR_OF_DAY::extract, "hours", "hh"),
MINUTE(DateTimeExtractor.MINUTE_OF_HOUR::extract, "minutes", "mi", "n"),
SECOND(DateTimeExtractor.SECOND_OF_MINUTE::extract, "seconds", "ss", "s"),
MILLISECOND(dt -> dt.get(ChronoField.MILLI_OF_SECOND), "milliseconds", "ms"),
MICROSECOND(dt -> dt.get(ChronoField.MICRO_OF_SECOND), "microseconds", "mcs"),
NANOSECOND(ZonedDateTime::getNano, "nanoseconds", "ns"),
TZOFFSET(dt -> dt.getOffset().getTotalSeconds() / 60, "tz");
private static final Map<String, Part> NAME_TO_PART;
private static final List<String> VALID_VALUES;
static {
NAME_TO_PART = DateTimeField.initializeResolutionMap(values());
VALID_VALUES = DateTimeField.initializeValidValues(values());
}
private ToIntFunction<ZonedDateTime> extractFunction;
private Set<String> aliases;
Part(ToIntFunction<ZonedDateTime> extractFunction, String... aliases) {
this.extractFunction = extractFunction;
this.aliases = Set.of(aliases);
}
@Override
public Iterable<String> aliases() {
return aliases;
}
public static List<String> findSimilar(String match) {
return DateTimeField.findSimilar(NAME_TO_PART.keySet(), match);
}
public static Part resolve(String dateTimePart) {
return DateTimeField.resolveMatch(NAME_TO_PART, dateTimePart);
}
public Integer extract(ZonedDateTime dateTime) {
return extractFunction.applyAsInt(dateTime);
}
}
public DatePart(Source source, Expression dateTimePart, Expression timestamp, ZoneId zoneId) {
super(source, dateTimePart, timestamp, zoneId);
}
@Override
public DataType dataType() {
return DataTypes.INTEGER;
}
@Override
protected TypeResolution resolveType() {
TypeResolution resolution = super.resolveType();
if (resolution.unresolved()) {
return resolution;
}
resolution = isDate(right(), sourceText(), SECOND);
if (resolution.unresolved()) {
return resolution;
}
return TypeResolution.TYPE_RESOLVED;
}
@Override
protected BinaryScalarFunction replaceChildren(Expression newDateTimePart, Expression newTimestamp) {
return new DatePart(source(), newDateTimePart, newTimestamp, zoneId());
}
@Override
protected NodeInfo<? extends Expression> info() {
return NodeInfo.create(this, DatePart::new, left(), right(), zoneId());
}
@Override
protected String scriptMethodName() {
return "datePart";
}
@Override
public Object fold() {
return DatePartProcessor.process(left().fold(), right().fold(), zoneId());
}
@Override
protected Pipe createPipe(Pipe dateTimePart, Pipe timestamp, ZoneId zoneId) {
return new DatePartPipe(source(), this, dateTimePart, timestamp, zoneId);
}
@Override
protected boolean resolveDateTimeField(String dateTimeField) {
return Part.resolve(dateTimeField) != null;
}
@Override
protected List<String> findSimilarDateTimeFields(String dateTimeField) {
return Part.findSimilar(dateTimeField);
}
@Override
protected List<String> validDateTimeFieldValues() {
return Part.VALID_VALUES;
}
}
| Part |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/stats/common/MemoryUsageTests.java | {
"start": 802,
"end": 2516
} | class ____ extends AbstractXContentSerializingTestCase<MemoryUsage> {
private boolean lenient;
@Before
public void chooseStrictOrLenient() {
lenient = randomBoolean();
}
@Override
protected boolean supportsUnknownFields() {
return lenient;
}
@Override
protected MemoryUsage doParseInstance(XContentParser parser) throws IOException {
return lenient ? MemoryUsage.LENIENT_PARSER.parse(parser, null) : MemoryUsage.STRICT_PARSER.parse(parser, null);
}
@Override
protected ToXContent.Params getToXContentParams() {
return new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.FOR_INTERNAL_STORAGE, "true"));
}
public static MemoryUsage createRandom() {
return new MemoryUsage(
randomAlphaOfLength(10),
Instant.now(),
randomNonNegativeLong(),
randomBoolean() ? null : randomFrom(MemoryUsage.Status.values()),
randomBoolean() ? null : randomNonNegativeLong()
);
}
@Override
protected Writeable.Reader<MemoryUsage> instanceReader() {
return MemoryUsage::new;
}
@Override
protected MemoryUsage createTestInstance() {
return createRandom();
}
@Override
protected MemoryUsage mutateInstance(MemoryUsage instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
public void testZeroUsage() {
MemoryUsage memoryUsage = new MemoryUsage("zero_usage_job");
String asJson = Strings.toString(memoryUsage);
assertThat(asJson, equalTo("""
{"peak_usage_bytes":0,"status":"ok"}"""));
}
}
| MemoryUsageTests |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/issues/OnCompletionAfterConsumerModeIssueTest.java | {
"start": 970,
"end": 4577
} | class ____ extends ContextTestSupport {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testOnCompletionInSub() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.transform(constant("a"))
.to("mock:a")
.to("direct:sub")
.transform(constant("c"))
.to("mock:c");
from("direct:sub")
.transform(constant("b"))
.to("mock:b")
.onCompletion()
.to("mock:end")
.end();
}
});
context.start();
getMockEndpoint("mock:a").expectedBodiesReceived("a");
getMockEndpoint("mock:b").expectedBodiesReceived("b");
getMockEndpoint("mock:c").expectedBodiesReceived("c");
getMockEndpoint("mock:end").expectedBodiesReceived("c");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Test
public void testOnCompletionInMainAndSub() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.transform(constant("a"))
.to("mock:a")
.to("direct:sub")
.transform(constant("c"))
.to("mock:c")
.onCompletion()
.to("mock:end")
.end();
from("direct:sub")
.transform(constant("b"))
.to("mock:b")
.onCompletion()
.to("mock:end")
.end();
}
});
context.start();
getMockEndpoint("mock:a").expectedBodiesReceived("a");
getMockEndpoint("mock:b").expectedBodiesReceived("b");
getMockEndpoint("mock:c").expectedBodiesReceived("c");
getMockEndpoint("mock:end").expectedBodiesReceived("c", "c");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Test
public void testOnCompletionInGlobalAndSub() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
onCompletion().to("mock:end");
from("direct:start")
.transform(constant("a"))
.to("mock:a")
.to("direct:sub")
.transform(constant("c"))
.to("mock:c");
from("direct:sub")
.transform(constant("b"))
.to("mock:b")
.onCompletion()
.to("mock:end")
.end();
}
});
context.start();
getMockEndpoint("mock:a").expectedBodiesReceived("a");
getMockEndpoint("mock:b").expectedBodiesReceived("b");
getMockEndpoint("mock:c").expectedBodiesReceived("c");
getMockEndpoint("mock:end").expectedBodiesReceived("c", "c");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
}
| OnCompletionAfterConsumerModeIssueTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ReduceTaskAttemptInfo.java | {
"start": 1121,
"end": 2628
} | class ____ extends TaskAttemptInfo {
protected long shuffleFinishTime;
protected long mergeFinishTime;
protected long elapsedShuffleTime;
protected long elapsedMergeTime;
protected long elapsedReduceTime;
public ReduceTaskAttemptInfo() {
}
public ReduceTaskAttemptInfo(TaskAttempt ta) {
this(ta, false);
}
public ReduceTaskAttemptInfo(TaskAttempt ta, Boolean isRunning) {
super(ta, TaskType.REDUCE, isRunning);
this.shuffleFinishTime = ta.getShuffleFinishTime();
this.mergeFinishTime = ta.getSortFinishTime();
this.elapsedShuffleTime = Times.elapsed(this.startTime,
this.shuffleFinishTime, false);
if (this.elapsedShuffleTime == -1) {
this.elapsedShuffleTime = 0;
}
this.elapsedMergeTime = Times.elapsed(this.shuffleFinishTime,
this.mergeFinishTime, false);
if (this.elapsedMergeTime == -1) {
this.elapsedMergeTime = 0;
}
this.elapsedReduceTime = Times.elapsed(this.mergeFinishTime,
this.finishTime, false);
if (this.elapsedReduceTime == -1) {
this.elapsedReduceTime = 0;
}
}
public long getShuffleFinishTime() {
return this.shuffleFinishTime;
}
public long getMergeFinishTime() {
return this.mergeFinishTime;
}
public long getElapsedShuffleTime() {
return this.elapsedShuffleTime;
}
public long getElapsedMergeTime() {
return this.elapsedMergeTime;
}
public long getElapsedReduceTime() {
return this.elapsedReduceTime;
}
}
| ReduceTaskAttemptInfo |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/concurrent/locks/LockingVisitors.java | {
"start": 4198,
"end": 4768
} | class ____ {
*
* private final StampedLockVisitor<PrintStream> lock;
* private final PrintStream ps;
*
* public SimpleLogger(OutputStream out) {
* ps = new PrintStream(out);
* lock = LockingVisitors.stampedLockVisitor(ps);
* }
*
* public void log(String message) {
* lock.acceptWriteLocked(ps -> ps.println(message));
* }
*
* public void log(byte[] buffer) {
* lock.acceptWriteLocked(ps -> { ps.write(buffer); ps.println(); });
* }
* }
* }
* </pre>
*
* @since 3.11
*/
public | SimpleLogger3 |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/context/AbstractExecutableMethod.java | {
"start": 1731,
"end": 6734
} | class ____ extends AbstractExecutable implements UnsafeExecutable, ExecutableMethod, EnvironmentConfigurable {
private final ReturnType returnType;
private final Argument<?> genericReturnType;
private final int hashCode;
private Environment environment;
private AnnotationMetadata methodAnnotationMetadata;
/**
* @param declaringType The declaring type
* @param methodName The method name
* @param genericReturnType The generic return type
* @param arguments The arguments
*/
@SuppressWarnings("WeakerAccess")
protected AbstractExecutableMethod(Class<?> declaringType,
String methodName,
Argument genericReturnType,
Argument... arguments) {
super(declaringType, methodName, arguments);
this.genericReturnType = genericReturnType;
this.returnType = new ReturnTypeImpl();
int result = ObjectUtils.hash(declaringType, methodName);
result = 31 * result + Arrays.hashCode(argTypes);
this.hashCode = result;
}
/**
* @param declaringType The declaring type
* @param methodName The method name
* @param genericReturnType The generic return type
*/
@SuppressWarnings("WeakerAccess")
protected AbstractExecutableMethod(Class<?> declaringType,
String methodName,
Argument genericReturnType) {
this(declaringType, methodName, genericReturnType, Argument.ZERO_ARGUMENTS);
}
/**
* @param declaringType The declaring type
* @param methodName The method name
*/
@SuppressWarnings("WeakerAccess")
@UsedByGeneratedCode
protected AbstractExecutableMethod(Class<?> declaringType,
String methodName) {
this(declaringType, methodName, Argument.OBJECT_ARGUMENT, Argument.ZERO_ARGUMENTS);
}
@Override
public boolean hasPropertyExpressions() {
return getAnnotationMetadata().hasPropertyExpressions();
}
@Override
public AnnotationMetadata getAnnotationMetadata() {
if (this.methodAnnotationMetadata == null) {
this.methodAnnotationMetadata = initializeAnnotationMetadata();
}
return this.methodAnnotationMetadata;
}
@Override
public void configure(Environment environment) {
this.environment = environment;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
AbstractExecutableMethod that = (AbstractExecutableMethod) o;
return Objects.equals(declaringType, that.declaringType) &&
Objects.equals(methodName, that.methodName) &&
Arrays.equals(argTypes, that.argTypes);
}
@Override
public int hashCode() {
return hashCode;
}
@Override
public String toString() {
String text = Argument.toString(getArguments());
return getReturnType().getType().getSimpleName() + " " + getMethodName() + "(" + text + ")";
}
@Override
public ReturnType getReturnType() {
return returnType;
}
@Override
public Class<?>[] getArgumentTypes() {
return argTypes;
}
@Override
public Class<?> getDeclaringType() {
return declaringType;
}
@Override
public String getMethodName() {
return methodName;
}
@Override
public final Object invoke(Object instance, Object... arguments) {
if (arguments.length > 0) {
ArgumentUtils.validateArguments(this, getArguments(), arguments);
}
return invokeInternal(instance, arguments);
}
@Override
public Object invokeUnsafe(Object instance, Object... arguments) {
return invokeInternal(instance, arguments);
}
/**
* @param instance The instance
* @param arguments The arguments
* @return The result
*/
@SuppressWarnings("WeakerAccess")
@UsedByGeneratedCode
protected abstract Object invokeInternal(Object instance, Object[] arguments);
/**
* Resolves the annotation metadata for this method. Subclasses
*
* @return The {@link AnnotationMetadata}
*/
protected AnnotationMetadata resolveAnnotationMetadata() {
return AnnotationMetadata.EMPTY_METADATA;
}
private AnnotationMetadata initializeAnnotationMetadata() {
AnnotationMetadata annotationMetadata = resolveAnnotationMetadata();
if (annotationMetadata != AnnotationMetadata.EMPTY_METADATA) {
if (annotationMetadata.hasPropertyExpressions()) {
// we make a copy of the result of annotation metadata which is normally a reference
// to the | AbstractExecutableMethod |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/telemetry/endpoints/onbinarymessage/MultiClient.java | {
"start": 255,
"end": 409
} | class ____ {
@OnBinaryMessage
Multi<String> echo(Multi<String> messages) {
return messages.map(msg -> "echo 0: " + msg);
}
}
| MultiClient |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/context/CustomPseudoScopeTest.java | {
"start": 3212,
"end": 3854
} | class ____ {
private final String message;
// this constructor is needed because otherwise SimpleBean(String) is considered an @Inject constructor
public SimpleBean() {
this(null);
}
public SimpleBean(String message) {
this.message = message;
}
public String ping() {
return message;
}
@Override
public String toString() {
return "SimpleBean [message=" + message + "]";
}
}
@Target({ TYPE, METHOD, FIELD })
@Retention(RUNTIME)
@Documented
@Scope
@Inherited
public @ | SimpleBean |
java | redisson__redisson | redisson-spring-data/redisson-spring-data-22/src/main/java/org/redisson/spring/data/connection/RedissonReactiveRedisConnection.java | {
"start": 946,
"end": 3213
} | class ____ extends RedissonBaseReactive implements ReactiveRedisConnection {
public RedissonReactiveRedisConnection(CommandReactiveExecutor executorService) {
super(executorService);
}
@Override
public Mono<Void> closeLater() {
return Mono.empty();
}
@Override
public ReactiveKeyCommands keyCommands() {
return new RedissonReactiveKeyCommands(executorService);
}
@Override
public ReactiveStringCommands stringCommands() {
return new RedissonReactiveStringCommands(executorService);
}
@Override
public ReactiveNumberCommands numberCommands() {
return new RedissonReactiveNumberCommands(executorService);
}
@Override
public ReactiveListCommands listCommands() {
return new RedissonReactiveListCommands(executorService);
}
@Override
public ReactiveSetCommands setCommands() {
return new RedissonReactiveSetCommands(executorService);
}
@Override
public ReactiveZSetCommands zSetCommands() {
return new RedissonReactiveZSetCommands(executorService);
}
@Override
public ReactiveHashCommands hashCommands() {
return new RedissonReactiveHashCommands(executorService);
}
@Override
public ReactiveGeoCommands geoCommands() {
return new RedissonReactiveGeoCommands(executorService);
}
@Override
public ReactiveHyperLogLogCommands hyperLogLogCommands() {
return new RedissonReactiveHyperLogLogCommands(executorService);
}
@Override
public ReactivePubSubCommands pubSubCommands() {
return new RedissonReactivePubSubCommands(executorService);
}
@Override
public ReactiveScriptingCommands scriptingCommands() {
return new RedissonReactiveScriptingCommands(executorService);
}
@Override
public ReactiveServerCommands serverCommands() {
return new RedissonReactiveServerCommands(executorService);
}
@Override
public ReactiveStreamCommands streamCommands() {
return new RedissonReactiveStreamCommands(executorService);
}
@Override
public Mono<String> ping() {
return read(null, StringCodec.INSTANCE, RedisCommands.PING);
}
}
| RedissonReactiveRedisConnection |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryNotFound.java | {
"start": 704,
"end": 1251
} | class ____ extends ResourceNotFoundException {
public PeerRecoveryNotFound(final long recoveryId, final ShardId shardId, final String targetAllocationId) {
super(
"Peer recovery for "
+ shardId
+ " with [recoveryId: "
+ recoveryId
+ ", targetAllocationId: "
+ targetAllocationId
+ "] not found."
);
}
public PeerRecoveryNotFound(StreamInput in) throws IOException {
super(in);
}
}
| PeerRecoveryNotFound |
java | spring-projects__spring-boot | module/spring-boot-jetty/src/test/java/org/springframework/boot/jetty/reactive/JettyReactiveWebServerFactoryTests.java | {
"start": 2304,
"end": 7009
} | class ____ extends AbstractReactiveWebServerFactoryTests {
@Override
protected JettyReactiveWebServerFactory getFactory() {
return new JettyReactiveWebServerFactory(0);
}
@Test
@Override
@Disabled("Jetty 12 does not support User-Agent-based compression")
// TODO Is this true with Jetty 12?
protected void noCompressionForUserAgent() {
}
@Test
@SuppressWarnings("NullAway") // Test null check
void setNullServerCustomizersShouldThrowException() {
JettyReactiveWebServerFactory factory = getFactory();
assertThatIllegalArgumentException().isThrownBy(() -> factory.setServerCustomizers(null))
.withMessageContaining("'customizers' must not be null");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void addNullServerCustomizersShouldThrowException() {
JettyReactiveWebServerFactory factory = getFactory();
assertThatIllegalArgumentException()
.isThrownBy(() -> factory.addServerCustomizers((JettyServerCustomizer[]) null))
.withMessageContaining("'customizers' must not be null");
}
@Test
void jettyCustomizersShouldBeInvoked() {
HttpHandler handler = mock(HttpHandler.class);
JettyReactiveWebServerFactory factory = getFactory();
JettyServerCustomizer[] configurations = new JettyServerCustomizer[4];
Arrays.setAll(configurations, (i) -> mock(JettyServerCustomizer.class));
factory.setServerCustomizers(Arrays.asList(configurations[0], configurations[1]));
factory.addServerCustomizers(configurations[2], configurations[3]);
this.webServer = factory.getWebServer(handler);
InOrder ordered = inOrder((Object[]) configurations);
for (JettyServerCustomizer configuration : configurations) {
ordered.verify(configuration).customize(any(Server.class));
}
}
@Test
void specificIPAddressNotReverseResolved() throws Exception {
JettyReactiveWebServerFactory factory = getFactory();
InetAddress localhost = InetAddress.getLocalHost();
factory.setAddress(InetAddress.getByAddress(localhost.getAddress()));
this.webServer = factory.getWebServer(mock(HttpHandler.class));
this.webServer.start();
Connector connector = ((JettyWebServer) this.webServer).getServer().getConnectors()[0];
assertThat(((ServerConnector) connector).getHost()).isEqualTo(localhost.getHostAddress());
}
@Test
void useForwardedHeaders() {
JettyReactiveWebServerFactory factory = getFactory();
factory.setUseForwardHeaders(true);
assertForwardHeaderIsUsed(factory);
}
@Test
void whenServerIsShuttingDownGracefullyThenNewConnectionsCannotBeMade() {
JettyReactiveWebServerFactory factory = getFactory();
factory.setShutdown(Shutdown.GRACEFUL);
BlockingHandler blockingHandler = new BlockingHandler();
this.webServer = factory.getWebServer(blockingHandler);
this.webServer.start();
WebClient webClient = getWebClient(this.webServer.getPort()).build();
this.webServer.shutDownGracefully((result) -> {
});
Awaitility.await().atMost(Duration.ofSeconds(30)).until(() -> {
blockingHandler.stopBlocking();
try {
webClient.get().retrieve().toBodilessEntity().block();
return false;
}
catch (RuntimeException ex) {
return ex.getCause() instanceof ConnectException;
}
});
this.webServer.stop();
}
@Test
void shouldApplyMaxConnections() {
JettyReactiveWebServerFactory factory = getFactory();
factory.setMaxConnections(1);
this.webServer = factory.getWebServer(new EchoHandler());
Server server = ((JettyWebServer) this.webServer).getServer();
NetworkConnectionLimit connectionLimit = server.getBean(NetworkConnectionLimit.class);
assertThat(connectionLimit).isNotNull();
assertThat(connectionLimit.getMaxNetworkConnectionCount()).isOne();
}
@Test
void sslServerNameBundlesConfigurationThrowsException() {
Ssl ssl = new Ssl();
ssl.setBundle("test");
List<ServerNameSslBundle> bundles = List.of(new ServerNameSslBundle("first", "test1"),
new ServerNameSslBundle("second", "test2"));
ssl.setServerNameBundles(bundles);
JettyReactiveWebServerFactory factory = getFactory();
factory.setSsl(ssl);
assertThatIllegalStateException().isThrownBy(() -> this.webServer = factory.getWebServer(new EchoHandler()))
.withMessageContaining("Server name SSL bundles are not supported with Jetty");
}
@Override
protected String startedLogMessage() {
return JettyAccess.getStartedLogMessage((JettyWebServer) this.webServer);
}
@Override
protected void addConnector(int port, ConfigurableReactiveWebServerFactory factory) {
((JettyReactiveWebServerFactory) factory).addServerCustomizers((server) -> {
ServerConnector connector = new ServerConnector(server);
connector.setPort(port);
server.addConnector(connector);
});
}
}
| JettyReactiveWebServerFactoryTests |
java | google__guava | android/guava/src/com/google/common/util/concurrent/Service.java | {
"start": 8761,
"end": 8797
} | interface ____ 13.0)
*/
abstract | in |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/LocalJobOutputFiles.java | {
"start": 1188,
"end": 5456
} | class ____ implements NativeTaskOutput {
static final String TASKTRACKER_OUTPUT = "output";
static final String REDUCE_INPUT_FILE_FORMAT_STRING = "%s/map_%d.out";
static final String SPILL_FILE_FORMAT_STRING = "%s/spill%d.out";
static final String SPILL_INDEX_FILE_FORMAT_STRING = "%s/spill%d.out.index";
static final String OUTPUT_FILE_FORMAT_STRING = "%s/file.out";
static final String OUTPUT_FILE_INDEX_FORMAT_STRING = "%s/file.out.index";
private JobConf conf;
private LocalDirAllocator lDirAlloc = new LocalDirAllocator("mapred.local.dir");
public LocalJobOutputFiles(Configuration conf, String id) {
this.conf = new JobConf(conf);
}
/**
* Return the path to local map output file created earlier
*/
public Path getOutputFile() throws IOException {
String path = String.format(OUTPUT_FILE_FORMAT_STRING, TASKTRACKER_OUTPUT);
return lDirAlloc.getLocalPathToRead(path, conf);
}
/**
* Create a local map output file name.
*
* @param size the size of the file
*/
public Path getOutputFileForWrite(long size) throws IOException {
String path = String.format(OUTPUT_FILE_FORMAT_STRING, TASKTRACKER_OUTPUT);
return lDirAlloc.getLocalPathForWrite(path, size, conf);
}
/**
* Return the path to a local map output index file created earlier
*/
public Path getOutputIndexFile() throws IOException {
String path = String.format(OUTPUT_FILE_INDEX_FORMAT_STRING, TASKTRACKER_OUTPUT);
return lDirAlloc.getLocalPathToRead(path, conf);
}
/**
* Create a local map output index file name.
*
* @param size the size of the file
*/
public Path getOutputIndexFileForWrite(long size) throws IOException {
String path = String.format(OUTPUT_FILE_INDEX_FORMAT_STRING, TASKTRACKER_OUTPUT);
return lDirAlloc.getLocalPathForWrite(path, size, conf);
}
/**
* Return a local map spill file created earlier.
*
* @param spillNumber the number
*/
public Path getSpillFile(int spillNumber) throws IOException {
String path = String.format(SPILL_FILE_FORMAT_STRING, TASKTRACKER_OUTPUT, spillNumber);
return lDirAlloc.getLocalPathToRead(path, conf);
}
/**
* Create a local map spill file name.
*
* @param spillNumber the number
* @param size the size of the file
*/
public Path getSpillFileForWrite(int spillNumber, long size) throws IOException {
String path = String.format(SPILL_FILE_FORMAT_STRING, TASKTRACKER_OUTPUT, spillNumber);
return lDirAlloc.getLocalPathForWrite(path, size, conf);
}
/**
* Return a local map spill index file created earlier
*
* @param spillNumber the number
*/
public Path getSpillIndexFile(int spillNumber) throws IOException {
String path = String
.format(SPILL_INDEX_FILE_FORMAT_STRING, TASKTRACKER_OUTPUT, spillNumber);
return lDirAlloc.getLocalPathToRead(path, conf);
}
/**
* Create a local map spill index file name.
*
* @param spillNumber the number
* @param size the size of the file
*/
public Path getSpillIndexFileForWrite(int spillNumber, long size) throws IOException {
String path = String
.format(SPILL_INDEX_FILE_FORMAT_STRING, TASKTRACKER_OUTPUT, spillNumber);
return lDirAlloc.getLocalPathForWrite(path, size, conf);
}
/**
* Return a local reduce input file created earlier
*
* @param mapId a map task id
*/
public Path getInputFile(int mapId) throws IOException {
return lDirAlloc.getLocalPathToRead(
String.format(REDUCE_INPUT_FILE_FORMAT_STRING, TASKTRACKER_OUTPUT, Integer.valueOf(mapId)),
conf);
}
/**
* Create a local reduce input file name.
*
* @param mapId a map task id
* @param size the size of the file
*/
public Path getInputFileForWrite(TaskID mapId, long size, Configuration conf)
throws IOException {
return lDirAlloc.getLocalPathForWrite(
String.format(REDUCE_INPUT_FILE_FORMAT_STRING, TASKTRACKER_OUTPUT, mapId.getId()), size,
conf);
}
/** Removes all of the files related to a task. */
public void removeAll() throws IOException {
conf.deleteLocalFiles(TASKTRACKER_OUTPUT);
}
public String getOutputName(int partition) {
return String.format("part-%05d", partition);
}
}
| LocalJobOutputFiles |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/SpringJdbcComponentBuilderFactory.java | {
"start": 5386,
"end": 6478
} | class ____
extends AbstractComponentBuilder<SpringJdbcComponent>
implements SpringJdbcComponentBuilder {
@Override
protected SpringJdbcComponent buildConcreteComponent() {
return new SpringJdbcComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "dataSource": ((SpringJdbcComponent) component).setDataSource((javax.sql.DataSource) value); return true;
case "lazyStartProducer": ((SpringJdbcComponent) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((SpringJdbcComponent) component).setAutowiredEnabled((boolean) value); return true;
case "connectionStrategy": ((SpringJdbcComponent) component).setConnectionStrategy((org.apache.camel.component.jdbc.ConnectionStrategy) value); return true;
default: return false;
}
}
}
} | SpringJdbcComponentBuilderImpl |
java | elastic__elasticsearch | x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java | {
"start": 2094,
"end": 7484
} | class ____ extends ESTestCase {
public void testNoQueryNoFilter() {
QueryContainer container = new QueryContainer();
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
assertNull(sourceBuilder.query());
}
public void testQueryNoFilter() {
QueryContainer container = new QueryContainer().with(new MatchQuery(Source.EMPTY, "foo", "bar"));
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
assertEquals(matchQuery("foo", "bar").operator(Operator.OR), sourceBuilder.query());
}
public void testNoQueryFilter() {
QueryContainer container = new QueryContainer();
QueryBuilder filter = matchQuery("bar", "baz");
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, filter, randomIntBetween(1, 10));
assertEquals(boolQuery().filter(matchQuery("bar", "baz")), sourceBuilder.query());
}
public void testQueryFilter() {
QueryContainer container = new QueryContainer().with(new MatchQuery(Source.EMPTY, "foo", "bar"));
QueryBuilder filter = matchQuery("bar", "baz");
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, filter, randomIntBetween(1, 10));
assertEquals(
boolQuery().must(matchQuery("foo", "bar").operator(Operator.OR)).filter(matchQuery("bar", "baz")),
sourceBuilder.query()
);
}
public void testLimit() {
QueryContainer container = new QueryContainer().withLimit(10).addGroups(singletonList(new GroupByValue("1", "field")));
int size = randomIntBetween(1, 10);
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, size);
Builder aggBuilder = sourceBuilder.aggregations();
assertEquals(1, aggBuilder.count());
CompositeAggregationBuilder composite = (CompositeAggregationBuilder) aggBuilder.getAggregatorFactories().iterator().next();
assertEquals(size, composite.size());
}
public void testSortNoneSpecified() {
QueryContainer container = new QueryContainer();
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
assertEquals(singletonList(fieldSort("_doc")), sourceBuilder.sorts());
}
public void testSelectScoreForcesTrackingScore() {
Score score = new Score(Source.EMPTY);
ReferenceAttribute attr = new ReferenceAttribute(score.source(), "score", score.dataType());
QueryContainer container = new QueryContainer().withAliases(new AttributeMap<>(attr, score)).addColumn(attr);
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
assertTrue(sourceBuilder.trackScores());
}
public void testSortScoreSpecified() {
QueryContainer container = new QueryContainer().prependSort("id", new ScoreSort(Direction.DESC, null));
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
assertEquals(singletonList(scoreSort()), sourceBuilder.sorts());
}
public void testSortFieldSpecified() {
FieldSortBuilder sortField = fieldSort("test").unmappedType("keyword");
QueryContainer container = new QueryContainer().prependSort(
"id",
new AttributeSort(new FieldAttribute(Source.EMPTY, "test", new KeywordEsField("test")), Direction.ASC, Missing.LAST)
);
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
assertEquals(singletonList(sortField.order(SortOrder.ASC).missing("_last")), sourceBuilder.sorts());
container = new QueryContainer().prependSort(
"id",
new AttributeSort(new FieldAttribute(Source.EMPTY, "test", new KeywordEsField("test")), Direction.DESC, Missing.FIRST)
);
sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
assertEquals(singletonList(sortField.order(SortOrder.DESC).missing("_first")), sourceBuilder.sorts());
}
public void testNoSort() {
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(new QueryContainer(), null, randomIntBetween(1, 10));
assertEquals(singletonList(fieldSort("_doc").order(SortOrder.ASC)), sourceBuilder.sorts());
}
public void testTrackHits() {
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(
new QueryContainer().withTrackHits(),
null,
randomIntBetween(1, 10)
);
assertEquals(
"Should have tracked hits",
Integer.valueOf(SearchContext.TRACK_TOTAL_HITS_ACCURATE),
sourceBuilder.trackTotalHitsUpTo()
);
}
public void testNoSortIfAgg() {
QueryContainer container = new QueryContainer().addGroups(singletonList(new GroupByValue("group_id", "group_column")))
.addAgg("group_id", new AvgAgg("agg_id", AggSource.of("avg_column")));
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
assertNull(sourceBuilder.sorts());
}
}
| SourceGeneratorTests |
java | apache__camel | components/camel-test/camel-test-spring-junit5/src/test/java/org/apache/camel/test/spring/CamelSpringProvidesBreakpointInheritedTest.java | {
"start": 849,
"end": 953
} | class ____
extends CamelSpringProvidesBreakpointTest {
}
| CamelSpringProvidesBreakpointInheritedTest |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/support/TestPropertySourceUtils.java | {
"start": 3945,
"end": 20096
} | class ____).
for (List<MergedAnnotation<TestPropertySource>> aggregatedAnnotations :
findRepeatableAnnotations(testClass, TestPropertySource.class)) {
// Convert all the merged annotations for the current aggregate
// level to a list of TestPropertySourceAttributes.
List<TestPropertySourceAttributes> aggregatedAttributesList =
aggregatedAnnotations.stream().map(TestPropertySourceAttributes::new).toList();
// Merge all TestPropertySourceAttributes instances for the current
// aggregate level into a single TestPropertySourceAttributes instance.
TestPropertySourceAttributes mergedAttributes = mergeTestPropertySourceAttributes(aggregatedAttributesList);
if (mergedAttributes != null) {
if (!duplicationDetected(mergedAttributes, previousAttributes)) {
attributesList.add(mergedAttributes);
}
previousAttributes = mergedAttributes;
}
}
if (attributesList.isEmpty()) {
return MergedTestPropertySources.empty();
}
return new MergedTestPropertySources(mergeLocations(attributesList), mergeProperties(attributesList));
}
private static @Nullable TestPropertySourceAttributes mergeTestPropertySourceAttributes(
List<TestPropertySourceAttributes> aggregatedAttributesList) {
TestPropertySourceAttributes mergedAttributes = null;
TestPropertySourceAttributes previousAttributes = null;
for (TestPropertySourceAttributes currentAttributes : aggregatedAttributesList) {
if (mergedAttributes == null) {
mergedAttributes = currentAttributes;
}
else if (!duplicationDetected(currentAttributes, previousAttributes)) {
mergedAttributes.mergeWith(currentAttributes);
}
previousAttributes = currentAttributes;
}
return mergedAttributes;
}
@SuppressWarnings("NullAway") // Dataflow analysis limitation
private static boolean duplicationDetected(TestPropertySourceAttributes currentAttributes,
@Nullable TestPropertySourceAttributes previousAttributes) {
boolean duplicationDetected =
(currentAttributes.equals(previousAttributes) && !currentAttributes.isEmpty());
if (duplicationDetected && logger.isTraceEnabled()) {
logger.trace(String.format("Ignoring duplicate %s declaration on %s since it is also declared on %s",
currentAttributes, currentAttributes.getDeclaringClass().getName(),
previousAttributes.getDeclaringClass().getName()));
}
return duplicationDetected;
}
private static List<PropertySourceDescriptor> mergeLocations(List<TestPropertySourceAttributes> attributesList) {
List<PropertySourceDescriptor> descriptors = new ArrayList<>();
for (TestPropertySourceAttributes attrs : attributesList) {
if (logger.isTraceEnabled()) {
logger.trace("Processing locations for " + attrs);
}
descriptors.addAll(0, attrs.getPropertySourceDescriptors());
if (!attrs.isInheritLocations()) {
break;
}
}
return descriptors;
}
private static String[] mergeProperties(List<TestPropertySourceAttributes> attributesList) {
List<String> properties = new ArrayList<>();
for (TestPropertySourceAttributes attrs : attributesList) {
if (logger.isTraceEnabled()) {
logger.trace("Processing inlined properties for " + attrs);
}
String[] attrProps = attrs.getProperties();
properties.addAll(0, Arrays.asList(attrProps));
if (!attrs.isInheritProperties()) {
break;
}
}
return StringUtils.toStringArray(properties);
}
/**
* Add the {@link Properties} files from the given resource {@code locations}
* to the {@link Environment} of the supplied {@code context}.
* <p>This method delegates to
* {@link #addPropertiesFilesToEnvironment(ConfigurableEnvironment, ResourceLoader, String...)}.
* @param context the application context whose environment should be updated;
* never {@code null}
* @param locations the resource locations of {@code Properties} files to add
* to the environment; potentially empty but never {@code null}
* @throws IllegalStateException if an error occurs while processing a properties file
* @since 4.1.5
* @see org.springframework.core.io.support.ResourcePropertySource
* @see TestPropertySource#locations
* @see #addPropertiesFilesToEnvironment(ConfigurableEnvironment, ResourceLoader, String...)
* @see #addPropertySourcesToEnvironment(ConfigurableApplicationContext, List)
*/
public static void addPropertiesFilesToEnvironment(ConfigurableApplicationContext context, String... locations) {
Assert.notNull(context, "'context' must not be null");
Assert.notNull(locations, "'locations' must not be null");
addPropertiesFilesToEnvironment(context.getEnvironment(), context, locations);
}
/**
* Add the {@link Properties} files from the given resource {@code locations}
* to the supplied {@link ConfigurableEnvironment environment}.
* <p>Property placeholders in resource locations (i.e., <code>${...}</code>)
* will be {@linkplain Environment#resolveRequiredPlaceholders(String) resolved}
* against the {@code Environment}.
* <p>A {@link ResourcePatternResolver} will be used to resolve resource
* location patterns into multiple resource locations.
* <p>Each properties file will be converted to a
* {@link org.springframework.core.io.support.ResourcePropertySource ResourcePropertySource}
* that will be added to the {@link PropertySources} of the environment with
* the highest precedence.
* @param environment the environment to update; never {@code null}
* @param resourceLoader the {@code ResourceLoader} to use to load each resource;
* never {@code null}
* @param locations the resource locations of {@code Properties} files to add
* to the environment; potentially empty but never {@code null}
* @throws IllegalStateException if an error occurs while processing a properties file
* @since 4.3
* @see org.springframework.core.io.support.ResourcePropertySource
* @see TestPropertySource#locations
* @see #addPropertiesFilesToEnvironment(ConfigurableApplicationContext, String...)
* @see #addPropertySourcesToEnvironment(ConfigurableApplicationContext, List)
*/
public static void addPropertiesFilesToEnvironment(ConfigurableEnvironment environment,
ResourceLoader resourceLoader, String... locations) {
Assert.notNull(locations, "'locations' must not be null");
addPropertySourcesToEnvironment(environment, resourceLoader,
List.of(new PropertySourceDescriptor(locations)));
}
/**
* Add property sources for the given {@code descriptors} to the
* {@link Environment} of the supplied {@code context}.
* <p>This method delegates to
* {@link #addPropertySourcesToEnvironment(ConfigurableEnvironment, ResourceLoader, List)}.
* @param context the application context whose environment should be updated;
* never {@code null}
* @param descriptors the property source descriptors to process; potentially
* empty but never {@code null}
* @throws IllegalStateException if an error occurs while processing the
* descriptors and registering property sources
* @since 6.1
* @see TestPropertySource#locations
* @see TestPropertySource#encoding
* @see TestPropertySource#factory
* @see PropertySourceFactory
* @see #addPropertySourcesToEnvironment(ConfigurableEnvironment, ResourceLoader, List)
*/
public static void addPropertySourcesToEnvironment(ConfigurableApplicationContext context,
List<PropertySourceDescriptor> descriptors) {
Assert.notNull(context, "'context' must not be null");
Assert.notNull(descriptors, "'descriptors' must not be null");
addPropertySourcesToEnvironment(context.getEnvironment(), context, descriptors);
}
/**
* Add property sources for the given {@code descriptors} to the supplied
* {@link ConfigurableEnvironment environment}.
* <p>Property placeholders in resource locations (i.e., <code>${...}</code>)
* will be {@linkplain Environment#resolveRequiredPlaceholders(String) resolved}
* against the {@code Environment}.
* <p>A {@link ResourcePatternResolver} will be used to resolve resource
* location patterns into multiple resource locations.
* <p>Each {@link PropertySource} will be created via the configured
* {@link PropertySourceDescriptor#propertySourceFactory() PropertySourceFactory}
* (or the {@link DefaultPropertySourceFactory} if no factory is configured)
* and added to the {@link PropertySources} of the environment with the highest
* precedence.
* @param environment the environment to update; never {@code null}
* @param resourceLoader the {@code ResourceLoader} to use to load resources;
* never {@code null}
* @param descriptors the property source descriptors to process; potentially
* empty but never {@code null}
* @throws IllegalStateException if an error occurs while processing the
* descriptors and registering property sources
* @since 6.1
* @see TestPropertySource#locations
* @see TestPropertySource#encoding
* @see TestPropertySource#factory
* @see PropertySourceFactory
*/
public static void addPropertySourcesToEnvironment(ConfigurableEnvironment environment,
ResourceLoader resourceLoader, List<PropertySourceDescriptor> descriptors) {
Assert.notNull(environment, "'environment' must not be null");
Assert.notNull(resourceLoader, "'resourceLoader' must not be null");
Assert.notNull(descriptors, "'descriptors' must not be null");
ResourcePatternResolver resourcePatternResolver =
ResourcePatternUtils.getResourcePatternResolver(resourceLoader);
MutablePropertySources propertySources = environment.getPropertySources();
try {
for (PropertySourceDescriptor descriptor : descriptors) {
if (!descriptor.locations().isEmpty()) {
Class<? extends PropertySourceFactory> factoryClass = descriptor.propertySourceFactory();
PropertySourceFactory factory = (factoryClass != null ?
BeanUtils.instantiateClass(factoryClass) : defaultPropertySourceFactory);
for (String location : descriptor.locations()) {
String resolvedLocation = environment.resolveRequiredPlaceholders(location);
for (Resource resource : resourcePatternResolver.getResources(resolvedLocation)) {
PropertySource<?> propertySource = factory.createPropertySource(descriptor.name(),
new EncodedResource(resource, descriptor.encoding()));
propertySources.addFirst(propertySource);
}
}
}
}
}
catch (IOException ex) {
throw new IllegalStateException("Failed to add PropertySource to Environment", ex);
}
}
/**
* Add the given <em>inlined properties</em> to the {@link Environment} of the
* supplied {@code context}.
* <p>This method simply delegates to
* {@link #addInlinedPropertiesToEnvironment(ConfigurableEnvironment, String[])}.
* @param context the application context whose environment should be updated;
* never {@code null}
* @param inlinedProperties the inlined properties to add to the environment;
* potentially empty but never {@code null}
* @since 4.1.5
* @see TestPropertySource#properties
* @see #addInlinedPropertiesToEnvironment(ConfigurableEnvironment, String[])
*/
public static void addInlinedPropertiesToEnvironment(ConfigurableApplicationContext context, String... inlinedProperties) {
Assert.notNull(context, "'context' must not be null");
Assert.notNull(inlinedProperties, "'inlinedProperties' must not be null");
addInlinedPropertiesToEnvironment(context.getEnvironment(), inlinedProperties);
}
/**
* Add the given <em>inlined properties</em> (in the form of <em>key-value</em>
* pairs) to the supplied {@link ConfigurableEnvironment environment}.
* <p>All key-value pairs will be added to the {@code Environment} as a
* single {@link MapPropertySource} with the highest precedence.
* <p>For details on the parsing of <em>inlined properties</em>, consult the
* Javadoc for {@link #convertInlinedPropertiesToMap}.
* @param environment the environment to update; never {@code null}
* @param inlinedProperties the inlined properties to add to the environment;
* potentially empty but never {@code null}
* @since 4.1.5
* @see MapPropertySource
* @see #INLINED_PROPERTIES_PROPERTY_SOURCE_NAME
* @see TestPropertySource#properties
* @see #convertInlinedPropertiesToMap
*/
public static void addInlinedPropertiesToEnvironment(ConfigurableEnvironment environment, String... inlinedProperties) {
Assert.notNull(environment, "'environment' must not be null");
Assert.notNull(inlinedProperties, "'inlinedProperties' must not be null");
if (!ObjectUtils.isEmpty(inlinedProperties)) {
if (logger.isTraceEnabled()) {
logger.trace("Adding inlined properties to environment: " +
ObjectUtils.nullSafeToString(inlinedProperties));
}
MapPropertySource ps = (MapPropertySource)
environment.getPropertySources().get(INLINED_PROPERTIES_PROPERTY_SOURCE_NAME);
if (ps == null) {
ps = new MapPropertySource(INLINED_PROPERTIES_PROPERTY_SOURCE_NAME, new LinkedHashMap<>());
environment.getPropertySources().addFirst(ps);
}
ps.getSource().putAll(convertInlinedPropertiesToMap(inlinedProperties));
}
}
/**
* Convert the supplied <em>inlined properties</em> (in the form of <em>key-value</em>
* pairs) into a map keyed by property name.
* <p>Parsing of the key-value pairs is achieved by converting all supplied
* strings into <em>virtual</em> properties files in memory and delegating to
* {@link Properties#load(java.io.Reader)} to parse each virtual file.
* <p>The ordering of property names will be preserved in the returned map,
* analogous to the order in which the key-value pairs are supplied to this
* method. This also applies if a single string contains multiple key-value
* pairs separated by newlines — for example, when supplied by a user
* via a <em>text block</em>.
* <p>For a full discussion of <em>inlined properties</em>, consult the Javadoc
* for {@link TestPropertySource#properties}.
* @param inlinedProperties the inlined properties to convert; potentially empty
* but never {@code null}
* @return a new, ordered map containing the converted properties
* @throws IllegalStateException if a given key-value pair cannot be parsed
* @since 4.1.5
* @see #addInlinedPropertiesToEnvironment(ConfigurableEnvironment, String[])
*/
public static Map<String, Object> convertInlinedPropertiesToMap(String... inlinedProperties) {
Assert.notNull(inlinedProperties, "'inlinedProperties' must not be null");
SequencedProperties sequencedProperties = new SequencedProperties();
for (String input : inlinedProperties) {
if (!StringUtils.hasText(input)) {
continue;
}
try {
sequencedProperties.load(new StringReader(input));
}
catch (Exception ex) {
throw new IllegalStateException("Failed to load test environment properties from [" + input + "]", ex);
}
}
return sequencedProperties.getSequencedMap();
}
private static <T extends Annotation> List<List<MergedAnnotation<T>>> findRepeatableAnnotations(
Class<?> clazz, Class<T> annotationType) {
List<List<MergedAnnotation<T>>> listOfLists = new ArrayList<>();
findRepeatableAnnotations(clazz, annotationType, listOfLists, new int[] {0});
return listOfLists;
}
	// Recursive worker: collects repeatable annotations of the given type declared
	// directly on clazz, then walks its interfaces and superclass hierarchy.
	// aggregateIndex is a single-element array used as a mutable counter shared
	// across recursive calls; each visited class level gets its own sub-list.
	private static <T extends Annotation> void findRepeatableAnnotations(
			Class<?> clazz, Class<T> annotationType, List<List<MergedAnnotation<T>>> listOfLists, int[] aggregateIndex) {
		// Ensure we have a list for the current aggregate index.
		if (listOfLists.size() < aggregateIndex[0] + 1) {
			listOfLists.add(new ArrayList<>());
		}
		// Only annotations declared directly on clazz (SearchStrategy.DIRECT);
		// sorted by descending meta-distance, then each prepended at index 0 so
		// that nearer annotations end up first in the sub-list.
		MergedAnnotations.from(clazz, SearchStrategy.DIRECT)
			.stream(annotationType)
			.sorted(reversedMetaDistance)
			.forEach(annotation -> listOfLists.get(aggregateIndex[0]).add(0, annotation));
		aggregateIndex[0]++;
		// Declared on an interface?
		for (Class<?> ifc : clazz.getInterfaces()) {
			findRepeatableAnnotations(ifc, annotationType, listOfLists, aggregateIndex);
		}
		// Declared on a superclass?
		Class<?> superclass = clazz.getSuperclass();
		// NOTE(review): non-short-circuit '&' — almost certainly '&&' was intended.
		// Behaviorally identical here (both operands are side-effect-free reference
		// comparisons, and evaluating 'superclass != Object.class' on null is safe),
		// but the idiomatic short-circuit form should be used; confirm and fix.
		if (superclass != null & superclass != Object.class) {
			findRepeatableAnnotations(superclass, annotationType, listOfLists, aggregateIndex);
		}
// Declared on an enclosing | hierarchy |
java | apache__camel | components/camel-google/camel-google-drive/src/generated/java/org/apache/camel/component/google/drive/internal/GoogleDriveApiCollection.java | {
"start": 6401,
"end": 6545
} | class ____ {
private static final GoogleDriveApiCollection INSTANCE = new GoogleDriveApiCollection();
}
}
| GoogleDriveApiCollectionHolder |
java | spring-projects__spring-security | webauthn/src/main/java/org/springframework/security/web/webauthn/management/MapPublicKeyCredentialUserEntityRepository.java | {
"start": 1109,
"end": 2284
} | class ____ implements PublicKeyCredentialUserEntityRepository {
private final Map<String, PublicKeyCredentialUserEntity> usernameToUserEntity = new HashMap<>();
private final Map<Bytes, PublicKeyCredentialUserEntity> idToUserEntity = new HashMap<>();
@Override
public @Nullable PublicKeyCredentialUserEntity findById(Bytes id) {
Assert.notNull(id, "id cannot be null");
return this.idToUserEntity.get(id);
}
@Override
public @Nullable PublicKeyCredentialUserEntity findByUsername(String username) {
Assert.notNull(username, "username cannot be null");
return this.usernameToUserEntity.get(username);
}
@Override
public void save(PublicKeyCredentialUserEntity userEntity) {
if (userEntity == null) {
throw new IllegalArgumentException("userEntity cannot be null");
}
this.usernameToUserEntity.put(userEntity.getName(), userEntity);
this.idToUserEntity.put(userEntity.getId(), userEntity);
}
@Override
public void delete(Bytes id) {
PublicKeyCredentialUserEntity existing = this.idToUserEntity.remove(id);
if (existing != null) {
this.usernameToUserEntity.remove(existing.getName());
}
}
}
| MapPublicKeyCredentialUserEntityRepository |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDecider.java | {
"start": 1059,
"end": 4925
} | class ____ extends AllocationDecider {
public static final String NAME = "restore_in_progress";
@Override
public Decision canAllocate(final ShardRouting shardRouting, final RoutingNode node, final RoutingAllocation allocation) {
return canAllocate(shardRouting, allocation);
}
@Override
public Decision canAllocate(final ShardRouting shardRouting, final RoutingAllocation allocation) {
final RecoverySource recoverySource = shardRouting.recoverySource();
if (recoverySource == null || recoverySource.getType() != RecoverySource.Type.SNAPSHOT) {
return allocation.decision(Decision.YES, NAME, "ignored as shard is not being recovered from a snapshot");
}
final RecoverySource.SnapshotRecoverySource source = (RecoverySource.SnapshotRecoverySource) recoverySource;
if (source.restoreUUID().equals(RecoverySource.SnapshotRecoverySource.NO_API_RESTORE_UUID)) {
return allocation.decision(Decision.YES, NAME, "not an API-level restore");
}
RestoreInProgress.Entry restoreInProgress = RestoreInProgress.get(allocation.getClusterState()).get(source.restoreUUID());
if (restoreInProgress != null) {
RestoreInProgress.ShardRestoreStatus shardRestoreStatus = restoreInProgress.shards().get(shardRouting.shardId());
if (shardRestoreStatus != null && shardRestoreStatus.state().completed() == false) {
assert shardRestoreStatus.state() != RestoreInProgress.State.SUCCESS
: "expected shard [" + shardRouting + "] to be in initializing state but got [" + shardRestoreStatus.state() + "]";
return allocation.decision(Decision.YES, NAME, "shard is currently being restored");
}
}
/**
* POST: the RestoreInProgress.Entry is non-existent. This section differentiates between a restore that failed
* because of a indexing fault (see {@link AllocationService.applyFailedShards}) or because of an allocation
* failure.
*/
UnassignedInfo unassignedInfo = shardRouting.unassignedInfo();
if (unassignedInfo.failedAllocations() > 0) {
return allocation.decision(
Decision.NO,
NAME,
"shard has failed to be restored from the snapshot [%s] - manually close or delete the index [%s] in order to retry "
+ "to restore the snapshot again or use the reroute API to force the allocation of an empty primary shard. Check the "
+ "logs for more information about the failure. Details: [%s]",
source.snapshot(),
shardRouting.getIndexName(),
unassignedInfo.details()
);
} else {
return allocation.decision(
Decision.NO,
NAME,
"Restore from snapshot failed because the configured constraints prevented allocation on any of the available nodes. "
+ "Please check constraints applied in index and cluster settings, then retry the restore. "
+ "See [%s] for more details on using the allocation explain API.",
ReferenceDocs.ALLOCATION_EXPLAIN_API
);
}
}
@Override
public Decision canForceAllocatePrimary(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
assert shardRouting.primary() : "must not call canForceAllocatePrimary on a non-primary shard " + shardRouting;
return canAllocate(shardRouting, node, allocation);
}
@Override
public Decision canForceAllocateDuringReplace(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
return canAllocate(shardRouting, node, allocation);
}
}
| RestoreInProgressAllocationDecider |
java | spring-projects__spring-security | oauth2/oauth2-client/src/test/java/org/springframework/security/oauth2/client/registration/ClientRegistrationsTests.java | {
"start": 1908,
"end": 29786
} | class ____ {
/**
* Contains all optional parameters that are found in ClientRegistration
*/
// @formatter:off
private static final String DEFAULT_RESPONSE = "{\n"
+ " \"authorization_endpoint\": \"https://example.com/o/oauth2/v2/auth\", \n"
+ " \"claims_supported\": [\n"
+ " \"aud\", \n"
+ " \"email\", \n"
+ " \"email_verified\", \n"
+ " \"exp\", \n"
+ " \"family_name\", \n"
+ " \"given_name\", \n"
+ " \"iat\", \n"
+ " \"iss\", \n"
+ " \"locale\", \n"
+ " \"name\", \n"
+ " \"picture\", \n"
+ " \"sub\"\n"
+ " ], \n"
+ " \"code_challenge_methods_supported\": [\n"
+ " \"plain\", \n"
+ " \"S256\"\n"
+ " ], \n"
+ " \"id_token_signing_alg_values_supported\": [\n"
+ " \"RS256\"\n"
+ " ], \n"
+ " \"issuer\": \"https://example.com\", \n"
+ " \"jwks_uri\": \"https://example.com/oauth2/v3/certs\", \n"
+ " \"response_types_supported\": [\n"
+ " \"code\", \n"
+ " \"token\", \n"
+ " \"id_token\", \n"
+ " \"code token\", \n"
+ " \"code id_token\", \n"
+ " \"token id_token\", \n"
+ " \"code token id_token\", \n"
+ " \"none\"\n"
+ " ], \n"
+ " \"revocation_endpoint\": \"https://example.com/o/oauth2/revoke\", \n"
+ " \"scopes_supported\": [\n"
+ " \"openid\", \n"
+ " \"email\", \n"
+ " \"profile\"\n"
+ " ], \n"
+ " \"subject_types_supported\": [\n"
+ " \"public\"\n"
+ " ], \n"
+ " \"grant_types_supported\" : [\"authorization_code\"], \n"
+ " \"token_endpoint\": \"https://example.com/oauth2/v4/token\", \n"
+ " \"token_endpoint_auth_methods_supported\": [\n"
+ " \"client_secret_post\", \n"
+ " \"client_secret_basic\", \n"
+ " \"none\"\n"
+ " ], \n"
+ " \"userinfo_endpoint\": \"https://example.com/oauth2/v3/userinfo\"\n"
+ "}";
// @formatter:on
private MockWebServer server;
private JsonMapper mapper = new JsonMapper();
private Map<String, Object> response;
private String issuer;
@BeforeEach
public void setup() throws Exception {
this.server = new MockWebServer();
this.server.start();
this.response = this.mapper.readValue(DEFAULT_RESPONSE, new TypeReference<Map<String, Object>>() {
});
}
@AfterEach
public void cleanup() throws Exception {
this.server.shutdown();
}
@Test
public void issuerWhenAllInformationThenSuccess() throws Exception {
ClientRegistration registration = registration("").build();
ClientRegistration.ProviderDetails provider = registration.getProviderDetails();
assertIssuerMetadata(registration, provider);
assertThat(provider.getUserInfoEndpoint().getUri()).isEqualTo("https://example.com/oauth2/v3/userinfo");
}
/**
*
* Test compatibility with OpenID v1 discovery endpoint by making a <a href=
* "https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationRequest">OpenID
* Provider Configuration Request</a> as highlighted
* <a href="https://tools.ietf.org/html/rfc8414#section-5"> Compatibility Notes</a> of
* <a href="https://tools.ietf.org/html/rfc8414">RFC 8414</a> specification.
*/
@Test
public void issuerWhenOidcFallbackAllInformationThenSuccess() throws Exception {
ClientRegistration registration = registrationOidcFallback("issuer1", null).build();
ClientRegistration.ProviderDetails provider = registration.getProviderDetails();
assertIssuerMetadata(registration, provider);
assertThat(provider.getUserInfoEndpoint().getUri()).isEqualTo("https://example.com/oauth2/v3/userinfo");
}
@Test
public void issuerWhenOAuth2AllInformationThenSuccess() throws Exception {
ClientRegistration registration = registrationOAuth2("", null).build();
ClientRegistration.ProviderDetails provider = registration.getProviderDetails();
assertIssuerMetadata(registration, provider);
}
private void assertIssuerMetadata(ClientRegistration registration, ClientRegistration.ProviderDetails provider) {
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
assertThat(registration.getAuthorizationGrantType()).isEqualTo(AuthorizationGrantType.AUTHORIZATION_CODE);
assertThat(registration.getRegistrationId()).isEqualTo(URI.create(this.issuer).getHost());
assertThat(registration.getClientName()).isEqualTo(this.issuer);
assertThat(registration.getScopes()).isNull();
assertThat(provider.getAuthorizationUri()).isEqualTo("https://example.com/o/oauth2/v2/auth");
assertThat(provider.getTokenUri()).isEqualTo("https://example.com/oauth2/v4/token");
assertThat(provider.getJwkSetUri()).isEqualTo("https://example.com/oauth2/v3/certs");
assertThat(provider.getIssuerUri()).isEqualTo(this.issuer);
assertThat(provider.getConfigurationMetadata()).containsKeys("authorization_endpoint", "claims_supported",
"code_challenge_methods_supported", "id_token_signing_alg_values_supported", "issuer", "jwks_uri",
"response_types_supported", "revocation_endpoint", "scopes_supported", "subject_types_supported",
"grant_types_supported", "token_endpoint", "token_endpoint_auth_methods_supported",
"userinfo_endpoint");
}
// gh-7512
@Test
public void issuerWhenResponseMissingJwksUriThenThrowsIllegalArgumentException() throws Exception {
this.response.remove("jwks_uri");
assertThatIllegalArgumentException().isThrownBy(() -> registration("").build());
}
// gh-7512
@Test
public void issuerWhenOidcFallbackResponseMissingJwksUriThenThrowsIllegalArgumentException() throws Exception {
this.response.remove("jwks_uri");
assertThatIllegalArgumentException().isThrownBy(() -> registrationOidcFallback("issuer1", null).build());
}
// gh-7512
@Test
public void issuerWhenOAuth2ResponseMissingJwksUriThenThenSuccess() throws Exception {
this.response.remove("jwks_uri");
ClientRegistration registration = registrationOAuth2("", null).build();
ClientRegistration.ProviderDetails provider = registration.getProviderDetails();
assertThat(provider.getJwkSetUri()).isNull();
}
// gh-8187
@Test
public void issuerWhenResponseMissingUserInfoUriThenSuccess() throws Exception {
this.response.remove("userinfo_endpoint");
ClientRegistration registration = registration("").build();
assertThat(registration.getProviderDetails().getUserInfoEndpoint().getUri()).isNull();
}
@Test
public void issuerWhenContainsTrailingSlashThenSuccess() throws Exception {
assertThat(registration("")).isNotNull();
assertThat(this.issuer).endsWith("/");
}
@Test
public void issuerWhenOidcFallbackContainsTrailingSlashThenSuccess() throws Exception {
assertThat(registrationOidcFallback("", null)).isNotNull();
assertThat(this.issuer).endsWith("/");
}
@Test
public void issuerWhenOAuth2ContainsTrailingSlashThenSuccess() throws Exception {
assertThat(registrationOAuth2("", null)).isNotNull();
assertThat(this.issuer).endsWith("/");
}
@Test
public void issuerWhenGrantTypesSupportedNullThenDefaulted() throws Exception {
this.response.remove("grant_types_supported");
ClientRegistration registration = registration("").build();
assertThat(registration.getAuthorizationGrantType()).isEqualTo(AuthorizationGrantType.AUTHORIZATION_CODE);
}
@Test
public void issuerWhenOAuth2GrantTypesSupportedNullThenDefaulted() throws Exception {
this.response.remove("grant_types_supported");
ClientRegistration registration = registrationOAuth2("", null).build();
assertThat(registration.getAuthorizationGrantType()).isEqualTo(AuthorizationGrantType.AUTHORIZATION_CODE);
}
// gh-9828
@Test
public void issuerWhenImplicitGrantTypeThenSuccess() throws Exception {
this.response.put("grant_types_supported", Arrays.asList("implicit"));
ClientRegistration registration = registration("").build();
// The authorization_code grant type is still the default
assertThat(registration.getAuthorizationGrantType()).isEqualTo(AuthorizationGrantType.AUTHORIZATION_CODE);
}
// gh-9828
@Test
public void issuerWhenOAuth2JwtBearerGrantTypeThenSuccess() throws Exception {
this.response.put("grant_types_supported", Arrays.asList("urn:ietf:params:oauth:grant-type:jwt-bearer"));
ClientRegistration registration = registrationOAuth2("", null).build();
// The authorization_code grant type is still the default
assertThat(registration.getAuthorizationGrantType()).isEqualTo(AuthorizationGrantType.AUTHORIZATION_CODE);
}
// gh-9795
@Test
public void issuerWhenResponseAuthorizationEndpointIsNullThenSuccess() throws Exception {
this.response.put("grant_types_supported", Arrays.asList("urn:ietf:params:oauth:grant-type:jwt-bearer"));
this.response.remove("authorization_endpoint");
ClientRegistration registration = registration("").authorizationGrantType(AuthorizationGrantType.JWT_BEARER)
.build();
assertThat(registration.getAuthorizationGrantType()).isEqualTo(AuthorizationGrantType.JWT_BEARER);
ClientRegistration.ProviderDetails provider = registration.getProviderDetails();
assertThat(provider.getAuthorizationUri()).isNull();
}
// gh-9795
@Test
public void issuerWhenOAuth2ResponseAuthorizationEndpointIsNullThenSuccess() throws Exception {
this.response.put("grant_types_supported", Arrays.asList("urn:ietf:params:oauth:grant-type:jwt-bearer"));
this.response.remove("authorization_endpoint");
ClientRegistration registration = registrationOAuth2("", null)
.authorizationGrantType(AuthorizationGrantType.JWT_BEARER)
.build();
assertThat(registration.getAuthorizationGrantType()).isEqualTo(AuthorizationGrantType.JWT_BEARER);
ClientRegistration.ProviderDetails provider = registration.getProviderDetails();
assertThat(provider.getAuthorizationUri()).isNull();
}
@Test
public void issuerWhenTokenEndpointAuthMethodsNullThenDefaulted() throws Exception {
this.response.remove("token_endpoint_auth_methods_supported");
ClientRegistration registration = registration("").build();
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
}
@Test
public void issuerWhenOAuth2TokenEndpointAuthMethodsNullThenDefaulted() throws Exception {
this.response.remove("token_endpoint_auth_methods_supported");
ClientRegistration registration = registrationOAuth2("", null).build();
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
}
// gh-9780
@Test
public void issuerWhenClientSecretBasicAuthMethodThenMethodIsBasic() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("client_secret_basic"));
ClientRegistration registration = registration("").build();
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
}
// gh-9780
@Test
public void issuerWhenOAuth2ClientSecretBasicAuthMethodThenMethodIsBasic() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("client_secret_basic"));
ClientRegistration registration = registrationOAuth2("", null).build();
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
}
@Test
public void issuerWhenTokenEndpointAuthMethodsPostThenMethodIsPost() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("client_secret_post"));
ClientRegistration registration = registration("").build();
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_POST);
}
@Test
public void issuerWhenOAuth2TokenEndpointAuthMethodsPostThenMethodIsPost() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("client_secret_post"));
ClientRegistration registration = registrationOAuth2("", null).build();
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_POST);
}
// gh-9780
@Test
public void issuerWhenClientSecretJwtAuthMethodThenMethodIsClientSecretBasic() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("client_secret_jwt"));
ClientRegistration registration = registration("").build();
// The client_secret_basic auth method is still the default
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
}
// gh-9780
@Test
public void issuerWhenOAuth2ClientSecretJwtAuthMethodThenMethodIsClientSecretBasic() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("client_secret_jwt"));
ClientRegistration registration = registrationOAuth2("", null).build();
// The client_secret_basic auth method is still the default
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
}
// gh-9780
@Test
public void issuerWhenPrivateKeyJwtAuthMethodThenMethodIsClientSecretBasic() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("private_key_jwt"));
ClientRegistration registration = registration("").build();
// The client_secret_basic auth method is still the default
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
}
// gh-9780
@Test
public void issuerWhenOAuth2PrivateKeyJwtAuthMethodThenMethodIsClientSecretBasic() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("private_key_jwt"));
ClientRegistration registration = registrationOAuth2("", null).build();
// The client_secret_basic auth method is still the default
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
}
@Test
public void issuerWhenTokenEndpointAuthMethodsNoneThenMethodIsNone() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("none"));
ClientRegistration registration = registration("").build();
assertThat(registration.getClientAuthenticationMethod()).isEqualTo(ClientAuthenticationMethod.NONE);
}
@Test
public void issuerWhenOAuth2TokenEndpointAuthMethodsNoneThenMethodIsNone() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("none"));
ClientRegistration registration = registrationOAuth2("", null).build();
assertThat(registration.getClientAuthenticationMethod()).isEqualTo(ClientAuthenticationMethod.NONE);
}
// gh-9780
@Test
public void issuerWhenTlsClientAuthMethodThenSuccess() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("tls_client_auth"));
ClientRegistration registration = registration("").build();
// The client_secret_basic auth method is still the default
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
}
// gh-9780
@Test
public void issuerWhenOAuth2TlsClientAuthMethodThenSuccess() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("tls_client_auth"));
ClientRegistration registration = registrationOAuth2("", null).build();
// The client_secret_basic auth method is still the default
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
}
@Test
public void issuerWhenOAuth2EmptyStringThenMeaningfulErrorMessage() {
// @formatter:off
assertThatIllegalArgumentException()
.isThrownBy(() -> ClientRegistrations.fromIssuerLocation(""))
.withMessageContaining("issuer cannot be empty");
// @formatter:on
}
@Test
public void issuerWhenEmptyStringThenMeaningfulErrorMessage() {
// @formatter:off
assertThatIllegalArgumentException()
.isThrownBy(() -> ClientRegistrations.fromOidcIssuerLocation(""))
.withMessageContaining("issuer cannot be empty");
// @formatter:on
}
@Test
public void issuerWhenOpenIdConfigurationDoesNotMatchThenMeaningfulErrorMessage() throws Exception {
this.issuer = createIssuerFromServer("");
String body = this.mapper.writeValueAsString(this.response);
MockResponse mockResponse = new MockResponse().setBody(body)
.setHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE);
this.server.enqueue(mockResponse);
// @formatter:off
assertThatIllegalStateException()
.isThrownBy(() -> ClientRegistrations.fromOidcIssuerLocation(this.issuer))
.withMessageContaining("The Issuer \"https://example.com\" provided in the configuration metadata did "
+ "not match the requested issuer \"" + this.issuer + "\"");
// @formatter:on
}
@Test
public void issuerWhenOAuth2ConfigurationDoesNotMatchThenMeaningfulErrorMessage() throws Exception {
this.issuer = createIssuerFromServer("");
String body = this.mapper.writeValueAsString(this.response);
MockResponse mockResponse = new MockResponse().setBody(body)
.setHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE);
this.server.enqueue(mockResponse);
// @formatter:off
assertThatIllegalStateException()
.isThrownBy(() -> ClientRegistrations.fromIssuerLocation(this.issuer))
.withMessageContaining("The Issuer \"https://example.com\" provided in the configuration metadata "
+ "did not match the requested issuer \"" + this.issuer + "\"");
// @formatter:on
}
@Test
public void issuerWhenOidcConfigurationAllInformationThenSuccess() throws Exception {
ClientRegistration registration = registration(this.response).build();
ClientRegistration.ProviderDetails provider = registration.getProviderDetails();
assertIssuerMetadata(registration, provider);
assertThat(provider.getUserInfoEndpoint().getUri()).isEqualTo("https://example.com/oauth2/v3/userinfo");
}
private ClientRegistration.Builder registration(Map<String, Object> configuration) {
this.issuer = "https://example.com";
return ClientRegistrations.fromOidcConfiguration(configuration)
.clientId("client-id")
.clientSecret("client-secret");
}
@Test
public void issuerWhenOidcConfigurationResponseMissingJwksUriThenThrowsIllegalArgumentException() throws Exception {
this.response.remove("jwks_uri");
assertThatNullPointerException().isThrownBy(() -> registration(this.response).build());
}
@Test
public void issuerWhenOidcConfigurationResponseMissingUserInfoUriThenSuccess() throws Exception {
this.response.remove("userinfo_endpoint");
ClientRegistration registration = registration(this.response).build();
assertThat(registration.getProviderDetails().getUserInfoEndpoint().getUri()).isNull();
}
@Test
public void issuerWhenOidcConfigurationGrantTypesSupportedNullThenDefaulted() throws Exception {
this.response.remove("grant_types_supported");
ClientRegistration registration = registration(this.response).build();
assertThat(registration.getAuthorizationGrantType()).isEqualTo(AuthorizationGrantType.AUTHORIZATION_CODE);
}
@Test
public void issuerWhenOidcConfigurationImplicitGrantTypeThenSuccess() throws Exception {
this.response.put("grant_types_supported", Arrays.asList("implicit"));
ClientRegistration registration = registration(this.response).build();
// The authorization_code grant type is still the default
assertThat(registration.getAuthorizationGrantType()).isEqualTo(AuthorizationGrantType.AUTHORIZATION_CODE);
}
@Test
public void issuerWhenOidcConfigurationResponseAuthorizationEndpointIsNullThenSuccess() throws Exception {
this.response.put("grant_types_supported", Arrays.asList("urn:ietf:params:oauth:grant-type:jwt-bearer"));
this.response.remove("authorization_endpoint");
ClientRegistration registration = registration(this.response)
.authorizationGrantType(AuthorizationGrantType.JWT_BEARER)
.build();
assertThat(registration.getAuthorizationGrantType()).isEqualTo(AuthorizationGrantType.JWT_BEARER);
ClientRegistration.ProviderDetails provider = registration.getProviderDetails();
assertThat(provider.getAuthorizationUri()).isNull();
}
@Test
public void issuerWhenOidcConfigurationTokenEndpointAuthMethodsNullThenDefaulted() throws Exception {
this.response.remove("token_endpoint_auth_methods_supported");
ClientRegistration registration = registration(this.response).build();
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
}
@Test
public void issuerWhenOidcConfigurationClientSecretBasicAuthMethodThenMethodIsBasic() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("client_secret_basic"));
ClientRegistration registration = registration(this.response).build();
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
}
@Test
public void issuerWhenOidcConfigurationTokenEndpointAuthMethodsPostThenMethodIsPost() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("client_secret_post"));
ClientRegistration registration = registration(this.response).build();
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_POST);
}
@Test
public void issuerWhenOidcConfigurationClientSecretJwtAuthMethodThenMethodIsClientSecretBasic() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("client_secret_jwt"));
ClientRegistration registration = registration(this.response).build();
// The client_secret_basic auth method is still the default
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
}
@Test
public void issuerWhenOidcConfigurationPrivateKeyJwtAuthMethodThenMethodIsClientSecretBasic() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("private_key_jwt"));
ClientRegistration registration = registration(this.response).build();
// The client_secret_basic auth method is still the default
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
}
@Test
public void issuerWhenOidcConfigurationTokenEndpointAuthMethodsNoneThenMethodIsNone() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("none"));
ClientRegistration registration = registration(this.response).build();
assertThat(registration.getClientAuthenticationMethod()).isEqualTo(ClientAuthenticationMethod.NONE);
}
@Test
public void issuerWhenOidcConfigurationTlsClientAuthMethodThenSuccess() throws Exception {
this.response.put("token_endpoint_auth_methods_supported", Arrays.asList("tls_client_auth"));
ClientRegistration registration = registration(this.response).build();
// The client_secret_basic auth method is still the default
assertThat(registration.getClientAuthenticationMethod())
.isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_BASIC);
}
// gh-15852
@Test
public void oidcWhenHostContainsUnderscoreThenRetains() {
UriComponents oidc = ClientRegistrations.oidcUri("https://elated_sutherland:8080/path");
assertThat(oidc.getHost()).isEqualTo("elated_sutherland");
UriComponents oauth = ClientRegistrations.oauthUri("https://elated_sutherland:8080/path");
assertThat(oauth.getHost()).isEqualTo("elated_sutherland");
UriComponents oidcRfc8414 = ClientRegistrations.oidcRfc8414Uri("https://elated_sutherland:8080/path");
assertThat(oidcRfc8414.getHost()).isEqualTo("elated_sutherland");
}
@Test
public void issuerWhenAllEndpointsFailedThenExceptionIncludesFailureInformation() {
this.issuer = createIssuerFromServer("issuer1");
this.server.setDispatcher(new Dispatcher() {
@Override
public MockResponse dispatch(RecordedRequest request) {
int responseCode = switch (request.getPath()) {
case "/issuer1/.well-known/openid-configuration" -> 405;
case "/.well-known/openid-configuration/issuer1" -> 400;
default -> 404;
};
return new MockResponse().setResponseCode(responseCode);
}
});
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> ClientRegistrations.fromIssuerLocation(this.issuer).build())
.withMessageContaining("405")
.withMessageContaining("400")
.withMessageContaining("404");
}
private ClientRegistration.Builder registration(String path) throws Exception {
this.issuer = createIssuerFromServer(path);
this.response.put("issuer", this.issuer);
String body = this.mapper.writeValueAsString(this.response);
// @formatter:off
MockResponse mockResponse = new MockResponse()
.setBody(body)
.setHeader(HttpHeaders.CONTENT_TYPE,
MediaType.APPLICATION_JSON_VALUE);
this.server.enqueue(mockResponse);
return ClientRegistrations.fromOidcIssuerLocation(this.issuer)
.clientId("client-id")
.clientSecret("client-secret");
// @formatter:on
}
private ClientRegistration.Builder registrationOAuth2(String path, String body) throws Exception {
this.issuer = createIssuerFromServer(path);
this.response.put("issuer", this.issuer);
this.issuer = this.server.url(path).toString();
final String responseBody = (body != null) ? body : this.mapper.writeValueAsString(this.response);
final Dispatcher dispatcher = new Dispatcher() {
@Override
public MockResponse dispatch(RecordedRequest request) {
return switch (request.getPath()) {
case "/.well-known/oauth-authorization-server/issuer1",
"/.well-known/oauth-authorization-server/" ->
buildSuccessMockResponse(responseBody);
default -> new MockResponse().setResponseCode(404);
};
}
};
this.server.setDispatcher(dispatcher);
// @formatter:off
return ClientRegistrations.fromIssuerLocation(this.issuer)
.clientId("client-id")
.clientSecret("client-secret");
// @formatter:on
}
private String createIssuerFromServer(String path) {
return this.server.url(path).toString();
}
/**
* Simulates a situation when the ClientRegistration is used with a legacy application
* where the OIDC Discovery Endpoint is "/issuer1/.well-known/openid-configuration"
* instead of "/.well-known/openid-configuration/issuer1" in which case the first
* attempt results in HTTP 404 and the subsequent call results in 200 OK.
*
* @see <a href="https://tools.ietf.org/html/rfc8414#section-5">Section 5</a> for more
* details.
*/
private ClientRegistration.Builder registrationOidcFallback(String path, String body) throws Exception {
this.issuer = createIssuerFromServer(path);
this.response.put("issuer", this.issuer);
String responseBody = (body != null) ? body : this.mapper.writeValueAsString(this.response);
final Dispatcher dispatcher = new Dispatcher() {
@Override
public MockResponse dispatch(RecordedRequest request) {
return switch (request.getPath()) {
case "/issuer1/.well-known/openid-configuration", "/.well-known/openid-configuration/" ->
buildSuccessMockResponse(responseBody);
default -> new MockResponse().setResponseCode(404);
};
}
};
this.server.setDispatcher(dispatcher);
return ClientRegistrations.fromIssuerLocation(this.issuer).clientId("client-id").clientSecret("client-secret");
}
private MockResponse buildSuccessMockResponse(String body) {
// @formatter:off
return new MockResponse().setResponseCode(200)
.setBody(body)
.setHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE);
// @formatter:on
}
}
| ClientRegistrationsTests |
java | apache__camel | components/camel-cxf/camel-cxf-rest/src/test/java/org/apache/camel/component/cxf/jaxrs/simplebinding/testbean/Product.java | {
"start": 966,
"end": 1303
} | class ____ {
private long id;
private String description;
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getDescription() {
return description;
}
public void setDescription(String d) {
this.description = d;
}
}
| Product |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/versioninfo/SuppressTimestampViaMapperConfig.java | {
"start": 407,
"end": 564
} | interface ____ {
WithProperties toObject(WithProperties object);
@MapperConfig(suppressTimestampInGenerated = true)
| SuppressTimestampViaMapperConfig |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/trace/publisher/TraceEventPublisher.java | {
"start": 1349,
"end": 6354
} | class ____ extends Thread implements ShardedEventPublisher {
private static final String THREAD_NAME = "trace.publisher-";
private static final Logger LOGGER = LoggerFactory.getLogger("com.alibaba.nacos.common.trace.publisher");
private static final int DEFAULT_WAIT_TIME = 60;
private final Map<Class<? extends Event>, Set<Subscriber<? extends Event>>> subscribes = new ConcurrentHashMap<>();
private volatile boolean initialized = false;
private volatile boolean shutdown = false;
private int queueMaxSize = -1;
private BlockingQueue<Event> queue;
private String publisherName;
@Override
public void init(Class<? extends Event> type, int bufferSize) {
this.queueMaxSize = bufferSize;
this.queue = new ArrayBlockingQueue<>(bufferSize);
this.publisherName = type.getSimpleName();
super.setName(THREAD_NAME + this.publisherName);
super.setDaemon(true);
super.start();
initialized = true;
}
@Override
public long currentEventSize() {
return this.queue.size();
}
@Override
public void addSubscriber(Subscriber subscriber) {
addSubscriber(subscriber, subscriber.subscribeType());
}
@Override
public void addSubscriber(Subscriber subscriber, Class<? extends Event> subscribeType) {
subscribes.computeIfAbsent(subscribeType, inputType -> new ConcurrentHashSet<>()).add(subscriber);
}
@Override
public void removeSubscriber(Subscriber subscriber) {
removeSubscriber(subscriber, subscriber.subscribeType());
}
@Override
public void removeSubscriber(Subscriber subscriber, Class<? extends Event> subscribeType) {
subscribes.computeIfPresent(subscribeType, (inputType, subscribers) -> {
subscribers.remove(subscriber);
return subscribers.isEmpty() ? null : subscribers;
});
}
@Override
public boolean publish(Event event) {
checkIsStart();
boolean success = this.queue.offer(event);
if (!success) {
LOGGER.warn("Trace Event Publish failed, event : {}, publish queue size : {}", event, currentEventSize());
}
return true;
}
@Override
public void notifySubscriber(Subscriber subscriber, Event event) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("[NotifyCenter] the {} will received by {}", event, subscriber);
}
final Runnable job = () -> subscriber.onEvent(event);
final Executor executor = subscriber.executor();
if (executor != null) {
executor.execute(job);
} else {
try {
job.run();
} catch (Throwable e) {
LOGGER.error("Event callback exception: ", e);
}
}
}
@Override
public void shutdown() throws NacosException {
this.shutdown = true;
this.queue.clear();
}
@Override
public void run() {
try {
waitSubscriberForInit();
handleEvents();
} catch (Exception e) {
LOGGER.error("Trace Event Publisher {}, stop to handle event due to unexpected exception: ",
this.publisherName, e);
}
}
private void waitSubscriberForInit() {
// To ensure that messages are not lost, enable EventHandler when
// waiting for the first Subscriber to register
for (int waitTimes = DEFAULT_WAIT_TIME; waitTimes > 0; waitTimes--) {
if (shutdown || !subscribes.isEmpty()) {
break;
}
ThreadUtils.sleep(1000L);
}
}
private void handleEvents() {
while (!shutdown) {
try {
final Event event = queue.take();
handleEvent(event);
} catch (InterruptedException e) {
LOGGER.warn("Trace Event Publisher {} take event from queue failed:", this.publisherName, e);
// set the interrupted flag
Thread.currentThread().interrupt();
}
}
}
private void handleEvent(Event event) {
Class<? extends Event> eventType = event.getClass();
Set<Subscriber<? extends Event>> subscribers = subscribes.get(eventType);
if (null == subscribers) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("[NotifyCenter] No subscribers for slow event {}", eventType.getName());
}
return;
}
for (Subscriber subscriber : subscribers) {
notifySubscriber(subscriber, event);
}
}
void checkIsStart() {
if (!initialized) {
throw new IllegalStateException("Publisher does not start");
}
}
public String getStatus() {
return String.format("Publisher %-30s: shutdown=%5s, queue=%7d/%-7d", publisherName, shutdown,
currentEventSize(), queueMaxSize);
}
}
| TraceEventPublisher |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/http/OAuth2ResourceServerBeanDefinitionParserTests.java | {
"start": 46658,
"end": 47123
} | class ____ implements FactoryBean<OpaqueTokenIntrospector> {
private RestOperations rest;
@Override
public OpaqueTokenIntrospector getObject() throws Exception {
return new SpringOpaqueTokenIntrospector("https://idp.example.org", this.rest);
}
@Override
public Class<?> getObjectType() {
return OpaqueTokenIntrospector.class;
}
public void setRest(RestOperations rest) {
this.rest = rest;
}
}
static | OpaqueTokenIntrospectorFactoryBean |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet2/Hamlet.java | {
"start": 173304,
"end": 175524
} | class ____<T extends __> extends EImp<T> implements HamletSpec.UL {
public UL(String name, T parent, EnumSet<EOpt> opts) {
super(name, parent, opts);
}
@Override
public UL<T> $id(String value) {
addAttr("id", value);
return this;
}
@Override
public UL<T> $class(String value) {
addAttr("class", value);
return this;
}
@Override
public UL<T> $title(String value) {
addAttr("title", value);
return this;
}
@Override
public UL<T> $style(String value) {
addAttr("style", value);
return this;
}
@Override
public UL<T> $lang(String value) {
addAttr("lang", value);
return this;
}
@Override
public UL<T> $dir(Dir value) {
addAttr("dir", value);
return this;
}
@Override
public UL<T> $onclick(String value) {
addAttr("onclick", value);
return this;
}
@Override
public UL<T> $ondblclick(String value) {
addAttr("ondblclick", value);
return this;
}
@Override
public UL<T> $onmousedown(String value) {
addAttr("onmousedown", value);
return this;
}
@Override
public UL<T> $onmouseup(String value) {
addAttr("onmouseup", value);
return this;
}
@Override
public UL<T> $onmouseover(String value) {
addAttr("onmouseover", value);
return this;
}
@Override
public UL<T> $onmousemove(String value) {
addAttr("onmousemove", value);
return this;
}
@Override
public UL<T> $onmouseout(String value) {
addAttr("onmouseout", value);
return this;
}
@Override
public UL<T> $onkeypress(String value) {
addAttr("onkeypress", value);
return this;
}
@Override
public UL<T> $onkeydown(String value) {
addAttr("onkeydown", value);
return this;
}
@Override
public UL<T> $onkeyup(String value) {
addAttr("onkeyup", value);
return this;
}
@Override
public LI<UL<T>> li() {
closeAttrs();
return li_(this, false);
}
@Override
public UL<T> li(String cdata) {
return li().__(cdata).__();
}
}
public | UL |
java | quarkusio__quarkus | integration-tests/gradle/src/test/java/io/quarkus/gradle/QuarkusGradleWrapperTestBase.java | {
"start": 573,
"end": 12988
} | class ____ extends QuarkusGradleTestBase {
private static final String GRADLE_WRAPPER_WINDOWS = "gradlew.bat";
private static final String GRADLE_WRAPPER_UNIX = "./gradlew";
private static final String MAVEN_REPO_LOCAL = "maven.repo.local";
private static final String QUARKUS_TEST_GRADLE_WRAPPER_VERSION = "quarkus-test-gradle-wrapper-version";
private Map<String, String> systemProps;
private boolean configurationCacheEnable = true;
private boolean noWatchFs = true;
@Override
protected File getProjectDir(String projectName) {
return getProjectDir(projectName, getTestProjectNameSuffix());
}
/**
* Returns a suffix for a test project directory name based on the requested Gradle wrapper version.
* If a Gradle wrapper version was not configured, the suffix will be null.
* Otherwise, it will be {@code -wrapper-${quarkus-test-gradle-wrapper-version}}.
*
* @return test project directory suffix or null
*/
private static String getTestProjectNameSuffix() {
var wrapperVersion = getRequestedWrapperVersion();
return wrapperVersion == null ? null : "-wrapper-" + wrapperVersion;
}
/**
* Returns configured Gradle wrapper version for a test or null, in case it wasn't configured.
*
* @return configured Gradle wrapper version or null
*/
private static String getRequestedWrapperVersion() {
return System.getProperty(QUARKUS_TEST_GRADLE_WRAPPER_VERSION);
}
protected void setupTestCommand() {
}
/**
* Gradle's configuration cache is enabled by default for all tests. This option can be used to disable the
* configuration test.
*/
protected void gradleConfigurationCache(boolean configurationCacheEnable) {
this.configurationCacheEnable = configurationCacheEnable;
}
/**
* Gradle is run by default with {@code --no-watch-fs} to reduce I/O load during tests. Some tests might run into issues
* with this option.
*/
protected void gradleNoWatchFs(boolean noWatchFs) {
this.noWatchFs = noWatchFs;
}
public BuildResult runGradleWrapper(File projectDir, String... args) throws IOException, InterruptedException {
return runGradleWrapper(false, projectDir, args);
}
public BuildResult runGradleWrapper(boolean expectError, File projectDir, String... args)
throws IOException, InterruptedException {
return runGradleWrapper(expectError, projectDir, true, args);
}
public BuildResult runGradleWrapper(boolean expectError, File projectDir, boolean skipAnalytics, String... args)
throws IOException, InterruptedException {
boolean isInCiPipeline = "true".equals(System.getenv("CI"));
// install a custom version of the wrapper, in case it's configured
installRequestedWrapper(projectDir);
setupTestCommand();
List<String> command = new ArrayList<>();
command.add(getGradleWrapperCommand(projectDir));
addSystemProperties(command);
if (!isInCiPipeline && isDebuggerConnected()) {
command.add("-Dorg.gradle.debug=true");
}
command.add("-Dorg.gradle.console=plain");
if (skipAnalytics) {
command.add("-Dquarkus.analytics.disabled=true");
}
if (configurationCacheEnable) {
command.add("--configuration-cache");
}
command.add("--stacktrace");
if (noWatchFs) {
command.add("--no-watch-fs");
}
command.add("--info");
command.add("--daemon");
command.addAll(Arrays.asList(args));
File logOutput = new File(projectDir, "command-output.log");
logCommandLine(command);
ProcessBuilder pb = new ProcessBuilder()
.directory(projectDir)
.command(command)
.redirectInput(ProcessBuilder.Redirect.INHERIT)
// Should prevent "fragmented" output (parts of stdout and stderr interleaved)
.redirectErrorStream(true);
if (System.getenv("GRADLE_JAVA_HOME") != null) {
// JAVA_HOME for Gradle explicitly configured.
pb.environment().put("JAVA_HOME", System.getenv("GRADLE_JAVA_HOME"));
} else if (System.getenv("JAVA_HOME") == null || System.getenv("JAVA_HOME").isEmpty()) {
// This helps running the tests in IntelliJ w/o configuring an explicit JAVA_HOME env var.
pb.environment().put("JAVA_HOME", System.getProperty("java.home"));
}
Process p = pb.start();
Thread outputPuller = new Thread(new LogRedirectAndStopper(p, logOutput, !isInCiPipeline));
outputPuller.setDaemon(true);
outputPuller.start();
boolean done;
if (!isInCiPipeline && isDebuggerConnected()) {
p.waitFor();
done = true;
} else {
//long timeout for native tests
//that may also need to download docker
done = p.waitFor(10, TimeUnit.MINUTES);
}
if (!done) {
destroyProcess(p);
}
outputPuller.interrupt();
outputPuller.join();
final BuildResult commandResult = BuildResult.of(logOutput);
int exitCode = p.exitValue();
// The test failed, if the Gradle build exits with != 0 and the tests expects no failure, or if the test
// expects a failure and the exit code is 0.
if (expectError == (exitCode == 0)) {
if (isInCiPipeline) {
// Only print the output, if the test does not expect a failure.
printCommandOutput(projectDir, command, commandResult, exitCode);
}
// Fail hard, if the test does not expect a failure.
Assertions.fail("Gradle build failed with exit code %d", exitCode);
}
return commandResult;
}
private static void logCommandLine(List<String> command) {
System.out.println("$ " + String.join(" ", command));
}
private void installRequestedWrapper(File projectDir) {
String wrapperVersion = getRequestedWrapperVersion();
if (wrapperVersion == null) {
// no specific version was configured, the integration-test/gradle one will be used
return;
}
final String defaultWrapper = getGradleWrapperCommand(projectDir);
final List<String> command = List.of(defaultWrapper, "wrapper", "--gradle-version=" + wrapperVersion);
logCommandLine(command);
final ProcessBuilder pb = new ProcessBuilder()
.directory(projectDir)
.command(command)
.redirectInput(ProcessBuilder.Redirect.INHERIT)
// Should prevent "fragmented" output (parts of stdout and stderr interleaved)
.redirectErrorStream(true);
try {
pb.start().waitFor();
} catch (Exception e) {
throw new RuntimeException("Failed to install Gradle wrapper", e);
}
assertGradleVersion(projectDir, wrapperVersion);
}
private void assertGradleVersion(File projectDir, String expectedGradleVersion) {
final String wrapper = getGradleWrapperCommand(projectDir);
final List<String> command = List.of(wrapper, "--version");
logCommandLine(command);
File output = new File(projectDir, "gradle-version.log");
final ProcessBuilder pb = new ProcessBuilder()
.directory(projectDir)
.command(command)
.redirectInput(ProcessBuilder.Redirect.INHERIT)
// Should prevent "fragmented" output (parts of stdout and stderr interleaved)
.redirectErrorStream(true)
.redirectOutput(output);
try {
pb.start().waitFor();
} catch (Exception e) {
throw new RuntimeException("Failed to install Gradle wrapper", e);
}
final String versionOutput;
try {
versionOutput = Files.readString(output.toPath());
} catch (IOException e) {
throw new UncheckedIOException(e);
}
System.out.println(versionOutput);
assertThat(versionOutput).contains("Gradle " + expectedGradleVersion);
}
protected void setSystemProperty(String name, String value) {
if (systemProps == null) {
systemProps = new HashMap<>();
}
systemProps.put(name, value);
}
/**
* Returns Gradle wrapper path for a given project directory.
* If the project directory contains a wrapper, the project wrapper will be preferred.
* Otherwise, integration-tests/gradle wrapper will be returned.
*
* @param projectDir project directory
* @return Gradle wrapper path
*/
private String getGradleWrapperCommand(File projectDir) {
File wrapper = null;
if (projectDir != null) {
wrapper = new File(projectDir, getGradleWrapperName());
}
if (wrapper == null || !wrapper.exists()) {
wrapper = new File(getGradleWrapperName());
}
return wrapper.getAbsolutePath();
}
private String getGradleWrapperName() {
if (System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("windows")) {
return GRADLE_WRAPPER_WINDOWS;
}
return GRADLE_WRAPPER_UNIX;
}
private void addSystemProperties(List<String> args) {
if (systemProps != null) {
systemProps.entrySet().stream().map(e -> toPropertyArg(e.getKey(), e.getValue())).forEach(args::add);
}
final String mavenRepoLocal = System.getProperty(MAVEN_REPO_LOCAL);
if (mavenRepoLocal != null) {
args.add(toPropertyArg(MAVEN_REPO_LOCAL, mavenRepoLocal));
}
}
private static String toPropertyArg(String name, String value) {
return "-D" + name + "=" + value;
}
private void printCommandOutput(File projectDir, List<String> command, BuildResult commandResult, int exitCode) {
System.err.println(
"Command: " + String.join(" ", command) + " ran from: " + projectDir.getAbsolutePath()
+ " failed with exit code: " + exitCode + " and the following output:");
System.err.println(commandResult.getOutput());
}
/**
* Try to destroy the process normally a few times
* and resort to forceful destruction if necessary
*/
private static void destroyProcess(Process wrapperProcess) {
wrapperProcess.destroy();
int i = 0;
while (i++ < 10) {
try {
Thread.sleep(500);
} catch (InterruptedException ignored) {
}
if (!wrapperProcess.isAlive()) {
break;
}
}
if (wrapperProcess.isAlive()) {
wrapperProcess.destroyForcibly();
}
}
private static boolean isDebuggerConnected() {
return ManagementFactory.getRuntimeMXBean().getInputArguments().toString().contains("jdwp");
}
private record LogRedirectAndStopper(Process process, File targetFile, Boolean forwardToStdOut) implements Runnable {
@Override
public void run() {
try (BufferedReader stdOutReader = process.inputReader();
FileWriter fw = new FileWriter(targetFile);
BufferedWriter bw = new BufferedWriter(fw)) {
int errorCount = 0;
while (!Thread.interrupted()) {
String line = stdOutReader.readLine();
if (line == null) {
break;
}
bw.write(line);
bw.newLine();
if (forwardToStdOut) {
System.out.println(line);
}
if (line.contains("Build failure: Build failed due to errors")) {
errorCount++;
if (errorCount >= 3) {
process.destroyForcibly();
break;
}
}
}
} catch (IOException ignored) {
// ignored
}
}
}
}
| QuarkusGradleWrapperTestBase |
java | google__auto | value/src/test/java/com/google/auto/value/processor/TypeVariablesTest.java | {
"start": 3653,
"end": 3763
} | class ____<T extends Comparable<T>, U> {
abstract Map<T, ? extends U> getFoo();
}
abstract static | Source3 |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/EqualsHashCodeTest.java | {
"start": 2287,
"end": 2382
} | class ____ {}")
.addSourceLines(
"Test.java",
"""
| Super |
java | micronaut-projects__micronaut-core | http-client/src/test/groovy/io/micronaut/http/client/docs/annotation/retry/PetClient.java | {
"start": 1008,
"end": 1128
} | interface ____ extends PetOperations {
@Override
Mono<Pet> save(String name, int age);
}
// end::class[]
| PetClient |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/ShowFunctionsOperation.java | {
"start": 1748,
"end": 4426
} | enum ____ {
USER,
ALL
}
private final FunctionScope functionScope;
private final @Nullable String databaseName;
public ShowFunctionsOperation(@Nullable String catalogName, @Nullable String databaseName) {
// "SHOW FUNCTIONS" default is ALL scope
this(FunctionScope.ALL, catalogName, databaseName, null);
}
public ShowFunctionsOperation(
FunctionScope functionScope,
@Nullable String catalogName,
@Nullable String databaseName,
@Nullable ShowLikeOperator likeOp) {
this(functionScope, null, catalogName, databaseName, likeOp);
}
public ShowFunctionsOperation(
FunctionScope functionScope,
@Nullable String preposition,
@Nullable String catalogName,
@Nullable String databaseName,
@Nullable ShowLikeOperator likeOp) {
super(catalogName, preposition, likeOp);
this.functionScope = functionScope;
this.databaseName = databaseName;
}
@Override
protected Collection<String> retrieveDataForTableResult(Context ctx) {
switch (functionScope) {
case USER:
if (preposition == null) {
return Arrays.asList(ctx.getFunctionCatalog().getUserDefinedFunctions());
}
return ctx
.getFunctionCatalog()
.getUserDefinedFunctions(catalogName, databaseName)
.stream()
.map(FunctionIdentifier::getFunctionName)
.collect(Collectors.toList());
case ALL:
if (preposition == null) {
return Arrays.asList(ctx.getFunctionCatalog().getFunctions());
}
return Arrays.asList(
ctx.getFunctionCatalog().getFunctions(catalogName, databaseName));
default:
throw new UnsupportedOperationException(
String.format(
"SHOW FUNCTIONS with %s scope is not supported.", databaseName));
}
}
@Override
protected String getOperationName() {
return functionScope == FunctionScope.ALL ? "SHOW FUNCTIONS" : "SHOW USER FUNCTIONS";
}
@Override
protected String getColumnName() {
return "function name";
}
@Override
public String getPrepositionSummaryString() {
if (databaseName == null) {
return super.getPrepositionSummaryString();
}
return super.getPrepositionSummaryString() + "." + databaseName;
}
}
| FunctionScope |
java | spring-projects__spring-framework | spring-jms/src/main/java/org/springframework/jms/connection/SessionProxy.java | {
"start": 1046,
"end": 1327
} | interface ____ extends Session {
/**
* Return the target Session of this proxy.
* <p>This will typically be the native provider Session
* or a wrapper from a session pool.
* @return the underlying Session (never {@code null})
*/
Session getTargetSession();
}
| SessionProxy |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/collection_in_constructor/Store2.java | {
"start": 771,
"end": 1748
} | class ____ {
private final Integer id;
private final List<Clerk> clerks;
private final List<Aisle> aisles;
public Store2(Integer id, List<Clerk> clerks, List<Aisle> aisles) {
super();
this.id = id;
this.clerks = clerks;
this.aisles = aisles;
}
public Integer getId() {
return id;
}
public List<Clerk> getClerks() {
return clerks;
}
public List<Aisle> getAisles() {
return aisles;
}
@Override
public int hashCode() {
return Objects.hash(clerks, id, aisles);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof Store2)) {
return false;
}
Store2 other = (Store2) obj;
return Objects.equals(clerks, other.clerks) && Objects.equals(id, other.id) && Objects.equals(aisles, other.aisles);
}
@Override
public String toString() {
return "Store2 [id=" + id + ", clerks=" + clerks + ", aisles=" + aisles + "]";
}
}
| Store2 |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/response/IbmWatsonxErrorResponseEntity.java | {
"start": 858,
"end": 2047
} | class ____ extends UnifiedChatCompletionErrorResponse {
private static final String WATSONX_ERROR = "watsonx_error";
public static final UnifiedChatCompletionErrorParserContract WATSONX_ERROR_PARSER = UnifiedChatCompletionErrorResponseUtils
.createErrorParserWithGenericParser(IbmWatsonxErrorResponseEntity::doParse);
private IbmWatsonxErrorResponseEntity(String errorMessage) {
super(errorMessage, WATSONX_ERROR, null, null);
}
public static UnifiedChatCompletionErrorResponse fromResponse(HttpResult result) {
return WATSONX_ERROR_PARSER.parse(result);
}
private static Optional<UnifiedChatCompletionErrorResponse> doParse(XContentParser parser) throws IOException {
var responseMap = parser.map();
@SuppressWarnings("unchecked")
var error = (Map<String, Object>) responseMap.get("error");
if (error != null) {
var message = (String) error.get("message");
return Optional.of(new IbmWatsonxErrorResponseEntity(Objects.requireNonNullElse(message, "")));
}
return Optional.of(UnifiedChatCompletionErrorResponse.UNDEFINED_ERROR);
}
}
| IbmWatsonxErrorResponseEntity |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/transaction/programmatic/ProgrammaticTxMgmtTests.java | {
"start": 2615,
"end": 9166
} | class ____ {
String sqlScriptEncoding;
JdbcTemplate jdbcTemplate;
String methodName;
@Autowired
ApplicationContext applicationContext;
@Autowired
void setDataSource(DataSource dataSource) {
this.jdbcTemplate = new JdbcTemplate(dataSource);
}
@BeforeEach
void trackTestName(TestInfo testInfo) {
this.methodName = testInfo.getTestMethod().get().getName();
}
@BeforeTransaction
void beforeTransaction() {
deleteFromTables("user");
executeSqlScript("classpath:/org/springframework/test/context/jdbc/data.sql", false);
}
@AfterTransaction
void afterTransaction() {
switch (this.methodName) {
case "commitTxAndStartNewTx", "commitTxButDoNotStartNewTx" -> assertUsers("Dogbert");
case "rollbackTxAndStartNewTx", "rollbackTxButDoNotStartNewTx", "startTxWithExistingTransaction" ->
assertUsers("Dilbert");
case "rollbackTxAndStartNewTxWithDefaultCommitSemantics" -> assertUsers("Dilbert", "Dogbert");
default -> fail("missing 'after transaction' assertion for test method: " + this.methodName);
}
}
@Test
@Transactional(propagation = Propagation.NOT_SUPPORTED)
void isActiveWithNonExistentTransactionContext() {
assertThat(TestTransaction.isActive()).isFalse();
}
@Test
@Transactional(propagation = Propagation.NOT_SUPPORTED)
void flagForRollbackWithNonExistentTransactionContext() {
assertThatIllegalStateException().isThrownBy(TestTransaction::flagForRollback);
}
@Test
@Transactional(propagation = Propagation.NOT_SUPPORTED)
void flagForCommitWithNonExistentTransactionContext() {
assertThatIllegalStateException().isThrownBy(TestTransaction::flagForCommit);
}
@Test
@Transactional(propagation = Propagation.NOT_SUPPORTED)
void isFlaggedForRollbackWithNonExistentTransactionContext() {
assertThatIllegalStateException().isThrownBy(TestTransaction::isFlaggedForRollback);
}
@Test
@Transactional(propagation = Propagation.NEVER)
void startTxWithNonExistentTransactionContext() {
assertThatIllegalStateException().isThrownBy(TestTransaction::start);
}
@Test
void startTxWithExistingTransaction() {
assertThatIllegalStateException().isThrownBy(TestTransaction::start);
}
@Test
@Transactional(propagation = Propagation.NEVER)
void endTxWithNonExistentTransactionContext() {
assertThatIllegalStateException().isThrownBy(TestTransaction::end);
}
@Test
void commitTxAndStartNewTx() {
assertThatTransaction().isActive();
assertThat(TestTransaction.isActive()).isTrue();
assertUsers("Dilbert");
deleteFromTables("user");
assertUsers();
// Commit
TestTransaction.flagForCommit();
assertThat(TestTransaction.isFlaggedForRollback()).isFalse();
TestTransaction.end();
assertThatTransaction().isNotActive();
assertThat(TestTransaction.isActive()).isFalse();
assertUsers();
executeSqlScript("classpath:/org/springframework/test/context/jdbc/data-add-dogbert.sql", false);
assertUsers("Dogbert");
TestTransaction.start();
assertThatTransaction().isActive();
assertThat(TestTransaction.isActive()).isTrue();
}
@Test
void commitTxButDoNotStartNewTx() {
assertThatTransaction().isActive();
assertThat(TestTransaction.isActive()).isTrue();
assertUsers("Dilbert");
deleteFromTables("user");
assertUsers();
// Commit
TestTransaction.flagForCommit();
assertThat(TestTransaction.isFlaggedForRollback()).isFalse();
TestTransaction.end();
assertThat(TestTransaction.isActive()).isFalse();
assertThatTransaction().isNotActive();
assertUsers();
executeSqlScript("classpath:/org/springframework/test/context/jdbc/data-add-dogbert.sql", false);
assertUsers("Dogbert");
}
@Test
void rollbackTxAndStartNewTx() {
assertThatTransaction().isActive();
assertThat(TestTransaction.isActive()).isTrue();
assertUsers("Dilbert");
deleteFromTables("user");
assertUsers();
// Rollback (automatically)
assertThat(TestTransaction.isFlaggedForRollback()).isTrue();
TestTransaction.end();
assertThat(TestTransaction.isActive()).isFalse();
assertThatTransaction().isNotActive();
assertUsers("Dilbert");
// Start new transaction with default rollback semantics
TestTransaction.start();
assertThatTransaction().isActive();
assertThat(TestTransaction.isFlaggedForRollback()).isTrue();
assertThat(TestTransaction.isActive()).isTrue();
executeSqlScript("classpath:/org/springframework/test/context/jdbc/data-add-dogbert.sql", false);
assertUsers("Dilbert", "Dogbert");
}
@Test
void rollbackTxButDoNotStartNewTx() {
assertThatTransaction().isActive();
assertThat(TestTransaction.isActive()).isTrue();
assertUsers("Dilbert");
deleteFromTables("user");
assertUsers();
// Rollback (automatically)
assertThat(TestTransaction.isFlaggedForRollback()).isTrue();
TestTransaction.end();
assertThat(TestTransaction.isActive()).isFalse();
assertThatTransaction().isNotActive();
assertUsers("Dilbert");
}
@Test
@Commit
void rollbackTxAndStartNewTxWithDefaultCommitSemantics() {
assertThatTransaction().isActive();
assertThat(TestTransaction.isActive()).isTrue();
assertUsers("Dilbert");
deleteFromTables("user");
assertUsers();
// Rollback
TestTransaction.flagForRollback();
assertThat(TestTransaction.isFlaggedForRollback()).isTrue();
TestTransaction.end();
assertThat(TestTransaction.isActive()).isFalse();
assertThatTransaction().isNotActive();
assertUsers("Dilbert");
// Start new transaction with default commit semantics
TestTransaction.start();
assertThatTransaction().isActive();
assertThat(TestTransaction.isFlaggedForRollback()).isFalse();
assertThat(TestTransaction.isActive()).isTrue();
executeSqlScript("classpath:/org/springframework/test/context/jdbc/data-add-dogbert.sql", false);
assertUsers("Dilbert", "Dogbert");
}
protected int deleteFromTables(String... names) {
return JdbcTestUtils.deleteFromTables(this.jdbcTemplate, names);
}
protected void executeSqlScript(String sqlResourcePath, boolean continueOnError) throws DataAccessException {
Resource resource = this.applicationContext.getResource(sqlResourcePath);
new ResourceDatabasePopulator(continueOnError, false, this.sqlScriptEncoding, resource).execute(jdbcTemplate.getDataSource());
}
private void assertUsers(String... users) {
List<String> expected = Arrays.asList(users);
Collections.sort(expected);
List<String> actual = jdbcTemplate.queryForList("select name from user", String.class);
Collections.sort(actual);
assertThat(actual).as("Users in database;").isEqualTo(expected);
}
@Configuration
static | ProgrammaticTxMgmtTests |
java | apache__flink | flink-dstl/flink-dstl-dfs/src/main/java/org/apache/flink/changelog/fs/StateChangeUploadScheduler.java | {
"start": 6201,
"end": 7922
} | class ____ {
final Collection<StateChangeSet> changeSets;
final Consumer<List<UploadResult>> successCallback;
final BiConsumer<List<SequenceNumber>, Throwable> failureCallback;
final AtomicBoolean finished = new AtomicBoolean();
public UploadTask(
Collection<StateChangeSet> changeSets,
Consumer<List<UploadResult>> successCallback,
BiConsumer<List<SequenceNumber>, Throwable> failureCallback) {
this.changeSets = new ArrayList<>(changeSets);
this.successCallback = successCallback;
this.failureCallback = failureCallback;
}
public void complete(List<UploadResult> results) {
if (finished.compareAndSet(false, true)) {
successCallback.accept(results);
}
}
public void fail(Throwable error) {
if (finished.compareAndSet(false, true)) {
failureCallback.accept(
changeSets.stream()
.map(StateChangeSet::getSequenceNumber)
.collect(toList()),
error);
}
}
public long getSize() {
long size = 0;
for (StateChangeSet set : changeSets) {
size += set.getSize();
}
return size;
}
public Collection<StateChangeSet> getChangeSets() {
return changeSets;
}
@Override
public String toString() {
return "changeSets=" + changeSets;
}
public boolean isFinished() {
return finished.get();
}
}
}
| UploadTask |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/PatternMatchingInstanceofTest.java | {
"start": 7597,
"end": 8010
} | class ____ {
void test(Object o) {
if (o.hashCode() > 0) {
Integer test = (Integer) o;
test(test);
}
}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void differentVariable() {
helper
.addInputLines(
"Test.java",
"""
| Test |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/create/MySqlCreateProcedureTest8.java | {
"start": 1196,
"end": 7145
} | class ____ extends MysqlTest {
/**
* DECLARE handler_type HANDLER FOR condition_value[,...] sp_statement handler_type: CONTINUE | EXIT condition_value: SQLSTATE [VALUE] sqlstate_value | condition_name | SQLWARNING | NOT FOUND | SQLEXCEPTION | mysql_error_code
*
* @throws Exception
*/
public void test_0() throws Exception {
String sql = "create or replace procedure test_cursor (in param int(10),out result varchar(90)) "
+ " begin"
+ " declare name varchar(20);"
+ " declare pass varchar(20);"
+ " declare done int;"
+ " declare cur_test CURSOR for select user_name,user_pass from test;"
+ " declare continue handler FOR SQLSTATE '02000' SET done = 1;"
+ " if param then"
+ " select concat_ws(',',user_name,user_pass) into result from test.users where id=param;"
+ " else"
+ " open cur_test;"
+ " repeat"
+ " fetch cur_test into name, pass;"
+ " select concat_ws(',',result,name,pass) into result;"
+ " until done end repeat;"
+ " close cur_test;"
+ " end if;"
+ " end;";
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
SQLStatement stmt = statementList.get(0);
System.out.println(SQLUtils.toSQLString(stmt, JdbcConstants.MYSQL));
assertEquals(1, statementList.size());
System.out.println(stmt);
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(JdbcConstants.MYSQL);
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
assertEquals(2, visitor.getTables().size());
assertEquals(5, visitor.getColumns().size());
assertEquals(1, visitor.getConditions().size());
}
public void test_1() throws Exception {
String sql = "create or replace procedure sp_name(level int,age int)" +
" begin" +
" declare continue handler FOR SQLSTATE '02000' SET done = 1;" +
" end";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement statemen = statementList.get(0);
assertEquals(1, statementList.size());
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
statemen.accept(visitor);
assertEquals(0, visitor.getTables().size());
assertEquals(0, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
}
public void test_2() throws Exception {
String sql = "create or replace procedure sp_name(level int,age int)" +
" begin" +
" declare continue handler FOR SQLEXCEPTION,SQLWARNING SET done = 1;" +
" end";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement statemen = statementList.get(0);
assertEquals(1, statementList.size());
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
statemen.accept(visitor);
assertEquals(0, visitor.getTables().size());
assertEquals(0, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
}
public void test_3() throws Exception {
String sql = "create or replace procedure sp_name(level int,age int)" +
" begin" +
" declare continue handler FOR 1002 SET done = 1;" +
" end";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement statemen = statementList.get(0);
assertEquals(1, statementList.size());
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
statemen.accept(visitor);
assertEquals(0, visitor.getTables().size());
assertEquals(0, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
}
public void test_4() throws Exception {
String sql = "create or replace procedure sp_name(level int,age int)" +
" begin" +
" declare continue handler FOR SQLWARNING begin set done = 1; end" +
" end";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement statemen = statementList.get(0);
assertEquals(1, statementList.size());
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
statemen.accept(visitor);
assertEquals(0, visitor.getTables().size());
assertEquals(0, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
}
public void test_5() throws Exception {
String sql = "create or replace procedure sp_name(level int,age int)" +
" begin" +
" declare continue handler FOR SQLWARNING begin set done = 1; end;" +
" end";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement statemen = statementList.get(0);
assertEquals(1, statementList.size());
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
statemen.accept(visitor);
assertEquals(0, visitor.getTables().size());
assertEquals(0, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
}
}
| MySqlCreateProcedureTest8 |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/PrimitiveTest.java | {
"start": 251,
"end": 3248
} | class ____ extends TestCase {
public void test_0() throws Exception {
StringWriter out = new StringWriter();
JSONSerializer.write(out, (byte) 1);
Assert.assertEquals("1", out.toString());
}
public void test_0_s() throws Exception {
SerializeWriter out = new SerializeWriter();
JSONSerializer.write(out, (byte) 1);
Assert.assertEquals("1", out.toString());
}
public void test_1() throws Exception {
StringWriter out = new StringWriter();
JSONSerializer.write(out, (short) 1);
Assert.assertEquals("1", out.toString());
}
public void test_1_s() throws Exception {
SerializeWriter out = new SerializeWriter();
JSONSerializer.write(out, (short) 1);
Assert.assertEquals("1", out.toString());
}
public void test_2() throws Exception {
StringWriter out = new StringWriter();
JSONSerializer.write(out, true);
Assert.assertEquals("true", out.toString());
}
public void test_2_s() throws Exception {
SerializeWriter out = new SerializeWriter();
JSONSerializer.write(out, true);
Assert.assertEquals("true", out.toString());
}
public void test_3() throws Exception {
StringWriter out = new StringWriter();
JSONSerializer.write(out, false);
Assert.assertEquals("false", out.toString());
}
public void test_3_s() throws Exception {
SerializeWriter out = new SerializeWriter();
JSONSerializer.write(out, false);
Assert.assertEquals("false", out.toString());
}
public void test_4() throws Exception {
StringWriter out = new StringWriter();
JSONSerializer.write(out, new boolean[] { true, false });
Assert.assertEquals("[true,false]", out.toString());
}
public void test_4_s() throws Exception {
SerializeWriter out = new SerializeWriter();
JSONSerializer.write(out, new boolean[] { true, false });
Assert.assertEquals("[true,false]", out.toString());
}
public void test_5() throws Exception {
StringWriter out = new StringWriter();
JSONSerializer.write(out, new boolean[] {});
Assert.assertEquals("[]", out.toString());
}
public void test_5_s() throws Exception {
SerializeWriter out = new SerializeWriter();
JSONSerializer.write(out, new boolean[] {});
Assert.assertEquals("[]", out.toString());
}
public void test_6() throws Exception {
StringWriter out = new StringWriter();
JSONSerializer.write(out, new boolean[] { true, false, true });
Assert.assertEquals("[true,false,true]", out.toString());
}
public void test_6_s() throws Exception {
SerializeWriter out = new SerializeWriter();
JSONSerializer.write(out, new boolean[] { true, false, true });
Assert.assertEquals("[true,false,true]", out.toString());
}
}
| PrimitiveTest |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/time/FastDatePrinter.java | {
"start": 9127,
"end": 10490
} | class ____ implements NumberRule {
// Note: This is final to avoid Spotbugs CT_CONSTRUCTOR_THROW
private final int field;
private final int size;
/**
* Constructs an instance of {@link PaddedNumberField}.
*
* @param field the field.
* @param size size of the output field.
*/
PaddedNumberField(final int field, final int size) {
if (size < 3) {
// Should use UnpaddedNumberField or TwoDigitNumberField.
throw new IllegalArgumentException();
}
this.field = field;
this.size = size;
}
/**
* {@inheritDoc}
*/
@Override
public void appendTo(final Appendable buffer, final Calendar calendar) throws IOException {
appendTo(buffer, calendar.get(field));
}
/**
* {@inheritDoc}
*/
@Override
public /* final */ void appendTo(final Appendable buffer, final int value) throws IOException {
// Checkstyle complains about redundant qualifier
appendFullDigits(buffer, value, size);
}
/**
* {@inheritDoc}
*/
@Override
public int estimateLength() {
return size;
}
}
/**
* Inner | PaddedNumberField |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/processor/DeadlockBreakupProcessor.java | {
"start": 1520,
"end": 2270
} | class ____ implements ExecNodeGraphProcessor {
@Override
public ExecNodeGraph process(ExecNodeGraph execGraph, ProcessorContext context) {
if (!execGraph.getRootNodes().stream().allMatch(r -> r instanceof BatchExecNode)) {
throw new TableException("Only BatchExecNode DAG are supported now.");
}
InputPriorityConflictResolver resolver =
new InputPriorityConflictResolver(
execGraph.getRootNodes(),
InputProperty.DamBehavior.END_INPUT,
StreamExchangeMode.BATCH,
context.getPlanner().getTableConfig());
resolver.detectAndResolve();
return execGraph;
}
}
| DeadlockBreakupProcessor |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/service/invoker/UrlArgumentResolver.java | {
"start": 1061,
"end": 1667
} | class ____ implements HttpServiceArgumentResolver {
@Override
public boolean resolve(
@Nullable Object argument, MethodParameter parameter, HttpRequestValues.Builder requestValues) {
parameter = parameter.nestedIfOptional();
if (!parameter.getNestedParameterType().equals(URI.class)) {
return false;
}
if (argument instanceof Optional<?> optionalValue) {
argument = optionalValue.orElse(null);
}
if (argument == null) {
Assert.isTrue(parameter.isOptional(), "URI is required");
return true;
}
requestValues.setUri((URI) argument);
return true;
}
}
| UrlArgumentResolver |
java | google__truth | core/src/main/java/com/google/common/truth/ExpectFailure.java | {
"start": 9479,
"end": 9614
} | interface ____<S extends Subject, A> {
void invokeAssertion(SimpleSubjectBuilder<S, A> whenTesting);
}
}
| SimpleSubjectBuilderCallback |
java | apache__spark | examples/src/main/java/org/apache/spark/examples/ml/JavaQuantileDiscretizerExample.java | {
"start": 1321,
"end": 2612
} | class ____ {
public static void main(String[] args) {
SparkSession spark = SparkSession
.builder()
.appName("JavaQuantileDiscretizerExample")
.getOrCreate();
// $example on$
List<Row> data = Arrays.asList(
RowFactory.create(0, 18.0),
RowFactory.create(1, 19.0),
RowFactory.create(2, 8.0),
RowFactory.create(3, 5.0),
RowFactory.create(4, 2.2)
);
StructType schema = new StructType(new StructField[]{
new StructField("id", DataTypes.IntegerType, false, Metadata.empty()),
new StructField("hour", DataTypes.DoubleType, false, Metadata.empty())
});
Dataset<Row> df = spark.createDataFrame(data, schema);
// $example off$
// Output of QuantileDiscretizer for such small datasets can depend on the number of
// partitions. Here we force a single partition to ensure consistent results.
// Note this is not necessary for normal use cases
df = df.repartition(1);
// $example on$
QuantileDiscretizer discretizer = new QuantileDiscretizer()
.setInputCol("hour")
.setOutputCol("result")
.setNumBuckets(3);
Dataset<Row> result = discretizer.fit(df).transform(df);
result.show(false);
// $example off$
spark.stop();
}
}
| JavaQuantileDiscretizerExample |
java | apache__flink | flink-core/src/test/java/org/apache/flink/core/fs/RefCountedFileWithStreamTest.java | {
"start": 1452,
"end": 4643
} | class ____ {
@TempDir private static java.nio.file.Path tempFolder;
@Test
void writeShouldSucceed() throws IOException {
byte[] content = bytesOf("hello world");
final File newFile =
new File(
TempDirUtils.newFolder(tempFolder).toPath().toFile(),
".tmp_" + UUID.randomUUID());
final OutputStream out =
Files.newOutputStream(newFile.toPath(), StandardOpenOption.CREATE_NEW);
final RefCountedFileWithStream fileUnderTest1 =
RefCountedFileWithStream.newFile(newFile, out);
fileUnderTest1.write(content, 0, content.length);
fileUnderTest1.closeStream();
assertThat(fileUnderTest1.getLength()).isEqualTo(content.length);
}
@Test
void closeShouldNotReleaseReference() throws IOException {
Path path = TempDirUtils.newFolder(tempFolder).toPath();
getClosedRefCountedFileWithContent("hello world", path);
try (Stream<Path> files = Files.list(path)) {
assertThat(files).hasSize(1);
}
}
@Test
void writeAfterCloseShouldThrowException() {
assertThatExceptionOfType(IOException.class)
.isThrownBy(
() -> {
final RefCountedFileWithStream fileUnderTest =
getClosedRefCountedFileWithContent(
"hello world",
TempDirUtils.newFolder(tempFolder).toPath());
byte[] content = bytesOf("Hello Again");
fileUnderTest.write(content, 0, content.length);
});
}
@Test
void flushAfterCloseShouldThrowException() {
assertThatExceptionOfType(IOException.class)
.isThrownBy(
() -> {
final RefCountedFileWithStream fileUnderTest =
getClosedRefCountedFileWithContent(
"hello world",
TempDirUtils.newFolder(tempFolder).toPath());
fileUnderTest.flush();
});
}
// ------------------------------------- Utilities -------------------------------------
private RefCountedFileWithStream getClosedRefCountedFileWithContent(
String content, Path tempFolder) throws IOException {
byte[] content1 = bytesOf(content);
final File newFile = new File(tempFolder.toFile(), ".tmp_" + UUID.randomUUID());
final OutputStream out =
Files.newOutputStream(newFile.toPath(), StandardOpenOption.CREATE_NEW);
final RefCountedFileWithStream fileUnderTest =
RefCountedFileWithStream.newFile(newFile, out);
fileUnderTest.write(content1, 0, content1.length);
fileUnderTest.closeStream();
return fileUnderTest;
}
private static byte[] bytesOf(String str) {
return str.getBytes(StandardCharsets.UTF_8);
}
}
| RefCountedFileWithStreamTest |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/context/support/AnnotationConfigWebApplicationContext.java | {
"start": 3453,
"end": 3853
} | class ____ also be directly instantiated and injected into Spring's
* {@code DispatcherServlet} or {@code ContextLoaderListener} when using the
* {@link org.springframework.web.WebApplicationInitializer WebApplicationInitializer}
* code-based alternative to {@code web.xml}. See its Javadoc for details and usage examples.
*
* <p>Unlike {@link XmlWebApplicationContext}, no default configuration | may |
java | apache__kafka | server-common/src/test/java/org/apache/kafka/server/util/MockTime.java | {
"start": 963,
"end": 1268
} | class ____ `org.apache.kafka.common.utils.MockTime`:
*
* 1. This has an associated scheduler instance for managing background tasks in a deterministic way.
* 2. This doesn't support the `auto-tick` functionality as it interacts badly with the current implementation of `MockScheduler`.
*/
public final | and |
java | apache__camel | components/camel-mllp/src/test/java/org/apache/camel/component/mllp/MllpAcknowledgementExceptionTest.java | {
"start": 4288,
"end": 5437
} | class ____ extends MllpAcknowledgementException {
MllpAcknowledgementExceptionStub(String message, boolean logPhi) {
super(message, logPhi);
}
MllpAcknowledgementExceptionStub(String message, Throwable cause, boolean logPhi) {
super(message, cause, logPhi);
}
MllpAcknowledgementExceptionStub(String message, byte[] hl7Message, boolean logPhi) {
super(message, hl7Message, logPhi);
}
MllpAcknowledgementExceptionStub(String message, byte[] hl7Message, byte[] hl7Acknowledgement, boolean logPhi) {
super(message, hl7Message, hl7Acknowledgement, logPhi);
}
MllpAcknowledgementExceptionStub(String message, byte[] hl7Message, Throwable cause, boolean logPhi) {
super(message, hl7Message, cause, logPhi);
}
MllpAcknowledgementExceptionStub(String message, byte[] hl7Message, byte[] hl7Acknowledgement, Throwable cause,
boolean logPhi) {
super(message, hl7Message, hl7Acknowledgement, cause, logPhi);
}
}
}
| MllpAcknowledgementExceptionStub |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java | {
"start": 10343,
"end": 10670
} | class ____ values in the order they appear in the {@code .csv} files that power it. A real
* index emits documents a fair random order. Multi-shard and multi-node tests doubly so.</li>
* </ul>
*/
// @TestLogging(value = "org.elasticsearch.xpack.esql:TRACE,org.elasticsearch.compute:TRACE", reason = "debug")
public | emits |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/LogicalUnnestRule.java | {
"start": 2429,
"end": 9127
} | class ____ extends RelRule<LogicalUnnestRule.LogicalUnnestRuleConfig> {
public static final LogicalUnnestRule INSTANCE = LogicalUnnestRuleConfig.DEFAULT.toRule();
public LogicalUnnestRule(LogicalUnnestRule.LogicalUnnestRuleConfig config) {
super(config);
}
public boolean matches(RelOptRuleCall call) {
LogicalCorrelate join = call.rel(0);
RelNode right = getRel(join.getRight());
if (right instanceof LogicalFilter) {
LogicalFilter logicalFilter = (LogicalFilter) right;
RelNode relNode = getRel(logicalFilter.getInput());
if (relNode instanceof Uncollect) {
return true;
} else if (relNode instanceof LogicalProject) {
LogicalProject logicalProject = (LogicalProject) relNode;
relNode = getRel(logicalProject.getInput());
return relNode instanceof Uncollect;
}
} else if (right instanceof LogicalProject) {
LogicalProject logicalProject = (LogicalProject) right;
RelNode relNode = getRel(logicalProject.getInput());
return relNode instanceof Uncollect;
} else {
return right instanceof Uncollect;
}
return false;
}
public void onMatch(RelOptRuleCall call) {
LogicalCorrelate correlate = call.rel(0);
RelNode outer = getRel(correlate.getLeft());
RelNode array = getRel(correlate.getRight());
// convert unnest into table function scan
RelNode tableFunctionScan = convert(array, correlate);
// create correlate with table function scan as input
Correlate newCorrelate =
correlate.copy(correlate.getTraitSet(), ImmutableList.of(outer, tableFunctionScan));
call.transformTo(newCorrelate);
}
private RelNode convert(RelNode relNode, LogicalCorrelate correlate) {
if (relNode instanceof HepRelVertex) {
HepRelVertex hepRelVertex = (HepRelVertex) relNode;
relNode = convert(getRel(hepRelVertex), correlate);
}
if (relNode instanceof LogicalProject) {
final LogicalProject logicalProject =
correlate.getJoinType() == JoinRelType.LEFT
? getLogicalProjectWithAdjustedNullability((LogicalProject) relNode)
: (LogicalProject) relNode;
return logicalProject.copy(
logicalProject.getTraitSet(),
ImmutableList.of(convert(getRel(logicalProject.getInput()), correlate)));
}
if (relNode instanceof LogicalFilter) {
LogicalFilter logicalFilter = (LogicalFilter) relNode;
return logicalFilter.copy(
logicalFilter.getTraitSet(),
ImmutableList.of(convert(getRel(logicalFilter.getInput()), correlate)));
}
if (relNode instanceof Uncollect) {
Uncollect uncollect = (Uncollect) relNode;
RelOptCluster cluster = correlate.getCluster();
FlinkTypeFactory typeFactory = ShortcutUtils.unwrapTypeFactory(cluster);
RelDataType relDataType =
(RelDataType)
((Map.Entry) uncollect.getInput().getRowType().getFieldList().get(0))
.getValue();
LogicalType logicalType = FlinkTypeFactory.toLogicalType(relDataType);
BridgingSqlFunction sqlFunction =
BridgingSqlFunction.of(
cluster,
uncollect.withOrdinality
? BuiltInFunctionDefinitions
.INTERNAL_UNNEST_ROWS_WITH_ORDINALITY
: BuiltInFunctionDefinitions.INTERNAL_UNNEST_ROWS);
RexNode rexCall =
cluster.getRexBuilder()
.makeCall(
typeFactory.createFieldTypeFromLogicalType(
toRowType(
UnnestRowsFunctionBase.getUnnestedType(
logicalType,
uncollect.withOrdinality))),
sqlFunction,
((LogicalProject) getRel(uncollect.getInput())).getProjects());
return new LogicalTableFunctionScan(
cluster,
correlate.getTraitSet(),
Collections.emptyList(),
rexCall,
null,
rexCall.getType(),
null);
} else {
throw new IllegalArgumentException("Unexpected input: " + relNode);
}
}
private RelNode getRel(RelNode rel) {
if (rel instanceof HepRelVertex) {
return ((HepRelVertex) rel).getCurrentRel();
}
return rel;
}
/**
* If unnesting type is {@code NOT NULL} however at the same time {@code LEFT JOIN} makes it
* nullable, this method adjusts nullability by inserting extra {@code CAST}.
*/
private LogicalProject getLogicalProjectWithAdjustedNullability(LogicalProject logicalProject) {
final RelOptCluster cluster = logicalProject.getCluster();
FlinkTypeFactory typeFactory = (FlinkTypeFactory) cluster.getTypeFactory();
RexBuilder rexBuilder = cluster.getRexBuilder();
final RelDataType rowType = logicalProject.getRowType();
return logicalProject.copy(
logicalProject.getTraitSet(),
logicalProject.getInput(),
logicalProject.getProjects().stream()
.map(
t -> {
if (t.getType().isNullable()) {
return t;
}
return rexBuilder.makeCast(
createNullableType(typeFactory, t.getType()), t);
})
.collect(Collectors.toList()),
rowType.isNullable() ? rowType : createNullableType(typeFactory, rowType));
}
private static RelDataType createNullableType(FlinkTypeFactory typeFactory, RelDataType type) {
return typeFactory.createTypeWithNullability(type, true);
}
/** Rule configuration. */
@Value.Immutable(singleton = false)
public | LogicalUnnestRule |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodecOptions.java | {
"start": 979,
"end": 1171
} | class ____ {
private ECSchema schema;
public ErasureCodecOptions(ECSchema schema) {
this.schema = schema;
}
public ECSchema getSchema() {
return schema;
}
}
| ErasureCodecOptions |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/internals/AutoOffsetResetStrategy.java | {
"start": 1305,
"end": 5838
} | enum ____ {
LATEST, EARLIEST, NONE, BY_DURATION;
@Override
public String toString() {
return super.toString().toLowerCase(Locale.ROOT);
}
}
public static final AutoOffsetResetStrategy EARLIEST = new AutoOffsetResetStrategy(StrategyType.EARLIEST);
public static final AutoOffsetResetStrategy LATEST = new AutoOffsetResetStrategy(StrategyType.LATEST);
public static final AutoOffsetResetStrategy NONE = new AutoOffsetResetStrategy(StrategyType.NONE);
private final StrategyType type;
private final Optional<Duration> duration;
private AutoOffsetResetStrategy(StrategyType type) {
this.type = type;
this.duration = Optional.empty();
}
private AutoOffsetResetStrategy(Duration duration) {
this.type = StrategyType.BY_DURATION;
this.duration = Optional.of(duration);
}
/**
* Returns the AutoOffsetResetStrategy from the given string.
*/
public static AutoOffsetResetStrategy fromString(String offsetStrategy) {
if (offsetStrategy == null) {
throw new IllegalArgumentException("Auto offset reset strategy is null");
}
if (StrategyType.BY_DURATION.toString().equals(offsetStrategy)) {
throw new IllegalArgumentException("<:duration> part is missing in by_duration auto offset reset strategy.");
}
if (Arrays.asList(Utils.enumOptions(StrategyType.class)).contains(offsetStrategy)) {
StrategyType type = StrategyType.valueOf(offsetStrategy.toUpperCase(Locale.ROOT));
switch (type) {
case EARLIEST:
return EARLIEST;
case LATEST:
return LATEST;
case NONE:
return NONE;
default:
throw new IllegalArgumentException("Unknown auto offset reset strategy: " + offsetStrategy);
}
}
if (offsetStrategy.startsWith(StrategyType.BY_DURATION + ":")) {
String isoDuration = offsetStrategy.substring(StrategyType.BY_DURATION.toString().length() + 1);
try {
Duration duration = Duration.parse(isoDuration);
if (duration.isNegative()) {
throw new IllegalArgumentException("Negative duration is not supported in by_duration offset reset strategy.");
}
return new AutoOffsetResetStrategy(duration);
} catch (Exception e) {
throw new IllegalArgumentException("Unable to parse duration string in by_duration offset reset strategy.", e);
}
}
throw new IllegalArgumentException("Unknown auto offset reset strategy: " + offsetStrategy);
}
/**
* Returns the offset reset strategy type.
*/
public StrategyType type() {
return type;
}
/**
* Returns the name of the offset reset strategy.
*/
public String name() {
return type.toString();
}
/**
* Return the timestamp to be used for the ListOffsetsRequest.
* @return the timestamp for the OffsetResetStrategy,
* if the strategy is EARLIEST or LATEST or duration is provided
* else return Optional.empty()
*/
public Optional<Long> timestamp() {
if (type == StrategyType.EARLIEST)
return Optional.of(ListOffsetsRequest.EARLIEST_TIMESTAMP);
else if (type == StrategyType.LATEST)
return Optional.of(ListOffsetsRequest.LATEST_TIMESTAMP);
else if (type == StrategyType.BY_DURATION && duration.isPresent()) {
Instant now = Instant.now();
return Optional.of(now.minus(duration.get()).toEpochMilli());
} else
return Optional.empty();
}
public Optional<Duration> duration() {
return duration;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AutoOffsetResetStrategy that = (AutoOffsetResetStrategy) o;
return type == that.type && Objects.equals(duration, that.duration);
}
@Override
public int hashCode() {
return Objects.hash(type, duration);
}
@Override
public String toString() {
return "AutoOffsetResetStrategy{" +
"type=" + type +
(duration.map(value -> ", duration=" + value).orElse("")) +
'}';
}
public static | StrategyType |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/script/BucketAggregationScript.java | {
"start": 628,
"end": 1234
} | class ____ {
public static final String[] PARAMETERS = {};
public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("bucket_aggregation", Factory.class);
/**
* The generic runtime parameters for the script.
*/
private final Map<String, Object> params;
public BucketAggregationScript(Map<String, Object> params) {
this.params = params;
}
/**
* Return the parameters for this script.
*/
public Map<String, Object> getParams() {
return params;
}
public abstract Number execute();
public | BucketAggregationScript |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.