language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/transport/TransportHandshakerRawMessageTests.java | {
"start": 1513,
"end": 11221
} | class ____ extends ESSingleNodeTestCase {
@UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // remove support for v8 handshakes in v10
public void testV8Handshake() throws Exception {
final BytesRef handshakeRequestBytes;
final var requestId = randomNonNegativeLong();
final var requestNodeTransportVersionId = TransportVersionUtils.randomCompatibleVersion(random()).id();
try (var outputStream = new BytesStreamOutput()) {
outputStream.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION);
outputStream.writeLong(requestId);
outputStream.writeByte(TransportStatus.setRequest(TransportStatus.setHandshake((byte) 0)));
outputStream.writeInt(TransportHandshaker.V8_HANDSHAKE_VERSION.id());
outputStream.writeInt(0x1a); // length of variable-length header, always 0x1a
outputStream.writeByte((byte) 0); // no request headers;
outputStream.writeByte((byte) 0); // no response headers;
outputStream.writeByte((byte) 0); // no features;
outputStream.writeString("internal:tcp/handshake");
outputStream.writeByte((byte) 0); // no parent task ID;
assertThat(requestNodeTransportVersionId, allOf(greaterThanOrEqualTo(1 << 22), lessThan(1 << 28))); // 4-byte vInt
outputStream.writeByte((byte) 4); // payload length
outputStream.writeVInt(requestNodeTransportVersionId);
handshakeRequestBytes = outputStream.bytes().toBytesRef();
}
final BytesRef handshakeResponseBytes;
try (var socket = openTransportConnection()) {
var streamOutput = new OutputStreamStreamOutput(socket.getOutputStream());
streamOutput.write("ES".getBytes(StandardCharsets.US_ASCII));
streamOutput.writeInt(handshakeRequestBytes.length);
streamOutput.writeBytes(handshakeRequestBytes.bytes, handshakeRequestBytes.offset, handshakeRequestBytes.length);
streamOutput.flush();
var streamInput = new InputStreamStreamInput(socket.getInputStream());
assertEquals((byte) 'E', streamInput.readByte());
assertEquals((byte) 'S', streamInput.readByte());
var responseLength = streamInput.readInt();
handshakeResponseBytes = streamInput.readBytesRef(responseLength);
}
try (var inputStream = new BytesArray(handshakeResponseBytes).streamInput()) {
assertEquals(requestId, inputStream.readLong());
assertEquals(TransportStatus.setResponse(TransportStatus.setHandshake((byte) 0)), inputStream.readByte());
assertEquals(TransportHandshaker.V8_HANDSHAKE_VERSION.id(), inputStream.readInt());
assertEquals(2, inputStream.readInt()); // length of variable-length header, always 0x02
assertEquals((byte) 0, inputStream.readByte()); // no request headers
assertEquals((byte) 0, inputStream.readByte()); // no response headers
inputStream.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION);
assertEquals(requestNodeTransportVersionId, inputStream.readVInt());
assertEquals(-1, inputStream.read());
}
}
@UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // remove support for v9 handshakes in v11
public void testV9Handshake() throws Exception {
final BytesRef handshakeRequestBytes;
final var requestId = randomNonNegativeLong();
final var requestNodeTransportVersionId = TransportVersionUtils.randomCompatibleVersion(random()).id();
try (var outputStream = new BytesStreamOutput()) {
outputStream.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION);
outputStream.writeLong(requestId);
outputStream.writeByte(TransportStatus.setRequest(TransportStatus.setHandshake((byte) 0)));
outputStream.writeInt(TransportHandshaker.V9_HANDSHAKE_VERSION.id());
outputStream.writeInt(0x19); // length of variable-length header, always 0x19
outputStream.writeByte((byte) 0); // no request headers;
outputStream.writeByte((byte) 0); // no response headers;
outputStream.writeString("internal:tcp/handshake");
outputStream.writeByte((byte) 0); // no parent task ID;
assertThat(requestNodeTransportVersionId, allOf(greaterThanOrEqualTo(1 << 22), lessThan(1 << 28))); // 4-byte vInt
final var releaseVersionLength = between(0, 127 - 5); // so that its length, and the length of the payload, is a one-byte vInt
final var requestNodeReleaseVersion = randomAlphaOfLength(releaseVersionLength);
outputStream.writeByte((byte) (4 + 1 + releaseVersionLength)); // payload length
outputStream.writeVInt(requestNodeTransportVersionId);
outputStream.writeString(requestNodeReleaseVersion);
handshakeRequestBytes = outputStream.bytes().toBytesRef();
}
final BytesRef handshakeResponseBytes;
try (var socket = openTransportConnection()) {
var streamOutput = new OutputStreamStreamOutput(socket.getOutputStream());
streamOutput.write("ES".getBytes(StandardCharsets.US_ASCII));
streamOutput.writeInt(handshakeRequestBytes.length);
streamOutput.writeBytes(handshakeRequestBytes.bytes, handshakeRequestBytes.offset, handshakeRequestBytes.length);
streamOutput.flush();
var streamInput = new InputStreamStreamInput(socket.getInputStream());
assertEquals((byte) 'E', streamInput.readByte());
assertEquals((byte) 'S', streamInput.readByte());
var responseLength = streamInput.readInt();
handshakeResponseBytes = streamInput.readBytesRef(responseLength);
}
try (var inputStream = new BytesArray(handshakeResponseBytes).streamInput()) {
assertEquals(requestId, inputStream.readLong());
assertEquals(TransportStatus.setResponse(TransportStatus.setHandshake((byte) 0)), inputStream.readByte());
assertEquals(TransportHandshaker.V9_HANDSHAKE_VERSION.id(), inputStream.readInt());
assertEquals(2, inputStream.readInt()); // length of variable-length header, always 0x02
assertEquals((byte) 0, inputStream.readByte()); // no request headers
assertEquals((byte) 0, inputStream.readByte()); // no response headers
inputStream.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION);
assertEquals(requestNodeTransportVersionId, inputStream.readVInt());
assertEquals(Build.current().version(), inputStream.readString());
assertEquals(-1, inputStream.read());
}
}
public void testOutboundHandshake() throws Exception {
final BytesRef handshakeRequestBytes;
try (var serverSocket = new ServerSocket(0, 1, InetAddress.getLoopbackAddress())) {
getInstanceFromNode(TransportService.class).openConnection(
DiscoveryNodeUtils.builder(randomIdentifier())
.address(new TransportAddress(serverSocket.getInetAddress(), serverSocket.getLocalPort()))
.build(),
ConnectionProfile.buildSingleChannelProfile(TransportRequestOptions.Type.REG, null, null, null, null, null),
ActionListener.noop()
);
try (
var acceptedSocket = serverSocket.accept();
var streamInput = new InputStreamStreamInput(acceptedSocket.getInputStream())
) {
assertEquals((byte) 'E', streamInput.readByte());
assertEquals((byte) 'S', streamInput.readByte());
var responseLength = streamInput.readInt();
handshakeRequestBytes = streamInput.readBytesRef(responseLength);
}
}
final BytesRef payloadBytes;
try (var inputStream = new BytesArray(handshakeRequestBytes).streamInput()) {
assertThat(inputStream.readLong(), greaterThan(0L));
assertEquals(TransportStatus.setRequest(TransportStatus.setHandshake((byte) 0)), inputStream.readByte());
assertEquals(TransportHandshaker.V9_HANDSHAKE_VERSION.id(), inputStream.readInt());
assertEquals(0x19, inputStream.readInt()); // length of variable-length header, always 0x19
assertEquals((byte) 0, inputStream.readByte()); // no request headers
assertEquals((byte) 0, inputStream.readByte()); // no response headers
assertEquals("internal:tcp/handshake", inputStream.readString());
assertEquals((byte) 0, inputStream.readByte()); // no parent task
inputStream.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION);
payloadBytes = inputStream.readBytesRef();
assertEquals(-1, inputStream.read());
}
try (var inputStream = new BytesArray(payloadBytes).streamInput()) {
inputStream.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION);
assertEquals(TransportVersion.current().id(), inputStream.readVInt());
assertEquals(Build.current().version(), inputStream.readString());
assertEquals(-1, inputStream.read());
}
}
private Socket openTransportConnection() throws Exception {
final var transportAddress = randomFrom(getInstanceFromNode(TransportService.class).boundAddress().boundAddresses()).address();
return new Socket(transportAddress.getAddress(), transportAddress.getPort());
}
}
| TransportHandshakerRawMessageTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/naturalid/lazy/NaturalIdInUninitializedAssociationTest.java | {
"start": 3791,
"end": 4150
} | class ____ {
@Id
private int id;
@NaturalId(mutable = true)
private String name;
public EntityMutableNaturalId() {
}
public EntityMutableNaturalId(int id, String name) {
this.id = id;
this.name = name;
}
public String getName() {
return name;
}
}
@Entity(name = "EntityImmutableNaturalId")
public static | EntityMutableNaturalId |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/servlet/util/matcher/PathPatternRequestMatcher.java | {
"start": 2176,
"end": 7054
} | class ____ implements RequestMatcher {
private final PathPattern pattern;
private final RequestMatcher method;
/**
* Creates a {@link PathPatternRequestMatcher} that uses the provided {@code pattern}.
* <p>
* The {@code pattern} should be relative to the context path
* </p>
* @param pattern the pattern used to match
*/
private PathPatternRequestMatcher(PathPattern pattern, RequestMatcher method) {
this.pattern = pattern;
this.method = method;
}
/**
* Construct a {@link PathPatternRequestMatcher} using the {@link PathPatternParser}
* defaults.
* <p>
* If you are configuring a custom {@link PathPatternParser}, please use
* {@link #withPathPatternParser} instead.
* @param pattern the URI pattern to match
* @return a {@link PathPatternRequestMatcher} that matches requests to the given
* {@code pattern}
* @since 7.0
* @see PathPattern
*/
public static PathPatternRequestMatcher pathPattern(String pattern) {
return pathPattern(null, pattern);
}
/**
* Construct a {@link PathPatternRequestMatcher} using the {@link PathPatternParser}
* defaults.
* <p>
* If you are configuring a custom {@link PathPatternParser}, please use
* {@link #withPathPatternParser} instead.
* @param method the HTTP method to match, {@code null} indicates that the method does
* not matter
* @param pattern the URI pattern to match
* @return a {@link PathPatternRequestMatcher} that matches requests to the given
* {@code pattern} and {@code method}
* @since 7.0
* @see PathPattern
*/
public static PathPatternRequestMatcher pathPattern(@Nullable HttpMethod method, String pattern) {
return withDefaults().matcher(method, pattern);
}
/**
* Use {@link PathPatternParser#defaultInstance} to parse path patterns.
* @return a {@link Builder} that treats URIs as relative to the context path, if any
*/
public static Builder withDefaults() {
return new Builder();
}
/**
* Use this {@link PathPatternParser} to parse path patterns.
* @param parser the {@link PathPatternParser} to use
* @return a {@link Builder} that treats URIs as relative to the given
* {@code servletPath}
*/
public static Builder withPathPatternParser(PathPatternParser parser) {
Assert.notNull(parser, "pathPatternParser cannot be null");
return new Builder(parser);
}
/**
* {@inheritDoc}
*/
@Override
public boolean matches(HttpServletRequest request) {
return matcher(request).isMatch();
}
/**
* {@inheritDoc}
*/
@Override
public MatchResult matcher(HttpServletRequest request) {
if (!this.method.matches(request)) {
return MatchResult.notMatch();
}
PathContainer path = getPathContainer(request);
PathPattern.PathMatchInfo info = this.pattern.matchAndExtract(path);
return (info != null) ? MatchResult.match(info.getUriVariables()) : MatchResult.notMatch();
}
private PathContainer getPathContainer(HttpServletRequest request) {
RequestPath path;
if (ServletRequestPathUtils.hasParsedRequestPath(request)) {
path = ServletRequestPathUtils.getParsedRequestPath(request);
}
else {
path = ServletRequestPathUtils.parseAndCache(request);
ServletRequestPathUtils.clearParsedRequestPath(request);
}
PathContainer contextPath = path.contextPath();
return path.subPath(contextPath.elements().size());
}
/**
* {@inheritDoc}
*/
@Override
public boolean equals(Object o) {
if (!(o instanceof PathPatternRequestMatcher that)) {
return false;
}
return Objects.equals(this.pattern, that.pattern) && Objects.equals(this.method, that.method);
}
/**
* {@inheritDoc}
*/
@Override
public int hashCode() {
return Objects.hash(this.pattern, this.method);
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
StringBuilder request = new StringBuilder();
if (this.method instanceof HttpMethodRequestMatcher m) {
request.append(m.method.name()).append(' ');
}
return "PathPattern [" + request + this.pattern + "]";
}
/**
* A builder for specifying various elements of a request for the purpose of creating
* a {@link PathPatternRequestMatcher}.
*
* <p>
* To match a request URI like {@code /app/servlet/my/resource/**} where {@code /app}
* is the context path, you can do
* {@code PathPatternRequestMatcher.pathPattern("/servlet/my/resource/**")}
*
* <p>
* If you have many paths that have a common path prefix, you can use
* {@link #basePath} to reduce repetition like so:
*
* <pre>
* PathPatternRequestMatcher.Builder mvc = withDefaults().basePath("/mvc");
* http
* .authorizeHttpRequests((authorize) -> authorize
* .requestMatchers(mvc.matcher("/user/**")).hasAuthority("user")
* .requestMatchers(mvc.matcher("/admin/**")).hasAuthority("admin")
* )
* ...
* </pre>
*/
public static final | PathPatternRequestMatcher |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/id/insert/GetGeneratedKeysDelegate.java | {
"start": 1719,
"end": 6202
} | class ____ extends AbstractReturningDelegate {
private final String[] columnNames;
public GetGeneratedKeysDelegate(
EntityPersister persister,
boolean inferredKeys,
EventType timing) {
super( persister, timing, !inferredKeys, false );
if ( inferredKeys ) {
columnNames = null;
}
else {
final var resultBuilders = jdbcValuesMappingProducer.getResultBuilders();
final List<String> columnNamesList = new ArrayList<>( resultBuilders.size() );
final boolean unquote = dialect().unquoteGetGeneratedKeys();
for ( var resultBuilder : resultBuilders ) {
final String columnName =
getActualGeneratedModelPart( resultBuilder.getModelPart() )
.getSelectionExpression();
columnNamesList.add( unquote ? unquote( columnName, dialect() ) : columnName );
}
columnNames = columnNamesList.toArray( EMPTY_STRINGS );
}
}
@Override
public TableMutationBuilder<?> createTableMutationBuilder(
Expectation expectation,
SessionFactoryImplementor factory) {
final var identifierTableMapping = persister.getIdentifierTableMapping();
return getTiming() == EventType.INSERT
? new TableInsertBuilderStandard( persister, identifierTableMapping, factory )
: new TableUpdateBuilderStandard<>( persister, identifierTableMapping, factory );
}
@Override
public PreparedStatement prepareStatement(String sql, SharedSessionContractImplementor session) {
var preparer = session.getJdbcCoordinator().getMutationStatementPreparer();
return columnNames == null
? preparer.prepareStatement( sql, RETURN_GENERATED_KEYS )
: preparer.prepareStatement( sql, columnNames );
}
@Override
public GeneratedValues performMutation(
PreparedStatementDetails statementDetails,
JdbcValueBindings jdbcValueBindings,
Object entity,
SharedSessionContractImplementor session) {
final JdbcServices jdbcServices = session.getJdbcServices();
final JdbcCoordinator jdbcCoordinator = session.getJdbcCoordinator();
final String sql = statementDetails.getSqlString();
jdbcServices.getSqlStatementLogger().logStatement( sql );
try {
final var preparedStatement = statementDetails.resolveStatement();
jdbcValueBindings.beforeStatement( statementDetails );
jdbcCoordinator.getResultSetReturn().executeUpdate( preparedStatement, sql );
try {
final ResultSet resultSet = preparedStatement.getGeneratedKeys();
try {
return getGeneratedValues( resultSet, preparedStatement, persister, getTiming(), session );
}
catch (SQLException e) {
throw jdbcServices.getSqlExceptionHelper().convert(
e,
() -> String.format(
Locale.ROOT,
"Unable to extract generated key from generated-key for `%s`",
persister.getNavigableRole().getFullPath()
),
sql
);
}
finally {
if ( resultSet != null ) {
jdbcCoordinator.getLogicalConnection().getResourceRegistry()
.release( resultSet, preparedStatement );
}
}
}
catch (SQLException e) {
throw jdbcServices.getSqlExceptionHelper().convert(
e,
"Unable to extract generated-keys ResultSet",
sql
);
}
}
finally {
if ( statementDetails.getStatement() != null ) {
statementDetails.releaseStatement( session );
}
jdbcValueBindings.afterStatement( statementDetails.getMutatingTableDetails() );
}
}
@Override
public GeneratedValues executeAndExtractReturning(
String sql,
PreparedStatement preparedStatement,
SharedSessionContractImplementor session) {
final JdbcCoordinator jdbcCoordinator = session.getJdbcCoordinator();
final JdbcServices jdbcServices = session.getJdbcServices();
jdbcCoordinator.getResultSetReturn().executeUpdate( preparedStatement, sql );
try {
final ResultSet resultSet = preparedStatement.getGeneratedKeys();
try {
return getGeneratedValues( resultSet, preparedStatement, persister, getTiming(), session );
}
catch (SQLException e) {
throw jdbcServices.getSqlExceptionHelper().convert(
e,
"Unable to extract generated key(s) from generated-keys ResultSet",
sql
);
}
finally {
if ( resultSet != null ) {
jdbcCoordinator.getLogicalConnection().getResourceRegistry()
.release( resultSet, preparedStatement );
}
}
}
catch (SQLException e) {
throw jdbcServices.getSqlExceptionHelper().convert(
e,
"Unable to extract generated-keys ResultSet",
sql
);
}
}
}
| GetGeneratedKeysDelegate |
java | quarkusio__quarkus | extensions/amazon-lambda/deployment/src/test/java/io/quarkus/amazon/lambda/deployment/testing/LambdaWithHierarchyTest.java | {
"start": 1961,
"end": 2206
} | class ____<T, R> implements RequestHandler<T, R> {
@Override
public R handleRequest(T input, Context context) {
return getName(input);
}
public abstract R getName(T input);
}
}
| AbstractRequestHandler |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/generics/MultipleBoundsTest.java | {
"start": 2840,
"end": 3270
} | class ____ extends AbstractEntity implements WithTranslationKey {
private String name;
public User() {
}
public User(Long id, String name) {
setId( id );
this.name = name;
}
@Override
public String getTranslationKey() {
return name;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
@Entity(name = "UserTranslation")
public static | User |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeFaultInjector.java | {
"start": 1772,
"end": 5857
} | class ____
extends DataNodeFaultInjector {
public static final long DELAY = 2000;
private long delayMs = 0;
private final String err = "Interrupted while sleeping. Bailing out.";
private long delayTries = 1;
void delayOnce() throws IOException {
if (delayTries > 0) {
delayTries--;
try {
Thread.sleep(DELAY);
} catch (InterruptedException ie) {
throw new IOException(err);
}
}
}
long getDelayMs() {
return delayMs;
}
void logDelay(final long duration) {
/**
* delay should be at least longer than DELAY, otherwise, delayXYZ is
* no-op
*/
if (duration >= DELAY) {
this.delayMs = duration;
}
}
}
@Test
@Timeout(value = 60)
public void testDelaySendingAckToUpstream() throws Exception {
final MetricsDataNodeFaultInjector mdnFaultInjector =
new MetricsDataNodeFaultInjector() {
@Override
public void delaySendingAckToUpstream(final String upstreamAddr)
throws IOException {
delayOnce();
}
@Override
public void logDelaySendingAckToUpstream(final String upstreamAddr,
final long delayMs) throws IOException {
logDelay(delayMs);
}
};
verifyFaultInjectionDelayPipeline(mdnFaultInjector);
}
@Test
@Timeout(value = 60)
public void testDelaySendingPacketDownstream() throws Exception {
final MetricsDataNodeFaultInjector mdnFaultInjector =
new MetricsDataNodeFaultInjector() {
@Override
public void stopSendingPacketDownstream(final String mirrAddr)
throws IOException {
delayOnce();
}
@Override
public void logDelaySendingPacketDownstream(final String mirrAddr,
final long delayMs) throws IOException {
logDelay(delayMs);
}
};
verifyFaultInjectionDelayPipeline(mdnFaultInjector);
}
private void verifyFaultInjectionDelayPipeline(
final MetricsDataNodeFaultInjector mdnFaultInjector) throws Exception {
final Path baseDir = new Path(
PathUtils.getTestDir(getClass()).getPath(),
GenericTestUtils.getMethodName());
final DataNodeFaultInjector oldDnInjector = DataNodeFaultInjector.get();
DataNodeFaultInjector.set(mdnFaultInjector);
final Configuration conf = new HdfsConfiguration();
/*
* MetricsDataNodeFaultInjector.DELAY/2 ms is viewed as slow.
*/
final long datanodeSlowLogThresholdMs = MetricsDataNodeFaultInjector.DELAY
/ 2;
conf.setLong(DFSConfigKeys.DFS_DATANODE_SLOW_IO_WARNING_THRESHOLD_KEY,
datanodeSlowLogThresholdMs);
conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.toString());
/**
* configure to avoid resulting in pipeline failure due to read socket
* timeout
*/
conf.setLong(HdfsClientConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY,
MetricsDataNodeFaultInjector.DELAY * 2);
conf.setBoolean(
HdfsClientConfigKeys.BlockWrite.ReplaceDatanodeOnFailure.ENABLE_KEY,
true);
conf.set(
HdfsClientConfigKeys.BlockWrite.ReplaceDatanodeOnFailure.POLICY_KEY,
"ALWAYS");
MiniDFSCluster cluster = null;
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
cluster.waitActive();
final FileSystem fs = cluster.getFileSystem();
try (FSDataOutputStream out = fs
.create(new Path(baseDir, "test.data"), (short) 2)) {
out.write(0x31);
out.hflush();
out.hsync();
}
LOG.info("delay info: " + mdnFaultInjector.getDelayMs() + ":"
+ datanodeSlowLogThresholdMs);
assertTrue(mdnFaultInjector.getDelayMs() > datanodeSlowLogThresholdMs,
"Injected delay should be longer than the configured one");
} finally {
if (cluster != null) {
cluster.shutdown();
}
DataNodeFaultInjector.set(oldDnInjector);
}
}
}
| MetricsDataNodeFaultInjector |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/BooleanFieldTest.java | {
"start": 144,
"end": 675
} | class ____ extends TestCase {
public void test_model() throws Exception {
Model model = new Model();
model.value = true;
String text = JSON.toJSONString(model);
Assert.assertEquals("{\"value\":true}", text);
}
public void test_model_max() throws Exception {
Model model = new Model();
model.value = false;
String text = JSON.toJSONString(model);
Assert.assertEquals("{\"value\":false}", text);
}
public static | BooleanFieldTest |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/runtime/src/main/java/io/quarkus/resteasy/reactive/server/EndpointDisabled.java | {
"start": 410,
"end": 618
} | interface ____ {
/**
* Name of the property to check
*/
String name();
/**
* Expected {@code String} value of the property (specified by {@code name}) if the Resource | EndpointDisabled |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/NativeImageResourceBundleBuildItem.java | {
"start": 198,
"end": 794
} | class ____ extends MultiBuildItem {
private final String bundleName;
private final String moduleName;
public NativeImageResourceBundleBuildItem(String bundleName) {
this.bundleName = bundleName;
this.moduleName = null;
}
public NativeImageResourceBundleBuildItem(String bundleName, String moduleName) {
this.bundleName = bundleName;
this.moduleName = moduleName;
}
public String getBundleName() {
return bundleName;
}
public String getModuleName() {
return moduleName;
}
}
| NativeImageResourceBundleBuildItem |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/property/access/internal/PropertyAccessStrategyNoopImpl.java | {
"start": 2344,
"end": 2708
} | class ____ implements Setter {
/**
* Singleton access
*/
public static final SetterImpl INSTANCE = new SetterImpl();
@Override
public void set(Object target, @Nullable Object value) {
}
@Override
public @Nullable String getMethodName() {
return null;
}
@Override
public @Nullable Method getMethod() {
return null;
}
}
}
| SetterImpl |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/BaseSessionFactoryFunctionalTest.java | {
"start": 1579,
"end": 1754
} | class ____ tests bridging the legacy
* approach of SessionFactory building as a test fixture
*
* @author Steve Ebersole
*/
@SessionFactoryFunctionalTesting
public abstract | for |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/merge/CollectionMergeTest.java | {
"start": 1059,
"end": 3271
} | class ____<T>
{
@JsonMerge
T value;
public MergedX(T v) { value = v; }
protected MergedX() { }
public void setValue(T v) { value = v; }
}
/*
/********************************************************
/* Test methods
/********************************************************
*/
private final ObjectMapper MAPPER = jsonMapperBuilder()
// 26-Oct-2016, tatu: Make sure we'll report merge problems by default
.disable(MapperFeature.IGNORE_MERGE_FOR_UNMERGEABLE)
.build();
@Test
public void testCollectionMerging() throws Exception
{
CollectionWrapper w = MAPPER.readValue(a2q("{'bag':['b']}"), CollectionWrapper.class);
assertEquals(2, w.bag.size());
assertTrue(w.bag.contains("a"));
assertTrue(w.bag.contains("b"));
}
@Test
public void testListMerging() throws Exception
{
MergedList w = MAPPER.readValue(a2q("{'values':['x']}"), MergedList.class);
assertEquals(2, w.values.size());
assertTrue(w.values.contains("a"));
assertTrue(w.values.contains("x"));
}
// Test that uses generic type
@Test
public void testGenericListMerging() throws Exception
{
Collection<String> l = new ArrayList<>();
l.add("foo");
MergedX<Collection<String>> input = new MergedX<Collection<String>>(l);
MergedX<Collection<String>> result = MAPPER
.readerFor(new TypeReference<MergedX<Collection<String>>>() {})
.withValueToUpdate(input)
.readValue(a2q("{'value':['bar']}"));
assertSame(input, result);
assertEquals(2, result.value.size());
Iterator<String> it = result.value.iterator();
assertEquals("foo", it.next());
assertEquals("bar", it.next());
}
@Test
public void testEnumSetMerging() throws Exception
{
MergedEnumSet result = MAPPER.readValue(a2q("{'abc':['A']}"), MergedEnumSet.class);
assertEquals(2, result.abc.size());
assertTrue(result.abc.contains(ABC.B)); // original
assertTrue(result.abc.contains(ABC.A)); // added
}
}
| MergedX |
java | elastic__elasticsearch | x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java | {
"start": 2784,
"end": 15809
} | class ____ extends LocalExporterIntegTestCase {
private final String indexTimeFormat = randomFrom("yy", "yyyy", "yyyy.MM", "yyyy-MM", "MM.yyyy", "MM", null);
private void stopMonitoring() {
// Now disabling the monitoring service, so that no more collection are started
updateClusterSettings(
Settings.builder()
.putNull(MonitoringService.ENABLED.getKey())
.putNull("xpack.monitoring.exporters._local.type")
.putNull("xpack.monitoring.exporters._local.enabled")
.putNull("xpack.monitoring.exporters._local.cluster_alerts.management.enabled")
.putNull("xpack.monitoring.exporters._local.index.name.time_format")
);
}
public void testExport() throws Exception {
try {
if (randomBoolean()) {
// indexing some random documents
IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5];
for (int i = 0; i < indexRequestBuilders.length; i++) {
indexRequestBuilders[i] = prepareIndex("test").setId(Integer.toString(i))
.setSource("title", "This is a random document");
}
indexRandom(true, indexRequestBuilders);
}
// start the monitoring service so that /_monitoring/bulk is not ignored
final Settings.Builder exporterSettings = Settings.builder()
.put(MonitoringService.ENABLED.getKey(), true)
.put("xpack.monitoring.exporters._local.type", LocalExporter.TYPE)
.put("xpack.monitoring.exporters._local.enabled", true)
.put("xpack.monitoring.exporters._local.cluster_alerts.management.enabled", false);
if (indexTimeFormat != null) {
exporterSettings.put("xpack.monitoring.exporters._local.index.name.time_format", indexTimeFormat);
}
// local exporter is now enabled
updateClusterSettings(exporterSettings);
if (randomBoolean()) {
// export some documents now, before starting the monitoring service
final int nbDocs = randomIntBetween(1, 20);
List<MonitoringBulkDoc> monitoringDocs = new ArrayList<>(nbDocs);
for (int i = 0; i < nbDocs; i++) {
monitoringDocs.add(createMonitoringBulkDoc());
}
assertBusy(() -> {
MonitoringBulkRequestBuilder bulk = new MonitoringBulkRequestBuilder(client());
monitoringDocs.forEach(bulk::add);
assertEquals(RestStatus.OK, bulk.get().status());
refresh();
assertThat(indexExists(".monitoring-*"), is(true));
ensureYellowAndNoInitializingShards(".monitoring-*");
assertResponse(
prepareSearch(".monitoring-*"),
response -> assertThat((long) nbDocs, lessThanOrEqualTo(response.getHits().getTotalHits().value()))
);
});
checkMonitoringTemplates();
checkMonitoringDocs();
}
final int numNodes = internalCluster().getNodeNames().length;
assertBusy(() -> {
assertThat(indexExists(".monitoring-*"), is(true));
ensureYellowAndNoInitializingShards(".monitoring-*");
assertThat(
SearchResponseUtils.getTotalHitsValue(
prepareSearch(".monitoring-es-*").setSize(0).setQuery(QueryBuilders.termQuery("type", "cluster_stats"))
),
greaterThan(0L)
);
assertThat(
SearchResponseUtils.getTotalHitsValue(
prepareSearch(".monitoring-es-*").setSize(0).setQuery(QueryBuilders.termQuery("type", "index_recovery"))
),
greaterThan(0L)
);
assertThat(
SearchResponseUtils.getTotalHitsValue(
prepareSearch(".monitoring-es-*").setSize(0).setQuery(QueryBuilders.termQuery("type", "index_stats"))
),
greaterThan(0L)
);
assertThat(
SearchResponseUtils.getTotalHitsValue(
prepareSearch(".monitoring-es-*").setSize(0).setQuery(QueryBuilders.termQuery("type", "indices_stats"))
),
greaterThan(0L)
);
assertThat(
SearchResponseUtils.getTotalHitsValue(
prepareSearch(".monitoring-es-*").setSize(0).setQuery(QueryBuilders.termQuery("type", "shards"))
),
greaterThan(0L)
);
assertResponse(
prepareSearch(".monitoring-es-*").setSize(0)
.setQuery(QueryBuilders.termQuery("type", "node_stats"))
.addAggregation(terms("agg_nodes_ids").field("node_stats.node_id")),
response -> {
Terms aggregation = response.getAggregations().get("agg_nodes_ids");
assertEquals(
"Aggregation on node_id must return a bucket per node involved in test",
numNodes,
aggregation.getBuckets().size()
);
for (String nodeName : internalCluster().getNodeNames()) {
String nodeId = getNodeId(nodeName);
Terms.Bucket bucket = aggregation.getBucketByKey(nodeId);
assertTrue("No bucket found for node id [" + nodeId + "]", bucket != null);
assertTrue(bucket.getDocCount() >= 1L);
}
}
);
}, 30L, TimeUnit.SECONDS);
checkMonitoringTemplates();
checkMonitoringDocs();
} finally {
stopMonitoring();
}
// This assertion loop waits for in flight exports to terminate. It checks that the latest
// node_stats document collected for each node is at least 10 seconds old, corresponding to
// 2 or 3 elapsed collection intervals.
final int elapsedInSeconds = 10;
final ZonedDateTime startTime = ZonedDateTime.now(ZoneOffset.UTC);
assertBusy(() -> {
if (indexExists(".monitoring-*")) {
ensureYellowAndNoInitializingShards(".monitoring-*");
refresh(".monitoring-es-*");
assertResponse(
prepareSearch(".monitoring-es-*").setSize(0)
.setQuery(QueryBuilders.termQuery("type", "node_stats"))
.addAggregation(
terms("agg_nodes_ids").field("node_stats.node_id")
.subAggregation(max("agg_last_time_collected").field("timestamp"))
),
response -> {
Terms aggregation = response.getAggregations().get("agg_nodes_ids");
for (String nodeName : internalCluster().getNodeNames()) {
String nodeId = getNodeId(nodeName);
Terms.Bucket bucket = aggregation.getBucketByKey(nodeId);
assertTrue("No bucket found for node id [" + nodeId + "]", bucket != null);
assertTrue(bucket.getDocCount() >= 1L);
Max subAggregation = bucket.getAggregations().get("agg_last_time_collected");
ZonedDateTime lastCollection = Instant.ofEpochMilli(Math.round(subAggregation.value())).atZone(ZoneOffset.UTC);
assertTrue(lastCollection.plusSeconds(elapsedInSeconds).isBefore(ZonedDateTime.now(ZoneOffset.UTC)));
}
}
);
} else {
assertTrue(ZonedDateTime.now(ZoneOffset.UTC).isAfter(startTime.plusSeconds(elapsedInSeconds)));
}
}, 30L, TimeUnit.SECONDS);
}
/**
* Checks that the monitoring templates have been created by the local exporter
*/
private void checkMonitoringTemplates() {
final Set<String> templates = new HashSet<>();
templates.add(".monitoring-alerts-7");
templates.add(".monitoring-es");
templates.add(".monitoring-kibana");
templates.add(".monitoring-logstash");
templates.add(".monitoring-beats");
GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates(TEST_REQUEST_TIMEOUT, ".monitoring-*").get();
Set<String> actualTemplates = response.getIndexTemplates().stream().map(IndexTemplateMetadata::getName).collect(Collectors.toSet());
assertEquals(templates, actualTemplates);
}
/**
* Checks that the monitoring documents all have the cluster_uuid, timestamp and source_node
* fields and belongs to the right data or timestamped index.
*/
private void checkMonitoringDocs() {
ClusterStateResponse response = clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get();
String customTimeFormat = response.getState()
.getMetadata()
.persistentSettings()
.get("xpack.monitoring.exporters._local.index.name.time_format");
assertEquals(indexTimeFormat, customTimeFormat);
if (customTimeFormat == null) {
customTimeFormat = "yyyy.MM.dd";
}
DateFormatter dateParser = DateFormatter.forPattern("strict_date_time");
DateFormatter dateFormatter = DateFormatter.forPattern(customTimeFormat).withZone(ZoneOffset.UTC);
assertResponse(prepareSearch(".monitoring-*").setSize(100), rsp -> {
assertThat(rsp.getHits().getTotalHits().value(), greaterThan(0L));
for (SearchHit hit : rsp.getHits().getHits()) {
final Map<String, Object> source = hit.getSourceAsMap();
assertTrue(source != null && source.isEmpty() == false);
final String timestamp = (String) source.get("timestamp");
final String type = (String) source.get("type");
assertTrue("document is missing cluster_uuid field", Strings.hasText((String) source.get("cluster_uuid")));
assertTrue("document is missing timestamp field", Strings.hasText(timestamp));
assertTrue("document is missing type field", Strings.hasText(type));
@SuppressWarnings("unchecked")
Map<String, Object> docSource = (Map<String, Object>) source.get("doc");
MonitoredSystem expectedSystem;
if (docSource == null) {
// This is a document indexed by the Monitoring service
expectedSystem = MonitoredSystem.ES;
} else {
// This is a document indexed through the Monitoring Bulk API
expectedSystem = MonitoredSystem.fromSystem((String) docSource.get("expected_system"));
}
String dateTime = dateFormatter.format(dateParser.parse(timestamp));
final String expectedIndex = ".monitoring-" + expectedSystem.getSystem() + "-" + TEMPLATE_VERSION + "-" + dateTime;
assertEquals("Expected " + expectedIndex + " but got " + hit.getIndex(), expectedIndex, hit.getIndex());
@SuppressWarnings("unchecked")
Map<String, Object> sourceNode = (Map<String, Object>) source.get("source_node");
if ("shards".equals(type) == false) {
assertNotNull("document is missing source_node field", sourceNode);
}
}
});
}
public static MonitoringBulkDoc createMonitoringBulkDoc() throws IOException {
final MonitoredSystem system = randomFrom(BEATS, KIBANA, LOGSTASH);
final XContentType xContentType = randomFrom(XContentType.values());
final BytesReference source;
try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
builder.startObject();
{
builder.field("expected_system", system.getSystem());
final int nbFields = randomIntBetween(1, 3);
for (int i = 0; i < nbFields; i++) {
builder.field("field_" + i, i);
}
}
builder.endObject();
source = BytesReference.bytes(builder);
}
return MonitoringTestUtils.randomMonitoringBulkDoc(random(), xContentType, source, system, "doc");
}
}
| LocalExporterIntegTests |
java | netty__netty | handler/src/test/java/io/netty/handler/ssl/OpenSslPrivateKeyMethodTest.java | {
"start": 17866,
"end": 18015
} | class ____ extends Thread {
DelegateThread(Runnable target) {
super(target);
}
}
private static final | DelegateThread |
java | netty__netty | codec-http2/src/main/java/io/netty/handler/codec/http2/MaxCapacityQueue.java | {
"start": 762,
"end": 2858
} | class ____<E> implements Queue<E> {
private final Queue<E> queue;
private final int maxCapacity;
MaxCapacityQueue(Queue<E> queue, int maxCapacity) {
this.queue = queue;
this.maxCapacity = maxCapacity;
}
@Override
public boolean add(E element) {
if (offer(element)) {
return true;
}
throw new IllegalStateException();
}
@Override
public boolean offer(E element) {
if (maxCapacity <= queue.size()) {
return false;
}
return queue.offer(element);
}
@Override
public E remove() {
return queue.remove();
}
@Override
public E poll() {
return queue.poll();
}
@Override
public E element() {
return queue.element();
}
@Override
public E peek() {
return queue.peek();
}
@Override
public int size() {
return queue.size();
}
@Override
public boolean isEmpty() {
return queue.isEmpty();
}
@Override
public boolean contains(Object o) {
return queue.contains(o);
}
@Override
public Iterator<E> iterator() {
return queue.iterator();
}
@Override
public Object[] toArray() {
return queue.toArray();
}
@Override
public <T> T[] toArray(T[] a) {
return queue.toArray(a);
}
@Override
public boolean remove(Object o) {
return queue.remove(o);
}
@Override
public boolean containsAll(Collection<?> c) {
return queue.containsAll(c);
}
@Override
public boolean addAll(Collection<? extends E> c) {
if (maxCapacity >= size() + c.size()) {
return queue.addAll(c);
}
throw new IllegalStateException();
}
@Override
public boolean removeAll(Collection<?> c) {
return queue.removeAll(c);
}
@Override
public boolean retainAll(Collection<?> c) {
return queue.retainAll(c);
}
@Override
public void clear() {
queue.clear();
}
}
| MaxCapacityQueue |
java | grpc__grpc-java | core/src/testFixtures/java/io/grpc/internal/FakeClock.java | {
"start": 1692,
"end": 2939
} | class ____ {
private static final TaskFilter ACCEPT_ALL_FILTER = new TaskFilter() {
@Override
public boolean shouldAccept(Runnable command) {
return true;
}
};
private final ScheduledExecutorService scheduledExecutorService = new ScheduledExecutorImpl();
private final PriorityBlockingQueue<ScheduledTask> scheduledTasks = new PriorityBlockingQueue<>();
private final LinkedBlockingQueue<ScheduledTask> dueTasks = new LinkedBlockingQueue<>();
private final Ticker ticker =
new Ticker() {
@Override public long read() {
return currentTimeNanos;
}
};
private final Deadline.Ticker deadlineTicker =
new Deadline.Ticker() {
@Override public long nanoTime() {
return currentTimeNanos;
}
};
private final Supplier<Stopwatch> stopwatchSupplier =
new Supplier<Stopwatch>() {
@Override public Stopwatch get() {
return Stopwatch.createUnstarted(ticker);
}
};
private final TimeProvider timeProvider =
new TimeProvider() {
@Override
public long currentTimeNanos() {
return currentTimeNanos;
}
};
private long currentTimeNanos;
public | FakeClock |
java | hibernate__hibernate-orm | hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/HANALegacyDialect.java | {
"start": 50178,
"end": 51536
} | class ____<X> extends BasicExtractor<X> {
private final int maxLobPrefetchSize;
public BlobExtractor(JavaType<X> javaType, JdbcType jdbcType, int maxLobPrefetchSize) {
super( javaType, jdbcType );
this.maxLobPrefetchSize = maxLobPrefetchSize;
}
private X doExtract(Blob blob, WrapperOptions options) throws SQLException {
final X result;
if ( blob == null ) {
result = getJavaType().wrap( null, options );
}
else if ( blob.length() < maxLobPrefetchSize ) {
result = getJavaType().wrap( blob, options );
blob.free();
}
else {
final MaterializedBlob materialized = new MaterializedBlob( extractBytes( blob.getBinaryStream() ) );
blob.free();
result = getJavaType().wrap( materialized, options );
}
return result;
}
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return doExtract( rs.getBlob( paramIndex ), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
return doExtract( statement.getBlob( index ), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options) throws SQLException {
return doExtract( statement.getBlob( name ), options );
}
}
private static | BlobExtractor |
java | spring-projects__spring-security | access/src/main/java/org/springframework/security/web/access/expression/ExpressionBasedFilterInvocationSecurityMetadataSource.java | {
"start": 2282,
"end": 4671
} | class ____
extends DefaultFilterInvocationSecurityMetadataSource {
private static final Log logger = LogFactory.getLog(ExpressionBasedFilterInvocationSecurityMetadataSource.class);
public ExpressionBasedFilterInvocationSecurityMetadataSource(
LinkedHashMap<RequestMatcher, Collection<ConfigAttribute>> requestMap,
SecurityExpressionHandler<FilterInvocation> expressionHandler) {
super(processMap(requestMap, expressionHandler.getExpressionParser()));
Assert.notNull(expressionHandler, "A non-null SecurityExpressionHandler is required");
}
private static LinkedHashMap<RequestMatcher, Collection<ConfigAttribute>> processMap(
LinkedHashMap<RequestMatcher, Collection<ConfigAttribute>> requestMap, ExpressionParser parser) {
Assert.notNull(parser, "SecurityExpressionHandler returned a null parser object");
LinkedHashMap<RequestMatcher, Collection<ConfigAttribute>> processed = new LinkedHashMap<>(requestMap);
requestMap.forEach((request, value) -> process(parser, request, value, processed::put));
return processed;
}
private static void process(ExpressionParser parser, RequestMatcher request, Collection<ConfigAttribute> value,
BiConsumer<RequestMatcher, Collection<ConfigAttribute>> consumer) {
String expression = getExpression(request, value);
if (logger.isDebugEnabled()) {
logger.debug(LogMessage.format("Adding web access control expression [%s] for %s", expression, request));
}
AbstractVariableEvaluationContextPostProcessor postProcessor = createPostProcessor(request);
ArrayList<ConfigAttribute> processed = new ArrayList<>(1);
try {
processed.add(new WebExpressionConfigAttribute(parser.parseExpression(expression), postProcessor));
}
catch (ParseException ex) {
throw new IllegalArgumentException("Failed to parse expression '" + expression + "'");
}
consumer.accept(request, processed);
}
private static String getExpression(RequestMatcher request, Collection<ConfigAttribute> value) {
Assert.isTrue(value.size() == 1, () -> "Expected a single expression attribute for " + request);
return value.toArray(new ConfigAttribute[1])[0].getAttribute();
}
private static AbstractVariableEvaluationContextPostProcessor createPostProcessor(RequestMatcher request) {
return new RequestVariablesExtractorEvaluationContextPostProcessor(request);
}
static | ExpressionBasedFilterInvocationSecurityMetadataSource |
java | elastic__elasticsearch | x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackRegistryWithNonRequiredTemplates.java | {
"start": 713,
"end": 1381
} | class ____ extends StackTemplateRegistry {
StackRegistryWithNonRequiredTemplates(
Settings nodeSettings,
ClusterService clusterService,
ThreadPool threadPool,
Client client,
NamedXContentRegistry xContentRegistry
) {
super(nodeSettings, clusterService, threadPool, client, xContentRegistry);
}
@Override
protected Map<String, ComposableIndexTemplate> getComposableTemplateConfigs() {
return parseComposableTemplates(
new IndexTemplateConfig("syslog", "/non-required-template.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE)
);
}
}
| StackRegistryWithNonRequiredTemplates |
java | grpc__grpc-java | api/src/main/java/io/grpc/ChannelLogger.java | {
"start": 1970,
"end": 2390
} | enum ____ {
DEBUG,
INFO,
WARNING,
ERROR
}
/**
* Logs a message.
*/
public abstract void log(ChannelLogLevel level, String message);
/**
* Logs a message, using a message format and a list of arguments used to generate the log
* message with {@link java.text.MessageFormat}.
*/
public abstract void log(ChannelLogLevel level, String messageFormat, Object... args);
}
| ChannelLogLevel |
java | apache__dubbo | dubbo-serialization/dubbo-serialization-api/src/main/java/org/apache/dubbo/common/serialize/Serialization.java | {
"start": 1455,
"end": 2596
} | interface ____ {
/**
* Get content type unique id, recommended that custom implementations use values different with
* any value of {@link Constants} and don't greater than ExchangeCodec.SERIALIZATION_MASK (31)
* because dubbo protocol use 5 bits to record serialization ID in header.
*
* @return content type id
*/
byte getContentTypeId();
/**
* Get content type
*
* @return content type
*/
String getContentType();
/**
* Get a serialization implementation instance
*
* @param url URL address for the remote service
* @param output the underlying output stream
* @return serializer
* @throws IOException
*/
@Adaptive
ObjectOutput serialize(URL url, OutputStream output) throws IOException;
/**
* Get a deserialization implementation instance
*
* @param url URL address for the remote service
* @param input the underlying input stream
* @return deserializer
* @throws IOException
*/
@Adaptive
ObjectInput deserialize(URL url, InputStream input) throws IOException;
}
| Serialization |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/PreferredInterfaceTypeTest.java | {
"start": 7555,
"end": 7799
} | class ____ {
Test() {}
}
""")
.doTest();
}
@Test
public void returnTypeVoid_doesNotSuggestFix() {
testHelper
.addSourceLines(
"Test.java",
"""
| Test |
java | spring-projects__spring-framework | spring-webflux/src/main/java/org/springframework/web/reactive/function/server/RequestPredicates.java | {
"start": 35848,
"end": 37011
} | class ____ extends RequestModifyingPredicate
implements ChangePathPatternParserVisitor.Target {
private final RequestPredicate delegate;
private final RequestModifyingPredicate delegateModifying;
public NegateRequestPredicate(RequestPredicate delegate) {
Assert.notNull(delegate, "Delegate must not be null");
this.delegate = delegate;
this.delegateModifying = of(delegate);
}
@Override
protected Result testInternal(ServerRequest request) {
Result result = this.delegateModifying.testInternal(request);
return Result.of(!result.value(), result::modifyAttributes);
}
@Override
public void accept(Visitor visitor) {
visitor.startNegate();
this.delegate.accept(visitor);
visitor.endNegate();
}
@Override
public void changeParser(PathPatternParser parser) {
if (this.delegate instanceof ChangePathPatternParserVisitor.Target target) {
target.changeParser(parser);
}
}
@Override
public String toString() {
return "!" + this.delegate.toString();
}
}
/**
* {@link RequestPredicate} where either {@code left} or {@code right} predicates
* may match.
*/
static | NegateRequestPredicate |
java | google__guava | guava-gwt/src-super/com/google/common/cache/super/com/google/common/cache/LocalCache.java | {
"start": 20095,
"end": 20495
} | class ____<T> extends AbstractSet<T> {
final ConcurrentMap<?, ?> map;
AbstractCacheSet(ConcurrentMap<?, ?> map) {
this.map = map;
}
@Override
public int size() {
return map.size();
}
@Override
public boolean isEmpty() {
return map.isEmpty();
}
@Override
public void clear() {
map.clear();
}
}
private final | AbstractCacheSet |
java | spring-projects__spring-boot | module/spring-boot-data-mongodb/src/test/java/org/springframework/boot/data/mongodb/autoconfigure/DataMongoReactiveRepositoriesAutoConfigurationTests.java | {
"start": 2271,
"end": 4685
} | class ____ {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(MongoAutoConfiguration.class, DataMongoAutoConfiguration.class,
MongoReactiveAutoConfiguration.class, DataMongoReactiveAutoConfiguration.class,
DataMongoReactiveRepositoriesAutoConfiguration.class, PropertyPlaceholderAutoConfiguration.class));
@Test
void testDefaultRepositoryConfiguration() {
this.contextRunner.withUserConfiguration(TestConfiguration.class).run((context) -> {
assertThat(context).hasSingleBean(ReactiveCityRepository.class);
assertThat(context).hasSingleBean(MongoClient.class);
MongoMappingContext mappingContext = context.getBean(MongoMappingContext.class);
ManagedTypes managedTypes = (ManagedTypes) ReflectionTestUtils.getField(mappingContext, "managedTypes");
assertThat(managedTypes).isNotNull();
assertThat(managedTypes.toList()).hasSize(1);
});
}
@Test
void testNoRepositoryConfiguration() {
this.contextRunner.withUserConfiguration(EmptyConfiguration.class)
.run((context) -> assertThat(context).hasSingleBean(MongoClient.class));
}
@Test
void doesNotTriggerDefaultRepositoryDetectionIfCustomized() {
this.contextRunner.withUserConfiguration(CustomizedConfiguration.class)
.run((context) -> assertThat(context).doesNotHaveBean(ReactiveCityMongoDbRepository.class));
}
@Test
void autoConfigurationShouldNotKickInEvenIfManualConfigDidNotCreateAnyRepositories() {
this.contextRunner.withUserConfiguration(SortOfInvalidCustomConfiguration.class)
.run((context) -> assertThat(context).doesNotHaveBean(ReactiveCityRepository.class));
}
@Test
void enablingImperativeRepositoriesDisablesReactiveRepositories() {
this.contextRunner.withUserConfiguration(TestConfiguration.class)
.withPropertyValues("spring.data.mongodb.repositories.type=imperative")
.run((context) -> assertThat(context).doesNotHaveBean(ReactiveCityRepository.class));
}
@Test
void enablingNoRepositoriesDisablesReactiveRepositories() {
this.contextRunner.withUserConfiguration(TestConfiguration.class)
.withPropertyValues("spring.data.mongodb.repositories.type=none")
.run((context) -> assertThat(context).doesNotHaveBean(ReactiveCityRepository.class));
}
@Configuration(proxyBeanMethods = false)
@TestAutoConfigurationPackage(City.class)
static | DataMongoReactiveRepositoriesAutoConfigurationTests |
java | spring-projects__spring-boot | module/spring-boot-transaction/src/main/java/org/springframework/boot/transaction/autoconfigure/TransactionManagerCustomizers.java | {
"start": 1127,
"end": 2286
} | class ____ {
private final List<? extends TransactionManagerCustomizer<?>> customizers;
private TransactionManagerCustomizers(List<? extends TransactionManagerCustomizer<?>> customizers) {
this.customizers = customizers;
}
/**
* Customize the given {@code transactionManager}.
* @param transactionManager the transaction manager to customize
*/
@SuppressWarnings("unchecked")
public void customize(TransactionManager transactionManager) {
LambdaSafe.callbacks(TransactionManagerCustomizer.class, this.customizers, transactionManager)
.withLogger(TransactionManagerCustomizers.class)
.invoke((customizer) -> customizer.customize(transactionManager));
}
/**
* Returns a new {@code TransactionManagerCustomizers} instance containing the given
* {@code customizers}.
* @param customizers the customizers
* @return the new instance
*/
public static TransactionManagerCustomizers of(
@Nullable Collection<? extends TransactionManagerCustomizer<?>> customizers) {
return new TransactionManagerCustomizers(
(customizers != null) ? new ArrayList<>(customizers) : Collections.emptyList());
}
}
| TransactionManagerCustomizers |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/io/support/ResourcePatternResolver.java | {
"start": 2517,
"end": 2640
} | interface ____ extends ResourceLoader {
/**
* Pseudo URL prefix for all matching resources from the | ResourcePatternResolver |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/WeekTimes.java | {
"start": 775,
"end": 6196
} | class ____ implements Times {
public static final EnumSet<DayOfWeek> DEFAULT_DAYS = EnumSet.of(DayOfWeek.MONDAY);
public static final DayTimes[] DEFAULT_TIMES = new DayTimes[] { new DayTimes() };
private final EnumSet<DayOfWeek> days;
private final DayTimes[] times;
public WeekTimes() {
this(DEFAULT_DAYS, DEFAULT_TIMES);
}
public WeekTimes(DayOfWeek day, DayTimes times) {
this(day, new DayTimes[] { times });
}
public WeekTimes(DayOfWeek day, DayTimes[] times) {
this(EnumSet.of(day), times);
}
public WeekTimes(EnumSet<DayOfWeek> days, DayTimes[] times) {
this.days = days.isEmpty() ? DEFAULT_DAYS : days;
this.times = times.length == 0 ? DEFAULT_TIMES : times;
}
public EnumSet<DayOfWeek> days() {
return days;
}
public DayTimes[] times() {
return times;
}
public Set<String> crons() {
Set<String> crons = new HashSet<>();
for (DayTimes times : this.times) {
String hrsStr = join(",", times.hour);
String minsStr = join(",", times.minute);
String daysStr = DayOfWeek.cronPart(this.days);
crons.add("0 " + minsStr + " " + hrsStr + " ? * " + daysStr);
}
return crons;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
WeekTimes that = (WeekTimes) o;
return days.equals(that.days)
// we don't care about order
&& newHashSet(times).equals(newHashSet(that.times));
}
@Override
public int hashCode() {
int result = days.hashCode();
result = 31 * result + Arrays.hashCode(times);
return result;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(DAY_FIELD.getPreferredName(), days);
builder.startArray(TIME_FIELD.getPreferredName());
for (DayTimes dayTimes : times) {
dayTimes.toXContent(builder, params);
}
builder.endArray();
return builder.endObject();
}
public static Builder builder() {
return new Builder();
}
public static WeekTimes parse(XContentParser parser, XContentParser.Token token) throws IOException, ElasticsearchParseException {
if (token != XContentParser.Token.START_OBJECT) {
throw new ElasticsearchParseException("could not parse week times. expected an object, but found [{}]", token);
}
EnumSet<DayOfWeek> daysSet = EnumSet.noneOf(DayOfWeek.class);
Set<DayTimes> timesSet = new HashSet<>();
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (DAY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
if (token.isValue()) {
daysSet.add(parseDayValue(parser, token));
} else if (token == XContentParser.Token.START_ARRAY) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
daysSet.add(parseDayValue(parser, token));
}
} else {
throw new ElasticsearchParseException(
"invalid week day value for [{}] field. expected string/number value or an "
+ "array of string/number values, but found [{}]",
currentFieldName,
token
);
}
} else if (TIME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
if (token != XContentParser.Token.START_ARRAY) {
try {
timesSet.add(DayTimes.parse(parser, token));
} catch (ElasticsearchParseException pe) {
throw new ElasticsearchParseException("invalid time value for field [{}] - [{}]", pe, currentFieldName, token);
}
} else {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
try {
timesSet.add(DayTimes.parse(parser, token));
} catch (ElasticsearchParseException pe) {
throw new ElasticsearchParseException("invalid time value for field [{}] - [{}]", pe, currentFieldName, token);
}
}
}
}
}
return new WeekTimes(daysSet, timesSet.toArray(DayTimes[]::new));
}
static DayOfWeek parseDayValue(XContentParser parser, XContentParser.Token token) throws IOException {
if (token == XContentParser.Token.VALUE_STRING) {
return DayOfWeek.resolve(parser.text());
}
if (token == XContentParser.Token.VALUE_NUMBER) {
return DayOfWeek.resolve(parser.intValue());
}
throw new ElasticsearchParseException("invalid weekly day value. expected a string or a number value, but found [" + token + "]");
}
public static | WeekTimes |
java | apache__hadoop | hadoop-tools/hadoop-resourceestimator/src/main/java/org/apache/hadoop/resourceestimator/translator/api/LogParser.java | {
"start": 1425,
"end": 2535
} | interface ____ extends AutoCloseable {
/**
* Initializing the LogParser, including loading solver parameters from
* configuration file.
*
* @param config {@link Configuration} for the LogParser.
* @param skylineStore the {@link HistorySkylineStore} which stores recurring
* pipeline's {@code
* ResourceSkyline}s.
* @throws ResourceEstimatorException if initialization of a
* {@code SingleLineParser} fails.
*/
void init(Configuration config, HistorySkylineStore skylineStore)
throws ResourceEstimatorException;
/**
* Parses each line in the log stream, and adds extracted
* {@code ResourceSkyline}s to the {@code
* SkylineStore}.
*
* @param logs the stream of input logs.
* @throws SkylineStoreException if it fails to addHistory extracted
* {@code ResourceSkyline}s to the {@code SkylineStore}.
* @throws IOException if it fails to read from the {@link InputStream}.
*/
void parseStream(InputStream logs) throws SkylineStoreException, IOException;
@Override void close();
}
| LogParser |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerReportPBImpl.java | {
"start": 1849,
"end": 11007
} | class ____ extends ContainerReport {
ContainerReportProto proto = ContainerReportProto.getDefaultInstance();
ContainerReportProto.Builder builder = null;
boolean viaProto = false;
private ContainerId containerId = null;
private Resource resource = null;
private NodeId nodeId = null;
private Priority priority = null;
public ContainerReportPBImpl() {
builder = ContainerReportProto.newBuilder();
}
public ContainerReportPBImpl(ContainerReportProto proto) {
this.proto = proto;
viaProto = true;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
@Override
public Resource getAllocatedResource() {
if (this.resource != null) {
return this.resource;
}
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasResource()) {
return null;
}
this.resource = convertFromProtoFormat(p.getResource());
return this.resource;
}
@Override
public NodeId getAssignedNode() {
if (this.nodeId != null) {
return this.nodeId;
}
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasNodeId()) {
return null;
}
this.nodeId = convertFromProtoFormat(p.getNodeId());
return this.nodeId;
}
@Override
public ContainerId getContainerId() {
if (this.containerId != null) {
return this.containerId;
}
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasContainerId()) {
return null;
}
this.containerId = convertFromProtoFormat(p.getContainerId());
return this.containerId;
}
@Override
public String getDiagnosticsInfo() {
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasDiagnosticsInfo()) {
return null;
}
return (p.getDiagnosticsInfo());
}
@Override
public ContainerState getContainerState() {
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasContainerState()) {
return null;
}
return convertFromProtoFormat(p.getContainerState());
}
@Override
public long getFinishTime() {
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
return p.getFinishTime();
}
@Override
public String getLogUrl() {
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasLogUrl()) {
return null;
}
return (p.getLogUrl());
}
@Override
public Priority getPriority() {
if (this.priority != null) {
return this.priority;
}
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasPriority()) {
return null;
}
this.priority = convertFromProtoFormat(p.getPriority());
return this.priority;
}
@Override
public long getCreationTime() {
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
return p.getCreationTime();
}
@Override
public void setAllocatedResource(Resource resource) {
maybeInitBuilder();
if (resource == null)
builder.clearResource();
this.resource = resource;
}
@Override
public void setAssignedNode(NodeId nodeId) {
maybeInitBuilder();
if (nodeId == null)
builder.clearNodeId();
this.nodeId = nodeId;
}
@Override
public void setContainerId(ContainerId containerId) {
maybeInitBuilder();
if (containerId == null)
builder.clearContainerId();
this.containerId = containerId;
}
@Override
public void setDiagnosticsInfo(String diagnosticsInfo) {
maybeInitBuilder();
if (diagnosticsInfo == null) {
builder.clearDiagnosticsInfo();
return;
}
builder.setDiagnosticsInfo(diagnosticsInfo);
}
@Override
public void setContainerState(ContainerState containerState) {
maybeInitBuilder();
if (containerState == null) {
builder.clearContainerState();
return;
}
builder.setContainerState(convertToProtoFormat(containerState));
}
@Override
public int getContainerExitStatus() {
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
return p.getContainerExitStatus();
}
@Override
public void setContainerExitStatus(int containerExitStatus) {
maybeInitBuilder();
builder.setContainerExitStatus(containerExitStatus);
}
@Override
public String getExposedPorts() {
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
return p.getExposedPorts();
}
@Override
public void setExposedPorts(Map<String, List<Map<String, String>>> ports) {
maybeInitBuilder();
if (ports == null) {
builder.clearExposedPorts();
return;
}
Gson gson = new Gson();
String strPorts = gson.toJson(ports);
builder.setExposedPorts(strPorts);
}
@Override
public void setFinishTime(long finishTime) {
maybeInitBuilder();
builder.setFinishTime(finishTime);
}
@Override
public void setLogUrl(String logUrl) {
maybeInitBuilder();
if (logUrl == null) {
builder.clearLogUrl();
return;
}
builder.setLogUrl(logUrl);
}
@Override
public void setPriority(Priority priority) {
maybeInitBuilder();
if (priority == null) {
builder.clearPriority();
}
this.priority = priority;
}
@Override
public void setCreationTime(long creationTime) {
maybeInitBuilder();
builder.setCreationTime(creationTime);
}
public ContainerReportProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public int hashCode() {
return this.getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
private void mergeLocalToBuilder() {
if (this.containerId != null
&& !((ContainerIdPBImpl) containerId).getProto().equals(
builder.getContainerId())) {
builder.setContainerId(convertToProtoFormat(this.containerId));
}
if (this.nodeId != null
&& !((NodeIdPBImpl) nodeId).getProto().equals(builder.getNodeId())) {
builder.setNodeId(convertToProtoFormat(this.nodeId));
}
if (this.resource != null) {
builder.setResource(convertToProtoFormat(this.resource));
}
if (this.priority != null
&& !((PriorityPBImpl) this.priority).getProto().equals(
builder.getPriority())) {
builder.setPriority(convertToProtoFormat(this.priority));
}
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = ContainerReportProto.newBuilder(proto);
}
viaProto = false;
}
private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) {
return new ContainerIdPBImpl(p);
}
private NodeIdPBImpl convertFromProtoFormat(NodeIdProto p) {
return new NodeIdPBImpl(p);
}
private ContainerIdProto convertToProtoFormat(ContainerId t) {
return ((ContainerIdPBImpl) t).getProto();
}
private NodeIdProto convertToProtoFormat(NodeId t) {
return ((NodeIdPBImpl) t).getProto();
}
private ResourcePBImpl convertFromProtoFormat(ResourceProto p) {
return new ResourcePBImpl(p);
}
private ResourceProto convertToProtoFormat(Resource t) {
return ProtoUtils.convertToProtoFormat(t);
}
private PriorityPBImpl convertFromProtoFormat(PriorityProto p) {
return new PriorityPBImpl(p);
}
private PriorityProto convertToProtoFormat(Priority p) {
return ((PriorityPBImpl) p).getProto();
}
private ContainerStateProto
convertToProtoFormat(ContainerState containerState) {
return ProtoUtils.convertToProtoFormat(containerState);
}
private ContainerState convertFromProtoFormat(
ContainerStateProto containerState) {
return ProtoUtils.convertFromProtoFormat(containerState);
}
@Override
public String getNodeHttpAddress() {
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasNodeHttpAddress()) {
return null;
}
return (p.getNodeHttpAddress());
}
@Override
public void setNodeHttpAddress(String nodeHttpAddress) {
maybeInitBuilder();
if (nodeHttpAddress == null) {
builder.clearNodeHttpAddress();
return;
}
builder.setNodeHttpAddress(nodeHttpAddress);
}
@Override
public ExecutionType getExecutionType() {
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasExecutionType()) {
return ExecutionType.GUARANTEED; // default value
}
return ProtoUtils.convertFromProtoFormat(p.getExecutionType());
}
@Override
public void setExecutionType(ExecutionType executionType) {
maybeInitBuilder();
if (executionType == null) {
builder.clearExecutionType();
return;
}
builder.setExecutionType(ProtoUtils.convertToProtoFormat(executionType));
}
}
| ContainerReportPBImpl |
java | spring-projects__spring-boot | build-plugin/spring-boot-maven-plugin/src/test/java/org/springframework/boot/maven/SystemPropertyFormatterTests.java | {
"start": 816,
"end": 1451
} | class ____ {
@Test
void parseEmpty() {
assertThat(SystemPropertyFormatter.format(null, null)).isEmpty();
}
@Test
void parseOnlyKey() {
assertThat(SystemPropertyFormatter.format("key1", null)).isEqualTo("-Dkey1");
}
@Test
void parseKeyWithValue() {
assertThat(SystemPropertyFormatter.format("key1", "value1")).isEqualTo("-Dkey1=value1");
}
@Test
void parseKeyWithEmptyValue() {
assertThat(SystemPropertyFormatter.format("key1", "")).isEqualTo("-Dkey1");
}
@Test
void parseKeyWithOnlySpaces() {
assertThat(SystemPropertyFormatter.format("key1", " ")).isEqualTo("-Dkey1= ");
}
}
| SystemPropertyFormatterTests |
java | google__dagger | hilt-compiler/main/java/dagger/hilt/processor/internal/ClassNames.java | {
"start": 7991,
"end": 12683
} | class ____ out when we factor out the android portion
public static final ClassName APPLICATION = get("android.app", "Application");
public static final ClassName MULTI_DEX_APPLICATION =
get("androidx.multidex", "MultiDexApplication");
public static final ClassName ANDROID_ENTRY_POINT =
get("dagger.hilt.android", "AndroidEntryPoint");
public static final ClassName HILT_ANDROID_APP =
get("dagger.hilt.android", "HiltAndroidApp");
public static final ClassName CONTEXT = get("android.content", "Context");
public static final ClassName APPLICATION_PROVIDER =
get("androidx.test.core.app", "ApplicationProvider");
public static final ClassName COMPONENT_SUPPLIER =
get("dagger.hilt.android.internal.managers", "ComponentSupplier");
public static final ClassName APPLICATION_CONTEXT_MODULE =
get("dagger.hilt.android.internal.modules", "ApplicationContextModule");
public static final ClassName DEFAULT_ROOT =
ClassName.get("dagger.hilt.android.internal.testing.root", "Default");
public static final ClassName INTERNAL_TEST_ROOT =
get("dagger.hilt.android.internal.testing", "InternalTestRoot");
public static final ClassName TEST_INJECTOR =
get("dagger.hilt.android.internal.testing", "TestInjector");
public static final ClassName TEST_APPLICATION_COMPONENT_MANAGER =
get("dagger.hilt.android.internal.testing", "TestApplicationComponentManager");
public static final ClassName TEST_APPLICATION_COMPONENT_MANAGER_HOLDER =
get("dagger.hilt.android.internal.testing", "TestApplicationComponentManagerHolder");
public static final ClassName TEST_INSTANCE_HOLDER =
get("dagger.hilt.android.internal.testing", "TestInstanceHolder");
public static final ClassName HILT_ANDROID_TEST =
get("dagger.hilt.android.testing", "HiltAndroidTest");
public static final ClassName SKIP_TEST_INJECTION =
get("dagger.hilt.android.testing", "SkipTestInjection");
public static final ClassName CUSTOM_TEST_APPLICATION =
get("dagger.hilt.android.testing", "CustomTestApplication");
public static final ClassName ON_COMPONENT_READY_RUNNER =
get("dagger.hilt.android.testing", "OnComponentReadyRunner");
public static final ClassName ON_COMPONENT_READY_RUNNER_HOLDER =
get("dagger.hilt.android.testing", "OnComponentReadyRunner", "OnComponentReadyRunnerHolder");
public static final ClassName ANDROID_BIND_VALUE =
get("dagger.hilt.android.testing", "BindValue");
public static final ClassName ANDROID_BIND_ELEMENTS_INTO_SET =
get("dagger.hilt.android.testing", "BindElementsIntoSet");
public static final ClassName ANDROID_BIND_VALUE_INTO_MAP =
get("dagger.hilt.android.testing", "BindValueIntoMap");
public static final ClassName ANDROID_BIND_VALUE_INTO_SET =
get("dagger.hilt.android.testing", "BindValueIntoSet");
public static final ClassName APPLICATION_CONTEXT =
get("dagger.hilt.android.qualifiers", "ApplicationContext");
public static final ClassName TEST_SINGLETON_COMPONENT =
get("dagger.hilt.internal", "TestSingletonComponent");
public static final ClassName TEST_COMPONENT_DATA =
get("dagger.hilt.android.internal.testing", "TestComponentData");
public static final ClassName TEST_COMPONENT_DATA_SUPPLIER =
get("dagger.hilt.android.internal.testing", "TestComponentDataSupplier");
public static final ClassName CLASS = get("java.lang", "Class");
public static final ClassName LIST = get("java.util", "List");
public static final ClassName SET = get("java.util", "Set");
public static final ClassName MAP = get("java.util", "Map");
public static final ClassName HASH_MAP = get("java.util", "HashMap");
public static final ClassName HASH_SET = get("java.util", "HashSet");
public static final ClassName COLLECTIONS = get("java.util", "Collections");
public static final ClassName ARRAYS = get("java.util", "Arrays");
// Standard components
public static final ClassName SINGLETON_COMPONENT =
get("dagger.hilt.components", "SingletonComponent");
public static final ClassName ACTIVITY_COMPONENT =
get("dagger.hilt.android.components", "ActivityComponent");
public static final ClassName PRECONDITIONS = get("dagger.hilt.internal", "Preconditions");
public static final ClassName OBJECT = get("java.lang", "Object");
public static final ClassName SUPPRESS_WARNINGS = get("java.lang", "SuppressWarnings");
public static final ClassName KOTLIN_SUPPRESS = get("kotlin", "Suppress");
public static final ClassName ON_RECEIVE_BYTECODE_INJECTION_MARKER =
get("dagger.hilt.android.internal", "OnReceiveBytecodeInjectionMarker");
// Kotlin-specific | names |
java | grpc__grpc-java | binder/src/main/java/io/grpc/binder/internal/MetadataHelper.java | {
"start": 7599,
"end": 8517
} | class ____<P extends Parcelable>
implements Metadata.BinaryStreamMarshaller<P> {
@Nullable private final Parcelable.Creator<P> creator;
private final boolean immutableType;
public ParcelableMetadataMarshaller(
@Nullable Parcelable.Creator<P> creator, boolean immutableType) {
this.creator = creator;
this.immutableType = immutableType;
}
@Override
public InputStream toStream(P value) {
return new ParcelableInputStream<>(creator, value, immutableType);
}
@Override
@SuppressWarnings("unchecked")
public P parseStream(InputStream stream) {
if (stream instanceof ParcelableInputStream) {
return ((ParcelableInputStream<P>) stream).getParcelable();
} else {
throw new UnsupportedOperationException(
"Can't unmarshall a parcelable from a regular byte stream");
}
}
}
}
| ParcelableMetadataMarshaller |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java | {
"start": 319,
"end": 2882
} | class ____ {
public static final String ELASTIC_NAME = "elastic";
public static final String ELASTIC_ROLE = "superuser";
public static final String DEPRECATED_KIBANA_NAME = "kibana";
public static final String KIBANA_NAME = "kibana_system";
public static final String KIBANA_ROLE = "kibana_system";
public static final String SYSTEM_NAME = "_system";
public static final String SYSTEM_ROLE = "_system";
public static final String XPACK_SECURITY_NAME = "_xpack_security";
public static final String XPACK_SECURITY_ROLE = "_xpack_security";
public static final String DATA_STREAM_LIFECYCLE_NAME = "_data_stream_lifecycle";
public static final String DATA_STREAM_LIFECYCLE_ROLE = "_data_stream_lifecycle";
public static final String SECURITY_PROFILE_NAME = "_security_profile";
public static final String SECURITY_PROFILE_ROLE = "_security_profile";
public static final String XPACK_NAME = "_xpack";
public static final String XPACK_ROLE = "_xpack";
public static final String LOGSTASH_NAME = "logstash_system";
public static final String LOGSTASH_ROLE = "logstash_system";
public static final String BEATS_NAME = "beats_system";
public static final String BEATS_ROLE = "beats_system";
public static final String APM_NAME = "apm_system";
public static final String APM_ROLE = "apm_system";
public static final String ASYNC_SEARCH_NAME = "_async_search";
public static final String ASYNC_SEARCH_ROLE = "_async_search";
public static final String STORAGE_USER_NAME = "_storage";
public static final String STORAGE_ROLE_NAME = "_storage";
public static final String SYNONYMS_USER_NAME = "_synonyms";
public static final String SYNONYMS_ROLE_NAME = "_synonyms";
public static final String CROSS_PROJECT_SEARCH_USER_NAME = "_cross_project_search";
public static final String CROSS_PROJECT_SEARCH_ROLE_NAME = "_cross_project_search";
public static final String REMOTE_MONITORING_NAME = "remote_monitoring_user";
public static final String REMOTE_MONITORING_COLLECTION_ROLE = "remote_monitoring_collector";
public static final String REMOTE_MONITORING_INDEXING_ROLE = "remote_monitoring_agent";
public static final String LAZY_ROLLOVER_NAME = "_lazy_rollover";
public static final String LAZY_ROLLOVER_ROLE = "_lazy_rollover";
public static final String REINDEX_DATA_STREAM_NAME = "_reindex_data_stream";
public static final String REINDEX_DATA_STREAM_ROLE = "_reindex_data_stream";
private UsernamesField() {}
}
| UsernamesField |
java | elastic__elasticsearch | x-pack/plugin/mapper-exponential-histogram/src/yamlRestTest/java/org/elasticsearch/xpack/exponentialhistogram/ExponentialHistogramYamlTestSuiteIT.java | {
"start": 728,
"end": 1731
} | class ____ extends ESClientYamlSuiteTestCase {
public ExponentialHistogramYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
@Before
public void setup() {
// TODO: remove when FeatureFlag is removed and add minimum required version to yaml spec
assumeTrue("Only when exponential_histogram feature flag is enabled", Build.current().isSnapshot());
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
@ClassRule
public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
.module("x-pack-aggregate-metric")
.module("x-pack-analytics")
.module("exponential-histogram")
.module("data-streams")
.build();
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
}
| ExponentialHistogramYamlTestSuiteIT |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/deviceframework/FakeTestDevicePlugin5.java | {
"start": 1412,
"end": 2013
} | class ____ implements DevicePlugin {
@Override
public DeviceRegisterRequest getRegisterRequestInfo() throws Exception {
return DeviceRegisterRequest.Builder.newInstance()
.setResourceName("cmp.com/cmp").build();
}
@Override
public Set<Device> getDevices() throws Exception {
return null;
}
@Override
public DeviceRuntimeSpec onDevicesAllocated(Set<Device> allocatedDevices,
YarnRuntimeType yarnRuntime) throws Exception {
return null;
}
@Override
public void onDevicesReleased(Set<Device> releasedDevices) throws Exception {
}
}
| FakeTestDevicePlugin5 |
java | quarkusio__quarkus | extensions/smallrye-fault-tolerance/deployment/src/test/java/io/quarkus/smallrye/faulttolerance/test/retry/backoff/RetryOnClassBackoffOnMethodTest.java | {
"start": 439,
"end": 1019
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(RetryOnClassBackoffOnMethodService.class))
.assertException(e -> {
assertEquals(DefinitionException.class, e.getClass());
assertTrue(e.getMessage().contains("Backoff annotation"));
assertTrue(e.getMessage().contains("@Retry is missing"));
});
@Test
public void test() {
fail();
}
}
| RetryOnClassBackoffOnMethodTest |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/client/HttpClientErrorException.java | {
"start": 8520,
"end": 9164
} | class ____ extends HttpClientErrorException {
private MethodNotAllowed(String statusText, HttpHeaders headers, byte @Nullable [] body, @Nullable Charset charset) {
super(HttpStatus.METHOD_NOT_ALLOWED, statusText, headers, body, charset);
}
private MethodNotAllowed(String message, String statusText,
HttpHeaders headers, byte @Nullable [] body, @Nullable Charset charset) {
super(message, HttpStatus.METHOD_NOT_ALLOWED, statusText, headers, body, charset);
}
}
/**
* {@link HttpClientErrorException} for status HTTP 406 Not Acceptable.
* @since 5.1
*/
@SuppressWarnings("serial")
public static final | MethodNotAllowed |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/intarrays/IntArrays_assertContainsSequence_Test.java | {
"start": 1712,
"end": 7881
} | class ____ extends IntArraysBaseTest {
@Override
protected void initActualArray() {
actual = arrayOf(6, 8, 10, 12);
}
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertContainsSequence(someInfo(), null, arrayOf(8)))
.withMessage(actualIsNull());
}
@Test
void should_throw_error_if_sequence_is_null() {
assertThatNullPointerException().isThrownBy(() -> arrays.assertContainsSequence(someInfo(), actual, null))
.withMessage(valuesToLookForIsNull());
}
@Test
void should_pass_if_actual_and_given_values_are_empty() {
actual = emptyArray();
arrays.assertContainsSequence(someInfo(), actual, emptyArray());
}
@Test
void should_fail_if_array_of_values_to_look_for_is_empty_and_actual_is_not() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertContainsSequence(someInfo(), actual,
emptyArray()));
}
@Test
void should_fail_if_sequence_is_bigger_than_actual() {
AssertionInfo info = someInfo();
int[] sequence = { 6, 8, 10, 12, 20, 22 };
Throwable error = catchThrowable(() -> arrays.assertContainsSequence(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldContainSequence(actual, sequence));
}
@Test
void should_fail_if_actual_does_not_contain_whole_sequence() {
AssertionInfo info = someInfo();
int[] sequence = { 6, 20 };
Throwable error = catchThrowable(() -> arrays.assertContainsSequence(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldContainSequence(actual, sequence));
}
@Test
void should_fail_if_actual_contains_first_elements_of_sequence() {
AssertionInfo info = someInfo();
int[] sequence = { 6, 20, 22 };
Throwable error = catchThrowable(() -> arrays.assertContainsSequence(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldContainSequence(actual, sequence));
}
@Test
void should_pass_if_actual_contains_sequence() {
arrays.assertContainsSequence(someInfo(), actual, arrayOf(6, 8));
}
@Test
void should_pass_if_actual_and_sequence_are_equal() {
arrays.assertContainsSequence(someInfo(), actual, arrayOf(6, 8, 10, 12));
}
@Test
void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContainsSequence(someInfo(),
null,
arrayOf(-8)))
.withMessage(actualIsNull());
}
@Test
void should_throw_error_if_sequence_is_null_whatever_custom_comparison_strategy_is() {
assertThatNullPointerException().isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContainsSequence(someInfo(),
actual,
null))
.withMessage(valuesToLookForIsNull());
}
@Test
void should_fail_if_array_of_values_to_look_for_is_empty_and_actual_is_not_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContainsSequence(someInfo(),
actual,
emptyArray()));
}
@Test
void should_fail_if_sequence_is_bigger_than_actual_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
int[] sequence = { 6, -8, 10, 12, 20, 22 };
Throwable error = catchThrowable(() -> arraysWithCustomComparisonStrategy.assertContainsSequence(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldContainSequence(actual, sequence, absValueComparisonStrategy));
}
@Test
void should_fail_if_actual_does_not_contain_whole_sequence_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
int[] sequence = { 6, 20 };
Throwable error = catchThrowable(() -> arraysWithCustomComparisonStrategy.assertContainsSequence(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldContainSequence(actual, sequence, absValueComparisonStrategy));
}
@Test
void should_fail_if_actual_contains_first_elements_of_sequence_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
int[] sequence = { 6, 20, 22 };
Throwable error = catchThrowable(() -> arraysWithCustomComparisonStrategy.assertContainsSequence(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldContainSequence(actual, sequence, absValueComparisonStrategy));
}
@Test
void should_pass_if_actual_contains_sequence_according_to_custom_comparison_strategy() {
arraysWithCustomComparisonStrategy.assertContainsSequence(someInfo(), actual, arrayOf(6, -8));
}
@Test
void should_pass_if_actual_and_sequence_are_equal_according_to_custom_comparison_strategy() {
arraysWithCustomComparisonStrategy.assertContainsSequence(someInfo(), actual, arrayOf(6, -8, 10, 12));
}
}
| IntArrays_assertContainsSequence_Test |
java | spring-projects__spring-boot | module/spring-boot-webmvc/src/test/java/org/springframework/boot/webmvc/autoconfigure/WebMvcAutoConfigurationTests.java | {
"start": 59363,
"end": 59780
} | class ____ {
private int handlerAdapters;
@Bean
WebMvcRegistrations webMvcRegistrationsHandlerAdapter() {
return new WebMvcRegistrations() {
@Override
public RequestMappingHandlerAdapter getRequestMappingHandlerAdapter() {
CustomRequestMappingHandlerAdapter.this.handlerAdapters++;
return new MyRequestMappingHandlerAdapter();
}
};
}
}
static | CustomRequestMappingHandlerAdapter |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/concurrent/UncheckedFuture.java | {
"start": 1443,
"end": 3647
} | interface ____<V> extends Future<V> {
/**
* Maps the given instances as unchecked.
*
* @param <T> The result type returned by the Futures' {@link #get()} and {@link #get(long, TimeUnit)} methods.
* @param futures The Futures to uncheck.
* @return a new stream.
*/
static <T> Stream<UncheckedFuture<T>> map(final Collection<Future<T>> futures) {
return futures.stream().map(UncheckedFuture::on);
}
/**
* Maps the given instances as unchecked.
*
* @param <T> The result type returned by the Futures' {@link #get()} and {@link #get(long, TimeUnit)} methods.
* @param futures The Futures to uncheck.
* @return a new collection.
*/
static <T> Collection<UncheckedFuture<T>> on(final Collection<Future<T>> futures) {
return map(futures).collect(Collectors.toList());
}
/**
* Creates a new instance on the given Future.
*
* @param <T> The result type returned by this Future's {@link #get()} and {@link #get(long, TimeUnit)} methods.
* @param future The Future to uncheck.
* @return a new instance.
*/
static <T> UncheckedFuture<T> on(final Future<T> future) {
return new UncheckedFutureImpl<>(future);
}
/**
* Gets per {@link Future#get()} but rethrows checked exceptions as unchecked.
* <p>
* The default mapping from checked to unchecked is:
* </p>
* <ul>
* <li>{@link InterruptedException} \u2192 {@link UncheckedInterruptedException}</li>
* <li>{@link ExecutionException} \u2192 {@link UncheckedExecutionException}</li>
* </ul>
*/
@Override
V get();
/**
* Gets per {@link Future#get(long, TimeUnit)} but rethrows checked exceptions as unchecked.
* <p>
* The default mapping from checked to unchecked is:
* </p>
* <ul>
* <li>{@link InterruptedException} \u2192 {@link UncheckedInterruptedException}</li>
* <li>{@link ExecutionException} \u2192 {@link UncheckedExecutionException}</li>
* <li>{@link TimeoutException} \u2192 {@link UncheckedTimeoutException}</li>
* </ul>
*/
@Override
V get(long timeout, TimeUnit unit);
}
| UncheckedFuture |
java | spring-projects__spring-boot | module/spring-boot-web-server/src/test/java/org/springframework/boot/autoconfigure/web/ServerPropertiesTests.java | {
"start": 1863,
"end": 5190
} | class ____ {
private final ServerProperties properties = new ServerProperties();
@Test
void testAddressBinding() throws Exception {
bind("server.address", "127.0.0.1");
assertThat(this.properties.getAddress()).isEqualTo(InetAddress.getByName("127.0.0.1"));
}
@Test
void testPortBinding() {
bind("server.port", "9000");
Integer port = this.properties.getPort();
assertThat(port).isNotNull();
assertThat(port.intValue()).isEqualTo(9000);
}
@Test
void testServerHeaderDefault() {
assertThat(this.properties.getServerHeader()).isNull();
}
@Test
void testServerHeader() {
bind("server.server-header", "Custom Server");
assertThat(this.properties.getServerHeader()).isEqualTo("Custom Server");
}
@Test
void testTrailingSlashOfContextPathIsRemoved() {
bind("server.servlet.context-path", "/foo/");
assertThat(this.properties.getServlet().getContextPath()).isEqualTo("/foo");
}
@Test
void testSlashOfContextPathIsDefaultValue() {
bind("server.servlet.context-path", "/");
assertThat(this.properties.getServlet().getContextPath()).isEmpty();
}
@Test
void testContextPathWithLeadingWhitespace() {
bind("server.servlet.context-path", " /assets");
assertThat(this.properties.getServlet().getContextPath()).isEqualTo("/assets");
}
@Test
void testContextPathWithTrailingWhitespace() {
bind("server.servlet.context-path", "/assets/copy/ ");
assertThat(this.properties.getServlet().getContextPath()).isEqualTo("/assets/copy");
}
@Test
void testContextPathWithLeadingAndTrailingWhitespace() {
bind("server.servlet.context-path", " /assets ");
assertThat(this.properties.getServlet().getContextPath()).isEqualTo("/assets");
}
@Test
void testContextPathWithLeadingAndTrailingWhitespaceAndContextWithSpace() {
bind("server.servlet.context-path", " /assets /copy/ ");
assertThat(this.properties.getServlet().getContextPath()).isEqualTo("/assets /copy");
}
@Test
void testDefaultMimeMapping() {
assertThat(this.properties.getMimeMappings()).isEmpty();
}
@Test
void testCustomizedMimeMapping() {
MimeMappings expectedMappings = new MimeMappings();
expectedMappings.add("mjs", "text/javascript");
bind("server.mime-mappings.mjs", "text/javascript");
assertThat(this.properties.getMimeMappings())
.containsExactly(expectedMappings.getAll().toArray(new Mapping[0]));
}
@Test
void testCustomizeMaxHttpRequestHeaderSize() {
bind("server.max-http-request-header-size", "1MB");
assertThat(this.properties.getMaxHttpRequestHeaderSize()).isEqualTo(DataSize.ofMegabytes(1));
}
@Test
void testCustomizeMaxHttpRequestHeaderSizeUseBytesByDefault() {
bind("server.max-http-request-header-size", "1024");
assertThat(this.properties.getMaxHttpRequestHeaderSize()).isEqualTo(DataSize.ofKilobytes(1));
}
@Test
void defaultMaxHttpRequestHeaderSizeMatchesTomcatsDefault() {
assertThat(this.properties.getMaxHttpRequestHeaderSize().toBytes())
.isEqualTo(new Http11Nio2Protocol().getMaxHttpRequestHeaderSize());
}
private void bind(String name, String value) {
bind(Collections.singletonMap(name, value));
}
private void bind(Map<String, String> map) {
ConfigurationPropertySource source = new MapConfigurationPropertySource(map);
new Binder(source).bind("server", Bindable.ofInstance(this.properties));
}
}
| ServerPropertiesTests |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamAutoShardingEventTests.java | {
"start": 802,
"end": 2702
} | class ____ extends SimpleDiffableSerializationTestCase<DataStreamAutoShardingEvent> {
@Override
protected DataStreamAutoShardingEvent doParseInstance(XContentParser parser) throws IOException {
return DataStreamAutoShardingEvent.fromXContent(parser);
}
@Override
protected Writeable.Reader<DataStreamAutoShardingEvent> instanceReader() {
return DataStreamAutoShardingEvent::new;
}
@Override
protected DataStreamAutoShardingEvent createTestInstance() {
return DataStreamAutoShardingEventTests.randomInstance();
}
@Override
protected DataStreamAutoShardingEvent mutateInstance(DataStreamAutoShardingEvent instance) {
String triggerIndex = instance.triggerIndexName();
long timestamp = instance.timestamp();
int targetNumberOfShards = instance.targetNumberOfShards();
switch (randomInt(2)) {
case 0 -> triggerIndex = randomValueOtherThan(triggerIndex, () -> randomAlphaOfLengthBetween(10, 50));
case 1 -> timestamp = randomValueOtherThan(timestamp, ESTestCase::randomNonNegativeLong);
case 2 -> targetNumberOfShards = randomValueOtherThan(targetNumberOfShards, ESTestCase::randomNonNegativeInt);
}
return new DataStreamAutoShardingEvent(triggerIndex, targetNumberOfShards, timestamp);
}
static DataStreamAutoShardingEvent randomInstance() {
return new DataStreamAutoShardingEvent(randomAlphaOfLengthBetween(10, 40), randomNonNegativeInt(), randomNonNegativeLong());
}
@Override
protected DataStreamAutoShardingEvent makeTestChanges(DataStreamAutoShardingEvent testInstance) {
return mutateInstance(testInstance);
}
@Override
protected Writeable.Reader<Diff<DataStreamAutoShardingEvent>> diffReader() {
return DataStreamAutoShardingEvent::readDiffFrom;
}
}
| DataStreamAutoShardingEventTests |
java | spring-projects__spring-boot | core/spring-boot-docker-compose/src/test/java/org/springframework/boot/docker/compose/core/ProcessRunnerTests.java | {
"start": 1098,
"end": 2054
} | class ____ {
private ProcessRunner processRunner = new ProcessRunner();
@Test
void run() {
String out = this.processRunner.run("docker", "--version");
assertThat(out).isNotEmpty();
}
@Test
void runWhenHasOutputConsumer() {
StringBuilder output = new StringBuilder();
this.processRunner.run(output::append, "docker", "--version");
assertThat(output.toString()).isNotEmpty();
}
@Test
void runWhenProcessDoesNotStart() {
assertThatExceptionOfType(ProcessStartException.class)
.isThrownBy(() -> this.processRunner.run("iverymuchdontexist", "--version"));
}
@Test
void runWhenProcessReturnsNonZeroExitCode() {
assertThatExceptionOfType(ProcessExitException.class)
.isThrownBy(() -> this.processRunner.run("docker", "-thisdoesntwork"))
.satisfies((ex) -> {
assertThat(ex.getExitCode()).isGreaterThan(0);
assertThat(ex.getStdOut()).isEmpty();
assertThat(ex.getStdErr()).isNotEmpty();
});
}
}
| ProcessRunnerTests |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestTests.java | {
"start": 1030,
"end": 2743
} | class ____ extends ESTestCase {
public void testToAndFromXContent() throws IOException {
IndicesAliasesRequest indicesAliasesRequest = createTestInstance();
XContentType xContentType = randomFrom(XContentType.values());
BytesReference shuffled = toShuffledXContent(indicesAliasesRequest, xContentType, ToXContent.EMPTY_PARAMS, true, "filter");
IndicesAliasesRequest parsedIndicesAliasesRequest;
try (XContentParser parser = createParser(xContentType.xContent(), shuffled)) {
parsedIndicesAliasesRequest = IndicesAliasesRequest.fromXContent(
() -> new IndicesAliasesRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT),
parser
);
assertNull(parser.nextToken());
}
for (int i = 0; i < parsedIndicesAliasesRequest.getAliasActions().size(); i++) {
AliasActions expectedAction = indicesAliasesRequest.getAliasActions().get(i);
AliasActions actualAction = parsedIndicesAliasesRequest.getAliasActions().get(i);
assertThat(actualAction, equalTo(expectedAction));
}
}
private IndicesAliasesRequest createTestInstance() {
int numItems = randomIntBetween(0, 32);
IndicesAliasesRequest request = new IndicesAliasesRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT);
if (randomBoolean()) {
request.ackTimeout(randomTimeValue());
}
if (randomBoolean()) {
request.masterNodeTimeout(randomTimeValue());
}
for (int i = 0; i < numItems; i++) {
request.addAliasAction(randomAliasAction());
}
return request;
}
}
| IndicesAliasesRequestTests |
java | spring-projects__spring-security | oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/ReactiveOAuth2AuthorizedClientProviderBuilder.java | {
"start": 9811,
"end": 12238
} | class ____ implements Builder {
private ReactiveOAuth2AccessTokenResponseClient<OAuth2RefreshTokenGrantRequest> accessTokenResponseClient;
private Duration clockSkew;
private Clock clock;
private RefreshTokenGrantBuilder() {
}
/**
* Sets the client used when requesting an access token credential at the Token
* Endpoint.
* @param accessTokenResponseClient the client used when requesting an access
* token credential at the Token Endpoint
* @return the {@link RefreshTokenGrantBuilder}
*/
public RefreshTokenGrantBuilder accessTokenResponseClient(
ReactiveOAuth2AccessTokenResponseClient<OAuth2RefreshTokenGrantRequest> accessTokenResponseClient) {
this.accessTokenResponseClient = accessTokenResponseClient;
return this;
}
/**
* Sets the maximum acceptable clock skew, which is used when checking the access
* token expiry. An access token is considered expired if
* {@code OAuth2Token#getExpiresAt() - clockSkew} is before the current time
* {@code clock#instant()}.
* @param clockSkew the maximum acceptable clock skew
* @return the {@link RefreshTokenGrantBuilder}
* @see RefreshTokenReactiveOAuth2AuthorizedClientProvider#setClockSkew(Duration)
*/
public RefreshTokenGrantBuilder clockSkew(Duration clockSkew) {
this.clockSkew = clockSkew;
return this;
}
/**
* Sets the {@link Clock} used in {@link Instant#now(Clock)} when checking the
* access token expiry.
* @param clock the clock
* @return the {@link RefreshTokenGrantBuilder}
*/
public RefreshTokenGrantBuilder clock(Clock clock) {
this.clock = clock;
return this;
}
/**
* Builds an instance of
* {@link RefreshTokenReactiveOAuth2AuthorizedClientProvider}.
* @return the {@link RefreshTokenReactiveOAuth2AuthorizedClientProvider}
*/
@Override
public ReactiveOAuth2AuthorizedClientProvider build() {
RefreshTokenReactiveOAuth2AuthorizedClientProvider authorizedClientProvider = new RefreshTokenReactiveOAuth2AuthorizedClientProvider();
if (this.accessTokenResponseClient != null) {
authorizedClientProvider.setAccessTokenResponseClient(this.accessTokenResponseClient);
}
if (this.clockSkew != null) {
authorizedClientProvider.setClockSkew(this.clockSkew);
}
if (this.clock != null) {
authorizedClientProvider.setClock(this.clock);
}
return authorizedClientProvider;
}
}
}
| RefreshTokenGrantBuilder |
java | spring-projects__spring-framework | spring-jms/src/main/java/org/springframework/jms/IllegalStateException.java | {
"start": 848,
"end": 995
} | class ____ extends JmsException {
public IllegalStateException(jakarta.jms.IllegalStateException cause) {
super(cause);
}
}
| IllegalStateException |
java | apache__hadoop | hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlContractRenameLive.java | {
"start": 1088,
"end": 1304
} | class ____ extends AbstractContractRenameTest {
@Override
protected AbstractFSContract createContract(Configuration configuration) {
return new AdlStorageContract(configuration);
}
}
| TestAdlContractRenameLive |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/DialectFeatureChecks.java | {
"start": 33746,
"end": 33924
} | class ____ implements DialectFeatureCheck {
public boolean apply(Dialect dialect) {
return definesFunction( dialect, "array_concat" );
}
}
public static | SupportsArrayConcat |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/bigquery/ast/BigQueryCreateModelStatement.java | {
"start": 434,
"end": 2186
} | class ____ extends SQLStatementImpl implements BigQueryObject {
private boolean ifNotExists;
private boolean replace;
private SQLName name;
private final List<SQLAssignItem> options = new ArrayList<>();
private SQLStatement trainingData;
private SQLStatement customHoliday;
public SQLName getName() {
return name;
}
public void setName(SQLName x) {
if (x != null) {
x.setParent(this);
}
this.name = x;
}
public List<SQLAssignItem> getOptions() {
return options;
}
public boolean isIfNotExists() {
return ifNotExists;
}
public void setIfNotExists(boolean ifNotExists) {
this.ifNotExists = ifNotExists;
}
public boolean isReplace() {
return replace;
}
public void setReplace(boolean replace) {
this.replace = replace;
}
public SQLStatement getTrainingData() {
return trainingData;
}
public void setTrainingData(SQLStatement x) {
if (x != null) {
x.setParent(this);
}
this.trainingData = x;
}
public SQLStatement getCustomHoliday() {
return customHoliday;
}
public void setCustomHoliday(SQLStatement x) {
if (x != null) {
x.setParent(this);
}
this.customHoliday = x;
}
@Override
public void accept0(SQLASTVisitor v) {
if (v instanceof BigQueryVisitor) {
accept0((BigQueryVisitor) v);
} else {
super.accept0(v);
}
}
@Override
public void accept0(BigQueryVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, name);
}
}
}
| BigQueryCreateModelStatement |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/runtime/operators/lifecycle/event/TestEventQueueImpl.java | {
"start": 1335,
"end": 2466
} | class ____ implements TestEventQueue {
private final List<TestEvent> events = new CopyOnWriteArrayList<>();
private final List<Consumer<TestEvent>> listeners = new CopyOnWriteArrayList<>();
public void add(TestEvent e) {
events.add(e);
listeners.forEach(l -> l.accept(e));
}
@Override
public void withHandler(TestEventHandler handler) throws Exception {
BlockingQueue<TestEvent> queue = new LinkedBlockingQueue<>();
Consumer<TestEvent> listener = queue::add;
addListener(listener);
try {
for (TestEventNextAction nextAction = CONTINUE; nextAction == CONTINUE; ) {
nextAction = handler.handle(queue.take());
}
} finally {
removeListener(listener);
}
}
public void removeListener(Consumer<TestEvent> listener) {
listeners.remove(listener);
}
public void addListener(Consumer<TestEvent> listener) {
listeners.add(listener);
}
@Override
public List<TestEvent> getAll() {
return Collections.unmodifiableList(events);
}
}
| TestEventQueueImpl |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/columnar/vector/writable/WritableIntVector.java | {
"start": 1038,
"end": 1988
} | interface ____ extends WritableColumnVector, IntColumnVector {
/** Set int at rowId with the provided value. */
void setInt(int rowId, int value);
/**
* Set ints from binary, need use UNSAFE to copy.
*
* @param rowId set start rowId.
* @param count count for int, so the bytes size is count * 4.
* @param src source binary.
* @param srcIndex source binary index, it is the index for byte index.
*/
void setIntsFromBinary(int rowId, int count, byte[] src, int srcIndex);
/**
* Sets value to [rowId, rowId + count) by the value, this is data that repeats continuously.
*/
void setInts(int rowId, int count, int value);
/** Sets values from [src[srcIndex], src[srcIndex + count]) to [rowId, rowId + count). */
void setInts(int rowId, int count, int[] src, int srcIndex);
/** Fill the column vector with the provided value. */
void fill(int value);
}
| WritableIntVector |
java | quarkusio__quarkus | independent-projects/tools/registry-client/src/main/java/io/quarkus/registry/catalog/PlatformStreamCoords.java | {
"start": 45,
"end": 973
} | class ____ {
final String platformKey;
final String streamId;
public static PlatformStreamCoords fromString(String stream) {
final int colon = stream.indexOf(':');
String platformKey = colon <= 0 ? null : stream.substring(0, colon);
String streamId = colon < 0 ? stream : stream.substring(colon + 1);
return new PlatformStreamCoords(platformKey, streamId);
}
public PlatformStreamCoords(String platformKey, String streamId) {
this.platformKey = platformKey;
this.streamId = streamId;
}
public String getPlatformKey() {
return platformKey;
}
public String getStreamId() {
return streamId;
}
@Override
public String toString() {
return "StreamCoords{" +
"platformKey='" + platformKey + '\'' +
", streamId='" + streamId + '\'' +
'}';
}
}
| PlatformStreamCoords |
java | quarkusio__quarkus | integration-tests/main/src/main/java/io/quarkus/it/jpa/JPACustomUserTypeEndpoint.java | {
"start": 333,
"end": 904
} | class ____ {
@Inject
EntityManager em;
@GET
@Transactional
@Produces(MediaType.TEXT_PLAIN)
public Long invokeCreation() {
CustomTypeEntity customTypeEntity = new CustomTypeEntity();
customTypeEntity.setBigInteger(BigInteger.ONE);
customTypeEntity.setCustomEnum(CustomEnum.ONE);
Animal animal = new Animal();
animal.setWeight(29.12);
customTypeEntity.setAnimal(animal);
em.persist(customTypeEntity);
em.flush();
return customTypeEntity.getId();
}
}
| JPACustomUserTypeEndpoint |
java | bumptech__glide | third_party/gif_decoder/src/test/java/com/bumptech/glide/gifdecoder/test/GifBytesTestUtilTest.java | {
"start": 351,
"end": 4005
} | class ____ {
@Test
public void testWriteHeaderAndLsdWithoutGct() {
ByteBuffer buffer = ByteBuffer.allocate(GifBytesTestUtil.HEADER_LENGTH);
GifBytesTestUtil.writeHeaderAndLsd(buffer, 8, 16, false, 0);
byte[] expected =
new byte[] { 0x47, 0x49, 0x46, 0x38, 0x39, 0x61, 0x00, 0x08, 0x00, 0x10, 0x20, 0x00, 0x00 };
assertEquals(expected, buffer);
}
@Test
public void testWriteHeaderAndLsdWithGct() {
ByteBuffer buffer = ByteBuffer.allocate(GifBytesTestUtil.HEADER_LENGTH);
GifBytesTestUtil.writeHeaderAndLsd(buffer, 8, 16, true, 4);
byte[] expected =
new byte[] { 0x47, 0x49, 0x46, 0x38, 0x39, 0x61, 0x00, 0x08, 0x00, 0x10, (byte) 0xA4, 0x00,
0x00 };
assertEquals(expected, buffer);
}
@Test
public void testWriteImageDescriptorWithoutColorTable() {
ByteBuffer buffer = ByteBuffer.allocate(GifBytesTestUtil.IMAGE_DESCRIPTOR_LENGTH);
GifBytesTestUtil.writeImageDescriptor(buffer, 10, 9, 8, 7, false, 0);
byte[] expected = new byte[] {
// Image separator.
0x2C,
// Image left.
0x00, 0x0A,
// Image right.
0x00, 0X09,
// Image width.
0x00, 0x08,
// Image height.
0x00, 0x07,
// Packed field.
0x00 };
assertEquals(expected, buffer);
}
@Test
public void testWriteImageDescriptorWithColorTable() {
ByteBuffer buffer = ByteBuffer.allocate(GifBytesTestUtil.IMAGE_DESCRIPTOR_LENGTH);
GifBytesTestUtil.writeImageDescriptor(buffer, 10, 9, 8, 7, true, 4);
byte packedField =
// Set LCT flag
(byte) 0x80
// Size of color table (2^(N + 1) == 4)
| 0x01;
byte[] expected = new byte[] {
// Image separator.
0x2C,
// Image left.
0x00, 0x0A,
// Image right.
0x00, 0X09,
// Image width.
0x00, 0x08,
// Image height.
0x00, 0x07, packedField };
assertEquals(expected, buffer);
}
@Test
public void testWriteColorTable() {
final int numColors = 4;
ByteBuffer buffer = ByteBuffer.allocate(GifBytesTestUtil.getColorTableLength(numColors));
GifBytesTestUtil.writeColorTable(buffer, numColors);
byte[] expected = new byte[] {
// First color.
0x00, 0x00, 0x00,
// Second color.
0x00, 0x00, 0x01,
// Third color.
0x00, 0x00, 0x02,
// Fourth color.
0x00, 0x00, 0x03, };
assertEquals(expected, buffer);
}
@Test
public void testWriteFakeImageData() {
ByteBuffer buffer = ByteBuffer.allocate(4);
GifBytesTestUtil.writeFakeImageData(buffer, 2);
byte[] expected = new byte[] { 0x02, 0x01, 0x01, 0x00 };
assertEquals(expected, buffer);
}
@Test
public void testWritesGraphicsControlExtension() {
short delay = 20;
ByteBuffer buffer = ByteBuffer.allocate(GifBytesTestUtil.GRAPHICS_CONTROL_EXTENSION_LENGTH);
byte[] expected = new byte[] {
// Extension inducer.
0x21,
// Graphic control label.
(byte) 0xF9,
// Block size.
0x04,
// Packed byte.
0x00,
// Frame delay.
0x00, 0x14,
// Transparent color index.
0x00,
// block terminator.
0x00 };
GifBytesTestUtil.writeGraphicsControlExtension(buffer, delay);
assertEquals(expected, buffer);
}
private static void assertEquals(byte[] expected, ByteBuffer buffer) {
assertArrayEquals(
"expected=" + Arrays.toString(expected) + " received=" + Arrays.toString(buffer.array()),
expected, buffer.array());
}
}
| GifBytesTestUtilTest |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/scenario/MaintenancePushNotificationMonitor.java | {
"start": 547,
"end": 1913
} | class ____ {
private static final Logger log = LoggerFactory.getLogger(MaintenancePushNotificationMonitor.class);
/**
* Sets up push notification monitoring. Lettuce automatically observes the input buffer and parses PUSH notifications.
*
* @param connection the Redis connection to monitor
* @param capture the capture implementation to handle notifications
* @param <T> the type of capture that implements MaintenanceNotificationCapture
*/
public static <T extends MaintenanceNotificationCapture> void setupMonitoring(
StatefulRedisConnection<String, String> connection, T capture) {
log.info("Setting up push notification monitoring for maintenance events...");
// Create and register the push listener
PushListener maintenanceListener = new MaintenanceEventPushListener<>(capture);
connection.addListener(maintenanceListener);
log.info("PushListener registered for maintenance event monitoring");
// No periodic ping monitoring needed - Lettuce automatically handles PUSH notifications
log.info("Push notification monitoring active - Lettuce will automatically parse PUSH notifications");
}
/**
* Internal PushListener implementation that handles all maintenance event types
*/
private static | MaintenancePushNotificationMonitor |
java | junit-team__junit5 | junit-platform-console/src/main/java/org/junit/platform/console/options/TestConsoleOutputOptionsMixin.java | {
"start": 963,
"end": 1174
} | class ____ {
@ArgGroup(validate = false, order = 5, heading = "%n@|bold CONSOLE OUTPUT|@%n%n")
ConsoleOutputOptions consoleOutputOptions = new ConsoleOutputOptions();
public static | TestConsoleOutputOptionsMixin |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/bcextensions/SyntheticComponentsImpl.java | {
"start": 427,
"end": 1944
} | class ____ implements SyntheticComponents {
final List<SyntheticBeanBuilderImpl<?>> syntheticBeans;
final List<SyntheticObserverBuilderImpl<?>> syntheticObservers;
final DotName extensionClass;
SyntheticComponentsImpl(List<SyntheticBeanBuilderImpl<?>> syntheticBeans,
List<SyntheticObserverBuilderImpl<?>> syntheticObservers, DotName extensionClass) {
this.syntheticBeans = syntheticBeans;
this.syntheticObservers = syntheticObservers;
this.extensionClass = extensionClass;
}
@Override
public <T> SyntheticBeanBuilder<T> addBean(Class<T> implementationClass) {
SyntheticBeanBuilderImpl<T> builder = new SyntheticBeanBuilderImpl<>(implementationClass);
syntheticBeans.add(builder);
return builder;
}
@Override
public <T> SyntheticObserverBuilder<T> addObserver(Class<T> eventType) {
org.jboss.jandex.Type jandexType = Types.jandexType(eventType);
SyntheticObserverBuilderImpl<T> builder = new SyntheticObserverBuilderImpl<>(extensionClass, jandexType);
syntheticObservers.add(builder);
return builder;
}
@Override
public <T> SyntheticObserverBuilder<T> addObserver(Type eventType) {
org.jboss.jandex.Type jandexType = ((TypeImpl<?>) eventType).jandexType;
SyntheticObserverBuilderImpl<T> builder = new SyntheticObserverBuilderImpl<>(extensionClass, jandexType);
syntheticObservers.add(builder);
return builder;
}
}
| SyntheticComponentsImpl |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/cglib_lazy_error/CglibNPELazyTest.java | {
"start": 1080,
"end": 4461
} | class ____ {
private static SqlSessionFactory sqlSessionFactory;
@BeforeAll
static void initDatabase() throws Exception {
try (Reader reader = Resources
.getResourceAsReader("org/apache/ibatis/submitted/cglib_lazy_error/ibatisConfigLazy.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
sqlSessionFactory.getConfiguration().setLazyLoadingEnabled(true);
sqlSessionFactory.getConfiguration().setAggressiveLazyLoading(false);
}
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/cglib_lazy_error/CreateDB.sql");
}
@Test
void noParent() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
PersonMapper personMapper = sqlSession.getMapper(PersonMapper.class);
Person person = personMapper.selectById(1);
Assertions.assertNotNull(person, "Persons must not be null");
Person parent = person.getParent();
Assertions.assertNull(parent, "Parent must be null");
}
}
@Test
void ancestorSelf() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
PersonMapper personMapper = sqlSession.getMapper(PersonMapper.class);
Person person = personMapper.selectById(1);
Assertions.assertNotNull(person, "Persons must not be null");
Person ancestor = person.getAncestor();
Assertions.assertEquals(person, ancestor, "Ancestor must be John Smith sr.");
}
}
@Test
void ancestorAfterQueryingParents() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
PersonMapper personMapper = sqlSession.getMapper(PersonMapper.class);
Person expectedAncestor = personMapper.selectById(1);
Person person = personMapper.selectById(3);
// Load ancestor indirectly.
Assertions.assertNotNull(person, "Persons must not be null");
Assertions.assertNotNull(person.getParent(), "Parent must not be null");
Assertions.assertNotNull(person.getParent().getParent(), "Grandparent must not be null");
Assertions.assertEquals(expectedAncestor, person.getAncestor(), "Ancestor must be John Smith sr.");
}
}
@Test
void grandParent() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
PersonMapper personMapper = sqlSession.getMapper(PersonMapper.class);
Person expectedParent = personMapper.selectById(2);
Person expectedGrandParent = personMapper.selectById(1);
Person person = personMapper.selectById(3);
Assertions.assertNotNull(person, "Persons must not be null");
final Person actualParent = person.getParent();
final Person actualGrandParent = person.getParent().getParent();
Assertions.assertEquals(expectedParent, actualParent);
Assertions.assertEquals(expectedGrandParent, actualGrandParent);
}
}
@Test
void ancestor() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
PersonMapper personMapper = sqlSession.getMapper(PersonMapper.class);
Person expectedAncestor = personMapper.selectById(1);
Person person = personMapper.selectById(3);
Assertions.assertNotNull(person, "Persons must not be null");
final Person actualAncestor = person.getAncestor();
Assertions.assertEquals(expectedAncestor, actualAncestor);
}
}
}
| CglibNPELazyTest |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/io/network/buffer/BufferRecycler.java | {
"start": 974,
"end": 1299
} | interface ____ {
/**
* Recycles the {@link MemorySegment} to its original {@link BufferPool} instance.
*
* @param memorySegment The memory segment to be recycled.
*/
void recycle(MemorySegment memorySegment);
/** The buffer recycler does nothing for recycled segment. */
final | BufferRecycler |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/RuntimeReinitializedClassBuildItem.java | {
"start": 452,
"end": 734
} | class ____ extends MultiBuildItem {
private final String className;
public RuntimeReinitializedClassBuildItem(String className) {
this.className = className;
}
public String getClassName() {
return className;
}
}
| RuntimeReinitializedClassBuildItem |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/collection/mapkey/Book.java | {
"start": 477,
"end": 743
} | class ____ {
@Id
@Size(min=10, max = 13)
String isbn;
@OneToMany(cascade = PERSIST,
mappedBy = "isbn")
@MapKey(name = "name")
Map<String,Chapter> chapters;
Book(String isbn) {
this.isbn = isbn;
chapters = new HashMap<>();
}
Book() {
}
}
@Entity
| Book |
java | google__dagger | javatests/dagger/internal/codegen/DaggerSuperficialValidationTest.java | {
"start": 21157,
"end": 21341
} | class ____<T> extends MissingType<T> {}",
"}"),
CompilerTests.kotlinSource(
"test.Outer.kt",
"package test",
"",
" | Parent |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/cluster/PrevalidateNodeRemovalIT.java | {
"start": 2084,
"end": 13218
} | class ____ extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(MockTransportService.TestPlugin.class);
}
public void testNodeRemovalFromNonRedCluster() throws Exception {
internalCluster().startMasterOnlyNode();
String node1 = internalCluster().startDataOnlyNode();
String node2 = internalCluster().startDataOnlyNode();
String indexName = "test-idx";
createIndex(indexName, 1, 1);
ensureGreen();
// Prevalidate removal of one of the two nodes
String nodeName = randomFrom(node1, node2);
PrevalidateNodeRemovalRequest.Builder req = PrevalidateNodeRemovalRequest.builder();
switch (randomIntBetween(0, 2)) {
case 0 -> req.setNames(nodeName);
case 1 -> req.setIds(getNodeId(nodeName));
case 2 -> req.setExternalIds(internalCluster().clusterService(nodeName).localNode().getExternalId());
default -> throw new IllegalStateException("Unexpected value");
}
PrevalidateNodeRemovalResponse resp = client().execute(PrevalidateNodeRemovalAction.INSTANCE, req.build(TEST_REQUEST_TIMEOUT))
.get();
assertTrue(resp.getPrevalidation().isSafe());
assertThat(resp.getPrevalidation().message(), equalTo("cluster status is not RED"));
assertThat(resp.getPrevalidation().nodes().size(), equalTo(1));
NodesRemovalPrevalidation.NodeResult nodeResult = resp.getPrevalidation().nodes().get(0);
assertNotNull(nodeResult);
assertThat(nodeResult.name(), equalTo(nodeName));
assertThat(nodeResult.result().reason(), equalTo(NodesRemovalPrevalidation.Reason.NO_PROBLEMS));
assertThat(nodeResult.result().message(), equalTo(""));
assertTrue(nodeResult.result().isSafe());
// Enforce a replica to get unassigned
updateIndexSettings(Settings.builder().put("index.routing.allocation.require._name", node1), indexName);
ensureYellow();
PrevalidateNodeRemovalRequest req2 = PrevalidateNodeRemovalRequest.builder().setNames(node2).build(TEST_REQUEST_TIMEOUT);
PrevalidateNodeRemovalResponse resp2 = client().execute(PrevalidateNodeRemovalAction.INSTANCE, req2).get();
assertTrue(resp2.getPrevalidation().isSafe());
assertThat(resp2.getPrevalidation().message(), equalTo("cluster status is not RED"));
assertThat(resp2.getPrevalidation().nodes().size(), equalTo(1));
NodesRemovalPrevalidation.NodeResult nodeResult2 = resp2.getPrevalidation().nodes().get(0);
assertNotNull(nodeResult2);
assertThat(nodeResult2.name(), equalTo(node2));
assertTrue(nodeResult2.result().isSafe());
assertThat(nodeResult2.result().reason(), equalTo(NodesRemovalPrevalidation.Reason.NO_PROBLEMS));
assertThat(nodeResult2.result().message(), equalTo(""));
}
// Test that in case the nodes that are being prevalidated do not contain copies of any of the
// red shards, their removal is considered to be safe.
public void testNodeRemovalFromRedClusterWithNoLocalShardCopy() throws Exception {
internalCluster().startMasterOnlyNode();
String nodeWithIndex = internalCluster().startDataOnlyNode();
List<String> otherNodes = internalCluster().startDataOnlyNodes(randomIntBetween(1, 3));
// Create an index pinned to one node, and then stop that node so the index is RED.
String indexName = "test-idx";
createIndex(
indexName,
Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put("index.routing.allocation.require._name", nodeWithIndex)
.build()
);
ensureYellow(indexName);
internalCluster().stopNode(nodeWithIndex);
ensureRed(indexName);
String[] otherNodeNames = otherNodes.toArray(new String[otherNodes.size()]);
PrevalidateNodeRemovalRequest req = PrevalidateNodeRemovalRequest.builder().setNames(otherNodeNames).build(TEST_REQUEST_TIMEOUT);
PrevalidateNodeRemovalResponse resp = client().execute(PrevalidateNodeRemovalAction.INSTANCE, req).get();
assertTrue(resp.getPrevalidation().isSafe());
assertThat(resp.getPrevalidation().message(), equalTo(""));
assertThat(resp.getPrevalidation().nodes().size(), equalTo(otherNodes.size()));
for (NodesRemovalPrevalidation.NodeResult nodeResult : resp.getPrevalidation().nodes()) {
assertThat(nodeResult.name(), oneOf(otherNodeNames));
assertThat(nodeResult.result().reason(), equalTo(NodesRemovalPrevalidation.Reason.NO_RED_SHARDS_ON_NODE));
assertTrue(nodeResult.result().isSafe());
}
}
public void testNodeRemovalFromRedClusterWithLocalShardCopy() throws Exception {
internalCluster().startMasterOnlyNode();
String node1 = internalCluster().startDataOnlyNode();
String node2 = internalCluster().startDataOnlyNode();
String indexName = "test-idx";
createIndex(indexName, indexSettings(1, 0).put("index.routing.allocation.require._name", node1).build());
ensureGreen(indexName);
// Prevent node1 from removing its local index shard copies upon removal, by blocking
// its ACTION_SHARD_EXISTS requests since after a relocation, the source first waits
// until the shard exists somewhere else, then it removes it locally.
final CountDownLatch shardActiveRequestSent = new CountDownLatch(1);
MockTransportService.getInstance(node1)
.addSendBehavior(MockTransportService.getInstance(node2), (connection, requestId, action, request, options) -> {
if (action.equals(IndicesStore.ACTION_SHARD_EXISTS)) {
shardActiveRequestSent.countDown();
logger.info("prevent shard active request from being sent");
throw new ConnectTransportException(connection.getNode(), "DISCONNECT: simulated");
}
connection.sendRequest(requestId, action, request, options);
});
logger.info("--> move shard from {} to {}, and wait for relocation to finish", node1, node2);
updateIndexSettings(Settings.builder().put("index.routing.allocation.require._name", node2), indexName);
shardActiveRequestSent.await();
ensureGreen(indexName);
// To ensure that the index doesn't get relocated back to node1 after stopping node2, we
// index a doc to make the index copy on node1 (in case not deleted after the relocation) stale.
indexDoc(indexName, "some_id", "foo", "bar");
internalCluster().stopNode(node2);
ensureRed(indexName);
// Ensure that node1 still has data for the unassigned index
NodeEnvironment nodeEnv = internalCluster().getInstance(NodeEnvironment.class, node1);
Index index = internalCluster().clusterService().state().metadata().getProject().index(indexName).getIndex();
ShardPath shardPath = ShardPath.loadShardPath(logger, nodeEnv, new ShardId(index, 0), "");
assertNotNull("local index shards not found", shardPath);
// Prevalidate removal of node1
PrevalidateNodeRemovalRequest req = PrevalidateNodeRemovalRequest.builder().setNames(node1).build(TEST_REQUEST_TIMEOUT);
PrevalidateNodeRemovalResponse resp = client().execute(PrevalidateNodeRemovalAction.INSTANCE, req).get();
String node1Id = getNodeId(node1);
assertFalse(resp.getPrevalidation().isSafe());
assertThat(resp.getPrevalidation().message(), equalTo("removal of the following nodes might not be safe: [" + node1Id + "]"));
assertThat(resp.getPrevalidation().nodes().size(), equalTo(1));
NodesRemovalPrevalidation.NodeResult nodeResult = resp.getPrevalidation().nodes().get(0);
assertThat(nodeResult.name(), equalTo(node1));
assertFalse(nodeResult.result().isSafe());
assertThat(nodeResult.result().reason(), equalTo(NodesRemovalPrevalidation.Reason.RED_SHARDS_ON_NODE));
assertThat(nodeResult.result().message(), equalTo("node contains copies of the following red shards: [[" + indexName + "][0]]"));
}
public void testNodeRemovalFromRedClusterWithTimeout() throws Exception {
internalCluster().startMasterOnlyNode();
String node1 = internalCluster().startDataOnlyNode();
String node2 = internalCluster().startDataOnlyNode();
String indexName = "test-index";
createIndex(indexName, indexSettings(1, 0).put("index.routing.allocation.require._name", node1).build());
ensureGreen(indexName);
// make it red!
internalCluster().stopNode(node1);
ensureRed(indexName);
CountDownLatch stallPrevalidateShardPathActionLatch = new CountDownLatch(1);
MockTransportService.getInstance(node2)
.addRequestHandlingBehavior(TransportPrevalidateShardPathAction.ACTION_NAME + "[n]", (handler, request, channel, task) -> {
logger.info("drop the check shards request");
safeAwait(stallPrevalidateShardPathActionLatch);
handler.messageReceived(request, channel, task);
});
try {
PrevalidateNodeRemovalRequest req = PrevalidateNodeRemovalRequest.builder()
.setNames(node2)
.build(TEST_REQUEST_TIMEOUT)
.masterNodeTimeout(TimeValue.timeValueSeconds(1))
.timeout(TimeValue.timeValueSeconds(1));
PrevalidateNodeRemovalResponse resp = client().execute(PrevalidateNodeRemovalAction.INSTANCE, req).get();
assertFalse("prevalidation result should return false", resp.getPrevalidation().isSafe());
String node2Id = getNodeId(node2);
assertThat(
resp.getPrevalidation().message(),
equalTo("cannot prevalidate removal of nodes with the following IDs: [" + node2Id + "]")
);
assertThat(resp.getPrevalidation().nodes().size(), equalTo(1));
NodesRemovalPrevalidation.NodeResult nodeResult = resp.getPrevalidation().nodes().get(0);
assertThat(nodeResult.name(), equalTo(node2));
assertFalse(nodeResult.result().isSafe());
assertThat(nodeResult.result().message(), startsWith("failed contacting the node"));
assertThat(nodeResult.result().reason(), equalTo(NodesRemovalPrevalidation.Reason.UNABLE_TO_VERIFY));
} finally {
stallPrevalidateShardPathActionLatch.countDown();
}
}
private void ensureRed(String indexName) throws Exception {
assertBusy(() -> {
ClusterHealthResponse healthResponse = clusterAdmin().prepareHealth(TEST_REQUEST_TIMEOUT, indexName)
.setWaitForStatus(ClusterHealthStatus.RED)
.setWaitForEvents(Priority.LANGUID)
.get();
assertThat(healthResponse.getStatus(), equalTo(ClusterHealthStatus.RED));
});
}
}
| PrevalidateNodeRemovalIT |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/jmx/support/JmxUtils.java | {
"start": 10831,
"end": 11360
} | interface ____ the given class
*/
public static @Nullable Class<?> getMBeanInterface(@Nullable Class<?> clazz) {
if (clazz == null || clazz.getSuperclass() == null) {
return null;
}
String mbeanInterfaceName = clazz.getName() + MBEAN_SUFFIX;
Class<?>[] implementedInterfaces = clazz.getInterfaces();
for (Class<?> iface : implementedInterfaces) {
if (iface.getName().equals(mbeanInterfaceName)) {
return iface;
}
}
return getMBeanInterface(clazz.getSuperclass());
}
/**
* Return the Java MXBean | for |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/operators/RightOuterJoinDriver.java | {
"start": 2118,
"end": 8071
} | class ____<IT1, IT2, OT> extends AbstractOuterJoinDriver<IT1, IT2, OT> {
@Override
protected JoinTaskIterator<IT1, IT2, OT> getReusingOuterJoinIterator(
DriverStrategy driverStrategy,
MutableObjectIterator<IT1> in1,
MutableObjectIterator<IT2> in2,
TypeSerializer<IT1> serializer1,
TypeComparator<IT1> comparator1,
TypeSerializer<IT2> serializer2,
TypeComparator<IT2> comparator2,
TypePairComparatorFactory<IT1, IT2> pairComparatorFactory,
MemoryManager memoryManager,
IOManager ioManager,
double driverMemFraction)
throws Exception {
switch (driverStrategy) {
case RIGHT_OUTER_MERGE:
int numPages = memoryManager.computeNumberOfPages(driverMemFraction);
return new ReusingMergeOuterJoinIterator<>(
OuterJoinType.RIGHT,
in1,
in2,
serializer1,
comparator1,
serializer2,
comparator2,
pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager,
ioManager,
numPages,
super.taskContext.getContainingTask());
case RIGHT_HYBRIDHASH_BUILD_FIRST:
return new ReusingBuildFirstHashJoinIterator<>(
in1,
in2,
serializer1,
comparator1,
serializer2,
comparator2,
pairComparatorFactory.createComparator21(comparator1, comparator2),
memoryManager,
ioManager,
this.taskContext.getContainingTask(),
driverMemFraction,
true,
false,
false);
case RIGHT_HYBRIDHASH_BUILD_SECOND:
return new ReusingBuildSecondHashJoinIterator<>(
in1,
in2,
serializer1,
comparator1,
serializer2,
comparator2,
pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager,
ioManager,
this.taskContext.getContainingTask(),
driverMemFraction,
false,
true,
false);
default:
throw new Exception(
"Unsupported driver strategy for right outer join driver: "
+ driverStrategy.name());
}
}
@Override
protected JoinTaskIterator<IT1, IT2, OT> getNonReusingOuterJoinIterator(
DriverStrategy driverStrategy,
MutableObjectIterator<IT1> in1,
MutableObjectIterator<IT2> in2,
TypeSerializer<IT1> serializer1,
TypeComparator<IT1> comparator1,
TypeSerializer<IT2> serializer2,
TypeComparator<IT2> comparator2,
TypePairComparatorFactory<IT1, IT2> pairComparatorFactory,
MemoryManager memoryManager,
IOManager ioManager,
double driverMemFraction)
throws Exception {
switch (driverStrategy) {
case RIGHT_OUTER_MERGE:
int numPages = memoryManager.computeNumberOfPages(driverMemFraction);
return new NonReusingMergeOuterJoinIterator<>(
OuterJoinType.RIGHT,
in1,
in2,
serializer1,
comparator1,
serializer2,
comparator2,
pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager,
ioManager,
numPages,
super.taskContext.getContainingTask());
case RIGHT_HYBRIDHASH_BUILD_FIRST:
return new NonReusingBuildFirstHashJoinIterator<>(
in1,
in2,
serializer1,
comparator1,
serializer2,
comparator2,
pairComparatorFactory.createComparator21(comparator1, comparator2),
memoryManager,
ioManager,
this.taskContext.getContainingTask(),
driverMemFraction,
true,
false,
false);
case RIGHT_HYBRIDHASH_BUILD_SECOND:
return new NonReusingBuildSecondHashJoinIterator<>(
in1,
in2,
serializer1,
comparator1,
serializer2,
comparator2,
pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager,
ioManager,
this.taskContext.getContainingTask(),
driverMemFraction,
false,
true,
false);
default:
throw new Exception(
"Unsupported driver strategy for right outer join driver: "
+ driverStrategy.name());
}
}
}
| RightOuterJoinDriver |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunction.java | {
"start": 1103,
"end": 6028
} | class ____ implements AggregatorFunction {
private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
new IntermediateStateDesc("quart", ElementType.BYTES_REF) );
private final DriverContext driverContext;
private final QuantileStates.SingleState state;
private final List<Integer> channels;
private final double percentile;
public PercentileFloatAggregatorFunction(DriverContext driverContext, List<Integer> channels,
QuantileStates.SingleState state, double percentile) {
this.driverContext = driverContext;
this.channels = channels;
this.state = state;
this.percentile = percentile;
}
public static PercentileFloatAggregatorFunction create(DriverContext driverContext,
List<Integer> channels, double percentile) {
return new PercentileFloatAggregatorFunction(driverContext, channels, PercentileFloatAggregator.initSingle(driverContext, percentile), percentile);
}
public static List<IntermediateStateDesc> intermediateStateDesc() {
return INTERMEDIATE_STATE_DESC;
}
@Override
public int intermediateBlockCount() {
return INTERMEDIATE_STATE_DESC.size();
}
@Override
public void addRawInput(Page page, BooleanVector mask) {
if (mask.allFalse()) {
// Entire page masked away
} else if (mask.allTrue()) {
addRawInputNotMasked(page);
} else {
addRawInputMasked(page, mask);
}
}
private void addRawInputMasked(Page page, BooleanVector mask) {
FloatBlock vBlock = page.getBlock(channels.get(0));
FloatVector vVector = vBlock.asVector();
if (vVector == null) {
addRawBlock(vBlock, mask);
return;
}
addRawVector(vVector, mask);
}
private void addRawInputNotMasked(Page page) {
FloatBlock vBlock = page.getBlock(channels.get(0));
FloatVector vVector = vBlock.asVector();
if (vVector == null) {
addRawBlock(vBlock);
return;
}
addRawVector(vVector);
}
private void addRawVector(FloatVector vVector) {
for (int valuesPosition = 0; valuesPosition < vVector.getPositionCount(); valuesPosition++) {
float vValue = vVector.getFloat(valuesPosition);
PercentileFloatAggregator.combine(state, vValue);
}
}
private void addRawVector(FloatVector vVector, BooleanVector mask) {
for (int valuesPosition = 0; valuesPosition < vVector.getPositionCount(); valuesPosition++) {
if (mask.getBoolean(valuesPosition) == false) {
continue;
}
float vValue = vVector.getFloat(valuesPosition);
PercentileFloatAggregator.combine(state, vValue);
}
}
private void addRawBlock(FloatBlock vBlock) {
for (int p = 0; p < vBlock.getPositionCount(); p++) {
int vValueCount = vBlock.getValueCount(p);
if (vValueCount == 0) {
continue;
}
int vStart = vBlock.getFirstValueIndex(p);
int vEnd = vStart + vValueCount;
for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
float vValue = vBlock.getFloat(vOffset);
PercentileFloatAggregator.combine(state, vValue);
}
}
}
private void addRawBlock(FloatBlock vBlock, BooleanVector mask) {
for (int p = 0; p < vBlock.getPositionCount(); p++) {
if (mask.getBoolean(p) == false) {
continue;
}
int vValueCount = vBlock.getValueCount(p);
if (vValueCount == 0) {
continue;
}
int vStart = vBlock.getFirstValueIndex(p);
int vEnd = vStart + vValueCount;
for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
float vValue = vBlock.getFloat(vOffset);
PercentileFloatAggregator.combine(state, vValue);
}
}
}
@Override
public void addIntermediateInput(Page page) {
assert channels.size() == intermediateBlockCount();
assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size();
Block quartUncast = page.getBlock(channels.get(0));
if (quartUncast.areAllValuesNull()) {
return;
}
BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector();
assert quart.getPositionCount() == 1;
BytesRef quartScratch = new BytesRef();
PercentileFloatAggregator.combineIntermediate(state, quart.getBytesRef(0, quartScratch));
}
@Override
public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
state.toIntermediate(blocks, offset, driverContext);
}
@Override
public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) {
blocks[offset] = PercentileFloatAggregator.evaluateFinal(state, driverContext);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName()).append("[");
sb.append("channels=").append(channels);
sb.append("]");
return sb.toString();
}
@Override
public void close() {
state.close();
}
}
| PercentileFloatAggregatorFunction |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/method/qualifierinjection/B.java | {
"start": 813,
"end": 1119
} | class ____ {
private A a;
private A a2;
@Inject
public void setA(@One A a) {
this.a = a;
}
@Inject
public void setAnother(@Named("twoA") A a2) {
this.a2 = a2;
}
public A getA() {
return a;
}
public A getA2() {
return a2;
}
}
| B |
java | google__guava | android/guava-testlib/src/com/google/common/testing/ClassSanityTester.java | {
"start": 10904,
"end": 13635
} | class ____ {
* public static Foo create(String a, String b, int c, boolean d) {
* return Foo.builder()
* .setA(a)
* .setB(b)
* .setC(c)
* .setD(d)
* .build();
* }
* }
*
* public void testEquals() {
* new ClassSanityTester()
* .forAllPublicStaticMethods(FooFactoryForTest.class)
* .thatReturn(Foo.class)
* .testEquals();
* }
* }
* </pre>
*
* <p>It will test that Foo objects created by the {@code create(a, b, c, d)} factory method with
* equal parameters are equal and vice versa, thus indirectly tests the builder equality.
*/
public void testEquals(Class<?> cls) {
try {
doTestEquals(cls);
} catch (Exception e) {
throwIfUnchecked(e);
throw new RuntimeException(e);
}
}
void doTestEquals(Class<?> cls)
throws ParameterNotInstantiableException,
ParameterHasNoDistinctValueException,
IllegalAccessException,
InvocationTargetException,
FactoryMethodReturnsNullException {
if (cls.isEnum()) {
return;
}
List<? extends Invokable<?, ?>> factories = Lists.reverse(getFactories(TypeToken.of(cls)));
if (factories.isEmpty()) {
return;
}
int numberOfParameters = factories.get(0).getParameters().size();
List<ParameterNotInstantiableException> paramErrors = new ArrayList<>();
List<ParameterHasNoDistinctValueException> distinctValueErrors = new ArrayList<>();
List<InvocationTargetException> instantiationExceptions = new ArrayList<>();
List<FactoryMethodReturnsNullException> nullErrors = new ArrayList<>();
// Try factories with the greatest number of parameters.
for (Invokable<?, ?> factory : factories) {
if (factory.getParameters().size() == numberOfParameters) {
try {
testEqualsUsing(factory);
return;
} catch (ParameterNotInstantiableException e) {
paramErrors.add(e);
} catch (ParameterHasNoDistinctValueException e) {
distinctValueErrors.add(e);
} catch (InvocationTargetException e) {
instantiationExceptions.add(e);
} catch (FactoryMethodReturnsNullException e) {
nullErrors.add(e);
}
}
}
throwFirst(paramErrors);
throwFirst(distinctValueErrors);
throwFirst(instantiationExceptions);
throwFirst(nullErrors);
}
/**
* Instantiates {@code cls} by invoking one of its non-private constructors or non-private static
* factory methods with the parameters automatically provided using dummy values.
*
* @return The instantiated instance, or {@code null} if the | FooFactoryForTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/criteria/JpaTupleElement.java | {
"start": 446,
"end": 1063
} | interface ____<T> extends TupleElement<T>, JpaCriteriaNode {
@Nullable JavaType<T> getJavaTypeDescriptor();
@Override
default @Nullable Class<? extends T> getJavaType() {
// todo (6.0) : can this signature just return `Class<T>`?
final JavaType<T> javaType = getJavaTypeDescriptor();
return javaType == null ? null : javaType.getJavaTypeClass();
}
default String getJavaTypeName() {
final JavaType<T> javaType = getJavaTypeDescriptor();
return javaType == null ? null : javaType.getTypeName();
}
default boolean isEnum() {
return getJavaTypeDescriptor() instanceof EnumJavaType;
}
}
| JpaTupleElement |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/FullTextSearchesWithQueryExpansionTest.java | {
"start": 937,
"end": 1980
} | class ____ extends TestCase {
public void test_0() throws Exception {
String sql = "SELECT * FROM articles WHERE MATCH (title,body) AGAINST ('database' IN NATURAL LANGUAGE MODE)";
SQLStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> stmtList = parser.parseStatementList();
SQLStatement stmt = stmtList.get(0);
{
String text = SQLUtils.toMySqlString(stmt);
assertEquals("SELECT *"
+ "\nFROM articles"
+ "\nWHERE MATCH (title, body) AGAINST ('database' IN NATURAL LANGUAGE MODE)",
text);
}
{
String text = SQLUtils.toMySqlString(stmt, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION);
assertEquals("select *"
+ "\nfrom articles"
+ "\nwhere match (title, body) against ('database' in natural language mode)",
text);
}
}
}
| FullTextSearchesWithQueryExpansionTest |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/ClassUtils.java | {
"start": 10213,
"end": 10323
} | class ____, usually without significant loss of meaning.
* </p>
*
* <p>
* The abbreviated | name |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/processor/internals/TaskExecutionMetadataTest.java | {
"start": 1398,
"end": 5394
} | class ____ {
static final String TOPOLOGY1 = "topology1";
static final String TOPOLOGY2 = "topology2";
static final Set<String> NAMED_TOPOLOGIES = Set.of(TOPOLOGY1, TOPOLOGY2);
static final int TIME_ZERO = 0;
static final int CONSTANT_BACKOFF_MS = 5000;
@Test
public void testCanProcessWithoutNamedTopologies() {
final Set<String> topologies = Collections.singleton(UNNAMED_TOPOLOGY);
final Set<String> pausedTopologies = new HashSet<>();
final TaskExecutionMetadata metadata = new TaskExecutionMetadata(topologies, pausedTopologies, ProcessingMode.AT_LEAST_ONCE);
final Task mockTask = createMockTask(UNNAMED_TOPOLOGY);
assertTrue(metadata.canProcessTask(mockTask, TIME_ZERO));
// This pauses an UNNAMED_TOPOLOGY / a KafkaStreams instance without named/modular
// topologies.
pausedTopologies.add(UNNAMED_TOPOLOGY);
assertFalse(metadata.canProcessTask(mockTask, TIME_ZERO));
}
@Test
public void testNamedTopologiesCanBePausedIndependently() {
final Set<String> pausedTopologies = new HashSet<>();
final TaskExecutionMetadata metadata = new TaskExecutionMetadata(NAMED_TOPOLOGIES, pausedTopologies, ProcessingMode.AT_LEAST_ONCE);
final Task mockTask1 = createMockTask(TOPOLOGY1);
final Task mockTask2 = createMockTask(TOPOLOGY2);
assertTrue(metadata.canProcessTask(mockTask1, TIME_ZERO));
assertTrue(metadata.canProcessTask(mockTask2, TIME_ZERO));
pausedTopologies.add(TOPOLOGY1);
assertFalse(metadata.canProcessTask(mockTask1, TIME_ZERO));
assertTrue(metadata.canProcessTask(mockTask2, TIME_ZERO));
pausedTopologies.remove(TOPOLOGY1);
assertTrue(metadata.canProcessTask(mockTask1, TIME_ZERO));
assertTrue(metadata.canProcessTask(mockTask2, TIME_ZERO));
}
@Test
public void testNamedTopologiesCanBeStartedPaused() {
final Set<String> pausedTopologies = new HashSet<>();
pausedTopologies.add(TOPOLOGY1);
final TaskExecutionMetadata metadata = new TaskExecutionMetadata(NAMED_TOPOLOGIES, pausedTopologies, ProcessingMode.AT_LEAST_ONCE);
final Task mockTask1 = createMockTask(TOPOLOGY1);
final Task mockTask2 = createMockTask(TOPOLOGY2);
assertFalse(metadata.canProcessTask(mockTask1, TIME_ZERO));
assertTrue(metadata.canProcessTask(mockTask2, TIME_ZERO));
pausedTopologies.remove(TOPOLOGY1);
assertTrue(metadata.canProcessTask(mockTask1, TIME_ZERO));
assertTrue(metadata.canProcessTask(mockTask2, TIME_ZERO));
}
@Test
public void testNamedTopologiesCanBackoff() {
final Set<String> pausedTopologies = new HashSet<>();
final TaskExecutionMetadata metadata = new TaskExecutionMetadata(NAMED_TOPOLOGIES, pausedTopologies, ProcessingMode.AT_LEAST_ONCE);
final Task mockTask1 = createMockTask(TOPOLOGY1);
final Task mockTask2 = createMockTask(TOPOLOGY2);
assertTrue(metadata.canProcessTask(mockTask1, TIME_ZERO));
assertTrue(metadata.canProcessTask(mockTask2, TIME_ZERO));
metadata.registerTaskError(mockTask1, new Throwable("Error"), TIME_ZERO);
assertFalse(metadata.canProcessTask(mockTask1, CONSTANT_BACKOFF_MS - 1));
assertTrue(metadata.canProcessTask(mockTask2, CONSTANT_BACKOFF_MS - 1));
assertFalse(metadata.canProcessTask(mockTask1, CONSTANT_BACKOFF_MS));
assertTrue(metadata.canProcessTask(mockTask2, CONSTANT_BACKOFF_MS));
assertTrue(metadata.canProcessTask(mockTask1, CONSTANT_BACKOFF_MS + 1));
assertTrue(metadata.canProcessTask(mockTask2, CONSTANT_BACKOFF_MS + 1));
}
private static Task createMockTask(final String topologyName) {
final Task mockTask = mock(Task.class);
final TaskId taskId = new TaskId(0, 0, topologyName);
when(mockTask.id()).thenReturn(taskId);
return mockTask;
}
}
| TaskExecutionMetadataTest |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/requests/SyncGroupRequestTest.java | {
"start": 1076,
"end": 1482
} | class ____ {
@Test
public void testRequestVersionCompatibilityFailBuild() {
assertThrows(UnsupportedVersionException.class, () -> new SyncGroupRequest.Builder(
new SyncGroupRequestData()
.setGroupId("groupId")
.setMemberId("consumerId")
.setGroupInstanceId("groupInstanceId")
).build((short) 2));
}
}
| SyncGroupRequestTest |
java | apache__flink | flink-filesystems/flink-s3-fs-presto/src/main/java/org/apache/flink/fs/s3presto/S3FileSystemFactory.java | {
"start": 1464,
"end": 4159
} | class ____ extends AbstractS3FileSystemFactory {
private static final String[] FLINK_CONFIG_PREFIXES = {"s3.", "presto.s3."};
private static final String[][] MIRRORED_CONFIG_KEYS = {
{"presto.s3.access.key", "presto.s3.access-key"},
{"presto.s3.secret.key", "presto.s3.secret-key"},
{"presto.s3.path.style.access", "presto.s3.path-style-access"}
};
public S3FileSystemFactory() {
super("Presto S3 File System", createHadoopConfigLoader());
}
@Override
public String getScheme() {
return "s3";
}
@VisibleForTesting
static HadoopConfigLoader createHadoopConfigLoader() {
return new HadoopConfigLoader(
FLINK_CONFIG_PREFIXES,
MIRRORED_CONFIG_KEYS,
"presto.s3.",
Collections.emptySet(),
Collections.emptySet(),
"");
}
@Override
protected org.apache.flink.core.fs.FileSystem createFlinkFileSystem(
FileSystem fs,
@Nullable FlinkS3FileSystem.S5CmdConfiguration s5CmdConfiguration,
String localTmpDirectory,
String entropyInjectionKey,
int numEntropyChars,
S3AccessHelper s3AccessHelper,
long s3minPartSize,
int maxConcurrentUploads) {
return new FlinkS3PrestoFileSystem(
fs,
s5CmdConfiguration,
localTmpDirectory,
entropyInjectionKey,
numEntropyChars,
s3AccessHelper,
s3minPartSize,
maxConcurrentUploads);
}
@Override
protected org.apache.hadoop.fs.FileSystem createHadoopFileSystem() {
return new PrestoS3FileSystem();
}
@Override
protected URI getInitURI(URI fsUri, org.apache.hadoop.conf.Configuration hadoopConfig) {
final String scheme = fsUri.getScheme();
final String authority = fsUri.getAuthority();
final URI initUri;
if (scheme == null && authority == null) {
initUri = createURI("s3://s3.amazonaws.com");
} else if (scheme != null && authority == null) {
initUri = createURI(scheme + "://s3.amazonaws.com");
} else {
initUri = fsUri;
}
return initUri;
}
@Nullable
@Override
protected S3AccessHelper getS3AccessHelper(FileSystem fs) {
return null;
}
private URI createURI(String str) {
try {
return new URI(str);
} catch (URISyntaxException e) {
throw new FlinkRuntimeException("Error in s3 aws URI - " + str, e);
}
}
}
| S3FileSystemFactory |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/TailCallSubscribeTest.java | {
"start": 1233,
"end": 3081
} | class ____ {
private static Function<Flux<Object>, Flux<Object>> manyOperatorsOnFlux = flux -> {
for (int i = 0; i < 5; i++) {
flux = flux.<Object>map(Object::toString).filter(Objects::nonNull);
}
return flux;
};
private static Function<Mono<?>, Mono<Object>> manyOperatorsOnMono = mono -> {
for (int i = 0; i < 5; i++) {
mono = mono.<Object>map(Object::toString).filter(Objects::nonNull);
}
@SuppressWarnings("unchecked")
Mono<Object> result = (Mono<Object>) mono;
return result;
};
@Test
public void testStackDepth() throws Exception {
StackCapturingPublisher stackCapturingPublisher = new StackCapturingPublisher();
Mono
.from(stackCapturingPublisher)
.as(manyOperatorsOnMono)
.flux()
.as(manyOperatorsOnFlux)
.delayElements(Duration.ofSeconds(1))
.then()
.subscribe(new CancellingSubscriber());
assertThat(stackCapturingPublisher.get(1, TimeUnit.SECONDS))
.extracting(StackTraceElement::getClassName, StackTraceElement::getMethodName)
.startsWith(
tuple(Thread.class.getName(), "getStackTrace"),
tuple(stackCapturingPublisher.getClass().getName(), "subscribe"),
tuple(MonoFromPublisher.class.getName(), "subscribe"),
tuple(Mono.class.getName(), "subscribe"),
tuple(this.getClass().getName(), "testStackDepth")
);
}
@Test
public void testDebugHook() throws Exception {
Hooks.onOperatorDebug();
testStackDepth();
}
@Test
public void interop() throws Exception {
| TailCallSubscribeTest |
java | junit-team__junit5 | junit-platform-engine/src/main/java/org/junit/platform/engine/discovery/NestedClassSelector.java | {
"start": 1444,
"end": 2078
} | class ____ are provided, the selector will only attempt
* to lazily load classes if {@link #getEnclosingClasses()} or
* {@link #getNestedClass()} is invoked.
*
* <p>In this context, Java {@link Class} means anything that can be referenced
* as a {@link Class} on the JVM — for example, classes from other JVM
* languages such Groovy, Scala, etc.
*
* @since 1.6
* @see DiscoverySelectors#selectNestedClass(List, Class)
* @see DiscoverySelectors#selectNestedClass(List, String)
* @see org.junit.platform.engine.support.descriptor.ClassSource
* @see ClassSelector
*/
@API(status = STABLE, since = "1.6")
public final | names |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/builtin/BuiltInTest.java | {
"start": 3882,
"end": 22430
} | class ____ {
@ProcessorTest
@WithClasses( {
JaxbMapper.class,
JaxbElementProperty.class,
} )
@WithJavaxJaxb
public void shouldApplyBuiltInOnJAXBElement() {
JaxbElementProperty source = new JaxbElementProperty();
source.setProp( createJaxb( "TEST" ) );
source.publicProp = createJaxb( "PUBLIC TEST" );
StringProperty target = JaxbMapper.INSTANCE.map( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isEqualTo( "TEST" );
assertThat( target.publicProp ).isEqualTo( "PUBLIC TEST" );
}
@ProcessorTest
@WithClasses( {
JakartaJaxbMapper.class,
JakartaJaxbElementProperty.class,
} )
@WithJakartaJaxb
public void shouldApplyBuiltInOnJakartaJaxbElement() {
JakartaJaxbElementProperty source = new JakartaJaxbElementProperty();
source.setProp( createJakartaJaxb( "TEST" ) );
source.publicProp = createJakartaJaxb( "PUBLIC TEST" );
StringProperty target = JakartaJaxbMapper.INSTANCE.map( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isEqualTo( "TEST" );
assertThat( target.publicProp ).isEqualTo( "PUBLIC TEST" );
}
@ProcessorTest
@WithClasses( {
JaxbMapper.class,
JaxbElementProperty.class,
} )
@WithJavaxJaxb
@IssueKey( "1698" )
public void shouldApplyBuiltInOnJAXBElementExtra() {
JaxbElementProperty source = new JaxbElementProperty();
source.setProp( createJaxb( "5" ) );
source.publicProp = createJaxb( "5" );
BigDecimalProperty target = JaxbMapper.INSTANCE.mapBD( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isEqualTo( new BigDecimal( "5" ) );
assertThat( target.publicProp ).isEqualTo( new BigDecimal( "5" ) );
JaxbElementProperty source2 = new JaxbElementProperty();
source2.setProp( createJaxb( "5" ) );
source2.publicProp = createJaxb( "5" );
SomeTypeProperty target2 = JaxbMapper.INSTANCE.mapSomeType( source2 );
assertThat( target2 ).isNotNull();
assertThat( target2.publicProp ).isNotNull();
assertThat( target2.getProp() ).isNotNull();
}
@ProcessorTest
@WithClasses( {
JakartaJaxbMapper.class,
JakartaJaxbElementProperty.class,
} )
@WithJakartaJaxb
@IssueKey( "1698" )
public void shouldApplyBuiltInOnJakartaJAXBElementExtra() {
JakartaJaxbElementProperty source = new JakartaJaxbElementProperty();
source.setProp( createJakartaJaxb( "5" ) );
source.publicProp = createJakartaJaxb( "5" );
BigDecimalProperty target = JakartaJaxbMapper.INSTANCE.mapBD( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isEqualTo( new BigDecimal( "5" ) );
assertThat( target.publicProp ).isEqualTo( new BigDecimal( "5" ) );
JakartaJaxbElementProperty source2 = new JakartaJaxbElementProperty();
source2.setProp( createJakartaJaxb( "5" ) );
source2.publicProp = createJakartaJaxb( "5" );
SomeTypeProperty target2 = JakartaJaxbMapper.INSTANCE.mapSomeType( source2 );
assertThat( target2 ).isNotNull();
assertThat( target2.publicProp ).isNotNull();
assertThat( target2.getProp() ).isNotNull();
}
@ProcessorTest
@WithClasses( {
JaxbListMapper.class,
JaxbElementListProperty.class,
} )
@WithJavaxJaxb
@IssueKey( "141" )
public void shouldApplyBuiltInOnJAXBElementList() {
JaxbElementListProperty source = new JaxbElementListProperty();
source.setProp( createJaxbList( "TEST2" ) );
source.publicProp = createJaxbList( "PUBLIC TEST2" );
StringListProperty target = JaxbListMapper.INSTANCE.map( source );
assertThat( target ).isNotNull();
assertThat( target.getProp().get( 0 ) ).isEqualTo( "TEST2" );
assertThat( target.publicProp.get( 0 ) ).isEqualTo( "PUBLIC TEST2" );
}
@ProcessorTest
@WithClasses( {
JakartaJaxbListMapper.class,
JakartaJaxbElementListProperty.class,
} )
@WithJakartaJaxb
@IssueKey( "141" )
public void shouldApplyBuiltInOnJakartaJAXBElementList() {
JakartaJaxbElementListProperty source = new JakartaJaxbElementListProperty();
source.setProp( createJakartaJaxbList( "TEST2" ) );
source.publicProp = createJakartaJaxbList( "PUBLIC TEST2" );
StringListProperty target = JakartaJaxbListMapper.INSTANCE.map( source );
assertThat( target ).isNotNull();
assertThat( target.getProp().get( 0 ) ).isEqualTo( "TEST2" );
assertThat( target.publicProp.get( 0 ) ).isEqualTo( "PUBLIC TEST2" );
}
@ProcessorTest
@WithClasses( DateToXmlGregCalMapper.class )
public void shouldApplyBuiltInOnDateToXmlGregCal() throws ParseException {
DateProperty source = new DateProperty();
source.setProp( createDate( "31-08-1982 10:20:56" ) );
source.publicProp = createDate( "31-08-2016 10:20:56" );
XmlGregorianCalendarProperty target = DateToXmlGregCalMapper.INSTANCE.map( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isNotNull();
assertThat( target.getProp().toString() ).isEqualTo( "1982-08-31T10:20:56.000+02:00" );
assertThat( target.publicProp ).isNotNull();
assertThat( target.publicProp.toString() ).isEqualTo( "2016-08-31T10:20:56.000+02:00" );
}
@ProcessorTest
@WithClasses( XmlGregCalToDateMapper.class )
public void shouldApplyBuiltInOnXmlGregCalToDate() throws DatatypeConfigurationException {
XmlGregorianCalendarProperty source = new XmlGregorianCalendarProperty();
source.setProp( createXmlCal( 1999, 3, 2, 60 ) );
source.publicProp = createXmlCal( 2016, 3, 2, 60 );
DateProperty target = XmlGregCalToDateMapper.INSTANCE.map( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isNotNull();
assertThat( target.getProp().toString() ).isEqualTo( "Tue Mar 02 00:00:00 CET 1999" );
assertThat( target.publicProp ).isNotNull();
assertThat( target.publicProp.toString() ).isEqualTo( "Wed Mar 02 00:00:00 CET 2016" );
}
@ProcessorTest
@WithClasses( StringToXmlGregCalMapper.class )
public void shouldApplyBuiltInStringToXmlGregCal() {
StringProperty source = new StringProperty();
source.setProp( "05.07.1999" );
source.publicProp = "05.07.2016";
XmlGregorianCalendarProperty target = StringToXmlGregCalMapper.INSTANCE.mapAndFormat( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isNotNull();
assertThat( target.getProp().toString() ).isEqualTo( "1999-07-05T00:00:00.000+02:00" );
assertThat( target.publicProp ).isNotNull();
assertThat( target.publicProp.toString() ).isEqualTo( "2016-07-05T00:00:00.000+02:00" );
// direct,via lexical representation
source.setProp( "2000-03-04T23:00:00+03:00" );
source.publicProp = "2016-03-04T23:00:00+03:00";
target = StringToXmlGregCalMapper.INSTANCE.map( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isNotNull();
assertThat( target.getProp().toString() ).isEqualTo( "2000-03-04T23:00:00+03:00" );
assertThat( target.publicProp ).isNotNull();
assertThat( target.publicProp.toString() ).isEqualTo( "2016-03-04T23:00:00+03:00" );
// null string
source.setProp( null );
source.publicProp = null;
target = StringToXmlGregCalMapper.INSTANCE.map( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isNull();
assertThat( target.publicProp ).isNull();
}
@ProcessorTest
@WithClasses( XmlGregCalToStringMapper.class )
public void shouldApplyBuiltInXmlGregCalToString() throws DatatypeConfigurationException {
XmlGregorianCalendarProperty source = new XmlGregorianCalendarProperty();
source.setProp( createXmlCal( 1999, 3, 2, 60 ) );
source.publicProp = createXmlCal( 2016, 3, 2, 60 );
StringProperty target = XmlGregCalToStringMapper.INSTANCE.mapAndFormat( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isNotNull();
assertThat( target.getProp() ).isEqualTo( "02.03.1999" );
assertThat( target.publicProp ).isNotNull();
assertThat( target.publicProp ).isEqualTo( "02.03.2016" );
source.setProp( createXmlCal( 1999, 3, 2, 60 ) );
source.publicProp = createXmlCal( 2016, 3, 2, 60 );
target = XmlGregCalToStringMapper.INSTANCE.map( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isNotNull();
assertThat( target.getProp() ).isEqualTo( "1999-03-02+01:00" );
assertThat( target.publicProp ).isNotNull();
assertThat( target.publicProp ).isEqualTo( "2016-03-02+01:00" );
}
@ProcessorTest
@WithClasses( CalendarToXmlGregCalMapper.class )
public void shouldApplyBuiltInOnCalendarToXmlGregCal() throws ParseException {
CalendarProperty source = new CalendarProperty();
source.setProp( createCalendar( "02.03.1999" ) );
source.publicProp = createCalendar( "02.03.2016" );
XmlGregorianCalendarProperty target = CalendarToXmlGregCalMapper.INSTANCE.map( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isNotNull();
assertThat( target.getProp().toString() ).isEqualTo( "1999-03-02T00:00:00.000+01:00" );
assertThat( target.publicProp ).isNotNull();
assertThat( target.publicProp.toString() ).isEqualTo( "2016-03-02T00:00:00.000+01:00" );
}
@ProcessorTest
@WithClasses( XmlGregCalToCalendarMapper.class )
public void shouldApplyBuiltInOnXmlGregCalToCalendar() throws DatatypeConfigurationException {
XmlGregorianCalendarProperty source = new XmlGregorianCalendarProperty();
source.setProp( createXmlCal( 1999, 3, 2, 60 ) );
source.publicProp = createXmlCal( 2016, 3, 2, 60 );
CalendarProperty target = XmlGregCalToCalendarMapper.INSTANCE.map( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isNotNull();
assertThat( target.getProp().getTimeInMillis() ).isEqualTo( 920329200000L );
assertThat( target.publicProp ).isNotNull();
assertThat( target.publicProp.getTimeInMillis() ).isEqualTo( 1456873200000L );
assertThat( target.publicProp.getTimeInMillis() ).isEqualTo( 1456873200000L );
}
@ProcessorTest
@WithClasses( CalendarToDateMapper.class )
public void shouldApplyBuiltInOnCalendarToDate() throws ParseException {
CalendarProperty source = new CalendarProperty();
source.setProp( createCalendar( "02.03.1999" ) );
source.publicProp = createCalendar( "02.03.2016" );
DateProperty target = CalendarToDateMapper.INSTANCE.map( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isNotNull();
assertThat( target.getProp() ).isEqualTo( createCalendar( "02.03.1999" ).getTime() );
assertThat( target.publicProp ).isNotNull();
assertThat( target.publicProp ).isEqualTo( createCalendar( "02.03.2016" ).getTime() );
}
@ProcessorTest
@WithClasses( DateToCalendarMapper.class )
public void shouldApplyBuiltInOnDateToCalendar() throws ParseException {
DateProperty source = new DateProperty();
source.setProp( new SimpleDateFormat( "dd.MM.yyyy" ).parse( "02.03.1999" ) );
source.publicProp = new SimpleDateFormat( "dd.MM.yyyy" ).parse( "02.03.2016" );
CalendarProperty target = DateToCalendarMapper.INSTANCE.map( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isNotNull();
assertThat( target.getProp() ).isEqualTo( createCalendar( "02.03.1999" ) );
assertThat( target.publicProp ).isNotNull();
assertThat( target.publicProp ).isEqualTo( createCalendar( "02.03.2016" ) );
}
@ProcessorTest
@WithClasses( CalendarToStringMapper.class )
public void shouldApplyBuiltInOnCalendarToString() throws ParseException {
CalendarProperty source = new CalendarProperty();
source.setProp( createCalendar( "02.03.1999" ) );
source.publicProp = createCalendar( "02.03.2016" );
StringProperty target = CalendarToStringMapper.INSTANCE.map( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isNotNull();
assertThat( target.getProp() ).isEqualTo( "02.03.1999" );
assertThat( target.publicProp ).isNotNull();
assertThat( target.publicProp ).isEqualTo( "02.03.2016" );
}
@ProcessorTest
@WithClasses( StringToCalendarMapper.class )
public void shouldApplyBuiltInOnStringToCalendar() throws ParseException {
StringProperty source = new StringProperty();
source.setProp( "02.03.1999" );
source.publicProp = "02.03.2016";
CalendarProperty target = StringToCalendarMapper.INSTANCE.map( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isNotNull();
assertThat( target.getProp() ).isEqualTo( createCalendar( "02.03.1999" ) );
assertThat( target.publicProp ).isNotNull();
assertThat( target.publicProp ).isEqualTo( createCalendar( "02.03.2016" ) );
}
@ProcessorTest
@WithClasses( IterableSourceTargetMapper.class )
public void shouldApplyBuiltInOnIterable() throws DatatypeConfigurationException {
IterableSource source = new IterableSource();
source.setDates( Arrays.asList( createXmlCal( 1999, 3, 2, 60 ) ) );
source.publicDates = Arrays.asList( createXmlCal( 2016, 3, 2, 60 ) );
IterableTarget target = IterableSourceTargetMapper.INSTANCE.sourceToTarget( source );
assertThat( target ).isNotNull();
assertThat( target.getDates() ).containsExactly( "02.03.1999" );
assertThat( target.publicDates ).containsExactly( "02.03.2016" );
}
@ProcessorTest
@WithClasses( {
MapSourceTargetMapper.class,
MapSource.class,
} )
@WithJavaxJaxb
public void shouldApplyBuiltInOnMap() throws DatatypeConfigurationException {
MapSource source = new MapSource();
source.setExample( new HashMap<>() );
source.getExample().put( createJaxb( "TEST" ), createXmlCal( 1999, 3, 2, 60 ) );
source.publicExample = new HashMap<>();
source.publicExample.put( createJaxb( "TEST" ), createXmlCal( 2016, 3, 2, 60 ) );
MapTarget target = MapSourceTargetMapper.INSTANCE.sourceToTarget( source );
assertThat( target ).isNotNull();
assertThat( target.getExample().get( "TEST" ) ).isEqualTo( "1999-03-02+01:00" );
assertThat( target.publicExample.get( "TEST" ) ).isEqualTo( "2016-03-02+01:00" );
}
@ProcessorTest
@WithClasses( CalendarToZonedDateTimeMapper.class )
public void shouldApplyBuiltInOnCalendarToZonedDateTime() throws ParseException {
assertThat( CalendarToZonedDateTimeMapper.INSTANCE.map( null ) ).isNull();
CalendarProperty source = new CalendarProperty();
source.setProp( createCalendar( "02.03.1999" ) );
source.publicProp = createCalendar( "02.03.2016" );
ZonedDateTimeProperty target = CalendarToZonedDateTimeMapper.INSTANCE.map( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isNotNull();
assertThat( target.getProp() ).isEqualTo( ZonedDateTime.of( 1999, 3, 2, 0, 0, 0, 0, ZoneId.systemDefault() ) );
assertThat( target.publicProp ).isNotNull();
assertThat( target.publicProp ).isEqualTo( ZonedDateTime.of( 2016, 3, 2, 0, 0, 0, 0, ZoneId.systemDefault() ) );
}
@ProcessorTest
@WithClasses( ZonedDateTimeToCalendarMapper.class )
public void shouldApplyBuiltInOnZonedDateTimeToCalendar() throws ParseException {
assertThat( ZonedDateTimeToCalendarMapper.INSTANCE.map( null ) ).isNull();
ZonedDateTimeProperty source = new ZonedDateTimeProperty();
source.setProp( ZonedDateTime.of( 1999, 3, 2, 0, 0, 0, 0, ZoneId.systemDefault() ) );
source.publicProp = ZonedDateTime.of( 2016, 3, 2, 0, 0, 0, 0, ZoneId.systemDefault() );
CalendarProperty target = ZonedDateTimeToCalendarMapper.INSTANCE.map( source );
assertThat( target ).isNotNull();
assertThat( target.getProp() ).isNotNull();
assertThat( target.getProp() ).isEqualTo( createCalendar( "02.03.1999" ) );
assertThat( target.publicProp ).isNotNull();
assertThat( target.publicProp ).isEqualTo( createCalendar( "02.03.2016" ) );
}
private JAXBElement<String> createJaxb(String test) {
return new JAXBElement<>( new QName( "www.mapstruct.org", "test" ), String.class, test );
}
private jakarta.xml.bind.JAXBElement<String> createJakartaJaxb(String test) {
return new jakarta.xml.bind.JAXBElement<>( new QName( "www.mapstruct.org", "test" ), String.class, test );
}
private List<JAXBElement<String>> createJaxbList(String test) {
List<JAXBElement<String>> result = new ArrayList<>();
result.add( createJaxb( test ) );
return result;
}
private List<jakarta.xml.bind.JAXBElement<String>> createJakartaJaxbList(String test) {
List<jakarta.xml.bind.JAXBElement<String>> result = new ArrayList<>();
result.add( createJakartaJaxb( test ) );
return result;
}
private Date createDate(String date) throws ParseException {
SimpleDateFormat sdf = new SimpleDateFormat( "dd-M-yyyy hh:mm:ss" );
return sdf.parse( date );
}
private Calendar createCalendar(String cal) throws ParseException {
SimpleDateFormat sdf = new SimpleDateFormat( "dd.MM.yyyy" );
GregorianCalendar gcal = new GregorianCalendar();
gcal.setTime( sdf.parse( cal ) );
return gcal;
}
private XMLGregorianCalendar createXmlCal(int year, int month, int day, int tz)
throws DatatypeConfigurationException {
return DatatypeFactory.newInstance().newXMLGregorianCalendarDate( year, month, day, tz );
}
}
| BuiltInTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java | {
"start": 2246,
"end": 4808
} | class ____ extends SuggestionBuilder<PhraseSuggestionBuilder> {
public static final String SUGGESTION_NAME = "phrase";
protected static final ParseField MAXERRORS_FIELD = new ParseField("max_errors");
protected static final ParseField RWE_LIKELIHOOD_FIELD = new ParseField("real_word_error_likelihood");
protected static final ParseField SEPARATOR_FIELD = new ParseField("separator");
protected static final ParseField CONFIDENCE_FIELD = new ParseField("confidence");
protected static final ParseField GRAMSIZE_FIELD = new ParseField("gram_size");
protected static final ParseField SMOOTHING_MODEL_FIELD = new ParseField("smoothing");
protected static final ParseField FORCE_UNIGRAM_FIELD = new ParseField("force_unigrams");
protected static final ParseField TOKEN_LIMIT_FIELD = new ParseField("token_limit");
protected static final ParseField HIGHLIGHT_FIELD = new ParseField("highlight");
protected static final ParseField PRE_TAG_FIELD = new ParseField("pre_tag");
protected static final ParseField POST_TAG_FIELD = new ParseField("post_tag");
protected static final ParseField COLLATE_FIELD = new ParseField("collate");
protected static final ParseField COLLATE_QUERY_FIELD = new ParseField("query");
protected static final ParseField COLLATE_QUERY_PARAMS = new ParseField("params");
protected static final ParseField COLLATE_QUERY_PRUNE = new ParseField("prune");
private float maxErrors = PhraseSuggestionContext.DEFAULT_MAX_ERRORS;
private String separator = PhraseSuggestionContext.DEFAULT_SEPARATOR;
private float realWordErrorLikelihood = PhraseSuggestionContext.DEFAULT_RWE_ERRORLIKELIHOOD;
private float confidence = PhraseSuggestionContext.DEFAULT_CONFIDENCE;
// gramSize needs to be optional although there is a default, if unset parser try to detect and use shingle size
private Integer gramSize;
private boolean forceUnigrams = PhraseSuggestionContext.DEFAULT_REQUIRE_UNIGRAM;
private int tokenLimit = NoisyChannelSpellChecker.DEFAULT_TOKEN_LIMIT;
private String preTag;
private String postTag;
private Script collateQuery;
private Map<String, Object> collateParams;
private boolean collatePrune = PhraseSuggestionContext.DEFAULT_COLLATE_PRUNE;
private SmoothingModel model;
private final Map<String, List<CandidateGenerator>> generators = new HashMap<>();
public PhraseSuggestionBuilder(String field) {
super(field);
}
/**
* internal copy constructor that copies over all | PhraseSuggestionBuilder |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/concurrent/AtomicInitializer.java | {
"start": 1172,
"end": 2172
} | class ____ a member field of type {@link AtomicReference}. It
* implements the following algorithm to create and initialize an object in its
* {@link #get()} method:
* </p>
* <ul>
* <li>First it is checked whether the {@link AtomicReference} variable contains
* already a value. If this is the case, the value is directly returned.</li>
* <li>Otherwise the {@link #initialize()} method is called. This method must be
* defined in concrete subclasses to actually create the managed object.</li>
* <li>After the object was created by {@link #initialize()} it is checked
* whether the {@link AtomicReference} variable is still undefined. This has to
* be done because in the meantime another thread may have initialized the
* object. If the reference is still empty, the newly created object is stored
* in it and returned by this method.</li>
* <li>Otherwise the value stored in the {@link AtomicReference} is returned.</li>
* </ul>
* <p>
* Because atomic variables are used this | maintains |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DoNotCallSuggesterTest.java | {
"start": 4892,
"end": 5368
} | class ____ {
// BUG: Diagnostic contains: Always throws java.lang.RuntimeException
public final String get() {
throw new RuntimeException();
}
}
""")
.doTest();
}
@Test
public void finalClass_publicFinalMethod_overriddenMethod() {
testHelper
.addSourceLines(
"Test.java",
"""
import java.util.function.Supplier;
final | Test |
java | spring-projects__spring-boot | module/spring-boot-jdbc/src/test/java/org/springframework/boot/jdbc/autoconfigure/DataSourceAutoConfigurationTests.java | {
"start": 13562,
"end": 13948
} | class ____ {
private @Nullable BasicDataSource pool;
@Bean
DataSource dataSource() {
this.pool = new BasicDataSource();
this.pool.setDriverClassName("org.hsqldb.jdbcDriver");
this.pool.setUrl("jdbc:hsqldb:mem:overridedb");
this.pool.setUsername("sa");
return this.pool;
}
}
// see testExplicitDriverClassClearsUsername
public static | TestDataSourceConfiguration |
java | apache__spark | examples/src/main/java/org/apache/spark/examples/JavaWordCount.java | {
"start": 1085,
"end": 1968
} | class ____ {
private static final Pattern SPACE = Pattern.compile(" ");
public static void main(String[] args) throws Exception {
if (args.length < 1) {
System.err.println("Usage: JavaWordCount <file>");
System.exit(1);
}
SparkSession spark = SparkSession
.builder()
.appName("JavaWordCount")
.getOrCreate();
JavaRDD<String> lines = spark.read().textFile(args[0]).javaRDD();
JavaRDD<String> words = lines.flatMap(s -> Arrays.asList(SPACE.split(s)).iterator());
JavaPairRDD<String, Integer> ones = words.mapToPair(s -> new Tuple2<>(s, 1));
JavaPairRDD<String, Integer> counts = ones.reduceByKey((i1, i2) -> i1 + i2);
List<Tuple2<String, Integer>> output = counts.collect();
for (Tuple2<?,?> tuple : output) {
System.out.println(tuple._1() + ": " + tuple._2());
}
spark.stop();
}
}
| JavaWordCount |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/headers/CookiesTest.java | {
"start": 456,
"end": 10000
} | class ____ {
@RegisterExtension
static ResteasyReactiveUnitTest TEST = new ResteasyReactiveUnitTest()
.withApplicationRoot((jar) -> jar.addClasses(CookiesTestResource.class));
@Test
void testDefaults() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=hello;")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie()
.value("hello")
.version(1)
.secured(false)
.httpOnly(false)
.maxAge(-1)
.path(is(nullValue()))
.domain(is(nullValue()))
.comment(is(nullValue())));
}
@Test
void testVersion0() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";Version=\"0\";")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").version(0));
}
@Test
void testVersion0WithoutColon() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";Version=\"0\"")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").version(0));
}
@Test
void testVersion0Lowercase() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";version=\"0\"")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").version(0));
}
@Test
void testVersion1() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";Version=\"1\";")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").version(1));
}
@Test
void testVersion1WithoutColon() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";Version=\"1\"")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").version(1));
}
@Test
void testVersion1Lowercase() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";version=\"1\"")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").version(1));
}
@Test
void testSameSite() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";SameSite=\"Lax\";")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").sameSite("Lax"));
}
@Test
void testSameSiteWithoutColon() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";SameSite=\"None\"")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").sameSite("None"));
}
@Test
void testSameSiteLowercase() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";samesite=\"Strict\"")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").sameSite("Strict"));
}
@Test
void testHttpOnlyWithoutColon() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";HttpOnly")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").httpOnly(true));
}
@Test
void testHttpOnly() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";HttpOnly;")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").httpOnly(true));
}
@Test
void testHttpOnlyLowercase() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";httponly;")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").httpOnly(true));
}
@Test
void testSecureWithoutColon() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";Secure")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").secured(true));
}
@Test
void testSecure() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";Secure;")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").secured(true));
}
@Test
void testSecureLowercase() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";secure;")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").secured(true));
}
@Test
void testDomainWithoutColon() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";Domain=\"quarkus.io\"")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").domain("quarkus.io"));
}
@Test
void testDomain() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";Secure;Domain=\"quarkus.io\";")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").domain("quarkus.io"));
}
@Test
void testDomainLowercase() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";Secure;domain=\"quarkus.io\";")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").domain("quarkus.io"));
}
@Test
void testPathWithoutColon() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";Path=\"quarkus.io\"")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").path("quarkus.io"));
}
@Test
void testPath() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";Path=\"quarkus.io\";")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").path("quarkus.io"));
}
@Test
void testPathLowercase() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";path=\"quarkus.io\";")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").path("quarkus.io"));
}
@Test
void testCommentWithoutColon() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";Comment=\"quarkus.io\"")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").comment("quarkus.io"));
}
@Test
void testComment() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";Comment=\"quarkus.io\";")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").comment("quarkus.io"));
}
@Test
void testCommentLowercase() {
given()
.when()
.urlEncodingEnabled(true)
.formParam("cookie", "greeting=\"hello\";comment=\"quarkus.io\";")
.post("/cookies/set-cookie")
.then()
.cookie("greeting", detailedCookie().value("hello").comment("quarkus.io"));
}
}
| CookiesTest |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/WordpressEndpointBuilderFactory.java | {
"start": 32663,
"end": 32995
} | class ____ extends AbstractEndpointBuilder implements WordpressEndpointBuilder, AdvancedWordpressEndpointBuilder {
public WordpressEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new WordpressEndpointBuilderImpl(path);
}
} | WordpressEndpointBuilderImpl |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/DialectLogging.java | {
"start": 825,
"end": 1212
} | interface ____ {
String LOGGER_NAME = SubSystemLogging.BASE + ".dialect";
Logger DIALECT_LOGGER = Logger.getLogger(LOGGER_NAME);
DialectLogging DIALECT_MESSAGE_LOGGER = Logger.getMessageLogger( MethodHandles.lookup(), DialectLogging.class, LOGGER_NAME );
@LogMessage(level = DEBUG)
@Message(value = "Using dialect: %s", id = 35001)
void usingDialect(Dialect dialect);
}
| DialectLogging |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/format/jaxb/JaxbXmlFormatMapper.java | {
"start": 20341,
"end": 20435
} | interface ____ {
int size();
}
@XmlRootElement(name = "Map")
public static | ManagedMapWrapper |
java | quarkusio__quarkus | extensions/tls-registry/spi/src/main/java/io/quarkus/tls/TlsConfiguration.java | {
"start": 298,
"end": 3251
} | interface ____ {
static Optional<TlsConfiguration> from(TlsConfigurationRegistry registry, Optional<String> name) {
if (name.isPresent()) {
Optional<TlsConfiguration> maybeConfiguration = registry.get(name.get());
if (maybeConfiguration.isEmpty()) {
throw new IllegalStateException("Unable to find the TLS configuration for name " + name.get() + ".");
}
return maybeConfiguration;
}
return Optional.empty();
}
/**
* Returns the key store.
*
* @return the key store if configured.
*/
KeyStore getKeyStore();
/**
* Returns the key store options.
*
* @return the key store options if configured.
*/
KeyCertOptions getKeyStoreOptions();
/**
* Returns the trust store.
*
* @return the trust store if configured.
*/
KeyStore getTrustStore();
/**
* Returns the trust store options.
*
* @return the trust store options if configured.
*/
TrustOptions getTrustStoreOptions();
/**
* Returns the (Vert.x) SSL options.
*
* @return the {@link SSLOptions}, {@code null} if not configured.
*/
SSLOptions getSSLOptions();
/**
* Creates and returns the SSL Context.
*
* @return the {@link SSLContext}, {@code null} if not configured.
* @throws Exception if the SSL Context cannot be created.
*/
SSLContext createSSLContext() throws Exception;
/**
* Returns whether the trust store is configured to trust all certificates.
*
* @return {@code true} if the trust store is configured to trust all certificates, {@code false} otherwise.
*/
boolean isTrustAll();
/**
* Returns the hostname verification algorithm for this configuration.
* {@code "NONE"} means no hostname verification.
*
* @return the hostname verification algorithm.
*/
Optional<String> getHostnameVerificationAlgorithm();
/**
* Returns whether the key store is configured to use SNI.
* When SNI is used, the client indicate the server name during the TLS handshake, allowing the server to select the
* right certificate.
*
* @return {@code true} if the key store is configured to use SNI, {@code false} otherwise.
*/
boolean usesSni();
/**
* Reloads the configuration.
* It usually means reloading the key store and trust store, especially when they are files.
*
* @return {@code true} if the configuration has been reloaded, {@code false} otherwise.
*/
boolean reload();
/**
* Returns the name which was associated with this configuration
* <p>
* Note: Although this was made default in order to not break deep integrations, it is strongly recommended that the method
* be implemented.
*/
default String getName() {
return "unset";
}
}
| TlsConfiguration |
java | spring-projects__spring-boot | module/spring-boot-cache/src/test/java/org/springframework/boot/cache/autoconfigure/CacheManagerCustomizersTests.java | {
"start": 1141,
"end": 2433
} | class ____ {
@Test
void customizeWithNullCustomizersShouldDoNothing() {
new CacheManagerCustomizers(null).customize(mock(CacheManager.class));
}
@Test
void customizeSimpleCacheManager() {
CacheManagerCustomizers customizers = new CacheManagerCustomizers(
Collections.singletonList(new CacheNamesCacheManagerCustomizer()));
ConcurrentMapCacheManager cacheManager = new ConcurrentMapCacheManager();
customizers.customize(cacheManager);
assertThat(cacheManager.getCacheNames()).containsOnly("one", "two");
}
@Test
void customizeShouldCheckGeneric() {
List<TestCustomizer<?>> list = new ArrayList<>();
list.add(new TestCustomizer<>());
list.add(new TestConcurrentMapCacheManagerCustomizer());
CacheManagerCustomizers customizers = new CacheManagerCustomizers(list);
customizers.customize(mock(CacheManager.class));
assertThat(list.get(0).getCount()).isOne();
assertThat(list.get(1).getCount()).isZero();
customizers.customize(mock(ConcurrentMapCacheManager.class));
assertThat(list.get(0).getCount()).isEqualTo(2);
assertThat(list.get(1).getCount()).isOne();
customizers.customize(mock(CaffeineCacheManager.class));
assertThat(list.get(0).getCount()).isEqualTo(3);
assertThat(list.get(1).getCount()).isOne();
}
static | CacheManagerCustomizersTests |
java | apache__camel | components/camel-bean/src/main/java/org/apache/camel/component/bean/BeanInfo.java | {
"start": 15058,
"end": 17163
} | class ____ any public constructors?
publicConstructors = clazz.getConstructors().length > 0;
publicNoArgConstructors = org.apache.camel.util.ObjectHelper.hasDefaultPublicNoArgConstructor(clazz);
MethodsFilter methods = new MethodsFilter(getType());
introspect(clazz, methods);
// now introspect the methods and filter non valid methods
for (Method method : methods.asReadOnlyList()) {
boolean valid = isValidMethod(clazz, method);
LOG.trace("Method: {} is valid: {}", method, valid);
if (valid) {
introspect(clazz, method);
}
}
}
private void introspect(Class<?> clazz, MethodsFilter filteredMethods) {
// get the target clazz as it could potentially have been enhanced by
// CGLIB etc.
clazz = getTargetClass(clazz);
org.apache.camel.util.ObjectHelper.notNull(clazz, "clazz", this);
LOG.trace("Introspecting class: {}", clazz);
for (Method m : clazz.getDeclaredMethods()) {
filteredMethods.filterMethod(m);
}
Class<?> superClass = clazz.getSuperclass();
if (superClass != null && !superClass.equals(Object.class)) {
introspect(superClass, filteredMethods);
}
for (Class<?> superInterface : clazz.getInterfaces()) {
introspect(superInterface, filteredMethods);
}
}
/**
* Introspects the given method
*
* @param clazz the class
* @param method the method
*/
private void introspect(Class<?> clazz, Method method) {
LOG.trace("Introspecting class: {}, method: {}", clazz, method);
String opName = method.getName();
MethodInfo methodInfo = createMethodInfo(clazz, method);
// Foster the use of a potentially already registered most specific override
MethodInfo existingMethodInfo = findMostSpecificOverride(methodInfo);
if (existingMethodInfo != null) {
LOG.trace("This method is already overridden in a subclass, so the method from the sub | have |
java | apache__dubbo | dubbo-registry/dubbo-registry-multicast/src/main/java/org/apache/dubbo/registry/multicast/MulticastServiceDiscoveryFactory.java | {
"start": 1023,
"end": 1269
} | class ____ extends AbstractServiceDiscoveryFactory {
@Override
protected ServiceDiscovery createDiscovery(URL registryURL) {
return new MulticastServiceDiscovery(applicationModel, registryURL);
}
}
| MulticastServiceDiscoveryFactory |
java | quarkusio__quarkus | extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/i18n/LocalizedBundleDefaultLocaleConflictTest.java | {
"start": 522,
"end": 1594
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot(root -> root
.addClasses(Messages.class, EnMessages.class))
.overrideConfigKey("quarkus.default-locale", "en")
.assertException(t -> {
Throwable rootCause = ExceptionUtil.getRootCause(t);
if (rootCause instanceof MessageBundleException) {
assertEquals(
"Locale of [io.quarkus.qute.deployment.i18n.LocalizedBundleDefaultLocaleConflictTest$EnMessages] conflicts with the locale [en] of the default message bundle [io.quarkus.qute.deployment.i18n.LocalizedBundleDefaultLocaleConflictTest$Messages]",
rootCause.getMessage());
} else {
fail("No message bundle exception thrown: " + t);
}
});
@Test
public void testValidation() {
fail();
}
@MessageBundle
public | LocalizedBundleDefaultLocaleConflictTest |
java | spring-projects__spring-framework | framework-docs/src/main/java/org/springframework/docs/dataaccess/jdbc/jdbcjdbctemplateidioms/JdbcCorporateEventDaoConfiguration.java | {
"start": 290,
"end": 816
} | class ____ {
// tag::snippet[]
@Bean
JdbcCorporateEventDao corporateEventDao(DataSource dataSource) {
return new JdbcCorporateEventDao(dataSource);
}
@Bean(destroyMethod = "close")
BasicDataSource dataSource() {
BasicDataSource dataSource = new BasicDataSource();
dataSource.setDriverClassName("org.hsqldb.jdbcDriver");
dataSource.setUrl("jdbc:hsqldb:hsql://localhost:");
dataSource.setUsername("sa");
dataSource.setPassword("");
return dataSource;
}
// end::snippet[]
}
| JdbcCorporateEventDaoConfiguration |
java | google__guava | android/guava/src/com/google/common/io/ByteSource.java | {
"start": 24449,
"end": 26327
} | class ____ have potential problems with infinite Iterables. But unlike
// those, this method can cause issues even if the user is dealing with a (finite) slice()
// of this source, since the slice's sizeIfKnown() method needs to know the size of the
// underlying source to know what its size actually is.
return Optional.absent();
}
long result = 0L;
for (ByteSource source : sources) {
Optional<Long> sizeIfKnown = source.sizeIfKnown();
if (!sizeIfKnown.isPresent()) {
return Optional.absent();
}
result += sizeIfKnown.get();
if (result < 0) {
// Overflow (or one or more sources that returned a negative size, but all bets are off in
// that case)
// Can't represent anything higher, and realistically there probably isn't anything that
// can actually be done anyway with the supposed 8+ exbibytes of data the source is
// claiming to have if we get here, so just stop.
return Optional.of(Long.MAX_VALUE);
}
}
return Optional.of(result);
}
@Override
public long size() throws IOException {
long result = 0L;
for (ByteSource source : sources) {
result += source.size();
if (result < 0) {
// Overflow (or one or more sources that returned a negative size, but all bets are off in
// that case)
// Can't represent anything higher, and realistically there probably isn't anything that
// can actually be done anyway with the supposed 8+ exbibytes of data the source is
// claiming to have if we get here, so just stop.
return Long.MAX_VALUE;
}
}
return result;
}
@Override
public String toString() {
return "ByteSource.concat(" + sources + ")";
}
}
}
| also |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/CompareToZeroTest.java | {
"start": 3691,
"end": 4272
} | class ____ {
void test(Integer i) {
boolean b1 = i.compareTo(2) == -1;
boolean b2 = i.compareTo(2) > -1;
boolean b3 = -1 < i.compareTo(2);
boolean b4 = i.compareTo(2) < 1;
boolean b5 = i.compareTo(2) != -1;
boolean b6 = i.compareTo(2) != 1;
boolean b7 = i.compareTo(2) <= -1;
boolean b8 = ((i.compareTo(2))) >= 1;
}
}
""")
.addOutputLines(
"Test.java",
"""
| Test |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_containsAnyOf_Test.java | {
"start": 859,
"end": 1234
} | class ____ extends CharSequenceAssertBaseTest {
@Override
protected CharSequenceAssert invoke_api_method() {
return assertions.containsAnyOf("a", "b", "c");
}
@Override
protected void verify_internal_effects() {
verify(strings).assertContainsAnyOf(getInfo(assertions), getActual(assertions), array("a", "b", "c"));
}
}
| CharSequenceAssert_containsAnyOf_Test |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java | {
"start": 67190,
"end": 67705
} | class ____ extends ContainerTransition {
@SuppressWarnings("unchecked")
@Override
public void transition(ContainerImpl container, ContainerEvent event) {
container.sendContainerMonitorStartEvent();
container.wasLaunched = true;
container.setIsPaused(true);
}
}
/**
* Transition from RUNNING or KILLING state to
* EXITED_WITH_SUCCESS state upon EXITED_WITH_SUCCESS message.
*/
@SuppressWarnings("unchecked") // dispatcher not typed
static | RecoveredContainerTransition |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/test/java/org/apache/hadoop/yarn/server/router/webapp/MockRESTRequestInterceptor.java | {
"start": 4051,
"end": 13998
} | class ____ extends AbstractRESTRequestInterceptor {
@Override
public void setNextInterceptor(RESTRequestInterceptor next) {
throw new YarnRuntimeException(
"setNextInterceptor is being called on MockRESTRequestInterceptor,"
+ "which should be the last one in the chain. "
+ "Check if the interceptor pipeline configuration is correct");
}
@Override
public ClusterInfo get() {
return new ClusterInfo();
}
@Override
public ClusterInfo getClusterInfo() {
return new ClusterInfo();
}
@Override
public ClusterUserInfo getClusterUserInfo(HttpServletRequest hsr) {
return new ClusterUserInfo();
}
@Override
public ClusterMetricsInfo getClusterMetricsInfo() {
return new ClusterMetricsInfo();
}
@Override
public SchedulerTypeInfo getSchedulerInfo() {
return new SchedulerTypeInfo();
}
@Override
public String dumpSchedulerLogs(String time, HttpServletRequest hsr)
throws IOException {
return "Done";
}
@Override
public NodesInfo getNodes(String states) {
return new NodesInfo();
}
@Override
public NodeInfo getNode(String nodeId) {
return new NodeInfo();
}
@Override
public ResourceInfo updateNodeResource(HttpServletRequest hsr, String nodeId,
ResourceOptionInfo resourceOption) throws AuthorizationException {
return new ResourceInfo();
}
@SuppressWarnings("checkstyle:parameternumber")
@Override
public AppsInfo getApps(HttpServletRequest hsr, String stateQuery,
Set<String> statesQuery, String finalStatusQuery, String userQuery,
String queueQuery, String count, String startedBegin, String startedEnd,
String finishBegin, String finishEnd, Set<String> applicationTypes,
Set<String> applicationTags, String name, Set<String> unselectedFields) {
return new AppsInfo();
}
@Override
public ActivitiesInfo getActivities(HttpServletRequest hsr, String nodeId,
String groupBy) {
return new ActivitiesInfo();
}
@Override
public BulkActivitiesInfo getBulkActivities(HttpServletRequest hsr,
String groupBy, int activitiesCount) throws InterruptedException{
return new BulkActivitiesInfo();
}
@Override
public AppActivitiesInfo getAppActivities(HttpServletRequest hsr,
String appId, String time, Set<String> requestPriorities,
Set<String> allocationRequestIds, String groupBy, String limit,
Set<String> actions, boolean summarize) {
return new AppActivitiesInfo();
}
@Override
public ApplicationStatisticsInfo getAppStatistics(HttpServletRequest hsr,
Set<String> stateQueries, Set<String> typeQueries) {
return new ApplicationStatisticsInfo();
}
@Override
public AppInfo getApp(HttpServletRequest hsr, String appId,
Set<String> unselectedFields) {
return new AppInfo();
}
@Override
public AppState getAppState(HttpServletRequest hsr, String appId)
throws AuthorizationException {
return new AppState();
}
@Override
public Response updateAppState(AppState targetState, HttpServletRequest hsr,
String appId) throws AuthorizationException, YarnException,
InterruptedException, IOException {
return Response.status(Status.OK).build();
}
@Override
public NodeToLabelsInfo getNodeToLabels(HttpServletRequest hsr)
throws IOException {
return new NodeToLabelsInfo();
}
@Override
public NodeLabelsInfo getRMNodeLabels(HttpServletRequest hsr) throws IOException {
return new NodeLabelsInfo();
}
@Override
public LabelsToNodesInfo getLabelsToNodes(Set<String> labels)
throws IOException {
return new LabelsToNodesInfo();
}
@Override
public Response replaceLabelsOnNodes(NodeToLabelsEntryList newNodeToLabels,
HttpServletRequest hsr) throws Exception {
return Response.status(Status.OK).build();
}
@Override
public Response replaceLabelsOnNode(Set<String> newNodeLabelsName,
HttpServletRequest hsr, String nodeId) throws Exception {
return Response.status(Status.OK).build();
}
@Override
public NodeLabelsInfo getClusterNodeLabels(HttpServletRequest hsr)
throws IOException {
return new NodeLabelsInfo();
}
@Override
public Response addToClusterNodeLabels(NodeLabelsInfo newNodeLabels,
HttpServletRequest hsr) throws Exception {
return Response.status(Status.OK).build();
}
@Override
public Response removeFromClusterNodeLabels(Set<String> oldNodeLabels,
HttpServletRequest hsr) throws Exception {
return Response.status(Status.OK).build();
}
@Override
public NodeLabelsInfo getLabelsOnNode(HttpServletRequest hsr, String nodeId)
throws IOException {
return new NodeLabelsInfo();
}
@Override
public AppPriority getAppPriority(HttpServletRequest hsr, String appId)
throws AuthorizationException {
return new AppPriority();
}
@Override
public Response updateApplicationPriority(AppPriority targetPriority,
HttpServletRequest hsr, String appId) throws AuthorizationException,
YarnException, InterruptedException, IOException {
return Response.status(Status.OK).build();
}
@Override
public AppQueue getAppQueue(HttpServletRequest hsr, String appId)
throws AuthorizationException {
// Mock stub: hand back an empty queue descriptor.
AppQueue empty = new AppQueue();
return empty;
}
@Override
public Response updateAppQueue(AppQueue targetQueue, HttpServletRequest hsr,
String appId) throws AuthorizationException, YarnException,
InterruptedException, IOException {
// Mock stub: pretend the queue move succeeded.
Response accepted = Response.status(Status.OK).build();
return accepted;
}
@Override
public Response createNewApplication(HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException {
// Mock stub: pretend a new application id was allocated.
Response accepted = Response.status(Status.OK).build();
return accepted;
}
@Override
public Response submitApplication(ApplicationSubmissionContextInfo newApp,
HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException {
// Mock stub: pretend the submission was accepted.
Response accepted = Response.status(Status.OK).build();
return accepted;
}
@Override
public Response postDelegationToken(DelegationToken tokenData,
HttpServletRequest hsr) throws AuthorizationException, IOException,
InterruptedException, Exception {
// Mock stub: pretend the token was created.
Response accepted = Response.status(Status.OK).build();
return accepted;
}
@Override
public Response postDelegationTokenExpiration(HttpServletRequest hsr)
throws AuthorizationException, IOException, Exception {
// Mock stub: pretend the token renewal succeeded.
Response accepted = Response.status(Status.OK).build();
return accepted;
}
@Override
public Response cancelDelegationToken(HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException,
Exception {
// Mock stub: pretend the cancellation succeeded.
Response accepted = Response.status(Status.OK).build();
return accepted;
}
@Override
public Response createNewReservation(HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException {
// Mock stub: pretend a reservation id was allocated.
Response accepted = Response.status(Status.OK).build();
return accepted;
}
@Override
public Response submitReservation(ReservationSubmissionRequestInfo resContext,
HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException {
// Mock stub: pretend the reservation was accepted.
Response accepted = Response.status(Status.OK).build();
return accepted;
}
@Override
public Response updateReservation(ReservationUpdateRequestInfo resContext,
HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException {
// Mock stub: pretend the reservation update succeeded.
Response accepted = Response.status(Status.OK).build();
return accepted;
}
@Override
public Response deleteReservation(ReservationDeleteRequestInfo resContext,
HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException {
// Mock stub: pretend the reservation was deleted.
Response accepted = Response.status(Status.OK).build();
return accepted;
}
@Override
public Response listReservation(String queue, String reservationId,
long startTime, long endTime, boolean includeResourceAllocations,
HttpServletRequest hsr) throws Exception {
// Mock stub: answer OK with no reservation payload.
Response accepted = Response.status(Status.OK).build();
return accepted;
}
@Override
public AppTimeoutInfo getAppTimeout(HttpServletRequest hsr, String appId,
String type) throws AuthorizationException {
// Mock stub: hand back a default (empty) timeout descriptor.
AppTimeoutInfo empty = new AppTimeoutInfo();
return empty;
}
@Override
public AppTimeoutsInfo getAppTimeouts(HttpServletRequest hsr, String appId)
throws AuthorizationException {
// Mock stub: hand back an empty timeout list.
AppTimeoutsInfo empty = new AppTimeoutsInfo();
return empty;
}
@Override
public Response updateApplicationTimeout(AppTimeoutInfo appTimeout,
HttpServletRequest hsr, String appId) throws AuthorizationException,
YarnException, InterruptedException, IOException {
// Mock stub: pretend the timeout update succeeded.
Response accepted = Response.status(Status.OK).build();
return accepted;
}
@Override
public AppAttemptsInfo getAppAttempts(HttpServletRequest hsr, String appId) {
// Mock stub: hand back an empty attempts list.
AppAttemptsInfo empty = new AppAttemptsInfo();
return empty;
}
@Override
public RMQueueAclInfo checkUserAccessToQueue(String queue, String username,
String queueAclType, HttpServletRequest hsr) {
// Mock stub: grant queue access to every caller, with no diagnostics.
RMQueueAclInfo allowed = new RMQueueAclInfo(true, username, "");
return allowed;
}
@Override
public AppAttemptInfo getAppAttempt(HttpServletRequest req,
HttpServletResponse res, String appId, String appAttemptId) {
// Mock stub: hand back an empty attempt descriptor.
AppAttemptInfo empty = new AppAttemptInfo();
return empty;
}
@Override
public ContainersInfo getContainers(HttpServletRequest req,
HttpServletResponse res, String appId, String appAttemptId) {
// Mock stub: hand back an empty containers list.
ContainersInfo empty = new ContainersInfo();
return empty;
}
@Override
public ContainerInfo getContainer(HttpServletRequest req,
HttpServletResponse res, String appId, String appAttemptId,
String containerId) {
// Mock stub: hand back an empty container descriptor.
ContainerInfo empty = new ContainerInfo();
return empty;
}
@Override
public Response signalToContainer(String containerId, String command,
HttpServletRequest req) {
// Mock stub: pretend the signal was delivered.
Response accepted = Response.status(Status.OK).build();
return accepted;
}
@Override
public Response updateSchedulerConfiguration(SchedConfUpdateInfo mutationInfo,
HttpServletRequest hsr) throws AuthorizationException, InterruptedException {
// Mock stub: pretend the configuration mutation was applied.
Response accepted = Response.status(Status.OK).build();
return accepted;
}
@Override
public Response getSchedulerConfiguration(HttpServletRequest hsr)
throws AuthorizationException {
// Mock stub: answer OK with no configuration payload.
Response accepted = Response.status(Status.OK).build();
return accepted;
}
} | MockRESTRequestInterceptor |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/RunSharedCacheCleanerTaskResponse.java | {
"start": 1220,
"end": 1887
} | class ____ {
/**
 * Get whether or not the shared cache manager has accepted the request.
 * The shared cache manager will reject the request if there is an ongoing
 * task.
 *
 * @return boolean True if the request has been accepted, false otherwise.
 */
@Public
@Unstable
public abstract boolean getAccepted();
/**
 * Set whether or not the shared cache manager has accepted the request.
 * The shared cache manager will reject the request if there is an ongoing
 * task.
 *
 * @param b True if the request has been accepted, false otherwise.
 */
@Public
@Unstable
public abstract void setAccepted(boolean b);
}
| RunSharedCacheCleanerTaskResponse |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.