language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/request/ElasticInferenceServiceUnifiedChatCompletionRequestEntity.java | {
"start": 724,
"end": 1530
} | class ____ implements ToXContentObject {
private final UnifiedChatCompletionRequestEntity unifiedRequestEntity;
private final String modelId;
public ElasticInferenceServiceUnifiedChatCompletionRequestEntity(UnifiedChatInput unifiedChatInput, String modelId) {
this.unifiedRequestEntity = new UnifiedChatCompletionRequestEntity(unifiedChatInput);
this.modelId = Objects.requireNonNull(modelId);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
unifiedRequestEntity.toXContent(builder, UnifiedCompletionRequest.withMaxCompletionTokens(modelId, params));
builder.endObject();
return builder;
}
}
| ElasticInferenceServiceUnifiedChatCompletionRequestEntity |
java | micronaut-projects__micronaut-core | context/src/test/groovy/io/micronaut/runtime/event/annotation/OverloadedListener.java | {
"start": 800,
"end": 1076
} | class ____ {
StartupEvent startup;
ShutdownEvent shutdown;
@EventListener
void receive(StartupEvent event) {
this.startup = event;
}
@EventListener
void receive(ShutdownEvent event) {
this.shutdown = event;
}
}
| OverloadedListener |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/internal/pool/PoolWaiter.java | {
"start": 611,
"end": 711
} | class ____<C> {
static final Listener NULL_LISTENER = new Listener() {
};
/**
* An | PoolWaiter |
java | elastic__elasticsearch | x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/SecurityFeatureStateIntegTests.java | {
"start": 1349,
"end": 8021
} | class ____ extends AbstractPrivilegeTestCase {
private static final String LOCAL_TEST_USER_NAME = "feature_state_user";
private static final String LOCAL_TEST_USER_PASSWORD = "my_password";
private static Path repositoryLocation;
@BeforeClass
public static void setupRepositoryPath() {
repositoryLocation = createTempDir();
}
@AfterClass
public static void cleanupRepositoryPath() {
repositoryLocation = null;
}
@Override
protected boolean addMockHttpTransport() {
return false; // enable http
}
@Override
protected Settings nodeSettings() {
return Settings.builder().put(super.nodeSettings()).put("path.repo", repositoryLocation).build();
}
/**
* Test that, when the security system index is restored as a feature state,
* the security plugin's listeners detect the state change and reload native
* realm privileges.
*
* We use the admin client to handle snapshots and the rest API to manage
* security roles and users. We use the native realm instead of the file
* realm because this test relies on dynamically changing privileges.
*/
public void testSecurityFeatureStateSnapshotAndRestore() throws Exception {
// set up a snapshot repository
final String repositoryName = "test-repo";
clusterAdmin().preparePutRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, repositoryName)
.setType("fs")
.setSettings(Settings.builder().put("location", repositoryLocation))
.get();
// create a new role
final String roleName = "extra_role";
final Request createRoleRequest = new Request("PUT", "/_security/role/" + roleName);
createRoleRequest.addParameter("refresh", "wait_for");
createRoleRequest.setJsonEntity("""
{
"indices": [
{
"names": [ "test_index" ],
"privileges": [ "create", "create_index", "create_doc" ]
}
]
}""");
performSuperuserRequest(createRoleRequest);
// create a test user
final Request createUserRequest = new Request("PUT", "/_security/user/" + LOCAL_TEST_USER_NAME);
createUserRequest.addParameter("refresh", "wait_for");
createUserRequest.setJsonEntity(Strings.format("""
{ "password": "%s", "roles": [ "%s" ]}
""", LOCAL_TEST_USER_PASSWORD, roleName));
performSuperuserRequest(createUserRequest);
// test user posts a document
final Request postTestDocument1 = new Request("POST", "/test_index/_doc");
postTestDocument1.setJsonEntity("""
{"message": "before snapshot"}
""");
performTestUserRequest(postTestDocument1);
// snapshot state
final String snapshotName = "security-state";
clusterAdmin().prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, repositoryName, snapshotName)
.setIndices("test_index")
.setFeatureStates("LocalStateSecurity")
.get();
waitForSnapshotToFinish(repositoryName, snapshotName);
// modify user's roles
final Request modifyUserRequest = new Request("PUT", "/_security/user/" + LOCAL_TEST_USER_NAME);
modifyUserRequest.addParameter("refresh", "wait_for");
modifyUserRequest.setJsonEntity("{\"roles\": [] }");
performSuperuserRequest(modifyUserRequest);
// new user has lost privileges and can't post a document
final Request postDocumentRequest2 = new Request("POST", "/test_index/_doc");
postDocumentRequest2.setJsonEntity("{\"message\": \"between snapshot and restore\"}");
ResponseException exception = expectThrows(ResponseException.class, () -> performTestUserRequest(postDocumentRequest2));
assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(403));
assertThat(
exception.getMessage(),
containsString("action [" + TransportIndexAction.NAME + "] is unauthorized for user [" + LOCAL_TEST_USER_NAME + "]")
);
client().admin().indices().prepareClose("test_index").get();
// restore state
clusterAdmin().prepareRestoreSnapshot(TEST_REQUEST_TIMEOUT, repositoryName, snapshotName)
.setFeatureStates("LocalStateSecurity")
.setIndices("test_index")
.setWaitForCompletion(true)
.get();
// user has privileges again
final Request postDocumentRequest3 = new Request("POST", "/test_index/_doc");
postDocumentRequest3.setJsonEntity("{\"message\": \"after restore\"}");
performTestUserRequest(postDocumentRequest3);
}
private Response performSuperuserRequest(Request request) throws Exception {
String token = UsernamePasswordToken.basicAuthHeaderValue(
ES_TEST_ROOT_USER,
new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())
);
return performAuthenticatedRequest(request, token);
}
private Response performTestUserRequest(Request request) throws Exception {
String token = UsernamePasswordToken.basicAuthHeaderValue(
LOCAL_TEST_USER_NAME,
new SecureString(LOCAL_TEST_USER_PASSWORD.toCharArray())
);
return performAuthenticatedRequest(request, token);
}
private Response performAuthenticatedRequest(Request request, String token) throws Exception {
RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
options.addHeader("Authorization", token);
request.setOptions(options);
return getRestClient().performRequest(request);
}
private void waitForSnapshotToFinish(String repo, String snapshot) throws Exception {
assertBusy(() -> {
SnapshotsStatusResponse response = clusterAdmin().prepareSnapshotStatus(TEST_REQUEST_TIMEOUT, repo)
.setSnapshots(snapshot)
.get();
assertThat(response.getSnapshots().get(0).getState(), is(SnapshotsInProgress.State.SUCCESS));
// The status of the snapshot in the repository can become SUCCESS before it is fully finalized in the cluster state so wait for
// it to disappear from the cluster state as well
SnapshotsInProgress snapshotsInProgress = SnapshotsInProgress.get(
clusterAdmin().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).get().getState()
);
assertTrue(snapshotsInProgress.isEmpty());
});
}
}
| SecurityFeatureStateIntegTests |
java | spring-projects__spring-security | oauth2/oauth2-core/src/main/java/org/springframework/security/oauth2/core/http/converter/OAuth2ErrorHttpMessageConverter.java | {
"start": 5288,
"end": 5907
} | class ____ implements Converter<Map<String, String>, OAuth2Error> {
@Override
public OAuth2Error convert(Map<String, String> parameters) {
String errorCode = parameters.get(OAuth2ParameterNames.ERROR);
String errorDescription = parameters.get(OAuth2ParameterNames.ERROR_DESCRIPTION);
String errorUri = parameters.get(OAuth2ParameterNames.ERROR_URI);
return new OAuth2Error(errorCode, errorDescription, errorUri);
}
}
/**
* A {@link Converter} that converts the provided {@link OAuth2Error} to a {@code Map}
* representation of OAuth 2.0 Error parameters.
*/
private static | OAuth2ErrorConverter |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/security/oauthbearer/internals/secured/assertion/FileAssertionJwtTemplate.java | {
"start": 4622,
"end": 4991
} | class ____ {
private final Map<String, Object> header;
private final Map<String, Object> payload;
private CachedJwtTemplate(Map<String, Object> header, Map<String, Object> payload) {
this.header = Collections.unmodifiableMap(header);
this.payload = Collections.unmodifiableMap(payload);
}
}
}
| CachedJwtTemplate |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeohexTests.java | {
"start": 1063,
"end": 2478
} | class ____ extends AbstractScalarFunctionTestCase {
public ToGeohexTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
this.testCase = testCaseSupplier.get();
}
@ParametersFactory
public static Iterable<Object[]> parameters() {
final String attribute = "Attribute[channel=0]";
final String evaluator = "ToGeohexFromStringEvaluator[in=Attribute[channel=0]]";
final List<TestCaseSupplier> suppliers = new ArrayList<>();
TestCaseSupplier.forUnaryGeoGrid(suppliers, attribute, DataType.GEOHEX, DataType.GEOHEX, v -> v, List.of());
TestCaseSupplier.forUnaryGeoGrid(suppliers, attribute, DataType.LONG, DataType.GEOHEX, v -> v, List.of());
TestCaseSupplier.forUnaryGeoGrid(suppliers, evaluator, DataType.KEYWORD, DataType.GEOHEX, ToGeohexTests::valueOf, List.of());
TestCaseSupplier.forUnaryGeoGrid(suppliers, evaluator, DataType.TEXT, DataType.GEOHEX, ToGeohexTests::valueOf, List.of());
return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers);
}
private static long valueOf(Object gridAddress) {
assert gridAddress instanceof BytesRef;
return H3.stringToH3(((BytesRef) gridAddress).utf8ToString());
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new ToGeohex(source, args.get(0));
}
}
| ToGeohexTests |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/stat/JdbcSqlStatTest.java | {
"start": 153,
"end": 971
} | class ____ extends TestCase {
private JdbcSqlStat item;
public void test_0() throws Exception {
item = new JdbcSqlStat("");
gc();
long memoryStart = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed();
final int COUNT = 1000 * 5;
JdbcSqlStat[] items = new JdbcSqlStat[COUNT];
for (int i = 0; i < COUNT; ++i) {
items[i] = new JdbcSqlStat("");
// items[i] = Histogram.makeHistogram(20);
}
long memoryEnd = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed();
System.out.println("memory used : " + NumberFormat.getInstance().format(memoryEnd - memoryStart));
}
private void gc() {
for (int i = 0; i < 10; ++i) {
System.gc();
}
}
}
| JdbcSqlStatTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/fetchmode/toone/ManyToOneWithCircularity3Test.java | {
"start": 3407,
"end": 4141
} | class ____ {
@Id
private Long id;
private String name;
@ManyToOne
private Connector connector;
@ManyToOne(fetch = FetchType.LAZY)
private Sub sub;
@ManyToOne(fetch = FetchType.LAZY)
private Sub2 sub2;
public Main() {
}
public Main(Long id, String name, Connector connector, Sub sub, Sub2 sub2) {
this.id = id;
this.name = name;
this.connector = connector;
this.sub = sub;
this.sub2 = sub2;
}
public Long getId() {
return id;
}
public String getName() {
return name;
}
public Connector getConnector() {
return connector;
}
public Sub getSub() {
return sub;
}
public Sub2 getSub2() {
return sub2;
}
}
@Entity(name = "Connector")
public static | Main |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/injection/guice/internal/BindingBuilder.java | {
"start": 1317,
"end": 3856
} | class ____<T> extends AbstractBindingBuilder<T> implements AnnotatedBindingBuilder<T> {
public BindingBuilder(Binder binder, List<Element> elements, Object source, Key<T> key) {
super(binder, elements, source, key);
}
@Override
public BindingBuilder<T> to(Class<? extends T> implementation) {
Key<? extends T> linkedKey = Key.get(implementation);
Objects.requireNonNull(linkedKey, "linkedKey");
checkNotTargetted();
BindingImpl<T> base = getBinding();
setBinding(new LinkedBindingImpl<>(base.getSource(), base.getKey(), base.getScoping(), linkedKey));
return this;
}
@Override
public void toInstance(T instance) {
checkNotTargetted();
// lookup the injection points, adding any errors to the binder's errors list
Set<InjectionPoint> injectionPoints;
if (instance != null) {
try {
injectionPoints = InjectionPoint.forInstanceMethods(instance.getClass());
} catch (ConfigurationException e) {
for (Message message : e.getErrorMessages()) {
binder.addError(message);
}
injectionPoints = Set.copyOf(e.getPartialValue());
}
} else {
binder.addError(BINDING_TO_NULL);
injectionPoints = emptySet();
}
BindingImpl<T> base = getBinding();
setBinding(new InstanceBindingImpl<>(base.getSource(), base.getKey(), base.getScoping(), injectionPoints, instance));
}
@Override
public BindingBuilder<T> toProvider(Provider<? extends T> provider) {
Objects.requireNonNull(provider, "provider");
checkNotTargetted();
// lookup the injection points, adding any errors to the binder's errors list
Set<InjectionPoint> injectionPoints;
try {
injectionPoints = InjectionPoint.forInstanceMethods(provider.getClass());
} catch (ConfigurationException e) {
for (Message message : e.getErrorMessages()) {
binder.addError(message);
}
injectionPoints = Set.copyOf(e.getPartialValue());
}
BindingImpl<T> base = getBinding();
setBinding(new ProviderInstanceBindingImpl<>(base.getSource(), base.getKey(), base.getScoping(), injectionPoints, provider));
return this;
}
@Override
public String toString() {
return "BindingBuilder<" + getBinding().getKey().getTypeLiteral() + ">";
}
}
| BindingBuilder |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/bind/annotation/GetMapping.java | {
"start": 1938,
"end": 3045
} | interface ____ {
/**
* Alias for {@link RequestMapping#name}.
*/
@AliasFor(annotation = RequestMapping.class)
String name() default "";
/**
* Alias for {@link RequestMapping#value}.
*/
@AliasFor(annotation = RequestMapping.class)
String[] value() default {};
/**
* Alias for {@link RequestMapping#path}.
*/
@AliasFor(annotation = RequestMapping.class)
String[] path() default {};
/**
* Alias for {@link RequestMapping#params}.
*/
@AliasFor(annotation = RequestMapping.class)
String[] params() default {};
/**
* Alias for {@link RequestMapping#headers}.
*/
@AliasFor(annotation = RequestMapping.class)
String[] headers() default {};
/**
* Alias for {@link RequestMapping#consumes}.
* @since 4.3.5
*/
@AliasFor(annotation = RequestMapping.class)
String[] consumes() default {};
/**
* Alias for {@link RequestMapping#produces}.
*/
@AliasFor(annotation = RequestMapping.class)
String[] produces() default {};
/**
* Alias for {@link RequestMapping#version()}.
*/
@AliasFor(annotation = RequestMapping.class)
String version() default "";
}
| GetMapping |
java | elastic__elasticsearch | x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetFlameGraphActionIT.java | {
"start": 312,
"end": 1437
} | class ____ extends ProfilingTestCase {
public void testGetStackTracesUnfiltered() throws Exception {
GetStackTracesRequest request = new GetStackTracesRequest(
1000,
600.0d,
1.0d,
1.0d,
null,
null,
null,
null,
null,
null,
null,
null,
null
);
GetFlamegraphResponse response = client().execute(GetFlamegraphAction.INSTANCE, request).get();
// only spot-check top level properties - detailed tests are done in unit tests
assertEquals(1008, response.getSize());
assertEquals(1.0d, response.getSamplingRate(), 0.001d);
assertEquals(45, response.getSelfCPU());
assertEquals(1974, response.getTotalCPU());
assertEquals(45, response.getTotalSamples());
// The root node's values are the same as the top-level values.
assertEquals("", response.getFileIds().get(0));
assertEquals(response.getSelfCPU(), response.getCountInclusive().get(0).longValue());
}
}
| GetFlameGraphActionIT |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/collection/StringMapLobTest.java | {
"start": 2098,
"end": 5113
} | class ____ {
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
final Simple simple = new Simple( 1, "Simple" );
simple.getEmbeddedMap().put( "1", "One" );
simple.getEmbeddedMap().put( "2", "Two" );
entityManager.persist( simple );
} );
scope.inTransaction( entityManager -> {
final Simple simple = entityManager.find( Simple.class, 1 );
simple.getEmbeddedMap().put( "3", "Three" );
entityManager.merge( simple );
} );
scope.inTransaction( entityManager -> {
final Simple simple = entityManager.find( Simple.class, 1 );
simple.getEmbeddedMap().remove( "1" );
simple.getEmbeddedMap().remove( "2" );
entityManager.merge( simple );
} );
scope.inTransaction( entityManager -> {
final Simple simple = entityManager.find( Simple.class, 1 );
simple.getEmbeddedMap().remove( "3" );
simple.getEmbeddedMap().put( "3", "Three-New" );
entityManager.merge( simple );
} );
scope.inTransaction( entityManager -> {
final Simple simple = entityManager.find( Simple.class, 1 );
simple.getEmbeddedMap().clear();
entityManager.merge( simple );
} );
}
@Test
public void testRevisionCounts(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( Arrays.asList( 1, 2, 3, 4, 5 ), auditReader.getRevisions( Simple.class, 1 ) );
} );
}
@Test
public void testRevisionHistory(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
final Simple rev1 = auditReader.find( Simple.class, 1, 1 );
assertEquals( 2, rev1.getEmbeddedMap().entrySet().size() );
TestTools.assertCollectionsEqual(
TestTools.<String, String>mapBuilder()
.add( "1", "One" )
.add( "2", "Two" )
.entries(),
rev1.getEmbeddedMap().entrySet()
);
final Simple rev2 = auditReader.find( Simple.class, 1, 2 );
assertEquals( 3, rev2.getEmbeddedMap().entrySet().size() );
TestTools.assertCollectionsEqual(
TestTools.<String,String>mapBuilder()
.add( "1", "One" )
.add( "2", "Two" )
.add( "3", "Three" )
.entries(),
rev2.getEmbeddedMap().entrySet()
);
final Simple rev3 = auditReader.find( Simple.class, 1, 3 );
assertEquals( 1, rev3.getEmbeddedMap().entrySet().size() );
TestTools.assertCollectionsEqual(
TestTools.<String,String>mapBuilder()
.add( "3", "Three" )
.entries(),
rev3.getEmbeddedMap().entrySet()
);
final Simple rev4 = auditReader.find( Simple.class, 1, 4 );
TestTools.assertCollectionsEqual(
TestTools.<String,String>mapBuilder()
.add( "3", "Three-New" )
.entries(),
rev4.getEmbeddedMap().entrySet()
);
final Simple rev5 = auditReader.find( Simple.class, 1, 5 );
assertEquals( 0, rev5.getEmbeddedMap().size() );
} );
}
@Entity(name = "Simple")
@Audited
public static | StringMapLobTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/EqualsGetClassTest.java | {
"start": 4318,
"end": 4792
} | class ____ {
private int a;
@Override
public boolean equals(Object o) {
if (!(o instanceof Test)) {
return false;
}
return ((Test) o).a == a;
}
}
""")
.doTest();
}
@Test
public void separateNullCheck_noParens() {
refactoringHelper
.addInputLines(
"Test.java",
"""
| Test |
java | elastic__elasticsearch | modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java | {
"start": 952,
"end": 5755
} | class ____ extends ESTestCase {
public void testJavaPattern() throws Exception {
Function<String, ZonedDateTime> function = DateFormat.Java.getFunction("yyyy-MM-dd'T'HH:mm:ss.SSSXX", ZoneOffset.UTC, Locale.ROOT);
DateIndexNameProcessor processor = createProcessor("_field", List.of(function), ZoneOffset.UTC, "events-", "y", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_id", 1, null, null, Map.of("_field", "2016-04-25T12:24:20.101Z"));
processor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo("<events-{20160425||/y{yyyyMMdd|UTC}}>"));
}
public void testTAI64N() throws Exception {
Function<String, ZonedDateTime> function = DateFormat.Tai64n.getFunction(null, ZoneOffset.UTC, null);
DateIndexNameProcessor dateProcessor = createProcessor("_field", List.of(function), ZoneOffset.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument(
"_index",
"_id",
1,
null,
null,
Map.of("_field", (randomBoolean() ? "@" : "") + "4000000050d506482dbdf024")
);
dateProcessor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo("<events-{20121222||/m{yyyyMMdd|UTC}}>"));
}
public void testUnixMs() throws Exception {
Function<String, ZonedDateTime> function = DateFormat.UnixMs.getFunction(null, ZoneOffset.UTC, null);
DateIndexNameProcessor dateProcessor = createProcessor("_field", List.of(function), ZoneOffset.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_id", 1, null, null, Map.of("_field", "1000500"));
dateProcessor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo("<events-{19700101||/m{yyyyMMdd|UTC}}>"));
document = new IngestDocument("_index", "_id", 1, null, null, Map.of("_field", 1000500L));
dateProcessor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo("<events-{19700101||/m{yyyyMMdd|UTC}}>"));
}
public void testUnix() throws Exception {
Function<String, ZonedDateTime> function = DateFormat.Unix.getFunction(null, ZoneOffset.UTC, null);
DateIndexNameProcessor dateProcessor = createProcessor("_field", List.of(function), ZoneOffset.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_id", 1, null, null, Map.of("_field", "1000.5"));
dateProcessor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo("<events-{19700101||/m{yyyyMMdd|UTC}}>"));
}
public void testTemplatedFields() throws Exception {
String indexNamePrefix = randomAlphaOfLength(10);
String dateRounding = randomFrom("y", "M", "w", "d", "h", "m", "s");
String indexNameFormat = randomFrom("yyyy-MM-dd'T'HH:mm:ss.SSSZZ", "yyyyMMdd", "MM/dd/yyyy");
String date = Integer.toString(randomInt());
Function<String, ZonedDateTime> dateTimeFunction = DateFormat.Unix.getFunction(null, ZoneOffset.UTC, null);
DateIndexNameProcessor dateProcessor = createProcessor(
"_field",
List.of(dateTimeFunction),
ZoneOffset.UTC,
indexNamePrefix,
dateRounding,
indexNameFormat
);
IngestDocument document = new IngestDocument("_index", "_id", 1, null, null, Map.of("_field", date));
dateProcessor.execute(document);
assertThat(
document.getSourceAndMetadata().get("_index"),
equalTo(
"<"
+ indexNamePrefix
+ "{"
+ DateFormatter.forPattern(indexNameFormat).format(dateTimeFunction.apply(date))
+ "||/"
+ dateRounding
+ "{"
+ indexNameFormat
+ "|UTC}}>"
)
);
}
private DateIndexNameProcessor createProcessor(
String field,
List<Function<String, ZonedDateTime>> dateFormats,
ZoneId timezone,
String indexNamePrefix,
String dateRounding,
String indexNameFormat
) {
return new DateIndexNameProcessor(
randomAlphaOfLength(10),
null,
field,
dateFormats,
timezone,
new TestTemplateService.MockTemplateScript.Factory(indexNamePrefix),
new TestTemplateService.MockTemplateScript.Factory(dateRounding),
new TestTemplateService.MockTemplateScript.Factory(indexNameFormat)
);
}
}
| DateIndexNameProcessorTests |
java | spring-projects__spring-framework | spring-websocket/src/main/java/org/springframework/web/socket/messaging/StompSubProtocolHandler.java | {
"start": 25195,
"end": 25972
} | class ____ implements Consumer<Principal> {
private final MessageChannel channel;
private final @Nullable Principal webSocketUser;
private volatile @Nullable Principal stompUser;
SessionInfo(MessageChannel channel, @Nullable Principal user) {
this.channel = channel;
this.webSocketUser = user;
}
public MessageChannel getMessageChannelToUse() {
return this.channel;
}
public @Nullable Principal getUser() {
return (this.stompUser != null ? this.stompUser : this.webSocketUser);
}
@Override
public void accept(@Nullable Principal stompUser) {
if (stompUser != null && stompUser != this.webSocketUser) {
this.stompUser = stompUser;
}
}
}
/**
* Contract for access to session counters.
* @since 5.2
*/
public | SessionInfo |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/MessageQueryParameter.java | {
"start": 1394,
"end": 2879
} | class ____<X> extends MessageParameter<List<X>> {
protected MessageQueryParameter(String key, MessageParameterRequisiteness requisiteness) {
super(key, requisiteness);
}
@Override
public List<X> convertFromString(String values) throws ConversionException {
String[] splitValues = values.split(",");
List<X> list = new ArrayList<>();
for (String value : splitValues) {
list.add(convertStringToValue(value));
}
return list;
}
/**
* Converts the given string to a valid value of this parameter.
*
* @param value string representation of parameter value
* @return parameter value
*/
public abstract X convertStringToValue(String value) throws ConversionException;
@Override
public String convertToString(List<X> values) {
StringBuilder sb = new StringBuilder();
boolean first = true;
for (X value : values) {
if (first) {
sb.append(convertValueToString(value));
first = false;
} else {
sb.append(",");
sb.append(convertValueToString(value));
}
}
return sb.toString();
}
/**
* Converts the given value to its string representation.
*
* @param value parameter value
* @return string representation of typed value
*/
public abstract String convertValueToString(X value);
}
| MessageQueryParameter |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/store/records/UpdateApplicationHomeSubClusterRequest.java | {
"start": 1471,
"end": 2645
} | class ____ {
@Private
@Unstable
public static UpdateApplicationHomeSubClusterRequest newInstance(
ApplicationHomeSubCluster applicationHomeSubCluster) {
UpdateApplicationHomeSubClusterRequest updateApplicationRequest =
Records.newRecord(UpdateApplicationHomeSubClusterRequest.class);
updateApplicationRequest
.setApplicationHomeSubCluster(applicationHomeSubCluster);
return updateApplicationRequest;
}
/**
* Get the {@link ApplicationHomeSubCluster} representing the mapping of the
* application to it's home sub-cluster.
*
* @return the mapping of the application to it's home sub-cluster.
*/
@Public
@Unstable
public abstract ApplicationHomeSubCluster getApplicationHomeSubCluster();
/**
* Set the {@link ApplicationHomeSubCluster} representing the mapping of the
* application to it's home sub-cluster.
*
* @param applicationHomeSubCluster the mapping of the application to it's
* home sub-cluster.
*/
@Private
@Unstable
public abstract void setApplicationHomeSubCluster(
ApplicationHomeSubCluster applicationHomeSubCluster);
}
| UpdateApplicationHomeSubClusterRequest |
java | quarkusio__quarkus | integration-tests/test-extension/extension/runtime/src/main/java/io/quarkus/extest/runtime/TestRecorder.java | {
"start": 571,
"end": 2179
} | class ____ {
static final Logger log = Logger.getLogger(TestRecorder.class);
/**
* Create a non-CDI based RuntimeXmlConfigService from the XmlConfig
*
* @param config - parse XML configuration
* @return RuntimeValue<RuntimeXmlConfigService>
*/
public RuntimeValue<RuntimeXmlConfigService> initRuntimeService(XmlConfig config) {
RuntimeXmlConfigService service = new RuntimeXmlConfigService(config);
return new RuntimeValue<>(service);
}
/**
* Invoke the RuntimeXmlConfigService#startService method and register a stopService call with the shutdown context.
*
* @param shutdownContext - context for adding shutdown hooks
* @param runtimeValue - service value
* @throws IOException - on startup failure
*/
public void startRuntimeService(ShutdownContext shutdownContext, RuntimeValue<RuntimeXmlConfigService> runtimeValue)
throws IOException {
RuntimeXmlConfigService service = runtimeValue.getValue();
service.startService();
shutdownContext.addShutdownTask(service::stopService);
}
/**
* Passes the public key to the PublicKeyProducer for injection into CDI beans at runtime
*
* @param publicKey - public key
* @param beanContainer - CDI bean container
*/
public void loadDSAPublicKeyProducer(DSAPublicKey publicKey, BeanContainer beanContainer) {
PublicKeyProducer keyProducer = beanContainer.beanInstance(PublicKeyProducer.class);
keyProducer.setPublicKey(publicKey);
}
/**
* Access the primitive | TestRecorder |
java | netty__netty | transport-sctp/src/test/java/io/netty/channel/sctp/SctpLimitStreamsTest.java | {
"start": 1332,
"end": 3391
} | class ____ {
@BeforeAll
public static void checkSupported() {
assumeTrue(SctpTestUtil.isSctpSupported());
}
@SuppressForbidden(reason = "test-only")
@Test
@Timeout(value = 5000, unit = TimeUnit.MILLISECONDS)
public void testSctpInitMaxstreams() throws Exception {
EventLoopGroup loop = newEventLoopGroup();
try {
ServerBootstrap serverBootstrap = new ServerBootstrap();
serverBootstrap.group(loop)
.channel(serverClass())
.option(ChannelOption.SO_REUSEADDR, true)
.option(SctpChannelOption.SCTP_INIT_MAXSTREAMS,
SctpStandardSocketOptions.InitMaxStreams.create(1, 1))
.localAddress(new InetSocketAddress(0))
.childHandler(new ChannelInboundHandlerAdapter());
Bootstrap clientBootstrap = new Bootstrap()
.group(loop)
.channel(clientClass())
.option(SctpChannelOption.SCTP_INIT_MAXSTREAMS,
SctpStandardSocketOptions.InitMaxStreams.create(112, 112))
.handler(new ChannelInboundHandlerAdapter());
Channel serverChannel = serverBootstrap.bind()
.syncUninterruptibly().channel();
SctpChannel clientChannel = (SctpChannel) clientBootstrap.connect(serverChannel.localAddress())
.syncUninterruptibly().channel();
assertEquals(1, clientChannel.association().maxOutboundStreams());
assertEquals(1, clientChannel.association().maxInboundStreams());
serverChannel.close().syncUninterruptibly();
clientChannel.close().syncUninterruptibly();
} finally {
loop.shutdownGracefully();
}
}
protected abstract EventLoopGroup newEventLoopGroup();
protected abstract Class<? extends SctpChannel> clientClass();
protected abstract Class<? extends SctpServerChannel> serverClass();
}
| SctpLimitStreamsTest |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/AutoConfigurationImportFilter.java | {
"start": 1662,
"end": 1763
} | interface ____ {
/**
* Apply the filter to the given auto-configuration | AutoConfigurationImportFilter |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/xpack/compute/operator/lookup/EnrichResultBuilderForInt.java | {
"start": 842,
"end": 5372
} | class ____ extends EnrichResultBuilder {
private ObjectArray<int[]> cells;
EnrichResultBuilderForInt(BlockFactory blockFactory, int channel) {
super(blockFactory, channel);
this.cells = blockFactory.bigArrays().newObjectArray(1);
}
@Override
void addInputPage(IntVector positions, Page page) {
IntBlock block = page.getBlock(channel);
for (int i = 0; i < positions.getPositionCount(); i++) {
int valueCount = block.getValueCount(i);
if (valueCount == 0) {
continue;
}
int cellPosition = positions.getInt(i);
cells = blockFactory.bigArrays().grow(cells, cellPosition + 1);
final var oldCell = cells.get(cellPosition);
final var newCell = extendCell(oldCell, valueCount);
cells.set(cellPosition, newCell);
int dstIndex = oldCell != null ? oldCell.length : 0;
adjustBreaker(RamUsageEstimator.sizeOf(newCell) - (oldCell != null ? RamUsageEstimator.sizeOf(oldCell) : 0));
int firstValueIndex = block.getFirstValueIndex(i);
for (int v = 0; v < valueCount; v++) {
newCell[dstIndex + v] = block.getInt(firstValueIndex + v);
}
}
}
private int[] extendCell(int[] oldCell, int newValueCount) {
if (oldCell == null) {
return new int[newValueCount];
} else {
return Arrays.copyOf(oldCell, oldCell.length + newValueCount);
}
}
private int[] combineCell(int[] first, int[] second) {
if (first == null) {
return second;
}
if (second == null) {
return first;
}
var result = new int[first.length + second.length];
System.arraycopy(first, 0, result, 0, first.length);
System.arraycopy(second, 0, result, first.length, second.length);
return result;
}
private void appendGroupToBlockBuilder(IntBlock.Builder builder, int[] group) {
if (group == null) {
builder.appendNull();
} else if (group.length == 1) {
builder.appendInt(group[0]);
} else {
builder.beginPositionEntry();
// TODO: sort and dedup and set MvOrdering
for (var v : group) {
builder.appendInt(v);
}
builder.endPositionEntry();
}
}
private int[] getCellOrNull(int position) {
return position < cells.size() ? cells.get(position) : null;
}
private Block buildWithSelected(IntBlock selected) {
try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(selected.getPositionCount())) {
for (int i = 0; i < selected.getPositionCount(); i++) {
int selectedCount = selected.getValueCount(i);
switch (selectedCount) {
case 0 -> builder.appendNull();
case 1 -> {
int groupId = selected.getInt(selected.getFirstValueIndex(i));
appendGroupToBlockBuilder(builder, getCellOrNull(groupId));
}
default -> {
int firstValueIndex = selected.getFirstValueIndex(i);
var cell = getCellOrNull(selected.getInt(firstValueIndex));
for (int p = 1; p < selectedCount; p++) {
int groupId = selected.getInt(firstValueIndex + p);
cell = combineCell(cell, getCellOrNull(groupId));
}
appendGroupToBlockBuilder(builder, cell);
}
}
}
return builder.build();
}
}
private Block buildWithSelected(IntVector selected) {
try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(selected.getPositionCount())) {
for (int i = 0; i < selected.getPositionCount(); i++) {
appendGroupToBlockBuilder(builder, getCellOrNull(selected.getInt(i)));
}
return builder.build();
}
}
@Override
Block build(IntBlock selected) {
var vector = selected.asVector();
if (vector != null) {
return buildWithSelected(vector);
} else {
return buildWithSelected(selected);
}
}
@Override
public void close() {
Releasables.close(cells, super::close);
}
}
| EnrichResultBuilderForInt |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/customexceptions/UnwrapExceptionTest.java | {
"start": 4669,
"end": 4828
} | class ____ extends RuntimeException {
public FifthException(Throwable cause) {
super(cause);
}
}
public static | FifthException |
java | google__guice | extensions/assistedinject/test/com/google/inject/assistedinject/FactoryProviderTest.java | {
"start": 24199,
"end": 25964
} | interface ____ {
public Insurance<Camaro> create(Camaro car, double premium);
}
public void testAssistedFactoryForConcreteType() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(Double.class).annotatedWith(Names.named("lowLimit")).toInstance(50000.0d);
bind(Double.class).annotatedWith(Names.named("highLimit")).toInstance(100000.0d);
bind(MustangInsuranceFactory.class)
.toProvider(
FactoryProvider.newFactory(
MustangInsuranceFactory.class, MustangInsurance.class));
bind(CamaroInsuranceFactory.class)
.toProvider(
FactoryProvider.newFactory(
CamaroInsuranceFactory.class, CamaroInsurance.class));
}
});
MustangInsuranceFactory mustangInsuranceFactory =
injector.getInstance(MustangInsuranceFactory.class);
CamaroInsuranceFactory camaroInsuranceFactory =
injector.getInstance(CamaroInsuranceFactory.class);
Mustang mustang = new Mustang(5000d, Color.BLACK);
MustangInsurance mustangPolicy =
(MustangInsurance) mustangInsuranceFactory.create(mustang, 800.0d);
assertEquals(800.0d, mustangPolicy.premium, 0.0);
assertEquals(50000.0d, mustangPolicy.limit, 0.0);
Camaro camaro = new Camaro(3000, 1967, Color.BLUE);
CamaroInsurance camaroPolicy = (CamaroInsurance) camaroInsuranceFactory.create(camaro, 800.0d);
assertEquals(800.0d, camaroPolicy.premium, 0.0);
assertEquals(100000.0d, camaroPolicy.limit, 0.0);
}
public | CamaroInsuranceFactory |
java | apache__camel | components/camel-file/src/main/java/org/apache/camel/component/file/strategy/FileChangedExclusiveReadLockStrategy.java | {
"start": 1523,
"end": 5533
} | class ____ extends MarkerFileExclusiveReadLockStrategy {
private static final Logger LOG = LoggerFactory.getLogger(FileChangedExclusiveReadLockStrategy.class);
private long timeout;
private long checkInterval = 1000;
private long minLength = 1;
private long minAge;
private LoggingLevel readLockLoggingLevel = LoggingLevel.DEBUG;
@Override
public boolean acquireExclusiveReadLock(GenericFileOperations<File> operations, GenericFile<File> file, Exchange exchange)
throws Exception {
// must call super
if (!super.acquireExclusiveReadLock(operations, file, exchange)) {
return false;
}
File target = new File(file.getAbsoluteFilePath());
boolean exclusive = false;
LOG.trace("Waiting for exclusive read lock to file: {}", file);
long lastModified = Long.MIN_VALUE;
long length = Long.MIN_VALUE;
StopWatch watch = new StopWatch();
long startTime = (new Date()).getTime();
while (!exclusive) {
// timeout check
if (timeout > 0) {
if (isTimedOut(watch, target, timeout, readLockLoggingLevel)) {
return false;
}
}
if (!target.exists()) {
CamelLogger.log(LOG, readLockLoggingLevel,
"Cannot acquire read lock as file no longer exists. Will skip the file: " + file);
return false;
}
long newLastModified = target.lastModified();
long newLength = target.length();
long newOlderThan = startTime + watch.taken() - minAge;
LOG.trace("Previous last modified: {}, new last modified: {}", lastModified, newLastModified);
LOG.trace("Previous length: {}, new length: {}", length, newLength);
LOG.trace("New older than threshold: {}", newOlderThan);
// CHECKSTYLE:OFF
if (newLength >= minLength && ((minAge == 0 && newLastModified == lastModified && newLength == length)
|| (minAge != 0 && newLastModified < newOlderThan))) {
// CHECKSTYLE:ON
LOG.trace("Read lock acquired.");
exclusive = true;
} else {
// set new base file change information
lastModified = newLastModified;
length = newLength;
boolean interrupted = sleep();
if (interrupted) {
// we were interrupted while sleeping, we are likely being
// shutdown so return false
return false;
}
}
}
return exclusive;
}
private boolean sleep() {
LOG.trace("Exclusive read lock not granted. Sleeping for {} millis.", checkInterval);
try {
Thread.sleep(checkInterval);
return false;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
LOG.debug("Sleep interrupted while waiting for exclusive read lock, so breaking out");
return true;
}
}
public long getTimeout() {
return timeout;
}
@Override
public void setTimeout(long timeout) {
this.timeout = timeout;
}
public long getCheckInterval() {
return checkInterval;
}
@Override
public void setCheckInterval(long checkInterval) {
this.checkInterval = checkInterval;
}
@Override
public void setReadLockLoggingLevel(LoggingLevel readLockLoggingLevel) {
this.readLockLoggingLevel = readLockLoggingLevel;
}
public long getMinLength() {
return minLength;
}
public void setMinLength(long minLength) {
this.minLength = minLength;
}
public long getMinAge() {
return minAge;
}
public void setMinAge(long minAge) {
this.minAge = minAge;
}
}
| FileChangedExclusiveReadLockStrategy |
java | google__error-prone | core/src/test/java/com/google/errorprone/refaster/TemplatingTest.java | {
"start": 1391,
"end": 1947
} | class ____ {",
" public double example(double[] array) {",
" return array[5];",
" }",
"}");
assertThat(UTemplater.createTemplate(context, getMethodDeclaration("example")))
.isEqualTo(
ExpressionTemplate.create(
ImmutableMap.of("array", UArrayType.create(UPrimitiveType.DOUBLE)),
UArrayAccess.create(UFreeIdent.create("array"), ULiteral.intLit(5)),
UPrimitiveType.DOUBLE));
}
@Test
public void binary() {
compile(
" | ArrayAccessExample |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/decorator/AnotherPersonMapper.java | {
"start": 445,
"end": 666
} | interface ____ {
AnotherPersonMapper INSTANCE = Mappers.getMapper( AnotherPersonMapper.class );
PersonDto personToPersonDto(Person person);
AddressDto addressToAddressDto(Address address);
}
| AnotherPersonMapper |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RequestSender.java | {
"start": 578,
"end": 828
} | interface ____ {
void send(
Logger logger,
Request request,
Supplier<Boolean> hasRequestTimedOutFunction,
ResponseHandler responseHandler,
ActionListener<InferenceServiceResults> listener
);
}
| RequestSender |
java | elastic__elasticsearch | modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoLongitudeValueSource.java | {
"start": 921,
"end": 2244
} | class ____ extends FieldDataBasedDoubleValuesSource {
GeoLongitudeValueSource(IndexFieldData<?> fieldData) {
super(fieldData);
}
@Override
public DoubleValues getValues(LeafReaderContext leaf, DoubleValues scores) {
LeafGeoPointFieldData leafData = (LeafGeoPointFieldData) fieldData.load(leaf);
final MultiGeoPointValues values = leafData.getPointValues();
return new DoubleValues() {
@Override
public double doubleValue() throws IOException {
return values.nextValue().getLon();
}
@Override
public boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
};
}
@Override
public int hashCode() {
return 31 * getClass().hashCode() + fieldData.hashCode();
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
GeoLongitudeValueSource other = (GeoLongitudeValueSource) obj;
return fieldData.equals(other.fieldData);
}
@Override
public String toString() {
return "lon: field(" + fieldData.getFieldName() + ")";
}
}
| GeoLongitudeValueSource |
java | google__guava | android/guava-testlib/src/com/google/common/collect/testing/TestMapEntrySetGenerator.java | {
"start": 1061,
"end": 2213
} | class ____<
K extends @Nullable Object, V extends @Nullable Object>
implements TestSetGenerator<Map.Entry<K, V>> {
private final SampleElements<K> keys;
private final SampleElements<V> values;
protected TestMapEntrySetGenerator(SampleElements<K> keys, SampleElements<V> values) {
this.keys = keys;
this.values = values;
}
@Override
public SampleElements<Entry<K, V>> samples() {
return SampleElements.mapEntries(keys, values);
}
@Override
public Set<Entry<K, V>> create(Object... elements) {
Entry<K, V>[] entries = createArray(elements.length);
arraycopy(elements, 0, entries, 0, elements.length);
return createFromEntries(entries);
}
public abstract Set<Entry<K, V>> createFromEntries(Entry<K, V>[] entries);
@Override
@SuppressWarnings("unchecked") // generic arrays make typesafety sad
public Entry<K, V>[] createArray(int length) {
return (Entry<K, V>[]) new Entry<?, ?>[length];
}
/** Returns the original element list, unchanged. */
@Override
public List<Entry<K, V>> order(List<Entry<K, V>> insertionOrder) {
return insertionOrder;
}
}
| TestMapEntrySetGenerator |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java | {
"start": 5905,
"end": 5971
} | class ____ build an IndexGraveyard.
*/
public static final | to |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/Endpoint2MustBeStartedBeforeSendProcessorTest.java | {
"start": 1391,
"end": 3797
} | class ____ extends ContextTestSupport {
private MyEndpoint myendpoint;
private volatile String order = "";
@Test
public void testEndpointMustBeStartedBeforeProducer() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
myendpoint = new MyEndpoint("myendpoint", context);
from("direct:start").to(myendpoint);
}
});
context.start();
assertEquals("EndpointProducer", order);
}
@Test
public void testEndpointMustBeStartedBeforeConsumer() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
myendpoint = new MyEndpoint("myendpoint", context);
from(myendpoint).to("mock:result");
}
});
context.start();
assertEquals("EndpointConsumer", order);
}
@Test
public void testEndpointMustBeStartedBeforeConsumerAndProducer() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
myendpoint = new MyEndpoint("myendpoint", context);
from(myendpoint).to("mock:result").to(myendpoint);
}
});
context.start();
assertEquals("EndpointProducerConsumer", order);
}
@Test
public void testEndpointStartedOnceAndOnlyStoppedOnShutdown() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
myendpoint = new MyEndpoint("myendpoint", context);
from(myendpoint).routeId("foo").to("mock:result").to(myendpoint);
}
});
context.start();
assertEquals("EndpointProducerConsumer", order);
order = "";
context.getRouteController().stopRoute("foo");
assertEquals("StopConsumerStopProducer", order);
order = "";
context.getRouteController().startRoute("foo");
assertEquals("ProducerConsumer", order);
order = "";
context.stop();
assertEquals("StopConsumerStopProducerStopEndpoint", order);
}
@Override
public boolean isUseRouteBuilder() {
return false;
}
private final | Endpoint2MustBeStartedBeforeSendProcessorTest |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/runtime/src/main/java/io/quarkus/rest/client/reactive/ClientQueryParam.java | {
"start": 1519,
"end": 2227
} | interface ____ {
*
* static AtomicInteger counter = new AtomicInteger(1);
*
* default String determineQueryValue(String name) {
* if ("SomeQuery".equals(name)) {
* return "InvokedCount " + counter.getAndIncrement();
* }
* throw new UnsupportedOperationException("unknown name");
* }
*
* {@literal @}ClientQueryParam(name="SomeName", value="ExplicitlyDefinedValue")
* {@literal @}GET
* Response useExplicitQueryValue();
*
* {@literal @}ClientQueryParam(name="SomeName", value="{determineQueryValue}")
* {@literal @}DELETE
* Response useComputedQueryValue();
* }
* </pre>
*
* The implementation should fail to deploy a client | MyClient |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/resource/IOUringProvider.java | {
"start": 1933,
"end": 3442
} | class ____ {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(IOUringProvider.class);
private static final String IOURING_ENABLED_KEY = "io.lettuce.core.iouring";
private static final boolean IOURING_ENABLED = Boolean.parseBoolean(SystemPropertyUtil.get(IOURING_ENABLED_KEY, "true"));
private static final boolean IOURING_AVAILABLE;
private static final EventLoopResources IOURING_RESOURCES;
static {
boolean availability;
try {
Class.forName("io.netty.channel.uring.IoUring");
availability = IoUring.isAvailable();
} catch (ClassNotFoundException e) {
availability = false;
}
IOURING_AVAILABLE = availability;
if (IOURING_AVAILABLE) {
logger.debug("Starting with io_uring library");
IOURING_RESOURCES = new EventLoopResourcesWrapper(IOUringResources.INSTANCE,
IOUringProvider::checkForIOUringLibrary);
} else {
logger.debug("Starting without optional io_uring library");
IOURING_RESOURCES = new EventLoopResourcesWrapper(UnavailableResources.INSTANCE,
IOUringProvider::checkForIOUringLibrary);
}
}
/**
* @return {@code true} if io_uring is available.
*/
public static boolean isAvailable() {
return IOURING_AVAILABLE && IOURING_ENABLED;
}
/**
* Check whether the io_uring library is available on the | IOUringProvider |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet2/Hamlet.java | {
"start": 552791,
"end": 563977
} | class ____<T extends __> extends EImp<T> implements HamletSpec.DFN {
public DFN(String name, T parent, EnumSet<EOpt> opts) {
super(name, parent, opts);
}
@Override
public DFN<T> $id(String value) {
addAttr("id", value);
return this;
}
@Override
public DFN<T> $class(String value) {
addAttr("class", value);
return this;
}
@Override
public DFN<T> $title(String value) {
addAttr("title", value);
return this;
}
@Override
public DFN<T> $style(String value) {
addAttr("style", value);
return this;
}
@Override
public DFN<T> $lang(String value) {
addAttr("lang", value);
return this;
}
@Override
public DFN<T> $dir(Dir value) {
addAttr("dir", value);
return this;
}
@Override
public DFN<T> $onclick(String value) {
addAttr("onclick", value);
return this;
}
@Override
public DFN<T> $ondblclick(String value) {
addAttr("ondblclick", value);
return this;
}
@Override
public DFN<T> $onmousedown(String value) {
addAttr("onmousedown", value);
return this;
}
@Override
public DFN<T> $onmouseup(String value) {
addAttr("onmouseup", value);
return this;
}
@Override
public DFN<T> $onmouseover(String value) {
addAttr("onmouseover", value);
return this;
}
@Override
public DFN<T> $onmousemove(String value) {
addAttr("onmousemove", value);
return this;
}
@Override
public DFN<T> $onmouseout(String value) {
addAttr("onmouseout", value);
return this;
}
@Override
public DFN<T> $onkeypress(String value) {
addAttr("onkeypress", value);
return this;
}
@Override
public DFN<T> $onkeydown(String value) {
addAttr("onkeydown", value);
return this;
}
@Override
public DFN<T> $onkeyup(String value) {
addAttr("onkeyup", value);
return this;
}
@Override
public DFN<T> __(Object... lines) {
_p(true, lines);
return this;
}
@Override
public DFN<T> _r(Object... lines) {
_p(false, lines);
return this;
}
@Override
public B<DFN<T>> b() {
closeAttrs();
return b_(this, true);
}
@Override
public DFN<T> b(String cdata) {
return b().__(cdata).__();
}
@Override
public DFN<T> b(String selector, String cdata) {
return setSelector(b(), selector).__(cdata).__();
}
@Override
public I<DFN<T>> i() {
closeAttrs();
return i_(this, true);
}
@Override
public DFN<T> i(String cdata) {
return i().__(cdata).__();
}
@Override
public DFN<T> i(String selector, String cdata) {
return setSelector(i(), selector).__(cdata).__();
}
@Override
public SMALL<DFN<T>> small() {
closeAttrs();
return small_(this, true);
}
@Override
public DFN<T> small(String cdata) {
return small().__(cdata).__();
}
@Override
public DFN<T> small(String selector, String cdata) {
return setSelector(small(), selector).__(cdata).__();
}
@Override
public DFN<T> em(String cdata) {
return em().__(cdata).__();
}
@Override
public EM<DFN<T>> em() {
closeAttrs();
return em_(this, true);
}
@Override
public DFN<T> em(String selector, String cdata) {
return setSelector(em(), selector).__(cdata).__();
}
@Override
public STRONG<DFN<T>> strong() {
closeAttrs();
return strong_(this, true);
}
@Override
public DFN<T> strong(String cdata) {
return strong().__(cdata).__();
}
@Override
public DFN<T> strong(String selector, String cdata) {
return setSelector(strong(), selector).__(cdata).__();
}
@Override
public DFN<DFN<T>> dfn() {
closeAttrs();
return dfn_(this, true);
}
@Override
public DFN<T> dfn(String cdata) {
return dfn().__(cdata).__();
}
@Override
public DFN<T> dfn(String selector, String cdata) {
return setSelector(dfn(), selector).__(cdata).__();
}
@Override
public CODE<DFN<T>> code() {
closeAttrs();
return code_(this, true);
}
@Override
public DFN<T> code(String cdata) {
return code().__(cdata).__();
}
@Override
public DFN<T> code(String selector, String cdata) {
return setSelector(code(), selector).__(cdata).__();
}
@Override
public DFN<T> samp(String cdata) {
return samp().__(cdata).__();
}
@Override
public SAMP<DFN<T>> samp() {
closeAttrs();
return samp_(this, true);
}
@Override
public DFN<T> samp(String selector, String cdata) {
return setSelector(samp(), selector).__(cdata).__();
}
@Override
public KBD<DFN<T>> kbd() {
closeAttrs();
return kbd_(this, true);
}
@Override
public DFN<T> kbd(String cdata) {
return kbd().__(cdata).__();
}
@Override
public DFN<T> kbd(String selector, String cdata) {
return setSelector(kbd(), selector).__(cdata).__();
}
@Override
public VAR<DFN<T>> var() {
closeAttrs();
return var_(this, true);
}
@Override
public DFN<T> var(String cdata) {
return var().__(cdata).__();
}
@Override
public DFN<T> var(String selector, String cdata) {
return setSelector(var(), selector).__(cdata).__();
}
@Override
public CITE<DFN<T>> cite() {
closeAttrs();
return cite_(this, true);
}
@Override
public DFN<T> cite(String cdata) {
return cite().__(cdata).__();
}
@Override
public DFN<T> cite(String selector, String cdata) {
return setSelector(cite(), selector).__(cdata).__();
}
@Override
public ABBR<DFN<T>> abbr() {
closeAttrs();
return abbr_(this, true);
}
@Override
public DFN<T> abbr(String cdata) {
return abbr().__(cdata).__();
}
@Override
public DFN<T> abbr(String selector, String cdata) {
return setSelector(abbr(), selector).__(cdata).__();
}
@Override
public A<DFN<T>> a() {
closeAttrs();
return a_(this, true);
}
@Override
public A<DFN<T>> a(String selector) {
return setSelector(a(), selector);
}
@Override
public DFN<T> a(String href, String anchorText) {
return a().$href(href).__(anchorText).__();
}
@Override
public DFN<T> a(String selector, String href, String anchorText) {
return setSelector(a(), selector).$href(href).__(anchorText).__();
}
@Override
public IMG<DFN<T>> img() {
closeAttrs();
return img_(this, true);
}
@Override
public DFN<T> img(String src) {
return img().$src(src).__();
}
@Override
public OBJECT<DFN<T>> object() {
closeAttrs();
return object_(this, true);
}
@Override
public OBJECT<DFN<T>> object(String selector) {
return setSelector(object(), selector);
}
@Override
public SUB<DFN<T>> sub() {
closeAttrs();
return sub_(this, true);
}
@Override
public DFN<T> sub(String cdata) {
return sub().__(cdata).__();
}
@Override
public DFN<T> sub(String selector, String cdata) {
return setSelector(sub(), selector).__(cdata).__();
}
@Override
public SUP<DFN<T>> sup() {
closeAttrs();
return sup_(this, true);
}
@Override
public DFN<T> sup(String cdata) {
return sup().__(cdata).__();
}
@Override
public DFN<T> sup(String selector, String cdata) {
return setSelector(sup(), selector).__(cdata).__();
}
@Override
public MAP<DFN<T>> map() {
closeAttrs();
return map_(this, true);
}
@Override
public MAP<DFN<T>> map(String selector) {
return setSelector(map(), selector);
}
@Override
public DFN<T> q(String cdata) {
return q().__(cdata).__();
}
@Override
public DFN<T> q(String selector, String cdata) {
return setSelector(q(), selector).__(cdata).__();
}
@Override
public Q<DFN<T>> q() {
closeAttrs();
return q_(this, true);
}
@Override
public BR<DFN<T>> br() {
closeAttrs();
return br_(this, true);
}
@Override
public DFN<T> br(String selector) {
return setSelector(br(), selector).__();
}
@Override
public BDO<DFN<T>> bdo() {
closeAttrs();
return bdo_(this, true);
}
@Override
public DFN<T> bdo(Dir dir, String cdata) {
return bdo().$dir(dir).__(cdata).__();
}
@Override
public SPAN<DFN<T>> span() {
closeAttrs();
return span_(this, true);
}
@Override
public DFN<T> span(String cdata) {
return span().__(cdata).__();
}
@Override
public DFN<T> span(String selector, String cdata) {
return setSelector(span(), selector).__(cdata).__();
}
@Override
public SCRIPT<DFN<T>> script() {
closeAttrs();
return script_(this, true);
}
@Override
public DFN<T> script(String src) {
return setScriptSrc(script(), src).__();
}
@Override
public INS<DFN<T>> ins() {
closeAttrs();
return ins_(this, true);
}
@Override
public DFN<T> ins(String cdata) {
return ins().__(cdata).__();
}
@Override
public DEL<DFN<T>> del() {
closeAttrs();
return del_(this, true);
}
@Override
public DFN<T> del(String cdata) {
return del().__(cdata).__();
}
@Override
public LABEL<DFN<T>> label() {
closeAttrs();
return label_(this, true);
}
@Override
public DFN<T> label(String forId, String cdata) {
return label().$for(forId).__(cdata).__();
}
@Override
public INPUT<DFN<T>> input(String selector) {
return setSelector(input(), selector);
}
@Override
public INPUT<DFN<T>> input() {
closeAttrs();
return input_(this, true);
}
@Override
public SELECT<DFN<T>> select() {
closeAttrs();
return select_(this, true);
}
@Override
public SELECT<DFN<T>> select(String selector) {
return setSelector(select(), selector);
}
@Override
public TEXTAREA<DFN<T>> textarea(String selector) {
return setSelector(textarea(), selector);
}
@Override
public TEXTAREA<DFN<T>> textarea() {
closeAttrs();
return textarea_(this, true);
}
@Override
public DFN<T> textarea(String selector, String cdata) {
return setSelector(textarea(), selector).__(cdata).__();
}
@Override
public BUTTON<DFN<T>> button() {
closeAttrs();
return button_(this, true);
}
@Override
public BUTTON<DFN<T>> button(String selector) {
return setSelector(button(), selector);
}
@Override
public DFN<T> button(String selector, String cdata) {
return setSelector(button(), selector).__(cdata).__();
}
}
public | DFN |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/AnnotationUtilsTests.java | {
"start": 67557,
"end": 67678
} | interface ____ {
TestRepeatable[] value();
}
@TestRepeatable("a")
@TestRepeatable("b")
static | TestRepeatableContainer |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/changelog/StateChangelogStorageFactory.java | {
"start": 1325,
"end": 1922
} | interface ____ {
/** Get the identifier for user to use this changelog storage factory. */
String getIdentifier();
/** Create the storage based on a configuration. */
StateChangelogStorage<?> createStorage(
JobID jobID,
Configuration configuration,
TaskManagerJobMetricGroup metricGroup,
LocalRecoveryConfig localRecoveryConfig)
throws IOException;
/** Create the storage for recovery. */
StateChangelogStorageView<?> createStorageView(Configuration configuration) throws IOException;
}
| StateChangelogStorageFactory |
java | apache__camel | components/camel-jira/src/main/java/org/apache/camel/component/jira/oauth/OAuthAsynchronousHttpClientFactory.java | {
"start": 3621,
"end": 5490
} | class ____ implements ApplicationProperties {
private final String baseUrl;
private RestClientApplicationProperties(URI jiraURI) {
this.baseUrl = jiraURI.getPath();
}
@Override
public String getBaseUrl() {
return baseUrl;
}
/**
* We'll always have an absolute URL as a client.
*/
@Override
public String getBaseUrl(UrlMode urlMode) {
return baseUrl;
}
@Override
public String getDisplayName() {
return "Atlassian JIRA Rest Java Client";
}
@Override
public String getPlatformId() {
return ApplicationProperties.PLATFORM_JIRA;
}
@Override
public String getVersion() {
return JIRA_REST_CLIENT_VERSION;
}
@Override
public Date getBuildDate() {
// TODO implement using MavenUtils, JRJC-123
throw new UnsupportedOperationException();
}
@Override
public String getBuildNumber() {
// TODO implement using MavenUtils, JRJC-123
return String.valueOf(0);
}
@Override
public File getHomeDirectory() {
return new File(".");
}
@Override
public Optional<Path> getLocalHomeDirectory() {
return Optional.empty();
}
@Override
public Optional<Path> getSharedHomeDirectory() {
return Optional.empty();
}
@Override
public String getPropertyValue(final String s) {
throw new UnsupportedOperationException("Not implemented");
}
@Override
public String getApplicationFileEncoding() {
return "UTF-8";
}
}
private static final | RestClientApplicationProperties |
java | quarkusio__quarkus | extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcProviderClientImpl.java | {
"start": 16600,
"end": 33120
} | enum ____ {
CLIENT_SECRET,
CLIENT_SECRET_BASIC_AUTH_SCHEME
}
}
private PreparedHttpRequest prepareHttpRequest(OidcRequestContextProperties requestProps, String uri,
MultiMap formBody, TokenOperation op, OidcEndpoint.Type endpointType) {
HttpRequest<Buffer> request = client.postAbs(uri);
final Buffer buffer;
PreparedHttpRequest.CredentialsToRetry credentialsToRetry = null;
if (!clientSecretQueryAuthentication) {
request.putHeader(CONTENT_TYPE_HEADER, APPLICATION_X_WWW_FORM_URLENCODED);
request.putHeader(ACCEPT_HEADER, APPLICATION_JSON);
if (isIntrospection(op) && introspectionBasicAuthScheme != null) {
request.putHeader(AUTHORIZATION_HEADER, introspectionBasicAuthScheme);
if (oidcConfig.clientId().isPresent() && oidcConfig.introspectionCredentials().includeClientId()) {
formBody.set(OidcConstants.CLIENT_ID, oidcConfig.clientId().get());
}
} else if (clientSecretBasicAuthScheme != null) {
request.putHeader(AUTHORIZATION_HEADER, clientSecretBasicAuthScheme);
if (hasClientSecretProvider()) {
credentialsToRetry = PreparedHttpRequest.CredentialsToRetry.CLIENT_SECRET_BASIC_AUTH_SCHEME;
}
} else if (jwtBearerAuthentication) {
final String clientAssertion = clientAssertionProvider.getClientAssertion();
if (clientAssertion == null) {
throw new OIDCException(String.format(
"Cannot get token for tenant '%s' because a JWT bearer client_assertion is not available",
oidcConfig.tenantId().get()));
}
formBody.add(OidcConstants.CLIENT_ASSERTION, clientAssertion);
formBody.add(OidcConstants.CLIENT_ASSERTION_TYPE, OidcConstants.JWT_BEARER_CLIENT_ASSERTION_TYPE);
} else if (clientJwtKey != null) {
String jwt = OidcCommonUtils.signJwtWithKey(oidcConfig, metadata.getTokenUri(), clientJwtKey);
if (OidcCommonUtils.isClientSecretPostJwtAuthRequired(oidcConfig.credentials())) {
formBody.add(OidcConstants.CLIENT_ID, oidcConfig.clientId().get());
formBody.add(OidcConstants.CLIENT_SECRET, jwt);
} else {
formBody.add(OidcConstants.CLIENT_ASSERTION_TYPE, OidcConstants.JWT_BEARER_CLIENT_ASSERTION_TYPE);
formBody.add(OidcConstants.CLIENT_ASSERTION, jwt);
}
} else if (OidcCommonUtils.isClientSecretPostAuthRequired(oidcConfig.credentials())) {
formBody.add(OidcConstants.CLIENT_ID, oidcConfig.clientId().get());
formBody.add(OidcConstants.CLIENT_SECRET, clientSecret);
if (hasClientSecretProvider()) {
credentialsToRetry = PreparedHttpRequest.CredentialsToRetry.CLIENT_SECRET;
}
} else {
formBody.add(OidcConstants.CLIENT_ID, oidcConfig.clientId().get());
}
buffer = OidcCommonUtils.encodeForm(formBody);
} else {
formBody.add(OidcConstants.CLIENT_ID, oidcConfig.clientId().get());
formBody.add(OidcConstants.CLIENT_SECRET, clientSecret);
if (hasClientSecretProvider()) {
credentialsToRetry = PreparedHttpRequest.CredentialsToRetry.CLIENT_SECRET;
}
for (Map.Entry<String, String> entry : formBody) {
request.addQueryParam(entry.getKey(), OidcCommonUtils.urlEncode(entry.getValue()));
}
request.putHeader(ACCEPT_HEADER, APPLICATION_JSON);
buffer = Buffer.buffer();
}
if (oidcConfig.codeGrant().headers() != null) {
for (Map.Entry<String, String> headerEntry : oidcConfig.codeGrant().headers().entrySet()) {
request.putHeader(headerEntry.getKey(), headerEntry.getValue());
}
}
if (LOG.isDebugEnabled()) {
LOG.debugf("%s token: url : %s, headers: %s, request params: %s", op.operation(), request.uri(), request.headers(),
formBody);
}
// Retry up to three times with a one-second delay between the retries if the connection is closed.
var preparedResponse = filterHttpRequest(requestProps, endpointType, request, buffer)
.sendBuffer(OidcCommonUtils.getRequestBuffer(requestProps, buffer))
.onFailure(SocketException.class)
.retry()
.atMost(oidcConfig.connectionRetryCount())
.onFailure().transform(Throwable::getCause);
return new PreparedHttpRequest(preparedResponse, credentialsToRetry);
}
    /**
     * Sends the prepared token endpoint request and returns the HTTP response.
     * <p>
     * When the client secret is supplied by a {@code CredentialsProvider}, a 401 response triggers a
     * credentials refresh: if the provider returns different credentials, the request is prepared and
     * sent once more with the new credentials; otherwise the original 401 response is returned as-is.
     *
     * @param requestProps filter context properties, may be null when no filters are registered
     * @param uri          absolute endpoint URI
     * @param formBody     form parameters for the token request
     * @param op           token operation being performed (used for logging and request shaping)
     * @param endpointType endpoint type used to select matching request filters
     */
    private UniOnItem<HttpResponse<Buffer>> getHttpResponse(OidcRequestContextProperties requestProps, String uri,
            MultiMap formBody, TokenOperation op, OidcEndpoint.Type endpointType) {
        final MultiMap newFormBody;
        boolean hasClientSecretProvider = hasClientSecretProvider();
        if (hasClientSecretProvider) {
            // copy to avoid duplications on credentials refresh: the retry path prepares the request
            // again from the original form body, which would otherwise accumulate credential params
            var delegate = io.vertx.core.MultiMap.caseInsensitiveMultiMap().addAll(formBody.getDelegate());
            newFormBody = new MultiMap(delegate);
        } else {
            newFormBody = formBody;
        }
        var preparedRequest = prepareHttpRequest(requestProps, uri, newFormBody, op, endpointType);
        if (hasClientSecretProvider && preparedRequest.credentialsToRetry != null) {
            return preparedRequest.httpRequestUni.flatMap(httpResponse -> {
                if (httpResponse.statusCode() == 401) {
                    // A 401 (e.g. unauthorized_client) may be caused by stale credentials; ask the
                    // credentials provider again and retry only if it returned different credentials
                    var credentialsRefresh = switch (preparedRequest.credentialsToRetry) {
                        case CLIENT_SECRET -> OidcCommonUtils.clientSecret(oidcConfig.credentials())
                                .map(newClientSecret -> {
                                    if (newClientSecret != null && !newClientSecret.equals(clientSecret)) {
                                        this.clientSecret = newClientSecret;
                                        return true;
                                    }
                                    return false;
                                });
                        case CLIENT_SECRET_BASIC_AUTH_SCHEME -> OidcCommonUtils.clientSecret(oidcConfig.credentials())
                                .map(newClientSecret -> {
                                    // The basic-auth header value is derived from the secret, so both
                                    // fields must be updated together
                                    var newClientSecretBasicAuthScheme = OidcCommonUtils.initClientSecretBasicAuth(oidcConfig,
                                            newClientSecret);
                                    if (newClientSecretBasicAuthScheme != null
                                            && !newClientSecretBasicAuthScheme.equals(clientSecretBasicAuthScheme)) {
                                        this.clientSecret = newClientSecret;
                                        this.clientSecretBasicAuthScheme = newClientSecretBasicAuthScheme;
                                        return true;
                                    }
                                    return false;
                                });
                    };
                    return credentialsRefresh.flatMap(credentialsRefreshed -> {
                        if (Boolean.TRUE.equals(credentialsRefreshed)) {
                            LOG.debug("HTTP request failed with response status code 401 and the CredentialsProvider"
                                    + " provided new credentials, retrying the request with new credentials");
                            // Retry once with the original (uncontaminated) form body
                            return prepareHttpRequest(requestProps, uri, formBody, op, endpointType).httpRequestUni;
                        }
                        // Credentials unchanged: propagate the original 401 response
                        return Uni.createFrom().item(httpResponse);
                    });
                }
                return Uni.createFrom().item(httpResponse);
            }).onItem();
        }
        return preparedRequest.httpRequestUni.onItem();
    }
private boolean hasClientSecretProvider() {
return oidcConfig.credentials().clientSecret().provider().key().isPresent();
}
private AuthorizationCodeTokens getAuthorizationCodeTokens(OidcRequestContextProperties requestProps,
HttpResponse<Buffer> resp) {
JsonObject json = getJsonObject(requestProps, metadata.getTokenUri(), resp, OidcEndpoint.Type.TOKEN);
final String idToken = json.getString(OidcConstants.ID_TOKEN_VALUE);
final String accessToken = json.getString(OidcConstants.ACCESS_TOKEN_VALUE);
final String refreshToken = json.getString(OidcConstants.REFRESH_TOKEN_VALUE);
Long tokenExpiresIn = null;
Object tokenExpiresInObj = json.getValue(OidcConstants.EXPIRES_IN);
if (tokenExpiresInObj != null) {
tokenExpiresIn = tokenExpiresInObj instanceof Number ? ((Number) tokenExpiresInObj).longValue()
: Long.parseLong(tokenExpiresInObj.toString());
}
final String accessTokenScope = json.getString(OidcConstants.TOKEN_SCOPE);
return new AuthorizationCodeTokens(idToken, accessToken, refreshToken, tokenExpiresIn, accessTokenScope);
}
private UserInfoResponse getUserInfo(OidcRequestContextProperties requestProps, HttpResponse<Buffer> resp) {
return new UserInfoResponse(resp.getHeader(CONTENT_TYPE_HEADER),
getString(requestProps, metadata.getUserInfoUri(), resp, OidcEndpoint.Type.USERINFO));
}
private TokenIntrospection getTokenIntrospection(OidcRequestContextProperties requestProps, HttpResponse<Buffer> resp) {
return new TokenIntrospection(
getString(requestProps, metadata.getIntrospectionUri(), resp, OidcEndpoint.Type.INTROSPECTION));
}
private JsonObject getJsonObject(OidcRequestContextProperties requestProps, String requestUri, HttpResponse<Buffer> resp,
OidcEndpoint.Type endpoint) {
Buffer buffer = OidcCommonUtils.filterHttpResponse(requestProps, resp, responseFilters, endpoint);
if (resp.statusCode() == 200) {
LOG.debugf("Request succeeded: %s", resp.bodyAsJsonObject());
return buffer.toJsonObject();
} else if (resp.statusCode() == 302) {
throw OidcCommonUtils.createOidcClientRedirectException(resp);
} else {
throw responseException(requestUri, resp, buffer);
}
}
private String getString(final OidcRequestContextProperties requestProps, String requestUri, HttpResponse<Buffer> resp,
OidcEndpoint.Type endpoint) {
Buffer buffer = OidcCommonUtils.filterHttpResponse(requestProps, resp, responseFilters, endpoint);
if (resp.statusCode() == 200) {
LOG.debugf("Request succeeded: %s", resp.bodyAsString());
return buffer.toString();
} else if (resp.statusCode() == 302) {
throw OidcCommonUtils.createOidcClientRedirectException(resp);
} else {
throw responseException(requestUri, resp, buffer);
}
}
private static OIDCException responseException(String requestUri, HttpResponse<Buffer> resp, Buffer buffer) {
String errorMessage = buffer == null ? null : buffer.toString();
if (errorMessage != null && !errorMessage.isEmpty()) {
LOG.errorf("Request %s has failed: status: %d, error message: %s", requestUri, resp.statusCode(), errorMessage);
throw new OIDCException(errorMessage);
} else {
LOG.errorf("Request %s has failed: status: %d", requestUri, resp.statusCode());
throw new OIDCException("Error status:" + resp.statusCode());
}
}
@Override
public void close() {
client.close();
if (clientAssertionProvider != null) {
clientAssertionProvider.close();
}
}
    // Returns the key used to sign client JWT assertions; null when secret-based auth is used.
    Key getClientJwtKey() {
        return clientJwtKey;
    }
    // Returns the current client secret; may have been refreshed by a CredentialsProvider.
    String getClientSecret() {
        return clientSecret;
    }
private HttpRequest<Buffer> filterHttpRequest(OidcRequestContextProperties requestProps, OidcEndpoint.Type endpointType,
HttpRequest<Buffer> request, Buffer body) {
if (!requestFilters.isEmpty()) {
OidcRequestContext context = new OidcRequestContext(request, body, requestProps);
for (OidcRequestFilter filter : OidcCommonUtils.getMatchingOidcRequestFilters(requestFilters, endpointType)) {
filter.filter(context);
}
}
return request;
}
    // Builds filter context properties for the given grant type with no inherited properties.
    private OidcRequestContextProperties getRequestProps(String grantType) {
        return getRequestProps(null, grantType);
    }
    // Builds filter context properties inheriting the given properties, with no grant type.
    private OidcRequestContextProperties getRequestProps(OidcRequestContextProperties contextProperties) {
        return getRequestProps(contextProperties, null);
    }
private OidcRequestContextProperties getRequestProps(OidcRequestContextProperties contextProperties, String grantType) {
if (requestFilters.isEmpty() && responseFilters.isEmpty()) {
return null;
}
Map<String, Object> newProperties = contextProperties == null ? new HashMap<>()
: new HashMap<>(contextProperties.getAll());
newProperties.put(OidcUtils.TENANT_ID_ATTRIBUTE, oidcConfig.tenantId().orElse(OidcUtils.DEFAULT_TENANT_ID));
newProperties.put(OidcConfigurationMetadata.class.getName(), metadata);
if (grantType != null) {
newProperties.put(OidcConstants.GRANT_TYPE, grantType);
}
return new OidcRequestContextProperties(newProperties);
}
    // Returns the Vert.x instance this client was created with.
    Vertx getVertx() {
        return vertx;
    }
    // Returns the underlying web client used for all endpoint calls.
    public WebClient getWebClient() {
        return client;
    }
record UserInfoResponse(String contentType, String data) {
}
    // True if the given token operation is a token introspection request.
    static boolean isIntrospection(TokenOperation op) {
        return op == TokenOperation.INTROSPECT;
    }
    /**
     * Asynchronously creates an {@code OidcProviderClientImpl}, resolving the client credentials
     * in priority order: client secret (with its derived basic-auth scheme value), then a client
     * JWT signing key, then a symmetric JWT secret. Failures while resolving credentials are
     * logged and propagated.
     */
    static Uni<OidcProviderClientImpl> of(WebClient client, Vertx vertx, OidcConfigurationMetadata metadata,
            OidcTenantConfig oidcConfig,
            Map<OidcEndpoint.Type, List<OidcRequestFilter>> requestFilters,
            Map<OidcEndpoint.Type, List<OidcResponseFilter>> responseFilters) {
        // 1. Try the configured client secret first
        return OidcCommonUtils.clientSecret(oidcConfig.credentials())
                .onItem().ifNotNull()
                .transform(clientSecret -> new ClientCredentials(clientSecret,
                        OidcCommonUtils.initClientSecretBasicAuth(oidcConfig, clientSecret)))
                // 2. No client secret: try a client JWT key, then fall back to a JWT secret
                .onItem().ifNull().switchTo(() -> OidcCommonUtils.initClientJwtKey(oidcConfig, true)
                        .onItem().ifNotNull().transform(ClientCredentials::new)
                        .onItem().ifNull()
                        .switchTo(() -> OidcCommonUtils.jwtSecret(oidcConfig.credentials()).map(ClientCredentials::new)))
                .onFailure().invoke(t -> LOG.error("Failed to create OidcProviderClientImpl", t))
                .map(clientCredentials -> new OidcProviderClientImpl(client, vertx, metadata, oidcConfig,
                        clientCredentials, requestFilters, responseFilters));
    }
String getClientOrJwtSecret() {
if (clientSecret != null) {
return clientSecret;
} else if (jwtSecret != null) {
LOG.debug("Client secret is not configured, returning configured 'client_jwt_secret' secret");
return jwtSecret;
}
LOG.debug("Client secret and the 'client_jwt_secret' secret are not configured");
return null;
}
    /**
     * Holder for the resolved client authentication material. The static factory populates exactly
     * one flavor via the convenience constructors below; the remaining components stay null.
     */
    private record ClientCredentials(Key clientJwtKey, String clientSecret, String jwtSecret,
            String clientSecretBasicAuthScheme) {
        // Credentials backed by a key used to sign client JWT assertions.
        private ClientCredentials(Key clientJwtKey) {
            this(clientJwtKey, null, null, null);
        }
        // Credentials backed by a symmetric JWT secret.
        private ClientCredentials(String jwtSecret) {
            this(null, null, jwtSecret, null);
        }
        // Credentials backed by a plain client secret plus its derived basic-auth scheme value
        // (see OidcCommonUtils.initClientSecretBasicAuth).
        private ClientCredentials(String clientSecret, String clientSecretBasicAuthScheme) {
            this(null, clientSecret, null, clientSecretBasicAuthScheme);
        }
    }
}
| CredentialsToRetry |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inlineme/InlinerTest.java | {
"start": 3848,
"end": 4299
} | class ____ {
public void doTest() {
Client client = new Client();
String result = client.after("\\"");
}
}
""")
.doTest();
}
@Test
public void method_withParamSwap() {
refactoringTestHelper
.addInputLines(
"Client.java",
"""
import com.google.errorprone.annotations.InlineMe;
public final | Caller |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/search/index/FlatVectorIndexParams.java | {
"start": 704,
"end": 2771
} | class ____ implements FlatVectorIndex,
VectorDimParam<FlatVectorOptionalArgs>,
VectorDistParam<FlatVectorOptionalArgs>,
FlatVectorOptionalArgs {
private final String fieldName;
private VectorTypeParam.Type type;
private int dim;
private VectorDistParam.DistanceMetric distanceMetric;
private Integer initialCapacity;
private Integer blockSize;
private int count;
private String as;
FlatVectorIndexParams(String name) {
this.fieldName = name;
}
@Override
public FlatVectorIndexParams as(String as) {
this.as = as;
return this;
}
@Override
public VectorDimParam<FlatVectorOptionalArgs> type(Type type) {
count++;
this.type = type;
return this;
}
@Override
public VectorDistParam<FlatVectorOptionalArgs> dim(int value) {
count++;
this.dim = value;
return this;
}
@Override
public FlatVectorOptionalArgs distance(DistanceMetric metric) {
count++;
this.distanceMetric = metric;
return this;
}
@Override
public FlatVectorOptionalArgs initialCapacity(int value) {
count++;
this.initialCapacity = value;
return this;
}
@Override
public FlatVectorOptionalArgs blockSize(int value) {
count++;
this.blockSize = value;
return this;
}
public String getFieldName() {
return fieldName;
}
public Type getType() {
return type;
}
public int getDim() {
return dim;
}
public DistanceMetric getDistanceMetric() {
return distanceMetric;
}
public Integer getInitialCapacity() {
return initialCapacity;
}
public Integer getBlockSize() {
return blockSize;
}
public int getCount() {
return count;
}
public String getAs() {
return as;
}
}
| FlatVectorIndexParams |
java | apache__hadoop | hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/BaseSLSRunnerTest.java | {
"start": 1541,
"end": 1669
} | class ____ ease the implementation of SLS-based tests.
*/
@NotThreadSafe
@SuppressWarnings("VisibilityModifier")
public abstract | to |
java | apache__camel | components/camel-mongodb/src/test/java/org/apache/camel/component/mongodb/integration/MongoDbBulkWriteOperationIT.java | {
"start": 1853,
"end": 6850
} | class ____ extends AbstractMongoDbITSupport implements ConfigurableRoute {
@Test
public void testBulkWrite() {
// Test that the collection has 0 documents in it
assertEquals(0, testCollection.countDocuments());
pumpDataIntoTestCollection();
List<WriteModel<Document>> bulkOperations = Arrays
.asList(new InsertOneModel<>(new Document("scientist", "Pierre Curie")),
new UpdateOneModel<>(
new Document("_id", "2"),
new Document("$set", new Document("scientist", "Charles Darwin"))),
new UpdateManyModel<>(
new Document("scientist", "Curie"),
new Document("$set", new Document("scientist", "Marie Curie"))),
new ReplaceOneModel<>(new Document("_id", "1"), new Document("scientist", "Albert Einstein")),
new DeleteOneModel<>(new Document("_id", "3")),
new DeleteManyModel<>(new Document("scientist", "Bohr")));
BulkWriteResult result = template.requestBody("direct:bulkWrite", bulkOperations, BulkWriteResult.class);
assertNotNull(result);
// 1 insert
assertEquals(1, result.getInsertedCount(), "Records inserted should be 2 : ");
// 1 updateOne + 100 updateMany + 1 replaceOne
assertEquals(102, result.getMatchedCount(), "Records matched should be 102 : ");
assertEquals(102, result.getModifiedCount(), "Records modified should be 102 : ");
// 1 deleteOne + 100 deleteMany
assertEquals(101, result.getDeletedCount(), "Records deleted should be 101 : ");
}
@Test
public void testOrderedBulkWriteWithError() {
// Test that the collection has 0 documents in it
assertEquals(0, testCollection.countDocuments());
pumpDataIntoTestCollection();
List<WriteModel<Document>> bulkOperations = Arrays
.asList(new InsertOneModel<>(new Document("scientist", "Pierre Curie")),
// this insert failed and bulk stop
new InsertOneModel<>(new Document("_id", "1")),
new InsertOneModel<>(new Document("scientist", "Descartes")),
new UpdateOneModel<>(
new Document("_id", "5"), new Document("$set", new Document("scientist", "Marie Curie"))),
new DeleteOneModel<>(new Document("_id", "2")));
try {
template.requestBody("direct:bulkWrite", bulkOperations, BulkWriteResult.class);
fail("Bulk operation should throw Exception");
} catch (CamelExecutionException e) {
extractAndAssertCamelMongoDbException(e, "duplicate key error");
// count = 1000 records + 1 inserted
assertEquals(1001, testCollection.countDocuments());
}
}
@Test
public void testUnorderedBulkWriteWithError() {
// Test that the collection has 0 documents in it
assertEquals(0, testCollection.countDocuments());
pumpDataIntoTestCollection();
List<WriteModel<Document>> bulkOperations = Arrays
.asList(new InsertOneModel<>(new Document("scientist", "Pierre Curie")),
// this insert failed and bulk continue
new InsertOneModel<>(new Document("_id", "1")),
new InsertOneModel<>(new Document("scientist", "Descartes")),
new UpdateOneModel<>(
new Document("_id", "5"), new Document("$set", new Document("scientist", "Marie Curie"))),
new DeleteOneModel<>(new Document("_id", "2")));
try {
template.requestBody("direct:unorderedBulkWrite", bulkOperations, BulkWriteResult.class);
fail("Bulk operation should throw Exception");
} catch (CamelExecutionException e) {
extractAndAssertCamelMongoDbException(e, "duplicate key error");
// count = 1000 + 2 inserted + 1 deleted
assertEquals(1001, testCollection.countDocuments());
}
}
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:bulkWrite").to(
"mongodb:myDb?database={{mongodb.testDb}}&collection={{mongodb.testCollection}}&operation=bulkWrite");
from("direct:unorderedBulkWrite").setHeader(MongoDbConstants.BULK_ORDERED).constant(false)
.to("mongodb:myDb?database={{mongodb.testDb}}&collection={{mongodb.testCollection}}&operation=bulkWrite");
}
};
}
@RouteFixture
@Override
public void createRouteBuilder(CamelContext context) throws Exception {
context.addRoutes(createRouteBuilder());
}
}
| MongoDbBulkWriteOperationIT |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/telemetry/endpoints/onerror/GlobalErrorHandler.java | {
"start": 239,
"end": 528
} | class ____ {
public static final CountDownLatch ILLEGAL_ARGUMENT_EXCEPTION_LATCH = new CountDownLatch(1);
@OnError
public String onError(IllegalArgumentException e) {
ILLEGAL_ARGUMENT_EXCEPTION_LATCH.countDown();
return e.getMessage();
}
}
| GlobalErrorHandler |
java | apache__avro | lang/java/avro/src/test/java/org/apache/avro/generic/GenericDataArrayTest.java | {
"start": 950,
"end": 1527
} | class ____ {
@Test
void test() {
GenericData.Array<String> array = new GenericData.Array<>(10,
Schema.createArray(Schema.create(Schema.Type.STRING)));
array.add("One");
array.add("Two");
array.add("Two");
array.add("Three");
array.add(4, "Four");
array.remove(1);
Assertions.assertEquals(4, array.size());
Assertions.assertEquals("One", array.get(0));
Assertions.assertEquals("Two", array.get(1));
Assertions.assertEquals("Three", array.get(2));
Assertions.assertEquals("Four", array.get(3));
}
}
| GenericDataArrayTest |
java | apache__camel | core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/DefaultFactoryFinder.java | {
"start": 6116,
"end": 6192
} | interface ____ {
Class<?> get() throws Exception;
}
}
| ClassSupplier |
java | apache__camel | components/camel-jms/src/main/java/org/apache/camel/component/jms/ReplyToType.java | {
"start": 887,
"end": 949
} | enum ____ {
Temporary,
Shared,
Exclusive
}
| ReplyToType |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/web/client/RestClientVersionTests.java | {
"start": 1309,
"end": 4470
} | class ____ {
private final MockWebServer server = new MockWebServer();
private RestClient.Builder restClientBuilder;
@BeforeEach
void setUp() throws IOException {
this.server.start();
this.restClientBuilder = RestClient.builder()
.requestFactory(new JdkClientHttpRequestFactory())
.baseUrl(this.server.url("/").toString());
MockResponse response = new MockResponse.Builder()
.setHeader("Content-Type", "text/plain")
.body("body")
.build();
this.server.enqueue(response);
}
@AfterEach
void shutdown() {
this.server.close();
}
@Test
void header() {
performRequest(ApiVersionInserter.useHeader("API-Version"));
expectRequest(request -> assertThat(request.getHeaders().get("API-Version")).isEqualTo("1.2"));
}
@Test
void queryParam() {
performRequest(ApiVersionInserter.useQueryParam("api-version"));
expectRequest(request -> assertThat(request.getTarget()).isEqualTo("/path?api-version=1.2"));
}
@Test
void mediaTypeParam() {
performRequest(ApiVersionInserter.useMediaTypeParam("v"));
expectRequest(request -> assertThat(request.getHeaders().get("Content-Type")).isEqualTo("application/json;v=1.2"));
}
@Test
void pathSegmentIndexLessThanSize() {
performRequest(ApiVersionInserter.builder().usePathSegment(0).withVersionFormatter(v -> "v" + v).build());
expectRequest(request -> assertThat(request.getTarget()).isEqualTo("/v1.2/path"));
}
@Test
void pathSegmentIndexEqualToSize() {
performRequest(ApiVersionInserter.builder().usePathSegment(1).withVersionFormatter(v -> "v" + v).build());
expectRequest(request -> assertThat(request.getTarget()).isEqualTo("/path/v1.2"));
}
@Test
void pathSegmentIndexGreaterThanSize() {
assertThatIllegalStateException()
.isThrownBy(() -> performRequest(ApiVersionInserter.usePathSegment(2)))
.withMessage("Cannot insert version into '/path' at path segment index 2");
}
@Test
void defaultVersion() {
ApiVersionInserter inserter = ApiVersionInserter.useHeader("API-Version");
RestClient restClient = restClientBuilder.defaultApiVersion(1.2).apiVersionInserter(inserter).build();
restClient.get().uri("/path").retrieve().body(String.class);
expectRequest(request -> assertThat(request.getHeaders().get("API-Version")).isEqualTo("1.2"));
}
@Test
void noVersion() {
ApiVersionInserter inserter = ApiVersionInserter.useHeader("API-Version");
RestClient restClient = restClientBuilder.defaultApiVersion(1.2).apiVersionInserter(inserter).build();
restClient.get().uri("/path").apiVersion(null).retrieve().body(String.class);
expectRequest(request -> assertThat(request.getHeaders().get("API-Version")).isNull());
}
private void performRequest(ApiVersionInserter versionInserter) {
restClientBuilder.apiVersionInserter(versionInserter).build()
.post().uri("/path")
.contentType(MediaType.APPLICATION_JSON)
.apiVersion(1.2)
.retrieve()
.body(String.class);
}
private void expectRequest(Consumer<RecordedRequest> consumer) {
try {
consumer.accept(this.server.takeRequest());
}
catch (InterruptedException ex) {
throw new IllegalStateException(ex);
}
}
}
| RestClientVersionTests |
java | redisson__redisson | redisson/src/main/java/org/redisson/jcache/configuration/JCacheConfiguration.java | {
"start": 1208,
"end": 4682
} | class ____<K, V> implements CompleteConfiguration<K, V> {
private static final long serialVersionUID = -7861479608049089078L;
private final ExpiryPolicy expiryPolicy;
private final MutableConfiguration<K, V> delegate;
public JCacheConfiguration(Configuration<K, V> configuration) {
if (configuration != null) {
if (configuration instanceof RedissonConfiguration) {
configuration = ((RedissonConfiguration<K, V>) configuration).getJcacheConfig();
}
if (configuration instanceof CompleteConfiguration) {
delegate = new MutableConfiguration<K, V>((CompleteConfiguration<K, V>) configuration);
} else {
delegate = new MutableConfiguration<K, V>();
delegate.setStoreByValue(configuration.isStoreByValue());
delegate.setTypes(configuration.getKeyType(), configuration.getValueType());
}
} else {
delegate = new MutableConfiguration<K, V>();
}
this.expiryPolicy = delegate.getExpiryPolicyFactory().create();
}
@Override
public Class<K> getKeyType() {
if (delegate.getKeyType() == null) {
return (Class<K>) Object.class;
}
return delegate.getKeyType();
}
@Override
public Class<V> getValueType() {
if (delegate.getValueType() == null) {
return (Class<V>) Object.class;
}
return delegate.getValueType();
}
@Override
public boolean isStoreByValue() {
return delegate.isStoreByValue();
}
@Override
public boolean isReadThrough() {
return delegate.isReadThrough();
}
@Override
public boolean isWriteThrough() {
return delegate.isWriteThrough();
}
@Override
public boolean isStatisticsEnabled() {
return delegate.isStatisticsEnabled();
}
public void setStatisticsEnabled(boolean enabled) {
delegate.setStatisticsEnabled(enabled);
}
public void setManagementEnabled(boolean enabled) {
delegate.setManagementEnabled(enabled);
}
@Override
public boolean isManagementEnabled() {
return delegate.isManagementEnabled();
}
@Override
public Iterable<CacheEntryListenerConfiguration<K, V>> getCacheEntryListenerConfigurations() {
return delegate.getCacheEntryListenerConfigurations();
}
public void addCacheEntryListenerConfiguration(
CacheEntryListenerConfiguration<K, V> cacheEntryListenerConfiguration) {
delegate.addCacheEntryListenerConfiguration(cacheEntryListenerConfiguration);
}
public void removeCacheEntryListenerConfiguration(
CacheEntryListenerConfiguration<K, V> cacheEntryListenerConfiguration) {
delegate.removeCacheEntryListenerConfiguration(cacheEntryListenerConfiguration);
}
@Override
public Factory<CacheLoader<K, V>> getCacheLoaderFactory() {
return delegate.getCacheLoaderFactory();
}
@Override
public Factory<CacheWriter<? super K, ? super V>> getCacheWriterFactory() {
return delegate.getCacheWriterFactory();
}
@Override
public Factory<ExpiryPolicy> getExpiryPolicyFactory() {
return delegate.getExpiryPolicyFactory();
}
public ExpiryPolicy getExpiryPolicy() {
return expiryPolicy;
}
}
| JCacheConfiguration |
java | apache__spark | core/src/test/java/org/apache/spark/memory/TaskMemoryManagerSuite.java | {
"start": 1112,
"end": 11794
} | class ____ {
@Test
public void leakedPageMemoryIsDetected() {
final TaskMemoryManager manager = new TaskMemoryManager(
new UnifiedMemoryManager(
new SparkConf().set(package$.MODULE$.MEMORY_OFFHEAP_ENABLED(), false),
Long.MAX_VALUE,
Long.MAX_VALUE / 2,
1),
0);
final MemoryConsumer c = new TestMemoryConsumer(manager);
manager.allocatePage(4096, c); // leak memory
Assertions.assertEquals(4096, manager.getMemoryConsumptionForThisTask());
Assertions.assertEquals(4096, manager.cleanUpAllAllocatedMemory());
}
@Test
public void encodePageNumberAndOffsetOffHeap() {
final SparkConf conf = new SparkConf()
.set(package$.MODULE$.MEMORY_OFFHEAP_ENABLED(), true)
.set(package$.MODULE$.MEMORY_OFFHEAP_SIZE(), 1000L);
final TaskMemoryManager manager = new TaskMemoryManager(new TestMemoryManager(conf), 0);
final MemoryConsumer c = new TestMemoryConsumer(manager, MemoryMode.OFF_HEAP);
final MemoryBlock dataPage = manager.allocatePage(256, c);
// In off-heap mode, an offset is an absolute address that may require more than 51 bits to
// encode. This test exercises that corner-case:
final long offset = ((1L << TaskMemoryManager.OFFSET_BITS) + 10);
final long encodedAddress = manager.encodePageNumberAndOffset(dataPage, offset);
Assertions.assertNull(manager.getPage(encodedAddress));
Assertions.assertEquals(offset, manager.getOffsetInPage(encodedAddress));
manager.freePage(dataPage, c);
}
@Test
public void encodePageNumberAndOffsetOnHeap() {
final TaskMemoryManager manager = new TaskMemoryManager(
new TestMemoryManager(
new SparkConf().set(package$.MODULE$.MEMORY_OFFHEAP_ENABLED(), false)), 0);
final MemoryConsumer c = new TestMemoryConsumer(manager, MemoryMode.ON_HEAP);
final MemoryBlock dataPage = manager.allocatePage(256, c);
final long encodedAddress = manager.encodePageNumberAndOffset(dataPage, 64);
Assertions.assertEquals(dataPage.getBaseObject(), manager.getPage(encodedAddress));
Assertions.assertEquals(64, manager.getOffsetInPage(encodedAddress));
}
@Test
public void freeingPageSetsPageNumberToSpecialConstant() {
final TaskMemoryManager manager = new TaskMemoryManager(
new TestMemoryManager(
new SparkConf().set(package$.MODULE$.MEMORY_OFFHEAP_ENABLED(), false)), 0);
final MemoryConsumer c = new TestMemoryConsumer(manager, MemoryMode.ON_HEAP);
final MemoryBlock dataPage = manager.allocatePage(256, c);
c.freePage(dataPage);
Assertions.assertEquals(MemoryBlock.FREED_IN_ALLOCATOR_PAGE_NUMBER, dataPage.pageNumber);
}
@Test
public void freeingPageDirectlyInAllocatorTriggersAssertionError() {
final TaskMemoryManager manager = new TaskMemoryManager(
new TestMemoryManager(
new SparkConf().set(package$.MODULE$.MEMORY_OFFHEAP_ENABLED(), false)), 0);
final MemoryConsumer c = new TestMemoryConsumer(manager, MemoryMode.ON_HEAP);
final MemoryBlock dataPage = manager.allocatePage(256, c);
Assertions.assertThrows(AssertionError.class, () -> MemoryAllocator.HEAP.free(dataPage));
}
@Test
public void callingFreePageOnDirectlyAllocatedPageTriggersAssertionError() {
final TaskMemoryManager manager = new TaskMemoryManager(
new TestMemoryManager(
new SparkConf().set(package$.MODULE$.MEMORY_OFFHEAP_ENABLED(), false)), 0);
final MemoryConsumer c = new TestMemoryConsumer(manager, MemoryMode.ON_HEAP);
final MemoryBlock dataPage = MemoryAllocator.HEAP.allocate(256);
Assertions.assertThrows(AssertionError.class, () -> manager.freePage(dataPage, c));
}
@Test
public void cooperativeSpilling() {
final TestMemoryManager memoryManager = new TestMemoryManager(new SparkConf());
memoryManager.limit(100);
final TaskMemoryManager manager = new TaskMemoryManager(memoryManager, 0);
TestMemoryConsumer c1 = new TestMemoryConsumer(manager);
TestMemoryConsumer c2 = new TestMemoryConsumer(manager);
c1.use(100);
Assertions.assertEquals(100, c1.getUsed());
c2.use(100);
Assertions.assertEquals(100, c2.getUsed());
Assertions.assertEquals(0, c1.getUsed()); // spilled
c1.use(100);
Assertions.assertEquals(100, c1.getUsed());
Assertions.assertEquals(0, c2.getUsed()); // spilled
c1.use(50);
Assertions.assertEquals(50, c1.getUsed()); // spilled
Assertions.assertEquals(0, c2.getUsed());
c2.use(50);
Assertions.assertEquals(50, c1.getUsed());
Assertions.assertEquals(50, c2.getUsed());
c1.use(100);
Assertions.assertEquals(100, c1.getUsed());
Assertions.assertEquals(0, c2.getUsed()); // spilled
c1.free(20);
Assertions.assertEquals(80, c1.getUsed());
c2.use(10);
Assertions.assertEquals(80, c1.getUsed());
Assertions.assertEquals(10, c2.getUsed());
c2.use(100);
Assertions.assertEquals(100, c2.getUsed());
Assertions.assertEquals(0, c1.getUsed()); // spilled
c1.free(0);
c2.free(100);
Assertions.assertEquals(0, manager.cleanUpAllAllocatedMemory());
}
@Test
public void cooperativeSpilling2() {
final TestMemoryManager memoryManager = new TestMemoryManager(new SparkConf());
memoryManager.limit(100);
final TaskMemoryManager manager = new TaskMemoryManager(memoryManager, 0);
TestMemoryConsumer c1 = new TestMemoryConsumer(manager);
TestMemoryConsumer c2 = new TestMemoryConsumer(manager);
TestMemoryConsumer c3 = new TestMemoryConsumer(manager);
c1.use(20);
Assertions.assertEquals(20, c1.getUsed());
c2.use(80);
Assertions.assertEquals(80, c2.getUsed());
c3.use(80);
Assertions.assertEquals(20, c1.getUsed()); // c1: not spilled
Assertions.assertEquals(0, c2.getUsed()); // c2: spilled as it has required size of memory
Assertions.assertEquals(80, c3.getUsed());
c2.use(80);
Assertions.assertEquals(20, c1.getUsed()); // c1: not spilled
Assertions.assertEquals(0, c3.getUsed()); // c3: spilled as it has required size of memory
Assertions.assertEquals(80, c2.getUsed());
c3.use(10);
Assertions.assertEquals(0, c1.getUsed()); // c1: spilled as it has required size of memory
Assertions
.assertEquals(80, c2.getUsed()); // c2: not spilled as spilling c1 already satisfies c3
Assertions.assertEquals(10, c3.getUsed());
c1.free(0);
c2.free(80);
c3.free(10);
Assertions.assertEquals(0, manager.cleanUpAllAllocatedMemory());
}
@Test
public void selfSpillIsLowestPriorities() {
// Test that requesting memory consumer (a "self-spill") is chosen last to spill.
final TestMemoryManager memoryManager = new TestMemoryManager(new SparkConf());
memoryManager.limit(100);
final TaskMemoryManager manager = new TaskMemoryManager(memoryManager, 0);
TestMemoryConsumer c1 = new TestMemoryConsumer(manager);
TestMemoryConsumer c2 = new TestMemoryConsumer(manager);
TestMemoryConsumer c3 = new TestMemoryConsumer(manager);
// Self-spill is the lowest priority: c2 and c3 are spilled first even though they have less
// memory.
c1.use(50);
c2.use(40);
c3.use(10);
c1.use(50);
Assertions.assertEquals(100, c1.getUsed());
Assertions.assertEquals(0, c2.getUsed());
Assertions.assertEquals(0, c3.getUsed());
// Force a self-spill.
c1.use(50);
Assertions.assertEquals(50, c1.getUsed());
// Force a self-spill after c2 is spilled.
c2.use(10);
c1.use(60);
Assertions.assertEquals(60, c1.getUsed());
Assertions.assertEquals(0, c2.getUsed());
c1.free(c1.getUsed());
// Redo a similar scenario but with a different memory requester.
c1.use(50);
c2.use(40);
c3.use(10);
c3.use(50);
Assertions.assertEquals(0, c1.getUsed());
Assertions.assertEquals(40, c2.getUsed());
Assertions.assertEquals(60, c3.getUsed());
}
@Test
public void prefersSmallestBigEnoughAllocation() {
// Test that the smallest consumer with at least the requested size is chosen to spill.
final TestMemoryManager memoryManager = new TestMemoryManager(new SparkConf());
memoryManager.limit(100);
final TaskMemoryManager manager = new TaskMemoryManager(memoryManager, 0);
TestMemoryConsumer c1 = new TestMemoryConsumer(manager);
TestMemoryConsumer c2 = new TestMemoryConsumer(manager);
TestMemoryConsumer c3 = new TestMemoryConsumer(manager);
TestMemoryConsumer c4 = new TestMemoryConsumer(manager);
c1.use(50);
c2.use(40);
c3.use(10);
c4.use(5);
Assertions.assertEquals(50, c1.getUsed());
Assertions.assertEquals(40, c2.getUsed());
Assertions.assertEquals(0, c3.getUsed());
Assertions.assertEquals(5, c4.getUsed());
// Allocate 45. 5 is unused and 40 will come from c2.
c3.use(45);
Assertions.assertEquals(50, c1.getUsed());
Assertions.assertEquals(0, c2.getUsed());
Assertions.assertEquals(45, c3.getUsed());
Assertions.assertEquals(5, c4.getUsed());
// Allocate 51. 50 is taken from c1, then c4 is the best fit to get 1 more byte.
c2.use(51);
Assertions.assertEquals(0, c1.getUsed());
Assertions.assertEquals(51, c2.getUsed());
Assertions.assertEquals(45, c3.getUsed());
Assertions.assertEquals(0, c4.getUsed());
}
@Test
public void shouldNotForceSpillingInDifferentModes() {
final TestMemoryManager memoryManager = new TestMemoryManager(new SparkConf());
memoryManager.limit(100);
final TaskMemoryManager manager = new TaskMemoryManager(memoryManager, 0);
TestMemoryConsumer c1 = new TestMemoryConsumer(manager, MemoryMode.ON_HEAP);
TestMemoryConsumer c2 = new TestMemoryConsumer(manager, MemoryMode.OFF_HEAP);
c1.use(80);
Assertions.assertEquals(80, c1.getUsed());
c2.use(80);
Assertions.assertEquals(20, c2.getUsed()); // not enough memory
Assertions.assertEquals(80, c1.getUsed()); // not spilled
c2.use(10);
Assertions.assertEquals(10, c2.getUsed()); // spilled
Assertions.assertEquals(80, c1.getUsed()); // not spilled
}
@Test
public void offHeapConfigurationBackwardsCompatibility() {
// Tests backwards-compatibility with the old `spark.unsafe.offHeap` configuration, which
// was deprecated in Spark 1.6 and replaced by `spark.memory.offHeap.enabled` (see SPARK-12251).
final SparkConf conf = new SparkConf()
.set("spark.unsafe.offHeap", "true")
.set(package$.MODULE$.MEMORY_OFFHEAP_SIZE(), 1000L);
final TaskMemoryManager manager = new TaskMemoryManager(new TestMemoryManager(conf), 0);
Assertions.assertSame(MemoryMode.OFF_HEAP, manager.tungstenMemoryMode);
}
}
| TaskMemoryManagerSuite |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/factories/TestModelProviderFactory.java | {
"start": 1418,
"end": 3149
} | class ____ implements ModelProviderFactory {
public static final String IDENTIFIER = "test-model";
public static final ConfigOption<String> TASK =
ConfigOptions.key("task")
.stringType()
.noDefaultValue()
.withDescription("Task of the test model.");
public static final ConfigOption<String> ENDPOINT =
ConfigOptions.key("endpoint")
.stringType()
.noDefaultValue()
.withDescription("Endpoint of the test model.");
public static final ConfigOption<Integer> MODEL_VERSION =
ConfigOptions.key("version")
.intType()
.defaultValue(1)
.withDescription("Version of the test model.");
@Override
public ModelProvider createModelProvider(Context context) {
final ModelProviderFactoryHelper helper =
FactoryUtil.createModelProviderFactoryHelper(this, context);
helper.validate();
return new TestModelProviderMock(context.getCatalogModel());
}
@Override
public String factoryIdentifier() {
return IDENTIFIER;
}
@Override
public Set<ConfigOption<?>> requiredOptions() {
final Set<ConfigOption<?>> options = new HashSet<>();
options.add(TASK);
options.add(ENDPOINT);
return options;
}
@Override
public Set<ConfigOption<?>> optionalOptions() {
Set<ConfigOption<?>> options = new HashSet<>();
options.add(MODEL_VERSION);
return options;
}
/** Test implementation of {@link ModelProvider} for testing purposes. */
public static | TestModelProviderFactory |
java | micronaut-projects__micronaut-core | inject-java-test/src/test/groovy/io/micronaut/inject/visitor/beans/builder/TestBuildMe5.java | {
"start": 60,
"end": 456
} | class ____ {
private final String name;
private final int age;
private TestBuildMe5(String name, int age) {
this.name = name;
this.age = age;
}
public String getName() {
return name;
}
public int getAge() {
return age;
}
public static Builder builder() {
return new Builder();
}
public static final | TestBuildMe5 |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java | {
"start": 4162,
"end": 4728
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory val;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) {
this.source = source;
this.val = val;
}
@Override
public SqrtIntEvaluator get(DriverContext context) {
return new SqrtIntEvaluator(source, val.get(context), context);
}
@Override
public String toString() {
return "SqrtIntEvaluator[" + "val=" + val + "]";
}
}
}
| Factory |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/resourcemanager/slotmanager/TaskManagerSlotInformation.java | {
"start": 1347,
"end": 2127
} | interface ____ {
SlotID getSlotId();
@Nullable
AllocationID getAllocationId();
@Nullable
JobID getJobId();
SlotState getState();
InstanceID getInstanceId();
TaskExecutorConnection getTaskManagerConnection();
/**
* Returns true if the required {@link ResourceProfile} can be fulfilled by this slot.
*
* @param required resources
* @return true if the this slot can fulfill the resource requirements
*/
default boolean isMatchingRequirement(ResourceProfile required) {
return getResourceProfile().isMatching(required);
}
/**
* Get resource profile of this slot.
*
* @return resource profile of this slot
*/
ResourceProfile getResourceProfile();
}
| TaskManagerSlotInformation |
java | elastic__elasticsearch | x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterUsageTelemetryIT.java | {
"start": 1645,
"end": 14949
} | class ____ extends AbstractCrossClusterUsageTelemetryIT {
private static final String INDEX_WITH_RUNTIME_MAPPING = "blocking";
@Override
protected Collection<Class<? extends Plugin>> nodePlugins(String clusterAlias) {
List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins(clusterAlias));
plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class);
plugins.add(CrossClusterQueryIT.InternalExchangePlugin.class);
plugins.add(SimplePauseFieldPlugin.class);
plugins.add(EsqlAsyncActionIT.LocalStateEsqlAsync.class); // allows the async_search DELETE action
return plugins;
}
@Before
public void resetPlugin() {
SimplePauseFieldPlugin.resetPlugin();
}
public void assertPerClusterCount(CCSTelemetrySnapshot.PerClusterCCSTelemetry perCluster, long count) {
assertThat(perCluster.getCount(), equalTo(count));
assertThat(perCluster.getSkippedCount(), equalTo(0L));
assertThat(perCluster.getTook().count(), equalTo(count));
}
public void testLocalRemote() throws Exception {
setupClusters();
var telemetry = getTelemetryFromQuery("from logs-*,c*:logs-* | stats sum (v)", "kibana");
assertThat(telemetry.getTotalCount(), equalTo(1L));
assertThat(telemetry.getSuccessCount(), equalTo(1L));
assertThat(telemetry.getFailureReasons().size(), equalTo(0));
assertThat(telemetry.getTook().count(), equalTo(1L));
assertThat(telemetry.getTookMrtFalse().count(), equalTo(0L));
assertThat(telemetry.getTookMrtTrue().count(), equalTo(0L));
assertThat(telemetry.getRemotesPerSearchAvg(), equalTo(2.0));
assertThat(telemetry.getRemotesPerSearchMax(), equalTo(2L));
assertThat(telemetry.getSearchCountWithSkippedRemotes(), equalTo(0L));
assertThat(telemetry.getClientCounts().size(), equalTo(1));
assertThat(telemetry.getClientCounts().get("kibana"), equalTo(1L));
assertThat(telemetry.getFeatureCounts().get(ASYNC_FEATURE), equalTo(null));
var perCluster = telemetry.getByRemoteCluster();
assertThat(perCluster.size(), equalTo(3));
for (String clusterAlias : remoteClusterAlias()) {
assertPerClusterCount(perCluster.get(clusterAlias), 1L);
}
assertPerClusterCount(perCluster.get(LOCAL_CLUSTER), 1L);
telemetry = getTelemetryFromQuery("from logs-*,c*:logs-* | stats sum (v)", "kibana");
assertThat(telemetry.getTotalCount(), equalTo(2L));
assertThat(telemetry.getClientCounts().get("kibana"), equalTo(2L));
perCluster = telemetry.getByRemoteCluster();
assertThat(perCluster.size(), equalTo(3));
for (String clusterAlias : remoteClusterAlias()) {
assertPerClusterCount(perCluster.get(clusterAlias), 2L);
}
assertPerClusterCount(perCluster.get(LOCAL_CLUSTER), 2L);
}
public void testLocalOnly() throws Exception {
setupClusters();
// Should not produce any usage info since it's a local search
var telemetry = getTelemetryFromQuery("from logs-* | stats sum (v)", "kibana");
assertThat(telemetry.getTotalCount(), equalTo(0L));
assertThat(telemetry.getSuccessCount(), equalTo(0L));
assertThat(telemetry.getByRemoteCluster().size(), equalTo(0));
}
@SkipUnavailableRule.NotSkipped(aliases = REMOTE1)
public void testFailed() throws Exception {
setupClusters();
// Should not produce any usage info since it's a local search
var telemetry = getTelemetryFromFailedQuery("from no_such_index | stats sum (v)");
assertThat(telemetry.getTotalCount(), equalTo(0L));
assertThat(telemetry.getSuccessCount(), equalTo(0L));
assertThat(telemetry.getByRemoteCluster().size(), equalTo(0));
// Errors from both remotes
telemetry = getTelemetryFromFailedQuery("from logs-*,c*:no_such_index | stats sum (v)");
assertThat(telemetry.getTotalCount(), equalTo(1L));
assertThat(telemetry.getSuccessCount(), equalTo(0L));
assertThat(telemetry.getByRemoteCluster().size(), equalTo(1));
assertThat(telemetry.getRemotesPerSearchAvg(), equalTo(2.0));
assertThat(telemetry.getRemotesPerSearchMax(), equalTo(2L));
assertThat(telemetry.getSearchCountWithSkippedRemotes(), equalTo(1L));
Map<String, Long> expectedFailure = Map.of(CCSUsageTelemetry.Result.NOT_FOUND.getName(), 1L);
assertThat(telemetry.getFailureReasons(), equalTo(expectedFailure));
// this is only for cluster-a now
telemetry = getTelemetryFromFailedQuery("from logs-*,cluster-a:no_such_index | stats sum (v)");
assertThat(telemetry.getTotalCount(), equalTo(2L));
assertThat(telemetry.getSuccessCount(), equalTo(0L));
assertThat(telemetry.getByRemoteCluster().size(), equalTo(1));
assertThat(telemetry.getRemotesPerSearchAvg(), equalTo(2.0));
assertThat(telemetry.getRemotesPerSearchMax(), equalTo(2L));
assertThat(telemetry.getSearchCountWithSkippedRemotes(), equalTo(1L));
expectedFailure = Map.of(CCSUsageTelemetry.Result.NOT_FOUND.getName(), 2L);
assertThat(telemetry.getFailureReasons(), equalTo(expectedFailure));
}
public void testRemoteOnly() throws Exception {
setupClusters();
var telemetry = getTelemetryFromQuery("from c*:logs-* | stats sum (v)", "kibana");
assertThat(telemetry.getTotalCount(), equalTo(1L));
assertThat(telemetry.getSuccessCount(), equalTo(1L));
assertThat(telemetry.getFailureReasons().size(), equalTo(0));
assertThat(telemetry.getTook().count(), equalTo(1L));
assertThat(telemetry.getTookMrtFalse().count(), equalTo(0L));
assertThat(telemetry.getTookMrtTrue().count(), equalTo(0L));
assertThat(telemetry.getRemotesPerSearchAvg(), equalTo(2.0));
assertThat(telemetry.getRemotesPerSearchMax(), equalTo(2L));
assertThat(telemetry.getSearchCountWithSkippedRemotes(), equalTo(0L));
assertThat(telemetry.getClientCounts().size(), equalTo(1));
assertThat(telemetry.getClientCounts().get("kibana"), equalTo(1L));
assertThat(telemetry.getFeatureCounts().get(ASYNC_FEATURE), equalTo(null));
var perCluster = telemetry.getByRemoteCluster();
assertThat(perCluster.size(), equalTo(2));
for (String clusterAlias : remoteClusterAlias()) {
assertPerClusterCount(perCluster.get(clusterAlias), 1L);
}
assertThat(telemetry.getByRemoteCluster().size(), equalTo(2));
}
public void testAsync() throws Exception {
setupClusters();
var telemetry = getTelemetryFromAsyncQuery("from logs-*,c*:logs-* | stats sum (v)");
assertThat(telemetry.getTotalCount(), equalTo(1L));
assertThat(telemetry.getSuccessCount(), equalTo(1L));
assertThat(telemetry.getFailureReasons().size(), equalTo(0));
assertThat(telemetry.getTook().count(), equalTo(1L));
assertThat(telemetry.getTookMrtFalse().count(), equalTo(0L));
assertThat(telemetry.getTookMrtTrue().count(), equalTo(0L));
assertThat(telemetry.getRemotesPerSearchAvg(), equalTo(2.0));
assertThat(telemetry.getRemotesPerSearchMax(), equalTo(2L));
assertThat(telemetry.getSearchCountWithSkippedRemotes(), equalTo(0L));
assertThat(telemetry.getClientCounts().size(), equalTo(0));
assertThat(telemetry.getFeatureCounts().get(ASYNC_FEATURE), equalTo(1L));
var perCluster = telemetry.getByRemoteCluster();
assertThat(perCluster.size(), equalTo(3));
for (String clusterAlias : remoteClusterAlias()) {
assertPerClusterCount(perCluster.get(clusterAlias), 1L);
}
assertPerClusterCount(perCluster.get(LOCAL_CLUSTER), 1L);
// do it again
telemetry = getTelemetryFromAsyncQuery("from logs-*,c*:logs-* | stats sum (v)");
assertThat(telemetry.getTotalCount(), equalTo(2L));
assertThat(telemetry.getFeatureCounts().get(ASYNC_FEATURE), equalTo(2L));
perCluster = telemetry.getByRemoteCluster();
assertThat(perCluster.size(), equalTo(3));
for (String clusterAlias : remoteClusterAlias()) {
assertPerClusterCount(perCluster.get(clusterAlias), 2L);
}
assertPerClusterCount(perCluster.get(LOCAL_CLUSTER), 2L);
}
public void testAsyncStop() throws Exception {
setupClusters();
populateRuntimeIndex(REMOTE1, "pause", INDEX_WITH_RUNTIME_MAPPING);
populateRuntimeIndex(REMOTE2, "pause", INDEX_WITH_RUNTIME_MAPPING);
EsqlQueryRequest request = asyncEsqlQueryRequest("from logs-*,c*:logs-*,c*:blocking | eval v1=coalesce(const, v) | stats sum (v1)")
.pragmas(AbstractEsqlIntegTestCase.randomPragmas())
.columnar(randomBoolean())
.includeCCSMetadata(randomBoolean());
AtomicReference<String> asyncExecutionId = new AtomicReference<>();
assertResponse(cluster(LOCAL_CLUSTER).client(queryNode).execute(EsqlQueryAction.INSTANCE, request), resp -> {
if (resp.isRunning()) {
assertNotNull("async execution id is null", resp.asyncExecutionId());
asyncExecutionId.set(resp.asyncExecutionId().get());
}
});
SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
AsyncStopRequest stopRequest = new AsyncStopRequest(asyncExecutionId.get());
ActionFuture<EsqlQueryResponse> actionFuture = cluster(LOCAL_CLUSTER).client(queryNode)
.execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
// Release the pause
SimplePauseFieldPlugin.allowEmitting.countDown();
try (EsqlQueryResponse resp = actionFuture.actionGet(30, TimeUnit.SECONDS)) {
assertTrue(resp.getExecutionInfo().isPartial());
CCSTelemetrySnapshot telemetry = getTelemetrySnapshot(queryNode);
assertThat(telemetry.getTotalCount(), equalTo(1L));
assertThat(telemetry.getSuccessCount(), equalTo(1L));
assertThat(telemetry.getFailureReasons().size(), equalTo(0));
assertThat(telemetry.getTook().count(), equalTo(1L));
assertThat(telemetry.getTookMrtFalse().count(), equalTo(0L));
assertThat(telemetry.getTookMrtTrue().count(), equalTo(0L));
assertThat(telemetry.getRemotesPerSearchAvg(), equalTo(2.0));
assertThat(telemetry.getRemotesPerSearchMax(), equalTo(2L));
assertThat(telemetry.getSearchCountWithSkippedRemotes(), equalTo(0L));
assertThat(telemetry.getClientCounts().size(), equalTo(0));
assertThat(telemetry.getFeatureCounts().get(ASYNC_FEATURE), equalTo(1L));
var perCluster = telemetry.getByRemoteCluster();
assertThat(perCluster.size(), equalTo(3));
for (String clusterAlias : remoteClusterAlias()) {
assertPerClusterCount(perCluster.get(clusterAlias), 1L);
}
assertPerClusterCount(perCluster.get(LOCAL_CLUSTER), 1L);
} finally {
// Clean up
assertAcked(deleteAsyncId(client(), asyncExecutionId.get()));
}
}
public void testNoSuchCluster() throws Exception {
setupClusters();
// This is not recognized as a cross-cluster search
var telemetry = getTelemetryFromFailedQuery("from c*:logs*, nocluster:nomatch | stats sum (v)");
assertThat(telemetry.getTotalCount(), equalTo(0L));
assertThat(telemetry.getSuccessCount(), equalTo(0L));
assertThat(telemetry.getByRemoteCluster().size(), equalTo(0));
}
@SkipUnavailableRule.NotSkipped(aliases = REMOTE1)
public void testDisconnect() throws Exception {
setupClusters();
// Disconnect remote1
cluster(REMOTE1).close();
var telemetry = getTelemetryFromFailedQuery("from logs-*,cluster-a:logs-* | stats sum (v)");
assertThat(telemetry.getTotalCount(), equalTo(1L));
assertThat(telemetry.getSuccessCount(), equalTo(0L));
Map<String, Long> expectedFailure = Map.of(CCSUsageTelemetry.Result.REMOTES_UNAVAILABLE.getName(), 1L);
assertThat(telemetry.getFailureReasons(), equalTo(expectedFailure));
}
void populateRuntimeIndex(String clusterAlias, String langName, String indexName) throws IOException {
XContentBuilder mapping = JsonXContent.contentBuilder().startObject();
mapping.startObject("runtime");
{
mapping.startObject("const");
{
mapping.field("type", "long");
mapping.startObject("script").field("source", "").field("lang", langName).endObject();
}
mapping.endObject();
}
mapping.endObject();
mapping.endObject();
client(clusterAlias).admin().indices().prepareCreate(indexName).setMapping(mapping).get();
BulkRequestBuilder bulk = client(clusterAlias).prepareBulk(indexName).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (int i = 0; i < 10; i++) {
bulk.add(new IndexRequest().source("foo", i));
}
bulk.get();
}
}
| CrossClusterUsageTelemetryIT |
java | apache__kafka | raft/src/test/java/org/apache/kafka/raft/ElectionStateTest.java | {
"start": 1326,
"end": 4439
} | class ____ {
@Test
void testVotedCandidateWithoutVotedId() {
ElectionState electionState = ElectionState.withUnknownLeader(5, Set.of());
assertFalse(electionState.isVotedCandidate(ReplicaKey.of(1, ReplicaKey.NO_DIRECTORY_ID)));
}
@Test
void testVotedCandidateWithoutVotedDirectoryId() {
ElectionState electionState = ElectionState.withVotedCandidate(
5,
ReplicaKey.of(1, ReplicaKey.NO_DIRECTORY_ID),
Set.of()
);
assertTrue(electionState.isVotedCandidate(ReplicaKey.of(1, ReplicaKey.NO_DIRECTORY_ID)));
assertTrue(
electionState.isVotedCandidate(ReplicaKey.of(1, Uuid.randomUuid()))
);
}
@Test
void testVotedCandidateWithVotedDirectoryId() {
ReplicaKey votedKey = ReplicaKey.of(1, Uuid.randomUuid());
ElectionState electionState = ElectionState.withVotedCandidate(
5,
votedKey,
Set.of()
);
assertFalse(electionState.isVotedCandidate(ReplicaKey.of(1, ReplicaKey.NO_DIRECTORY_ID)));
assertTrue(electionState.isVotedCandidate(votedKey));
}
@ParameterizedTest
@ValueSource(shorts = {0, 1})
void testQuorumStateDataRoundTrip(short version) {
ReplicaKey votedKey = ReplicaKey.of(1, Uuid.randomUuid());
List<ElectionState> electionStates = List.of(
ElectionState.withUnknownLeader(5, Set.of(1, 2, 3)),
ElectionState.withElectedLeader(5, 1, Optional.empty(), Set.of(1, 2, 3)),
ElectionState.withVotedCandidate(5, votedKey, Set.of(1, 2, 3)),
ElectionState.withElectedLeader(5, 1, Optional.of(votedKey), Set.of(1, 2, 3))
);
final List<ElectionState> expected;
if (version == 0) {
expected = List.of(
ElectionState.withUnknownLeader(5, Set.of(1, 2, 3)),
ElectionState.withElectedLeader(5, 1, Optional.empty(), Set.of(1, 2, 3)),
ElectionState.withVotedCandidate(
5,
ReplicaKey.of(1, ReplicaKey.NO_DIRECTORY_ID),
Set.of(1, 2, 3)
),
ElectionState.withElectedLeader(
5,
1,
Optional.of(ReplicaKey.of(1, ReplicaKey.NO_DIRECTORY_ID)),
Set.of(1, 2, 3)
)
);
} else {
expected = List.of(
ElectionState.withUnknownLeader(5, Set.of()),
ElectionState.withElectedLeader(5, 1, Optional.empty(), Set.of()),
ElectionState.withVotedCandidate(5, votedKey, Set.of()),
ElectionState.withElectedLeader(5, 1, Optional.of(votedKey), Set.of())
);
}
int expectedId = 0;
for (ElectionState electionState : electionStates) {
QuorumStateData data = electionState.toQuorumStateData(version);
assertEquals(expected.get(expectedId), ElectionState.fromQuorumStateData(data));
expectedId++;
}
}
}
| ElectionStateTest |
java | google__error-prone | check_api/src/test/java/com/google/errorprone/util/ASTHelpersTest.java | {
"start": 51602,
"end": 52038
} | class ____ implements AutoCloseable {
@Override
// BUG: Diagnostic contains:
public abstract void close() throws InterruptedException;
}
}
""")
.doTest();
}
@Test
public void getThrownExceptions_tryWithResourcesVariable() {
replaceExceptionHelper
.addSourceLines(
"Test.java",
"""
abstract | C |
java | apache__maven | api/maven-api-core/src/main/java/org/apache/maven/api/services/VersionResolverResult.java | {
"start": 1128,
"end": 1350
} | interface ____ extends Result<VersionResolverRequest> {
@Nonnull
List<Exception> getExceptions();
@Nonnull
Version getVersion();
@Nonnull
Optional<Repository> getRepository();
}
| VersionResolverResult |
java | spring-projects__spring-boot | configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationsample/lombok/LombokSimpleDataProperties.java | {
"start": 1007,
"end": 1336
} | class ____ {
private final String id = "super-id";
/**
* Name description.
*/
private String name;
private String description;
private Integer counter;
@Deprecated
private Integer number = 0;
private final List<String> items = new ArrayList<>();
private final String ignored = "foo";
}
| LombokSimpleDataProperties |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java | {
"start": 5302,
"end": 6109
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory val;
private final EvalOperator.ExpressionEvaluator.Factory decimals;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val,
EvalOperator.ExpressionEvaluator.Factory decimals) {
this.source = source;
this.val = val;
this.decimals = decimals;
}
@Override
public RoundUnsignedLongEvaluator get(DriverContext context) {
return new RoundUnsignedLongEvaluator(source, val.get(context), decimals.get(context), context);
}
@Override
public String toString() {
return "RoundUnsignedLongEvaluator[" + "val=" + val + ", decimals=" + decimals + "]";
}
}
}
| Factory |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/TypeInferenceExtractorTest.java | {
"start": 115517,
"end": 115924
} | class ____ extends ScalarFunction {
@FunctionHint(
arguments = {
@ArgumentHint(type = @DataTypeHint("STRING"), name = "in1"),
@ArgumentHint(type = @DataTypeHint("INTEGER"))
})
public String eval(String f1, Integer f2) {
return "";
}
}
private static | ArgumentHintMissingPartialNameScalarFunction |
java | spring-projects__spring-boot | module/spring-boot-integration/src/main/java/org/springframework/boot/integration/autoconfigure/IntegrationAutoConfiguration.java | {
"start": 14111,
"end": 14273
} | class ____ {
}
}
@Configuration(proxyBeanMethods = false)
@ConditionalOnClass(TcpServerTransport.class)
protected static | RSocketOutboundGatewayAvailable |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/event/spi/RefreshContext.java | {
"start": 483,
"end": 693
} | interface ____ {
boolean add(Object entity);
default boolean isEmpty() {
return false;
}
static RefreshContext create() {
// use extension to avoid creating
// a useless wrapper object
| RefreshContext |
java | quarkusio__quarkus | integration-tests/hibernate-orm-envers/src/test/java/io/quarkus/it/envers/JPAEnversTestInGraalITCase.java | {
"start": 116,
"end": 176
} | class ____ extends JPAEnversTest {
}
| JPAEnversTestInGraalITCase |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/streaming/api/operators/co/CoBroadcastWithNonKeyedOperatorTest.java | {
"start": 3662,
"end": 6430
} | class ____
extends BroadcastProcessFunction<String, Integer, String> {
private static final long serialVersionUID = 7496674620398203933L;
@Override
public void processBroadcastElement(Integer value, Context ctx, Collector<String> out)
throws Exception {
ctx.getBroadcastState(STATE_DESCRIPTOR).put("key." + value, value);
ctx.getBroadcastState(STATE_DESCRIPTOR_A).put(value, "value." + value);
}
@Override
public void processElement(String value, ReadOnlyContext ctx, Collector<String> out)
throws Exception {
for (Map.Entry<String, Integer> entry :
ctx.getBroadcastState(STATE_DESCRIPTOR).immutableEntries()) {
out.collect(value + ":" + entry.getKey() + "->" + entry.getValue());
}
for (Map.Entry<Integer, String> entry :
ctx.getBroadcastState(STATE_DESCRIPTOR_A).immutableEntries()) {
out.collect(value + ":" + entry.getKey() + "->" + entry.getValue());
}
}
}
@Test
void testBroadcastState() throws Exception {
final Set<String> keysToRegister = new HashSet<>();
keysToRegister.add("test1");
keysToRegister.add("test2");
keysToRegister.add("test3");
try (TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness =
getInitializedTestHarness(new TestFunction(keysToRegister), STATE_DESCRIPTOR)) {
testHarness.processWatermark1(new Watermark(10L));
testHarness.processWatermark2(new Watermark(10L));
testHarness.processElement2(new StreamRecord<>(5, 12L));
testHarness.processWatermark1(new Watermark(40L));
testHarness.processWatermark2(new Watermark(40L));
testHarness.processElement1(new StreamRecord<>("6", 13L));
testHarness.processElement1(new StreamRecord<>("6", 15L));
testHarness.processWatermark1(new Watermark(50L));
testHarness.processWatermark2(new Watermark(50L));
Queue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
expectedOutput.add(new Watermark(10L));
expectedOutput.add(new StreamRecord<>("5WM:10 TS:12", 12L));
expectedOutput.add(new Watermark(40L));
expectedOutput.add(new StreamRecord<>("6WM:40 TS:13", 13L));
expectedOutput.add(new StreamRecord<>("6WM:40 TS:15", 15L));
expectedOutput.add(new Watermark(50L));
TestHarnessUtil.assertOutputEquals(
"Output was not correct.", expectedOutput, testHarness.getOutput());
}
}
private static | FunctionWithMultipleStates |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/LineRecordReader.java | {
"start": 3031,
"end": 11112
} | class ____ extends org.apache.hadoop.util.LineReader {
LineReader(InputStream in) {
super(in);
}
LineReader(InputStream in, int bufferSize) {
super(in, bufferSize);
}
public LineReader(InputStream in, Configuration conf) throws IOException {
super(in, conf);
}
LineReader(InputStream in, byte[] recordDelimiter) {
super(in, recordDelimiter);
}
LineReader(InputStream in, int bufferSize, byte[] recordDelimiter) {
super(in, bufferSize, recordDelimiter);
}
public LineReader(InputStream in, Configuration conf,
byte[] recordDelimiter) throws IOException {
super(in, conf, recordDelimiter);
}
}
public LineRecordReader(Configuration job,
FileSplit split) throws IOException {
this(job, split, null);
}
public LineRecordReader(Configuration job, FileSplit split,
byte[] recordDelimiter) throws IOException {
this.maxLineLength = job.getInt(org.apache.hadoop.mapreduce.lib.input.
LineRecordReader.MAX_LINE_LENGTH, Integer.MAX_VALUE);
start = split.getStart();
end = start + split.getLength();
final Path file = split.getPath();
compressionCodecs = new CompressionCodecFactory(job);
codec = compressionCodecs.getCodec(file);
// open the file and seek to the start of the split
final FutureDataInputStreamBuilder builder =
file.getFileSystem(job).openFile(file);
// the start and end of the split may be used to build
// an input strategy.
builder.optLong(FS_OPTION_OPENFILE_SPLIT_START, start)
.optLong(FS_OPTION_OPENFILE_SPLIT_END, end);
FutureIO.propagateOptions(builder, job,
MRJobConfig.INPUT_FILE_OPTION_PREFIX,
MRJobConfig.INPUT_FILE_MANDATORY_PREFIX);
fileIn = FutureIO.awaitFuture(builder.build());
if (isCompressedInput()) {
decompressor = CodecPool.getDecompressor(codec);
if (codec instanceof SplittableCompressionCodec) {
final SplitCompressionInputStream cIn =
((SplittableCompressionCodec)codec).createInputStream(
fileIn, decompressor, start, end,
SplittableCompressionCodec.READ_MODE.BYBLOCK);
in = new CompressedSplitLineReader(cIn, job, recordDelimiter);
start = cIn.getAdjustedStart();
end = cIn.getAdjustedEnd();
filePosition = cIn; // take pos from compressed stream
} else {
if (start != 0) {
// So we have a split that is part of a file stored using
// a Compression codec that cannot be split.
throw new IOException("Cannot seek in " +
codec.getClass().getSimpleName() + " compressed stream");
}
in = new SplitLineReader(codec.createInputStream(fileIn,
decompressor), job, recordDelimiter);
filePosition = fileIn;
}
} else {
fileIn.seek(start);
in = new UncompressedSplitLineReader(
fileIn, job, recordDelimiter, split.getLength());
filePosition = fileIn;
}
// If this is not the first split, we always throw away first record
// because we always (except the last split) read one extra line in
// next() method.
if (start != 0) {
try {
start += in.readLine(new Text(), 0, maxBytesToConsume(start));
} catch (Exception e) {
close();
throw e;
}
}
this.pos = start;
}
public LineRecordReader(InputStream in, long offset, long endOffset,
int maxLineLength) {
this(in, offset, endOffset, maxLineLength, null);
}
public LineRecordReader(InputStream in, long offset, long endOffset,
int maxLineLength, byte[] recordDelimiter) {
this.maxLineLength = maxLineLength;
this.in = new SplitLineReader(in, recordDelimiter);
this.start = offset;
this.pos = offset;
this.end = endOffset;
filePosition = null;
}
public LineRecordReader(InputStream in, long offset, long endOffset,
Configuration job)
throws IOException{
this(in, offset, endOffset, job, null);
}
public LineRecordReader(InputStream in, long offset, long endOffset,
Configuration job, byte[] recordDelimiter)
throws IOException{
this.maxLineLength = job.getInt(org.apache.hadoop.mapreduce.lib.input.
LineRecordReader.MAX_LINE_LENGTH, Integer.MAX_VALUE);
this.in = new SplitLineReader(in, job, recordDelimiter);
this.start = offset;
this.pos = offset;
this.end = endOffset;
filePosition = null;
}
public LongWritable createKey() {
return new LongWritable();
}
public Text createValue() {
return new Text();
}
private boolean isCompressedInput() {
return (codec != null);
}
private int maxBytesToConsume(long pos) {
return isCompressedInput()
? Integer.MAX_VALUE
: (int) Math.max(Math.min(Integer.MAX_VALUE, end - pos), maxLineLength);
}
private long getFilePosition() throws IOException {
long retVal;
if (isCompressedInput() && null != filePosition) {
retVal = filePosition.getPos();
} else {
retVal = pos;
}
return retVal;
}
private int skipUtfByteOrderMark(Text value) throws IOException {
// Strip BOM(Byte Order Mark)
// Text only support UTF-8, we only need to check UTF-8 BOM
// (0xEF,0xBB,0xBF) at the start of the text stream.
int newMaxLineLength = (int) Math.min(3L + (long) maxLineLength,
Integer.MAX_VALUE);
int newSize = in.readLine(value, newMaxLineLength, maxBytesToConsume(pos));
// Even we read 3 extra bytes for the first line,
// we won't alter existing behavior (no backwards incompat issue).
// Because the newSize is less than maxLineLength and
// the number of bytes copied to Text is always no more than newSize.
// If the return size from readLine is not less than maxLineLength,
// we will discard the current line and read the next line.
pos += newSize;
int textLength = value.getLength();
byte[] textBytes = value.getBytes();
if ((textLength >= 3) && (textBytes[0] == (byte)0xEF) &&
(textBytes[1] == (byte)0xBB) && (textBytes[2] == (byte)0xBF)) {
// find UTF-8 BOM, strip it.
LOG.info("Found UTF-8 BOM and skipped it");
textLength -= 3;
newSize -= 3;
if (textLength > 0) {
// It may work to use the same buffer and not do the copyBytes
textBytes = value.copyBytes();
value.set(textBytes, 3, textLength);
} else {
value.clear();
}
}
return newSize;
}
/** Read a line. */
public synchronized boolean next(LongWritable key, Text value)
throws IOException {
// We always read one extra line, which lies outside the upper
// split limit i.e. (end - 1)
while (getFilePosition() <= end || in.needAdditionalRecordAfterSplit()) {
key.set(pos);
int newSize = 0;
if (pos == 0) {
newSize = skipUtfByteOrderMark(value);
} else {
newSize = in.readLine(value, maxLineLength, maxBytesToConsume(pos));
pos += newSize;
}
if (newSize == 0) {
return false;
}
if (newSize < maxLineLength) {
return true;
}
// line too long. try again
LOG.info("Skipped line of size " + newSize + " at pos " + (pos - newSize));
}
return false;
}
/**
* Get the progress within the split
*/
public synchronized float getProgress() throws IOException {
if (start == end) {
return 0.0f;
} else {
return Math.min(1.0f, (getFilePosition() - start) / (float)(end - start));
}
}
public synchronized long getPos() throws IOException {
return pos;
}
public synchronized void close() throws IOException {
try {
if (in != null) {
in.close();
} else if (fileIn != null) {
fileIn.close();
}
} finally {
if (decompressor != null) {
CodecPool.returnDecompressor(decompressor);
decompressor = null;
}
}
}
}
| LineReader |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/core/BeanFactoryMessageChannelDestinationResolver.java | {
"start": 1224,
"end": 2577
} | class ____
implements DestinationResolver<MessageChannel>, BeanFactoryAware {
private @Nullable BeanFactory beanFactory;
/**
* A default constructor that can be used when the resolver itself is configured
* as a Spring bean and will have the {@code BeanFactory} injected as a result
* of ing having implemented {@link BeanFactoryAware}.
*/
public BeanFactoryMessageChannelDestinationResolver() {
}
/**
* A constructor that accepts a {@link BeanFactory} useful if instantiating this
* resolver manually rather than having it defined as a Spring-managed bean.
* @param beanFactory the bean factory to perform lookups against
*/
public BeanFactoryMessageChannelDestinationResolver(BeanFactory beanFactory) {
Assert.notNull(beanFactory, "beanFactory must not be null");
this.beanFactory = beanFactory;
}
@Override
public void setBeanFactory(BeanFactory beanFactory) {
this.beanFactory = beanFactory;
}
@Override
public MessageChannel resolveDestination(String name) {
Assert.state(this.beanFactory != null, "No BeanFactory configured");
try {
return this.beanFactory.getBean(name, MessageChannel.class);
}
catch (BeansException ex) {
throw new DestinationResolutionException(
"Failed to find MessageChannel bean with name '" + name + "'", ex);
}
}
}
| BeanFactoryMessageChannelDestinationResolver |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/resume/ResumeStrategyHelper.java | {
"start": 1187,
"end": 1234
} | class ____ the resume strategy
*/
public final | for |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/framework/autoproxy/AutoProxyCreatorTests.java | {
"start": 19845,
"end": 19915
} | class ____ {
}
@SuppressWarnings("serial")
public static | NoInterfaces |
java | playframework__playframework | core/play/src/main/java/play/inject/BindingTarget.java | {
"start": 226,
"end": 309
} | class ____ the four possible types of targets.
*
* <p>See the {@link Module} | captures |
java | google__guava | android/guava/src/com/google/common/collect/ImmutableAsList.java | {
"start": 1180,
"end": 1917
} | class ____<E> extends ImmutableList<E> {
abstract ImmutableCollection<E> delegateCollection();
@Override
public boolean contains(@Nullable Object target) {
// The collection's contains() is at least as fast as ImmutableList's
// and is often faster.
return delegateCollection().contains(target);
}
@Override
public int size() {
return delegateCollection().size();
}
@Override
public boolean isEmpty() {
return delegateCollection().isEmpty();
}
@Override
boolean isPartialView() {
return delegateCollection().isPartialView();
}
/** Serialized form that leads to the same performance as the original list. */
@GwtIncompatible
@J2ktIncompatible
private static final | ImmutableAsList |
java | spring-projects__spring-boot | module/spring-boot-web-server/src/main/java/org/springframework/boot/web/server/context/MissingWebServerFactoryBeanFailureAnalyzer.java | {
"start": 1182,
"end": 1904
} | class ____ extends AbstractFailureAnalyzer<MissingWebServerFactoryBeanException> {
@Override
protected FailureAnalysis analyze(Throwable rootFailure, MissingWebServerFactoryBeanException cause) {
Class<?> beanType = cause.getBeanType();
Assert.state(beanType != null, "'beanType' must not be null");
return new FailureAnalysis(
"Web application could not be started as there was no " + beanType.getName()
+ " bean defined in the context.",
"Check your application's dependencies for a supported "
+ cause.getWebApplicationType().name().toLowerCase(Locale.ENGLISH) + " web server.\n"
+ "Check the configured web application type.",
cause);
}
}
| MissingWebServerFactoryBeanFailureAnalyzer |
java | google__guava | android/guava/src/com/google/common/collect/ForwardingSortedMultiset.java | {
"start": 4337,
"end": 8797
} | class ____ extends DescendingMultiset<E> {
/** Constructor for use by subclasses. */
public StandardDescendingMultiset() {}
@Override
SortedMultiset<E> forwardMultiset() {
return ForwardingSortedMultiset.this;
}
}
@Override
public @Nullable Entry<E> firstEntry() {
return delegate().firstEntry();
}
/**
* A sensible definition of {@link #firstEntry()} in terms of {@code entrySet().iterator()}.
*
* <p>If you override {@link #entrySet()}, you may wish to override {@link #firstEntry()} to
* forward to this implementation.
*/
protected @Nullable Entry<E> standardFirstEntry() {
Iterator<Entry<E>> entryIterator = entrySet().iterator();
if (!entryIterator.hasNext()) {
return null;
}
Entry<E> entry = entryIterator.next();
return Multisets.immutableEntry(entry.getElement(), entry.getCount());
}
@Override
public @Nullable Entry<E> lastEntry() {
return delegate().lastEntry();
}
/**
* A sensible definition of {@link #lastEntry()} in terms of {@code
* descendingMultiset().entrySet().iterator()}.
*
* <p>If you override {@link #descendingMultiset} or {@link #entrySet()}, you may wish to override
* {@link #firstEntry()} to forward to this implementation.
*/
protected @Nullable Entry<E> standardLastEntry() {
Iterator<Entry<E>> entryIterator = descendingMultiset().entrySet().iterator();
if (!entryIterator.hasNext()) {
return null;
}
Entry<E> entry = entryIterator.next();
return Multisets.immutableEntry(entry.getElement(), entry.getCount());
}
@Override
public @Nullable Entry<E> pollFirstEntry() {
return delegate().pollFirstEntry();
}
/**
* A sensible definition of {@link #pollFirstEntry()} in terms of {@code entrySet().iterator()}.
*
* <p>If you override {@link #entrySet()}, you may wish to override {@link #pollFirstEntry()} to
* forward to this implementation.
*/
protected @Nullable Entry<E> standardPollFirstEntry() {
Iterator<Entry<E>> entryIterator = entrySet().iterator();
if (!entryIterator.hasNext()) {
return null;
}
Entry<E> entry = entryIterator.next();
entry = Multisets.immutableEntry(entry.getElement(), entry.getCount());
entryIterator.remove();
return entry;
}
@Override
public @Nullable Entry<E> pollLastEntry() {
return delegate().pollLastEntry();
}
/**
* A sensible definition of {@link #pollLastEntry()} in terms of {@code
* descendingMultiset().entrySet().iterator()}.
*
* <p>If you override {@link #descendingMultiset()} or {@link #entrySet()}, you may wish to
* override {@link #pollLastEntry()} to forward to this implementation.
*/
protected @Nullable Entry<E> standardPollLastEntry() {
Iterator<Entry<E>> entryIterator = descendingMultiset().entrySet().iterator();
if (!entryIterator.hasNext()) {
return null;
}
Entry<E> entry = entryIterator.next();
entry = Multisets.immutableEntry(entry.getElement(), entry.getCount());
entryIterator.remove();
return entry;
}
@Override
public SortedMultiset<E> headMultiset(@ParametricNullness E upperBound, BoundType boundType) {
return delegate().headMultiset(upperBound, boundType);
}
@Override
public SortedMultiset<E> subMultiset(
@ParametricNullness E lowerBound,
BoundType lowerBoundType,
@ParametricNullness E upperBound,
BoundType upperBoundType) {
return delegate().subMultiset(lowerBound, lowerBoundType, upperBound, upperBoundType);
}
/**
* A sensible definition of {@link #subMultiset(Object, BoundType, Object, BoundType)} in terms of
* {@link #headMultiset(Object, BoundType) headMultiset} and {@link #tailMultiset(Object,
* BoundType) tailMultiset}.
*
* <p>If you override either of these methods, you may wish to override {@link
* #subMultiset(Object, BoundType, Object, BoundType)} to forward to this implementation.
*/
protected SortedMultiset<E> standardSubMultiset(
@ParametricNullness E lowerBound,
BoundType lowerBoundType,
@ParametricNullness E upperBound,
BoundType upperBoundType) {
return tailMultiset(lowerBound, lowerBoundType).headMultiset(upperBound, upperBoundType);
}
@Override
public SortedMultiset<E> tailMultiset(@ParametricNullness E lowerBound, BoundType boundType) {
return delegate().tailMultiset(lowerBound, boundType);
}
}
| StandardDescendingMultiset |
java | junit-team__junit5 | platform-tooling-support-tests/src/test/java/platform/tooling/support/tests/MavenPomFileTests.java | {
"start": 638,
"end": 2275
} | class ____ {
@Test
void jupiterAggregatorPomDependencies() throws Exception {
var expected = List.of(">> HEAD >>", //
" <dependencyManagement>", //
" <dependencies>", //
" <dependency>", //
" <groupId>org.junit</groupId>", //
" <artifactId>junit-bom</artifactId>", //
">> VERSION >>", //
" <type>pom</type>", //
" <scope>import</scope>", //
" </dependency>", //
" </dependencies>", //
" </dependencyManagement>", //
" <dependencies>", //
" <dependency>", //
" <groupId>org.junit.jupiter</groupId>", //
" <artifactId>junit-jupiter-api</artifactId>", //
">> VERSION >>", //
" <scope>compile</scope>", //
" </dependency>", //
" <dependency>", //
" <groupId>org.junit.jupiter</groupId>", //
" <artifactId>junit-jupiter-params</artifactId>", //
">> VERSION >>", //
" <scope>compile</scope>", //
" </dependency>", //
" <dependency>", //
" <groupId>org.junit.jupiter</groupId>", //
" <artifactId>junit-jupiter-engine</artifactId>", //
">> VERSION >>", //
" <scope>runtime</scope>", //
" </dependency>", //
" </dependencies>", //
">> TAIL >>");
assertLinesMatch(expected, Files.readAllLines(MavenRepo.pom("junit-jupiter")));
}
@Test
void jupiterAggregatorGradleMetadataMarker() throws Exception {
var expected = List.of(">> HEAD >>", //
" <!-- do_not_remove: published-with-gradle-metadata -->", //
">> TAIL >>");
assertLinesMatch(expected, Files.readAllLines(MavenRepo.pom("junit-jupiter")));
}
}
| MavenPomFileTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/query/CachedQueryTest.java | {
"start": 1557,
"end": 6138
} | class ____ implements SettingProvider.Provider<SharedCacheMode> {
@Override
public SharedCacheMode getSetting() {
return SharedCacheMode.ALL;
}
}
@BeforeEach
public void setUp(EntityManagerFactoryScope scope) {
scope.inTransaction(
em -> {
for ( int i = 0; i < 10; i++ ) {
em.persist( new Employee( "John" + i, 20d + i ) );
}
}
);
Statistics stats = getStatistics( scope );
assertEquals( 0, stats.getQueryCacheHitCount() );
assertEquals( 0, stats.getQueryCacheMissCount() );
assertEquals( 0, stats.getQueryCachePutCount() );
assertEquals( 0, stats.getSecondLevelCacheHitCount() );
assertEquals( 0, stats.getSecondLevelCacheMissCount() );
assertEquals( 10, stats.getSecondLevelCachePutCount() );
}
@AfterEach
public void tearDown(EntityManagerFactoryScope scope) {
scope.getEntityManagerFactory().getSchemaManager().truncate();
}
@Test
public void testCacheableQuery(EntityManagerFactoryScope scope) {
Statistics stats = getStatistics( scope );
stats.clear();
// First time the query is executed, query and results are cached.
scope.inTransaction(
em -> {
List<Employee> employees = getEmployees( em );
assertThatAnSQLQueryHasBeenExecuted( stats );
assertEquals( 0, stats.getQueryCacheHitCount() );
assertEquals( 1, stats.getQueryCacheMissCount() );
assertEquals( 1, stats.getQueryCachePutCount() );
assertEquals( 0, stats.getSecondLevelCacheHitCount() );
assertEquals( 0, stats.getSecondLevelCacheMissCount() );
assertEquals( 0, stats.getSecondLevelCachePutCount() );
}
);
stats.clear();
// Second time the query is executed, list of entities are read from query cache
scope.inTransaction(
em -> {
List<Employee> employees = getEmployees( em );
assertThatNoSQLQueryHasBeenExecuted( stats );
assertEquals( 1, stats.getQueryCacheHitCount() );
assertEquals( 0, stats.getQueryCacheMissCount() );
assertEquals( 0, stats.getQueryCachePutCount() );
assertEquals( 0, stats.getSecondLevelCacheHitCount() );
assertEquals( 0, stats.getSecondLevelCacheMissCount() );
assertEquals( 0, stats.getSecondLevelCachePutCount() );
}
);
// NOTE: JPACache.evictAll() only evicts entity regions;
// it does not evict the collection regions or query cache region
scope.getEntityManagerFactory().getCache().evictAll();
stats.clear();
scope.inTransaction(
em -> {
List<Employee> employees = getEmployees( em );
// query is still found in the cache
assertThatNoSQLQueryHasBeenExecuted( stats );
assertEquals( 1, stats.getQueryCacheHitCount() );
assertEquals( 0, stats.getQueryCacheMissCount() );
assertEquals( 0, stats.getQueryCachePutCount() );
assertEquals( 0, stats.getSecondLevelCacheHitCount() );
assertEquals( 0, stats.getSecondLevelCacheMissCount() );
assertEquals( 0, stats.getSecondLevelCachePutCount() );
}
);
stats.clear();
// this time call clear the entity regions and the query cache region
scope.inTransaction(
em -> {
em.getEntityManagerFactory().getCache().evictAll();
em.unwrap( SessionImplementor.class )
.getFactory()
.getCache()
.evictQueryRegions();
List<Employee> employees = getEmployees( em );
// query is no longer found in the cache
assertThatAnSQLQueryHasBeenExecuted( stats );
assertEquals( 0, stats.getQueryCacheHitCount() );
assertEquals( 1, stats.getQueryCacheMissCount() );
assertEquals( 1, stats.getQueryCachePutCount() );
assertEquals( 0, stats.getSecondLevelCacheHitCount() );
assertEquals( 0, stats.getSecondLevelCacheMissCount() );
assertEquals( 10, stats.getSecondLevelCachePutCount() );
}
);
}
private static Statistics getStatistics(EntityManagerFactoryScope scope) {
return ( (SessionFactoryImplementor) scope.getEntityManagerFactory() ).getStatistics();
}
private static List<Employee> getEmployees(EntityManager em) {
TypedQuery<Employee> query = em.createQuery(
HQL,
Employee.class
)
.setHint( HINT_CACHEABLE, true );
List<Employee> employees = query.getResultList();
assertEquals( 10, employees.size() );
return employees;
}
private static void assertThatAnSQLQueryHasBeenExecuted(Statistics stats) {
assertEquals( 1, stats.getQueryStatistics( HQL ).getExecutionCount() );
}
private static void assertThatNoSQLQueryHasBeenExecuted(Statistics stats) {
assertEquals( 0, stats.getQueryStatistics( HQL ).getExecutionCount() );
}
}
| SharedCacheModeProvider |
java | spring-projects__spring-security | web/src/test/java/org/springframework/security/web/authentication/ott/OneTimeTokenAuthenticationFilterTests.java | {
"start": 2222,
"end": 5200
} | class ____ {
@Mock
private FilterChain chain;
@Mock
private AuthenticationManager authenticationManager;
private final OneTimeTokenAuthenticationFilter filter = new OneTimeTokenAuthenticationFilter();
private final HttpServletResponse response = new MockHttpServletResponse();
@BeforeEach
void setUp() {
this.filter.setAuthenticationManager(this.authenticationManager);
}
@Test
void setAuthenticationConverterWhenNullThenIllegalArgumentException() {
assertThatIllegalArgumentException().isThrownBy(() -> this.filter.setAuthenticationConverter(null));
}
@Test
void doFilterWhenUrlDoesNotMatchThenContinues() throws ServletException, IOException {
OneTimeTokenAuthenticationConverter converter = mock(OneTimeTokenAuthenticationConverter.class);
HttpServletResponse response = mock(HttpServletResponse.class);
this.filter.setAuthenticationConverter(converter);
this.filter.doFilter(post("/nomatch").buildRequest(new MockServletContext()), response, this.chain);
verifyNoInteractions(converter, response);
verify(this.chain).doFilter(any(), any());
}
@Test
void doFilterWhenMethodDoesNotMatchThenContinues() throws ServletException, IOException {
OneTimeTokenAuthenticationConverter converter = mock(OneTimeTokenAuthenticationConverter.class);
HttpServletResponse response = mock(HttpServletResponse.class);
this.filter.setAuthenticationConverter(converter);
this.filter.doFilter(get("/login/ott").buildRequest(new MockServletContext()), response, this.chain);
verifyNoInteractions(converter, response);
verify(this.chain).doFilter(any(), any());
}
@Test
void doFilterWhenMissingTokenThenPropagatesRequest() throws ServletException, IOException {
FilterChain chain = mock(FilterChain.class);
this.filter.doFilter(post("/login/ott").buildRequest(new MockServletContext()), this.response, chain);
verify(chain).doFilter(any(), any());
}
@Test
void doFilterWhenInvalidTokenThenUnauthorized() throws ServletException, IOException {
given(this.authenticationManager.authenticate(any())).willThrow(new BadCredentialsException("invalid token"));
this.filter.doFilter(
post("/login/ott").param("token", "some-token-value").buildRequest(new MockServletContext()),
this.response, this.chain);
assertThat(this.response.getStatus()).isEqualTo(HttpStatus.UNAUTHORIZED.value());
verifyNoInteractions(this.chain);
}
@Test
void doFilterWhenValidThenRedirectsToSavedRequest() throws ServletException, IOException {
given(this.authenticationManager.authenticate(any()))
.willReturn(OneTimeTokenAuthenticationToken.authenticated("username", AuthorityUtils.NO_AUTHORITIES));
this.filter.doFilter(
post("/login/ott").param("token", "some-token-value").buildRequest(new MockServletContext()),
this.response, this.chain);
assertThat(this.response.getStatus()).isEqualTo(HttpStatus.FOUND.value());
assertThat(this.response.getHeader("location")).endsWith("/");
}
}
| OneTimeTokenAuthenticationFilterTests |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/TypeExtractorTest.java | {
"start": 77191,
"end": 78123
} | class ____<T> implements MapFunction<Tuple1<T>, Tuple2<T, T>> {
private static final long serialVersionUID = 1L;
@Override
public Tuple2<T, T> map(Tuple1<T> vertex) {
return new Tuple2<T, T>(vertex.f0, vertex.f0);
}
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
void testDuplicateValue() {
TypeInformation<?> ti =
TypeExtractor.getMapReturnTypes(
(MapFunction) new DuplicateValue<String>(),
TypeInformation.of(new TypeHint<Tuple1<String>>() {}));
assertThat(ti.isTupleType()).isTrue();
assertThat(ti.getArity()).isEqualTo(2);
TupleTypeInfo<?> tti = (TupleTypeInfo<?>) ti;
assertThat(tti.getTypeAt(0)).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
assertThat(tti.getTypeAt(1)).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
}
public static | DuplicateValue |
java | spring-projects__spring-boot | module/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/endpoint/annotation/DeleteOperation.java | {
"start": 1309,
"end": 1654
} | interface ____ {
/**
* The media type of the result of the operation.
* @return the media type
*/
String[] produces() default {};
/**
* The media types of the result of the operation.
* @return the media types
*/
@SuppressWarnings("rawtypes")
Class<? extends Producible> producesFrom() default Producible.class;
}
| DeleteOperation |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-websocket-jetty/src/main/java/smoketest/websocket/jetty/reverse/ReverseWebSocketEndpoint.java | {
"start": 853,
"end": 1076
} | class ____ {
@OnMessage
public void handleMessage(Session session, String message) throws IOException {
session.getBasicRemote().sendText("Reversed: " + new StringBuilder(message).reverse());
}
}
| ReverseWebSocketEndpoint |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/query/DateRangeIncludingNowQuery.java | {
"start": 985,
"end": 1930
} | class ____ extends Query {
private final Query in;
public DateRangeIncludingNowQuery(Query in) {
this.in = in;
}
public Query getQuery() {
return in;
}
@Override
public Query rewrite(IndexSearcher searcher) throws IOException {
return in;
}
@Override
public String toString(String field) {
return "DateRangeIncludingNowQuery(" + in + ")";
}
@Override
public void visit(QueryVisitor visitor) {
in.visit(visitor.getSubVisitor(BooleanClause.Occur.MUST, this));
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (sameClassAs(o) == false) return false;
DateRangeIncludingNowQuery that = (DateRangeIncludingNowQuery) o;
return Objects.equals(in, that.in);
}
@Override
public int hashCode() {
return Objects.hash(classHash(), in);
}
}
| DateRangeIncludingNowQuery |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ReferenceEqualityTest.java | {
"start": 8917,
"end": 9151
} | interface ____ {
public abstract boolean equals(Object o);
}
""")
.addSourceLines(
"Test.java",
"""
import com.google.common.base.Optional;
| Sup |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/driver/StateStoreRecordOperations.java | {
"start": 1602,
"end": 1866
} | class ____ the data store. To use
* the default implementations in this class, getAll must return new instances
* of the records on each call. It is recommended to override the default
* implementations for better performance.
*
* @param <T> Record | from |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/stream/StreamExecAsyncCorrelate.java | {
"start": 2280,
"end": 3940
} | class ____ extends CommonExecAsyncCorrelate
implements StreamExecNode<RowData> {
public StreamExecAsyncCorrelate(
ReadableConfig tableConfig,
FlinkJoinType joinType,
RexCall invocation,
InputProperty inputProperty,
RowType outputType,
String description) {
this(
ExecNodeContext.newNodeId(),
ExecNodeContext.newContext(StreamExecAsyncCorrelate.class),
ExecNodeContext.newPersistedConfig(StreamExecAsyncCorrelate.class, tableConfig),
joinType,
invocation,
Collections.singletonList(inputProperty),
outputType,
description);
}
@JsonCreator
public StreamExecAsyncCorrelate(
@JsonProperty(FIELD_NAME_ID) int id,
@JsonProperty(FIELD_NAME_TYPE) ExecNodeContext context,
@JsonProperty(FIELD_NAME_CONFIGURATION) ReadableConfig persistedConfig,
@JsonProperty(FIELD_NAME_JOIN_TYPE) FlinkJoinType joinType,
@JsonProperty(FIELD_NAME_FUNCTION_CALL) RexNode invocation,
@JsonProperty(FIELD_NAME_INPUT_PROPERTIES) List<InputProperty> inputProperties,
@JsonProperty(FIELD_NAME_OUTPUT_TYPE) RowType outputType,
@JsonProperty(FIELD_NAME_DESCRIPTION) String description) {
super(
id,
context,
persistedConfig,
joinType,
(RexCall) invocation,
inputProperties,
outputType,
description);
}
}
| StreamExecAsyncCorrelate |
java | quarkusio__quarkus | extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ObserverValidationProcessor.java | {
"start": 849,
"end": 2948
} | class ____ {
private static final Logger LOGGER = Logger.getLogger(ObserverValidationProcessor.class.getName());
@BuildStep
public void validateApplicationObserver(ApplicationArchivesBuildItem applicationArchivesBuildItem,
ValidationPhaseBuildItem validationPhase, BuildProducer<ValidationErrorBuildItem> errors) {
// an index of all root archive classes (usually src/main/classes)
IndexView applicationClassesIndex = applicationArchivesBuildItem.getRootArchive().getIndex();
// do the validation for each observer that can be found within application classes
for (ObserverInfo observer : validationPhase.getContext().get(BuildExtension.Key.OBSERVERS)) {
if (observer.isSynthetic()) {
// Skip synthetic observers
continue;
}
DotName declaringBeanDotName = observer.getDeclaringBean().getBeanClass();
AnnotationInstance instance = Annotations.getParameterAnnotation(observer.getObserverMethod(),
DotNames.INITIALIZED);
if (applicationClassesIndex.getClassByName(declaringBeanDotName) != null && instance != null &&
instance.value().asClass().name().equals(BuiltinScope.APPLICATION.getName())) {
// found an observer for @Initialized(ApplicationScoped.class)
// log a warning and recommend to use StartupEvent instead
final String observerWarning = "The method %s#%s is an observer for " +
"@Initialized(ApplicationScoped.class). Observer notification for this event may " +
"vary between JVM and native modes! We strongly recommend to observe StartupEvent " +
"instead as that one is consistently delivered in both modes once the container is " +
"running.";
LOGGER.warnf(observerWarning, observer.getDeclaringBean().getImplClazz(),
observer.getObserverMethod().name());
}
}
}
}
| ObserverValidationProcessor |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/TestSchedulingMonitor.java | {
"start": 2005,
"end": 6013
} | class ____ {
@Test
@Timeout(value = 10)
public void testRMStarts() throws Exception {
Configuration conf = new YarnConfiguration();
conf.setBoolean(YarnConfiguration.RM_SCHEDULER_ENABLE_MONITORS, true);
conf.set(YarnConfiguration.RM_SCHEDULER_MONITOR_POLICIES,
ProportionalCapacityPreemptionPolicy.class.getCanonicalName());
ResourceManager rm = new MockRM();
rm.init(conf);
SchedulingEditPolicy mPolicy = mock(SchedulingEditPolicy.class);
when(mPolicy.getMonitoringInterval()).thenReturn(1000L);
SchedulingMonitor monitor = new SchedulingMonitor(rm.getRMContext(),
mPolicy);
monitor.serviceInit(conf);
monitor.serviceStart();
verify(mPolicy, timeout(10000)).editSchedule();
monitor.close();
rm.close();
}
@Test
@Timeout(value = 10)
public void testRMUpdateSchedulingEditPolicy() throws Exception {
CapacitySchedulerConfiguration conf = new CapacitySchedulerConfiguration();
conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
ResourceScheduler.class);
conf.setBoolean(YarnConfiguration.RM_SCHEDULER_ENABLE_MONITORS, true);
MockRM rm = new MockRM(conf);
rm.start();
CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler();
SchedulingMonitorManager smm = cs.getSchedulingMonitorManager();
// runningSchedulingMonitors should not be empty when initialize RM
// scheduler monitor
cs.reinitialize(conf, rm.getRMContext());
assertFalse(smm.isRSMEmpty());
// make sure runningSchedulingPolicies contains all the configured policy
// in YARNConfiguration
String[] configuredPolicies = conf.getStrings(
YarnConfiguration.RM_SCHEDULER_MONITOR_POLICIES);
Set<String> configurePoliciesSet = new HashSet<>();
for (String s : configuredPolicies) {
configurePoliciesSet.add(s);
}
assertTrue(smm.isSameConfiguredPolicies(configurePoliciesSet));
// disable RM scheduler monitor
conf.setBoolean(
YarnConfiguration.RM_SCHEDULER_ENABLE_MONITORS,
YarnConfiguration.DEFAULT_RM_SCHEDULER_ENABLE_MONITORS);
cs.reinitialize(conf, rm.getRMContext());
assertTrue(smm.isRSMEmpty());
rm.close();
}
@Test
@Timeout(value = 10)
public void testRMUpdateAutoCreatedQueueDeletionPolicy() throws Exception {
CapacitySchedulerConfiguration conf = new CapacitySchedulerConfiguration();
conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
ResourceScheduler.class);
conf.setBoolean(YarnConfiguration.RM_SCHEDULER_ENABLE_MONITORS, true);
conf.set(YarnConfiguration.RM_SCHEDULER_MONITOR_POLICIES,
AutoCreatedQueueDeletionPolicy.class.getCanonicalName());
MockRM rm = new MockRM(conf);
rm.start();
CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler();
SchedulingMonitorManager smm = cs.getSchedulingMonitorManager();
// runningSchedulingMonitors should not be empty when initialize RM
// scheduler monitor
cs.reinitialize(conf, rm.getRMContext());
assertFalse(smm.isRSMEmpty());
// make sure runningSchedulingPolicies contains all the configured policy
// in YARNConfiguration
String[] configuredPolicies = conf.getStrings(
YarnConfiguration.RM_SCHEDULER_MONITOR_POLICIES);
Set<String> configurePoliciesSet = new HashSet<>();
for (String s : configuredPolicies) {
configurePoliciesSet.add(s);
}
assertTrue(smm.isSameConfiguredPolicies(configurePoliciesSet));
// make sure the running monitor contains AutoCreatedQueueDeletionPolicy
assertTrue(configurePoliciesSet.
contains(AutoCreatedQueueDeletionPolicy.class.getCanonicalName()));
// disable RM scheduler monitor
conf.setBoolean(
YarnConfiguration.RM_SCHEDULER_ENABLE_MONITORS,
YarnConfiguration.DEFAULT_RM_SCHEDULER_ENABLE_MONITORS);
cs.reinitialize(conf, rm.getRMContext());
assertTrue(smm.isRSMEmpty());
rm.close();
}
}
| TestSchedulingMonitor |
java | spring-projects__spring-framework | spring-jms/src/main/java/org/springframework/jms/connection/CachingConnectionFactory.java | {
"start": 20124,
"end": 21408
} | class ____ extends DestinationCacheKey {
private final @Nullable String selector;
private final @Nullable Boolean noLocal;
private final @Nullable String subscription;
private final boolean durable;
public ConsumerCacheKey(Destination destination, @Nullable String selector, @Nullable Boolean noLocal,
@Nullable String subscription, boolean durable) {
super(destination);
this.selector = selector;
this.noLocal = noLocal;
this.subscription = subscription;
this.durable = durable;
}
@Override
public boolean equals(@Nullable Object other) {
return (this == other || (other instanceof ConsumerCacheKey that &&
destinationEquals(that) &&
ObjectUtils.nullSafeEquals(this.selector, that.selector) &&
ObjectUtils.nullSafeEquals(this.noLocal, that.noLocal) &&
ObjectUtils.nullSafeEquals(this.subscription, that.subscription) &&
this.durable == that.durable));
}
@Override
public int hashCode() {
return super.hashCode() * 31 + ObjectUtils.nullSafeHashCode(this.selector);
}
@Override
public String toString() {
return super.toString() + " [selector=" + this.selector + ", noLocal=" + this.noLocal +
", subscription=" + this.subscription + ", durable=" + this.durable + "]";
}
}
}
| ConsumerCacheKey |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/InstanceOfAssertFactoriesTest.java | {
"start": 79566,
"end": 80367
} | class ____ {
private final Object actual = Lists.list("Homer", "Marge", "Bart", "Lisa", "Maggie");
@Test
void createAssert() {
// WHEN
AbstractCollectionAssert<?, Collection<?>, Object, ObjectAssert<Object>> result = COLLECTION.createAssert(actual);
// THEN
result.contains("Bart", "Lisa");
}
@Test
void createAssert_with_ValueProvider() {
// GIVEN
ValueProvider<?> valueProvider = mockThatDelegatesTo(type -> actual);
// WHEN
AbstractCollectionAssert<?, Collection<?>, Object, ObjectAssert<Object>> result = COLLECTION.createAssert(valueProvider);
// THEN
result.contains("Bart", "Lisa");
verify(valueProvider).apply(parameterizedType(Collection.class, Object.class));
}
}
@Nested
| Collection_Factory |
java | apache__camel | components/camel-opentelemetry-metrics/src/test/java/org/apache/camel/opentelemetry/metrics/routepolicy/OpenTelemetryRoutePolicyExchangeStatusTest.java | {
"start": 1586,
"end": 3667
} | class ____ extends AbstractOpenTelemetryRoutePolicyTest {
@Test
public void testMetricsExchangeStatus() throws Exception {
int count = 10;
MockEndpoint mockEndpoint = getMockEndpoint("mock:result");
mockEndpoint.expectedMessageCount(count / 2);
for (int i = 0; i < count; i++) {
if (i % 2 == 0) {
template.sendBody("direct:completing", "Hello");
} else {
assertThrows(RuntimeException.class, () -> template.sendBody("direct:failing", "Hello"));
}
}
MockEndpoint.assertIsSatisfied(context);
// total meter
assertEquals(count / 2,
getSingleLongPointData(DEFAULT_CAMEL_ROUTE_POLICY_EXCHANGES_TOTAL_METER_NAME, "completing").getValue());
assertEquals(count / 2,
getSingleLongPointData(DEFAULT_CAMEL_ROUTE_POLICY_EXCHANGES_TOTAL_METER_NAME, "failing").getValue());
// succeeded meter
assertEquals(count / 2,
getSingleLongPointData(DEFAULT_CAMEL_ROUTE_POLICY_EXCHANGES_SUCCEEDED_METER_NAME, "completing").getValue());
assertTrue(getAllPointDataForRouteId(DEFAULT_CAMEL_ROUTE_POLICY_EXCHANGES_SUCCEEDED_METER_NAME, "failing").isEmpty());
// failed meter
assertTrue(getAllPointDataForRouteId(DEFAULT_CAMEL_ROUTE_POLICY_EXCHANGES_FAILED_METER_NAME, "completing").isEmpty());
assertEquals(count / 2,
getSingleLongPointData(DEFAULT_CAMEL_ROUTE_POLICY_EXCHANGES_FAILED_METER_NAME, "failing").getValue());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:completing").routeId("completing")
.to("mock:result");
from("direct:failing").routeId("failing")
.throwException(RuntimeException.class, "Failing")
.to("mock:result");
}
};
}
}
| OpenTelemetryRoutePolicyExchangeStatusTest |
java | google__guava | android/guava/src/com/google/common/base/FinalizableReferenceQueue.java | {
"start": 10621,
"end": 10801
} | class ____ implements FinalizerLoader {
// This is used by the ClassLoader-leak test in FinalizableReferenceQueueTest to disable
// finding Finalizer on the system | SystemLoader |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/json/Json.java | {
"start": 2315,
"end": 2382
} | class ____.
* @param str the JSON string.
* @param clazz the | type |
java | netty__netty | example/src/main/java/io/netty/example/localecho/LocalEchoClientHandler.java | {
"start": 781,
"end": 1181
} | class ____ extends SimpleChannelInboundHandler<Object> {
@Override
public void channelRead0(ChannelHandlerContext ctx, Object msg) throws Exception {
// Print as received
System.out.println(msg);
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
cause.printStackTrace();
ctx.close();
}
}
| LocalEchoClientHandler |
java | spring-projects__spring-boot | module/spring-boot-webmvc-test/src/test/java/org/springframework/boot/webmvc/test/autoconfigure/mockmvc/ExampleController3.java | {
"start": 1266,
"end": 1415
} | class ____ {
@GetMapping("/three/{id}")
public String three(@PathVariable @Size(max = 4) String id) {
return "Hello " + id;
}
}
| ExampleController3 |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/DefaultListableBeanFactoryTests.java | {
"start": 137377,
"end": 137571
} | class ____ {
public ConstructorDependencyWithClassResolution(Class<?> clazz) {
}
public ConstructorDependencyWithClassResolution() {
}
}
static | ConstructorDependencyWithClassResolution |
java | quarkusio__quarkus | extensions/openshift-client/runtime/src/main/java/io/quarkus/it/openshift/client/runtime/graal/MiscellaneousSubstitutions.java | {
"start": 2133,
"end": 3930
} | class ____ {
@Substitute
public NonNamespaceOperation<APIRequestCount, APIRequestCountList, Resource<APIRequestCount>> apiRequestCounts() {
throw new RuntimeException(Constants.ERROR_MESSAGE);
}
@Substitute
public MixedOperation<BareMetalHost, BareMetalHostList, Resource<BareMetalHost>> bareMetalHosts() {
throw new RuntimeException(Constants.ERROR_MESSAGE);
}
@Substitute
public MixedOperation<CredentialsRequest, CredentialsRequestList, Resource<CredentialsRequest>> credentialsRequests() {
throw new RuntimeException(Constants.ERROR_MESSAGE);
}
@Substitute
public NonNamespaceOperation<HelmChartRepository, HelmChartRepositoryList, Resource<HelmChartRepository>> helmChartRepositories() {
throw new RuntimeException(Constants.ERROR_MESSAGE);
}
@Substitute
public MixedOperation<Metal3Remediation, Metal3RemediationList, Resource<Metal3Remediation>> metal3Remediations() {
throw new RuntimeException(Constants.ERROR_MESSAGE);
}
@Substitute
public MixedOperation<Metal3RemediationTemplate, Metal3RemediationTemplateList, Resource<Metal3RemediationTemplate>> metal3RemediationTemplates() {
throw new RuntimeException(Constants.ERROR_MESSAGE);
}
@Substitute
public MixedOperation<NetworkAttachmentDefinition, NetworkAttachmentDefinitionList, Resource<NetworkAttachmentDefinition>> networkAttachmentDefinitions() {
throw new RuntimeException(Constants.ERROR_MESSAGE);
}
@Substitute
public MixedOperation<ProjectHelmChartRepository, ProjectHelmChartRepositoryList, Resource<ProjectHelmChartRepository>> projectHelmChartRepositories() {
throw new RuntimeException(Constants.ERROR_MESSAGE);
}
static final | MiscellaneousSubstitutions |
java | apache__camel | components/camel-jmx/src/main/java/org/apache/camel/component/jmx/JMXConsumerNotificationFilter.java | {
"start": 1237,
"end": 2651
} | class ____ extends AttributeChangeNotificationFilter {
private final Lock lock = new ReentrantLock();
private final String stringToCompare;
private final boolean notifyMatch;
public JMXConsumerNotificationFilter(String observedAttribute, String stringToCompare, boolean notifyMatch) {
enableAttribute(observedAttribute);
this.stringToCompare = stringToCompare;
this.notifyMatch = notifyMatch;
}
@Override
public boolean isNotificationEnabled(Notification notification) {
lock.lock();
try {
boolean enabled = super.isNotificationEnabled(notification);
if (!enabled) {
return false;
}
boolean match = false;
if (stringToCompare != null) {
AttributeChangeNotification acn = (AttributeChangeNotification) notification;
Object newValue = acn.getNewValue();
// special for null
if ("null".equals(stringToCompare) && newValue == null) {
match = true;
} else if (newValue != null) {
match = stringToCompare.equals(newValue.toString());
}
return notifyMatch == match;
} else {
return true;
}
} finally {
lock.unlock();
}
}
}
| JMXConsumerNotificationFilter |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/scan/PackageScanFiltersTest.java | {
"start": 1755,
"end": 3361
} | class ____ {
@Test
public void testAssignableToPackageScanFilter() {
AssignableToPackageScanFilter filter = new AssignableToPackageScanFilter();
assertFalse(filter.matches(ScanTargetOne.class));
filter = new AssignableToPackageScanFilter(ScanTargetOne.class);
validateFilter(filter, ScanTargetOne.class);
filter = new AssignableToPackageScanFilter(ScanTargetOne.class);
validateFilter(filter, ScanTargetTwo.class);
Set<Class<?>> classes = new LinkedHashSet<>();
classes.add(ScanTargetOne.class);
classes.add(ScanTargetThree.class);
filter = new AssignableToPackageScanFilter(classes);
validateFilter(filter, ScanTargetOne.class);
validateFilter(filter, ScanTargetTwo.class);
validateFilter(filter, ScanTargetThree.class);
assertTrue(filter.toString().contains("ScanTargetOne"));
assertTrue(filter.toString().contains("ScanTargetThree"));
}
@Test
public void testAnnotatedWithAnyPackageScanFilter() {
Set<Class<? extends Annotation>> annotations = new LinkedHashSet<>();
annotations.add(org.apache.camel.spring.scan.ScannableOne.class);
annotations.add(org.apache.camel.spring.scan.ScannableTwo.class);
AnnotatedWithAnyPackageScanFilter filter = new AnnotatedWithAnyPackageScanFilter(annotations);
Class<ScanTargetOne> type = ScanTargetOne.class;
validateFilter(filter, type);
validateFilter(filter, ScanTargetThree.class);
assertEquals(
"annotated with any @[[ | PackageScanFiltersTest |
java | apache__rocketmq | broker/src/test/java/org/apache/rocketmq/broker/processor/RecallMessageProcessorTest.java | {
"start": 2795,
"end": 12637
} | class ____ {
private static final String TOPIC = "topic";
private static final String BROKER_NAME = "brokerName";
private RecallMessageProcessor recallMessageProcessor;
@Mock
private BrokerConfig brokerConfig;
@Mock
private BrokerController brokerController;
@Mock
private ChannelHandlerContext handlerContext;
@Mock
private MessageStoreConfig messageStoreConfig;
@Mock
private TopicConfigManager topicConfigManager;
@Mock
private MessageStore messageStore;
@Mock
private BrokerStatsManager brokerStatsManager;
@Mock
private Channel channel;
@Before
public void init() throws IllegalAccessException, NoSuchFieldException {
when(brokerController.getMessageStoreConfig()).thenReturn(messageStoreConfig);
when(brokerController.getTopicConfigManager()).thenReturn(topicConfigManager);
when(brokerController.getMessageStore()).thenReturn(messageStore);
when(brokerController.getBrokerConfig()).thenReturn(brokerConfig);
when(brokerConfig.getBrokerName()).thenReturn(BROKER_NAME);
when(brokerConfig.isRecallMessageEnable()).thenReturn(true);
when(brokerController.getBrokerStatsManager()).thenReturn(brokerStatsManager);
when(handlerContext.channel()).thenReturn(channel);
recallMessageProcessor = new RecallMessageProcessor(brokerController);
}
@Test
public void testBuildMessage() {
String timestampStr = String.valueOf(System.currentTimeMillis());
String id = "id";
RecallMessageHandle.HandleV1 handle = new RecallMessageHandle.HandleV1(TOPIC, "brokerName", timestampStr, id);
MessageExtBrokerInner msg =
recallMessageProcessor.buildMessage(handlerContext, new RecallMessageRequestHeader(), handle);
Assert.assertEquals(TOPIC, msg.getTopic());
Map<String, String> properties = MessageDecoder.string2messageProperties(msg.getPropertiesString());
Assert.assertEquals(timestampStr, properties.get(MessageConst.PROPERTY_TIMER_DELIVER_MS));
Assert.assertEquals(id, properties.get(MessageConst.PROPERTY_UNIQ_CLIENT_MESSAGE_ID_KEYIDX));
Assert.assertEquals(TOPIC + "+" + id, properties.get(MessageConst.PROPERTY_TIMER_DEL_UNIQKEY));
}
@Test
public void testHandlePutMessageResult() {
MessageExt message = new MessageExt();
MessageAccessor.putProperty(message, MessageConst.PROPERTY_UNIQ_CLIENT_MESSAGE_ID_KEYIDX, "id");
RemotingCommand response = RemotingCommand.createResponseCommand(RecallMessageResponseHeader.class);
recallMessageProcessor.handlePutMessageResult(null, null, response, message, handlerContext, 0L);
Assert.assertEquals(ResponseCode.SYSTEM_ERROR, response.getCode());
List<PutMessageStatus> okStatus = Arrays.asList(PutMessageStatus.PUT_OK, PutMessageStatus.FLUSH_DISK_TIMEOUT,
PutMessageStatus.FLUSH_SLAVE_TIMEOUT, PutMessageStatus.SLAVE_NOT_AVAILABLE);
for (PutMessageStatus status : PutMessageStatus.values()) {
PutMessageResult putMessageResult =
new PutMessageResult(status, new AppendMessageResult(AppendMessageStatus.PUT_OK));
recallMessageProcessor.handlePutMessageResult(putMessageResult, null, response, message, handlerContext, 0L);
if (okStatus.contains(status)) {
Assert.assertEquals(ResponseCode.SUCCESS, response.getCode());
RecallMessageResponseHeader responseHeader = (RecallMessageResponseHeader) response.readCustomHeader();
Assert.assertEquals("id", responseHeader.getMsgId());
} else {
Assert.assertEquals(ResponseCode.SYSTEM_ERROR, response.getCode());
}
}
}
@Test
public void testProcessRequest_notEnable() throws RemotingCommandException {
when(brokerConfig.isRecallMessageEnable()).thenReturn(false);
RemotingCommand request = mockRequest(0, TOPIC, TOPIC, "id", BROKER_NAME);
RemotingCommand response = recallMessageProcessor.processRequest(handlerContext, request);
Assert.assertEquals(ResponseCode.NO_PERMISSION, response.getCode());
}
@Test
public void testProcessRequest_invalidStatus() throws RemotingCommandException {
RemotingCommand request = mockRequest(0, TOPIC, TOPIC, "id", BROKER_NAME);
RemotingCommand response;
// role slave
when(messageStoreConfig.getBrokerRole()).thenReturn(BrokerRole.SLAVE);
response = recallMessageProcessor.processRequest(handlerContext, request);
Assert.assertEquals(ResponseCode.SLAVE_NOT_AVAILABLE, response.getCode());
// not reach startTimestamp
when(messageStoreConfig.getBrokerRole()).thenReturn(BrokerRole.SYNC_MASTER);
when(messageStore.now()).thenReturn(0L);
when(brokerConfig.getStartAcceptSendRequestTimeStamp()).thenReturn(System.currentTimeMillis());
response = recallMessageProcessor.processRequest(handlerContext, request);
Assert.assertEquals(ResponseCode.SERVICE_NOT_AVAILABLE, response.getCode());
}
@Test
public void testProcessRequest_notWriteable() throws RemotingCommandException {
when(brokerConfig.getBrokerPermission()).thenReturn(4);
when(brokerConfig.isAllowRecallWhenBrokerNotWriteable()).thenReturn(false);
RemotingCommand request = mockRequest(0, TOPIC, TOPIC, "id", BROKER_NAME);
RemotingCommand response = recallMessageProcessor.processRequest(handlerContext, request);
Assert.assertEquals(ResponseCode.SERVICE_NOT_AVAILABLE, response.getCode());
}
@Test
public void testProcessRequest_topicNotFound_or_notMatch() throws RemotingCommandException {
when(brokerConfig.getBrokerPermission()).thenReturn(6);
RemotingCommand request;
RemotingCommand response;
// not found
request = mockRequest(0, TOPIC, TOPIC, "id", BROKER_NAME);
response = recallMessageProcessor.processRequest(handlerContext, request);
Assert.assertEquals(ResponseCode.TOPIC_NOT_EXIST, response.getCode());
// not match
when(topicConfigManager.selectTopicConfig(TOPIC)).thenReturn(new TopicConfig(TOPIC));
request = mockRequest(0, TOPIC, "anotherTopic", "id", BROKER_NAME);
response = recallMessageProcessor.processRequest(handlerContext, request);
Assert.assertEquals(ResponseCode.ILLEGAL_OPERATION, response.getCode());
}
@Test
public void testProcessRequest_brokerNameNotMatch() throws RemotingCommandException {
when(brokerConfig.getBrokerPermission()).thenReturn(6);
when(topicConfigManager.selectTopicConfig(TOPIC)).thenReturn(new TopicConfig(TOPIC));
RemotingCommand request = mockRequest(0, TOPIC, "anotherTopic", "id", BROKER_NAME + "_other");
RemotingCommand response = recallMessageProcessor.processRequest(handlerContext, request);
Assert.assertEquals(ResponseCode.ILLEGAL_OPERATION, response.getCode());
}
@Test
public void testProcessRequest_timestampInvalid() throws RemotingCommandException {
when(brokerConfig.getBrokerPermission()).thenReturn(6);
when(topicConfigManager.selectTopicConfig(TOPIC)).thenReturn(new TopicConfig(TOPIC));
RemotingCommand request;
RemotingCommand response;
// past timestamp
request = mockRequest(0, TOPIC, TOPIC, "id", BROKER_NAME);
response = recallMessageProcessor.processRequest(handlerContext, request);
Assert.assertEquals(ResponseCode.ILLEGAL_OPERATION, response.getCode());
// timestamp overflow
when(messageStoreConfig.getTimerMaxDelaySec()).thenReturn(86400);
request = mockRequest(System.currentTimeMillis() + 86400 * 2 * 1000, TOPIC, TOPIC, "id", BROKER_NAME);
response = recallMessageProcessor.processRequest(handlerContext, request);
Assert.assertEquals(ResponseCode.ILLEGAL_OPERATION, response.getCode());
}
@Test
public void testProcessRequest_success() throws RemotingCommandException {
when(brokerConfig.getBrokerPermission()).thenReturn(6);
when(topicConfigManager.selectTopicConfig(TOPIC)).thenReturn(new TopicConfig(TOPIC));
when(messageStoreConfig.getTimerMaxDelaySec()).thenReturn(86400);
when(messageStore.putMessage(any())).thenReturn(
new PutMessageResult(PutMessageStatus.PUT_OK, new AppendMessageResult(AppendMessageStatus.PUT_OK)));
String msgId = "msgId";
RemotingCommand request = mockRequest(System.currentTimeMillis() + 90 * 1000, TOPIC, TOPIC, msgId, BROKER_NAME);
RemotingCommand response = recallMessageProcessor.processRequest(handlerContext, request);
RecallMessageResponseHeader responseHeader = (RecallMessageResponseHeader) response.readCustomHeader();
Assert.assertEquals(ResponseCode.SUCCESS, response.getCode());
Assert.assertEquals(msgId, responseHeader.getMsgId());
verify(messageStore, times(1)).putMessage(any());
}
private RemotingCommand mockRequest(long timestamp, String requestTopic, String handleTopic,
String msgId, String brokerName) {
String handle =
RecallMessageHandle.HandleV1.buildHandle(handleTopic, brokerName, String.valueOf(timestamp), msgId);
RecallMessageRequestHeader requestHeader = new RecallMessageRequestHeader();
requestHeader.setProducerGroup("group");
requestHeader.setTopic(requestTopic);
requestHeader.setRecallHandle(handle);
requestHeader.setBrokerName(brokerName);
RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.RECALL_MESSAGE, requestHeader);
request.makeCustomHeaderToNet();
return request;
}
}
| RecallMessageProcessorTest |
java | apache__camel | components/camel-zipfile/src/test/java/org/apache/camel/dataformat/zipfile/ZipFileSplitIteratorCorruptTest.java | {
"start": 1193,
"end": 2345
} | class ____ extends CamelTestSupport {
@Test
public void testZipFileUnmarshal() throws Exception {
getMockEndpoint("mock:dead").expectedMessageCount(1);
getMockEndpoint("mock:dead").message(0).exchangeProperty(Exchange.EXCEPTION_CAUGHT)
.isInstanceOf(RuntimeCamelException.class);
getMockEndpoint("mock:end").expectedMessageCount(0);
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
ZipFileDataFormat zf = new ZipFileDataFormat();
zf.setUsingIterator(true);
errorHandler(deadLetterChannel("mock:dead"));
from("file://src/test/resources?delay=10&fileName=corrupt.zip&noop=true")
.unmarshal(zf)
.split(bodyAs(Iterator.class)).streaming()
.convertBodyTo(String.class)
.to("mock:end")
.end();
}
};
}
}
| ZipFileSplitIteratorCorruptTest |
java | quarkusio__quarkus | extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/test/MockServiceDiscoveryConfiguration.java | {
"start": 211,
"end": 1271
} | class ____ implements io.smallrye.stork.api.config.ConfigWithType {
private final Map<String, String> parameters;
/**
* Creates a new MockConfiguration
*
* @param params the parameters, must not be {@code null}
*/
public MockServiceDiscoveryConfiguration(Map<String, String> params) {
parameters = Collections.unmodifiableMap(params);
}
/**
* Creates a new MockConfiguration
*/
public MockServiceDiscoveryConfiguration() {
parameters = Collections.emptyMap();
}
/**
* @return the type
*/
@Override
public String type() {
return "mock";
}
/**
* @return the parameters
*/
@Override
public Map<String, String> parameters() {
return parameters;
}
private MockServiceDiscoveryConfiguration extend(String key, String value) {
Map<String, String> copy = new HashMap<>(parameters);
copy.put(key, value);
return new MockServiceDiscoveryConfiguration(copy);
}
}
| MockServiceDiscoveryConfiguration |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java | {
"start": 55141,
"end": 56266
} | class ____ implements Describable {
final List<DriverFactory> driverFactories;
LocalExecutionPlan(List<DriverFactory> driverFactories) {
this.driverFactories = driverFactories;
}
public List<Driver> createDrivers(String sessionId) {
List<Driver> drivers = new ArrayList<>();
boolean success = false;
try {
for (DriverFactory df : driverFactories) {
for (int i = 0; i < df.driverParallelism.instanceCount; i++) {
logger.trace("building {} {}", i, df);
drivers.add(df.driverSupplier.apply(sessionId));
}
}
success = true;
return drivers;
} finally {
if (success == false) {
Releasables.close(Releasables.wrap(drivers));
}
}
}
@Override
public String describe() {
return driverFactories.stream().map(DriverFactory::describe).collect(joining("\n"));
}
}
}
| LocalExecutionPlan |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.