language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_element_Test.java
|
{
"start": 1305,
"end": 1941
}
|
class ____ {
private final Iterable<String> iterable = asList("Homer", "Marge", "Lisa", "Bart", "Maggie");
@Test
void should_fail_if_iterable_is_empty() {
// GIVEN
Iterable<String> iterable = emptyList();
// WHEN
var assertionError = expectAssertionError(() -> assertThat(iterable).element(1));
// THEN
then(assertionError).hasMessage(actualIsEmpty());
}
@Test
void should_pass_allowing_object_assertions_if_iterable_contains_enough_elements() {
// WHEN
ObjectAssert<String> result = assertThat(iterable).element(1);
// THEN
result.isEqualTo("Marge");
}
}
|
IterableAssert_element_Test
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/type/TypeException.java
|
{
"start": 782,
"end": 1159
}
|
class ____ extends PersistenceException {
private static final long serialVersionUID = 8614420898975117130L;
public TypeException() {
}
public TypeException(String message) {
super(message);
}
public TypeException(String message, Throwable cause) {
super(message, cause);
}
public TypeException(Throwable cause) {
super(cause);
}
}
|
TypeException
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/RegistryTestHelper.java
|
{
"start": 2251,
"end": 12827
}
|
class ____ extends Assertions {
public static final String SC_HADOOP = "org-apache-hadoop";
public static final String USER = "devteam/";
public static final String NAME = "hdfs";
public static final String API_WEBHDFS = "classpath:org.apache.hadoop.namenode.webhdfs";
public static final String API_HDFS = "classpath:org.apache.hadoop.namenode.dfs";
public static final String USERPATH = RegistryConstants.PATH_USERS + USER;
public static final String PARENT_PATH = USERPATH + SC_HADOOP + "/";
public static final String ENTRY_PATH = PARENT_PATH + NAME;
public static final String NNIPC = "uuid:423C2B93-C927-4050-AEC6-6540E6646437";
public static final String IPC2 = "uuid:0663501D-5AD3-4F7E-9419-52F5D6636FCF";
private static final Logger LOG =
LoggerFactory.getLogger(RegistryTestHelper.class);
private static final RegistryUtils.ServiceRecordMarshal recordMarshal =
new RegistryUtils.ServiceRecordMarshal();
public static final String HTTP_API = "http://";
/**
* Assert the path is valid by ZK rules
* @param path path to check
*/
public static void assertValidZKPath(String path) {
try {
PathUtils.validatePath(path);
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException("Invalid Path " + path + ": " + e, e);
}
}
/**
* Assert that a string is not empty (null or "")
* @param message message to raise if the string is empty
* @param check string to check
*/
public static void assertNotEmpty(String message, String check) {
if (StringUtils.isEmpty(check)) {
fail(message);
}
}
/**
* Assert that a string is empty (null or "")
* @param check string to check
*/
public static void assertNotEmpty(String check) {
if (StringUtils.isEmpty(check)) {
fail("Empty string");
}
}
/**
* Log the details of a login context
* @param name name to assert that the user is logged in as
* @param loginContext the login context
*/
public static void logLoginDetails(String name,
LoginContext loginContext) {
assertNotNull(loginContext, "Null login context");
Subject subject = loginContext.getSubject();
LOG.info("Logged in as {}:\n {}", name, subject);
}
/**
* Set the JVM property to enable Kerberos debugging
*/
public static void enableKerberosDebugging() {
System.setProperty(AbstractSecureRegistryTest.SUN_SECURITY_KRB5_DEBUG,
"true");
}
/**
* Set the JVM property to enable Kerberos debugging
*/
public static void disableKerberosDebugging() {
System.setProperty(AbstractSecureRegistryTest.SUN_SECURITY_KRB5_DEBUG,
"false");
}
/**
* General code to validate bits of a component/service entry built iwth
* {@link #addSampleEndpoints(ServiceRecord, String)}
* @param record instance to check
*/
public static void validateEntry(ServiceRecord record) {
assertNotNull(record, "null service record");
List<Endpoint> endpoints = record.external;
assertEquals(2, endpoints.size());
Endpoint webhdfs = findEndpoint(record, API_WEBHDFS, true, 1, 1);
assertEquals(API_WEBHDFS, webhdfs.api);
assertEquals(AddressTypes.ADDRESS_URI, webhdfs.addressType);
assertEquals(ProtocolTypes.PROTOCOL_REST, webhdfs.protocolType);
List<Map<String, String>> addressList = webhdfs.addresses;
Map<String, String> url = addressList.get(0);
String addr = url.get("uri");
assertTrue(addr.contains("http"));
assertTrue(addr.contains(":8020"));
Endpoint nnipc = findEndpoint(record, NNIPC, false, 1,2);
assertEquals(ProtocolTypes.PROTOCOL_THRIFT,
nnipc.protocolType, "wrong protocol in " + nnipc);
Endpoint ipc2 = findEndpoint(record, IPC2, false, 1,2);
assertNotNull(ipc2);
Endpoint web = findEndpoint(record, HTTP_API, true, 1, 1);
assertEquals(1, web.addresses.size());
assertEquals(1, web.addresses.get(0).size());
}
/**
* Assert that an endpoint matches the criteria
* @param endpoint endpoint to examine
* @param addressType expected address type
* @param protocolType expected protocol type
* @param api API
*/
public static void assertMatches(Endpoint endpoint,
String addressType,
String protocolType,
String api) {
assertNotNull(endpoint);
assertEquals(addressType, endpoint.addressType);
assertEquals(protocolType, endpoint.protocolType);
assertEquals(api, endpoint.api);
}
/**
* Assert the records match.
* @param source record that was written
* @param resolved the one that resolved.
*/
public static void assertMatches(ServiceRecord source, ServiceRecord resolved) {
assertNotNull(source, "Null source record ");
assertNotNull(resolved, "Null resolved record ");
assertEquals(source.description, resolved.description);
Map<String, String> srcAttrs = source.attributes();
Map<String, String> resolvedAttrs = resolved.attributes();
String sourceAsString = source.toString();
String resolvedAsString = resolved.toString();
assertEquals(srcAttrs.size(),
resolvedAttrs.size(), "Wrong count of attrs in \n" + sourceAsString
+ "\nfrom\n" + resolvedAsString);
for (Map.Entry<String, String> entry : srcAttrs.entrySet()) {
String attr = entry.getKey();
assertEquals(entry.getValue(), resolved.get(attr), "attribute "+ attr);
}
assertEquals(source.external.size(), resolved.external.size(),
"wrong external endpoint count");
assertEquals(source.internal.size(), resolved.internal.size(),
"wrong external endpoint count");
}
/**
* Find an endpoint in a record or fail,
* @param record record
* @param api API
* @param external external?
* @param addressElements expected # of address elements?
* @param addressTupleSize expected size of a type
* @return the endpoint.
*/
public static Endpoint findEndpoint(ServiceRecord record,
String api, boolean external, int addressElements, int addressTupleSize) {
Endpoint epr = external ? record.getExternalEndpoint(api)
: record.getInternalEndpoint(api);
if (epr != null) {
assertEquals(addressElements, epr.addresses.size(),
"wrong # of addresses");
assertEquals(addressTupleSize, epr.addresses.get(0).size(),
"wrong # of elements in an address tuple");
return epr;
}
List<Endpoint> endpoints = external ? record.external : record.internal;
StringBuilder builder = new StringBuilder();
for (Endpoint endpoint : endpoints) {
builder.append("\"").append(endpoint).append("\" ");
}
fail("Did not find " + api + " in endpoints " + builder);
// never reached; here to keep the compiler happy
return null;
}
/**
* Log a record
* @param name record name
* @param record details
* @throws IOException only if something bizarre goes wrong marshalling
* a record.
*/
public static void logRecord(String name, ServiceRecord record) throws
IOException {
LOG.info(" {} = \n{}\n", name, recordMarshal.toJson(record));
}
/**
* Create a service entry with the sample endpoints
* @param persistence persistence policy
* @return the record
* @throws IOException on a failure
*/
public static ServiceRecord buildExampleServiceEntry(String persistence) throws
IOException,
URISyntaxException {
ServiceRecord record = new ServiceRecord();
record.set(YarnRegistryAttributes.YARN_ID, "example-0001");
record.set(YarnRegistryAttributes.YARN_PERSISTENCE, persistence);
addSampleEndpoints(record, "namenode");
return record;
}
/**
* Add some endpoints
* @param entry entry
*/
public static void addSampleEndpoints(ServiceRecord entry, String hostname)
throws URISyntaxException {
assertNotNull(hostname);
entry.addExternalEndpoint(webEndpoint(HTTP_API,
new URI("http", hostname + ":80", "/")));
entry.addExternalEndpoint(
restEndpoint(API_WEBHDFS,
new URI("http", hostname + ":8020", "/")));
Endpoint endpoint = ipcEndpoint(API_HDFS, null);
endpoint.addresses.add(RegistryTypeUtils.hostnamePortPair(hostname, 8030));
entry.addInternalEndpoint(endpoint);
InetSocketAddress localhost = new InetSocketAddress("localhost", 8050);
entry.addInternalEndpoint(
inetAddrEndpoint(NNIPC, ProtocolTypes.PROTOCOL_THRIFT, "localhost",
8050));
entry.addInternalEndpoint(
RegistryTypeUtils.ipcEndpoint(
IPC2, localhost));
}
/**
* Describe the stage in the process with a box around it -so as
* to highlight it in test logs
* @param log log to use
* @param text text
* @param args logger args
*/
public static void describe(Logger log, String text, Object...args) {
log.info("\n=======================================");
log.info(text, args);
log.info("=======================================\n");
}
/**
* log out from a context if non-null ... exceptions are caught and logged
* @param login login context
* @return null, always
*/
public static LoginContext logout(LoginContext login) {
try {
if (login != null) {
LOG.debug("Logging out login context {}", login.toString());
login.logout();
}
} catch (LoginException e) {
LOG.warn("Exception logging out: {}", e, e);
}
return null;
}
/**
* Login via a UGI. Requres UGI to have been set up
* @param user username
* @param keytab keytab to list
* @return the UGI
* @throws IOException
*/
public static UserGroupInformation loginUGI(String user, File keytab) throws
IOException {
LOG.info("Logging in as {} from {}", user, keytab);
return UserGroupInformation.loginUserFromKeytabAndReturnUGI(user,
keytab.getAbsolutePath());
}
public static ServiceRecord createRecord(String persistence) {
return createRecord("01", persistence, "description");
}
public static ServiceRecord createRecord(String id, String persistence,
String description) {
ServiceRecord serviceRecord = new ServiceRecord();
serviceRecord.set(YarnRegistryAttributes.YARN_ID, id);
serviceRecord.description = description;
serviceRecord.set(YarnRegistryAttributes.YARN_PERSISTENCE, persistence);
return serviceRecord;
}
public static ServiceRecord createRecord(String id, String persistence,
String description, String data) {
return createRecord(id, persistence, description);
}
}
|
RegistryTestHelper
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/fields/RecursiveComparisonAssert_isEqualTo_strictTypeCheck_Test.java
|
{
"start": 8542,
"end": 8625
}
|
class ____ {
@SuppressWarnings("unused")
int value;
}
private static
|
Inner
|
java
|
junit-team__junit5
|
junit-platform-engine/src/main/java/org/junit/platform/engine/support/hierarchical/WorkerThreadPoolHierarchicalTestExecutorService.java
|
{
"start": 22653,
"end": 24605
}
|
class ____ {
private static final Comparator<Entry> QUEUE_COMPARATOR = comparing(Entry::level).reversed() //
.thenComparing(Entry::isContainer) // tests before containers
.thenComparing(Entry::index) //
.thenComparing(Entry::uniqueId, new SameLengthUniqueIdComparator());
private static final Comparator<Entry> CHILD_COMPARATOR = comparing(Entry::isContainer).reversed() // containers before tests
.thenComparing(Entry::index);
private final TestTask task;
private final CompletableFuture<@Nullable Void> future;
private final int index;
@SuppressWarnings("FutureReturnValueIgnored")
Entry(TestTask task, int index) {
this.future = new CompletableFuture<>();
this.future.whenComplete((__, t) -> {
if (t == null) {
LOGGER.trace(() -> "completed normally: " + task);
}
else {
LOGGER.trace(t, () -> "completed exceptionally: " + task);
}
});
this.task = task;
this.index = index;
}
private int index() {
return this.index;
}
private int level() {
return uniqueId().getSegments().size();
}
private boolean isContainer() {
return task.getTestDescriptor().isContainer();
}
private UniqueId uniqueId() {
return task.getTestDescriptor().getUniqueId();
}
CompletableFuture<@Nullable Void> future() {
return future;
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj == null || obj.getClass() != this.getClass()) {
return false;
}
var that = (Entry) obj;
return Objects.equals(this.uniqueId(), that.uniqueId()) && this.index == that.index;
}
@Override
public int hashCode() {
return Objects.hash(uniqueId(), index);
}
@Override
public String toString() {
return new ToStringBuilder(this) //
.append("task", task) //
.append("index", index) //
.toString();
}
private static
|
Entry
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/impl/S3AInternalAuditConstants.java
|
{
"start": 1180,
"end": 1565
}
|
class ____ {
private S3AInternalAuditConstants() {
}
/**
* Exceution attribute for audit span callbacks.
* This is used to retrieve the span in the AWS code.
*/
public static final ExecutionAttribute<AuditSpanS3A>
AUDIT_SPAN_EXECUTION_ATTRIBUTE =
new ExecutionAttribute<>(
"org.apache.hadoop.fs.s3a.audit.AuditSpanS3A");
}
|
S3AInternalAuditConstants
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/data/WithMap.java
|
{
"start": 699,
"end": 996
}
|
class ____<K, V> {
public Map<K, V> map;
public WithMap(Map<K, V> map) {
this.map = map;
}
public static <K, V> WithMap<K, V> withMap(Map<K, V> map) {
return new WithMap<>(map);
}
@Override
public String toString() {
return "WithMap map=r%s".formatted(map);
}
}
|
WithMap
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/calcite/TimestampSchemaVersion.java
|
{
"start": 1009,
"end": 1830
}
|
class ____ implements SchemaVersion {
private final long timestamp;
private TimestampSchemaVersion(long timestamp) {
this.timestamp = timestamp;
}
public static SchemaVersion of(long timestamp) {
return new TimestampSchemaVersion(timestamp);
}
public long getTimestamp() {
return timestamp;
}
@Override
public boolean isBefore(SchemaVersion other) {
if (!(other instanceof TimestampSchemaVersion)) {
throw new IllegalArgumentException(
"Cannot compare a TimestampSchemaVersion object with a "
+ other.getClass()
+ " object.");
} else {
return this.timestamp < ((TimestampSchemaVersion) other).timestamp;
}
}
}
|
TimestampSchemaVersion
|
java
|
resilience4j__resilience4j
|
resilience4j-spring-boot2/src/test/java/io/github/resilience4j/circuitbreaker/monitoring/events/CircuitBreakerStreamEventsTest.java
|
{
"start": 2050,
"end": 7141
}
|
class ____ {
public static final String ACTUATOR_STREAM_CIRCUITBREAKER_EVENTS = "/actuator/streamcircuitbreakerevents";
public static final String ACTUATOR_CIRCUITBREAKEREVENTS = "/actuator/circuitbreakerevents";
@LocalServerPort
int randomServerPort;
@Autowired
DummyService dummyService;
@Autowired
private WebTestClient webTestClient;
private WebClient webStreamClient;
@Before
public void setup(){
webStreamClient = WebClient.create("http://localhost:" + randomServerPort);
}
private final ParameterizedTypeReference<ServerSentEvent<String>> type
= new ParameterizedTypeReference<ServerSentEvent<String>>() {
};
@Test
public void streamAllEvents() throws IOException, InterruptedException {
int noOfEvents =2;
List<ServerSentEvent<String>> noOfEventsFromStream = getServerSentEvents(ACTUATOR_STREAM_CIRCUITBREAKER_EVENTS);
CircuitBreakerEventsEndpointResponse circuitBreakerEventsBefore = circuitBreakerEvents(ACTUATOR_CIRCUITBREAKEREVENTS);
publishEvents(noOfEvents);
CircuitBreakerEventsEndpointResponse circuitBreakerEventsAfter = circuitBreakerEvents(ACTUATOR_CIRCUITBREAKEREVENTS);
assertThat(circuitBreakerEventsBefore.getCircuitBreakerEvents().size()).isLessThan(circuitBreakerEventsAfter.getCircuitBreakerEvents().size());
Thread.sleep(1000);
assertThat(noOfEventsFromStream).hasSize(noOfEvents);
}
@Test
public void streamEventsbyName() throws IOException, InterruptedException {
int noOfEvents =2;
List<ServerSentEvent<String>> noOfEventsFromStream = getServerSentEvents(ACTUATOR_STREAM_CIRCUITBREAKER_EVENTS + "/backendA");
CircuitBreakerEventsEndpointResponse circuitBreakerEventsBefore = circuitBreakerEvents(ACTUATOR_CIRCUITBREAKEREVENTS + "/backendA");
publishEvents(noOfEvents);
CircuitBreakerEventsEndpointResponse circuitBreakerEventsAfter = circuitBreakerEvents(ACTUATOR_CIRCUITBREAKEREVENTS + "/backendA");
assertThat(circuitBreakerEventsBefore.getCircuitBreakerEvents().size()).isLessThan(circuitBreakerEventsAfter.getCircuitBreakerEvents().size());
Thread.sleep(1000);
assertThat(noOfEventsFromStream).hasSize(noOfEvents);
}
@Test
public void streamEventsbyNameAndType() throws IOException, InterruptedException {
int noOfSuccessfulEvents =1;
List<ServerSentEvent<String>> noOfEventsFromStream = getServerSentEvents(ACTUATOR_STREAM_CIRCUITBREAKER_EVENTS + "/backendA/SUCCESS");
CircuitBreakerEventsEndpointResponse circuitBreakerEventsBefore = circuitBreakerEvents(ACTUATOR_CIRCUITBREAKEREVENTS + "/backendA");
publishEventsWithSuccessAndError();
CircuitBreakerEventsEndpointResponse circuitBreakerEventsAfter = circuitBreakerEvents(ACTUATOR_CIRCUITBREAKEREVENTS + "/backendA");
assertThat(circuitBreakerEventsBefore.getCircuitBreakerEvents().size()).isLessThan(circuitBreakerEventsAfter.getCircuitBreakerEvents().size());
Thread.sleep(1000);
assertThat(noOfEventsFromStream).hasSize(noOfSuccessfulEvents);
}
private List<ServerSentEvent<String>> getServerSentEvents(String s) {
Flux<ServerSentEvent<String>> circuitBreakerStreamEventsForAfter = circuitBreakerStreamEvents(s);
List<ServerSentEvent<String>> events = new ArrayList<>();
circuitBreakerStreamEventsForAfter.subscribe(
content -> events.add(content),
error -> System.out.println("Error receiving SSE: {}" + error),
() -> System.out.println("Completed!!!"));
return events;
}
private CircuitBreakerEventsEndpointResponse circuitBreakerEvents(String s) {
return this.webTestClient.get().uri(s).exchange()
.expectStatus().isOk()
.expectBody(CircuitBreakerEventsEndpointResponse.class)
.returnResult()
.getResponseBody();
}
private Flux<ServerSentEvent<String>> circuitBreakerStreamEvents(String s) {
Flux<ServerSentEvent<String>> eventStream = webStreamClient.get()
.uri(s)
.accept(MediaType.TEXT_EVENT_STREAM)
.retrieve()
.bodyToFlux(type)
.filter(eventData -> !eventData.event().equals("ping"))
.take(3);
return eventStream;
}
private void publishEvents(int noOfEvents) throws IOException {
int i =0;
while( i < noOfEvents){
dummyService.doSomething(false);
// The invocation is recorded by the CircuitBreaker as a success.
i++;
}
}
private void publishEventsWithSuccessAndError() throws IOException {
try {
dummyService.doSomething(true);
} catch (IOException ex) {
// Do nothing. The IOException is recorded by the CircuitBreaker as part of the recordFailurePredicate as a failure.
}
// The invocation is recorded by the CircuitBreaker as a success.
dummyService.doSomething(false);
}
}
|
CircuitBreakerStreamEventsTest
|
java
|
apache__camel
|
components/camel-azure/camel-azure-servicebus/src/test/java/org/apache/camel/component/azure/servicebus/ServiceBusEndpointTest.java
|
{
"start": 1548,
"end": 10895
}
|
class ____ extends CamelTestSupport {
@Test
void testCreateWithInvalidData() {
Exception exception = assertThrows(FailedToCreateProducerException.class, () -> {
template.sendBody("azure-servicebus:test//?", null);
});
assertInstanceOf(IllegalArgumentException.class, exception.getCause());
exception = assertThrows(ResolveEndpointFailedException.class, () -> {
template.sendBody("azure-servicebus://?connectionString=test", null);
});
assertInstanceOf(IllegalArgumentException.class, exception.getCause());
// provided credential but no fully qualified namespace
context.getRegistry().bind("credential", new TokenCredential() {
@Override
public Mono<AccessToken> getToken(TokenRequestContext tokenRequestContext) {
return Mono.empty();
}
});
exception = assertThrows(FailedToCreateProducerException.class, () -> {
template.sendBody("azure-servicebus:test?tokenCredential=#credential", null);
});
assertInstanceOf(IllegalArgumentException.class, exception.getCause());
}
@Test
void testCreateEndpointWithConfig() throws Exception {
final String uri = "azure-servicebus://testTopicOrQueue";
final String remaining = "testTopicOrQueue";
final Map<String, Object> params = new HashMap<>();
params.put("serviceBusType", ServiceBusType.topic);
params.put("prefetchCount", 10);
params.put("connectionString", "testString");
params.put("binary", "true");
final ServiceBusEndpoint endpoint
= (ServiceBusEndpoint) context.getComponent("azure-servicebus", ServiceBusComponent.class)
.createEndpoint(uri, remaining, params);
assertEquals(ServiceBusType.topic, endpoint.getConfiguration().getServiceBusType());
assertEquals("testTopicOrQueue", endpoint.getConfiguration().getTopicOrQueueName());
assertEquals(10, endpoint.getConfiguration().getPrefetchCount());
assertEquals("testString", endpoint.getConfiguration().getConnectionString());
assertEquals(true, endpoint.getConfiguration().isBinary());
}
@Test
void testCreateEndpointWithConfigAndSession() throws Exception {
final String uri = "azure-servicebus://testTopicOrQueue";
final String remaining = "testTopicOrQueue";
final Map<String, Object> params = new HashMap<>();
params.put("serviceBusType", ServiceBusType.topic);
params.put("prefetchCount", 10);
params.put("connectionString", "testString");
params.put("binary", "true");
params.put("sessionId", "session-1");
final ServiceBusEndpoint endpoint
= (ServiceBusEndpoint) context.getComponent("azure-servicebus", ServiceBusComponent.class)
.createEndpoint(uri, remaining, params);
assertEquals(ServiceBusType.topic, endpoint.getConfiguration().getServiceBusType());
assertEquals("testTopicOrQueue", endpoint.getConfiguration().getTopicOrQueueName());
assertEquals(10, endpoint.getConfiguration().getPrefetchCount());
assertEquals("testString", endpoint.getConfiguration().getConnectionString());
assertEquals(true, endpoint.getConfiguration().isBinary());
assertEquals("session-1", endpoint.getConfiguration().getSessionId());
}
@Test
void testCreateEndpointWithFqns() throws Exception {
final String uri = "azure-servicebus://testTopicOrQueue";
final String remaining = "testTopicOrQueue";
final String fullyQualifiedNamespace = "namespace.servicebus.windows.net";
final Map<String, Object> params = new HashMap<>();
params.put("serviceBusType", ServiceBusType.topic);
params.put("prefetchCount", 10);
params.put("fullyQualifiedNamespace", fullyQualifiedNamespace);
final ServiceBusEndpoint endpoint
= (ServiceBusEndpoint) context.getComponent("azure-servicebus", ServiceBusComponent.class)
.createEndpoint(uri, remaining, params);
assertEquals(ServiceBusType.topic, endpoint.getConfiguration().getServiceBusType());
assertEquals("testTopicOrQueue", endpoint.getConfiguration().getTopicOrQueueName());
assertEquals(10, endpoint.getConfiguration().getPrefetchCount());
assertEquals(fullyQualifiedNamespace, endpoint.getConfiguration().getFullyQualifiedNamespace());
assertNull(endpoint.getConfiguration().getTokenCredential());
}
@Test
void testCreateEndpointWithFqnsAndCredential() throws Exception {
final String uri = "azure-servicebus://testTopicOrQueue";
final String remaining = "testTopicOrQueue";
final String fullyQualifiedNamespace = "namespace.servicebus.windows.net";
final TokenCredential credential = new DefaultAzureCredentialBuilder().build();
final Map<String, Object> params = new HashMap<>();
params.put("serviceBusType", ServiceBusType.topic);
params.put("prefetchCount", 10);
params.put("fullyQualifiedNamespace", fullyQualifiedNamespace);
params.put("tokenCredential", credential);
final ServiceBusEndpoint endpoint
= (ServiceBusEndpoint) context.getComponent("azure-servicebus", ServiceBusComponent.class)
.createEndpoint(uri, remaining, params);
assertEquals(ServiceBusType.topic, endpoint.getConfiguration().getServiceBusType());
assertEquals("testTopicOrQueue", endpoint.getConfiguration().getTopicOrQueueName());
assertEquals(10, endpoint.getConfiguration().getPrefetchCount());
assertEquals(fullyQualifiedNamespace, endpoint.getConfiguration().getFullyQualifiedNamespace());
assertEquals(credential, endpoint.getConfiguration().getTokenCredential());
}
@Test
void testCreateEndpointWithFqnsAndCredentialFromRegistry() throws Exception {
final String uri = "azure-servicebus://testTopicOrQueue";
final String remaining = "testTopicOrQueue";
final String fullyQualifiedNamespace = "namespace.servicebus.windows.net";
final TokenCredential credential = new DefaultAzureCredentialBuilder().build();
final Map<String, Object> params = new HashMap<>();
context().getRegistry().bind("tokenCredential", credential);
params.put("serviceBusType", ServiceBusType.topic);
params.put("prefetchCount", 10);
params.put("fullyQualifiedNamespace", fullyQualifiedNamespace);
final ServiceBusEndpoint endpoint
= (ServiceBusEndpoint) context.getComponent("azure-servicebus", ServiceBusComponent.class)
.createEndpoint(uri, remaining, params);
assertEquals(ServiceBusType.topic, endpoint.getConfiguration().getServiceBusType());
assertEquals("testTopicOrQueue", endpoint.getConfiguration().getTopicOrQueueName());
assertEquals(10, endpoint.getConfiguration().getPrefetchCount());
assertEquals(fullyQualifiedNamespace, endpoint.getConfiguration().getFullyQualifiedNamespace());
assertEquals(credential, endpoint.getConfiguration().getTokenCredential());
assertEquals(CredentialType.AZURE_IDENTITY, endpoint.getConfiguration().getCredentialType());
}
@Test
void testCreateEndpointWithAzureIdentity() throws Exception {
final String uri = "azure-servicebus://testTopicOrQueue";
final String remaining = "testTopicOrQueue";
final String fullyQualifiedNamespace = "namespace.servicebus.windows.net";
final TokenCredential credential = new DefaultAzureCredentialBuilder().build();
final Map<String, Object> params = new HashMap<>();
params.put("serviceBusType", ServiceBusType.topic);
params.put("prefetchCount", 10);
params.put("fullyQualifiedNamespace", fullyQualifiedNamespace);
params.put("credentialType", CredentialType.AZURE_IDENTITY);
final ServiceBusEndpoint endpoint
= (ServiceBusEndpoint) context.getComponent("azure-servicebus", ServiceBusComponent.class)
.createEndpoint(uri, remaining, params);
assertEquals(ServiceBusType.topic, endpoint.getConfiguration().getServiceBusType());
assertEquals("testTopicOrQueue", endpoint.getConfiguration().getTopicOrQueueName());
assertEquals(10, endpoint.getConfiguration().getPrefetchCount());
assertEquals(fullyQualifiedNamespace, endpoint.getConfiguration().getFullyQualifiedNamespace());
assertNull(endpoint.getConfiguration().getTokenCredential());
}
@Test
void testCreateBaseServiceBusClientWithNoCredentialType() throws Exception {
ServiceBusConfiguration configuration = new ServiceBusConfiguration();
configuration.setConnectionString("Endpoint=sb://camel.apache.org/;SharedAccessKeyName=test;SharedAccessKey=test");
configuration.setTopicOrQueueName("myQueue");
ServiceBusClientFactory factory = new ServiceBusClientFactory();
ServiceBusSenderClient senderClient = factory.createServiceBusSenderClient(configuration);
assertNotNull(senderClient);
senderClient.close();
}
}
|
ServiceBusEndpointTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/sql/exec/internal/JdbcCallImpl.java
|
{
"start": 1182,
"end": 4123
}
|
class ____ implements JdbcOperationQueryCall {
private final String callableName;
private final JdbcCallFunctionReturn functionReturn;
private final List<JdbcCallParameterRegistration> parameterRegistrations;
private final List<JdbcParameterBinder> parameterBinders;
private final List<JdbcCallParameterExtractor<?>> parameterExtractors;
private final List<JdbcCallRefCursorExtractor> refCursorExtractors;
public JdbcCallImpl(Builder builder) {
this(
builder.callableName,
builder.functionReturn,
builder.parameterRegistrations == null
? emptyList()
: unmodifiableList( builder.parameterRegistrations ),
builder.parameterBinders == null
? emptyList()
: unmodifiableList( builder.parameterBinders ),
builder.parameterExtractors == null
? emptyList()
: unmodifiableList( builder.parameterExtractors ),
builder.refCursorExtractors == null
? emptyList()
: unmodifiableList( builder.refCursorExtractors )
);
}
protected JdbcCallImpl(
String callableName,
JdbcCallFunctionReturn functionReturn,
List<JdbcCallParameterRegistration> parameterRegistrations,
List<JdbcParameterBinder> parameterBinders,
List<JdbcCallParameterExtractor<?>> parameterExtractors,
List<JdbcCallRefCursorExtractor> refCursorExtractors) {
this.callableName = callableName;
this.functionReturn = functionReturn;
this.parameterRegistrations = parameterRegistrations;
this.parameterBinders = parameterBinders;
this.parameterExtractors = parameterExtractors;
this.refCursorExtractors = refCursorExtractors;
}
@Override
public String getSqlString() {
return callableName;
}
@Override
public JdbcCallFunctionReturn getFunctionReturn() {
return functionReturn;
}
@Override
public List<JdbcCallParameterRegistration> getParameterRegistrations() {
return parameterRegistrations == null ? emptyList() : parameterRegistrations;
}
@Override
public List<JdbcParameterBinder> getParameterBinders() {
return parameterBinders == null ? emptyList() : parameterBinders;
}
@Override
public Set<String> getAffectedTableNames() {
throw new UnsupportedOperationException();
}
@Override
public boolean dependsOnParameterBindings() {
return false;
}
@Override
public Map<JdbcParameter, JdbcParameterBinding> getAppliedParameters() {
return emptyMap();
}
@Override
public boolean isCompatibleWith(
JdbcParameterBindings jdbcParameterBindings, QueryOptions queryOptions) {
return true;
}
@Override
public List<JdbcCallParameterExtractor<?>> getParameterExtractors() {
return parameterExtractors == null ? emptyList() : parameterExtractors;
}
@Override
public List<JdbcCallRefCursorExtractor> getCallRefCursorExtractors() {
return refCursorExtractors == null ? emptyList() : refCursorExtractors;
}
@Override
public JdbcValuesMappingProducer getJdbcValuesMappingProducer() {
return null;
}
public static
|
JdbcCallImpl
|
java
|
elastic__elasticsearch
|
modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/test/RandomShapeGenerator.java
|
{
"start": 2115,
"end": 15174
}
|
enum ____ {
POINT,
MULTIPOINT,
LINESTRING,
MULTILINESTRING,
POLYGON;
private static final ShapeType[] types = values();
public static ShapeType randomType(Random r) {
return types[RandomNumbers.randomIntBetween(r, 0, types.length - 1)];
}
}
public static ShapeBuilder<?, ?, ?> createShape(Random r) throws InvalidShapeException {
return createShapeNear(r, null);
}
public static ShapeBuilder<?, ?, ?> createShape(Random r, ShapeType st) {
return createShapeNear(r, null, st);
}
public static ShapeBuilder<?, ?, ?> createShapeNear(Random r, Point nearPoint) throws InvalidShapeException {
return createShape(r, nearPoint, null, null);
}
public static ShapeBuilder<?, ?, ?> createShapeNear(Random r, Point nearPoint, ShapeType st) throws InvalidShapeException {
return createShape(r, nearPoint, null, st);
}
public static ShapeBuilder<?, ?, ?> createShapeWithin(Random r, Rectangle bbox) throws InvalidShapeException {
return createShape(r, null, bbox, null);
}
public static ShapeBuilder<?, ?, ?> createShapeWithin(Random r, Rectangle bbox, ShapeType st) throws InvalidShapeException {
return createShape(r, null, bbox, st);
}
public static GeometryCollectionBuilder createGeometryCollection(Random r) throws InvalidShapeException {
return createGeometryCollection(r, null, null, 0);
}
public static GeometryCollectionBuilder createGeometryCollectionNear(Random r, Point nearPoint) throws InvalidShapeException {
return createGeometryCollection(r, nearPoint, null, 0);
}
public static GeometryCollectionBuilder createGeometryCollectionNear(Random r, Point nearPoint, int size) throws InvalidShapeException {
return createGeometryCollection(r, nearPoint, null, size);
}
public static GeometryCollectionBuilder createGeometryCollectionWithin(Random r, Rectangle within) throws InvalidShapeException {
return createGeometryCollection(r, null, within, 0);
}
public static GeometryCollectionBuilder createGeometryCollectionWithin(Random r, Rectangle within, int size)
throws InvalidShapeException {
return createGeometryCollection(r, null, within, size);
}
protected static GeometryCollectionBuilder createGeometryCollection(Random r, Point nearPoint, Rectangle bounds, int numGeometries)
throws InvalidShapeException {
if (numGeometries <= 0) {
// cap geometry collection at 4 shapes (to save test time)
numGeometries = RandomNumbers.randomIntBetween(r, 2, 4);
}
if (nearPoint == null) {
nearPoint = xRandomPoint(r);
}
if (bounds == null) {
bounds = xRandomRectangle(r, nearPoint);
}
GeometryCollectionBuilder gcb = new GeometryCollectionBuilder();
for (int i = 0; i < numGeometries;) {
ShapeBuilder<?, ?, ?> builder = createShapeWithin(r, bounds);
// due to world wrapping, and the possibility for ambiguous polygons, the random shape generation could bail with
// a null shape. We catch that situation here, and only increment the counter when a valid shape is returned.
// Not the most efficient but its the lesser of the evil alternatives
if (builder != null) {
gcb.shape(builder);
++i;
}
}
return gcb;
}
private static ShapeBuilder<?, ?, ?> createShape(Random r, Point nearPoint, Rectangle within, ShapeType st)
throws InvalidShapeException {
ShapeBuilder<?, ?, ?> shape;
short i = 0;
do {
shape = createShape(r, nearPoint, within, st, ST_VALIDATE);
if (shape != null) {
return shape;
}
} while (++i != 100);
throw new InvalidShapeException("Unable to create a valid random shape with provided seed");
}
/**
* Creates a random shape useful for randomized testing, NOTE: exercise caution when using this to build random GeometryCollections
* as creating a large random number of random shapes can result in massive resource consumption
*
* The following options are included
* @param nearPoint Create a shape near a provided point
* @param within Create a shape within the provided rectangle (note: if not null this will override the provided point)
* @param st Create a random shape of the provided type
* @return the ShapeBuilder for a random shape
*/
private static ShapeBuilder<?, ?, ?> createShape(Random r, Point nearPoint, Rectangle within, ShapeType st, boolean validate)
throws InvalidShapeException {
if (st == null) {
st = ShapeType.randomType(r);
}
if (within == null) {
within = xRandomRectangle(r, nearPoint);
}
// NOTE: multipolygon not yet supported. Overlapping polygons are invalid so randomization
// requires an approach to avoid overlaps. This could be approached by creating polygons
// inside non overlapping bounding rectangles
switch (st) {
case POINT:
Point p = xRandomPointIn(r, within);
PointBuilder pb = new PointBuilder().coordinate(new Coordinate(p.getX(), p.getY(), Double.NaN));
return pb;
case MULTIPOINT:
case LINESTRING:
// for random testing having a maximum number of 10 points for a line string is more than sufficient
// if this number gets out of hand, the number of self intersections for a linestring can become
// (n^2-n)/2 and computing the relation intersection matrix will become NP-Hard
int numPoints = RandomNumbers.randomIntBetween(r, 3, 10);
CoordinatesBuilder coordinatesBuilder = new CoordinatesBuilder();
for (int i = 0; i < numPoints; ++i) {
p = xRandomPointIn(r, within);
coordinatesBuilder.coordinate(p.getX(), p.getY());
}
ShapeBuilder<?, ?, ?> pcb = (st == ShapeType.MULTIPOINT)
? new MultiPointBuilder(coordinatesBuilder.build())
: new LineStringBuilder(coordinatesBuilder);
return pcb;
case MULTILINESTRING:
MultiLineStringBuilder mlsb = new MultiLineStringBuilder();
for (int i = 0; i < RandomNumbers.randomIntBetween(r, 1, 10); ++i) {
mlsb.linestring((LineStringBuilder) createShape(r, nearPoint, within, ShapeType.LINESTRING, false));
}
return mlsb;
case POLYGON:
numPoints = RandomNumbers.randomIntBetween(r, 5, 25);
Coordinate[] coordinates = new Coordinate[numPoints];
for (int i = 0; i < numPoints; ++i) {
p = (Point) createShape(r, nearPoint, within, ShapeType.POINT, false).buildS4J();
coordinates[i] = new Coordinate(p.getX(), p.getY());
}
// random point order or random linestrings can lead to invalid self-crossing polygons,
// compute the convex hull for a set of points to ensure polygon does not self cross
Geometry shell = new ConvexHull(coordinates, ctx.getGeometryFactory()).getConvexHull();
Coordinate[] shellCoords = shell.getCoordinates();
// if points are in a line the convex hull will be 2 points which will also lead to an invalid polygon
// when all else fails, use the bounding box as the polygon
if (shellCoords.length < 3) {
shellCoords = new Coordinate[4];
shellCoords[0] = new Coordinate(within.getMinX(), within.getMinY());
shellCoords[1] = new Coordinate(within.getMinX(), within.getMaxY());
shellCoords[2] = new Coordinate(within.getMaxX(), within.getMaxY());
shellCoords[3] = new Coordinate(within.getMaxX(), within.getMinY());
}
PolygonBuilder pgb = new PolygonBuilder(new CoordinatesBuilder().coordinates(shellCoords).close());
if (validate) {
// This test framework builds semi-random geometry (in the sense that points are not truly random due to spatial
// auto-correlation) As a result of the semi-random nature of the geometry, one can not predict the orientation
// intent for ambiguous polygons. Therefore, an invalid oriented dateline crossing polygon could be built.
// The validate flag will check for these possibilities and bail if an incorrect geometry is created
try {
pgb.buildS4J();
} catch (AssertionError | InvalidShapeException e) {
// jts bug may occasionally misinterpret coordinate order causing an unhelpful ('geom' assertion)
// or InvalidShapeException
return null;
}
}
return pgb;
default:
throw new ElasticsearchException("Unable to create shape of type [" + st + "]");
}
}
public static Point xRandomPoint(Random r) {
return xRandomPointIn(r, ctx.getWorldBounds());
}
protected static Point xRandomPointIn(Random rand, Rectangle r) {
double[] pt = new double[2];
RandomGeoGenerator.randomPointIn(rand, r.getMinX(), r.getMinY(), r.getMaxX(), r.getMaxY(), pt);
Point p = ctx.makePoint(pt[0], pt[1]);
Assert.assertEquals(CONTAINS, r.relate(p));
return p;
}
private static Rectangle xRandomRectangle(Random r, Point nearP, Rectangle bounds, boolean small) {
if (nearP == null) nearP = xRandomPointIn(r, bounds);
if (small) {
// between 3 and 6 degrees
final double latRange = 3 * r.nextDouble() + 3;
final double lonRange = 3 * r.nextDouble() + 3;
double minX = nearP.getX();
double maxX = minX + lonRange;
if (maxX > 180) {
maxX = minX;
minX -= lonRange;
}
double minY = nearP.getY();
double maxY = nearP.getY() + latRange;
if (maxY > 90) {
maxY = minY;
minY -= latRange;
}
return ctx.makeRectangle(minX, maxX, minY, maxY);
}
Range xRange = xRandomRange(r, rarely(r) ? 0 : nearP.getX(), Range.xRange(bounds, ctx));
Range yRange = xRandomRange(r, rarely(r) ? 0 : nearP.getY(), Range.yRange(bounds, ctx));
return xMakeNormRect(
xDivisible(xRange.getMin() * 10e3) / 10e3,
xDivisible(xRange.getMax() * 10e3) / 10e3,
xDivisible(yRange.getMin() * 10e3) / 10e3,
xDivisible(yRange.getMax() * 10e3) / 10e3
);
}
/** creates a small random rectangle by default to keep shape test performance at bay */
public static Rectangle xRandomRectangle(Random r, Point nearP) {
return xRandomRectangle(r, nearP, ctx.getWorldBounds(), true);
}
public static Rectangle xRandomRectangle(Random r, Point nearP, boolean small) {
return xRandomRectangle(r, nearP, ctx.getWorldBounds(), small);
}
private static boolean rarely(Random r) {
return r.nextInt(100) >= 90;
}
private static Range xRandomRange(Random r, double near, Range bounds) {
double mid = near + r.nextGaussian() * bounds.getWidth() / 6;
double width = Math.abs(r.nextGaussian()) * bounds.getWidth() / 6;// 1/3rd
return new Range(mid - width / 2, mid + width / 2);
}
private static double xDivisible(double v, double divisible) {
return (int) (Math.round(v / divisible) * divisible);
}
private static double xDivisible(double v) {
return xDivisible(v, xDIVISIBLE);
}
protected static Rectangle xMakeNormRect(double minX, double maxX, double minY, double maxY) {
minX = DistanceUtils.normLonDEG(minX);
maxX = DistanceUtils.normLonDEG(maxX);
if (maxX < minX) {
double t = minX;
minX = maxX;
maxX = t;
}
double minWorldY = ctx.getWorldBounds().getMinY();
double maxWorldY = ctx.getWorldBounds().getMaxY();
if (minY < minWorldY || minY > maxWorldY) {
minY = DistanceUtils.normLatDEG(minY);
}
if (maxY < minWorldY || maxY > maxWorldY) {
maxY = DistanceUtils.normLatDEG(maxY);
}
if (maxY < minY) {
double t = minY;
minY = maxY;
maxY = t;
}
return ctx.makeRectangle(minX, maxX, minY, maxY);
}
}
|
ShapeType
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/annotation/AnnotationFilterTests.java
|
{
"start": 1062,
"end": 3379
}
|
class ____ {
private static final AnnotationFilter FILTER = annotationType ->
ObjectUtils.nullSafeEquals(annotationType, TestAnnotation.class.getName());
@Test
void matchesAnnotationWhenMatchReturnsTrue() {
TestAnnotation annotation = WithTestAnnotation.class.getDeclaredAnnotation(TestAnnotation.class);
assertThat(FILTER.matches(annotation)).isTrue();
}
@Test
void matchesAnnotationWhenNoMatchReturnsFalse() {
OtherAnnotation annotation = WithOtherAnnotation.class.getDeclaredAnnotation(OtherAnnotation.class);
assertThat(FILTER.matches(annotation)).isFalse();
}
@Test
void matchesAnnotationClassWhenMatchReturnsTrue() {
Class<TestAnnotation> annotationType = TestAnnotation.class;
assertThat(FILTER.matches(annotationType)).isTrue();
}
@Test
void matchesAnnotationClassWhenNoMatchReturnsFalse() {
Class<OtherAnnotation> annotationType = OtherAnnotation.class;
assertThat(FILTER.matches(annotationType)).isFalse();
}
@Test
void plainWhenJavaLangAnnotationReturnsTrue() {
assertThat(AnnotationFilter.PLAIN.matches(Retention.class)).isTrue();
}
@Test
void plainWhenSpringLangAnnotationReturnsTrue() {
assertThat(AnnotationFilter.PLAIN.matches(Contract.class)).isTrue();
}
@Test
void plainWhenOtherAnnotationReturnsFalse() {
assertThat(AnnotationFilter.PLAIN.matches(TestAnnotation.class)).isFalse();
}
@Test
void javaWhenJavaLangAnnotationReturnsTrue() {
assertThat(AnnotationFilter.JAVA.matches(Retention.class)).isTrue();
}
@Test
void javaWhenJavaxAnnotationReturnsTrue() {
assertThat(AnnotationFilter.JAVA.matches(ThreadSafe.class)).isTrue();
}
@Test
@SuppressWarnings("deprecation")
void javaWhenSpringLangAnnotationReturnsFalse() {
assertThat(AnnotationFilter.JAVA.matches(org.springframework.lang.Nullable.class)).isFalse();
}
@Test
void javaWhenOtherAnnotationReturnsFalse() {
assertThat(AnnotationFilter.JAVA.matches(TestAnnotation.class)).isFalse();
}
@Test
@SuppressWarnings("deprecation")
void noneReturnsFalse() {
assertThat(AnnotationFilter.NONE.matches(Retention.class)).isFalse();
assertThat(AnnotationFilter.NONE.matches(org.springframework.lang.Nullable.class)).isFalse();
assertThat(AnnotationFilter.NONE.matches(TestAnnotation.class)).isFalse();
}
@Retention(RetentionPolicy.RUNTIME)
@
|
AnnotationFilterTests
|
java
|
quarkusio__quarkus
|
integration-tests/maven/src/test/resources-filtered/projects/project-with-extension/runner/src/main/java/org/acme/HelloResource.java
|
{
"start": 258,
"end": 1061
}
|
class ____ {
final CommonBean common;
final LibraryBean library;
@Inject
@ConfigProperty(name = "greeting")
String greeting;
@Inject
ModuleList moduleList;
public HelloResource(CommonBean common, LibraryBean library) {
this.common = java.util.Objects.requireNonNull(common);
this.library = java.util.Objects.requireNonNull(library);
}
@GET
@Produces(MediaType.TEXT_PLAIN)
public String hello() {
return "hello";
}
@GET
@Path("/greeting")
@Produces(MediaType.TEXT_PLAIN)
public String greeting() {
return greeting;
}
@GET
@Path("/local-modules")
@Produces(MediaType.TEXT_PLAIN)
public String localModules() {
return moduleList.getModules().toString();
}
}
|
HelloResource
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/process/internal/UserTypeResolution.java
|
{
"start": 620,
"end": 2364
}
|
class ____<T> implements BasicValue.Resolution<T> {
private final CustomType<T> userTypeAdapter;
private final MutabilityPlan<T> mutabilityPlan;
/**
* We need this for the way envers interprets the boot-model
* and builds its own :(
*/
private final Properties combinedTypeParameters;
public UserTypeResolution(
CustomType<T> userTypeAdapter,
MutabilityPlan<T> explicitMutabilityPlan,
Properties combinedTypeParameters) {
this.userTypeAdapter = userTypeAdapter;
this.combinedTypeParameters = combinedTypeParameters;
this.mutabilityPlan = explicitMutabilityPlan != null
? explicitMutabilityPlan
: new UserTypeMutabilityPlanAdapter<>( userTypeAdapter.getUserType() );
}
@Override
public JavaType<T> getDomainJavaType() {
return userTypeAdapter.getJavaTypeDescriptor();
}
@Override
public JavaType<?> getRelationalJavaType() {
return userTypeAdapter.getJavaTypeDescriptor();
}
@Override
public JdbcType getJdbcType() {
return userTypeAdapter.getJdbcType();
}
@Override
public BasicValueConverter<T,?> getValueConverter() {
// Even though we could expose the value converter of the user type here,
// we can not do it, as the conversion is done behind the scenes in the binder/extractor,
// whereas the converter returned here would, AFAIU, be used to construct a converted attribute mapping
return null;
}
@Override
public MutabilityPlan<T> getMutabilityPlan() {
return mutabilityPlan;
}
@Override
public BasicType<T> getLegacyResolvedBasicType() {
return userTypeAdapter;
}
@Override
public Properties getCombinedTypeParameters() {
return combinedTypeParameters;
}
@Override
public JdbcMapping getJdbcMapping() {
return userTypeAdapter;
}
}
|
UserTypeResolution
|
java
|
apache__rocketmq
|
remoting/src/test/java/org/apache/rocketmq/remoting/protocol/filter/FilterAPITest.java
|
{
"start": 1126,
"end": 3394
}
|
class ____ {
private String topic = "FooBar";
private String group = "FooBarGroup";
private String subString = "TAG1 || Tag2 || tag3";
@Test
public void testBuildSubscriptionData() throws Exception {
SubscriptionData subscriptionData =
FilterAPI.buildSubscriptionData(topic, subString);
assertThat(subscriptionData.getTopic()).isEqualTo(topic);
assertThat(subscriptionData.getSubString()).isEqualTo(subString);
String[] tags = subString.split("\\|\\|");
Set<String> tagSet = new HashSet<>();
for (String tag : tags) {
tagSet.add(tag.trim());
}
assertThat(subscriptionData.getTagsSet()).isEqualTo(tagSet);
}
@Test
public void testBuildTagSome() {
try {
SubscriptionData subscriptionData = FilterAPI.build(
"TOPIC", "A || B", ExpressionType.TAG
);
assertThat(subscriptionData).isNotNull();
assertThat(subscriptionData.getTopic()).isEqualTo("TOPIC");
assertThat(subscriptionData.getSubString()).isEqualTo("A || B");
assertThat(ExpressionType.isTagType(subscriptionData.getExpressionType())).isTrue();
assertThat(subscriptionData.getTagsSet()).isNotNull();
assertThat(subscriptionData.getTagsSet()).containsExactlyInAnyOrder("A", "B");
} catch (Exception e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
}
@Test
public void testBuildSQL() {
try {
SubscriptionData subscriptionData = FilterAPI.build(
"TOPIC", "a is not null", ExpressionType.SQL92
);
assertThat(subscriptionData).isNotNull();
assertThat(subscriptionData.getTopic()).isEqualTo("TOPIC");
assertThat(subscriptionData.getExpressionType()).isEqualTo(ExpressionType.SQL92);
} catch (Exception e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
}
@Test(expected = IllegalArgumentException.class)
public void testBuildSQLWithNullSubString() throws Exception {
FilterAPI.build("TOPIC", null, ExpressionType.SQL92);
}
}
|
FilterAPITest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/fetchmode/toone/ManyToOneWithCircularity2Test.java
|
{
"start": 4145,
"end": 4578
}
|
class ____ {
@Id
private Long id;
private String name;
@ManyToOne
@Fetch(FetchMode.SELECT)
private Sub2 sub;
public Connector() {
}
public Connector(Long id, String name) {
this.id = id;
this.name = name;
}
public SubParent getSub2() {
return sub;
}
public void setSub2(Sub2 sub) {
this.sub = sub;
}
public Long getId() {
return id;
}
}
@MappedSuperclass
public static
|
Connector
|
java
|
apache__camel
|
components/camel-cxf/camel-cxf-spring-soap/src/test/java/org/apache/camel/component/cxf/CxfComponentEnableMtomTest.java
|
{
"start": 3779,
"end": 4878
}
|
class ____ {
@Bean
public CamelContext context() {
return new SpringCamelContext();
}
@Bean("mtomByQueryParameters")
public CxfEndpoint mtomByQueryParameters(CamelContext context) {
CxfEndpoint endpoint = new CxfEndpoint();
endpoint.setCamelContext(context);
return endpoint;
}
@Bean("mtomByURIProperties")
public CxfEndpoint mtomByURIProperties() {
return new CxfEndpoint();
}
@Bean("mtomByBeanProperties")
public CxfEndpoint mtomByBeanProperties() {
CxfEndpoint endpoint = new CxfEndpoint();
Map<String, Object> properties = new HashMap<>();
properties.put(Message.MTOM_ENABLED, true);
endpoint.setProperties(properties);
return endpoint;
}
@Bean("mtomByBeanSetter")
public CxfEndpoint mtomByBeanSetter() {
CxfEndpoint endpoint = new CxfEndpoint();
endpoint.setMtomEnabled(true);
return endpoint;
}
}
}
|
TestConfig
|
java
|
apache__rocketmq
|
common/src/test/java/org/apache/rocketmq/common/compression/ZlibCompressorTest.java
|
{
"start": 1004,
"end": 2256
}
|
class ____ {
private static final String TEST_STRING = "The quick brown fox jumps over the lazy dog";
@Test
public void testCompressionAndDecompression() throws Exception {
byte[] originalData = TEST_STRING.getBytes();
ZlibCompressor compressor = new ZlibCompressor();
byte[] compressedData = compressor.compress(originalData, 0);
assertTrue("Compressed data should be bigger than original", compressedData.length > originalData.length);
byte[] decompressedData = compressor.decompress(compressedData);
assertArrayEquals("Decompressed data should match original", originalData, decompressedData);
}
@Test
public void testCompressionFailureWithInvalidData() throws Exception {
byte[] originalData = new byte[] {0, 1, 2, 3, 4};
ZlibCompressor compressor = new ZlibCompressor();
compressor.compress(originalData, 0);
}
@Test(expected = IOException.class)
public void testDecompressionFailureWithInvalidData() throws Exception {
byte[] compressedData = new byte[] {0, 1, 2, 3, 4};
ZlibCompressor compressor = new ZlibCompressor();
compressor.decompress(compressedData); // Invalid compressed data
}
}
|
ZlibCompressorTest
|
java
|
quarkusio__quarkus
|
extensions/smallrye-graphql/deployment/src/test/java/io/quarkus/smallrye/graphql/deployment/CompletionStageTest.java
|
{
"start": 4853,
"end": 5309
}
|
class ____ {
public String isbn;
public String title;
public LocalDate published;
public List<String> authors;
public Book() {
}
public Book(String isbn, String title, LocalDate published, String... authors) {
this.isbn = isbn;
this.title = title;
this.published = published;
this.authors = Arrays.asList(authors);
}
}
public static
|
Book
|
java
|
spring-projects__spring-framework
|
spring-tx/src/main/java/org/springframework/transaction/PlatformTransactionManager.java
|
{
"start": 1484,
"end": 2043
}
|
interface ____
* {@link org.springframework.transaction.jta.JtaTransactionManager}. However,
* in common single-resource scenarios, Spring's specific transaction managers
* for example, JDBC, JPA, JMS are preferred choices.
*
* @author Rod Johnson
* @author Juergen Hoeller
* @since 16.05.2003
* @see org.springframework.transaction.support.TransactionTemplate
* @see org.springframework.transaction.interceptor.TransactionInterceptor
* @see org.springframework.transaction.ReactiveTransactionManager
* @see ConfigurableTransactionManager
*/
public
|
is
|
java
|
quarkusio__quarkus
|
integration-tests/infinispan-cache-jpa/src/test/java/io/quarkus/it/infinispan/cache/jpa/InfinispanCacheJPAFunctionalityTest.java
|
{
"start": 418,
"end": 1092
}
|
class ____ {
@Test
public void testCacheJPAFunctionalityFromServlet() {
RestAssured.when().get("/infinispan-cache-jpa/testfunctionality").then().body(is("OK"));
}
@Test
public void testEntityMemoryObjectCountOverride() {
RestAssured.when()
.get("/infinispan-cache-jpa/memory-object-count/com.example.EntityA")
.then().body(is("200"));
}
@Test
public void testEntityExpirationMaxIdleOverride() {
RestAssured.when()
.get("/infinispan-cache-jpa/expiration-max-idle/com.example.EntityB")
.then().body(is("86400"));
}
}
|
InfinispanCacheJPAFunctionalityTest
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/JgroupsComponentBuilderFactory.java
|
{
"start": 7008,
"end": 8277
}
|
class ____
extends AbstractComponentBuilder<JGroupsComponent>
implements JgroupsComponentBuilder {
@Override
protected JGroupsComponent buildConcreteComponent() {
return new JGroupsComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "channel": ((JGroupsComponent) component).setChannel((org.jgroups.JChannel) value); return true;
case "channelProperties": ((JGroupsComponent) component).setChannelProperties((java.lang.String) value); return true;
case "bridgeErrorHandler": ((JGroupsComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "enableViewMessages": ((JGroupsComponent) component).setEnableViewMessages((boolean) value); return true;
case "lazyStartProducer": ((JGroupsComponent) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((JGroupsComponent) component).setAutowiredEnabled((boolean) value); return true;
default: return false;
}
}
}
}
|
JgroupsComponentBuilderImpl
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_1608/Issue1608Test.java
|
{
"start": 526,
"end": 958
}
|
class ____ {
@ProcessorTest
public void shouldCorrectlyUseFluentSettersStartingWithIs() {
Book book = new Book();
book.setIsbn( "978-3-16-148410-0" );
book.setIssueYear( 2018 );
BookDto bookDto = Issue1608Mapper.INSTANCE.map( book );
assertThat( bookDto.getIsbn() ).isEqualTo( "978-3-16-148410-0" );
assertThat( bookDto.getIssueYear() ).isEqualTo( 2018 );
}
}
|
Issue1608Test
|
java
|
micronaut-projects__micronaut-core
|
inject-groovy/src/main/groovy/io/micronaut/ast/groovy/scan/AnnotationClassReader.java
|
{
"start": 1190,
"end": 1395
}
|
class ____ for each field,
* method and bytecode instruction encountered.
*
* @author Eric Bruneton
* @author Eugene Kuleshov
* @author Graeme Rocher
*/
@SuppressWarnings("MagicNumber")
@Internal
|
visitor
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java
|
{
"start": 2766,
"end": 3185
}
|
class ____ implements InferenceService {
protected final OriginSettingClient client;
protected final ThreadPool threadPool;
protected final ExecutorService inferenceExecutor;
protected final Consumer<ActionListener<PreferredModelVariant>> preferredModelVariantFn;
private final ClusterService clusterService;
private final InferenceStats inferenceStats;
public
|
BaseElasticsearchInternalService
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/examples/HTTPExamples.java
|
{
"start": 22890,
"end": 28362
}
|
interface ____ {
Future<HttpClientResponse> get(String requestURI);
}
public void exampleClientComposition01(HttpClient2 client) throws Exception {
Future<HttpClientResponse> get = client.get("some-uri");
// Assuming we have a client that returns a future response
// assuming this is *not* on the event-loop
// introduce a potential data race for the sake of this example
Thread.sleep(100);
get.onSuccess(response -> {
// Response events might have happen already
response
.body()
.onComplete(ar -> {
});
});
}
// Seems to fail javac in CI
// public void exampleClientComposition02(Vertx vertx, HttpClient client) throws Exception {
//
// vertx.deployVerticle(() -> new AbstractVerticle() {
// @Override
// public void start() {
//
// HttpClient client = vertx.createHttpClient();
//
// Future<HttpClientRequest> future = client.request(HttpMethod.GET, "some-uri");
// }
// }, new DeploymentOptions());
// }
public void exampleClientComposition03(HttpClient client) throws Exception {
Future<JsonObject> future = client
.request(HttpMethod.GET, "some-uri")
.compose(request -> request
.send()
.compose(response -> {
// Process the response on the event-loop which guarantees no races
if (response.statusCode() == 200 &&
response.getHeader(HttpHeaders.CONTENT_TYPE).equals("application/json")) {
return response
.body()
.map(buffer -> buffer.toJsonObject());
} else {
return Future.failedFuture("Incorrect HTTP response");
}
}));
// Listen to the composed final json result
future.onSuccess(json -> {
System.out.println("Received json result " + json);
}).onFailure(err -> {
System.out.println("Something went wrong " + err.getMessage());
});
}
public void exampleClientComposition03_(HttpClient client) throws Exception {
Future<JsonObject> future = client
.request(HttpMethod.GET, "some-uri")
.compose(request -> request
.send()
.expecting(HttpResponseExpectation.SC_OK.and(HttpResponseExpectation.JSON))
.compose(response -> response
.body()
.map(buffer -> buffer.toJsonObject())));
// Listen to the composed final json result
future.onSuccess(json -> {
System.out.println("Received json result " + json);
}).onFailure(err -> {
System.out.println("Something went wrong " + err.getMessage());
});
}
public void exampleClientComposition04(HttpClient client, FileSystem fileSystem) throws Exception {
Future<Void> future = client
.request(HttpMethod.GET, "some-uri")
.compose(request -> request
.send()
.compose(response -> {
// Process the response on the event-loop which guarantees no races
if (response.statusCode() == 200) {
// Create a pipe, this pauses the response
Pipe<Buffer> pipe = response.pipe();
// Write the file on the disk
return fileSystem
.open("/some/large/file", new OpenOptions().setWrite(true))
.onFailure(err -> pipe.close())
.compose(file -> pipe.to(file));
} else {
return Future.failedFuture("Incorrect HTTP response");
}
}));
}
public void usingPredefinedExpectations(HttpClient client, RequestOptions options) {
Future<Buffer> fut = client
.request(options)
.compose(request -> request
.send()
.expecting(HttpResponseExpectation.SC_SUCCESS)
.compose(response -> response.body()));
}
public void usingPredicates(HttpClient client) {
// Check CORS header allowing to do POST
HttpResponseExpectation methodsPredicate =
resp -> {
String methods = resp.getHeader("Access-Control-Allow-Methods");
return methods != null && methods.contains("POST");
};
// Send pre-flight CORS request
client
.request(new RequestOptions()
.setMethod(HttpMethod.OPTIONS)
.setPort(8080)
.setHost("myserver.mycompany.com")
.setURI("/some-uri")
.putHeader("Origin", "Server-b.com")
.putHeader("Access-Control-Request-Method", "POST"))
.compose(request -> request
.send()
.expecting(methodsPredicate))
.onSuccess(res -> {
// Process the POST request now
})
.onFailure(err ->
System.out.println("Something went wrong " + err.getMessage()));
}
public void usingSpecificStatus(HttpClient client, RequestOptions options) {
client
.request(options)
.compose(request -> request
.send()
.expecting(HttpResponseExpectation.status(200, 202)))
.onSuccess(res -> {
// ....
});
}
public void usingSpecificContentType(HttpClient client, RequestOptions options) {
client
.request(options)
.compose(request -> request
.send()
.expecting(HttpResponseExpectation.contentType("some/content-type")))
.onSuccess(res -> {
// ....
});
}
public void expectationCustomError() {
Expectation<HttpResponseHead> expectation = HttpResponseExpectation.SC_SUCCESS
.wrappingFailure((resp, err) -> new MyCustomException(resp.statusCode(), err.getMessage()));
}
private static
|
HttpClient2
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/type/java/InstantDescriptorTest.java
|
{
"start": 270,
"end": 788
}
|
class ____ extends AbstractDescriptorTest<Instant> {
final Instant original = Instant.ofEpochMilli( 1476340818745L );
final Instant copy = Instant.ofEpochMilli( 1476340818745L );
final Instant different = Instant.ofEpochMilli( 1476340818746L );
public InstantDescriptorTest() {
super( InstantJavaType.INSTANCE);
}
@Override
protected Data<Instant> getTestData() {
return new Data<>( original, copy, different );
}
@Override
protected boolean shouldBeMutable() {
return false;
}
}
|
InstantDescriptorTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webflux-test/src/test/java/org/springframework/boot/webflux/test/autoconfigure/WebFluxTypeExcludeFilterTests.java
|
{
"start": 6567,
"end": 6644
}
|
class ____ {
}
@WebFluxTest(useDefaultFilters = false)
static
|
WithController
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDeploymentStatsAction.java
|
{
"start": 3368,
"end": 5148
}
|
class ____ extends BaseTasksResponse implements ToXContentObject {
public static final ParseField DEPLOYMENT_STATS = new ParseField("deployment_stats");
private final QueryPage<AssignmentStats> stats;
public Response(
List<TaskOperationFailure> taskFailures,
List<? extends ElasticsearchException> nodeFailures,
List<AssignmentStats> stats,
long count
) {
super(taskFailures, nodeFailures);
this.stats = new QueryPage<>(stats, count, DEPLOYMENT_STATS);
}
public Response(StreamInput in) throws IOException {
super(in);
stats = new QueryPage<>(in, AssignmentStats::new);
}
public QueryPage<AssignmentStats> getStats() {
return stats;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
stats.doXContentBody(builder, params);
toXContentCommon(builder, params);
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
stats.writeTo(out);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (super.equals(o) == false) return false;
Response response = (Response) o;
return Objects.equals(stats, response.stats);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), stats);
}
}
}
|
Response
|
java
|
netty__netty
|
codec-http/src/main/java/io/netty/handler/codec/http/TooLongHttpLineException.java
|
{
"start": 877,
"end": 1527
}
|
class ____ extends TooLongFrameException {
private static final long serialVersionUID = 1614751125592211890L;
/**
* Creates a new instance.
*/
public TooLongHttpLineException() {
}
/**
* Creates a new instance.
*/
public TooLongHttpLineException(String message, Throwable cause) {
super(message, cause);
}
/**
* Creates a new instance.
*/
public TooLongHttpLineException(String message) {
super(message);
}
/**
* Creates a new instance.
*/
public TooLongHttpLineException(Throwable cause) {
super(cause);
}
}
|
TooLongHttpLineException
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/ValueDeserializer.java
|
{
"start": 2039,
"end": 5406
}
|
class ____<T>
implements NullValueProvider
{
/*
/**********************************************************************
/* Initialization, with former `ResolvableDeserializer`, `ContextualDeserializer`
/**********************************************************************
*/
/**
* Method called after deserializer instance has been constructed
* (and registered as necessary by provider objects),
* but before it has returned it to the caller.
* Called object can then resolve its dependencies to other types,
* including self-references (direct or indirect).
*
* @param ctxt Context to use for accessing configuration, resolving
* secondary deserializers
*/
public void resolve(DeserializationContext ctxt) {
// Default implementation does nothing
}
/**
* Method called to see if a different (or differently configured) deserializer
* is needed to deserialize values of specified property.
* Note that instance that this method is called on is typically shared one and
* as a result method should <b>NOT</b> modify this instance but rather construct
* and return a new instance. This instance should only be returned as-is, in case
* it is already suitable for use.
*
* @param ctxt Deserialization context to access configuration, additional
* deserializers that may be needed by this deserializer
* @param property Method, field or constructor parameter that represents the property
* (and is used to assign deserialized value).
* Should be available; but there may be cases where caller cannot provide it and
* null is passed instead (in which case impls usually pass 'this' deserializer as is)
*
* @return Deserializer to use for deserializing values of specified property;
* may be this instance or a new instance.
*/
public ValueDeserializer<?> createContextual(DeserializationContext ctxt,
BeanProperty property) {
// default implementation returns instance unmodified
return this;
}
/*
/**********************************************************************
/* Main deserialization methods
/**********************************************************************
*/
/**
* Method that can be called to ask implementation to deserialize
* JSON content into the value type this serializer handles.
* Returned instance is to be constructed by method itself.
*<p>
* Pre-condition for this method is that the parser points to the
* first event that is part of value to deserializer (and which
* is never JSON 'null' literal, more on this below): for simple
* types it may be the only value; and for structured types the
* Object start marker or a FIELD_NAME.
* </p>
* <p>
* The two possible input conditions for structured types result
* from polymorphism via fields. In the ordinary case, Jackson
* calls this method when it has encountered an OBJECT_START,
* and the method implementation must advance to the next token to
* see the first field name. If the application configures
* polymorphism via a field, then the object looks like the following.
* <pre>
* {
* "@class": "
|
ValueDeserializer
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/interceptor/CollectionRecreateInterceptorTest.java
|
{
"start": 1169,
"end": 2646
}
|
class ____ {
@BeforeAll
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Employee employee = new Employee( 1L );
Project project = new Project( 1L );
employee.projects.add( project );
project.employees.add( employee );
session.persist( project );
session.persist( employee );
}
);
}
@Test
@JiraKey("HHH-3129")
public void testInterceptorNpe(SessionFactoryScope scope) {
scope.inTransaction(
(SessionImplementor) scope.getSessionFactory().withOptions()
.interceptor( new Interceptor() {
@Override
public void onCollectionRecreate(Object collection, Object key) throws CallbackException {
Interceptor.super.onCollectionRecreate( collection, key );
assertNotNull( ((PersistentCollection<?>) collection).getRole() );
}
@Override
public void onCollectionUpdate(Object collection, Object key) throws CallbackException {
Interceptor.super.onCollectionUpdate( collection, key );
assertNotNull( ((PersistentCollection<?>) collection).getRole() );
}
} )
.openSession(),
session -> {
Employee employee = session.find( Employee.class, 1L );
Project newProject = new Project( 2L );
newProject.employees.add( employee );
employee.projects.add( newProject );
session.persist( newProject );
}
);
}
@Entity(name = "Employee")
public static
|
CollectionRecreateInterceptorTest
|
java
|
apache__camel
|
components/camel-as2/camel-as2-api/src/main/java/org/apache/camel/component/as2/api/entity/ApplicationEDIFACTEntity.java
|
{
"start": 993,
"end": 1472
}
|
class ____ extends ApplicationEntity {
public ApplicationEDIFACTEntity(byte[] content, String charset, String contentTransferEncoding,
boolean isMainBody, String filename) {
super(content, ContentType.create(AS2MediaType.APPLICATION_EDIFACT, charset), contentTransferEncoding, isMainBody,
filename);
}
@Override
public void close() throws IOException {
// do nothing
}
}
|
ApplicationEDIFACTEntity
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingAction.java
|
{
"start": 434,
"end": 745
}
|
class ____ extends ActionType<PutRoleMappingResponse> {
public static final PutRoleMappingAction INSTANCE = new PutRoleMappingAction();
public static final String NAME = "cluster:admin/xpack/security/role_mapping/put";
private PutRoleMappingAction() {
super(NAME);
}
}
|
PutRoleMappingAction
|
java
|
google__gson
|
gson/src/test/java/com/google/gson/functional/ExposeFieldsTest.java
|
{
"start": 3809,
"end": 4643
}
|
class ____ {
@Expose private final Integer a;
private final Integer b;
@Expose(serialize = false)
@Keep
final long c;
@Expose(deserialize = false)
final double d;
@Expose(serialize = false, deserialize = false)
@Keep
final char e;
ClassWithExposedFields(Integer a, Integer b) {
this(a, b, 1L, 2.0, 'a');
}
ClassWithExposedFields(Integer a, Integer b, long c, double d, char e) {
this.a = a;
this.b = b;
this.c = c;
this.d = d;
this.e = e;
}
String getExpectedJson() {
StringBuilder sb = new StringBuilder("{");
if (a != null) {
sb.append("\"a\":").append(a).append(",");
}
sb.append("\"d\":").append(d);
sb.append("}");
return sb.toString();
}
}
private static
|
ClassWithExposedFields
|
java
|
apache__camel
|
components/camel-as2/camel-as2-api/src/main/java/org/apache/camel/component/as2/api/AS2SignedDataGenerator.java
|
{
"start": 1322,
"end": 5865
}
|
class ____ extends CMSSignedDataGenerator {
public static final Map<ASN1ObjectIdentifier, String> STANDARD_MICALGS;
static {
Map<ASN1ObjectIdentifier, String> stdMicAlgs = new HashMap<>();
stdMicAlgs.put(CMSAlgorithm.MD5, "md5");
stdMicAlgs.put(CMSAlgorithm.SHA1, "sha-1");
stdMicAlgs.put(CMSAlgorithm.SHA224, "sha-224");
stdMicAlgs.put(CMSAlgorithm.SHA256, "sha-256");
stdMicAlgs.put(CMSAlgorithm.SHA384, "sha-384");
stdMicAlgs.put(CMSAlgorithm.SHA512, "sha-512");
stdMicAlgs.put(CMSAlgorithm.GOST3411, "gostr3411-94");
stdMicAlgs.put(CMSAlgorithm.GOST3411_2012_256, "gostr3411-2012-256");
stdMicAlgs.put(CMSAlgorithm.GOST3411_2012_512, "gostr3411-2012-512");
STANDARD_MICALGS = Collections.unmodifiableMap(stdMicAlgs);
}
/**
* Signing algorithms for DSA keys in order of preference
*/
public static final String[] DSA_SIGNING_ALGORITHMS = {
"SHA512WITHDSA",
"SHA384WITHDSA",
"SHA256WITHDSA",
"SHA224WITHDSA",
"SHA1WITHDSA",
};
/**
* Signing algorithms for RSA keys in order of preference
*/
public static final String[] RSA_SIGNING_ALGORITHMS = {
"SHA512WITHRSA",
"SHA384WITHRSA",
"SHA256WITHRSA",
"SHA224WITHRSA",
"SHA1WITHRSA",
"MD5WITHRSA",
"MD2WITHRSA",
};
/**
* Signing algorithms for EC keys in order of preference
*/
public static final String[] EC_SIGNING_ALGORITHMS = {
"SHA512WITHECDSA",
"SHA384WITHECDSA",
"SHA256WITHECDSA",
"SHA224WITHECDSA",
"SHA1WITHECDSA",
};
public AS2SignedDataGenerator() {
}
/**
* Creates a <code>multipart/signed</code> content type containing the algorithms used by this generator.
*
* @param boundary - boundary to use to demarcate content.
* @return A <code>multipart/signed</code> content type
*/
public ContentType createMultipartSignedContentType(String boundary) {
StringBuilder header = new StringBuilder(AS2MediaType.MULTIPART_SIGNED);
header.append("; boundary=").append(boundary);
Set<String> micAlgSet = new HashSet<>();
// Collect algorithm names used by pre-calculated signers
for (@SuppressWarnings("rawtypes")
Iterator it = _signers.iterator(); it.hasNext();) {
SignerInformation signer = (SignerInformation) it.next();
ASN1ObjectIdentifier digestOID = signer.getDigestAlgorithmID().getAlgorithm();
String micAlg = STANDARD_MICALGS.get(digestOID);
if (micAlg == null) {
micAlgSet.add("unknown");
} else {
micAlgSet.add(micAlg);
}
}
// Collect algorithm names used by signer generators
for (@SuppressWarnings("rawtypes")
Iterator it = signerGens.iterator(); it.hasNext();) {
SignerInfoGenerator signerInfoGen = (SignerInfoGenerator) it.next();
ASN1ObjectIdentifier digestOID = signerInfoGen.getDigestAlgorithm().getAlgorithm();
String micAlg = STANDARD_MICALGS.get(digestOID);
if (micAlg == null) {
micAlgSet.add("unknown");
} else {
micAlgSet.add(micAlg);
}
}
// Add algorithm names to multipart signed header.
int count = 0;
for (String micAlg : micAlgSet) {
if (count == 0) {
if (micAlgSet.size() != 1) {
header.append("; micalg=\"");
} else {
header.append("; micalg=");
}
} else {
header.append(',');
}
header.append(micAlg);
count++;
}
if (count != 0) {
if (micAlgSet.size() != 1) {
header.append('\"');
}
}
return ContentType.parse(header.toString());
}
public static String[] getSupportedSignatureAlgorithmNamesForKey(Key key) {
switch (key.getAlgorithm()) {
case "DSA":
return DSA_SIGNING_ALGORITHMS;
case "RSA":
return RSA_SIGNING_ALGORITHMS;
case "EC":
return EC_SIGNING_ALGORITHMS;
default:
return new String[0];
}
}
}
|
AS2SignedDataGenerator
|
java
|
spring-projects__spring-security
|
messaging/src/main/java/org/springframework/security/messaging/util/matcher/PathPatternMessageMatcher.java
|
{
"start": 4077,
"end": 7053
}
|
class ____ {
private final PathPatternParser parser;
Builder(PathPatternParser parser) {
this.parser = parser;
}
/**
* Match messages having this destination pattern.
*
* <p>
* Path patterns always start with a slash and may contain placeholders. They can
* also be followed by {@code /**} to signify all URIs under a given path.
*
* <p>
* The following are valid patterns and their meaning
* <ul>
* <li>{@code /path} - match exactly and only `/path`</li>
* <li>{@code /path/**} - match `/path` and any of its descendents</li>
* <li>{@code /path/{value}/**} - match `/path/subdirectory` and any of its
* descendents, capturing the value of the subdirectory in
* {@link MessageAuthorizationContext#getVariables()}</li>
* </ul>
*
* <p>
* A more comprehensive list can be found at {@link PathPattern}.
*
* <p>
* A dot-based message pattern is also supported when configuring a
* {@link PathPatternParser} using
* {@link PathPatternMessageMatcher#withPathPatternParser}
* @param pattern the destination pattern to match
* @return the {@link PathPatternMessageMatcher.Builder} for more configuration
*/
public PathPatternMessageMatcher matcher(String pattern) {
return matcher(null, pattern);
}
/**
* Match messages having this type and destination pattern.
*
* <p>
* When the message {@code type} is null, then the matcher does not consider the
* message type
*
* <p>
* Path patterns always start with a slash and may contain placeholders. They can
* also be followed by {@code /**} to signify all URIs under a given path.
*
* <p>
* The following are valid patterns and their meaning
* <ul>
* <li>{@code /path} - match exactly and only `/path`</li>
* <li>{@code /path/**} - match `/path` and any of its descendents</li>
* <li>{@code /path/{value}/**} - match `/path/subdirectory` and any of its
* descendents, capturing the value of the subdirectory in
* {@link MessageAuthorizationContext#getVariables()}</li>
* </ul>
*
* <p>
* A more comprehensive list can be found at {@link PathPattern}.
*
* <p>
* A dot-based message pattern is also supported when configuring a
* {@link PathPatternParser} using
* {@link PathPatternMessageMatcher#withPathPatternParser}
* @param type the message type to match
* @param pattern the destination pattern to match
* @return the {@link PathPatternMessageMatcher.Builder} for more configuration
*/
public PathPatternMessageMatcher matcher(@Nullable SimpMessageType type, String pattern) {
Assert.notNull(pattern, "pattern must not be null");
PathPattern pathPattern = this.parser.parse(pattern);
PathPatternMessageMatcher matcher = new PathPatternMessageMatcher(pathPattern,
this.parser.getPathOptions());
if (type != null) {
matcher.setMessageTypeMatcher(new SimpMessageTypeMatcher(type));
}
return matcher;
}
}
}
|
Builder
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/collection/ListAndSetProxyTest.java
|
{
"start": 1317,
"end": 2483
}
|
class ____ {
@BeforeEach
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
TheOne one = new TheOne( "1" );
session.persist( one );
TheMapKey theMapKey = new TheMapKey( one );
TheMany theMany = new TheMany( theMapKey );
session.persist( theMany );
Map<TheMapKey, TheMany> map = new HashMap<>();
map.put( theMapKey, theMany );
one.setTheManys( map );
one.getTheManyList().add( theMany );
}
);
}
@Test
public void testIt(SessionFactoryScope scope) {
scope.inSession(
session -> {
TheOne one = session.find( TheOne.class, "1" );
Set<TheMapKey> set1 = one.getTheManys().keySet();
Set<TheMapKey> set2 = one.getTheManys().keySet();
assertThat( set1, is( equalTo( set2 ) ) );
assertThat( set1, is( not( sameInstance( set2 ) ) ) );
List<TheMany> list1 = one.getTheManyList().subList( 0, 1 );
List<TheMany> list2 = one.getTheManyList().subList( 0, 1 );
assertThat( list1, is( equalTo( list2 ) ) );
assertThat( list1, is( not( sameInstance( list2 ) ) ) );
}
);
}
@Entity(name = "TheOne")
public static
|
ListAndSetProxyTest
|
java
|
grpc__grpc-java
|
benchmarks/src/generated/main/grpc/io/grpc/benchmarks/proto/ReportQpsScenarioServiceGrpc.java
|
{
"start": 12963,
"end": 14156
}
|
class ____
extends ReportQpsScenarioServiceBaseDescriptorSupplier
implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
private final java.lang.String methodName;
ReportQpsScenarioServiceMethodDescriptorSupplier(java.lang.String methodName) {
this.methodName = methodName;
}
@java.lang.Override
public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
return getServiceDescriptor().findMethodByName(methodName);
}
}
private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
public static io.grpc.ServiceDescriptor getServiceDescriptor() {
io.grpc.ServiceDescriptor result = serviceDescriptor;
if (result == null) {
synchronized (ReportQpsScenarioServiceGrpc.class) {
result = serviceDescriptor;
if (result == null) {
serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
.setSchemaDescriptor(new ReportQpsScenarioServiceFileDescriptorSupplier())
.addMethod(getReportScenarioMethod())
.build();
}
}
}
return result;
}
}
|
ReportQpsScenarioServiceMethodDescriptorSupplier
|
java
|
apache__camel
|
components/camel-mongodb-gridfs/src/test/java/org/apache/camel/component/mongodb/gridfs/integration/GridFsProducerOperationsIT.java
|
{
"start": 1536,
"end": 5910
}
|
class ____ extends AbstractMongoDbITSupport {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:create")
.to("mongodb-gridfs:myDb?database={{mongodb.testDb}}&operation=create&bucket=" + getBucket());
from("direct:remove")
.to("mongodb-gridfs:myDb?database={{mongodb.testDb}}&operation=remove&bucket=" + getBucket());
from("direct:findOne")
.to("mongodb-gridfs:myDb?database={{mongodb.testDb}}&operation=findOne&bucket=" + getBucket());
from("direct:listAll")
.to("mongodb-gridfs:myDb?database={{mongodb.testDb}}&operation=listAll&bucket=" + getBucket());
from("direct:count")
.setHeader(GridFsConstants.GRIDFS_OPERATION, constant("count"))
.to("mongodb-gridfs:myDb?database={{mongodb.testDb}}&bucket=" + getBucket());
}
};
}
@Test
public void testOperations() throws Exception {
Map<String, Object> headers = new HashMap<>();
assertFalse(gridFSBucket.find(eq(FILE_NAME)).cursor().hasNext());
headers.put(Exchange.FILE_NAME, FILE_NAME);
headers.put(Exchange.CONTENT_TYPE, "text/plain");
template.requestBodyAndHeaders("direct:create", FILE_DATA, headers);
assertTrue(gridFSBucket.find(eq(GridFsConstants.GRIDFS_FILE_KEY_FILENAME, FILE_NAME)).cursor().hasNext());
assertEquals(1, template.requestBodyAndHeaders("direct:count", null, headers, Long.class).longValue());
Exchange result = template.request("direct:findOne", exchange -> exchange.getMessage().setHeaders(headers));
assertTrue(result.getMessage().getHeader(Exchange.FILE_LENGTH, Long.class) > 0);
assertNotNull(result.getMessage().getHeader(Exchange.FILE_LAST_MODIFIED));
InputStream ins = result.getMessage().getBody(InputStream.class);
assertNotNull(ins);
byte b[] = new byte[2048];
int i = ins.read(b);
assertEquals(FILE_DATA, new String(b, 0, i, StandardCharsets.UTF_8));
headers.put(Exchange.FILE_NAME, "2-" + FILE_NAME);
headers.put(GridFsConstants.GRIDFS_CHUNKSIZE, 10);
headers.put(GridFsConstants.GRIDFS_METADATA, "{'foo': 'bar'}");
template.requestBodyAndHeaders("direct:create", FILE_DATA + "data2", headers);
assertEquals(1, template.requestBodyAndHeaders("direct:count", null, headers, Long.class).longValue());
assertEquals(2, template.requestBody("direct:count", null, Long.class).longValue());
String s = template.requestBody("direct:listAll", null, String.class);
assertTrue(s.contains("2-" + FILE_NAME));
template.requestBodyAndHeaders("direct:remove", null, headers);
assertEquals(1, template.requestBody("direct:count", null, Long.class).longValue());
s = template.requestBodyAndHeader("direct:listAll", null, Exchange.FILE_NAME, "2-" + FILE_NAME, String.class);
assertFalse(s.contains("2-" + FILE_NAME));
}
@Test
public void testRemoveByObjectId() {
Map<String, Object> headers = new HashMap<>();
headers.put(Exchange.FILE_NAME, FILE_NAME);
Exchange result = template.request(
"mongodb-gridfs:myDb?database={{mongodb.testDb}}&operation=create&bucket=" + getBucket(), new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getMessage().setBody(FILE_DATA);
exchange.getMessage().setHeaders(headers);
}
});
ObjectId objectId = result.getMessage().getHeader(GridFsConstants.GRIDFS_OBJECT_ID, ObjectId.class);
assertNotNull(objectId);
template.requestBodyAndHeader("mongodb-gridfs:myDb?database={{mongodb.testDb}}&operation=remove&bucket=" + getBucket(),
null, GridFsConstants.GRIDFS_OBJECT_ID, objectId);
Integer count = template.requestBodyAndHeaders(
"mongodb-gridfs:myDb?database={{mongodb.testDb}}&operation=count&bucket=" + getBucket(), null, headers,
Integer.class);
assertEquals(0, count);
}
}
|
GridFsProducerOperationsIT
|
java
|
quarkusio__quarkus
|
extensions/smallrye-reactive-messaging/runtime/src/main/java/io/quarkus/smallrye/reactivemessaging/runtime/ReactiveMessagingRuntimeConfig.java
|
{
"start": 380,
"end": 619
}
|
interface ____ {
/**
* Whether to enable the context propagation for connector channels
*/
@WithName("connector-context-propagation")
Optional<List<String>> connectorContextPropagation();
}
|
ReactiveMessagingRuntimeConfig
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/resource/basic/BasicGenericTypesHandlingTest.java
|
{
"start": 2105,
"end": 2373
}
|
class ____<I, O> {
protected abstract O convert(I i);
@POST
@Produces("text/test")
@Consumes("text/test")
public O handle(I i) {
return convert(i);
}
}
@Path("/test")
public static
|
AbstractResource
|
java
|
apache__camel
|
components/camel-cxf/camel-cxf-soap/src/test/java/org/apache/camel/component/cxf/jaxws/CxfProducerContextTest.java
|
{
"start": 1497,
"end": 1569
}
|
class ____ extends CxfProducerTest {
// *** This
|
CxfProducerContextTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/uniqueconstraint/UniqueConstraintDropTest.java
|
{
"start": 2598,
"end": 6099
}
|
class ____ {
@Test
@JiraKey(value = "HHH-11236")
public void testUniqueConstraintIsDropped(
ServiceRegistryScope registryScope,
DomainModelScope modelScope,
@TempDir File tmpDir) throws Exception {
final var scriptFile = new File( tmpDir, "script.sql" );
final var metadata = modelScope.getDomainModel();
metadata.orderColumns( false );
metadata.validate();
final var jdbcServices = registryScope.getRegistry().requireService( JdbcServices.class );
final var dialect = jdbcServices.getDialect();
final var tool = (HibernateSchemaManagementTool) registryScope.getRegistry().requireService( SchemaManagementTool.class );
new IndividuallySchemaMigratorImpl( tool, DefaultSchemaFilter.INSTANCE ).doMigration(
metadata,
executionOptions( registryScope.getRegistry() ),
ContributableMatcher.ALL,
new TargetDescriptorImpl( scriptFile )
);
if ( !(dialect.getUniqueDelegate() instanceof SkipNullableUniqueDelegate) ) {
if ( dialect.getUniqueDelegate() instanceof AlterTableUniqueIndexDelegate) {
assertTrue( checkDropIndex( scriptFile ) );
}
else if ( dialect.getUniqueDelegate() instanceof AlterTableUniqueDelegate ) {
MatcherAssert.assertThat( "The test_entity_item table unique constraint has not been dropped",
checkDropConstraint( "test_entity_item", dialect, scriptFile ),
is( true )
);
}
}
MatcherAssert.assertThat(
checkDropConstraint( "test_entity_children", dialect, scriptFile ),
is( true )
);
}
private ExecutionOptions executionOptions(StandardServiceRegistry registry) {
final Map<String,Object> configurationValues = registry.requireService( ConfigurationService.class ).getSettings();
return new ExecutionOptions() {
@Override
public boolean shouldManageNamespaces() {
return true;
}
@Override
public Map<String,Object> getConfigurationValues() {
return configurationValues;
}
@Override
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE;
}
};
}
private boolean checkDropConstraint(
String tableName,
Dialect dialect,
File scriptFile) throws IOException {
String regex = dialect.getAlterTableString( tableName ) + ' ' + dialect.getDropUniqueKeyString();
if ( dialect.supportsIfExistsBeforeConstraintName() ) {
regex += " if exists";
}
regex += " uk.*";
if ( dialect.supportsIfExistsAfterConstraintName() ) {
regex += " if exists";
}
regex += ";";
return isMatching( regex, scriptFile );
}
private boolean checkDropIndex(File scriptFile) throws IOException {
String regex = "drop index test_entity_item.uk.*";
return isMatching( regex, scriptFile );
}
private boolean isMatching(String regex, File scriptFile) throws IOException {
final String fileContent = new String( Files.readAllBytes( scriptFile.toPath() ) ).toLowerCase();
final String[] split = fileContent.split( System.lineSeparator() );
Pattern p = Pattern.compile( regex );
for ( String line : split ) {
final Matcher matcher = p.matcher( line );
if ( matcher.matches() ) {
return true;
}
}
return false;
}
private record TargetDescriptorImpl(File scriptFile) implements TargetDescriptor {
public EnumSet<TargetType> getTargetTypes() {
return EnumSet.of( TargetType.SCRIPT );
}
@Override
public ScriptTargetOutput getScriptTargetOutput() {
return new ScriptTargetOutputToFile( scriptFile, Charset.defaultCharset().name() );
}
}
}
|
UniqueConstraintDropTest
|
java
|
quarkusio__quarkus
|
integration-tests/redis-devservices/src/test/java/io/quarkus/redis/devservices/it/profiles/DevServiceRedis.java
|
{
"start": 161,
"end": 375
}
|
class ____ implements QuarkusTestProfile {
@Override
public Map<String, String> getConfigOverrides() {
return Collections.singletonMap("quarkus.redis.devservices.port", "6379");
}
}
|
DevServiceRedis
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/SimplePropertyDescriptorTests.java
|
{
"start": 1329,
"end": 1659
}
|
class ____ {
@SuppressWarnings("unused")
public Object setFoo(String foo) { return null; }
}
Method m = C.class.getMethod("setFoo", String.class);
Object pd = new ExtendedBeanInfo.SimplePropertyDescriptor("foo", null, m);
assertThat(pd.toString()).contains(
"PropertyDescriptor[name=foo",
"propertyType=
|
C
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/test/performance/case1/IntDecoderPerformanceTest.java
|
{
"start": 616,
"end": 2308
}
|
class ____ extends TestCase {
private String text;
private final int COUNT = 1000 * 100;
protected void setUp() throws Exception {
String resource;
resource = "json/int_100.json";
resource = "json/object_f_int_1000.json";
// resource = "json/string_array_10000.json";
InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(resource);
text = IOUtils.toString(is);
is.close();
// text =
// "{\"badboy\":true,\"description\":\"神棍敌人姐\",\"name\":\"校长\",\"age\":3,\"birthdate\":1293278091773,\"salary\":123456789.0123}";
}
public void test_performance() throws Exception {
JSON.parse("true");
List<Codec> decoders = new ArrayList<Codec>();
decoders.add(new FastjsonCodec());
decoders.add(new JacksonCodec());
decoders.add(new SimpleJsonCodec());
decoders.add(new JsonLibCodec());
decoders.add(new GsonCodec());
for (int i = 0; i < 4; ++i) {
for (Codec decoder : decoders) {
decode(text, decoder);
// decodeToJavaBean(text, decoder);
}
System.out.println();
}
System.out.println();
System.out.println(text);
}
private void decode(String text, Codec decoder) throws Exception {
long startNano = System.nanoTime();
for (int i = 0; i < COUNT; ++i) {
decoder.decode(text);
}
long nano = System.nanoTime() - startNano;
System.out.println(decoder.getName() + " : \t" + NumberFormat.getInstance().format(nano));
}
public static
|
IntDecoderPerformanceTest
|
java
|
grpc__grpc-java
|
core/src/main/java/io/grpc/internal/AutoConfiguredLoadBalancerFactory.java
|
{
"start": 8293,
"end": 8483
}
|
class ____ extends Exception {
private static final long serialVersionUID = 1L;
private PolicyException(String msg) {
super(msg);
}
}
private static final
|
PolicyException
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/cdi/bcextensions/CustomPseudoScopeTest.java
|
{
"start": 5591,
"end": 6178
}
|
class ____ implements AlterableContext {
public Class<? extends Annotation> getScope() {
return Prototype.class;
}
public <T> T get(Contextual<T> contextual, CreationalContext<T> creationalContext) {
return creationalContext != null ? contextual.create(creationalContext) : null;
}
public <T> T get(Contextual<T> contextual) {
return null;
}
public boolean isActive() {
return true;
}
public void destroy(Contextual<?> contextual) {
}
}
}
|
PrototypeContext
|
java
|
google__guice
|
core/src/com/google/inject/util/Modules.java
|
{
"start": 7399,
"end": 14100
}
|
class ____ extends AbstractModule {
private final ImmutableSet<Module> overrides;
private final ImmutableSet<Module> baseModules;
// TODO(diamondm) checkArgument(!overrides.isEmpty())?
OverrideModule(Iterable<? extends Module> overrides, ImmutableSet<Module> baseModules) {
this.overrides = ImmutableSet.copyOf(overrides);
this.baseModules = baseModules;
}
@Override
public void configure() {
Binder baseBinder = binder();
List<Element> baseElements = Elements.getElements(currentStage(), baseModules);
// If the sole element was a PrivateElements, we want to override
// the private elements within that -- so refocus our elements
// and binder.
if (baseElements.size() == 1) {
Element element = Iterables.getOnlyElement(baseElements);
if (element instanceof PrivateElements) {
PrivateElements privateElements = (PrivateElements) element;
PrivateBinder privateBinder =
baseBinder.newPrivateBinder().withSource(privateElements.getSource());
for (Key<?> exposed : privateElements.getExposedKeys()) {
privateBinder.withSource(privateElements.getExposedSource(exposed)).expose(exposed);
}
baseBinder = privateBinder;
baseElements = privateElements.getElements();
}
}
final Binder binder = baseBinder.skipSources(this.getClass());
final ImmutableSet<Element> elements = ImmutableSet.copyOf(baseElements);
final Module scannersModule = extractScanners(elements);
final List<Element> overrideElements =
Elements.getElements(
currentStage(),
ImmutableList.<Module>builder().addAll(overrides).add(scannersModule).build());
final Set<Key<?>> overriddenKeys = Sets.newHashSet();
final Map<Class<? extends Annotation>, ScopeBinding> overridesScopeAnnotations =
Maps.newHashMap();
// execute the overrides module, keeping track of which keys and scopes are bound
new ModuleWriter(binder) {
@Override
public <T> Void visit(Binding<T> binding) {
overriddenKeys.add(binding.getKey());
return super.visit(binding);
}
@Override
public Void visit(ScopeBinding scopeBinding) {
overridesScopeAnnotations.put(scopeBinding.getAnnotationType(), scopeBinding);
return super.visit(scopeBinding);
}
@Override
public Void visit(PrivateElements privateElements) {
overriddenKeys.addAll(privateElements.getExposedKeys());
return super.visit(privateElements);
}
}.writeAll(overrideElements);
// execute the original module, skipping all scopes and overridden keys. We only skip each
// overridden binding once so things still blow up if the module binds the same thing
// multiple times.
final Map<Scope, List<Object>> scopeInstancesInUse = Maps.newHashMap();
final List<ScopeBinding> scopeBindings = Lists.newArrayList();
new ModuleWriter(binder) {
@Override
public <T> Void visit(Binding<T> binding) {
if (!overriddenKeys.remove(binding.getKey())) {
super.visit(binding);
// Record when a scope instance is used in a binding
Scope scope = getScopeInstanceOrNull(binding);
if (scope != null) {
scopeInstancesInUse
.computeIfAbsent(scope, k -> Lists.newArrayList())
.add(binding.getSource());
}
}
return null;
}
void rewrite(Binder binder, PrivateElements privateElements, Set<Key<?>> keysToSkip) {
PrivateBinder privateBinder =
binder.withSource(privateElements.getSource()).newPrivateBinder();
Set<Key<?>> skippedExposes = Sets.newHashSet();
for (Key<?> key : privateElements.getExposedKeys()) {
if (keysToSkip.remove(key)) {
skippedExposes.add(key);
} else {
privateBinder.withSource(privateElements.getExposedSource(key)).expose(key);
}
}
for (Element element : privateElements.getElements()) {
if (element instanceof Binding && skippedExposes.remove(((Binding) element).getKey())) {
continue;
}
if (element instanceof PrivateElements) {
rewrite(privateBinder, (PrivateElements) element, skippedExposes);
continue;
}
element.applyTo(privateBinder);
}
}
@Override
public Void visit(PrivateElements privateElements) {
rewrite(binder, privateElements, overriddenKeys);
return null;
}
@Override
public Void visit(ScopeBinding scopeBinding) {
scopeBindings.add(scopeBinding);
return null;
}
}.writeAll(elements);
// execute the scope bindings, skipping scopes that have been overridden. Any scope that
// is overridden and in active use will prompt an error
new ModuleWriter(binder) {
@Override
public Void visit(ScopeBinding scopeBinding) {
ScopeBinding overideBinding =
overridesScopeAnnotations.remove(scopeBinding.getAnnotationType());
if (overideBinding == null) {
super.visit(scopeBinding);
} else {
List<Object> usedSources = scopeInstancesInUse.get(scopeBinding.getScope());
if (usedSources != null) {
@SuppressWarnings("OrphanedFormatString") // passed to format method addError below
StringBuilder sb =
new StringBuilder(
"The scope for @%s is bound directly and cannot be overridden.");
sb.append("\n original binding at " + Errors.convert(scopeBinding.getSource()));
for (Object usedSource : usedSources) {
sb.append("\n bound directly at " + Errors.convert(usedSource) + "");
}
binder
.withSource(overideBinding.getSource())
.addError(sb.toString(), scopeBinding.getAnnotationType().getSimpleName());
}
}
return null;
}
}.writeAll(scopeBindings);
}
private Scope getScopeInstanceOrNull(Binding<?> binding) {
return binding.acceptScopingVisitor(
new DefaultBindingScopingVisitor<Scope>() {
@Override
public Scope visitScope(Scope scope) {
return scope;
}
});
}
}
private static
|
OverrideModule
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/manytomany/mapkey/ManyToManyWithMaykeyAndSchemaDefinitionTest.java
|
{
"start": 1607,
"end": 2546
}
|
class ____ {
@BeforeEach
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
EntityA entityA = new EntityA();
entityA.setId( 1L );
EntityB entityB = new EntityB();
entityB.setId( 1L );
entityA.setEntityBs( "B", entityB );
session.persist( entityB );
session.persist( entityA );
}
);
}
@AfterEach
public void tearDown(SessionFactoryScope scope){
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
public void testRetrievingTheMapGeneratesACorrectlyQuery(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
EntityA entityA = session.get( EntityA.class, 1L );
Collection<EntityB> values = entityA.getEntityBMap().values();
assertThat( values.size(), is( 1 ) );
}
);
}
@Entity(name = "EntityA")
@Table(name = "entitya", schema = "myschema")
public static
|
ManyToManyWithMaykeyAndSchemaDefinitionTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/matchers/MethodHasParametersTest.java
|
{
"start": 3765,
"end": 4193
}
|
class ____ {
public void A(int i, Object obj) {}
}
""");
assertCompiles(
methodMatches(
/* shouldMatch= */ true,
new MethodHasParameters(AT_LEAST_ONE, variableType(isPrimitiveType()))));
assertCompiles(
methodMatches(
/* shouldMatch= */ false,
new MethodHasParameters(ALL, variableType(isPrimitiveType()))));
}
private abstract
|
A
|
java
|
micronaut-projects__micronaut-core
|
http/src/test/java/io/micronaut/http/cookie/CookieUtilsTest.java
|
{
"start": 163,
"end": 2145
}
|
class ____ {
@Test
void verifyCookieSizeWithinLimits() {
final Cookie cookie = Cookie.of("name", "value");
final String cookieEncoded = ServerCookieEncoder.INSTANCE.encode(cookie).get(0);
assertDoesNotThrow(() -> CookieUtils.verifyCookieSize(cookie, cookieEncoded));
}
@Test
void verifyCookieSizeWithinLimitsCustom() {
final Cookie cookie = Cookie.of("name", "value");
final String cookieEncoded = ServerCookieEncoder.INSTANCE.encode(cookie).get(0);
assertDoesNotThrow(() -> CookieUtils.verifyCookieSize(cookie, cookieEncoded, 100));
}
@Test
void verifyCookieSizeOutOfLimits() {
StringBuilder sb = new StringBuilder();
while (sb.toString().getBytes(StandardCharsets.UTF_8).length <= 4096) {
sb.append('a');
}
final Cookie cookie = Cookie.of("name", sb.toString());
final String cookieEncoded = ServerCookieEncoder.INSTANCE.encode(cookie).get(0);
CookieSizeExceededException ex = assertThrows(CookieSizeExceededException.class,
() -> CookieUtils.verifyCookieSize(cookie, cookieEncoded));
assertEquals("name", ex.getCookieName());
assertEquals(4096, ex.getMaxSize());
assertTrue(ex.getSize() > 4096);
}
@Test
void verifyCookieSizeOutOfLimitsCustom() {
StringBuilder sb = new StringBuilder();
while (sb.toString().getBytes(StandardCharsets.UTF_8).length <= 1000) {
sb.append('a');
}
final Cookie cookie = Cookie.of("name", sb.toString());
final String cookieEncoded = ServerCookieEncoder.INSTANCE.encode(cookie).get(0);
CookieSizeExceededException ex = assertThrows(CookieSizeExceededException.class,
() -> CookieUtils.verifyCookieSize(cookie, cookieEncoded, 1000));
assertEquals("name", ex.getCookieName());
assertEquals(1000, ex.getMaxSize());
assertTrue(ex.getSize() > 1000);
}
}
|
CookieUtilsTest
|
java
|
processing__processing4
|
core/src/processing/opengl/PJOGL.java
|
{
"start": 17576,
"end": 18787
}
|
class ____ extends GLUtessellatorCallbackAdapter {
@Override
public void begin(int type) {
callback.begin(type);
}
@Override
public void end() {
callback.end();
}
@Override
public void vertex(Object data) {
callback.vertex(data);
}
@Override
public void combine(double[] coords, Object[] data,
float[] weight, Object[] outData) {
callback.combine(coords, data, weight, outData);
}
@Override
public void error(int errnum) {
callback.error(errnum);
}
}
}
@Override
protected String tessError(int err) {
return glu.gluErrorString(err);
}
///////////////////////////////////////////////////////////
// Font outline
static {
SHAPE_TEXT_SUPPORTED = true;
SEG_MOVETO = PathIterator.SEG_MOVETO;
SEG_LINETO = PathIterator.SEG_LINETO;
SEG_QUADTO = PathIterator.SEG_QUADTO;
SEG_CUBICTO = PathIterator.SEG_CUBICTO;
SEG_CLOSE = PathIterator.SEG_CLOSE;
}
@Override
protected FontOutline createFontOutline(char ch, Object font) {
return new FontOutline(ch, (Font) font);
}
protected
|
GLUCallback
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/submitted/sql/Mapper.java
|
{
"start": 924,
"end": 1070
}
|
interface ____ {
@SelectProvider(type = SqlProvider.class)
List<User> findAll(@Param("offset") long offset, @Param("limit") int limit);
|
Mapper
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/util/GenericFieldInfoTest2.java
|
{
"start": 223,
"end": 451
}
|
class ____ extends TestCase {
public void test_generic() throws Exception {
A4 a = JSON.parseObject("{\"data\":[]3}", A4.class);
assertTrue(a.data instanceof List);
}
public static
|
GenericFieldInfoTest2
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRecoveryAction.java
|
{
"start": 1276,
"end": 2416
}
|
class ____ extends BaseRestHandler {
@Override
public List<Route> routes() {
return List.of(new Route(GET, "/_recovery"), new Route(GET, "/{index}/_recovery"));
}
@Override
public String getName() {
return "recovery_action";
}
@Override
public boolean allowSystemIndexAccessByDefault() {
return true;
}
@Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
final RecoveryRequest recoveryRequest = new RecoveryRequest(Strings.splitStringByCommaToArray(request.param("index")));
recoveryRequest.detailed(request.paramAsBoolean("detailed", false));
recoveryRequest.activeOnly(request.paramAsBoolean("active_only", false));
recoveryRequest.indicesOptions(IndicesOptions.fromRequest(request, recoveryRequest.indicesOptions()));
return channel -> new RestCancellableNodeClient(client, request.getHttpChannel()).admin()
.indices()
.recoveries(recoveryRequest, new RestRefCountedChunkedToXContentListener<>(channel));
}
}
|
RestRecoveryAction
|
java
|
apache__kafka
|
raft/src/main/java/org/apache/kafka/raft/errors/BufferAllocationException.java
|
{
"start": 939,
"end": 1125
}
|
class ____ extends RaftException {
private static final long serialVersionUID = 1L;
public BufferAllocationException(String s) {
super(s);
}
}
|
BufferAllocationException
|
java
|
apache__flink
|
flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapred/HadoopMapFunctionITCase.java
|
{
"start": 7145,
"end": 7798
}
|
class ____ implements Mapper<IntWritable, Text, IntWritable, Text> {
@Override
public void map(
final IntWritable k,
final Text v,
final OutputCollector<IntWritable, Text> out,
final Reporter r)
throws IOException {
if (v.toString().contains("bananas")) {
out.collect(k, v);
}
}
@Override
public void configure(final JobConf arg0) {}
@Override
public void close() throws IOException {}
}
/** {@link Mapper} that duplicates records. */
public static
|
NonPassingMapper
|
java
|
bumptech__glide
|
library/src/main/java/com/bumptech/glide/load/engine/bitmap_recycle/ArrayPool.java
|
{
"start": 1405,
"end": 1804
}
|
class ____ no guarantees about the contents of the returned array.
*
* @see #get(int, Class)
*/
<T> T getExact(int size, Class<T> arrayClass);
/** Clears all arrays from the pool. */
void clearMemory();
/**
* Trims the size to the appropriate level.
*
* @param level A trim specified in {@link android.content.ComponentCallbacks2}.
*/
void trimMemory(int level);
}
|
makes
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/multitenancy/schema/CurrentTenantResolverMultiTenancyTest.java
|
{
"start": 849,
"end": 1541
}
|
class ____ extends SchemaBasedMultiTenancyTest {
private TestCurrentTenantIdentifierResolver currentTenantResolver = new TestCurrentTenantIdentifierResolver();
@Override
protected void configure(SessionFactoryBuilder sfb) {
sfb.applyCurrentTenantIdentifierResolver( currentTenantResolver );
}
@Override
protected SessionBuilder newSession(String tenant) {
currentTenantResolver.currentTenantIdentifier = tenant;
SessionBuilder sessionBuilder = sessionFactory.withOptions();
try(Session session = sessionBuilder.openSession()) {
Assert.assertEquals( tenant, session.getTenantIdentifier() );
}
return sessionBuilder;
}
private static
|
CurrentTenantResolverMultiTenancyTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/PrimitiveAtomicReferenceTest.java
|
{
"start": 1733,
"end": 2186
}
|
class ____ {
private AtomicReference<Integer> ref = new AtomicReference<>();
public boolean cas() {
return ref.compareAndSet(null, 10);
}
}
""")
.doTest();
}
@Test
public void negativeNotBoxedType() {
helper
.addSourceLines(
"Test.java",
"""
import java.util.concurrent.atomic.AtomicReference;
|
Test
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/config/plugins/validation/constraints/Required.java
|
{
"start": 1494,
"end": 1753
}
|
interface ____ {
/**
* The message to be logged if this constraint is violated. This should normally
* be overridden. It may contain a {} placeholder for the field's name.
*/
String message() default "The parameter is null: {}";
}
|
Required
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java
|
{
"start": 2370,
"end": 10195
}
|
class ____ extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(MockTransportService.TestPlugin.class);
}
/**
* This test tries to truncate some of larger files in the index to trigger leftovers on the recovery
* target. This happens during recovery when the last chunk of the file is transferred to the replica
* we just throw an exception to make sure the recovery fails and we leave some half baked files on the target.
* Later we allow full recovery to ensure we can still recover and don't run into corruptions.
*/
public void testCancelRecoveryAndResume() throws Exception {
updateClusterSettings(
Settings.builder()
.put(RecoverySettings.INDICES_RECOVERY_CHUNK_SIZE.getKey(), ByteSizeValue.of(randomIntBetween(50, 300), ByteSizeUnit.BYTES))
);
NodesStatsResponse nodeStats = clusterAdmin().prepareNodesStats().get();
List<NodeStats> dataNodeStats = new ArrayList<>();
for (NodeStats stat : nodeStats.getNodes()) {
if (stat.getNode().canContainData()) {
dataNodeStats.add(stat);
}
}
assertThat(dataNodeStats.size(), greaterThanOrEqualTo(2));
Collections.shuffle(dataNodeStats, random());
// we use 2 nodes a lucky and unlucky one
// the lucky one holds the primary
// the unlucky one gets the replica and the truncated leftovers
String primariesNode = dataNodeStats.get(0).getNode().getName();
String unluckyNode = dataNodeStats.get(1).getNode().getName();
// create the index and prevent allocation on any other nodes than the lucky one
// we have no replicas so far and make sure that we allocate the primary on the lucky node
assertAcked(
prepareCreate("test").setMapping("field1", "type=text", "the_id", "type=text")
.setSettings(indexSettings(numberOfShards(), 0).put("index.routing.allocation.include._name", primariesNode))
); // only allocate on the lucky node
// index some docs and check if they are coming back
int numDocs = randomIntBetween(100, 200);
List<IndexRequestBuilder> builder = new ArrayList<>();
for (int i = 0; i < numDocs; i++) {
String id = Integer.toString(i);
builder.add(prepareIndex("test").setId(id).setSource("field1", English.intToEnglish(i), "the_id", id));
}
indexRandom(true, builder);
for (int i = 0; i < numDocs; i++) {
String id = Integer.toString(i);
assertHitCount(prepareSearch().setQuery(QueryBuilders.termQuery("the_id", id)), 1);
}
ensureGreen();
// ensure we have flushed segments and make them a big one via optimize
indicesAdmin().prepareFlush().setForce(true).get();
indicesAdmin().prepareFlush().setForce(true).get(); // double flush to create safe commit in case of async durability
indicesAdmin().prepareForceMerge().setMaxNumSegments(1).setFlush(true).get();
// We write some garbage into the shard directory so that we can verify that it is cleaned up before we resend.
// Cleanup helps prevent recovery from failing due to lack of space from garbage left over from a previous
// recovery that crashed during file transmission. #104473
// We can't look for the presence of the recovery temp files themselves because they are automatically
// cleaned up on clean shutdown by MultiFileWriter.
final String GARBAGE_PREFIX = "recovery.garbage.";
final CountDownLatch latch = new CountDownLatch(1);
final AtomicBoolean truncate = new AtomicBoolean(true);
IndicesService unluckyIndices = internalCluster().getInstance(IndicesService.class, unluckyNode);
Function<ShardId, Path> getUnluckyIndexPath = (shardId) -> unluckyIndices.indexService(shardId.getIndex())
.getShard(shardId.getId())
.shardPath()
.resolveIndex();
for (NodeStats dataNode : dataNodeStats) {
MockTransportService.getInstance(dataNode.getNode().getName())
.addSendBehavior(
internalCluster().getInstance(TransportService.class, unluckyNode),
(connection, requestId, action, request, options) -> {
if (action.equals(PeerRecoveryTargetService.Actions.FILE_CHUNK)) {
RecoveryFileChunkRequest req = (RecoveryFileChunkRequest) request;
logger.info("file chunk [{}] lastChunk: {}", req, req.lastChunk());
// During the first recovery attempt (when truncate is set), write an extra garbage file once for each
// file transmitted. We get multiple chunks per file but only one is the last.
if (truncate.get() && req.lastChunk()) {
final var shardPath = getUnluckyIndexPath.apply(req.shardId());
final var garbagePath = Files.createTempFile(shardPath, GARBAGE_PREFIX, null);
logger.info("writing garbage at: {}", garbagePath);
}
if ((req.name().endsWith("cfs") || req.name().endsWith("fdt")) && req.lastChunk() && truncate.get()) {
latch.countDown();
throw new RuntimeException("Caused some truncated files for fun and profit");
}
} else if (action.equals(PeerRecoveryTargetService.Actions.FILES_INFO)) {
// verify there are no garbage files present at the FILES_INFO stage of recovery. This precedes FILES_CHUNKS
// and so will run before garbage has been introduced on the first attempt, and before post-transfer cleanup
// has been performed on the second.
final var shardPath = getUnluckyIndexPath.apply(((RecoveryFilesInfoRequest) request).shardId());
try (var list = Files.list(shardPath).filter(path -> path.getFileName().startsWith(GARBAGE_PREFIX))) {
final var garbageFiles = list.toArray();
assertArrayEquals(
"garbage files should have been cleaned before file transmission",
new Path[0],
garbageFiles
);
}
}
connection.sendRequest(requestId, action, request, options);
}
);
}
logger.info("--> bumping replicas to 1"); //
updateIndexSettings(
Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
.put(
"index.routing.allocation.include._name", // now allow allocation on all nodes
primariesNode + "," + unluckyNode
),
"test"
);
latch.await();
// at this point we got some truncated leftovers on the replica on the unlucky node
// now we are allowing the recovery to allocate again and finish to see if we wipe the truncated files
truncate.compareAndSet(true, false);
ensureGreen("test");
for (int i = 0; i < numDocs; i++) {
String id = Integer.toString(i);
assertHitCount(prepareSearch().setQuery(QueryBuilders.termQuery("the_id", id)), 1);
}
}
}
|
TruncatedRecoveryIT
|
java
|
google__guice
|
core/test/com/google/inject/BindingAnnotationTest.java
|
{
"start": 4304,
"end": 4360
}
|
class ____ {
@Inject @Red String s;
}
static
|
RedFoo
|
java
|
apache__flink
|
flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/PatternStreamBuilder.java
|
{
"start": 2642,
"end": 7276
}
|
enum ____ {
ProcessingTime,
EventTime
}
private PatternStreamBuilder(
final DataStream<IN> inputStream,
final Pattern<IN, ?> pattern,
final TimeBehaviour timeBehaviour,
@Nullable final EventComparator<IN> comparator,
@Nullable final OutputTag<IN> lateDataOutputTag) {
this.inputStream = checkNotNull(inputStream);
this.pattern = checkNotNull(pattern);
this.timeBehaviour = checkNotNull(timeBehaviour);
this.comparator = comparator;
this.lateDataOutputTag = lateDataOutputTag;
}
TypeInformation<IN> getInputType() {
return inputStream.getType();
}
/**
* Invokes the {@link org.apache.flink.api.java.ClosureCleaner} on the given function if closure
* cleaning is enabled in the {@link ExecutionConfig}.
*
* @return The cleaned Function
*/
<F> F clean(F f) {
return inputStream.getExecutionEnvironment().clean(f);
}
PatternStreamBuilder<IN> withComparator(final EventComparator<IN> comparator) {
return new PatternStreamBuilder<>(
inputStream, pattern, timeBehaviour, checkNotNull(comparator), lateDataOutputTag);
}
PatternStreamBuilder<IN> withLateDataOutputTag(final OutputTag<IN> lateDataOutputTag) {
return new PatternStreamBuilder<>(
inputStream, pattern, timeBehaviour, comparator, checkNotNull(lateDataOutputTag));
}
PatternStreamBuilder<IN> inProcessingTime() {
return new PatternStreamBuilder<>(
inputStream, pattern, TimeBehaviour.ProcessingTime, comparator, lateDataOutputTag);
}
PatternStreamBuilder<IN> inEventTime() {
return new PatternStreamBuilder<>(
inputStream, pattern, TimeBehaviour.EventTime, comparator, lateDataOutputTag);
}
/**
* Creates a data stream containing results of {@link PatternProcessFunction} to fully matching
* event patterns.
*
* @param processFunction function to be applied to matching event sequences
* @param outTypeInfo output TypeInformation of {@link PatternProcessFunction#processMatch(Map,
* PatternProcessFunction.Context, Collector)}
* @param <OUT> type of output events
* @return Data stream containing fully matched event sequence with applied {@link
* PatternProcessFunction}
*/
<OUT, K> SingleOutputStreamOperator<OUT> build(
final TypeInformation<OUT> outTypeInfo,
final PatternProcessFunction<IN, OUT> processFunction) {
checkNotNull(outTypeInfo);
checkNotNull(processFunction);
final TypeSerializer<IN> inputSerializer =
inputStream
.getType()
.createSerializer(inputStream.getExecutionConfig().getSerializerConfig());
final boolean isProcessingTime = timeBehaviour == TimeBehaviour.ProcessingTime;
final boolean timeoutHandling = processFunction instanceof TimedOutPartialMatchHandler;
final NFACompiler.NFAFactory<IN> nfaFactory =
NFACompiler.compileFactory(pattern, timeoutHandling);
final CepOperator<IN, K, OUT> operator =
new CepOperator<>(
inputSerializer,
isProcessingTime,
nfaFactory,
comparator,
pattern.getAfterMatchSkipStrategy(),
processFunction,
lateDataOutputTag);
final SingleOutputStreamOperator<OUT> patternStream;
if (inputStream instanceof KeyedStream) {
KeyedStream<IN, K> keyedStream = (KeyedStream<IN, K>) inputStream;
patternStream = keyedStream.transform("CepOperator", outTypeInfo, operator);
} else {
KeySelector<IN, Byte> keySelector = new NullByteKeySelector<>();
patternStream =
inputStream
.keyBy(keySelector)
.transform("GlobalCepOperator", outTypeInfo, operator)
.forceNonParallel();
}
return patternStream;
}
// ---------------------------------------- factory-like methods
// ---------------------------------------- //
static <IN> PatternStreamBuilder<IN> forStreamAndPattern(
final DataStream<IN> inputStream, final Pattern<IN, ?> pattern) {
return new PatternStreamBuilder<>(
inputStream, pattern, TimeBehaviour.EventTime, null, null);
}
}
|
TimeBehaviour
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/concurrent/CircuitBreaker.java
|
{
"start": 1884,
"end": 4011
}
|
interface ____<T> {
/**
* Checks the state of this circuit breaker and changes it if necessary. The return
* value indicates whether the circuit breaker is now in state <em>closed</em>; a value
* of <strong>true</strong> typically means that the current operation can continue.
*
* @return <strong>true</strong> if the circuit breaker is now closed;
* <strong>false</strong> otherwise.
*/
boolean checkState();
/**
* Closes this circuit breaker. Its state is changed to closed. If this circuit
* breaker is already closed, this method has no effect.
*/
void close();
/**
* Increments the monitored value and performs a check of the current state of this
* circuit breaker. This method works like {@link #checkState()}, but the monitored
* value is incremented before the state check is performed.
*
* @param increment value to increment in the monitored value of the circuit breaker
* @return <strong>true</strong> if the circuit breaker is now closed;
* <strong>false</strong> otherwise
*/
boolean incrementAndCheckState(T increment);
/**
* Tests the current closed state of this circuit breaker. A return value of
* <strong>true</strong> means that the circuit breaker is currently closed. This
* means that everything is okay with the monitored subsystem.
*
* @return the current closed state of this circuit breaker.
*/
boolean isClosed();
/**
* Tests the current open state of this circuit breaker. A return value of
* <strong>true</strong> means that the circuit breaker is currently open indicating a
* problem in the monitored subsystem.
*
* @return the current open state of this circuit breaker.
*/
boolean isOpen();
/**
* Opens this circuit breaker. Its state is changed to open. Depending on a concrete
* implementation, it may close itself again if the monitored subsystem becomes
* available. If this circuit breaker is already open, this method has no effect.
*/
void open();
}
|
CircuitBreaker
|
java
|
apache__camel
|
components/camel-telemetry/src/main/java/org/apache/camel/telemetry/decorators/PahoSpanDecorator.java
|
{
"start": 927,
"end": 1618
}
|
class ____ extends AbstractMessagingSpanDecorator {
@Override
public String getComponent() {
return "paho";
}
@Override
protected String getDestination(Exchange exchange, Endpoint endpoint) {
// when using toD for dynamic destination then extract from header
String destination = exchange.getMessage().getHeader("CamelPahoOverrideTopic", String.class);
if (destination == null) {
destination = super.getDestination(exchange, endpoint);
}
return destination;
}
@Override
public String getComponentClassName() {
return "org.apache.camel.component.paho.PahoComponent";
}
}
|
PahoSpanDecorator
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/SimplePropertyDescriptorTests.java
|
{
"start": 2379,
"end": 2870
}
|
class ____.lang.String",
"indexedWriteMethod=public java.lang.Object");
}
}
@Test
void nonIndexedEquality() throws IntrospectionException, SecurityException, NoSuchMethodException {
Object pd1 = new ExtendedBeanInfo.SimplePropertyDescriptor("foo", null, null);
assertThat(pd1).isEqualTo(pd1);
Object pd2 = new ExtendedBeanInfo.SimplePropertyDescriptor("foo", null, null);
assertThat(pd1).isEqualTo(pd2);
assertThat(pd2).isEqualTo(pd1);
@SuppressWarnings("unused")
|
java
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/test/java/org/apache/camel/builder/endpoint/FtpRawParameterTest.java
|
{
"start": 1178,
"end": 2215
}
|
class ____ extends BaseEndpointDslTest {
@Test
public void testRaw() throws Exception {
FtpEndpoint ftp = (FtpEndpoint) context.getEndpoints().stream().filter(e -> e.getEndpointUri().startsWith("ftp"))
.findFirst().get();
assertNotNull(ftp);
assertEquals(5000L, ftp.getDelay());
assertTrue(ftp.getConfiguration().isBinary());
assertEquals("scott", ftp.getConfiguration().getUsername());
assertEquals("sec+%ret", ftp.getConfiguration().getPassword());
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new EndpointRouteBuilder() {
public void configure() throws Exception {
from(ftp("localhost:2121/inbox").username("scott").password("RAW(sec+%ret)").binary(true).delay(5000))
.routeId("myroute").noAutoStartup()
.convertBodyTo(String.class)
.to(mock("result"));
}
};
}
}
|
FtpRawParameterTest
|
java
|
processing__processing4
|
build/macos/appbundler/src/com/oracle/appbundler/Option.java
|
{
"start": 1325,
"end": 2598
}
|
class ____ optionally be named, which allows the bundled Java program
* itself to override the option. Changes will take effect upon restart of the
* application.<p>
* Assuming your {@code CFBundleIdentifier} (settable via {@link AppBundlerTask#setIdentifier(String)})
* is {@code com.oracle.appbundler}. Then you can override a named option by calling
* <pre>
* import java.util.prefs.Preferences;
* [...]
* Preferences jvmOptions = Preferences.userRoot().node("/com/oracle/appbundler/JVMOptions");
* jvmOptions.put("name", "value");
* jvmOptions.flush();
* </pre>
* The corresponding entries will be stored in a file called
* {@code ~/Library/Preferences/com.oracle.appbundler.plist}.
* To manipulate the file without Java's {@link java.util.prefs.Preferences} from the command line,
* you should use the tool
* <a href="https://developer.apple.com/library/mac/documentation/Darwin/Reference/ManPages/man1/defaults.1.html">defaults</a>.
* For example, to add an entry via the command line, use:
* <pre>
* defaults write com.oracle.appbundler /com/oracle/appbundler/ -dict-add JVMOptions/ '{"name"="value";}'
* </pre>
*
* @author <a href="mailto:hs@tagtraum.com">Hendrik Schreiber</a> (preference related code only)
*/
public
|
can
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/AnySetterTest.java
|
{
"start": 3427,
"end": 3572
}
|
class ____ extends Base {
public String value;
public Impl() { }
public Impl(String v) { value = v; }
}
static
|
Impl
|
java
|
micronaut-projects__micronaut-core
|
inject/src/main/java/io/micronaut/context/DefaultFieldInjectionPoint.java
|
{
"start": 5440,
"end": 5775
}
|
class ____ extends AbstractEnvironmentAnnotationMetadata {
FieldAnnotationMetadata(DefaultAnnotationMetadata targetMetadata) {
super(targetMetadata);
}
@Nullable
@Override
protected Environment getEnvironment() {
return environment;
}
}
}
|
FieldAnnotationMetadata
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/streaming/runtime/SourceNAryInputChainingITCase.java
|
{
"start": 17265,
"end": 17593
}
|
class ____<T> extends AbstractInput<T, T> {
PassThoughInput(AbstractStreamOperatorV2<T> owner, int inputId) {
super(owner, inputId);
}
@Override
public void processElement(StreamRecord<T> element) throws Exception {
output.collect(element);
}
}
}
|
PassThoughInput
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/FluxBufferTimeout.java
|
{
"start": 22625,
"end": 30324
}
|
class ____<T, C extends Collection<? super T>>
implements InnerOperator<T, C> {
final CoreSubscriber<? super C> actual;
final static int NOT_TERMINATED = 0;
final static int TERMINATED_WITH_SUCCESS = 1;
final static int TERMINATED_WITH_ERROR = 2;
final static int TERMINATED_WITH_CANCEL = 3;
final int batchSize;
final long timespan;
final TimeUnit unit;
final Scheduler.Worker timer;
final Runnable flushTask;
protected @Nullable Subscription subscription;
volatile int terminated =
NOT_TERMINATED;
@SuppressWarnings("rawtypes")
static final AtomicIntegerFieldUpdater<BufferTimeoutSubscriber> TERMINATED =
AtomicIntegerFieldUpdater.newUpdater(BufferTimeoutSubscriber.class, "terminated");
volatile long requested;
@SuppressWarnings("rawtypes")
static final AtomicLongFieldUpdater<BufferTimeoutSubscriber> REQUESTED =
AtomicLongFieldUpdater.newUpdater(BufferTimeoutSubscriber.class, "requested");
volatile long outstanding;
@SuppressWarnings("rawtypes")
static final AtomicLongFieldUpdater<BufferTimeoutSubscriber> OUTSTANDING =
AtomicLongFieldUpdater.newUpdater(BufferTimeoutSubscriber.class, "outstanding");
volatile int index = 0;
static final AtomicIntegerFieldUpdater<BufferTimeoutSubscriber> INDEX =
AtomicIntegerFieldUpdater.newUpdater(BufferTimeoutSubscriber.class, "index");
volatile @Nullable Disposable timespanRegistration;
final Supplier<C> bufferSupplier;
volatile @Nullable C values;
BufferTimeoutSubscriber(CoreSubscriber<? super C> actual,
int maxSize,
long timespan,
TimeUnit unit,
Scheduler.Worker timer,
Supplier<C> bufferSupplier) {
this.actual = actual;
this.timespan = timespan;
this.unit = unit;
this.timer = timer;
this.flushTask = () -> {
if (terminated == NOT_TERMINATED) {
int index;
for(;;){
index = this.index;
if(index == 0){
return;
}
if(INDEX.compareAndSet(this, index, 0)){
break;
}
}
flushCallback(null);
}
};
this.batchSize = maxSize;
this.bufferSupplier = bufferSupplier;
}
protected void doOnSubscribe() {
values = bufferSupplier.get();
}
void nextCallback(T value) {
synchronized (this) {
if (OUTSTANDING.decrementAndGet(this) < 0)
{
actual.onError(Exceptions.failWithOverflow("Unrequested element received"));
Context ctx = actual.currentContext();
Operators.onDiscard(value, ctx);
Operators.onDiscardMultiple(values, ctx);
return;
}
C v = values;
if(v == null) {
v = Objects.requireNonNull(bufferSupplier.get(),
"The bufferSupplier returned a null buffer");
values = v;
}
v.add(value);
}
}
void flushCallback(@Nullable T ev) { //TODO investigate ev not used
final C v;
boolean flush = false;
synchronized (this) {
v = values;
if (v != null && !v.isEmpty()) {
values = bufferSupplier.get();
flush = true;
}
}
if (flush) {
long r = requested;
if (r != 0L) {
if (r != Long.MAX_VALUE) {
long next;
for (;;) {
next = r - 1;
if (REQUESTED.compareAndSet(this, r, next)) {
actual.onNext(v);
return;
}
r = requested;
if (r <= 0L) {
break;
}
}
}
else {
actual.onNext(v);
return;
}
}
cancel();
actual.onError(Exceptions.failWithOverflow(
"Could not emit buffer due to lack of requests"));
Operators.onDiscardMultiple(v, this.actual.currentContext());
}
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.PARENT) return subscription;
if (key == Attr.CANCELLED) return terminated == TERMINATED_WITH_CANCEL;
if (key == Attr.TERMINATED) return terminated == TERMINATED_WITH_ERROR || terminated == TERMINATED_WITH_SUCCESS;
if (key == Attr.REQUESTED_FROM_DOWNSTREAM) return requested;
if (key == Attr.CAPACITY) return batchSize;
if (key == Attr.BUFFERED) return batchSize - index; // TODO: shouldn't this
// be index instead ? as it currently stands, the returned value represents
// anticipated items left to fill buffer if completed before timeout
if (key == Attr.RUN_ON) return timer;
if (key == Attr.RUN_STYLE) return Attr.RunStyle.ASYNC;
return InnerOperator.super.scanUnsafe(key);
}
@Override
public void onNext(final T value) {
int index;
boolean flush;
for(;;){
index = this.index + 1;
flush = index % batchSize == 0;
if(INDEX.compareAndSet(this, index - 1, flush ? 0 : index)){
break;
}
}
if (index == 1) {
try {
timespanRegistration = timer.schedule(flushTask, timespan, unit);
}
catch (RejectedExecutionException ree) {
Context ctx = actual.currentContext();
onError(Operators.onRejectedExecution(ree, subscription, null, value, ctx));
Operators.onDiscard(value, ctx);
return;
}
}
nextCallback(value);
if (flush) {
Disposable tsRegistration = timespanRegistration;
if (tsRegistration != null) {
tsRegistration.dispose();
timespanRegistration = null;
}
flushCallback(value);
}
}
void checkedComplete() {
try {
flushCallback(null);
}
finally {
actual.onComplete();
}
}
/**
* @return has this {@link Subscriber} terminated with success ?
*/
final boolean isCompleted() {
return terminated == TERMINATED_WITH_SUCCESS;
}
/**
* @return has this {@link Subscriber} terminated with an error ?
*/
final boolean isFailed() {
return terminated == TERMINATED_WITH_ERROR;
}
@Override
public void request(long n) {
if (Operators.validate(n)) {
Operators.addCap(REQUESTED, this, n);
if (terminated != NOT_TERMINATED) {
return;
}
if (batchSize == Integer.MAX_VALUE || n == Long.MAX_VALUE) {
requestMore(Long.MAX_VALUE);
}
else {
long requestLimit = Operators.multiplyCap(requested, batchSize);
if (requestLimit > outstanding) {
requestMore(requestLimit - outstanding);
}
}
}
}
final void requestMore(long n) {
Subscription s = this.subscription;
if (s != null) {
Operators.addCap(OUTSTANDING, this, n);
s.request(n);
}
}
@Override
public CoreSubscriber<? super C> actual() {
return actual;
}
@Override
public void onComplete() {
if (TERMINATED.compareAndSet(this, NOT_TERMINATED, TERMINATED_WITH_SUCCESS)) {
timer.dispose();
checkedComplete();
}
}
@Override
public void onError(Throwable throwable) {
if (TERMINATED.compareAndSet(this, NOT_TERMINATED, TERMINATED_WITH_ERROR)) {
timer.dispose();
Context ctx = actual.currentContext();
synchronized (this) {
C v = values;
if(v != null) {
Operators.onDiscardMultiple(v, ctx);
v.clear();
values = null;
}
}
actual.onError(throwable);
}
}
@Override
public void onSubscribe(Subscription s) {
if (Operators.validate(this.subscription, s)) {
this.subscription = s;
doOnSubscribe();
actual.onSubscribe(this);
}
}
@Override
public void cancel() {
if (TERMINATED.compareAndSet(this, NOT_TERMINATED, TERMINATED_WITH_CANCEL)) {
timer.dispose();
Subscription s = this.subscription;
if (s != null) {
this.subscription = null;
s.cancel();
}
C v = values;
if (v != null) {
Operators.onDiscardMultiple(v, actual.currentContext());
v.clear();
}
}
}
}
}
|
BufferTimeoutSubscriber
|
java
|
apache__camel
|
core/camel-main/src/test/java/org/apache/camel/main/PropertyBindingSupportRootArrayReflectionTest.java
|
{
"start": 4775,
"end": 5171
}
|
class ____ {
private String name;
private MyOtherFoo foo;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public MyOtherFoo getFoo() {
return foo;
}
public void setFoo(MyOtherFoo foo) {
this.foo = foo;
}
}
}
|
MyRoot
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/LogicalTypeDataTypeConverter.java
|
{
"start": 3840,
"end": 10692
}
|
class ____ implements LogicalTypeVisitor<DataType> {
@Override
public DataType visit(CharType charType) {
return new AtomicDataType(charType);
}
@Override
public DataType visit(VarCharType varCharType) {
return new AtomicDataType(varCharType);
}
@Override
public DataType visit(BooleanType booleanType) {
return new AtomicDataType(booleanType);
}
@Override
public DataType visit(BinaryType binaryType) {
return new AtomicDataType(binaryType);
}
@Override
public DataType visit(VarBinaryType varBinaryType) {
return new AtomicDataType(varBinaryType);
}
@Override
public DataType visit(DecimalType decimalType) {
return new AtomicDataType(decimalType);
}
@Override
public DataType visit(TinyIntType tinyIntType) {
return new AtomicDataType(tinyIntType);
}
@Override
public DataType visit(SmallIntType smallIntType) {
return new AtomicDataType(smallIntType);
}
@Override
public DataType visit(IntType intType) {
return new AtomicDataType(intType);
}
@Override
public DataType visit(BigIntType bigIntType) {
return new AtomicDataType(bigIntType);
}
@Override
public DataType visit(FloatType floatType) {
return new AtomicDataType(floatType);
}
@Override
public DataType visit(DoubleType doubleType) {
return new AtomicDataType(doubleType);
}
@Override
public DataType visit(DateType dateType) {
return new AtomicDataType(dateType);
}
@Override
public DataType visit(TimeType timeType) {
return new AtomicDataType(timeType);
}
@Override
public DataType visit(TimestampType timestampType) {
return new AtomicDataType(timestampType);
}
@Override
public DataType visit(ZonedTimestampType zonedTimestampType) {
return new AtomicDataType(zonedTimestampType);
}
@Override
public DataType visit(LocalZonedTimestampType localZonedTimestampType) {
return new AtomicDataType(localZonedTimestampType);
}
@Override
public DataType visit(YearMonthIntervalType yearMonthIntervalType) {
return new AtomicDataType(yearMonthIntervalType);
}
@Override
public DataType visit(DayTimeIntervalType dayTimeIntervalType) {
return new AtomicDataType(dayTimeIntervalType);
}
@Override
public DataType visit(ArrayType arrayType) {
return new CollectionDataType(arrayType, arrayType.getElementType().accept(this));
}
@Override
public DataType visit(MultisetType multisetType) {
return new CollectionDataType(multisetType, multisetType.getElementType().accept(this));
}
@Override
public DataType visit(MapType mapType) {
return new KeyValueDataType(
mapType,
mapType.getKeyType().accept(this),
mapType.getValueType().accept(this));
}
@Override
public DataType visit(RowType rowType) {
final List<DataType> fieldDataTypes =
rowType.getFields().stream()
.map(f -> f.getType().accept(this))
.collect(Collectors.toList());
return new FieldsDataType(rowType, fieldDataTypes);
}
@Override
public DataType visit(DistinctType distinctType) {
final DataType sourceDataType = distinctType.getSourceType().accept(this);
if (sourceDataType instanceof AtomicDataType) {
return new AtomicDataType(distinctType, sourceDataType.getConversionClass());
} else if (sourceDataType instanceof CollectionDataType) {
final CollectionDataType collectionDataType = (CollectionDataType) sourceDataType;
return new CollectionDataType(
distinctType,
collectionDataType.getConversionClass(),
collectionDataType.getElementDataType());
} else if (sourceDataType instanceof KeyValueDataType) {
final KeyValueDataType keyValueDataType = (KeyValueDataType) sourceDataType;
return new KeyValueDataType(
distinctType,
keyValueDataType.getConversionClass(),
keyValueDataType.getKeyDataType(),
keyValueDataType.getValueDataType());
} else if (sourceDataType instanceof FieldsDataType) {
return new FieldsDataType(
distinctType,
sourceDataType.getConversionClass(),
sourceDataType.getChildren());
}
throw new IllegalStateException("Unexpected data type instance.");
}
@Override
public DataType visit(StructuredType structuredType) {
final List<DataType> attributeDataTypes =
structuredType.getAttributes().stream()
.map(a -> a.getType().accept(this))
.collect(Collectors.toList());
return new FieldsDataType(structuredType, attributeDataTypes);
}
@Override
public DataType visit(NullType nullType) {
return new AtomicDataType(nullType);
}
@Override
public DataType visit(RawType<?> rawType) {
return new AtomicDataType(rawType);
}
@Override
public DataType visit(SymbolType<?> symbolType) {
return new AtomicDataType(symbolType);
}
@Override
public DataType visit(DescriptorType descriptorType) {
return new AtomicDataType(descriptorType);
}
@Override
public DataType visit(LogicalType other) {
if (other.is(LogicalTypeRoot.UNRESOLVED)) {
throw new ValidationException(
String.format(
"Unresolved logical type '%s' cannot be used to create a data type.",
other));
}
// for legacy types
return new AtomicDataType(other);
}
}
// --------------------------------------------------------------------------------------------
private LogicalTypeDataTypeConverter() {
// do not instantiate
}
}
|
DefaultDataTypeCreator
|
java
|
apache__camel
|
core/camel-management/src/test/java/org/apache/camel/management/ManagedRouteDumpRouteAsXmlTest.java
|
{
"start": 1444,
"end": 4466
}
|
class ____ extends ManagementTestSupport {
@Test
public void testDumpAsXml() throws Exception {
MBeanServer mbeanServer = getMBeanServer();
ObjectName on = getRouteObjectName(mbeanServer);
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Hello World");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
// should be started
String routeId = (String) mbeanServer.getAttribute(on, "RouteId");
assertEquals("myRoute", routeId);
String xml = (String) mbeanServer.invoke(on, "dumpRouteAsXml", null, null);
assertNotNull(xml);
log.info(xml);
assertTrue(xml.contains("route"));
assertTrue(xml.contains("myRoute"));
assertTrue(xml.contains("ref:bar"));
assertTrue(xml.contains("{{result}}"));
assertTrue(xml.contains("java.lang.Exception"));
}
@Test
public void testDumpAsXmlResolvePlaceholder() throws Exception {
MBeanServer mbeanServer = getMBeanServer();
ObjectName on = getRouteObjectName(mbeanServer);
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Hello World");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
// should be started
String routeId = (String) mbeanServer.getAttribute(on, "RouteId");
assertEquals("myRoute", routeId);
String xml = (String) mbeanServer.invoke(on, "dumpRouteAsXml", new Object[] { true }, new String[] { "boolean" });
assertNotNull(xml);
log.info(xml);
assertTrue(xml.contains("route"));
assertTrue(xml.contains("myRoute"));
assertTrue(xml.contains("ref:bar"));
assertTrue(xml.contains("mock:result"));
assertTrue(xml.contains("java.lang.Exception"));
}
static ObjectName getRouteObjectName(MBeanServer mbeanServer) throws Exception {
Set<ObjectName> set = mbeanServer.queryNames(new ObjectName("*:type=routes,*"), null);
assertEquals(1, set.size());
return set.iterator().next();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
Properties props = new Properties();
props.put("result", "mock:result");
context.getPropertiesComponent().setOverrideProperties(props);
Endpoint bar = context.getEndpoint("mock:bar");
bindToRegistry("bar", bar);
onException(Exception.class)
.log("${exception.stacktrace}").logStackTrace(true).handled(true);
from("direct:start").routeId("myRoute")
.log("Got ${body}")
.to("ref:bar")
.to("{{result}}");
}
};
}
}
|
ManagedRouteDumpRouteAsXmlTest
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/reflect/TypeTokenTest.java
|
{
"start": 59804,
"end": 60391
}
|
class ____<V> extends TwoTypeArgs<String, V> {}
TypeToken<TwoTypeArgs<?, ? extends Number>> supertype =
new TypeToken<TwoTypeArgs<?, ? extends Number>>() {};
TypeToken<StringForFirstTypeArg<Integer>> subtype =
new TypeToken<StringForFirstTypeArg<Integer>>() {};
assertTrue(subtype.isSubtypeOf(supertype));
// TODO(benyu): This should check equality to an expected value, see discussion in cl/98674873
TypeToken<?> unused = supertype.getSubtype(subtype.getRawType());
}
public void testGetSubtype_baseClassWithNoTypeArgs() {
|
StringForFirstTypeArg
|
java
|
spring-projects__spring-boot
|
test-support/spring-boot-test-support/src/main/java/org/springframework/boot/testsupport/process/DisabledIfProcessUnavailables.java
|
{
"start": 1199,
"end": 1286
}
|
interface ____ {
DisabledIfProcessUnavailable[] value();
}
|
DisabledIfProcessUnavailables
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/source/constants/ErroneousMapper1.java
|
{
"start": 405,
"end": 1098
}
|
interface ____ {
ErroneousMapper1 INSTANCE = Mappers.getMapper( ErroneousMapper1.class );
@Mappings({
@Mapping(target = "stringConstant", constant = "stringConstant"),
@Mapping(target = "emptyStringConstant", constant = ""),
@Mapping(target = "integerConstant", source = "test", constant = "14"),
@Mapping(target = "longWrapperConstant", constant = "3001L"),
@Mapping(target = "dateConstant", dateFormat = "dd-MM-yyyy", constant = "09-01-2014"),
@Mapping(target = "nameConstants", constant = "jack-jill-tom"),
@Mapping(target = "country", constant = "THE_NETHERLANDS")
})
Target sourceToTarget(Source s);
}
|
ErroneousMapper1
|
java
|
alibaba__nacos
|
core/src/test/java/com/alibaba/nacos/core/namespace/filter/NamespaceValidationConfigTest.java
|
{
"start": 1066,
"end": 3186
}
|
class ____ {
@Test
void testGetConfigFromEnvWithDefaultValue() throws ReflectiveOperationException {
MockEnvironment environment = new MockEnvironment();
EnvUtil.setEnvironment(environment);
Constructor<NamespaceValidationConfig> declaredConstructor = NamespaceValidationConfig.class.getDeclaredConstructor();
declaredConstructor.setAccessible(true);
NamespaceValidationConfig config = declaredConstructor.newInstance();
assertFalse(config.isNamespaceValidationEnabled());
}
@Test
void testGetConfigFromEnvWithDisabled() throws ReflectiveOperationException {
MockEnvironment environment = new MockEnvironment();
EnvUtil.setEnvironment(environment);
environment.setProperty("nacos.core.namespace.validation.enabled", String.valueOf(false));
Constructor<NamespaceValidationConfig> declaredConstructor = NamespaceValidationConfig.class.getDeclaredConstructor();
declaredConstructor.setAccessible(true);
NamespaceValidationConfig config = declaredConstructor.newInstance();
assertFalse(config.isNamespaceValidationEnabled());
}
@Test
void testGetConfigFromEnvWithEnabled() throws ReflectiveOperationException {
MockEnvironment environment = new MockEnvironment();
EnvUtil.setEnvironment(environment);
environment.setProperty("nacos.core.namespace.validation.enabled", String.valueOf(true));
Constructor<NamespaceValidationConfig> declaredConstructor = NamespaceValidationConfig.class.getDeclaredConstructor();
declaredConstructor.setAccessible(true);
NamespaceValidationConfig config = declaredConstructor.newInstance();
assertTrue(config.isNamespaceValidationEnabled());
}
@Test
void testPrintConfig() {
NamespaceValidationConfig config = NamespaceValidationConfig.getInstance();
String configStr = config.printConfig();
assertTrue(configStr.contains("NamespaceValidationConfig"));
assertTrue(configStr.contains("namespaceValidationEnabled"));
}
}
|
NamespaceValidationConfigTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/DateUtils.java
|
{
"start": 1253,
"end": 5323
}
|
class ____ {
public static final ZoneId UTC = ZoneId.of("Z");
public static final String EMPTY = "";
public static final DateTimeFormatter ISO_DATE_WITH_NANOS = new DateTimeFormatterBuilder().parseCaseInsensitive()
.append(ISO_LOCAL_DATE)
.appendLiteral('T')
.appendValue(HOUR_OF_DAY, 2)
.appendLiteral(':')
.appendValue(MINUTE_OF_HOUR, 2)
.appendLiteral(':')
.appendValue(SECOND_OF_MINUTE, 2)
.appendFraction(NANO_OF_SECOND, 3, 9, true)
.appendOffsetId()
.toFormatter(Locale.ROOT);
public static final DateTimeFormatter ISO_TIME_WITH_NANOS = new DateTimeFormatterBuilder().parseCaseInsensitive()
.appendValue(HOUR_OF_DAY, 2)
.appendLiteral(':')
.appendValue(MINUTE_OF_HOUR, 2)
.appendLiteral(':')
.appendValue(SECOND_OF_MINUTE, 2)
.appendFraction(NANO_OF_SECOND, 3, 9, true)
.appendOffsetId()
.toFormatter(Locale.ROOT);
public static final DateFormatter UTC_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_optional_time").withZone(UTC);
public static final int SECONDS_PER_MINUTE = 60;
public static final int SECONDS_PER_HOUR = SECONDS_PER_MINUTE * 60;
public static final int SECONDS_PER_DAY = SECONDS_PER_HOUR * 24;
private DateUtils() {}
/**
* Parses the given string into a ZonedDateTime using the provided timezone.
*/
public static ZonedDateTime asDateTimeWithNanos(String dateFormat, ZoneId zoneId) {
return DateFormatters.from(ISO_DATE_WITH_NANOS.parse(dateFormat)).withZoneSameInstant(zoneId);
}
public static String toString(Object value) {
if (value == null) {
return "null";
}
if (value instanceof ZonedDateTime) {
return ((ZonedDateTime) value).format(ISO_DATE_WITH_NANOS);
}
if (value instanceof OffsetTime) {
return ((OffsetTime) value).format(ISO_TIME_WITH_NANOS);
}
if (value instanceof Timestamp ts) {
return ts.toInstant().toString();
}
// handle intervals
// YEAR/MONTH/YEAR TO MONTH -> YEAR TO MONTH
if (value instanceof Period p) {
// +yyy-mm - 7 chars
StringBuilder sb = new StringBuilder(7);
if (p.isNegative()) {
sb.append("-");
p = p.negated();
} else {
sb.append("+");
}
sb.append(p.getYears());
sb.append("-");
sb.append(p.getMonths());
return sb.toString();
}
// DAY/HOUR/MINUTE/SECOND (and variations) -> DAY_TO_SECOND
if (value instanceof Duration d) {
// +ddd hh:mm:ss.mmmmmmmmm - 23 chars
StringBuilder sb = new StringBuilder(23);
if (d.isNegative()) {
sb.append("-");
d = d.negated();
} else {
sb.append("+");
}
long durationInSec = d.getSeconds();
sb.append(durationInSec / SECONDS_PER_DAY);
sb.append(" ");
durationInSec = durationInSec % SECONDS_PER_DAY;
sb.append(indent(durationInSec / SECONDS_PER_HOUR));
sb.append(":");
durationInSec = durationInSec % SECONDS_PER_HOUR;
sb.append(indent(durationInSec / SECONDS_PER_MINUTE));
sb.append(":");
durationInSec = durationInSec % SECONDS_PER_MINUTE;
sb.append(indent(durationInSec));
long millis = TimeUnit.NANOSECONDS.toMillis(d.getNano());
if (millis > 0) {
sb.append(".");
while (millis % 10 == 0) {
millis /= 10;
}
sb.append(millis);
}
return sb.toString();
}
return Objects.toString(value);
}
private static String indent(long timeUnit) {
return timeUnit < 10 ? "0" + timeUnit : Long.toString(timeUnit);
}
}
|
DateUtils
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/codec/ByteArrayEncoderTests.java
|
{
"start": 1042,
"end": 2120
}
|
class ____ extends AbstractEncoderTests<ByteArrayEncoder> {
private final byte[] fooBytes = "foo".getBytes(StandardCharsets.UTF_8);
private final byte[] barBytes = "bar".getBytes(StandardCharsets.UTF_8);
ByteArrayEncoderTests() {
super(new ByteArrayEncoder());
}
@Override
@Test
protected void canEncode() {
assertThat(this.encoder.canEncode(ResolvableType.forClass(byte[].class),
MimeTypeUtils.TEXT_PLAIN)).isTrue();
assertThat(this.encoder.canEncode(ResolvableType.forClass(Integer.class),
MimeTypeUtils.TEXT_PLAIN)).isFalse();
assertThat(this.encoder.canEncode(ResolvableType.forClass(byte[].class),
MimeTypeUtils.APPLICATION_JSON)).isTrue();
// SPR-15464
assertThat(this.encoder.canEncode(ResolvableType.NONE, null)).isFalse();
}
@Override
@Test
protected void encode() {
Flux<byte[]> input = Flux.just(this.fooBytes, this.barBytes);
testEncodeAll(input, byte[].class, step -> step
.consumeNextWith(expectBytes(this.fooBytes))
.consumeNextWith(expectBytes(this.barBytes))
.verifyComplete());
}
}
|
ByteArrayEncoderTests
|
java
|
google__guice
|
core/src/com/google/inject/internal/aop/ClassBuilding.java
|
{
"start": 10947,
"end": 11783
}
|
class
____ (hostClass == method.getDeclaringClass()) {
visitor.accept(method);
}
}
}
}
/** Visit all subclass accessible members in the given array. */
static <T extends Executable> void visitMembers(
T[] members, boolean samePackage, Consumer<T> visitor) {
for (T member : members) {
if (canAccess(member, samePackage)) {
visitor.accept(member);
}
}
}
/** Can we access this member from a subclass which may be in the same package? */
private static boolean canAccess(Executable member, boolean samePackage) {
int modifiers = member.getModifiers();
// public and protected members are always ok, non-private also ok if in the same package
return (modifiers & (PUBLIC | PROTECTED)) != 0 || (samePackage && (modifiers & PRIVATE) == 0);
}
}
|
if
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/AnnotationConfigApplicationContextTests.java
|
{
"start": 28439,
"end": 28797
}
|
class ____ implements FactoryBean<String> {
NonInstantiatedFactoryBean() {
throw new IllegalStateException();
}
@Override
public String getObject() {
return "";
}
@Override
public Class<?> getObjectType() {
return String.class;
}
@Override
public boolean isSingleton() {
return true;
}
}
static
|
NonInstantiatedFactoryBean
|
java
|
dropwizard__dropwizard
|
dropwizard-logging/src/main/java/io/dropwizard/logging/common/BootstrapLogging.java
|
{
"start": 620,
"end": 804
}
|
class ____ configure logging before the dropwizard yml
* configuration has been read, parsed, and the provided logging
* strategy has been applied.
* <p/>
* N.B. The methods in this
|
to
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/processor/StateRestoreListener.java
|
{
"start": 2077,
"end": 2502
}
|
class ____ state store registration.
*
* <p>
* Also note that the update process of standby tasks is not monitored via this interface, since a standby task does
* note actually <it>restore</it> state, but keeps updating its state from the changelogs written by the active task
* which does not ever finish.
*
* <p>
* Incremental updates are exposed so users can estimate how much progress has been made.
*/
public
|
during
|
java
|
apache__camel
|
components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsMessageCreatedStrategyComponentTest.java
|
{
"start": 3440,
"end": 3826
}
|
class ____ implements MessageCreatedStrategy {
@Override
public void onMessageCreated(Message message, Session session, Exchange exchange, Throwable cause) {
try {
JmsMessageHelper.setProperty(message, "beer", "Carlsberg");
} catch (JMSException e) {
// ignore
}
}
}
}
|
MyMessageCreatedStrategy
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileMonitoringTimerTask.java
|
{
"start": 1580,
"end": 4237
}
|
class ____ extends TimerTask {
static final Logger LOG = LoggerFactory.getLogger(FileMonitoringTimerTask.class);
@VisibleForTesting
static final String PROCESS_ERROR_MESSAGE =
"Could not process file change : ";
final private List<Path> filePaths;
final private Consumer<Path> onFileChange;
final Consumer<Throwable> onChangeFailure;
private List<Long> lastProcessed;
/**
* See {@link #FileMonitoringTimerTask(List, Consumer, Consumer)}.
*
* @param filePath The file to monitor.
* @param onFileChange What to do when the file changes.
* @param onChangeFailure What to do when <code>onFileChange</code>
* throws an exception.
*/
public FileMonitoringTimerTask(Path filePath, Consumer<Path> onFileChange,
Consumer<Throwable> onChangeFailure) {
this(Collections.singletonList(filePath), onFileChange, onChangeFailure);
}
/**
* Create file monitoring task to be scheduled using a standard
* Java {@link java.util.Timer} instance.
*
* @param filePaths The path to the file to monitor.
* @param onFileChange The function to call when the file has changed.
* @param onChangeFailure The function to call when an exception is
* thrown during the file change processing.
*/
public FileMonitoringTimerTask(List<Path> filePaths,
Consumer<Path> onFileChange,
Consumer<Throwable> onChangeFailure) {
Preconditions.checkNotNull(filePaths,
"path to monitor disk file is not set");
Preconditions.checkNotNull(onFileChange,
"action to monitor disk file is not set");
this.filePaths = new ArrayList<Path>(filePaths);
this.lastProcessed = new ArrayList<Long>();
this.filePaths.forEach(path ->
this.lastProcessed.add(path.toFile().lastModified()));
this.onFileChange = onFileChange;
this.onChangeFailure = onChangeFailure;
}
@Override
public void run() {
int modified = -1;
for (int i = 0; i < filePaths.size() && modified < 0; i++) {
if (lastProcessed.get(i) != filePaths.get(i).toFile().lastModified()) {
modified = i;
}
}
if (modified > -1) {
Path filePath = filePaths.get(modified);
try {
onFileChange.accept(filePath);
} catch (Throwable t) {
if (onChangeFailure != null) {
onChangeFailure.accept(t);
} else {
LOG.error(PROCESS_ERROR_MESSAGE + filePath.toString(), t);
}
}
lastProcessed.set(modified, filePath.toFile().lastModified());
}
}
}
|
FileMonitoringTimerTask
|
java
|
quarkusio__quarkus
|
independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/StereotypeInfo.java
|
{
"start": 179,
"end": 2772
}
|
class ____ {
private final ScopeInfo defaultScope;
private final List<AnnotationInstance> interceptorBindings;
private final boolean alternative;
private final Integer alternativePriority;
private final boolean isNamed;
private final boolean isInherited;
private final List<AnnotationInstance> parentStereotypes;
private final ClassInfo target;
// used to differentiate between standard stereotype and one that was added through StereotypeRegistrarBuildItem
private final boolean isAdditionalStereotype;
public StereotypeInfo(ScopeInfo defaultScope, List<AnnotationInstance> interceptorBindings, boolean alternative,
Integer alternativePriority, boolean isNamed, boolean isAdditionalStereotype, ClassInfo target, boolean isInherited,
List<AnnotationInstance> parentStereotypes) {
this.defaultScope = defaultScope;
this.interceptorBindings = interceptorBindings;
this.alternative = alternative;
this.alternativePriority = alternativePriority;
this.isNamed = isNamed;
this.isInherited = isInherited;
this.parentStereotypes = parentStereotypes;
this.target = target;
this.isAdditionalStereotype = isAdditionalStereotype;
}
public StereotypeInfo(ScopeInfo defaultScope, List<AnnotationInstance> interceptorBindings, boolean alternative,
Integer alternativePriority, boolean isNamed, ClassInfo target, boolean isInherited,
List<AnnotationInstance> parentStereotype) {
this(defaultScope, interceptorBindings, alternative, alternativePriority, isNamed, false, target, isInherited,
parentStereotype);
}
public ScopeInfo getDefaultScope() {
return defaultScope;
}
public List<AnnotationInstance> getInterceptorBindings() {
return interceptorBindings;
}
public boolean isAlternative() {
return alternative;
}
public boolean isInherited() {
return isInherited;
}
public Integer getAlternativePriority() {
return alternativePriority;
}
public boolean isNamed() {
return isNamed;
}
public ClassInfo getTarget() {
return target;
}
public DotName getName() {
return target.name();
}
public boolean isAdditionalStereotype() {
return isAdditionalStereotype;
}
public boolean isGenuine() {
return !isAdditionalStereotype;
}
public List<AnnotationInstance> getParentStereotypes() {
return parentStereotypes;
}
}
|
StereotypeInfo
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/proxy/fake/FakeDriverTest.java
|
{
"start": 854,
"end": 1360
}
|
class ____ extends TestCase {
public void test_0() throws Exception {
String url = "jdbc:fake:x1";
Properties info = new Properties();
String sql = "SELECT 1";
MockDriver driver = new MockDriver();
Connection conn = driver.connect(url, info);
Statement stmt = conn.createStatement();
ResultSet rs = stmt.executeQuery(sql);
assertEquals(true, rs.next());
assertEquals(1, rs.getInt(1));
conn.close();
}
}
|
FakeDriverTest
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraGen.java
|
{
"start": 6293,
"end": 6395
}
|
class ____ given a row number, will generate the appropriate
* output line.
*/
public static
|
that
|
java
|
junit-team__junit5
|
junit-jupiter-params/src/main/java/org/junit/jupiter/params/provider/EnumSource.java
|
{
"start": 4636,
"end": 4666
}
|
enum ____ by name.
*/
|
constants
|
java
|
alibaba__fastjson
|
src/main/java/com/alibaba/fastjson/support/spring/FastJsonHttpMessageConverter4.java
|
{
"start": 418,
"end": 502
}
|
class ____ compatibility
* @see FastJsonHttpMessageConverter
*/
@Deprecated
public
|
for
|
java
|
micronaut-projects__micronaut-core
|
http/src/main/java/io/micronaut/http/annotation/Header.java
|
{
"start": 2123,
"end": 2226
}
|
interface ____ {
/**
* If used as a bound parameter, this is the header name. If used on a
|
Header
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/jdk/UntypedDeserializationTest.java
|
{
"start": 3749,
"end": 4311
}
|
class ____ {
public java.io.Serializable value;
}
/*
/**********************************************************
/* Test methods
/**********************************************************
*/
private final ObjectMapper MAPPER = newJsonMapper();
@SuppressWarnings("unchecked")
@Test
public void testSampleDoc() throws Exception
{
final String JSON = SAMPLE_DOC_JSON_SPEC;
/* To get "untyped" Mapping (to Maps, Lists, instead of beans etc),
* we'll specify plain old Object.
|
SerContainer
|
java
|
micronaut-projects__micronaut-core
|
http-server-tck/src/main/java/io/micronaut/http/server/tck/tests/FiltersTest.java
|
{
"start": 6099,
"end": 6371
}
|
class ____ implements ExceptionHandler<CustomException, HttpResponse<?>> {
@Override
public HttpResponse handle(HttpRequest request, CustomException exception) {
return HttpResponse.ok("Exception Handled");
}
}
}
|
CustomExceptionHandler
|
java
|
reactor__reactor-core
|
reactor-core/src/test/java/reactor/core/publisher/scenarios/FluxTests.java
|
{
"start": 17811,
"end": 34420
}
|
class ____ implements Function<String, Integer> {
@Override
public Integer apply(String s) {
return Integer.parseInt(s);
}
}
@Test
public void analyticsTest() throws Exception {
Sinks.Many<Integer> source = Sinks.many().replay().all();
long avgTime = 50l;
Mono<Long> result = source
.asFlux()
.log("delay")
.publishOn(asyncGroup)
.delayElements(Duration.ofMillis(avgTime))
.elapsed()
.skip(1)
.groupBy(w -> w.getT1())
.flatMap(w -> w.count().map(c -> Tuples.of(w.key(), c)))
.log("elapsed")
.collectSortedList(Comparator.comparing(Tuple2::getT1))
.flatMapMany(Flux::fromIterable)
.reduce(-1L, (acc, next) -> acc > 0l ? ((next.getT1() + acc) / 2) : next.getT1())
.log("reduced-elapsed")
.cache();
source.asFlux()
.subscribe();
for (int j = 0; j < 10; j++) {
source.emitNext(1, FAIL_FAST);
}
source.emitComplete(FAIL_FAST);
assertThat(result.block(Duration.ofSeconds(5))).isGreaterThanOrEqualTo((long)(avgTime * 0.6));
}
@Test
@Tag("slow")
public void parallelTests() throws InterruptedException {
parallelMapManyTest("sync", 1_000_000);
parallelMapManyTest("shared", 1_000_000);
parallelTest("sync", 1_000_000);
parallelTest("shared", 1_000_000);
parallelTest("partitioned", 1_000_000);
parallelMapManyTest("partitioned", 1_000_000);
parallelBufferedTimeoutTest(1_000_000);
}
private void parallelBufferedTimeoutTest(int iterations) throws InterruptedException {
System.out.println("Buffered Stream: " + iterations);
final CountDownLatch latch = new CountDownLatch(iterations);
Sinks.Many<String> deferred = Sinks.many().multicast().onBackpressureBuffer();
deferred.asFlux()
.publishOn(asyncGroup)
.parallel(8)
.groups()
.subscribe(stream -> stream.publishOn(asyncGroup)
.bufferTimeout(1000 / 8, Duration.ofSeconds(1))
.subscribe(batch -> {
for (int j = 0; j < batch.size(); j++) {
latch.countDown();
}
}));
String[] data = new String[iterations];
for (int i = 0; i < iterations; i++) {
data[i] = Integer.toString(i);
}
long start = System.currentTimeMillis();
for (String i : data) {
long busyLoops = 0;
while (deferred.tryEmitNext(i).isFailure()) {
busyLoops++;
if (busyLoops % 5000 == 0 && System.currentTimeMillis() - start >= 10_0000) {
throw new RuntimeException("Busy loop timed out");
}
}
}
if (!latch.await(10, TimeUnit.SECONDS)) {
throw new RuntimeException(latch.getCount()+ " ");
}
long stop = System.currentTimeMillis() - start;
stop = stop > 0 ? stop : 1;
System.out.println("Time spent: " + stop + "ms");
System.out.println("ev/ms: " + iterations / stop);
System.out.println("ev/s: " + iterations / stop * 1000);
System.out.println();
assertThat(latch.getCount()).isEqualTo(0);
}
private void parallelTest(String dispatcher, int iterations) throws InterruptedException {
System.out.println("Dispatcher: " + dispatcher);
System.out.println("..........: " + iterations);
int[] data;
CountDownLatch latch = new CountDownLatch(iterations);
Sinks.Many<Integer> deferred;
switch (dispatcher) {
case "partitioned":
deferred = Sinks.many().multicast().onBackpressureBuffer();
deferred.asFlux()
.publishOn(asyncGroup)
.parallel(2)
.groups()
.subscribe(stream -> stream.publishOn(asyncGroup)
.map(i -> i)
.scan(1, (acc, next) -> acc + next)
.subscribe(i -> latch.countDown()));
break;
default:
deferred = Sinks.many().multicast().onBackpressureBuffer();
deferred.asFlux()
.publishOn(asyncGroup)
.map(i -> i)
.scan(1, (acc, next) -> acc + next)
.subscribe(i -> latch.countDown());
}
data = new int[iterations];
for (int i = 0; i < iterations; i++) {
data[i] = i;
}
long start = System.currentTimeMillis();
for (int i : data) {
long busyLoops = 0;
while (deferred.tryEmitNext(i).isFailure()) {
busyLoops++;
if (busyLoops % 5000 == 0 && System.currentTimeMillis() - start >= 10_0000) {
throw new RuntimeException("Busy loop timed out");
}
}
}
if (!latch.await(15, TimeUnit.SECONDS)) {
throw new RuntimeException("Count:" + (iterations - latch.getCount()) + " ");
}
long stop = System.currentTimeMillis() - start;
stop = stop > 0 ? stop : 1;
System.out.println("Time spent: " + stop + "ms");
System.out.println("ev/ms: " + iterations / stop);
System.out.println("ev/s: " + iterations / stop * 1000);
System.out.println();
assertThat(latch.getCount()).isEqualTo(0);
}
private void parallelMapManyTest(String dispatcher, int iterations) throws InterruptedException {
System.out.println("MM Dispatcher: " + dispatcher);
System.out.println("..........: " + iterations);
int[] data;
CountDownLatch latch = new CountDownLatch(iterations);
Sinks.Many<Integer> mapManydeferred;
switch (dispatcher) {
case "partitioned":
mapManydeferred = Sinks.many().multicast().onBackpressureBuffer();
mapManydeferred.asFlux()
.parallel(4)
.groups()
.subscribe(substream -> substream.publishOn(asyncGroup)
.subscribe(i -> latch.countDown()));
break;
default:
mapManydeferred = Sinks.many().multicast().onBackpressureBuffer();
("sync".equals(dispatcher) ? mapManydeferred.asFlux() : mapManydeferred.asFlux().publishOn(asyncGroup))
.flatMap(Flux::just)
.subscribe(i -> latch.countDown());
}
data = new int[iterations];
for (int i = 0; i < iterations; i++) {
data[i] = i;
}
long start = System.currentTimeMillis();
for (int i : data) {
long busyLoops = 0;
while (mapManydeferred.tryEmitNext(i).isFailure()) {
busyLoops++;
if (busyLoops % 5000 == 0 && System.currentTimeMillis() - start >= 10_0000) {
throw new RuntimeException("Busy loop timed out");
}
}
}
if (!latch.await(20, TimeUnit.SECONDS)) {
throw new RuntimeException(latch.getCount()+"");
}
else {
System.out.println(latch.getCount());
}
assertThat(latch.getCount()).isEqualTo(0);
long stop = System.currentTimeMillis() - start;
stop = stop > 0 ? stop : 1;
System.out.println("MM Dispatcher: " + dispatcher);
System.out.println("Time spent: " + stop + "ms");
System.out.println("ev/ms: " + iterations / stop);
System.out.println("ev/s: " + iterations / stop * 1000);
System.out.println();
}
/**
* See https://github.com/reactor/reactor/issues/451
* @throws Exception for convenience
*/
@Test
public void partitionByHashCodeShouldNeverCreateMoreStreamsThanSpecified() throws Exception {
Flux<Integer> stream = Flux.range(-10, 20)
.map(Integer::intValue);
assertThat(stream.parallel(2)
.groups()
.count()
.block()).isEqualTo(2);
}
/**
* original from @oiavorskyl https://github.com/eventBus/eventBus/issues/358
* @throws Exception for convenience
*/
//@Test
public void shouldNotFlushStreamOnTimeoutPrematurelyAndShouldDoItConsistently() throws Exception {
for (int i = 0; i < 100; i++) {
shouldNotFlushStreamOnTimeoutPrematurely();
}
}
	/**
	 * original from @oiavorskyl https://github.com/eventBus/eventBus/issues/358
	 * <p>
	 * Feeds 100k integers into a partitioned, size/time-bounded buffering
	 * pipeline and records how many batches of each size were observed; most
	 * batches are expected to fill to BATCH_SIZE rather than being flushed
	 * early by the timeout.
	 * @throws Exception for convenience
	 */
	@Test
	public void shouldNotFlushStreamOnTimeoutPrematurely() throws Exception {
		final int NUM_MESSAGES = 100000;
		final int BATCH_SIZE = 1000;
		final int TIMEOUT = 100;
		final int PARALLEL_STREAMS = 2;

		/**
		 * Relative tolerance, default to 90% of the batches, in an operative environment, random factors can impact
		 * the fluxion latency, e.g. GC pause if system is under pressure.
		 */
		final double TOLERANCE = 0.9;

		Sinks.Many<Integer> batchingStreamDef = Sinks.many().multicast().onBackpressureBuffer();

		List<Integer> testDataset = createTestDataset(NUM_MESSAGES);

		final CountDownLatch latch = new CountDownLatch(NUM_MESSAGES);
		// Maps batch size -> number of batches observed with that size.
		Map<Integer, Integer> batchesDistribution = new ConcurrentHashMap<>();
		batchingStreamDef.asFlux()
		                 .publishOn(asyncGroup)
		                 .parallel(PARALLEL_STREAMS)
		                 .groups()
		                 .subscribe(substream -> substream.hide().publishOn(asyncGroup)
		                                                  .bufferTimeout(BATCH_SIZE, Duration.ofMillis(TIMEOUT))
		                                                  .subscribe(items -> {
			                                                  batchesDistribution.compute(items.size(),
					                                                  (key, value) -> value == null ? 1 : value + 1);
			                                                  items.forEach(item -> latch.countDown());
		                                                  }));

		final long start = System.currentTimeMillis();
		testDataset.forEach(data -> {
			long busyLoops = 0;
			// Busy-wait on backpressure until the sink accepts the value.
			// NOTE(review): 10_0000 is 100 seconds, not 10 — confirm intent.
			while (batchingStreamDef.tryEmitNext(data).isFailure()) {
				busyLoops++;
				if (busyLoops % 5000 == 0 && System.currentTimeMillis() - start >= 10_0000) {
					throw new RuntimeException("Busy loop timed out");
				}
			}
		});
		System.out.println(batchesDistribution);

		if (!latch.await(10, TimeUnit.SECONDS)) {
			throw new RuntimeException(latch.getCount() + " ");
		}

		// Sum of (batch size * batch count) must account for every message.
		int messagesProcessed = batchesDistribution.entrySet()
		                                           .stream()
		                                           .mapToInt(entry -> entry.getKey() * entry.getValue())
		                                           .reduce(Integer::sum)
		                                           .getAsInt();

		assertThat(messagesProcessed).isEqualTo(NUM_MESSAGES);
		// NOTE(review): TOLERANCE appears on both sides so it cancels out; as
		// written this only asserts that the full-batch count does not exceed
		// NUM_MESSAGES / BATCH_SIZE. It also NPEs if no batch ever reached
		// exactly BATCH_SIZE. Confirm whether ">= 90% full batches" was the
		// intended check.
		assertThat(NUM_MESSAGES / BATCH_SIZE * TOLERANCE)
				.as("Less than 90% (%d) of the batches are matching the buffer size: %d", NUM_MESSAGES / BATCH_SIZE * TOLERANCE, batchesDistribution.get(BATCH_SIZE))
				.isGreaterThanOrEqualTo(batchesDistribution.get(BATCH_SIZE) * TOLERANCE);
	}
@Test
public void prematureFlatMapCompletion() throws Exception {
long res = Flux.range(0, 1_000_000)
.flatMap(v -> Flux.range(v, 2))
.count()
.block(Duration.ofSeconds(5));
assertThat(res).as("latch value").isEqualTo(2_000_000);
}
@Test
public void cancelOn() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(1);
AtomicReference<Thread> thread = new AtomicReference<>();
Disposable res = Flux.never()
.doOnCancel(() -> {
thread.set(Thread.currentThread());
countDownLatch.countDown();
})
.cancelOn(asyncGroup)
.subscribe();
res.dispose();
assertThat(countDownLatch.await(3, TimeUnit.SECONDS)).isTrue();
assertThat(thread.get()).isNotSameAs(Thread.currentThread());
}
@Test
public void sequenceEqual() throws Exception {
boolean res = Mono.sequenceEqual(Flux.just(1, 2, 3), Flux.just(1, 2, 3))
.block();
assertThat(res).isTrue();
res = Mono.sequenceEqual(Flux.just(1, 3), Flux.just(1, 2, 3))
.block();
assertThat(res).isFalse();
}
@Test
public void zipOfNull() {
try {
Flux<String> as = Flux.just("x");
Flux<String> bs = Flux.just((String)null);
assertThat(Flux.zip(as, bs).next().block()).isNull();
fail("Exception expected");
}
catch (NullPointerException npe) {
return;
}
}
	@Test
	public void shouldCorrectlyDispatchComplexFlow() throws InterruptedException {
		// External feed that is merged into the main pipeline after subscription.
		Sinks.Many<Integer> globalFeed = Sinks.many().multicast().onBackpressureBuffer();

		CountDownLatch afterSubscribe = new CountDownLatch(1);
		// 4 expected values in [2222, 3333]: 2222 (parsed), 3333 (static), plus
		// 2223 and 2224 fed through globalFeed below.
		CountDownLatch latch = new CountDownLatch(4);

		Flux<Integer> s = Flux.just("2222")
		                      .map(Integer::parseInt)
		                      .flatMap(l -> Flux.merge(globalFeed.asFlux().publishOn(asyncGroup),
				                      Flux.just(1111, l, 3333, 4444, 5555, 6666)).log("merged")
		                                        .publishOn(asyncGroup)
		                                        .log("dispatched")
		                                        .doOnSubscribe(x -> afterSubscribe.countDown())
		                                        .filter(nearbyLoc -> 3333 >= nearbyLoc)
		                                        .filter(nearbyLoc -> 2222 <= nearbyLoc)
		                      );

		/*Disposable action = */s
				.limitRate(1)
				.subscribe(integer -> {
					latch.countDown();
					System.out.println(integer);
				});

		// Only emit into the feed once the merge is subscribed, otherwise the
		// multicast sink would drop the two values.
		afterSubscribe.await(5, TimeUnit.SECONDS);

		globalFeed.emitNext(2223, FAIL_FAST);
		globalFeed.emitNext(2224, FAIL_FAST);

		latch.await(5, TimeUnit.SECONDS);
		assertThat(latch.getCount()).as("latch count").isEqualTo(0);
	}
@Test
public void testParallelAsyncStream2() throws InterruptedException {
final int numOps = 25;
CountDownLatch latch = new CountDownLatch(numOps);
for (int i = 0; i < numOps; i++) {
final String source = "ASYNC_TEST " + i;
Flux.just(source)
.transform(operationStream -> operationStream.publishOn(asyncGroup)
.delayElements(Duration.ofMillis(100))
.map(s -> s + " MODIFIED")
.map(s -> {
latch.countDown();
return s;
}))
.take(Duration.ofSeconds(2))
.log("parallelStream", Level.FINE)
.subscribe(LOG::debug);
}
latch.await(15, TimeUnit.SECONDS);
assertThat(latch.getCount()).isEqualTo(0);
}
/**
* https://gist.github.com/nithril/444d8373ce67f0a8b853 Contribution by Nicolas Labrot
* @throws InterruptedException on interrupt
*/
@Test
public void testParallelWithJava8StreamsInput() throws InterruptedException {
Scheduler supplier = afterTest.autoDispose(Schedulers.newParallel("test-p", 2));
int max = ThreadLocalRandom.current()
.nextInt(100, 300);
CountDownLatch countDownLatch = new CountDownLatch(max);
Flux<Integer> worker = Flux.range(0, max)
.publishOn(asyncGroup);
worker.parallel(2)
.runOn(supplier)
.map(v -> v)
.subscribe(v -> countDownLatch.countDown());
countDownLatch.await(10, TimeUnit.SECONDS);
assertThat(countDownLatch.getCount()).isEqualTo(0);
}
	@Test
	public void testBeyondLongMaxMicroBatching() throws InterruptedException {
		// 1500 boxed integers used as the source workload.
		List<Integer> tasks = IntStream.range(0, 1500)
		                               .boxed()
		                               .collect(Collectors.toList());

		CountDownLatch countDownLatch = new CountDownLatch(tasks.size());
		Flux<Integer> worker = Flux.fromIterable(tasks)
		                           .log("before", Level.FINE)
		                           .publishOn(asyncGroup);

		// Fan out over two parallel groups; each group hops to the async
		// scheduler again and counts down for every element it receives.
		/*Disposable tail = */worker.log("after", Level.FINE)
		      .parallel(2)
		      .groups()
		      .subscribe(s -> s.log("w"+s.key(), Level.FINE)
		                       .publishOn(asyncGroup)
		                       .map(v -> v)
		                       .subscribe(v -> countDownLatch.countDown(), Throwable::printStackTrace));

		// NOTE(review): the await result is ignored; a timeout only surfaces
		// through the assertion below.
		countDownLatch.await(5, TimeUnit.SECONDS);
		assertThat(countDownLatch.getCount()).as("Count max: %d", tasks.size()).isEqualTo(0);
	}
private static final
|
String2Integer
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.