language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
quarkusio__quarkus
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/disabled/DisabledInterceptorInStrictModeTest.java
{ "start": 1140, "end": 1322 }
class ____ { String ping() { return "pong"; } } @Target(ElementType.TYPE) @Retention(RetentionPolicy.RUNTIME) @InterceptorBinding @
MyBean
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/processor/onexception/ErrorHandlerSuppressExceptionTest.java
{ "start": 1124, "end": 3100 }
class ____ extends ContextTestSupport { @Test public void testSuppressException() { Exchange out = template.send("direct:start", new Processor() { @Override public void process(Exchange exchange) { exchange.getIn().setBody("Hello World"); } }); Assertions.assertTrue(out.isFailed()); Exception t = out.getException(); Assertions.assertNotNull(t); Assertions.assertEquals("Forced error during handling", t.getMessage()); // only 1 suppressed to avoid the same exception being added multiple times Assertions.assertEquals(1, t.getSuppressed().length); Assertions.assertEquals("Root exception", t.getSuppressed()[0].getMessage()); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { @Override public void configure() { onException(Exception.class).maximumRedeliveries(3).redeliveryDelay(0) .process(new Processor() { @Override public void process(Exchange exchange) { // throw a new exception while handling an exception // this should not leak with the same exception being nested // as suppressed exception throw new IllegalArgumentException("Forced error during handling"); } }); from("direct:start") .process(new Processor() { @Override public void process(Exchange exchange) throws Exception { throw new IOException("Root exception"); } }); } }; } }
ErrorHandlerSuppressExceptionTest
java
spring-projects__spring-framework
spring-context/src/main/java/org/springframework/jmx/support/MBeanRegistrationSupport.java
{ "start": 2793, "end": 9047 }
class ____ { /** * {@code Log} instance for this class. */ protected final Log logger = LogFactory.getLog(getClass()); /** * The {@code MBeanServer} instance being used to register beans. */ protected @Nullable MBeanServer server; /** * The beans that have been registered by this exporter. */ private final Set<ObjectName> registeredBeans = new LinkedHashSet<>(); /** * The policy used when registering an MBean and finding that it already exists. * By default an exception is raised. */ private RegistrationPolicy registrationPolicy = RegistrationPolicy.FAIL_ON_EXISTING; /** * Specify the {@code MBeanServer} instance with which all beans should * be registered. The {@code MBeanExporter} will attempt to locate an * existing {@code MBeanServer} if none is supplied. */ public void setServer(@Nullable MBeanServer server) { this.server = server; } /** * Return the {@code MBeanServer} that the beans will be registered with. */ public final @Nullable MBeanServer getServer() { return this.server; } /** * The policy to use when attempting to register an MBean * under an {@link javax.management.ObjectName} that already exists. * @param registrationPolicy the policy to use * @since 3.2 */ public void setRegistrationPolicy(RegistrationPolicy registrationPolicy) { Assert.notNull(registrationPolicy, "RegistrationPolicy must not be null"); this.registrationPolicy = registrationPolicy; } /** * Actually register the MBean with the server. The behavior when encountering * an existing MBean can be configured using {@link #setRegistrationPolicy}. 
* @param mbean the MBean instance * @param objectName the suggested ObjectName for the MBean * @throws JMException if the registration failed */ protected void doRegister(Object mbean, ObjectName objectName) throws JMException { Assert.state(this.server != null, "No MBeanServer set"); ObjectName actualObjectName; synchronized (this.registeredBeans) { ObjectInstance registeredBean = null; try { registeredBean = this.server.registerMBean(mbean, objectName); } catch (InstanceAlreadyExistsException ex) { if (this.registrationPolicy == RegistrationPolicy.IGNORE_EXISTING) { if (logger.isDebugEnabled()) { logger.debug("Ignoring existing MBean at [" + objectName + "]"); } } else if (this.registrationPolicy == RegistrationPolicy.REPLACE_EXISTING) { try { if (logger.isDebugEnabled()) { logger.debug("Replacing existing MBean at [" + objectName + "]"); } this.server.unregisterMBean(objectName); registeredBean = this.server.registerMBean(mbean, objectName); } catch (InstanceNotFoundException ex2) { if (logger.isInfoEnabled()) { logger.info("Unable to replace existing MBean at [" + objectName + "]", ex2); } throw ex; } } else { throw ex; } } // Track registration and notify listeners. actualObjectName = (registeredBean != null ? registeredBean.getObjectName() : null); if (actualObjectName == null) { actualObjectName = objectName; } this.registeredBeans.add(actualObjectName); } onRegister(actualObjectName, mbean); } /** * Unregisters all beans that have been registered by an instance of this class. */ protected void unregisterBeans() { Set<ObjectName> snapshot; synchronized (this.registeredBeans) { snapshot = new LinkedHashSet<>(this.registeredBeans); } if (!snapshot.isEmpty()) { logger.debug("Unregistering JMX-exposed beans"); for (ObjectName objectName : snapshot) { doUnregister(objectName); } } } /** * Actually unregister the specified MBean from the server. 
* @param objectName the suggested ObjectName for the MBean */ protected void doUnregister(ObjectName objectName) { Assert.state(this.server != null, "No MBeanServer set"); boolean actuallyUnregistered = false; synchronized (this.registeredBeans) { if (this.registeredBeans.remove(objectName)) { try { // MBean might already have been unregistered by an external process if (this.server.isRegistered(objectName)) { this.server.unregisterMBean(objectName); actuallyUnregistered = true; } else { if (logger.isInfoEnabled()) { logger.info("Could not unregister MBean [" + objectName + "] as said MBean " + "is not registered (perhaps already unregistered by an external process)"); } } } catch (JMException ex) { if (logger.isInfoEnabled()) { logger.info("Could not unregister MBean [" + objectName + "]", ex); } } } } if (actuallyUnregistered) { onUnregister(objectName); } } /** * Return the {@link ObjectName ObjectNames} of all registered beans. */ protected final ObjectName[] getRegisteredObjectNames() { synchronized (this.registeredBeans) { return this.registeredBeans.toArray(new ObjectName[0]); } } /** * Called when an MBean is registered under the given {@link ObjectName}. Allows * subclasses to perform additional processing when an MBean is registered. * <p>The default implementation delegates to {@link #onRegister(ObjectName)}. * @param objectName the actual {@link ObjectName} that the MBean was registered with * @param mbean the registered MBean instance */ protected void onRegister(ObjectName objectName, Object mbean) { onRegister(objectName); } /** * Called when an MBean is registered under the given {@link ObjectName}. Allows * subclasses to perform additional processing when an MBean is registered. * <p>The default implementation is empty. Can be overridden in subclasses. 
* @param objectName the actual {@link ObjectName} that the MBean was registered with */ protected void onRegister(ObjectName objectName) { } /** * Called when an MBean is unregistered under the given {@link ObjectName}. Allows * subclasses to perform additional processing when an MBean is unregistered. * <p>The default implementation is empty. Can be overridden in subclasses. * @param objectName the {@link ObjectName} that the MBean was registered with */ protected void onUnregister(ObjectName objectName) { } }
MBeanRegistrationSupport
java
apache__flink
flink-core/src/main/java/org/apache/flink/configuration/SlowTaskDetectorOptions.java
{ "start": 1122, "end": 4035 }
class ____ { @Documentation.Section(Documentation.Sections.EXPERT_SCHEDULING) public static final ConfigOption<Duration> CHECK_INTERVAL = key("slow-task-detector.check-interval") .durationType() .defaultValue(Duration.ofSeconds(1)) .withDescription("The interval to check slow tasks."); @Documentation.Section(Documentation.Sections.EXPERT_SCHEDULING) public static final ConfigOption<Duration> EXECUTION_TIME_BASELINE_LOWER_BOUND = key("slow-task-detector.execution-time.baseline-lower-bound") .durationType() .defaultValue(Duration.ofMinutes(1)) .withDescription("The lower bound of slow task detection baseline."); @Documentation.Section(Documentation.Sections.EXPERT_SCHEDULING) public static final ConfigOption<Double> EXECUTION_TIME_BASELINE_RATIO = key("slow-task-detector.execution-time.baseline-ratio") .doubleType() .defaultValue(0.75) .withDescription( "The finished execution ratio threshold to calculate the slow tasks " + "detection baseline. Given that the parallelism is N and the " + "ratio is R, define T as the median of the first N*R finished " + "tasks' execution time. The baseline will be T*M, where M is " + "the multiplier of the baseline. Note that the execution time " + "will be weighted with the task's input bytes to ensure the " + "accuracy of the detection if data skew occurs."); @Documentation.Section(Documentation.Sections.EXPERT_SCHEDULING) public static final ConfigOption<Double> EXECUTION_TIME_BASELINE_MULTIPLIER = key("slow-task-detector.execution-time.baseline-multiplier") .doubleType() .defaultValue(1.5) .withDescription( "The multiplier to calculate the slow tasks detection baseline. Given " + "that the parallelism is N and the ratio is R, define T as " + "the median of the first N*R finished tasks' execution time. " + "The baseline will be T*M, where M is the multiplier of the " + "baseline. 
Note that the execution time will be weighted with " + "the task's input bytes to ensure the accuracy of the " + "detection if data skew occurs."); private SlowTaskDetectorOptions() { throw new IllegalAccessError(); } }
SlowTaskDetectorOptions
java
apache__logging-log4j2
log4j-layout-template-json/src/main/java/org/apache/logging/log4j/layout/template/json/util/TruncatingBufferedPrintWriter.java
{ "start": 906, "end": 2740 }
class ____ extends PrintWriter implements CharSequence { private final TruncatingBufferedWriter writer; private TruncatingBufferedPrintWriter(final TruncatingBufferedWriter writer) { super(writer, false); this.writer = writer; } public static TruncatingBufferedPrintWriter ofCapacity(final int capacity) { if (capacity < 0) { throw new IllegalArgumentException("was expecting a non-negative capacity: " + capacity); } final TruncatingBufferedWriter writer = new TruncatingBufferedWriter(capacity); return new TruncatingBufferedPrintWriter(writer); } public char[] buffer() { return writer.buffer(); } public int position() { return writer.position(); } public void position(final int index) { writer.position(index); } public int capacity() { return writer.capacity(); } public boolean truncated() { return writer.truncated(); } @Override public int length() { return writer.length(); } @Override public char charAt(final int index) { return writer.charAt(index); } @Override public PrintWriter append(final CharSequence seq) { writer.append(seq); return this; } @Override public PrintWriter append(final CharSequence seq, final int startIndex, final int endIndex) { writer.append(seq, startIndex, endIndex); return this; } @Override public CharSequence subSequence(final int startIndex, final int endIndex) { return writer.subSequence(startIndex, endIndex); } @Override public void close() { writer.close(); } @Override public String toString() { return writer.toString(); } }
TruncatingBufferedPrintWriter
java
alibaba__nacos
api/src/main/java/com/alibaba/nacos/api/config/listener/AbstractSharedListener.java
{ "start": 840, "end": 1553 }
class ____ implements Listener { private volatile String dataId; private volatile String group; public final void fillContext(String dataId, String group) { this.dataId = dataId; this.group = group; } @Override public final void receiveConfigInfo(String configInfo) { innerReceive(dataId, group, configInfo); } @Override public Executor getExecutor() { return null; } /** * receive. * * @param dataId data ID * @param group group * @param configInfo content */ public abstract void innerReceive(String dataId, String group, String configInfo); }
AbstractSharedListener
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/processor/enricher/PollEnrichFileDefaultAggregationStrategyTest.java
{ "start": 1069, "end": 2665 }
class ____ extends ContextTestSupport { @Test public void testPollEnrichDefaultAggregationStrategyBody() throws Exception { getMockEndpoint("mock:start").expectedBodiesReceived("Start"); MockEndpoint mock = getMockEndpoint("mock:result"); mock.expectedBodiesReceived("Big file"); mock.expectedFileExists(testFile("enrich/.done/AAA.fin")); mock.expectedFileExists(testFile("enrichdata/.done/AAA.dat")); template.sendBodyAndHeader(fileUri("enrich"), "Start", Exchange.FILE_NAME, "AAA.fin"); context.getRouteController().startAllRoutes(); log.info("Sleeping for 0.25 sec before writing enrichdata file"); Thread.sleep(250); template.sendBodyAndHeader(fileUri("enrichdata"), "Big file", Exchange.FILE_NAME, "AAA.dat"); log.info("... write done"); assertMockEndpointsSatisfied(); assertFileNotExists(testFile("enrichdata/AAA.dat.camelLock")); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { @Override public void configure() { from(fileUri("enrich?initialDelay=0&delay=10&move=.done")).autoStartup(false) .to("mock:start") .pollEnrich( fileUri("enrichdata?initialDelay=0&delay=10&readLock=markerFile&move=.done"), 10000) .to("mock:result"); } }; } }
PollEnrichFileDefaultAggregationStrategyTest
java
spring-projects__spring-security
oauth2/oauth2-authorization-server/src/test/java/org/springframework/security/oauth2/server/authorization/authentication/OAuth2TokenIntrospectionAuthenticationTokenTests.java
{ "start": 1491, "end": 5273 }
class ____ { private String token = "token"; private RegisteredClient registeredClient = TestRegisteredClients.registeredClient().build(); private OAuth2ClientAuthenticationToken clientPrincipal = new OAuth2ClientAuthenticationToken(this.registeredClient, ClientAuthenticationMethod.CLIENT_SECRET_BASIC, this.registeredClient.getClientSecret()); private OAuth2TokenIntrospection tokenClaims = OAuth2TokenIntrospection.builder(true).build(); @Test public void constructorWhenTokenNullThenThrowIllegalArgumentException() { assertThatExceptionOfType(IllegalArgumentException.class) .isThrownBy(() -> new OAuth2TokenIntrospectionAuthenticationToken(null, this.clientPrincipal, null, null)) .withMessage("token cannot be empty"); } @Test public void constructorWhenClientPrincipalNullThenThrowIllegalArgumentException() { assertThatExceptionOfType(IllegalArgumentException.class) .isThrownBy(() -> new OAuth2TokenIntrospectionAuthenticationToken(this.token, null, null, null)) .withMessage("clientPrincipal cannot be null"); } @Test public void constructorWhenAuthenticatedAndTokenNullThenThrowIllegalArgumentException() { assertThatExceptionOfType(IllegalArgumentException.class) .isThrownBy( () -> new OAuth2TokenIntrospectionAuthenticationToken(null, this.clientPrincipal, this.tokenClaims)) .withMessage("token cannot be empty"); } @Test public void constructorWhenAuthenticatedAndClientPrincipalNullThenThrowIllegalArgumentException() { assertThatExceptionOfType(IllegalArgumentException.class) .isThrownBy(() -> new OAuth2TokenIntrospectionAuthenticationToken(this.token, null, this.tokenClaims)) .withMessage("clientPrincipal cannot be null"); } @Test public void constructorWhenAuthenticatedAndTokenClaimsNullThenThrowIllegalArgumentException() { assertThatExceptionOfType(IllegalArgumentException.class) .isThrownBy(() -> new OAuth2TokenIntrospectionAuthenticationToken(this.token, this.clientPrincipal, null)) .withMessage("tokenClaims cannot be null"); } @Test public void 
constructorWhenTokenProvidedThenCreated() { Map<String, Object> additionalParameters = Collections.singletonMap("custom-param", "custom-value"); OAuth2TokenIntrospectionAuthenticationToken authentication = new OAuth2TokenIntrospectionAuthenticationToken( this.token, this.clientPrincipal, OAuth2TokenType.ACCESS_TOKEN.getValue(), additionalParameters); assertThat(authentication.getToken()).isEqualTo(this.token); assertThat(authentication.getPrincipal()).isEqualTo(this.clientPrincipal); assertThat(authentication.getCredentials().toString()).isEmpty(); assertThat(authentication.getTokenTypeHint()).isEqualTo(OAuth2TokenType.ACCESS_TOKEN.getValue()); assertThat(authentication.getAdditionalParameters()).containsExactlyInAnyOrderEntriesOf(additionalParameters); assertThat(authentication.getTokenClaims()).isNotNull(); assertThat(authentication.getTokenClaims().isActive()).isFalse(); assertThat(authentication.isAuthenticated()).isFalse(); } @Test public void constructorWhenTokenClaimsProvidedThenCreated() { OAuth2TokenIntrospectionAuthenticationToken authentication = new OAuth2TokenIntrospectionAuthenticationToken( this.token, this.clientPrincipal, this.tokenClaims); assertThat(authentication.getToken()).isEqualTo(this.token); assertThat(authentication.getPrincipal()).isEqualTo(this.clientPrincipal); assertThat(authentication.getCredentials().toString()).isEmpty(); assertThat(authentication.getTokenTypeHint()).isNull(); assertThat(authentication.getAdditionalParameters()).isEmpty(); assertThat(authentication.getTokenClaims()).isEqualTo(this.tokenClaims); assertThat(authentication.isAuthenticated()).isTrue(); } }
OAuth2TokenIntrospectionAuthenticationTokenTests
java
spring-projects__spring-security
access/src/main/java/org/springframework/security/access/prepost/PrePostAnnotationSecurityMetadataSource.java
{ "start": 1378, "end": 2164 }
class ____ merely responsible for * locating the relevant annotations (if any). It delegates the actual * <tt>ConfigAttribute</tt> creation to its {@link PrePostInvocationAttributeFactory}, * thus decoupling itself from the mechanism which will enforce the annotations' * behaviour. * <p> * Annotations may be specified on classes or methods, and method-specific annotations * will take precedence. If you use any annotation and do not specify a pre-authorization * condition, then the method will be allowed as if a @PreAuthorize("permitAll") were * present. * <p> * Since we are handling multiple annotations here, it's possible that we may have to * combine annotations defined in multiple locations for a single method - they may be * defined on the method itself, or at
is
java
quarkusio__quarkus
extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/mapping/timezone/TimezoneDefaultStorageAutoTest.java
{ "start": 397, "end": 1970 }
class ____ extends AbstractTimezoneDefaultStorageTest { @RegisterExtension static QuarkusUnitTest TEST = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClasses(EntityWithTimezones.class) .addClasses(SchemaUtil.class)) .withConfigurationResource("application.properties") .overrideConfigKey("quarkus.hibernate-orm.mapping.timezone.default-storage", "auto"); @Test public void schema() { assertThat(SchemaUtil.getColumnNames(EntityWithTimezones.class, mappingMetamodel())) .contains("zonedDateTime_tz", "offsetDateTime_tz", "offsetTime_tz"); assertThat(SchemaUtil.getColumnTypeName(EntityWithTimezones.class, "zonedDateTime", mappingMetamodel())) .isEqualTo("TIMESTAMP_UTC"); assertThat(SchemaUtil.getColumnTypeName(EntityWithTimezones.class, "offsetDateTime", mappingMetamodel())) .isEqualTo("TIMESTAMP_UTC"); } @Test @RunOnVertxContext public void persistAndLoad(UniAsserter asserter) { // Native storage is not supported with PostgreSQL, so we'll effectively use COLUMN. assertPersistedThenLoadedValues(asserter, // Column storage preserves the offset, but not the zone ID: https://hibernate.atlassian.net/browse/HHH-16289 PERSISTED_ZONED_DATE_TIME.withZoneSameInstant(PERSISTED_ZONED_DATE_TIME.getOffset()), PERSISTED_OFFSET_DATE_TIME, PERSISTED_OFFSET_TIME); } }
TimezoneDefaultStorageAutoTest
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/rest/action/admin/cluster/dangling/RestImportDanglingIndexAction.java
{ "start": 1253, "end": 2197 }
class ____ extends BaseRestHandler { @Override public List<Route> routes() { return List.of(new Route(POST, "/_dangling/{index_uuid}")); } @Override public String getName() { return "import_dangling_index"; } @Override public RestChannelConsumer prepareRequest(final RestRequest request, NodeClient client) throws IOException { final ImportDanglingIndexRequest importRequest = new ImportDanglingIndexRequest( request.param("index_uuid"), request.paramAsBoolean("accept_data_loss", false) ); importRequest.ackTimeout(getAckTimeout(request)); importRequest.masterNodeTimeout(getMasterNodeTimeout(request)); return channel -> client.execute( TransportImportDanglingIndexAction.TYPE, importRequest, new RestToXContentListener<>(channel, r -> ACCEPTED) ); } }
RestImportDanglingIndexAction
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java
{ "start": 1093, "end": 3599 }
class ____ implements IndexedSorter { private static final IndexedSorter alt = new HeapSort(); public QuickSort() { } private static void fix(IndexedSortable s, int p, int r) { if (s.compare(p, r) > 0) { s.swap(p, r); } } /** * Deepest recursion before giving up and doing a heapsort. * Returns 2 * ceil(log(n)). * * @param x x. * @return MaxDepth. */ protected static int getMaxDepth(int x) { if (x <= 0) throw new IllegalArgumentException("Undefined for " + x); return (32 - Integer.numberOfLeadingZeros(x - 1)) << 2; } /** * Sort the given range of items using quick sort. * {@inheritDoc} If the recursion depth falls below {@link #getMaxDepth}, * then switch to {@link HeapSort}. */ @Override public void sort(IndexedSortable s, int p, int r) { sort(s, p, r, null); } @Override public void sort(final IndexedSortable s, int p, int r, final Progressable rep) { sortInternal(s, p, r, rep, getMaxDepth(r - p)); } private static void sortInternal(final IndexedSortable s, int p, int r, final Progressable rep, int depth) { if (null != rep) { rep.progress(); } while (true) { if (r-p < 13) { for (int i = p; i < r; ++i) { for (int j = i; j > p && s.compare(j-1, j) > 0; --j) { s.swap(j, j-1); } } return; } if (--depth < 0) { // give up alt.sort(s, p, r, rep); return; } // select, move pivot into first position fix(s, (p+r) >>> 1, p); fix(s, (p+r) >>> 1, r - 1); fix(s, p, r-1); // Divide int i = p; int j = r; int ll = p; int rr = r; int cr; while(true) { while (++i < j) { if ((cr = s.compare(i, p)) > 0) break; if (0 == cr && ++ll != i) { s.swap(ll, i); } } while (--j > i) { if ((cr = s.compare(p, j)) > 0) break; if (0 == cr && --rr != j) { s.swap(rr, j); } } if (i < j) s.swap(i, j); else break; } j = i; // swap pivot- and all eq values- into position while (ll >= p) { s.swap(ll--, --i); } while (rr < r) { s.swap(rr++, j++); } // Conquer // Recurse on smaller interval first to keep stack shallow assert i != j; if (i - p < r - j) { sortInternal(s, p, i, rep, depth); p = j; } else { 
sortInternal(s, j, r, rep, depth); r = i; } } } }
QuickSort
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnWhitelistedFunctionTests.java
{ "start": 703, "end": 23485 }
class ____ extends ESTestCase { public void testWindowMax() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { double randValue = randomDouble(); double expected = -Double.MAX_VALUE; if (i == 0) { window.offer(randValue); continue; } for (double value : window) { expected = Math.max(expected, value); } double actual = MovingFunctions.max(window.stream().mapToDouble(Double::doubleValue).toArray()); assertEquals(expected, actual, 0.01 * Math.abs(expected)); window.offer(randValue); } } public void testNullWindowMax() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { Double randValue = randomBoolean() ? Double.NaN : null; if (i == 0) { if (randValue != null) { window.offer(randValue); } continue; } double actual = MovingFunctions.max(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(Double.NaN)); if (randValue != null) { window.offer(randValue); } } } public void testEmptyWindowMax() { EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.max(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(Double.NaN)); } public void testWindowMin() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { double randValue = randomDouble(); double expected = Double.MAX_VALUE; if (i == 0) { window.offer(randValue); continue; } for (double value : window) { expected = Math.min(expected, value); } double actual = MovingFunctions.min(window.stream().mapToDouble(Double::doubleValue).toArray()); assertEquals(expected, actual, 0.01 * Math.abs(expected)); window.offer(randValue); } } 
public void testNullWindowMin() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { Double randValue = randomBoolean() ? Double.NaN : null; if (i == 0) { if (randValue != null) { window.offer(randValue); } continue; } double actual = MovingFunctions.min(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(Double.NaN)); if (randValue != null) { window.offer(randValue); } } } public void testEmptyWindowMin() { EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.min(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(Double.NaN)); } public void testWindowSum() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { double randValue = randomDouble(); double expected = 0; if (i == 0) { window.offer(randValue); continue; } for (double value : window) { expected += value; } double actual = MovingFunctions.sum(window.stream().mapToDouble(Double::doubleValue).toArray()); assertEquals(expected, actual, 0.01 * Math.abs(expected)); window.offer(randValue); } } public void testNullWindowSum() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { Double randValue = randomBoolean() ? 
Double.NaN : null; if (i == 0) { if (randValue != null) { window.offer(randValue); } continue; } double actual = MovingFunctions.sum(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(0.0)); if (randValue != null) { window.offer(randValue); } } } public void testEmptyWindowSum() { EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.sum(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(0.0)); } public void testSimpleMovAvg() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { double randValue = randomDouble(); double expected = 0; if (i == 0) { window.offer(randValue); continue; } for (double value : window) { expected += value; } expected /= window.size(); double actual = MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); assertEquals(expected, actual, 0.01 * Math.abs(expected)); window.offer(randValue); } } public void testNullSimpleMovAvg() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { Double randValue = randomBoolean() ? 
Double.NaN : null; if (i == 0) { if (randValue != null) { window.offer(randValue); } continue; } double actual = MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(Double.NaN)); if (randValue != null) { window.offer(randValue); } } } public void testEmptySimpleMovAvg() { EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(Double.NaN)); } public void testSimpleMovStdDev() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { double randValue = randomDouble(); double mean = 0; if (i == 0) { window.offer(randValue); continue; } for (double value : window) { mean += value; } mean /= window.size(); double expected = 0.0; for (double value : window) { expected += Math.pow(value - mean, 2); } expected = Math.sqrt(expected / window.size()); double actual = MovingFunctions.stdDev(window.stream().mapToDouble(Double::doubleValue).toArray(), mean); assertEquals(expected, actual, 0.01 * Math.abs(expected)); window.offer(randValue); } } public void testNullSimpleStdDev() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { Double randValue = randomBoolean() ? 
Double.NaN : null; if (i == 0) { if (randValue != null) { window.offer(randValue); } continue; } double actual = MovingFunctions.stdDev( window.stream().mapToDouble(Double::doubleValue).toArray(), MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()) ); assertThat(actual, equalTo(Double.NaN)); if (randValue != null) { window.offer(randValue); } } } public void testEmptySimpleStdDev() { EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.stdDev( window.stream().mapToDouble(Double::doubleValue).toArray(), MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()) ); assertThat(actual, equalTo(Double.NaN)); } public void testStdDevNaNAvg() { assertThat(MovingFunctions.stdDev(new double[] { 1.0, 2.0, 3.0 }, Double.NaN), equalTo(Double.NaN)); } public void testLinearMovAvg() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { double randValue = randomDouble(); if (i == 0) { window.offer(randValue); continue; } double avg = 0; long totalWeight = 0; long current = 1; for (double value : window) { avg += value * current; totalWeight += current; current += 1; } double expected = avg / totalWeight; double actual = MovingFunctions.linearWeightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); assertEquals(expected, actual, 0.01 * Math.abs(expected)); window.offer(randValue); } } public void testNullLinearMovAvg() { int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { Double randValue = randomBoolean() ? 
Double.NaN : null; if (i == 0) { if (randValue != null) { window.offer(randValue); } continue; } double actual = MovingFunctions.linearWeightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(Double.NaN)); if (randValue != null) { window.offer(randValue); } } } public void testEmptyLinearMovAvg() { EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.linearWeightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); assertThat(actual, equalTo(Double.NaN)); } public void testEWMAMovAvg() { double alpha = randomDouble(); int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { double randValue = randomDouble(); if (i == 0) { window.offer(randValue); continue; } double avg = 0; boolean first = true; for (double value : window) { if (first) { avg = value; first = false; } else { avg = (value * alpha) + (avg * (1 - alpha)); } } double expected = avg; double actual = MovingFunctions.ewma(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha); assertEquals(expected, actual, 0.01 * Math.abs(expected)); window.offer(randValue); } } public void testNullEwmaMovAvg() { double alpha = randomDouble(); int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { Double randValue = randomBoolean() ? 
Double.NaN : null; if (i == 0) { if (randValue != null) { window.offer(randValue); } continue; } double actual = MovingFunctions.ewma(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha); assertThat(actual, equalTo(Double.NaN)); if (randValue != null) { window.offer(randValue); } } } public void testEmptyEwmaMovAvg() { double alpha = randomDouble(); EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.ewma(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha); assertThat(actual, equalTo(Double.NaN)); } public void testHoltLinearMovAvg() { double alpha = randomDouble(); double beta = randomDouble(); int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { double randValue = randomDouble(); if (i == 0) { window.offer(randValue); continue; } double s = 0; double last_s = 0; // Trend value double b = 0; double last_b = 0; int counter = 0; double last; for (double value : window) { last = value; if (counter == 0) { s = value; b = value - last; } else { s = alpha * value + (1.0d - alpha) * (last_s + last_b); b = beta * (s - last_s) + (1 - beta) * last_b; } counter += 1; last_s = s; last_b = b; } double expected = s + (0 * b); double actual = MovingFunctions.holt(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta); assertEquals(expected, actual, 0.01 * Math.abs(expected)); window.offer(randValue); } } public void testNullHoltMovAvg() { double alpha = randomDouble(); double beta = randomDouble(); int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(1, 50); EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < numValues; i++) { Double randValue = randomBoolean() ? 
Double.NaN : null; if (i == 0) { if (randValue != null) { window.offer(randValue); } continue; } double actual = MovingFunctions.holt(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta); assertThat(actual, equalTo(Double.NaN)); if (randValue != null) { window.offer(randValue); } } } public void testEmptyHoltMovAvg() { double alpha = randomDouble(); double beta = randomDouble(); EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.holt(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta); assertThat(actual, equalTo(Double.NaN)); } public void testHoltWintersMultiplicative() { double alpha = randomDouble(); double beta = randomDouble(); double gamma = randomDouble(); int period = randomIntBetween(1, 10); int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < windowSize; i++) { window.offer(randomDouble()); } // Smoothed value double s = 0; double last_s = 0; // Trend value double b = 0; double last_b = 0; // Seasonal value double[] seasonal = new double[windowSize]; int counter = 0; double[] vs = new double[windowSize]; for (double v : window) { vs[counter] = v + 0.0000000001; counter += 1; } // Initial level value is average of first season // Calculate the slopes between first and second season for each period for (int i = 0; i < period; i++) { s += vs[i]; b += (vs[i + period] - vs[i]) / period; } s /= period; b /= period; last_s = s; // Calculate first seasonal if (Double.compare(s, 0.0) == 0 || Double.compare(s, -0.0) == 0) { Arrays.fill(seasonal, 0.0); } else { for (int i = 0; i < period; i++) { seasonal[i] = vs[i] / s; } } for (int i = period; i < vs.length; i++) { s = alpha * (vs[i] / seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); b = beta * (s - last_s) + (1 - beta) * last_b; seasonal[i] = gamma * (vs[i] / (last_s + last_b)) + (1 - gamma) * seasonal[i 
- period]; last_s = s; last_b = b; } int idx = window.size() - period + (0 % period); double expected = (s + (1 * b)) * seasonal[idx]; double actual = MovingFunctions.holtWinters( window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta, gamma, period, true ); assertEquals(expected, actual, 0.01 * Math.abs(expected)); } public void testNullHoltWintersMovAvg() { double alpha = randomDouble(); double beta = randomDouble(); double gamma = randomDouble(); int period = randomIntBetween(1, 10); int numValues = randomIntBetween(1, 100); int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < windowSize; i++) { window.offer(Double.NaN); } for (int i = 0; i < numValues; i++) { double actual = MovingFunctions.holtWinters( window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta, gamma, period, false ); assertThat(actual, equalTo(Double.NaN)); } } public void testEmptyHoltWintersMovAvg() { double alpha = randomDouble(); double beta = randomDouble(); double gamma = randomDouble(); int period = randomIntBetween(1, 10); EvictingQueue<Double> window = new EvictingQueue<>(0); double actual = MovingFunctions.holtWinters( window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta, gamma, period, false ); assertThat(actual, equalTo(Double.NaN)); } public void testHoltWintersAdditive() { double alpha = randomDouble(); double beta = randomDouble(); double gamma = randomDouble(); int period = randomIntBetween(1, 10); int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data EvictingQueue<Double> window = new EvictingQueue<>(windowSize); for (int i = 0; i < windowSize; i++) { window.offer(randomDouble()); } // Smoothed value double s = 0; double last_s = 0; // Trend value double b = 0; double last_b = 0; // Seasonal value double[] seasonal = new double[windowSize]; int counter = 0; 
double[] vs = new double[windowSize]; for (double v : window) { vs[counter] = v; counter += 1; } // Initial level value is average of first season // Calculate the slopes between first and second season for each period for (int i = 0; i < period; i++) { s += vs[i]; b += (vs[i + period] - vs[i]) / period; } s /= period; b /= period; last_s = s; // Calculate first seasonal if (Double.compare(s, 0.0) == 0 || Double.compare(s, -0.0) == 0) { Arrays.fill(seasonal, 0.0); } else { for (int i = 0; i < period; i++) { seasonal[i] = vs[i] / s; } } for (int i = period; i < vs.length; i++) { s = alpha * (vs[i] - seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); b = beta * (s - last_s) + (1 - beta) * last_b; seasonal[i] = gamma * (vs[i] - (last_s - last_b)) + (1 - gamma) * seasonal[i - period]; last_s = s; last_b = b; } int idx = window.size() - period + (0 % period); double expected = s + (1 * b) + seasonal[idx]; double actual = MovingFunctions.holtWinters( window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta, gamma, period, false ); assertEquals(expected, actual, 0.01 * Math.abs(expected)); } }
MovFnWhitelistedFunctionTests
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/associations/ManyToManyBidirectionalWithLinkEntityTest.java
{ "start": 1974, "end": 3607 }
class ____ implements Serializable { @Id @GeneratedValue private Long id; @NaturalId private String registrationNumber; @OneToMany( mappedBy = "person", cascade = CascadeType.ALL, orphanRemoval = true ) private List<PersonAddress> addresses = new ArrayList<>(); //Getters and setters are omitted for brevity //end::associations-many-to-many-bidirectional-with-link-entity-example[] public Person() { } public Person(String registrationNumber) { this.registrationNumber = registrationNumber; } public Long getId() { return id; } public List<PersonAddress> getAddresses() { return addresses; } //tag::associations-many-to-many-bidirectional-with-link-entity-example[] public void addAddress(Address address) { PersonAddress personAddress = new PersonAddress( this, address ); addresses.add( personAddress ); address.getOwners().add( personAddress ); } public void removeAddress(Address address) { PersonAddress personAddress = new PersonAddress( this, address ); address.getOwners().remove( personAddress ); addresses.remove( personAddress ); personAddress.setPerson( null ); personAddress.setAddress( null ); } @Override public boolean equals(Object o) { if ( this == o ) { return true; } if ( o == null || getClass() != o.getClass() ) { return false; } Person person = (Person) o; return Objects.equals( registrationNumber, person.registrationNumber ); } @Override public int hashCode() { return Objects.hash( registrationNumber ); } } @Entity(name = "PersonAddress") public static
Person
java
spring-projects__spring-security
config/src/main/java/org/springframework/security/config/web/server/ServerHttpSecurity.java
{ "start": 187765, "end": 201997 }
class ____ { private ReactiveAuthenticationManager authenticationManager; private ReactiveOneTimeTokenService tokenService; private ServerAuthenticationConverter authenticationConverter = new ServerOneTimeTokenAuthenticationConverter(); private ServerAuthenticationFailureHandler authenticationFailureHandler; private final RedirectServerAuthenticationSuccessHandler defaultSuccessHandler = new RedirectServerAuthenticationSuccessHandler( "/"); private final List<ServerAuthenticationSuccessHandler> defaultSuccessHandlers = new ArrayList<>( List.of(this.defaultSuccessHandler)); private final List<ServerAuthenticationSuccessHandler> authenticationSuccessHandlers = new ArrayList<>(); private ServerOneTimeTokenGenerationSuccessHandler tokenGenerationSuccessHandler; private ServerSecurityContextRepository securityContextRepository; private ServerGenerateOneTimeTokenRequestResolver requestResolver; private String loginProcessingUrl = "/login/ott"; private String defaultSubmitPageUrl = "/login/ott"; private String tokenGeneratingUrl = "/ott/generate"; private boolean submitPageEnabled = true; private String loginPage; protected void configure(ServerHttpSecurity http) { configureSubmitPage(http); configureOttGenerateFilter(http); configureOttAuthenticationFilter(http); configureDefaultEntryPoint(http); } private void configureOttAuthenticationFilter(ServerHttpSecurity http) { AuthenticationWebFilter ottWebFilter = new AuthenticationWebFilter(getAuthenticationManager()); ottWebFilter.setServerAuthenticationConverter(this.authenticationConverter); ottWebFilter.setAuthenticationFailureHandler(getAuthenticationFailureHandler()); ottWebFilter.setAuthenticationSuccessHandler(getAuthenticationSuccessHandler()); ottWebFilter.setRequiresAuthenticationMatcher( ServerWebExchangeMatchers.pathMatchers(HttpMethod.POST, this.loginProcessingUrl)); ottWebFilter.setSecurityContextRepository(this.securityContextRepository); http.addFilterAt(ottWebFilter, SecurityWebFiltersOrder.AUTHENTICATION); 
} private void configureSubmitPage(ServerHttpSecurity http) { if (!this.submitPageEnabled) { return; } OneTimeTokenSubmitPageGeneratingWebFilter submitPage = new OneTimeTokenSubmitPageGeneratingWebFilter(); submitPage.setLoginProcessingUrl(this.loginProcessingUrl); if (StringUtils.hasText(this.defaultSubmitPageUrl)) { submitPage.setRequestMatcher( ServerWebExchangeMatchers.pathMatchers(HttpMethod.GET, this.defaultSubmitPageUrl)); } http.addFilterAt(submitPage, SecurityWebFiltersOrder.ONE_TIME_TOKEN_SUBMIT_PAGE_GENERATING); } private void configureOttGenerateFilter(ServerHttpSecurity http) { GenerateOneTimeTokenWebFilter generateFilter = new GenerateOneTimeTokenWebFilter(getTokenService(), getTokenGenerationSuccessHandler()); generateFilter .setRequestMatcher(ServerWebExchangeMatchers.pathMatchers(HttpMethod.POST, this.tokenGeneratingUrl)); generateFilter.setGenerateRequestResolver(getRequestResolver()); http.addFilterAt(generateFilter, SecurityWebFiltersOrder.ONE_TIME_TOKEN); } private void configureDefaultEntryPoint(ServerHttpSecurity http) { MediaTypeServerWebExchangeMatcher htmlMatcher = new MediaTypeServerWebExchangeMatcher( MediaType.APPLICATION_XHTML_XML, new MediaType("image", "*"), MediaType.TEXT_HTML, MediaType.TEXT_PLAIN); htmlMatcher.setIgnoredMediaTypes(Collections.singleton(MediaType.ALL)); ServerWebExchangeMatcher xhrMatcher = (exchange) -> { if (exchange.getRequest().getHeaders().getOrEmpty("X-Requested-With").contains("XMLHttpRequest")) { return ServerWebExchangeMatcher.MatchResult.match(); } return ServerWebExchangeMatcher.MatchResult.notMatch(); }; ServerWebExchangeMatcher notXhrMatcher = new NegatedServerWebExchangeMatcher(xhrMatcher); ServerWebExchangeMatcher defaultEntryPointMatcher = new AndServerWebExchangeMatcher(notXhrMatcher, htmlMatcher); String loginPage = "/login"; if (this.loginPage != null) { loginPage = this.loginPage; } RedirectServerAuthenticationEntryPoint defaultEntryPoint = new RedirectServerAuthenticationEntryPoint( loginPage); 
defaultEntryPoint.setRequestCache(http.requestCache.requestCache); http.defaultEntryPoints.add(new DelegateEntry(defaultEntryPointMatcher, defaultEntryPoint)); } /** * Allows customizing the list of {@link ServerAuthenticationSuccessHandler}. The * default list contains a {@link RedirectServerAuthenticationSuccessHandler} that * redirects to "/". * @param handlersConsumer the handlers consumer * @return the {@link OneTimeTokenLoginSpec} to continue configuring */ public OneTimeTokenLoginSpec authenticationSuccessHandler( Consumer<List<ServerAuthenticationSuccessHandler>> handlersConsumer) { Assert.notNull(handlersConsumer, "handlersConsumer cannot be null"); handlersConsumer.accept(this.authenticationSuccessHandlers); return this; } /** * Specifies the {@link ServerAuthenticationSuccessHandler} * @param authenticationSuccessHandler the * {@link ServerAuthenticationSuccessHandler}. */ public OneTimeTokenLoginSpec authenticationSuccessHandler( ServerAuthenticationSuccessHandler authenticationSuccessHandler) { Assert.notNull(authenticationSuccessHandler, "authenticationSuccessHandler cannot be null"); authenticationSuccessHandler((handlers) -> { handlers.clear(); handlers.add(authenticationSuccessHandler); }); return this; } private ServerAuthenticationSuccessHandler getAuthenticationSuccessHandler() { if (this.authenticationSuccessHandlers.isEmpty()) { return new DelegatingServerAuthenticationSuccessHandler(this.defaultSuccessHandlers); } return new DelegatingServerAuthenticationSuccessHandler(this.authenticationSuccessHandlers); } /** * Specifies the {@link ServerAuthenticationFailureHandler} to use when * authentication fails. The default is redirecting to "/login?error" using * {@link RedirectServerAuthenticationFailureHandler} * @param authenticationFailureHandler the * {@link ServerAuthenticationFailureHandler} to use when authentication fails. 
*/ public OneTimeTokenLoginSpec authenticationFailureHandler( ServerAuthenticationFailureHandler authenticationFailureHandler) { Assert.notNull(authenticationFailureHandler, "authenticationFailureHandler cannot be null"); this.authenticationFailureHandler = authenticationFailureHandler; return this; } ServerAuthenticationFailureHandler getAuthenticationFailureHandler() { if (this.authenticationFailureHandler == null) { this.authenticationFailureHandler = new RedirectServerAuthenticationFailureHandler("/login?error"); } return this.authenticationFailureHandler; } /** * Specifies {@link ReactiveAuthenticationManager} for one time tokens. Default * implementation is {@link OneTimeTokenReactiveAuthenticationManager} * @param authenticationManager */ public OneTimeTokenLoginSpec authenticationManager(ReactiveAuthenticationManager authenticationManager) { Assert.notNull(authenticationManager, "authenticationManager cannot be null"); this.authenticationManager = authenticationManager; return this; } ReactiveAuthenticationManager getAuthenticationManager() { if (this.authenticationManager == null) { ReactiveUserDetailsService userDetailsService = getBean(ReactiveUserDetailsService.class); return new OneTimeTokenReactiveAuthenticationManager(getTokenService(), userDetailsService); } return this.authenticationManager; } /** * Configures the {@link ReactiveOneTimeTokenService} used to generate and consume * {@link OneTimeToken} * @param oneTimeTokenService */ public OneTimeTokenLoginSpec tokenService(ReactiveOneTimeTokenService oneTimeTokenService) { Assert.notNull(oneTimeTokenService, "oneTimeTokenService cannot be null"); this.tokenService = oneTimeTokenService; return this; } ReactiveOneTimeTokenService getTokenService() { if (this.tokenService != null) { return this.tokenService; } ReactiveOneTimeTokenService oneTimeTokenService = getBeanOrNull(ReactiveOneTimeTokenService.class); if (oneTimeTokenService != null) { return oneTimeTokenService; } this.tokenService = new 
InMemoryReactiveOneTimeTokenService(); return this.tokenService; } /** * Use this {@link ServerAuthenticationConverter} when converting incoming * requests to an {@link Authentication}. By default, the * {@link ServerOneTimeTokenAuthenticationConverter} is used. * @param authenticationConverter the {@link ServerAuthenticationConverter} to use */ public OneTimeTokenLoginSpec authenticationConverter(ServerAuthenticationConverter authenticationConverter) { Assert.notNull(authenticationConverter, "authenticationConverter cannot be null"); this.authenticationConverter = authenticationConverter; return this; } /** * Use this {@link ServerGenerateOneTimeTokenRequestResolver} when resolving * {@link GenerateOneTimeTokenRequest} from {@link ServerWebExchange}. By default, * the {@link DefaultServerGenerateOneTimeTokenRequestResolver} is used. * @param requestResolver the * {@link DefaultServerGenerateOneTimeTokenRequestResolver} to use * @since 6.5 */ public OneTimeTokenLoginSpec generateRequestResolver( ServerGenerateOneTimeTokenRequestResolver requestResolver) { Assert.notNull(requestResolver, "generateRequestResolver cannot be null"); this.requestResolver = requestResolver; return this; } private ServerGenerateOneTimeTokenRequestResolver getRequestResolver() { if (this.requestResolver != null) { return this.requestResolver; } ServerGenerateOneTimeTokenRequestResolver bean = getBeanOrNull( ServerGenerateOneTimeTokenRequestResolver.class); this.requestResolver = Objects.requireNonNullElseGet(bean, DefaultServerGenerateOneTimeTokenRequestResolver::new); return this.requestResolver; } /** * Specifies the URL to process the login request, defaults to {@code /login/ott}. * Only POST requests are processed, for that reason make sure that you pass a * valid CSRF token if CSRF protection is enabled. 
* @param loginProcessingUrl */ public OneTimeTokenLoginSpec loginProcessingUrl(String loginProcessingUrl) { Assert.hasText(loginProcessingUrl, "loginProcessingUrl cannot be null or empty"); this.loginProcessingUrl = loginProcessingUrl; return this; } /** * Configures whether the default one-time token submit page should be shown. This * will prevent the {@link OneTimeTokenSubmitPageGeneratingWebFilter} to be * configured. * @param show */ public OneTimeTokenLoginSpec showDefaultSubmitPage(boolean show) { this.submitPageEnabled = show; return this; } /** * Sets the URL that the default submit page will be generated. Defaults to * {@code /login/ott}. If you don't want to generate the default submit page you * should use {@link #showDefaultSubmitPage(boolean)}. Note that this method * always invoke {@link #showDefaultSubmitPage(boolean)} passing {@code true}. * @param submitPageUrl */ public OneTimeTokenLoginSpec defaultSubmitPageUrl(String submitPageUrl) { Assert.hasText(submitPageUrl, "submitPageUrl cannot be null or empty"); this.defaultSubmitPageUrl = submitPageUrl; showDefaultSubmitPage(true); return this; } /** * Specifies strategy to be used to handle generated one-time tokens. * @param oneTimeTokenGenerationSuccessHandler */ public OneTimeTokenLoginSpec tokenGenerationSuccessHandler( ServerOneTimeTokenGenerationSuccessHandler oneTimeTokenGenerationSuccessHandler) { Assert.notNull(oneTimeTokenGenerationSuccessHandler, "oneTimeTokenGenerationSuccessHandler cannot be null"); this.tokenGenerationSuccessHandler = oneTimeTokenGenerationSuccessHandler; return this; } /** * Specifies the URL that a One-Time Token generate request will be processed. * Defaults to {@code /ott/generate}. 
* @param tokenGeneratingUrl */ public OneTimeTokenLoginSpec tokenGeneratingUrl(String tokenGeneratingUrl) { Assert.hasText(tokenGeneratingUrl, "tokenGeneratingUrl cannot be null or empty"); this.tokenGeneratingUrl = tokenGeneratingUrl; return this; } /** * The {@link ServerSecurityContextRepository} used to save the * {@code Authentication}. Defaults to * {@link WebSessionServerSecurityContextRepository}. For the * {@code SecurityContext} to be loaded on subsequent requests the * {@link ReactorContextWebFilter} must be configured to be able to load the value * (they are not implicitly linked). * @param securityContextRepository the repository to use * @return the {@link OneTimeTokenLoginSpec} to continue configuring */ public OneTimeTokenLoginSpec securityContextRepository( ServerSecurityContextRepository securityContextRepository) { this.securityContextRepository = securityContextRepository; return this; } private ServerOneTimeTokenGenerationSuccessHandler getTokenGenerationSuccessHandler() { if (this.tokenGenerationSuccessHandler == null) { this.tokenGenerationSuccessHandler = getBeanOrNull(ServerOneTimeTokenGenerationSuccessHandler.class); } if (this.tokenGenerationSuccessHandler == null) { throw new IllegalStateException(""" A ServerOneTimeTokenGenerationSuccessHandler is required to enable oneTimeTokenLogin(). Please provide it as a bean or pass it to the oneTimeTokenLogin() DSL. """); } return this.tokenGenerationSuccessHandler; } /** * Specifies the URL to send users to if login is required. A default login page * will be generated when this attribute is not specified. * @param loginPage the URL to send users to if login is required * @return the {@link OAuth2LoginSpec} for further configuration * @since 6.5 */ public OneTimeTokenLoginSpec loginPage(String loginPage) { Assert.hasText(loginPage, "loginPage cannot be empty"); this.loginPage = loginPage; return this; } } }
OneTimeTokenLoginSpec
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/source/presencecheck/spi/SoccerTeamMapperNestedObjects.java
{ "start": 379, "end": 815 }
interface ____ { SoccerTeamMapperNestedObjects INSTANCE = Mappers.getMapper( SoccerTeamMapperNestedObjects.class ); @Mappings({ @Mapping(target = "players", ignore = true), @Mapping(target = "goalKeeperName", source = "goalKeeper.name"), @Mapping(target = "referee.name", source = "refereeName") }) SoccerTeamTargetWithPresenceCheck mapNested( SoccerTeamSource in ); }
SoccerTeamMapperNestedObjects
java
elastic__elasticsearch
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/llama/embeddings/LlamaEmbeddingsModel.java
{ "start": 1137, "end": 4799 }
class ____ extends LlamaModel { /** * Constructor for creating a LlamaEmbeddingsModel with specified parameters. * * @param inferenceEntityId the unique identifier for the inference entity * @param taskType the type of task this model is designed for * @param service the name of the inference service * @param serviceSettings the settings for the inference service, specific to embeddings * @param secrets the secret settings for the model, such as API keys or tokens * @param context the context for parsing configuration settings */ public LlamaEmbeddingsModel( String inferenceEntityId, TaskType taskType, String service, Map<String, Object> serviceSettings, ChunkingSettings chunkingSettings, Map<String, Object> secrets, ConfigurationParseContext context ) { this( inferenceEntityId, taskType, service, LlamaEmbeddingsServiceSettings.fromMap(serviceSettings, context), chunkingSettings, retrieveSecretSettings(secrets) ); } /** * Constructor for creating a LlamaEmbeddingsModel with specified parameters. * * @param model the base LlamaEmbeddingsModel to copy properties from * @param serviceSettings the settings for the inference service, specific to embeddings */ public LlamaEmbeddingsModel(LlamaEmbeddingsModel model, LlamaEmbeddingsServiceSettings serviceSettings) { super(model, serviceSettings); setPropertiesFromServiceSettings(serviceSettings); } /** * Sets properties from the provided LlamaEmbeddingsServiceSettings. * * @param serviceSettings the service settings to extract properties from */ private void setPropertiesFromServiceSettings(LlamaEmbeddingsServiceSettings serviceSettings) { this.uri = serviceSettings.uri(); this.rateLimitSettings = serviceSettings.rateLimitSettings(); } /** * Constructor for creating a LlamaEmbeddingsModel with specified parameters. 
* * @param inferenceEntityId the unique identifier for the inference entity * @param taskType the type of task this model is designed for * @param service the name of the inference service * @param serviceSettings the settings for the inference service, specific to embeddings * @param chunkingSettings the chunking settings for processing input data * @param secrets the secret settings for the model, such as API keys or tokens */ public LlamaEmbeddingsModel( String inferenceEntityId, TaskType taskType, String service, LlamaEmbeddingsServiceSettings serviceSettings, ChunkingSettings chunkingSettings, SecretSettings secrets ) { super( new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, EmptyTaskSettings.INSTANCE, chunkingSettings), new ModelSecrets(secrets) ); setPropertiesFromServiceSettings(serviceSettings); } @Override public LlamaEmbeddingsServiceSettings getServiceSettings() { return (LlamaEmbeddingsServiceSettings) super.getServiceSettings(); } /** * Accepts a visitor to create an executable action for this Llama embeddings model. * * @param creator the visitor that creates the executable action * @return an ExecutableAction representing the Llama embeddings model */ @Override public ExecutableAction accept(LlamaActionVisitor creator) { return creator.create(this); } }
LlamaEmbeddingsModel
java
quarkusio__quarkus
extensions/websockets-next/runtime/src/main/java/io/quarkus/websockets/next/runtime/Endpoints.java
{ "start": 1263, "end": 22011 }
class ____ { private static final Logger LOG = Logger.getLogger(Endpoints.class); static void initialize(Vertx vertx, ArcContainer container, Codecs codecs, WebSocketConnectionBase connection, WebSocketBase ws, String generatedEndpointClass, Optional<Duration> autoPingInterval, SecuritySupport securitySupport, UnhandledFailureStrategy unhandledFailureStrategy, TrafficLogger trafficLogger, Runnable onClose, boolean activateRequestContext, boolean activateSessionContext, TelemetrySupport telemetrySupport) { Context context = vertx.getOrCreateContext(); // Initialize and capture the session context state that will be activated // during message processing ManagedContext sessionContext = null; InjectableContext.ContextState sessionContextState = null; if (activateSessionContext) { sessionContext = container.sessionContext(); sessionContextState = sessionContext.initializeState(); } ContextSupport contextSupport = new ContextSupport(connection, sessionContextState, sessionContext, activateRequestContext ? container.requestContext() : null); // Create an endpoint that delegates callbacks to the endpoint bean WebSocketEndpoint endpoint = createEndpoint(generatedEndpointClass, context, connection, codecs, contextSupport, securitySupport, telemetrySupport); // A broadcast processor is only needed if Multi is consumed by the callback BroadcastProcessor<Object> textBroadcastProcessor = endpoint.consumedTextMultiType() != null ? BroadcastProcessor.create() : null; BroadcastProcessor<Object> binaryBroadcastProcessor = endpoint.consumedBinaryMultiType() != null ? 
BroadcastProcessor.create() : null; // NOTE: We always invoke callbacks on a new duplicated context // and the endpoint is responsible to make the switch if blocking/virtualThread Context onOpenContext = ContextSupport.createNewDuplicatedContext(context, connection); onOpenContext.runOnContext(new Handler<Void>() { @Override public void handle(Void event) { endpoint.onOpen().onComplete(r -> { if (r.succeeded()) { LOG.debugf("@OnOpen callback completed: %s", connection); // If Multi is consumed we need to invoke the callback eagerly // but after @OnOpen completes if (textBroadcastProcessor != null) { Multi<Object> multi = textBroadcastProcessor.onCancellation().call(connection::close); onOpenContext.runOnContext(new Handler<Void>() { @Override public void handle(Void event) { endpoint.onTextMessage(multi).onComplete(r -> { if (r.succeeded()) { LOG.debugf("@OnTextMessage callback consuming Multi completed: %s", connection); } else { handleFailure(unhandledFailureStrategy, r.cause(), "Unable to complete @OnTextMessage callback consuming Multi", connection); } }); } }); } if (binaryBroadcastProcessor != null) { Multi<Object> multi = binaryBroadcastProcessor.onCancellation().call(connection::close); onOpenContext.runOnContext(new Handler<Void>() { @Override public void handle(Void event) { endpoint.onBinaryMessage(multi).onComplete(r -> { if (r.succeeded()) { LOG.debugf("@OnBinaryMessage callback consuming Multi completed: %s", connection); } else { handleFailure(unhandledFailureStrategy, r.cause(), "Unable to complete @OnBinaryMessage callback consuming Multi", connection); } }); } }); } } else { if (telemetrySupport != null) { telemetrySupport.connectionOpeningFailed(r.cause()); } handleFailure(unhandledFailureStrategy, r.cause(), "Unable to complete @OnOpen callback", connection); } }); } }); if (textBroadcastProcessor == null) { // Multi not consumed - invoke @OnTextMessage callback for each message received textMessageHandler(connection, endpoint, ws, 
onOpenContext, m -> { if (trafficLogger != null) { trafficLogger.textMessageReceived(connection, m); } endpoint.onTextMessage(m).onComplete(r -> { if (r.succeeded()) { LOG.debugf("@OnTextMessage callback consumed text message: %s", connection); } else { handleFailure(unhandledFailureStrategy, r.cause(), "Unable to consume text message in @OnTextMessage callback", connection); } }); }, true); } else { textMessageHandler(connection, endpoint, ws, onOpenContext, m -> { contextSupport.start(); try { if (trafficLogger != null) { trafficLogger.textMessageReceived(connection, m); } textBroadcastProcessor.onNext(endpoint.decodeTextMultiItem(m)); LOG.debugf("Text message >> Multi: %s", connection); } catch (Throwable throwable) { endpoint.doOnError(throwable).subscribe().with( v -> LOG.debugf("Text message >> Multi: %s", connection), t -> handleFailure(unhandledFailureStrategy, t, "Unable to send text message to Multi", connection)); } finally { contextSupport.end(false); } }, false); } if (binaryBroadcastProcessor == null) { // Multi not consumed - invoke @OnBinaryMessage callback for each message received binaryMessageHandler(connection, endpoint, ws, onOpenContext, m -> { if (trafficLogger != null) { trafficLogger.binaryMessageReceived(connection, m); } endpoint.onBinaryMessage(m).onComplete(r -> { if (r.succeeded()) { LOG.debugf("@OnBinaryMessage callback consumed binary message: %s", connection); } else { handleFailure(unhandledFailureStrategy, r.cause(), "Unable to consume binary message in @OnBinaryMessage callback", connection); } }); }, true); } else { binaryMessageHandler(connection, endpoint, ws, onOpenContext, m -> { contextSupport.start(); try { if (trafficLogger != null) { trafficLogger.binaryMessageReceived(connection, m); } binaryBroadcastProcessor.onNext(endpoint.decodeBinaryMultiItem(m)); LOG.debugf("Binary message >> Multi: %s", connection); } catch (Throwable throwable) { endpoint.doOnError(throwable).subscribe().with( v -> LOG.debugf("Binary message >> 
Multi: %s", connection), t -> handleFailure(unhandledFailureStrategy, t, "Unable to send binary message to Multi", connection)); } finally { contextSupport.end(false); } }, false); } pingMessageHandler(connection, endpoint, ws, onOpenContext, m -> { endpoint.onPingMessage(m).onComplete(r -> { if (r.succeeded()) { LOG.debugf("@OnPingMessage callback consumed application message: %s", connection); } else { handleFailure(unhandledFailureStrategy, r.cause(), "Unable to consume application message in @OnPingMessage callback", connection); } }); }); pongMessageHandler(connection, endpoint, ws, onOpenContext, m -> { endpoint.onPongMessage(m).onComplete(r -> { if (r.succeeded()) { LOG.debugf("@OnPongMessage callback consumed application message: %s", connection); } else { handleFailure(unhandledFailureStrategy, r.cause(), "Unable to consume application message in @OnPongMessage callback", connection); } }); }); Long timerId; if (autoPingInterval.isPresent()) { timerId = vertx.setPeriodic(autoPingInterval.get().toMillis(), new Handler<Long>() { @Override public void handle(Long timerId) { if (connection.isOpen()) { connection.sendAutoPing(); } else { LOG.debugf("Try to cancel the autoPing timer for a closed connection: %s", connection.id()); vertx.cancelTimer(timerId); } } }); } else { timerId = null; } ws.closeHandler(new Handler<Void>() { @Override public void handle(Void event) { if (trafficLogger != null) { trafficLogger.connectionClosed(connection); } ContextSupport.createNewDuplicatedContext(context, connection).runOnContext(new Handler<Void>() { @Override public void handle(Void event) { endpoint.onClose().onComplete(r -> { try { if (r.succeeded()) { LOG.debugf("@OnClose callback completed: %s", connection); } else { handleFailure(unhandledFailureStrategy, r.cause(), "Unable to complete @OnClose callback", connection); } securitySupport.onClose(); onClose.run(); } finally { // Make sure we always try to cancel the timer if (timerId != null) { 
vertx.cancelTimer(timerId); } } }); } }); } }); ws.exceptionHandler(new Handler<Throwable>() { @Override public void handle(Throwable t) { ContextSupport.createNewDuplicatedContext(context, connection).runOnContext(new Handler<Void>() { @Override public void handle(Void event) { endpoint.doOnError(t).subscribe().with( v -> LOG.debugf("Error [%s] processed: %s", t.getClass(), connection), t -> handleFailure(unhandledFailureStrategy, t, "Unhandled error occurred", connection)); } }); } }); } private static void handleFailure(UnhandledFailureStrategy strategy, Throwable cause, String message, WebSocketConnectionBase connection) { switch (strategy) { case LOG_AND_CLOSE -> logAndClose(cause, message, connection); case CLOSE -> closeConnection(cause, message, connection); case LOG -> logFailure(cause, message, connection); case NOOP -> LOG.tracef("Unhandled failure ignored: %s", connection); default -> throw new IllegalArgumentException("Unexpected strategy: " + strategy); } } private static void logAndClose(Throwable cause, String message, WebSocketConnectionBase connection) { logFailure(cause, message, connection); closeConnection(cause, message, connection); } private static void closeConnection(Throwable cause, String message, WebSocketConnectionBase connection) { if (connection.isClosed()) { return; } CloseReason closeReason; final int statusCode; if (isSecurityFailure(cause)) { statusCode = WebSocketCloseStatus.POLICY_VIOLATION.code(); } else { statusCode = connection instanceof WebSocketClientConnectionImpl ? 
WebSocketCloseStatus.INVALID_MESSAGE_TYPE.code() : WebSocketCloseStatus.INTERNAL_SERVER_ERROR.code(); } if (LaunchMode.current().isDevOrTest()) { closeReason = new CloseReason(statusCode, cause.getMessage()); } else { closeReason = new CloseReason(statusCode); } connection.close(closeReason).subscribe().with( v -> LOG.debugf("Connection closed due to unhandled failure %s: %s", cause, connection), t -> LOG.errorf("Unable to close connection [%s] due to unhandled failure [%s]: %s", connection.id(), cause, t)); } private static void logFailure(Throwable throwable, String message, WebSocketConnectionBase connection) { if (isWebSocketIsClosedFailure(throwable, connection)) { LOG.debugf(throwable, message + ": %s", connection); } else if (isSecurityFailure(throwable)) { // Avoid excessive logging for security failures LOG.errorf("Security failure: %s", throwable.toString()); } else { LOG.errorf(throwable, message + ": %s", connection); } } private static boolean isSecurityFailure(Throwable throwable) { return throwable instanceof UnauthorizedException || throwable instanceof AuthenticationException || throwable instanceof ForbiddenException; } static boolean isWebSocketIsClosedFailure(Throwable throwable, WebSocketConnectionBase connection) { if (throwable instanceof HttpClosedException) { return true; } if (!connection.isClosed()) { return false; } if (throwable == null) { return false; } String message = throwable.getMessage(); if (message == null) { return false; } return message.contains("WebSocket is closed"); } private static void textMessageHandler(WebSocketConnectionBase connection, WebSocketEndpoint endpoint, WebSocketBase ws, Context context, Consumer<String> textAction, boolean newDuplicatedContext) { ws.textMessageHandler(new Handler<String>() { @Override public void handle(String message) { Context duplicatedContext = newDuplicatedContext ? 
ContextSupport.createNewDuplicatedContext(context, connection) : context; duplicatedContext.runOnContext(new Handler<Void>() { @Override public void handle(Void event) { textAction.accept(message); } }); } }); } private static void binaryMessageHandler(WebSocketConnectionBase connection, WebSocketEndpoint endpoint, WebSocketBase ws, Context context, Consumer<Buffer> binaryAction, boolean newDuplicatedContext) { ws.binaryMessageHandler(new Handler<Buffer>() { @Override public void handle(Buffer message) { Context duplicatedContext = newDuplicatedContext ? ContextSupport.createNewDuplicatedContext(context, connection) : context; duplicatedContext.runOnContext(new Handler<Void>() { @Override public void handle(Void event) { binaryAction.accept(message); } }); } }); } private static void pingMessageHandler(WebSocketConnectionBase connection, WebSocketEndpoint endpoint, WebSocketBase ws, Context context, Consumer<Buffer> pingAction) { ws.frameHandler(new Handler<WebSocketFrame>() { @Override public void handle(WebSocketFrame frame) { if (frame.type() == WebSocketFrameType.PING) { Context duplicatedContext = ContextSupport.createNewDuplicatedContext(context, connection); duplicatedContext.runOnContext(new Handler<Void>() { @Override public void handle(Void event) { pingAction.accept(frame.binaryData()); } }); } } }); } private static void pongMessageHandler(WebSocketConnectionBase connection, WebSocketEndpoint endpoint, WebSocketBase ws, Context context, Consumer<Buffer> pongAction) { ws.pongHandler(new Handler<Buffer>() { @Override public void handle(Buffer message) { Context duplicatedContext = ContextSupport.createNewDuplicatedContext(context, connection); duplicatedContext.runOnContext(new Handler<Void>() { @Override public void handle(Void event) { pongAction.accept(message); } }); } }); } private static WebSocketEndpoint createEndpoint(String endpointClassName, Context context, WebSocketConnectionBase connection, Codecs codecs, ContextSupport contextSupport, 
SecuritySupport securitySupport, TelemetrySupport telemetrySupport) { try { ClassLoader cl = Thread.currentThread().getContextClassLoader(); if (cl == null) { cl = WebSocketServerRecorder.class.getClassLoader(); } @SuppressWarnings("unchecked") Class<? extends WebSocketEndpoint> endpointClazz = (Class<? extends WebSocketEndpoint>) cl .loadClass(endpointClassName); ErrorInterceptor errorInterceptor = telemetrySupport == null ? null : telemetrySupport.getErrorInterceptor(); WebSocketEndpoint endpoint = (WebSocketEndpoint) endpointClazz .getDeclaredConstructor(WebSocketConnectionBase.class, Codecs.class, ContextSupport.class, SecuritySupport.class, ErrorInterceptor.class) .newInstance(connection, codecs, contextSupport, securitySupport, errorInterceptor); if (telemetrySupport != null) { return telemetrySupport.decorate(endpoint, connection); } return endpoint; } catch (Exception e) { throw new WebSocketException("Unable to create endpoint instance: " + endpointClassName, e); } } }
Endpoints
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/bugs/_1005/Issue1005ErroneousInterfaceResultTypeMapper.java
{ "start": 304, "end": 449 }
interface ____ { @BeanMapping(resultType = HasPrimaryKey.class) HasKey map(OrderDto orderDto); }
Issue1005ErroneousInterfaceResultTypeMapper
java
apache__flink
flink-core/src/main/java/org/apache/flink/configuration/description/LinkElement.java
{ "start": 1001, "end": 2109 }
class ____ implements InlineElement { private final String link; private final String text; /** * Creates a link with a given url and description. * * @param link address that this link should point to * @param text a description for that link, that should be used in text * @return link representation */ public static LinkElement link(String link, String text) { return new LinkElement(link, text); } /** * Creates a link with a given url. This url will be used as a description for that link. * * @param link address that this link should point to * @return link representation */ public static LinkElement link(String link) { return new LinkElement(link, link); } public String getLink() { return link; } public String getText() { return text; } private LinkElement(String link, String text) { this.link = link; this.text = text; } @Override public void format(Formatter formatter) { formatter.format(this); } }
LinkElement
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/component/log/DefaultExchangeFormatterTest.java
{ "start": 1462, "end": 8433 }
class ____ extends ContextTestSupport { @Test public void testSendMessageToLogDefault() { assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST", "Hello World")); } @Test public void testSendMessageToLogAllOff() { assertDoesNotThrow( () -> template.sendBody("log:org.apache.camel.TEST?showBody=false&showBodyType=false&showExchangePattern=false", "Hello World")); } @Test public void testSendMessageToLogSingleOptions() { assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST?showExchangeId=true", "Hello World")); assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST?showExchangePattern=true", "Hello World")); assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST?showExchangePattern=false", "Hello World")); assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST?showProperties=true", "Hello World")); assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST?showHeaders=true", "Hello World")); assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST?showBodyType=true", "Hello World")); assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST?showBody=true", "Hello World")); assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST?showAll=true", "Hello World")); assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST?showFuture=true", new MyFuture(() -> "foo"))); assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST?showFuture=false", new MyFuture(() -> "bar"))); } @Test public void testSendMessageToLogMultiOptions() { assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST?showHeaders=true", "Hello World")); assertDoesNotThrow( () -> template.sendBody("log:org.apache.camel.TEST?showAllProperties=true&showHeaders=true", "Hello World")); } @Test public void testSendMessageToLogShowFalse() { assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST?showBodyType=false", "Hello World")); } 
@Test public void testSendMessageToLogMultiLine() { assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST?multiline=true", "Hello World")); } @Test public void testSendByteArrayMessageToLogDefault() { assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST", "Hello World".getBytes())); } @Test public void testSendMessageToLogMaxChars() { assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST", "Hello World this is a very long string that is NOT going to be chopped by maxchars")); assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST?maxChars=50", "Hello World this is a very long string that is going to be chopped by maxchars")); assertDoesNotThrow(() -> template.sendBody("log:org.apache.camel.TEST?maxChars=50&showAll=true&multiline=true", "Hello World this is a very long string that is going to be chopped by maxchars")); } @Test public void testSendExchangeWithException() throws Exception { Endpoint endpoint = resolveMandatoryEndpoint("log:org.apache.camel.TEST?showException=true"); Exchange exchange = endpoint.createExchange(); exchange.getIn().setBody("Hello World"); exchange.setException(new IllegalArgumentException("Damn")); Producer producer = endpoint.createProducer(); producer.start(); producer.process(exchange); assertMockEndpointsSatisfied(); producer.stop(); } @Test public void testSendCaughtExchangeWithException() throws Exception { Endpoint endpoint = resolveMandatoryEndpoint("log:org.apache.camel.TEST?showCaughtException=true"); Exchange exchange = endpoint.createExchange(); exchange.getIn().setBody("Hello World"); exchange.setProperty(Exchange.EXCEPTION_CAUGHT, new IllegalArgumentException("I am caught")); Producer producer = endpoint.createProducer(); producer.start(); producer.process(exchange); assertMockEndpointsSatisfied(); producer.stop(); } @Test public void testSendCaughtExchangeWithExceptionAndMultiline() throws Exception { Endpoint endpoint = 
resolveMandatoryEndpoint("log:org.apache.camel.TEST?showCaughtException=true&multiline=true"); Exchange exchange = endpoint.createExchange(); exchange.getIn().setBody("Hello World"); exchange.setProperty(Exchange.EXCEPTION_CAUGHT, new IllegalArgumentException("I am caught")); Producer producer = endpoint.createProducer(); producer.start(); producer.process(exchange); assertMockEndpointsSatisfied(); producer.stop(); } @Test public void testSendExchangeWithExceptionAndStackTrace() throws Exception { Endpoint endpoint = resolveMandatoryEndpoint("log:org.apache.camel.TEST?showException=true&showStackTrace=true"); Exchange exchange = endpoint.createExchange(); exchange.getIn().setBody("Hello World"); exchange.setException(new IllegalArgumentException("Damn")); Producer producer = endpoint.createProducer(); producer.start(); producer.process(exchange); assertMockEndpointsSatisfied(); producer.stop(); } @Test public void testSendCaughtExchangeWithExceptionAndStackTrace() throws Exception { Endpoint endpoint = resolveMandatoryEndpoint("log:org.apache.camel.TEST?showCaughtException=true&showStackTrace=true"); Exchange exchange = endpoint.createExchange(); exchange.getIn().setBody("Hello World"); exchange.setProperty(Exchange.EXCEPTION_CAUGHT, new IllegalArgumentException("I am caught")); Producer producer = endpoint.createProducer(); producer.start(); producer.process(exchange); assertMockEndpointsSatisfied(); producer.stop(); } @Test public void testConfiguration() { DefaultExchangeFormatter formatter = new DefaultExchangeFormatter(); assertFalse(formatter.isShowExchangeId()); assertFalse(formatter.isShowProperties()); assertFalse(formatter.isShowHeaders()); assertFalse(formatter.isShowVariables()); assertTrue(formatter.isShowBodyType()); assertTrue(formatter.isShowBody()); assertFalse(formatter.isShowException()); assertFalse(formatter.isShowCaughtException()); assertFalse(formatter.isShowStackTrace()); assertFalse(formatter.isShowAll()); 
assertFalse(formatter.isMultiline()); assertEquals(10000, formatter.getMaxChars()); } private static
DefaultExchangeFormatterTest
java
apache__camel
components/camel-ignite/src/main/java/org/apache/camel/component/ignite/cache/IgniteCacheComponent.java
{ "start": 1286, "end": 2809 }
class ____ extends AbstractIgniteComponent { public static IgniteCacheComponent fromIgnite(Ignite ignite) { IgniteCacheComponent answer = new IgniteCacheComponent(); answer.setIgnite(ignite); return answer; } public static IgniteCacheComponent fromConfiguration(IgniteConfiguration configuration) { IgniteCacheComponent answer = new IgniteCacheComponent(); answer.setIgniteConfiguration(configuration); return answer; } public static IgniteCacheComponent fromInputStream(InputStream inputStream) { IgniteCacheComponent answer = new IgniteCacheComponent(); answer.setConfigurationResource(inputStream); return answer; } public static IgniteCacheComponent fromUrl(URL url) { IgniteCacheComponent answer = new IgniteCacheComponent(); answer.setConfigurationResource(url); return answer; } public static IgniteCacheComponent fromLocation(String location) { IgniteCacheComponent answer = new IgniteCacheComponent(); answer.setConfigurationResource(location); return answer; } @Override protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception { ObjectHelper.notNull(getCamelContext(), "Camel Context"); IgniteCacheEndpoint answer = new IgniteCacheEndpoint(uri, remaining, parameters, this); setProperties(answer, parameters); return answer; } }
IgniteCacheComponent
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/MapKeyTest.java
{ "start": 1113, "end": 2898 }
class ____ { @Test public void testMapKeyTemporal(EntityManagerFactoryScope scope) throws Exception { SimpleDateFormat formatter = new SimpleDateFormat( "yyyy-MM-dd" ); final Date date1 = formatter.parse( "2022-02-02" ); final Date date2 = java.sql.Date.valueOf( formatter.format( Calendar.getInstance().getTime() ) ); Set<Date> expectedDates = new HashSet<>(); expectedDates.add( date1 ); expectedDates.add( date2 ); School school1 = new School( 1, "High School" ); School school2 = new School( 2, "Primary School" ); Person person1 = new Person( 1, "Andrea", school2 ); Person person2 = new Person( 2, "Luigi", school2 ); Set<Person> expectedPeople = new HashSet<>(); expectedPeople.add( person1 ); expectedPeople.add( person2 ); scope.inTransaction( entityManager -> { Map<Date, Person> lastNames = new HashMap<>(); lastNames.put( date1, person1 ); lastNames.put( date2, person2 ); school2.setStudentsByDate( lastNames ); entityManager.persist( school1 ); entityManager.persist( school2 ); entityManager.persist( person1 ); entityManager.persist( person2 ); } ); scope.inTransaction( entityManager -> { Person person = entityManager.find( Person.class, 2 ); School school = person.getSchool(); Map<Date, Person> studentsByDate = school.getStudentsByDate(); Set<Date> dates = studentsByDate.keySet(); assertEquals( expectedDates.size(), dates.size() ); assertTrue( expectedDates.containsAll( dates ) ); Collection<Person> people = studentsByDate.values(); assertEquals( expectedPeople.size(), people.size() ); assertTrue( expectedPeople.containsAll( people ) ); } ); } @Entity @Table(name = "PERSON_TABLE") public static
MapKeyTest
java
lettuce-io__lettuce-core
src/main/java/io/lettuce/core/json/JsonType.java
{ "start": 313, "end": 886 }
enum ____ { OBJECT, ARRAY, STRING, INTEGER, NUMBER, BOOLEAN, UNKNOWN; public static JsonType fromString(String s) { switch (s) { case "object": return OBJECT; case "array": return ARRAY; case "string": return STRING; case "integer": return INTEGER; case "number": return NUMBER; case "boolean": return BOOLEAN; default: return UNKNOWN; } } }
JsonType
java
spring-projects__spring-framework
spring-context/src/test/java/org/springframework/context/annotation/configuration/ConfigurationClassProcessingTests.java
{ "start": 17619, "end": 17764 }
class ____ { @Bean public final TestBean testBean() { return new TestBean(); } } @Configuration static
ConfigWithFinalBeanWithoutProxy
java
spring-projects__spring-framework
spring-jdbc/src/test/java/org/springframework/jdbc/support/JdbcUtilsTests.java
{ "start": 909, "end": 2245 }
class ____ { @Test void commonDatabaseName() { assertThat(JdbcUtils.commonDatabaseName("Oracle")).isEqualTo("Oracle"); assertThat(JdbcUtils.commonDatabaseName("DB2-for-Spring")).isEqualTo("DB2"); assertThat(JdbcUtils.commonDatabaseName("Sybase SQL Server")).isEqualTo("Sybase"); assertThat(JdbcUtils.commonDatabaseName("Adaptive Server Enterprise")).isEqualTo("Sybase"); assertThat(JdbcUtils.commonDatabaseName("MySQL")).isEqualTo("MySQL"); assertThat(JdbcUtils.commonDatabaseName("MariaDB")).isEqualTo("MariaDB"); } @Test void resolveTypeName() { assertThat(JdbcUtils.resolveTypeName(Types.VARCHAR)).isEqualTo("VARCHAR"); assertThat(JdbcUtils.resolveTypeName(Types.NUMERIC)).isEqualTo("NUMERIC"); assertThat(JdbcUtils.resolveTypeName(Types.INTEGER)).isEqualTo("INTEGER"); assertThat(JdbcUtils.resolveTypeName(JdbcUtils.TYPE_UNKNOWN)).isNull(); } @Test void convertUnderscoreNameToPropertyName() { assertThat(JdbcUtils.convertUnderscoreNameToPropertyName("MY_NAME")).isEqualTo("myName"); assertThat(JdbcUtils.convertUnderscoreNameToPropertyName("yOUR_nAME")).isEqualTo("yourName"); assertThat(JdbcUtils.convertUnderscoreNameToPropertyName("a_name")).isEqualTo("AName"); assertThat(JdbcUtils.convertUnderscoreNameToPropertyName("someone_elses_name")).isEqualTo("someoneElsesName"); } }
JdbcUtilsTests
java
elastic__elasticsearch
modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java
{ "start": 34037, "end": 35647 }
class ____ { private final ScrollableHitSource.AsyncResponse asyncResponse; private final List<? extends ScrollableHitSource.Hit> hits; private int consumedOffset = 0; ScrollConsumableHitsResponse(ScrollableHitSource.AsyncResponse asyncResponse) { this.asyncResponse = asyncResponse; this.hits = asyncResponse.response().getHits(); } ScrollableHitSource.Response response() { return asyncResponse.response(); } List<? extends ScrollableHitSource.Hit> consumeRemainingHits() { return consumeHits(remainingHits()); } List<? extends ScrollableHitSource.Hit> consumeHits(int numberOfHits) { if (numberOfHits < 0) { throw new IllegalArgumentException("Invalid number of hits to consume [" + numberOfHits + "]"); } if (numberOfHits > remainingHits()) { throw new IllegalArgumentException( "Unable to provide [" + numberOfHits + "] hits as there are only [" + remainingHits() + "] hits available" ); } int start = consumedOffset; consumedOffset += numberOfHits; return hits.subList(start, consumedOffset); } boolean hasRemainingHits() { return remainingHits() > 0; } int remainingHits() { return hits.size() - consumedOffset; } void done(TimeValue extraKeepAlive) { asyncResponse.done(extraKeepAlive); } } }
ScrollConsumableHitsResponse
java
elastic__elasticsearch
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java
{ "start": 157667, "end": 166744 }
class ____ extends BooleanExpressionContext { public BooleanExpressionContext left; public Token operator; public BooleanExpressionContext right; public List<BooleanExpressionContext> booleanExpression() { return getRuleContexts(BooleanExpressionContext.class); } public BooleanExpressionContext booleanExpression(int i) { return getRuleContext(BooleanExpressionContext.class, i); } public TerminalNode AND() { return getToken(SqlBaseParser.AND, 0); } public TerminalNode OR() { return getToken(SqlBaseParser.OR, 0); } public LogicalBinaryContext(BooleanExpressionContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).enterLogicalBinary(this); } @Override public void exitRule(ParseTreeListener listener) { if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).exitLogicalBinary(this); } @Override public <T> T accept(ParseTreeVisitor<? extends T> visitor) { if (visitor instanceof SqlBaseVisitor) return ((SqlBaseVisitor<? 
extends T>) visitor).visitLogicalBinary(this); else return visitor.visitChildren(this); } } public final BooleanExpressionContext booleanExpression() throws RecognitionException { return booleanExpression(0); } private BooleanExpressionContext booleanExpression(int _p) throws RecognitionException { ParserRuleContext _parentctx = _ctx; int _parentState = getState(); BooleanExpressionContext _localctx = new BooleanExpressionContext(_ctx, _parentState); BooleanExpressionContext _prevctx = _localctx; int _startState = 54; enterRecursionRule(_localctx, 54, RULE_booleanExpression, _p); try { int _alt; enterOuterAlt(_localctx, 1); { setState(541); _errHandler.sync(this); switch (getInterpreter().adaptivePredict(_input, 73, _ctx)) { case 1: { _localctx = new LogicalNotContext(_localctx); _ctx = _localctx; _prevctx = _localctx; setState(511); match(NOT); setState(512); booleanExpression(8); } break; case 2: { _localctx = new ExistsContext(_localctx); _ctx = _localctx; _prevctx = _localctx; setState(513); match(EXISTS); setState(514); match(T__0); setState(515); query(); setState(516); match(T__1); } break; case 3: { _localctx = new StringQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; setState(518); match(QUERY); setState(519); match(T__0); setState(520); ((StringQueryContext) _localctx).queryString = string(); setState(521); matchQueryOptions(); setState(522); match(T__1); } break; case 4: { _localctx = new MatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; setState(524); match(MATCH); setState(525); match(T__0); setState(526); ((MatchQueryContext) _localctx).singleField = qualifiedName(); setState(527); match(T__2); setState(528); ((MatchQueryContext) _localctx).queryString = string(); setState(529); matchQueryOptions(); setState(530); match(T__1); } break; case 5: { _localctx = new MultiMatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; setState(532); match(MATCH); setState(533); match(T__0); setState(534); 
((MultiMatchQueryContext) _localctx).multiFields = string(); setState(535); match(T__2); setState(536); ((MultiMatchQueryContext) _localctx).queryString = string(); setState(537); matchQueryOptions(); setState(538); match(T__1); } break; case 6: { _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; setState(540); predicated(); } break; } _ctx.stop = _input.LT(-1); setState(551); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input, 75, _ctx); while (_alt != 2 && _alt != org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER) { if (_alt == 1) { if (_parseListeners != null) triggerExitRuleEvent(); _prevctx = _localctx; { setState(549); _errHandler.sync(this); switch (getInterpreter().adaptivePredict(_input, 74, _ctx)) { case 1: { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext) _localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); setState(543); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); setState(544); ((LogicalBinaryContext) _localctx).operator = match(AND); setState(545); ((LogicalBinaryContext) _localctx).right = booleanExpression(3); } break; case 2: { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext) _localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); setState(546); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); setState(547); ((LogicalBinaryContext) _localctx).operator = match(OR); setState(548); ((LogicalBinaryContext) _localctx).right = booleanExpression(2); } break; } } } setState(553); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input, 75, _ctx); } } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, 
re); } finally { unrollRecursionContexts(_parentctx); } return _localctx; } @SuppressWarnings("CheckReturnValue") public static
LogicalBinaryContext
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/ServiceManager.java
{ "start": 6898, "end": 8612 }
class ____ implements MultipleArcTransition<ServiceManager, ServiceEvent, State> { @Override public State transition(ServiceManager serviceManager, ServiceEvent event) { //trigger check of service state ServiceState currState = serviceManager.serviceSpec.getState(); if (currState.equals(ServiceState.STABLE)) { return State.STABLE; } if (currState.equals(ServiceState.EXPRESS_UPGRADING) || currState.equals(ServiceState.CANCEL_UPGRADING)) { if (!serviceManager.componentsToUpgrade.isEmpty()) { org.apache.hadoop.yarn.service.api.records.Component compSpec = serviceManager.componentsToUpgrade.get(0); Component component = serviceManager.scheduler.getAllComponents() .get(compSpec.getName()); if (!component.isUpgrading()) { serviceManager.componentsToUpgrade.remove(0); serviceManager.upgradeNextCompIfAny( currState.equals(ServiceState.CANCEL_UPGRADING)); } } } if (currState.equals(ServiceState.UPGRADING_AUTO_FINALIZE) || ((currState.equals(ServiceState.EXPRESS_UPGRADING) || currState.equals(ServiceState.CANCEL_UPGRADING)) && serviceManager.componentsToUpgrade.isEmpty())) { ServiceState targetState = checkIfStable(serviceManager.serviceSpec); if (targetState.equals(ServiceState.STABLE)) { if (serviceManager.finalizeUpgrade( currState.equals(ServiceState.CANCEL_UPGRADING))) { return State.STABLE; } } } return State.UPGRADING; } } private static
CheckStableTransition
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/PrivateConstructorForUtilityClassTest.java
{ "start": 6824, "end": 7115 }
class ____ { { } } """) .expectUnchanged() .doTest(); } @Test public void otherClassesGetLeftAlone_constructor() { testHelper .addInputLines( "in/Test.java", """ final
Test
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/show/MySqlShowTest_29_plancache.java
{ "start": 929, "end": 2022 }
class ____ extends MysqlTest { public void test_0() throws Exception { String sql = "show plancache plan select * from table1;"; MySqlStatementParser parser = new MySqlStatementParser(sql); List<SQLStatement> statementList = parser.parseStatementList(); SQLStatement stmt = statementList.get(0); assertEquals(1, statementList.size()); MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor(); stmt.accept(visitor); assertEquals(0, visitor.getTables().size()); assertEquals(0, visitor.getColumns().size()); assertEquals(0, visitor.getConditions().size()); assertEquals(0, visitor.getOrderByColumns().size()); // assertTrue(visitor.getTables().containsKey(new TableStat.Name("mytable"))); assertEquals("SHOW PLANCACHE PLAN\n" + "SELECT *\n" + "FROM table1;", stmt.toString()); assertEquals("show plancache plan\n" + "select *\n" + "from table1;", stmt.toLowerCaseString()); } }
MySqlShowTest_29_plancache
java
elastic__elasticsearch
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java
{ "start": 939, "end": 11845 }
class ____ implements GroupingAggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( new IntermediateStateDesc("count", ElementType.LONG), new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); private final LongArrayState state; private final List<Integer> channels; private final DriverContext driverContext; private final boolean countAll; public static CountGroupingAggregatorFunction create(DriverContext driverContext, List<Integer> inputChannels) { return new CountGroupingAggregatorFunction(inputChannels, new LongArrayState(driverContext.bigArrays(), 0), driverContext); } public static List<IntermediateStateDesc> intermediateStateDesc() { return INTERMEDIATE_STATE_DESC; } private CountGroupingAggregatorFunction(List<Integer> channels, LongArrayState state, DriverContext driverContext) { this.channels = channels; this.state = state; this.driverContext = driverContext; this.countAll = channels.isEmpty(); } private int blockIndex() { return countAll ? 
0 : channels.get(0); } @Override public int intermediateBlockCount() { return intermediateStateDesc().size(); } @Override public AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds, Page page) { Block valuesBlock = page.getBlock(blockIndex()); if (countAll == false) { Vector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { state.enableGroupIdTracking(seenGroupIds); } return new AddInput() { @Override public void add(int positionOffset, IntArrayBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override public void add(int positionOffset, IntBigArrayBlock groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override public void add(int positionOffset, IntVector groupIds) { addRawInput(positionOffset, groupIds, valuesBlock); } @Override public void close() {} }; } } return new AddInput() { @Override public void add(int positionOffset, IntArrayBlock groupIds) { addRawInput(groupIds); } @Override public void add(int positionOffset, IntBigArrayBlock groupIds) { addRawInput(groupIds); } @Override public void add(int positionOffset, IntVector groupIds) { addRawInput(groupIds); } @Override public void close() {} }; } private void addRawInput(int positionOffset, IntVector groups, Block values) { int position = positionOffset; for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++, position++) { if (values.isNull(position)) { continue; } int groupId = groups.getInt(groupPosition); state.increment(groupId, values.getValueCount(position)); } } private void addRawInput(int positionOffset, IntArrayBlock groups, Block values) { int position = positionOffset; for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++, position++) { if (groups.isNull(groupPosition) || values.isNull(position)) { continue; } int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + 
groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { int groupId = groups.getInt(g); state.increment(groupId, values.getValueCount(position)); } } } private void addRawInput(int positionOffset, IntBigArrayBlock groups, Block values) { int position = positionOffset; for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++, position++) { if (groups.isNull(groupPosition) || values.isNull(position)) { continue; } int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { int groupId = groups.getInt(g); state.increment(groupId, values.getValueCount(position)); } } } /** * This method is called for count all. */ private void addRawInput(IntVector groups) { if (groups.isConstant()) { state.increment(groups.getInt(0), groups.getPositionCount()); } else { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = groups.getInt(groupPosition); state.increment(groupId, 1); } } } /** * This method is called for count all. */ private void addRawInput(IntArrayBlock groups) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; } int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { int groupId = groups.getInt(g); state.increment(groupId, 1); } } } /** * This method is called for count all. 
*/ private void addRawInput(IntBigArrayBlock groups) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; } int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { int groupId = groups.getInt(g); state.increment(groupId, 1); } } } @Override public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { state.enableGroupIdTracking(seenGroupIds); } @Override public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= blockIndex() + intermediateStateDesc().size(); state.enableGroupIdTracking(new SeenGroupIds.Empty()); LongVector count = page.<LongBlock>getBlock(channels.get(0)).asVector(); BooleanVector seen = page.<BooleanBlock>getBlock(channels.get(1)).asVector(); assert count.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; } int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { int groupId = groups.getInt(g); state.increment(groupId, count.getLong(groupPosition + positionOffset)); } } } @Override public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= blockIndex() + intermediateStateDesc().size(); state.enableGroupIdTracking(new SeenGroupIds.Empty()); LongVector count = page.<LongBlock>getBlock(channels.get(0)).asVector(); BooleanVector seen = page.<BooleanBlock>getBlock(channels.get(1)).asVector(); assert count.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; 
groupPosition < groups.getPositionCount(); groupPosition++) { if (groups.isNull(groupPosition)) { continue; } int groupStart = groups.getFirstValueIndex(groupPosition); int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { int groupId = groups.getInt(g); state.increment(groupId, count.getLong(groupPosition + positionOffset)); } } } @Override public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= blockIndex() + intermediateStateDesc().size(); state.enableGroupIdTracking(new SeenGroupIds.Empty()); LongVector count = page.<LongBlock>getBlock(channels.get(0)).asVector(); BooleanVector seen = page.<BooleanBlock>getBlock(channels.get(1)).asVector(); assert count.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { state.increment(groups.getInt(groupPosition), count.getLong(groupPosition + positionOffset)); } } @Override public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { state.toIntermediate(blocks, offset, selected, driverContext); } @Override public void evaluateFinal(Block[] blocks, int offset, IntVector selected, GroupingAggregatorEvaluationContext evaluationContext) { try (LongVector.Builder builder = evaluationContext.blockFactory().newLongVectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { int si = selected.getInt(i); builder.appendLong(state.hasValue(si) ? state.get(si) : 0); } blocks[offset] = builder.build().asBlock(); } } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(this.getClass().getSimpleName()).append("["); sb.append("channels=").append(channels); sb.append("]"); return sb.toString(); } @Override public void close() { state.close(); } }
CountGroupingAggregatorFunction
java
lettuce-io__lettuce-core
src/test/java/io/lettuce/core/cluster/ClusterReadOnlyCommandsUnitTests.java
{ "start": 761, "end": 1062 }
enum ____ format (e.g., "JSON.GET" -> "JSON_GET") String enumName = readOnlyCommand.toString().replace('.', '_'); enumName = enumName.replace("__", "_"); assertThat(readOnlyCommand.toString()).isEqualTo(CommandType.valueOf(enumName).toString()); } } }
name
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/SuperclassCollectionTest.java
{ "start": 1224, "end": 3830 }
class ____ { @Test public void testPerson(EntityManagerFactoryScope scope) { String address = "super-address"; String localAddress = "local-address"; PersonBaseBase person = createPerson( scope, new Person(), address, localAddress ); assertAddress( scope, person, address, localAddress ); } @Test public void testOtherSubclass(EntityManagerFactoryScope scope) { String address = "other-super-address"; String localAddress = "other-local-address"; PersonBaseBase person = createPerson( scope, new OtherSubclass(), address, localAddress ); assertAddress( scope, person, address, localAddress ); } @Test @JiraKey( value = "HHH-10556") public void testOtherPerson(EntityManagerFactoryScope scope) { String address = "other-person-super-address"; String localAddress = "other-person-local-address"; PersonBaseBase person = createPerson( scope, new OtherPerson(), address, localAddress ); assertAddress( scope, person, address, localAddress ); } private void assertAddress(EntityManagerFactoryScope scope, PersonBaseBase person, String address, String localAddress) { List<Object> results = find( scope, person.getClass(), person.id, "addresses" ); assertEquals( 1, results.size() ); assertEquals( person.addresses.get( 0 ).id, ( (Address) results.get( 0 ) ).id ); assertEquals( address, ( (Address) results.get( 0 ) ).name ); results = find( scope, person.getClass(), person.id, "localAddresses" ); assertEquals( 1, results.size() ); assertEquals( person.getLocalAddresses().get( 0 ).id, ( (Address) results.get( 0 ) ).id ); assertEquals( localAddress, ( (Address) results.get( 0 ) ).name ); } private PersonBaseBase createPerson(EntityManagerFactoryScope scope, PersonBaseBase person, String address, String localAddress) { PersonBaseBase personBaseBase; person.addresses.add( new Address( address ) ); person.getLocalAddresses().add( new Address( localAddress ) ); personBaseBase = scope.fromTransaction( entityManager -> entityManager.merge( person ) ); return personBaseBase; } private List<Object> 
find(EntityManagerFactoryScope scope, Class<?> clazz, int id, String path) { return scope.fromEntityManager( entityManager -> { CriteriaBuilder cb = entityManager.getCriteriaBuilder(); CriteriaQuery<Object> cq = cb.createQuery(); Root<?> root = cq.from( clazz ); cq.select( root.get( path ) ) .where( cb.equal( root.get( "id" ), id ) ); TypedQuery<Object> query = entityManager.createQuery( cq ); return query.getResultList(); } ); } @Entity(name="Address") public static
SuperclassCollectionTest
java
apache__commons-lang
src/test/java/org/apache/commons/lang3/function/FailableTest.java
{ "start": 75214, "end": 75719 }
interface ____ properly defined to throw any exception using String and IOExceptions as * generic test types. */ @Test void testThrows_FailableByteConsumer_IOException() { assertThrows(IOException.class, () -> new FailableByteConsumer<IOException>() { @Override public void accept(final byte value) throws IOException { throw new IOException("test"); } }.accept((byte) 0)); } /** * Tests that our failable
is
java
junit-team__junit5
junit-platform-engine/src/main/java/org/junit/platform/engine/support/hierarchical/SingleLock.java
{ "start": 1597, "end": 2032 }
class ____ implements ForkJoinPool.ManagedBlocker { private volatile boolean acquired; @Override public boolean block() throws InterruptedException { if (!this.acquired) { SingleLock.this.lock.lockInterruptibly(); this.acquired = true; } return true; } @Override public boolean isReleasable() { return this.acquired || (this.acquired = SingleLock.this.lock.tryLock()); } } }
SingleLockManagedBlocker
java
apache__camel
components/camel-whatsapp/src/main/java/org/apache/camel/component/whatsapp/model/Section.java
{ "start": 886, "end": 1270 }
class ____ { private String title; private List<Object> rows; public Section() { } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public List<Object> getRows() { return rows; } public void setRows(List<Object> rows) { this.rows = rows; } }
Section
java
processing__processing4
java/src/processing/mode/java/tweak/ColorSelector.java
{ "start": 1285, "end": 3139 }
class ____ { int hue, saturation, brightness; public JFrame frame; public ColorControlBox colorBox; ColorSelectorBox selectorBox; ColorSelectorSlider selectorSlider; SelectorTopBar topBar; public ColorSelector(ColorControlBox colorBox) { this.colorBox = colorBox; createFrame(); } public void createFrame() { frame = new JFrame(); frame.setBackground(Color.BLACK); Box box = Box.createHorizontalBox(); box.setBackground(Color.BLACK); selectorSlider = new ColorSelectorSlider(); if (!colorBox.isBW) { selectorBox = new ColorSelectorBox(); box.add(selectorBox); } box.add(Box.createHorizontalGlue()); box.add(selectorSlider, BorderLayout.CENTER); box.add(Box.createHorizontalGlue()); frame.getContentPane().add(box, BorderLayout.CENTER); frame.pack(); frame.setResizable(false); frame.setCursor(Cursor.getPredefinedCursor(Cursor.CROSSHAIR_CURSOR)); } public void show(int x, int y) { frame.setLocation(x, y); frame.setVisible(true); frame.repaint(); } public void hide() { this.colorBox = null; frame.setVisible(false); } public void refreshColor() { if (!colorBox.ilegalColor) { setColor(colorBox.color); } } public void setColor(Color c) { if (selectorBox != null) { selectorBox.setToColor(c); } selectorSlider.setToColor(c); repaintSelector(); } public void satBrightChanged() { repaintSelector(); } public void hueChanged() { if (selectorBox != null) { selectorBox.renderBack(); } repaintSelector(); } public void repaintSelector() { if (selectorBox != null) { selectorBox.repaint(); } selectorSlider.repaint(); } // . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
ColorSelector
java
lettuce-io__lettuce-core
src/test/java/io/lettuce/core/ByteBufferCodec.java
{ "start": 135, "end": 878 }
class ____ implements RedisCodec<ByteBuffer, ByteBuffer> { @Override public ByteBuffer decodeKey(ByteBuffer bytes) { ByteBuffer decoupled = ByteBuffer.allocate(bytes.remaining()); decoupled.put(bytes); return (ByteBuffer) decoupled.flip(); } @Override public ByteBuffer decodeValue(ByteBuffer bytes) { ByteBuffer decoupled = ByteBuffer.allocate(bytes.remaining()); decoupled.put(bytes); return (ByteBuffer) decoupled.flip(); } @Override public ByteBuffer encodeKey(ByteBuffer key) { return key.asReadOnlyBuffer(); } @Override public ByteBuffer encodeValue(ByteBuffer value) { return value.asReadOnlyBuffer(); } }
ByteBufferCodec
java
quarkusio__quarkus
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/simple/ContextParamFromCdiTest.java
{ "start": 510, "end": 980 }
class ____ { @RegisterExtension static QuarkusUnitTest test = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClasses(ContextFromCdi.class, ContextFromCdiResource.class)); @Test public void testParam() { RestAssured.get("/context-from-cdi") .then().statusCode(200).body(Matchers.equalTo("context")); } @ApplicationScoped @Unremovable public static
ContextParamFromCdiTest
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/insertordering/InsertOrderingRCATest.java
{ "start": 31445, "end": 32213 }
class ____ extends Condition { private String ruleName; public AlertCondition() { } public String getRuleName() { return ruleName; } public void setRuleName(String ruleName) { this.ruleName = ruleName; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } if (!super.equals(o)) { return false; } AlertCondition that = (AlertCondition) o; return Objects.equals( ruleName, that.ruleName ); } @Override public int hashCode() { int result = super.hashCode(); result = 31 * result + (ruleName != null ? ruleName.hashCode() : 0); return result; } } @MappedSuperclass public static abstract
AlertCondition
java
junit-team__junit5
jupiter-tests/src/test/java/org/junit/jupiter/engine/discovery/DiscoverySelectorResolverTests.java
{ "start": 38277, "end": 38349 }
class ____ { @Test void testC() { } } } }
DoubleNestedTestCase
java
FasterXML__jackson-databind
src/test/java/tools/jackson/databind/objectid/TestObjectIdDeserialization.java
{ "start": 4010, "end": 4243 }
class ____ { public int id; public int data; public WithCustomResolution(int id, int data) { this.id = id; this.data = data; } } public static
WithCustomResolution
java
apache__logging-log4j2
log4j-core/src/main/java/org/apache/logging/log4j/core/appender/db/jdbc/AbstractDriverManagerConnectionSource.java
{ "start": 1996, "end": 6527 }
class ____<B extends Builder<B>> { @PluginBuilderAttribute @Required protected String connectionString; @PluginBuilderAttribute protected String driverClassName; @PluginBuilderAttribute protected char[] password; @PluginElement("Properties") protected Property[] properties; @PluginBuilderAttribute protected char[] userName; @SuppressWarnings("unchecked") protected B asBuilder() { return (B) this; } public String getConnectionString() { return connectionString; } public String getDriverClassName() { return driverClassName; } public char[] getPassword() { return password; } public Property[] getProperties() { return properties; } public char[] getUserName() { return userName; } public B setConnectionString(final String connectionString) { this.connectionString = connectionString; return asBuilder(); } public B setDriverClassName(final String driverClassName) { this.driverClassName = driverClassName; return asBuilder(); } public B setPassword(final char[] password) { this.password = password; return asBuilder(); } public B setProperties(final Property[] properties) { this.properties = properties; return asBuilder(); } public B setUserName(final char[] userName) { this.userName = userName; return asBuilder(); } } private static final Logger LOGGER = StatusLogger.getLogger(); public static Logger getLogger() { return LOGGER; } private final String actualConnectionString; private final String connectionString; private final String driverClassName; private final char[] password; private final Property[] properties; private final char[] userName; public AbstractDriverManagerConnectionSource( final String driverClassName, final String connectionString, final String actualConnectionString, final char[] userName, final char[] password, final Property[] properties) { this.driverClassName = driverClassName; this.connectionString = connectionString; this.actualConnectionString = actualConnectionString; this.userName = userName; this.password = password; this.properties = 
properties; } public String getActualConnectionString() { return actualConnectionString; } @SuppressWarnings("resource") // The JDBC Connection is freed when the connection source is stopped. @Override public Connection getConnection() throws SQLException { loadDriver(); final String actualConnectionString = getActualConnectionString(); LOGGER.debug("{} getting connection for '{}'", getClass().getSimpleName(), actualConnectionString); Connection connection; if (properties != null && properties.length > 0) { if (userName != null || password != null) { throw new SQLException("Either set the userName and password, or set the Properties, but not both."); } connection = DriverManager.getConnection(actualConnectionString, toProperties(properties)); } else { connection = DriverManager.getConnection(actualConnectionString, toString(userName), toString(password)); } LOGGER.debug( "{} acquired connection for '{}': {} ({}@{})", getClass().getSimpleName(), actualConnectionString, connection, connection.getClass().getName(), Integer.toHexString(connection.hashCode())); return connection; } public String getConnectionString() { return connectionString; } public String getDriverClassName() { return driverClassName; } public char[] getPassword() { return password; } public Property[] getProperties() { return properties; } public char[] getUserName() { return userName; } protected void loadDriver() throws SQLException { loadDriver(driverClassName); } /** * Loads a JDBC driver for the given
Builder
java
quarkusio__quarkus
independent-projects/resteasy-reactive/server/vertx/src/main/java/org/jboss/resteasy/reactive/server/vertx/serializers/ServerVertxAsyncFileMessageBodyWriter.java
{ "start": 838, "end": 3183 }
class ____ implements ServerMessageBodyWriter<AsyncFile> { @Override public boolean isWriteable(Class<?> type, Type genericType, ResteasyReactiveResourceInfo target, MediaType mediaType) { // allow for subtypes, such as AsyncFileImpl return AsyncFile.class.isAssignableFrom(type); } @Override public void writeResponse(AsyncFile file, Type genericType, ServerRequestContext context) throws WebApplicationException { ResteasyReactiveRequestContext ctx = ((ResteasyReactiveRequestContext) context); ctx.suspend(); ServerHttpResponse response = context.serverResponse(); // this is only set by nice people, unfortunately if (file.getReadLength() != Long.MAX_VALUE) { response.setResponseHeader(HttpHeaders.CONTENT_LENGTH, String.valueOf(file.getReadLength())); } else { response.setChunked(true); } file.handler(buffer -> { try { response.write(buffer.getBytes()); } catch (Exception x) { // believe it or not, this throws ctx.resume(x); return; } if (response.isWriteQueueFull()) { file.pause(); response.addDrainHandler(new Runnable() { @Override public void run() { file.resume(); } }); } }); file.endHandler(new Handler<Void>() { @Override public void handle(Void event) { file.close(); response.end(); // Not sure if I need to resume, actually ctx.resume(); } }); } @Override public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) { return AsyncFile.class.isAssignableFrom(type); } @Override public void writeTo(AsyncFile asyncFile, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) throws IOException, WebApplicationException { throw new UnsupportedOperationException("not supported"); } }
ServerVertxAsyncFileMessageBodyWriter
java
apache__hadoop
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java
{ "start": 2037, "end": 4549 }
class ____ { private static final int START_STOP_TIMEOUT_SEC = 30; private ServerRunnable serverRunnable; private Thread serverThread; private int port; private void startTestServer() throws Exception { // start simple tcp server. serverRunnable = new ServerRunnable(); serverThread = new SubjectInheritingThread(serverRunnable); serverThread.start(); final long timeout = System.currentTimeMillis() + START_STOP_TIMEOUT_SEC * 1000; while (!serverRunnable.isReady()) { assertNull(serverRunnable.getThrowable()); Thread.sleep(10); if (System.currentTimeMillis() > timeout) { fail("Server thread did not start properly in allowed time of " + START_STOP_TIMEOUT_SEC + " sec."); } } port = serverRunnable.getPort(); } @AfterEach public void stopTestServer() throws InterruptedException { final Thread t = serverThread; if (t != null) { serverThread = null; port = -1; // stop server serverRunnable.stop(); t.join(START_STOP_TIMEOUT_SEC * 1000); assertFalse(t.isAlive()); assertNull(serverRunnable.getThrowable()); } } @Test public void testSocketFactoryAsKeyInMap() { Map<SocketFactory, Integer> dummyCache = new HashMap<SocketFactory, Integer>(); int toBeCached1 = 1; int toBeCached2 = 2; Configuration conf = new Configuration(); conf.set(CommonConfigurationKeys.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY, "org.apache.hadoop.ipc.TestSocketFactory$DummySocketFactory"); final SocketFactory dummySocketFactory = NetUtils .getDefaultSocketFactory(conf); dummyCache.put(dummySocketFactory, toBeCached1); conf.set(CommonConfigurationKeys.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY, "org.apache.hadoop.net.StandardSocketFactory"); final SocketFactory defaultSocketFactory = NetUtils .getDefaultSocketFactory(conf); dummyCache.put(defaultSocketFactory, toBeCached2); assertThat(dummyCache.size()) .withFailMessage("The cache contains two elements") .isEqualTo(2); assertThat(defaultSocketFactory) .withFailMessage("Equals of both socket factory shouldn't be same") .isNotEqualTo(dummySocketFactory); 
assertSame(toBeCached2, dummyCache.remove(defaultSocketFactory)); dummyCache.put(defaultSocketFactory, toBeCached2); assertSame(toBeCached1, dummyCache.remove(dummySocketFactory)); } /** * A dummy socket factory
TestSocketFactory
java
spring-projects__spring-boot
core/spring-boot-test/src/main/java/org/springframework/boot/test/mock/web/SpringBootMockServletContext.java
{ "start": 1313, "end": 3422 }
class ____ extends MockServletContext { private static final String[] SPRING_BOOT_RESOURCE_LOCATIONS = new String[] { "classpath:META-INF/resources", "classpath:resources", "classpath:static", "classpath:public" }; private final ResourceLoader resourceLoader; private @Nullable File emptyRootDirectory; public SpringBootMockServletContext(String resourceBasePath) { this(resourceBasePath, new FileSystemResourceLoader()); } public SpringBootMockServletContext(String resourceBasePath, ResourceLoader resourceLoader) { super(resourceBasePath, resourceLoader); this.resourceLoader = resourceLoader; } @Override protected String getResourceLocation(String path) { if (!path.startsWith("/")) { path = "/" + path; } String resourceLocation = getResourceBasePathLocation(path); if (exists(resourceLocation)) { return resourceLocation; } for (String prefix : SPRING_BOOT_RESOURCE_LOCATIONS) { resourceLocation = prefix + path; if (exists(resourceLocation)) { return resourceLocation; } } return super.getResourceLocation(path); } protected final String getResourceBasePathLocation(String path) { return super.getResourceLocation(path); } private boolean exists(String resourceLocation) { try { Resource resource = this.resourceLoader.getResource(resourceLocation); return resource.exists(); } catch (Exception ex) { return false; } } @Override public @Nullable URL getResource(String path) throws MalformedURLException { URL resource = super.getResource(path); if (resource == null && "/".equals(path)) { // Liquibase assumes that "/" always exists, if we don't have a directory // use a temporary location. try { if (this.emptyRootDirectory == null) { synchronized (this) { File tempDirectory = Files.createTempDirectory("spr-servlet").toFile(); tempDirectory.deleteOnExit(); this.emptyRootDirectory = tempDirectory; } } return this.emptyRootDirectory.toURI().toURL(); } catch (IOException ex) { // Ignore } } return resource; } }
SpringBootMockServletContext
java
apache__logging-log4j2
log4j-perf-test/src/main/java/org/apache/logging/log4j/perf/jmh/instant/InstantPatternFormatterBenchmark.java
{ "start": 4821, "end": 9050 }
class ____ { private final String pattern; final FastDatePrinter fastFormatter; final FixedDateFormat fixedFormatter; final InstantPatternFormatter instantFormatter; final DateTimeFormatter javaFormatter; Formatters(final String pattern) { this.pattern = pattern; this.fastFormatter = new FastDatePrinter(pattern, TIME_ZONE, LOCALE) {}; this.fixedFormatter = FixedDateFormat.createIfSupported(pattern, TIME_ZONE.getID()); if (fixedFormatter == null) { final String message = String.format( "couldn't create `%s` for pattern `%s` and time zone `%s`", FixedDateFormat.class.getSimpleName(), pattern, TIME_ZONE.getID()); throw new IllegalStateException(message); } this.instantFormatter = InstantPatternFormatter.newBuilder() .setPattern(pattern) .setLocale(LOCALE) .setTimeZone(TIME_ZONE) .setCachingEnabled(false) .build(); this.javaFormatter = DateTimeFormatter.ofPattern(pattern) .withZone(TIME_ZONE.toZoneId()) .withLocale(LOCALE); } } private final StringBuilder stringBuilder = new StringBuilder(Math.max(DATE_TIME_FORMATTERS.pattern.length(), TIME_FORMATTERS.pattern.length()) * 2); private final char[] charBuffer = new char[stringBuilder.capacity()]; private final Calendar calendar = Calendar.getInstance(TIME_ZONE, LOCALE); @Benchmark public void instantFormatter_dateTime(final Blackhole blackhole) { instantFormatter(blackhole, DATE_TIME_FORMATTERS.instantFormatter); } @Benchmark public void instantFormatter_time(final Blackhole blackhole) { instantFormatter(blackhole, TIME_FORMATTERS.instantFormatter); } private void instantFormatter(final Blackhole blackhole, final InstantPatternFormatter formatter) { for (final MutableInstant instant : INSTANTS) { stringBuilder.setLength(0); formatter.formatTo(stringBuilder, instant); blackhole.consume(stringBuilder.length()); } } @Benchmark public void fastFormatter_dateTime(final Blackhole blackhole) { fastFormatter(blackhole, DATE_TIME_FORMATTERS.fastFormatter); } @Benchmark public void fastFormatter_time(final Blackhole blackhole) { 
fastFormatter(blackhole, TIME_FORMATTERS.fastFormatter); } private void fastFormatter(final Blackhole blackhole, final FastDatePrinter formatter) { for (final MutableInstant instant : INSTANTS) { stringBuilder.setLength(0); calendar.setTimeInMillis(instant.getEpochMillisecond()); formatter.format(calendar, stringBuilder); blackhole.consume(stringBuilder.length()); } } @Benchmark public void fixedFormatter_dateTime(final Blackhole blackhole) { fixedFormatter(blackhole, DATE_TIME_FORMATTERS.fixedFormatter); } @Benchmark public void fixedFormatter_time(final Blackhole blackhole) { fixedFormatter(blackhole, DATE_TIME_FORMATTERS.fixedFormatter); } private void fixedFormatter(final Blackhole blackhole, final FixedDateFormat formatter) { for (final MutableInstant instant : INSTANTS) { final int length = formatter.formatInstant(instant, charBuffer, 0); blackhole.consume(length); } } @Benchmark public void javaFormatter_dateTime(final Blackhole blackhole) { javaFormatter(blackhole, DATE_TIME_FORMATTERS.javaFormatter); } @Benchmark public void javaFormatter_time(final Blackhole blackhole) { javaFormatter(blackhole, TIME_FORMATTERS.javaFormatter); } private void javaFormatter(final Blackhole blackhole, final DateTimeFormatter formatter) { for (final MutableInstant instant : INSTANTS) { stringBuilder.setLength(0); formatter.formatTo(instant, stringBuilder); blackhole.consume(stringBuilder.length()); } } }
Formatters
java
eclipse-vertx__vert.x
vertx-core/src/main/java/io/vertx/core/dns/SrvRecord.java
{ "start": 704, "end": 1390 }
interface ____ { /** * Returns the priority for this service record. */ int priority(); /** * Returns the record time to live */ long ttl(); /** * Returns the weight of this service record. */ int weight(); /** * Returns the port the service is running on. */ int port(); /** * Returns the name for the server being queried. */ String name(); /** * Returns the protocol for the service being queried (i.e. "_tcp"). */ String protocol(); /** * Returns the service's name (i.e. "_http"). */ String service(); /** * Returns the name of the host for the service. */ @Nullable String target(); }
SrvRecord
java
apache__camel
components/camel-kubernetes/src/test/java/org/apache/camel/component/kubernetes/producer/KubernetesPodsProducerTest.java
{ "start": 1905, "end": 9440 }
class ____ extends KubernetesTestSupport { KubernetesMockServer server; NamespacedKubernetesClient client; @BindToRegistry("kubernetesClient") public KubernetesClient getClient() { return client; } @Test void listTest() { server.expect().withPath("/api/v1/pods") .andReturn(200, new PodListBuilder().addNewItem().and().addNewItem().and().addNewItem().and().build()).once(); server.expect().withPath("/api/v1/namespaces/test/pods") .andReturn(200, new PodListBuilder().addNewItem().and().addNewItem().and().build()).once(); List<?> result = template.requestBody("direct:list", "", List.class); assertEquals(3, result.size()); Exchange ex = template.request("direct:list", exchange -> exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, "test")); List<?> resultNamespace = ex.getMessage().getBody(List.class); assertEquals(2, resultNamespace.size()); } @Test void listByLabelsTest() throws Exception { Map<String, String> labels = Map.of( "key1", "value1", "key2", "value2"); String urlEncodedLabels = toUrlEncoded(labels.entrySet().stream().map(e -> e.getKey() + "=" + e.getValue()) .collect(Collectors.joining(","))); server.expect().withPath("/api/v1/pods?labelSelector=" + urlEncodedLabels) .andReturn(200, new PodListBuilder().addNewItem().and().addNewItem().and().addNewItem().and().build()) .once(); server.expect().withPath("/api/v1/namespaces/test/pods?labelSelector=" + urlEncodedLabels) .andReturn(200, new PodListBuilder().addNewItem().and().addNewItem().and().build()) .once(); Exchange ex = template.request("direct:listByLabels", exchange -> exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_PODS_LABELS, labels)); assertEquals(3, ex.getMessage().getBody(List.class).size()); ex = template.request("direct:listByLabels", exchange -> { exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_PODS_LABELS, labels); exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, "test"); }); assertEquals(2, 
ex.getMessage().getBody(List.class).size()); } @Test void getPodTest() { Pod pod1 = new PodBuilder().withNewMetadata().withName("pod1").withNamespace("test").and().build(); Pod pod2 = new PodBuilder().withNewMetadata().withName("pod2").withNamespace("ns1").and().build(); server.expect().withPath("/api/v1/namespaces/test/pods/pod1").andReturn(200, pod1).once(); server.expect().withPath("/api/v1/namespaces/ns1/pods/pod2").andReturn(200, pod2).once(); Exchange ex = template.request("direct:getPod", exchange -> { exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, "test"); exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_POD_NAME, "pod1"); }); Pod result = ex.getMessage().getBody(Pod.class); assertEquals("pod1", result.getMetadata().getName()); } @Test void createPod() { Map<String, String> labels = Map.of("my.label.key", "my.label.value"); PodSpec spec = new PodSpecBuilder().withHostname("SomeHostname").build(); Pod pod1 = new PodBuilder().withNewMetadata().withName("pod1").withNamespace("test").withLabels(labels).and() .withSpec(spec).build(); server.expect().post().withPath("/api/v1/namespaces/test/pods").andReturn(200, pod1).once(); Exchange ex = template.request("direct:createPod", exchange -> { exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, "test"); exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_PODS_LABELS, labels); exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_POD_NAME, "pod1"); exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_POD_SPEC, spec); }); Pod result = ex.getMessage().getBody(Pod.class); assertEquals("test", result.getMetadata().getNamespace()); assertEquals("pod1", result.getMetadata().getName()); assertEquals(labels, result.getMetadata().getLabels()); assertEquals("SomeHostname", result.getSpec().getHostname()); } @Test void updatePod() { Map<String, String> labels = Map.of("my.label.key", "my.label.value"); PodSpec spec = new 
PodSpecBuilder().withHostname("SomeHostname").build(); Pod pod1 = new PodBuilder().withNewMetadata().withName("pod1").withNamespace("test").withLabels(labels).and() .withSpec(spec).build(); server.expect().get().withPath("/api/v1/namespaces/test/pods/pod1") .andReturn(200, new PodBuilder().withNewMetadata().withName("pod1").withNamespace("test").endMetadata().build()) .once(); server.expect().put().withPath("/api/v1/namespaces/test/pods/pod1").andReturn(200, pod1).once(); Exchange ex = template.request("direct:updatePod", exchange -> { exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, "test"); exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_PODS_LABELS, labels); exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_POD_NAME, "pod1"); exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_POD_SPEC, spec); }); Pod result = ex.getMessage().getBody(Pod.class); assertEquals("test", result.getMetadata().getNamespace()); assertEquals("pod1", result.getMetadata().getName()); assertEquals(labels, result.getMetadata().getLabels()); assertEquals("SomeHostname", result.getSpec().getHostname()); } @Test void deletePod() { Pod pod1 = new PodBuilder().withNewMetadata().withName("pod1").withNamespace("test").and().build(); server.expect().withPath("/api/v1/namespaces/test/pods/pod1").andReturn(200, pod1).once(); Exchange ex = template.request("direct:deletePod", exchange -> { exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, "test"); exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_POD_NAME, "pod1"); }); boolean podDeleted = ex.getMessage().getBody(Boolean.class); assertTrue(podDeleted); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { @Override public void configure() { from("direct:list").to("kubernetes-pods:///?kubernetesClient=#kubernetesClient&operation=listPods"); from("direct:listByLabels") 
.to("kubernetes-pods:///?kubernetesClient=#kubernetesClient&operation=listPodsByLabels"); from("direct:getPod").to("kubernetes-pods:///?kubernetesClient=#kubernetesClient&operation=getPod"); from("direct:createPod").to("kubernetes-pods:///?kubernetesClient=#kubernetesClient&operation=createPod"); from("direct:updatePod").to("kubernetes-pods:///?kubernetesClient=#kubernetesClient&operation=updatePod"); from("direct:deletePod").to("kubernetes-pods:///?kubernetesClient=#kubernetesClient&operation=deletePod"); } }; } }
KubernetesPodsProducerTest
java
quarkusio__quarkus
extensions/load-shedding/deployment/src/test/java/io/quarkus/load/shedding/TimeBasedRequestClassifierTest.java
{ "start": 204, "end": 1096 }
class ____ { @Test public void fixedTime() { fixedTime(0); fixedTime(1_000); fixedTime(500_000); } private void fixedTime(long now) { int hour = (int) (now >> 22); int nextHour = (int) ((now + 1_000) >> 22); assertEquals(hour, nextHour); nextHour = (int) ((now + 1_000_000) >> 22); assertEquals(hour, nextHour); nextHour = (int) ((now + 3_600_000) >> 22); assertEquals(hour, nextHour); // 4_200_000 because 2^22 = 4_194_304 nextHour = (int) ((now + 4_200_000) >> 22); assertNotEquals(hour, nextHour); } @Test public void currentTime() { long now = System.currentTimeMillis(); int hour = (int) (now >> 22); int nextHour = (int) ((now + 4_200_000) >> 22); assertNotEquals(hour, nextHour); } }
TimeBasedRequestClassifierTest
java
spring-projects__spring-framework
spring-context/src/main/java/org/springframework/context/ApplicationStartupAware.java
{ "start": 989, "end": 1482 }
interface ____ extends Aware { /** * Set the ApplicationStartup that this object runs with. * <p>Invoked after population of normal bean properties but before an init * callback like InitializingBean's afterPropertiesSet or a custom init-method. * Invoked before ApplicationContextAware's setApplicationContext. * @param applicationStartup application startup to be used by this object */ void setApplicationStartup(ApplicationStartup applicationStartup); }
ApplicationStartupAware
java
apache__camel
components/camel-zeebe/src/generated/java/org/apache/camel/component/zeebe/ZeebeEndpointUriFactory.java
{ "start": 515, "end": 2342 }
class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory { private static final String BASE = ":operationName"; private static final Set<String> PROPERTY_NAMES; private static final Set<String> SECRET_PROPERTY_NAMES; private static final Map<String, String> MULTI_VALUE_PREFIXES; static { Set<String> props = new HashSet<>(8); props.add("bridgeErrorHandler"); props.add("exceptionHandler"); props.add("exchangePattern"); props.add("formatJSON"); props.add("jobKey"); props.add("lazyStartProducer"); props.add("operationName"); props.add("timeout"); PROPERTY_NAMES = Collections.unmodifiableSet(props); SECRET_PROPERTY_NAMES = Collections.emptySet(); MULTI_VALUE_PREFIXES = Collections.emptyMap(); } @Override public boolean isEnabled(String scheme) { return "zeebe".equals(scheme); } @Override public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException { String syntax = scheme + BASE; String uri = syntax; Map<String, Object> copy = new HashMap<>(properties); uri = buildPathParameter(syntax, uri, "operationName", null, true, copy); uri = buildQueryParameters(uri, copy, encode); return uri; } @Override public Set<String> propertyNames() { return PROPERTY_NAMES; } @Override public Set<String> secretPropertyNames() { return SECRET_PROPERTY_NAMES; } @Override public Map<String, String> multiValuePrefixes() { return MULTI_VALUE_PREFIXES; } @Override public boolean isLenientProperties() { return false; } }
ZeebeEndpointUriFactory
java
google__dagger
hilt-compiler/main/java/dagger/hilt/processor/internal/generatesrootinput/GeneratesRootInputPropagatedDataGenerator.java
{ "start": 1185, "end": 1919 }
class ____ { private final XProcessingEnv processingEnv; private final XTypeElement element; GeneratesRootInputPropagatedDataGenerator(XProcessingEnv processingEnv, XTypeElement element) { this.processingEnv = processingEnv; this.element = element; } void generate() { TypeSpec.Builder generator = TypeSpec.classBuilder(Processors.getFullEnclosedName(element)); JavaPoetExtKt.addOriginatingElement(generator, element) .addAnnotation( AnnotationSpec.builder(ClassNames.GENERATES_ROOT_INPUT_PROPAGATED_DATA) .addMember("value", "$T.class", element.getClassName()) .build()) .addJavadoc( "Generated
GeneratesRootInputPropagatedDataGenerator
java
spring-projects__spring-framework
spring-test/src/main/java/org/springframework/test/context/testng/AbstractTransactionalTestNGSpringContextTests.java
{ "start": 3042, "end": 7529 }
class ____, available to subclasses. * @since 3.2 */ protected final JdbcTemplate jdbcTemplate = new JdbcTemplate(); private @Nullable String sqlScriptEncoding; /** * Set the {@code DataSource}, typically provided via Dependency Injection. * <p>This method also instantiates the {@link #jdbcTemplate} instance variable. */ @Autowired public void setDataSource(DataSource dataSource) { this.jdbcTemplate.setDataSource(dataSource); } /** * Specify the encoding for SQL scripts, if different from the platform encoding. * @see #executeSqlScript */ public void setSqlScriptEncoding(String sqlScriptEncoding) { this.sqlScriptEncoding = sqlScriptEncoding; } /** * Convenience method for counting the rows in the given table. * @param tableName table name to count rows in * @return the number of rows in the table * @see JdbcTestUtils#countRowsInTable */ protected int countRowsInTable(String tableName) { return JdbcTestUtils.countRowsInTable(this.jdbcTemplate, tableName); } /** * Convenience method for counting the rows in the given table, using the * provided {@code WHERE} clause. * <p>See the Javadoc for {@link JdbcTestUtils#countRowsInTableWhere} for details. * @param tableName the name of the table to count rows in * @param whereClause the {@code WHERE} clause to append to the query * @return the number of rows in the table that match the provided * {@code WHERE} clause * @since 3.2 * @see JdbcTestUtils#countRowsInTableWhere */ protected int countRowsInTableWhere(String tableName, String whereClause) { return JdbcTestUtils.countRowsInTableWhere(this.jdbcTemplate, tableName, whereClause); } /** * Convenience method for deleting all rows from the specified tables. * <p>Use with caution outside of a transaction! * @param names the names of the tables from which to delete * @return the total number of rows deleted from all specified tables * @see JdbcTestUtils#deleteFromTables */ protected int deleteFromTables(String... 
names) { return JdbcTestUtils.deleteFromTables(this.jdbcTemplate, names); } /** * Convenience method for deleting all rows from the given table, using the * provided {@code WHERE} clause. * <p>Use with caution outside of a transaction! * <p>See the Javadoc for {@link JdbcTestUtils#deleteFromTableWhere} for details. * @param tableName the name of the table to delete rows from * @param whereClause the {@code WHERE} clause to append to the query * @param args arguments to bind to the query (leaving it to the {@code * PreparedStatement} to guess the corresponding SQL type); may also contain * {@link org.springframework.jdbc.core.SqlParameterValue SqlParameterValue} * objects which indicate not only the argument value but also the SQL type * and optionally the scale. * @return the number of rows deleted from the table * @since 4.0 * @see JdbcTestUtils#deleteFromTableWhere */ protected int deleteFromTableWhere(String tableName, String whereClause, Object... args) { return JdbcTestUtils.deleteFromTableWhere(this.jdbcTemplate, tableName, whereClause, args); } /** * Convenience method for dropping all the specified tables. * <p>Use with caution outside of a transaction! * @param names the names of the tables to drop * @since 3.2 * @see JdbcTestUtils#dropTables */ protected void dropTables(String... names) { JdbcTestUtils.dropTables(this.jdbcTemplate, names); } /** * Execute the given SQL script. * <p>Use with caution outside of a transaction! * <p>The script will normally be loaded by classpath. 
* <p><b>Do not use this method to execute DDL if you expect rollback.</b> * @param sqlResourcePath the Spring resource path for the SQL script * @param continueOnError whether to continue without throwing an * exception in the event of an error * @throws DataAccessException if there is an error executing a statement * @see ResourceDatabasePopulator * @see #setSqlScriptEncoding */ protected void executeSqlScript(String sqlResourcePath, boolean continueOnError) throws DataAccessException { DataSource ds = this.jdbcTemplate.getDataSource(); Assert.state(ds != null, "No DataSource set"); Assert.state(this.applicationContext != null, "No ApplicationContext available"); Resource resource = this.applicationContext.getResource(sqlResourcePath); new ResourceDatabasePopulator(continueOnError, false, this.sqlScriptEncoding, resource).execute(ds); } }
manages
java
apache__avro
lang/java/perf/src/main/java/org/apache/avro/perf/test/reflect/ReflectFloatArrayTest.java
{ "start": 2407, "end": 3354 }
class ____ extends BasicArrayState { private final Schema schema; private float[][] testData; private Encoder encoder; private ReflectDatumWriter<float[]> datumWriter; public TestStateEncode() { super(ARRAY_SIZE); final String jsonText = ReflectData.get().getSchema(float[].class).toString(); this.schema = new Schema.Parser().parse(jsonText); } /** * Setup the trial data. * * @throws IOException Could not setup test data */ @Setup(Level.Trial) public void doSetupTrial() throws Exception { this.encoder = super.newEncoder(false, getNullOutputStream()); this.datumWriter = new ReflectDatumWriter<>(schema); this.testData = new float[getBatchSize()][]; for (int i = 0; i < testData.length; i++) { this.testData[i] = populateFloatArray(getRandom(), getArraySize()); } } } @State(Scope.Thread) public static
TestStateEncode
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/erroneous/ambiguousfactorymethod/SourceTargetMapperAndBarFactory.java
{ "start": 439, "end": 788 }
class ____ { public static final SourceTargetMapperAndBarFactory INSTANCE = Mappers.getMapper( SourceTargetMapperAndBarFactory.class ); public abstract Target sourceToTarget(Source source); public abstract Bar fooToBar(Foo foo); public Bar createBar() { return new Bar( "BAR" ); } }
SourceTargetMapperAndBarFactory
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/boot/registry/classloading/spi/package-info.java
{ "start": 106, "end": 191 }
class ____ service SPI. */ package org.hibernate.boot.registry.classloading.spi;
loading
java
alibaba__nacos
console/src/main/java/com/alibaba/nacos/console/handler/impl/remote/NacosMaintainerClientHolder.java
{ "start": 1812, "end": 3809 }
class ____ extends MemberChangeListener { private static final Logger LOGGER = LoggerFactory.getLogger(NacosMaintainerClientHolder.class); private final RemoteServerMemberManager memberManager; private volatile NamingMaintainerService namingMaintainerService; private volatile ConfigMaintainerService configMaintainerService; private volatile AiMaintainerService aiMaintainerService; public NacosMaintainerClientHolder(RemoteServerMemberManager memberManager) throws NacosException { this.memberManager = memberManager; buildMaintainerService(); NotifyCenter.registerSubscriber(this); } private void buildMaintainerService() throws NacosException { List<String> memberAddress = memberManager.allMembers().stream().map(Member::getAddress).toList(); String memberAddressString = StringUtils.join(memberAddress, ","); Properties properties = new Properties(); properties.setProperty(PropertyKeyConst.SERVER_ADDR, memberAddressString); namingMaintainerService = NamingMaintainerFactory.createNamingMaintainerService(properties); configMaintainerService = ConfigMaintainerFactory.createConfigMaintainerService(properties); aiMaintainerService = AiMaintainerFactory.createAiMaintainerService(properties); } public NamingMaintainerService getNamingMaintainerService() { return namingMaintainerService; } public ConfigMaintainerService getConfigMaintainerService() { return configMaintainerService; } public AiMaintainerService getAiMaintainerService() { return aiMaintainerService; } @Override public void onEvent(MembersChangeEvent event) { try { buildMaintainerService(); } catch (NacosException e) { LOGGER.warn("Nacos Server members changed, but build new maintain client failed with: ", e); } } }
NacosMaintainerClientHolder
java
quarkusio__quarkus
integration-tests/qute/src/test/java/io/quarkus/it/qute/QuteITCase.java
{ "start": 114, "end": 157 }
class ____ extends QuteTestCase { }
QuteITCase
java
netty__netty
handler/src/main/java/io/netty/handler/ssl/ApplicationProtocolConfig.java
{ "start": 4748, "end": 6316 }
enum ____ { /** * If the peer who selects the application protocol doesn't find a match this will result in the failing the * handshake with a fatal alert. * <p> * For example in the case of ALPN this will result in a * <a herf="https://tools.ietf.org/html/rfc7301#section-3.2">no_application_protocol(120)</a> alert. */ FATAL_ALERT, /** * If the peer who selects the application protocol doesn't find a match it will pretend no to support * the TLS extension by not advertising support for the TLS extension in the handshake. This is used in cases * where a "best effort" is desired to talk even if there is no matching protocol. */ NO_ADVERTISE, /** * If the peer who selects the application protocol doesn't find a match it will just select the last protocol * it advertised support for. This is used in cases where a "best effort" is desired to talk even if there * is no matching protocol, and the assumption is the "most general" fallback protocol is typically listed last. * <p> * This may be <a href="https://tools.ietf.org/html/rfc7301#section-3.2">illegal for some RFCs</a> but was * observed behavior by some SSL implementations, and is supported for flexibility/compatibility. */ CHOOSE_MY_LAST_PROTOCOL } /** * Defines the most common behaviors for the peer which is notified of the selected protocol. */ public
SelectorFailureBehavior
java
spring-projects__spring-boot
module/spring-boot-health/src/test/java/org/springframework/boot/health/actuate/endpoint/SimpleHttpCodeStatusMapperTests.java
{ "start": 1053, "end": 2388 }
class ____ { @Test void createWhenMappingsAreNullUsesDefaultMappings() { SimpleHttpCodeStatusMapper mapper = new SimpleHttpCodeStatusMapper(null); assertThat(mapper.getStatusCode(Status.UNKNOWN)).isEqualTo(WebEndpointResponse.STATUS_OK); assertThat(mapper.getStatusCode(Status.UP)).isEqualTo(WebEndpointResponse.STATUS_OK); assertThat(mapper.getStatusCode(Status.DOWN)).isEqualTo(WebEndpointResponse.STATUS_SERVICE_UNAVAILABLE); assertThat(mapper.getStatusCode(Status.OUT_OF_SERVICE)) .isEqualTo(WebEndpointResponse.STATUS_SERVICE_UNAVAILABLE); } @Test void getStatusCodeReturnsMappedStatus() { Map<String, Integer> map = new LinkedHashMap<>(); map.put("up", 123); map.put("down", 456); SimpleHttpCodeStatusMapper mapper = new SimpleHttpCodeStatusMapper(map); assertThat(mapper.getStatusCode(Status.UP)).isEqualTo(123); assertThat(mapper.getStatusCode(Status.DOWN)).isEqualTo(456); assertThat(mapper.getStatusCode(Status.OUT_OF_SERVICE)).isEqualTo(200); } @Test void getStatusCodeWhenMappingsAreNotUniformReturnsMappedStatus() { Map<String, Integer> map = new LinkedHashMap<>(); map.put("out-of-service", 123); SimpleHttpCodeStatusMapper mapper = new SimpleHttpCodeStatusMapper(map); assertThat(mapper.getStatusCode(Status.OUT_OF_SERVICE)).isEqualTo(123); } }
SimpleHttpCodeStatusMapperTests
java
mockito__mockito
mockito-core/src/main/java/org/mockito/internal/configuration/plugins/PluginLoader.java
{ "start": 1953, "end": 5297 }
class ____ {@code preferredPluginType}. * * @return An object of either {@code preferredPluginType} or {@code alternatePluginType}, * cast to the lowest common denominator in the chain of inheritance */ @SuppressWarnings("unchecked") <ReturnT, PreferredT extends ReturnT, AlternateType extends ReturnT> ReturnT loadPlugin( final Class<PreferredT> preferredPluginType, final Class<AlternateType> alternatePluginType) { try { PreferredT preferredPlugin = initializer.loadImpl(preferredPluginType); if (preferredPlugin != null) { return preferredPlugin; } else if (alternatePluginType != null) { AlternateType alternatePlugin = initializer.loadImpl(alternatePluginType); if (alternatePlugin != null) { return alternatePlugin; } } return plugins.getDefaultPlugin(preferredPluginType); } catch (final Throwable t) { return (ReturnT) Proxy.newProxyInstance( preferredPluginType.getClassLoader(), new Class<?>[] {preferredPluginType}, new InvocationHandler() { @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { throw new IllegalStateException( "Could not initialize plugin: " + preferredPluginType + " (alternate: " + alternatePluginType + ")", t); } }); } } /** * Scans the classpath for given {@code pluginType} and returns a list of its instances. * * @return An list of {@code pluginType} or an empty list if none was found. */ @SuppressWarnings("unchecked") <T> List<T> loadPlugins(final Class<T> pluginType) { try { return initializer.loadImpls(pluginType); } catch (final Throwable t) { return Collections.singletonList( (T) Proxy.newProxyInstance( pluginType.getClassLoader(), new Class<?>[] {pluginType}, new InvocationHandler() { @Override public Object invoke( Object proxy, Method method, Object[] args) throws Throwable { throw new IllegalStateException( "Could not initialize plugin: " + pluginType, t); } })); } } }
of
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/SequenceInformationExtractorHSQLDBDatabaseImpl.java
{ "start": 187, "end": 541 }
class ____ extends SequenceInformationExtractorLegacyImpl { /** * Singleton access */ public static final SequenceInformationExtractorHSQLDBDatabaseImpl INSTANCE = new SequenceInformationExtractorHSQLDBDatabaseImpl(); @Override protected String sequenceStartValueColumn() { return "start_with"; } }
SequenceInformationExtractorHSQLDBDatabaseImpl
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/cid/keymanytoone/NestedKeyManyToOneTest.java
{ "start": 770, "end": 1133 }
class ____ { @Test public void testNestedIdClassAssociations(SessionFactoryScope scope) { scope.inTransaction( session -> { session.createQuery( "SELECT idClassEntity_1.basicEntity.key1 FROM NestedIdClassEntity a JOIN a.idClassEntity idClassEntity_1" ).getResultList(); } ); } @Entity(name = "BasicEntity") public static
NestedKeyManyToOneTest
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java
{ "start": 60583, "end": 86601 }
class ____ { private String clusterUUID; private boolean clusterUUIDCommitted; private long version; private CoordinationMetadata coordinationMetadata = CoordinationMetadata.EMPTY_METADATA; private Settings transientSettings = Settings.EMPTY; private Settings persistentSettings = Settings.EMPTY; private DiffableStringMap hashesOfConsistentSettings = DiffableStringMap.EMPTY; private final ImmutableOpenMap.Builder<String, ClusterCustom> customs; /** * TODO: This should map to {@link ProjectMetadata} (not Builder), but that's tricky to do due to * legacy delegation methods such as {@link #indices(Map)} which expect to have a mutable project */ private final Map<ProjectId, ProjectMetadata.Builder> projectMetadata; private final ImmutableOpenMap.Builder<String, ReservedStateMetadata> reservedStateMetadata; @SuppressWarnings("this-escape") public Builder() { clusterUUID = UNKNOWN_CLUSTER_UUID; customs = ImmutableOpenMap.builder(); projectMetadata = new HashMap<>(); reservedStateMetadata = ImmutableOpenMap.builder(); } Builder(Metadata metadata) { this.clusterUUID = metadata.clusterUUID; this.clusterUUIDCommitted = metadata.clusterUUIDCommitted; this.coordinationMetadata = metadata.coordinationMetadata; this.transientSettings = metadata.transientSettings; this.persistentSettings = metadata.persistentSettings; this.hashesOfConsistentSettings = metadata.hashesOfConsistentSettings; this.version = metadata.version; this.customs = ImmutableOpenMap.builder(metadata.customs); this.projectMetadata = Maps.transformValues(metadata.projectMetadata, ProjectMetadata::builder); this.reservedStateMetadata = ImmutableOpenMap.builder(metadata.reservedStateMetadata); } private ProjectMetadata.Builder getSingleProject() { if (projectMetadata.isEmpty()) { createDefaultProject(); } else if (projectMetadata.size() != 1) { throw new MultiProjectPendingException("There are multiple projects " + projectMetadata.keySet()); } return projectMetadata.values().iterator().next(); } public Builder 
projectMetadata(Map<ProjectId, ProjectMetadata> projectMetadata) { assert projectMetadata.entrySet().stream().allMatch(e -> e.getValue().id().equals(e.getKey())) : "Project metadata map is inconsistent"; this.projectMetadata.clear(); projectMetadata.forEach((k, v) -> this.projectMetadata.put(k, ProjectMetadata.builder(v))); return this; } public Builder put(ProjectMetadata projectMetadata) { return put(ProjectMetadata.builder(projectMetadata)); } public Builder put(ProjectMetadata.Builder projectMetadata) { this.projectMetadata.put(projectMetadata.getId(), projectMetadata); return this; } public Builder removeProject(ProjectId projectId) { this.projectMetadata.remove(projectId); return this; } public ProjectMetadata.Builder getProject(ProjectId projectId) { return projectMetadata.get(projectId); } public Builder forEachProject(UnaryOperator<ProjectMetadata.Builder> modifier) { projectMetadata.replaceAll((p, b) -> modifier.apply(b)); return this; } @Deprecated(forRemoval = true) public Builder put(IndexMetadata.Builder indexMetadataBuilder) { getSingleProject().put(indexMetadataBuilder); return this; } @Deprecated(forRemoval = true) public Builder put(IndexMetadata indexMetadata, boolean incrementVersion) { getSingleProject().put(indexMetadata, incrementVersion); return this; } @Deprecated(forRemoval = true) public IndexMetadata get(String index) { return getSingleProject().get(index); } @Deprecated(forRemoval = true) public IndexMetadata getSafe(Index index) { return getSingleProject().getSafe(index); } @Deprecated(forRemoval = true) public Builder remove(String index) { getSingleProject().remove(index); return this; } @Deprecated(forRemoval = true) public Builder removeAllIndices() { getSingleProject().removeAllIndices(); return this; } @Deprecated(forRemoval = true) public Builder indices(Map<String, IndexMetadata> indices) { getSingleProject().indices(indices); return this; } @Deprecated(forRemoval = true) public Builder put(IndexTemplateMetadata.Builder 
template) { getSingleProject().put(template); return this; } @Deprecated(forRemoval = true) public Builder put(IndexTemplateMetadata template) { getSingleProject().put(template); return this; } @Deprecated(forRemoval = true) public Builder removeTemplate(String templateName) { getSingleProject().removeTemplate(templateName); return this; } @Deprecated(forRemoval = true) public Builder templates(Map<String, IndexTemplateMetadata> templates) { getSingleProject().templates(templates); return this; } @Deprecated(forRemoval = true) public Builder componentTemplates(Map<String, ComponentTemplate> componentTemplates) { getSingleProject().componentTemplates(componentTemplates); return this; } @Deprecated(forRemoval = true) public Builder indexTemplates(Map<String, ComposableIndexTemplate> indexTemplates) { getSingleProject().indexTemplates(indexTemplates); return this; } @Deprecated(forRemoval = true) public Builder put(String name, ComposableIndexTemplate indexTemplate) { getSingleProject().put(name, indexTemplate); return this; } @Deprecated(forRemoval = true) public Builder dataStreams(Map<String, DataStream> dataStreams, Map<String, DataStreamAlias> dataStreamAliases) { getSingleProject().dataStreams(dataStreams, dataStreamAliases); return this; } @Deprecated(forRemoval = true) public Builder put(DataStream dataStream) { getSingleProject().put(dataStream); return this; } @Deprecated(forRemoval = true) public DataStreamMetadata dataStreamMetadata() { return getSingleProject().dataStreamMetadata(); } @Deprecated(forRemoval = true) public boolean put(String aliasName, String dataStream, Boolean isWriteDataStream, String filter) { return getSingleProject().put(aliasName, dataStream, isWriteDataStream, filter); } public Builder putCustom(String type, ClusterCustom custom) { customs.put(type, Objects.requireNonNull(custom, type)); return this; } @Deprecated(forRemoval = true) public Builder putCustom(String type, ProjectCustom custom) { getSingleProject().putCustom(type, 
Objects.requireNonNull(custom, type)); return this; } public ClusterCustom getCustom(String type) { return customs.get(type); } public Builder removeCustom(String type) { customs.remove(type); return this; } public Builder removeCustomIf(BiPredicate<String, ? super ClusterCustom> p) { customs.removeAll(p); return this; } public Builder customs(Map<String, ClusterCustom> clusterCustoms) { clusterCustoms.forEach((key, value) -> Objects.requireNonNull(value, key)); customs.putAllFromMap(clusterCustoms); return this; } @Deprecated(forRemoval = true) public Builder projectCustoms(Map<String, ProjectCustom> projectCustoms) { projectCustoms.forEach((key, value) -> Objects.requireNonNull(value, key)); getSingleProject().customs(projectCustoms); return this; } /** * Adds a map of namespace to {@link ReservedStateMetadata} into the metadata builder * @param reservedStateMetadata a map of namespace to {@link ReservedStateMetadata} * @return {@link Builder} */ public Builder put(Map<String, ReservedStateMetadata> reservedStateMetadata) { this.reservedStateMetadata.putAllFromMap(reservedStateMetadata); return this; } /** * Adds a {@link ReservedStateMetadata} for a given namespace to the metadata builder * @param metadata a {@link ReservedStateMetadata} * @return {@link Builder} */ public Builder put(ReservedStateMetadata metadata) { reservedStateMetadata.put(metadata.namespace(), metadata); return this; } /** * Removes a {@link ReservedStateMetadata} for a given namespace * @param metadata a {@link ReservedStateMetadata} * @return {@link Builder} */ public Builder removeReservedState(ReservedStateMetadata metadata) { reservedStateMetadata.remove(metadata.namespace()); return this; } @Deprecated(forRemoval = true) public Builder updateSettings(Settings settings, String... indices) { getSingleProject().updateSettings(settings, indices); return this; } /** * Update the number of replicas for the specified indices. 
* * @param numberOfReplicas the number of replicas * @param indices the indices to update the number of replicas for * @return the builder */ @Deprecated(forRemoval = true) public Builder updateNumberOfReplicas(final int numberOfReplicas, final String[] indices) { getSingleProject().updateNumberOfReplicas(numberOfReplicas, indices); return this; } public Builder coordinationMetadata(CoordinationMetadata coordinationMetadata) { this.coordinationMetadata = coordinationMetadata; return this; } public Settings transientSettings() { return this.transientSettings; } public Builder transientSettings(Settings settings) { this.transientSettings = settings; return this; } public Settings persistentSettings() { return this.persistentSettings; } public Builder persistentSettings(Settings settings) { this.persistentSettings = settings; return this; } public Builder hashesOfConsistentSettings(DiffableStringMap hashesOfConsistentSettings) { this.hashesOfConsistentSettings = hashesOfConsistentSettings; return this; } public Builder hashesOfConsistentSettings(Map<String, String> hashesOfConsistentSettings) { this.hashesOfConsistentSettings = new DiffableStringMap(hashesOfConsistentSettings); return this; } public Builder version(long version) { this.version = version; return this; } public Builder clusterUUID(String clusterUUID) { this.clusterUUID = clusterUUID; return this; } public Builder clusterUUIDCommitted(boolean clusterUUIDCommitted) { this.clusterUUIDCommitted = clusterUUIDCommitted; return this; } public Builder generateClusterUuidIfNeeded() { if (clusterUUID.equals(UNKNOWN_CLUSTER_UUID)) { clusterUUID(UUIDs.randomBase64UUID()); } return this; } /** * @return a new <code>Metadata</code> instance */ public Metadata build() { return build(false); } public Metadata build(boolean skipNameCollisionChecks) { return new Metadata( clusterUUID, clusterUUIDCommitted, version, coordinationMetadata, buildProjectMetadata(skipNameCollisionChecks), transientSettings, persistentSettings, 
Settings.builder().put(persistentSettings).put(transientSettings).build(), hashesOfConsistentSettings, customs.build(), reservedStateMetadata.build() ); } private Map<ProjectId, ProjectMetadata> buildProjectMetadata(boolean skipNameCollisionChecks) { if (projectMetadata.isEmpty()) { createDefaultProject(); } assert assertProjectIdAndProjectMetadataConsistency(); if (projectMetadata.size() == 1) { final var entry = projectMetadata.entrySet().iterator().next(); // Map.of() with a single entry is highly optimized // so we want take advantage of that performance boost for this common case of a single project return Map.of(entry.getKey(), entry.getValue().build(skipNameCollisionChecks)); } else { return Collections.unmodifiableMap(Maps.transformValues(projectMetadata, m -> m.build(skipNameCollisionChecks))); } } private ProjectMetadata.Builder createDefaultProject() { return projectMetadata.put(DEFAULT_PROJECT_ID, new ProjectMetadata.Builder(Map.of(), 0).id(DEFAULT_PROJECT_ID)); } private boolean assertProjectIdAndProjectMetadataConsistency() { projectMetadata.forEach((id, project) -> { assert project.getId().equals(id) : "project id mismatch key=[" + id + "] builder=[" + project.getId() + "]"; }); return true; } /** * There are a set of specific custom sections that have moved from top-level sections to project-level sections * as part of the multi-project refactor. Enumerate them here so we can move them to the right place * if they are read as a top-level section from a previous metadata version. 
*/ private static final Set<String> MOVED_PROJECT_CUSTOMS = Set.of( IndexGraveyard.TYPE, DataStreamMetadata.TYPE, ComposableIndexTemplateMetadata.TYPE, ComponentTemplateMetadata.TYPE ); public static Metadata fromXContent(XContentParser parser) throws IOException { Builder builder = new Builder(); // we might get here after the meta-data element, or on a fresh parser XContentParser.Token token = parser.currentToken(); String currentFieldName = parser.currentName(); if ("meta-data".equals(currentFieldName) == false) { token = parser.nextToken(); if (token == XContentParser.Token.START_OBJECT) { // move to the field name (meta-data) XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); // move to the next object token = parser.nextToken(); } currentFieldName = parser.currentName(); } if ("meta-data".equals(currentFieldName) == false) { throw new IllegalArgumentException("Expected [meta-data] as a field name but got " + currentFieldName); } XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); /** * Used when reading BWC fields from when indices etc used to be directly on metadata */ final Supplier<ProjectMetadata.Builder> projectBuilderForBwc = () -> { // Due to the way we handle repository metadata (we changed it from cluster scoped to project scoped) // we may have cases where we have both project scoped XContent (with its own indices, customs etc) // and also cluster scoped XContent that needs to be applied to the default project // And, in this case there may be multiple projects even while we're applying BWC logic to the default project ProjectMetadata.Builder pmb = builder.getProject(ProjectId.DEFAULT); if (pmb == null) { pmb = ProjectMetadata.builder(ProjectId.DEFAULT); builder.put(pmb); } return pmb; }; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if 
(token == XContentParser.Token.START_ARRAY) { switch (currentFieldName) { case "projects" -> { assert builder.projectMetadata.isEmpty() : "expect empty projectMetadata, but got " + builder.projectMetadata; readProjects(parser, builder); } default -> throw new IllegalArgumentException("Unexpected field [" + currentFieldName + "]"); } } else if (token == XContentParser.Token.START_OBJECT) { switch (currentFieldName) { case "cluster_coordination" -> builder.coordinationMetadata(CoordinationMetadata.fromXContent(parser)); case "settings" -> builder.persistentSettings(Settings.fromXContent(parser)); case "hashes_of_consistent_settings" -> builder.hashesOfConsistentSettings(parser.mapStrings()); /* Cluster reserved state */ case "reserved_state" -> { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { builder.put(ReservedStateMetadata.fromXContent(parser)); } } /* BwC Top-level project things */ case "indices" -> { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { projectBuilderForBwc.get().put(IndexMetadata.Builder.fromXContent(parser), false); } } case "templates" -> { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { projectBuilderForBwc.get().put(IndexTemplateMetadata.Builder.fromXContent(parser, parser.currentName())); } } /* Cluster customs (and project customs in older formats) */ default -> { // Older clusters didn't separate cluster-scoped and project-scope customs so a top-level custom object might // actually be a project-scoped custom final NamedXContentRegistry registry = parser.getXContentRegistry(); if (registry.hasParser(ClusterCustom.class, currentFieldName, parser.getRestApiVersion()) && MOVED_PROJECT_CUSTOMS.contains(currentFieldName) == false) { parseCustomObject(parser, currentFieldName, ClusterCustom.class, builder::putCustom); } else if (registry.hasParser(ProjectCustom.class, currentFieldName, parser.getRestApiVersion())) { parseCustomObject(parser, currentFieldName, 
ProjectCustom.class, (name, projectCustom) -> { if (projectCustom instanceof PersistentTasksCustomMetadata persistentTasksCustomMetadata) { assert PersistentTasksCustomMetadata.TYPE.equals(name) : name + " != " + PersistentTasksCustomMetadata.TYPE; final var tuple = persistentTasksCustomMetadata.split(); projectBuilderForBwc.get().putCustom(PersistentTasksCustomMetadata.TYPE, tuple.v2()); builder.putCustom(ClusterPersistentTasksCustomMetadata.TYPE, tuple.v1()); } else { projectBuilderForBwc.get().putCustom(name, projectCustom); } }); } else { logger.warn("Skipping unknown custom object with type {}", currentFieldName); parser.skipChildren(); } } } } else if (token.isValue()) { switch (currentFieldName) { case "version" -> builder.version(parser.longValue()); case "cluster_uuid", "uuid" -> builder.clusterUUID(parser.text()); case "cluster_uuid_committed" -> builder.clusterUUIDCommitted(parser.booleanValue()); default -> throw new IllegalArgumentException("Unexpected field [" + currentFieldName + "]"); } } else { throw new IllegalArgumentException("Unexpected token " + token); } } XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); return builder.build(); } private static void readProjects(XContentParser parser, Builder builder) throws IOException { XContentParser.Token token; XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { builder.put(ProjectMetadata.Builder.fromXContent(parser)); } } static <C extends MetadataCustom<C>> void parseCustomObject( XContentParser parser, String name, Class<C> categoryClass, BiConsumer<String, C> consumer ) throws IOException { try { C custom = parser.namedObject(categoryClass, name, null); consumer.accept(custom.getWriteableName(), custom); } catch (NamedObjectNotFoundException _ex) { logger.warn("Skipping unknown custom [{}] object with type {}", 
categoryClass.getSimpleName(), name); parser.skipChildren(); } } } private volatile Metadata.ProjectLookup projectLookup = null; /** * Attempt to find a project for the supplied {@link Index}. */ public Optional<ProjectMetadata> lookupProject(Index index) { return getProjectLookup().project(index); } /** * Attempt to find a project for the supplied {@link Index}. * @throws org.elasticsearch.index.IndexNotFoundException if the index does not exist in any project */ public ProjectMetadata projectFor(Index index) { return lookupProject(index).orElseThrow( () -> new IndexNotFoundException("index [" + index + "] does not exist in any project", index) ); } /** * Attempt to find the IndexMetadata for the supplied {@link Index}. * @throws org.elasticsearch.index.IndexNotFoundException if the index does not exist in any project */ public IndexMetadata indexMetadata(Index index) { return projectFor(index).getIndexSafe(index); } /** * This method is similar to {@link #indexMetadata}. But it returns an {@link Optional} instead of * throwing when either the project or the index is not found. */ public Optional<IndexMetadata> findIndex(Index index) { return lookupProject(index).map(projectMetadata -> projectMetadata.index(index)); } ProjectLookup getProjectLookup() { /* * projectLookup is volatile, but this assignment is not synchronized * That means it is possible that we will generate multiple lookup objects if there are multiple concurrent callers * Those lookup objects will be identical, and the double assignment will be safe, but there is the cost of building the lookup * more than once. * In the single project case building the lookup is cheap, and synchronization would be costly. * In the multiple project case, it might be cheaper to synchronize, but the long term solution is to maintain the lookup table * as projects/indices are added/removed rather than rebuild it each time the cluster-state/metadata object changes. 
*/ if (this.projectLookup == null) { if (this.isSingleProject()) { projectLookup = new SingleProjectLookup(getSingleProject()); } else { projectLookup = new MultiProjectLookup(); } } return projectLookup; } /** * A lookup table from {@link Index} to {@link ProjectId} */
Builder
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/compatible/jsonlib/CompatibleTest0.java
{ "start": 5598, "end": 6684 }
class ____ { private Boolean f1; private Character f2; private String f3; private Date date; private boolean f4; private char f5; public Boolean getF1() { return f1; } public void setF1(Boolean f1) { this.f1 = f1; } public Character getF2() { return f2; } public void setF2(Character f2) { this.f2 = f2; } public String getF3() { return f3; } public void setF3(String f3) { this.f3 = f3; } public Date getDate() { return date; } public void setDate(Date date) { this.date = date; } public boolean isF4() { return f4; } public void setF4(boolean f4) { this.f4 = f4; } public char getF5() { return f5; } public void setF5(char f5) { this.f5 = f5; } } public static
V1
java
ReactiveX__RxJava
src/main/java/io/reactivex/rxjava3/internal/operators/completable/CompletableFromPublisher.java
{ "start": 871, "end": 1248 }
class ____<T> extends Completable { final Publisher<T> flowable; public CompletableFromPublisher(Publisher<T> flowable) { this.flowable = flowable; } @Override protected void subscribeActual(final CompletableObserver downstream) { flowable.subscribe(new FromPublisherSubscriber<>(downstream)); } static final
CompletableFromPublisher
java
spring-cloud__spring-cloud-gateway
spring-cloud-gateway-server-webmvc/src/main/java/org/springframework/cloud/gateway/server/mvc/filter/BodyFilterFunctions.java
{ "start": 8561, "end": 8689 }
interface ____<T, R> { R apply(ServerRequest request, ServerResponse response, T t); } private static
RewriteResponseFunction
java
elastic__elasticsearch
x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/ForkIT.java
{ "start": 1262, "end": 46076 }
class ____ extends AbstractEsqlIntegTestCase { @Before public void setupIndex() { createAndPopulateIndices(); } public void testSimple() { var query = """ FROM test | WHERE id > 2 | FORK ( WHERE content:"fox" ) // match operator ( WHERE content:"dog" ) | KEEP id, _fork, content | SORT id, _fork """; testSimpleImpl(query); } public void testSimpleMatchFunction() { var query = """ FROM test | WHERE id > 2 | FORK ( WHERE match(content, "fox") ) // match function ( WHERE match(content, "dog") ) | KEEP id, _fork, content | SORT id, _fork """; testSimpleImpl(query); } private void testSimpleImpl(String query) { try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("id", "_fork", "content")); assertColumnTypes(resp.columns(), List.of("integer", "keyword", "text")); Iterable<Iterable<Object>> expectedValues = List.of( List.of(3, "fork2", "This dog is really brown"), List.of(4, "fork2", "The dog is brown but this document is very very long"), List.of(6, "fork1", "The quick brown fox jumps over the lazy dog"), List.of(6, "fork2", "The quick brown fox jumps over the lazy dog") ); assertValues(resp.values(), expectedValues); } } public void testRow() { var query = """ ROW a = [1, 2, 3, 4], b = 100 | MV_EXPAND a | FORK (WHERE a % 2 == 1) (WHERE a % 2 == 0) | SORT _fork, a """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("a", "b", "_fork")); assertColumnTypes(resp.columns(), List.of("integer", "integer", "keyword")); Iterable<Iterable<Object>> expectedValues = List.of( List.of(1, 100, "fork1"), List.of(3, 100, "fork1"), List.of(2, 100, "fork2"), List.of(4, 100, "fork2") ); assertValues(resp.values(), expectedValues); } } public void testSortAndLimitInFirstSubQuery() { var query = """ FROM test | WHERE id > 0 | FORK ( WHERE content:"fox" | SORT id DESC | LIMIT 1 ) ( WHERE content:"dog" ) | KEEP id, _fork, content | SORT id, _fork """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("id", "_fork", "content")); 
assertColumnTypes(resp.columns(), List.of("integer", "keyword", "text")); Iterable<Iterable<Object>> expectedValues = List.of( List.of(2, "fork2", "This is a brown dog"), List.of(3, "fork2", "This dog is really brown"), List.of(4, "fork2", "The dog is brown but this document is very very long"), List.of(6, "fork1", "The quick brown fox jumps over the lazy dog"), List.of(6, "fork2", "The quick brown fox jumps over the lazy dog") ); assertValues(resp.values(), expectedValues); } } public void testSortAndLimitInFirstSubQueryASC() { var query = """ FROM test | WHERE id > 0 | FORK ( WHERE content:"fox" | SORT id ASC | LIMIT 1 ) ( WHERE content:"dog" ) | KEEP id, _fork, content | SORT id, _fork """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("id", "_fork", "content")); assertColumnTypes(resp.columns(), List.of("integer", "keyword", "text")); Iterable<Iterable<Object>> expectedValues = List.of( List.of(1, "fork1", "This is a brown fox"), List.of(2, "fork2", "This is a brown dog"), List.of(3, "fork2", "This dog is really brown"), List.of(4, "fork2", "The dog is brown but this document is very very long"), List.of(6, "fork2", "The quick brown fox jumps over the lazy dog") ); assertValues(resp.values(), expectedValues); } } public void testSortAndLimitInSecondSubQuery() { var query = """ FROM test | WHERE id > 2 | FORK ( WHERE content:"fox" ) ( WHERE content:"dog" | SORT id DESC | LIMIT 2 ) | KEEP _fork, id, content | SORT _fork, id """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("_fork", "id", "content")); assertColumnTypes(resp.columns(), List.of("keyword", "integer", "text")); Iterable<Iterable<Object>> expectedValues = List.of( List.of("fork1", 6, "The quick brown fox jumps over the lazy dog"), List.of("fork2", 4, "The dog is brown but this document is very very long"), List.of("fork2", 6, "The quick brown fox jumps over the lazy dog") ); assertValues(resp.values(), expectedValues); } } public void 
testSortAndLimitInBothSubQueries() { var query = """ FROM test | WHERE id > 0 | FORK ( WHERE content:"fox" | SORT id | LIMIT 1 ) ( WHERE content:"dog" | SORT id | LIMIT 1 ) | KEEP id, _fork, content | SORT id, _fork """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("id", "_fork", "content")); assertColumnTypes(resp.columns(), List.of("integer", "keyword", "text")); Iterable<Iterable<Object>> expectedValues = List.of( List.of(1, "fork1", "This is a brown fox"), List.of(2, "fork2", "This is a brown dog") ); assertValues(resp.values(), expectedValues); } } public void testWhereWhere() { var query = """ FROM test | FORK ( WHERE id < 2 | WHERE content:"fox" ) ( WHERE id > 2 | WHERE content:"dog" ) | SORT _fork, id | KEEP _fork, id, content """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("_fork", "id", "content")); assertColumnTypes(resp.columns(), List.of("keyword", "integer", "text")); Iterable<Iterable<Object>> expectedValues = List.of( List.of("fork1", 1, "This is a brown fox"), List.of("fork2", 3, "This dog is really brown"), List.of("fork2", 4, "The dog is brown but this document is very very long"), List.of("fork2", 6, "The quick brown fox jumps over the lazy dog") ); assertValues(resp.values(), expectedValues); } } public void testWhereSort() { var query = """ FROM test | FORK ( WHERE content:"fox" | SORT id ) ( WHERE content:"dog" | SORT id ) | SORT _fork, id | KEEP _fork, id, content """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("_fork", "id", "content")); assertColumnTypes(resp.columns(), List.of("keyword", "integer", "text")); Iterable<Iterable<Object>> expectedValues = List.of( List.of("fork1", 1, "This is a brown fox"), List.of("fork1", 6, "The quick brown fox jumps over the lazy dog"), List.of("fork2", 2, "This is a brown dog"), List.of("fork2", 3, "This dog is really brown"), List.of("fork2", 4, "The dog is brown but this document is very very long"), List.of("fork2", 
6, "The quick brown fox jumps over the lazy dog") ); assertValues(resp.values(), expectedValues); } } public void testWhereSortOnlyInFork() { var queryWithMatchOperator = """ FROM test | FORK ( WHERE content:"fox" | SORT id ) ( WHERE content:"dog" | SORT id ) | KEEP _fork, id, content | SORT _fork, id """; var queryWithMatchFunction = """ FROM test | FORK ( WHERE match(content, "fox") | SORT id ) ( WHERE match(content, "dog") | SORT id ) | KEEP _fork, id, content | SORT _fork, id """; for (var query : List.of(queryWithMatchOperator, queryWithMatchFunction)) { try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("_fork", "id", "content")); assertColumnTypes(resp.columns(), List.of("keyword", "integer", "text")); Iterable<Iterable<Object>> expectedValues = List.of( List.of("fork1", 1, "This is a brown fox"), List.of("fork1", 6, "The quick brown fox jumps over the lazy dog"), List.of("fork2", 2, "This is a brown dog"), List.of("fork2", 3, "This dog is really brown"), List.of("fork2", 4, "The dog is brown but this document is very very long"), List.of("fork2", 6, "The quick brown fox jumps over the lazy dog") ); assertValues(resp.values(), expectedValues); } } } public void testSortAndLimitOnlyInSecondSubQuery() { var query = """ FROM test | FORK ( WHERE content:"fox" ) ( SORT id | LIMIT 3 ) | SORT _fork, id | KEEP _fork, id, content """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("_fork", "id", "content")); assertColumnTypes(resp.columns(), List.of("keyword", "integer", "text")); Iterable<Iterable<Object>> expectedValues = List.of( List.of("fork1", 1, "This is a brown fox"), List.of("fork1", 6, "The quick brown fox jumps over the lazy dog"), List.of("fork2", 1, "This is a brown fox"), List.of("fork2", 2, "This is a brown dog"), List.of("fork2", 3, "This dog is really brown") ); assertValues(resp.values(), expectedValues); } } public void testLimitOnlyInFirstSubQuery() { var query = """ FROM test | FORK ( LIMIT 100 ) ( 
WHERE content:"fox" ) | SORT _fork, id | KEEP _fork, id, content """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("_fork", "id", "content")); assertColumnTypes(resp.columns(), List.of("keyword", "integer", "text")); Iterable<Iterable<Object>> expectedValues = List.of( List.of("fork1", 1, "This is a brown fox"), List.of("fork1", 2, "This is a brown dog"), List.of("fork1", 3, "This dog is really brown"), List.of("fork1", 4, "The dog is brown but this document is very very long"), List.of("fork1", 5, "There is also a white cat"), List.of("fork1", 6, "The quick brown fox jumps over the lazy dog"), List.of("fork2", 1, "This is a brown fox"), List.of("fork2", 6, "The quick brown fox jumps over the lazy dog") ); assertValues(resp.values(), expectedValues); } } public void testLimitOnlyInSecondSubQuery() { var query = """ FROM test | FORK ( WHERE content:"fox" ) ( LIMIT 100 ) | SORT _fork, id | KEEP _fork, id, content """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("_fork", "id", "content")); assertColumnTypes(resp.columns(), List.of("keyword", "integer", "text")); Iterable<Iterable<Object>> expectedValues = List.of( List.of("fork1", 1, "This is a brown fox"), List.of("fork1", 6, "The quick brown fox jumps over the lazy dog"), List.of("fork2", 1, "This is a brown fox"), List.of("fork2", 2, "This is a brown dog"), List.of("fork2", 3, "This dog is really brown"), List.of("fork2", 4, "The dog is brown but this document is very very long"), List.of("fork2", 5, "There is also a white cat"), List.of("fork2", 6, "The quick brown fox jumps over the lazy dog") ); assertValues(resp.values(), expectedValues); } } public void testKeepOnlyId() { var query = """ FROM test METADATA _score | WHERE id > 2 | FORK ( WHERE content:"fox" ) ( WHERE content:"dog" ) | KEEP id | SORT id """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("id")); assertColumnTypes(resp.columns(), List.of("integer")); 
Iterable<Iterable<Object>> expectedValues = List.of(List.of(3), List.of(4), List.of(6), List.of(6)); assertValues(resp.values(), expectedValues); } } public void testScoringKeepAndSort() { var query = """ FROM test METADATA _score | WHERE id > 2 | FORK ( WHERE content:"fox" ) ( WHERE content:"dog" ) | KEEP id, content, _fork, _score | SORT id """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("id", "content", "_fork", "_score")); assertColumnTypes(resp.columns(), List.of("integer", "text", "keyword", "double")); assertThat(getValuesList(resp.values()).size(), equalTo(4)); // just assert that the expected number of results } } public void testThreeSubQueries() { var query = """ FROM test | WHERE id > 2 | FORK ( WHERE content:"fox" ) ( WHERE content:"dog" ) ( WHERE content:"cat" ) | KEEP _fork, id, content | SORT _fork, id """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("_fork", "id", "content")); assertColumnTypes(resp.columns(), List.of("keyword", "integer", "text")); Iterable<Iterable<Object>> expectedValues = List.of( List.of("fork1", 6, "The quick brown fox jumps over the lazy dog"), List.of("fork2", 3, "This dog is really brown"), List.of("fork2", 4, "The dog is brown but this document is very very long"), List.of("fork2", 6, "The quick brown fox jumps over the lazy dog"), List.of("fork3", 5, "There is also a white cat") ); assertValues(resp.values(), expectedValues); } } public void testFiveSubQueries() { var query = """ FROM test | FORK ( WHERE id == 6 ) ( WHERE id == 2 ) ( WHERE id == 5 ) ( WHERE id == 1 ) ( WHERE id == 3 ) | SORT _fork, id | KEEP _fork, id, content """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("_fork", "id", "content")); assertColumnTypes(resp.columns(), List.of("keyword", "integer", "text")); Iterable<Iterable<Object>> expectedValues = List.of( List.of("fork1", 6, "The quick brown fox jumps over the lazy dog"), List.of("fork2", 2, "This is a brown dog"), 
List.of("fork3", 5, "There is also a white cat"), List.of("fork4", 1, "This is a brown fox"), List.of("fork5", 3, "This dog is really brown") ); assertValues(resp.values(), expectedValues); } } // Tests that sort order is preserved within each fork // subquery, without any subsequent overall stream sort public void testFourSubQueriesWithSortAndLimit() { var query = """ FROM test | FORK ( WHERE id > 0 | SORT id DESC | LIMIT 2 ) ( WHERE id > 1 | SORT id ASC | LIMIT 3 ) ( WHERE id < 3 | SORT id DESC | LIMIT 2 ) ( WHERE id > 2 | SORT id ASC | LIMIT 3 ) | KEEP _fork, id, content """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("_fork", "id", "content")); assertColumnTypes(resp.columns(), List.of("keyword", "integer", "text")); Iterable<Iterable<Object>> fork0 = List.of( List.of("fork1", 6, "The quick brown fox jumps over the lazy dog"), List.of("fork1", 5, "There is also a white cat") ); Iterable<Iterable<Object>> fork1 = List.of( List.of("fork2", 2, "This is a brown dog"), List.of("fork2", 3, "This dog is really brown"), List.of("fork2", 4, "The dog is brown but this document is very very long") ); Iterable<Iterable<Object>> fork2 = List.of( List.of("fork3", 2, "This is a brown dog"), List.of("fork3", 1, "This is a brown fox") ); Iterable<Iterable<Object>> fork3 = List.of( List.of("fork4", 3, "This dog is really brown"), List.of("fork4", 4, "The dog is brown but this document is very very long"), List.of("fork4", 5, "There is also a white cat") ); assertValues(valuesFilter(resp.values(), row -> row.next().equals("fork1")), fork0); assertValues(valuesFilter(resp.values(), row -> row.next().equals("fork2")), fork1); assertValues(valuesFilter(resp.values(), row -> row.next().equals("fork3")), fork2); assertValues(valuesFilter(resp.values(), row -> row.next().equals("fork4")), fork3); assertThat(getValuesList(resp.values()).size(), equalTo(10)); } } public void testSubqueryWithoutResults() { var query = """ FROM test | WHERE id > 2 | FORK ( 
WHERE content:"rabbit" ) ( WHERE content:"dog" ) ( WHERE content:"cat" ) | KEEP _fork, id, content | SORT _fork, id """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("_fork", "id", "content")); assertColumnTypes(resp.columns(), List.of("keyword", "integer", "text")); Iterable<Iterable<Object>> expectedValues = List.of( List.of("fork2", 3, "This dog is really brown"), List.of("fork2", 4, "The dog is brown but this document is very very long"), List.of("fork2", 6, "The quick brown fox jumps over the lazy dog"), List.of("fork3", 5, "There is also a white cat") ); assertValues(resp.values(), expectedValues); } } public void testAllSubQueriesWithoutResults() { var query = """ FROM test | FORK ( WHERE content:"rabbit" ) ( WHERE content:"lion" ) ( WHERE content:"tiger" ) | KEEP _fork, id, content | SORT _fork, id """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("_fork", "id", "content")); assertColumnTypes(resp.columns(), List.of("keyword", "integer", "text")); Iterable<Iterable<Object>> empty = List.of(); assertValues(resp.values(), empty); } } public void testSubqueryWithoutLimitOnly() { // this should var query = """ FROM test | FORK ( LIMIT 0 ) // verify optimizes away ( WHERE content:"cat" ) | KEEP _fork, id, content | SORT _fork, id """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("_fork", "id", "content")); assertColumnTypes(resp.columns(), List.of("keyword", "integer", "text")); Iterable<Iterable<Object>> expectedValues = List.of(List.of("fork2", 5, "There is also a white cat")); assertValues(resp.values(), expectedValues); } } public void testWithEvalSimple() { var query = """ FROM test | WHERE content:"cat" | FORK ( EVAL a = 1 ) ( EVAL a = 2 ) | KEEP a, _fork, id, content | SORT _fork """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("a", "_fork", "id", "content")); Iterable<Iterable<Object>> expectedValues = List.of( List.of(1, "fork1", 5, "There is 
also a white cat"), List.of(2, "fork2", 5, "There is also a white cat") ); assertValues(resp.values(), expectedValues); } } public void testWithEvalDifferentOutputs() { var query = """ FROM test | WHERE id == 2 | FORK ( EVAL a = 1 ) ( EVAL b = 2 ) | KEEP a, b, _fork | SORT _fork, a """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("a", "b", "_fork")); Iterable<Iterable<Object>> expectedValues = List.of( Arrays.stream(new Object[] { 1, null, "fork1" }).toList(), Arrays.stream(new Object[] { null, 2, "fork2" }).toList() ); assertValues(resp.values(), expectedValues); } } public void testWithStatsSimple() { var query = """ FROM test | FORK (STATS x=COUNT(*), y=MV_SORT(VALUES(id))) (WHERE id == 2) | KEEP _fork, x, y, id | SORT _fork, id """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("_fork", "x", "y", "id")); Iterable<Iterable<Object>> expectedValues = List.of( Arrays.stream(new Object[] { "fork1", 6L, List.of(1, 2, 3, 4, 5, 6), null }).toList(), Arrays.stream(new Object[] { "fork2", null, null, 2 }).toList() ); assertValues(resp.values(), expectedValues); } } public void testWithStatsAfterFork() { var query = """ FROM test | FORK ( WHERE content:"fox" | EVAL a = 1) ( WHERE content:"cat" | EVAL b = 2 ) ( WHERE content:"dog" | EVAL c = 3 ) | STATS c = count(*) """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("c")); assertColumnTypes(resp.columns(), List.of("long")); Iterable<Iterable<Object>> expectedValues = List.of(List.of(7L)); assertValues(resp.values(), expectedValues); } } public void testWithStatsWithWhereAfterFork() { var query = """ FROM test | FORK ( WHERE content:"fox" | EVAL a = 1) ( WHERE content:"cat" | EVAL b = 2 ) ( WHERE content:"dog" | EVAL c = 3 ) | STATS c = count(*) WHERE _fork == "fork1" """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("c")); assertColumnTypes(resp.columns(), List.of("long")); Iterable<Iterable<Object>> expectedValues = 
List.of(List.of(2L)); assertValues(resp.values(), expectedValues); } } public void testWithConditionOnForkField() { var query = """ FROM test | FORK ( WHERE content:"fox" | EVAL a = 1) ( WHERE content:"cat" | EVAL b = 2 ) ( WHERE content:"dog" | EVAL c = 3 ) | WHERE _fork == "fork2" | KEEP _fork, id, content, a, b, c | SORT _fork """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("_fork", "id", "content", "a", "b", "c")); Iterable<Iterable<Object>> expectedValues = List.of( Arrays.stream(new Object[] { "fork2", 5, "There is also a white cat", null, 2, null }).toList() ); assertValues(resp.values(), expectedValues); } } public void testWithFilteringOnConstantColumn() { var query = """ FROM test | FORK ( WHERE content:"fox" | EVAL a = 1) ( WHERE content:"cat" | EVAL a = 2 ) ( WHERE content:"dog" | EVAL a = 3 ) | WHERE a == 3 | KEEP _fork, id, content, a | SORT id | LIMIT 3 """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("_fork", "id", "content", "a")); Iterable<Iterable<Object>> expectedValues = List.of( List.of("fork3", 2, "This is a brown dog", 3), List.of("fork3", 3, "This dog is really brown", 3), List.of("fork3", 4, "The dog is brown but this document is very very long", 3) ); assertValues(resp.values(), expectedValues); } } public void testWithLookUpJoinBeforeFork() { var query = """ FROM test | LOOKUP JOIN test-lookup ON id | FORK (WHERE id == 2 OR id == 3) (WHERE id == 1 OR id == 2) | SORT _fork, id """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("content", "id", "animal", "_fork")); Iterable<Iterable<Object>> expectedValues = List.of( List.of("This is a brown dog", 2, "dog", "fork1"), List.of("This dog is really brown", 3, "dog", "fork1"), List.of("This is a brown fox", 1, "fox", "fork2"), List.of("This is a brown dog", 2, "dog", "fork2") ); assertValues(resp.values(), expectedValues); } } public void testWithLookUpAfterFork() { var query = """ FROM test | FORK (WHERE id == 2 
OR id == 3) (WHERE id == 1 OR id == 2) | LOOKUP JOIN test-lookup ON id | SORT _fork, id """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("content", "id", "_fork", "animal")); Iterable<Iterable<Object>> expectedValues = List.of( List.of("This is a brown dog", 2, "fork1", "dog"), List.of("This dog is really brown", 3, "fork1", "dog"), List.of("This is a brown fox", 1, "fork2", "fox"), List.of("This is a brown dog", 2, "fork2", "dog") ); assertValues(resp.values(), expectedValues); } } public void testWithUnionTypesBeforeFork() { var query = """ FROM test,test-other | EVAL x = id::keyword | EVAL id = id::keyword | EVAL content = content::keyword | FORK (WHERE x == "2") (WHERE x == "1") | SORT _fork, x, content | KEEP content, id, x, _fork """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("content", "id", "x", "_fork")); Iterable<Iterable<Object>> expectedValues = List.of( List.of("This is a brown dog", "2", "2", "fork1"), List.of("This is a brown dog", "2", "2", "fork1"), List.of("This is a brown fox", "1", "1", "fork2"), List.of("This is a brown fox", "1", "1", "fork2") ); assertValues(resp.values(), expectedValues); } } public void testWithUnionTypesInBranches() { var query = """ FROM test,test-other | EVAL content = content::keyword | FORK (EVAL x = id::keyword | WHERE x == "2" | EVAL id = x::integer) (EVAL x = "a" | WHERE id::keyword == "1" | EVAL id = id::integer) | SORT _fork, x | KEEP content, id, x, _fork """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("content", "id", "x", "_fork")); Iterable<Iterable<Object>> expectedValues = List.of( List.of("This is a brown dog", 2, "2", "fork1"), List.of("This is a brown dog", 2, "2", "fork1"), List.of("This is a brown fox", 1, "a", "fork2"), List.of("This is a brown fox", 1, "a", "fork2") ); assertValues(resp.values(), expectedValues); } } public void testWithDrop() { var query = """ FROM test | WHERE id > 2 | FORK ( WHERE content:"fox" | 
DROP content) ( WHERE content:"dog" | DROP content) | KEEP id, _fork | SORT id, _fork """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("id", "_fork")); assertColumnTypes(resp.columns(), List.of("integer", "keyword")); Iterable<Iterable<Object>> expectedValues = List.of( List.of(3, "fork2"), List.of(4, "fork2"), List.of(6, "fork1"), List.of(6, "fork2") ); assertValues(resp.values(), expectedValues); } } public void testWithKeep() { var query = """ FROM test | WHERE id > 2 | FORK ( WHERE content:"fox" | KEEP id) ( WHERE content:"dog" | KEEP id) | SORT id, _fork """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("id", "_fork")); assertColumnTypes(resp.columns(), List.of("integer", "keyword")); Iterable<Iterable<Object>> expectedValues = List.of( List.of(3, "fork2"), List.of(4, "fork2"), List.of(6, "fork1"), List.of(6, "fork2") ); assertValues(resp.values(), expectedValues); } } public void testWithUnsupportedFieldsWithSameBranches() { var query = """ FROM test-other | FORK ( WHERE id == "3") ( WHERE id == "2" ) | SORT _fork """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("content", "embedding", "id", "_fork")); assertColumnTypes(resp.columns(), List.of("keyword", "unsupported", "keyword", "keyword")); Iterable<Iterable<Object>> expectedValues = List.of( Arrays.stream(new Object[] { "This dog is really brown", null, "3", "fork1" }).toList(), Arrays.stream(new Object[] { "This is a brown dog", null, "2", "fork2" }).toList() ); assertValues(resp.values(), expectedValues); } } public void testWithUnsupportedFieldsWithDifferentBranches() { var query = """ FROM test-other | FORK ( STATS x = count(*)) ( WHERE id == "2" ) | SORT _fork """; try (var resp = run(query)) { assertColumnNames(resp.columns(), List.of("x", "_fork", "content", "embedding", "id")); assertColumnTypes(resp.columns(), List.of("long", "keyword", "keyword", "unsupported", "keyword")); Iterable<Iterable<Object>> expectedValues 
= List.of( Arrays.stream(new Object[] { 3L, "fork1", null, null, null }).toList(), Arrays.stream(new Object[] { null, "fork2", "This is a brown dog", null, "2" }).toList() ); assertValues(resp.values(), expectedValues); } } public void testWithUnsupportedFieldsAndConflicts() { var firstQuery = """ FROM test-other | FORK ( STATS embedding = count(*)) ( WHERE id == "2" ) | SORT _fork """; var e = expectThrows(VerificationException.class, () -> run(firstQuery)); assertTrue(e.getMessage().contains("Column [embedding] has conflicting data types")); var secondQuery = """ FROM test-other | FORK ( WHERE id == "2" ) ( STATS embedding = count(*)) | SORT _fork """; e = expectThrows(VerificationException.class, () -> run(secondQuery)); assertTrue(e.getMessage().contains("Column [embedding] has conflicting data types")); var thirdQuery = """ FROM test-other | FORK ( WHERE id == "2" ) ( WHERE id == "3" ) ( STATS embedding = count(*)) | SORT _fork """; e = expectThrows(VerificationException.class, () -> run(thirdQuery)); assertTrue(e.getMessage().contains("Column [embedding] has conflicting data types")); } public void testValidationsAfterFork() { var firstQuery = """ FROM test* | FORK ( WHERE true ) ( WHERE true ) | DROP _fork | STATS a = count_distinct(embedding) """; var e = expectThrows(VerificationException.class, () -> run(firstQuery)); assertTrue( e.getMessage().contains("[count_distinct(embedding)] must be [any exact type except unsigned_long, _source, or counter types]") ); var secondQuery = """ FROM test* | FORK ( WHERE true ) ( WHERE true ) | DROP _fork | EVAL a = substring(1, 2, 3) """; e = expectThrows(VerificationException.class, () -> run(secondQuery)); assertTrue(e.getMessage().contains("first argument of [substring(1, 2, 3)] must be [string], found value [1] type [integer]")); var thirdQuery = """ FROM test* | FORK ( WHERE true ) ( WHERE true ) | DROP _fork | EVAL a = b + 2 """; e = expectThrows(VerificationException.class, () -> run(thirdQuery)); 
assertTrue(e.getMessage().contains("Unknown column [b]")); } public void testWithEvalWithConflictingTypes() { var query = """ FROM test | FORK ( EVAL a = 1 ) ( EVAL a = "aaaa" ) | KEEP a, _fork """; var e = expectThrows(VerificationException.class, () -> run(query)); assertTrue(e.getMessage().contains("Column [a] has conflicting data types")); } public void testSubqueryWithUnknownField() { var query = """ FROM test | FORK ( WHERE foo:"dog" ) // unknown field foo ( WHERE content:"cat" ) | KEEP _fork, id, content | SORT _fork, id """; var e = expectThrows(VerificationException.class, () -> run(query)); assertTrue(e.getMessage().contains("Unknown column [foo]")); } public void testSubqueryWithUnknownFieldMatchFunction() { var query = """ FROM test | FORK ( WHERE match(bar, "dog") ) // unknown field bar ( WHERE content:"cat" ) | KEEP _fork, id, content | SORT _fork, id """; var e = expectThrows(VerificationException.class, () -> run(query)); assertTrue(e.getMessage().contains("Unknown column [bar]")); } public void testSubqueryWithUnknownFieldInThirdBranch() { var query = """ FROM test | FORK ( WHERE content:"cat" ) ( WHERE content:"dog" ) ( WHERE fubar:"fox" ) // unknown fubar ( WHERE content:"rabbit" ) | KEEP _fork, id, content """; var e = expectThrows(VerificationException.class, () -> run(query)); assertTrue(e.getMessage().contains("Unknown column [fubar]")); } public void testSubqueryWithUnknownFieldInSort() { var query = """ FROM test | FORK ( WHERE content:"dog" | sort baz) // unknown field baz ( WHERE content:"cat" ) | KEEP _fork, id, content | SORT _fork, id """; var e = expectThrows(VerificationException.class, () -> run(query)); assertTrue(e.getMessage().contains("Unknown column [baz]")); var queryTwo = """ FROM test | FORK ( WHERE content:"dog" ) ( WHERE content:"cat" | sort bar) // unknown field bar | KEEP _fork, id, content | SORT _fork, id """; e = expectThrows(VerificationException.class, () -> run(queryTwo)); 
assertTrue(e.getMessage().contains("Unknown column [bar]")); } public void testSubqueryWithUnknownFieldInEval() { var query = """ FROM test | FORK ( EVAL x = baz + 1) ( WHERE content:"cat" ) | KEEP _fork, id, content | SORT _fork, id """; var e = expectThrows(VerificationException.class, () -> run(query)); assertTrue(e.getMessage().contains("Unknown column [baz]")); } public void testOneSubQuery() { var query = """ FROM test | WHERE id > 2 | FORK ( WHERE content:"fox" ) """; try (var resp = run(query)) { assertColumnTypes(resp.columns(), List.of("text", "integer", "keyword")); assertColumnNames(resp.columns(), List.of("content", "id", "_fork")); Iterable<Iterable<Object>> expectedValues = List.of( Arrays.stream(new Object[] { "The quick brown fox jumps over the lazy dog", 6, "fork1" }).toList() ); assertValues(resp.values(), expectedValues); } } public void testForkWithinFork() { var query = """ FROM test | FORK ( FORK (WHERE true) (WHERE true) ) ( FORK (WHERE true) (WHERE true) ) """; var e = expectThrows(VerificationException.class, () -> run(query)); assertTrue(e.getMessage().contains("Only a single FORK command is supported, but found multiple")); } public void testProfile() { var query = """ FROM test | FORK ( WHERE content:"fox" | SORT id ) ( WHERE content:"dog" | SORT id ) | SORT _fork, id | KEEP _fork, id, content """; try (var resp = run(syncEsqlQueryRequest(query).pragmas(randomPragmas()).profile(true))) { EsqlQueryResponse.Profile profile = resp.profile(); assertNotNull(profile); assertEquals( Set.of("data", "main.final", "node_reduce", "subplan-0.final", "subplan-1.final"), profile.drivers().stream().map(DriverProfile::description).collect(Collectors.toSet()) ); } } public void testWithTooManySubqueries() { var query = """ FROM test | FORK (WHERE true) (WHERE true) (WHERE true) (WHERE true) (WHERE true) (WHERE true) (WHERE true) (WHERE true) (WHERE true) """; var e = expectThrows(ParsingException.class, () -> run(query)); 
assertTrue(e.getMessage().contains("Fork supports up to 8 branches")); } private void createAndPopulateIndices() { var indexName = "test"; var client = client().admin().indices(); var createRequest = client.prepareCreate(indexName) .setSettings(Settings.builder().put("index.number_of_shards", 1)) .setMapping("id", "type=integer", "content", "type=text"); assertAcked(createRequest); client().prepareBulk() .add(new IndexRequest(indexName).id("1").source("id", 1, "content", "This is a brown fox")) .add(new IndexRequest(indexName).id("2").source("id", 2, "content", "This is a brown dog")) .add(new IndexRequest(indexName).id("3").source("id", 3, "content", "This dog is really brown")) .add(new IndexRequest(indexName).id("4").source("id", 4, "content", "The dog is brown but this document is very very long")) .add(new IndexRequest(indexName).id("5").source("id", 5, "content", "There is also a white cat")) .add(new IndexRequest(indexName).id("6").source("id", 6, "content", "The quick brown fox jumps over the lazy dog")) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); ensureYellow(indexName); var lookupIndex = "test-lookup"; createRequest = client.prepareCreate(lookupIndex) .setSettings(Settings.builder().put("index.number_of_shards", 1).put("index.mode", "lookup")) .setMapping("id", "type=integer", "animal", "type=keyword"); assertAcked(createRequest); client().prepareBulk() .add(new IndexRequest(lookupIndex).id("1").source("id", 1, "animal", "fox")) .add(new IndexRequest(lookupIndex).id("2").source("id", 2, "animal", "dog")) .add(new IndexRequest(lookupIndex).id("3").source("id", 3, "animal", "dog")) .add(new IndexRequest(lookupIndex).id("4").source("id", 4, "animal", "dog")) .add(new IndexRequest(lookupIndex).id("5").source("id", 5, "animal", "cat")) .add(new IndexRequest(lookupIndex).id("6").source("id", 6, "animal", List.of("fox", "dog"))) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); ensureYellow(lookupIndex); var otherTestIndex = 
"test-other"; createRequest = client.prepareCreate(otherTestIndex) .setSettings(Settings.builder().put("index.number_of_shards", 1)) .setMapping("id", "type=keyword", "content", "type=keyword", "embedding", "type=sparse_vector"); assertAcked(createRequest); client().prepareBulk() .add( new IndexRequest(otherTestIndex).id("1") .source("id", "1", "content", "This is a brown fox", "embedding", Map.of("abc", 1.0)) ) .add( new IndexRequest(otherTestIndex).id("2") .source("id", "2", "content", "This is a brown dog", "embedding", Map.of("def", 2.0)) ) .add( new IndexRequest(otherTestIndex).id("3") .source("id", "3", "content", "This dog is really brown", "embedding", Map.of("ghi", 1.0)) ) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); ensureYellow(indexName); } static Iterator<Iterator<Object>> valuesFilter(Iterator<Iterator<Object>> values, Predicate<Iterator<Object>> filter) { return getValuesList(values).stream().filter(row -> filter.test(row.iterator())).map(List::iterator).toList().iterator(); } }
ForkIT
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/StatementSwitchToExpressionSwitchTest.java
{ "start": 147123, "end": 147708 }
class ____ { public int foo(Suit suit) { switch (suit) { case HEART: case DIAMOND: return 1; case SPADE: System.out.println("hello"); throw new RuntimeException(); case null: default: System.out.println("fall out"); } return 2; } } """) .addOutputLines( "Test.java", """
Test
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/state/heap/StateTableByKeyGroupReaders.java
{ "start": 1292, "end": 1555 }
class ____ a static factory method to create different implementations of {@link * StateSnapshotKeyGroupReader} depending on the provided serialization format version. * * <p>The implementations are also located here as inner classes. */ @Internal public
provides
java
apache__hadoop
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/statistics/S3AStatisticsContext.java
{ "start": 933, "end": 1949 }
interface ____ extends CountersAndGauges { /** * Create a stream input statistics instance. * @return the new instance */ S3AInputStreamStatistics newInputStreamStatistics(); /** * Create a new instance of the committer statistics. * @return a new committer statistics instance */ CommitterStatistics newCommitterStatistics(); /** * Create a stream output statistics instance. * @return the new instance */ BlockOutputStreamStatistics newOutputStreamStatistics(); /** * Create a delegation token statistics instance. * @return an instance of delegation token statistics */ DelegationTokenStatistics newDelegationTokenStatistics(); /** * Create a StatisticsFromAwsSdk instance. * @return an instance of StatisticsFromAwsSdk */ StatisticsFromAwsSdk newStatisticsFromAwsSdk(); /** * Creaet a multipart statistics collector. * @return an instance */ S3AMultipartUploaderStatistics createMultipartUploaderStatistics(); }
S3AStatisticsContext
java
apache__flink
flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/TupleSerializerTest.java
{ "start": 2045, "end": 13881 }
class ____ { @Test void testTuple0() { Tuple0[] testTuples = new Tuple0[] {Tuple0.INSTANCE, Tuple0.INSTANCE, Tuple0.INSTANCE}; runTests(1, testTuples); } @Test void testTuple1Int() { @SuppressWarnings({"unchecked", "rawtypes"}) Tuple1<Integer>[] testTuples = new Tuple1[] { new Tuple1<Integer>(42), new Tuple1<Integer>(1), new Tuple1<Integer>(0), new Tuple1<Integer>(-1), new Tuple1<Integer>(Integer.MAX_VALUE), new Tuple1<Integer>(Integer.MIN_VALUE) }; runTests(4, testTuples); } @Test void testTuple1String() { Random rnd = new Random(68761564135413L); @SuppressWarnings({"unchecked", "rawtypes"}) Tuple1<String>[] testTuples = new Tuple1[] { new Tuple1<String>(StringUtils.getRandomString(rnd, 10, 100)), new Tuple1<String>("abc"), new Tuple1<String>(""), new Tuple1<String>(StringUtils.getRandomString(rnd, 30, 170)), new Tuple1<String>(StringUtils.getRandomString(rnd, 15, 50)), new Tuple1<String>("") }; runTests(-1, testTuples); } @Test void testTuple1StringArray() { Random rnd = new Random(289347567856686223L); String[] arr1 = new String[] { "abc", "", StringUtils.getRandomString(rnd, 10, 100), StringUtils.getRandomString(rnd, 15, 50), StringUtils.getRandomString(rnd, 30, 170), StringUtils.getRandomString(rnd, 14, 15), "" }; String[] arr2 = new String[] { "foo", "", StringUtils.getRandomString(rnd, 10, 100), StringUtils.getRandomString(rnd, 1000, 5000), StringUtils.getRandomString(rnd, 30000, 35000), StringUtils.getRandomString(rnd, 100 * 1024, 105 * 1024), "bar" }; @SuppressWarnings("unchecked") Tuple1<String[]>[] testTuples = new Tuple1[] {new Tuple1<String[]>(arr1), new Tuple1<String[]>(arr2)}; runTests(-1, testTuples); } @Test void testTuple2StringDouble() { Random rnd = new Random(807346528946L); @SuppressWarnings("unchecked") Tuple2<String, Double>[] testTuples = new Tuple2[] { new Tuple2<String, Double>( StringUtils.getRandomString(rnd, 10, 100), rnd.nextDouble()), new Tuple2<String, Double>( StringUtils.getRandomString(rnd, 10, 100), rnd.nextDouble()), new 
Tuple2<String, Double>( StringUtils.getRandomString(rnd, 10, 100), rnd.nextDouble()), new Tuple2<String, Double>("", rnd.nextDouble()), new Tuple2<String, Double>( StringUtils.getRandomString(rnd, 10, 100), rnd.nextDouble()), new Tuple2<String, Double>( StringUtils.getRandomString(rnd, 10, 100), rnd.nextDouble()) }; runTests(-1, testTuples); } @Test void testTuple2StringStringArray() { Random rnd = new Random(289347567856686223L); String[] arr1 = new String[] { "abc", "", StringUtils.getRandomString(rnd, 10, 100), StringUtils.getRandomString(rnd, 15, 50), StringUtils.getRandomString(rnd, 30, 170), StringUtils.getRandomString(rnd, 14, 15), "" }; String[] arr2 = new String[] { "foo", "", StringUtils.getRandomString(rnd, 10, 100), StringUtils.getRandomString(rnd, 1000, 5000), StringUtils.getRandomString(rnd, 30000, 35000), StringUtils.getRandomString(rnd, 100 * 1024, 105 * 1024), "bar" }; @SuppressWarnings("unchecked") Tuple2<String, String[]>[] testTuples = new Tuple2[] { new Tuple2<String, String[]>(StringUtils.getRandomString(rnd, 30, 170), arr1), new Tuple2<String, String[]>(StringUtils.getRandomString(rnd, 30, 170), arr2), new Tuple2<String, String[]>(StringUtils.getRandomString(rnd, 30, 170), arr1), new Tuple2<String, String[]>(StringUtils.getRandomString(rnd, 30, 170), arr2), new Tuple2<String, String[]>(StringUtils.getRandomString(rnd, 30, 170), arr2) }; runTests(-1, testTuples); } @Test void testTuple5CustomObjects() { Random rnd = new Random(807346528946L); SimpleTypes a = new SimpleTypes(); SimpleTypes b = new SimpleTypes( rnd.nextInt(), rnd.nextLong(), (byte) rnd.nextInt(), StringUtils.getRandomString(rnd, 10, 100), (short) rnd.nextInt(), rnd.nextDouble()); SimpleTypes c = new SimpleTypes( rnd.nextInt(), rnd.nextLong(), (byte) rnd.nextInt(), StringUtils.getRandomString(rnd, 10, 100), (short) rnd.nextInt(), rnd.nextDouble()); SimpleTypes d = new SimpleTypes( rnd.nextInt(), rnd.nextLong(), (byte) rnd.nextInt(), StringUtils.getRandomString(rnd, 10, 100), 
(short) rnd.nextInt(), rnd.nextDouble()); SimpleTypes e = new SimpleTypes( rnd.nextInt(), rnd.nextLong(), (byte) rnd.nextInt(), StringUtils.getRandomString(rnd, 10, 100), (short) rnd.nextInt(), rnd.nextDouble()); SimpleTypes f = new SimpleTypes( rnd.nextInt(), rnd.nextLong(), (byte) rnd.nextInt(), StringUtils.getRandomString(rnd, 10, 100), (short) rnd.nextInt(), rnd.nextDouble()); SimpleTypes g = new SimpleTypes( rnd.nextInt(), rnd.nextLong(), (byte) rnd.nextInt(), StringUtils.getRandomString(rnd, 10, 100), (short) rnd.nextInt(), rnd.nextDouble()); ComplexNestedObject1 o1 = new ComplexNestedObject1(5626435); ComplexNestedObject1 o2 = new ComplexNestedObject1(76923); ComplexNestedObject1 o3 = new ComplexNestedObject1(-1100); ComplexNestedObject1 o4 = new ComplexNestedObject1(0); ComplexNestedObject1 o5 = new ComplexNestedObject1(44); ComplexNestedObject2 co1 = new ComplexNestedObject2(rnd); ComplexNestedObject2 co2 = new ComplexNestedObject2(); ComplexNestedObject2 co3 = new ComplexNestedObject2(rnd); ComplexNestedObject2 co4 = new ComplexNestedObject2(rnd); Book b1 = new Book(976243875L, "The Serialization Odysse", 42); Book b2 = new Book(0L, "Debugging byte streams", 1337); Book b3 = new Book(-1L, "Low level interfaces", 0xC0FFEE); Book b4 = new Book(Long.MAX_VALUE, "The joy of bits and bytes", 0xDEADBEEF); Book b5 = new Book(Long.MIN_VALUE, "Winnign a prize for creative test strings", 0xBADF00); Book b6 = new Book(-2L, "Distributed Systems", 0xABCDEF0123456789L); ArrayList<String> list = new ArrayList<String>(); list.add("A"); list.add("B"); list.add("C"); list.add("D"); list.add("E"); BookAuthor ba1 = new BookAuthor(976243875L, list, "Arno Nym"); ArrayList<String> list2 = new ArrayList<String>(); BookAuthor ba2 = new BookAuthor(987654321L, list2, "The Saurus"); @SuppressWarnings("unchecked") Tuple5<SimpleTypes, Book, ComplexNestedObject1, BookAuthor, ComplexNestedObject2>[] testTuples = new Tuple5[] { new Tuple5< SimpleTypes, Book, ComplexNestedObject1, 
BookAuthor, ComplexNestedObject2>(a, b1, o1, ba1, co1), new Tuple5< SimpleTypes, Book, ComplexNestedObject1, BookAuthor, ComplexNestedObject2>(b, b2, o2, ba2, co2), new Tuple5< SimpleTypes, Book, ComplexNestedObject1, BookAuthor, ComplexNestedObject2>(c, b3, o3, ba1, co3), new Tuple5< SimpleTypes, Book, ComplexNestedObject1, BookAuthor, ComplexNestedObject2>(d, b2, o4, ba1, co4), new Tuple5< SimpleTypes, Book, ComplexNestedObject1, BookAuthor, ComplexNestedObject2>(e, b4, o5, ba2, co4), new Tuple5< SimpleTypes, Book, ComplexNestedObject1, BookAuthor, ComplexNestedObject2>(f, b5, o1, ba2, co4), new Tuple5< SimpleTypes, Book, ComplexNestedObject1, BookAuthor, ComplexNestedObject2>(g, b6, o4, ba1, co2) }; runTests(-1, testTuples); } private <T extends Tuple> void runTests(int length, T... instances) { TupleTypeInfo<T> tupleTypeInfo = (TupleTypeInfo<T>) TypeExtractor.getForObject(instances[0]); TypeSerializer<T> serializer = tupleTypeInfo.createSerializer(new SerializerConfigImpl()); Class<T> tupleClass = tupleTypeInfo.getTypeClass(); if (tupleClass == Tuple0.class) { length = 1; } SerializerTestInstance<T> test = new SerializerTestInstance<T>(serializer, tupleClass, length, instances) {}; test.testAll(); } }
TupleSerializerTest
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/issue_1400/Issue1449.java
{ "start": 529, "end": 1264 }
class ____ extends TestCase { public void test_for_issue() throws Exception { Student student = new Student(); student.name = "name"; student.id = 1L; student.sex = Sex.MAN; System.out.println(JSON.toJSON(student).toString()); System.out.println(JSON.toJSONString(student)); String str1 = "{\"id\":1,\"name\":\"name\",\"sex\":\"MAN\"}"; Student stu1 = JSON.parseObject(str1, Student.class); System.out.println(JSON.toJSONString(stu1)); String str2 = "{\"id\":1,\"name\":\"name\",\"sex\":{\"code\":\"1\",\"des\":\"男\"}}"; JSON.parseObject(str2, Student.class); } @JSONType(deserializer = SexDeserializer.class) public static
Issue1449
java
quarkusio__quarkus
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/context/ContextDataTest.java
{ "start": 1125, "end": 1435 }
class ____ { @AroundInvoke Object around(InvocationContext ctx) throws Exception { Object ret = "alpha:" + ctx.proceed(); return ret + ":" + ctx.getContextData().get("bravo"); } } @Simple @Priority(2) @Interceptor public static
AlphaInterceptor
java
quarkusio__quarkus
extensions/resteasy-reactive/rest-jackson/deployment/src/test/java/io/quarkus/resteasy/reactive/jackson/deployment/test/CustomSerializerTest.java
{ "start": 3709, "end": 4256 }
class ____ implements ContextResolver<ObjectMapper> { @Override public ObjectMapper getContext(final Class<?> type) { final ObjectMapper objectMapper = new ObjectMapper(); final SimpleModule simpleModule = new SimpleModule("custom-data"); simpleModule.addSerializer(new CustomDataSerializer()); objectMapper.registerModule(simpleModule); objectMapper.registerModule(new JavaTimeModule()); return objectMapper; } } }
CustomObjectMapperContextResolver
java
elastic__elasticsearch
test/test-clusters/src/main/java/org/elasticsearch/test/cluster/ClusterHandle.java
{ "start": 600, "end": 2273 }
interface ____ extends Closeable { /** * Starts the cluster. This method will block until all nodes are started and cluster is ready to serve requests. */ void start(); /** * Stops the cluster. This method will block until all cluster node processes have exited. This method is thread-safe and subsequent * calls will wait for the exiting termination to complete. * * @param forcibly whether to forcibly terminate the cluster */ void stop(boolean forcibly); /** * Whether the cluster is started or not. This method makes no guarantees on cluster availability, only that the node processes have * been started. * * @return whether the cluster has been started */ boolean isStarted(); /** * Returns a comma-separated list of HTTP transport endpoints for cluster. If this method is called on an unstarted cluster, the cluster * will be started. This method is thread-safe and subsequent calls will wait for cluster start and availability. * * @return cluster node HTTP transport addresses */ String getHttpAddresses(); /** * Returns the HTTP transport endpoint for the node at the given index. If this method is called on an unstarted cluster, the cluster * will be started. This method is thread-safe and subsequent calls will wait for cluster start and availability. * * @return cluster node HTTP transport addresses */ String getHttpAddress(int index); /** * Cleans up any resources created by this cluster. Calling this method will forcibly terminate any running nodes. */ void close(); }
ClusterHandle
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryFinalTest.java
{ "start": 2082, "end": 2215 }
class ____ { final Object o = null; } """) .expectUnchanged() .doTest(); } }
Test
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/issue_1600/Issue1649_private.java
{ "start": 224, "end": 621 }
class ____ extends TestCase { public void test_for_issue() throws Exception { Apple apple = new Apple(); String json = JSON.toJSONString(apple); assertEquals("{\"color\":\"\",\"productCity\":\"\",\"size\":0}", json); } @JSONType(serialzeFeatures = {SerializerFeature.WriteNullStringAsEmpty,SerializerFeature.WriteMapNullValue}) private static
Issue1649_private
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/blob/BlobServerProtocol.java
{ "start": 988, "end": 2614 }
class ____ { // -------------------------------------------------------------------------------------------- // Constants used in the protocol of the BLOB store // -------------------------------------------------------------------------------------------- /** The buffer size in bytes for network transfers. */ static final int BUFFER_SIZE = 65536; // 64 K /** * Internal code to identify a PUT operation. * * <p>Note: previously, there was also <tt>DELETE_OPERATION</tt> (code <tt>2</tt>). */ static final byte PUT_OPERATION = 0; /** * Internal code to identify a GET operation. * * <p>Note: previously, there was also <tt>DELETE_OPERATION</tt> (code <tt>2</tt>). */ static final byte GET_OPERATION = 1; /** Internal code to identify a successful operation. */ static final byte RETURN_OKAY = 0; /** Internal code to identify an erroneous operation. */ static final byte RETURN_ERROR = 1; /** * Internal code to identify a job-unrelated BLOBs (only for transient BLOBs!). * * <p>Note: previously, there was also <tt>NAME_ADDRESSABLE</tt> (code <tt>1</tt>). */ static final byte JOB_UNRELATED_CONTENT = 0; /** * Internal code to identify a job-related (permanent or transient) BLOBs. * * <p>Note: This is equal to the previous <tt>JOB_ID_SCOPE</tt> (code <tt>2</tt>). */ static final byte JOB_RELATED_CONTENT = 2; // -------------------------------------------------------------------------------------------- private BlobServerProtocol() {} }
BlobServerProtocol
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/pool/TestActiveTrace.java
{ "start": 828, "end": 2170 }
class ____ extends TestCase { private DruidDataSource dataSource; protected void setUp() throws Exception { DruidDataSourceStatManager.clear(); dataSource = new DruidDataSource(); dataSource.setRemoveAbandoned(true); dataSource.setRemoveAbandonedTimeoutMillis(100); dataSource.setLogAbandoned(true); dataSource.setTimeBetweenEvictionRunsMillis(10); dataSource.setMinEvictableIdleTimeMillis(300 * 1000); dataSource.setUrl("jdbc:mock:xxx"); } protected void tearDown() throws Exception { dataSource.close(); assertEquals(0, DruidDataSourceStatManager.getInstance().getDataSourceList().size()); } public void test_activeTrace() throws Exception { for (int i = 0; i < 1000; ++i) { dataSource.shrink(); Connection conn = dataSource.getConnection(); conn.close(); // sleep 10ms for checking stability, see https://github.com/alibaba/druid/issues/5620 Thread.sleep(10); // assertEquals(1, dataSource.getPoolingCount()); dataSource.shrink(); assertEquals("createCount : " + dataSource.getCreateCount(), 0, dataSource.getPoolingCount()); assertEquals(0, dataSource.getActiveConnections().size()); } } }
TestActiveTrace
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/IncorrectMainMethodTest.java
{ "start": 1984, "end": 2266 }
interface ____ { static void main(String[] args) {} } """) .doTest(); } // clever but not wrong @Test public void negativeVarargs() { testHelper .addSourceLines( "Test.java", """
Test
java
elastic__elasticsearch
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/NullIf.java
{ "start": 1012, "end": 2694 }
class ____ extends ConditionalFunction { private final Expression left, right; public NullIf(Source source, Expression left, Expression right) { super(source, Arrays.asList(left, right)); this.left = left; this.right = right; } @Override protected NodeInfo<? extends NullIf> info() { return NodeInfo.create(this, NullIf::new, children().get(0), children().get(1)); } @Override public Expression replaceChildren(List<Expression> newChildren) { return new NullIf(source(), newChildren.get(0), newChildren.get(1)); } public Expression left() { return left; } public Expression right() { return right; } @Override public boolean foldable() { return left.semanticEquals(right) || super.foldable(); } @Override public Object fold() { if (left.semanticEquals(right)) { return null; } return NullIfProcessor.apply(left.fold(), right.fold()); } @Override public ScriptTemplate asScript() { ScriptTemplate left = asScript(children().get(0)); ScriptTemplate right = asScript(children().get(1)); String template = "{sql}.nullif(" + left.template() + "," + right.template() + ")"; ParamsBuilder params = paramsBuilder(); params.script(left.params()); params.script(right.params()); return new ScriptTemplate(formatTemplate(template), params.build(), dataType); } @Override protected Pipe makePipe() { return new NullIfPipe(source(), this, Expressions.pipe(children().get(0)), Expressions.pipe(children().get(1))); } }
NullIf
java
bumptech__glide
library/src/main/java/com/bumptech/glide/GeneratedAppGlideModule.java
{ "start": 612, "end": 950 }
class ____ extends AppGlideModule { /** This method can be removed when manifest parsing is no longer supported. */ @NonNull Set<Class<?>> getExcludedModuleClasses() { return new HashSet<>(); } @Nullable RequestManagerRetriever.RequestManagerFactory getRequestManagerFactory() { return null; } }
GeneratedAppGlideModule
java
google__error-prone
core/src/main/java/com/google/errorprone/bugpatterns/IterablePathParameter.java
{ "start": 1929, "end": 3362 }
class ____ extends BugChecker implements VariableTreeMatcher { @Override public Description matchVariable(VariableTree tree, VisitorState state) { Type type = ASTHelpers.getType(tree); VarSymbol symbol = ASTHelpers.getSymbol(tree); if (type == null) { return NO_MATCH; } if (symbol.getKind() != ElementKind.PARAMETER) { return NO_MATCH; } if (!isSameType(type, state.getSymtab().iterableType, state)) { return NO_MATCH; } if (type.getTypeArguments().isEmpty()) { return NO_MATCH; } if (!isSameType( wildBound(getOnlyElement(type.getTypeArguments())), state.getTypeFromString(Path.class.getName()), state)) { return NO_MATCH; } Description.Builder description = buildDescription(tree); Tree parent = state.getPath().getParentPath().getLeaf(); if (tree.getType() instanceof ParameterizedTypeTree parameterizedTypeTree && (!(parent instanceof JCLambda jCLambda) || jCLambda.paramKind == ParameterKind.EXPLICIT)) { description.addFix( SuggestedFix.builder() .addImport("java.util.Collection") .replace(parameterizedTypeTree.getType(), "Collection") .build()); } return description.build(); } static Type wildBound(Type type) { return type.hasTag(TypeTag.WILDCARD) ? ((WildcardType) type).type : type; } }
IterablePathParameter
java
junit-team__junit5
jupiter-tests/src/test/java/org/junit/jupiter/api/AssertNotEqualsAssertionsTests.java
{ "start": 11243, "end": 12887 }
class ____ { @Test void assertNotEqualsDouble() { double unexpected = 1.0d; double actual = 2.0d; assertNotEquals(unexpected, actual); assertNotEquals(unexpected, actual, "message"); assertNotEquals(unexpected, actual, () -> "message"); } @Test void assertNotEqualsForTwoNaNDouble() { try { assertNotEquals(Double.NaN, Double.NaN); expectAssertionFailedError(); } catch (AssertionFailedError ex) { assertMessageEquals(ex, "expected: not equal but was: <NaN>"); } } @Test void withEqualValues() { double unexpected = 1.0d; double actual = 1.0d; try { assertNotEquals(unexpected, actual); expectAssertionFailedError(); } catch (AssertionFailedError ex) { assertMessageEquals(ex, "expected: not equal but was: <1.0>"); } } @Test void withEqualValuesWithMessage() { double unexpected = 1.0d; double actual = 1.0d; try { assertNotEquals(unexpected, actual, "custom message"); expectAssertionFailedError(); } catch (AssertionFailedError ex) { assertMessageStartsWith(ex, "custom message"); assertMessageEndsWith(ex, "expected: not equal but was: <1.0>"); } } @Test void withEqualValuesWithMessageSupplier() { double unexpected = 1.0d; double actual = 1.0d; try { assertNotEquals(unexpected, actual, () -> "custom message from supplier"); expectAssertionFailedError(); } catch (AssertionFailedError ex) { assertMessageStartsWith(ex, "custom message from supplier"); assertMessageEndsWith(ex, "expected: not equal but was: <1.0>"); } } } @Nested
AssertNotEqualsDoubleWithoutDelta
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/bug/Issue1296.java
{ "start": 198, "end": 473 }
class ____ extends TestCase { public void test_for_issue() throws Exception { Exception error = null; try { JSON.parseObject("1"); } catch (JSONException e) { error = e; } assertNotNull(error); } }
Issue1296
java
spring-projects__spring-boot
module/spring-boot-r2dbc/src/testFixtures/java/org/springframework/boot/r2dbc/SimpleConnectionFactoryProvider.java
{ "start": 1546, "end": 1872 }
class ____ implements ConnectionFactory { @Override public Publisher<? extends Connection> create() { return Mono.error(new UnsupportedOperationException()); } @Override public ConnectionFactoryMetadata getMetadata() { return SimpleConnectionFactoryProvider.class::getName; } } }
SimpleTestConnectionFactory
java
apache__camel
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/InfluxDbEndpointBuilderFactory.java
{ "start": 6964, "end": 9460 }
interface ____ extends EndpointProducerBuilder { default InfluxDbEndpointBuilder basic() { return (InfluxDbEndpointBuilder) this; } /** * Whether the producer should be started lazy (on the first message). * By starting lazy you can use this to allow CamelContext and routes to * startup in situations where a producer may otherwise fail during * starting and cause the route to fail being started. By deferring this * startup to be lazy then the startup failure can be handled during * routing messages via Camel's routing error handlers. Beware that when * the first message is processed then creating and starting the * producer may take a little time and prolong the total processing time * of the processing. * * The option is a: <code>boolean</code> type. * * Default: false * Group: producer (advanced) * * @param lazyStartProducer the value to set * @return the dsl builder */ default AdvancedInfluxDbEndpointBuilder lazyStartProducer(boolean lazyStartProducer) { doSetProperty("lazyStartProducer", lazyStartProducer); return this; } /** * Whether the producer should be started lazy (on the first message). * By starting lazy you can use this to allow CamelContext and routes to * startup in situations where a producer may otherwise fail during * starting and cause the route to fail being started. By deferring this * startup to be lazy then the startup failure can be handled during * routing messages via Camel's routing error handlers. Beware that when * the first message is processed then creating and starting the * producer may take a little time and prolong the total processing time * of the processing. * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: producer (advanced) * * @param lazyStartProducer the value to set * @return the dsl builder */ default AdvancedInfluxDbEndpointBuilder lazyStartProducer(String lazyStartProducer) { doSetProperty("lazyStartProducer", lazyStartProducer); return this; } } public
AdvancedInfluxDbEndpointBuilder
java
quarkusio__quarkus
core/devmode-spi/src/main/java/io/quarkus/dev/appstate/ApplicationStateNotification.java
{ "start": 2594, "end": 2665 }
enum ____ { INITIAL, STARTED, STOPPED } }
State
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/apidiff/Java8ApiCheckerTest.java
{ "start": 2479, "end": 2904 }
class ____ { void f(CRC32 c, byte[] b) { // BUG: Diagnostic contains: Checksum#update(byte[]) is not available c.update(b); } } """) .doTest(); } @Test public void checksumNegative() { compilationHelper .addSourceLines( "Test.java", """ import java.util.zip.CRC32;
Test
java
elastic__elasticsearch
server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java
{ "start": 2679, "end": 2895 }
class ____ extends ESIntegTestCase { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Arrays.asList(CustomScriptPlugin.class); } public static
RandomScoreFunctionIT
java
spring-projects__spring-framework
spring-web/src/test/java/org/springframework/web/server/adapter/WebHttpHandlerBuilderTests.java
{ "start": 10341, "end": 10814 }
class ____ { @Bean @Order(2) public WebExceptionHandler exceptionHandlerA() { return (exchange, ex) -> writeToResponse(exchange, "ExceptionHandlerA"); } @Bean @Order(1) public WebExceptionHandler exceptionHandlerB() { return (exchange, ex) -> writeToResponse(exchange, "ExceptionHandlerB"); } @Bean public WebHandler webHandler() { return exchange -> Mono.error(new Exception()); } } @Configuration static
OrderedExceptionHandlerBeanConfig
java
junit-team__junit5
junit-jupiter-engine/src/main/java/org/junit/jupiter/engine/extension/TestInfoParameterResolver.java
{ "start": 879, "end": 1484 }
class ____ implements ParameterResolver { @Override public ExtensionContextScope getTestInstantiationExtensionContextScope(ExtensionContext rootContext) { return ExtensionContextScope.TEST_METHOD; } @Override public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) { return (parameterContext.getParameter().getType() == TestInfo.class); } @Override public TestInfo resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) { return new DefaultTestInfo(extensionContext); } private static
TestInfoParameterResolver
java
micronaut-projects__micronaut-core
http/src/test/groovy/io/micronaut/http/filter/LambdaExecutable.java
{ "start": 671, "end": 1735 }
class ____ implements ExecutableMethod<Object, Object> { private final Closure<?> closure; private final Argument<?>[] arguments; private final ReturnType<Object> returnType; public LambdaExecutable(Closure<?> closure, Argument<?>[] arguments, ReturnType<Object> returnType) { this.closure = closure; this.arguments = arguments; this.returnType = returnType; } @Override public Class<Object> getDeclaringType() { return Object.class; } @Override public String getMethodName() { throw new UnsupportedOperationException(); } @Override public Argument<?>[] getArguments() { return arguments; } @Override public Method getTargetMethod() { throw new UnsupportedOperationException(); } @Override public ReturnType<Object> getReturnType() { return returnType; } @Override public Object invoke(@Nullable Object instance, Object... arguments) { return closure.curry(arguments).call(); } }
LambdaExecutable
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/collection/spi/PersistentMap.java
{ "start": 12936, "end": 13352 }
class ____ extends AbstractValueDelayedOperation { private final K index; protected AbstractMapValueDelayedOperation(K index, E addedValue, E orphan) { super( addedValue, orphan ); this.index = index; } protected final K getIndex() { return index; } @Override public Object getAddedEntry() { return Map.entry( getIndex(), getAddedInstance() ); } } final
AbstractMapValueDelayedOperation
java
spring-projects__spring-framework
spring-webflux/src/main/java/org/springframework/web/reactive/result/view/DefaultRenderingBuilder.java
{ "start": 1109, "end": 3316 }
class ____ implements Rendering.RedirectBuilder { private final Object view; private @Nullable Model model; private @Nullable HttpStatusCode status; private @Nullable HttpHeaders headers; DefaultRenderingBuilder(Object view) { this.view = view; } @Override public DefaultRenderingBuilder modelAttribute(String name, Object value) { initModel().addAttribute(name, value); return this; } @Override public DefaultRenderingBuilder modelAttribute(Object value) { initModel().addAttribute(value); return this; } @Override public DefaultRenderingBuilder modelAttributes(Object... values) { initModel().addAllAttributes(Arrays.asList(values)); return this; } @Override public DefaultRenderingBuilder model(Map<String, ?> map) { initModel().addAllAttributes(map); return this; } private Model initModel() { if (this.model == null) { this.model = new ExtendedModelMap(); } return this.model; } @Override public DefaultRenderingBuilder status(HttpStatusCode status) { if (this.view instanceof RedirectView redirectView) { redirectView.setStatusCode(status); } else { this.status = status; } return this; } @Override public DefaultRenderingBuilder header(String headerName, String... 
headerValues) { initHeaders().put(headerName, Arrays.asList(headerValues)); return this; } @Override public DefaultRenderingBuilder headers(HttpHeaders headers) { initHeaders().putAll(headers); return this; } private HttpHeaders initHeaders() { if (this.headers == null) { this.headers = new HttpHeaders(); } return this.headers; } @Override public Rendering.RedirectBuilder contextRelative(boolean contextRelative) { getRedirectView().setContextRelative(contextRelative); return this; } @Override public Rendering.RedirectBuilder propagateQuery(boolean propagate) { getRedirectView().setPropagateQuery(propagate); return this; } private RedirectView getRedirectView() { Assert.isInstanceOf(RedirectView.class, this.view); return (RedirectView) this.view; } @Override public Rendering build() { return new DefaultRendering(this.view, this.model, this.status, this.headers); } }
DefaultRenderingBuilder
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/util/StreamUtils.java
{ "start": 1464, "end": 8207 }
class ____ { /** * The default buffer size used when copying bytes. */ public static final int BUFFER_SIZE = 8192; private static final byte[] EMPTY_CONTENT = new byte[0]; /** * Copy the contents of the given InputStream into a new byte array. * <p>Leaves the stream open when done. * @param in the stream to copy from (may be {@code null} or empty) * @return the new byte array that has been copied to (possibly empty) * @throws IOException in case of I/O errors */ public static byte[] copyToByteArray(@Nullable InputStream in) throws IOException { if (in == null) { return EMPTY_CONTENT; } return in.readAllBytes(); } /** * Copy the contents of the given InputStream into a String. * <p>Leaves the stream open when done. * @param in the InputStream to copy from (may be {@code null} or empty) * @param charset the {@link Charset} to use to decode the bytes * @return the String that has been copied to (possibly empty) * @throws IOException in case of I/O errors */ public static String copyToString(@Nullable InputStream in, Charset charset) throws IOException { if (in == null) { return ""; } StringBuilder out = new StringBuilder(); InputStreamReader reader = new InputStreamReader(in, charset); char[] buffer = new char[BUFFER_SIZE]; int charsRead; while ((charsRead = reader.read(buffer)) != -1) { out.append(buffer, 0, charsRead); } return out.toString(); } /** * Copy the contents of the given {@link ByteArrayOutputStream} into a {@link String}. * <p>This is a more effective equivalent of {@code new String(baos.toByteArray(), charset)}. 
* @param baos the {@code ByteArrayOutputStream} to be copied into a String * @param charset the {@link Charset} to use to decode the bytes * @return the String that has been copied to (possibly empty) * @since 5.2.6 */ public static String copyToString(ByteArrayOutputStream baos, Charset charset) { Assert.notNull(baos, "No ByteArrayOutputStream specified"); Assert.notNull(charset, "No Charset specified"); return baos.toString(charset); } /** * Copy the contents of the given byte array to the given OutputStream. * <p>Leaves the stream open when done. * @param in the byte array to copy from * @param out the OutputStream to copy to * @throws IOException in case of I/O errors */ public static void copy(byte[] in, OutputStream out) throws IOException { Assert.notNull(in, "No input byte array specified"); Assert.notNull(out, "No OutputStream specified"); out.write(in); out.flush(); } /** * Copy the contents of the given String to the given OutputStream. * <p>Leaves the stream open when done. * @param in the String to copy from * @param charset the Charset * @param out the OutputStream to copy to * @throws IOException in case of I/O errors */ public static void copy(String in, Charset charset, OutputStream out) throws IOException { Assert.notNull(in, "No input String specified"); Assert.notNull(charset, "No Charset specified"); Assert.notNull(out, "No OutputStream specified"); out.write(in.getBytes(charset)); out.flush(); } /** * Copy the contents of the given InputStream to the given OutputStream. * <p>Leaves both streams open when done. 
* @param in the InputStream to copy from * @param out the OutputStream to copy to * @return the number of bytes copied * @throws IOException in case of I/O errors */ public static int copy(InputStream in, OutputStream out) throws IOException { Assert.notNull(in, "No InputStream specified"); Assert.notNull(out, "No OutputStream specified"); int count = (int) in.transferTo(out); out.flush(); return count; } /** * Copy a range of content of the given InputStream to the given OutputStream. * <p>If the specified range exceeds the length of the InputStream, this copies * up to the end of the stream and returns the actual number of copied bytes. * <p>Leaves both streams open when done. * @param in the InputStream to copy from * @param out the OutputStream to copy to * @param start the position to start copying from * @param end the position to end copying * @return the number of bytes copied * @throws IOException in case of I/O errors * @since 4.3 */ public static long copyRange(InputStream in, OutputStream out, long start, long end) throws IOException { Assert.notNull(in, "No InputStream specified"); Assert.notNull(out, "No OutputStream specified"); long skipped = in.skip(start); if (skipped < start) { throw new IOException("Skipped only " + skipped + " bytes out of " + start + " required"); } long bytesToCopy = end - start + 1; byte[] buffer = new byte[(int) Math.min(StreamUtils.BUFFER_SIZE, bytesToCopy)]; while (bytesToCopy > 0) { int bytesRead = (bytesToCopy < buffer.length ? in.read(buffer, 0, (int) bytesToCopy) : in.read(buffer)); if (bytesRead == -1) { break; } out.write(buffer, 0, bytesRead); bytesToCopy -= bytesRead; } return (end - start + 1 - bytesToCopy); } /** * Drain the remaining content of the given {@link InputStream}. * <p>Leaves the {@code InputStream} open when done. 
* @param in the {@code InputStream} to drain * @return the number of bytes read, or {@code 0} if the supplied * {@code InputStream} is {@code null} or empty * @throws IOException in case of I/O errors * @since 4.3 */ public static int drain(@Nullable InputStream in) throws IOException { if (in == null) { return 0; } return (int) in.transferTo(OutputStream.nullOutputStream()); } /** * Return an efficient empty {@link InputStream}. * @return an InputStream which contains no bytes * @since 4.2.2 * @deprecated as of 6.0 in favor of {@link InputStream#nullInputStream()} */ @Deprecated(since = "6.0") public static InputStream emptyInput() { return InputStream.nullInputStream(); } /** * Return a variant of the given {@link InputStream} where calling * {@link InputStream#close() close()} has no effect. * @param in the InputStream to decorate * @return a version of the InputStream that ignores calls to close */ public static InputStream nonClosing(InputStream in) { Assert.notNull(in, "No InputStream specified"); return new NonClosingInputStream(in); } /** * Return a variant of the given {@link OutputStream} where calling * {@link OutputStream#close() close()} has no effect. * @param out the OutputStream to decorate * @return a version of the OutputStream that ignores calls to close */ public static OutputStream nonClosing(OutputStream out) { Assert.notNull(out, "No OutputStream specified"); return new NonClosingOutputStream(out); } private static final
StreamUtils
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/bugs/_1828/Person.java
{ "start": 198, "end": 622 }
/**
 * Simple mutable person model carrying a name and a complete address.
 * Test fixture (MapStruct issue 1828 — nested-property mapping).
 */
class Person {

    // Package-visible by design; sibling code in this package may read it directly.
    String name;

    private CompleteAddress completeAddress;

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public CompleteAddress getCompleteAddress() {
        return this.completeAddress;
    }

    public void setCompleteAddress(CompleteAddress completeAddress) {
        this.completeAddress = completeAddress;
    }
}
Person