language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
netty__netty
codec-http2/src/main/java/io/netty/handler/codec/http2/DelegatingDecompressorFrameListener.java
{ "start": 17115, "end": 23488 }
class ____ { private final EmbeddedChannel decompressor; private int compressed; private int decompressed; private Http2Stream stream; private int padding; private boolean dataDecompressed; private ChannelHandlerContext targetCtx; Http2Decompressor(EmbeddedChannel decompressor, Http2Connection connection, Http2FrameListener listener) { this.decompressor = decompressor; this.decompressor.pipeline().addLast(new ChannelInboundHandlerAdapter() { @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { ByteBuf buf = (ByteBuf) msg; if (!buf.isReadable()) { buf.release(); return; } incrementDecompressedBytes(buf.readableBytes()); // Immediately return the bytes back to the flow controller. ConsumedBytesConverter will convert // from the decompressed amount which the user knows about to the compressed amount which flow // control knows about. connection.local().flowController().consumeBytes(stream, listener.onDataRead(targetCtx, stream.id(), buf, padding, false)); padding = 0; // Padding is only communicated once on the first iteration. buf.release(); dataDecompressed = true; } @Override public void channelInactive(ChannelHandlerContext ctx) throws Exception { listener.onDataRead(targetCtx, stream.id(), Unpooled.EMPTY_BUFFER, padding, true); } }); } /** * Release remaining content from the {@link EmbeddedChannel}. 
*/ void cleanup() { decompressor.finishAndReleaseAll(); } int decompress(ChannelHandlerContext ctx, Http2Stream stream, ByteBuf data, int padding, boolean endOfStream) throws Http2Exception { final int compressedBytes = data.readableBytes() + padding; incrementCompressedBytes(compressedBytes); try { this.stream = stream; this.padding = padding; this.dataDecompressed = false; this.targetCtx = ctx; // call retain here as it will call release after its written to the channel decompressor.writeInbound(data.retain()); if (endOfStream) { decompressor.finish(); if (!dataDecompressed) { // No new decompressed data was extracted from the compressed data. This means the application // could not be provided with data and thus could not return how many bytes were processed. // We will assume there is more data coming which will complete the decompression block. // To allow for more data we return all bytes to the flow control window (so the peer can // send more data). incrementDecompressedBytes(compressedBytes); return compressedBytes; } } // We consume bytes each time we call the listener to ensure if multiple frames are decompressed // that the bytes are accounted for immediately. Otherwise the user may see an inconsistent state of // flow control. return 0; } catch (Throwable t) { // Http2Exception might be thrown by writeInbound(...) or finish(). if (t instanceof Http2Exception) { throw (Http2Exception) t; } throw streamError(stream.id(), INTERNAL_ERROR, t, "Decompressor error detected while delegating data read on streamId %d", stream.id()); } } /** * Increment the number of bytes received prior to doing any decompression. */ private void incrementCompressedBytes(int delta) { assert delta >= 0; compressed += delta; } /** * Increment the number of bytes after the decompression process. 
*/ private void incrementDecompressedBytes(int delta) { assert delta >= 0; decompressed += delta; } /** * Determines the ratio between {@code numBytes} and {@link Http2Decompressor#decompressed}. * This ratio is used to decrement {@link Http2Decompressor#decompressed} and * {@link Http2Decompressor#compressed}. * @param streamId the stream ID * @param decompressedBytes The number of post-decompressed bytes to return to flow control * @return The number of pre-decompressed bytes that have been consumed. */ int consumeBytes(int streamId, int decompressedBytes) throws Http2Exception { checkPositiveOrZero(decompressedBytes, "decompressedBytes"); if (decompressed - decompressedBytes < 0) { throw streamError(streamId, INTERNAL_ERROR, "Attempting to return too many bytes for stream %d. decompressed: %d " + "decompressedBytes: %d", streamId, decompressed, decompressedBytes); } double consumedRatio = decompressedBytes / (double) decompressed; int consumedCompressed = Math.min(compressed, (int) Math.ceil(compressed * consumedRatio)); if (compressed - consumedCompressed < 0) { throw streamError(streamId, INTERNAL_ERROR, "overflow when converting decompressed bytes to compressed bytes for stream %d." + "decompressedBytes: %d decompressed: %d compressed: %d consumedCompressed: %d", streamId, decompressedBytes, decompressed, compressed, consumedCompressed); } decompressed -= decompressedBytes; compressed -= consumedCompressed; return consumedCompressed; } } }
Http2Decompressor
java
netty__netty
handler/src/main/java/io/netty/handler/ssl/JdkAlpnSslEngine.java
{ "start": 1226, "end": 1391 }
class ____ extends JdkSslEngine { private final ProtocolSelectionListener selectionListener; private final AlpnSelector alpnSelector; final
JdkAlpnSslEngine
java
hibernate__hibernate-orm
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/interfaces/hbm/propertiesAudited/AbstractPropertiesAuditedTest.java
{ "start": 745, "end": 3335 }
class ____ { private long ai_id; private long nai_id; private static int NUMERITO = 555; @BeforeClassTemplate public void initData(EntityManagerFactoryScope scope) { scope.inTransaction( em -> { AuditedImplementor ai = new AuditedImplementor(); ai.setData( "La data" ); ai.setAuditedImplementorData( "audited implementor data" ); ai.setNumerito( NUMERITO ); NonAuditedImplementor nai = new NonAuditedImplementor(); nai.setData( "info" ); nai.setNonAuditedImplementorData( "sttring" ); nai.setNumerito( NUMERITO ); em.persist( ai ); em.persist( nai ); ai_id = ai.getId(); nai_id = nai.getId(); } ); } @Test public void testRetrieveAudited(EntityManagerFactoryScope scope) { scope.inTransaction( em -> { // levanto las versiones actuales AuditedImplementor ai = em.find( AuditedImplementor.class, ai_id ); assertNotNull( ai ); SimpleInterface si = em.find( SimpleInterface.class, ai_id ); assertNotNull( si ); final var auditReader = AuditReaderFactory.get( em ); // levanto las de la revisión 1, ninguna debe ser null AuditedImplementor ai_rev1 = auditReader.find( AuditedImplementor.class, ai_id, 1 ); assertNotNull( ai_rev1 ); SimpleInterface si_rev1 = auditReader.find( SimpleInterface.class, ai_id, 1 ); assertNotNull( si_rev1 ); // data de las actuales no debe ser null assertNotNull( ai.getData() ); assertNotNull( si.getData() ); // data de las revisiones No está auditada assertNull( ai_rev1.getData() ); assertNull( si_rev1.getData() ); // numerito de las revisiones está auditada, debe ser igual a NUMERITO assertEquals( NUMERITO, ai_rev1.getNumerito() ); assertEquals( NUMERITO, si_rev1.getNumerito() ); } ); } @Test public void testRetrieveNonAudited(EntityManagerFactoryScope scope) { scope.inTransaction( em -> { // levanto las versiones actuales NonAuditedImplementor nai = em.find( NonAuditedImplementor.class, nai_id ); assertNotNull( nai ); SimpleInterface si = em.find( SimpleInterface.class, nai_id ); assertNotNull( si ); assertEquals( nai.getData(), si.getData() ); final var 
auditReader = AuditReaderFactory.get( em ); // levanto la revision assertThrows( NotAuditedException.class, () -> { auditReader.find( NonAuditedImplementor.class, nai_id, 1 ); } ); // levanto la revision que no es auditable pero con la interfaz, el // resultado debe ser null SimpleInterface si_rev1 = auditReader.find( SimpleInterface.class, nai_id, 1 ); assertNull( si_rev1 ); } ); } }
AbstractPropertiesAuditedTest
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/dialect/function/json/MariaDBJsonQueryFunction.java
{ "start": 2554, "end": 3588 }
enum ____ { NONE, WRAP, TRIM } private static DecorationMode determineDecorationMode( JsonQueryArguments arguments, SqlAstTranslator<?> walker, JsonQueryWrapMode wrapMode) { if ( wrapMode == JsonQueryWrapMode.WITH_WRAPPER ) { final String jsonPath = walker.getLiteralValue( arguments.jsonPath() ); if ( jsonPath.indexOf( '*' ) != -1 ) { // If the JSON path contains a star, MySQL will always wrap the result return DecorationMode.NONE; } else { // Otherwise we have to wrap the result manually return DecorationMode.WRAP; } } else if ( wrapMode == JsonQueryWrapMode.WITHOUT_WRAPPER ) { final String jsonPath = walker.getLiteralValue( arguments.jsonPath() ); if ( jsonPath.indexOf( '*' ) != -1 ) { // If the JSON path contains a star, MySQL will always wrap the result, // so we have to trim the brackets return DecorationMode.TRIM; } else { // Nothing to do return DecorationMode.NONE; } } else { return DecorationMode.NONE; } } }
DecorationMode
java
apache__flink
flink-libraries/flink-state-processing-api/src/main/java/org/apache/flink/state/table/module/StateModule.java
{ "start": 1668, "end": 4743 }
class ____ implements Module { private static final Logger LOG = LoggerFactory.getLogger(StateModule.class); public static final String IDENTIFIER = "state"; public static final StateModule INSTANCE = new StateModule(); private final Map<String, BuiltInFunctionDefinition> normalizedFunctions; private final Set<String> functionNamesWithInternal; private final Set<String> functionNamesWithoutInternal; private StateModule() { final List<BuiltInFunctionDefinition> definitions = new ArrayList<>(); definitions.add(SavepointMetadataTableFunction.SAVEPOINT_METADATA); ServiceLoader.load(DynamicBuiltInFunctionDefinitionFactory.class) .iterator() .forEachRemaining( f -> { if (f.factoryIdentifier().startsWith(IDENTIFIER + ".")) { definitions.addAll(f.getBuiltInFunctionDefinitions()); } }); checkDuplicatedFunctions(definitions); this.normalizedFunctions = definitions.stream() .collect( Collectors.toMap( f -> f.getName().toUpperCase(Locale.ROOT), Function.identity())); this.functionNamesWithInternal = definitions.stream() .map(BuiltInFunctionDefinition::getName) .collect(Collectors.toSet()); this.functionNamesWithoutInternal = definitions.stream() .filter(f -> !f.isInternal()) .map(BuiltInFunctionDefinition::getName) .collect(Collectors.toSet()); } @VisibleForTesting static void checkDuplicatedFunctions(List<BuiltInFunctionDefinition> definitions) { Set<String> seen = new HashSet<>(); Set<String> duplicates = new HashSet<>(); for (BuiltInFunctionDefinition definition : definitions) { String name = definition.getName(); if (!seen.add(name)) { duplicates.add(name); } } if (!duplicates.isEmpty()) { String error = "Duplicate function names found: " + String.join(",", duplicates); LOG.error(error); throw new IllegalStateException(error); } } @Override public Set<String> listFunctions() { return listFunctions(false); } @Override public Set<String> listFunctions(boolean includeHiddenFunctions) { if (includeHiddenFunctions) { return functionNamesWithInternal; } else { return 
functionNamesWithoutInternal; } } @Override public Optional<FunctionDefinition> getFunctionDefinition(String name) { final String normalizedName = name.toUpperCase(Locale.ROOT); return Optional.ofNullable(normalizedFunctions.get(normalizedName)); } }
StateModule
java
dropwizard__dropwizard
dropwizard-testing/src/main/java/io/dropwizard/testing/junit5/DropwizardExtensionsSupport.java
{ "start": 656, "end": 722 }
class ____ and executes before and after actions. */ public
instances
java
spring-projects__spring-framework
spring-test/src/test/java/org/springframework/test/context/event/EventPublishingTestExecutionListenerIntegrationTests.java
{ "start": 11009, "end": 11646 }
class ____ { final TestExecutionListener listener; AsyncTestEventComponent(TestExecutionListener listener) { this.listener = listener; } @BeforeTestMethod("event.testContext.testMethod.name == 'testWithFailingAsyncEventListener'") @Async public void beforeTestMethodWithAsyncFailure(BeforeTestMethodEvent event) throws Exception { this.listener.beforeTestMethod(event.getSource()); throw new RuntimeException(String.format("Asynchronous exception for test method [%s] in thread [%s]", event.getTestContext().getTestMethod().getName(), Thread.currentThread().getName())); } } static
AsyncTestEventComponent
java
playframework__playframework
documentation/manual/working/commonGuide/pekko/code/javaguide/pekko/components/ComponentsWithClusterSharding.java
{ "start": 495, "end": 861 }
class ____ extends BuiltInComponentsFromContext implements ClusterShardingComponents, AssetsComponents, HttpFiltersComponents { public ComponentsWithClusterSharding(ApplicationLoader.Context context) { super(context); } @Override public Router router() { return Router.empty(); } } // #cluster-compile-time-injection
ComponentsWithClusterSharding
java
apache__camel
components/camel-aws/camel-aws2-ddb/src/main/java/org/apache/camel/component/aws2/ddb/UpdateItemCommand.java
{ "start": 1201, "end": 2021 }
class ____ extends AbstractDdbCommand { public UpdateItemCommand(DynamoDbClient ddbClient, Ddb2Configuration configuration, Exchange exchange) { super(ddbClient, configuration, exchange); } @Override public void execute() { UpdateItemResponse result = ddbClient.updateItem(UpdateItemRequest.builder().tableName(determineTableName()) .key(determineKey()).attributeUpdates(determineUpdateValues()) .expected(determineUpdateCondition()).returnValues(determineReturnValues()).build()); addAttributesToResult(result.attributes()); } @SuppressWarnings("unchecked") private Map<String, AttributeValueUpdate> determineUpdateValues() { return exchange.getIn().getHeader(Ddb2Constants.UPDATE_VALUES, Map.class); } }
UpdateItemCommand
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/rpc/RpcEndpointTest.java
{ "start": 8216, "end": 18394 }
class ____ extends RpcEndpoint implements RunningStateTestingEndpointGateway { private final CountDownLatch onStopCalled; private final CompletableFuture<Void> stopFuture; RunningStateTestingEndpoint(RpcService rpcService, CompletableFuture<Void> stopFuture) { super(rpcService); this.stopFuture = stopFuture; this.onStopCalled = new CountDownLatch(1); } @Override public CompletableFuture<Void> onStop() { onStopCalled.countDown(); return stopFuture; } CompletableFuture<Void> closeAndWaitUntilOnStopCalled() throws InterruptedException { CompletableFuture<Void> terminationFuture = closeAsync(); onStopCalled.await(); return terminationFuture; } public CompletableFuture<Boolean> queryIsRunningFlag() { return CompletableFuture.completedFuture(isRunning()); } } /** Tests executing the runnable in the main thread of the underlying RPC endpoint. */ @Test void testExecute() throws InterruptedException, ExecutionException, TimeoutException { final RpcEndpoint endpoint = new BaseEndpoint(rpcService); final CompletableFuture<Void> asyncExecutionFuture = new CompletableFuture<>(); try { endpoint.start(); endpoint.getMainThreadExecutor() .execute( () -> { endpoint.validateRunsInMainThread(); asyncExecutionFuture.complete(null); }); asyncExecutionFuture.get(); } finally { RpcUtils.terminateRpcEndpoint(endpoint); endpoint.validateResourceClosed(); } } @Test void testScheduleRunnableWithDelayInMilliseconds() throws Exception { testScheduleWithDelay( (mainThreadExecutor, expectedDelay) -> mainThreadExecutor.schedule( () -> {}, expectedDelay.toMillis(), TimeUnit.MILLISECONDS)); } @Test void testScheduleRunnableWithDelayInSeconds() throws Exception { testScheduleWithDelay( (mainThreadExecutor, expectedDelay) -> mainThreadExecutor.schedule( () -> {}, expectedDelay.toMillis() / 1000, TimeUnit.SECONDS)); } @Test void testScheduleRunnableAfterClose() throws Exception { testScheduleAfterClose( (mainThreadExecutor, expectedDelay) -> mainThreadExecutor.schedule( () -> {}, 
expectedDelay.toMillis() / 1000, TimeUnit.SECONDS)); } @Test void testCancelScheduledRunnable() throws Exception { testCancelScheduledTask( (mainThreadExecutor, future) -> { final Duration delayDuration = Duration.ofMillis(2); return mainThreadExecutor.schedule( () -> { future.complete(null); }, delayDuration.toMillis(), TimeUnit.MILLISECONDS); }); } @Test void testScheduleCallableWithDelayInMilliseconds() throws Exception { testScheduleWithDelay( (mainThreadExecutor, expectedDelay) -> mainThreadExecutor.schedule( () -> 1, expectedDelay.toMillis(), TimeUnit.MILLISECONDS)); } @Test void testScheduleCallableWithDelayInSeconds() throws Exception { testScheduleWithDelay( (mainThreadExecutor, expectedDelay) -> mainThreadExecutor.schedule( () -> 1, expectedDelay.toMillis() / 1000, TimeUnit.SECONDS)); } @Test void testScheduleCallableAfterClose() throws Exception { testScheduleAfterClose( (mainThreadExecutor, expectedDelay) -> mainThreadExecutor.schedule( () -> 1, expectedDelay.toMillis() / 1000, TimeUnit.SECONDS)); } @Test void testCancelScheduledCallable() { testCancelScheduledTask( (mainThreadExecutor, future) -> { final Duration delayDuration = Duration.ofMillis(2); return mainThreadExecutor.schedule( () -> { future.complete(null); return null; }, delayDuration.toMillis(), TimeUnit.MILLISECONDS); }); } private static void testScheduleWithDelay( BiConsumer<RpcEndpoint.MainThreadExecutor, Duration> scheduler) throws Exception { final CompletableFuture<Void> taskCompletedFuture = new CompletableFuture<>(); final String endpointId = "foobar"; final MainThreadExecutable mainThreadExecutable = new TestMainThreadExecutable((runnable) -> taskCompletedFuture.complete(null)); final RpcEndpoint.MainThreadExecutor mainThreadExecutor = new RpcEndpoint.MainThreadExecutor(mainThreadExecutable, () -> {}, endpointId); final Duration expectedDelay = Duration.ofSeconds(1); scheduler.accept(mainThreadExecutor, expectedDelay); taskCompletedFuture.get(); mainThreadExecutor.close(); } 
private static void testScheduleAfterClose( BiFunction<RpcEndpoint.MainThreadExecutor, Duration, ScheduledFuture<?>> scheduler) { final CompletableFuture<Void> taskCompletedFuture = new CompletableFuture<>(); final String endpointId = "foobar"; final MainThreadExecutable mainThreadExecutable = new TestMainThreadExecutable((runnable) -> taskCompletedFuture.complete(null)); final RpcEndpoint.MainThreadExecutor mainThreadExecutor = new RpcEndpoint.MainThreadExecutor(mainThreadExecutable, () -> {}, endpointId); mainThreadExecutor.close(); final Duration expectedDelay = Duration.ofSeconds(0); ScheduledFuture<?> future = scheduler.apply(mainThreadExecutor, expectedDelay); assertThat(taskCompletedFuture).isNotDone(); assertThat((Future<?>) future).isNotDone(); } private static void testCancelScheduledTask( BiFunction<RpcEndpoint.MainThreadExecutor, CompletableFuture<Void>, ScheduledFuture<?>> scheduler) { final MainThreadExecutable mainThreadExecutable = new TestMainThreadExecutable(Runnable::run); final ManuallyTriggeredScheduledExecutorService manuallyTriggeredScheduledExecutorService = new ManuallyTriggeredScheduledExecutorService(); final RpcEndpoint.MainThreadExecutor mainThreadExecutor = new RpcEndpoint.MainThreadExecutor( mainThreadExecutable, () -> {}, manuallyTriggeredScheduledExecutorService); final CompletableFuture<Void> actionFuture = new CompletableFuture<>(); ScheduledFuture<?> scheduledFuture = scheduler.apply(mainThreadExecutor, actionFuture); scheduledFuture.cancel(true); manuallyTriggeredScheduledExecutorService.triggerAllNonPeriodicTasks(); assertThat((Future<?>) scheduledFuture).isCancelled(); assertThat(actionFuture).isNotDone(); mainThreadExecutor.close(); } /** * Tests executing the callable in the main thread of the underlying RPC service, returning a * future for the result of the callable. If the callable is not completed within the given * timeout, then the future will be failed with a TimeoutException. 
This schedule method is * called directly from RpcEndpoint, MainThreadExecutor do not support this method. */ @Test void testCallAsync() throws InterruptedException, ExecutionException, TimeoutException { final RpcEndpoint endpoint = new BaseEndpoint(rpcService); final Integer expectedInteger = 12345; try { endpoint.start(); final CompletableFuture<Integer> integerFuture = endpoint.callAsync( () -> { endpoint.validateRunsInMainThread(); return expectedInteger; }, Duration.ofSeconds(10L)); assertThat(integerFuture.get()).isEqualTo(expectedInteger); } finally { RpcUtils.terminateRpcEndpoint(endpoint); endpoint.validateResourceClosed(); } } /** * Make the callable sleep some time more than specified timeout, so TimeoutException is * expected. */ @Test void testCallAsyncTimeout() throws InterruptedException, ExecutionException, TimeoutException { final RpcEndpoint endpoint = new BaseEndpoint(rpcService); final Duration timeout = Duration.ofMillis(100); CountDownLatch latch = new CountDownLatch(1); try { endpoint.start(); final CompletableFuture<Throwable> throwableFuture = endpoint.callAsync( () -> { endpoint.validateRunsInMainThread(); latch.await(); return 12345; }, timeout) .handle((ignore, throwable) -> throwable); final Throwable throwable = throwableFuture.get(); assertThat(throwable).isNotNull().isInstanceOf(TimeoutException.class); } finally { latch.countDown(); RpcUtils.terminateRpcEndpoint(endpoint); endpoint.validateResourceClosed(); } } private static
RunningStateTestingEndpoint
java
apache__dubbo
dubbo-common/src/main/java/org/apache/dubbo/common/convert/StringToDoubleConverter.java
{ "start": 1044, "end": 1325 }
class ____ implements StringConverter<Double> { @Override public Double convert(String source) { return isNotEmpty(source) ? valueOf(source) : null; } @Override public int getPriority() { return NORMAL_PRIORITY + 3; } }
StringToDoubleConverter
java
apache__dubbo
dubbo-common/src/test/java/org/apache/dubbo/common/extension/ExtensionLoader_Adaptive_Test.java
{ "start": 6278, "end": 7038 }
interface ____.apache.dubbo.common.extension.ext5.NoAdaptiveMethodExt"), containsString( "No adaptive method exist on extension org.apache.dubbo.common.extension.ext5.NoAdaptiveMethodExt, refuse to create the adaptive class"))); } // report same error when get is invoked for multiple times try { ExtensionLoader.getExtensionLoader(NoAdaptiveMethodExt.class).getAdaptiveExtension(); fail(); } catch (IllegalStateException expected) { assertThat( expected.getMessage(), allOf( containsString( "Can't create adaptive extension
org
java
spring-projects__spring-framework
spring-aop/src/main/java/org/springframework/aop/framework/adapter/AdvisorAdapterRegistry.java
{ "start": 968, "end": 2749 }
interface ____ { /** * Return an {@link Advisor} wrapping the given advice. * <p>Should by default at least support * {@link org.aopalliance.intercept.MethodInterceptor}, * {@link org.springframework.aop.MethodBeforeAdvice}, * {@link org.springframework.aop.AfterReturningAdvice}, * {@link org.springframework.aop.ThrowsAdvice}. * @param advice an object that should be an advice * @return an Advisor wrapping the given advice (never {@code null}; * if the advice parameter is an Advisor, it is to be returned as-is) * @throws UnknownAdviceTypeException if no registered advisor adapter * can wrap the supposed advice */ Advisor wrap(Object advice) throws UnknownAdviceTypeException; /** * Return an array of AOP Alliance MethodInterceptors to allow use of the * given Advisor in an interception-based framework. * <p>Don't worry about the pointcut associated with the {@link Advisor}, if it is * a {@link org.springframework.aop.PointcutAdvisor}: just return an interceptor. * @param advisor the Advisor to find an interceptor for * @return an array of MethodInterceptors to expose this Advisor's behavior * @throws UnknownAdviceTypeException if the Advisor type is * not understood by any registered AdvisorAdapter */ MethodInterceptor[] getInterceptors(Advisor advisor) throws UnknownAdviceTypeException; /** * Register the given {@link AdvisorAdapter}. Note that it is not necessary to register * adapters for an AOP Alliance Interceptors or Spring Advices: these must be * automatically recognized by an {@code AdvisorAdapterRegistry} implementation. * @param adapter an AdvisorAdapter that understands particular Advisor or Advice types */ void registerAdvisorAdapter(AdvisorAdapter adapter); }
AdvisorAdapterRegistry
java
elastic__elasticsearch
modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientsManagerTests.java
{ "start": 2826, "end": 23667 }
class ____ extends ESTestCase { private Map<ProjectId, AtomicInteger> s3SecretsIdGenerators; private List<String> clientNames; private Map<String, S3ClientSettings> clusterClientsSettings; private TestThreadPool threadPool; private ClusterService clusterService; private S3Service s3Service; private S3ClientsManager s3ClientsManager; @Override public void setUp() throws Exception { super.setUp(); s3SecretsIdGenerators = ConcurrentCollections.newConcurrentMap(); clientNames = IntStream.range(0, between(2, 5)).mapToObj(i -> randomIdentifier() + "_" + i).toList(); final Settings.Builder builder = Settings.builder(); final var mockSecureSettings = new MockSecureSettings(); clientNames.forEach(clientName -> { mockSecureSettings.setString("s3.client." + clientName + ".access_key", clientName + "_cluster_access_key"); mockSecureSettings.setString("s3.client." + clientName + ".secret_key", clientName + "_cluster_secret_key"); if (randomBoolean()) { builder.put("s3.client." + clientName + ".max_retries", between(1, 10)); } if (randomBoolean()) { builder.put("s3.client." + clientName + ".read_timeout", between(1, 99) + "s"); } if (randomBoolean()) { builder.put("s3.client." 
+ clientName + ".max_connections", between(1, 100)); } }); final Settings settings = builder.setSecureSettings(mockSecureSettings).build(); clusterClientsSettings = S3ClientSettings.load(settings); threadPool = new TestThreadPool(getTestName()); clusterService = ClusterServiceUtils.createClusterService(threadPool, settings); s3Service = new S3Service( mock(Environment.class), clusterService, TestProjectResolvers.allProjects(), // with multiple projects support mock(ResourceWatcherService.class), () -> Region.of("es-test-region") ); s3Service.refreshAndClearCache(S3ClientSettings.load(settings)); s3ClientsManager = s3Service.getS3ClientsManager(); assertThat(s3ClientsManager.getClusterClientsHolder().allClientSettings(), equalTo(clusterClientsSettings)); assertNotNull(s3ClientsManager.getPerProjectClientsHolders()); s3Service.start(); } @Override public void tearDown() throws Exception { super.tearDown(); s3Service.close(); clusterService.close(); threadPool.close(); assertTrue(s3ClientsManager.isManagerClosed()); s3ClientsManager.getPerProjectClientsHolders().forEach((projectId, clientsHolder) -> assertTrue(clientsHolder.isClosed())); assertTrue(s3ClientsManager.getClusterClientsHolder().isClosed()); } public void testDoesNotCreateClientWhenSecretsAreNotConfigured() { assertThat(s3ClientsManager.getPerProjectClientsHolders(), anEmptyMap()); final ProjectId projectId = randomUniqueProjectId(); // No project secrets at all ClusterServiceUtils.setState( clusterService, ClusterState.builder(clusterService.state()).putProjectMetadata(ProjectMetadata.builder(projectId)).build() ); assertThat(s3ClientsManager.getPerProjectClientsHolders(), anEmptyMap()); // Project secrets but no s3 credentials final var mockSecureSettings = new MockSecureSettings(); mockSecureSettings.setFile( Strings.join(randomList(1, 5, ESTestCase::randomIdentifier), "."), randomByteArrayOfLength(between(8, 20)) ); ClusterServiceUtils.setState( clusterService, 
ClusterState.builder(clusterService.state()) .putProjectMetadata( ProjectMetadata.builder(projectId) .putCustom(ProjectSecrets.TYPE, new ProjectSecrets(new SecureClusterStateSettings(mockSecureSettings))) ) .build() ); assertThat(s3ClientsManager.getPerProjectClientsHolders(), anEmptyMap()); } public void testClientsLifeCycleForSingleProject() throws Exception { final ProjectId projectId = randomUniqueProjectId(); final String clientName = randomFrom(clientNames); final String anotherClientName = randomValueOtherThan(clientName, () -> randomFrom(clientNames)); // Configure project secrets for one client assertClientNotFound(projectId, clientName); updateProjectInClusterState(projectId, newProjectClientsSecrets(projectId, clientName)); { assertProjectClientSettings(projectId, clientName); // Retrieve client for the 1st time final AmazonS3Reference initialClient = s3ClientsManager.client(projectId, createRepositoryMetadata(clientName)); assertClientCredentials(projectId, clientName, initialClient); // Client is cached when retrieved again assertThat(initialClient, sameInstance(s3ClientsManager.client(projectId, createRepositoryMetadata(clientName)))); // Client not configured cannot be accessed assertClientNotFound(projectId, anotherClientName); // Client should be released and recreated again on access s3ClientsManager.releaseCachedClients(projectId); final AmazonS3Reference clientUpdated = s3ClientsManager.client(projectId, createRepositoryMetadata(clientName)); assertThat(clientUpdated, not(sameInstance(initialClient))); clientUpdated.decRef(); // Release the initial client and all references should be cleared initialClient.decRef(); initialClient.decRef(); assertFalse(initialClient.hasReferences()); // Update client secrets should release and recreate the client updateProjectInClusterState(projectId, newProjectClientsSecrets(projectId, clientName, anotherClientName)); assertProjectClientSettings(projectId, clientName, anotherClientName); final AmazonS3Reference 
clientUpdateAgain = s3ClientsManager.client(projectId, createRepositoryMetadata(clientName)); assertThat(clientUpdateAgain, not(sameInstance(clientUpdated))); clientUpdateAgain.decRef(); // A different client for a different client name final AmazonS3Reference antherClient = s3ClientsManager.client(projectId, createRepositoryMetadata(anotherClientName)); assertClientCredentials(projectId, anotherClientName, antherClient); assertThat(antherClient, not(sameInstance(clientUpdateAgain))); antherClient.decRef(); } final var clientsHolder = s3ClientsManager.getPerProjectClientsHolders().get(projectId); // Remove project secrets or the entire project if (randomBoolean()) { updateProjectInClusterState(projectId, Map.of()); } else { removeProjectFromClusterState(projectId); } assertClientNotFound(projectId, clientName); final AtomicReference<Exception> exceptionRef = new AtomicReference<>(); assertBusy(() -> { assertTrue(clientsHolder.isClosed()); try (var client = clientsHolder.client(createRepositoryMetadata(randomFrom(clientName, anotherClientName)))) { fail("client should be closed"); // the cache is still being cleared out } catch (Exception e) { exceptionRef.compareAndSet(null, e); // the first exception must be expected and is checked below } }); final var e = exceptionRef.get(); assertThat(e, instanceOf(AlreadyClosedException.class)); assertThat(e.getMessage(), containsString("Project [" + projectId + "] clients holder is closed")); } public void testClientsForMultipleProjects() throws InterruptedException { final List<ProjectId> projectIds = randomList(2, 8, ESTestCase::randomUniqueProjectId); final List<Thread> threads = projectIds.stream().map(projectId -> new Thread(() -> { final int iterations = between(1, 3); for (var i = 0; i < iterations; i++) { final List<String> clientNames = randomNonEmptySubsetOf(this.clientNames); updateProjectInClusterState(projectId, newProjectClientsSecrets(projectId, clientNames.toArray(String[]::new))); 
assertProjectClientSettings(projectId, clientNames.toArray(String[]::new)); for (var clientName : shuffledList(clientNames)) { try (var clientRef = s3ClientsManager.client(projectId, createRepositoryMetadata(clientName))) { assertClientCredentials(projectId, clientName, clientRef); } } if (randomBoolean()) { final Map<String, AmazonS3Reference> previousClientRefs = clientNames.stream() .map(clientName -> Map.entry(clientName, s3ClientsManager.client(projectId, createRepositoryMetadata(clientName)))) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); s3ClientsManager.releaseCachedClients(projectId); previousClientRefs.forEach((clientName, previousClientRef) -> { final AmazonS3Reference currentClientRef = s3ClientsManager.client(projectId, createRepositoryMetadata(clientName)); assertThat(currentClientRef, not(sameInstance(previousClientRef))); assertClientCredentials(projectId, clientName, currentClientRef); currentClientRef.decRef(); previousClientRef.decRef(); }); } else if (randomBoolean()) { if (randomBoolean()) { updateProjectInClusterState(projectId, Map.of()); } else { removeProjectFromClusterState(projectId); } assertThat(s3ClientsManager.getPerProjectClientsHolders(), not(hasKey(projectId))); clientNames.forEach(clientName -> assertClientNotFound(projectId, clientName)); } } })).toList(); threads.forEach(Thread::start); for (var thread : threads) { assertTrue(thread.join(Duration.ofSeconds(10))); } } public void testClusterAndProjectClients() { final ProjectId projectId = randomUniqueProjectId(); final String clientName = randomFrom(clientNames); final boolean configureProjectClientsFirst = randomBoolean(); if (configureProjectClientsFirst) { updateProjectInClusterState(projectId, newProjectClientsSecrets(projectId, clientName)); } final var repositoryMetadata = new RepositoryMetadata( randomIdentifier(), "s3", Settings.builder().put("client", clientName).build() ); final AmazonS3Reference clusterClient = 
s3Service.client(projectIdForClusterClient(), repositoryMetadata); if (configureProjectClientsFirst == false) { assertThat(s3ClientsManager.getPerProjectClientsHolders(), anEmptyMap()); } clusterClient.decRef(); if (configureProjectClientsFirst == false) { updateProjectInClusterState(projectId, newProjectClientsSecrets(projectId, clientName)); } final AmazonS3Reference projectClient = s3Service.client(projectId, repositoryMetadata); assertThat(projectClient, not(sameInstance(clusterClient))); projectClient.decRef(); // Release the cluster client s3Service.onBlobStoreClose(projectIdForClusterClient()); assertFalse(clusterClient.hasReferences()); assertTrue(projectClient.hasReferences()); // Release the project client s3Service.onBlobStoreClose(projectId); assertFalse(projectClient.hasReferences()); } public void testClientsHolderAfterManagerClosed() { final ProjectId projectId = randomUniqueProjectId(); final String clientName = randomFrom(clientNames); s3Service.close(); assertTrue(s3ClientsManager.isManagerClosed()); // New holder can be added after the manager is closed, but no actual client can be created updateProjectInClusterState(projectId, newProjectClientsSecrets(projectId, clientName)); try (var clientsHolder = s3ClientsManager.getPerProjectClientsHolders().get(projectId)) { assertNotNull(clientsHolder); assertFalse(clientsHolder.isClosed()); final var e = expectThrows( AlreadyClosedException.class, () -> s3ClientsManager.client(projectId, createRepositoryMetadata(clientName)) ); assertThat(e.getMessage(), containsString("s3 clients manager is closed")); } } public void testProjectClientsDisabled() { final var clusterService = spy(this.clusterService); final S3Service s3ServiceWithNoProjectSupport = new S3Service( mock(Environment.class), clusterService, TestProjectResolvers.DEFAULT_PROJECT_ONLY, mock(ResourceWatcherService.class), () -> Region.of("es-test-region") ); 
s3ServiceWithNoProjectSupport.refreshAndClearCache(S3ClientSettings.load(clusterService.getSettings())); s3ServiceWithNoProjectSupport.start(); verify(clusterService, never()).addHighPriorityApplier(any()); assertNull(s3ServiceWithNoProjectSupport.getS3ClientsManager().getPerProjectClientsHolders()); // Cluster client still works final String clientName = randomFrom(clientNames); final var repositoryMetadata = new RepositoryMetadata( randomIdentifier(), "s3", Settings.builder().put("client", clientName).build() ); final AmazonS3Reference clientRef = s3ServiceWithNoProjectSupport.client(projectIdForClusterClient(), repositoryMetadata); clientRef.decRef(); s3ServiceWithNoProjectSupport.close(); assertFalse(clientRef.hasReferences()); } private ProjectId projectIdForClusterClient() { return randomBoolean() ? ProjectId.DEFAULT : null; } private void assertProjectClientSettings(ProjectId projectId, String... clientNames) { final var clientsHolder = s3ClientsManager.getPerProjectClientsHolders().get(projectId); assertNotNull(clientsHolder); final Map<String, S3ClientSettings> s3ClientSettingsMap = clientsHolder.allClientSettings(); assertThat(s3ClientSettingsMap.keySet(), containsInAnyOrder(clientNames)); for (var clientName : clientNames) { final S3ClientSettings projectClientSettings = s3ClientSettingsMap.get(clientName); final S3ClientSettings clusterClientSettings = clusterClientsSettings.get(clientName); assertNotNull(clusterClientSettings); // Picks up the correct project scoped credentials assertThat( projectClientSettings.credentials, equalTo( AwsBasicCredentials.create(projectClientAccessKey(projectId, clientName), projectClientSecretKey(projectId, clientName)) ) ); assertThat(projectClientSettings.credentials, not(equalTo(clusterClientSettings.credentials))); // Inherit setting override from the cluster client of the same name assertThat(projectClientSettings.maxRetries, equalTo(clusterClientSettings.maxRetries)); 
assertThat(projectClientSettings.maxConnections, equalTo(clusterClientSettings.maxConnections)); assertThat(projectClientSettings.readTimeoutMillis, equalTo(clusterClientSettings.readTimeoutMillis)); } } private void assertClientCredentials(ProjectId projectId, String clientName, AmazonS3Reference clientRef) { try { final AwsCredentialsIdentity awsCredentialsIdentity = clientRef.client() .serviceClientConfiguration() .credentialsProvider() .resolveIdentity() .get(); assertThat(awsCredentialsIdentity.accessKeyId(), equalTo(projectClientAccessKey(projectId, clientName))); assertThat(awsCredentialsIdentity.secretAccessKey(), equalTo(projectClientSecretKey(projectId, clientName))); } catch (InterruptedException | ExecutionException e) { fail(e, "unexpected exception"); } } private void assertClientNotFound(ProjectId projectId, String clientName) { final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> s3ClientsManager.client(projectId, createRepositoryMetadata(clientName)) ); assertThat( e.getMessage(), anyOf( containsString("no s3 client is configured for project [" + projectId + "]"), containsString("s3 client [" + clientName + "] does not exist for project [" + projectId + "]") ) ); } private void updateProjectInClusterState(ProjectId projectId, Map<String, String> projectClientSecrets) { final var mockSecureSettings = new MockSecureSettings(); projectClientSecrets.forEach((k, v) -> mockSecureSettings.setFile(k, v.getBytes(StandardCharsets.UTF_8))); // Sometimes add an unrelated project secret if (randomBoolean() && randomBoolean()) { mockSecureSettings.setFile( Strings.join(randomList(1, 5, ESTestCase::randomIdentifier), "."), randomByteArrayOfLength(between(8, 20)) ); } final var secureClusterStateSettings = new SecureClusterStateSettings(mockSecureSettings); synchronized (this) { final ClusterState initialState = clusterService.state(); final ProjectMetadata.Builder projectBuilder = initialState.metadata().hasProject(projectId) ? 
ProjectMetadata.builder(initialState.metadata().getProject(projectId)) : ProjectMetadata.builder(projectId); if (secureClusterStateSettings.getSettingNames().isEmpty() == false || projectBuilder.getCustom(ProjectSecrets.TYPE) != null || randomBoolean()) { projectBuilder.putCustom(ProjectSecrets.TYPE, new ProjectSecrets(secureClusterStateSettings)); } final ClusterState stateWithProject = ClusterState.builder(initialState).putProjectMetadata(projectBuilder).build(); ClusterServiceUtils.setState(clusterService, stateWithProject); } } private void removeProjectFromClusterState(ProjectId projectId) { synchronized (this) { final ClusterState initialState = clusterService.state(); final ClusterState stateWithoutProject = ClusterState.builder(initialState) .metadata(Metadata.builder(initialState.metadata()).removeProject(projectId)) .routingTable(GlobalRoutingTable.builder(initialState.globalRoutingTable()).removeProject(projectId).build()) .build(); ClusterServiceUtils.setState(clusterService, stateWithoutProject); } } private RepositoryMetadata createRepositoryMetadata(String clientName) { return new RepositoryMetadata("repo", S3Repository.TYPE, Settings.builder().put("client", clientName).build()); } private Map<String, String> newProjectClientsSecrets(ProjectId projectId, String... clientNames) { s3SecretsIdGenerators.computeIfAbsent(projectId, ignored -> new AtomicInteger(0)).incrementAndGet(); final Map<String, String> m = new HashMap<>(); Arrays.stream(clientNames).forEach(clientName -> { m.put("s3.client." + clientName + ".access_key", projectClientAccessKey(projectId, clientName)); m.put("s3.client." 
+ clientName + ".secret_key", projectClientSecretKey(projectId, clientName)); }); return Map.copyOf(m); } private String projectClientAccessKey(ProjectId projectId, String clientName) { return projectId + "_" + clientName + "_access_key_" + s3SecretsIdGenerators.get(projectId).get(); } private String projectClientSecretKey(ProjectId projectId, String clientName) { return projectId + "_" + clientName + "_secret_key_" + s3SecretsIdGenerators.get(projectId).get(); } }
S3ClientsManagerTests
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/graph/EntityGraphs.java
{ "start": 1125, "end": 14428 }
class ____ { /** * Create a new entity graph rooted at the given entity, without * needing a reference to the session or session factory. * * @param rootType The {@link EntityType} representing the root * entity of the graph * @return a new mutable {@link EntityGraph} * * @since 7.0 */ public static <T> EntityGraph<T> createGraph(EntityType<T> rootType) { return new RootGraphImpl<>( null, (EntityDomainType<T>) rootType ); } /** * Create a new entity graph rooted at the given * {@linkplain RepresentationMode#MAP dynamic entity}, without * needing a reference to the session or session factory. * * @param rootType The {@link EntityType} representing the root * entity of the graph, which must be a dynamic * entity * @return a new mutable {@link EntityGraph} * * @since 7.0 */ public static EntityGraph<Map<String,?>> createGraphForDynamicEntity(EntityType<?> rootType) { final EntityDomainType<?> domainType = (EntityDomainType<?>) rootType; if ( domainType.getRepresentationMode() != RepresentationMode.MAP ) { throw new IllegalArgumentException( "Entity '" + domainType.getHibernateEntityName() + "' is not a dynamic entity" ); } @SuppressWarnings("unchecked") //Safe, because we just checked final EntityDomainType<Map<String, ?>> dynamicEntity = (EntityDomainType<Map<String, ?>>) domainType; return new RootGraphImpl<>( null, dynamicEntity ); } /** * Merges multiple entity graphs into a single graph that specifies the * fetching/loading of all attributes the input graphs specify. * * @param <T> Root entity type of the query and graph. * * @param entityManager {@code EntityManager} to use to create the new merged graph. * @param root Root type of the entity for which the graph is being merged. * @param graphs Graphs to merge. * * @return The merged graph. */ @SafeVarargs public static <T> EntityGraph<T> merge(EntityManager entityManager, Class<T> root, Graph<T>... 
graphs) { return merge( entityManager, root, Arrays.stream(graphs) ); } /** * Merges multiple entity graphs into a single graph that specifies the * fetching/loading of all attributes the input graphs specify. * * @param <T> Root entity type of the query and graph. * * @param entityManager {@code EntityManager} to use to create the new merged graph. * @param root Root type of the entity for which the graph is being merged. * @param graphs Graphs to merge. * * @return The merged graph. * * @since 7.0 */ public static <T> EntityGraph<T> merge(EntityManager entityManager, Class<T> root, List<? extends Graph<T>> graphs) { return merge( entityManager, root, graphs.stream() ); } /** * Merges multiple entity graphs into a single graph that specifies the * fetching/loading of all attributes the input graphs specify. * * @param <T> Root entity type of the query and graph. * * @param entityManager {@code EntityManager} to use to create the new merged graph. * @param root Root type of the entity for which the graph is being merged. * @param graphs Graphs to merge. * * @return The merged graph. * * @since 7.0 */ public static <T> EntityGraph<T> merge(EntityManager entityManager, Class<T> root, Stream<? extends Graph<T>> graphs) { final RootGraphImplementor<T> merged = ((SessionImplementor) entityManager).createEntityGraph( root ); graphs.forEach( graph -> merged.merge( (GraphImplementor<T>) graph ) ); return merged; } /** * Convenience method to apply the given graph to the given query * without the need for a cast when working with JPA API. 
* * @param query The JPA {@link TypedQuery} * @param graph The JPA {@link EntityGraph} to apply * @param semantic The semantic to use when applying the graph * * @see SelectionQuery#setEntityGraph(EntityGraph, GraphSemantic) * * @since 7.0 */ public static <R> void setGraph(TypedQuery<R> query, EntityGraph<R> graph, GraphSemantic semantic) { ((org.hibernate.query.Query<R>) query).setEntityGraph( graph, semantic ); } /** * Convenience method to apply the given load graph to the given * query without the need for a cast when working with JPA API. * * @param query The JPA {@link TypedQuery} * @param graph The JPA {@link EntityGraph} to apply * * @since 7.0 */ public static <R> void setLoadGraph(TypedQuery<R> query, EntityGraph<R> graph) { setGraph( query, graph, GraphSemantic.LOAD ); } /** * Convenience method to apply the given fetch graph to the given * query without the need for a cast when working with JPA API. * * @param query The JPA {@link TypedQuery} * @param graph The JPA {@link EntityGraph} to apply * * @since 7.0 */ public static <R> void setFetchGraph(TypedQuery<R> query, EntityGraph<R> graph) { setGraph( query, graph, GraphSemantic.FETCH ); } /** * Allows a treated subgraph to ve created for a {@link Subgraph}, since the * JPA-standard operation {@link EntityGraph#addTreatedSubgraph(Class)} is * declared by {@link EntityGraph}. * * @param graph any {@linkplain Graph root graph or subgraph} * @param subtype the treated (narrowed) type * * @since 7.0 */ public <S> Subgraph<S> addTreatedSubgraph(Graph<? super S> graph, Class<S> subtype) { return ((org.hibernate.graph.Graph<? 
super S>) graph).addTreatedSubgraph( subtype ); } /** * Convenience method for {@linkplain Query#getResultList() executing the query}, * applying the given {@link EntityGraph} using the specified semantic * * @param query The JPA Query * @param graph The graph to apply * @param semantic The semantic to use when applying the graph * * @deprecated Since it is not type safe and returns a raw type */ @Deprecated(since = "7.0") public static @SuppressWarnings("rawtypes") List executeList(Query query, EntityGraph<?> graph, GraphSemantic semantic) { return query.unwrap( org.hibernate.query.Query.class ) .applyGraph( (RootGraph<?>) graph, semantic ) .getResultList(); } /** * Form of {@link #executeList(Query, EntityGraph, GraphSemantic)} accepting a * {@link TypedQuery}. * * @param query The JPA Query * @param graph The graph to apply * @param semantic The semantic to use when applying the graph * * @apiNote This signature assumes that the Query's return is an entity and that * the graph applies to that entity's type. JPA does not necessarily * require that, but it is by far the most common usage. * * @deprecated Use {@link #setGraph(TypedQuery, EntityGraph, GraphSemantic)} instead */ @Deprecated(since = "7.0") public static <R> List<R> executeList(TypedQuery<R> query, EntityGraph<R> graph, GraphSemantic semantic) { @SuppressWarnings("unchecked") org.hibernate.query.Query<R> unwrapped = query.unwrap( org.hibernate.query.Query.class ); return unwrapped.setEntityGraph( graph, semantic ).getResultList(); } /** * Convenience method for {@linkplain Query#getResultList() executing the query}, * applying the given {@link EntityGraph} using the named semantic using JPA's * "hint name". See {@link GraphSemantic#fromHintName}. 
* * @param query The JPA Query * @param graph The graph to apply * @param semanticJpaHintName See {@link GraphSemantic#fromHintName} * * @return The result list * * @deprecated Since it is not type safe, returns a raw type, and accepts a string */ @Deprecated(since = "7.0") public static @SuppressWarnings("rawtypes") List executeList(Query query, EntityGraph<?> graph, String semanticJpaHintName) { return executeList( query, graph, GraphSemantic.fromHintName( semanticJpaHintName ) ); } /** * Form of {@link #executeList(Query, EntityGraph, String)} accepting a * {@link TypedQuery}. * * @param query The JPA Query * @param graph The graph to apply * @param semanticJpaHintName See {@link GraphSemantic#fromHintName} * * @apiNote This signature assumes that the Query's return is an entity and that * the graph applies to that entity's type. JPA does not necessarily * require that, but it is by far the most common usage. * * @deprecated Since it accepts a string instead of {@link GraphSemantic} */ @Deprecated(since = "7.0") public static <R> List<R> executeList(TypedQuery<R> query, EntityGraph<R> graph, String semanticJpaHintName) { return executeList( query, graph, GraphSemantic.fromHintName( semanticJpaHintName ) ); } /** * Convenience method for {@linkplain Query#getResultList() executing the query} * using the given {@link EntityGraph}. * * @param query The JPA Query * @param graph The graph to apply * * @apiNote Operates on the assumption that the "default" semantic for an * entity graph applied to a query is {@link GraphSemantic#FETCH}. * This is simply knowledge from JPA EG discussions, nothing that * is specifically mentioned or discussed in the spec. 
* * @deprecated Since it is not type safe and returns a raw type */ @Deprecated(since = "7.0") public static @SuppressWarnings("rawtypes") List executeList(Query query, EntityGraph<?> graph) { return executeList( query, graph, GraphSemantic.FETCH ); } /** * Form of {@link #executeList(Query, EntityGraph, String)} accepting a * {@link TypedQuery}. * * @param query The JPA Query * @param graph The graph to apply * * @apiNote This signature assumes that the Query's return is an entity and that * the graph applies to that entity's type. JPA does not necessarily * require that, but it is by far the most common usage. * * @deprecated Use {@link #setFetchGraph(TypedQuery, EntityGraph)} instead */ @Deprecated(since = "7.0") public static <R> List<R> executeList(TypedQuery<R> query, EntityGraph<R> graph) { return executeList( query, graph, GraphSemantic.FETCH ); } /** * Compares two entity graphs and returns {@code true} if they are equal, * ignoring attribute order. * * @param <T> Root entity type of BOTH graphs. * @param a Graph to compare. * @param b Graph to compare. * */ public static <T> boolean areEqual(EntityGraph<T> a, EntityGraph<T> b) { if ( a == b ) { return true; } if ( ( a == null ) || ( b == null ) ) { return false; } final List<AttributeNode<?>> aNodes = a.getAttributeNodes(); final List<AttributeNode<?>> bNodes = b.getAttributeNodes(); if ( aNodes.size() != bNodes.size() ) { return false; } for ( AttributeNode<?> aNode : aNodes ) { final String attributeName = aNode.getAttributeName(); AttributeNode<?> bNode = null; for ( AttributeNode<?> bCandidate : bNodes ) { if ( attributeName.equals( bCandidate.getAttributeName() ) ) { bNode = bCandidate; break; } } if ( !areEqual( aNode, bNode ) ) { return false; } } return true; } /** * Compares two entity graph attribute node and returns {@code true} if they are equal, * ignoring subgraph attribute order. 
*/ public static boolean areEqual(AttributeNode<?> a, AttributeNode<?> b) { if ( a == b ) { return true; } if ( ( a == null ) || ( b == null ) ) { return false; } if ( a.getAttributeName().equals( b.getAttributeName() ) ) { return areEqual( a.getSubgraphs(), b.getSubgraphs() ) && areEqual( a.getKeySubgraphs(), b.getKeySubgraphs() ); } else { return false; } } /** * Compares two entity subgraph maps and returns {@code true} if they are equal, * ignoring order. */ public static boolean areEqual( @SuppressWarnings("rawtypes") Map<Class, Subgraph> a, @SuppressWarnings("rawtypes") Map<Class, Subgraph> b) { if ( a == b ) { return true; } if ( ( a == null ) || ( b == null ) ) { return false; } @SuppressWarnings("rawtypes") final Set<Class> aKeys = a.keySet(); @SuppressWarnings("rawtypes") final Set<Class> bKeys = b.keySet(); if ( aKeys.equals( bKeys ) ) { for ( Class<?> clazz : aKeys ) { if ( !bKeys.contains( clazz ) ) { return false; } if ( !areEqual( a.get( clazz ), b.get( clazz ) ) ) { return false; } } return true; } else { return false; } } /** * Compares two entity subgraphs and returns {@code true} if they are equal, * ignoring attribute order. 
*/ public static boolean areEqual( @SuppressWarnings("rawtypes") Subgraph a, @SuppressWarnings("rawtypes") Subgraph b) { if ( a == b ) { return true; } if ( ( a == null ) || ( b == null ) ) { return false; } if ( a.getClassType() != b.getClassType() ) { return false; } @SuppressWarnings("unchecked") final List<AttributeNode<?>> aNodes = a.getAttributeNodes(); @SuppressWarnings("unchecked") final List<AttributeNode<?>> bNodes = b.getAttributeNodes(); if ( aNodes.size() != bNodes.size() ) { return false; } for ( AttributeNode<?> aNode : aNodes ) { final String attributeName = aNode.getAttributeName(); AttributeNode<?> bNode = null; for ( AttributeNode<?> bCandidate : bNodes ) { if ( attributeName.equals( bCandidate.getAttributeName() ) ) { bNode = bCandidate; break; } } if ( !areEqual( aNode, bNode ) ) { return false; } } return true; } }
EntityGraphs
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanDoublesEvaluator.java
{ "start": 5045, "end": 5811 }
class ____ implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public LessThanDoublesEvaluator get(DriverContext context) { return new LessThanDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override public String toString() { return "LessThanDoublesEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; } } }
Factory
java
spring-projects__spring-framework
spring-beans/src/main/java/org/springframework/beans/factory/xml/XmlBeanDefinitionReader.java
{ "start": 10883, "end": 19656 }
interface ____ custom handling of XML parsing errors and warnings. * <p>If not set, a default SimpleSaxErrorHandler is used that simply * logs warnings using the logger instance of the view class, * and rethrows errors to discontinue the XML transformation. * @see SimpleSaxErrorHandler */ public void setErrorHandler(ErrorHandler errorHandler) { this.errorHandler = errorHandler; } /** * Specify the {@link BeanDefinitionDocumentReader} implementation to use, * responsible for the actual reading of the XML bean definition document. * <p>The default is {@link DefaultBeanDefinitionDocumentReader}. * @param documentReaderClass the desired BeanDefinitionDocumentReader implementation class */ public void setDocumentReaderClass(Class<? extends BeanDefinitionDocumentReader> documentReaderClass) { this.documentReaderClass = documentReaderClass; } /** * Load bean definitions from the specified XML file. * @param resource the resource descriptor for the XML file * @return the number of bean definitions found * @throws BeanDefinitionStoreException in case of loading or parsing errors */ @Override public int loadBeanDefinitions(Resource resource) throws BeanDefinitionStoreException { return loadBeanDefinitions(new EncodedResource(resource)); } /** * Load bean definitions from the specified XML file. 
* @param encodedResource the resource descriptor for the XML file, * allowing to specify an encoding to use for parsing the file * @return the number of bean definitions found * @throws BeanDefinitionStoreException in case of loading or parsing errors */ public int loadBeanDefinitions(EncodedResource encodedResource) throws BeanDefinitionStoreException { Assert.notNull(encodedResource, "EncodedResource must not be null"); if (logger.isTraceEnabled()) { logger.trace("Loading XML bean definitions from " + encodedResource); } Set<EncodedResource> currentResources = this.resourcesCurrentlyBeingLoaded.get(); if (!currentResources.add(encodedResource)) { throw new BeanDefinitionStoreException( "Detected cyclic loading of " + encodedResource + " - check your import definitions!"); } try (InputStream inputStream = encodedResource.getResource().getInputStream()) { InputSource inputSource = new InputSource(inputStream); if (encodedResource.getEncoding() != null) { inputSource.setEncoding(encodedResource.getEncoding()); } return doLoadBeanDefinitions(inputSource, encodedResource.getResource()); } catch (IOException ex) { throw new BeanDefinitionStoreException( "IOException parsing XML document from " + encodedResource.getResource(), ex); } finally { currentResources.remove(encodedResource); if (currentResources.isEmpty()) { this.resourcesCurrentlyBeingLoaded.remove(); } } } /** * Load bean definitions from the specified XML file. * @param inputSource the SAX InputSource to read from * @return the number of bean definitions found * @throws BeanDefinitionStoreException in case of loading or parsing errors */ public int loadBeanDefinitions(InputSource inputSource) throws BeanDefinitionStoreException { return loadBeanDefinitions(inputSource, "resource loaded through SAX InputSource"); } /** * Load bean definitions from the specified XML file. 
* @param inputSource the SAX InputSource to read from * @param resourceDescription a description of the resource * (can be {@code null} or empty) * @return the number of bean definitions found * @throws BeanDefinitionStoreException in case of loading or parsing errors */ public int loadBeanDefinitions(InputSource inputSource, @Nullable String resourceDescription) throws BeanDefinitionStoreException { return doLoadBeanDefinitions(inputSource, new DescriptiveResource(resourceDescription)); } /** * Actually load bean definitions from the specified XML file. * @param inputSource the SAX InputSource to read from * @param resource the resource descriptor for the XML file * @return the number of bean definitions found * @throws BeanDefinitionStoreException in case of loading or parsing errors * @see #doLoadDocument * @see #registerBeanDefinitions */ protected int doLoadBeanDefinitions(InputSource inputSource, Resource resource) throws BeanDefinitionStoreException { try { Document doc = doLoadDocument(inputSource, resource); int count = registerBeanDefinitions(doc, resource); if (logger.isDebugEnabled()) { logger.debug("Loaded " + count + " bean definitions from " + resource); } return count; } catch (BeanDefinitionStoreException ex) { throw ex; } catch (SAXParseException ex) { throw new XmlBeanDefinitionStoreException(resource.getDescription(), "Line " + ex.getLineNumber() + " in XML document from " + resource + " is invalid", ex); } catch (SAXException ex) { throw new XmlBeanDefinitionStoreException(resource.getDescription(), "XML document from " + resource + " is invalid", ex); } catch (ParserConfigurationException ex) { throw new BeanDefinitionStoreException(resource.getDescription(), "Parser configuration exception parsing XML from " + resource, ex); } catch (IOException ex) { throw new BeanDefinitionStoreException(resource.getDescription(), "IOException parsing XML document from " + resource, ex); } catch (Throwable ex) { throw new 
BeanDefinitionStoreException(resource.getDescription(), "Unexpected exception parsing XML document from " + resource, ex); } } /** * Actually load the specified document using the configured DocumentLoader. * @param inputSource the SAX InputSource to read from * @param resource the resource descriptor for the XML file * @return the DOM Document * @throws Exception when thrown from the DocumentLoader * @see #setDocumentLoader * @see DocumentLoader#loadDocument */ protected Document doLoadDocument(InputSource inputSource, Resource resource) throws Exception { return this.documentLoader.loadDocument(inputSource, getEntityResolver(), this.errorHandler, getValidationModeForResource(resource), isNamespaceAware()); } /** * Determine the validation mode for the specified {@link Resource}. * If no explicit validation mode has been configured, then the validation * mode gets {@link #detectValidationMode detected} from the given resource. * <p>Override this method if you would like full control over the validation * mode, even when something other than {@link #VALIDATION_AUTO} was set. * @see #detectValidationMode */ protected int getValidationModeForResource(Resource resource) { int validationModeToUse = getValidationMode(); if (validationModeToUse != VALIDATION_AUTO) { return validationModeToUse; } int detectedMode = detectValidationMode(resource); if (detectedMode != VALIDATION_AUTO) { return detectedMode; } // Hmm, we didn't get a clear indication... Let's assume XSD, // since apparently no DTD declaration has been found up until // detection stopped (before finding the document's root tag). return VALIDATION_XSD; } /** * Detect which kind of validation to perform on the XML file identified * by the supplied {@link Resource}. If the file has a {@code DOCTYPE} * definition then DTD validation is used otherwise XSD validation is assumed. * <p>Override this method if you would like to customize resolution * of the {@link #VALIDATION_AUTO} mode. 
*/ protected int detectValidationMode(Resource resource) { if (resource.isOpen()) { throw new BeanDefinitionStoreException( "Passed-in Resource [" + resource + "] contains an open stream: " + "cannot determine validation mode automatically. Either pass in a Resource " + "that is able to create fresh streams, or explicitly specify the validationMode " + "on your XmlBeanDefinitionReader instance."); } InputStream inputStream; try { inputStream = resource.getInputStream(); } catch (IOException ex) { throw new BeanDefinitionStoreException( "Unable to determine validation mode for [" + resource + "]: cannot open InputStream. " + "Did you attempt to load directly from a SAX InputSource without specifying the " + "validationMode on your XmlBeanDefinitionReader instance?", ex); } try { return this.validationModeDetector.detectValidationMode(inputStream); } catch (IOException ex) { throw new BeanDefinitionStoreException("Unable to determine validation mode for [" + resource + "]: an error occurred whilst reading from the InputStream.", ex); } } /** * Register the bean definitions contained in the given DOM document. * Called by {@code loadBeanDefinitions}. * <p>Creates a new instance of the parser
for
java
apache__camel
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/Sqs2EndpointBuilderFactory.java
{ "start": 99053, "end": 120311 }
interface ____ extends Sqs2EndpointConsumerBuilder, Sqs2EndpointProducerBuilder { default AdvancedSqs2EndpointBuilder advanced() { return (AdvancedSqs2EndpointBuilder) this; } /** * The hostname of the Amazon AWS cloud. * * The option is a: <code>java.lang.String</code> type. * * Default: amazonaws.com * Group: common * * @param amazonAWSHost the value to set * @return the dsl builder */ default Sqs2EndpointBuilder amazonAWSHost(String amazonAWSHost) { doSetProperty("amazonAWSHost", amazonAWSHost); return this; } /** * Setting the auto-creation of the queue. * * The option is a: <code>boolean</code> type. * * Default: false * Group: common * * @param autoCreateQueue the value to set * @return the dsl builder */ default Sqs2EndpointBuilder autoCreateQueue(boolean autoCreateQueue) { doSetProperty("autoCreateQueue", autoCreateQueue); return this; } /** * Setting the auto-creation of the queue. * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: common * * @param autoCreateQueue the value to set * @return the dsl builder */ default Sqs2EndpointBuilder autoCreateQueue(String autoCreateQueue) { doSetProperty("autoCreateQueue", autoCreateQueue); return this; } /** * To use a custom HeaderFilterStrategy to map headers to/from Camel. * * The option is a: * <code>org.apache.camel.spi.HeaderFilterStrategy</code> type. * * Group: common * * @param headerFilterStrategy the value to set * @return the dsl builder */ default Sqs2EndpointBuilder headerFilterStrategy(org.apache.camel.spi.HeaderFilterStrategy headerFilterStrategy) { doSetProperty("headerFilterStrategy", headerFilterStrategy); return this; } /** * To use a custom HeaderFilterStrategy to map headers to/from Camel. * * The option will be converted to a * <code>org.apache.camel.spi.HeaderFilterStrategy</code> type. 
* * Group: common * * @param headerFilterStrategy the value to set * @return the dsl builder */ default Sqs2EndpointBuilder headerFilterStrategy(String headerFilterStrategy) { doSetProperty("headerFilterStrategy", headerFilterStrategy); return this; } /** * Set the need for overriding the endpoint. This option needs to be * used in combination with the uriEndpointOverride option. * * The option is a: <code>boolean</code> type. * * Default: false * Group: common * * @param overrideEndpoint the value to set * @return the dsl builder */ default Sqs2EndpointBuilder overrideEndpoint(boolean overrideEndpoint) { doSetProperty("overrideEndpoint", overrideEndpoint); return this; } /** * Set the need for overriding the endpoint. This option needs to be * used in combination with the uriEndpointOverride option. * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: common * * @param overrideEndpoint the value to set * @return the dsl builder */ default Sqs2EndpointBuilder overrideEndpoint(String overrideEndpoint) { doSetProperty("overrideEndpoint", overrideEndpoint); return this; } /** * The underlying protocol used to communicate with SQS. * * The option is a: <code>java.lang.String</code> type. * * Default: https * Group: common * * @param protocol the value to set * @return the dsl builder */ default Sqs2EndpointBuilder protocol(String protocol) { doSetProperty("protocol", protocol); return this; } /** * Specify the queue owner aws account id when you need to connect the * queue with a different account owner. * * The option is a: <code>java.lang.String</code> type. * * Group: common * * @param queueOwnerAWSAccountId the value to set * @return the dsl builder */ default Sqs2EndpointBuilder queueOwnerAWSAccountId(String queueOwnerAWSAccountId) { doSetProperty("queueOwnerAWSAccountId", queueOwnerAWSAccountId); return this; } /** * The region in which SQS client needs to work. 
When using this * parameter, the configuration will expect the lowercase name of the * region (for example, ap-east-1) You'll need to use the name * Region.EU_WEST_1.id(). * * The option is a: <code>java.lang.String</code> type. * * Group: common * * @param region the value to set * @return the dsl builder */ default Sqs2EndpointBuilder region(String region) { doSetProperty("region", region); return this; } /** * Set the overriding uri endpoint. This option needs to be used in * combination with overrideEndpoint option. * * The option is a: <code>java.lang.String</code> type. * * Group: common * * @param uriEndpointOverride the value to set * @return the dsl builder */ default Sqs2EndpointBuilder uriEndpointOverride(String uriEndpointOverride) { doSetProperty("uriEndpointOverride", uriEndpointOverride); return this; } /** * To define a proxy host when instantiating the SQS client. * * The option is a: <code>java.lang.String</code> type. * * Group: proxy * * @param proxyHost the value to set * @return the dsl builder */ default Sqs2EndpointBuilder proxyHost(String proxyHost) { doSetProperty("proxyHost", proxyHost); return this; } /** * To define a proxy port when instantiating the SQS client. * * The option is a: <code>java.lang.Integer</code> type. * * Group: proxy * * @param proxyPort the value to set * @return the dsl builder */ default Sqs2EndpointBuilder proxyPort(Integer proxyPort) { doSetProperty("proxyPort", proxyPort); return this; } /** * To define a proxy port when instantiating the SQS client. * * The option will be converted to a <code>java.lang.Integer</code> * type. * * Group: proxy * * @param proxyPort the value to set * @return the dsl builder */ default Sqs2EndpointBuilder proxyPort(String proxyPort) { doSetProperty("proxyPort", proxyPort); return this; } /** * To define a proxy protocol when instantiating the SQS client. * * The option is a: <code>software.amazon.awssdk.core.Protocol</code> * type. 
* * Default: HTTPS * Group: proxy * * @param proxyProtocol the value to set * @return the dsl builder */ default Sqs2EndpointBuilder proxyProtocol(software.amazon.awssdk.core.Protocol proxyProtocol) { doSetProperty("proxyProtocol", proxyProtocol); return this; } /** * To define a proxy protocol when instantiating the SQS client. * * The option will be converted to a * <code>software.amazon.awssdk.core.Protocol</code> type. * * Default: HTTPS * Group: proxy * * @param proxyProtocol the value to set * @return the dsl builder */ default Sqs2EndpointBuilder proxyProtocol(String proxyProtocol) { doSetProperty("proxyProtocol", proxyProtocol); return this; } /** * The maximumMessageSize (in bytes) an SQS message can contain for this * queue. * * The option is a: <code>java.lang.Integer</code> type. * * Group: queue * * @param maximumMessageSize the value to set * @return the dsl builder */ default Sqs2EndpointBuilder maximumMessageSize(Integer maximumMessageSize) { doSetProperty("maximumMessageSize", maximumMessageSize); return this; } /** * The maximumMessageSize (in bytes) an SQS message can contain for this * queue. * * The option will be converted to a <code>java.lang.Integer</code> * type. * * Group: queue * * @param maximumMessageSize the value to set * @return the dsl builder */ default Sqs2EndpointBuilder maximumMessageSize(String maximumMessageSize) { doSetProperty("maximumMessageSize", maximumMessageSize); return this; } /** * The messageRetentionPeriod (in seconds) a message will be retained by * SQS for this queue. * * The option is a: <code>java.lang.Integer</code> type. * * Group: queue * * @param messageRetentionPeriod the value to set * @return the dsl builder */ default Sqs2EndpointBuilder messageRetentionPeriod(Integer messageRetentionPeriod) { doSetProperty("messageRetentionPeriod", messageRetentionPeriod); return this; } /** * The messageRetentionPeriod (in seconds) a message will be retained by * SQS for this queue. 
* * The option will be converted to a <code>java.lang.Integer</code> * type. * * Group: queue * * @param messageRetentionPeriod the value to set * @return the dsl builder */ default Sqs2EndpointBuilder messageRetentionPeriod(String messageRetentionPeriod) { doSetProperty("messageRetentionPeriod", messageRetentionPeriod); return this; } /** * The policy for this queue. It can be loaded by default from * classpath, but you can prefix with classpath:, file:, or http: to * load the resource from different systems. * * This option can also be loaded from an existing file, by prefixing * with file: or classpath: followed by the location of the file. * * The option is a: <code>java.lang.String</code> type. * * Group: queue * * @param policy the value to set * @return the dsl builder */ default Sqs2EndpointBuilder policy(String policy) { doSetProperty("policy", policy); return this; } /** * To define the queueUrl explicitly. All other parameters, which would * influence the queueUrl, are ignored. This parameter is intended to be * used to connect to a mock implementation of SQS, for testing * purposes. * * The option is a: <code>java.lang.String</code> type. * * Group: queue * * @param queueUrl the value to set * @return the dsl builder */ default Sqs2EndpointBuilder queueUrl(String queueUrl) { doSetProperty("queueUrl", queueUrl); return this; } /** * If you do not specify WaitTimeSeconds in the request, the queue * attribute ReceiveMessageWaitTimeSeconds is used to determine how long * to wait. * * The option is a: <code>java.lang.Integer</code> type. 
* * Group: queue * * @param receiveMessageWaitTimeSeconds the value to set * @return the dsl builder */ default Sqs2EndpointBuilder receiveMessageWaitTimeSeconds(Integer receiveMessageWaitTimeSeconds) { doSetProperty("receiveMessageWaitTimeSeconds", receiveMessageWaitTimeSeconds); return this; } /** * If you do not specify WaitTimeSeconds in the request, the queue * attribute ReceiveMessageWaitTimeSeconds is used to determine how long * to wait. * * The option will be converted to a <code>java.lang.Integer</code> * type. * * Group: queue * * @param receiveMessageWaitTimeSeconds the value to set * @return the dsl builder */ default Sqs2EndpointBuilder receiveMessageWaitTimeSeconds(String receiveMessageWaitTimeSeconds) { doSetProperty("receiveMessageWaitTimeSeconds", receiveMessageWaitTimeSeconds); return this; } /** * Specify the policy that send message to DeadLetter queue. See detail * at Amazon docs. * * The option is a: <code>java.lang.String</code> type. * * Group: queue * * @param redrivePolicy the value to set * @return the dsl builder */ default Sqs2EndpointBuilder redrivePolicy(String redrivePolicy) { doSetProperty("redrivePolicy", redrivePolicy); return this; } /** * Amazon AWS Access Key. * * The option is a: <code>java.lang.String</code> type. * * Group: security * * @param accessKey the value to set * @return the dsl builder */ default Sqs2EndpointBuilder accessKey(String accessKey) { doSetProperty("accessKey", accessKey); return this; } /** * If using a profile credentials provider, this parameter will set the * profile name. * * The option is a: <code>java.lang.String</code> type. * * Group: security * * @param profileCredentialsName the value to set * @return the dsl builder */ default Sqs2EndpointBuilder profileCredentialsName(String profileCredentialsName) { doSetProperty("profileCredentialsName", profileCredentialsName); return this; } /** * Amazon AWS Secret Key. * * The option is a: <code>java.lang.String</code> type. 
* * Group: security * * @param secretKey the value to set * @return the dsl builder */ default Sqs2EndpointBuilder secretKey(String secretKey) { doSetProperty("secretKey", secretKey); return this; } /** * Amazon AWS Session Token used when the user needs to assume an IAM * role. * * The option is a: <code>java.lang.String</code> type. * * Group: security * * @param sessionToken the value to set * @return the dsl builder */ default Sqs2EndpointBuilder sessionToken(String sessionToken) { doSetProperty("sessionToken", sessionToken); return this; } /** * If we want to trust all certificates in case of overriding the * endpoint. * * The option is a: <code>boolean</code> type. * * Default: false * Group: security * * @param trustAllCertificates the value to set * @return the dsl builder */ default Sqs2EndpointBuilder trustAllCertificates(boolean trustAllCertificates) { doSetProperty("trustAllCertificates", trustAllCertificates); return this; } /** * If we want to trust all certificates in case of overriding the * endpoint. * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: security * * @param trustAllCertificates the value to set * @return the dsl builder */ default Sqs2EndpointBuilder trustAllCertificates(String trustAllCertificates) { doSetProperty("trustAllCertificates", trustAllCertificates); return this; } /** * Set whether the SQS client should expect to load credentials on an * AWS infra instance or to expect static credentials to be passed in. * * The option is a: <code>boolean</code> type. 
* * Default: false * Group: security * * @param useDefaultCredentialsProvider the value to set * @return the dsl builder */ default Sqs2EndpointBuilder useDefaultCredentialsProvider(boolean useDefaultCredentialsProvider) { doSetProperty("useDefaultCredentialsProvider", useDefaultCredentialsProvider); return this; } /** * Set whether the SQS client should expect to load credentials on an * AWS infra instance or to expect static credentials to be passed in. * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: security * * @param useDefaultCredentialsProvider the value to set * @return the dsl builder */ default Sqs2EndpointBuilder useDefaultCredentialsProvider(String useDefaultCredentialsProvider) { doSetProperty("useDefaultCredentialsProvider", useDefaultCredentialsProvider); return this; } /** * Set whether the SQS client should expect to load credentials through * a profile credentials provider. * * The option is a: <code>boolean</code> type. * * Default: false * Group: security * * @param useProfileCredentialsProvider the value to set * @return the dsl builder */ default Sqs2EndpointBuilder useProfileCredentialsProvider(boolean useProfileCredentialsProvider) { doSetProperty("useProfileCredentialsProvider", useProfileCredentialsProvider); return this; } /** * Set whether the SQS client should expect to load credentials through * a profile credentials provider. * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: security * * @param useProfileCredentialsProvider the value to set * @return the dsl builder */ default Sqs2EndpointBuilder useProfileCredentialsProvider(String useProfileCredentialsProvider) { doSetProperty("useProfileCredentialsProvider", useProfileCredentialsProvider); return this; } /** * Set whether the SQS client should expect to use Session Credentials. * This is useful in a situation in which the user needs to assume an * IAM role for doing operations in SQS. 
* * The option is a: <code>boolean</code> type. * * Default: false * Group: security * * @param useSessionCredentials the value to set * @return the dsl builder */ default Sqs2EndpointBuilder useSessionCredentials(boolean useSessionCredentials) { doSetProperty("useSessionCredentials", useSessionCredentials); return this; } /** * Set whether the SQS client should expect to use Session Credentials. * This is useful in a situation in which the user needs to assume an * IAM role for doing operations in SQS. * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: security * * @param useSessionCredentials the value to set * @return the dsl builder */ default Sqs2EndpointBuilder useSessionCredentials(String useSessionCredentials) { doSetProperty("useSessionCredentials", useSessionCredentials); return this; } } /** * Advanced builder for endpoint for the AWS Simple Queue Service (SQS) component. */ public
Sqs2EndpointBuilder
java
elastic__elasticsearch
x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoTileGridTiler.java
{ "start": 11658, "end": 12982 }
class ____ extends GeoTileGridTiler { private final long maxTiles; UnboundedGeoTileGridTiler(int precision) { super(precision); maxTiles = (long) tiles * tiles; } @Override protected boolean validTile(int x, int y, int z) { return true; } @Override protected long getMaxCells() { return maxTiles; } @Override protected int setValuesForFullyContainedTile(int xTile, int yTile, int zTile, GeoShapeCellValues values, int valuesIndex) { // For every level we go down, we half each dimension. The total number of splits is equal to 1 << (levelEnd - levelStart) final int splits = 1 << precision - zTile; // The start value of a dimension is calculated by multiplying the value of that dimension at the start level // by the number of splits final int minX = xTile * splits; final int minY = yTile * splits; // The end value of a dimension is calculated by adding to the start value the number of splits final int maxX = minX + splits; final int maxY = minY + splits; return setValues(values, valuesIndex, minY, maxY, minX, maxX); } } }
UnboundedGeoTileGridTiler
java
square__moshi
moshi/src/test/java/com/squareup/moshi/AdapterMethodsTest.java
{ "start": 8124, "end": 9491 }
class ____ { @ToJson List<Integer> pointToJson(Point point) { return Arrays.asList(point.x, point.y); } @ToJson String integerListToJson(List<Integer> list) { StringBuilder result = new StringBuilder(); for (Integer i : list) { if (result.length() != 0) result.append(" "); result.append(i.intValue()); } return result.toString(); } @FromJson Point pointFromJson(List<Integer> o) throws Exception { if (o.size() != 2) throw new Exception("Expected 2 elements but was " + o); return new Point(o.get(0), o.get(1)); } @FromJson List<Integer> listOfIntegersFromJson(String list) throws Exception { List<Integer> result = new ArrayList<>(); for (String part : list.split(" ")) { result.add(Integer.parseInt(part)); } return result; } } @Test public void conflictingToAdapters() throws Exception { Moshi.Builder builder = new Moshi.Builder(); try { builder.add(new ConflictingsToJsonAdapter()); fail(); } catch (IllegalStateException expected) { assertThat(expected.getMessage()).contains("Conflicting @ToJson methods:"); assertThat(expected.getMessage()).contains("pointToJson1"); assertThat(expected.getMessage()).contains("pointToJson2"); } } static
MultipleLayersJsonAdapter
java
spring-projects__spring-security
core/src/main/java/org/springframework/security/core/SimpleAuthentication.java
{ "start": 2341, "end": 4053 }
class ____ implements Authentication.Builder<Builder> { private final Log logger = LogFactory.getLog(getClass()); private final Collection<GrantedAuthority> authorities = new LinkedHashSet<>(); private @Nullable Object principal; private @Nullable Object credentials; private @Nullable Object details; private boolean authenticated; Builder(Authentication authentication) { this.logger.debug("Creating a builder which will result in exchanging an authentication of type " + authentication.getClass() + " for " + SimpleAuthentication.class.getSimpleName() + ";" + " consider implementing " + authentication.getClass().getSimpleName() + "#toBuilder"); this.authorities.addAll(authentication.getAuthorities()); this.principal = authentication.getPrincipal(); this.credentials = authentication.getCredentials(); this.details = authentication.getDetails(); this.authenticated = authentication.isAuthenticated(); } @Override public Builder authorities(Consumer<Collection<GrantedAuthority>> authorities) { authorities.accept(this.authorities); return this; } @Override public Builder details(@Nullable Object details) { this.details = details; return this; } @Override public Builder principal(@Nullable Object principal) { this.principal = principal; return this; } @Override public Builder credentials(@Nullable Object credentials) { this.credentials = credentials; return this; } @Override public Builder authenticated(boolean authenticated) { this.authenticated = authenticated; return this; } @Override public Authentication build() { return new SimpleAuthentication(this); } } }
Builder
java
spring-projects__spring-boot
module/spring-boot-restclient-test/src/main/java/org/springframework/boot/restclient/test/MockServerRestClientCustomizer.java
{ "start": 2739, "end": 3368 }
class ____ implements RestClientCustomizer { private final Map<RestClient.Builder, RequestExpectationManager> expectationManagers = new ConcurrentHashMap<>(); private final Map<RestClient.Builder, MockRestServiceServer> servers = new ConcurrentHashMap<>(); private final Supplier<? extends RequestExpectationManager> expectationManagerSupplier; private boolean bufferContent; public MockServerRestClientCustomizer() { this(SimpleRequestExpectationManager::new); } /** * Create a new {@link MockServerRestClientCustomizer} instance. * @param expectationManager the expectation manager
MockServerRestClientCustomizer
java
apache__hadoop
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/SortValidator.java
{ "start": 4226, "end": 5000 }
class ____ { /** * Get raw data bytes from a {@link Writable} * @param writable {@link Writable} object from whom to get the raw data * @return raw data of the writable */ public byte[] getRawBytes(Writable writable) { return writable.toString().getBytes(); } /** * Get number of raw data bytes of the {@link Writable} * @param writable {@link Writable} object from whom to get the raw data * length * @return number of raw data bytes */ public int getRawBytesLength(Writable writable) { return writable.toString().getBytes().length; } } /** * Specialization of {@link Raw} for {@link BytesWritable}. */ static
Raw
java
elastic__elasticsearch
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java
{ "start": 1003, "end": 2381 }
class ____ extends SpatialRelatesFunctionTestCase { public SpatialIntersectsTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable<Object[]> parameters() { List<TestCaseSupplier> suppliers = new ArrayList<>(); SpatialRelatesFunctionTestCase.addSpatialGridCombinations(suppliers, GEO_POINT); DataType[] geoDataTypes = { GEO_POINT, DataType.GEO_SHAPE }; SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, geoDataTypes); DataType[] cartesianDataTypes = { DataType.CARTESIAN_POINT, DataType.CARTESIAN_SHAPE }; SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, cartesianDataTypes); return parameterSuppliersFromTypedData( errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), SpatialIntersectsTests::typeErrorMessage) ); } @Override protected Expression build(Source source, List<Expression> args) { return new SpatialIntersects(source, args.get(0), args.get(1)); } protected static String typeErrorMessage(boolean includeOrdinal, List<Set<DataType>> validPerPosition, List<DataType> types) { return typeErrorMessage(includeOrdinal, validPerPosition, types, false, true); } }
SpatialIntersectsTests
java
spring-projects__spring-framework
spring-context/src/main/java/org/springframework/resilience/annotation/RetryAnnotationBeanPostProcessor.java
{ "start": 2204, "end": 2904 }
class ____ extends AbstractBeanFactoryAwareAdvisingPostProcessor implements EmbeddedValueResolverAware { private @Nullable StringValueResolver embeddedValueResolver; public RetryAnnotationBeanPostProcessor() { setBeforeExistingAdvisors(true); Pointcut cpc = new AnnotationMatchingPointcut(Retryable.class, true); Pointcut mpc = new AnnotationMatchingPointcut(null, Retryable.class, true); this.advisor = new DefaultPointcutAdvisor( new ComposablePointcut(cpc).union(mpc), new RetryAnnotationInterceptor()); } @Override public void setEmbeddedValueResolver(StringValueResolver resolver) { this.embeddedValueResolver = resolver; } private
RetryAnnotationBeanPostProcessor
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/issue_2000/Issue2066.java
{ "start": 141, "end": 320 }
class ____ extends TestCase { public void test_issue() throws Exception { JSON.parseObject("{\"values\":[[1,2],[3,4]]}", Model.class); } public static
Issue2066
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/telemetry/metric/LongGauge.java
{ "start": 584, "end": 899 }
interface ____ extends Instrument, AutoCloseable { /** * Noop gauge for tests */ LongGauge NOOP = new LongGauge() { @Override public String getName() { return "noop"; } @Override public void close() throws Exception { } }; }
LongGauge
java
google__dagger
javatests/dagger/functional/producers/scope/SetProducerModule.java
{ "start": 912, "end": 1115 }
class ____ { @Produces @IntoSet static Object setValue1(Object value) { return value; } @Produces @IntoSet static Object setValue2(Object value) { return value; } }
SetProducerModule
java
quarkusio__quarkus
extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/attribute/IdentUsernameAttribute.java
{ "start": 781, "end": 1254 }
class ____ implements ExchangeAttributeBuilder { @Override public String name() { return "Ident Username"; } @Override public ExchangeAttribute build(final String token) { if (token.equals(IDENT_USERNAME)) { return IdentUsernameAttribute.INSTANCE; } return null; } @Override public int priority() { return 0; } } }
Builder
java
elastic__elasticsearch
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2ErrorTests.java
{ "start": 797, "end": 1387 }
class ____ extends ErrorsForCasesWithoutExamplesTestCase { @Override protected List<TestCaseSupplier> cases() { return paramsToSuppliers(Atan2Tests.parameters()); } @Override protected Expression build(Source source, List<Expression> args) { return new Atan2(source, args.get(0), args.get(1)); } @Override protected Matcher<String> expectedTypeErrorMatcher(List<Set<DataType>> validPerPosition, List<DataType> signature) { return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, i) -> "numeric")); } }
Atan2ErrorTests
java
netty__netty
codec-socks/src/main/java/io/netty/handler/codec/socksx/v5/Socks5Message.java
{ "start": 745, "end": 811 }
interface ____ all SOCKS5 protocol messages implement. */ public
that
java
grpc__grpc-java
util/src/main/java/io/grpc/util/OutlierDetectionLoadBalancer.java
{ "start": 8438, "end": 9983 }
class ____ extends ForwardingLoadBalancerHelper { private Helper delegate; ChildHelper(Helper delegate) { this.delegate = new HealthProducerHelper(delegate); } @Override protected Helper delegate() { return delegate; } @Override public Subchannel createSubchannel(CreateSubchannelArgs args) { // Subchannels are wrapped so that we can monitor call results and to trigger failures when // we decide to eject the subchannel. OutlierDetectionSubchannel subchannel = new OutlierDetectionSubchannel(args, delegate); // If the subchannel is associated with a single address that is also already in the map // the subchannel will be added to the map and be included in outlier detection. List<EquivalentAddressGroup> addressGroups = args.getAddresses(); if (hasSingleAddress(addressGroups) && addressMap.containsKey(addressGroups.get(0).getAddresses().get(0))) { EndpointTracker tracker = addressMap.get(addressGroups.get(0).getAddresses().get(0)); tracker.addSubchannel(subchannel); // If this address has already been ejected, we need to immediately eject this Subchannel. if (tracker.ejectionTimeNanos != null) { subchannel.eject(); } } return subchannel; } @Override public void updateBalancingState(ConnectivityState newState, SubchannelPicker newPicker) { delegate.updateBalancingState(newState, new OutlierDetectionPicker(newPicker)); } } final
ChildHelper
java
spring-projects__spring-security
aspects/src/test/java/org/springframework/security/authorization/method/aspectj/PreFilterAspectTests.java
{ "start": 1213, "end": 2047 }
class ____ { private MethodInterceptor interceptor; private PrePostSecured prePostSecured = new PrePostSecured(); @BeforeEach public final void setUp() { MockitoAnnotations.initMocks(this); this.interceptor = new PreFilterAuthorizationMethodInterceptor(); PreFilterAspect secAspect = PreFilterAspect.aspectOf(); secAspect.setSecurityInterceptor(this.interceptor); } @Test public void preFilterMethodWhenListThenFilters() { List<String> objects = new ArrayList<>(Arrays.asList("apple", "banana", "aubergine", "orange")); assertThat(this.prePostSecured.preFilterMethod(objects)).containsExactly("apple", "aubergine"); } @Test public void nestedDenyAllPreFilterDeniesAccess() { assertThat(this.prePostSecured.myObject().denyAllMethod(new ArrayList<>(List.of("deny")))).isEmpty(); } static
PreFilterAspectTests
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/sql/oracle/create/OracleCreateIndexTest16_local.java
{ "start": 967, "end": 2510 }
class ____ extends OracleTest { public void test_0() throws Exception { String sql = // "CREATE INDEX dbobjs_idx ON dbobjs (created) LOCAL"; List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.ORACLE); SQLStatement stmt = statementList.get(0); print(statementList); assertEquals(1, statementList.size()); SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(JdbcConstants.ORACLE); stmt.accept(visitor); System.out.println("Tables : " + visitor.getTables()); System.out.println("fields : " + visitor.getColumns()); System.out.println("coditions : " + visitor.getConditions()); System.out.println("relationships : " + visitor.getRelationships()); System.out.println("orderBy : " + visitor.getOrderByColumns()); assertEquals("CREATE INDEX dbobjs_idx ON dbobjs(created) LOCAL", SQLUtils.toSQLString(stmt, JdbcConstants.ORACLE)); assertEquals(1, visitor.getTables().size()); assertTrue(visitor.getTables().containsKey(new TableStat.Name("dbobjs"))); assertEquals(1, visitor.getColumns().size()); // assertTrue(visitor.getColumns().contains(new TableStat.Column("xwarehouses", "sales_rep_id"))); // assertTrue(visitor.getColumns().contains(new TableStat.Column("pivot_table", "YEAR"))); // assertTrue(visitor.getColumns().contains(new TableStat.Column("pivot_table", "order_mode"))); } }
OracleCreateIndexTest16_local
java
netty__netty
transport-classes-io_uring/src/main/java/io/netty/channel/uring/MsgHdr.java
{ "start": 1235, "end": 7289 }
class ____ { private MsgHdr() { } static void set(ByteBuffer memory, long iovMemory, int iovLength) { int memoryPosition = memory.position(); memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_NAMELEN, 0); if (Native.SIZEOF_SIZE_T == 4) { memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_NAME, 0); memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_IOV, (int) iovMemory); memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_IOVLEN, iovLength); memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_CONTROL, 0); memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_CONTROLLEN, 0); } else { assert Native.SIZEOF_SIZE_T == 8; memory.putLong(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_NAME, 0); memory.putLong(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_IOV, iovMemory); memory.putLong(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_IOVLEN, iovLength); memory.putLong(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_CONTROL, 0); memory.putLong(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_CONTROLLEN, 0); } } static void set(ByteBuffer memory, ByteBuffer sockAddrMemory, int addressSize, ByteBuffer iovMemory, int iovLength, ByteBuffer msgControl, int cmsgHdrDataOffset, short segmentSize) { int memoryPosition = memory.position(); memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_NAMELEN, addressSize); int msgControlLen = 0; long msgControlAddr; if (segmentSize > 0 && msgControl != null && cmsgHdrDataOffset >= 0) { msgControlLen = Native.CMSG_LEN; CmsgHdr.write(msgControl, cmsgHdrDataOffset, Native.CMSG_LEN, Native.SOL_UDP, Native.UDP_SEGMENT, segmentSize); msgControlAddr = Buffer.memoryAddress(msgControl) + msgControl.position(); } else { // Set to 0 if we not explicit requested GSO. msgControlAddr = 0; } long sockAddr = sockAddrMemory == null ? 
0 : Buffer.memoryAddress(sockAddrMemory); if (Native.SIZEOF_SIZE_T == 4) { memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_NAME, (int) sockAddr); memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_IOV, (int) Buffer.memoryAddress(iovMemory)); memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_IOVLEN, iovLength); memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_CONTROL, (int) msgControlAddr); memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_CONTROLLEN, msgControlLen); } else { assert Native.SIZEOF_SIZE_T == 8; memory.putLong(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_NAME, sockAddr); memory.putLong(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_IOV, Buffer.memoryAddress(iovMemory)); memory.putLong(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_IOVLEN, iovLength); memory.putLong(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_CONTROL, msgControlAddr); memory.putLong(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_CONTROLLEN, msgControlLen); } // No flags (we assume the memory was memset before) } static void prepSendFd(ByteBuffer memory, int fd, ByteBuffer msgControl, int cmsgHdrDataOffset, ByteBuffer iovMemory, int iovLength) { int memoryPosition = memory.position(); long msgControlAddr = Buffer.memoryAddress(msgControl); CmsgHdr.writeScmRights(msgControl, cmsgHdrDataOffset, fd); if (Native.SIZEOF_SIZE_T == 4) { memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_CONTROL, (int) msgControlAddr); memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_CONTROLLEN, Native.MSG_CONTROL_LEN_FOR_FD); memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_IOV, (int) Buffer.memoryAddress(iovMemory)); memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_IOVLEN, iovLength); } else { assert Native.SIZEOF_SIZE_T == 8; memory.putLong(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_CONTROL, msgControlAddr); memory.putLong(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_CONTROLLEN, Native.MSG_CONTROL_LEN_FOR_FD); memory.putLong(memoryPosition + 
Native.MSGHDR_OFFSETOF_MSG_IOV, Buffer.memoryAddress(iovMemory)); memory.putLong(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_IOVLEN, iovLength); } } static void prepReadFd(ByteBuffer memory, ByteBuffer msgControl, int cmsgHdrDataOffset, ByteBuffer iovMemory, int iovLength) { int memoryPosition = memory.position(); long msgControlAddr = Buffer.memoryAddress(msgControl); if (Native.SIZEOF_SIZE_T == 4) { memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_CONTROL, (int) msgControlAddr); memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_CONTROLLEN, Native.MSG_CONTROL_LEN_FOR_FD); memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_IOV, (int) Buffer.memoryAddress(iovMemory)); memory.putInt(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_IOVLEN, iovLength); } else { assert Native.SIZEOF_SIZE_T == 8; memory.putLong(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_CONTROL, msgControlAddr); memory.putLong(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_CONTROLLEN, Native.MSG_CONTROL_LEN_FOR_FD); memory.putLong(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_IOV, Buffer.memoryAddress(iovMemory)); memory.putLong(memoryPosition + Native.MSGHDR_OFFSETOF_MSG_IOVLEN, iovLength); } } static int getCmsgData(ByteBuffer memory, ByteBuffer msgControl, int cmsgHdrDataOffset) { return CmsgHdr.readScmRights(msgControl, cmsgHdrDataOffset); } }
MsgHdr
java
junit-team__junit5
platform-tests/src/test/java/org/junit/platform/commons/support/ModifierSupportTests.java
{ "start": 7334, "end": 7807 }
class ____ { @SuppressWarnings({ "FinalMethodInFinalClass", "RedundantModifier" }) final void finalMethod() { } } // ------------------------------------------------------------------------- @Target(ElementType.METHOD) @Retention(RetentionPolicy.RUNTIME) @ParameterizedTest @ValueSource(classes = { PublicClass.class, PrivateClass.class, ProtectedClass.class, PackageVisibleClass.class, AbstractClass.class, StaticClass.class, FinalClass.class }) @
FinalClass
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/component/cascading/collection/LocalizedStrings.java
{ "start": 293, "end": 663 }
class ____ { private Map<Locale,String> strings = new HashMap<>(); public void addString(Locale locale, String value) { strings.put( locale, value ); } public String getString(Locale locale) { return ( String ) strings.get( locale ); } public Map<Locale,String> getStringsCopy() { return java.util.Collections.unmodifiableMap( strings ); } }
LocalizedStrings
java
spring-projects__spring-boot
core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/SpringBootApplication.java
{ "start": 4401, "end": 4545 }
class ____ be used for naming detected components * within the Spring container. * <p> * The default value of the {@link BeanNameGenerator}
to
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
{ "start": 43407, "end": 43932 }
enum ____ type {@code E} in terms of the value of its ordinal. Enums serialized like this must have a corresponding test * which uses {@code EnumSerializationTestUtils#assertEnumSerialization} to fix the wire protocol. */ public <E extends Enum<E>> void writeEnum(E enumValue) throws IOException { assert enumValue instanceof XContentType == false : "XContentHelper#writeTo should be used for XContentType serialisation"; writeVInt(enumValue.ordinal()); } /** * Writes an optional
with
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java
{ "start": 1854, "end": 6431 }
class ____ extends AggregateFunction implements SurrogateExpression { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Expression.class, "WeightedAvg", WeightedAvg::new ); private final Expression weight; private static final String invalidWeightError = "{} argument of [{}] cannot be null or 0, received [{}]"; @FunctionInfo( returnType = "double", description = "The weighted average of a numeric expression.", type = FunctionType.AGGREGATE, examples = @Example(file = "stats", tag = "weighted-avg") ) public WeightedAvg( Source source, @Param(name = "number", type = { "double", "integer", "long" }, description = "A numeric value.") Expression field, @Param(name = "weight", type = { "double", "integer", "long" }, description = "A numeric weight.") Expression weight ) { this(source, field, Literal.TRUE, NO_WINDOW, weight); } public WeightedAvg(Source source, Expression field, Expression filter, Expression window, Expression weight) { super(source, field, filter, window, List.of(weight)); this.weight = weight; } private WeightedAvg(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class), readWindow(in), in.readNamedWriteableCollectionAsList(Expression.class).get(0) ); } @Override public String getWriteableName() { return ENTRY.name; } @Override protected Expression.TypeResolution resolveType() { if (childrenResolved() == false) { return new TypeResolution("Unresolved children"); } TypeResolution resolution = isType( field(), dt -> dt.isNumeric() && dt != DataType.UNSIGNED_LONG, sourceText(), FIRST, "numeric except unsigned_long or counter types" ); if (resolution.unresolved()) { return resolution; } resolution = isType( weight(), dt -> dt.isNumeric() && dt != DataType.UNSIGNED_LONG, sourceText(), SECOND, "numeric except unsigned_long or counter types" ); if (resolution.unresolved()) { return resolution; } if 
(weight.dataType() == DataType.NULL) { return new TypeResolution(format(null, invalidWeightError, SECOND, sourceText(), null)); } if (weight.foldable() == false) { return TypeResolution.TYPE_RESOLVED; } Object weightVal = weight.fold(FoldContext.small()/* TODO remove me*/); if (weightVal == null || weightVal.equals(0) || weightVal.equals(0.0)) { return new TypeResolution(format(null, invalidWeightError, SECOND, sourceText(), weightVal)); } return TypeResolution.TYPE_RESOLVED; } @Override public DataType dataType() { return DataType.DOUBLE; } @Override protected NodeInfo<WeightedAvg> info() { return NodeInfo.create(this, WeightedAvg::new, field(), filter(), window(), weight); } @Override public WeightedAvg replaceChildren(List<Expression> newChildren) { return new WeightedAvg(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2), newChildren.get(3)); } @Override public WeightedAvg withFilter(Expression filter) { return new WeightedAvg(source(), field(), filter, window(), weight()); } @Override public Expression surrogate() { var s = source(); var field = field(); var weight = weight(); if (field.foldable()) { return new MvAvg(s, field); } if (weight.foldable()) { return new Div( s, new Sum(s, field, filter(), window(), SummationMode.COMPENSATED_LITERAL), new Count(s, field, filter(), window()), dataType() ); } else { return new Div( s, new Sum(s, new Mul(s, field, weight), filter(), window(), SummationMode.COMPENSATED_LITERAL), new Sum(s, weight, filter(), window(), SummationMode.COMPENSATED_LITERAL), dataType() ); } } public Expression weight() { return weight; } }
WeightedAvg
java
elastic__elasticsearch
libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/env/EnvironmentBridge.java
{ "start": 774, "end": 1368 }
interface ____ extends StableBridgeAPI<Environment> { static EnvironmentBridge fromInternal(final Environment delegate) { return new EnvironmentBridge.ProxyInternal(delegate); } static EnvironmentBridge create(final SettingsBridge bridgedSettings, final Path configPath) { return fromInternal(new Environment(bridgedSettings.toInternal(), configPath)); } /** * An implementation of {@link EnvironmentBridge} that proxies calls through * to an internal {@link Environment}. * @see StableBridgeAPI.ProxyInternal */ final
EnvironmentBridge
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/valuehandlingmode/inline/OptionalOneToOneTest.java
{ "start": 1056, "end": 1938 }
class ____ { @Test public void testBidirQueryEntityProperty(EntityManagerFactoryScope scope) { PersonAddress personAddress = scope.fromTransaction( session -> { PersonAddress address = new PersonAddress(); Person person = new Person(); address.setPerson( person ); person.setPersonAddress( address ); session.persist( person ); session.persist( address ); return address; } ); scope.inTransaction( session -> { CriteriaBuilder criteriaBuilder = session.getCriteriaBuilder(); CriteriaQuery<Person> criteria = criteriaBuilder.createQuery( Person.class ); Root<Person> root = criteria.from( Person.class ); criteria.where( criteriaBuilder.equal( root.get( "personAddress" ), personAddress ) ); session.createQuery( criteria ).getSingleResult(); } ); } @Entity(name = "Person") public static
OptionalOneToOneTest
java
elastic__elasticsearch
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/explain/FieldSelectionTests.java
{ "start": 578, "end": 1806 }
class ____ extends AbstractXContentSerializingTestCase<FieldSelection> { public static FieldSelection createRandom() { Set<String> mappingTypes = randomSubsetOf(randomIntBetween(1, 3), "int", "float", "double", "text", "keyword", "ip").stream() .collect(Collectors.toSet()); FieldSelection.FeatureType featureType = randomBoolean() ? null : randomFrom(FieldSelection.FeatureType.values()); String reason = randomBoolean() ? null : randomAlphaOfLength(20); return new FieldSelection(randomAlphaOfLength(10), mappingTypes, randomBoolean(), randomBoolean(), featureType, reason); } @Override protected FieldSelection createTestInstance() { return createRandom(); } @Override protected FieldSelection mutateInstance(FieldSelection instance) { return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 } @Override protected FieldSelection doParseInstance(XContentParser parser) throws IOException { return FieldSelection.PARSER.apply(parser, null); } @Override protected Writeable.Reader<FieldSelection> instanceReader() { return FieldSelection::new; } }
FieldSelectionTests
java
elastic__elasticsearch
x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/patterntext/PatternTextDocValuesTests.java
{ "start": 8112, "end": 10292 }
class ____ extends SortedSetDocValues { private final List<String> ordToValues; private final List<Integer> docToOrds; private int currDoc = -1; // Single value for each docId, null if no value for a docId SimpleSortedSetDocValues(String... docIdToValue) { ordToValues = Arrays.stream(docIdToValue).filter(Objects::nonNull).collect(Collectors.toSet()).stream().sorted().toList(); docToOrds = Arrays.stream(docIdToValue).map(v -> v == null ? null : ordToValues.indexOf(v)).toList(); } @Override public long nextOrd() { return docToOrds.get(currDoc); } @Override public int docValueCount() { return 1; } @Override public BytesRef lookupOrd(long ord) { return new BytesRef(ordToValues.get((int) ord)); } @Override public long getValueCount() { return ordToValues.size(); } @Override public boolean advanceExact(int target) { for (currDoc = target; currDoc < docToOrds.size(); currDoc++) { if (docToOrds.get(currDoc) != null) { return currDoc == target; } } return false; } @Override public int docID() { return currDoc >= docToOrds.size() ? NO_MORE_DOCS : currDoc; } @Override public int nextDoc() throws IOException { throw new UnsupportedOperationException(); } @Override public int advance(int target) { throw new UnsupportedOperationException(); } @Override public long cost() { return 1; } } public static String info(int... offsets) { List<Arg.Info> argsInfo = new ArrayList<>(); for (var offset : offsets) { argsInfo.add(new Arg.Info(Arg.Type.GENERIC, offset)); } try { return Arg.encodeInfo(argsInfo); } catch (IOException e) { throw new RuntimeException(e); } } static
SimpleSortedSetDocValues
java
apache__flink
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/AsyncCalcTestPrograms.java
{ "start": 11662, "end": 12088 }
class ____ extends AsyncScalarFunction { public void eval(CompletableFuture<String> future, String s, Integer... a) { int m = 1; for (int n : a) { m *= n; } future.complete(s + m); } } /** * A UDF minus Timestamp with the specified offset. This UDF also ensures open and close are * called. */ public static
AsyncJavaFunc2
java
apache__flink
flink-core/src/test/java/org/apache/flink/api/common/typeutils/TypeSerializerUpgradeTestBase.java
{ "start": 6113, "end": 8225 }
class ____<UpgradedElementT> implements UpgradeVerifier<UpgradedElementT> { private final UpgradeVerifier<UpgradedElementT> delegateVerifier; private final ClassLoader verifierClassloader; ClassLoaderSafeUpgradeVerifier( Class<? extends UpgradeVerifier<UpgradedElementT>> delegateVerifierClass) throws Exception { checkNotNull(delegateVerifierClass); Class<? extends UpgradeVerifier<UpgradedElementT>> relocatedDelegateVerifierClass = ClassRelocator.relocate(delegateVerifierClass); this.verifierClassloader = relocatedDelegateVerifierClass.getClassLoader(); try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(verifierClassloader)) { this.delegateVerifier = relocatedDelegateVerifierClass.newInstance(); } } @Override public TypeSerializer<UpgradedElementT> createUpgradedSerializer() { try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(verifierClassloader)) { return delegateVerifier.createUpgradedSerializer(); } } @Override public Condition<UpgradedElementT> testDataCondition() { try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(verifierClassloader)) { return delegateVerifier.testDataCondition(); } } @Override public Condition<TypeSerializerSchemaCompatibility<UpgradedElementT>> schemaCompatibilityCondition(FlinkVersion version) { try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(verifierClassloader)) { return delegateVerifier.schemaCompatibilityCondition(version); } } } /** * Specification of one test scenario. This mainly needs a {@link PreUpgradeSetup} and {@link * UpgradeVerifier}. */ public static
ClassLoaderSafeUpgradeVerifier
java
elastic__elasticsearch
modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportExplainDataStreamLifecycleAction.java
{ "start": 2244, "end": 6669 }
class ____ extends TransportMasterNodeReadProjectAction< ExplainDataStreamLifecycleAction.Request, ExplainDataStreamLifecycleAction.Response> { private final IndexNameExpressionResolver indexNameExpressionResolver; private final DataStreamLifecycleErrorStore errorStore; private final DataStreamGlobalRetentionSettings globalRetentionSettings; @Inject public TransportExplainDataStreamLifecycleAction( TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, ProjectResolver projectResolver, IndexNameExpressionResolver indexNameExpressionResolver, DataStreamLifecycleErrorStore dataLifecycleServiceErrorStore, DataStreamGlobalRetentionSettings globalRetentionSettings ) { super( ExplainDataStreamLifecycleAction.INSTANCE.name(), transportService, clusterService, threadPool, actionFilters, ExplainDataStreamLifecycleAction.Request::new, projectResolver, ExplainDataStreamLifecycleAction.Response::new, threadPool.executor(ThreadPool.Names.MANAGEMENT) ); this.indexNameExpressionResolver = indexNameExpressionResolver; this.errorStore = dataLifecycleServiceErrorStore; this.globalRetentionSettings = globalRetentionSettings; } @Override protected void masterOperation( Task task, ExplainDataStreamLifecycleAction.Request request, ProjectState state, ActionListener<ExplainDataStreamLifecycleAction.Response> listener ) throws Exception { ProjectMetadata metadata = state.metadata(); String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(metadata, request); List<ExplainIndexDataStreamLifecycle> explainIndices = new ArrayList<>(concreteIndices.length); for (String index : concreteIndices) { IndexAbstraction indexAbstraction = metadata.getIndicesLookup().get(index); if (indexAbstraction == null) { continue; } IndexMetadata idxMetadata = metadata.index(index); if (idxMetadata == null) { continue; } DataStream parentDataStream = indexAbstraction.getParentDataStream(); if (parentDataStream == null || 
parentDataStream.isIndexManagedByDataStreamLifecycle(idxMetadata.getIndex(), metadata::index) == false) { explainIndices.add(new ExplainIndexDataStreamLifecycle(index, false, false, null, null, null, null, null)); continue; } RolloverInfo rolloverInfo = idxMetadata.getRolloverInfos().get(parentDataStream.getName()); TimeValue generationDate = parentDataStream.getGenerationLifecycleDate(idxMetadata); ExplainIndexDataStreamLifecycle explainIndexDataStreamLifecycle = new ExplainIndexDataStreamLifecycle( index, true, parentDataStream.isInternal(), idxMetadata.getCreationDate(), rolloverInfo == null ? null : rolloverInfo.getTime(), generationDate, parentDataStream.getDataLifecycleForIndex(idxMetadata.getIndex()), errorStore.getError(state.projectId(), index) ); explainIndices.add(explainIndexDataStreamLifecycle); } ClusterSettings clusterSettings = clusterService.getClusterSettings(); listener.onResponse( new ExplainDataStreamLifecycleAction.Response( explainIndices, request.includeDefaults() ? clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) : null, globalRetentionSettings.get(false), globalRetentionSettings.get(true) ) ); } @Override protected ClusterBlockException checkBlock(ExplainDataStreamLifecycleAction.Request request, ProjectState state) { return state.blocks() .indicesBlockedException( state.projectId(), ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndexNames(state.metadata(), request) ); } }
TransportExplainDataStreamLifecycleAction
java
elastic__elasticsearch
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java
{ "start": 34407, "end": 36956 }
class ____ extends AnalyzerRule<LogicalPlan> { @Override protected LogicalPlan rule(LogicalPlan plan) { if (plan instanceof Project p) { if (p.child() instanceof Filter f) { Expression condition = f.condition(); if (condition.resolved() == false && f.childrenResolved()) { Expression newCondition = replaceAliases(condition, p.projections()); if (newCondition != condition) { return new Project(p.source(), f.with(newCondition), p.projections()); } } } } if (plan instanceof Aggregate a) { if (a.child() instanceof Filter f) { Expression condition = f.condition(); if (condition.resolved() == false && f.childrenResolved()) { Expression newCondition = replaceAliases(condition, a.aggregates()); if (newCondition != condition) { return new Aggregate(a.source(), f.with(newCondition), a.groupings(), a.aggregates()); } } } } return plan; } private static Expression replaceAliases(Expression condition, List<? extends NamedExpression> named) { List<Alias> aliases = new ArrayList<>(); named.forEach(n -> { if (n instanceof Alias) { aliases.add((Alias) n); } }); // traverse bottom up to ensure that the transformation is not applied to children of inserted aliases. // Instead, the inserted aliases should be resolved by another round of name resolution. return condition.transformUp(UnresolvedAttribute.class, u -> { boolean qualified = u.qualifier() != null; for (Alias alias : aliases) { // don't replace field with their own aliases (it creates infinite cycles) if (alias.anyMatch(e -> e == u) == false && (qualified ? Objects.equals(alias.qualifiedName(), u.qualifiedName()) : Objects.equals(alias.name(), u.name()))) { return alias; } } return u; }); } } private static
ResolveFilterRefs
java
spring-projects__spring-boot
module/spring-boot-data-redis/src/dockerTest/java/org/springframework/boot/data/redis/testcontainers/RedisStackServerContainerConnectionDetailsFactoryTests.java
{ "start": 1886, "end": 2641 }
class ____ { @Container @ServiceConnection static final RedisStackServerContainer redis = TestImage.container(RedisStackServerContainer.class); @Autowired(required = false) private DataRedisConnectionDetails connectionDetails; @Autowired private RedisConnectionFactory connectionFactory; @Test void connectionCanBeMadeToRedisContainer() { assertThat(this.connectionDetails).isNotNull(); try (RedisConnection connection = this.connectionFactory.getConnection()) { assertThat(connection.commands().echo("Hello, World".getBytes())).isEqualTo("Hello, World".getBytes()); } } @Configuration(proxyBeanMethods = false) @ImportAutoConfiguration(DataRedisAutoConfiguration.class) static
RedisStackServerContainerConnectionDetailsFactoryTests
java
apache__maven
impl/maven-core/src/main/java/org/apache/maven/lifecycle/internal/ProjectSegment.java
{ "start": 1503, "end": 1734 }
class ____ the execution context of one such task segment. * </p> * <p> * Wise voices have suggested that maybe aggregators shouldn't be bound to the ordinary * lifecycle at all, in which case we wouldn't be needing this
represents
java
spring-projects__spring-security
web/src/main/java/org/springframework/security/web/authentication/preauth/j2ee/WebXmlMappableAttributesRetriever.java
{ "start": 2034, "end": 4868 }
class ____ implements ResourceLoaderAware, MappableAttributesRetriever, InitializingBean { protected final Log logger = LogFactory.getLog(getClass()); private @Nullable ResourceLoader resourceLoader; private Set<String> mappableAttributes = new HashSet<>(); @Override public void setResourceLoader(ResourceLoader resourceLoader) { this.resourceLoader = resourceLoader; } @Override public Set<String> getMappableAttributes() { return this.mappableAttributes; } /** * Loads the web.xml file using the configured <tt>ResourceLoader</tt> and parses the * role-name elements from it, using these as the set of <tt>mappableAttributes</tt>. */ @Override public void afterPropertiesSet() throws Exception { Assert.notNull(this.resourceLoader, "resourceLoader cannot be null"); Resource webXml = this.resourceLoader.getResource("/WEB-INF/web.xml"); Document doc = getDocument(webXml.getInputStream()); NodeList webApp = doc.getElementsByTagName("web-app"); Assert.isTrue(webApp.getLength() == 1, () -> "Failed to find 'web-app' element in resource" + webXml); NodeList securityRoles = ((Element) webApp.item(0)).getElementsByTagName("security-role"); List<String> roleNames = getRoleNames(webXml, securityRoles); this.mappableAttributes = Collections.unmodifiableSet(new HashSet<>(roleNames)); } private List<String> getRoleNames(Resource webXml, NodeList securityRoles) { ArrayList<String> roleNames = new ArrayList<>(); for (int i = 0; i < securityRoles.getLength(); i++) { Element securityRoleElement = (Element) securityRoles.item(i); NodeList roles = securityRoleElement.getElementsByTagName("role-name"); if (roles.getLength() > 0) { String roleName = roles.item(0).getTextContent().trim(); roleNames.add(roleName); this.logger.info("Retrieved role-name '" + roleName + "' from web.xml"); } else { this.logger.info("No security-role elements found in " + webXml); } } return roleNames; } /** * @return Document for the specified InputStream */ private Document getDocument(InputStream aStream) { try { 
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setValidating(false); DocumentBuilder builder = factory.newDocumentBuilder(); builder.setEntityResolver(new MyEntityResolver()); return builder.parse(aStream); } catch (FactoryConfigurationError | IOException | SAXException | ParserConfigurationException ex) { throw new RuntimeException("Unable to parse document object", ex); } finally { try { aStream.close(); } catch (IOException ex) { this.logger.warn("Failed to close input stream for web.xml", ex); } } } /** * We do not need to resolve external entities, so just return an empty String. */ private static final
WebXmlMappableAttributesRetriever
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesReservation.java
{ "start": 8187, "end": 9223 }
class ____ extends JerseyBinder { CapTestServletModule(boolean flag) { if(flag) { getConf().setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true); getConf().setStrings(YarnConfiguration.YARN_ADMIN_ACL, "testuser1"); } } @Override public void configureScheduler() { getConf().set(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class.getName()); getConf().setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class, ResourceScheduler.class); CapacitySchedulerConfiguration csconf = new CapacitySchedulerConfiguration(getConf()); String[] queues = { "default", "dedicated" }; QueuePath dedicatedQueuePath = new QueuePath("root.dedicated"); csconf.setQueues(new QueuePath("root"), queues); csconf.setCapacity(new QueuePath("root.default"), 50.0f); csconf.setCapacity(dedicatedQueuePath, 50.0f); csconf.setReservable(dedicatedQueuePath, true); setConf(csconf); } } private
CapTestServletModule
java
apache__logging-log4j2
log4j-jakarta-smtp/src/main/java/org/apache/logging/log4j/smtp/SmtpManager.java
{ "start": 2193, "end": 9125 }
class ____ extends MailManager { private final Session session; private final CyclicBuffer<LogEvent> buffer; private volatile MimeMessage message; private final FactoryData data; private static MimeMessage createMimeMessage( final FactoryData data, final Session session, final LogEvent appendEvent) throws MessagingException { return new MimeMessageBuilder(session) .setFrom(data.getFrom()) .setReplyTo(data.getReplyTo()) .setRecipients(Message.RecipientType.TO, data.getTo()) .setRecipients(Message.RecipientType.CC, data.getCc()) .setRecipients(Message.RecipientType.BCC, data.getBcc()) .setSubject(data.getSubjectSerializer().toSerializable(appendEvent)) .build(); } protected SmtpManager(final String name, final Session session, final MimeMessage message, final FactoryData data) { super(null, name); this.session = session; this.message = message; this.data = data; this.buffer = new CyclicBuffer<>(LogEvent.class, data.getBufferSize()); } @Override public void add(final LogEvent event) { buffer.add(event.toImmutable()); } @Override public void sendEvents(final Layout<?> layout, final LogEvent appendEvent) { if (message == null) { connect(appendEvent); } try { final LogEvent[] priorEvents = removeAllBufferedEvents(); // LOG4J-310: log appendEvent even if priorEvents is empty final byte[] rawBytes = formatContentToBytes(priorEvents, appendEvent, layout); final String contentType = layout.getContentType(); final String encoding = getEncoding(rawBytes, contentType); final byte[] encodedBytes = encodeContentToBytes(rawBytes, encoding); final InternetHeaders headers = getHeaders(contentType, encoding); final MimeMultipart mp = getMimeMultipart(encodedBytes, headers); final String subject = data.getSubjectSerializer().toSerializable(appendEvent); sendMultipartMessage(message, mp, subject); } catch (final MessagingException | IOException | RuntimeException e) { logError("Caught exception while sending e-mail notification.", e); throw new LoggingException("Error occurred while 
sending email", e); } } LogEvent[] removeAllBufferedEvents() { return buffer.removeAll(); } protected byte[] formatContentToBytes( final LogEvent[] priorEvents, final LogEvent appendEvent, final Layout<?> layout) throws IOException { final ByteArrayOutputStream raw = new ByteArrayOutputStream(); writeContent(priorEvents, appendEvent, layout, raw); return raw.toByteArray(); } private void writeContent( final LogEvent[] priorEvents, final LogEvent appendEvent, final Layout<?> layout, final ByteArrayOutputStream out) throws IOException { writeHeader(layout, out); writeBuffer(priorEvents, appendEvent, layout, out); writeFooter(layout, out); } protected void writeHeader(final Layout<?> layout, final OutputStream out) throws IOException { final byte[] header = layout.getHeader(); if (header != null) { out.write(header); } } protected void writeBuffer( final LogEvent[] priorEvents, final LogEvent appendEvent, final Layout<?> layout, final OutputStream out) throws IOException { for (final LogEvent priorEvent : priorEvents) { final byte[] bytes = layout.toByteArray(priorEvent); out.write(bytes); } final byte[] bytes = layout.toByteArray(appendEvent); out.write(bytes); } protected void writeFooter(final Layout<?> layout, final OutputStream out) throws IOException { final byte[] footer = layout.getFooter(); if (footer != null) { out.write(footer); } } protected String getEncoding(final byte[] rawBytes, final String contentType) { final DataSource dataSource = new ByteArrayDataSource(rawBytes, contentType); return MimeUtility.getEncoding(dataSource); } protected byte[] encodeContentToBytes(final byte[] rawBytes, final String encoding) throws MessagingException, IOException { final ByteArrayOutputStream encoded = new ByteArrayOutputStream(); encodeContent(rawBytes, encoding, encoded); return encoded.toByteArray(); } protected void encodeContent(final byte[] bytes, final String encoding, final ByteArrayOutputStream out) throws MessagingException, IOException { try (final 
OutputStream encoder = MimeUtility.encode(out, encoding)) { encoder.write(bytes); } } protected InternetHeaders getHeaders(final String contentType, final String encoding) { final InternetHeaders headers = new InternetHeaders(); headers.setHeader("Content-Type", contentType + "; charset=UTF-8"); headers.setHeader("Content-Transfer-Encoding", encoding); return headers; } protected MimeMultipart getMimeMultipart(final byte[] encodedBytes, final InternetHeaders headers) throws MessagingException { final MimeMultipart mp = new MimeMultipart(); final MimeBodyPart part = new MimeBodyPart(headers, encodedBytes); mp.addBodyPart(part); return mp; } /** * @deprecated Please use the {@link #sendMultipartMessage(MimeMessage, MimeMultipart, String)} method instead. */ @Deprecated protected void sendMultipartMessage(final MimeMessage msg, final MimeMultipart mp) throws MessagingException { synchronized (msg) { msg.setContent(mp); msg.setSentDate(new Date()); Transport.send(msg); } } protected void sendMultipartMessage(final MimeMessage msg, final MimeMultipart mp, final String subject) throws MessagingException { synchronized (msg) { msg.setContent(mp); msg.setSentDate(new Date()); msg.setSubject(subject); Transport.send(msg); } } private synchronized void connect(final LogEvent appendEvent) { if (message != null) { return; } try { message = createMimeMessage(data, session, appendEvent); } catch (final MessagingException e) { logError("Could not set SmtpAppender message options", e); message = null; } } /** * Factory to create the SMTP Manager. */ @ServiceProvider(value = MailManagerFactory.class, resolution = Resolution.OPTIONAL) public static
SmtpManager
java
apache__flink
flink-table/flink-table-code-splitter/src/test/resources/add-boolean/code/TestNotRewrite.java
{ "start": 7, "end": 306 }
class ____ { public void fun1(int[] a, int[] b) { a[0] += b[1]; b[1] += a[1]; a[1] += b[2]; b[2] += a[3]; a[3] += b[4]; } public void fun2() { return; } public void fun3(int a) { a += 5; return; } }
TestNotRewrite
java
apache__camel
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/aggregator/SpringAggregateExpressionSizeFallbackTest.java
{ "start": 1078, "end": 1420 }
class ____ extends AggregateExpressionSizeFallbackTest { @Override protected CamelContext createCamelContext() throws Exception { return createSpringCamelContext(this, "org/apache/camel/spring/processor/aggregator/SpringAggregateExpressionSizeFallbackTest.xml"); } }
SpringAggregateExpressionSizeFallbackTest
java
apache__flink
flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/state/rocksdb/snapshot/RocksDBSnapshotStrategyBase.java
{ "start": 3268, "end": 3409 }
class ____ {@link SnapshotStrategy} implementations for RocksDB state backend. * * @param <K> type of the backend keys. */ public abstract
for
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_containsSubsequence_Test.java
{ "start": 977, "end": 1357 }
class ____ extends ByteArrayAssertBaseTest { @Override protected ByteArrayAssert invoke_api_method() { return assertions.containsSubsequence((byte) 6, (byte) 8); } @Override protected void verify_internal_effects() { verify(arrays).assertContainsSubsequence(getInfo(assertions), getActual(assertions), arrayOf(6, 8)); } }
ByteArrayAssert_containsSubsequence_Test
java
apache__hadoop
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/SortValidator.java
{ "start": 16852, "end": 22713 }
class ____ extends MapReduceBase implements Reducer<BytesWritable, IntWritable, BytesWritable, IntWritable> { public void reduce(BytesWritable key, Iterator<IntWritable> values, OutputCollector<BytesWritable, IntWritable> output, Reporter reporter) throws IOException { int ones = 0; int twos = 0; while (values.hasNext()) { IntWritable count = values.next(); if (count.equals(sortInput)) { ++ones; } else if (count.equals(sortOutput)) { ++twos; } else { throw new IOException("Invalid 'value' of " + count.get() + " for (key,value): " + key.toString()); } } // Check to ensure there are equal no. of ones and twos if (ones != twos) { throw new IOException("Illegal ('one', 'two'): (" + ones + ", " + twos + ") for (key, value): " + key.toString()); } } } static void checkRecords(Configuration defaults, int noMaps, int noReduces, Path sortInput, Path sortOutput) throws IOException { JobConf jobConf = new JobConf(defaults, RecordChecker.class); jobConf.setJobName("sortvalidate-record-checker"); jobConf.setInputFormat(SequenceFileInputFormat.class); jobConf.setOutputFormat(SequenceFileOutputFormat.class); jobConf.setOutputKeyClass(BytesWritable.class); jobConf.setOutputValueClass(IntWritable.class); jobConf.setMapperClass(Map.class); jobConf.setReducerClass(Reduce.class); JobClient client = new JobClient(jobConf); ClusterStatus cluster = client.getClusterStatus(); if (noMaps == -1) { noMaps = cluster.getTaskTrackers() * jobConf.getInt(MAPS_PER_HOST, 10); } if (noReduces == -1) { noReduces = (int) (cluster.getMaxReduceTasks() * 0.9); String sortReduces = jobConf.get(REDUCES_PER_HOST); if (sortReduces != null) { noReduces = cluster.getTaskTrackers() * Integer.parseInt(sortReduces); } } jobConf.setNumMapTasks(noMaps); jobConf.setNumReduceTasks(noReduces); FileInputFormat.setInputPaths(jobConf, sortInput); FileInputFormat.addInputPath(jobConf, sortOutput); Path outputPath = new Path("/tmp/sortvalidate/recordchecker"); FileSystem fs = FileSystem.get(defaults); if 
(fs.exists(outputPath)) { fs.delete(outputPath, true); } FileOutputFormat.setOutputPath(jobConf, outputPath); // Uncomment to run locally in a single process //job_conf.set(JTConfig.JT, "local"); Path[] inputPaths = FileInputFormat.getInputPaths(jobConf); System.out.println("\nSortValidator.RecordChecker: Running on " + cluster.getTaskTrackers() + " nodes to validate sort from " + inputPaths[0] + ", " + inputPaths[1] + " into " + FileOutputFormat.getOutputPath(jobConf) + " with " + noReduces + " reduces."); Date startTime = new Date(); System.out.println("Job started: " + startTime); JobClient.runJob(jobConf); Date end_time = new Date(); System.out.println("Job ended: " + end_time); System.out.println("The job took " + (end_time.getTime() - startTime.getTime()) /1000 + " seconds."); } } /** * The main driver for sort-validator program. * Invoke this method to submit the map/reduce job. * @throws IOException When there is communication problems with the * job tracker. */ public int run(String[] args) throws Exception { Configuration defaults = getConf(); int noMaps = -1, noReduces = -1; Path sortInput = null, sortOutput = null; boolean deepTest = false; for(int i=0; i < args.length; ++i) { try { if ("-m".equals(args[i])) { noMaps = Integer.parseInt(args[++i]); } else if ("-r".equals(args[i])) { noReduces = Integer.parseInt(args[++i]); } else if ("-sortInput".equals(args[i])){ sortInput = new Path(args[++i]); } else if ("-sortOutput".equals(args[i])){ sortOutput = new Path(args[++i]); } else if ("-deep".equals(args[i])) { deepTest = true; } else { printUsage(); return -1; } } catch (NumberFormatException except) { System.err.println("ERROR: Integer expected instead of " + args[i]); printUsage(); return -1; } catch (ArrayIndexOutOfBoundsException except) { System.err.println("ERROR: Required parameter missing from " + args[i-1]); printUsage(); return -1; } } // Sanity check if (sortInput == null || sortOutput == null) { printUsage(); return -2; } // Check if the 
records are consistent and sorted correctly RecordStatsChecker.checkRecords(defaults, sortInput, sortOutput); // Check if the same records are present in sort's inputs & outputs if (deepTest) { RecordChecker.checkRecords(defaults, noMaps, noReduces, sortInput, sortOutput); } System.out.println("\nSUCCESS! Validated the MapReduce framework's 'sort'" + " successfully."); return 0; } public static void main(String[] args) throws Exception { int res = ToolRunner.run(new Configuration(), new SortValidator(), args); System.exit(res); } }
Reduce
java
apache__hadoop
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/integration/AbstractAzureScaleTest.java
{ "start": 1468, "end": 2387 }
class ____ extends AbstractWasbTestBase implements Sizes { protected static final Logger LOG = LoggerFactory.getLogger(AbstractAzureScaleTest.class); protected int getTestTimeoutMillis() { return AzureTestConstants.SCALE_TEST_TIMEOUT_MILLIS; } @BeforeEach @Override public void setUp() throws Exception { super.setUp(); LOG.debug("Scale test operation count = {}", getOperationCount()); assumeScaleTestsEnabled(getConfiguration()); } /** * Create the test account. * @return a test account * @throws Exception on any failure to create the account. */ protected AzureBlobStorageTestAccount createTestAccount() throws Exception { return AzureBlobStorageTestAccount.create(createConfiguration()); } protected long getOperationCount() { return getConfiguration().getLong(KEY_OPERATION_COUNT, DEFAULT_OPERATION_COUNT); } }
AbstractAzureScaleTest
java
quarkusio__quarkus
core/deployment/src/test/java/io/quarkus/deployment/util/JandexUtilTest.java
{ "start": 557, "end": 7553 }
class ____ { private static final DotName SIMPLE = DotName.createSimple(Single.class.getName()); private static final DotName MULTIPLE = DotName.createSimple(Multiple.class.getName()); @Test public void testInterfaceNotInHierarchy() { final Index index = index(Single.class, SingleImpl.class, Multiple.class); final DotName impl = DotName.createSimple(SingleImpl.class.getName()); final List<Type> result = JandexUtil.resolveTypeParameters(impl, MULTIPLE, index); assertThat(result).isEmpty(); } @Test public void testNoTypePassed() { final Index index = index(Single.class, SingleImplNoType.class); checkRepoArg(index, SingleImplNoType.class, Single.class, Object.class); } @Test public void testAbstractSingle() { final Index index = index(Single.class, AbstractSingle.class); final DotName impl = DotName.createSimple(AbstractSingle.class.getName()); List<Type> ret = JandexUtil.resolveTypeParameters(impl, SIMPLE, index); assertThat(ret).hasSize(1).allMatch(t -> t.kind() == Kind.TYPE_VARIABLE && t.asTypeVariable().identifier().equals("S")); } @Test public void testSimplestImpl() { final Index index = index(Single.class, SingleImpl.class); checkRepoArg(index, SingleImpl.class, Single.class, String.class); } @Test public void testSimplestImplWithBound() { final Index index = index(SingleWithBound.class, SingleWithBoundImpl.class); checkRepoArg(index, SingleWithBoundImpl.class, SingleWithBound.class, List.class); } @Test public void testSimpleImplMultipleParams() { final Index index = index(Multiple.class, MultipleImpl.class); checkRepoArg(index, MultipleImpl.class, Multiple.class, Integer.class, String.class); } @Test public void testInverseParameterNames() { final Index index = index(Multiple.class, InverseMultiple.class, InverseMultipleImpl.class); checkRepoArg(index, InverseMultipleImpl.class, Multiple.class, Double.class, Integer.class); } @Test public void testImplExtendsSimplestImplementation() { final Index index = index(Single.class, SingleImpl.class, 
SingleImplImpl.class); checkRepoArg(index, SingleImplImpl.class, Single.class, String.class); } @Test public void testImplementationOfInterfaceThatExtendsSimpleWithoutParam() { final Index index = index(Single.class, ExtendsSimpleNoParam.class, ExtendsSimpleNoParamImpl.class); checkRepoArg(index, ExtendsSimpleNoParamImpl.class, Single.class, Double.class); } @Test public void testImplExtendsImplOfInterfaceThatExtendsSimpleWithoutParams() { final Index index = index(Single.class, ExtendsSimpleNoParam.class, ExtendsSimpleNoParamImpl.class, ExtendsSimpleNoParamImplImpl.class); checkRepoArg(index, ExtendsSimpleNoParamImplImpl.class, Single.class, Double.class); } @Test public void testImplOfInterfaceThatExtendsSimpleWithParam() { final Index index = index(Single.class, ExtendsSimpleWithParam.class, ExtendsSimpleWithParamImpl.class); checkRepoArg(index, ExtendsSimpleWithParamImpl.class, Single.class, Integer.class); } @Test public void testImplOfInterfaceThatExtendsSimpleWithParamInMultipleLevels() { final Index index = index(Single.class, ExtendsSimpleWithParam.class, ExtendsExtendsSimpleWithParam.class, ExtendsExtendsSimpleWithParamImpl.class); checkRepoArg(index, ExtendsExtendsSimpleWithParamImpl.class, Single.class, Double.class); } @Test public void testImplOfInterfaceThatExtendsSimpleWithGenericParamInMultipleLevels() { final Index index = index(Single.class, ExtendsSimpleWithParam.class, ExtendsExtendsSimpleWithParam.class, ExtendsExtendsSimpleGenericParam.class); checkRepoArg(index, ExtendsExtendsSimpleGenericParam.class, Single.class, Map.class); } @Test public void testImplOfMultipleWithParamsInDifferentLevels() { final Index index = index(Multiple.class, MultipleT1.class, ExtendsMultipleT1Impl.class); checkRepoArg(index, ExtendsMultipleT1Impl.class, Multiple.class, Integer.class, String.class); } @Test public void testImplOfAbstractMultipleWithParamsInDifferentLevels() { final Index index = index(Multiple.class, MultipleT1.class, AbstractMultipleT1Impl.class, 
ExtendsAbstractMultipleT1Impl.class); checkRepoArg(index, ExtendsAbstractMultipleT1Impl.class, Multiple.class, Integer.class, String.class); } @Test public void testMultiplePathsToSingle() { final Index index = index(Single.class, SingleImpl.class, SingleFromInterfaceAndSuperClass.class); checkRepoArg(index, SingleFromInterfaceAndSuperClass.class, Single.class, String.class); } @Test public void testExtendsAbstractClass() { final Index index = index(Single.class, AbstractSingle.class, AbstractSingleImpl.class, ExtendsAbstractSingleImpl.class); checkRepoArg(index, AbstractSingleImpl.class, AbstractSingle.class, Integer.class); checkRepoArg(index, ExtendsAbstractSingleImpl.class, AbstractSingle.class, Integer.class); } @Test public void testArrayGenerics() { final Index index = index(Repo.class, ArrayRepo.class, GenericArrayRepo.class); checkRepoArg(index, ArrayRepo.class, Repo.class, Integer[].class); } @Test public void testCompositeGenerics() { final Index index = index(Repo.class, Repo2.class, CompositeRepo.class, CompositeRepo2.class, GenericCompositeRepo.class, GenericCompositeRepo2.class); checkRepoArg(index, CompositeRepo.class, Repo.class, Repo.class.getName() + "<java.lang.Integer>"); checkRepoArg(index, CompositeRepo2.class, Repo2.class, Repo.class.getName() + "<java.lang.Integer>"); } @Test public void testErasedGenerics() { final Index index = index(Repo.class, BoundedRepo.class, ErasedRepo1.class, MultiBoundedRepo.class, ErasedRepo2.class, A.class); checkRepoArg(index, ErasedRepo1.class, Repo.class, A.class); checkRepoArg(index, ErasedRepo2.class, Repo.class, A.class); } @Test public void testNonProblematicUnindexed() { final Index index = index(Single.class, SingleFromInterfaceAndSuperClass.class); checkRepoArg(index, SingleFromInterfaceAndSuperClass.class, Single.class, String.class); } @Test public void testProblematicUnindexed() { final Index index = index(Single.class, AbstractSingleImpl.class, ExtendsAbstractSingleImpl.class); 
assertThatThrownBy(() -> { JandexUtil.resolveTypeParameters(name(ExtendsAbstractSingleImpl.class), name(Single.class), index); }).isInstanceOf(IllegalArgumentException.class); } public
JandexUtilTest
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/ConfiguredFailoverProxyProvider.java
{ "start": 1407, "end": 3026 }
class ____<T> extends AbstractNNFailoverProxyProvider<T> { protected final List<NNProxyInfo<T>> proxies; private int currentProxyIndex = 0; public ConfiguredFailoverProxyProvider(Configuration conf, URI uri, Class<T> xface, HAProxyFactory<T> factory) { this(conf, uri, xface, factory, DFS_NAMENODE_RPC_ADDRESS_KEY); } public ConfiguredFailoverProxyProvider(Configuration conf, URI uri, Class<T> xface, HAProxyFactory<T> factory, String addressKey) { super(conf, uri, xface, factory); this.proxies = getProxyAddresses(uri, addressKey); } /** * Lazily initialize the RPC proxy object. */ @Override public synchronized ProxyInfo<T> getProxy() { NNProxyInfo<T> current = proxies.get(currentProxyIndex); return createProxyIfNeeded(current); } @Override public void performFailover(T currentProxy) { incrementProxyIndex(); } synchronized void incrementProxyIndex() { currentProxyIndex = (currentProxyIndex + 1) % proxies.size(); } /** * Close all the proxy objects which have been opened over the lifetime of * this proxy provider. */ @Override public synchronized void close() throws IOException { for (ProxyInfo<T> proxy : proxies) { if (proxy.proxy != null) { if (proxy.proxy instanceof Closeable) { ((Closeable)proxy.proxy).close(); } else { RPC.stopProxy(proxy.proxy); } } } } /** * Logical URI is required for this failover proxy provider. */ @Override public boolean useLogicalURI() { return true; } }
ConfiguredFailoverProxyProvider
java
apache__camel
dsl/camel-jbang/camel-jbang-plugin-kubernetes/src/test/java/org/apache/camel/dsl/jbang/core/commands/kubernetes/KubernetesPodLogsTest.java
{ "start": 1414, "end": 2817 }
class ____ extends KubernetesBaseTest { @Test public void shouldHandlePodNotFound() throws Exception { KubernetesPodLogs command = createCommand(); command.name = "mickey-mouse"; command.maxRetryAttempts = 2; // total timeout of 4 seconds int exit = command.doCall(); Assertions.assertEquals(0, exit); Assertions.assertTrue( printer.getOutput().contains("Pod for label app.kubernetes.io/name=mickey-mouse not available")); } @Test public void shouldGetPodLogs() throws Exception { Pod pod = new PodBuilder() .withNewMetadata() .withName("pod") .withLabels(Map.of(BaseTrait.KUBERNETES_LABEL_NAME, "routes")) .endMetadata() .withNewStatus() .withPhase("Running") .endStatus() .build(); kubernetesClient.pods().resource(pod).create(); var podLog = createCommand(); podLog.maxMessageCount = 10; podLog.name = "routes"; int exit = podLog.doCall(); Assertions.assertEquals(0, exit); } private KubernetesPodLogs createCommand() { KubernetesPodLogs command = new KubernetesPodLogs(new CamelJBangMain().withPrinter(printer)); command.withClient(kubernetesClient); return command; } }
KubernetesPodLogsTest
java
apache__flink
flink-metrics/flink-metrics-core/src/test/java/org/apache/flink/metrics/util/MetricReporterTestUtils.java
{ "start": 1543, "end": 2110 }
class ____ load */ public static void testMetricReporterSetupViaSPI( final Class<? extends MetricReporterFactory> clazz) { final Set<Class<? extends MetricReporterFactory>> loadedFactories = StreamSupport.stream( ServiceLoader.load(MetricReporterFactory.class).spliterator(), false) .map(MetricReporterFactory::getClass) .collect(Collectors.toSet()); assertThat(loadedFactories).contains(clazz); } }
to
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java
{ "start": 20525, "end": 21713 }
class ____ used instead of plain {@link ShardId} to support the scenario where the same remote cluster is registered twice using * different aliases. In that case searching across the same cluster twice would make an assertion in lucene fail * (see TopDocs#tieBreakLessThan line 86). Generally, indices with same names on different clusters have different index uuids which * make their ShardIds different, which is not the case if the index is really the same one from the same cluster, in which case we * need to look at the cluster alias and make sure to assign a different shardIndex based on that. */ private record ShardIdAndClusterAlias(ShardId shardId, String clusterAlias) implements Comparable<ShardIdAndClusterAlias> { private ShardIdAndClusterAlias { assert clusterAlias != null : "clusterAlias is null"; } @Override public int compareTo(ShardIdAndClusterAlias o) { int shardIdCompareTo = shardId.compareTo(o.shardId); if (shardIdCompareTo != 0) { return shardIdCompareTo; } return clusterAlias.compareTo(o.clusterAlias); } } }
is
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/streaming/runtime/operators/sink/SinkWriterOperator.java
{ "start": 13128, "end": 15889 }
class ____ extends InitContextBase implements WriterInitContext { private final ProcessingTimeService processingTimeService; private final MailboxExecutor mailboxExecutor; private final SinkWriterMetricGroup metricGroup; private final StreamConfig operatorConfig; public InitContextImpl( StreamingRuntimeContext runtimeContext, ProcessingTimeService processingTimeService, MailboxExecutor mailboxExecutor, SinkWriterMetricGroup metricGroup, StreamConfig operatorConfig, OptionalLong restoredCheckpointId) { super(runtimeContext, restoredCheckpointId); this.mailboxExecutor = checkNotNull(mailboxExecutor); this.processingTimeService = checkNotNull(processingTimeService); this.metricGroup = checkNotNull(metricGroup); this.operatorConfig = checkNotNull(operatorConfig); } @Override public UserCodeClassLoader getUserCodeClassLoader() { return new UserCodeClassLoader() { @Override public ClassLoader asClassLoader() { return getRuntimeContext().getUserCodeClassLoader(); } @Override public void registerReleaseHookIfAbsent( String releaseHookName, Runnable releaseHook) { getRuntimeContext() .registerUserCodeClassLoaderReleaseHookIfAbsent( releaseHookName, releaseHook); } }; } @Override public MailboxExecutor getMailboxExecutor() { return mailboxExecutor; } @Override public org.apache.flink.api.common.operators.ProcessingTimeService getProcessingTimeService() { return processingTimeService; } @Override public SinkWriterMetricGroup metricGroup() { return metricGroup; } @Override public InitializationContext asSerializationSchemaInitializationContext() { return new InitContextInitializationContextAdapter( getUserCodeClassLoader(), () -> metricGroup.addGroup("user")); } @Override public boolean isObjectReuseEnabled() { return getRuntimeContext().isObjectReuseEnabled(); } @Override public <IN> TypeSerializer<IN> createInputSerializer() { return operatorConfig .<IN>getTypeSerializerIn(0, getRuntimeContext().getUserCodeClassLoader()) .duplicate(); } } }
InitContextImpl
java
apache__flink
flink-table/flink-table-code-splitter/src/test/resources/block/expected/TestIfStatementRewrite1.java
{ "start": 7, "end": 2075 }
class ____ { public void myFun1(int[] a, int[] b) throws RuntimeException { if (a[0] == 0) { myFun1_0_0_rewriteGroup5(a, b); } else { myFun1_0_6_rewriteGroup8(a, b); } } void myFun1_0_0_1_2_3_5(int[] a, int[] b) throws RuntimeException { a[2] = b[2]; a[22] = b[2]; } void myFun1_0_0_1_2_3_4(int[] a, int[] b) throws RuntimeException { a[2] = 1; a[22] = 1; } void myFun1_0_0_1(int[] a, int[] b) throws RuntimeException { System.out.println("0"); System.out.println("0"); } void myFun1_0_0_1_2_3(int[] a, int[] b) throws RuntimeException { System.out.println("1"); System.out.println("2"); } void myFun1_0_7_8(int[] a, int[] b) throws RuntimeException { System.out.println("3"); System.out.println("3"); } void myFun1_0_7_8_10(int[] a, int[] b) throws RuntimeException { a[0] = 2 * b[0]; a[1] = 2 * b[1]; a[2] = 2 * b[2]; } void myFun1_0_6_rewriteGroup8(int[] a, int[] b) throws RuntimeException { myFun1_0_7_8(a, b); if (a[1] == 1) { myFun1_0_7_8_9(a, b); } else { myFun1_0_7_8_10(a, b); } } void myFun1_0_0_rewriteGroup1_2_rewriteGroup4(int[] a, int[] b) throws RuntimeException { myFun1_0_0_1_2_3(a, b); if (a[2] == 0) { myFun1_0_0_1_2_3_4(a, b); } else { myFun1_0_0_1_2_3_5(a, b); } } void myFun1_0_0_1_6(int[] a, int[] b) throws RuntimeException { a[1] = b[1]; a[2] = b[2]; } void myFun1_0_0_rewriteGroup5(int[] a, int[] b) throws RuntimeException { myFun1_0_0_1(a, b); if (a[1] == 0) { myFun1_0_0_rewriteGroup1_2_rewriteGroup4(a, b); } else { myFun1_0_0_1_6(a, b); } } void myFun1_0_7_8_9(int[] a, int[] b) throws RuntimeException { a[0] = b[0]; a[1] = b[1]; a[2] = b[2]; } }
TestIfStatementRewrite1
java
alibaba__nacos
core/src/main/java/com/alibaba/nacos/core/namespace/injector/NamespaceDetailInjectorHolder.java
{ "start": 853, "end": 1740 }
class ____ { private static final NamespaceDetailInjectorHolder INSTANCE = new NamespaceDetailInjectorHolder(); private final Set<AbstractNamespaceDetailInjector> namespaceDetailInjectors; private NamespaceDetailInjectorHolder() { this.namespaceDetailInjectors = new HashSet<>(); } public static NamespaceDetailInjectorHolder getInstance() { return INSTANCE; } public void registerInjector(AbstractNamespaceDetailInjector injector) { this.namespaceDetailInjectors.add(injector); } /** * Inject detail from all injectors. * * @param namespace namespace */ public void injectDetail(Namespace namespace) { for (AbstractNamespaceDetailInjector each : this.namespaceDetailInjectors) { each.injectDetail(namespace); } } }
NamespaceDetailInjectorHolder
java
spring-projects__spring-framework
spring-web/src/test/java/org/springframework/web/filter/OncePerRequestFilterTests.java
{ "start": 4087, "end": 5462 }
class ____ extends OncePerRequestFilter { private boolean shouldNotFilter; private boolean shouldNotFilterAsyncDispatch = true; private boolean shouldNotFilterErrorDispatch = true; private boolean didFilter; private boolean didFilterNestedErrorDispatch; public void setShouldNotFilterErrorDispatch(boolean shouldNotFilterErrorDispatch) { this.shouldNotFilterErrorDispatch = shouldNotFilterErrorDispatch; } public void reset() { this.didFilter = false; this.didFilterNestedErrorDispatch = false; } @Override protected boolean shouldNotFilter(HttpServletRequest request) { return this.shouldNotFilter; } @Override protected boolean shouldNotFilterAsyncDispatch() { return this.shouldNotFilterAsyncDispatch; } @Override protected boolean shouldNotFilterErrorDispatch() { return this.shouldNotFilterErrorDispatch; } @Override protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) { this.didFilter = true; } @Override protected void doFilterNestedErrorDispatch(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws ServletException, IOException { this.didFilterNestedErrorDispatch = true; super.doFilterNestedErrorDispatch(request, response, filterChain); } } }
TestOncePerRequestFilter
java
apache__dubbo
dubbo-metrics/dubbo-metrics-api/src/main/java/org/apache/dubbo/metrics/data/ApplicationStatComposite.java
{ "start": 1777, "end": 3758 }
class ____ extends AbstractMetricsExport { public ApplicationStatComposite(ApplicationModel applicationModel) { super(applicationModel); } private final Map<MetricsKey, AtomicLong> applicationNumStats = new ConcurrentHashMap<>(); private final AtomicBoolean samplesChanged = new AtomicBoolean(true); public void init(List<MetricsKey> appKeys) { if (CollectionUtils.isEmpty(appKeys)) { return; } appKeys.forEach(appKey -> { applicationNumStats.put(appKey, new AtomicLong(0L)); }); samplesChanged.set(true); } public void incrementSize(MetricsKey metricsKey, int size) { if (!applicationNumStats.containsKey(metricsKey)) { return; } applicationNumStats.get(metricsKey).getAndAdd(size); } public void setAppKey(MetricsKey metricsKey, Long num) { if (!applicationNumStats.containsKey(metricsKey)) { return; } applicationNumStats.get(metricsKey).set(num); } public List<MetricSample> export(MetricsCategory category) { List<MetricSample> list = new ArrayList<>(); for (MetricsKey type : applicationNumStats.keySet()) { list.add(convertToSample(type, category, applicationNumStats.get(type))); } return list; } @SuppressWarnings({"rawtypes"}) private GaugeMetricSample convertToSample(MetricsKey type, MetricsCategory category, AtomicLong targetNumber) { return new GaugeMetricSample<>( type, MetricsSupport.applicationTags(getApplicationModel()), category, targetNumber, AtomicLong::get); } public Map<MetricsKey, AtomicLong> getApplicationNumStats() { return applicationNumStats; } @Override public boolean calSamplesChanged() { // CAS to get and reset the flag in an atomic operation return samplesChanged.compareAndSet(true, false); } }
ApplicationStatComposite
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/ExpectedExceptionCheckerTest.java
{ "start": 1527, "end": 2703 }
class ____ { @Rule ExpectedException thrown = ExpectedException.none(); @Test public void test() throws Exception { if (true) { Path p = Paths.get("NOSUCH"); thrown.expect(IOException.class); thrown.expect(CoreMatchers.is(CoreMatchers.instanceOf(IOException.class))); thrown.expectCause(CoreMatchers.is(CoreMatchers.instanceOf(IOException.class))); thrown.expectMessage("error"); thrown.expectMessage(CoreMatchers.containsString("error")); Files.readAllBytes(p); assertThat(Files.exists(p)).isFalse(); } } } """) .addOutputLines( "out/ExceptionTest.java", """ import static com.google.common.truth.Truth.assertThat; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertThrows; import java.io.IOException; import java.nio.file.*; import org.hamcrest.CoreMatchers; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException;
ExceptionTest
java
micronaut-projects__micronaut-core
inject/src/main/java/io/micronaut/inject/annotation/AnnotationMetadataSupport.java
{ "start": 4455, "end": 18238 }
class ____ { private static final Map<String, Map<CharSequence, Object>> CORE_ANNOTATION_DEFAULTS; private static final Map<String, Map<CharSequence, Object>> ANNOTATION_DEFAULTS = new ConcurrentHashMap<>(20); private static final Map<String, String> REPEATABLE_ANNOTATIONS_CONTAINERS = new ConcurrentHashMap<>(20); private static final Map<String, String> CORE_REPEATABLE_ANNOTATIONS_CONTAINERS; private static final Map<Class<? extends Annotation>, Optional<Constructor<InvocationHandler>>> ANNOTATION_PROXY_CACHE = new ConcurrentHashMap<>(20); private static final Map<String, Class<? extends Annotation>> ANNOTATION_TYPES = new ConcurrentHashMap<>(20); /** * The annotation default values provider. * @since 4.3.0 */ public static final AnnotationDefaultValuesProvider ANNOTATION_DEFAULT_VALUES_PROVIDER = new AnnotationDefaultValuesProvider() { @Override public Map<CharSequence, Object> provide(String annotationName) { return AnnotationMetadataSupport.getDefaultValues(annotationName); } }; static { // some common ones for startup optimization //noinspection removal Arrays.asList( Any.class, jakarta.annotation.Nullable.class, Nonnull.class, ReadableBytes.class, Format.class, Indexed.class, Bindable.class, DefaultScope.class, Internal.class, DefaultImplementation.class, Nullable.class, NonNull.class, PreDestroy.class, PostConstruct.class, Named.class, Singleton.class, Inject.class, Qualifier.class, Scope.class, Prototype.class, Executable.class, Bean.class, Primary.class, Value.class, Property.class, Provided.class, Requires.class, Secondary.class, Type.class, Context.class, EachBean.class, EachProperty.class, Configuration.class, ConfigurationProperties.class, ConfigurationBuilder.class, Introspected.class, Parameter.class, Requirements.class, Factory.class).forEach(ann -> ANNOTATION_TYPES.put(ann.getName(), ann) ); Map<String, Map<CharSequence, Object>> coreAnnotationsDefaults = new HashMap<>(100); coreAnnotationsDefaults.put( Deprecated.class.getName(), 
Map.of("forRemoval", false) ); coreAnnotationsDefaults.put( Order.class.getName(), Map.of("value", 0) ); coreAnnotationsDefaults.put( Executable.class.getName(), Map.of(Executable.MEMBER_PROCESS_ON_STARTUP, false) ); coreAnnotationsDefaults.put( ConfigurationProperties.class.getName(), Map.of("cliPrefix", EMPTY_STRING_ARRAY, "excludes", EMPTY_STRING_ARRAY, "includes", EMPTY_STRING_ARRAY) ); coreAnnotationsDefaults.put( EachProperty.class.getName(), Map.of("excludes", EMPTY_STRING_ARRAY, "includes", EMPTY_STRING_ARRAY, "list", false) ); coreAnnotationsDefaults.put( ConfigurationReader.class.getName(), Map.of("excludes", EMPTY_STRING_ARRAY, "includes", EMPTY_STRING_ARRAY) ); coreAnnotationsDefaults.put( Bean.class.getName(), Map.of("typed", ZERO_ANNOTATION_CLASS_VALUES) ); coreAnnotationsDefaults.put( Requires.class.getName(), Map.ofEntries(Map.entry("beans", ZERO_ANNOTATION_CLASS_VALUES), Map.entry("classes", ZERO_ANNOTATION_CLASS_VALUES), Map.entry("condition", TrueCondition.class), Map.entry("entities", ZERO_ANNOTATION_CLASS_VALUES), Map.entry("env", EMPTY_STRING_ARRAY), Map.entry("missing", ZERO_ANNOTATION_CLASS_VALUES), Map.entry("missingBeans", ZERO_ANNOTATION_CLASS_VALUES), Map.entry("missingClasses", EMPTY_STRING_ARRAY), Map.entry("missingConfigurations", EMPTY_STRING_ARRAY), Map.entry("notEnv", EMPTY_STRING_ARRAY), Map.entry("notOs", EMPTY_STRING_ARRAY), Map.entry("os", EMPTY_STRING_ARRAY), Map.entry("resources", EMPTY_STRING_ARRAY), Map.entry("sdk", "MICRONAUT")) ); coreAnnotationsDefaults.put( Replaces.class.getName(), Map.of("qualifier", Annotation.class) ); coreAnnotationsDefaults.put( Introspected.IntrospectionBuilder.class.getName(), Map.of("accessorStyle", new AnnotationValue("io.micronaut.core.annotation.AccessorsStyle", Map.of("writePrefixes", new String[]{""}), AnnotationMetadataSupport.ANNOTATION_DEFAULT_VALUES_PROVIDER), "creatorMethod", "build") ); coreAnnotationsDefaults.put( Introspected.class.getName(), Map.ofEntries(Map.entry("accessKind", 
new String[]{"METHOD"}), Map.entry("annotationMetadata", true), Map.entry("builder", new AnnotationValue("io.micronaut.core.annotation.Introspected$IntrospectionBuilder", Map.of(), AnnotationMetadataSupport.ANNOTATION_DEFAULT_VALUES_PROVIDER)), Map.entry("classNames", EMPTY_STRING_ARRAY), Map.entry("classes", ZERO_ANNOTATION_CLASS_VALUES), Map.entry("excludedAnnotations", ZERO_ANNOTATION_CLASS_VALUES), Map.entry("excludes", EMPTY_STRING_ARRAY), Map.entry("includedAnnotations", ZERO_ANNOTATION_CLASS_VALUES), Map.entry("includes", EMPTY_STRING_ARRAY), Map.entry("indexed", ZERO_ANNOTATION_VALUES), Map.entry("packages", EMPTY_STRING_ARRAY), Map.entry("visibility", new String[]{"DEFAULT"}), Map.entry("withPrefix", "with")) ); coreAnnotationsDefaults.put( MapFormat.class.getName(), Map.of("keyFormat", "HYPHENATED", "transformation", "NESTED") ); coreAnnotationsDefaults.put( Parallel.class.getName(), Map.of("shutdownOnError", true) ); coreAnnotationsDefaults.put( "io.micronaut.aop.constructor.TestConstructorAnn", Map.of() ); coreAnnotationsDefaults.put( "io.micronaut.aop.AroundConstruct", Map.of() ); coreAnnotationsDefaults.put( "io.micronaut.aop.InterceptorBinding", Map.of("bindMembers", false, "kind", "AROUND", "value", Annotation.class) ); coreAnnotationsDefaults.put( "io.micronaut.aop.InterceptorBean", Map.of() ); coreAnnotationsDefaults.put( "io.micronaut.aop.Around", Map.of("cacheableLazyTarget", false, "hotswap", false, "lazy", false, "proxyTarget", false, "proxyTargetMode", "ERROR") ); coreAnnotationsDefaults.put( "io.micronaut.aop.Introduction", Map.of("interfaces", ZERO_ANNOTATION_CLASS_VALUES) ); coreAnnotationsDefaults.put( "io.micronaut.aop.Adapter", Map.of() ); coreAnnotationsDefaults.put( "io.micronaut.validation.annotation.ValidatedElement", Map.of() ); coreAnnotationsDefaults.put( AccessorsStyle.class.getName(), Map.of("readPrefixes", new String[]{"get"}, "writePrefixes", new String[]{"set"}) ); CORE_ANNOTATION_DEFAULTS = 
Collections.unmodifiableMap(coreAnnotationsDefaults); ANNOTATION_DEFAULTS.putAll(CORE_ANNOTATION_DEFAULTS); Map<String, String> coreRepeatableAnnotationsContainers = new LinkedHashMap<>(); for (Map.Entry<Class<? extends Annotation>, Class<? extends Annotation>> e : getCoreRepeatableAnnotations()) { coreRepeatableAnnotationsContainers.put(e.getKey().getName(), e.getValue().getName()); } coreRepeatableAnnotationsContainers.put("io.micronaut.aop.InterceptorBinding", "io.micronaut.aop.InterceptorBindingDefinitions"); CORE_REPEATABLE_ANNOTATIONS_CONTAINERS = Collections.unmodifiableMap(coreRepeatableAnnotationsContainers); REPEATABLE_ANNOTATIONS_CONTAINERS.putAll(coreRepeatableAnnotationsContainers); } /** * @return core repeatable annotations */ @Internal public static List<Map.Entry<Class<? extends Annotation>, Class<? extends Annotation>>> getCoreRepeatableAnnotations() { return Arrays.asList( new AbstractMap.SimpleEntry<>(Indexed.class, Indexes.class), new AbstractMap.SimpleEntry<>(Requires.class, Requirements.class), new AbstractMap.SimpleEntry<>(AliasFor.class, Aliases.class), new AbstractMap.SimpleEntry<>(Property.class, PropertySource.class), new AbstractMap.SimpleEntry<>(ReflectionConfig.class, ReflectionConfig.ReflectionConfigList.class) ); } /** * @return The core repeatable annotations. * @since 4.3.0 */ @Internal public static Map<String, String> getCoreRepeatableAnnotationsContainers() { return CORE_REPEATABLE_ANNOTATIONS_CONTAINERS; } /** * @return The core annotation defaults. 
* @since 4.3.0 */ @Internal public static Map<String, Map<CharSequence, Object>> getCoreAnnotationDefaults() { return CORE_ANNOTATION_DEFAULTS; } /** * @param annotation The annotation * @return The default values for the annotation */ @UsedByGeneratedCode @NonNull public static Map<CharSequence, Object> getDefaultValues(String annotation) { return ANNOTATION_DEFAULTS.getOrDefault(annotation, Collections.emptyMap()); } /** * @param annotation The annotation * @return The default values for the annotation */ @Nullable public static Map<CharSequence, Object> getDefaultValuesOrNull(String annotation) { return ANNOTATION_DEFAULTS.get(annotation); } /** * @param annotation The annotation * @return The repeatable annotation container. */ @Internal public static String getRepeatableAnnotation(String annotation) { return REPEATABLE_ANNOTATIONS_CONTAINERS.get(annotation); } /** * Gets a registered annotation type. * * @param name The name of the annotation type * @return The annotation */ static Optional<Class<? extends Annotation>> getAnnotationType(String name) { return getAnnotationType(name, AnnotationMetadataSupport.class.getClassLoader()); } /** * Gets a registered annotation type. * * @param name The name of the annotation type * @param classLoader The classloader to retrieve the type * @return The annotation */ static Optional<Class<? extends Annotation>> getAnnotationType(String name, ClassLoader classLoader) { final Class<? extends Annotation> type = ANNOTATION_TYPES.get(name); if (type != null) { return Optional.of(type); } else { // last resort, try dynamic load, shouldn't normally happen. @SuppressWarnings("unchecked") final Class<? extends Annotation> aClass = (Class<? extends Annotation>) ClassUtils.forName(name, classLoader).orElse(null); if (aClass != null && Annotation.class.isAssignableFrom(aClass)) { ANNOTATION_TYPES.put(name, aClass); return Optional.of(aClass); } return Optional.empty(); } } /** * Gets a registered annotation type. 
* * @param name The name of the annotation type * @return The annotation */ static Optional<Class<? extends Annotation>> getRegisteredAnnotationType(String name) { final Class<? extends Annotation> type = ANNOTATION_TYPES.get(name); if (type != null) { return Optional.of(type); } return Optional.empty(); } /** * @param annotation The annotation * @return The default values for the annotation */ @SuppressWarnings("unchecked") static Map<CharSequence, Object> getDefaultValues(Class<? extends Annotation> annotation) { return getDefaultValues(annotation.getName()); } /** * Whether default values for the given annotation are present. * * @param annotation The annotation * @return True if they are */ static boolean hasDefaultValues(String annotation) { return ANNOTATION_DEFAULTS.containsKey(annotation); } /** * Registers default values for the given annotation and values. * * @param annotation The annotation * @param defaultValues The default values */ static void registerDefaultValues(String annotation, Map<CharSequence, Object> defaultValues) { if (StringUtils.isNotEmpty(annotation) && CollectionUtils.isNotEmpty(defaultValues)) { ANNOTATION_DEFAULTS.put(annotation, defaultValues); } } /** * Registers default values for the given annotation and values. * * @param annotation The annotation * @param defaultValues The default values */ static void registerDefaultValues(AnnotationClassValue<?> annotation, Map<CharSequence, Object> defaultValues) { if (defaultValues != null) { registerDefaultValues(annotation.getName(), defaultValues); } registerAnnotationType(annotation); } /** * Registers an annotation type. * * @param annotationClassValue the annotation
AnnotationMetadataSupport
java
quarkusio__quarkus
extensions/funqy/funqy-knative-events/deployment/src/test/java/io/quarkus/funqy/test/ExposedCloudEventTest.java
{ "start": 632, "end": 9213 }
class ____ { @RegisterExtension static QuarkusUnitTest test = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClasses(ExposedCloudEvents.class)); @Test public void testVanillaHttp() { // when a function handles CloudEvent explicitly, vanilla HTTP is considered to be a bad request. RestAssured.given().contentType("application/json") .body("{}") .post("/doubleIt") .then() .statusCode(400); } @Test public void testCloudEventAttributeDefaultsForStructuredEncoding() { String event = "{ \"id\" : \"test-id\", " + " \"specversion\": \"1.0\", " + " \"source\": \"test-source\", " + " \"type\": \"test-defaults\" " + "}"; RestAssured.given().contentType("application/cloudevents+json") .body(event) .post("/") .then() .statusCode(200) .body("specversion", equalTo("1.0")) .body("id", notNullValue()) .body("type", equalTo("default-type")) .body("source", equalTo("default-source")); } @Test public void testCloudEventAttributeDefaultsForBinaryEncoding() { RestAssured.given() .header("ce-id", "test-id") .header("ce-specversion", "1.0") .header("ce-type", "test-defaults") .header("ce-source", "test-source") .post() .then() .statusCode(204) .header("ce-specversion", equalTo("1.0")) .header("ce-id", notNullValue()) .header("ce-type", equalTo("default-type")) .header("ce-source", equalTo("default-source")); } @Test public void testGenericInput() { RestAssured.given().contentType("application/json") .header("ce-id", "test-id") .header("ce-specversion", "1.0") .header("ce-type", "test-generics") .header("ce-source", "test-source") .body("[{\"i\" : 1}, {\"i\" : 2}, {\"i\" : 3}]") .then() .statusCode(200) .body(equalTo("6")); } @Test public void testNullResponse() { RestAssured.given().contentType("application/json") .header("ce-id", "test-id") .header("ce-specversion", "1.0") .header("ce-type", "test-null-response") .header("ce-source", "test-source") .post() .then() .statusCode(204); } @ParameterizedTest @MethodSource("provideBinaryEncodingTestArgs") public void 
testBinaryEncoding(Map<String, String> headers, String specversion, String dataSchemaHdrName) { RequestSpecification req = RestAssured.given().contentType("application/json"); for (Map.Entry<String, String> h : headers.entrySet()) { req = req.header(h.getKey(), h.getValue()); } req.body(BINARY_ENCODED_EVENT_BODY) .post("/") .then() .statusCode(200) .header("ce-specversion", equalTo(specversion)) .header("ce-id", equalTo("double-it-id")) .header("ce-type", equalTo("double-it-type")) .header("ce-source", equalTo("/OfDoubleIt")) .header(dataSchemaHdrName, equalTo("dataschema-server")) .header("ce-extserver", equalTo("ext-server-val")) .body("i", equalTo(42)) .body("s", equalTo("abcabc")); } @ParameterizedTest @MethodSource("provideStructuredEncodingTestArgs") public void testStructuredEncoding(String event, String specversion, String dataSchemaFieldName) { RestAssured.given().contentType("application/cloudevents+json") .body(event) .post("/") .then() .statusCode(200) .body("specversion", equalTo(specversion)) .body("id", equalTo("double-it-id")) .body("type", equalTo("double-it-type")) .body("source", equalTo("/OfDoubleIt")) .body(dataSchemaFieldName, equalTo("dataschema-server")) .body("extserver", equalTo("ext-server-val")) .body("data.i", equalTo(42)) .body("data.s", equalTo("abcabc")); } static { Map<String, String> common = new HashMap<>(); common.put("ce-id", "test-id"); common.put("ce-type", "test-type"); common.put("ce-source", "/OfTest"); common.put("ce-subject", "test-subj"); common.put("ce-time", "2018-04-05T17:31:00Z"); common.put("ce-extclient", "ext-client-val"); Map<String, String> v1 = new HashMap<>(common); v1.put("ce-specversion", "1.0"); v1.put("ce-dataschema", "test-dataschema-client"); BINARY_ENCODED_EVENT_V1_HEADERS = Collections.unmodifiableMap(v1); Map<String, String> v1_1 = new HashMap<>(common); v1_1.put("ce-specversion", "1.1"); v1_1.put("ce-dataschema", "test-dataschema-client"); BINARY_ENCODED_EVENT_V1_1_HEADERS = 
Collections.unmodifiableMap(v1_1); Map<String, String> v03 = new HashMap<>(common); v03.put("ce-specversion", "0.3"); v03.put("ce-schemaurl", "test-dataschema-client"); BINARY_ENCODED_EVENT_V03_HEADERS = Collections.unmodifiableMap(v03); } public static final Map<String, String> BINARY_ENCODED_EVENT_V1_HEADERS; public static final Map<String, String> BINARY_ENCODED_EVENT_V1_1_HEADERS; public static final Map<String, String> BINARY_ENCODED_EVENT_V03_HEADERS; private static Stream<Arguments> provideBinaryEncodingTestArgs() { return Stream.<Arguments> builder() .add(Arguments.arguments(BINARY_ENCODED_EVENT_V1_HEADERS, "1.0", "ce-dataschema")) .add(Arguments.arguments(BINARY_ENCODED_EVENT_V1_1_HEADERS, "1.1", "ce-dataschema")) .add(Arguments.arguments(BINARY_ENCODED_EVENT_V03_HEADERS, "0.3", "ce-schemaurl")) .build(); } public static final String BINARY_ENCODED_EVENT_BODY = " { \"i\" : 21, \"s\" : \"abc\" } "; static final String STRUCTURED_ENCODED_EVENT_V1_BODY = "{ \"id\" : \"test-id\", " + " \"specversion\": \"1.0\", " + " \"source\": \"/OfTest\", " + " \"subject\": \"test-subj\", " + " \"time\": \"2018-04-05T17:31:00Z\", " + " \"type\": \"test-type\", " + " \"extclient\": \"ext-client-val\", " + " \"dataschema\": \"test-dataschema-client\", " + " \"datacontenttype\": \"application/json\", " + " \"data\": { \"i\" : 21, \"s\" : \"abc\" } " + "}"; static final String STRUCTURED_ENCODED_EVENT_V1_1_BODY = "{ \"id\" : \"test-id\", " + " \"specversion\": \"1.1\", " + " \"source\": \"/OfTest\", " + " \"subject\": \"test-subj\", " + " \"time\": \"2018-04-05T17:31:00Z\", " + " \"type\": \"test-type\", " + " \"extclient\": \"ext-client-val\", " + " \"dataschema\": \"test-dataschema-client\", " + " \"data\": { \"i\" : 21, \"s\" : \"abc\" } " + "}"; static final String STRUCTURED_ENCODED_EVENT_V03_BODY = "{ \"id\" : \"test-id\", " + " \"specversion\": \"0.3\", " + " \"source\": \"/OfTest\", " + " \"subject\": \"test-subj\", " + " \"time\": \"2018-04-05T17:31:00Z\", " + " 
\"type\": \"test-type\", " + " \"extclient\": \"ext-client-val\", " + " \"schemaurl\": \"test-dataschema-client\", " + " \"datacontenttype\": \"application/json\", " + " \"data\": { \"i\" : 21, \"s\" : \"abc\" } " + "}"; private static Stream<Arguments> provideStructuredEncodingTestArgs() { return Stream.<Arguments> builder() .add(Arguments.arguments(STRUCTURED_ENCODED_EVENT_V1_BODY, "1.0", "dataschema")) .add(Arguments.arguments(STRUCTURED_ENCODED_EVENT_V1_1_BODY, "1.1", "dataschema")) .add(Arguments.arguments(STRUCTURED_ENCODED_EVENT_V03_BODY, "0.3", "schemaurl")) .build(); } }
ExposedCloudEventTest
java
elastic__elasticsearch
x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookTokenIntegrationTests.java
{ "start": 2793, "end": 7400 }
class ____ extends AbstractWatcherIntegrationTestCase { private MockWebServer webServer = new MockWebServer(); @Override protected boolean addMockHttpTransport() { return false; // enable http } @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return CollectionUtils.appendToCopy(super.nodePlugins(), Netty4Plugin.class); // for http } @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { Settings.Builder builder = Settings.builder(); builder.put(super.nodeSettings(nodeOrdinal, otherSettings)); builder.put(WebhookService.SETTING_WEBHOOK_TOKEN_ENABLED.getKey(), true); MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString(WebhookService.SETTING_WEBHOOK_HOST_TOKEN_PAIRS.getKey(), "localhost:0=oldtoken"); builder.setSecureSettings(secureSettings); return builder.build(); } @Before public void startWebservice() throws Exception { webServer.start(); } @After public void stopWebservice() throws Exception { webServer.close(); } public void testWebhook() throws Exception { assumeFalse( "Cannot run in FIPS mode since the keystore will be password protected and sending a password in the reload" + "settings api call, require TLS to be configured for the transport layer", inFipsJvm() ); String localServer = "localhost:" + webServer.getPort(); logger.info("--> updating keystore token hosts to: {}", localServer); Path configPath = internalCluster().configPaths().stream().findFirst().orElseThrow(); try (KeyStoreWrapper ksw = KeyStoreWrapper.create()) { ksw.setString(WebhookService.SETTING_WEBHOOK_HOST_TOKEN_PAIRS.getKey(), (localServer + "=token1234").toCharArray()); ksw.save(configPath, "".toCharArray(), false); } // Reload the keystore to load the new settings NodesReloadSecureSettingsRequest reloadReq = new NodesReloadSecureSettingsRequest(Strings.EMPTY_ARRAY); try { reloadReq.setSecureStorePassword(new SecureString("".toCharArray())); 
client().execute(TransportNodesReloadSecureSettingsAction.TYPE, reloadReq).get(); } finally { reloadReq.decRef(); } webServer.enqueue(new MockResponse().setResponseCode(200).setBody("body")); HttpRequestTemplate.Builder builder = HttpRequestTemplate.builder("localhost", webServer.getPort()) .path(new TextTemplate("/test/_id")) .putParam("param1", new TextTemplate("value1")) .putParam("watch_id", new TextTemplate("_id")) .body(new TextTemplate("_body")) .auth(new BasicAuth("user", "pass".toCharArray())) .method(HttpMethod.POST); new PutWatchRequestBuilder(client(), "_id").setSource( watchBuilder().trigger(schedule(interval("5s"))) .input(simpleInput("key", "value")) .condition(InternalAlwaysCondition.INSTANCE) .addAction("_id", ActionBuilders.webhookAction(builder)) ).get(); timeWarp().trigger("_id"); refresh(); assertWatchWithMinimumPerformedActionsCount("_id", 1, false); assertThat(webServer.requests(), hasSize(1)); MockRequest req = webServer.requests().get(0); assertThat( webServer.requests().get(0).getUri().getQuery(), anyOf(equalTo("watch_id=_id&param1=value1"), equalTo("param1=value1&watch_id=_id")) ); assertThat("token header should be set", req.getHeader(WebhookService.TOKEN_HEADER_NAME), equalTo("token1234")); assertThat(webServer.requests().get(0).getBody(), is("_body")); SearchResponse response = searchWatchRecords(b -> QueryBuilders.termQuery(WatchRecord.STATE.getPreferredName(), "executed")); try { assertNoFailures(response); XContentSource source = xContentSource(response.getHits().getAt(0).getSourceRef()); String body = source.getValue("result.actions.0.webhook.response.body"); assertThat(body, notNullValue()); assertThat(body, is("body")); Number status = source.getValue("result.actions.0.webhook.response.status"); assertThat(status, notNullValue()); assertThat(status.intValue(), is(200)); } finally { response.decRef(); } } }
WebhookTokenIntegrationTests
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/filter/wall/WallCallTest.java
{ "start": 800, "end": 1323 }
class ____ extends TestCase { public void testMySql() throws Exception { assertTrue(WallUtils.isValidateMySql("{ call INTERFACE_DATA_EXTRACTION.INVOICE_INFO(?,?,?)}")); } public void testOracle() throws Exception { assertTrue(WallUtils.isValidateOracle("{ call INTERFACE_DATA_EXTRACTION.INVOICE_INFO(?,?,?)}")); } public void testSqlServer() throws Exception { assertTrue(WallUtils.isValidateSqlServer("{ call INTERFACE_DATA_EXTRACTION.INVOICE_INFO(?,?,?)}")); } }
WallCallTest
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRetryCacheWithHA.java
{ "start": 10066, "end": 11247 }
class ____ extends AtMostOnceOp { private String snapshotPath; private final String dir; private final String snapshotName; CreateSnapshotOp(DFSClient client, String dir, String snapshotName) { super("createSnapshot", client); this.dir = dir; this.snapshotName = snapshotName; } @Override void prepare() throws Exception { final Path dirPath = new Path(dir); if (!dfs.exists(dirPath)) { dfs.mkdirs(dirPath); dfs.allowSnapshot(dirPath); } } @Override void invoke() throws Exception { this.snapshotPath = client.createSnapshot(dir, snapshotName); } @Override boolean checkNamenodeBeforeReturn() throws Exception { final Path sPath = SnapshotTestHelper.getSnapshotRoot(new Path(dir), snapshotName); boolean snapshotCreated = dfs.exists(sPath); for (int i = 0; i < CHECKTIMES && !snapshotCreated; i++) { Thread.sleep(1000); snapshotCreated = dfs.exists(sPath); } return snapshotCreated; } @Override Object getResult() { return snapshotPath; } } /** deleteSnapshot */
CreateSnapshotOp
java
micronaut-projects__micronaut-core
core-processor/src/main/java/io/micronaut/inject/writer/DirectoryClassWriterOutputVisitor.java
{ "start": 1150, "end": 4025 }
class ____ extends AbstractClassWriterOutputVisitor { private final File targetDir; /** * @param targetDir The target directory */ public DirectoryClassWriterOutputVisitor(File targetDir) { super(true); this.targetDir = targetDir; } @Override @SuppressWarnings("java:S3878") public OutputStream visitClass(String classname, @Nullable Element originatingElement) throws IOException { return visitClass(classname, new Element[]{ originatingElement }); } @Override public OutputStream visitClass(String classname, Element... originatingElements) throws IOException { File targetFile = new File(targetDir, getClassFileName(classname)).getCanonicalFile(); makeParent(targetFile.toPath()); return Files.newOutputStream(targetFile.toPath()); } @Override @SuppressWarnings("java:S1075") public void visitServiceDescriptor(String type, String classname, Element originatingElement) { final String path = "META-INF/micronaut/" + type + "/" + classname; try { final Path filePath = targetDir.toPath().resolve(path); makeParent(filePath); Files.writeString(filePath, "", StandardOpenOption.WRITE, StandardOpenOption.CREATE ); } catch (IOException e) { throw new ClassGenerationException("Unable to generate Bean entry at path: " + path, e); } } @Override public Optional<GeneratedFile> visitMetaInfFile(String path, Element... originatingElements) { return Optional.ofNullable(targetDir).map(root -> new FileBackedGeneratedFile( new File(root, "META-INF" + File.separator + path) ) ); } @Override public Optional<GeneratedFile> visitGeneratedFile(String path) { return getGeneratedFile(path); } @NonNull private Optional<GeneratedFile> getGeneratedFile(String path) { File parentFile = targetDir.getParentFile(); File generatedDir = new File(parentFile, "generated"); File f = new File(generatedDir, path); if (f.getParentFile().mkdirs()) { return Optional.of(new FileBackedGeneratedFile(f)); } return Optional.empty(); } @Override public Optional<GeneratedFile> visitGeneratedFile(String path, Element... 
originatingElements) { return getGeneratedFile(path); } private void makeParent(Path filePath) throws IOException { final Path parent = filePath.getParent(); if (!Files.exists(parent)) { Files.createDirectories(parent); } } private String getClassFileName(String className) { return className.replace('.', File.separatorChar) + ".class"; } }
DirectoryClassWriterOutputVisitor
java
spring-projects__spring-boot
module/spring-boot-h2console/src/test/java/org/springframework/boot/h2console/autoconfigure/H2ConsoleAutoConfigurationTests.java
{ "start": 9394, "end": 9699 }
class ____ { @Bean DataSource dataSource() throws SQLException { DataSource dataSource = mock(DataSource.class); given(dataSource.getConnection()).willThrow(IllegalStateException.class); return dataSource; } } @Configuration(proxyBeanMethods = false) static
FailingDataSourceConfiguration
java
apache__camel
dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java
{ "start": 309202, "end": 314044 }
class ____ extends YamlDeserializerBase<DnsServiceCallServiceDiscoveryConfiguration> { public DnsServiceCallServiceDiscoveryConfigurationDeserializer() { super(DnsServiceCallServiceDiscoveryConfiguration.class); } @Override protected DnsServiceCallServiceDiscoveryConfiguration newInstance() { return new DnsServiceCallServiceDiscoveryConfiguration(); } @Override protected boolean setProperty(DnsServiceCallServiceDiscoveryConfiguration target, String propertyKey, String propertyName, Node node) { propertyKey = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey); switch(propertyKey) { case "domain": { String val = asText(node); target.setDomain(val); break; } case "id": { String val = asText(node); target.setId(val); break; } case "properties": { java.util.List<org.apache.camel.model.PropertyDefinition> val = asFlatList(node, org.apache.camel.model.PropertyDefinition.class); target.setProperties(val); break; } case "proto": { String val = asText(node); target.setProto(val); break; } default: { return false; } } return true; } } @YamlType( nodes = "dynamicRouter", types = org.apache.camel.model.DynamicRouterDefinition.class, order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1, displayName = "Dynamic Router", description = "Route messages based on dynamic rules", deprecated = false, properties = { @YamlProperty(name = "__extends", type = "object:org.apache.camel.model.language.ExpressionDefinition", oneOf = "expression"), @YamlProperty(name = "cacheSize", type = "number", description = "Sets the maximum size used by the org.apache.camel.spi.ProducerCache which is used to cache and reuse producers when using this dynamic router, when uris are reused. Beware that when using dynamic endpoints then it affects how well the cache can be utilized. 
If each dynamic endpoint is unique then its best to turn off caching by setting this to -1, which allows Camel to not cache both the producers and endpoints; they are regarded as prototype scoped and will be stopped and discarded after use. This reduces memory usage as otherwise producers/endpoints are stored in memory in the caches. However if there are a high degree of dynamic endpoints that have been used before, then it can benefit to use the cache to reuse both producers and endpoints and therefore the cache size can be set accordingly or rely on the default size (1000). If there is a mix of unique and used before dynamic endpoints, then setting a reasonable cache size can help reduce memory usage to avoid storing too many non frequent used producers.", displayName = "Cache Size"), @YamlProperty(name = "description", type = "string", description = "Sets the description of this node", displayName = "Description"), @YamlProperty(name = "disabled", type = "boolean", defaultValue = "false", description = "Disables this EIP from the route.", displayName = "Disabled"), @YamlProperty(name = "expression", type = "object:org.apache.camel.model.language.ExpressionDefinition", description = "Expression to call that returns the endpoint(s) to route to in the dynamic routing. 
Important: The expression will be called in a while loop fashion, until the expression returns null which means the dynamic router is finished.", displayName = "Expression", oneOf = "expression"), @YamlProperty(name = "id", type = "string", description = "Sets the id of this node", displayName = "Id"), @YamlProperty(name = "ignoreInvalidEndpoints", type = "boolean", defaultValue = "false", description = "Ignore the invalidate endpoint exception when try to create a producer with that endpoint", displayName = "Ignore Invalid Endpoints"), @YamlProperty(name = "note", type = "string", description = "Sets the note of this node", displayName = "Note"), @YamlProperty(name = "uriDelimiter", type = "string", defaultValue = ",", description = "Sets the uri delimiter to use", displayName = "Uri Delimiter") } ) public static
DnsServiceCallServiceDiscoveryConfigurationDeserializer
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/query/spi/QueryProducerImplementor.java
{ "start": 694, "end": 831 }
interface ____ no longer needed and will be removed. * * @author Steve Ebersole */ @Deprecated(since = "7.0", forRemoval = true) public
is
java
apache__camel
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/LprComponentBuilderFactory.java
{ "start": 3877, "end": 4648 }
class ____ extends AbstractComponentBuilder<PrinterComponent> implements LprComponentBuilder { @Override protected PrinterComponent buildConcreteComponent() { return new PrinterComponent(); } @Override protected boolean setPropertyOnComponent( Component component, String name, Object value) { switch (name) { case "lazyStartProducer": ((PrinterComponent) component).setLazyStartProducer((boolean) value); return true; case "autowiredEnabled": ((PrinterComponent) component).setAutowiredEnabled((boolean) value); return true; default: return false; } } } }
LprComponentBuilderImpl
java
spring-projects__spring-boot
core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/condition/ConditionalOnJndi.java
{ "start": 1334, "end": 1598 }
interface ____ { /** * JNDI Locations, one of which must exist. If no locations are specific the condition * matches solely based on the presence of an {@link InitialContext}. * @return the JNDI locations */ String[] value() default {}; }
ConditionalOnJndi
java
apache__camel
core/camel-api/src/main/java/org/apache/camel/spi/NotifyBuilderMatcher.java
{ "start": 1084, "end": 1427 }
interface ____ { /** * When an exchange was received * * @param exchange the exchange */ void notifyBuilderOnExchange(Exchange exchange); /** * Whether the predicate matches */ boolean notifyBuilderMatches(); /** * Reset state */ void notifyBuilderReset(); }
NotifyBuilderMatcher
java
spring-projects__spring-framework
spring-messaging/src/main/java/org/springframework/messaging/handler/invocation/InvocableHandlerMethod.java
{ "start": 7111, "end": 8155 }
class ____ extends AnnotatedMethodParameter { private final @Nullable Object returnValue; private final ResolvableType returnType; public AsyncResultMethodParameter(@Nullable Object returnValue) { super(-1); this.returnValue = returnValue; this.returnType = ResolvableType.forType(super.getGenericParameterType()).getGeneric(); } protected AsyncResultMethodParameter(AsyncResultMethodParameter original) { super(original); this.returnValue = original.returnValue; this.returnType = original.returnType; } @Override public Class<?> getParameterType() { if (this.returnValue != null) { return this.returnValue.getClass(); } if (!ResolvableType.NONE.equals(this.returnType)) { return this.returnType.toClass(); } return super.getParameterType(); } @Override public Type getGenericParameterType() { return this.returnType.getType(); } @Override public AsyncResultMethodParameter clone() { return new AsyncResultMethodParameter(this); } } }
AsyncResultMethodParameter
java
quarkusio__quarkus
extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/MutinyGrpcServiceWithPlainTextTest.java
{ "start": 1131, "end": 1964 }
class ____ extends GrpcServiceTestBase { @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .setFlatClassPath(true).setArchiveProducer( () -> ShrinkWrap.create(JavaArchive.class) .addClasses(MutinyHelloService.class, MutinyTestService.class, AssertHelper.class, GreeterGrpc.class, Greeter.class, GreeterBean.class, HelloRequest.class, HelloReply.class, MutinyGreeterGrpc.class, HelloRequestOrBuilder.class, HelloReplyOrBuilder.class, EmptyProtos.class, Messages.class, MutinyTestServiceGrpc.class, TestServiceGrpc.class)); }
MutinyGrpcServiceWithPlainTextTest
java
junit-team__junit5
platform-tests/src/test/java/org/junit/platform/commons/util/ReflectionUtilsTests.java
{ "start": 75652, "end": 80423 }
class ____ { /** * @see https://github.com/junit-team/junit-framework/issues/3553 */ @Test void findFieldsDoesNotAllowInstanceFieldToHideStaticField() throws Exception { final String TEMP_DIR = "tempDir"; Class<?> superclass = SuperclassWithStaticPackagePrivateTempDirField.class; Field staticField = superclass.getDeclaredField(TEMP_DIR); Class<?> subclass = SubclassWithNonStaticPackagePrivateTempDirField.class; Field nonStaticField = subclass.getDeclaredField(TEMP_DIR); // Prerequisite var fields = findFields(superclass, ReflectionUtils::isStatic, TOP_DOWN); assertThat(fields).containsExactly(staticField); // Actual use cases for this test fields = findFields(subclass, ReflectionUtils::isStatic, TOP_DOWN); assertThat(fields).containsExactly(staticField); fields = findFields(subclass, ReflectionUtils::isNotStatic, TOP_DOWN); assertThat(fields).containsExactly(nonStaticField); } @SuppressWarnings("DataFlowIssue") @Test void readFieldValuesPreconditions() { List<Field> fields = new ArrayList<>(); assertPreconditionViolationFor(() -> readFieldValues(null, new Object())); assertPreconditionViolationFor(() -> readFieldValues(fields, null, null)); assertPreconditionViolationFor(() -> readFieldValues(fields, new Object(), null)); } @Test void readFieldValuesFromInstance() { var fields = findFields(ClassWithFields.class, f -> true, TOP_DOWN); var values = readFieldValues(fields, new ClassWithFields()); Assertions.<Object> assertThat(values).containsExactly("enigma", 3.14, "text", 2.5, null, 42, "constant", 99); } @Test void readFieldValuesFromClass() { var fields = findFields(ClassWithFields.class, ReflectionUtils::isStatic, TOP_DOWN); var values = readFieldValues(fields, null); Assertions.<Object> assertThat(values).containsExactly(2.5, "constant", 99); } /** * @see https://github.com/junit-team/junit-framework/issues/3646 * @since 1.11 */ @Test void readFieldValuesFromInterfacesAndClassesInTypeHierarchy() { var fields = findFields(InterfaceWithField.class, 
ReflectionUtils::isStatic, TOP_DOWN); var values = readFieldValues(fields, null); Assertions.<Object> assertThat(values).containsOnly("ifc"); fields = findFields(SuperclassWithFieldAndFieldFromInterface.class, ReflectionUtils::isStatic, TOP_DOWN); values = readFieldValues(fields, null); Assertions.<Object> assertThat(values).containsExactly("ifc", "super"); fields = findFields(SubclassWithFieldAndFieldFromInterface.class, ReflectionUtils::isStatic, TOP_DOWN); values = readFieldValues(fields, null); Assertions.<Object> assertThat(values).containsExactly("ifc", "super", "sub"); } @Test void readFieldValuesFromInstanceWithTypeFilterForString() { var fields = findFields(ClassWithFields.class, isA(String.class), TOP_DOWN); var values = readFieldValues(fields, new ClassWithFields(), isA(String.class)); Assertions.<Object> assertThat(values).containsExactly("enigma", "text", null, "constant"); } @Test void readFieldValuesFromClassWithTypeFilterForString() { var fields = findFields(ClassWithFields.class, isA(String.class).and(ReflectionUtils::isStatic), TOP_DOWN); var values = readFieldValues(fields, null, isA(String.class)); Assertions.<Object> assertThat(values).containsExactly("constant"); } @Test void readFieldValuesFromInstanceWithTypeFilterForInteger() { var fields = findFields(ClassWithFields.class, isA(int.class), TOP_DOWN); var values = readFieldValues(fields, new ClassWithFields(), isA(int.class)); Assertions.<Object> assertThat(values).containsExactly(42); } @Test void readFieldValuesFromClassWithTypeFilterForInteger() { var fields = findFields(ClassWithFields.class, isA(Integer.class).and(ReflectionUtils::isStatic), TOP_DOWN); var values = readFieldValues(fields, null, isA(Integer.class)); Assertions.<Object> assertThat(values).containsExactly(99); } @Test void readFieldValuesFromInstanceWithTypeFilterForDouble() { var fields = findFields(ClassWithFields.class, isA(double.class), TOP_DOWN); var values = readFieldValues(fields, new ClassWithFields(), 
isA(double.class)); Assertions.<Object> assertThat(values).containsExactly(3.14); } @Test void readFieldValuesFromClassWithTypeFilterForDouble() { var fields = findFields(ClassWithFields.class, isA(Double.class).and(ReflectionUtils::isStatic), TOP_DOWN); var values = readFieldValues(fields, null, isA(Double.class)); Assertions.<Object> assertThat(values).containsExactly(2.5); } private static Predicate<Field> isA(Class<?> type) { return f -> f.getType().isAssignableFrom(type); } public static
FindAndReadFieldsTests
java
apache__flink
flink-connectors/flink-file-sink-common/src/main/java/org/apache/flink/streaming/api/functions/sink/filesystem/rollingpolicies/DefaultRollingPolicy.java
{ "start": 1732, "end": 4641 }
class ____<IN, BucketID> implements RollingPolicy<IN, BucketID> { private static final long serialVersionUID = 1L; private static final long DEFAULT_INACTIVITY_INTERVAL = 60L * 1000L; private static final long DEFAULT_ROLLOVER_INTERVAL = 60L * 1000L; private static final long DEFAULT_MAX_PART_SIZE = 1024L * 1024L * 128L; private final long partSize; private final long rolloverInterval; private final long inactivityInterval; /** Private constructor to avoid direct instantiation. */ private DefaultRollingPolicy(long partSize, long rolloverInterval, long inactivityInterval) { Preconditions.checkArgument(partSize > 0L); Preconditions.checkArgument(rolloverInterval > 0L); Preconditions.checkArgument(inactivityInterval > 0L); this.partSize = partSize; this.rolloverInterval = rolloverInterval; this.inactivityInterval = inactivityInterval; } @Override public boolean shouldRollOnCheckpoint(PartFileInfo<BucketID> partFileState) throws IOException { return partFileState.getSize() > partSize; } @Override public boolean shouldRollOnEvent(PartFileInfo<BucketID> partFileState, IN element) throws IOException { return partFileState.getSize() > partSize; } @Override public boolean shouldRollOnProcessingTime( final PartFileInfo<BucketID> partFileState, final long currentTime) { return currentTime - partFileState.getCreationTime() >= rolloverInterval || currentTime - partFileState.getLastUpdateTime() >= inactivityInterval; } /** * Returns the maximum part file size before rolling. * * @return Max size in bytes */ public long getMaxPartSize() { return partSize; } /** * Returns the maximum time duration a part file can stay open before rolling. * * @return Time duration in milliseconds */ public long getRolloverInterval() { return rolloverInterval; } /** * Returns time duration of allowed inactivity after which a part file will have to roll. 
* * @return Time duration in milliseconds */ public long getInactivityInterval() { return inactivityInterval; } /** * Creates a new {@link PolicyBuilder} that is used to configure and build an instance of {@code * DefaultRollingPolicy}. */ public static DefaultRollingPolicy.PolicyBuilder builder() { return new DefaultRollingPolicy.PolicyBuilder( DEFAULT_MAX_PART_SIZE, DEFAULT_ROLLOVER_INTERVAL, DEFAULT_INACTIVITY_INTERVAL); } /** This method is {@link Deprecated}, use {@link DefaultRollingPolicy#builder()} instead. */ @Deprecated public static DefaultRollingPolicy.PolicyBuilder create() { return builder(); } /** * A helper
DefaultRollingPolicy
java
junit-team__junit5
junit-platform-engine/src/main/java/org/junit/platform/engine/support/store/NamespacedHierarchicalStore.java
{ "start": 1705, "end": 1949 }
class ____ thread-safe. Please note, however, that thread safety is * not guaranteed while the {@link #close()} method is being invoked. * * @param <N> Namespace type * @since 1.10 */ @API(status = MAINTAINED, since = "1.13.3") public final
is
java
apache__spark
sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/streaming/SupportsAdmissionControl.java
{ "start": 920, "end": 1524 }
interface ____ {@link SparkDataStream} streaming sources to signal that they can control * the rate of data ingested into the system. These rate limits can come implicitly from the * contract of triggers, e.g. Trigger.Once() requires that a micro-batch process all data * available to the system at the start of the micro-batch. Alternatively, sources can decide to * limit ingest through data source options. * <p> * Through this interface, a MicroBatchStream should be able to return the next offset that it will * process until given a {@link ReadLimit}. * * @since 3.0.0 */ @Evolving public
for
java
quarkusio__quarkus
extensions/panache/mongodb-panache/runtime/src/main/java/io/quarkus/mongodb/panache/runtime/PanacheQueryImpl.java
{ "start": 494, "end": 4000 }
class ____<Entity> implements PanacheQuery<Entity> { private final CommonPanacheQueryImpl<Entity> delegate; PanacheQueryImpl(MongoCollection<? extends Entity> collection, ClientSession session, Bson mongoQuery, Bson sort) { this.delegate = new CommonPanacheQueryImpl<>(collection, session, mongoQuery, sort); } private PanacheQueryImpl(CommonPanacheQueryImpl<Entity> delegate) { this.delegate = delegate; } @Override public <T> PanacheQuery<T> project(Class<T> type) { return new PanacheQueryImpl<>(delegate.project(type)); } @Override @SuppressWarnings("unchecked") public <T extends Entity> PanacheQuery<T> page(Page page) { delegate.page(page); return (PanacheQuery<T>) this; } @Override public <T extends Entity> PanacheQuery<T> page(int pageIndex, int pageSize) { delegate.page(Page.of(pageIndex, pageSize)); return (PanacheQuery<T>) this; } @Override public <T extends Entity> PanacheQuery<T> nextPage() { delegate.nextPage(); return (PanacheQuery<T>) this; } @Override public <T extends Entity> PanacheQuery<T> previousPage() { delegate.previousPage(); return (PanacheQuery<T>) this; } @Override public <T extends Entity> PanacheQuery<T> firstPage() { delegate.firstPage(); return (PanacheQuery<T>) this; } @Override public <T extends Entity> PanacheQuery<T> lastPage() { delegate.lastPage(); return (PanacheQuery<T>) this; } @Override public boolean hasNextPage() { return delegate.hasNextPage(); } @Override public boolean hasPreviousPage() { return delegate.hasPreviousPage(); } @Override public int pageCount() { return delegate.pageCount(); } @Override public Page page() { return delegate.page(); } @Override public <T extends Entity> PanacheQuery<T> range(int startIndex, int lastIndex) { delegate.range(startIndex, lastIndex); return (PanacheQuery<T>) this; } @Override public <T extends Entity> PanacheQuery<T> withCollation(Collation collation) { delegate.withCollation(collation); return (PanacheQuery<T>) this; } @Override public <T extends Entity> PanacheQuery<T> 
withReadPreference(ReadPreference readPreference) { delegate.withReadPreference(readPreference); return (PanacheQuery<T>) this; } @Override public <T extends Entity> PanacheQuery<T> withBatchSize(int batchSize) { delegate.withBatchSize(batchSize); return (PanacheQuery<T>) this; } // Results @Override public long count() { return delegate.count(); } @Override public <T extends Entity> List<T> list() { return delegate.list(); } @Override public <T extends Entity> Stream<T> stream() { return delegate.stream(); } @Override public <T extends Entity> T firstResult() { return delegate.firstResult(); } @Override public <T extends Entity> Optional<T> firstResultOptional() { return delegate.firstResultOptional(); } @Override public <T extends Entity> T singleResult() { return delegate.singleResult(); } @Override public <T extends Entity> Optional<T> singleResultOptional() { return delegate.singleResultOptional(); } }
PanacheQueryImpl
java
elastic__elasticsearch
server/src/internalClusterTest/java/org/elasticsearch/action/search/SearchShardsIT.java
{ "start": 1665, "end": 9578 }
class ____ extends ESIntegTestCase { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return CollectionUtils.appendToCopy(super.nodePlugins(), MockTransportService.TestPlugin.class); } public void testBasic() { int indicesWithData = between(1, 10); for (int i = 0; i < indicesWithData; i++) { String index = "index-with-data-" + i; ElasticsearchAssertions.assertAcked( indicesAdmin().prepareCreate(index).setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)) ); int numDocs = randomIntBetween(1, 10); for (int j = 0; j < numDocs; j++) { prepareIndex(index).setSource("value", i).setId(Integer.toString(i)).get(); } indicesAdmin().prepareRefresh(index).get(); } int indicesWithoutData = between(1, 10); for (int i = 0; i < indicesWithoutData; i++) { String index = "index-without-data-" + i; ElasticsearchAssertions.assertAcked( indicesAdmin().prepareCreate(index).setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)) ); } // Range query { RangeQueryBuilder rangeQuery = new RangeQueryBuilder("value").from(0).includeLower(true); var request = new SearchShardsRequest( new String[] { "index-*" }, SearchRequest.DEFAULT_INDICES_OPTIONS, rangeQuery, null, null, randomBoolean(), randomBoolean() ? 
null : randomAlphaOfLength(10) ); var resp = client().execute(TransportSearchShardsAction.TYPE, request).actionGet(); assertThat(resp.getGroups(), hasSize(indicesWithData + indicesWithoutData)); int skipped = 0; for (SearchShardsGroup g : resp.getGroups()) { String indexName = g.shardId().getIndexName(); assertThat(g.allocatedNodes(), not(empty())); if (indexName.contains("without")) { assertTrue(g.skipped()); skipped++; } else { assertFalse(g.skipped()); } } assertThat(skipped, equalTo(indicesWithoutData)); } // Match all { MatchAllQueryBuilder matchAll = new MatchAllQueryBuilder(); var request = new SearchShardsRequest( new String[] { "index-*" }, SearchRequest.DEFAULT_INDICES_OPTIONS, matchAll, null, null, randomBoolean(), randomBoolean() ? null : randomAlphaOfLength(10) ); SearchShardsResponse resp = client().execute(TransportSearchShardsAction.TYPE, request).actionGet(); assertThat(resp.getGroups(), hasSize(indicesWithData + indicesWithoutData)); for (SearchShardsGroup g : resp.getGroups()) { assertFalse(g.skipped()); } } } public void testRandom() throws ExecutionException, InterruptedException { int numIndices = randomIntBetween(1, 10); for (int i = 0; i < numIndices; i++) { String index = "index-" + i; ElasticsearchAssertions.assertAcked( indicesAdmin().prepareCreate(index) .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5))) ); int numDocs = randomIntBetween(10, 1000); for (int j = 0; j < numDocs; j++) { prepareIndex(index).setSource("value", i).setId(Integer.toString(i)).get(); } indicesAdmin().prepareRefresh(index).get(); } int iterations = iterations(2, 10); for (int i = 0; i < iterations; i++) { long from = randomLongBetween(1, 100); long to = randomLongBetween(from, from + 100); String preference = randomBoolean() ? 
null : randomAlphaOfLength(10); RangeQueryBuilder rangeQuery = new RangeQueryBuilder("value").from(from).to(to).includeUpper(true).includeLower(true); SearchRequest searchRequest = new SearchRequest().indices("index-*").source(new SearchSourceBuilder().query(rangeQuery)); searchRequest.setPreFilterShardSize(1); assertResponse(client().search(searchRequest), searchResponse -> { var searchShardsRequest = new SearchShardsRequest( new String[] { "index-*" }, SearchRequest.DEFAULT_INDICES_OPTIONS, rangeQuery, null, preference, randomBoolean(), randomBoolean() ? null : randomAlphaOfLength(10) ); var searchShardsResponse = client().execute(TransportSearchShardsAction.TYPE, searchShardsRequest).actionGet(); assertThat(searchShardsResponse.getGroups(), hasSize(searchResponse.getTotalShards())); long skippedShards = searchShardsResponse.getGroups().stream().filter(SearchShardsGroup::skipped).count(); assertThat(skippedShards, equalTo((long) searchResponse.getSkippedShards())); }); } } public void testNoCanMatchWithoutQuery() { Queue<CanMatchNodeRequest> canMatchRequests = ConcurrentCollections.newQueue(); for (TransportService transportService : internalCluster().getInstances(TransportService.class)) { MockTransportService ts = (MockTransportService) transportService; ts.addSendBehavior((connection, requestId, action, request, options) -> { if (action.equals(SearchTransportService.QUERY_CAN_MATCH_NODE_NAME)) { canMatchRequests.add((CanMatchNodeRequest) request); } connection.sendRequest(requestId, action, request, options); }); } try { int numIndices = randomIntBetween(1, 10); int totalShards = 0; for (int i = 0; i < numIndices; i++) { String index = "index-" + i; int numShards = between(1, 5); ElasticsearchAssertions.assertAcked( indicesAdmin().prepareCreate(index) .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards)) ); totalShards += numShards; int numDocs = randomIntBetween(10, 100); for (int j = 0; j < numDocs; j++) { 
prepareIndex(index).setSource("value", i).setId(Integer.toString(i)).get(); } indicesAdmin().prepareRefresh(index).get(); } SearchShardsRequest request = new SearchShardsRequest( new String[] { "index-*" }, IndicesOptions.LENIENT_EXPAND_OPEN, randomBoolean() ? new MatchAllQueryBuilder() : null, null, null, randomBoolean(), null ); SearchShardsResponse resp = client().execute(TransportSearchShardsAction.TYPE, request).actionGet(); assertThat(resp.getGroups(), hasSize(totalShards)); for (SearchShardsGroup group : resp.getGroups()) { assertFalse(group.skipped()); } assertThat(canMatchRequests, emptyIterable()); } finally { for (TransportService transportService : internalCluster().getInstances(TransportService.class)) { ((MockTransportService) transportService).clearAllRules(); } } } }
SearchShardsIT
java
spring-projects__spring-security
web/src/main/java/org/springframework/security/web/savedrequest/DefaultSavedRequest.java
{ "start": 2278, "end": 9751 }
class ____ implements SavedRequest { private static final long serialVersionUID = 620L; protected static final Log logger = LogFactory.getLog(DefaultSavedRequest.class); private static final String HEADER_IF_NONE_MATCH = "If-None-Match"; private static final String HEADER_IF_MODIFIED_SINCE = "If-Modified-Since"; private final ArrayList<SavedCookie> cookies = new ArrayList<>(); private final ArrayList<Locale> locales = new ArrayList<>(); private final Map<String, List<String>> headers = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); private final Map<String, String[]> parameters = new TreeMap<>(); private final @Nullable String contextPath; private final String method; private final @Nullable String pathInfo; private final @Nullable String queryString; private final String requestURI; private final @Nullable String requestURL; private final String scheme; private final String serverName; private final @Nullable String servletPath; private final int serverPort; private final @Nullable String matchingRequestParameterName; public DefaultSavedRequest(HttpServletRequest request) { this(request, (String) null); } public DefaultSavedRequest(HttpServletRequest request, @Nullable String matchingRequestParameterName) { Assert.notNull(request, "Request required"); // Cookies addCookies(request.getCookies()); // Headers Enumeration<String> names = request.getHeaderNames(); while (names.hasMoreElements()) { String name = names.nextElement(); // Skip If-Modified-Since and If-None-Match header. SEC-1412, SEC-1624. 
if (HEADER_IF_MODIFIED_SINCE.equalsIgnoreCase(name) || HEADER_IF_NONE_MATCH.equalsIgnoreCase(name)) { continue; } Enumeration<String> values = request.getHeaders(name); while (values.hasMoreElements()) { this.addHeader(name, values.nextElement()); } } // Locales addLocales(request.getLocales()); // Parameters addParameters(request.getParameterMap()); // Primitives this.method = request.getMethod(); this.pathInfo = request.getPathInfo(); this.queryString = request.getQueryString(); this.requestURI = request.getRequestURI(); this.serverPort = request.getServerPort(); this.requestURL = request.getRequestURL().toString(); this.scheme = request.getScheme(); this.serverName = request.getServerName(); this.contextPath = request.getContextPath(); this.servletPath = request.getServletPath(); this.matchingRequestParameterName = matchingRequestParameterName; } /** * Private constructor invoked through Builder */ private DefaultSavedRequest(Builder builder) { this.contextPath = builder.contextPath; this.method = (builder.method != null) ? 
builder.method : "GET"; this.pathInfo = builder.pathInfo; this.queryString = builder.queryString; this.requestURI = Objects.requireNonNull(builder.requestURI); this.requestURL = builder.requestURL; this.scheme = Objects.requireNonNull(builder.scheme); this.serverName = Objects.requireNonNull(builder.serverName); this.servletPath = builder.servletPath; this.serverPort = builder.serverPort; this.matchingRequestParameterName = builder.matchingRequestParameterName; } /** * @since 4.2 */ private void addCookies(Cookie[] cookies) { if (cookies != null) { for (Cookie cookie : cookies) { this.addCookie(cookie); } } } private void addCookie(Cookie cookie) { this.cookies.add(new SavedCookie(cookie)); } private void addHeader(String name, String value) { List<String> values = this.headers.computeIfAbsent(name, (key) -> new ArrayList<>()); values.add(value); } /** * @since 4.2 */ private void addLocales(Enumeration<Locale> locales) { while (locales.hasMoreElements()) { Locale locale = locales.nextElement(); this.addLocale(locale); } } private void addLocale(Locale locale) { this.locales.add(locale); } /** * @since 4.2 */ private void addParameters(Map<String, String[]> parameters) { if (!ObjectUtils.isEmpty(parameters)) { for (String paramName : parameters.keySet()) { Object paramValues = parameters.get(paramName); if (paramValues instanceof String[]) { this.addParameter(paramName, (String[]) paramValues); } else { logger.warn("ServletRequest.getParameterMap() returned non-String array"); } } } } private void addParameter(String name, String[] values) { this.parameters.put(name, values); } public @Nullable String getContextPath() { return this.contextPath; } @Override public List<Cookie> getCookies() { List<Cookie> cookieList = new ArrayList<>(this.cookies.size()); for (SavedCookie savedCookie : this.cookies) { cookieList.add(savedCookie.getCookie()); } return cookieList; } /** * Indicates the URL that the user agent used for this request. 
* @return the full URL of this request */ @Override public String getRedirectUrl() { String queryString = createQueryString(this.queryString, this.matchingRequestParameterName); return UrlUtils.buildFullRequestUrl(this.scheme, this.serverName, this.serverPort, this.requestURI, queryString); } @Override public Collection<String> getHeaderNames() { return this.headers.keySet(); } @Override public List<String> getHeaderValues(String name) { List<String> values = this.headers.get(name); return (values != null) ? values : Collections.emptyList(); } @Override public List<Locale> getLocales() { return this.locales; } @Override public String getMethod() { return this.method; } @Override public Map<String, String[]> getParameterMap() { return this.parameters; } public Collection<String> getParameterNames() { return this.parameters.keySet(); } @Override public String @Nullable [] getParameterValues(String name) { return this.parameters.get(name); } public @Nullable String getPathInfo() { return this.pathInfo; } public @Nullable String getQueryString() { return (this.queryString); } public @Nullable String getRequestURI() { return (this.requestURI); } public @Nullable String getRequestURL() { return this.requestURL; } public @Nullable String getScheme() { return this.scheme; } public @Nullable String getServerName() { return this.serverName; } public int getServerPort() { return this.serverPort; } public @Nullable String getServletPath() { return this.servletPath; } private boolean propertyEquals(@Nullable Object arg1, Object arg2) { if ((arg1 == null) && (arg2 == null)) { return true; } if (arg1 == null || arg2 == null) { return false; } return arg1.equals(arg2); } @Override public String toString() { return "DefaultSavedRequest [" + getRedirectUrl() + "]"; } private static @Nullable String createQueryString(@Nullable String queryString, @Nullable String matchingRequestParameterName) { if (matchingRequestParameterName == null) { return queryString; } if (queryString == null 
|| queryString.length() == 0) { return matchingRequestParameterName; } return UriComponentsBuilder.newInstance() .query(queryString) .replaceQueryParam(matchingRequestParameterName) .queryParam(matchingRequestParameterName) .build() .getQuery(); } /** * @since 4.2 */ @JsonIgnoreProperties(ignoreUnknown = true) @com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder(withPrefix = "set") @tools.jackson.databind.annotation.JsonPOJOBuilder(withPrefix = "set") public static
DefaultSavedRequest
java
apache__kafka
metadata/src/test/java/org/apache/kafka/metadata/ListenerInfoTest.java
{ "start": 1536, "end": 8146 }
class ____ { private static final Endpoint INTERNAL = new Endpoint("INTERNAL", SecurityProtocol.PLAINTEXT, null, 0); private static final Endpoint EXTERNAL = new Endpoint("EXTERNAL", SecurityProtocol.SASL_SSL, "example.com", 9092); private static final Endpoint SSL = new Endpoint("SSL", SecurityProtocol.SSL, "", 9093); private static final Endpoint SASL_PLAINTEXT = new Endpoint("SASL_PLAINTEXT", SecurityProtocol.SASL_PLAINTEXT, "example2.com", 9094); private static final List<Endpoint> ALL = List.of( INTERNAL, EXTERNAL, SSL, SASL_PLAINTEXT); @Test public void testNullHostname() { assertNull(ListenerInfo.create(List.of(INTERNAL)).firstListener().host()); } @Test public void testNullHostnameGetsResolved() throws Exception { assertNotNull(ListenerInfo.create(List.of(INTERNAL)). withWildcardHostnamesResolved().firstListener().host()); } @Test public void testEmptyHostname() { assertEquals("", ListenerInfo.create(List.of(SSL)).firstListener().host()); } @Test public void testEmptyHostnameGetsResolved() throws Exception { assertNotEquals("", ListenerInfo.create(List.of(SSL)). withWildcardHostnamesResolved().firstListener().host()); } @ParameterizedTest @ValueSource(ints = {0, 1, 2, 3}) public void testCreatePreservesOrdering(int startIndex) { List<Endpoint> endpoints = new ArrayList<>(); for (int i = 0; i < ALL.size(); i++) { endpoints.add(ALL.get((i + startIndex) % ALL.size())); } ListenerInfo listenerInfo = ListenerInfo.create(endpoints); assertEquals(ALL.get(startIndex).listener(), listenerInfo.firstListener().listener()); } @ParameterizedTest @ValueSource(ints = {0, 1, 2, 3}) public void testCreateWithExplicitFirstListener(int startIndex) { ListenerInfo listenerInfo = ListenerInfo.create(Optional.of(ALL.get(startIndex).listener()), ALL); assertEquals(ALL.get(startIndex).listener(), listenerInfo.firstListener().listener()); } @Test public void testRoundTripToControllerRegistrationRequest() throws Exception { ListenerInfo listenerInfo = ListenerInfo.create(ALL). 
withWildcardHostnamesResolved(). withEphemeralPortsCorrected(__ -> 9094); ListenerInfo newListenerInfo = ListenerInfo.fromControllerRegistrationRequest( listenerInfo.toControllerRegistrationRequest()); assertEquals(listenerInfo, newListenerInfo); } @Test public void testToControllerRegistrationRequestFailsOnNullHost() { assertThrows(RuntimeException.class, () -> ListenerInfo.create(List.of(INTERNAL)). toControllerRegistrationRequest()); } @Test public void testToControllerRegistrationRequestFailsOnZeroPort() { assertThrows(RuntimeException.class, () -> ListenerInfo.create(List.of(INTERNAL)). withWildcardHostnamesResolved(). toControllerRegistrationRequest()); } @Test public void testRoundTripToControllerRegistrationRecord() throws Exception { ListenerInfo listenerInfo = ListenerInfo.create(ALL). withWildcardHostnamesResolved(). withEphemeralPortsCorrected(__ -> 9094); ListenerInfo newListenerInfo = ListenerInfo.fromControllerRegistrationRecord( listenerInfo.toControllerRegistrationRecord()); assertEquals(listenerInfo, newListenerInfo); } @Test public void testToControllerRegistrationRecordFailsOnNullHost() { assertThrows(RuntimeException.class, () -> ListenerInfo.create(List.of(INTERNAL)). toControllerRegistrationRecord()); } @Test public void testToControllerRegistrationRecordFailsOnZeroPort() { assertThrows(RuntimeException.class, () -> ListenerInfo.create(List.of(INTERNAL)). withWildcardHostnamesResolved(). toControllerRegistrationRecord()); } @Test public void testRoundTripToBrokerRegistrationRequest() throws Exception { ListenerInfo listenerInfo = ListenerInfo.create(ALL). withWildcardHostnamesResolved(). withEphemeralPortsCorrected(__ -> 9094); ListenerInfo newListenerInfo = ListenerInfo.fromBrokerRegistrationRequest( listenerInfo.toBrokerRegistrationRequest()); assertEquals(listenerInfo, newListenerInfo); } @Test public void testToBrokerRegistrationRequestFailsOnNullHost() { assertThrows(RuntimeException.class, () -> ListenerInfo.create(List.of(INTERNAL)). 
toBrokerRegistrationRequest()); } @Test public void testToBrokerRegistrationRequestFailsOnZeroPort() { assertThrows(RuntimeException.class, () -> ListenerInfo.create(List.of(INTERNAL)). withWildcardHostnamesResolved(). toBrokerRegistrationRequest()); } @Test public void testRoundTripToBrokerRegistrationRecord() throws Exception { ListenerInfo listenerInfo = ListenerInfo.create(ALL). withWildcardHostnamesResolved(). withEphemeralPortsCorrected(__ -> 9094); ListenerInfo newListenerInfo = ListenerInfo.fromBrokerRegistrationRecord( listenerInfo.toBrokerRegistrationRecord()); assertEquals(listenerInfo, newListenerInfo); } @Test public void testToBrokerRegistrationRecordFailsOnNullHost() { assertThrows(RuntimeException.class, () -> ListenerInfo.create(List.of(INTERNAL)). toBrokerRegistrationRecord()); } @Test public void testToBrokerRegistrationRecordFailsOnZeroPort() { assertThrows(RuntimeException.class, () -> ListenerInfo.create(List.of(INTERNAL)). withWildcardHostnamesResolved(). toBrokerRegistrationRecord()); } @Test public void testToString() { ListenerInfo listenerInfo = ListenerInfo.create(List.of(EXTERNAL, SASL_PLAINTEXT)); assertEquals("ListenerInfo(Endpoint(listenerName='EXTERNAL', securityProtocol=SASL_SSL, host='example.com', port=9092), " + "Endpoint(listenerName='SASL_PLAINTEXT', securityProtocol=SASL_PLAINTEXT, host='example2.com', port=9094))", listenerInfo.toString()); } }
ListenerInfoTest
java
apache__spark
sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
{ "start": 66109, "end": 66686 }
class ____ implements Serializable { private Set<Long> fields; public Set<Long> getFields() { return fields; } public void setFields(Set<Long> fields) { this.fields = fields; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; BeanWithSet that = (BeanWithSet) o; return Objects.equals(fields, that.fields); } @Override public int hashCode() { return Objects.hashCode(fields); } } public static
BeanWithSet
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/ErasureCodeBenchmarkThroughput.java
{ "start": 4189, "end": 10048 }
enum ____ { READ, WRITE, GEN, CLEAN; } public static String getFilePath(int dataSizeMB, boolean isEc) { String parent = isEc ? EC_DIR : REP_DIR; String file = isEc ? EC_FILE_BASE : REP_FILE_BASE; return parent + "/" + file + dataSizeMB + "MB"; } private static void printUsage(String msg) { if (msg != null) { System.out.println(msg); } System.err.println("Usage: ErasureCodeBenchmarkThroughput " + "<read|write|gen|clean> <size in MB> " + "<ec|rep> [num clients] [stf|pos]\n" + "Stateful and positional option is only available for read."); System.exit(1); } private List<Long> doBenchmark(boolean isRead, int dataSizeMB, int numClients, boolean isEc, boolean statefulRead, boolean isGen) throws Exception { CompletionService<Long> cs = new ExecutorCompletionService<Long>( Executors.newFixedThreadPool(numClients)); for (int i = 0; i < numClients; i++) { cs.submit(isRead ? new ReadCallable(dataSizeMB, isEc, i, statefulRead) : new WriteCallable(dataSizeMB, isEc, i, isGen)); } List<Long> results = new ArrayList<>(numClients); for (int i = 0; i < numClients; i++) { results.add(cs.take().get()); } return results; } private void setReadThreadPoolSize(int numClients) { int numThread = numClients * ecPolicy.getNumDataUnits(); getConf().setInt(HdfsClientConfigKeys.StripedRead.THREADPOOL_SIZE_KEY, numThread); } private DecimalFormat getDecimalFormat() { return new DecimalFormat("#.##"); } private void benchmark(OpType type, int dataSizeMB, int numClients, boolean isEc, boolean statefulRead) throws Exception { List<Long> sizes = null; StopWatch sw = new StopWatch().start(); switch (type) { case READ: sizes = doBenchmark(true, dataSizeMB, numClients, isEc, statefulRead, false); break; case WRITE: sizes = doBenchmark( false, dataSizeMB, numClients, isEc, statefulRead, false); break; case GEN: sizes = doBenchmark(false, dataSizeMB, numClients, isEc, statefulRead, true); } long elapsedSec = sw.now(TimeUnit.SECONDS); double totalDataSizeMB = 0; for (Long size : sizes) { if (size >= 0) { 
totalDataSizeMB += size.doubleValue() / 1024 / 1024; } } double throughput = totalDataSizeMB / elapsedSec; DecimalFormat df = getDecimalFormat(); System.out.println(type + " " + df.format(totalDataSizeMB) + " MB data takes: " + elapsedSec + " s.\nTotal throughput: " + df.format(throughput) + " MB/s."); } private void setUpDir() throws IOException { DistributedFileSystem dfs = (DistributedFileSystem) fs; dfs.mkdirs(new Path(DFS_TMP_DIR)); Path repPath = new Path(REP_DIR); Path ecPath = new Path(EC_DIR); if (!dfs.exists(repPath)) { dfs.mkdirs(repPath); } else { Preconditions.checkArgument( dfs.getClient().getErasureCodingPolicy(repPath.toString()) == null); } if (!dfs.exists(ecPath)) { dfs.mkdirs(ecPath); dfs.getClient() .setErasureCodingPolicy(ecPath.toString(), ecPolicy.getName()); } else { Preconditions.checkArgument( dfs.getClient(). getErasureCodingPolicy(ecPath.toString()).equals(ecPolicy)); } } @Override public int run(String[] args) throws Exception { OpType type = null; int dataSizeMB = 0; boolean isEc = true; int numClients = 1; boolean statefulRead = true; if (args.length >= 3) { if (args[0].equals("read")) { type = OpType.READ; } else if (args[0].equals("write")) { type = OpType.WRITE; } else if (args[0].equals("gen")) { type = OpType.GEN; } else if (args[0].equals("clean")) { type = OpType.CLEAN; } else { printUsage("Unknown operation: " + args[0]); } try { dataSizeMB = Integer.parseInt(args[1]); if (dataSizeMB <= 0) { printUsage("Invalid data size: " + dataSizeMB); } } catch (NumberFormatException e) { printUsage("Invalid data size: " + e.getMessage()); } isEc = args[2].equals("ec"); if (!isEc && !args[2].equals("rep")) { printUsage("Unknown storage policy: " + args[2]); } } else { printUsage(null); } if (args.length >= 4 && type != OpType.CLEAN) { try { numClients = Integer.parseInt(args[3]); if (numClients <= 0) { printUsage("Invalid num of clients: " + numClients); } } catch (NumberFormatException e) { printUsage("Invalid num of clients: " + 
e.getMessage()); } } if (args.length >= 5 && type == OpType.READ) { statefulRead = args[4].equals("stf"); if (!statefulRead && !args[4].equals("pos")) { printUsage("Unknown read mode: " + args[4]); } } setUpDir(); if (type == OpType.CLEAN) { cleanUp(dataSizeMB, isEc); } else { if (type == OpType.READ && isEc) { setReadThreadPoolSize(numClients); } benchmark(type, dataSizeMB, numClients, isEc, statefulRead); } return 0; } private void cleanUp(int dataSizeMB, boolean isEc) throws IOException { final String fileName = getFilePath(dataSizeMB, isEc); Path path = isEc ? new Path(EC_DIR) : new Path(REP_DIR); FileStatus fileStatuses[] = fs.listStatus(path, new PathFilter() { @Override public boolean accept(Path path) { return path.toString().contains(fileName); } }); for (FileStatus fileStatus : fileStatuses) { fs.delete(fileStatus.getPath(), false); } } /** * A Callable that returns the number of bytes read/written */ private abstract
OpType
java
apache__flink
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/util/collections/binary/AbstractBytesMultiMap.java
{ "start": 13479, "end": 15201 }
class ____ implements KeyValueIterator<K, Iterator<RowData>> { private int count; private final boolean requiresCopy; public EntryIterator(boolean requiresCopy) { this.requiresCopy = requiresCopy; reusedValueIterator.setRequiresCopy(requiresCopy); count = 0; if (numKeys > 0) { recordArea.setReadPosition(0); } } @Override public boolean advanceNext() throws IOException { if (count < numKeys) { count++; keySerializer.mapFromPages(reusedKey, keyInView); // skip end pointer of value skipPointer(keyInView); // read pointer to second value pointerToSecondValue = readPointer(keyInView); reusedRecord = valueSerializer.mapFromPages(reusedRecord, keyInView); reusedValueIterator.setOffset(pointerToSecondValue); return true; } return false; } @Override public K getKey() { return requiresCopy ? keySerializer.copy(reusedKey) : reusedKey; } @Override public Iterator<RowData> getValue() { return reusedValue; } public boolean hasNext() { return count < numKeys; } } Iterator<RowData> valueIterator(int valueOffset) { reusedValueIterator.setOffset(valueOffset); return reusedValueIterator; } final
EntryIterator
java
elastic__elasticsearch
x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/SpatialPluginTests.java
{ "start": 2455, "end": 11244 }
class ____ extends ESTestCase { public void testGeoShapeCentroidLicenseCheck() { checkLicenseRequired(GeoShapeValuesSourceType.instance(), GeoCentroidAggregationBuilder.REGISTRY_KEY, (agg) -> { try { agg.build(null, null, null, null, null); } catch (IOException e) { fail("Unexpected exception: " + e.getMessage()); } }, "geo_centroid", "geo_shape"); } public void testGeoHexLicenseCheck() { checkLicenseRequired(CoreValuesSourceType.GEOPOINT, GeoHexGridAggregationBuilder.REGISTRY_KEY, (agg) -> { try { agg.build(null, AggregatorFactories.EMPTY, null, 0, null, 0, 0, null, null, CardinalityUpperBound.NONE, null); } catch (IOException e) { fail("Unexpected exception: " + e.getMessage()); } }, "geohex_grid", "geo_point"); } public void testGeoShapeHexLicenseCheck() { checkLicenseRequired(GeoShapeValuesSourceType.instance(), GeoHexGridAggregationBuilder.REGISTRY_KEY, (agg) -> { try { agg.build(null, AggregatorFactories.EMPTY, null, 0, null, 0, 0, null, null, CardinalityUpperBound.NONE, null); } catch (IOException e) { fail("Unexpected exception: " + e.getMessage()); } }, "geohex_grid", "geo_shape"); } public void testGeoGridLicenseCheck() { for (ValuesSourceRegistry.RegistryKey<GeoGridAggregatorSupplier> registryKey : Arrays.asList( GeoHashGridAggregationBuilder.REGISTRY_KEY, GeoTileGridAggregationBuilder.REGISTRY_KEY )) { checkLicenseRequired(GeoShapeValuesSourceType.instance(), registryKey, (agg) -> { try { agg.build(null, AggregatorFactories.EMPTY, null, 0, null, 0, 0, null, null, CardinalityUpperBound.NONE, null); } catch (IOException e) { fail("Unexpected exception: " + e.getMessage()); } }, registryKey.getName(), "geo_shape"); } } public void testCartesianShapeCentroidLicenseCheck() { checkLicenseRequired(CartesianShapeValuesSourceType.instance(), CartesianCentroidAggregationBuilder.REGISTRY_KEY, (agg) -> { try { agg.build(null, null, null, null, null); } catch (IOException e) { fail("Unexpected exception: " + e.getMessage()); } }, "cartesian_centroid", "shape"); } 
public void testCartesianPointCentroidLicenseCheck() { checkLicenseNotRequired(CartesianPointValuesSourceType.instance(), CartesianCentroidAggregationBuilder.REGISTRY_KEY, (agg) -> { try { agg.build(null, null, null, null, null); } catch (IOException e) { fail("Unexpected exception: " + e.getMessage()); } }, "cartesian_centroid", "point"); } public void testCartesianPointBoundsLicenseCheck() { CartesianPointValuesSourceType sourceType = CartesianPointValuesSourceType.instance(); TestValuesSourceConfig sourceConfig = new TestValuesSourceConfig(sourceType); checkLicenseNotRequired(sourceType, CartesianBoundsAggregationBuilder.REGISTRY_KEY, (agg) -> { try { agg.build(null, null, null, sourceConfig, null); } catch (IOException e) { fail("Unexpected exception: " + e.getMessage()); } }, "cartesian_bounds", "point"); } public void testCartesianShapeBoundsLicenseCheck() { CartesianShapeValuesSourceType sourceType = CartesianShapeValuesSourceType.instance(); TestValuesSourceConfig sourceConfig = new TestValuesSourceConfig(sourceType); checkLicenseNotRequired(sourceType, CartesianBoundsAggregationBuilder.REGISTRY_KEY, (agg) -> { try { agg.build(null, null, null, sourceConfig, null); } catch (IOException e) { fail("Unexpected exception: " + e.getMessage()); } }, "cartesian_bounds", "shape"); } public void testGenericNamedWriteables() { SearchModule module = new SearchModule(Settings.EMPTY, List.of(new SpatialPlugin())); Set<String> names = module.getNamedWriteables() .stream() .filter(e -> e.categoryClass.equals(GenericNamedWriteable.class)) .map(e -> e.name) .collect(Collectors.toSet()); assertThat( "Expect both Geo and Cartesian BoundingBox and ShapeValue", names, equalTo(Set.of("GeoBoundingBox", "CartesianBoundingBox", "GeoShapeValue", "CartesianShapeValue")) ); } private SpatialPlugin getPluginWithOperationMode(License.OperationMode operationMode) { return new SpatialPlugin() { protected XPackLicenseState getLicenseState() { TestUtils.UpdatableLicenseState licenseState = 
new TestUtils.UpdatableLicenseState(); licenseState.update(new XPackLicenseStatus(operationMode, true, null)); return licenseState; } }; } private <T> void checkLicenseNotRequired( ValuesSourceType sourceType, ValuesSourceRegistry.RegistryKey<T> registryKey, Consumer<T> builder, String aggName, String fieldTypeName ) { for (License.OperationMode operationMode : License.OperationMode.values()) { SpatialPlugin plugin = getPluginWithOperationMode(operationMode); ValuesSourceRegistry.Builder registryBuilder = new ValuesSourceRegistry.Builder(); List<Consumer<ValuesSourceRegistry.Builder>> registrar = plugin.getAggregationExtentions(); registrar.forEach(c -> c.accept(registryBuilder)); List<SearchPlugin.AggregationSpec> specs = plugin.getAggregations(); specs.forEach(c -> c.getAggregatorRegistrar().accept(registryBuilder)); ValuesSourceRegistry registry = registryBuilder.build(); T aggregator = registry.getAggregator( registryKey, new ValuesSourceConfig(sourceType, null, true, null, null, null, null, null) ); NullPointerException exception = expectThrows(NullPointerException.class, () -> builder.accept(aggregator)); assertThat( "Incorrect exception testing " + aggName + " on field " + fieldTypeName, exception.getMessage(), containsString("because \"context\" is null") ); } } private <T> void checkLicenseRequired( ValuesSourceType sourceType, ValuesSourceRegistry.RegistryKey<T> registryKey, Consumer<T> builder, String aggName, String fieldTypeName ) { for (License.OperationMode operationMode : License.OperationMode.values()) { SpatialPlugin plugin = getPluginWithOperationMode(operationMode); ValuesSourceRegistry.Builder registryBuilder = new ValuesSourceRegistry.Builder(); List<Consumer<ValuesSourceRegistry.Builder>> registrar = plugin.getAggregationExtentions(); registrar.forEach(c -> c.accept(registryBuilder)); List<SearchPlugin.AggregationSpec> specs = plugin.getAggregations(); specs.forEach(c -> c.getAggregatorRegistrar().accept(registryBuilder)); 
ValuesSourceRegistry registry = registryBuilder.build(); T aggregator = registry.getAggregator( registryKey, new ValuesSourceConfig(sourceType, null, true, null, null, null, null, null) ); if (License.OperationMode.TRIAL != operationMode && License.OperationMode.compare(operationMode, License.OperationMode.GOLD) < 0) { ElasticsearchSecurityException exception = expectThrows( ElasticsearchSecurityException.class, () -> builder.accept(aggregator) ); assertThat( exception.getMessage(), equalTo("current license is non-compliant for [" + aggName + " aggregation on " + fieldTypeName + " fields]") ); } else { try { builder.accept(aggregator); } catch (NullPointerException e) { // Expected exception from passing null aggregation context } catch (Exception e) { fail("Unexpected exception testing " + aggName + " at license level " + operationMode + ": " + e.getMessage()); } } } } private static
SpatialPluginTests
java
google__error-prone
test_helpers/src/test/java/com/google/errorprone/BugCheckerRefactoringTestHelperTest.java
{ "start": 4144, "end": 4390 }
class ____ { public Object foo() { Integer i = 2 + 1; return i; } } """) .addOutputLines( "out/Test.java", """ public
Test
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/codec/perfield/XPerFieldDocValuesFormat.java
{ "start": 9881, "end": 15599 }
class ____ extends DocValuesProducer { private final IntObjectHashMap<DocValuesProducer> fields = new IntObjectHashMap<>(); private final Map<String, DocValuesProducer> formats = new HashMap<>(); // clone for merge FieldsReader(FieldsReader other) { Map<DocValuesProducer, DocValuesProducer> oldToNew = new IdentityHashMap<>(); // First clone all formats for (Map.Entry<String, DocValuesProducer> ent : other.formats.entrySet()) { DocValuesProducer values = ent.getValue().getMergeInstance(); formats.put(ent.getKey(), values); oldToNew.put(ent.getValue(), values); } // Then rebuild fields: for (IntObjectHashMap.IntObjectCursor<DocValuesProducer> ent : other.fields) { DocValuesProducer producer = oldToNew.get(ent.value); assert producer != null; fields.put(ent.key, producer); } } FieldsReader(final SegmentReadState readState) throws IOException { // Init each unique format: boolean success = false; try { // Read field name -> format name for (FieldInfo fi : readState.fieldInfos) { if (fi.getDocValuesType() != DocValuesType.NONE) { final String fieldName = fi.name; final String formatName = fi.getAttribute(PER_FIELD_FORMAT_KEY); if (formatName != null) { // null formatName means the field is in fieldInfos, but has no docvalues! 
final String suffix = fi.getAttribute(PER_FIELD_SUFFIX_KEY); if (suffix == null) { throw new IllegalStateException("missing attribute: " + PER_FIELD_SUFFIX_KEY + " for field: " + fieldName); } DocValuesFormat format = DocValuesFormat.forName(formatName); String segmentSuffix = getFullSegmentSuffix(readState.segmentSuffix, getSuffix(formatName, suffix)); if (formats.containsKey(segmentSuffix) == false) { formats.put(segmentSuffix, format.fieldsProducer(new SegmentReadState(readState, segmentSuffix))); } fields.put(fi.number, formats.get(segmentSuffix)); } } } success = true; } finally { if (success == false) { IOUtils.closeWhileHandlingException(formats.values()); } } } // FORK note: the reason why PerFieldDocValuesFormat is forked: public DocValuesProducer getDocValuesProducer(FieldInfo field) { return fields.get(field.number); } public Map<String, DocValuesProducer> getFormats() { return formats; } @Override public NumericDocValues getNumeric(FieldInfo field) throws IOException { DocValuesProducer producer = fields.get(field.number); return producer == null ? null : producer.getNumeric(field); } @Override public BinaryDocValues getBinary(FieldInfo field) throws IOException { DocValuesProducer producer = fields.get(field.number); return producer == null ? null : producer.getBinary(field); } @Override public SortedDocValues getSorted(FieldInfo field) throws IOException { DocValuesProducer producer = fields.get(field.number); return producer == null ? null : producer.getSorted(field); } @Override public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { DocValuesProducer producer = fields.get(field.number); return producer == null ? null : producer.getSortedNumeric(field); } @Override public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { DocValuesProducer producer = fields.get(field.number); return producer == null ? 
null : producer.getSortedSet(field); } @Override public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { DocValuesProducer producer = fields.get(field.number); return producer == null ? null : producer.getSkipper(field); } @Override public void close() throws IOException { IOUtils.close(formats.values()); } @Override public void checkIntegrity() throws IOException { for (DocValuesProducer format : formats.values()) { format.checkIntegrity(); } } @Override public DocValuesProducer getMergeInstance() { return new FieldsReader(this); } @Override public String toString() { return "PerFieldDocValues(formats=" + formats.size() + ")"; } } @Override public final DocValuesProducer fieldsProducer(SegmentReadState state) throws IOException { return new FieldsReader(state); } /** * Returns the doc values format that should be used for writing new segments of <code>field * </code>. * * <p>The field to format mapping is written to the index, so this method is only invoked when * writing, not when reading. */ public abstract DocValuesFormat getDocValuesFormatForField(String field); }
FieldsReader
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java
{ "start": 7126, "end": 9066 }
class ____. Keep going up until you hit a function with that name. * Then add your new "ENTRY" constant to the list it returns. * </li> * <li> * Rerun the {@code CsvTests}. They should find your function and maybe even pass. Add a * few more tests in the csv-spec tests. They run quickly so it isn't a big deal having * half a dozen of them per function. In fact, it's useful to add more complex combinations * of things here, just to catch any accidental strange interactions. For example, have * your function take its input from an index like {@code FROM employees | EVAL foo=MY_FUNCTION(emp_no)}. * It's probably a good idea to have your function passed as a parameter to another function * like {@code EVAL foo=MOST(0, MY_FUNCTION(emp_no))}. And likely useful to try the reverse * like {@code EVAL foo=MY_FUNCTION(MOST(languages + 10000, emp_no))}. * </li> * <li> * Now it's time to make a unit test! The infrastructure for these is under some flux at * the moment, but it's good to extend {@code AbstractScalarFunctionTestCase}. All of * these tests are parameterized and expect to spend some time finding good parameters. * Also add serialization tests that extend {@code AbstractExpressionSerializationTests<>}. * And also add type error tests that extend {@code ErrorsForCasesWithoutExamplesTestCase}. * </li> * <li> * Once you are happy with the tests run the auto formatter: * {@code ./gradlew -p x-pack/plugin/esql/ spotlessApply} * </li> * <li> * Now you can run all of the ESQL tests like CI: * {@code ./gradlew -p x-pack/plugin/esql/ test} * </li> * <li> * We need to tag to what release the function applies to so we can generate docs in the next step! * On the constructor of your function
hierarchy
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java
{ "start": 937, "end": 1215 }
class ____ action requests that track acknowledgements of cluster state updates: such a request is acknowledged only once * the cluster state update is committed and all relevant nodes have applied it and acknowledged its application to the elected master. */ public abstract
for