language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
quarkusio__quarkus
extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/engineconfigurations/section/WrongTargetConstructorTest.java
{ "start": 1024, "end": 1461 }
class ____ with @EngineConfiguration that also implements SectionHelperFactory or ParserHelper must be public and declare a no-args constructor"), rootCause.toString()); } else { fail("No TemplateException thrown: " + t); } }); @Test public void testValidation() { fail(); } @EngineConfiguration public static
annotated
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/associations/JoinColumnOrFormulaTest.java
{ "start": 870, "end": 2459 }
class ____ { @Test public void testLifecycle(EntityManagerFactoryScope scope) { //tag::associations-JoinColumnOrFormula-persistence-example[] Country US = new Country(); US.setId( 1 ); US.setDefault( true ); US.setPrimaryLanguage( "English" ); US.setName( "United States" ); Country Romania = new Country(); Romania.setId( 40 ); Romania.setDefault( true ); Romania.setName( "Romania" ); Romania.setPrimaryLanguage( "Romanian" ); scope.inTransaction( entityManager -> { entityManager.persist( US ); entityManager.persist( Romania ); } ); scope.inTransaction( entityManager -> { User user1 = new User(); user1.setId( 1L ); user1.setFirstName( "John" ); user1.setLastName( "Doe" ); user1.setLanguage( "English" ); entityManager.persist( user1 ); User user2 = new User(); user2.setId( 2L ); user2.setFirstName( "Vlad" ); user2.setLastName( "Mihalcea" ); user2.setLanguage( "Romanian" ); entityManager.persist( user2 ); } ); //end::associations-JoinColumnOrFormula-persistence-example[] //tag::associations-JoinColumnOrFormula-fetching-example[] scope.inTransaction( entityManager -> { User john = entityManager.find( User.class, 1L ); assertThat( john.getCountry() ).isEqualTo( US ); User vlad = entityManager.find( User.class, 2L ); assertThat( vlad.getCountry() ).isEqualTo( Romania ); } ); //end::associations-JoinColumnOrFormula-fetching-example[] } //tag::associations-JoinColumnOrFormula-example[] @Entity(name = "User") @Table(name = "users") public static
JoinColumnOrFormulaTest
java
google__dagger
javatests/dagger/internal/codegen/ComponentRequirementFieldTest.java
{ "start": 1393, "end": 2014 }
interface ____ {}"); @Rule public GoldenFileRule goldenFileRule = new GoldenFileRule(); private final CompilerMode compilerMode; public ComponentRequirementFieldTest(CompilerMode compilerMode) { this.compilerMode = compilerMode; } @Test public void bindsInstance() throws Exception { Source component = CompilerTests.javaSource( "test.TestComponent", "package test;", "", "import dagger.BindsInstance;", "import dagger.Component;", "import java.util.List;", "", "@Component", "
Nullable
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/stat/internal/AbstractCacheableDataStatistics.java
{ "start": 466, "end": 3448 }
class ____ implements CacheableDataStatistics { private final @Nullable String cacheRegionName; private final @Nullable LongAdder cacheHitCount; private final @Nullable LongAdder cacheMissCount; private final @Nullable LongAdder cachePutCount; private final @Nullable LongAdder cacheRemoveCount; public AbstractCacheableDataStatistics(Supplier<@Nullable Region> regionSupplier) { final var region = regionSupplier.get(); if ( region == null ) { cacheRegionName = null; cacheHitCount = null; cacheMissCount = null; cachePutCount = null; cacheRemoveCount = null; } else { cacheRegionName = region.getName(); cacheHitCount = new LongAdder(); cacheMissCount = new LongAdder(); cachePutCount = new LongAdder(); cacheRemoveCount = new LongAdder(); } } @Override public @Nullable String getCacheRegionName() { return cacheRegionName; } @Override public long getCacheHitCount() { return cacheRegionName == null ? NOT_CACHED_COUNT : NullnessUtil.castNonNull( cacheHitCount ).sum(); } @Override public long getCachePutCount() { return cacheRegionName == null ? NOT_CACHED_COUNT : NullnessUtil.castNonNull( cachePutCount ).sum(); } @Override public long getCacheMissCount() { return cacheRegionName == null ? NOT_CACHED_COUNT : NullnessUtil.castNonNull( cacheMissCount ).sum(); } @Override public long getCacheRemoveCount() { return cacheRegionName == null ? 
NOT_CACHED_COUNT : NullnessUtil.castNonNull( cacheRemoveCount ).sum(); } public void incrementCacheHitCount() { if ( cacheRegionName == null ) { throw new IllegalStateException( "Illegal attempt to increment cache hit count for non-cached data" ); } NullnessUtil.castNonNull( cacheHitCount ).increment(); } public void incrementCacheMissCount() { if ( cacheRegionName == null ) { throw new IllegalStateException( "Illegal attempt to increment cache miss count for non-cached data" ); } NullnessUtil.castNonNull( cacheMissCount ).increment(); } public void incrementCachePutCount() { if ( cacheRegionName == null ) { throw new IllegalStateException( "Illegal attempt to increment cache put count for non-cached data" ); } NullnessUtil.castNonNull( cachePutCount ).increment(); } public void incrementCacheRemoveCount() { if ( cacheRegionName == null ) { throw new IllegalStateException( "Illegal attempt to increment cache put count for non-cached data" ); } NullnessUtil.castNonNull( cacheRemoveCount ).increment(); } protected void appendCacheStats(StringBuilder text) { text.append( ",cacheRegion=" ).append( cacheRegionName ); if ( cacheRegionName != null ) { text.append( ",cacheHitCount=" ).append( getCacheHitCount() ) .append( ",cacheMissCount=" ).append( getCacheMissCount() ) .append( ",cachePutCount=" ).append( getCachePutCount() ) .append( ",cacheRemoveCount=" ).append( getCacheRemoveCount() ); } } }
AbstractCacheableDataStatistics
java
alibaba__nacos
core/src/main/java/com/alibaba/nacos/core/distributed/distro/monitor/DistroRecordsHolder.java
{ "start": 957, "end": 2575 }
class ____ { private static final DistroRecordsHolder INSTANCE = new DistroRecordsHolder(); private final ConcurrentMap<String, DistroRecord> distroRecords; private DistroRecordsHolder() { distroRecords = new ConcurrentHashMap<>(); } public static DistroRecordsHolder getInstance() { return INSTANCE; } public Optional<DistroRecord> getRecordIfExist(String type) { return Optional.ofNullable(distroRecords.get(type)); } public DistroRecord getRecord(String type) { return distroRecords.computeIfAbsent(type, s -> new DistroRecord(type)); } public long getTotalSyncCount() { final AtomicLong result = new AtomicLong(); distroRecords.forEach((s, distroRecord) -> result.addAndGet(distroRecord.getTotalSyncCount())); return result.get(); } public long getSuccessfulSyncCount() { final AtomicLong result = new AtomicLong(); distroRecords.forEach((s, distroRecord) -> result.addAndGet(distroRecord.getSuccessfulSyncCount())); return result.get(); } public long getFailedSyncCount() { final AtomicLong result = new AtomicLong(); distroRecords.forEach((s, distroRecord) -> result.addAndGet(distroRecord.getFailedSyncCount())); return result.get(); } public int getFailedVerifyCount() { final AtomicInteger result = new AtomicInteger(); distroRecords.forEach((s, distroRecord) -> result.addAndGet(distroRecord.getFailedVerifyCount())); return result.get(); } }
DistroRecordsHolder
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/formatstring/LenientFormatStringValidationTest.java
{ "start": 1840, "end": 2256 }
class ____ { void test() { // BUG: Diagnostic contains: Preconditions.checkState(false, "%s", 1, 1); } } """) .doTest(); } @Test public void tooManyArguments_fix() { refactoring .addInputLines( "Test.java", """ import com.google.common.base.Preconditions;
Test
java
apache__flink
flink-python/src/main/java/org/apache/flink/formats/json/JsonRowDeserializationSchema.java
{ "start": 3673, "end": 6548 }
class ____ implements DeserializationSchema<Row> { private static final long serialVersionUID = -228294330688809195L; /** Type information describing the result type. */ private final RowTypeInfo typeInfo; private final boolean failOnMissingField; private final boolean hasDecimalType; /** Object mapper for parsing the JSON. */ private transient ObjectMapper objectMapper; private final DeserializationRuntimeConverter runtimeConverter; /** Flag indicating whether to ignore invalid fields/rows (default: throw an exception). */ private final boolean ignoreParseErrors; private JsonRowDeserializationSchema( TypeInformation<Row> typeInfo, boolean failOnMissingField, boolean ignoreParseErrors) { checkNotNull(typeInfo, "Type information"); checkArgument(typeInfo instanceof RowTypeInfo, "Only RowTypeInfo is supported"); if (ignoreParseErrors && failOnMissingField) { throw new IllegalArgumentException( "JSON format doesn't support failOnMissingField and ignoreParseErrors are both true."); } this.typeInfo = (RowTypeInfo) typeInfo; this.failOnMissingField = failOnMissingField; this.runtimeConverter = createConverter(this.typeInfo); this.ignoreParseErrors = ignoreParseErrors; RowType rowType = (RowType) fromLegacyInfoToDataType(this.typeInfo).getLogicalType(); hasDecimalType = LogicalTypeChecks.hasNested(rowType, t -> t.getTypeRoot().equals(DECIMAL)); } @Override public void open(InitializationContext context) throws Exception { objectMapper = JacksonMapperFactory.createObjectMapper(); if (hasDecimalType) { objectMapper.enable(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS); } } public JsonRowDeserializationSchema(TypeInformation<Row> typeInfo) { this(typeInfo, false, false); } public JsonRowDeserializationSchema(String jsonSchema) { this(JsonRowSchemaConverter.convert(checkNotNull(jsonSchema)), false, false); } @Override public Row deserialize(byte[] message) throws IOException { try { final JsonNode root = objectMapper.readTree(message); return (Row) 
runtimeConverter.convert(objectMapper, root); } catch (Throwable t) { if (ignoreParseErrors) { return null; } throw new IOException( format("Failed to deserialize JSON '%s'.", new String(message)), t); } } @Override public boolean isEndOfStream(Row nextElement) { return false; } @Override public TypeInformation<Row> getProducedType() { return typeInfo; } /** Builder for {@link JsonRowDeserializationSchema}. */ public static
JsonRowDeserializationSchema
java
google__error-prone
check_api/src/main/java/com/google/errorprone/bugpatterns/BugChecker.java
{ "start": 14248, "end": 14373 }
interface ____ extends Suppressible { Description matchCase(CaseTree tree, VisitorState state); } public
CaseTreeMatcher
java
dropwizard__dropwizard
dropwizard-client/src/main/java/io/dropwizard/client/HttpClientBuilder.java
{ "start": 3143, "end": 21102 }
class ____ { private static final HttpRequestRetryStrategy NO_RETRIES = new HttpRequestRetryStrategy() { @Override public boolean retryRequest(HttpRequest request, IOException exception, int execCount, HttpContext context) { return false; } @Override public boolean retryRequest(HttpResponse response, int execCount, HttpContext context) { return false; } @Override @Nullable public TimeValue getRetryInterval(HttpResponse response, int execCount, HttpContext context) { return null; } }; private final MetricRegistry metricRegistry; @Nullable private String environmentName; @Nullable private Environment environment; private HttpClientConfiguration configuration = new HttpClientConfiguration(); private DnsResolver resolver = new SystemDefaultDnsResolver(); @Nullable private HostnameVerifier verifier; @Nullable private HttpRequestRetryStrategy httpRequestRetryStrategy; @Nullable private Registry<ConnectionSocketFactory> registry; @Nullable private CredentialsStore credentialsStore; private HttpClientMetricNameStrategy metricNameStrategy = HttpClientMetricNameStrategies.METHOD_ONLY; @Nullable private HttpRoutePlanner routePlanner; @Nullable private RedirectStrategy redirectStrategy; private boolean disableContentCompression; @Nullable private List<? extends Header> defaultHeaders; @Nullable private HttpProcessor httpProcessor; public HttpClientBuilder(MetricRegistry metricRegistry) { this.metricRegistry = metricRegistry; } public HttpClientBuilder(Environment environment) { this(environment.metrics()); name(environment.getName()); this.environment = environment; } /** * Use the given environment name. This is used in the user agent. * * @param environmentName an environment name to use in the user agent. * @return {@code this} */ public HttpClientBuilder name(String environmentName) { this.environmentName = environmentName; return this; } /** * Use the given {@link HttpClientConfiguration} instance. 
* * @param configuration a {@link HttpClientConfiguration} instance * @return {@code this} */ public HttpClientBuilder using(HttpClientConfiguration configuration) { this.configuration = configuration; return this; } /** * Use the given {@link DnsResolver} instance. * * @param resolver a {@link DnsResolver} instance * @return {@code this} */ public HttpClientBuilder using(DnsResolver resolver) { this.resolver = resolver; return this; } /** * Use the given {@link HostnameVerifier} instance. * * @param verifier a {@link HostnameVerifier} instance * @return {@code this} */ public HttpClientBuilder using(HostnameVerifier verifier) { this.verifier = verifier; return this; } /** * Uses the {@link HttpRequestRetryStrategy} for handling request retries. * * @param httpRequestRetryStrategy an {@link HttpRequestRetryStrategy} * @return {@code this} */ public HttpClientBuilder using(HttpRequestRetryStrategy httpRequestRetryStrategy) { this.httpRequestRetryStrategy = httpRequestRetryStrategy; return this; } /** * Use the given {@link Registry} instance. * * @param registry * @return {@code this} */ public HttpClientBuilder using(Registry<ConnectionSocketFactory> registry) { this.registry = registry; return this; } /** * Use the given {@link HttpRoutePlanner} instance. * * @param routePlanner a {@link HttpRoutePlanner} instance * @return {@code this} */ public HttpClientBuilder using(HttpRoutePlanner routePlanner) { this.routePlanner = routePlanner; return this; } /** * Use the given {@link CredentialsStore} instance. * * @param credentialsStore a {@link CredentialsStore} instance * @return {@code this} */ public HttpClientBuilder using(CredentialsStore credentialsStore) { this.credentialsStore = credentialsStore; return this; } /** * Use the given {@link HttpClientMetricNameStrategy} instance. 
* * @param metricNameStrategy a {@link HttpClientMetricNameStrategy} instance * @return {@code this} */ public HttpClientBuilder using(HttpClientMetricNameStrategy metricNameStrategy) { this.metricNameStrategy = metricNameStrategy; return this; } /** * Use the given {@link RedirectStrategy} instance. * * @param redirectStrategy a {@link RedirectStrategy} instance * @return {@code this} */ public HttpClientBuilder using(RedirectStrategy redirectStrategy) { this.redirectStrategy = redirectStrategy; return this; } /** * Use the given default headers for each HTTP request * * @param defaultHeaders HTTP headers * @return {@code} this */ public HttpClientBuilder using(List<? extends Header> defaultHeaders) { this.defaultHeaders = defaultHeaders; return this; } /** * Use the given {@link HttpProcessor} instance * * @param httpProcessor a {@link HttpProcessor} instance * @return {@code} this */ public HttpClientBuilder using(HttpProcessor httpProcessor) { this.httpProcessor = httpProcessor; return this; } /** * Disable support of decompression of responses * * @param disableContentCompression {@code true}, if disabled * @return {@code this} */ public HttpClientBuilder disableContentCompression(boolean disableContentCompression) { this.disableContentCompression = disableContentCompression; return this; } /** * Builds the {@link org.apache.hc.client5.http.classic.HttpClient}. 
* * @param name * @return an {@link org.apache.hc.client5.http.impl.classic.CloseableHttpClient} */ public CloseableHttpClient build(String name) { final CloseableHttpClient client = buildWithDefaultRequestConfiguration(name).getClient(); // If the environment is present, we tie the client with the server lifecycle if (environment != null) { environment.lifecycle().manage(new Managed() { @Override public void stop() throws Exception { client.close(); } }); } return client; } /** * For internal use only, used in {@link io.dropwizard.client.JerseyClientBuilder} * to create an instance of {@link io.dropwizard.client.DropwizardApacheConnector} * * @param name * @return an {@link io.dropwizard.client.ConfiguredCloseableHttpClient} */ ConfiguredCloseableHttpClient buildWithDefaultRequestConfiguration(String name) { return createClient(createBuilder(), createConnectionManager(createConfiguredRegistry(), name), name); } /** * Creates a {@link org.apache.hc.core5.http.impl.io.HttpRequestExecutor}. * * Intended for use by subclasses to provide a customized request executor. * The default implementation is an {@link com.codahale.metrics.httpclient5.InstrumentedHttpRequestExecutor} * * @param name * @return a {@link org.apache.hc.core5.http.impl.io.HttpRequestExecutor} * @since 2.0 */ protected HttpRequestExecutor createRequestExecutor(String name) { return new InstrumentedHttpRequestExecutor(metricRegistry, metricNameStrategy, name); } /** * Creates an Apache {@link org.apache.hc.client5.http.impl.classic.HttpClientBuilder}. 
* * Intended for use by subclasses to create builder instance from subclass of * {@link org.apache.hc.client5.http.impl.classic.HttpClientBuilder} * * @return an {@link HttpClientBuilder} * @since 2.0 */ protected org.apache.hc.client5.http.impl.classic.HttpClientBuilder createBuilder() { return org.apache.hc.client5.http.impl.classic.HttpClientBuilder.create(); } /** * Configures an Apache {@link org.apache.hc.client5.http.impl.classic.HttpClientBuilder}. * * Intended for use by subclasses to inject HttpClientBuilder * configuration. The default implementation is an identity * function. */ protected org.apache.hc.client5.http.impl.classic.HttpClientBuilder customizeBuilder( org.apache.hc.client5.http.impl.classic.HttpClientBuilder builder ) { return builder; } /** * Map the parameters in {@link HttpClientConfiguration} to configuration on a * {@link org.apache.hc.client5.http.impl.classic.HttpClientBuilder} instance * * @param builder * @param manager * @param name * @return the configured {@link CloseableHttpClient} */ protected ConfiguredCloseableHttpClient createClient( final org.apache.hc.client5.http.impl.classic.HttpClientBuilder builder, final InstrumentedHttpClientConnectionManager manager, final String name) { final String cookiePolicy = configuration.isCookiesEnabled() ? StandardCookieSpec.RELAXED : StandardCookieSpec.IGNORE; final int timeout = (int) configuration.getTimeout().toMilliseconds(); final int connectionRequestTimeout = (int) configuration.getConnectionRequestTimeout().toMilliseconds(); final long keepAlive = configuration.getKeepAlive().toMilliseconds(); final ConnectionReuseStrategy reuseStrategy = keepAlive == 0 ? ((request, response, context) -> false) : new DefaultConnectionReuseStrategy(); final HttpRequestRetryStrategy retryHandler = configuration.getRetries() == 0 ? NO_RETRIES : (httpRequestRetryStrategy == null ? 
new DefaultHttpRequestRetryStrategy(configuration.getRetries(), TimeValue.ofSeconds(1L)) : httpRequestRetryStrategy); final boolean protocolUpgradeEnabled = configuration.isProtocolUpgradeEnabled(); final RequestConfig requestConfig = RequestConfig.custom().setCookieSpec(cookiePolicy) .setResponseTimeout(timeout, TimeUnit.MILLISECONDS) .setConnectionKeepAlive(TimeValue.of(-1, TimeUnit.MILLISECONDS)) .setConnectionRequestTimeout(connectionRequestTimeout, TimeUnit.MILLISECONDS) .setProtocolUpgradeEnabled(protocolUpgradeEnabled) .build(); final SocketConfig socketConfig = SocketConfig.custom() .setTcpNoDelay(true) .setSoTimeout(timeout, TimeUnit.MILLISECONDS) .build(); manager.setDefaultSocketConfig(socketConfig); builder.setRequestExecutor(createRequestExecutor(name)) .setConnectionManager(manager) .setDefaultRequestConfig(requestConfig) .setConnectionReuseStrategy(reuseStrategy) .setRetryStrategy(retryHandler) .setUserAgent(createUserAgent(name)); if (keepAlive != 0) { // either keep alive based on response header Keep-Alive, // or if the server can keep a persistent connection (-1), then override based on client's configuration builder.setKeepAliveStrategy(new DefaultConnectionKeepAliveStrategy() { @Override public TimeValue getKeepAliveDuration(HttpResponse response, HttpContext context) { final TimeValue duration = super.getKeepAliveDuration(response, context); return (duration.getDuration() == -1) ? 
TimeValue.ofMilliseconds(keepAlive) : duration; } }); } // create a tunnel through a proxy host if it's specified in the config final ProxyConfiguration proxy = configuration.getProxyConfiguration(); if (proxy != null) { final HttpHost httpHost = new HttpHost(proxy.getScheme(), proxy.getHost(), proxy.getPort()); builder.setRoutePlanner(new NonProxyListProxyRoutePlanner(httpHost, proxy.getNonProxyHosts())); // if the proxy host requires authentication then add the host credentials to the credentials provider final AuthConfiguration auth = proxy.getAuth(); if (auth != null) { if (credentialsStore == null) { credentialsStore = new BasicCredentialsProvider(); } // set the AuthScope AuthScope authScope = new AuthScope(httpHost, auth.getRealm(), auth.getAuthScheme()); // set the credentials type Credentials credentials = configureCredentials(auth); credentialsStore.setCredentials(authScope, credentials); } } if (credentialsStore != null) { builder.setDefaultCredentialsProvider(credentialsStore); } if (routePlanner != null) { builder.setRoutePlanner(routePlanner); } if (disableContentCompression) { builder.disableContentCompression(); } if (redirectStrategy != null) { builder.setRedirectStrategy(redirectStrategy); } if (defaultHeaders != null) { builder.setDefaultHeaders(defaultHeaders); } if (httpProcessor != null) { builder.addRequestInterceptorFirst(httpProcessor); builder.addResponseInterceptorLast(httpProcessor); } customizeBuilder(builder); return new ConfiguredCloseableHttpClient(builder.build(), requestConfig); } /** * Create a user agent string using the configured user agent if defined, otherwise * using a combination of the environment name and this client name * * @param name the name of this client * @return the user agent string to be used by this client */ protected String createUserAgent(String name) { final String defaultUserAgent = environmentName == null ? 
name : String.format("%s (%s)", environmentName, name); return configuration.getUserAgent().orElse(defaultUserAgent); } /** * Create a InstrumentedHttpClientConnectionManager based on the * HttpClientConfiguration. It sets the maximum connections per route and * the maximum total connections that the connection manager can create * * @param registry * @param name * @return a InstrumentedHttpClientConnectionManger instance */ protected InstrumentedHttpClientConnectionManager createConnectionManager(Registry<ConnectionSocketFactory> registry, String name) { final Duration ttl = configuration.getTimeToLive(); final InstrumentedHttpClientConnectionManager manager = InstrumentedHttpClientConnectionManager.builder(metricRegistry) .socketFactoryRegistry(registry) .dnsResolver(resolver) .timeToLive(TimeValue.of(ttl.getQuantity(), ttl.getUnit())) .name(name) .build(); return configureConnectionManager(manager); } Registry<ConnectionSocketFactory> createConfiguredRegistry() { if (registry != null) { return registry; } TlsConfiguration tlsConfiguration = configuration.getTlsConfiguration(); if (tlsConfiguration == null && verifier != null) { tlsConfiguration = new TlsConfiguration(); } final SSLConnectionSocketFactory sslConnectionSocketFactory; if (tlsConfiguration == null) { sslConnectionSocketFactory = SSLConnectionSocketFactory.getSocketFactory(); } else { sslConnectionSocketFactory = new DropwizardSSLConnectionSocketFactory(tlsConfiguration, verifier).getSocketFactory(); } return RegistryBuilder.<ConnectionSocketFactory>create() .register("http", PlainConnectionSocketFactory.getSocketFactory()) .register("https", sslConnectionSocketFactory) .build(); } protected InstrumentedHttpClientConnectionManager configureConnectionManager( InstrumentedHttpClientConnectionManager connectionManager) { connectionManager.setDefaultMaxPerRoute(configuration.getMaxConnectionsPerRoute()); connectionManager.setMaxTotal(configuration.getMaxConnections()); final ConnectionConfig 
connectionConfig = ConnectionConfig.custom() .setConnectTimeout(Timeout.of(configuration.getConnectionTimeout().toJavaDuration())) .setValidateAfterInactivity(TimeValue.of(configuration.getValidateAfterInactivityPeriod().toJavaDuration())) .build(); connectionManager.setConnectionConfigResolver(route -> connectionConfig); return connectionManager; } /** * determine the Credentials implementation to use * @param auth * @return a {@code Credentials} instance, either {{@link UsernamePasswordCredentials} or {@link NTCredentials}} */ protected Credentials configureCredentials(AuthConfiguration auth) { if (null != auth.getCredentialType() && auth.getCredentialType().equalsIgnoreCase(AuthConfiguration.NT_CREDS)) { return new NTCredentials(auth.getUsername(), auth.getPassword().toCharArray(), auth.getHostname(), auth.getDomain()); } else { return new UsernamePasswordCredentials(auth.getUsername(), auth.getPassword().toCharArray()); } } }
HttpClientBuilder
java
google__dagger
javatests/dagger/internal/codegen/MissingBindingValidationTest.java
{ "start": 78772, "end": 80701 }
class ____ {", " @Provides", " Bar provideBar() {", " return new Bar<Object>();", " }", "}"); CompilerTests.daggerCompiler(component, fooSrc, barSrc, moduleSrc) .withProcessingOptions(compilerMode.processorOptions()) .compile( subject -> { subject.hasErrorCount(1); subject.hasErrorContaining( String.join( "\n", // TODO(b/324325095): Align KSP and KAPT error message. CompilerTests.backend(subject) == XProcessingEnv.Backend.KSP ? "Bar<?> cannot be provided without an @Inject constructor or an " + "@Provides-annotated method." : "Bar<?> cannot be provided without an @Provides-annotated method.", "", " Bar<?> is injected at", " [MyComponent] Foo(bar)", " Foo is requested at", " [MyComponent] MyComponent.getFoo()", "", "Note: A similar binding is provided in the following other components:", " Bar is provided at:", " [MyComponent] TestModule.provideBar()", JVM_SUPPRESS_WILDCARDS_MESSAGE, "", "======================")); }); } @Test public void missingWildcardType_providedRawType_warnAboutSimilarTypeExists() { Source component = CompilerTests.javaSource( "test.MyComponent", "package test;", "", "import dagger.Component;", "import java.util.Set;", "", "@Component(modules = TestModule.class)", "
TestModule
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/streaming/runtime/operators/sink/committables/CommitRequestImpl.java
{ "start": 1224, "end": 4661 }
class ____<CommT> implements Committer.CommitRequest<CommT> { private CommT committable; private int numRetries; private CommitRequestState state; private SinkCommitterMetricGroup metricGroup; protected CommitRequestImpl(CommT committable, SinkCommitterMetricGroup metricGroup) { this.committable = committable; this.metricGroup = metricGroup; state = CommitRequestState.RECEIVED; } protected CommitRequestImpl( CommT committable, int numRetries, CommitRequestState state, SinkCommitterMetricGroup metricGroup) { this.committable = committable; this.numRetries = numRetries; this.state = state; this.metricGroup = metricGroup; } boolean isFinished() { return state.isFinalState(); } CommitRequestState getState() { return state; } @Override public CommT getCommittable() { return committable; } @Override public int getNumberOfRetries() { return numRetries; } @Override public void signalFailedWithKnownReason(Throwable t) { state = CommitRequestState.FAILED; metricGroup.getNumCommittablesFailureCounter().inc(); // let the user configure a strategy for failing and apply it here } @Override public void signalFailedWithUnknownReason(Throwable t) { state = CommitRequestState.FAILED; metricGroup.getNumCommittablesFailureCounter().inc(); // let the user configure a strategy for failing and apply it here throw new IllegalStateException("Failed to commit " + committable, t); } @Override public void retryLater() { state = CommitRequestState.RETRY; numRetries++; metricGroup.getNumCommittablesRetryCounter().inc(); } @Override public void updateAndRetryLater(CommT committable) { this.committable = committable; retryLater(); } @Override public void signalAlreadyCommitted() { state = CommitRequestState.COMMITTED; metricGroup.getNumCommittablesAlreadyCommittedCounter().inc(); } void setSelected() { state = CommitRequestState.RECEIVED; } void setCommittedIfNoError() { if (state == CommitRequestState.RECEIVED) { state = CommitRequestState.COMMITTED; 
metricGroup.getNumCommittablesSuccessCounter().inc(); } } CommitRequestImpl<CommT> copy() { return new CommitRequestImpl<>(committable, numRetries, state, metricGroup); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } CommitRequestImpl<?> that = (CommitRequestImpl<?>) o; return numRetries == that.numRetries && Objects.equals(committable, that.committable) && state == that.state; } @Override public int hashCode() { return Objects.hash(committable, numRetries, state); } @Override public String toString() { return "CommitRequestImpl{" + "state=" + state + ", numRetries=" + numRetries + ", committable=" + committable + '}'; } }
CommitRequestImpl
java
apache__flink
flink-core/src/test/java/org/apache/flink/api/common/io/FileInputFormatTest.java
{ "start": 2080, "end": 33869 }
class ____ { @TempDir private java.nio.file.Path temporaryFolder; @Test void testGetPathWithoutSettingFirst() { final DummyFileInputFormat format = new DummyFileInputFormat(); assertThat(format.getFilePaths()).as("Path should be null.").isEmpty(); } @Test void testGetPathsWithoutSettingFirst() { final DummyFileInputFormat format = new DummyFileInputFormat(); Path[] paths = format.getFilePaths(); assertThat(paths).as("Paths should not be null.").isNotNull(); assertThat(paths).as("Paths should be empty.").isEmpty(); } @Test void testToStringWithoutPathSet() { final DummyFileInputFormat format = new DummyFileInputFormat(); assertThat(format.toString()) .as("The toString() should be correct.") .isEqualTo("File Input (unknown file)"); } @Test void testSetPathsNull() { assertThatThrownBy(() -> new MultiDummyFileInputFormat().setFilePaths((String) null)) .isInstanceOf(IllegalArgumentException.class); } @Test void testSetPathNullString() { assertThatThrownBy(() -> new DummyFileInputFormat().setFilePath((String) null)) .isInstanceOf(IllegalArgumentException.class); } @Test void testSetPathNullPath() { assertThatThrownBy(() -> new DummyFileInputFormat().setFilePath((Path) null)) .isInstanceOf(IllegalArgumentException.class); } @Test void testSetPathsOnePathNull() { assertThatThrownBy( () -> new MultiDummyFileInputFormat() .setFilePaths("/an/imaginary/path", null)) .isInstanceOf(IllegalArgumentException.class); } @Test void testSetPathsEmptyArray() { assertThatThrownBy(() -> new MultiDummyFileInputFormat().setFilePaths(new String[0])) .isInstanceOf(IllegalArgumentException.class); } @Test void testSetPath() { final DummyFileInputFormat format = new DummyFileInputFormat(); format.setFilePath("/some/imaginary/path"); assertThat("/some/imaginary/path").isEqualTo(format.getFilePaths()[0].toString()); } @Test void testSetPathOnMulti() { final MultiDummyFileInputFormat format = new MultiDummyFileInputFormat(); final String myPath = "/an/imaginary/path"; format.setFilePath(myPath); 
final Path[] filePaths = format.getFilePaths(); assertThat(filePaths).hasSize(1); assertThat(filePaths[0].toUri().toString()).isEqualTo(myPath); // ensure backwards compatibility assertThat(format.getFilePaths()[0].toUri().toString()).isEqualTo(myPath); } @Test void testSetPathsSingleWithMulti() { final MultiDummyFileInputFormat format = new MultiDummyFileInputFormat(); final String myPath = "/an/imaginary/path"; format.setFilePaths(myPath); final Path[] filePaths = format.getFilePaths(); assertThat(filePaths).hasSize(1); assertThat(filePaths[0].toUri().toString()).isEqualTo(myPath); // ensure backwards compatibility assertThat(format.getFilePaths()[0].toUri().toString()).isEqualTo(myPath); } @Test void testSetPathsMulti() { final MultiDummyFileInputFormat format = new MultiDummyFileInputFormat(); final String myPath = "/an/imaginary/path"; final String myPath2 = "/an/imaginary/path2"; format.setFilePaths(myPath, myPath2); final Path[] filePaths = format.getFilePaths(); assertThat(filePaths).hasSize(2); assertThat(filePaths[0].toUri().toString()).isEqualTo(myPath); assertThat(filePaths[1].toUri().toString()).isEqualTo(myPath2); } @Test void testSetFileViaConfiguration() { final DummyFileInputFormat format = new DummyFileInputFormat(); final String filePath = "file:///some/none/existing/directory/"; Configuration conf = new Configuration(); conf.setString("input.file.path", filePath); format.configure(conf); assertThat(format.getFilePaths()[0]).isEqualTo(new Path(filePath)); } @Test void testSetFileViaConfigurationEmptyPath() { assertThatThrownBy( () -> { final DummyFileInputFormat format = new DummyFileInputFormat(); final String filePath = null; Configuration conf = new Configuration(); conf.setString("input.file.path", filePath); format.configure(conf); }) .isInstanceOf(RuntimeException.class); } // ------------------------------------------------------------------------ // Input Splits // ------------------------------------------------------------------------ 
@Test void testCreateInputSplitSingleFile() throws IOException { String tempFile = TestFileUtils.createTempFile("Hello World"); FileInputFormat<IntValue> fif = new DummyFileInputFormat(); fif.setFilePath(tempFile); fif.configure(new Configuration()); FileInputSplit[] splits = fif.createInputSplits(2); assertThat(splits).hasSize(2); assertThat(splits[0].getPath().toString()).isEqualTo(tempFile); assertThat(splits[1].getPath().toString()).isEqualTo(tempFile); } @Test void testCreateInputSplitMultiFiles() throws IOException { String tempFile1 = TestFileUtils.createTempFile(21); String tempFile2 = TestFileUtils.createTempFile(22); String tempFile3 = TestFileUtils.createTempFile(23); FileInputFormat<IntValue> fif = new MultiDummyFileInputFormat(); fif.setFilePaths(tempFile1, tempFile2, tempFile3); fif.configure(new Configuration()); FileInputSplit[] splits = fif.createInputSplits(3); int numSplitsFile1 = 0; int numSplitsFile2 = 0; int numSplitsFile3 = 0; assertThat(splits).hasSize(3); for (FileInputSplit fis : splits) { assertThat(fis.getStart()).isZero(); if (fis.getPath().toString().equals(tempFile1)) { numSplitsFile1++; assertThat(fis.getLength()).isEqualTo(21); } else if (fis.getPath().toString().equals(tempFile2)) { numSplitsFile2++; assertThat(fis.getLength()).isEqualTo(22); } else if (fis.getPath().toString().equals(tempFile3)) { numSplitsFile3++; assertThat(fis.getLength()).isEqualTo(23); } else { fail("Got split for unknown file."); } } assertThat(numSplitsFile1).isOne(); assertThat(numSplitsFile2).isOne(); assertThat(numSplitsFile3).isOne(); } // ------------------------------------------------------------------------ // Statistics // ------------------------------------------------------------------------ @Test void testGetStatisticsNonExistingFile() throws IOException { final DummyFileInputFormat format = new DummyFileInputFormat(); format.setFilePath("file:///some/none/existing/directory/"); format.configure(new Configuration()); BaseStatistics stats = 
format.getStatistics(null); assertThat(stats).as("The file statistics should be null.").isNull(); } @Test void testGetStatisticsOneFileNoCachedVersion() throws IOException { final long SIZE = 1024 * 500; String tempFile = TestFileUtils.createTempFile(SIZE); final DummyFileInputFormat format = new DummyFileInputFormat(); format.setFilePath(tempFile); format.configure(new Configuration()); BaseStatistics stats = format.getStatistics(null); assertThat(stats.getTotalInputSize()) .as("The file size from the statistics is wrong.") .isEqualTo(SIZE); } @Test void testGetStatisticsMultipleFilesNoCachedVersion() throws IOException { final long SIZE1 = 2077; final long SIZE2 = 31909; final long SIZE3 = 10; final long TOTAL = SIZE1 + SIZE2 + SIZE3; String tempDir = TestFileUtils.createTempFileDir( TempDirUtils.newFolder(temporaryFolder), SIZE1, SIZE2, SIZE3); final DummyFileInputFormat format = new DummyFileInputFormat(); format.setFilePath(tempDir); format.configure(new Configuration()); BaseStatistics stats = format.getStatistics(null); assertThat(stats.getTotalInputSize()) .as("The file size from the statistics is wrong.") .isEqualTo(TOTAL); } @Test void testGetStatisticsOneFileWithCachedVersion() throws IOException { final long SIZE = 50873; final long FAKE_SIZE = 10065; String tempFile = TestFileUtils.createTempFile(SIZE); DummyFileInputFormat format = new DummyFileInputFormat(); format.setFilePath(tempFile); format.configure(new Configuration()); FileBaseStatistics stats = format.getStatistics(null); assertThat(stats.getTotalInputSize()) .as("The file size from the statistics is wrong.") .isEqualTo(SIZE); format = new DummyFileInputFormat(); format.setFilePath(tempFile); format.configure(new Configuration()); FileBaseStatistics newStats = format.getStatistics(stats); assertThat(stats).as("Statistics object was changed").isSameAs(newStats); // insert fake stats with the correct modification time. 
the call should return the fake // stats format = new DummyFileInputFormat(); format.setFilePath(tempFile); format.configure(new Configuration()); FileBaseStatistics fakeStats = new FileBaseStatistics( stats.getLastModificationTime(), FAKE_SIZE, BaseStatistics.AVG_RECORD_BYTES_UNKNOWN); BaseStatistics latest = format.getStatistics(fakeStats); assertThat(latest.getTotalInputSize()) .as("The file size from the statistics is wrong.") .isEqualTo(FAKE_SIZE); // insert fake stats with the expired modification time. the call should return new // accurate stats format = new DummyFileInputFormat(); format.setFilePath(tempFile); format.configure(new Configuration()); FileBaseStatistics outDatedFakeStats = new FileBaseStatistics( stats.getLastModificationTime() - 1, FAKE_SIZE, BaseStatistics.AVG_RECORD_BYTES_UNKNOWN); BaseStatistics reGathered = format.getStatistics(outDatedFakeStats); assertThat(reGathered.getTotalInputSize()) .as("The file size from the statistics is wrong.") .isEqualTo(SIZE); } @Test void testGetStatisticsMultipleFilesWithCachedVersion() throws IOException { FileSystem fs = FileSystem.getLocalFileSystem(); final long SIZE1 = 2077; final long SIZE2 = 31909; final long SIZE3 = 10; final long TOTAL = SIZE1 + SIZE2 + SIZE3; final long FAKE_SIZE = 10065; File tempDirFile = TempDirUtils.newFolder(temporaryFolder); String tempDir = tempDirFile.getAbsolutePath(); String f1 = TestFileUtils.createTempFileInDirectory(tempDir, SIZE1); long modTime1 = fs.getFileStatus(new Path(f1)).getModificationTime(); String f2 = TestFileUtils.createTempFileInDirectory(tempDir, SIZE2); long modTime2 = fs.getFileStatus(new Path(f2)).getModificationTime(); String f3 = TestFileUtils.createTempFileInDirectory(tempDir, SIZE3); long modTime3 = fs.getFileStatus(new Path(f3)).getModificationTime(); DummyFileInputFormat format = new DummyFileInputFormat(); format.setFilePath(tempDir); format.configure(new Configuration()); FileBaseStatistics stats = format.getStatistics(null); 
assertThat(stats.getTotalInputSize()) .as("The file size from the statistics is wrong.") .isEqualTo(TOTAL); format = new DummyFileInputFormat(); format.setFilePath(tempDir); format.configure(new Configuration()); FileBaseStatistics newStats = format.getStatistics(stats); assertThat(stats).as("Statistics object was changed").isSameAs(newStats); // insert fake stats with the correct modification time. the call should return the fake // stats format = new DummyFileInputFormat(); format.setFilePath(tempDir); format.configure(new Configuration()); FileBaseStatistics fakeStats = new FileBaseStatistics( stats.getLastModificationTime(), FAKE_SIZE, BaseStatistics.AVG_RECORD_BYTES_UNKNOWN); BaseStatistics latest = format.getStatistics(fakeStats); assertThat(latest.getTotalInputSize()) .as("The file size from the statistics is wrong.") .isEqualTo(FAKE_SIZE); // insert fake stats with the correct modification time. the call should return the fake // stats format = new DummyFileInputFormat(); format.setFilePath(tempDir); format.configure(new Configuration()); FileBaseStatistics outDatedFakeStats = new FileBaseStatistics( Math.min(Math.min(modTime1, modTime2), modTime3) - 1, FAKE_SIZE, BaseStatistics.AVG_RECORD_BYTES_UNKNOWN); BaseStatistics reGathered = format.getStatistics(outDatedFakeStats); assertThat(reGathered.getTotalInputSize()) .as("The file size from the statistics is wrong.") .isEqualTo(TOTAL); } // -- Multiple Files -- // @Test void testGetStatisticsMultipleNonExistingFile() throws IOException { final MultiDummyFileInputFormat format = new MultiDummyFileInputFormat(); format.setFilePaths( "file:///some/none/existing/directory/", "file:///another/non/existing/directory/"); format.configure(new Configuration()); BaseStatistics stats = format.getStatistics(null); assertThat(stats).as("The file statistics should be null.").isNull(); } @Test void testGetStatisticsMultipleOneFileNoCachedVersion() throws IOException { final long size1 = 1024 * 500; String tempFile = 
TestFileUtils.createTempFile(size1); final long size2 = 1024 * 505; String tempFile2 = TestFileUtils.createTempFile(size2); final long totalSize = size1 + size2; final MultiDummyFileInputFormat format = new MultiDummyFileInputFormat(); format.setFilePaths(tempFile, tempFile2); format.configure(new Configuration()); BaseStatistics stats = format.getStatistics(null); assertThat(stats.getTotalInputSize()) .as("The file size from the statistics is wrong.") .isEqualTo(totalSize); } @Test void testGetStatisticsMultipleFilesMultiplePathsNoCachedVersion() throws IOException { final long size1 = 2077; final long size2 = 31909; final long size3 = 10; final long totalSize123 = size1 + size2 + size3; String tempDir = TestFileUtils.createTempFileDir( TempDirUtils.newFolder(temporaryFolder), size1, size2, size3); final long size4 = 2051; final long size5 = 31902; final long size6 = 15; final long totalSize456 = size4 + size5 + size6; String tempDir2 = TestFileUtils.createTempFileDir( TempDirUtils.newFolder(temporaryFolder), size4, size5, size6); final MultiDummyFileInputFormat format = new MultiDummyFileInputFormat(); format.setFilePaths(tempDir, tempDir2); format.configure(new Configuration()); BaseStatistics stats = format.getStatistics(null); assertThat(stats.getTotalInputSize()) .as("The file size from the statistics is wrong.") .isEqualTo(totalSize123 + totalSize456); } @Test void testGetStatisticsMultipleOneFileWithCachedVersion() throws IOException { FileSystem fs = FileSystem.getLocalFileSystem(); final long size1 = 50873; final long fakeSize = 10065; String tempFile1 = TestFileUtils.createTempFile(size1); final long lastModTime1 = fs.getFileStatus(new Path(tempFile1)).getModificationTime(); final long size2 = 52573; String tempFile2 = TestFileUtils.createTempFile(size2); final long lastModTime2 = fs.getFileStatus(new Path(tempFile2)).getModificationTime(); final long sizeTotal = size1 + size2; MultiDummyFileInputFormat format = new MultiDummyFileInputFormat(); 
format.setFilePaths(tempFile1, tempFile2); format.configure(new Configuration()); FileBaseStatistics stats = format.getStatistics(null); assertThat(stats.getTotalInputSize()) .as("The file size from the statistics is wrong.") .isEqualTo(sizeTotal); format = new MultiDummyFileInputFormat(); format.setFilePath(tempFile1); format.configure(new Configuration()); FileBaseStatistics newStats = format.getStatistics(stats); assertThat(stats).as("Statistics object was changed").isSameAs(newStats); // insert fake stats with the correct modification time. the call should return the fake // stats format = new MultiDummyFileInputFormat(); format.setFilePath(tempFile1); format.configure(new Configuration()); FileBaseStatistics fakeStats = new FileBaseStatistics( stats.getLastModificationTime(), fakeSize, BaseStatistics.AVG_RECORD_BYTES_UNKNOWN); BaseStatistics latest = format.getStatistics(fakeStats); assertThat(latest.getTotalInputSize()) .as("The file size from the statistics is wrong.") .isEqualTo(fakeSize); // insert fake stats with the expired modification time. the call should return new accurate // stats format = new MultiDummyFileInputFormat(); format.setFilePaths(tempFile1, tempFile2); format.configure(new Configuration()); FileBaseStatistics outDatedFakeStats = new FileBaseStatistics( Math.min(lastModTime1, lastModTime2) - 1, fakeSize, BaseStatistics.AVG_RECORD_BYTES_UNKNOWN); BaseStatistics reGathered = format.getStatistics(outDatedFakeStats); assertThat(reGathered.getTotalInputSize()) .as("The file size from the statistics is wrong.") .isEqualTo(sizeTotal); } // ------------------------------------------------------------------------ // Unsplittable input files // ------------------------------------------------------------------------ // ---- Tests for compressed files --------- /** * Create directory with compressed files and see if it creates a split for each file. Each * split has to start from the beginning. 
*/ @Test void testFileInputFormatWithCompression() throws IOException { String tempFile = TestFileUtils.createTempFileDirForProvidedFormats( TempDirUtils.newFolder(temporaryFolder), FileInputFormat.getSupportedCompressionFormats()); final DummyFileInputFormat format = new DummyFileInputFormat(); format.setFilePath(tempFile); format.configure(new Configuration()); FileInputSplit[] splits = format.createInputSplits(2); final Set<String> supportedCompressionFormats = FileInputFormat.getSupportedCompressionFormats(); assertThat(splits).hasSameSizeAs(supportedCompressionFormats); for (FileInputSplit split : splits) { assertThat(split.getLength()) .isEqualTo( FileInputFormat.READ_WHOLE_SPLIT_FLAG); // unsplittable compressed files // have this size as a // flag for "read whole file" assertThat(split.getStart()).isZero(); // always read from the beginning. } // test if this also works for "mixed" directories TestFileUtils.createTempFileInDirectory( tempFile.replace("file:", ""), "this creates a test file with a random extension (at least not .deflate)"); final DummyFileInputFormat formatMixed = new DummyFileInputFormat(); formatMixed.setFilePath(tempFile); formatMixed.configure(new Configuration()); FileInputSplit[] splitsMixed = formatMixed.createInputSplits(2); assertThat(splitsMixed).hasSize(supportedCompressionFormats.size() + 1); for (FileInputSplit split : splitsMixed) { final String extension = FileInputFormat.extractFileExtension(split.getPath().getName()); if (supportedCompressionFormats.contains(extension)) { assertThat(split.getLength()) .isEqualTo( FileInputFormat.READ_WHOLE_SPLIT_FLAG); // unsplittable compressed // files have this size as a // flag for "read whole file" assertThat(split.getStart()).isZero(); // always read from the beginning. 
} else { assertThat(split.getStart()).isEqualTo(0L); assertThat(split.getLength() > 0).as("split size not correct").isTrue(); } } } /** * Some FileInputFormats don't use FileInputFormat#createSplits (that would detect that the file * is non-splittable and deal with reading boundaries correctly), they all create splits * manually from FileSourceSplit. If input files are compressed, ensure that the size of the * split is not the compressed file size and that the compression decorator is called. */ @Test void testFileInputFormatWithCompressionFromFileSource() throws IOException { String tempFile = TestFileUtils.createTempFileDirForProvidedFormats( TempDirUtils.newFolder(temporaryFolder), FileInputFormat.getSupportedCompressionFormats()); DummyFileInputFormat format = new DummyFileInputFormat(); format.setFilePath(tempFile); format.configure(new Configuration()); // manually create a FileInputSplit per file as FileSource would do // see org.apache.flink.connector.file.table.DeserializationSchemaAdapter.Reader() List<FileInputSplit> splits = manuallyCreateSplits(tempFile); final Set<String> supportedCompressionFormats = FileInputFormat.getSupportedCompressionFormats(); // one file per compression format, one split per file assertThat(splits).hasSameSizeAs(supportedCompressionFormats); for (FileInputSplit split : splits) { assertThat(split.getStart()).isZero(); // always read from the beginning. format.open(split); assertThat(format.compressedRead).isTrue(); assertThat(format.getSplitLength()) .isEqualTo( FileInputFormat.READ_WHOLE_SPLIT_FLAG); // unsplittable compressed files // have this size // as flag for "read whole file" } } /** * Simulates splits created by org.apache.flink.connector.file.src.FileSource (one split per * file with length = size of the file). For compressed file, the input format should override * it when it detects that the file is unsplittable in {@link * FileInputFormat#open(FileInputSplit)}. 
*/ private List<FileInputSplit> manuallyCreateSplits(String pathString) throws IOException { List<FileInputSplit> splits = new ArrayList<>(); final Path path = new Path(pathString); final FileSystem fs = path.getFileSystem(); for (FileStatus file : fs.listStatus(path)) { // split created like in DeserializationSchemaAdapter.Reader() splits.add(new FileInputSplit(0, file.getPath(), 0, file.getLen(), null)); } return splits; } // ------------------------------------------------------------------------ // Ignored Files // ------------------------------------------------------------------------ @Test void testIgnoredUnderscoreFiles() throws IOException { final String contents = "CONTENTS"; // create some accepted, some ignored files File child1 = TempDirUtils.newFile(temporaryFolder, "dataFile1.txt"); File child2 = TempDirUtils.newFile(temporaryFolder, "another_file.bin"); File luigiFile = TempDirUtils.newFile(temporaryFolder, "_luigi"); File success = TempDirUtils.newFile(temporaryFolder, "_SUCCESS"); createTempFiles( contents.getBytes(ConfigConstants.DEFAULT_CHARSET), child1, child2, luigiFile, success); // test that only the valid files are accepted final DummyFileInputFormat format = new DummyFileInputFormat(); format.setFilePath(temporaryFolder.toFile().getPath()); format.configure(new Configuration()); FileInputSplit[] splits = format.createInputSplits(1); assertThat(splits).hasSize(2); final URI uri1 = splits[0].getPath().toUri(); final URI uri2 = splits[1].getPath().toUri(); final URI childUri1 = child1.toURI(); final URI childUri2 = child2.toURI(); assertThat( (uri1.equals(childUri1) && uri2.equals(childUri2)) || (uri1.equals(childUri2) && uri2.equals(childUri1))) .isTrue(); } @Test void testExcludeFiles() throws IOException { final String contents = "CONTENTS"; // create some accepted, some ignored files File child1 = TempDirUtils.newFile(temporaryFolder, "dataFile1.txt"); File child2 = TempDirUtils.newFile(temporaryFolder, "another_file.bin"); File[] files = 
{child1, child2}; createTempFiles(contents.getBytes(ConfigConstants.DEFAULT_CHARSET), files); // test that only the valid files are accepted Configuration configuration = new Configuration(); final DummyFileInputFormat format = new DummyFileInputFormat(); format.setFilePath(temporaryFolder.toFile().getPath()); format.configure(configuration); format.setFilesFilter( new GlobFilePathFilter( Collections.singletonList("**"), Collections.singletonList("**/another_file.bin"))); FileInputSplit[] splits = format.createInputSplits(1); assertThat(splits).hasSize(1); final URI uri1 = splits[0].getPath().toUri(); final URI childUri1 = child1.toURI(); assertThat(childUri1).isEqualTo(uri1); } @Test void testReadMultiplePatterns() throws Exception { final String contents = "CONTENTS"; // create some accepted, some ignored files File child1 = TempDirUtils.newFile(temporaryFolder, "dataFile1.txt"); File child2 = TempDirUtils.newFile(temporaryFolder, "another_file.bin"); createTempFiles(contents.getBytes(ConfigConstants.DEFAULT_CHARSET), child1, child2); // test that only the valid files are accepted Configuration configuration = new Configuration(); final DummyFileInputFormat format = new DummyFileInputFormat(); format.setFilePath(temporaryFolder.toFile().getPath()); format.configure(configuration); format.setFilesFilter( new GlobFilePathFilter( Collections.singletonList("**"), Arrays.asList("**/another_file.bin", "**/dataFile1.txt"))); FileInputSplit[] splits = format.createInputSplits(1); assertThat(splits.length).isZero(); } @Test void testGetStatsIgnoredUnderscoreFiles() throws IOException { final int SIZE = 2048; final long TOTAL = 2 * SIZE; // create two accepted and two ignored files File child1 = TempDirUtils.newFile(temporaryFolder, "dataFile1.txt"); File child2 = TempDirUtils.newFile(temporaryFolder, "another_file.bin"); File luigiFile = TempDirUtils.newFile(temporaryFolder, "_luigi"); File success = TempDirUtils.newFile(temporaryFolder, "_SUCCESS"); createTempFiles(new 
byte[SIZE], child1, child2, luigiFile, success); final DummyFileInputFormat format = new DummyFileInputFormat(); format.setFilePath(temporaryFolder.toFile().getPath()); format.configure(new Configuration()); // check that only valid files are used for statistics computation BaseStatistics stats = format.getStatistics(null); assertThat(stats.getTotalInputSize()).isEqualTo(TOTAL); } // ------------------------------------------------------------------------ // Stream Decoration // ------------------------------------------------------------------------ @Test void testDecorateInputStream() throws IOException { // create temporary file with 3 blocks final File tempFile = File.createTempFile("input-stream-decoration-test", "tmp"); tempFile.deleteOnExit(); final int blockSize = 8; final int numBlocks = 3; FileOutputStream fileOutputStream = new FileOutputStream(tempFile); for (int i = 0; i < blockSize * numBlocks; i++) { fileOutputStream.write(new byte[] {(byte) i}); } fileOutputStream.close(); final Configuration config = new Configuration(); final FileInputFormat<byte[]> inputFormat = new MyDecoratedInputFormat(); inputFormat.setFilePath(tempFile.toURI().toString()); inputFormat.configure(config); inputFormat.openInputFormat(); FileInputSplit[] inputSplits = inputFormat.createInputSplits(3); byte[] bytes = null; byte prev = 0; for (FileInputSplit inputSplit : inputSplits) { inputFormat.open(inputSplit); while (!inputFormat.reachedEnd()) { if ((bytes = inputFormat.nextRecord(bytes)) != null) { assertThat(bytes).isEqualTo(new byte[] {--prev}); } } } inputFormat.closeInputFormat(); } // ------------------------------------------------------------------------ private void createTempFiles(byte[] contents, File... files) throws IOException { for (File child : files) { child.deleteOnExit(); try (BufferedOutputStream out = new BufferedOutputStream(Files.newOutputStream(child.toPath()))) { out.write(contents); } } } private static
FileInputFormatTest
java
apache__hadoop
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/constants/ConfigConstants.java
{ "start": 879, "end": 932 }
class ____ in various hadoop tests. */ public final
used
java
quarkusio__quarkus
extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/ResourcesConfig.java
{ "start": 78, "end": 286 }
interface ____ { /** * Limits Requirements */ ResourcesRequirementsConfig limits(); /** * Requests Requirements */ ResourcesRequirementsConfig requests();
ResourcesConfig
java
bumptech__glide
library/src/main/java/com/bumptech/glide/load/model/ByteArrayLoader.java
{ "start": 705, "end": 1394 }
class ____<Data> implements ModelLoader<byte[], Data> { private final Converter<Data> converter; @SuppressWarnings("WeakerAccess") // Public API public ByteArrayLoader(Converter<Data> converter) { this.converter = converter; } @Override public LoadData<Data> buildLoadData( @NonNull byte[] model, int width, int height, @NonNull Options options) { return new LoadData<>(new ObjectKey(model), new Fetcher<>(model, converter)); } @Override public boolean handles(@NonNull byte[] model) { return true; } /** * Converts between a byte array a desired model class. * * @param <Data> The type of data to convert to. */ public
ByteArrayLoader
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/create/MySqlCreateExternalCatalogTest2.java
{ "start": 856, "end": 2203 }
class ____ extends MysqlTest { public void test_0() throws Exception { String sql = "CREATE EXTERNAL CATALOG shanghao_test.oss_catalog_0\n" + "PROPERTIES\n" + "(\n" + " connector.name='oss'\n" + " 'connection-url'='http://oss-cn-hangzhou-zmf.aliyuncs.com'\n" + " 'bucket-name'='oss_test'\n" + " 'connection-user' = 'access_id'\n" + " 'connection-password' = 'access_key'\n" + " )\n" + "COMMENT 'This is a sample to create an oss connector catalog';"; MySqlStatementParser parser = new MySqlStatementParser(sql); List<SQLStatement> stmtList = parser.parseStatementList(); assertEquals(1, stmtList.size()); SQLStatement stmt = stmtList.get(0); assertEquals("CREATE EXTERNAL CATALOG shanghao_test.oss_catalog_0 PROPERTIES (\n" + "connector.name='oss'\n" + "'bucket-name'='oss_test'\n" + "'connection-url'='http://oss-cn-hangzhou-zmf.aliyuncs.com'\n" + "'connection-user'='access_id'\n" + "'connection-password'='access_key')\n" + "COMMENT 'This is a sample to create an oss connector catalog';", stmt.toString()); } }
MySqlCreateExternalCatalogTest2
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/ExtendsAutoValueTest.java
{ "start": 2429, "end": 2733 }
class ____ extends AutoClass {} """) .doTest(); } @Test public void extendsAutoValue_bad() { helper .addSourceLines( "TestClass.java", """ import com.google.auto.value.AutoValue; @AutoValue abstract
TestClass
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java
{ "start": 1768, "end": 8996 }
class ____ { final TermsEnum termsEnum; @Nullable PostingsEnum docsEnum; @Nullable final Bits bits; Holder(TermsEnum termsEnum, Bits bits) { this.termsEnum = termsEnum; this.bits = bits; } } static final String UNSUPPORTED_MESSAGE = "This TermsEnum only supports #seekExact(BytesRef) as well as #docFreq() and #totalTermFreq()"; protected static final int NOT_FOUND = -1; private final Holder[] enums; protected int currentDocFreq = 0; protected long currentTotalTermFreq = 0; protected BytesRef current; protected final int docsEnumFlag; public FilterableTermsEnum(IndexReader reader, String field, int docsEnumFlag, @Nullable Query filter) throws IOException { if ((docsEnumFlag != PostingsEnum.FREQS) && (docsEnumFlag != PostingsEnum.NONE)) { throw new IllegalArgumentException("invalid docsEnumFlag of " + docsEnumFlag); } this.docsEnumFlag = docsEnumFlag; List<LeafReaderContext> leaves = reader.leaves(); List<Holder> enums = new ArrayList<>(leaves.size()); final Weight weight; if (filter == null) { weight = null; } else { final IndexSearcher searcher = new IndexSearcher(reader); searcher.setQueryCache(null); weight = searcher.createWeight(searcher.rewrite(filter), ScoreMode.COMPLETE_NO_SCORES, 1f); } for (LeafReaderContext context : leaves) { Terms terms = context.reader().terms(field); if (terms == null) { continue; } TermsEnum termsEnum = terms.iterator(); if (termsEnum == null) { continue; } BitSet bits = null; if (weight != null) { Scorer scorer = weight.scorer(context); if (scorer == null) { // fully filtered, none matching, no need to iterate on this continue; } DocIdSetIterator docs = scorer.iterator(); // we want to force apply deleted docs final Bits liveDocs = context.reader().getLiveDocs(); if (liveDocs != null) { docs = new FilteredDocIdSetIterator(docs) { @Override protected boolean match(int doc) { return liveDocs.get(doc); } }; } bits = BitSet.of(docs, context.reader().maxDoc()); } enums.add(new Holder(termsEnum, bits)); } this.enums = enums.toArray(new 
Holder[enums.size()]); } @Override public BytesRef term() throws IOException { return current; } @Override public AttributeSource attributes() { throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE); } @Override public boolean seekExact(BytesRef text) throws IOException { int docFreq = 0; long totalTermFreq = 0; for (Holder anEnum : enums) { if (anEnum.termsEnum.seekExact(text)) { if (anEnum.bits == null) { docFreq += anEnum.termsEnum.docFreq(); if (docsEnumFlag == PostingsEnum.FREQS) { long leafTotalTermFreq = anEnum.termsEnum.totalTermFreq(); if (totalTermFreq == -1 || leafTotalTermFreq == -1) { totalTermFreq = -1; continue; } totalTermFreq += leafTotalTermFreq; } } else { final PostingsEnum docsEnum = anEnum.docsEnum = anEnum.termsEnum.postings(anEnum.docsEnum, docsEnumFlag); // 2 choices for performing same heavy loop - one attempts to calculate totalTermFreq and other does not if (docsEnumFlag == PostingsEnum.FREQS) { for (int docId = docsEnum.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = docsEnum.nextDoc()) { if (anEnum.bits != null && anEnum.bits.get(docId) == false) { continue; } docFreq++; // docsEnum.freq() returns 1 if doc indexed with IndexOptions.DOCS_ONLY so no way of knowing if value // is really 1 or unrecorded when filtering like this totalTermFreq += docsEnum.freq(); } } else { for (int docId = docsEnum.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = docsEnum.nextDoc()) { if (anEnum.bits != null && anEnum.bits.get(docId) == false) { continue; } // docsEnum.freq() behaviour is undefined if docsEnumFlag==PostingsEnum.FLAG_NONE so don't bother with call docFreq++; } } } } } if (docFreq > 0) { currentDocFreq = docFreq; currentTotalTermFreq = totalTermFreq; current = text; return true; } else { currentDocFreq = NOT_FOUND; currentTotalTermFreq = NOT_FOUND; current = null; return false; } } @Override public IOBooleanSupplier prepareSeekExact(BytesRef bytesRef) { return () -> this.seekExact(bytesRef); } @Override public int 
docFreq() throws IOException { return currentDocFreq; } @Override public long totalTermFreq() throws IOException { return currentTotalTermFreq; } @Override public void seekExact(long ord) throws IOException { throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE); } @Override public void seekExact(BytesRef term, TermState state) throws IOException { throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE); } @Override public TermState termState() throws IOException { throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE); } @Override public SeekStatus seekCeil(BytesRef text) throws IOException { throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE); } @Override public long ord() throws IOException { throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE); } @Override public PostingsEnum postings(PostingsEnum reuse, int flags) throws IOException { throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE); } @Override public ImpactsEnum impacts(int flags) throws IOException { throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE); } @Override public BytesRef next() throws IOException { throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE); } }
Holder
java
apache__logging-log4j2
log4j-core/src/main/java/org/apache/logging/log4j/core/config/plugins/visitors/AbstractPluginVisitor.java
{ "start": 2155, "end": 2959 }
class ____ PluginVisitor is for. */ protected AbstractPluginVisitor(final Class<A> clazz) { this.clazz = clazz; } @SuppressWarnings("unchecked") @Override public PluginVisitor<A> setAnnotation(final Annotation anAnnotation) { final Annotation a = Objects.requireNonNull(anAnnotation, "No annotation was provided"); if (this.clazz.isInstance(a)) { this.annotation = (A) a; } return this; } @Override public PluginVisitor<A> setAliases(final String... someAliases) { this.aliases = someAliases; return this; } @Override public PluginVisitor<A> setConversionType(final Class<?> aConversionType) { this.conversionType = Objects.requireNonNull(aConversionType, "No conversion type
this
java
apache__camel
components/camel-kubernetes/src/main/java/org/apache/camel/component/kubernetes/pods/KubernetesPodsConsumer.java
{ "start": 1746, "end": 3114 }
class ____ extends DefaultConsumer { private static final Logger LOG = LoggerFactory.getLogger(KubernetesPodsConsumer.class); private final Processor processor; private ExecutorService executor; private PodsConsumerTask podsWatcher; public KubernetesPodsConsumer(AbstractKubernetesEndpoint endpoint, Processor processor) { super(endpoint, processor); this.processor = processor; } @Override public AbstractKubernetesEndpoint getEndpoint() { return (AbstractKubernetesEndpoint) super.getEndpoint(); } @Override protected void doStart() throws Exception { super.doStart(); executor = getEndpoint().createExecutor(this); podsWatcher = new PodsConsumerTask(); executor.submit(podsWatcher); } @Override protected void doStop() throws Exception { super.doStop(); LOG.debug("Stopping Kubernetes Pods Consumer"); if (executor != null) { KubernetesHelper.close(podsWatcher, podsWatcher::getWatch); if (getEndpoint() != null && getEndpoint().getCamelContext() != null) { getEndpoint().getCamelContext().getExecutorServiceManager().shutdownNow(executor); } else { executor.shutdownNow(); } } executor = null; }
KubernetesPodsConsumer
java
square__retrofit
retrofit-adapters/guava/src/main/java/retrofit2/adapter/guava/GuavaCallAdapterFactory.java
{ "start": 1286, "end": 1981 }
interface ____ { * &#64;GET("user/me") * ListenableFuture&lt;User&gt; getUser() * } * </code></pre> * * There are two configurations supported for the {@code ListenableFuture} type parameter: * * <ul> * <li>Direct body (e.g., {@code ListenableFuture<User>}) returns the deserialized body for 2XX * responses, sets {@link retrofit2.HttpException HttpException} errors for non-2XX responses, * and sets {@link IOException} for network errors. * <li>Response wrapped body (e.g., {@code ListenableFuture<Response<User>>}) returns a {@link * Response} object for all HTTP responses and sets {@link IOException} for network errors * </ul> */ public final
MyService
java
elastic__elasticsearch
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DefaultDetectorDescription.java
{ "start": 417, "end": 3003 }
class ____ { private static final String BY_TOKEN = " by "; private static final String OVER_TOKEN = " over "; private static final String USE_NULL_OPTION = " usenull="; private static final String PARTITION_FIELD_OPTION = " partitionfield="; private static final String EXCLUDE_FREQUENT_OPTION = " excludefrequent="; private DefaultDetectorDescription() { // do nothing } /** * Returns the default description for the given {@code detector} * * @param detector the {@code Detector} for which a default description is requested * @return the default description */ public static String of(Detector detector) { StringBuilder sb = new StringBuilder(); appendOn(detector, sb); return sb.toString(); } /** * Appends to the given {@code StringBuilder} the default description * for the given {@code detector} * * @param detector the {@code Detector} for which a default description is requested * @param sb the {@code StringBuilder} to append to */ public static void appendOn(Detector detector, StringBuilder sb) { if (isNotNullOrEmpty(detector.getFunction().getFullName())) { sb.append(detector.getFunction()); if (isNotNullOrEmpty(detector.getFieldName())) { sb.append('(').append(quoteField(detector.getFieldName())).append(')'); } } else if (isNotNullOrEmpty(detector.getFieldName())) { sb.append(quoteField(detector.getFieldName())); } if (isNotNullOrEmpty(detector.getByFieldName())) { sb.append(BY_TOKEN).append(quoteField(detector.getByFieldName())); } if (isNotNullOrEmpty(detector.getOverFieldName())) { sb.append(OVER_TOKEN).append(quoteField(detector.getOverFieldName())); } if (detector.isUseNull()) { sb.append(USE_NULL_OPTION).append(detector.isUseNull()); } if (isNotNullOrEmpty(detector.getPartitionFieldName())) { sb.append(PARTITION_FIELD_OPTION).append(quoteField(detector.getPartitionFieldName())); } if (detector.getExcludeFrequent() != null) { sb.append(EXCLUDE_FREQUENT_OPTION).append(detector.getExcludeFrequent()); } } private static String quoteField(String field) { return 
MlStrings.doubleQuoteIfNotAlphaNumeric(field); } private static boolean isNotNullOrEmpty(String arg) { return Strings.isNullOrEmpty(arg) == false; } }
DefaultDetectorDescription
java
elastic__elasticsearch
x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java
{ "start": 1322, "end": 9448 }
class ____ extends ConnectionConfiguration { static final String URL_PREFIX = "jdbc:es://"; static final String URL_FULL_PREFIX = "jdbc:elasticsearch://"; public static final URI DEFAULT_URI = URI.create("http://localhost:9200/"); static final String DEBUG = "debug"; static final String DEBUG_DEFAULT = "false"; static final String DEBUG_OUTPUT = "debug.output"; // can be out/err/url static final String DEBUG_OUTPUT_DEFAULT = "err"; static final String DEBUG_FLUSH_ALWAYS = "debug.flushAlways"; // can be buffered/immediate static final String DEBUG_FLUSH_ALWAYS_DEFAULT = "false"; public static final String TIME_ZONE = "timezone"; // follow the JDBC spec and use the JVM default... // to avoid inconsistency, the default is picked up once at startup and reused across connections // to cater to the principle of least surprise // really, the way to move forward is to specify a calendar or the timezone manually static final String TIME_ZONE_DEFAULT = TimeZone.getDefault().getID(); public static final String CATALOG = "catalog"; static final String FIELD_MULTI_VALUE_LENIENCY = "field.multi.value.leniency"; static final String FIELD_MULTI_VALUE_LENIENCY_DEFAULT = "true"; static final String INDEX_INCLUDE_FROZEN = "index.include.frozen"; static final String INDEX_INCLUDE_FROZEN_DEFAULT = "false"; // options that don't change at runtime private static final Set<String> OPTION_NAMES = new LinkedHashSet<>( Arrays.asList(TIME_ZONE, CATALOG, FIELD_MULTI_VALUE_LENIENCY, INDEX_INCLUDE_FROZEN, DEBUG, DEBUG_OUTPUT, DEBUG_FLUSH_ALWAYS) ); static { // trigger version initialization // typically this should have already happened but in case the // EsDriver/EsDataSource are not used and the impl. 
classes used directly // this covers that case ClientVersion.CURRENT.toString(); } private final boolean debug; private final String debugOut; private final boolean flushAlways; private final ZoneId zoneId; private final String catalog; private final boolean fieldMultiValueLeniency; private final boolean includeFrozen; public static JdbcConfiguration create(String u, Properties props, int loginTimeoutSeconds) throws JdbcSQLException { URI uri = parseUrl(u); Properties urlProps = parseProperties(uri, u); uri = removeQuery(uri, u, DEFAULT_URI); // override properties set in the URL with the ones specified programmatically if (props != null) { urlProps.putAll(props); } if (loginTimeoutSeconds > 0) { urlProps.setProperty(CONNECT_TIMEOUT, Long.toString(TimeUnit.SECONDS.toMillis(loginTimeoutSeconds))); } try { return new JdbcConfiguration(uri, u, urlProps); } catch (JdbcSQLException e) { throw e; } catch (Exception ex) { throw new JdbcSQLException(ex, ex.getMessage()); } } private static URI parseUrl(String u) throws JdbcSQLException { if (canAccept(u) == false) { throw new JdbcSQLException("Expected [" + URL_PREFIX + "] url, received [" + u + "]"); } try { return parseURI(removeJdbcPrefix(u), DEFAULT_URI); } catch (IllegalArgumentException ex) { final String format = "jdbc:[es|elasticsearch]://[[http|https]://]?[host[:port]]?/[prefix]?[\\?[option=value]&]*"; throw new JdbcSQLException(ex, "Invalid URL: " + ex.getMessage() + "; format should be [" + format + "]"); } } private static String removeJdbcPrefix(String connectionString) throws JdbcSQLException { if (connectionString.startsWith(URL_PREFIX)) { return connectionString.substring(URL_PREFIX.length()); } else if (connectionString.startsWith(URL_FULL_PREFIX)) { return connectionString.substring(URL_FULL_PREFIX.length()); } else { throw new JdbcSQLException("Expected [" + URL_PREFIX + "] url, received [" + connectionString + "]"); } } private static Properties parseProperties(URI uri, String u) throws JdbcSQLException 
{ Properties props = new Properties(); try { if (uri.getRawQuery() != null) { // parse properties List<String> prms = StringUtils.tokenize(uri.getRawQuery(), "&"); for (String param : prms) { List<String> args = StringUtils.tokenize(param, "="); if (args.size() != 2) { throw new JdbcSQLException("Invalid parameter [" + param + "], format needs to be key=value"); } final String key = URLDecoder.decode(args.get(0), "UTF-8").trim(); final String val = URLDecoder.decode(args.get(1), "UTF-8"); // further validation happens in the constructor (since extra properties might be specified either way) props.setProperty(key, val); } } } catch (JdbcSQLException e) { throw e; } catch (Exception e) { // Add the url to unexpected exceptions throw new IllegalArgumentException("Failed to parse acceptable jdbc url [" + u + "]", e); } return props; } // constructor is private to force the use of a factory in order to catch and convert any validation exception // and also do input processing as oppose to handling this from the constructor (which is tricky or impossible) private JdbcConfiguration(URI baseURI, String u, Properties props) throws JdbcSQLException { super(baseURI, u, props); this.debug = parseValue(DEBUG, props.getProperty(DEBUG, DEBUG_DEFAULT), Boolean::parseBoolean); this.debugOut = props.getProperty(DEBUG_OUTPUT, DEBUG_OUTPUT_DEFAULT); this.flushAlways = parseValue( DEBUG_FLUSH_ALWAYS, props.getProperty(DEBUG_FLUSH_ALWAYS, DEBUG_FLUSH_ALWAYS_DEFAULT), Boolean::parseBoolean ); this.zoneId = parseValue( TIME_ZONE, props.getProperty(TIME_ZONE, TIME_ZONE_DEFAULT), s -> TimeZone.getTimeZone(s).toZoneId().normalized() ); this.catalog = props.getProperty(CATALOG); this.fieldMultiValueLeniency = parseValue( FIELD_MULTI_VALUE_LENIENCY, props.getProperty(FIELD_MULTI_VALUE_LENIENCY, FIELD_MULTI_VALUE_LENIENCY_DEFAULT), Boolean::parseBoolean ); this.includeFrozen = parseValue( INDEX_INCLUDE_FROZEN, props.getProperty(INDEX_INCLUDE_FROZEN, INDEX_INCLUDE_FROZEN_DEFAULT), 
Boolean::parseBoolean ); } @Override protected Collection<String> extraOptions() { return OPTION_NAMES; } ZoneId zoneId() { return zoneId; } public boolean debug() { return debug; } public String debugOut() { return debugOut; } public boolean flushAlways() { return flushAlways; } public TimeZone timeZone() { return zoneId != null ? TimeZone.getTimeZone(zoneId) : null; } String catalog() { return catalog; } public boolean fieldMultiValueLeniency() { return fieldMultiValueLeniency; } public boolean indexIncludeFrozen() { return includeFrozen; } public static boolean canAccept(String url) { String u = url.trim(); return (StringUtils.hasText(u) && (u.startsWith(JdbcConfiguration.URL_PREFIX) || u.startsWith(JdbcConfiguration.URL_FULL_PREFIX))); } public DriverPropertyInfo[] driverPropertyInfo() { List<DriverPropertyInfo> info = new ArrayList<>(); for (String option : optionNames()) { DriverPropertyInfo prop = new DriverPropertyInfo(option, null); info.add(prop); } return info.toArray(new DriverPropertyInfo[info.size()]); } }
JdbcConfiguration
java
apache__avro
lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestSortedKeyValueFile.java
{ "start": 1767, "end": 10278 }
class ____ { private static final Logger LOG = LoggerFactory.getLogger(TestSortedKeyValueFile.class); @TempDir public File mTempDir; @Test void writeOutOfSortedOrder() throws IOException { assertThrows(IllegalArgumentException.class, () -> { LOG.debug("Writing some records to a SortedKeyValueFile..."); Configuration conf = new Configuration(); SortedKeyValueFile.Writer.Options options = new SortedKeyValueFile.Writer.Options() .withKeySchema(Schema.create(Schema.Type.STRING)).withValueSchema(Schema.create(Schema.Type.STRING)) .withConfiguration(conf).withPath(new Path(mTempDir.getPath(), "myfile")).withIndexInterval(2); // Index // every // other // record. try (SortedKeyValueFile.Writer<CharSequence, CharSequence> writer = new SortedKeyValueFile.Writer<>(options)) { Utf8 key = new Utf8(); // re-use key, to test copied writer.append(key.set("banana"), "Banana"); writer.append(key.set("apple"), "Apple"); // Ruh, roh! } }); } @Test void namedCodecs() throws IOException { Configuration conf = new Configuration(); Path myfile = new Path(mTempDir.getPath(), "myfile"); Schema key = Schema.create(Schema.Type.STRING); Schema value = Schema.create(Schema.Type.STRING); Schema recordSchema = AvroKeyValue.getSchema(key, value); DatumReader<GenericRecord> datumReader = SpecificData.get().createDatumReader(recordSchema); DataFileReader<GenericRecord> reader; SortedKeyValueFile.Writer.Options options = new SortedKeyValueFile.Writer.Options().withKeySchema(key) .withValueSchema(value).withConfiguration(conf).withPath(myfile); SortedKeyValueFile.Writer<CharSequence, CharSequence> writer; for (String codec : new String[] { "null", "deflate", "snappy", "bzip2" }) { LOG.debug("Using " + codec + "codec for a SortedKeyValueFile..."); options.withCodec(codec); writer = new SortedKeyValueFile.Writer<>(options); writer.close(); reader = new DataFileReader<>(new FsInput(new Path(myfile, SortedKeyValueFile.DATA_FILENAME), conf), datumReader); assertEquals(codec, 
reader.getMetaString("avro.codec")); reader.close(); } } @Test void deflateClassCodec() throws IOException { Configuration conf = new Configuration(); Path myfile = new Path(mTempDir.getPath(), "myfile"); Schema key = Schema.create(Schema.Type.STRING); Schema value = Schema.create(Schema.Type.STRING); Schema recordSchema = AvroKeyValue.getSchema(key, value); DatumReader<GenericRecord> datumReader = SpecificData.get().createDatumReader(recordSchema); DataFileReader<GenericRecord> reader; LOG.debug("Using CodecFactory.deflateCodec() for a SortedKeyValueFile..."); SortedKeyValueFile.Writer.Options options = new SortedKeyValueFile.Writer.Options().withKeySchema(key) .withValueSchema(value).withConfiguration(conf).withPath(myfile).withCodec(CodecFactory.deflateCodec(9)); SortedKeyValueFile.Writer<CharSequence, CharSequence> writer = new SortedKeyValueFile.Writer<>(options); writer.close(); reader = new DataFileReader<>(new FsInput(new Path(myfile, SortedKeyValueFile.DATA_FILENAME), conf), datumReader); assertEquals("deflate", reader.getMetaString("avro.codec")); reader.close(); } @Test void badCodec() throws IOException { LOG.debug("Using a bad codec for a SortedKeyValueFile..."); try { new SortedKeyValueFile.Writer.Options().withCodec("foobar"); } catch (AvroRuntimeException e) { assertEquals("Unrecognized codec: foobar", e.getMessage()); } } @Test void writer() throws IOException { LOG.debug("Writing some records to a SortedKeyValueFile..."); Configuration conf = new Configuration(); SortedKeyValueFile.Writer.Options options = new SortedKeyValueFile.Writer.Options() .withKeySchema(Schema.create(Schema.Type.STRING)).withValueSchema(Schema.create(Schema.Type.STRING)) .withConfiguration(conf).withPath(new Path(mTempDir.getPath(), "myfile")).withIndexInterval(2); // Index // every // other // record. try (SortedKeyValueFile.Writer<CharSequence, CharSequence> writer = new SortedKeyValueFile.Writer<>(options)) { writer.append("apple", "Apple"); // Will be indexed. 
writer.append("banana", "Banana"); writer.append("carrot", "Carrot"); // Will be indexed. writer.append("durian", "Durian"); } LOG.debug("Checking the generated directory..."); File directory = new File(mTempDir.getPath(), "myfile"); assertTrue(directory.exists()); LOG.debug("Checking the generated index file..."); File indexFile = new File(directory, SortedKeyValueFile.INDEX_FILENAME); DatumReader<GenericRecord> indexReader = new GenericDatumReader<>( AvroKeyValue.getSchema(options.getKeySchema(), Schema.create(Schema.Type.LONG))); List<AvroKeyValue<CharSequence, Long>> indexRecords = new ArrayList<>(); try (FileReader<GenericRecord> indexFileReader = DataFileReader.openReader(indexFile, indexReader)) { for (GenericRecord indexRecord : indexFileReader) { indexRecords.add(new AvroKeyValue<>(indexRecord)); } } assertEquals(2, indexRecords.size()); assertEquals("apple", indexRecords.get(0).getKey().toString()); LOG.debug("apple's position in the file: " + indexRecords.get(0).getValue()); assertEquals("carrot", indexRecords.get(1).getKey().toString()); LOG.debug("carrot's position in the file: " + indexRecords.get(1).getValue()); LOG.debug("Checking the generated data file..."); File dataFile = new File(directory, SortedKeyValueFile.DATA_FILENAME); DatumReader<GenericRecord> dataReader = new GenericDatumReader<>( AvroKeyValue.getSchema(options.getKeySchema(), options.getValueSchema())); try (DataFileReader<GenericRecord> dataFileReader = new DataFileReader<>(dataFile, dataReader)) { dataFileReader.seek(indexRecords.get(0).getValue()); assertTrue(dataFileReader.hasNext()); AvroKeyValue<CharSequence, CharSequence> appleRecord = new AvroKeyValue<>(dataFileReader.next()); assertEquals("apple", appleRecord.getKey().toString()); assertEquals("Apple", appleRecord.getValue().toString()); dataFileReader.seek(indexRecords.get(1).getValue()); assertTrue(dataFileReader.hasNext()); AvroKeyValue<CharSequence, CharSequence> carrotRecord = new AvroKeyValue<>(dataFileReader.next()); 
assertEquals("carrot", carrotRecord.getKey().toString()); assertEquals("Carrot", carrotRecord.getValue().toString()); assertTrue(dataFileReader.hasNext()); AvroKeyValue<CharSequence, CharSequence> durianRecord = new AvroKeyValue<>(dataFileReader.next()); assertEquals("durian", durianRecord.getKey().toString()); assertEquals("Durian", durianRecord.getValue().toString()); } } @Test void reader() throws IOException { Configuration conf = new Configuration(); SortedKeyValueFile.Writer.Options writerOptions = new SortedKeyValueFile.Writer.Options() .withKeySchema(Schema.create(Schema.Type.STRING)).withValueSchema(Schema.create(Schema.Type.STRING)) .withConfiguration(conf).withPath(new Path(mTempDir.getPath(), "myfile")).withIndexInterval(2); // Index // every // other // record. try ( SortedKeyValueFile.Writer<CharSequence, CharSequence> writer = new SortedKeyValueFile.Writer<>(writerOptions)) { writer.append("apple", "Apple"); // Will be indexed. writer.append("banana", "Banana"); writer.append("carrot", "Carrot"); // Will be indexed. writer.append("durian", "Durian"); } LOG.debug("Reading the file back using a reader..."); SortedKeyValueFile.Reader.Options readerOptions = new SortedKeyValueFile.Reader.Options() .withKeySchema(Schema.create(Schema.Type.STRING)).withValueSchema(Schema.create(Schema.Type.STRING)) .withConfiguration(conf).withPath(new Path(mTempDir.getPath(), "myfile")); try ( SortedKeyValueFile.Reader<CharSequence, CharSequence> reader = new SortedKeyValueFile.Reader<>(readerOptions)) { assertEquals("Carrot", reader.get("carrot").toString()); assertEquals("Banana", reader.get("banana").toString()); assertNull(reader.get("a-vegetable")); assertNull(reader.get("beet")); assertNull(reader.get("zzz")); } } public static
TestSortedKeyValueFile
java
quarkusio__quarkus
extensions/smallrye-fault-tolerance/deployment/src/test/java/io/quarkus/smallrye/faulttolerance/test/circuitbreaker/CircuitBreakerTest.java
{ "start": 639, "end": 1980 }
class ____ { @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar.addClasses(CircuitBreakerBean.class)); @Inject CircuitBreakerBean circuitBreaker; @Inject CircuitBreakerMaintenance circuitBreakerMaintenance; @Test public void test() { assertEquals(CircuitBreakerState.CLOSED, circuitBreakerMaintenance.currentState("my-cb")); assertDoesNotThrow(() -> circuitBreaker.hello()); assertThrows(RuntimeException.class, () -> circuitBreaker.hello()); assertThrows(RuntimeException.class, () -> circuitBreaker.hello()); assertThrows(RuntimeException.class, () -> circuitBreaker.hello()); assertThrows(RuntimeException.class, () -> circuitBreaker.hello()); assertThrows(CircuitBreakerOpenException.class, () -> circuitBreaker.hello()); assertEquals(CircuitBreakerState.OPEN, circuitBreakerMaintenance.currentState("my-cb")); } @Test public void undefinedCircuitBreaker() { assertThrows(IllegalArgumentException.class, () -> { circuitBreakerMaintenance.currentState("undefined"); }); assertThrows(IllegalArgumentException.class, () -> { circuitBreakerMaintenance.reset("undefined"); }); } }
CircuitBreakerTest
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/execution/librarycache/BlobLibraryCacheManager.java
{ "start": 14825, "end": 16004 }
class ____, i.e. URLs, into a set for performance reasons // see http://findbugs.sourceforge.net/bugDescriptions.html#DMI_COLLECTION_OF_URLS // -> alternatively, compare their string representation this.classPaths = CollectionUtil.newHashSetWithExpectedSize(requiredClassPaths.size()); for (URL url : requiredClassPaths) { classPaths.add(url.toString()); } this.libraries = new HashSet<>(requiredLibraries); this.wrapsSystemClassLoader = wrapsSystemClassLoader; this.releaseHooks = new HashMap<>(); } @Override public ClassLoader asClassLoader() { return classLoader; } @Override public void registerReleaseHookIfAbsent(String releaseHookName, Runnable releaseHook) { releaseHooks.putIfAbsent(releaseHookName, releaseHook); } private void verifyClassLoader( Collection<PermanentBlobKey> requiredLibraries, Collection<URL> requiredClassPaths) { // Make sure the previous registration referred to the same libraries and
paths
java
spring-projects__spring-framework
spring-web/src/main/java/org/springframework/http/client/reactive/AbstractClientHttpRequest.java
{ "start": 1488, "end": 1834 }
class ____ implements ClientHttpRequest { /** * COMMITTING -> COMMITTED is the period after doCommit is called but before * the response status and headers have been applied to the underlying * response during which time pre-commit actions can still make changes to * the response status and headers. */ private
AbstractClientHttpRequest
java
elastic__elasticsearch
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileLongAggregator.java
{ "start": 748, "end": 2158 }
class ____ { public static QuantileStates.SingleState initSingle(DriverContext driverContext, double percentile) { return new QuantileStates.SingleState(driverContext.breaker(), percentile); } public static void combine(QuantileStates.SingleState current, long v) { current.add(v); } public static void combineIntermediate(QuantileStates.SingleState state, BytesRef inValue) { state.add(inValue); } public static Block evaluateFinal(QuantileStates.SingleState state, DriverContext driverContext) { return state.evaluatePercentile(driverContext); } public static QuantileStates.GroupingState initGrouping(DriverContext driverContext, double percentile) { return new QuantileStates.GroupingState(driverContext.breaker(), driverContext.bigArrays(), percentile); } public static void combine(QuantileStates.GroupingState state, int groupId, long v) { state.add(groupId, v); } public static void combineIntermediate(QuantileStates.GroupingState state, int groupId, BytesRef inValue) { state.add(groupId, inValue); } public static Block evaluateFinal( QuantileStates.GroupingState state, IntVector selectedGroups, GroupingAggregatorEvaluationContext ctx ) { return state.evaluatePercentile(selectedGroups, ctx.driverContext()); } }
PercentileLongAggregator
java
apache__camel
components/camel-netty-http/src/main/java/org/apache/camel/component/netty/http/NettyChannelBufferStreamCacheOnCompletion.java
{ "start": 1151, "end": 1575 }
class ____ extends SynchronizationAdapter { private final NettyChannelBufferStreamCache cache; public NettyChannelBufferStreamCacheOnCompletion(NettyChannelBufferStreamCache cache) { this.cache = cache; } @Override public void onDone(Exchange exchange) { // release the cache when we are done routing the Exchange cache.release(); } }
NettyChannelBufferStreamCacheOnCompletion
java
google__error-prone
core/src/test/java/com/google/errorprone/refaster/testdata/input/PlaceholderAllowsIdentityTemplateExample.java
{ "start": 769, "end": 1427 }
class ____ { public void positiveExample(List<Integer> list) { Iterator<Integer> itr = list.iterator(); while (itr.hasNext()) { if (itr.next() < 0) { itr.remove(); } } } public void positiveIdentityExample(List<Boolean> list) { Iterator<Boolean> itr = list.iterator(); while (itr.hasNext()) { if (itr.next()) { itr.remove(); } } } public void refersToForbiddenVariable(List<Integer> list) { Iterator<Integer> itr = list.iterator(); while (itr.hasNext()) { if (itr.next() < list.size()) { itr.remove(); } } } }
PlaceholderAllowsIdentityTemplateExample
java
spring-projects__spring-security
webauthn/src/main/java/org/springframework/security/web/webauthn/api/AuthenticatorAssertionResponse.java
{ "start": 1766, "end": 5173 }
class ____ extends AuthenticatorResponse { @Serial private static final long serialVersionUID = 324976481675434298L; private final Bytes authenticatorData; private final Bytes signature; private final @Nullable Bytes userHandle; private final @Nullable Bytes attestationObject; private AuthenticatorAssertionResponse(Bytes clientDataJSON, Bytes authenticatorData, Bytes signature, @Nullable Bytes userHandle, @Nullable Bytes attestationObject) { super(clientDataJSON); this.authenticatorData = authenticatorData; this.signature = signature; this.userHandle = userHandle; this.attestationObject = attestationObject; } /** * The <a href= * "https://www.w3.org/TR/webauthn-3/#dom-authenticatorassertionresponse-authenticatordata">authenticatorData</a> * contains the * <a href="https://www.w3.org/TR/webauthn-3/#authenticator-data">authenticator * data</a> returned by the authenticator. See * <a href="https://www.w3.org/TR/webauthn-3/#sctn-authenticator-data">6.1 * Authenticator Data.</a>. * @return the {@code authenticatorData} */ public Bytes getAuthenticatorData() { return this.authenticatorData; } /** * The <a href= * "https://www.w3.org/TR/webauthn-3/#dom-authenticatorassertionresponse-signature">signature</a> * contains the raw signature returned from the authenticator. See * <a href="https://www.w3.org/TR/webauthn-3/#sctn-op-get-assertion">6.3.3 The * authenticatorGetAssertion Operation</a>. * @return the {@code signature} */ public Bytes getSignature() { return this.signature; } /** * The <a href= * "https://www.w3.org/TR/webauthn-3/#dom-authenticatorassertionresponse-userhandle">userHandle</a> * is the <a href="https://www.w3.org/TR/webauthn-3/#user-handle">user handle</a> * which is returned from the authenticator, or null if the authenticator did not * return a user handle. See * <a href="https://www.w3.org/TR/webauthn-3/#sctn-op-get-assertion">6.3.3 The * authenticatorGetAssertion Operation</a>. 
The authenticator MUST always return a * user handle if the <a href= * "https://www.w3.org/TR/webauthn-3/#dom-publickeycredentialrequestoptions-allowcredentials">allowCredentials</a> * option used in the * <a href="https://www.w3.org/TR/webauthn-3/#authentication-ceremony">authentication * ceremony</a> is empty, and MAY return one otherwise. * @return the <a href="https://www.w3.org/TR/webauthn-3/#user-handle">user handle</a> */ public @Nullable Bytes getUserHandle() { return this.userHandle; } /** * The <a href= * "https://www.w3.org/TR/webauthn-3/#dom-authenticatorattestationresponse-attestationobject">attestationObject</a> * is an OPTIONAL attribute contains an * <a href="https://www.w3.org/TR/webauthn-3/#attestation-object">attestation * object</a>, if the authenticator supports attestation in assertions. * @return the {@code attestationObject} */ public @Nullable Bytes getAttestationObject() { return this.attestationObject; } /** * Creates a new {@link AuthenticatorAssertionResponseBuilder} * @return the {@link AuthenticatorAssertionResponseBuilder} */ public static AuthenticatorAssertionResponseBuilder builder() { return new AuthenticatorAssertionResponseBuilder(); } /** * Builds a {@link AuthenticatorAssertionResponse}. * * @author Rob Winch * @since 6.4 */ public static final
AuthenticatorAssertionResponse
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/refresh/JPARefreshTest.java
{ "start": 4047, "end": 4709 }
class ____ { @Id @ManyToOne(fetch= FetchType.LAZY, cascade = CascadeType.PERSIST) @JoinColumn(name = "REALM_ID") protected RealmEntity realm; @Id @Column(name = "NAME") protected String name; @Column(name = "VALUE_COLUMN") protected String value; public String getName() { return name; } public void setName(String name) { this.name = name; } public String getValue() { return value; } public void setValue(String value) { this.value = value; } public RealmEntity getRealm() { return realm; } public void setRealm(RealmEntity realm) { this.realm = realm; } } public static
RealmAttributeEntity
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/mutability/attribute/EntityAttributeMutabilityTest.java
{ "start": 2529, "end": 3198 }
class ____ { @Id private Integer id; @Basic private String name; @ManyToOne @JoinColumn( name = "manager_fk" ) @Immutable private Employee manager; @ManyToOne @JoinColumn( name = "manager2_fk" ) @Mutability(Immutability.class) private Employee manager2; private Employee() { // for use by Hibernate } public Employee(Integer id, String name) { this.id = id; this.name = name; } public Integer getId() { return id; } public String getName() { return name; } public void setName(String name) { this.name = name; } } //tag::collection-immutability-example[] @Entity(name = "Batch") public static
Employee
java
grpc__grpc-java
netty/src/main/java/io/grpc/netty/InternalProtocolNegotiators.java
{ "start": 1187, "end": 2095 }
class ____ { private InternalProtocolNegotiators() {} /** * Returns a {@link ProtocolNegotiator} that ensures the pipeline is set up so that TLS will * be negotiated, the {@code handler} is added and writes to the {@link io.netty.channel.Channel} * may happen immediately, even before the TLS Handshake is complete. * @param executorPool a dedicated {@link Executor} pool for time-consuming TLS tasks */ public static InternalProtocolNegotiator.ProtocolNegotiator tls(SslContext sslContext, ObjectPool<? extends Executor> executorPool, Optional<Runnable> handshakeCompleteRunnable, X509TrustManager extendedX509TrustManager, String sni) { final io.grpc.netty.ProtocolNegotiator negotiator = ProtocolNegotiators.tls(sslContext, executorPool, handshakeCompleteRunnable, extendedX509TrustManager, sni); final
InternalProtocolNegotiators
java
spring-projects__spring-boot
buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/build/Phase.java
{ "start": 1298, "end": 5045 }
class ____ { private final String name; private boolean daemonAccess; private final List<String> args = new ArrayList<>(); private final List<Binding> bindings = new ArrayList<>(); private final Map<String, String> env = new LinkedHashMap<>(); private final List<String> securityOptions = new ArrayList<>(); private @Nullable String networkMode; private boolean requiresApp; /** * Create a new {@link Phase} instance. * @param name the name of the phase * @param verboseLogging if verbose logging is requested */ Phase(String name, boolean verboseLogging) { this.name = name; withLogLevelArg(verboseLogging); } void withApp(String path, Binding binding) { withArgs("-app", path); withBinding(binding); this.requiresApp = true; } void withBuildCache(String path, Binding binding) { withArgs("-cache-dir", path); withBinding(binding); } /** * Update this phase with Docker daemon access. */ void withDaemonAccess() { this.withArgs("-daemon"); this.daemonAccess = true; } void withImageName(ImageReference imageName) { withArgs(imageName); } void withLaunchCache(String path, Binding binding) { withArgs("-launch-cache", path); withBinding(binding); } void withLayers(String path, Binding binding) { withArgs("-layers", path); withBinding(binding); } void withPlatform(String path) { withArgs("-platform", path); } void withProcessType(String type) { withArgs("-process-type", type); } void withRunImage(ImageReference runImage) { withArgs("-run-image", runImage); } void withSkipRestore() { withArgs("-skip-restore"); } /** * Update this phase with a debug log level arguments if verbose logging has been * requested. * @param verboseLogging if verbose logging is requested */ private void withLogLevelArg(boolean verboseLogging) { if (verboseLogging) { this.args.add("-log-level"); this.args.add("debug"); } } /** * Update this phase with additional run arguments. * @param args the arguments to add */ void withArgs(Object... 
args) { Arrays.stream(args).map(Object::toString).forEach(this.args::add); } /** * Update this phase with an addition volume binding. * @param binding the binding */ void withBinding(Binding binding) { this.bindings.add(binding); } /** * Update this phase with an additional environment variable. * @param name the variable name * @param value the variable value */ void withEnv(String name, String value) { this.env.put(name, value); } /** * Update this phase with the network the build container will connect to. * @param networkMode the network */ void withNetworkMode(@Nullable String networkMode) { this.networkMode = networkMode; } /** * Update this phase with a security option. * @param option the security option */ void withSecurityOption(String option) { this.securityOptions.add(option); } /** * Return the name of the phase. * @return the phase name */ String getName() { return this.name; } boolean requiresApp() { return this.requiresApp; } @Override public String toString() { return this.name; } /** * Apply this phase settings to a {@link ContainerConfig} update. * @param update the update to apply the phase to */ void apply(ContainerConfig.Update update) { if (this.daemonAccess) { update.withUser("root"); } update.withCommand("/cnb/lifecycle/" + this.name, StringUtils.toStringArray(this.args)); update.withLabel("author", "spring-boot"); this.bindings.forEach(update::withBinding); this.env.forEach(update::withEnv); if (this.networkMode != null) { update.withNetworkMode(this.networkMode); } this.securityOptions.forEach(update::withSecurityOption); } }
Phase
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/core/SmartClassLoader.java
{ "start": 1238, "end": 1501 }
class ____ reloadable (in this ClassLoader). * <p>Typically used to check whether the result may be cached (for this * ClassLoader) or whether it should be reobtained every time. * The default implementation always returns {@code false}. * @param clazz the
is
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/api/atomic/reference/AtomicReferenceAssert_hasValue_Test.java
{ "start": 1022, "end": 1953 }
class ____ { @Test void should_pass_when_actual_has_the_expected_value() { String initialValue = "foo"; AtomicReference<String> actual = new AtomicReference<>(initialValue); assertThat(actual).hasValue(initialValue); } @Test void should_fail_when_actual_does_not_have_the_expected_value() { AtomicReference<String> actual = new AtomicReference<>("foo"); String expectedValue = "bar"; assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(actual).hasValue(expectedValue)) .withMessage(shouldHaveValue(actual, expectedValue).create()); } @Test void should_fail_when_actual_is_null() { assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> { AtomicReference<String> actual = null; assertThat(actual).hasValue("foo"); }).withMessage(actualIsNull()); } }
AtomicReferenceAssert_hasValue_Test
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/query/sqm/tuple/internal/AnonymousTupleSqmAssociationPathSource.java
{ "start": 1052, "end": 3190 }
class ____<O, J> extends AnonymousTupleSqmPathSource<J> implements SqmSingularPersistentAttribute<O, J> { private final SimpleDomainType<J> domainType; public AnonymousTupleSqmAssociationPathSource( String localPathName, SqmPath<J> path, SimpleDomainType<J> domainType) { super( localPathName, path ); this.domainType = domainType; } @Override public SqmJoin<O, J> createSqmJoin( SqmFrom<?, O> lhs, SqmJoinType joinType, @Nullable String alias, boolean fetched, SqmCreationState creationState) { return new SqmSingularJoin<>( lhs, this, alias, joinType, fetched, creationState.getCreationContext().getNodeBuilder() ); } @Override public Class<J> getBindableJavaType() { return getExpressibleJavaType().getJavaTypeClass(); } @Override public Class<J> getJavaType() { return getExpressibleJavaType().getJavaTypeClass(); } @Override public SqmDomainType<J> getPathType() { return (SqmDomainType<J>) domainType; } @Override public SimpleDomainType<J> getType() { return domainType; } @Override public ManagedDomainType<O> getDeclaringType() { return null; } @Override public SqmPathSource<J> getSqmPathSource() { return this; } @Override public boolean isId() { return false; } @Override public boolean isVersion() { return false; } @Override public boolean isOptional() { return true; } @Override public JavaType<J> getAttributeJavaType() { return domainType.getExpressibleJavaType(); } @Override public AttributeClassification getAttributeClassification() { return AttributeClassification.MANY_TO_ONE; } @Override public SimpleDomainType<?> getKeyGraphType() { return domainType; } @Override public String getName() { return getPathName(); } @Override public PersistentAttributeType getPersistentAttributeType() { return PersistentAttributeType.MANY_TO_ONE; } @Override public Member getJavaMember() { return null; } @Override public boolean isAssociation() { return true; } @Override public boolean isCollection() { return false; } }
AnonymousTupleSqmAssociationPathSource
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/taskmanager/TaskManagerLocation.java
{ "start": 15965, "end": 16927 }
class ____ implements HostNameSupplier { private final InetAddress inetAddress; public IpOnlyHostNameSupplier(InetAddress inetAddress) { this.inetAddress = inetAddress; } /** * Returns the textual representation of the TaskManager's IP address as host name. * * @return The textual representation of the TaskManager's IP address. */ @Override public String getHostName() { return inetAddress.getHostAddress(); } /** * Returns the textual representation of the TaskManager's IP address as FQDN host name. * * @return The textual representation of the TaskManager's IP address. */ @Override public String getFqdnHostName() { return inetAddress.getHostAddress(); } } /** The DNS resolution mode for TaskManager's IP address. */ public
IpOnlyHostNameSupplier
java
apache__kafka
streams/src/main/java/org/apache/kafka/streams/state/internals/WindowStoreIteratorWrapper.java
{ "start": 1177, "end": 2568 }
class ____ { private final KeyValueIterator<Bytes, byte[]> bytesIterator; private final long windowSize; private final Function<byte[], Long> timestampExtractor; private final BiFunction<byte[], Long, Windowed<Bytes>> windowConstructor; WindowStoreIteratorWrapper(final KeyValueIterator<Bytes, byte[]> bytesIterator, final long windowSize) { this(bytesIterator, windowSize, WindowKeySchema::extractStoreTimestamp, WindowKeySchema::fromStoreBytesKey); } WindowStoreIteratorWrapper(final KeyValueIterator<Bytes, byte[]> bytesIterator, final long windowSize, final Function<byte[], Long> timestampExtractor, final BiFunction<byte[], Long, Windowed<Bytes>> windowConstructor) { this.bytesIterator = bytesIterator; this.windowSize = windowSize; this.timestampExtractor = timestampExtractor; this.windowConstructor = windowConstructor; } public WindowStoreIterator<byte[]> valuesIterator() { return new WrappedWindowStoreIterator(bytesIterator, timestampExtractor); } public KeyValueIterator<Windowed<Bytes>, byte[]> keyValueIterator() { return new WrappedKeyValueIterator(bytesIterator, windowSize, windowConstructor); } private static
WindowStoreIteratorWrapper
java
apache__kafka
connect/transforms/src/main/java/org/apache/kafka/connect/transforms/MaskField.java
{ "start": 8831, "end": 9401 }
class ____<R extends ConnectRecord<R>> extends MaskField<R> { @Override protected Schema operatingSchema(R record) { return record.valueSchema(); } @Override protected Object operatingValue(R record) { return record.value(); } @Override protected R newRecord(R record, Object updatedValue) { return record.newRecord(record.topic(), record.kafkaPartition(), record.keySchema(), record.key(), record.valueSchema(), updatedValue, record.timestamp()); } } }
Value
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/inheritance/joined/JoinedInheritanceEmbeddedIdTest.java
{ "start": 3441, "end": 4313 }
class ____ extends PkEmbeddable { private String lessonCd; private String recordCd; public BasePk() { } public BasePk(Integer siteCd, String lessonCd, String recordCd) { super( siteCd ); this.lessonCd = lessonCd; this.recordCd = recordCd; } @Override public boolean equals(Object o) { if ( this == o ) { return true; } if ( !super.equals( o ) ) { return false; } final BasePk basePk = (BasePk) o; return Objects.equals( lessonCd, basePk.lessonCd ) && Objects.equals( recordCd, basePk.recordCd ); } @Override public int hashCode() { int result = super.hashCode(); result = 31 * result + Objects.hashCode( lessonCd ); result = 31 * result + Objects.hashCode( recordCd ); return result; } } @Entity( name = "BaseEntity" ) @Inheritance( strategy = InheritanceType.JOINED ) static
BasePk
java
spring-projects__spring-security
ldap/src/main/java/org/springframework/security/ldap/jackson/LdapJacksonModule.java
{ "start": 1894, "end": 2638 }
class ____ extends SecurityJacksonModule { public LdapJacksonModule() { super(LdapJacksonModule.class.getName(), new Version(1, 0, 0, null, null, null)); } @Override public void configurePolymorphicTypeValidator(BasicPolymorphicTypeValidator.Builder builder) { builder.allowIfSubType(InetOrgPerson.class) .allowIfSubType(LdapUserDetailsImpl.class) .allowIfSubType(Person.class); } @Override public void setupModule(SetupContext context) { context.setMixIn(LdapAuthority.class, LdapAuthorityMixin.class); context.setMixIn(LdapUserDetailsImpl.class, LdapUserDetailsImplMixin.class); context.setMixIn(Person.class, PersonMixin.class); context.setMixIn(InetOrgPerson.class, InetOrgPersonMixin.class); } }
LdapJacksonModule
java
google__dagger
dagger-compiler/main/java/dagger/internal/codegen/binding/AssistedInjectionAnnotations.java
{ "start": 8209, "end": 10825 }
class ____ { public static AssistedFactoryMetadata create(XType factoryType) { XTypeElement factoryElement = factoryType.getTypeElement(); XMethodElement factoryMethod = assistedFactoryMethod(factoryElement); XMethodType factoryMethodType = factoryMethod.asMemberOf(factoryType); XType assistedInjectType = factoryMethodType.getReturnType(); XTypeElement assistedInjectElement = assistedInjectType.getTypeElement(); return new AutoValue_AssistedInjectionAnnotations_AssistedFactoryMetadata( factoryElement, factoryType, factoryMethod, factoryMethodType, assistedInjectElement, assistedInjectType, AssistedInjectionAnnotations.assistedInjectAssistedParameters(assistedInjectType), AssistedInjectionAnnotations.assistedFactoryAssistedParameters( factoryMethod, factoryMethodType)); } public abstract XTypeElement factory(); public abstract XType factoryType(); public abstract XMethodElement factoryMethod(); public abstract XMethodType factoryMethodType(); public abstract XTypeElement assistedInjectElement(); public abstract XType assistedInjectType(); public abstract ImmutableList<AssistedParameter> assistedInjectAssistedParameters(); public abstract ImmutableList<AssistedParameter> assistedFactoryAssistedParameters(); @Memoized public ImmutableMap<AssistedParameter, XExecutableParameterElement> assistedInjectAssistedParametersMap() { ImmutableMap.Builder<AssistedParameter, XExecutableParameterElement> builder = ImmutableMap.builder(); for (AssistedParameter assistedParameter : assistedInjectAssistedParameters()) { builder.put(assistedParameter, assistedParameter.element()); } return builder.build(); } @Memoized public ImmutableMap<AssistedParameter, XExecutableParameterElement> assistedFactoryAssistedParametersMap() { ImmutableMap.Builder<AssistedParameter, XExecutableParameterElement> builder = ImmutableMap.builder(); for (AssistedParameter assistedParameter : assistedFactoryAssistedParameters()) { builder.put(assistedParameter, assistedParameter.element()); } return 
builder.build(); } } /** * Metadata about an {@link Assisted} annotated parameter. * * <p>This parameter can represent an {@link Assisted} annotated parameter from an {@link * AssistedInject} constructor or an {@link AssistedFactory} method. */ @AutoValue public abstract static
AssistedFactoryMetadata
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/JUnit4ClassUsedInJUnit3Test.java
{ "start": 2474, "end": 3032 }
class ____ { @Rule public TemporaryFolder folder = new TemporaryFolder(); @Ignore @Test public void testOne() {} @Test public void testTwo() {} } """) .doTest(); } @Test public void positive_assume_in_test() { compilationHelper .addSourceLines( "Foo.java", """ import junit.framework.TestCase; import org.junit.Test; import org.junit.Assume; public
Foo
java
apache__hadoop
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/LoadJob.java
{ "start": 19597, "end": 20796 }
class ____ extends RecordReader<NullWritable,GridmixRecord> { private RecordFactory factory; private final Random r = new Random(); private final GridmixRecord val = new GridmixRecord(); public LoadRecordReader() { } @Override public void initialize(InputSplit genericSplit, TaskAttemptContext ctxt) throws IOException, InterruptedException { final LoadSplit split = (LoadSplit)genericSplit; final Configuration conf = ctxt.getConfiguration(); factory = new ReadRecordFactory(split.getLength(), split.getInputRecords(), new FileQueue(split, conf), conf); } @Override public boolean nextKeyValue() throws IOException { val.setSeed(r.nextLong()); return factory.next(null, val); } @Override public float getProgress() throws IOException { return factory.getProgress(); } @Override public NullWritable getCurrentKey() { return NullWritable.get(); } @Override public GridmixRecord getCurrentValue() { return val; } @Override public void close() throws IOException { factory.close(); } } static
LoadRecordReader
java
apache__camel
components/camel-google/camel-google-drive/src/generated/java/org/apache/camel/component/google/drive/DriveAboutEndpointConfigurationConfigurer.java
{ "start": 745, "end": 5929 }
class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter { private static final Map<String, Object> ALL_OPTIONS; static { Map<String, Object> map = new CaseInsensitiveMap(); map.put("AccessToken", java.lang.String.class); map.put("ApiName", org.apache.camel.component.google.drive.internal.GoogleDriveApiName.class); map.put("ApplicationName", java.lang.String.class); map.put("ClientId", java.lang.String.class); map.put("ClientSecret", java.lang.String.class); map.put("Delegate", java.lang.String.class); map.put("MethodName", java.lang.String.class); map.put("RefreshToken", java.lang.String.class); map.put("Scopes", java.lang.String.class); map.put("ServiceAccountKey", java.lang.String.class); ALL_OPTIONS = map; } @Override public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) { org.apache.camel.component.google.drive.DriveAboutEndpointConfiguration target = (org.apache.camel.component.google.drive.DriveAboutEndpointConfiguration) obj; switch (ignoreCase ? 
name.toLowerCase() : name) { case "accesstoken": case "accessToken": target.setAccessToken(property(camelContext, java.lang.String.class, value)); return true; case "apiname": case "apiName": target.setApiName(property(camelContext, org.apache.camel.component.google.drive.internal.GoogleDriveApiName.class, value)); return true; case "applicationname": case "applicationName": target.setApplicationName(property(camelContext, java.lang.String.class, value)); return true; case "clientid": case "clientId": target.setClientId(property(camelContext, java.lang.String.class, value)); return true; case "clientsecret": case "clientSecret": target.setClientSecret(property(camelContext, java.lang.String.class, value)); return true; case "delegate": target.setDelegate(property(camelContext, java.lang.String.class, value)); return true; case "methodname": case "methodName": target.setMethodName(property(camelContext, java.lang.String.class, value)); return true; case "refreshtoken": case "refreshToken": target.setRefreshToken(property(camelContext, java.lang.String.class, value)); return true; case "scopes": target.setScopes(property(camelContext, java.lang.String.class, value)); return true; case "serviceaccountkey": case "serviceAccountKey": target.setServiceAccountKey(property(camelContext, java.lang.String.class, value)); return true; default: return false; } } @Override public Map<String, Object> getAllOptions(Object target) { return ALL_OPTIONS; } @Override public Class<?> getOptionType(String name, boolean ignoreCase) { switch (ignoreCase ? 
name.toLowerCase() : name) { case "accesstoken": case "accessToken": return java.lang.String.class; case "apiname": case "apiName": return org.apache.camel.component.google.drive.internal.GoogleDriveApiName.class; case "applicationname": case "applicationName": return java.lang.String.class; case "clientid": case "clientId": return java.lang.String.class; case "clientsecret": case "clientSecret": return java.lang.String.class; case "delegate": return java.lang.String.class; case "methodname": case "methodName": return java.lang.String.class; case "refreshtoken": case "refreshToken": return java.lang.String.class; case "scopes": return java.lang.String.class; case "serviceaccountkey": case "serviceAccountKey": return java.lang.String.class; default: return null; } } @Override public Object getOptionValue(Object obj, String name, boolean ignoreCase) { org.apache.camel.component.google.drive.DriveAboutEndpointConfiguration target = (org.apache.camel.component.google.drive.DriveAboutEndpointConfiguration) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "accesstoken": case "accessToken": return target.getAccessToken(); case "apiname": case "apiName": return target.getApiName(); case "applicationname": case "applicationName": return target.getApplicationName(); case "clientid": case "clientId": return target.getClientId(); case "clientsecret": case "clientSecret": return target.getClientSecret(); case "delegate": return target.getDelegate(); case "methodname": case "methodName": return target.getMethodName(); case "refreshtoken": case "refreshToken": return target.getRefreshToken(); case "scopes": return target.getScopes(); case "serviceaccountkey": case "serviceAccountKey": return target.getServiceAccountKey(); default: return null; } } }
DriveAboutEndpointConfigurationConfigurer
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/inlineme/InlinerTest.java
{ "start": 41758, "end": 42289 }
class ____ { public void doTest() { Client client = new Client(); client.foo2("frobber!"); client.foo2("don't change this!"); } } """) .doTest(); } // b/268215956 @Test public void varArgs() { refactoringTestHelper .addInputLines( "Client.java", """ package com.google.foo; import com.google.errorprone.annotations.InlineMe; public final
Caller
java
apache__flink
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/AbstractSqlGatewayStatementITCase.java
{ "start": 1774, "end": 4494 }
class ____ extends AbstractSqlGatewayStatementITCaseBase { private static final Logger LOG = LoggerFactory.getLogger(AbstractSqlGatewayStatementITCase.class); @RegisterExtension @Order(1) public static final MiniClusterExtension MINI_CLUSTER = new MiniClusterExtension(); @RegisterExtension @Order(2) public static final SqlGatewayServiceExtension SQL_GATEWAY_SERVICE_EXTENSION = new SqlGatewayServiceExtension(MINI_CLUSTER::getClientConfiguration); protected static SqlGatewayService service; @BeforeAll static void setUp() { service = SQL_GATEWAY_SERVICE_EXTENSION.getService(); } @BeforeEach public void before(@TempDir Path temporaryFolder) throws Exception { super.before(temporaryFolder); replaceVars.put( "$VAR_REST_PORT", MINI_CLUSTER.getClientConfiguration().get(PORT).toString()); } /** * Returns printed results for each ran SQL statements. * * @param statements the SQL statements to run * @return the stringified results */ protected String runStatements(List<TestSqlStatement> statements) throws Exception { List<String> output = new ArrayList<>(); for (TestSqlStatement statement : statements) { StringBuilder builder = new StringBuilder(); builder.append(statement.getComment()); builder.append(statement.getSql()); String trimmedSql = statement.getSql().trim(); if (trimmedSql.endsWith(";")) { trimmedSql = trimmedSql.substring(0, trimmedSql.length() - 1); } try { builder.append(runSingleStatement(trimmedSql)); } catch (Throwable t) { LOG.error("Failed to execute statements.", t); builder.append( AbstractSqlGatewayStatementITCase.Tag.ERROR.addTag( removeRowNumber(stringifyException(t).trim()) + "\n")); } output.add(builder.toString()); } return String.join("", output); } // ------------------------------------------------------------------------------------------- // Utility // ------------------------------------------------------------------------------------------- /** * Returns printed results for each ran SQL statements. 
* * @param statement the SQL statement to run * @return the printed results in tableau style */ protected abstract String runSingleStatement(String statement) throws Exception; protected abstract String stringifyException(Throwable t); }
AbstractSqlGatewayStatementITCase
java
elastic__elasticsearch
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongFloatAggregatorFunctionSupplier.java
{ "start": 651, "end": 1810 }
class ____ implements AggregatorFunctionSupplier { private final int limit; private final boolean ascending; public TopLongFloatAggregatorFunctionSupplier(int limit, boolean ascending) { this.limit = limit; this.ascending = ascending; } @Override public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { return TopLongFloatAggregatorFunction.intermediateStateDesc(); } @Override public List<IntermediateStateDesc> groupingIntermediateStateDesc() { return TopLongFloatGroupingAggregatorFunction.intermediateStateDesc(); } @Override public TopLongFloatAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) { return TopLongFloatAggregatorFunction.create(driverContext, channels, limit, ascending); } @Override public TopLongFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, List<Integer> channels) { return TopLongFloatGroupingAggregatorFunction.create(channels, driverContext, limit, ascending); } @Override public String describe() { return "top_long of floats"; } }
TopLongFloatAggregatorFunctionSupplier
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/conversion/java8time/Source.java
{ "start": 418, "end": 4850 }
class ____ { private ZonedDateTime zonedDateTime; private LocalDateTime localDateTime; private LocalDate localDate; private LocalTime localTime; private ZonedDateTime forCalendarConversion; private ZonedDateTime forDateConversionWithZonedDateTime; private LocalDateTime forDateConversionWithLocalDateTime; private LocalDate forDateConversionWithLocalDate; private LocalDate forSqlDateConversionWithLocalDate; private Instant forDateConversionWithInstant; private LocalDate forLocalDateTimeConversionWithLocalDate; private Instant forInstantConversionWithString; private Period forPeriodConversionWithString; private Duration forDurationConversionWithString; public ZonedDateTime getZonedDateTime() { return zonedDateTime; } public void setZonedDateTime(ZonedDateTime dateTime) { this.zonedDateTime = dateTime; } public LocalDateTime getLocalDateTime() { return localDateTime; } public void setLocalDateTime(LocalDateTime localDateTime) { this.localDateTime = localDateTime; } public LocalDate getLocalDate() { return localDate; } public void setLocalDate(LocalDate localDate) { this.localDate = localDate; } public LocalTime getLocalTime() { return localTime; } public void setLocalTime(LocalTime localTime) { this.localTime = localTime; } public ZonedDateTime getForCalendarConversion() { return forCalendarConversion; } public void setForCalendarConversion(ZonedDateTime forCalendarConversion) { this.forCalendarConversion = forCalendarConversion; } public ZonedDateTime getForDateConversionWithZonedDateTime() { return forDateConversionWithZonedDateTime; } public void setForDateConversionWithZonedDateTime(ZonedDateTime forDateConversionWithZonedDateTime) { this.forDateConversionWithZonedDateTime = forDateConversionWithZonedDateTime; } public LocalDateTime getForDateConversionWithLocalDateTime() { return forDateConversionWithLocalDateTime; } public void setForDateConversionWithLocalDateTime(LocalDateTime forDateConversionWithLocalDateTime) { this.forDateConversionWithLocalDateTime = 
forDateConversionWithLocalDateTime; } public LocalDate getForDateConversionWithLocalDate() { return forDateConversionWithLocalDate; } public void setForDateConversionWithLocalDate(LocalDate forDateConversionWithLocalDate) { this.forDateConversionWithLocalDate = forDateConversionWithLocalDate; } public LocalDate getForSqlDateConversionWithLocalDate() { return forSqlDateConversionWithLocalDate; } public void setForSqlDateConversionWithLocalDate(LocalDate forSqlDateConversionWithLocalDate) { this.forSqlDateConversionWithLocalDate = forSqlDateConversionWithLocalDate; } public Instant getForDateConversionWithInstant() { return forDateConversionWithInstant; } public void setForDateConversionWithInstant(Instant forDateConversionWithInstant) { this.forDateConversionWithInstant = forDateConversionWithInstant; } public LocalDate getForLocalDateTimeConversionWithLocalDate() { return forLocalDateTimeConversionWithLocalDate; } public void setForLocalDateTimeConversionWithLocalDate(LocalDate forLocalDateTimeConversionWithLocalDate) { this.forLocalDateTimeConversionWithLocalDate = forLocalDateTimeConversionWithLocalDate; } public Instant getForInstantConversionWithString() { return forInstantConversionWithString; } public void setForInstantConversionWithString(Instant forInstantConversionWithString) { this.forInstantConversionWithString = forInstantConversionWithString; } public Period getForPeriodConversionWithString() { return forPeriodConversionWithString; } public void setForPeriodConversionWithString(Period forPeriodConversionWithString) { this.forPeriodConversionWithString = forPeriodConversionWithString; } public Duration getForDurationConversionWithString() { return forDurationConversionWithString; } public void setForDurationConversionWithString(Duration forDurationConversionWithString) { this.forDurationConversionWithString = forDurationConversionWithString; } }
Source
java
eclipse-vertx__vert.x
vertx-core/src/main/java/io/vertx/core/impl/SysProps.java
{ "start": 789, "end": 4634 }
enum ____ { /** * Duplicate of {@link HttpHeadersInternal#DISABLE_HTTP_HEADERS_VALIDATION} */ DISABLE_HTTP_HEADERS_VALIDATION("vertx.disableHttpHeadersValidation"), /** * Internal property that disables websockets benchmarking purpose. */ DISABLE_WEBSOCKETS("vertx.disableWebsockets"), /** * Internal property that disables metrics for benchmarking purpose. */ DISABLE_METRICS("vertx.disableMetrics"), /** * Internal property that disables the context task execution measures for benchmarking purpose. */ DISABLE_CONTEXT_TIMINGS("vertx.disableContextTimings"), /** * Disable Netty DNS resolver usage. * * Documented and (not much) tested. */ DISABLE_DNS_RESOLVER("vertx.disableDnsResolver"), /** * Default value of {@link io.vertx.core.file.FileSystemOptions#DEFAULT_FILE_CACHING_ENABLED} * */ DISABLE_FILE_CACHING("vertx.disableFileCaching"), /** * Default value of {@link io.vertx.core.file.FileSystemOptions#DEFAULT_CLASS_PATH_RESOLVING_ENABLED} * */ DISABLE_FILE_CP_RESOLVING("vertx.disableFileCPResolving"), /** * Default value of {@link io.vertx.core.file.FileSystemOptions#DEFAULT_FILE_CACHING_DIR} */ FILE_CACHE_DIR("vertx.cacheDirBase") { @Override public String get() { String val = super.get(); if (val == null) { // get the system default temp dir location (can be overriden by using the standard java system property) // if not present default to the process start CWD String tmpDir = System.getProperty("java.io.tmpdir", "."); String cacheDirBase = "vertx-cache"; val = tmpDir + File.separator + cacheDirBase; } return val; } }, /** * Enable bytes caching of HTTP/1.x immutable response headers. 
*/ @Unstable CACHE_IMMUTABLE_HTTP_RESPONSE_HEADERS("vertx.cacheImmutableHttpResponseHeaders"), /** * Enable common HTTP/1.x request headers to their lower case version * * <ul> * <li>host/Host: {@link io.vertx.core.http.HttpHeaders#HOST}</li> * <li>accept/Accept: {@link io.vertx.core.http.HttpHeaders#ACCEPT}</li> * <li>content-type/Content-Type: {@link io.vertx.core.http.HttpHeaders#CONTENT_TYPE}</li> * <li>content-length/Content-Length: {@link io.vertx.core.http.HttpHeaders#CONTENT_LENGTH}</li> * <li>connection/Connection: {@link io.vertx.core.http.HttpHeaders#CONNECTION}</li> * </ul> * */ @Unstable INTERN_COMMON_HTTP_REQUEST_HEADERS_TO_LOWER_CASE("vertx.internCommonHttpRequestHeadersToLowerCase"), /** * Configure the Vert.x logger. * * Documented and tested. */ LOGGER_DELEGATE_FACTORY_CLASS_NAME("vertx.logger-delegate-factory-class-name"), JACKSON_DEFAULT_READ_MAX_NESTING_DEPTH("vertx.jackson.defaultReadMaxNestingDepth"), JACKSON_DEFAULT_READ_MAX_DOC_LEN("vertx.jackson.defaultReadMaxDocumentLength"), JACKSON_DEFAULT_READ_MAX_NUM_LEN("vertx.jackson.defaultReadMaxNumberLength"), JACKSON_DEFAULT_READ_MAX_STRING_LEN("vertx.jackson.defaultReadMaxStringLength"), JACKSON_DEFAULT_READ_MAX_NAME_LEN("vertx.jackson.defaultReadMaxNameLength"), JACKSON_DEFAULT_READ_MAX_TOKEN_COUNT("vertx.jackson.defaultMaxTokenCount"), ; public final String name; SysProps(String name) { this.name = name; } public String get() { return System.getProperty(name); } public OptionalLong getAsLong() throws NumberFormatException { String s = get(); if (s != null) { return OptionalLong.of(Long.parseLong(s)); } return OptionalLong.empty(); } public OptionalInt getAsInt() throws NumberFormatException { String s = get(); if (s != null) { return OptionalInt.of(Integer.parseInt(s)); } return OptionalInt.empty(); } public boolean getBoolean() { return Boolean.getBoolean(name); } }
SysProps
java
google__guice
core/test/com/google/inject/errors/GenericErrorTest.java
{ "start": 1142, "end": 1249 }
class ____ { BadConstructor() { throw new RuntimeException("bad"); } } static
BadConstructor
java
apache__commons-lang
src/test/java/org/apache/commons/lang3/function/FailableTest.java
{ "start": 79758, "end": 80309 }
interface ____ properly defined to throw any exception using the top level generic types * Object and Throwable. */ @Test void testThrows_FailableDoubleBinaryOperator_Throwable() { assertThrows(IOException.class, () -> new FailableDoubleBinaryOperator<Throwable>() { @Override public double applyAsDouble(final double left, final double right) throws Throwable { throw new IOException("test"); } }.applyAsDouble(0, 0)); } /** * Tests that our failable
is
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/collectionelement/Toy.java
{ "start": 374, "end": 1459 }
class ____ { private String name; private Brand brand; private String serial; private Boy owner; @AttributeOverride(name = "name", column = @Column(name = "brand_name")) public Brand getBrand() { return brand; } public void setBrand(Brand brand) { this.brand = brand; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getSerial() { return serial; } public void setSerial(String serial) { this.serial = serial; } @Parent public Boy getOwner() { return owner; } public void setOwner(Boy owner) { this.owner = owner; } public boolean equals(Object o) { if ( this == o ) return true; if ( o == null || getClass() != o.getClass() ) return false; final Toy toy = (Toy) o; if ( !brand.equals( toy.brand ) ) return false; if ( !name.equals( toy.name ) ) return false; if ( !serial.equals( toy.serial ) ) return false; return true; } public int hashCode() { int result; result = name.hashCode(); result = 29 * result + brand.hashCode(); return result; } }
Toy
java
quarkusio__quarkus
independent-projects/resteasy-reactive/client/processor/src/main/java/org/jboss/resteasy/reactive/client/processor/scanning/ClientEndpointIndexer.java
{ "start": 2640, "end": 4513 }
class ____ extends EndpointIndexer<ClientEndpointIndexer, ClientEndpointIndexer.ClientIndexedParam, ResourceMethod> { static final DotName CONTINUATION = DotName.createSimple("kotlin.coroutines.Continuation"); static final DotName CLIENT_EXCEPTION_MAPPER = DotName .createSimple("io.quarkus.rest.client.reactive.ClientExceptionMapper"); private final String[] defaultProduces; private final String[] defaultProducesNegotiated; private final boolean smartDefaultProduces; public ClientEndpointIndexer(AbstractBuilder builder, String defaultProduces, boolean smartDefaultProduces) { super(builder); this.defaultProduces = new String[] { defaultProduces }; this.defaultProducesNegotiated = new String[] { defaultProduces, MediaType.WILDCARD }; this.smartDefaultProduces = smartDefaultProduces; } public MaybeRestClientInterface createClientProxy(ClassInfo classInfo, String path) { try { RestClientInterface clazz = new RestClientInterface(); clazz.setClassName(classInfo.name().toString()); clazz.setEncoded(classInfo.hasDeclaredAnnotation(ENCODED)); if (path != null) { clazz.setPath(path); } List<ResourceMethod> methods = createEndpoints(classInfo, classInfo, new HashSet<>(), new HashSet<>(), clazz.getPathParameters(), clazz.getPath(), false); clazz.getMethods().addAll(methods); warnForUnsupportedAnnotations(classInfo); return MaybeRestClientInterface.success(clazz); } catch (Exception e) { //kinda bogus, but we just ignore failed interfaces for now //they can have methods that are not valid until they are actually extended by a concrete type log.warn("Ignoring
ClientEndpointIndexer
java
apache__logging-log4j2
log4j-core/src/main/java/org/apache/logging/log4j/core/selector/ContextSelector.java
{ "start": 2550, "end": 3014 }
class ____ of the caller. * @param loader ClassLoader to use or null. * @param currentContext If true returns the current Context, if false returns the Context appropriate * for the caller if a more appropriate Context can be determined. * @return The LoggerContext. */ LoggerContext getContext(String fqcn, ClassLoader loader, boolean currentContext); /** * Returns the LoggerContext. * @param fqcn The fully qualified
name
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/transaction/batch/AbstractBatchingTest.java
{ "start": 1108, "end": 1300 }
class ____ implements SettingProvider.Provider<String> { @Override public String getSetting() { return BatchBuilderLocal.class.getName(); } } public static
Batch2BuilderSettingProvider
java
quarkusio__quarkus
independent-projects/tools/analytics-common/src/main/java/io/quarkus/analytics/util/PropertyUtils.java
{ "start": 43, "end": 2471 }
class ____ { public static Integer getProperty(String propertyName, int defaultValue) { if (propertyName == null) { throw new IllegalArgumentException("Property name cannot be null"); } Integer result = Integer.getInteger(propertyName); try { if (result == null) { String stringValue = System.getenv(transformToEnvVarName(propertyName)); if (stringValue != null) { result = Integer.parseInt(stringValue); } else { result = defaultValue; } } } catch (NumberFormatException e) { result = defaultValue; } return result; } public static String getProperty(String propertyName, String defaultValue) { if (propertyName == null) { throw new IllegalArgumentException("Property name cannot be null"); } String result = System.getProperty(propertyName); try { if (result == null) { String stringValue = System.getenv(transformToEnvVarName(propertyName)); if (stringValue != null) { result = stringValue; } else { result = defaultValue; } } } catch (NumberFormatException e) { result = defaultValue; } return result; } public static boolean getProperty(String propertyName, boolean defaultValue) { if (propertyName == null) { throw new IllegalArgumentException("Property name cannot be null"); } boolean result; String systemValue = System.getProperty(propertyName); try { if (systemValue == null) { String envValue = System.getenv(transformToEnvVarName(propertyName)); if (envValue != null) { result = Boolean.parseBoolean(envValue); } else { result = defaultValue; } } else { result = Boolean.parseBoolean(systemValue); } } catch (NumberFormatException e) { result = defaultValue; } return result; } private static String transformToEnvVarName(String propertyName) { return propertyName.toUpperCase().replace('.', '_'); } }
PropertyUtils
java
apache__commons-lang
src/test/java/org/apache/commons/lang3/ValidateTest.java
{ "start": 52015, "end": 53241 }
class ____ { @Test void shouldNotThrowExceptionForMapContainingNullMapping() { Validate.notEmpty(Collections.singletonMap("key", null)); } @Test void shouldReturnTheSameInstance() { final Map<String, String> singletonMap = Collections.singletonMap("key", "value"); assertSame(singletonMap, Validate.notEmpty(singletonMap)); } @Test void shouldThrowIllegalArgumentExceptionWithDefaultMessageForEmptyMap() { final IllegalArgumentException ex = assertIllegalArgumentException(() -> Validate.notEmpty(Collections.emptyMap())); assertEquals("The validated map is empty", ex.getMessage()); } @Test void shouldThrowNullPointerExceptionWithDefaultMessageForNullMap() { final NullPointerException ex = assertNullPointerException(() -> Validate.notEmpty((Map<?, ?>) null)); assertEquals("The validated map is empty", ex.getMessage()); } } } } @Nested final
WithoutMessage
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/inject/AssistedInjectAndInjectOnConstructorsTest.java
{ "start": 3266, "end": 3429 }
class ____ { @javax.inject.Inject public TestClass2() {} } /** Class has a constructor with a @com.google.inject.Inject annotation. */ public
TestClass2
java
elastic__elasticsearch
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlSpMetadataAction.java
{ "start": 1862, "end": 4431 }
class ____ extends HandledTransportAction<SamlSpMetadataRequest, SamlSpMetadataResponse> { private final Realms realms; @Inject public TransportSamlSpMetadataAction(TransportService transportService, ActionFilters actionFilters, Realms realms) { super(SamlSpMetadataAction.NAME, transportService, actionFilters, SamlSpMetadataRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.realms = realms; } @Override protected void doExecute(Task task, SamlSpMetadataRequest request, ActionListener<SamlSpMetadataResponse> listener) { List<SamlRealm> realms = findSamlRealms(this.realms, request.getRealmName(), null); if (realms.isEmpty()) { listener.onFailure(SamlUtils.samlException("Cannot find any matching realm for [{}]", request.getRealmName())); } else if (realms.size() > 1) { listener.onFailure(SamlUtils.samlException("Found multiple matching realms [{}] for [{}]", realms, request.getRealmName())); } else { prepareMetadata(realms.get(0), listener); } } private void prepareMetadata(SamlRealm realm, ActionListener<SamlSpMetadataResponse> listener) { try { final EntityDescriptorMarshaller marshaller = new EntityDescriptorMarshaller(); final SpConfiguration spConfig = realm.getServiceProvider(); final SamlSpMetadataBuilder builder = new SamlSpMetadataBuilder(Locale.getDefault(), spConfig.getEntityId()) .assertionConsumerServiceUrl(spConfig.getAscUrl()) .singleLogoutServiceUrl(spConfig.getLogoutUrl()) .encryptionCredentials(spConfig.getEncryptionCredentials()) .signingCredential(spConfig.getSigningConfiguration().getCredential()) .authnRequestsSigned(spConfig.getSigningConfiguration().shouldSign(AuthnRequest.DEFAULT_ELEMENT_LOCAL_NAME)); final EntityDescriptor descriptor = builder.build(); final Element element = marshaller.marshall(descriptor); final StringWriter writer = new StringWriter(); final Transformer serializer = XmlUtils.getHardenedXMLTransformer(); serializer.transform(new DOMSource(element), new StreamResult(writer)); listener.onResponse(new 
SamlSpMetadataResponse(writer.toString())); } catch (Exception e) { logger.error(() -> "Error during SAML SP metadata generation for realm [" + realm.name() + "]", e); listener.onFailure(e); } } }
TransportSamlSpMetadataAction
java
spring-projects__spring-framework
spring-test/src/test/java/org/springframework/test/context/junit/jupiter/nested/ContextHierarchyTestClassScopedExtensionContextNestedTests.java
{ "start": 6266, "end": 6413 }
class ____ { @Bean String foo() { return QUX + 1; } @Bean String bar() { return BAZ + 1; } } @Configuration static
Child1Config
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/job/JobManagerJobConfigurationHandlerTest.java
{ "start": 1615, "end": 3009 }
class ____ { @Test void testRequestConfiguration() throws Exception { final Configuration configuration = new Configuration(); configuration.set(JobManagerOptions.ADDRESS, "address"); final JobManagerJobConfigurationHandler handler = new JobManagerJobConfigurationHandler( () -> null, TestingUtils.TIMEOUT, Collections.emptyMap(), JobManagerJobConfigurationHeaders.getInstance(), configuration); final ConfigurationInfo configurationInfo = handler.handleRequest( HandlerRequest.resolveParametersAndCreate( EmptyRequestBody.getInstance(), new JobMessageParameters(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyList()), new TestingRestfulGateway.Builder().build()) .get(); assertThat(configurationInfo.get(0).getKey()).isEqualTo(JobManagerOptions.ADDRESS.key()); assertThat(configurationInfo.get(0).getValue()).isEqualTo("address"); } }
JobManagerJobConfigurationHandlerTest
java
redisson__redisson
redisson/src/main/java/org/redisson/transaction/operation/bucket/BucketCompareAndSetOperation.java
{ "start": 1022, "end": 2325 }
class ____<V> extends TransactionalOperation { private V expected; private V value; private String lockName; private String transactionId; public BucketCompareAndSetOperation(String name, String lockName, Codec codec, V expected, V value, String transactionId, long threadId) { super(name, codec, threadId); this.expected = expected; this.value = value; this.lockName = lockName; this.transactionId = transactionId; } @Override public void commit(CommandAsyncExecutor commandExecutor) { RedissonBucket<V> bucket = new RedissonBucket<V>(codec, commandExecutor, name); bucket.compareAndSetAsync(expected, value); RedissonLock lock = new RedissonTransactionalLock(commandExecutor, lockName, transactionId); lock.unlockAsync(getThreadId()); } @Override public void rollback(CommandAsyncExecutor commandExecutor) { RedissonLock lock = new RedissonTransactionalLock(commandExecutor, lockName, transactionId); lock.unlockAsync(getThreadId()); } public V getExpected() { return expected; } public V getValue() { return value; } public String getLockName() { return lockName; } }
BucketCompareAndSetOperation
java
apache__kafka
streams/src/main/java/org/apache/kafka/streams/processor/api/FixedKeyRecord.java
{ "start": 1884, "end": 5460 }
class ____, but only * modify records they were handed by the framework. */ FixedKeyRecord(final K key, final V value, final long timestamp, final Headers headers) { this.key = key; this.value = value; if (timestamp < 0) { throw new StreamsException( "Malformed Record", new IllegalArgumentException("Timestamp may not be negative. Got: " + timestamp) ); } this.timestamp = timestamp; this.headers = new RecordHeaders(headers); } /** * The key of the record. May be null. */ public K key() { return key; } /** * The value of the record. May be null. */ public V value() { return value; } /** * The timestamp of the record. Will never be negative. */ public long timestamp() { return timestamp; } /** * The headers of the record. Never null. */ public Headers headers() { return headers; } /** * A convenient way to produce a new record if you only need to change the value. * * Copies the attributes of this record with the value replaced. * * @param value The value of the result record. * @param <NewV> The type of the new record's value. * @return A new Record instance with all the same attributes (except that the value is replaced). */ public <NewV> FixedKeyRecord<K, NewV> withValue(final NewV value) { return new FixedKeyRecord<>(key, value, timestamp, headers); } /** * A convenient way to produce a new record if you only need to change the timestamp. * * Copies the attributes of this record with the timestamp replaced. * * @param timestamp The timestamp of the result record. * @return A new Record instance with all the same attributes (except that the timestamp is replaced). */ public FixedKeyRecord<K, V> withTimestamp(final long timestamp) { return new FixedKeyRecord<>(key, value, timestamp, headers); } /** * A convenient way to produce a new record if you only need to change the headers. * * Copies the attributes of this record with the headers replaced. * Also makes a copy of the provided headers. 
* * See {@link FixedKeyProcessorContext#forward(FixedKeyRecord)} for * considerations around mutability of keys, values, and headers. * * @param headers The headers of the result record. * @return A new Record instance with all the same attributes (except that the headers are replaced). */ public FixedKeyRecord<K, V> withHeaders(final Headers headers) { return new FixedKeyRecord<>(key, value, timestamp, headers); } @Override public String toString() { return "FixedKeyRecord{" + "key=" + key + ", value=" + value + ", timestamp=" + timestamp + ", headers=" + headers + '}'; } @Override public boolean equals(final Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; final FixedKeyRecord<?, ?> record = (FixedKeyRecord<?, ?>) o; return timestamp == record.timestamp && Objects.equals(key, record.key) && Objects.equals(value, record.value) && Objects.equals(headers, record.headers); } @Override public int hashCode() { return Objects.hash(key, value, timestamp, headers); } }
directly
java
apache__maven
compat/maven-model-builder/src/main/java/org/apache/maven/model/path/ModelPathTranslator.java
{ "start": 1165, "end": 1749 }
interface ____ { /** * Resolves the well-known paths of the specified model against the given base directory. Paths within plugin * configuration are not processed. * * @param model The model whose paths should be resolved, may be {@code null}. * @param basedir The base directory to resolve relative paths against, may be {@code null}. * @param request The model building request that holds further settings, must not be {@code null}. */ void alignToBaseDirectory(Model model, File basedir, ModelBuildingRequest request); }
ModelPathTranslator
java
apache__camel
core/camel-management/src/main/java/org/apache/camel/management/mbean/ManagedCamelHealth.java
{ "start": 1716, "end": 6545 }
class ____ implements ManagedCamelHealthMBean { private final CamelContext context; private final HealthCheckRegistry healthCheckRegistry; public ManagedCamelHealth(CamelContext context, HealthCheckRegistry healthCheckRegistry) { this.context = context; this.healthCheckRegistry = healthCheckRegistry; } public void init(ManagementStrategy strategy) { // do nothing } public CamelContext getContext() { return context; } @Override public boolean isEnabled() { return healthCheckRegistry.isEnabled(); } @Override public boolean isHealthy() { for (HealthCheck.Result result : HealthCheckHelper.invoke(context)) { if (result.getState() == HealthCheck.State.DOWN) { return false; } } return true; } @Override public boolean isHealthyReadiness() { for (HealthCheck.Result result : HealthCheckHelper.invokeReadiness(context)) { if (result.getState() == HealthCheck.State.DOWN) { return false; } } return true; } @Override public boolean isHealthyLiveness() { for (HealthCheck.Result result : HealthCheckHelper.invokeLiveness(context)) { if (result.getState() == HealthCheck.State.DOWN) { return false; } } return true; } @Override public Collection<String> getHealthChecksIDs() { return healthCheckRegistry.getCheckIDs(); } @Override public TabularData details() { try { final TabularData answer = new TabularDataSupport(CamelOpenMBeanTypes.camelHealthDetailsTabularType()); final CompositeType type = CamelOpenMBeanTypes.camelHealthDetailsCompositeType(); for (HealthCheck.Result result : HealthCheckHelper.invoke(context)) { String failureUri = (String) result.getDetails().getOrDefault(HealthCheck.ENDPOINT_URI, ""); Integer failureCount = (Integer) result.getDetails().getOrDefault(HealthCheck.FAILURE_COUNT, 0); String stacktrace = ""; if (result.getError().isPresent()) { stacktrace = ExceptionHelper.stackTraceToString(result.getError().get()); } CompositeData data = new CompositeDataSupport( type, new String[] { "id", "group", "state", "enabled", "message", "failureUri", "failureCount", 
"failureStackTrace", "readiness", "liveness" }, new Object[] { result.getCheck().getId(), result.getCheck().getGroup(), result.getState().name(), result.getCheck().isEnabled(), result.getMessage().orElse(""), failureUri, failureCount, stacktrace, result.getCheck().isReadiness(), result.getCheck().isLiveness() }); answer.put(data); } return answer; } catch (Exception e) { throw RuntimeCamelException.wrapRuntimeCamelException(e); } } @Override public String invoke(String id) { Optional<HealthCheck.Result> result = HealthCheckHelper.invoke(context, id, Collections.emptyMap()); return result.map(r -> r.getState().name()).orElse(HealthCheck.State.UNKNOWN.name()); } @Override public void enableById(String id) { Optional<HealthCheck> hc = healthCheckRegistry.getCheck(id); if (hc.isPresent()) { hc.get().setEnabled(true); } else { Optional<HealthCheckRepository> hcr = healthCheckRegistry.getRepository(id); hcr.ifPresent(repository -> repository.setEnabled(true)); } } @Override public void disableById(String id) { Optional<HealthCheck> hc = healthCheckRegistry.getCheck(id); if (hc.isPresent()) { hc.get().setEnabled(false); } else { Optional<HealthCheckRepository> hcr = healthCheckRegistry.getRepository(id); hcr.ifPresent(repository -> repository.setEnabled(false)); } } }
ManagedCamelHealth
java
apache__camel
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/SpringAggregatorWithCustomStrategyTest.java
{ "start": 1117, "end": 1931 }
class ____ extends ContextTestSupport { @Test public void testSendingMessagesWithCustomAggregator() throws Exception { MockEndpoint resultEndpoint = resolveMandatoryEndpoint("mock:result", MockEndpoint.class); resultEndpoint.expectedBodiesReceived("message:1 message:2 message:3"); // lets send a large batch of messages for (int i = 1; i <= 3; i++) { String body = "message:" + i; template.sendBodyAndHeader("direct:start", body, "cheese", 123); } resultEndpoint.assertIsSatisfied(); } @Override protected CamelContext createCamelContext() throws Exception { return createSpringCamelContext(this, "org/apache/camel/spring/processor/aggregator-custom-strategy.xml"); } }
SpringAggregatorWithCustomStrategyTest
java
apache__avro
lang/java/avro/src/main/java/org/apache/avro/io/Decoder.java
{ "start": 1543, "end": 11178 }
/**
 * Low-level API for reading Avro-encoded data one value at a time.
 * Implementations may be stateful (schema-driven): every read/skip call then
 * also advances a parser, and invoking a method that does not match the type
 * of the next value raises {@link AvroTypeException}.
 */
class ____ {
  /**
   * "Reads" a null value. (Doesn't actually read anything, but advances the
   * state of the parser if the implementation is stateful.)
   *
   * @throws AvroTypeException If this is a stateful reader and null is not the
   *                           type of the next value to be read
   */
  public abstract void readNull() throws IOException;

  /**
   * Reads a boolean value written by {@link Encoder#writeBoolean}.
   *
   * @throws AvroTypeException If this is a stateful reader and boolean is not
   *                           the type of the next value to be read
   */
  public abstract boolean readBoolean() throws IOException;

  /**
   * Reads an integer written by {@link Encoder#writeInt}.
   *
   * @throws AvroTypeException If the encoded value is larger than 32 bits
   * @throws AvroTypeException If this is a stateful reader and int is not the
   *                           type of the next value to be read
   */
  public abstract int readInt() throws IOException;

  /**
   * Reads a long written by {@link Encoder#writeLong}.
   *
   * @throws AvroTypeException If this is a stateful reader and long is not the
   *                           type of the next value to be read
   */
  public abstract long readLong() throws IOException;

  /**
   * Reads a float written by {@link Encoder#writeFloat}.
   *
   * @throws AvroTypeException If this is a stateful reader and float is not
   *                           the type of the next value to be read
   */
  public abstract float readFloat() throws IOException;

  /**
   * Reads a double written by {@link Encoder#writeDouble}.
   *
   * @throws AvroTypeException If this is a stateful reader and double is not
   *                           the type of the next value to be read
   */
  public abstract double readDouble() throws IOException;

  /**
   * Reads a char-string written by {@link Encoder#writeString}, optionally
   * reusing {@code old} as the returned holder.
   *
   * @throws AvroTypeException If this is a stateful reader and char-string is
   *                           not the type of the next value to be read
   */
  public abstract Utf8 readString(Utf8 old) throws IOException;

  /**
   * Reads a char-string written by {@link Encoder#writeString}.
   *
   * @throws AvroTypeException If this is a stateful reader and char-string is
   *                           not the type of the next value to be read
   */
  public abstract String readString() throws IOException;

  /**
   * Discards a char-string written by {@link Encoder#writeString}.
   *
   * @throws AvroTypeException If this is a stateful reader and char-string is
   *                           not the type of the next value to be read
   */
  public abstract void skipString() throws IOException;

  /**
   * Reads a byte-string written by {@link Encoder#writeBytes}. If {@code old}
   * is not null and has sufficient capacity to take in the bytes being read,
   * the bytes are returned in {@code old}.
   *
   * @throws AvroTypeException If this is a stateful reader and byte-string is
   *                           not the type of the next value to be read
   */
  public abstract ByteBuffer readBytes(ByteBuffer old) throws IOException;

  /**
   * Discards a byte-string written by {@link Encoder#writeBytes}.
   *
   * @throws AvroTypeException If this is a stateful reader and byte-string is
   *                           not the type of the next value to be read
   */
  public abstract void skipBytes() throws IOException;

  /**
   * Reads a fixed-size binary object.
   *
   * @param bytes  The buffer to store the contents being read.
   * @param start  The position where the data needs to be written.
   * @param length The size of the binary object.
   * @throws AvroTypeException If this is a stateful reader and fixed-size
   *                           binary is not the type of the next value to be
   *                           read, or the length is incorrect.
   * @throws IOException
   */
  public abstract void readFixed(byte[] bytes, int start, int length) throws IOException;

  /**
   * A shorthand for {@code readFixed(bytes, 0, bytes.length)}.
   *
   * @throws AvroTypeException If this is a stateful reader and fixed-size
   *                           binary is not the type of the next value to be
   *                           read, or the length is incorrect.
   * @throws IOException
   */
  public void readFixed(byte[] bytes) throws IOException {
    readFixed(bytes, 0, bytes.length);
  }

  /**
   * Discards a fixed-size binary object.
   *
   * @param length The size of the binary object to be skipped.
   * @throws AvroTypeException If this is a stateful reader and fixed-size
   *                           binary is not the type of the next value to be
   *                           read, or the length is incorrect.
   * @throws IOException
   */
  public abstract void skipFixed(int length) throws IOException;

  /**
   * Reads an enumeration.
   *
   * @return The enumeration's value.
   * @throws AvroTypeException If this is a stateful reader and enumeration is
   *                           not the type of the next value to be read.
   * @throws IOException
   */
  public abstract int readEnum() throws IOException;

  /**
   * Reads and returns the size of the first block of an array. If this method
   * returns non-zero, then the caller should read the indicated number of
   * items, and then call {@link #arrayNext} to find out the number of items in
   * the next block. The typical pattern for consuming an array looks like:
   *
   * <pre>
   * for (long i = in.readArrayStart(); i != 0; i = in.arrayNext()) {
   *   for (long j = 0; j &lt; i; j++) {
   *     read next element of the array;
   *   }
   * }
   * </pre>
   *
   * @throws AvroTypeException If this is a stateful reader and array is not
   *                           the type of the next value to be read
   */
  public abstract long readArrayStart() throws IOException;

  /**
   * Processes the next block of an array and returns the number of items in
   * the block, letting the caller read those items.
   *
   * @throws AvroTypeException When called outside an array context
   */
  public abstract long arrayNext() throws IOException;

  /**
   * Used for quickly skipping through an array. Note you can either skip the
   * entire array, or read the entire array (with {@link #readArrayStart}), but
   * you can't mix the two on the same array.
   *
   * This method will skip through as many items as it can, all of them if
   * possible. It will return zero if there are no more items to skip through,
   * or an item count if it needs the client's help in skipping. The typical
   * usage pattern is:
   *
   * <pre>
   * for (long i = in.skipArray(); i != 0; i = in.skipArray()) {
   *   for (long j = 0; j &lt; i; j++) {
   *     read and discard the next element of the array;
   *   }
   * }
   * </pre>
   *
   * Note that this method can automatically skip through items if a byte-count
   * is found in the underlying data, or if a schema has been provided to the
   * implementation, but otherwise the client will have to skip through items
   * itself.
   *
   * @throws AvroTypeException If this is a stateful reader and array is not
   *                           the type of the next value to be read
   */
  public abstract long skipArray() throws IOException;

  /**
   * Reads and returns the size of the next block of map-entries. Similar to
   * {@link #readArrayStart}.
   *
   * As an example, let's say you want to read a map of records, the record
   * consisting of a Long field and a Boolean field. Your code would look
   * something like this:
   *
   * <pre>
   * Map&lt;String, Record&gt; m = new HashMap&lt;String, Record&gt;();
   * Record reuse = new Record();
   * for (long i = in.readMapStart(); i != 0; i = in.mapNext()) {
   *   for (long j = 0; j &lt; i; j++) {
   *     String key = in.readString();
   *     reuse.intField = in.readInt();
   *     reuse.boolField = in.readBoolean();
   *     m.put(key, reuse);
   *   }
   * }
   * </pre>
   *
   * @throws AvroTypeException If this is a stateful reader and map is not the
   *                           type of the next value to be read
   */
  public abstract long readMapStart() throws IOException;

  /**
   * Processes the next block of map entries and returns the count of them.
   * Similar to {@link #arrayNext}. See {@link #readMapStart} for details.
   *
   * @throws AvroTypeException When called outside a map context
   */
  public abstract long mapNext() throws IOException;

  /**
   * Support for quickly skipping through a map, similar to {@link #skipArray}.
   *
   * As an example, let's say you want to skip a map of records, the record
   * consisting of a Long field and a Boolean field. Your code would look
   * something like this:
   *
   * <pre>
   * for (long i = in.skipMap(); i != 0; i = in.skipMap()) {
   *   for (long j = 0; j &lt; i; j++) {
   *     in.skipString(); // Discard key
   *     in.readInt(); // Discard int-field of value
   *     in.readBoolean(); // Discard boolean-field of value
   *   }
   * }
   * </pre>
   *
   * @throws AvroTypeException If this is a stateful reader and map is not the
   *                           type of the next value to be read
   */
  public abstract long skipMap() throws IOException;

  /**
   * Reads the tag of a union written by {@link Encoder#writeIndex}.
   *
   * @throws AvroTypeException If this is a stateful reader and union is not
   *                           the type of the next value to be read
   */
  public abstract int readIndex() throws IOException;
}
Decoder
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/transport/RawIndexingDataTransportRequest.java
{ "start": 817, "end": 933 }
/**
 * Marker mixin for transport requests that carry raw (pre-serialized)
 * indexing data. Implementing the interface opts in; the default answer
 * is unconditionally {@code true}.
 */
interface ____ {

    /** @return {@code true} by default; implementors may override to opt out. */
    default boolean isRawIndexingData() {
        return true;
    }
}
RawIndexingDataTransportRequest
java
spring-projects__spring-framework
spring-test/src/main/java/org/springframework/test/context/support/AbstractContextLoader.java
{ "start": 9412, "end": 10560 }
class ____ which the locations are associated: to be * used when generating default locations * @param locations the unmodified locations to use for loading the * application context (can be {@code null} or empty) * @return a processed array of application context resource locations * @since 2.5 * @see #isGenerateDefaultLocations() * @see #generateDefaultLocations(Class) * @see #modifyLocations(Class, String...) * @see org.springframework.test.context.ContextLoader#processLocations(Class, String...) * @see #processContextConfiguration(ContextConfigurationAttributes) */ @Override @SuppressWarnings("deprecation") public final String[] processLocations(Class<?> clazz, String... locations) { return processLocationsInternal(clazz, locations); } private String[] processLocationsInternal(Class<?> clazz, String... locations) { return (ObjectUtils.isEmpty(locations) && isGenerateDefaultLocations()) ? generateDefaultLocations(clazz) : modifyLocations(clazz, locations); } /** * Generate the default classpath resource locations array based on the * supplied class. * <p>For example, if the supplied
with
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/TestSchedulerPlanFollowerBase.java
{ "start": 2663, "end": 8902 }
/**
 * Shared driver for scheduler-specific PlanFollower tests. Builds an in-memory
 * reservation {@code Plan}, advances a mocked clock through the reservations'
 * lifetimes, runs the plan follower at each step, and asserts which reservation
 * queues exist and where applications end up (moved to the default reservation
 * queue, or killed, depending on {@code isMove}). Concrete subclasses plug in
 * the scheduler via the abstract hooks at the bottom.
 */
class ____ {
  final static int GB = 1024;
  // Mocked clock driven explicitly via when(mClock.getTime()) below.
  protected Clock mClock = null;
  // Scheduler under test; supplied by the concrete subclass.
  protected ResourceScheduler scheduler = null;
  protected ReservationAgent mAgent;
  protected Resource minAlloc = Resource.newInstance(GB, 1);
  protected Resource maxAlloc = Resource.newInstance(GB * 8, 8);
  protected CapacityOverTimePolicy policy = new CapacityOverTimePolicy();
  protected Plan plan;
  private ResourceCalculator res = new DefaultResourceCalculator();
  private RMContext context = ReservationSystemTestUtil.createMockRMContext();

  /**
   * Runs the full plan-follower scenario.
   *
   * @param isMove when true, apps in an expired reservation queue are expected
   *               to be moved to the default reservation queue; when false they
   *               are expected to be killed
   */
  protected void testPlanFollower(boolean isMove) throws PlanningException,
      InterruptedException, AccessControlException {
    // Initialize plan based on move flag
    plan =
        new InMemoryPlan(scheduler.getRootQueueMetrics(), policy, mAgent,
            scheduler.getClusterResource(), 1L, res,
            scheduler.getMinimumResourceCapability(), maxAlloc, "dedicated",
            null, isMove, context);

    // add a few reservations to the plan:
    // r1 at [0,5), r2 at [3,8) (same shape as r1), r3 at [10,15)
    long ts = System.currentTimeMillis();
    ReservationId r1 = ReservationId.newInstance(ts, 1);
    int[] f1 = { 10, 10, 10, 10, 10 };
    ReservationDefinition rDef =
        ReservationSystemTestUtil.createSimpleReservationDefinition(
            0, 0 + f1.length + 1, f1.length);
    assertTrue(plan.addReservation(new InMemoryReservationAllocation(r1, rDef,
        "u3", "dedicated", 0, 0 + f1.length, ReservationSystemTestUtil
            .generateAllocation(0L, 1L, f1), res, minAlloc), false),
        plan.toString());

    ReservationId r2 = ReservationId.newInstance(ts, 2);
    assertTrue(plan.addReservation(new InMemoryReservationAllocation(r2, rDef,
        "u3", "dedicated", 3, 3 + f1.length, ReservationSystemTestUtil
            .generateAllocation(3L, 1L, f1), res, minAlloc), false),
        plan.toString());

    ReservationId r3 = ReservationId.newInstance(ts, 3);
    int[] f2 = { 0, 10, 20, 10, 0 };
    assertTrue(plan.addReservation(new InMemoryReservationAllocation(r3, rDef,
        "u4", "dedicated", 10, 10 + f2.length, ReservationSystemTestUtil
            .generateAllocation(10L, 1L, f2), res, minAlloc), false),
        plan.toString());

    // default reservation queue should exist before run of PlanFollower AND
    // have no apps
    checkDefaultQueueBeforePlanFollowerRun();

    AbstractSchedulerPlanFollower planFollower = createPlanFollower();

    // t=0: only r1 is active, so only its queue should be created
    when(mClock.getTime()).thenReturn(0L);
    planFollower.run();

    Queue q = getReservationQueue(r1.toString());
    assertReservationQueueExists(r1);

    // submit an app to r1
    String user_0 = "test-user";
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    ApplicationAttemptId appAttemptId_0 =
        ApplicationAttemptId.newInstance(appId, 0);
    AppAddedSchedulerEvent addAppEvent;
    if (scheduler instanceof FairScheduler) {
      // FairScheduler requires an explicit placement context
      addAppEvent =
          new AppAddedSchedulerEvent(appId, q.getQueueName(), user_0,
              new ApplicationPlacementContext("dedicated"));
    } else {
      addAppEvent = new AppAddedSchedulerEvent(appId, q.getQueueName(), user_0);
    }
    scheduler.handle(addAppEvent);
    AppAttemptAddedSchedulerEvent appAttemptAddedEvent =
        new AppAttemptAddedSchedulerEvent(appAttemptId_0, false);
    scheduler.handle(appAttemptAddedEvent);

    // initial default reservation queue should have no apps after first run
    Queue defQ = getDefaultQueue();
    assertEquals(0, getNumberOfApplications(defQ));

    assertReservationQueueExists(r1, 0.1, 0.1);
    assertEquals(1, getNumberOfApplications(q));

    assertReservationQueueDoesNotExist(r2);
    assertReservationQueueDoesNotExist(r3);

    // t=3: r2 becomes active alongside r1
    when(mClock.getTime()).thenReturn(3L);
    planFollower.run();

    assertEquals(0, getNumberOfApplications(defQ));
    assertReservationQueueExists(r1, 0.1, 0.1);
    assertEquals(1, getNumberOfApplications(q));
    assertReservationQueueExists(r2, 0.1, 0.1);
    assertReservationQueueDoesNotExist(r3);

    // t=10: r1 and r2 have expired, r3 begins
    when(mClock.getTime()).thenReturn(10L);
    planFollower.run();

    q = getReservationQueue(r1.toString());
    if (isMove) {
      // app should have been moved to default reservation queue
      assertEquals(1, getNumberOfApplications(defQ));
      assertNull(q);
    } else {
      // app should be killed
      assertEquals(0, getNumberOfApplications(defQ));
      assertNotNull(q);
      AppAttemptRemovedSchedulerEvent appAttemptRemovedEvent =
          new AppAttemptRemovedSchedulerEvent(appAttemptId_0,
              RMAppAttemptState.KILLED, false);
      scheduler.handle(appAttemptRemovedEvent);
    }
    assertReservationQueueDoesNotExist(r2);
    assertReservationQueueExists(r3, 0, 1.0);

    // t=11..12: r3's capacity ramps up per its allocation profile f2
    when(mClock.getTime()).thenReturn(11L);
    planFollower.run();

    if (isMove) {
      // app should have been moved to default reservation queue
      assertEquals(1, getNumberOfApplications(defQ));
    } else {
      // app should be killed
      assertEquals(0, getNumberOfApplications(defQ));
    }
    assertReservationQueueDoesNotExist(r1);
    assertReservationQueueDoesNotExist(r2);
    assertReservationQueueExists(r3, 0.1, 0.1);

    when(mClock.getTime()).thenReturn(12L);
    planFollower.run();

    assertReservationQueueDoesNotExist(r1);
    assertReservationQueueDoesNotExist(r2);
    assertReservationQueueExists(r3, 0.2, 0.2);

    // t=16: everything has expired; default queue absorbs all capacity
    when(mClock.getTime()).thenReturn(16L);
    planFollower.run();

    assertReservationQueueDoesNotExist(r1);
    assertReservationQueueDoesNotExist(r2);
    assertReservationQueueDoesNotExist(r3);
    verifyCapacity(defQ);
  }

  // --- scheduler-specific hooks implemented by concrete subclasses ---

  protected abstract void checkDefaultQueueBeforePlanFollowerRun();

  protected abstract Queue getReservationQueue(String reservationId);

  protected abstract void verifyCapacity(Queue defQ);

  protected abstract Queue getDefaultQueue();

  protected abstract int getNumberOfApplications(Queue queue);

  protected abstract AbstractSchedulerPlanFollower createPlanFollower();

  protected abstract void assertReservationQueueExists(ReservationId r);

  protected abstract void assertReservationQueueExists(ReservationId r2,
      double expectedCapacity, double expectedMaxCapacity);

  protected abstract void assertReservationQueueDoesNotExist(ReservationId r2);
}
TestSchedulerPlanFollowerBase
java
elastic__elasticsearch
modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java
{ "start": 125606, "end": 126231 }
// ANTLR-generated labeled-alternative context: a "primary" expression that is
// a list initializer (e.g. [1, 2, 3]).
class ____ extends PrimaryContext {
    // Accessor for the child listinitializer rule, generated by ANTLR.
    public ListinitializerContext listinitializer() {
        return getRuleContext(ListinitializerContext.class, 0);
    }

    public ListinitContext(PrimaryContext ctx) {
        copyFrom(ctx);
    }

    @Override
    public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
        // Dispatch to the Painless-specific visitor when available; otherwise
        // fall back to the generic child-visiting behavior.
        if (visitor instanceof PainlessParserVisitor) return ((PainlessParserVisitor<? extends T>) visitor).visitListinit(this);
        else return visitor.visitChildren(this);
    }
}

@SuppressWarnings("CheckReturnValue")
public static
ListinitContext
java
apache__camel
components/camel-micrometer/src/main/java/org/apache/camel/component/micrometer/MicrometerComponent.java
{ "start": 1411, "end": 4253 }
/**
 * Camel component for Micrometer metrics endpoints. The endpoint URI remainder
 * has the form {@code [type:]name}: the optional prefix before the colon
 * selects the meter type (defaulting to COUNTER), the rest is the meter name.
 */
class ____ extends DefaultComponent {

    // Meter type used when the URI does not carry a "type:" prefix.
    public static final Meter.Type DEFAULT_METER_TYPE = Meter.Type.COUNTER;

    private static final Logger LOG = LoggerFactory.getLogger(MicrometerComponent.class);

    @Metadata(label = "advanced")
    private MeterRegistry metricsRegistry;

    public MicrometerComponent() {
    }

    @Override
    protected void doInit() throws Exception {
        super.doInit();
        // If no registry was configured, look one up in (or add one to) the
        // Camel registry under the well-known name.
        if (metricsRegistry == null) {
            Registry camelRegistry = getCamelContext().getRegistry();
            metricsRegistry = MicrometerUtils.getOrCreateMeterRegistry(camelRegistry, MicrometerConstants.METRICS_REGISTRY_NAME);
        }
    }

    @Override
    protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
        String metricsName = getMetricsName(remaining);
        Meter.Type metricsType = getMetricsType(remaining);
        // backwards compatible: the old single-valued "tags" parameter is still
        // parsed, but its use is flagged as deprecated below.
        Map<String, String> map = getTags(parameters);
        if (map != null && !map.isEmpty()) {
            LOG.warn(
                    "Deprecated tags=key1=value&key2=value2 parameter in use. Migrate to use multi-valued tags.key1=value1&tags.key2=value2 syntax");
        }
        LOG.debug("Metrics type: {}; name: {}", metricsType, metricsName);
        MicrometerEndpoint endpoint = new MicrometerEndpoint(uri, this, metricsRegistry, metricsType, metricsName);
        if (map != null) {
            endpoint.setTags(map);
        }
        setProperties(endpoint, parameters);
        return endpoint;
    }

    /**
     * Parses the legacy comma-separated "tags" query parameter
     * ({@code tags=k1=v1,k2=v2}) into a map; returns null when absent.
     * Entries that are not exactly key=value pairs are silently dropped.
     */
    Map<String, String> getTags(Map<String, Object> parameters) {
        String tagsString = getAndRemoveParameter(parameters, "tags", String.class, "");
        if (tagsString != null && !tagsString.isEmpty()) {
            Map<String, String> answer = new HashMap<>();
            for (String tag : tagsString.split("\\s*,\\s*")) {
                String[] e = tag.split("\\s*=\\s*");
                if (e.length == 2) {
                    answer.put(e[0], e[1]);
                }
            }
            return answer;
        }
        return null;
    }

    // Part after the first ':' in the URI remainder; the whole remainder when
    // no colon is present.
    String getMetricsName(String remaining) {
        String name = StringHelper.after(remaining, ":");
        return name == null ? remaining : name;
    }

    // Part before the first ':' resolved to a Meter.Type; DEFAULT_METER_TYPE
    // when no colon is present.
    Meter.Type getMetricsType(String remaining) {
        String type = StringHelper.before(remaining, ":");
        return type == null
                ? DEFAULT_METER_TYPE
                : MicrometerUtils.getByName(type);
    }

    public MeterRegistry getMetricsRegistry() {
        return metricsRegistry;
    }

    /**
     * To use a custom configured MetricRegistry.
     */
    public void setMetricsRegistry(MeterRegistry metricsRegistry) {
        this.metricsRegistry = metricsRegistry;
    }
}
MicrometerComponent
java
netty__netty
common/src/test/java/io/netty/util/NettyRuntimeTests.java
{ "start": 1196, "end": 7383 }
/**
 * Tests for {@code NettyRuntime.AvailableProcessorsHolder}: its set-once
 * semantics, behavior under racing get/set threads, and its fallback order
 * (the io.netty.availableProcessors system property, then
 * Runtime#availableProcessors).
 */
class ____ {

    // Setting a non-positive processor count must be rejected.
    @Test
    public void testIllegalSet() {
        final NettyRuntime.AvailableProcessorsHolder holder = new NettyRuntime.AvailableProcessorsHolder();
        for (final int i : new int[] { -1, 0 }) {
            try {
                holder.setAvailableProcessors(i);
                fail();
            } catch (final IllegalArgumentException e) {
                assertThat(e.getMessage()).contains("(expected: > 0)");
            }
        }
    }

    // The value may only be set once; a second set fails.
    @Test
    public void testMultipleSets() {
        final NettyRuntime.AvailableProcessorsHolder holder = new NettyRuntime.AvailableProcessorsHolder();
        holder.setAvailableProcessors(1);
        try {
            holder.setAvailableProcessors(2);
            fail();
        } catch (final IllegalStateException e) {
            assertThat(e.getMessage()).contains("availableProcessors is already set to [1], rejecting [2]");
        }
    }

    // A read fixes the value, so a subsequent set must fail too.
    @Test
    public void testSetAfterGet() {
        final NettyRuntime.AvailableProcessorsHolder holder = new NettyRuntime.AvailableProcessorsHolder();
        holder.availableProcessors();
        try {
            holder.setAvailableProcessors(1);
            fail();
        } catch (final IllegalStateException e) {
            assertThat(e.getMessage()).contains("availableProcessors is already set");
        }
    }

    // Two concurrent reads must never throw: the barrier lines both reader
    // threads up with the main thread before and after the read.
    @Test
    public void testRacingGetAndGet() throws InterruptedException {
        final NettyRuntime.AvailableProcessorsHolder holder = new NettyRuntime.AvailableProcessorsHolder();
        final CyclicBarrier barrier = new CyclicBarrier(3);

        final AtomicReference<IllegalStateException> firstReference = new AtomicReference<IllegalStateException>();
        final Runnable firstTarget = getRunnable(holder, barrier, firstReference);
        final Thread firstGet = new Thread(firstTarget);
        firstGet.start();

        final AtomicReference<IllegalStateException> secondRefernce = new AtomicReference<IllegalStateException>();
        final Runnable secondTarget = getRunnable(holder, barrier, secondRefernce);
        final Thread secondGet = new Thread(secondTarget);
        secondGet.start();

        // release the hounds
        await(barrier);

        // wait for the hounds
        await(barrier);

        firstGet.join();
        secondGet.join();

        // Neither concurrent read may have observed an IllegalStateException.
        assertNull(firstReference.get());
        assertNull(secondRefernce.get());
    }

    // Builds a reader task that records any IllegalStateException it sees into
    // the supplied reference, synchronizing on the barrier before and after.
    private static Runnable getRunnable(
            final NettyRuntime.AvailableProcessorsHolder holder,
            final CyclicBarrier barrier,
            final AtomicReference<IllegalStateException> reference) {
        return new Runnable() {
            @Override
            public void run() {
                await(barrier);
                try {
                    holder.availableProcessors();
                } catch (final IllegalStateException e) {
                    reference.set(e);
                }
                await(barrier);
            }
        };
    }

    // Race a read against a set: either the set loses (IllegalStateException
    // recorded) or it wins and the holder reports the value that was set.
    @Test
    public void testRacingGetAndSet() throws InterruptedException {
        final NettyRuntime.AvailableProcessorsHolder holder = new NettyRuntime.AvailableProcessorsHolder();
        final CyclicBarrier barrier = new CyclicBarrier(3);
        final Thread get = new Thread(new Runnable() {
            @Override
            public void run() {
                await(barrier);
                holder.availableProcessors();
                await(barrier);
            }
        });
        get.start();

        final AtomicReference<IllegalStateException> setException = new AtomicReference<IllegalStateException>();
        final Thread set = new Thread(new Runnable() {
            @Override
            public void run() {
                await(barrier);
                try {
                    holder.setAvailableProcessors(2048);
                } catch (final IllegalStateException e) {
                    setException.set(e);
                }
                await(barrier);
            }
        });
        set.start();

        // release the hounds
        await(barrier);

        // wait for the hounds
        await(barrier);

        get.join();
        set.join();

        if (setException.get() == null) {
            // set won the race: the holder must report the value it was given
            assertEquals(2048, holder.availableProcessors());
        } else {
            // get won the race: the set must have been rejected
            assertNotNull(setException.get());
        }
    }

    // The system property, when present, overrides the runtime value; the
    // original property value is restored in finally either way.
    @Test
    public void testGetWithSystemProperty() {
        final String availableProcessorsSystemProperty = SystemPropertyUtil.get("io.netty.availableProcessors");
        try {
            System.setProperty("io.netty.availableProcessors", "2048");
            final NettyRuntime.AvailableProcessorsHolder holder = new NettyRuntime.AvailableProcessorsHolder();
            assertEquals(2048, holder.availableProcessors());
        } finally {
            if (availableProcessorsSystemProperty != null) {
                System.setProperty("io.netty.availableProcessors", availableProcessorsSystemProperty);
            } else {
                System.clearProperty("io.netty.availableProcessors");
            }
        }
    }

    // Without the system property, the holder falls back to the JVM's count.
    @Test
    @SuppressForbidden(reason = "testing fallback to Runtime#availableProcessors")
    public void testGet() {
        final String availableProcessorsSystemProperty = SystemPropertyUtil.get("io.netty.availableProcessors");
        try {
            System.clearProperty("io.netty.availableProcessors");
            final NettyRuntime.AvailableProcessorsHolder holder = new NettyRuntime.AvailableProcessorsHolder();
            assertEquals(Runtime.getRuntime().availableProcessors(), holder.availableProcessors());
        } finally {
            if (availableProcessorsSystemProperty != null) {
                System.setProperty("io.netty.availableProcessors", availableProcessorsSystemProperty);
            } else {
                System.clearProperty("io.netty.availableProcessors");
            }
        }
    }

    // Barrier await that converts interruption/breakage into a test failure.
    private static void await(final CyclicBarrier barrier) {
        try {
            barrier.await();
        } catch (InterruptedException | BrokenBarrierException e) {
            fail(e.toString());
        }
    }
}
NettyRuntimeTests
java
elastic__elasticsearch
x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/ssl/SslEntitlementRestIT.java
{ "start": 995, "end": 3180 }
/**
 * Integration test: when transport SSL is configured with key/certificate
 * paths the entitlement system does not permit reading, node startup must
 * fail and the server log must explain that file access was blocked.
 */
class ____ extends ESRestTestCase {

    // Mutable so the bad SSL paths can be injected before the restart below.
    private static final MutableSettingsProvider settingsProvider = new MutableSettingsProvider();

    @ClassRule
    public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
        .apply(SecurityOnTrialLicenseRestTestCase.commonTrialSecurityClusterConfig)
        .settings(settingsProvider)
        .build();

    @Override
    protected String getTestRestCluster() {
        return cluster.getHttpAddresses();
    }

    public void testSslEntitlementInaccessiblePath() throws IOException {
        // Point transport SSL at paths outside any entitled directory.
        settingsProvider.put("xpack.security.transport.ssl.key", "/bad/path/transport.key");
        settingsProvider.put("xpack.security.transport.ssl.certificate", "/bad/path/transport.crt");
        // The restart is expected to fail because the SSL files can't be read.
        expectThrows(Exception.class, () -> cluster.restart(false));
        // Scan every node's server log for the blocked-file-access message.
        AtomicBoolean found = new AtomicBoolean(false);
        for (int i = 0; i < cluster.getNumNodes(); i++) {
            try (InputStream log = cluster.getNodeLog(i, LogType.SERVER)) {
                Streams.readAllLines(log, line -> {
                    if (line.contains("failed to load SSL configuration")
                        && line.contains("because access to read the file is blocked")) {
                        found.set(true);
                    }
                });
            }
        }
        assertThat("cluster logs did not include events of blocked file access", found.get(), is(true));
    }

    @Override
    protected Settings restAdminSettings() {
        String token = basicAuthHeaderValue("admin_user", new SecureString("admin-password".toCharArray()));
        return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build();
    }

    @Override
    protected Settings restClientSettings() {
        String token = basicAuthHeaderValue("admin_user", new SecureString("admin-password".toCharArray()));
        return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build();
    }

    @Override
    protected boolean preserveClusterUponCompletion() {
        // as the cluster is dead its state can not be wiped successfully so we have to bypass wiping the cluster
        return true;
    }
}
SslEntitlementRestIT
java
spring-projects__spring-framework
spring-context/src/test/java/org/springframework/context/annotation/Spr11202Tests.java
{ "start": 2866, "end": 3269 }
/**
 * FactoryBean that hands out one shared {@link Foo} singleton, assigning it
 * the name "foo" once the container has finished setting properties.
 */
class ____ implements FactoryBean<Foo>, InitializingBean {

    private final Foo instance = new Foo();

    @Override
    public Foo getObject() {
        return this.instance;
    }

    @Override
    public Class<?> getObjectType() {
        return Foo.class;
    }

    @Override
    public boolean isSingleton() {
        // Every getObject() call returns the same Foo.
        return true;
    }

    @Override
    public void afterPropertiesSet() {
        this.instance.name = "foo";
    }
}

protected static
FooFactoryBean
java
google__truth
extensions/proto/src/main/java/com/google/common/truth/extensions/proto/ProtoFluentAssertion.java
{ "start": 1300, "end": 21126 }
interface ____ { /** * Specifies that the 'has' bit of individual fields should be ignored when comparing for * equality. * * <p>For version 2 Protocol Buffers, this setting determines whether two protos with the same * value for a field compare equal if one explicitly sets the value, and the other merely * implicitly uses the schema-defined default. This setting also determines whether unknown fields * should be considered in the comparison. By {@code ignoringFieldAbsence()}, unknown fields are * ignored, and value-equal fields as specified above are considered equal. * * <p>For version 3 Protocol Buffers, this setting does not affect primitive fields, because their * default value is indistinguishable from unset. */ ProtoFluentAssertion ignoringFieldAbsence(); /** * Specifies that the 'has' bit of these explicitly specified top-level field numbers should be * ignored when comparing for equality. Sub-fields must be specified explicitly (via {@link * FieldDescriptor}) if they are to be ignored as well. * * <p>Use {@link #ignoringFieldAbsence()} instead to ignore the 'has' bit for all fields. * * @see #ignoringFieldAbsence() for details */ ProtoFluentAssertion ignoringFieldAbsenceOfFields(int firstFieldNumber, int... rest); /** * Specifies that the 'has' bit of these explicitly specified top-level field numbers should be * ignored when comparing for equality. Sub-fields must be specified explicitly (via {@link * FieldDescriptor}) if they are to be ignored as well. * * <p>Use {@link #ignoringFieldAbsence()} instead to ignore the 'has' bit for all fields. * * @see #ignoringFieldAbsence() for details */ ProtoFluentAssertion ignoringFieldAbsenceOfFields(Iterable<Integer> fieldNumbers); /** * Specifies that the 'has' bit of these explicitly specified field descriptors should be ignored * when comparing for equality. Sub-fields must be specified explicitly if they are to be ignored * as well. 
* * <p>Use {@link #ignoringFieldAbsence()} instead to ignore the 'has' bit for all fields. * * @see #ignoringFieldAbsence() for details */ ProtoFluentAssertion ignoringFieldAbsenceOfFieldDescriptors( FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest); /** * Specifies that the 'has' bit of these explicitly specified field descriptors should be ignored * when comparing for equality. Sub-fields must be specified explicitly if they are to be ignored * as well. * * <p>Use {@link #ignoringFieldAbsence()} instead to ignore the 'has' bit for all fields. * * @see #ignoringFieldAbsence() for details */ ProtoFluentAssertion ignoringFieldAbsenceOfFieldDescriptors( Iterable<FieldDescriptor> fieldDescriptors); /** * Specifies that the ordering of repeated fields, at all levels, should be ignored when comparing * for equality. * * <p>This setting applies to all repeated fields recursively, but it does not ignore structure. * For example, with {@link #ignoringRepeatedFieldOrder()}, a repeated {@code int32} field {@code * bar}, set inside a repeated message field {@code foo}, the following protos will all compare * equal: * * <pre>{@code * message1: { * foo: { * bar: 1 * bar: 2 * } * foo: { * bar: 3 * bar: 4 * } * } * * message2: { * foo: { * bar: 2 * bar: 1 * } * foo: { * bar: 4 * bar: 3 * } * } * * message3: { * foo: { * bar: 4 * bar: 3 * } * foo: { * bar: 2 * bar: 1 * } * } * }</pre> * * <p>However, the following message will compare equal to none of these: * * <pre>{@code * message4: { * foo: { * bar: 1 * bar: 3 * } * foo: { * bar: 2 * bar: 4 * } * } * }</pre> * * <p>This setting does not apply to map fields, for which field order is always ignored. The * serialization order of map fields is undefined, and it may change from runtime to runtime. */ ProtoFluentAssertion ignoringRepeatedFieldOrder(); /** * Specifies that the ordering of repeated fields for these explicitly specified top-level field * numbers should be ignored when comparing for equality. 
Sub-fields must be specified explicitly * (via {@link FieldDescriptor}) if their orders are to be ignored as well. * * <p>Use {@link #ignoringRepeatedFieldOrder()} instead to ignore order for all fields. * * @see #ignoringRepeatedFieldOrder() for details. */ ProtoFluentAssertion ignoringRepeatedFieldOrderOfFields(int firstFieldNumber, int... rest); /** * Specifies that the ordering of repeated fields for these explicitly specified top-level field * numbers should be ignored when comparing for equality. Sub-fields must be specified explicitly * (via {@link FieldDescriptor}) if their orders are to be ignored as well. * * <p>Use {@link #ignoringRepeatedFieldOrder()} instead to ignore order for all fields. * * @see #ignoringRepeatedFieldOrder() for details. */ ProtoFluentAssertion ignoringRepeatedFieldOrderOfFields(Iterable<Integer> fieldNumbers); /** * Specifies that the ordering of repeated fields for these explicitly specified field descriptors * should be ignored when comparing for equality. Sub-fields must be specified explicitly if their * orders are to be ignored as well. * * <p>Use {@link #ignoringRepeatedFieldOrder()} instead to ignore order for all fields. * * @see #ignoringRepeatedFieldOrder() for details. */ ProtoFluentAssertion ignoringRepeatedFieldOrderOfFieldDescriptors( FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest); /** * Specifies that the ordering of repeated fields for these explicitly specified field descriptors * should be ignored when comparing for equality. Sub-fields must be specified explicitly if their * orders are to be ignored as well. * * <p>Use {@link #ignoringRepeatedFieldOrder()} instead to ignore order for all fields. * * @see #ignoringRepeatedFieldOrder() for details. 
*/ ProtoFluentAssertion ignoringRepeatedFieldOrderOfFieldDescriptors( Iterable<FieldDescriptor> fieldDescriptors); /** * Specifies that, for all repeated and map fields, any elements in the 'actual' proto which are * not found in the 'expected' proto are ignored, with the exception of fields in the expected * proto which are empty. To ignore empty repeated fields as well, use {@link * #comparingExpectedFieldsOnly}. * * <p>This rule is applied independently from {@link #ignoringRepeatedFieldOrder}. If ignoring * repeated field order AND extra repeated field elements, all that is tested is that the expected * elements comprise a subset of the actual elements. If not ignoring repeated field order, but * still ignoring extra repeated field elements, the actual elements must contain a subsequence * that matches the expected elements for the test to pass. (The subsequence rule does not apply * to Map fields, which are always compared by key.) */ ProtoFluentAssertion ignoringExtraRepeatedFieldElements(); /** * Specifies that extra repeated field elements for these explicitly specified top-level field * numbers should be ignored. Sub-fields must be specified explicitly (via {@link * FieldDescriptor}) if their extra elements are to be ignored as well. * * <p>Use {@link #ignoringExtraRepeatedFieldElements()} instead to ignore these for all fields. * * @see #ignoringExtraRepeatedFieldElements() for details. */ ProtoFluentAssertion ignoringExtraRepeatedFieldElementsOfFields( int firstFieldNumber, int... rest); /** * Specifies that extra repeated field elements for these explicitly specified top-level field * numbers should be ignored. Sub-fields must be specified explicitly (via {@link * FieldDescriptor}) if their extra elements are to be ignored as well. * * <p>Use {@link #ignoringExtraRepeatedFieldElements()} instead to ignore these for all fields. * * @see #ignoringExtraRepeatedFieldElements() for details. 
*/ ProtoFluentAssertion ignoringExtraRepeatedFieldElementsOfFields(Iterable<Integer> fieldNumbers); /** * Specifies that extra repeated field elements for these explicitly specified field descriptors * should be ignored. Sub-fields must be specified explicitly if their extra elements are to be * ignored as well. * * <p>Use {@link #ignoringExtraRepeatedFieldElements()} instead to ignore these for all fields. * * @see #ignoringExtraRepeatedFieldElements() for details. */ ProtoFluentAssertion ignoringExtraRepeatedFieldElementsOfFieldDescriptors( FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest); /** * Specifies that extra repeated field elements for these explicitly specified field descriptors * should be ignored. Sub-fields must be specified explicitly if their extra elements are to be * ignored as well. * * <p>Use {@link #ignoringExtraRepeatedFieldElements()} instead to ignore these for all fields. * * @see #ignoringExtraRepeatedFieldElements() for details. */ ProtoFluentAssertion ignoringExtraRepeatedFieldElementsOfFieldDescriptors( Iterable<FieldDescriptor> fieldDescriptors); /** * Compares double fields as equal if they are both finite and their absolute difference is less * than or equal to {@code tolerance}. * * @param tolerance A finite, non-negative tolerance. */ ProtoFluentAssertion usingDoubleTolerance(double tolerance); /** * Compares double fields with these explicitly specified top-level field numbers using the * provided absolute tolerance. * * @param tolerance A finite, non-negative tolerance. */ ProtoFluentAssertion usingDoubleToleranceForFields( double tolerance, int firstFieldNumber, int... rest); /** * Compares double fields with these explicitly specified top-level field numbers using the * provided absolute tolerance. * * @param tolerance A finite, non-negative tolerance. 
*/ ProtoFluentAssertion usingDoubleToleranceForFields( double tolerance, Iterable<Integer> fieldNumbers); /** * Compares double fields with these explicitly specified fields using the provided absolute * tolerance. * * @param tolerance A finite, non-negative tolerance. */ ProtoFluentAssertion usingDoubleToleranceForFieldDescriptors( double tolerance, FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest); /** * Compares double fields with these explicitly specified fields using the provided absolute * tolerance. * * @param tolerance A finite, non-negative tolerance. */ ProtoFluentAssertion usingDoubleToleranceForFieldDescriptors( double tolerance, Iterable<FieldDescriptor> fieldDescriptors); /** * Compares float fields as equal if they are both finite and their absolute difference is less * than or equal to {@code tolerance}. * * @param tolerance A finite, non-negative tolerance. */ ProtoFluentAssertion usingFloatTolerance(float tolerance); /** * Compares float fields with these explicitly specified top-level field numbers using the * provided absolute tolerance. * * @param tolerance A finite, non-negative tolerance. */ ProtoFluentAssertion usingFloatToleranceForFields( float tolerance, int firstFieldNumber, int... rest); /** * Compares float fields with these explicitly specified top-level field numbers using the * provided absolute tolerance. * * @param tolerance A finite, non-negative tolerance. */ ProtoFluentAssertion usingFloatToleranceForFields( float tolerance, Iterable<Integer> fieldNumbers); /** * Compares float fields with these explicitly specified fields using the provided absolute * tolerance. * * @param tolerance A finite, non-negative tolerance. */ ProtoFluentAssertion usingFloatToleranceForFieldDescriptors( float tolerance, FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest); /** * Compares float fields with these explicitly specified top-level field numbers using the * provided absolute tolerance. 
* * @param tolerance A finite, non-negative tolerance. */ ProtoFluentAssertion usingFloatToleranceForFieldDescriptors( float tolerance, Iterable<FieldDescriptor> fieldDescriptors); /** * Limits the comparison of Protocol buffers to the fields set in the expected proto(s). When * multiple protos are specified, the comparison is limited to the union of set fields in all the * expected protos. * * <p>The "expected proto(s)" are those passed to the void method at the end of the {@code * ProtoFluentAssertion} call-chain: For example, {@link #isEqualTo(Message)}, or {@link * #isNotEqualTo(Message)}. * * <p>Fields not set in the expected proto(s) are ignored. In particular, proto3 fields which have * their default values are ignored, as these are indistinguishable from unset fields. If you want * to assert that a proto3 message has certain fields with default values, you cannot use this * method. */ ProtoFluentAssertion comparingExpectedFieldsOnly(); /** * Limits the comparison of Protocol buffers to the defined {@link FieldScope}. * * <p>This method is additive and has well-defined ordering semantics. If the invoking {@link * ProtoFluentAssertion} is already scoped to a {@link FieldScope} {@code X}, and this method is * invoked with {@link FieldScope} {@code Y}, the resultant {@link ProtoFluentAssertion} is * constrained to the intersection of {@link FieldScope}s {@code X} and {@code Y}. * * <p>By default, {@link ProtoFluentAssertion} is constrained to {@link FieldScopes#all()}, that * is, no fields are excluded from comparison. */ ProtoFluentAssertion withPartialScope(FieldScope fieldScope); /** * Excludes the top-level message fields with the given tag numbers from the comparison. * * <p>This method adds on any previous {@link FieldScope} related settings, overriding previous * changes to ensure the specified fields are ignored recursively. 
All sub-fields of these field * numbers are ignored, and all sub-messages of type {@code M} will also have these field numbers * ignored. * * <p>If an invalid field number is supplied, the terminal comparison operation will throw a * runtime exception. */ ProtoFluentAssertion ignoringFields(int firstFieldNumber, int... rest); /** * Excludes the top-level message fields with the given tag numbers from the comparison. * * <p>This method adds on any previous {@link FieldScope} related settings, overriding previous * changes to ensure the specified fields are ignored recursively. All sub-fields of these field * numbers are ignored, and all sub-messages of type {@code M} will also have these field numbers * ignored. * * <p>If an invalid field number is supplied, the terminal comparison operation will throw a * runtime exception. */ ProtoFluentAssertion ignoringFields(Iterable<Integer> fieldNumbers); /** * Excludes all message fields matching the given {@link FieldDescriptor}s from the comparison. * * <p>This method adds on any previous {@link FieldScope} related settings, overriding previous * changes to ensure the specified fields are ignored recursively. All sub-fields of these field * descriptors are ignored, no matter where they occur in the tree. * * <p>If a field descriptor which does not, or cannot occur in the proto structure is supplied, it * is silently ignored. */ ProtoFluentAssertion ignoringFieldDescriptors( FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest); /** * Excludes all message fields matching the given {@link FieldDescriptor}s from the comparison. * * <p>This method adds on any previous {@link FieldScope} related settings, overriding previous * changes to ensure the specified fields are ignored recursively. All sub-fields of these field * descriptors are ignored, no matter where they occur in the tree. * * <p>If a field descriptor which does not, or cannot occur in the proto structure is supplied, it * is silently ignored. 
*/ ProtoFluentAssertion ignoringFieldDescriptors(Iterable<FieldDescriptor> fieldDescriptors); /** * Excludes all specific field paths under the argument {@link FieldScope} from the comparison. * * <p>This method is additive and has well-defined ordering semantics. If the invoking {@link * ProtoFluentAssertion} is already scoped to a {@link FieldScope} {@code X}, and this method is * invoked with {@link FieldScope} {@code Y}, the resultant {@link ProtoFluentAssertion} is * constrained to the subtraction of {@code X - Y}. * * <p>By default, {@link ProtoFluentAssertion} is constrained to {@link FieldScopes#all()}, that * is, no fields are excluded from comparison. */ ProtoFluentAssertion ignoringFieldScope(FieldScope fieldScope); /** * If set, in the event of a comparison failure, the error message printed will list only those * specific fields that did not match between the actual and expected values. Useful for very * large protocol buffers. * * <p>This a purely cosmetic setting, and it has no effect on the behavior of the test. */ ProtoFluentAssertion reportingMismatchesOnly(); /** * Specifies the {@link TypeRegistry} and {@link ExtensionRegistry} to use for {@link * com.google.protobuf.Any Any} messages. * * <p>To compare the value of an {@code Any} message, ProtoTruth looks in the given type registry * for a descriptor for the message's type URL: * * <ul> * <li>If ProtoTruth finds a descriptor, it unpacks the value and compares it against the * expected value, respecting any configuration methods used for the assertion. * <li>If ProtoTruth does not find a descriptor (or if the value can't be deserialized with the * descriptor), it compares the raw, serialized bytes of the expected and actual values. * </ul> * * <p>When ProtoTruth unpacks a value, it is parsing a serialized proto. That proto may contain * extensions. To look up those extensions, ProtoTruth uses the provided {@link * ExtensionRegistry}. 
* * @since 1.1 */ ProtoFluentAssertion unpackingAnyUsing( TypeRegistry typeRegistry, ExtensionRegistry extensionRegistry); /** * Compares the subject of the assertion to {@code expected}, using all of the rules specified by * earlier operations. If no settings are changed, this invokes the default {@code equals} * implementation of the subject {@link Message}. */ void isEqualTo(@Nullable Message expected); /** * Compares the subject of the assertion to {@code expected}, expecting a difference, using all of * the rules specified by earlier operations. If no settings are changed, this invokes the default * {@code equals} implementation of the subject {@link Message}. */ void isNotEqualTo(@Nullable Message expected); /** * @deprecated Do not call {@code equals()} on a {@code ProtoFluentAssertion}. Use {@link * #isEqualTo(Message)} instead. * @see com.google.common.truth.Subject#equals(Object) */ @Override @Deprecated boolean equals(@Nullable Object o); /** * @deprecated {@code ProtoFluentAssertion} does not support {@code hashCode()}. Use {@link * #isEqualTo(Message)} for testing. * @see com.google.common.truth.Subject#hashCode() */ @Override @Deprecated int hashCode(); }
ProtoFluentAssertion
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/query/MappedSuperclassAttributeInMultipleSubtypesTest.java
{ "start": 4151, "end": 4439 }
class ____ extends MappedSuper { public ChildOne() { } public ChildOne(Long id, String stringProp, Integer otherProp, BasicEntity toOneProp) { super( id, stringProp, otherProp, toOneProp ); } } @Entity( name = "ChildTwo" ) @SuppressWarnings( "unused" ) public static
ChildOne
java
apache__logging-log4j2
log4j-jul/src/main/java/org/apache/logging/log4j/jul/LevelTranslator.java
{ "start": 1182, "end": 3385 }
class ____ { /** * Custom Log4j level corresponding to the {@link java.util.logging.Level#FINEST} logging level. This maps to a * level more specific than {@link org.apache.logging.log4j.Level#TRACE}. */ public static final Level FINEST = Level.forName("FINEST", Level.TRACE.intLevel() + 100); /** * Custom Log4j level corresponding to the {@link java.util.logging.Level#CONFIG} logging level. This maps to a * level in between {@link org.apache.logging.log4j.Level#INFO} and {@link org.apache.logging.log4j.Level#DEBUG}. */ public static final Level CONFIG = Level.forName("CONFIG", Level.INFO.intLevel() + 50); private static final Logger LOGGER = StatusLogger.getLogger(); private static final LevelConverter LEVEL_CONVERTER; static { final String levelConverterClassName = PropertiesUtil.getProperties().getStringProperty(Constants.LEVEL_CONVERTER_PROPERTY); if (levelConverterClassName != null) { LevelConverter levelConverter; try { levelConverter = LoaderUtil.newCheckedInstanceOf(levelConverterClassName, LevelConverter.class); } catch (final Exception e) { LOGGER.error("Could not create custom LevelConverter [{}].", levelConverterClassName, e); levelConverter = new DefaultLevelConverter(); } LEVEL_CONVERTER = levelConverter; } else { LEVEL_CONVERTER = new DefaultLevelConverter(); } } /** * Converts a JDK logging Level to a Log4j logging Level. * * @param level JDK Level to convert, may be null per the JUL specification. * @return converted Level or null */ public static Level toLevel(final java.util.logging.Level level) { return LEVEL_CONVERTER.toLevel(level); } /** * Converts a Log4j logging Level to a JDK logging Level. * * @param level Log4j Level to convert. * @return converted Level. */ public static java.util.logging.Level toJavaLevel(final Level level) { return LEVEL_CONVERTER.toJavaLevel(level); } private LevelTranslator() {} }
LevelTranslator
java
quarkusio__quarkus
extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/interceptors/ServerInterceptorProducerTest.java
{ "start": 1412, "end": 2679 }
class ____ { @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( () -> ShrinkWrap.create(JavaArchive.class) .addClasses(MyService.class, ServerInterceptors.class, GreeterGrpc.class, Greeter.class, GreeterBean.class, HelloRequest.class, HelloReply.class, MutinyGreeterGrpc.class, HelloRequestOrBuilder.class, HelloReplyOrBuilder.class)); protected ManagedChannel channel; @BeforeEach public void init() { channel = ManagedChannelBuilder.forAddress("localhost", 9001) .usePlaintext() .build(); } @AfterEach public void shutdown() { if (channel != null) { channel.shutdownNow(); } } @Test public void testInterceptors() { HelloReply reply = GreeterGrpc.newBlockingStub(channel) .sayHello(HelloRequest.newBuilder().setName("neo").build()); assertThat(reply.getMessage()).isEqualTo("Hello, neo"); assertFalse(MyInterceptor.callTime == 0); } @RegisterInterceptor(MyInterceptor.class) @GrpcService public static
ServerInterceptorProducerTest
java
elastic__elasticsearch
x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/HttpRequestAttachment.java
{ "start": 592, "end": 2906 }
class ____ implements EmailAttachmentParser.EmailAttachment { private final HttpRequestTemplate requestTemplate; private boolean inline; private final String contentType; private final String id; public HttpRequestAttachment(String id, HttpRequestTemplate requestTemplate, boolean inline, @Nullable String contentType) { this.id = id; this.requestTemplate = requestTemplate; this.inline = inline; this.contentType = contentType; } public HttpRequestTemplate getRequestTemplate() { return requestTemplate; } public String getContentType() { return contentType; } @Override public String id() { return id; } @Override public boolean inline() { return inline; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(id) .startObject(HttpEmailAttachementParser.TYPE) .field(HttpEmailAttachementParser.Fields.REQUEST.getPreferredName(), requestTemplate, params); if (Strings.hasLength(contentType)) { builder.field(HttpEmailAttachementParser.Fields.CONTENT_TYPE.getPreferredName(), contentType); } if (inline) { builder.field(HttpEmailAttachementParser.Fields.INLINE.getPreferredName(), inline); } return builder.endObject().endObject(); } public static Builder builder(String id) { return new Builder(id); } @Override public String type() { return HttpEmailAttachementParser.TYPE; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; HttpRequestAttachment otherDataAttachment = (HttpRequestAttachment) o; return Objects.equals(id, otherDataAttachment.id) && Objects.equals(requestTemplate, otherDataAttachment.requestTemplate) && Objects.equals(contentType, otherDataAttachment.contentType) && Objects.equals(inline, otherDataAttachment.inline); } @Override public int hashCode() { return Objects.hash(id, requestTemplate, contentType, inline); } public static
HttpRequestAttachment
java
mockito__mockito
mockito-core/src/main/java/org/mockito/plugins/MockitoPlugins.java
{ "start": 274, "end": 579 }
interface ____ available via {@link MockitoFramework#getPlugins()}. * This object enables framework integrators to get access to default Mockito plugins. * <p> * Example use case: one needs to implement custom {@link MockMaker} * and delegate some behavior to the default Mockito implementation. * The
is
java
micronaut-projects__micronaut-core
http-server-netty/src/main/java/io/micronaut/http/server/netty/ssl/SelfSignedSslBuilder.java
{ "start": 1601, "end": 2694 }
class ____ extends AbstractServerSslBuilder implements ServerSslBuilder { private final ServerSslConfiguration ssl; /** * @param serverConfiguration The server configuration * @param ssl The SSL configuration * @param resourceResolver The resource resolver */ public SelfSignedSslBuilder( HttpServerConfiguration serverConfiguration, ServerSslConfiguration ssl, ResourceResolver resourceResolver) { super(resourceResolver, serverConfiguration); this.ssl = ssl; } @Override public ServerSslConfiguration getSslConfiguration() { return ssl; } @Override protected Optional<KeyStore> getKeyStore(SslConfiguration ssl) throws Exception { KeyStore store = KeyStore.getInstance(KeyStore.getDefaultType()); store.load(null, null); SelfSignedCertificate ssc = new SelfSignedCertificate(); store.setKeyEntry("key", ssc.key(), null, new Certificate[]{ssc.cert()}); return Optional.of(store); } static
SelfSignedSslBuilder
java
apache__camel
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/CosmosDbEndpointBuilderFactory.java
{ "start": 1467, "end": 1610 }
interface ____ { /** * Builder for endpoint consumers for the Azure CosmosDB component. */ public
CosmosDbEndpointBuilderFactory
java
google__dagger
javatests/dagger/functional/factory/FactoryImplicitModulesTest.java
{ "start": 1794, "end": 2193 }
interface ____ { InstantiableConcreteModuleComponent create(); } } @Test public void instantiableConcreteModule() { InstantiableConcreteModuleComponent component = DaggerFactoryImplicitModulesTest_InstantiableConcreteModuleComponent.factory().create(); assertThat(component.getInt()).isEqualTo(42); } @Component(modules = InstantiableConcreteModule.class)
Factory
java
apache__camel
components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/fix/BindySimpleKeyValuePairMarshallTest.java
{ "start": 3368, "end": 3726 }
class ____ extends RouteBuilder { BindyKeyValuePairDataFormat camelDataFormat = new BindyKeyValuePairDataFormat(org.apache.camel.dataformat.bindy.model.fix.simple.Order.class); @Override public void configure() { from("direct:start").marshal(camelDataFormat).to("mock:result"); } } }
ContextConfig
java
netty__netty
buffer/src/test/java/io/netty/buffer/AdaptiveByteBufAllocatorUseCacheForNonEventLoopThreadsTest.java
{ "start": 915, "end": 2241 }
class ____ extends AdaptiveByteBufAllocatorTest { @Override protected AdaptiveByteBufAllocator newAllocator(final boolean preferDirect) { return new AdaptiveByteBufAllocator(preferDirect, true); } @Override protected AdaptiveByteBufAllocator newUnpooledAllocator() { return newAllocator(false); } @Test void testFastThreadLocalThreadWithoutCleanupFastThreadLocals() throws InterruptedException { final AtomicReference<Throwable> throwable = new AtomicReference<Throwable>(); Runnable task = new Runnable() { @Override public void run() { try { AdaptiveByteBufAllocatorUseCacheForNonEventLoopThreadsTest.super.testUsedHeapMemory(); AdaptiveByteBufAllocatorUseCacheForNonEventLoopThreadsTest.super.testUsedDirectMemory(); } catch (Throwable e) { throwable.set(e); } } }; Thread customizefastThreadLocalThread = new CustomizeFastThreadLocalThreadWithoutCleanupFastThreadLocals(task); customizefastThreadLocalThread.start(); customizefastThreadLocalThread.join(); assertNull(throwable.get()); } private static final
AdaptiveByteBufAllocatorUseCacheForNonEventLoopThreadsTest
java
spring-projects__spring-security
core/src/test/java/org/springframework/security/core/annotation/UniqueSecurityAnnotationScannerTests.java
{ "start": 15884, "end": 16365 }
class ____ implements UserService, OtherUserService, RemoteUserService { @Override public void add(String user) { } @Override public List<String> list(String user) { return List.of(user); } @Override public String get(@CustomParameterAnnotation("five") String user) { return user; } @Override public void delete(String user) { } @Override public void batch(@CustomParameterAnnotation("seven") String... user) { } } static
UserServiceImpl
java
google__guice
extensions/persist/test/com/google/inject/persist/jpa/JoiningLocalTransactionsTest.java
{ "start": 4766, "end": 6220 }
class ____ { private final Provider<EntityManager> emProvider; @Inject public TransactionalObject(Provider<EntityManager> emProvider) { this.emProvider = emProvider; } @Transactional public void runOperationInTxn() { runOperationInTxnInternal(); } @Transactional(rollbackOn = IOException.class) public void runOperationInTxnInternal() { EntityManager em = emProvider.get(); JpaTestEntity entity = new JpaTestEntity(); entity.setText(UNIQUE_TEXT); em.persist(entity); } @Transactional(rollbackOn = IOException.class) public void runOperationInTxnThrowingChecked() throws IOException { runOperationInTxnThrowingCheckedInternal(); } @Transactional private void runOperationInTxnThrowingCheckedInternal() throws IOException { JpaTestEntity entity = new JpaTestEntity(); entity.setText(TRANSIENT_UNIQUE_TEXT); emProvider.get().persist(entity); throw new IOException(); } @Transactional public void runOperationInTxnThrowingUnchecked() { runOperationInTxnThrowingUncheckedInternal(); } @Transactional(rollbackOn = IOException.class) public void runOperationInTxnThrowingUncheckedInternal() { JpaTestEntity entity = new JpaTestEntity(); entity.setText(TRANSIENT_UNIQUE_TEXT); emProvider.get().persist(entity); throw new IllegalStateException(); } } }
TransactionalObject
java
micronaut-projects__micronaut-core
inject/src/main/java/io/micronaut/inject/ArgumentInjectionPoint.java
{ "start": 950, "end": 1386 }
interface ____<B, T> extends InjectionPoint<B>, ArgumentCoercible<T> { /** * @return The outer injection point (method or constructor) */ @NonNull CallableInjectionPoint<B> getOuterInjectionPoint(); /** * @return The argument that is being injected */ @NonNull Argument<T> getArgument(); @Override default Argument<T> asArgument() { return getArgument(); } }
ArgumentInjectionPoint
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/action/support/ActiveShardsObserver.java
{ "start": 1089, "end": 1249 }
class ____ a primitive for waiting for a configured number of shards * to become active before sending a response on an {@link ActionListener}. */ public
provides
java
google__guava
android/guava-tests/test/com/google/common/primitives/ImmutableDoubleArrayTest.java
{ "start": 21799, "end": 21990 }
class ____ extends SampleElements<Double> { public SampleDoubles() { super(-0.0, Long.MAX_VALUE * 3.0, Double.MAX_VALUE, Double.POSITIVE_INFINITY, Double.NaN); } } }
SampleDoubles
java
spring-projects__spring-security
config/src/main/java/org/springframework/security/config/annotation/method/configuration/GlobalMethodSecurityConfiguration.java
{ "start": 5362, "end": 17514 }
class ____ implements ImportAware, SmartInitializingSingleton, BeanFactoryAware { private static final Log logger = LogFactory.getLog(GlobalMethodSecurityConfiguration.class); private ObjectPostProcessor<Object> objectPostProcessor = new ObjectPostProcessor<>() { @Override public <T> T postProcess(T object) { throw new IllegalStateException(ObjectPostProcessor.class.getName() + " is a required bean. Ensure you have used @" + EnableGlobalMethodSecurity.class.getName()); } }; private SecurityContextHolderStrategy securityContextHolderStrategy = SecurityContextHolder .getContextHolderStrategy(); private DefaultMethodSecurityExpressionHandler defaultMethodExpressionHandler = new DefaultMethodSecurityExpressionHandler(); private AuthenticationManager authenticationManager; private AuthenticationManagerBuilder auth; private boolean disableAuthenticationRegistry; private AnnotationAttributes enableMethodSecurity; private BeanFactory context; private MethodSecurityExpressionHandler expressionHandler; private MethodSecurityInterceptor methodSecurityInterceptor; /** * Creates the default MethodInterceptor which is a MethodSecurityInterceptor using * the following methods to construct it. * <ul> * <li>{@link #accessDecisionManager()}</li> * <li>{@link #afterInvocationManager()}</li> * <li>{@link #authenticationManager()}</li> * <li>{@link #runAsManager()}</li> * * </ul> * * <p> * Subclasses can override this method to provide a different * {@link MethodInterceptor}. * </p> * @param methodSecurityMetadataSource the default * {@link MethodSecurityMetadataSource}. * @return the {@link MethodInterceptor}. */ @Bean public MethodInterceptor methodSecurityInterceptor(MethodSecurityMetadataSource methodSecurityMetadataSource) { this.methodSecurityInterceptor = isAspectJ() ? 
new AspectJMethodSecurityInterceptor() : new MethodSecurityInterceptor(); this.methodSecurityInterceptor.setAccessDecisionManager(accessDecisionManager()); this.methodSecurityInterceptor.setAfterInvocationManager(afterInvocationManager()); this.methodSecurityInterceptor.setSecurityMetadataSource(methodSecurityMetadataSource); this.methodSecurityInterceptor.setSecurityContextHolderStrategy(this.securityContextHolderStrategy); RunAsManager runAsManager = runAsManager(); if (runAsManager != null) { this.methodSecurityInterceptor.setRunAsManager(runAsManager); } return this.methodSecurityInterceptor; } @Override public void afterSingletonsInstantiated() { try { initializeMethodSecurityInterceptor(); } catch (Exception ex) { throw new RuntimeException(ex); } PermissionEvaluator permissionEvaluator = getBeanOrNull(PermissionEvaluator.class); if (permissionEvaluator != null) { this.defaultMethodExpressionHandler.setPermissionEvaluator(permissionEvaluator); } RoleHierarchy roleHierarchy = getBeanOrNull(RoleHierarchy.class); if (roleHierarchy != null) { this.defaultMethodExpressionHandler.setRoleHierarchy(roleHierarchy); } AuthenticationTrustResolver trustResolver = getBeanOrNull(AuthenticationTrustResolver.class); if (trustResolver != null) { this.defaultMethodExpressionHandler.setTrustResolver(trustResolver); } GrantedAuthorityDefaults grantedAuthorityDefaults = getBeanOrNull(GrantedAuthorityDefaults.class); if (grantedAuthorityDefaults != null) { this.defaultMethodExpressionHandler.setDefaultRolePrefix(grantedAuthorityDefaults.getRolePrefix()); } this.defaultMethodExpressionHandler = this.objectPostProcessor.postProcess(this.defaultMethodExpressionHandler); } private <T> T getBeanOrNull(Class<T> type) { return this.context.getBeanProvider(type).getIfUnique(); } private void initializeMethodSecurityInterceptor() throws Exception { if (this.methodSecurityInterceptor == null) { return; } this.methodSecurityInterceptor.setAuthenticationManager(authenticationManager()); } /** 
* Provide a custom {@link AfterInvocationManager} for the default implementation of * {@link #methodSecurityInterceptor(MethodSecurityMetadataSource)}. The default is * null if pre post is not enabled. Otherwise, it returns a * {@link AfterInvocationProviderManager}. * * <p> * Subclasses should override this method to provide a custom * {@link AfterInvocationManager} * </p> * @return the {@link AfterInvocationManager} to use */ protected AfterInvocationManager afterInvocationManager() { if (prePostEnabled()) { AfterInvocationProviderManager invocationProviderManager = new AfterInvocationProviderManager(); ExpressionBasedPostInvocationAdvice postAdvice = new ExpressionBasedPostInvocationAdvice( getExpressionHandler()); PostInvocationAdviceProvider postInvocationAdviceProvider = new PostInvocationAdviceProvider(postAdvice); List<AfterInvocationProvider> afterInvocationProviders = new ArrayList<>(); afterInvocationProviders.add(postInvocationAdviceProvider); invocationProviderManager.setProviders(afterInvocationProviders); return invocationProviderManager; } return null; } /** * Provide a custom {@link RunAsManager} for the default implementation of * {@link #methodSecurityInterceptor(MethodSecurityMetadataSource)}. The default is * null. * @return the {@link RunAsManager} to use */ protected RunAsManager runAsManager() { return null; } /** * Allows subclasses to provide a custom {@link AccessDecisionManager}. 
The default is * a {@link AffirmativeBased} with the following voters: * * <ul> * <li>{@link PreInvocationAuthorizationAdviceVoter}</li> * <li>{@link RoleVoter}</li> * <li>{@link AuthenticatedVoter}</li> * </ul> * @return the {@link AccessDecisionManager} to use */ protected AccessDecisionManager accessDecisionManager() { List<AccessDecisionVoter<?>> decisionVoters = new ArrayList<>(); if (prePostEnabled()) { ExpressionBasedPreInvocationAdvice expressionAdvice = new ExpressionBasedPreInvocationAdvice(); expressionAdvice.setExpressionHandler(getExpressionHandler()); decisionVoters.add(new PreInvocationAuthorizationAdviceVoter(expressionAdvice)); } if (jsr250Enabled()) { decisionVoters.add(new Jsr250Voter()); } RoleVoter roleVoter = new RoleVoter(); GrantedAuthorityDefaults grantedAuthorityDefaults = getBeanOrNull(GrantedAuthorityDefaults.class); if (grantedAuthorityDefaults != null) { roleVoter.setRolePrefix(grantedAuthorityDefaults.getRolePrefix()); } decisionVoters.add(roleVoter); decisionVoters.add(new AuthenticatedVoter()); return new AffirmativeBased(decisionVoters); } /** * Provide a {@link MethodSecurityExpressionHandler} that is registered with the * {@link ExpressionBasedPreInvocationAdvice}. The default is * {@link DefaultMethodSecurityExpressionHandler} which optionally will Autowire an * {@link AuthenticationTrustResolver}. * * <p> * Subclasses may override this method to provide a custom * {@link MethodSecurityExpressionHandler} * </p> * @return the {@link MethodSecurityExpressionHandler} to use */ protected MethodSecurityExpressionHandler createExpressionHandler() { return this.defaultMethodExpressionHandler; } /** * Gets the {@link MethodSecurityExpressionHandler} or creates it using * {@link #expressionHandler}. 
* @return a non {@code null} {@link MethodSecurityExpressionHandler} */ protected final MethodSecurityExpressionHandler getExpressionHandler() { if (this.expressionHandler == null) { this.expressionHandler = createExpressionHandler(); } return this.expressionHandler; } /** * Provides a custom {@link MethodSecurityMetadataSource} that is registered with the * {@link #methodSecurityMetadataSource()}. Default is null. * @return a custom {@link MethodSecurityMetadataSource} that is registered with the * {@link #methodSecurityMetadataSource()} */ protected MethodSecurityMetadataSource customMethodSecurityMetadataSource() { return null; } /** * Allows providing a custom {@link AuthenticationManager}. The default is to use any * authentication mechanisms registered by * {@link #configure(AuthenticationManagerBuilder)}. If * {@link #configure(AuthenticationManagerBuilder)} was not overridden, then an * {@link AuthenticationManager} is attempted to be autowired by type. * @return the {@link AuthenticationManager} to use */ protected AuthenticationManager authenticationManager() throws Exception { if (this.authenticationManager == null) { DefaultAuthenticationEventPublisher eventPublisher = this.objectPostProcessor .postProcess(new DefaultAuthenticationEventPublisher()); this.auth = new AuthenticationManagerBuilder(this.objectPostProcessor); this.auth.authenticationEventPublisher(eventPublisher); configure(this.auth); this.authenticationManager = (this.disableAuthenticationRegistry) ? getAuthenticationConfiguration().getAuthenticationManager() : this.auth.build(); } return this.authenticationManager; } /** * Sub classes can override this method to register different types of authentication. * If not overridden, {@link #configure(AuthenticationManagerBuilder)} will attempt to * autowire by type. * @param auth the {@link AuthenticationManagerBuilder} used to register different * authentication mechanisms for the global method security. 
* @throws Exception */ protected void configure(AuthenticationManagerBuilder auth) throws Exception { this.disableAuthenticationRegistry = true; } /** * Provides the default {@link MethodSecurityMetadataSource} that will be used. It * creates a {@link DelegatingMethodSecurityMetadataSource} based upon * {@link #customMethodSecurityMetadataSource()} and the attributes on * {@link EnableGlobalMethodSecurity}. * @return the {@link MethodSecurityMetadataSource} */ @Bean @Role(BeanDefinition.ROLE_INFRASTRUCTURE) public MethodSecurityMetadataSource methodSecurityMetadataSource() { List<MethodSecurityMetadataSource> sources = new ArrayList<>(); ExpressionBasedAnnotationAttributeFactory attributeFactory = new ExpressionBasedAnnotationAttributeFactory( getExpressionHandler()); MethodSecurityMetadataSource customMethodSecurityMetadataSource = customMethodSecurityMetadataSource(); if (customMethodSecurityMetadataSource != null) { sources.add(customMethodSecurityMetadataSource); } boolean hasCustom = customMethodSecurityMetadataSource != null; boolean isPrePostEnabled = prePostEnabled(); boolean isSecuredEnabled = securedEnabled(); boolean isJsr250Enabled = jsr250Enabled(); Assert.state(isPrePostEnabled || isSecuredEnabled || isJsr250Enabled || hasCustom, "In the composition of all global method configuration, " + "no annotation support was actually activated"); if (isPrePostEnabled) { sources.add(new PrePostAnnotationSecurityMetadataSource(attributeFactory)); } if (isSecuredEnabled) { sources.add(new SecuredAnnotationSecurityMetadataSource()); } if (isJsr250Enabled) { GrantedAuthorityDefaults grantedAuthorityDefaults = getBeanOrNull(GrantedAuthorityDefaults.class); Jsr250MethodSecurityMetadataSource jsr250MethodSecurityMetadataSource = this.context .getBean(Jsr250MethodSecurityMetadataSource.class); if (grantedAuthorityDefaults != null) { jsr250MethodSecurityMetadataSource.setDefaultRolePrefix(grantedAuthorityDefaults.getRolePrefix()); } 
sources.add(jsr250MethodSecurityMetadataSource); } return new DelegatingMethodSecurityMetadataSource(sources); } /** * Creates the {@link PreInvocationAuthorizationAdvice} to be used. The default is * {@link ExpressionBasedPreInvocationAdvice}. * @return the {@link PreInvocationAuthorizationAdvice} */ @Bean public PreInvocationAuthorizationAdvice preInvocationAuthorizationAdvice() { ExpressionBasedPreInvocationAdvice preInvocationAdvice = new ExpressionBasedPreInvocationAdvice(); preInvocationAdvice.setExpressionHandler(getExpressionHandler()); return preInvocationAdvice; } /** * Obtains the attributes from {@link EnableGlobalMethodSecurity} if this
GlobalMethodSecurityConfiguration
java
spring-projects__spring-boot
smoke-test/spring-boot-smoke-test-bootstrap-registry/src/main/java/smoketest/bootstrapregistry/app/MySubversionClient.java
{ "start": 861, "end": 1127 }
class ____ extends SubversionClient { public MySubversionClient(@Nullable SubversionServerCertificate serverCertificate) { super(serverCertificate); } @Override public String load(String location) { return "my-" + super.load(location); } }
MySubversionClient
java
FasterXML__jackson-databind
src/main/java/tools/jackson/databind/introspect/MethodGenericTypeResolver.java
{ "start": 645, "end": 737 }
class ____ variable bindings * (see [databind#2895] for more). * * @since 2.12 */ final
type
java
apache__camel
components/camel-quickfix/src/test/java/org/apache/camel/component/quickfixj/QuickfixjConvertersTest.java
{ "start": 1791, "end": 9197 }
class ____ extends CamelTestSupport { private File settingsFile; private ClassLoader contextClassLoader; private SessionSettings settings; private File tempdir; private QuickfixjEngine quickfixjEngine; @Override public void doPostSetup() throws Exception { settingsFile = File.createTempFile("quickfixj_test_", ".cfg"); tempdir = settingsFile.getParentFile(); URL[] urls = new URL[] { tempdir.toURI().toURL() }; contextClassLoader = Thread.currentThread().getContextClassLoader(); ClassLoader testClassLoader = new URLClassLoader(urls, contextClassLoader); Thread.currentThread().setContextClassLoader(testClassLoader); settings = new SessionSettings(); settings.setString(Acceptor.SETTING_SOCKET_ACCEPT_PROTOCOL, ProtocolFactory.getTypeString(ProtocolFactory.VM_PIPE)); settings.setString(Initiator.SETTING_SOCKET_CONNECT_PROTOCOL, ProtocolFactory.getTypeString(ProtocolFactory.VM_PIPE)); } @Override public void doPostTearDown() { Thread.currentThread().setContextClassLoader(contextClassLoader); } @Test public void convertSessionID() { Object value = context.getTypeConverter().convertTo(SessionID.class, "FIX.4.0:FOO->BAR"); assertThat(value, instanceOf(SessionID.class)); assertThat((SessionID) value, is(new SessionID("FIX.4.0", "FOO", "BAR"))); } @Test public void convertToExchange() { SessionID sessionID = new SessionID("FIX.4.0", "FOO", "BAR"); QuickfixjEndpoint endpoint = new QuickfixjEndpoint(null, "", new QuickfixjComponent(context)); Message message = new Message(); message.getHeader().setString(MsgType.FIELD, MsgType.ORDER_SINGLE); Exchange exchange = QuickfixjConverters.toExchange(endpoint, sessionID, message, QuickfixjEventCategory.AppMessageSent); assertThat((SessionID) exchange.getIn().getHeader(QuickfixjEndpoint.SESSION_ID_KEY), is(sessionID)); assertThat((QuickfixjEventCategory) exchange.getIn().getHeader(QuickfixjEndpoint.EVENT_CATEGORY_KEY), is(QuickfixjEventCategory.AppMessageSent)); assertThat((String) 
exchange.getIn().getHeader(QuickfixjEndpoint.MESSAGE_TYPE_KEY), is(MsgType.ORDER_SINGLE)); } @Test public void convertToExchangeWithNullMessage() { SessionID sessionID = new SessionID("FIX.4.0", "FOO", "BAR"); QuickfixjEndpoint endpoint = new QuickfixjEndpoint(null, "", new QuickfixjComponent(context)); Exchange exchange = QuickfixjConverters.toExchange(endpoint, sessionID, null, QuickfixjEventCategory.AppMessageSent); assertThat((SessionID) exchange.getIn().getHeader(QuickfixjEndpoint.SESSION_ID_KEY), is(sessionID)); assertThat((QuickfixjEventCategory) exchange.getIn().getHeader(QuickfixjEndpoint.EVENT_CATEGORY_KEY), is(QuickfixjEventCategory.AppMessageSent)); assertThat(exchange.getIn().getHeader(QuickfixjEndpoint.MESSAGE_TYPE_KEY), is(nullValue())); } @Test public void convertMessageWithoutRepeatingGroups() { String data = "8=FIX.4.0\0019=100\00135=D\00134=2\00149=TW\00156=ISLD\00111=ID\00121=1\001" + "40=1\00154=1\00140=2\00138=200\00155=INTC\00110=160\001"; Exchange exchange = new DefaultExchange(context); Object value = context.getTypeConverter().convertTo(Message.class, exchange, data); assertThat(value, instanceOf(Message.class)); } @Test public void convertMessageWithRepeatingGroupsUsingSessionID() throws Exception { SessionID sessionID = new SessionID("FIX.4.4", "FOO", "BAR"); createSession(sessionID); try { String data = "8=FIX.4.4\0019=40\00135=A\001" + "627=2\001628=FOO\001628=BAR\001" + "98=0\001384=2\001372=D\001385=R\001372=8\001385=S\00110=230\001"; Exchange exchange = new DefaultExchange(context); exchange.getIn().setHeader(QuickfixjEndpoint.SESSION_ID_KEY, sessionID); exchange.getIn().setBody(data); Message message = exchange.getIn().getBody(Message.class); NoHops hop = new NoHops(); message.getHeader().getGroup(1, hop); assertEquals("FOO", hop.getString(HopCompID.FIELD)); message.getHeader().getGroup(2, hop); assertEquals("BAR", hop.getString(HopCompID.FIELD)); } finally { quickfixjEngine.stop(); } } @Test public void 
convertMessageWithRepeatingGroupsUsingExchangeDictionary() throws Exception { SessionID sessionID = new SessionID("FIX.4.4", "FOO", "BAR"); createSession(sessionID); try { String data = "8=FIX.4.4\0019=40\00135=A\001" + "627=2\001628=FOO\001628=BAR\001" + "98=0\001384=2\001372=D\001385=R\001372=8\001385=S\00110=230\001"; Exchange exchange = new DefaultExchange(context); exchange.setProperty(QuickfixjEndpoint.DATA_DICTIONARY_KEY, new DataDictionary("FIX44.xml")); exchange.getIn().setBody(data); Message message = exchange.getIn().getBody(Message.class); NoHops hop = new NoHops(); message.getHeader().getGroup(1, hop); assertEquals("FOO", hop.getString(HopCompID.FIELD)); message.getHeader().getGroup(2, hop); assertEquals("BAR", hop.getString(HopCompID.FIELD)); } finally { quickfixjEngine.stop(); } } @Test public void convertMessageWithRepeatingGroupsUsingExchangeDictionaryResource() throws Exception { SessionID sessionID = new SessionID("FIX.4.4", "FOO", "BAR"); createSession(sessionID); try { String data = "8=FIX.4.4\0019=40\00135=A\001" + "627=2\001628=FOO\001628=BAR\001" + "98=0\001384=2\001372=D\001385=R\001372=8\001385=S\00110=230\001"; Exchange exchange = new DefaultExchange(context); exchange.setProperty(QuickfixjEndpoint.DATA_DICTIONARY_KEY, "FIX44.xml"); exchange.getIn().setBody(data); Message message = exchange.getIn().getBody(Message.class); NoHops hop = new NoHops(); message.getHeader().getGroup(1, hop); assertEquals("FOO", hop.getString(HopCompID.FIELD)); message.getHeader().getGroup(2, hop); assertEquals("BAR", hop.getString(HopCompID.FIELD)); } finally { quickfixjEngine.stop(); } } private void createSession(SessionID sessionID) throws Exception { SessionSettings settings = new SessionSettings(); settings.setString(Acceptor.SETTING_SOCKET_ACCEPT_PROTOCOL, ProtocolFactory.getTypeString(ProtocolFactory.VM_PIPE)); settings.setString(sessionID, SessionFactory.SETTING_CONNECTION_TYPE, SessionFactory.ACCEPTOR_CONNECTION_TYPE); settings.setLong(sessionID, 
Acceptor.SETTING_SOCKET_ACCEPT_PORT, 1234); TestSupport.setSessionID(settings, sessionID); TestSupport.writeSettings(settings, settingsFile); quickfixjEngine = new QuickfixjEngine(context, "quickfix:test", settingsFile.getName()); quickfixjEngine.start(); } }
QuickfixjConvertersTest
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/inlineme/SuggesterTest.java
{ "start": 12835, "end": 13044 }
class ____ { @InlineMe(replacement = "new NestedClass()", imports = "com.google.frobber.Client.NestedClass") @Deprecated public NestedClass silly() { return new NestedClass(); } public static
Client
java
apache__flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Compilable.java
{ "start": 1044, "end": 1732 }
interface ____ { /** * Compiles this object into a {@link CompiledPlan} that can be executed as one job. * * <p>Compiled plans can be persisted and reloaded across Flink versions. They describe static * pipelines to ensure backwards compatibility and enable stateful streaming job upgrades. See * {@link CompiledPlan} and the website documentation for more information. * * <p>Note: The compiled plan feature is experimental in batch mode. * * @throws TableException if any of the statements is invalid or if the plan cannot be * persisted. */ @Experimental CompiledPlan compilePlan() throws TableException; }
Compilable