language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
spring-projects__spring-boot
loader/spring-boot-loader/src/main/java/org/springframework/boot/loader/jar/NestedJarFileResources.java
{ "start": 1313, "end": 6511 }
class ____ implements Runnable { private static final int INFLATER_CACHE_LIMIT = 20; private ZipContent zipContent; private ZipContent zipContentForManifest; private final Set<InputStream> inputStreams = Collections.newSetFromMap(new WeakHashMap<>()); private Deque<Inflater> inflaterCache = new ArrayDeque<>(); /** * Create a new {@link NestedJarFileResources} instance. * @param file the source zip file * @param nestedEntryName the nested entry or {@code null} * @throws IOException on I/O error */ NestedJarFileResources(File file, String nestedEntryName) throws IOException { this.zipContent = ZipContent.open(file.toPath(), nestedEntryName); this.zipContentForManifest = (this.zipContent.getKind() != Kind.NESTED_DIRECTORY) ? null : ZipContent.open(file.toPath()); } /** * Return the underling {@link ZipContent}. * @return the zip content */ ZipContent zipContent() { return this.zipContent; } /** * Return the underlying {@link ZipContent} that should be used to load manifest * content. * @return the zip content to use when loading the manifest */ ZipContent zipContentForManifest() { return (this.zipContentForManifest != null) ? this.zipContentForManifest : this.zipContent; } /** * Add a managed input stream resource. * @param inputStream the input stream */ void addInputStream(InputStream inputStream) { synchronized (this.inputStreams) { this.inputStreams.add(inputStream); } } /** * Remove a managed input stream resource. * @param inputStream the input stream */ void removeInputStream(InputStream inputStream) { synchronized (this.inputStreams) { this.inputStreams.remove(inputStream); } } /** * Create a {@link Runnable} action to cleanup the given inflater. * @param inflater the inflater to cleanup * @return the cleanup action */ Runnable createInflatorCleanupAction(Inflater inflater) { return () -> endOrCacheInflater(inflater); } /** * Get previously used {@link Inflater} from the cache, or create a new one. 
* @return a usable {@link Inflater} */ Inflater getOrCreateInflater() { Deque<Inflater> inflaterCache = this.inflaterCache; if (inflaterCache != null) { synchronized (inflaterCache) { Inflater inflater = this.inflaterCache.poll(); if (inflater != null) { return inflater; } } } return new Inflater(true); } /** * Either release the given {@link Inflater} by calling {@link Inflater#end()} or add * it to the cache for later reuse. * @param inflater the inflater to end or cache */ private void endOrCacheInflater(Inflater inflater) { Deque<Inflater> inflaterCache = this.inflaterCache; if (inflaterCache != null) { synchronized (inflaterCache) { if (this.inflaterCache == inflaterCache && inflaterCache.size() < INFLATER_CACHE_LIMIT) { inflater.reset(); this.inflaterCache.add(inflater); return; } } } inflater.end(); } /** * Called by the {@link Cleaner} to free resources. * @see java.lang.Runnable#run() */ @Override public void run() { releaseAll(); } private void releaseAll() { IOException exceptionChain = null; exceptionChain = releaseInflators(exceptionChain); exceptionChain = releaseInputStreams(exceptionChain); exceptionChain = releaseZipContent(exceptionChain); exceptionChain = releaseZipContentForManifest(exceptionChain); if (exceptionChain != null) { throw new UncheckedIOException(exceptionChain); } } private IOException releaseInflators(IOException exceptionChain) { Deque<Inflater> inflaterCache = this.inflaterCache; if (inflaterCache != null) { try { synchronized (inflaterCache) { inflaterCache.forEach(Inflater::end); } } finally { this.inflaterCache = null; } } return exceptionChain; } private IOException releaseInputStreams(IOException exceptionChain) { synchronized (this.inputStreams) { for (InputStream inputStream : List.copyOf(this.inputStreams)) { try { inputStream.close(); } catch (IOException ex) { exceptionChain = addToExceptionChain(exceptionChain, ex); } } this.inputStreams.clear(); } return exceptionChain; } private IOException 
releaseZipContent(IOException exceptionChain) { ZipContent zipContent = this.zipContent; if (zipContent != null) { try { zipContent.close(); } catch (IOException ex) { exceptionChain = addToExceptionChain(exceptionChain, ex); } finally { this.zipContent = null; } } return exceptionChain; } private IOException releaseZipContentForManifest(IOException exceptionChain) { ZipContent zipContentForManifest = this.zipContentForManifest; if (zipContentForManifest != null) { try { zipContentForManifest.close(); } catch (IOException ex) { exceptionChain = addToExceptionChain(exceptionChain, ex); } finally { this.zipContentForManifest = null; } } return exceptionChain; } private IOException addToExceptionChain(IOException exceptionChain, IOException ex) { if (exceptionChain != null) { exceptionChain.addSuppressed(ex); return exceptionChain; } return ex; } }
NestedJarFileResources
java
spring-projects__spring-boot
core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConfigurationPropertiesTests.java
{ "start": 71464, "end": 71789 }
class ____ extends BasicProperties { @NotEmpty @SuppressWarnings("NullAway.Init") private String description; String getDescription() { return this.description; } void setDescription(String description) { this.description = description; } } @ConfigurationProperties @Validated static
Jsr303Properties
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableRefactoringTest.java
{ "start": 2356, "end": 2514 }
class ____ annotated with javax.annotation.concurrent.Immutable, but didn't" + " seem to be provably immutable.", "", "
was
java
spring-projects__spring-framework
spring-test/src/test/java/org/springframework/test/web/servlet/samples/client/standalone/resulthandlers/PrintingResultHandlerSmokeTests.java
{ "start": 2462, "end": 2650 }
class ____ { @PostMapping("/") public String hello(HttpServletResponse response) { response.addCookie(new Cookie("enigma", "42")); return "Hello Response"; } } }
SimpleController
java
quarkusio__quarkus
extensions/oidc/deployment/src/test/java/io/quarkus/oidc/test/OidcRequestAndResponseFilterTest.java
{ "start": 32704, "end": 33416 }
class ____ implements OidcResponseFilter, OidcRequestFilter { private final CallableFilterParent request = new CallableFilterParent(); private final CallableFilterParent response = new CallableFilterParent(); @Override public void filter(OidcResponseContext responseContext) { response.called(); } @Override public void filter(OidcRequestContext requestContext) { request.called(); } public void reset() { request.reset(); response.reset(); } public boolean isCalled() { return request.isCalled() && response.isCalled(); } } }
RequestAndResponseFilter
java
spring-projects__spring-boot
cli/spring-boot-cli/src/main/java/org/springframework/boot/cli/command/shell/ShellPrompts.java
{ "start": 836, "end": 1532 }
class ____ { private static final String DEFAULT_PROMPT = "$ "; private final Deque<String> prompts = new ArrayDeque<>(); /** * Push a new prompt to be used by the shell. * @param prompt the prompt * @see #popPrompt() */ public void pushPrompt(String prompt) { this.prompts.push(prompt); } /** * Pop a previously pushed prompt, returning to the previous value. * @see #pushPrompt(String) */ public void popPrompt() { if (!this.prompts.isEmpty()) { this.prompts.pop(); } } /** * Returns the current prompt. * @return the current prompt */ public String getPrompt() { return this.prompts.isEmpty() ? DEFAULT_PROMPT : this.prompts.peek(); } }
ShellPrompts
java
apache__kafka
clients/src/main/java/org/apache/kafka/common/ConsumerGroupState.java
{ "start": 1127, "end": 2052 }
enum ____ { UNKNOWN("Unknown"), PREPARING_REBALANCE("PreparingRebalance"), COMPLETING_REBALANCE("CompletingRebalance"), STABLE("Stable"), DEAD("Dead"), EMPTY("Empty"), ASSIGNING("Assigning"), RECONCILING("Reconciling"); private static final Map<String, ConsumerGroupState> NAME_TO_ENUM = Arrays.stream(values()) .collect(Collectors.toMap(state -> state.name.toUpperCase(Locale.ROOT), Function.identity())); private final String name; ConsumerGroupState(String name) { this.name = name; } /** * Case-insensitive consumer group state lookup by string name. */ public static ConsumerGroupState parse(String name) { ConsumerGroupState state = NAME_TO_ENUM.get(name.toUpperCase(Locale.ROOT)); return state == null ? UNKNOWN : state; } @Override public String toString() { return name; } }
ConsumerGroupState
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ThreadSafeCheckerTest.java
{ "start": 11956, "end": 12535 }
class ____ { @GuardedBy("this") int a = 42; @GuardedBy("this") final Map<Long, List<Long>> b = null; @GuardedBy("this") volatile int c = 42; } """) .doTest(); } @Test public void mutableFieldNotGuarded() { compilationHelper .addSourceLines( "Test.java", """ import com.google.errorprone.annotations.ThreadSafe; import javax.annotation.concurrent.GuardedBy; @ThreadSafe
Test
java
ReactiveX__RxJava
src/main/java/io/reactivex/rxjava3/internal/operators/completable/CompletableDetach.java
{ "start": 988, "end": 1338 }
class ____ extends Completable { final CompletableSource source; public CompletableDetach(CompletableSource source) { this.source = source; } @Override protected void subscribeActual(CompletableObserver observer) { source.subscribe(new DetachCompletableObserver(observer)); } static final
CompletableDetach
java
spring-projects__spring-security
config/src/test/java/org/springframework/security/config/annotation/web/configurers/AnonymousConfigurerTests.java
{ "start": 6944, "end": 7188 }
class ____ extends AbstractHttpConfigurer<CustomDsl, HttpSecurity> { @Override public void init(HttpSecurity http) { http.anonymous((anonymous) -> anonymous.principal("myAnonymousUser")); } } } @RestController static
CustomDsl
java
apache__camel
core/camel-core-model/src/main/java/org/apache/camel/model/dataformat/JsonApiDataFormat.java
{ "start": 4035, "end": 4625 }
class ____ implements DataFormatBuilder<JsonApiDataFormat> { private String dataFormatTypes; private Class<?>[] dataFormatTypeClasses; private String mainFormatType; private Class<?> mainFormatTypeClass; /** * The classes to take into account for the marshalling, */ public Builder dataFormatTypes(Class<?>[] dataFormatTypes) { this.dataFormatTypeClasses = dataFormatTypes; return this; } /** * The classes (FQN name) to take into account for the marshalling. Multiple
Builder
java
elastic__elasticsearch
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/StopILMRequest.java
{ "start": 498, "end": 1081 }
class ____ extends AcknowledgedRequest<StopILMRequest> { public StopILMRequest(StreamInput in) throws IOException { super(in); } public StopILMRequest(TimeValue masterNodeTimeout, TimeValue ackTimeout) { super(masterNodeTimeout, ackTimeout); } @Override public int hashCode() { return 75; } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (obj.getClass() != getClass()) { return false; } return true; } }
StopILMRequest
java
spring-projects__spring-framework
spring-context-indexer/src/test/java/org/springframework/context/index/sample/Scope.java
{ "start": 1002, "end": 1061 }
interface ____ { String value() default "singleton"; }
Scope
java
assertj__assertj-core
assertj-core/src/main/java/org/assertj/core/util/NaturalOrderComparator.java
{ "start": 686, "end": 1230 }
class ____<T extends Comparable<? super T>> extends NullSafeComparator<T> { private String description; public NaturalOrderComparator(Class<T> clazz) { this.description = "%s natural order".formatted(clazz.getSimpleName()); } public NaturalOrderComparator(String description) { this.description = description; } @Override protected int compareNonNull(T o1, T o2) { return Comparator.<T> naturalOrder().compare(o1, o2); } @Override public String toString() { return description; } }
NaturalOrderComparator
java
spring-projects__spring-security
saml2/saml2-service-provider/src/main/java/org/springframework/security/saml2/provider/service/metadata/Saml2MetadataResponseResolver.java
{ "start": 901, "end": 1220 }
interface ____ { /** * Construct and serialize a relying party's SAML 2.0 metadata based on the given * {@link HttpServletRequest} * @param request the HTTP request * @return a {@link Saml2MetadataResponse} instance */ Saml2MetadataResponse resolve(HttpServletRequest request); }
Saml2MetadataResponseResolver
java
spring-projects__spring-boot
module/spring-boot-mongodb/src/dockerTest/java/org/springframework/boot/mongodb/health/MongoReactiveHealthIndicatorIntegrationTests.java
{ "start": 1688, "end": 3145 }
class ____ { @Container static MongoDBContainer mongo = TestImage.container(MongoDBContainer.class); @Test void standardApi() { Health health = mongoHealth(); assertHealth(health); } @Test void strictV1Api() { Health health = mongoHealth(ServerApi.builder().strict(true).version(ServerApiVersion.V1).build()); assertHealth(health); } private Health mongoHealth() { return mongoHealth(null); } private Health mongoHealth(@Nullable ServerApi serverApi) { Builder settingsBuilder = MongoClientSettings.builder() .applyConnectionString(new ConnectionString(mongo.getConnectionString())); if (serverApi != null) { settingsBuilder.serverApi(serverApi); } MongoClientSettings settings = settingsBuilder.build(); MongoClient mongoClient = MongoClients.create(settings); MongoReactiveHealthIndicator healthIndicator = new MongoReactiveHealthIndicator(mongoClient); Health health = healthIndicator.health(true).block(Duration.ofSeconds(30)); assertThat(health).isNotNull(); return health; } private void assertHealth(Health health) { assertThat(health.getStatus()).isEqualTo(Status.UP); assertThat(health.getDetails()).containsKey("maxWireVersion"); assertThat(health.getDetails()).hasEntrySatisfying("databases", (databases) -> assertThat(databases).asInstanceOf(InstanceOfAssertFactories.LIST) .containsExactlyInAnyOrder("local", "admin", "config")); } }
MongoReactiveHealthIndicatorIntegrationTests
java
elastic__elasticsearch
x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java
{ "start": 16257, "end": 18354 }
class ____ implements SearchStats { @Override public boolean exists(FieldName field) { return true; } @Override public boolean isIndexed(FieldName field) { return exists(field); } @Override public boolean hasDocValues(FieldName field) { return exists(field); } @Override public boolean hasExactSubfield(FieldName field) { return exists(field); } @Override public boolean supportsLoaderConfig( FieldName name, BlockLoaderFunctionConfig config, MappedFieldType.FieldExtractPreference preference ) { return true; } @Override public long count() { return -1; } @Override public long count(FieldName field) { return exists(field) ? -1 : 0; } @Override public long count(FieldName field, BytesRef value) { return exists(field) ? -1 : 0; } @Override public Object min(FieldName field) { return null; } @Override public Object max(FieldName field) { return null; } @Override public boolean isSingleValue(FieldName field) { return false; } @Override public boolean canUseEqualityOnSyntheticSourceDelegate(FieldName name, String value) { return false; } } /** * This version of SearchStats can be preconfigured to return true/false for various combinations of the four field settings: * <ol> * <li>exists</li> * <li>isIndexed</li> * <li>hasDocValues</li> * <li>hasExactSubfield</li> * </ol> * The default will return true for all fields. The include/exclude methods can be used to configure the settings for specific fields. * If you call 'include' with no fields, it will switch to return false for all fields. */ public static
TestSearchStats
java
apache__camel
components/camel-beanio/src/test/java/org/apache/camel/dataformat/beanio/MyErrorHandler.java
{ "start": 898, "end": 1501 }
class ____ extends BeanIOErrorHandler { public MyErrorHandler() { } @Override public void invalidRecord(InvalidRecordException ex) throws Exception { String id = getExchange().getExchangeId(); String line = "ExchangeId: " + id + " Invalid record: " + ex.getMessage() + ": " + ex.getRecordContext().getRecordText(); LOG.warn(line); // lets handle the error and store to the results a dummy error DTO MyErrorDto dto = new MyErrorDto(ex.getRecordName(), ex.getMessage()); handleErrorAndAddAsResult(dto); } }
MyErrorHandler
java
apache__dubbo
dubbo-common/src/main/java/org/apache/dubbo/common/logger/log4j/Log4jLoggerAdapter.java
{ "start": 1162, "end": 5517 }
class ____ implements LoggerAdapter { public static final String NAME = "log4j"; private File file; @SuppressWarnings("unchecked") public Log4jLoggerAdapter() { try { org.apache.log4j.Logger logger = LogManager.getRootLogger(); if (logger != null) { Enumeration<Appender> appenders = logger.getAllAppenders(); if (appenders != null) { while (appenders.hasMoreElements()) { Appender appender = appenders.nextElement(); if (appender instanceof FileAppender) { FileAppender fileAppender = (FileAppender) appender; String filename = fileAppender.getFile(); file = new File(filename); break; } } } } } catch (Exception t) { // ignore } } private static org.apache.log4j.Level toLog4jLevel(Level level) { if (level == Level.ALL) { return org.apache.log4j.Level.ALL; } if (level == Level.TRACE) { return org.apache.log4j.Level.TRACE; } if (level == Level.DEBUG) { return org.apache.log4j.Level.DEBUG; } if (level == Level.INFO) { return org.apache.log4j.Level.INFO; } if (level == Level.WARN) { return org.apache.log4j.Level.WARN; } if (level == Level.ERROR) { return org.apache.log4j.Level.ERROR; } // if (level == Level.OFF) return org.apache.log4j.Level.OFF; } private static Level fromLog4jLevel(org.apache.log4j.Level level) { if (level == org.apache.log4j.Level.ALL) { return Level.ALL; } if (level == org.apache.log4j.Level.TRACE) { return Level.TRACE; } if (level == org.apache.log4j.Level.DEBUG) { return Level.DEBUG; } if (level == org.apache.log4j.Level.INFO) { return Level.INFO; } if (level == org.apache.log4j.Level.WARN) { return Level.WARN; } if (level == org.apache.log4j.Level.ERROR) { return Level.ERROR; } // if (level == org.apache.log4j.Level.OFF) return Level.OFF; } @Override public Logger getLogger(Class<?> key) { return new Log4jLogger(LogManager.getLogger(key)); } @Override public Logger getLogger(String key) { return new Log4jLogger(LogManager.getLogger(key)); } @Override public Logger getLogger(String fqcn, Class<?> key) { return new Log4jLogger(fqcn, 
LogManager.getLogger(key)); } @Override public Logger getLogger(String fqcn, String key) { return new Log4jLogger(fqcn, LogManager.getLogger(key)); } @Override public Level getLevel() { return fromLog4jLevel(LogManager.getRootLogger().getLevel()); } @Override public void setLevel(Level level) { LogManager.getRootLogger().setLevel(toLog4jLevel(level)); } @Override public File getFile() { return file; } @Override public void setFile(File file) { // ignore } @Override public boolean isConfigured() { boolean hasAppender = false; try { org.apache.log4j.Logger logger = LogManager.getRootLogger(); if (logger != null) { Enumeration<Appender> appenders = logger.getAllAppenders(); if (appenders != null) { while (appenders.hasMoreElements()) { hasAppender = true; Appender appender = appenders.nextElement(); if (appender instanceof FileAppender) { FileAppender fileAppender = (FileAppender) appender; String filename = fileAppender.getFile(); file = new File(filename); break; } } } } } catch (Exception t) { // ignore } return hasAppender; } }
Log4jLoggerAdapter
java
apache__kafka
clients/src/test/java/org/apache/kafka/common/security/oauthbearer/internals/OAuthBearerSaslServerTest.java
{ "start": 2694, "end": 11232 }
class ____ { private static final String USER = "user"; private static final String JAAS_CONFIG_TEXT = "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule Required" + " unsecuredLoginStringClaim_sub=\"" + USER + "\";"; private static final Map<String, ?> CONFIGS = Map.of(SaslConfigs.SASL_JAAS_CONFIG, new Password(JAAS_CONFIG_TEXT)); private static final AuthenticateCallbackHandler LOGIN_CALLBACK_HANDLER; static { LOGIN_CALLBACK_HANDLER = new OAuthBearerUnsecuredLoginCallbackHandler(); LOGIN_CALLBACK_HANDLER.configure(CONFIGS, OAuthBearerLoginModule.OAUTHBEARER_MECHANISM, JaasContext.loadClientContext(CONFIGS).configurationEntries()); } private static final AuthenticateCallbackHandler VALIDATOR_CALLBACK_HANDLER; private static final AuthenticateCallbackHandler EXTENSIONS_VALIDATOR_CALLBACK_HANDLER; static { VALIDATOR_CALLBACK_HANDLER = new OAuthBearerUnsecuredValidatorCallbackHandler(); VALIDATOR_CALLBACK_HANDLER.configure(CONFIGS, OAuthBearerLoginModule.OAUTHBEARER_MECHANISM, JaasContext.loadClientContext(CONFIGS).configurationEntries()); // only validate extensions "firstKey" and "secondKey" EXTENSIONS_VALIDATOR_CALLBACK_HANDLER = new OAuthBearerUnsecuredValidatorCallbackHandler() { @Override public void handle(Callback[] callbacks) throws UnsupportedCallbackException { for (Callback callback : callbacks) { if (callback instanceof OAuthBearerValidatorCallback) { OAuthBearerValidatorCallback validationCallback = (OAuthBearerValidatorCallback) callback; validationCallback.token(new OAuthBearerTokenMock()); } else if (callback instanceof OAuthBearerExtensionsValidatorCallback) { OAuthBearerExtensionsValidatorCallback extensionsCallback = (OAuthBearerExtensionsValidatorCallback) callback; extensionsCallback.valid("firstKey"); extensionsCallback.valid("secondKey"); } else throw new UnsupportedCallbackException(callback); } } }; } private OAuthBearerSaslServer saslServer; @BeforeEach public void setUp() { saslServer = new 
OAuthBearerSaslServer(VALIDATOR_CALLBACK_HANDLER); } @Test public void noAuthorizationIdSpecified() throws Exception { byte[] nextChallenge = saslServer .evaluateResponse(clientInitialResponse(null)); // also asserts that no authentication error is thrown if OAuthBearerExtensionsValidatorCallback is not supported assertEquals(0, nextChallenge.length, "Next challenge is not empty"); } @Test public void negotiatedProperty() throws Exception { saslServer.evaluateResponse(clientInitialResponse(USER)); OAuthBearerToken token = (OAuthBearerToken) saslServer.getNegotiatedProperty("OAUTHBEARER.token"); assertNotNull(token); assertEquals(token.lifetimeMs(), saslServer.getNegotiatedProperty(SaslInternalConfigs.CREDENTIAL_LIFETIME_MS_SASL_NEGOTIATED_PROPERTY_KEY)); } /** * SASL Extensions that are validated by the callback handler should be accessible through the {@code #getNegotiatedProperty()} method */ @Test public void savesCustomExtensionAsNegotiatedProperty() throws Exception { Map<String, String> customExtensions = new HashMap<>(); customExtensions.put("firstKey", "value1"); customExtensions.put("secondKey", "value2"); byte[] nextChallenge = saslServer .evaluateResponse(clientInitialResponse(null, false, customExtensions)); assertEquals(0, nextChallenge.length, "Next challenge is not empty"); assertEquals("value1", saslServer.getNegotiatedProperty("firstKey")); assertEquals("value2", saslServer.getNegotiatedProperty("secondKey")); } /** * SASL Extensions that were not recognized (neither validated nor invalidated) * by the callback handler must not be accessible through the {@code #getNegotiatedProperty()} method */ @Test public void unrecognizedExtensionsAreNotSaved() throws Exception { saslServer = new OAuthBearerSaslServer(EXTENSIONS_VALIDATOR_CALLBACK_HANDLER); Map<String, String> customExtensions = new HashMap<>(); customExtensions.put("firstKey", "value1"); customExtensions.put("secondKey", "value1"); customExtensions.put("thirdKey", "value1"); byte[] 
nextChallenge = saslServer .evaluateResponse(clientInitialResponse(null, false, customExtensions)); assertEquals(0, nextChallenge.length, "Next challenge is not empty"); assertNull(saslServer.getNegotiatedProperty("thirdKey"), "Extensions not recognized by the server must be ignored"); } /** * If the callback handler handles the `OAuthBearerExtensionsValidatorCallback` * and finds an invalid extension, SaslServer should throw an authentication exception */ @Test public void throwsAuthenticationExceptionOnInvalidExtensions() { OAuthBearerUnsecuredValidatorCallbackHandler invalidHandler = new OAuthBearerUnsecuredValidatorCallbackHandler() { @Override public void handle(Callback[] callbacks) throws UnsupportedCallbackException { for (Callback callback : callbacks) { if (callback instanceof OAuthBearerValidatorCallback) { OAuthBearerValidatorCallback validationCallback = (OAuthBearerValidatorCallback) callback; validationCallback.token(new OAuthBearerTokenMock()); } else if (callback instanceof OAuthBearerExtensionsValidatorCallback) { OAuthBearerExtensionsValidatorCallback extensionsCallback = (OAuthBearerExtensionsValidatorCallback) callback; extensionsCallback.error("firstKey", "is not valid"); extensionsCallback.error("secondKey", "is not valid either"); } else throw new UnsupportedCallbackException(callback); } } }; saslServer = new OAuthBearerSaslServer(invalidHandler); Map<String, String> customExtensions = new HashMap<>(); customExtensions.put("firstKey", "value"); customExtensions.put("secondKey", "value"); assertThrows(SaslAuthenticationException.class, () -> saslServer.evaluateResponse(clientInitialResponse(null, false, customExtensions))); } @Test public void authorizationIdEqualsAuthenticationId() throws Exception { byte[] nextChallenge = saslServer .evaluateResponse(clientInitialResponse(USER)); assertEquals(0, nextChallenge.length, "Next challenge is not empty"); } @Test public void authorizationIdNotEqualsAuthenticationId() { 
assertThrows(SaslAuthenticationException.class, () -> saslServer.evaluateResponse(clientInitialResponse(USER + "x"))); } @Test public void illegalToken() throws Exception { byte[] bytes = saslServer.evaluateResponse(clientInitialResponse(null, true, Collections.emptyMap())); String challenge = new String(bytes, StandardCharsets.UTF_8); assertEquals("{\"status\":\"invalid_token\"}", challenge); } private byte[] clientInitialResponse(String authorizationId) throws OAuthBearerConfigException, IOException, UnsupportedCallbackException { return clientInitialResponse(authorizationId, false, Collections.emptyMap()); } private byte[] clientInitialResponse(String authorizationId, boolean illegalToken, Map<String, String> customExtensions) throws OAuthBearerConfigException, IOException, UnsupportedCallbackException { OAuthBearerTokenCallback callback = new OAuthBearerTokenCallback(); LOGIN_CALLBACK_HANDLER.handle(new Callback[] {callback}); OAuthBearerToken token = callback.token(); String compactSerialization = token.value(); String tokenValue = compactSerialization + (illegalToken ? "AB" : ""); return new OAuthBearerClientInitialResponse(tokenValue, authorizationId, new SaslExtensions(customExtensions)).toBytes(); } }
OAuthBearerSaslServerTest
java
apache__flink
flink-formats/flink-protobuf/src/main/java/org/apache/flink/formats/protobuf/serialize/PbCodegenMapSerializer.java
{ "start": 1456, "end": 5220 }
class ____ implements PbCodegenSerializer { private final Descriptors.FieldDescriptor fd; private final MapType mapType; private final PbFormatContext formatContext; public PbCodegenMapSerializer( Descriptors.FieldDescriptor fd, MapType mapType, PbFormatContext formatContext) { this.fd = fd; this.mapType = mapType; this.formatContext = formatContext; } @Override public String codegen(String resultVar, String flinkObjectCode, int indent) throws PbCodegenException { // The type of flinkObjectCode is a MapData of flink, // it should be converted to map of protobuf as resultVariable. PbCodegenVarId varUid = PbCodegenVarId.getInstance(); int uid = varUid.getAndIncrement(); LogicalType keyType = mapType.getKeyType(); LogicalType valueType = mapType.getValueType(); Descriptors.FieldDescriptor keyFd = fd.getMessageType().findFieldByName(PbConstant.PB_MAP_KEY_NAME); Descriptors.FieldDescriptor valueFd = fd.getMessageType().findFieldByName(PbConstant.PB_MAP_VALUE_NAME); PbCodegenAppender appender = new PbCodegenAppender(indent); String keyProtoTypeStr = PbCodegenUtils.getTypeStrFromProto(keyFd, false); String valueProtoTypeStr = PbCodegenUtils.getTypeStrFromProto(valueFd, false); String flinkKeyArrDataVar = "keyArrData" + uid; String flinkValueArrDataVar = "valueArrData" + uid; String iVar = "i" + uid; String pbMapVar = "resultPbMap" + uid; String keyPbVar = "keyPbVar" + uid; String valuePbVar = "valuePbVar" + uid; appender.appendLine( "ArrayData " + flinkKeyArrDataVar + " = " + flinkObjectCode + ".keyArray()"); appender.appendLine( "ArrayData " + flinkValueArrDataVar + " = " + flinkObjectCode + ".valueArray()"); appender.appendLine( "Map<" + keyProtoTypeStr + ", " + valueProtoTypeStr + "> " + pbMapVar + " = new HashMap()"); appender.begin( "for(int " + iVar + " = 0; " + iVar + " < " + flinkKeyArrDataVar + ".size(); " + iVar + "++){"); // process key String convertFlinkKeyArrayElementToPbCode = PbCodegenUtils.convertFlinkArrayElementToPbWithDefaultValueCode( 
flinkKeyArrDataVar, iVar, keyPbVar, keyFd, keyType, formatContext, appender.currentIndent()); appender.appendSegment(convertFlinkKeyArrayElementToPbCode); // process value String convertFlinkValueArrayElementToPbCode = PbCodegenUtils.convertFlinkArrayElementToPbWithDefaultValueCode( flinkValueArrDataVar, iVar, valuePbVar, valueFd, valueType, formatContext, appender.currentIndent()); appender.appendSegment(convertFlinkValueArrayElementToPbCode); appender.appendLine(pbMapVar + ".put(" + keyPbVar + ", " + valuePbVar + ")"); appender.end("}"); appender.appendLine(resultVar + " = " + pbMapVar); return appender.code(); } }
PbCodegenMapSerializer
java
apache__dubbo
dubbo-common/src/main/java/org/apache/dubbo/common/extension/AdaptiveClassCodeGenerator.java
{ "start": 4243, "end": 5626 }
class ____ there's no adaptive method found. if (!hasAdaptiveMethod()) { throw new IllegalStateException("No adaptive method exist on extension " + type.getName() + ", refuse to create the adaptive class!"); } StringBuilder code = new StringBuilder(); code.append(generatePackageInfo()); code.append(generateImports()); code.append(generateClassDeclaration()); Method[] methods = type.getMethods(); if (sort) { Arrays.sort(methods, Comparator.comparing(Method::toString)); } for (Method method : methods) { code.append(generateMethod(method)); } code.append('}'); if (logger.isDebugEnabled()) { logger.debug(code.toString()); } return code.toString(); } /** * generate package info */ private String generatePackageInfo() { return String.format(CODE_PACKAGE, type.getPackage().getName()); } /** * generate imports */ private String generateImports() { StringBuilder builder = new StringBuilder(); builder.append(String.format(CODE_IMPORTS, ScopeModel.class.getName())); builder.append(String.format(CODE_IMPORTS, ScopeModelUtil.class.getName())); return builder.toString(); } /** * generate
since
java
spring-projects__spring-boot
module/spring-boot-rsocket/src/test/java/org/springframework/boot/rsocket/autoconfigure/RSocketRequesterAutoConfigurationTests.java
{ "start": 3258, "end": 3422 }
class ____ { @Bean MyRSocketRequesterBuilder myRSocketRequesterBuilder() { return mock(MyRSocketRequesterBuilder.class); } }
CustomRSocketRequesterBuilder
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/issue_1300/Issue1335.java
{ "start": 1499, "end": 1768 }
class ____ { public String id; public String title; public String url; public String type; public int optimalWidth; public int optimalHeight; public String original_save_url; public String phash; } }
Image
java
apache__kafka
raft/src/test/java/org/apache/kafka/raft/RaftEventSimulationTest.java
{ "start": 38088, "end": 40553 }
class ____ { final LogContext logContext; final int nodeId; final KafkaRaftClient<Integer> client; final MockLog log; final MockNetworkChannel channel; final MockMessageQueue messageQueue; final MockQuorumStateStore store; final ReplicatedCounter counter; final RecordSerde<Integer> intSerde; private RaftNode( int nodeId, KafkaRaftClient<Integer> client, MockLog log, MockNetworkChannel channel, MockMessageQueue messageQueue, MockQuorumStateStore store, LogContext logContext, Time time, Random random, RecordSerde<Integer> intSerde ) { this.logContext = logContext; this.nodeId = nodeId; this.client = client; this.log = log; this.channel = channel; this.messageQueue = messageQueue; this.store = store; this.counter = new ReplicatedCounter(nodeId, client, logContext); this.intSerde = intSerde; } void initialize(Map<Integer, InetSocketAddress> voterAddresses, Metrics metrics) { client.register(counter); client.initialize( voterAddresses, store, metrics, Mockito.mock(ExternalKRaftMetrics.class) ); } void poll() { try { do { client.poll(); } while (client.isRunning() && !messageQueue.isEmpty()); } catch (Exception e) { throw new RuntimeException("Uncaught exception during poll of node " + nodeId, e); } } long highWatermark() { return client.quorum().highWatermark() .map(LogOffsetMetadata::offset) .orElse(0L); } long logEndOffset() { return log.endOffset().offset(); } @Override public String toString() { return String.format( "Node(id=%s, hw=%s, logEndOffset=%s)", nodeId, highWatermark(), logEndOffset() ); } LogContext logContext() { return logContext; } } private record InflightRequest(int sourceId, Node destination) { } private
RaftNode
java
elastic__elasticsearch
server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java
{ "start": 1474, "end": 8459 }
class ____ extends ESIntegTestCase { public void testSimple() { assertAcked(prepareCreate("test")); ensureGreen(); prepareIndex("test").setId("1").setSource("field", "value").get(); refresh(); assertResponses(response -> { assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).getVersion(), notNullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); }, prepareSearch("test").storedFields("_none_").setFetchSource(false).setVersion(true), prepareSearch("test").storedFields("_none_") ); } public void testInnerHits() { assertAcked(prepareCreate("test").setMapping("nested", "type=nested")); ensureGreen(); prepareIndex("test").setId("1").setSource("field", "value", "nested", Collections.singletonMap("title", "foo")).get(); refresh(); assertResponse( prepareSearch("test").storedFields("_none_") .setFetchSource(false) .setQuery( new NestedQueryBuilder("nested", new TermQueryBuilder("nested.title", "foo"), ScoreMode.Total).innerHit( new InnerHitBuilder().setStoredFieldNames(Collections.singletonList("_none_")) .setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE) ) ), response -> { assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits hits = response.getHits().getAt(0).getInnerHits().get("nested"); assertThat(hits.getTotalHits().value(), equalTo(1L)); assertThat(hits.getAt(0).getId(), nullValue()); assertThat(hits.getAt(0).getSourceAsString(), nullValue()); } ); } public void testWithRouting() { assertAcked(prepareCreate("test")); ensureGreen(); prepareIndex("test").setId("1").setSource("field", "value").setRouting("toto").get(); refresh(); assertResponse(prepareSearch("test"), response -> { assertThat(response.getHits().getAt(0).getId(), notNullValue()); 
assertThat(response.getHits().getAt(0).field("_routing"), notNullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); }); assertResponse(prepareSearch("test").storedFields("_none_").setFetchSource(false), response -> { assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).field("_routing"), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); }); assertResponse(prepareSearch("test").storedFields("_none_"), response -> { assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); }); GetResponse getResponse = client().prepareGet("test", "1").setRouting("toto").get(); assertTrue(getResponse.isExists()); assertEquals("toto", getResponse.getFields().get("_routing").getValue()); } public void testWithIgnored() { assertAcked(prepareCreate("test").setMapping("ip", "type=ip,ignore_malformed=true")); ensureGreen(); prepareIndex("test").setId("1").setSource("ip", "value").get(); refresh(); assertResponse(prepareSearch("test"), response -> { assertThat(response.getHits().getAt(0).getId(), notNullValue()); assertThat(response.getHits().getAt(0).field("_ignored").getValue(), equalTo("ip")); assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); }); assertResponse(prepareSearch("test").storedFields("_none_"), response -> { assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).field("_ignored"), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); }); { GetResponse getResponse = client().prepareGet("test", "1").get(); assertTrue(getResponse.isExists()); assertThat(getResponse.getField("_ignored"), nullValue()); } { GetResponse getResponse = client().prepareGet("test", "1").setStoredFields("_ignored").get(); assertTrue(getResponse.isExists()); assertEquals("ip", 
getResponse.getField("_ignored").getValue()); } } public void testInvalid() { assertAcked(prepareCreate("test")); ensureGreen(); indexDoc("test", "1", "field", "value"); refresh(); { ValidationException exc = expectThrows( ValidationException.class, prepareSearch("test").setFetchSource(true).storedFields("_none_") ); assertThat(exc.getMessage(), containsString("[stored_fields] cannot be disabled if [_source] is requested")); } { ValidationException exc = expectThrows( ValidationException.class, prepareSearch("test").storedFields("_none_").addFetchField("field") ); assertThat(exc.getMessage(), containsString("[stored_fields] cannot be disabled when using the [fields] option")); } { IllegalArgumentException exc = expectThrows( IllegalArgumentException.class, () -> prepareSearch("test").storedFields("_none_", "field1") ); assertThat(exc.getMessage(), equalTo("cannot combine _none_ with other fields")); } { IllegalArgumentException exc = expectThrows( IllegalArgumentException.class, () -> prepareSearch("test").storedFields("_none_").storedFields("field1") ); assertThat(exc.getMessage(), equalTo("cannot combine _none_ with other fields")); } } public void testFetchId() { assertAcked(prepareCreate("test")); ensureGreen(); prepareIndex("test").setId("1").setSource("field", "value").get(); refresh(); assertResponse(prepareSearch("test").addFetchField("_id"), response -> { assertEquals(1, response.getHits().getHits().length); assertEquals("1", response.getHits().getAt(0).getId()); assertEquals("1", response.getHits().getAt(0).field("_id").getValue()); }); } }
MetadataFetchingIT
java
quarkusio__quarkus
independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/Qualifiers.java
{ "start": 6235, "end": 6627 }
class ____ implements BiFunction<Class<? extends Annotation>, Integer, Integer> { private static final TimesSeenBiFunction INSTANCE = new TimesSeenBiFunction(); private TimesSeenBiFunction() { } @Override public Integer apply(Class<? extends Annotation> k, Integer v) { return (v == null) ? 1 : (v + 1); } } }
TimesSeenBiFunction
java
apache__flink
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlAlterTableReset.java
{ "start": 1465, "end": 2600 }
class ____ extends SqlAlterTable { private final SqlNodeList propertyKeyList; public SqlAlterTableReset( SqlParserPos pos, SqlIdentifier tableName, SqlNodeList propertyKeyList, boolean ifTableExists) { super(pos, tableName, null, ifTableExists); this.propertyKeyList = requireNonNull(propertyKeyList, "propertyKeyList should not be null"); } @Override public List<SqlNode> getOperandList() { return ImmutableNullableList.of(tableIdentifier, propertyKeyList); } public SqlNodeList getPropertyKeyList() { return propertyKeyList; } public Set<String> getResetKeys() { return propertyKeyList.getList().stream() .map(SqlParseUtils::extractString) .collect(Collectors.toSet()); } @Override public void unparseAlterOperation(SqlWriter writer, int leftPrec, int rightPrec) { super.unparseAlterOperation(writer, leftPrec, rightPrec); SqlUnparseUtils.unparseResetOptions(propertyKeyList, writer, leftPrec, rightPrec); } }
SqlAlterTableReset
java
quarkusio__quarkus
extensions/reactive-routes/deployment/src/test/java/io/quarkus/vertx/web/compress/CompressionTest.java
{ "start": 706, "end": 2371 }
class ____ { @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .withApplicationRoot(root -> root .addClasses(MyRoutes.class) .addAsManifestResource(new StringAsset(MyRoutes.MESSAGE), "resources/file.txt") .addAsManifestResource(new StringAsset(MyRoutes.MESSAGE), "resources/my.doc")) .overrideConfigKey("quarkus.http.enable-compression", "true"); @Test public void testRoutes() { assertCompressed("/compressed"); assertUncompressed("/uncompressed"); assertCompressed("/compressed-content-type"); assertUncompressed("/uncompressed-content-type"); assertCompressed("/content-type-implicitly-compressed"); assertCompressed("/content-type-with-param-implicitly-compressed"); assertUncompressed("/content-type-implicitly-uncompressed"); assertCompressed("/compression-disabled-manually"); assertCompressed("/file.txt"); assertUncompressed("/my.doc"); } private void assertCompressed(String path) { String bodyStr = get(path).then().statusCode(200).header("Content-Encoding", "gzip").extract().asString(); assertEquals("Hello compression!", bodyStr); } private void assertUncompressed(String path) { ExtractableResponse<Response> response = get(path) .then().statusCode(200).extract(); assertTrue(response.header("Content-Encoding") == null, response.headers().toString()); assertEquals(MyRoutes.MESSAGE, response.asString()); } @ApplicationScoped public static
CompressionTest
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/nullness/ReturnMissingNullableTest.java
{ "start": 47258, "end": 47825 }
class ____ { static Object NULL = null; Object get() { return NULL; } } """) .doTest(); } @Test public void negativeCases_polyNull() { createCompilationTestHelper() .addSourceLines( "com/google/errorprone/bugpatterns/nullness/LiteralNullReturnTest.java", """ package com.google.errorprone.bugpatterns.nullness; import org.checkerframework.checker.nullness.qual.PolyNull; public
LiteralNullReturnTest
java
google__guava
android/guava/src/com/google/common/base/FinalizableReferenceQueue.java
{ "start": 10257, "end": 10502 }
class ____ null if this loader shouldn't or can't load it. * * @throws SecurityException if we don't have the appropriate privileges */ @Nullable Class<?> loadFinalizer(); } /** * Tries to load Finalizer from the system
or
java
eclipse-vertx__vert.x
vertx-core/src/main/java/io/vertx/core/eventbus/impl/clustered/DefaultNodeSelector.java
{ "start": 1205, "end": 1418 }
interface ____<T> { Op<String> SEND = RoundRobinSelector::selectForSend; Op<Iterable<String>> PUBLISH = RoundRobinSelector::selectForPublish; T select(RoundRobinSelector selector); } private static
Op
java
apache__rocketmq
controller/src/main/java/org/apache/rocketmq/controller/impl/event/CleanBrokerDataEvent.java
{ "start": 885, "end": 1972 }
class ____ implements EventMessage { private String brokerName; private Set<Long> brokerIdSetToClean; public CleanBrokerDataEvent(String brokerName, Set<Long> brokerIdSetToClean) { this.brokerName = brokerName; this.brokerIdSetToClean = brokerIdSetToClean; } public String getBrokerName() { return brokerName; } public void setBrokerName(String brokerName) { this.brokerName = brokerName; } public void setBrokerIdSetToClean(Set<Long> brokerIdSetToClean) { this.brokerIdSetToClean = brokerIdSetToClean; } public Set<Long> getBrokerIdSetToClean() { return brokerIdSetToClean; } /** * Returns the event type of this message */ @Override public EventType getEventType() { return EventType.CLEAN_BROKER_DATA_EVENT; } @Override public String toString() { return "CleanBrokerDataEvent{" + "brokerName='" + brokerName + '\'' + ", brokerIdSetToClean=" + brokerIdSetToClean + '}'; } }
CleanBrokerDataEvent
java
quarkusio__quarkus
extensions/picocli/deployment/src/main/java/io/quarkus/picocli/deployment/PicocliProcessor.java
{ "start": 1384, "end": 2843 }
class ____ { @BuildStep FeatureBuildItem feature() { return new FeatureBuildItem(Feature.PICOCLI); } @BuildStep void addScopeToCommands(BuildProducer<AutoAddScopeBuildItem> autoAddScope) { // First add @Dependent to all classes annotated with @Command that: // (a) require container services autoAddScope.produce(AutoAddScopeBuildItem.builder() .isAnnotatedWith(DotName.createSimple(CommandLine.Command.class.getName())) .requiresContainerServices() .defaultScope(BuiltinScope.DEPENDENT) .priority(20) .unremovable() .build()); // (b) or declare a single constructor with at least one parameter autoAddScope.produce(AutoAddScopeBuildItem.builder() .match((clazz, annotations, index) -> { List<MethodInfo> constructors = clazz.methods().stream().filter(m -> m.name().equals(MethodDescriptor.INIT)) .collect(Collectors.toList()); return constructors.size() == 1 && constructors.get(0).parametersCount() > 0; }) .isAnnotatedWith(DotName.createSimple(CommandLine.Command.class.getName())) .defaultScope(BuiltinScope.DEPENDENT) .priority(10) .unremovable() .build()); // Also add @Dependent to any
PicocliProcessor
java
quarkusio__quarkus
integration-tests/injectmock/src/test/java/io/quarkus/it/mockbean/WithSpiesTest.java
{ "start": 399, "end": 2602 }
class ____ { @InjectSpy(convertScopes = true) CapitalizerService capitalizerService; @InjectSpy MessageService messageService; @InjectSpy SuffixService suffixService; @InjectSpy @Named("first") DummyService firstDummyService; @InjectSpy @Named("second") DummyService secondDummyService; @Test @DisplayName("Verify default Greeting values are returned from Spied objects") public void testGreet() { given() .when().get("/greeting") .then() .statusCode(200) .body(is("HELLO")); Mockito.verify(capitalizerService, Mockito.times(1)).capitalize(Mockito.eq("hello")); Mockito.verify(messageService, Mockito.times(1)).getMessage(); Mockito.verify(suffixService, Mockito.times(1)).getSuffix(); } @Test @DisplayName("Verify default Dummy values are returned from Spied objects") public void testDummy() { given() .when().get("/dummy") .then() .statusCode(200) .body(is("first/second")); Mockito.verify(firstDummyService, Mockito.times(1)).returnDummyValue(); Mockito.verify(secondDummyService, Mockito.times(1)).returnDummyValue(); } @Test @DisplayName("Verify we can override default Greeting values are returned from Spied objects") public void testOverrideGreet() { Mockito.when(messageService.getMessage()).thenReturn("hi"); Mockito.when(suffixService.getSuffix()).thenReturn("!"); given() .when().get("/greeting") .then() .statusCode(200) .body(is("HI!")); } @Test @DisplayName("Verify can override default Dummy values are returned from Spied objects") public void testOverrideDummy() { Mockito.when(firstDummyService.returnDummyValue()).thenReturn("1"); Mockito.when(secondDummyService.returnDummyValue()).thenReturn("2"); given() .when().get("/dummy") .then() .statusCode(200) .body(is("1/2")); } @Nested
WithSpiesTest
java
spring-projects__spring-framework
spring-core/src/test/java/org/springframework/core/annotation/AnnotatedElementUtilsTests.java
{ "start": 59373, "end": 59552 }
interface ____ extends SubNonInheritedAnnotationInterface { } @ConventionBasedComposedContextConfig(locations = "explicitDeclaration") static
SubSubNonInheritedAnnotationInterface
java
google__guava
android/guava-testlib/src/com/google/common/collect/testing/AbstractCollectionTestSuiteBuilder.java
{ "start": 2125, "end": 2362 }
class ____< B extends AbstractCollectionTestSuiteBuilder<B, E>, E> extends PerCollectionSizeTestSuiteBuilder<B, TestCollectionGenerator<E>, Collection<E>, E> { @SuppressWarnings("rawtypes") //
AbstractCollectionTestSuiteBuilder
java
elastic__elasticsearch
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissionsTests.java
{ "start": 1015, "end": 12912 }
class ____ extends ESTestCase { public void test() { FieldPermissions fieldPermissions = FieldPermissions.DEFAULT; fieldPermissions = fieldPermissions.limitFieldPermissions( new FieldPermissions(fieldPermissionDef(new String[] { "f1", "f2" }, new String[] { "" })) ); assertThat(fieldPermissions.grantsAccessTo("f1"), is(true)); assertThat(fieldPermissions.grantsAccessTo("f2"), is(true)); } public void testFieldPermissionsIntersection() { final FieldPermissions fieldPermissions = FieldPermissions.DEFAULT; final FieldPermissions fieldPermissions1 = new FieldPermissions( fieldPermissionDef(new String[] { "f1", "f2", "f3*" }, new String[] { "f3" }) ); final FieldPermissions fieldPermissions2 = new FieldPermissions( fieldPermissionDef(new String[] { "f1", "f3*", "f4" }, new String[] { "f3" }) ); { FieldPermissions result = fieldPermissions.limitFieldPermissions(randomFrom(FieldPermissions.DEFAULT, null)); assertThat(result, is(notNullValue())); assertThat(result, IsSame.sameInstance(FieldPermissions.DEFAULT)); } { FieldPermissions result = fieldPermissions1.limitFieldPermissions(FieldPermissions.DEFAULT); assertThat(result, is(notNullValue())); assertThat(result, not(same(fieldPermissions))); assertThat(result, not(same(fieldPermissions1))); CharacterRunAutomaton automaton = new CharacterRunAutomaton(result.getIncludeAutomaton()); assertThat(automaton.run("f1"), is(true)); assertThat(automaton.run("f2"), is(true)); assertThat(automaton.run("f3"), is(false)); assertThat(automaton.run("f31"), is(true)); assertThat(automaton.run("f4"), is(false)); } { FieldPermissions result = fieldPermissions1.limitFieldPermissions(fieldPermissions2); assertThat(result, is(notNullValue())); assertThat(result, not(same(fieldPermissions1))); assertThat(result, not(same(fieldPermissions2))); CharacterRunAutomaton automaton = new CharacterRunAutomaton(result.getIncludeAutomaton()); assertThat(automaton.run("f1"), is(true)); assertThat(automaton.run("f2"), is(false)); 
assertThat(automaton.run("f3"), is(false)); assertThat(automaton.run("f31"), is(true)); assertThat(automaton.run("f4"), is(false)); } { FieldPermissions result = fieldPermissions.limitFieldPermissions(fieldPermissions2); assertThat(result, is(notNullValue())); assertThat(result, not(same(fieldPermissions1))); assertThat(result, not(same(fieldPermissions2))); CharacterRunAutomaton automaton = new CharacterRunAutomaton(result.getIncludeAutomaton()); assertThat(automaton.run("f1"), is(true)); assertThat(automaton.run("f2"), is(false)); assertThat(automaton.run("f3"), is(false)); assertThat(automaton.run("f31"), is(true)); assertThat(automaton.run("f4"), is(true)); assertThat(automaton.run("f5"), is(false)); } } public void testMultipleLimiting() { // Basic test for a number of permission definitions FieldPermissions fieldPermissions = FieldPermissions.DEFAULT; final int nSets = randomIntBetween(2, 8); final FieldPermissionsDefinition fieldPermissionsDefinition = fieldPermissionDef( new String[] { "f1", "f2", "f3*" }, new String[] { "f3" } ); for (int i = 0; i < nSets; i++) { fieldPermissions = fieldPermissions.limitFieldPermissions(new FieldPermissions(fieldPermissionsDefinition)); } final List<FieldPermissionsDefinition> fieldPermissionsDefinitions = fieldPermissions.getFieldPermissionsDefinitions(); assertNonNullFieldPermissionDefinitions(fieldPermissionsDefinitions, nSets); fieldPermissionsDefinitions.forEach(fpd -> assertThat(fpd, equalTo(fieldPermissionsDefinition))); assertThat(fieldPermissions.grantsAccessTo(randomFrom("f1", "f2", "f31")), is(true)); assertThat(fieldPermissions.grantsAccessTo("f3"), is(false)); // More realistic intersection fieldPermissions = FieldPermissions.DEFAULT; fieldPermissions = fieldPermissions.limitFieldPermissions( new FieldPermissions(fieldPermissionDef(new String[] { "f1", "f2", "f3*", "f4*" }, new String[] { "f3" })) ); fieldPermissions = fieldPermissions.limitFieldPermissions( new FieldPermissions(fieldPermissionDef(new String[] 
{ "f2", "f3*", "f4*", "f5*" }, new String[] { "f4" })) ); fieldPermissions = fieldPermissions.limitFieldPermissions( new FieldPermissions(fieldPermissionDef(new String[] { "f3*", "f4*", "f5*", "f6" }, new String[] { "f5" })) ); assertNonNullFieldPermissionDefinitions(fieldPermissions.getFieldPermissionsDefinitions(), 3); assertThat(fieldPermissions.grantsAccessTo(randomFrom("f1", "f2", "f5", "f6") + randomAlphaOfLengthBetween(0, 10)), is(false)); assertThat(fieldPermissions.grantsAccessTo("f3"), is(false)); assertThat(fieldPermissions.grantsAccessTo("f4"), is(false)); assertThat(fieldPermissions.grantsAccessTo("f3" + randomAlphaOfLengthBetween(1, 10)), is(true)); assertThat(fieldPermissions.grantsAccessTo("f4" + randomAlphaOfLengthBetween(1, 10)), is(true)); } public void testMustHaveNonNullFieldPermissionsDefinition() { final FieldPermissions fieldPermissions0 = FieldPermissions.DEFAULT; assertNonNullFieldPermissionDefinitions(fieldPermissions0.getFieldPermissionsDefinitions()); expectThrows(NullPointerException.class, () -> new FieldPermissions(null)); expectThrows(NullPointerException.class, () -> new FieldPermissions(null, Automatons.MATCH_ALL)); final FieldPermissions fieldPermissions03 = randomFrom( FieldPermissions.DEFAULT, new FieldPermissions(fieldPermissionDef(new String[] { "f1", "f2", "f3*" }, new String[] { "f3" })) ); assertNonNullFieldPermissionDefinitions(fieldPermissions03.limitFieldPermissions(null).getFieldPermissionsDefinitions()); assertNonNullFieldPermissionDefinitions( fieldPermissions03.limitFieldPermissions(FieldPermissions.DEFAULT).getFieldPermissionsDefinitions() ); assertNonNullFieldPermissionDefinitions( fieldPermissions03.limitFieldPermissions( new FieldPermissions(fieldPermissionDef(new String[] { "f1", "f3*", "f4" }, new String[] { "f3" })) ).getFieldPermissionsDefinitions(), fieldPermissions03.hasFieldLevelSecurity() ? 
2 : 1 ); } public void testWriteCacheKeyWillDistinguishBetweenDefinitionAndLimitedByDefinition() throws IOException { // The overall same grant/except sets but are come from either: // 1. Just the definition // 2. Just the limited-by definition // 3. both // The cache key should differentiate between them // Just definition final BytesStreamOutput out0 = new BytesStreamOutput(); final FieldPermissions fieldPermissions0 = new FieldPermissions( new FieldPermissionsDefinition( Set.of( new FieldPermissionsDefinition.FieldGrantExcludeGroup(new String[] { "x*" }, new String[] { "x2" }), new FieldPermissionsDefinition.FieldGrantExcludeGroup(new String[] { "y*" }, new String[] { "y2" }), new FieldPermissionsDefinition.FieldGrantExcludeGroup(new String[] { "z*" }, new String[] { "z2" }) ) ) ); fieldPermissions0.buildCacheKey(out0, BytesReference::utf8ToString); // Mixed definition final BytesStreamOutput out1 = new BytesStreamOutput(); final FieldPermissions fieldPermissions1 = new FieldPermissions( new FieldPermissionsDefinition( Set.of( new FieldPermissionsDefinition.FieldGrantExcludeGroup(new String[] { "x*" }, new String[] { "x2" }), new FieldPermissionsDefinition.FieldGrantExcludeGroup(new String[] { "y*" }, new String[] { "y2" }) ) ) ).limitFieldPermissions(new FieldPermissions(fieldPermissionDef(new String[] { "z*" }, new String[] { "z2" }))); fieldPermissions1.buildCacheKey(out1, BytesReference::utf8ToString); // Another mixed definition final BytesStreamOutput out2 = new BytesStreamOutput(); final FieldPermissions fieldPermissions2 = new FieldPermissions( new FieldPermissionsDefinition( Set.of(new FieldPermissionsDefinition.FieldGrantExcludeGroup(new String[] { "x*" }, new String[] { "x2" })) ) ).limitFieldPermissions( new FieldPermissions( new FieldPermissionsDefinition( Set.of( new FieldPermissionsDefinition.FieldGrantExcludeGroup(new String[] { "y*" }, new String[] { "y2" }), new FieldPermissionsDefinition.FieldGrantExcludeGroup(new String[] { "z*" }, new 
String[] { "z2" }) ) ) ) ); fieldPermissions2.buildCacheKey(out2, BytesReference::utf8ToString); // Just limited by final BytesStreamOutput out3 = new BytesStreamOutput(); final FieldPermissions fieldPermissions3 = FieldPermissions.DEFAULT.limitFieldPermissions( new FieldPermissions( new FieldPermissionsDefinition( Set.of( new FieldPermissionsDefinition.FieldGrantExcludeGroup(new String[] { "x*" }, new String[] { "x2" }), new FieldPermissionsDefinition.FieldGrantExcludeGroup(new String[] { "y*" }, new String[] { "y2" }), new FieldPermissionsDefinition.FieldGrantExcludeGroup(new String[] { "z*" }, new String[] { "z2" }) ) ) ) ); fieldPermissions3.buildCacheKey(out3, BytesReference::utf8ToString); assertThat(Arrays.equals(BytesReference.toBytes(out0.bytes()), BytesReference.toBytes(out1.bytes())), is(false)); assertThat(Arrays.equals(BytesReference.toBytes(out0.bytes()), BytesReference.toBytes(out2.bytes())), is(false)); assertThat(Arrays.equals(BytesReference.toBytes(out1.bytes()), BytesReference.toBytes(out2.bytes())), is(false)); // Just limited by is the same as definition because limitFieldPermissions uses limited-by definition if the original // permission is match all assertThat(Arrays.equals(BytesReference.toBytes(out0.bytes()), BytesReference.toBytes(out3.bytes())), is(true)); } private static FieldPermissionsDefinition fieldPermissionDef(String[] granted, String[] denied) { return new FieldPermissionsDefinition(granted, denied); } private void assertNonNullFieldPermissionDefinitions(List<FieldPermissionsDefinition> fieldPermissionsDefinitions) { assertNonNullFieldPermissionDefinitions(fieldPermissionsDefinitions, 1); } private void assertNonNullFieldPermissionDefinitions(List<FieldPermissionsDefinition> fieldPermissionsDefinitions, int expectedSize) { assertThat(fieldPermissionsDefinitions, notNullValue()); assertThat(fieldPermissionsDefinitions, hasSize(expectedSize)); fieldPermissionsDefinitions.forEach(fieldPermissionsDefinition -> 
assertThat(fieldPermissionsDefinition, notNullValue())); } }
FieldPermissionsTests
java
spring-cloud__spring-cloud-gateway
spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/filter/factory/rewrite/ModifyRequestBodyGatewayFilterFactoryTests.java
{ "start": 3834, "end": 5674 }
class ____ { @Value("${test.uri}") String uri; @Bean public RouteLocator testRouteLocator(RouteLocatorBuilder builder) { return builder.routes() .route("test_modify_request_body", r -> r.order(-1) .host("**.modifyrequestbody.org") .filters(f -> f.modifyRequestBody(String.class, String.class, MediaType.APPLICATION_JSON_VALUE, (serverWebExchange, aVoid) -> { return Mono.just("modifyrequest"); })) .uri(uri)) .route("test_modify_request_body_empty", r -> r.order(-1) .host("**.modifyrequestbodyempty.org") .filters(f -> f.modifyRequestBody(String.class, String.class, MediaType.APPLICATION_JSON_VALUE, (serverWebExchange, body) -> { if (body == null) { return Mono.just("modifyrequest"); } return Mono.just(body.toUpperCase(Locale.ROOT)); })) .uri(uri)) .route("test_modify_request_body_to_large", r -> r.order(-1) .host("**.modifyrequestbodyemptytolarge.org") .filters(f -> f.modifyRequestBody(String.class, String.class, MediaType.APPLICATION_JSON_VALUE, (serverWebExchange, body) -> { return Mono.just( "tolarge-tolarge-tolarge-tolarge-tolarge-tolarge-tolarge-tolarge-tolarge-tolarge-tolarge-tolarge-tolarge-tolarge-tolarge-tolarge"); })) .uri(uri)) .route("test_modify_request_body_with_parameterizedtypereference", r -> r.order(-1) .host("**.modifyrequestbodyspacetounderscore.org") .filters(f -> f.modifyRequestBody(new ParameterizedTypeReference<String>() { }, new ParameterizedTypeReference<String>() { }, (swe, body) -> { return Mono.just(body.replaceAll(" ", "_").toUpperCase(Locale.ROOT)); })) .uri(uri)) .build(); } } }
TestConfig
java
redisson__redisson
redisson/src/main/java/org/redisson/spring/misc/BeanMethodInvoker.java
{ "start": 910, "end": 1493 }
class ____ extends ArgumentConvertingMethodInvoker implements InitializingBean { @Override public void afterPropertiesSet() throws Exception { prepare(); try { invoke(); } catch (InvocationTargetException ex) { if (ex.getTargetException() instanceof Exception) { throw (Exception) ex.getTargetException(); } if (ex.getTargetException() instanceof Error) { throw (Error) ex.getTargetException(); } throw ex; } } }
BeanMethodInvoker
java
quarkusio__quarkus
extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/CachedResultsProcessor.java
{ "start": 13866, "end": 14357 }
class ____ extends MultiBuildItem { private final CachedResultsInjectConfig config; private final String value; CachedResultsDifferentiator(CachedResultsInjectConfig config, String value) { this.config = config; this.value = value; } CachedResultsInjectConfig getConfig() { return config; } String getValue() { return value; } } static final
CachedResultsDifferentiator
java
google__truth
core/src/main/java/com/google/common/truth/ActualValueInference.java
{ "start": 47486, "end": 51950 }
class ____ pass the name in? private String className; InferenceClassVisitor(String methodNameToVisit) { super(Opcodes.ASM9); this.methodNameToVisit = methodNameToVisit; } @Override public void visit( int version, int access, String name, String signature, String superName, String[] interfaces) { className = name; } @Override public @Nullable MethodVisitor visitMethod( int access, String name, String desc, String signature, String[] exceptions) { /* * Each InferenceMethodVisitor instance may be used only once. Still, it might seem like we * can get away with creating a single instance at construction time. However, we know only * the name of the method that we're visiting, not its full signature, so we may need to visit * multiple methods with that name, each with a fresh visitor. */ return methodNameToVisit.equals(name) ? new InferenceMethodVisitor(access, className, name, desc, actualValueAtLine) : null; } } /* * TODO(cpovirk): Expand this, maybe based on data about the most common method calls passed to * assertThat(). */ private static final ImmutableSet<String> BORING_NAMES = ImmutableSet.of( // keep-sorted start "_build", "asList", "build", "collect", "copyOf", "create", "from", "get", "iterator", "listOf", "mapOf", "of", "setOf", "sortedMapOf", "sortedSetOf", "toArray", "toString", "valueOf" // keep-sorted end ); private static boolean isThatOrAssertThat(String owner, String name) { /* * TODO(cpovirk): Handle CustomSubjectBuilder. That requires looking at the type hierarchy, as * users always have an instance of a specific subtype. Also keep in mind that the that(...) * method might accept more than 1 parameter, like `that(className, methodName)` and/or that it * might have category-2 parameters. * * TODO(cpovirk): Handle custom assertThat methods. The challenges are similar. 
*/ return (owner.equals("com/google/common/truth/Truth") && name.equals("assertThat")) || (owner.equals("com/google/common/truth/StandardSubjectBuilder") && name.equals("that")) || (owner.equals("com/google/common/truth/SimpleSubjectBuilder") && name.equals("that")) || (owner.equals("com/google/common/truth/Expect") && name.equals("that")); } private static boolean isBoxing(String owner, String name, String desc) { return name.equals("valueOf") && PRIMITIVE_WRAPPERS.contains(owner) /* * Don't handle valueOf(String s[, int radix]). The valueOf support is really here for * autoboxing, as in "assertThat(primitive)," not for * "assertThat(Integer.valueOf(...))." Not that there's anything really *wrong* with * handling manual boxing of primitives -- good thing, since we can't distinguish the two -- * but we're not interested in handling the valueOf methods that *parse*. That's mainly * because there's a type conversion, so some assertions might succeed on a string and fail * on the parsed number (or vice versa). */ && !Type.getArgumentTypes(desc)[0].equals(Type.getType(String.class)); } private static final ImmutableSet<String> PRIMITIVE_WRAPPERS = ImmutableSet.of( "java/lang/Boolean", "java/lang/Byte", "java/lang/Character", "java/lang/Double", "java/lang/Float", "java/lang/Integer", "java/lang/Long", "java/lang/Short"); private static boolean isStatic(int access) { return isSet(access, Opcodes.ACC_STATIC); } /** * Returns {@code true} iff <b>all</b> bits in {@code bitmask} are set in {@code flags}. Trivially * returns {@code true} if {@code bitmask} is 0. */ private static boolean isSet(int flags, int bitmask) { return (flags & bitmask) == bitmask; } private static void closeQuietly(@Nullable InputStream stream) { if (stream == null) { return; } try { stream.close(); } catch (IOException e) { // TODO(cpovirk): Log a warning? } } private ActualValueInference() {} }
visitor
java
micronaut-projects__micronaut-core
http-server-tck/src/main/java/io/micronaut/http/server/tck/tests/ErrorHandlerFluxTest.java
{ "start": 4344, "end": 5832 }
class ____ { @Get("/flux-exception") Flux<String> fluxException() { throw new MyTestException("Cannot process request."); } @Get("/flux-single-exception") @SingleResult Flux<String> fluxSingleException() { throw new MyTestException("Cannot process request."); } @Get("/flux-single-error") @SingleResult Flux<String> fluxSingleError() { return Flux.error(new MyTestException("Cannot process request.")); } @Get("/flux-chunked-immediate-error") Flux<String> fluxChunkedImmediateError() { return Flux.error(new MyTestException("Cannot process request.")); } @Get("/flux-chunked-delayed-error") Flux<String> fluxChunkedDelayedError() { return Flux.just("1", "2", "3").handle((data, sink) -> { if (data.equals("3")) { sink.error(new MyTestException("Cannot process request.")); } else { sink.next(data); } }); } @Error(global = true) public HttpResponse<String> handleMyTestException(HttpRequest<?> request, MyTestException exception) { var error = "Your request is erroneous: " + exception.getMessage(); return HttpResponse.<String>status(HttpStatus.I_AM_A_TEAPOT, "Bad request") .body(error); } } static
ErrorController
java
apache__flink
flink-connectors/flink-connector-base/src/main/java/org/apache/flink/connector/base/table/AsyncSinkConnectorOptions.java
{ "start": 1190, "end": 2968 }
class ____ { public static final ConfigOption<Integer> MAX_BATCH_SIZE = ConfigOptions.key("sink.batch.max-size") .intType() .noDefaultValue() .withDescription( "Maximum number of elements that may be passed" + " in a batch to be written downstream."); public static final ConfigOption<Integer> MAX_IN_FLIGHT_REQUESTS = ConfigOptions.key("sink.requests.max-inflight") .intType() .noDefaultValue() .withDescription( "Request threshold for uncompleted requests before blocking new write requests."); public static final ConfigOption<Integer> MAX_BUFFERED_REQUESTS = ConfigOptions.key("sink.requests.max-buffered") .intType() .noDefaultValue() .withDescription( "Maximum number of buffered records before applying backpressure."); public static final ConfigOption<Long> FLUSH_BUFFER_SIZE = ConfigOptions.key("sink.flush-buffer.size") .longType() .noDefaultValue() .withDescription("Threshold value in bytes for writer buffer flushing."); public static final ConfigOption<Long> FLUSH_BUFFER_TIMEOUT = ConfigOptions.key("sink.flush-buffer.timeout") .longType() .noDefaultValue() .withDescription( "Threshold time in milliseconds for an element to be in a buffer" + " before being flushed."); }
AsyncSinkConnectorOptions
java
apache__flink
flink-datastream-api/src/main/java/org/apache/flink/datastream/api/stream/NonKeyedPartitionStream.java
{ "start": 4281, "end": 4502 }
interface ____<T> extends NonKeyedPartitionStream<T>, ProcessConfigurable<ProcessConfigurableAndNonKeyedPartitionStream<T>> {} /** * This
ProcessConfigurableAndNonKeyedPartitionStream
java
elastic__elasticsearch
libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/ConfigurationUtilsBridge.java
{ "start": 901, "end": 2351 }
class ____ { private ConfigurationUtilsBridge() {} public static TemplateScriptFactoryBridge compileTemplate( final String processorType, final String processorTag, final String propertyName, final String propertyValue, final ScriptServiceBridge bridgedScriptService ) { ScriptService scriptService = bridgedScriptService.toInternal(); final TemplateScript.Factory templateScriptFactory = ConfigurationUtils.compileTemplate( processorType, processorTag, propertyName, propertyValue, scriptService ); return TemplateScriptFactoryBridge.fromInternal(templateScriptFactory); } public static String readStringProperty( final String processorType, final String processorTag, final Map<String, Object> configuration, final String propertyName ) { return ConfigurationUtils.readStringProperty(processorType, processorTag, configuration, propertyName); } public static Boolean readBooleanProperty( final String processorType, final String processorTag, final Map<String, Object> configuration, final String propertyName, final boolean defaultValue ) { return ConfigurationUtils.readBooleanProperty(processorType, processorTag, configuration, propertyName, defaultValue); } }
ConfigurationUtilsBridge
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/query/criteria/JpaJsonTableColumnsNode.java
{ "start": 302, "end": 3019 }
interface ____ { /** * Like {@link #existsColumn(String, String)}, but uses the column name as JSON path expression. * * @return The {@link JpaJsonExistsNode} for the column */ JpaJsonExistsNode existsColumn(String columnName); /** * Defines a boolean column on the result type with the given name for which the value can be obtained * by invoking {@code json_exists} with the given JSON path. * * @return The {@link JpaJsonExistsNode} for the column */ JpaJsonExistsNode existsColumn(String columnName, String jsonPath); /** * Like {@link #queryColumn(String, String)}, but uses the column name as JSON path expression. * * @return The {@link JpaJsonQueryNode} for the column */ JpaJsonQueryNode queryColumn(String columnName); /** * Defines a string column on the result type with the given name for which the value can be obtained * by invoking {@code json_query} with the given JSON path. * * @return The {@link JpaJsonQueryNode} for the column */ JpaJsonQueryNode queryColumn(String columnName, String jsonPath); /** * Like {@link #valueColumn(String, Class, String)} but uses the column name as JSON path expression. * * @return The {@link JpaJsonValueNode} for the column */ <T> JpaJsonValueNode<T> valueColumn(String columnName, Class<T> type); /** * Defines a column on the result type with the given name and type for which the value can be obtained by the given JSON path expression. * * @return The {@link JpaJsonValueNode} for the column */ <T> JpaJsonValueNode<T> valueColumn(String columnName, Class<T> type, String jsonPath); /** * Like {@link #valueColumn(String, Class, String)} but uses the column name as JSON path expression. * * @return The {@link JpaJsonValueNode} for the column */ <T> JpaJsonValueNode<T> valueColumn(String columnName, JpaCastTarget<T> type); /** * Defines a column on the result type with the given name and type for which the value can be obtained by the given JSON path expression. 
* * @return The {@link JpaJsonValueNode} for the column */ <T> JpaJsonValueNode<T> valueColumn(String columnName, JpaCastTarget<T> type, String jsonPath); /** * Defines nested columns that are accessible by the given JSON path. * * @return a new columns node for the nested JSON path */ JpaJsonTableColumnsNode nested(String jsonPath); /** * Defines a long typed column on the result type with the given name which is set to the ordinality i.e. * the 1-based position of the processed element. Ordinality starts again at 1 within nested paths. * * @return {@code this} for method chaining */ JpaJsonTableColumnsNode ordinalityColumn(String columnName); }
JpaJsonTableColumnsNode
java
grpc__grpc-java
netty/src/main/java/io/grpc/netty/ProtocolNegotiators.java
{ "start": 33292, "end": 33846 }
class ____ { final String host; final int port; public HostPort(String host, int port) { this.host = host; this.port = port; } } /** * Returns a {@link ProtocolNegotiator} used for upgrading to HTTP/2 from HTTP/1.x. */ public static ProtocolNegotiator plaintextUpgrade() { return new PlaintextUpgradeProtocolNegotiator(); } public static ProtocolNegotiator.ClientFactory plaintextUpgradeClientFactory() { return new PlaintextUpgradeProtocolNegotiatorClientFactory(); } private static final
HostPort
java
spring-projects__spring-framework
spring-beans/src/main/java/org/springframework/beans/factory/ObjectFactory.java
{ "start": 845, "end": 1006 }
interface ____ typically used to encapsulate a generic factory which * returns a new instance (prototype) of some target object on each invocation. * * <p>This
is
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppMetrics.java
{ "start": 1084, "end": 3186 }
class ____ { final Resource resourcePreempted; final int numNonAMContainersPreempted; final int numAMContainersPreempted; private final Map<String, Long> resourceSecondsMap; private final Map<String, Long> preemptedResourceSecondsMap; private int totalAllocatedContainers; public RMAppMetrics(Resource resourcePreempted, int numNonAMContainersPreempted, int numAMContainersPreempted, Map<String, Long> resourceSecondsMap, Map<String, Long> preemptedResourceSecondsMap, int totalAllocatedContainers) { this.resourcePreempted = resourcePreempted; this.numNonAMContainersPreempted = numNonAMContainersPreempted; this.numAMContainersPreempted = numAMContainersPreempted; this.resourceSecondsMap = resourceSecondsMap; this.preemptedResourceSecondsMap = preemptedResourceSecondsMap; this.totalAllocatedContainers = totalAllocatedContainers; } public Resource getResourcePreempted() { return resourcePreempted; } public int getNumNonAMContainersPreempted() { return numNonAMContainersPreempted; } public int getNumAMContainersPreempted() { return numAMContainersPreempted; } public long getMemorySeconds() { return RMServerUtils.getOrDefault(resourceSecondsMap, ResourceInformation.MEMORY_MB.getName(), 0L); } public long getVcoreSeconds() { return RMServerUtils .getOrDefault(resourceSecondsMap, ResourceInformation.VCORES.getName(), 0L); } public long getPreemptedMemorySeconds() { return RMServerUtils.getOrDefault(preemptedResourceSecondsMap, ResourceInformation.MEMORY_MB.getName(), 0L); } public long getPreemptedVcoreSeconds() { return RMServerUtils.getOrDefault(preemptedResourceSecondsMap, ResourceInformation.VCORES.getName(), 0L); } public Map<String, Long> getResourceSecondsMap() { return resourceSecondsMap; } public Map<String, Long> getPreemptedResourceSecondsMap() { return preemptedResourceSecondsMap; } public int getTotalAllocatedContainers() { return totalAllocatedContainers; } }
RMAppMetrics
java
FasterXML__jackson-core
src/main/java/tools/jackson/core/filter/JsonPointerBasedFilter.java
{ "start": 322, "end": 3249 }
class ____ extends TokenFilter { protected final JsonPointer _pathToMatch; /** * If true include all array elements by ignoring the array index match and advancing * the JsonPointer to the next level * * @since 2.16 */ protected final boolean _includeAllElements; public JsonPointerBasedFilter(String ptrExpr) { this(JsonPointer.compile(ptrExpr), false); } /** * @param pathToMatch Content to extract */ public JsonPointerBasedFilter(JsonPointer pathToMatch) { this(pathToMatch, false); } /** * @param pathToMatch Content to extract * @param includeAllElements if true array indexes in <code>ptrExpr</code> are ignored * and all elements will be matched. default: false * * @since 2.16 */ public JsonPointerBasedFilter(JsonPointer pathToMatch, boolean includeAllElements) { _pathToMatch = pathToMatch; _includeAllElements = includeAllElements; } /** * Overridable factory method use for creating new instances by * default {@link #includeElement} and {@link #includeProperty} methods: * needs to be overridden if sub-classing this class. 
* * @param pathToMatch Remaining path for filter to match * @param includeAllElements Whether to just include all array elements * of matching Array-valued path automatically * * @return Filter constructed */ protected JsonPointerBasedFilter construct(JsonPointer pathToMatch, boolean includeAllElements) { return new JsonPointerBasedFilter(pathToMatch, includeAllElements); } @Override public TokenFilter includeElement(int index) { JsonPointer next; if (_includeAllElements && !_pathToMatch.mayMatchElement()) { next = _pathToMatch.tail(); } else { next = _pathToMatch.matchElement(index); } if (next == null) { return null; } if (next.matches()) { return TokenFilter.INCLUDE_ALL; } return construct(next, _includeAllElements); } @Override public TokenFilter includeProperty(String name) { JsonPointer next = _pathToMatch.matchProperty(name); if (next == null) { return null; } if (next.matches()) { return TokenFilter.INCLUDE_ALL; } return construct(next, _includeAllElements); } @Override public TokenFilter filterStartArray() { return this; } @Override public TokenFilter filterStartObject() { return this; } @Override protected boolean _includeScalar() { // should only occur for root-level scalars, path "/" return _pathToMatch.matches(); } @Override public String toString() { return "[JsonPointerFilter at: "+_pathToMatch+"]"; } }
JsonPointerBasedFilter
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/id/CompositeNestedGeneratedValueGenerator.java
{ "start": 2343, "end": 2950 }
interface ____ { /** * Given the incoming object, determine the context for injecting back its generated * id sub-values. * * @param session The current session * @param incomingObject The entity for which we are generating id * * @return The injection context */ Object locateGenerationContext(SharedSessionContractImplementor session, Object incomingObject); } /** * Contract for performing the actual sub-value generation, usually injecting it into the * determined {@linkplain GenerationContextLocator#locateGenerationContext context} */ public
GenerationContextLocator
java
spring-projects__spring-framework
spring-web/src/main/java/org/springframework/web/context/request/async/WebAsyncManager.java
{ "start": 20765, "end": 21049 }
enum ____ { /** No async processing in progress. */ NOT_STARTED, /** Async handling has started, but the result hasn't been set yet. */ ASYNC_PROCESSING, /** The result is set, and an async dispatch was performed, unless there is a network error. */ RESULT_SET } }
State
java
apache__flink
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ProcessTableFunctionTestUtils.java
{ "start": 27521, "end": 28372 }
class ____ extends AppendProcessTableFunctionBase { public void eval( Context ctx, @ArgumentHint({SET_SEMANTIC_TABLE, OPTIONAL_PARTITION_BY, REQUIRE_ON_TIME}) Row r) { final TimeContext<Long> timeCtx = ctx.timeContext(Long.class); collectEvalEvent(timeCtx, r); if (timeCtx.time() == 0) { collectCreateTimer(timeCtx, "t", timeCtx.time() + 1); } } public void onTimer(OnTimerContext ctx) { final TimeContext<Long> timeCtx = ctx.timeContext(Long.class); collectOnTimerEvent(ctx); if (ctx.currentTimer().equals("t")) { collectCreateTimer(timeCtx, "again", timeCtx.time() + 1); } } } /** Testing function. */ public static
OptionalPartitionOnTimeFunction
java
mapstruct__mapstruct
core/src/main/java/org/mapstruct/MappingConstants.java
{ "start": 3242, "end": 5208 }
class ____ { private ComponentModel() { } /** * The mapper uses no component model, instances are typically retrieved * via {@link org.mapstruct.factory.Mappers#getMapper(java.lang.Class)} * */ public static final String DEFAULT = "default"; /** * The generated mapper is an application-scoped CDI bean and can be retrieved via @Inject. * The annotations are either from {@code javax} or {@code jakarta}. * Priority have the {@code javax} annotations. * In case you want to only use Jakarta then use {@link #JAKARTA_CDI}. * * @see #JAKARTA_CDI */ public static final String CDI = "cdi"; /** * The generated mapper is a Spring bean and can be retrieved via @Autowired * */ public static final String SPRING = "spring"; /** * The generated mapper is annotated with @Named and @Singleton, and can be retrieved via @Inject. * The annotations are either from {@code javax.inject} or {@code jakarta.inject}. * Priority have the {@code javax.inject} annotations. * In case you want to only use Jakarta then use {@link #JAKARTA}. * * @see #JAKARTA */ public static final String JSR330 = "jsr330"; /** * The generated mapper is annotated with @Named and @Singleton, and can be retrieved via @Inject. * The annotations are from {@code jakarta.inject}. * In case you want to use {@code javax.inject} then use {@link #JSR330}. * * @see #JSR330 */ public static final String JAKARTA = "jakarta"; /** * The generated mapper is an application-scoped Jakarta CDI bean and can be retrieved via @Inject. * @see #CDI */ public static final String JAKARTA_CDI = "jakarta-cdi"; } }
ComponentModel
java
quarkusio__quarkus
extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/root/BuildProfileTest.java
{ "start": 4714, "end": 5164 }
class ____ implements ContainerResponseFilter { @Override public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext) throws IOException { responseContext.getHeaders().add("X-RF-3", "Value"); } } @UnlessBuildProperty(name = "some.prop2", stringValue = "v2") // won't be enabled because the value matches @Provider public static
ResponseFilter3
java
apache__camel
components/camel-telemetry/src/main/java/org/apache/camel/telemetry/decorators/Paho5SpanDecorator.java
{ "start": 927, "end": 1641 }
class ____ extends AbstractMessagingSpanDecorator { @Override public String getComponent() { return "paho-mqtt5"; } @Override protected String getDestination(Exchange exchange, Endpoint endpoint) { // when using toD for dynamic destination then extract from header String destination = exchange.getMessage().getHeader("CamelPahoMqtt5OverrideTopic", String.class); if (destination == null) { destination = super.getDestination(exchange, endpoint); } return destination; } @Override public String getComponentClassName() { return "org.apache.camel.component.paho.mqtt5.PahoMqtt5Component"; } }
Paho5SpanDecorator
java
quarkusio__quarkus
integration-tests/infinispan-cache-jpa/src/main/java/io/quarkus/it/infinispan/cache/jpa/Trainer.java
{ "start": 251, "end": 899 }
class ____ { private long id; private List<Pokemon> pokemons; public Trainer() { } public Trainer(Pokemon... pokemons) { this.pokemons = Arrays.asList(pokemons); } @Id @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "trainerSeq") public long getId() { return id; } public void setId(long id) { this.id = id; } @OneToMany @Cache(usage = CacheConcurrencyStrategy.READ_WRITE) public List<Pokemon> getPokemons() { return pokemons; } public void setPokemons(List<Pokemon> pokemons) { this.pokemons = pokemons; } }
Trainer
java
reactor__reactor-core
buildSrc/src/main/java/io/reactor/gradle/JavaConventions.java
{ "start": 1279, "end": 4863 }
class ____ implements Plugin<Project> { private final JavaToolchainService javaToolchains; @Inject JavaConventions(JavaToolchainService javaToolchains) { this.javaToolchains = javaToolchains; } @Override public void apply(Project project) { project.getPlugins().withType(JavaPlugin.class, plugin -> applyJavaConvention(project)); } private void applyJavaConvention(Project project) { project.afterEvaluate(p -> { p.getTasks() .withType(JavaCompile.class) .forEach(compileTask -> { compileTask.getJavaCompiler().set(javaToolchains.compilerFor(spec -> spec.getLanguageVersion().set(JavaLanguageVersion.of(25)))); compileTask.getOptions().setEncoding("UTF-8"); setJavaRelease(compileTask, project); List<String> compilerArgs = new ArrayList<>(compileTask.getOptions() .getCompilerArgs()); if (compileTask.getName().endsWith("TestJava")) { compilerArgs.add("-parameters"); } compilerArgs.addAll(Arrays.asList( "-Xlint:-varargs", // intentionally disabled "-Xlint:cast", "-Xlint:classfile", "-Xlint:dep-ann", "-Xlint:divzero", "-Xlint:empty", "-Xlint:finally", "-Xlint:overrides", "-Xlint:path", "-Xlint:-processing", "-Xlint:static", "-Xlint:try", "-Xlint:deprecation", "-Xlint:unchecked", "-Xlint:-serial",// intentionally disabled "-Xlint:-options",// intentionally disabled "-Xlint:-fallthrough",// intentionally disabled "-Xmaxerrs", "500", "-Xmaxwarns", "1000" )); compileTask.getOptions().setCompilerArgs(compilerArgs); }); }); project.afterEvaluate(p -> { p.getTasks() .withType(Test.class) .forEach(testTask -> { int version = testTask.getJavaLauncher().get().getMetadata().getLanguageVersion().asInt(); System.out.println(testTask.getName() + ": running with Java " + version); }); }); } private void setJavaRelease(JavaCompile task, Project project) { int defaultVersion = 8; int releaseVersion = defaultVersion; int compilerVersion = task.getJavaCompiler().get().getMetadata().getLanguageVersion().asInt(); for (int version = defaultVersion ; version <= compilerVersion ; version++) { if 
(task.getName().contains("Java" + version)) { releaseVersion = version; break; } } final int finalVersion = releaseVersion; System.out.println(task.getName() + ": compiling with Java " + compilerVersion + ", targeting " + finalVersion); Provider<JavaCompiler> compiler = project.getExtensions() .getByType(JavaToolchainService.class) .compilerFor(spec -> spec.getLanguageVersion().set(JavaLanguageVersion.of(finalVersion))); // We can't use --release for Java8: https://bugs.openjdk.org/browse/JDK-8206937 // task.getOptions().getRelease().set(releaseVersion); String releaseVersionString = JavaLanguageVersion.of(finalVersion).toString(); task.setSourceCompatibility(releaseVersionString); task.setTargetCompatibility(releaseVersionString); FileCollection existingBootClasspath = task.getOptions().getBootstrapClasspath(); FileCollection bootClasspath = project.files(compiler.map(c -> c.getMetadata().getInstallationPath().getAsFileTree()).get()); if (existingBootClasspath != null) { bootClasspath = existingBootClasspath.plus(bootClasspath); } task.getOptions().setBootstrapClasspath(bootClasspath); } }
JavaConventions
java
redisson__redisson
redisson/src/main/java/org/redisson/api/RBatchReactive.java
{ "start": 1016, "end": 17000 }
interface ____ { /** * Returns stream instance by <code>name</code> * <p> * Requires <b>Redis 5.0.0 and higher.</b> * * @param <K> type of key * @param <V> type of value * @param name of stream * @return RStream object */ <K, V> RStreamReactive<K, V> getStream(String name); /** * Returns stream instance by <code>name</code> * using provided <code>codec</code> for entries. * <p> * Requires <b>Redis 5.0.0 and higher.</b> * * @param <K> type of key * @param <V> type of value * @param name - name of stream * @param codec - codec for entry * @return RStream object */ <K, V> RStreamReactive<K, V> getStream(String name, Codec codec); /** * Returns geospatial items holder instance by <code>name</code>. * * @param <V> type of value * @param name - name of object * @return Geo object */ <V> RGeoReactive<V> getGeo(String name); /** * Returns geospatial items holder instance by <code>name</code> * using provided codec for geospatial members. * * @param <V> type of value * @param name - name of object * @param codec - codec for value * @return Geo object */ <V> RGeoReactive<V> getGeo(String name, Codec codec); /** * Returns Set based Multimap instance by name. * * @param <K> type of key * @param <V> type of value * @param name - name of object * @return SetMultimap object */ <K, V> RSetMultimapReactive<K, V> getSetMultimap(String name); /** * Returns Set based Multimap instance by name * using provided codec for both map keys and values. * * @param <K> type of key * @param <V> type of value * @param name - name of object * @param codec - codec for keys and values * @return SetMultimap object */ <K, V> RSetMultimapReactive<K, V> getSetMultimap(String name, Codec codec); /** * Returns Set based Multimap cache instance by name. * Supports key eviction by specifying a time to live. * If eviction is not required then it's better to use regular set multimap {@link #getSetMultimap(String)}. 
* * @param <K> type of key * @param <V> type of value * @param name - name of object * @return RSetMultimapCacheRx object */ <K, V> RSetMultimapCacheReactive<K, V> getSetMultimapCache(String name); /** * Returns Set based Multimap cache instance by name using provided codec for both map keys and values. * Supports key eviction by specifying a time to live. * If eviction is not required then it's better to use regular set multimap {@link #getSetMultimap(String, Codec)}. * * @param <K> type of key * @param <V> type of value * @param name - name of object * @param codec - codec for keys and values * @return RSetMultimapCacheRx object */ <K, V> RSetMultimapCacheReactive<K, V> getSetMultimapCache(String name, Codec codec); /** * Returns set-based cache instance by <code>name</code>. * Uses map (value_hash, value) under the hood for minimal memory consumption. * Supports value eviction with a given TTL value. * * <p>If eviction is not required then it's better to use regular map {@link #getSet(String, Codec)}.</p> * * @param <V> type of value * @param name - name of object * @return SetCache object */ <V> RSetCacheReactive<V> getSetCache(String name); /** * Returns set-based cache instance by <code>name</code> * using provided <code>codec</code> for values. * Uses map (value_hash, value) under the hood for minimal memory consumption. * Supports value eviction with a given TTL value. * * <p>If eviction is not required then it's better to use regular map {@link #getSet(String, Codec)}.</p> * * @param <V> type of value * @param name - name of object * @param codec - codec for values * @return SetCache object */ <V> RSetCacheReactive<V> getSetCache(String name, Codec codec); /** * Returns map-based cache instance by <code>name</code> * using provided <code>codec</code> for both cache keys and values. * Supports entry eviction with a given TTL value. 
* * <p>If eviction is not required then it's better to use regular map {@link #getMap(String, Codec)}.</p> * * @param <K> type of key * @param <V> type of value * @param name - name of object * @param codec - codec for keys and values * @return MapCache object */ <K, V> RMapCacheReactive<K, V> getMapCache(String name, Codec codec); /** * Returns map-based cache instance by <code>name</code>. * Supports entry eviction with a given TTL value. * * <p>If eviction is not required then it's better to use regular map {@link #getMap(String)}.</p> * * @param <K> type of key * @param <V> type of value * @param name - name of object * @return MapCache object */ <K, V> RMapCacheReactive<K, V> getMapCache(String name); /** * Returns map instance by name. * Supports entry eviction with a given TTL. * <p> * Requires <b>Redis 7.4.0 and higher.</b> or <b>Valkey 9.0.0 and higher.</b> * * @param <K> type of key * @param <V> type of value * @param name name of object * @return Map object */ <K, V> RMapCacheNativeReactive<K, V> getMapCacheNative(String name); /** * Returns map instance by name * using provided codec for both map keys and values. * Supports entry eviction with a given TTL. * <p> * Requires <b>Redis 7.4.0 and higher.</b> or <b>Valkey 9.0.0 and higher.</b> * * @param <K> type of key * @param <V> type of value * @param name name of object * @param codec codec for keys and values * @return Map object */ <K, V> RMapCacheNativeReactive<K, V> getMapCacheNative(String name, Codec codec); /** * Returns List based Multimap instance by name. * Supports key-entry eviction with a given TTL value. * Stores insertion order and allows duplicates for values mapped to key. * <p> * Uses Redis native commands for entry expiration and not a scheduled eviction task. 
* <p> * Requires <b>Redis 7.4.0 and higher.</b> or <b>Valkey 9.0.0 and higher.</b> * * @param <K> type of key * @param <V> type of value * @param name name of object * @return ListMultimapCache object */ <K, V> RListMultimapCacheReactive<K, V> getListMultimapCacheNative(String name); /** * Returns List based Multimap instance by name * using provided codec for both map keys and values. * Supports key-entry eviction with a given TTL value. * Stores insertion order and allows duplicates for values mapped to key. * <p> * Uses Redis native commands for entry expiration and not a scheduled eviction task. * <p> * Requires <b>Redis 7.4.0 and higher.</b> or <b>Valkey 9.0.0 and higher.</b> * * @param <K> type of key * @param <V> type of value * @param name name of object * @param codec codec for keys and values * @return ListMultimapCache object */ <K, V> RListMultimapCacheReactive<K, V> getListMultimapCacheNative(String name, Codec codec); /** * Returns Set based Multimap instance by name. * Supports key-entry eviction with a given TTL value. * Doesn't allow duplications for values mapped to key. * <p> * Uses Redis native commands for entry expiration and not a scheduled eviction task. * <p> * Requires <b>Redis 7.4.0 and higher.</b> or <b>Valkey 9.0.0 and higher.</b> * * @param <K> type of key * @param <V> type of value * @param name name of object * @return SetMultimapCache object */ <K, V> RSetMultimapCacheReactive<K, V> getSetMultimapCacheNative(String name); /** * Returns Set based Multimap instance by name * using provided codec for both map keys and values. * Supports key-entry eviction with a given TTL value. * Doesn't allow duplications for values mapped to key. * <p> * Uses Redis native commands for entry expiration and not a scheduled eviction task. 
* <p> * Requires <b>Redis 7.4.0 and higher.</b> or <b>Valkey 9.0.0 and higher.</b> * * @param <K> type of key * @param <V> type of value * @param name name of object * @param codec codec for keys and values * @return SetMultimapCache object */ <K, V> RSetMultimapCacheReactive<K, V> getSetMultimapCacheNative(String name, Codec codec); /** * Returns object holder by name * * @param <V> type of value * @param name - name of object * @return Bucket object */ <V> RBucketReactive<V> getBucket(String name); <V> RBucketReactive<V> getBucket(String name, Codec codec); /** * Returns JSON data holder instance by name using provided codec. * * @see org.redisson.codec.JacksonCodec * * @param <V> type of value * @param name name of object * @param codec codec for values * @return JsonBucket object */ <V> RJsonBucketReactive<V> getJsonBucket(String name, JsonCodec codec); /** * Returns HyperLogLog object by name * * @param <V> type of value * @param name - name of object * @return HyperLogLog object */ <V> RHyperLogLogReactive<V> getHyperLogLog(String name); <V> RHyperLogLogReactive<V> getHyperLogLog(String name, Codec codec); /** * Returns list instance by name. * * @param <V> type of value * @param name - name of object * @return List object */ <V> RListReactive<V> getList(String name); <V> RListReactive<V> getList(String name, Codec codec); /** * Returns List based MultiMap instance by name. * * @param <K> type of key * @param <V> type of value * @param name - name of object * @return ListMultimap object */ <K, V> RListMultimapReactive<K, V> getListMultimap(String name); /** * Returns List based MultiMap instance by name * using provided codec for both map keys and values. * * @param <K> type of key * @param <V> type of value * @param name - name of object * @param codec - codec for keys and values * @return ListMultimap object */ <K, V> RListMultimapReactive<K, V> getListMultimap(String name, Codec codec); /** * Returns List based Multimap cache instance by name. 
* Supports key eviction by specifying a time to live. * If eviction is not required then it's better to use regular list multimap {@link #getListMultimap(String)}. * * @param <K> type of key * @param <V> type of value * @param name - name of object * @return RListMultimapCacheRx object */ <K, V> RListMultimapReactive<K, V> getListMultimapCache(String name); /** * Returns List based Multimap cache instance by name using provided codec for both map keys and values. * Supports key eviction by specifying a time to live. * If eviction is not required then it's better to use regular list multimap {@link #getListMultimap(String, Codec)}. * * @param <K> type of key * @param <V> type of value * @param name - name of object * @param codec - codec for keys and values * @return RListMultimapCacheRx object */ <K, V> RListMultimapReactive<K, V> getListMultimapCache(String name, Codec codec); /** * Returns map instance by name. * * @param <K> type of key * @param <V> type of value * @param name - name of object * @return Map object */ <K, V> RMapReactive<K, V> getMap(String name); <K, V> RMapReactive<K, V> getMap(String name, Codec codec); /** * Returns set instance by name. * * @param <V> type of value * @param name - name of object * @return Set object */ <V> RSetReactive<V> getSet(String name); <V> RSetReactive<V> getSet(String name, Codec codec); /** * Returns topic instance by name. * * @param name - name of object * @return Topic object */ RTopicReactive getTopic(String name); RTopicReactive getTopic(String name, Codec codec); /** * Returns Sharded Topic instance by name. * <p> * Messages are delivered to message listeners connected to the same Topic. * <p> * * @param name - name of object * @return Topic object */ RShardedTopicReactive getShardedTopic(String name); /** * Returns Sharded Topic instance by name using provided codec for messages. * <p> * Messages are delivered to message listeners connected to the same Topic. 
* <p> * * @param name - name of object * @param codec - codec for message * @return Topic object */ RShardedTopicReactive getShardedTopic(String name, Codec codec); /** * Returns queue instance by name. * * @param <V> type of value * @param name - name of object * @return Queue object */ <V> RQueueReactive<V> getQueue(String name); <V> RQueueReactive<V> getQueue(String name, Codec codec); /** * Returns blocking queue instance by name. * * @param <V> type of value * @param name - name of object * @return BlockingQueue object */ <V> RBlockingQueueReactive<V> getBlockingQueue(String name); <V> RBlockingQueueReactive<V> getBlockingQueue(String name, Codec codec); /** * Returns blocking deque instance by name. * * @param <V> type of value * @param name - name of object * @return BlockingDeque object */ <V> RBlockingDequeReactive<V> getBlockingDeque(String name); <V> RBlockingDequeReactive<V> getBlockingDeque(String name, Codec codec); /** * Returns deque instance by name. * * @param <V> type of value * @param name - name of object * @return Deque object */ <V> RDequeReactive<V> getDeque(String name); <V> RDequeReactive<V> getDeque(String name, Codec codec); /** * Returns "atomic long" instance by name. * * @param name - name of object * @return AtomicLong object */ RAtomicLongReactive getAtomicLong(String name); /** * Returns atomicDouble instance by name. 
* * @param name - name of object * @return AtomicDouble object */ RAtomicDoubleReactive getAtomicDouble(String name); /** * Returns Redis Sorted Set instance by name * * @param <V> type of value * @param name - name of object * @return ScoredSortedSet object */ <V> RScoredSortedSetReactive<V> getScoredSortedSet(String name); <V> RScoredSortedSetReactive<V> getScoredSortedSet(String name, Codec codec); /** * Returns String based Redis Sorted Set instance by name * All elements are inserted with the same score during addition, * in order to force lexicographical ordering * * @param name - name of object * @return LexSortedSet object */ RLexSortedSetReactive getLexSortedSet(String name); /** * Returns bitSet instance by name. * * @param name of bitSet * @return BitSet object */ RBitSetReactive getBitSet(String name); /** * Returns script operations object * * @return Script object */ RScriptReactive getScript(); /** * Returns script operations object using provided codec. * * @param codec - codec for params and result * @return Script object */ RScriptReactive getScript(Codec codec); /** * Returns
RBatchReactive
java
apache__maven
impl/maven-impl/src/test/java/org/apache/maven/impl/model/MavenModelMergerTest.java
{ "start": 1167, "end": 4499 }
class ____ { private MavenModelMerger modelMerger = new MavenModelMerger(); // modelVersion is neither inherited nor injected @Test void testMergeModelModelVersion() { Model parent = Model.newBuilder().modelVersion("4.0.0").build(); Model model = Model.newInstance(); Model.Builder builder = Model.newBuilder(model); modelMerger.mergeModel_ModelVersion(builder, model, parent, false, null); assertNull(builder.build().getModelVersion()); model = Model.newBuilder().modelVersion("5.0.0").build(); builder = Model.newBuilder(model); modelMerger.mergeModel_ModelVersion(builder, model, parent, false, null); assertEquals("5.0.0", builder.build().getModelVersion()); } // ArtifactId is neither inherited nor injected @Test void testMergeModelArtifactId() { Model parent = Model.newBuilder().artifactId("PARENT").build(); Model model = Model.newInstance(); Model.Builder builder = Model.newBuilder(model); modelMerger.mergeModel_ArtifactId(builder, model, parent, false, null); assertNull(model.getArtifactId()); model = Model.newBuilder().artifactId("MODEL").build(); builder = Model.newBuilder(model); modelMerger.mergeModel_ArtifactId(builder, model, parent, false, null); assertEquals("MODEL", builder.build().getArtifactId()); } // Prerequisites are neither inherited nor injected @Test void testMergeModelPrerequisites() { Model parent = Model.newBuilder().prerequisites(Prerequisites.newInstance()).build(); Model model = Model.newInstance(); Model.Builder builder = Model.newBuilder(model); modelMerger.mergeModel_Prerequisites(builder, model, parent, false, null); assertNull(builder.build().getPrerequisites()); Prerequisites modelPrerequisites = Prerequisites.newBuilder().maven("3.0").build(); model = Model.newBuilder().prerequisites(modelPrerequisites).build(); builder = Model.newBuilder(model); modelMerger.mergeModel_Prerequisites(builder, model, parent, false, null); assertEquals(modelPrerequisites, builder.build().getPrerequisites()); } // Profiles are neither inherited nor injected 
during inheritance assembly @Test void testMergeModelProfiles() { Profile parentProfile = Profile.newBuilder().id("PARENT").build(); Model parent = Model.newBuilder() .profiles(Collections.singletonList(parentProfile)) .build(); Model model = Model.newInstance(); Model.Builder builder = Model.newBuilder(model); modelMerger.mergeModel_Profiles(builder, model, parent, false, null); assertEquals(0, builder.build().getProfiles().size()); Profile modelProfile = Profile.newBuilder().id("MODEL").build(); model = Model.newBuilder() .profiles(Collections.singletonList(modelProfile)) .build(); builder = Model.newBuilder(model); modelMerger.mergeModel_Profiles(builder, model, parent, false, null); assertEquals(1, builder.build().getProfiles().size()); assertEquals("MODEL", builder.build().getProfiles().get(0).getId()); } }
MavenModelMergerTest
java
apache__kafka
streams/upgrade-system-tests-30/src/test/java/org/apache/kafka/streams/tests/SmokeTestClient.java
{ "start": 2277, "end": 12287 }
/**
 * Smoke-test driver that runs a Kafka Streams topology (topic "data" in; several
 * aggregate topics out) and reports lifecycle events on stdout in the
 * "SMOKE-TEST-CLIENT-*" format that the system-test harness scrapes.
 */
class ____ extends SmokeTestUtil {

    private final String name;

    private KafkaStreams streams;
    private boolean uncaughtException = false;
    private boolean started;
    private volatile boolean closed;

    // Registers a JVM shutdown hook; uses a named non-daemon KafkaThread when a name is given.
    private static void addShutdownHook(final String name, final Runnable runnable) {
        if (name != null) {
            Runtime.getRuntime().addShutdownHook(KafkaThread.nonDaemon(name, runnable));
        } else {
            Runtime.getRuntime().addShutdownHook(new Thread(runnable));
        }
    }

    // Creates a temp state directory removed on JVM exit (deleteOnExit plus an explicit hook).
    private static File tempDirectory() {
        final String prefix = "kafka-";
        final File file;
        try {
            file = Files.createTempDirectory(prefix).toFile();
        } catch (final IOException ex) {
            throw new RuntimeException("Failed to create a temp dir", ex);
        }
        file.deleteOnExit();

        addShutdownHook("delete-temp-file-shutdown-hook", () -> {
            try {
                Utils.delete(file);
            } catch (final IOException e) {
                // Best-effort cleanup: report on stdout and continue shutting down
                System.out.println("Error deleting " + file.getAbsolutePath());
                e.printStackTrace(System.out);
            }
        });

        return file;
    }

    public SmokeTestClient(final String name) {
        this.name = name;
    }

    // True once the instance has transitioned REBALANCING -> RUNNING.
    public boolean started() {
        return started;
    }

    // True once the state listener observed NOT_RUNNING.
    public boolean closed() {
        return closed;
    }

    /**
     * Builds the topology, starts the Streams instance and waits up to one minute
     * for it to reach RUNNING. State transitions and failures are printed to stdout
     * so the harness can observe progress.
     */
    public void start(final Properties streamsProperties) {
        final Topology build = getTopology();
        streams = new KafkaStreams(build, getStreamsConfig(streamsProperties));

        final CountDownLatch countDownLatch = new CountDownLatch(1);
        streams.setStateListener((newState, oldState) -> {
            System.out.printf("%s %s: %s -> %s%n", name, Instant.now(), oldState, newState);
            if (oldState == KafkaStreams.State.REBALANCING && newState == KafkaStreams.State.RUNNING) {
                started = true;
                countDownLatch.countDown();
            }

            if (newState == KafkaStreams.State.NOT_RUNNING) {
                closed = true;
            }
        });

        streams.setUncaughtExceptionHandler(e -> {
            System.out.println(name + ": SMOKE-TEST-CLIENT-EXCEPTION");
            System.out.println(name + ": FATAL: An unexpected exception is encountered: " + e);
            e.printStackTrace(System.out);
            uncaughtException = true;
            // Any unexpected exception shuts the whole client down
            return StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT;
        });

        addShutdownHook("streams-shutdown-hook", this::close);

        streams.start();
        try {
            if (!countDownLatch.await(1, TimeUnit.MINUTES)) {
                System.out.println(name + ": SMOKE-TEST-CLIENT-EXCEPTION: Didn't start in one minute");
            }
        } catch (final InterruptedException e) {
            System.out.println(name + ": SMOKE-TEST-CLIENT-EXCEPTION: " + e);
            e.printStackTrace(System.out);
        }
        System.out.println(name + ": SMOKE-TEST-CLIENT-STARTED");
        System.out.println(name + " started at " + Instant.now());
    }

    // Fire-and-forget close; does not wait for shutdown to complete.
    public void closeAsync() {
        streams.close(Duration.ZERO);
    }

    /**
     * Closes the instance, waiting up to one minute. Note: the local variable
     * intentionally shadows the {@code closed} field — it reflects whether this
     * close call completed in time, not the state-listener signal.
     */
    public void close() {
        final boolean closed = streams.close(Duration.ofMinutes(1));

        if (closed && !uncaughtException) {
            System.out.println(name + ": SMOKE-TEST-CLIENT-CLOSED");
        } else if (closed) {
            System.out.println(name + ": SMOKE-TEST-CLIENT-EXCEPTION");
        } else {
            System.out.println(name + ": SMOKE-TEST-CLIENT-EXCEPTION: Didn't close");
        }
    }

    // Merges caller-supplied properties over the smoke-test defaults
    // (the trailing putAll lets callers override application/client ids and state dir).
    private Properties getStreamsConfig(final Properties props) {
        final Properties fullProps = new Properties(props);
        fullProps.put(StreamsConfig.APPLICATION_ID_CONFIG, "SmokeTest");
        fullProps.put(StreamsConfig.CLIENT_ID_CONFIG, "SmokeTest-" + name);
        fullProps.put(StreamsConfig.STATE_DIR_CONFIG, tempDirectory().getAbsolutePath());
        fullProps.putAll(props);
        return fullProps;
    }

    /**
     * Builds the smoke-test topology: consumes "data" and produces windowed
     * min/max/sum/cnt aggregates plus derived "dif", "avg" and repartitioned
     * "tagg" topics. Records keyed "flush" are filtered from every output;
     * END markers are filtered out of the aggregated stream.
     */
    public Topology getTopology() {
        final StreamsBuilder builder = new StreamsBuilder();
        final Consumed<String, Integer> stringIntConsumed = Consumed.with(stringSerde, intSerde);
        final KStream<String, Integer> source = builder.stream("data", stringIntConsumed);
        source.filterNot((k, v) -> k.equals("flush"))
              .to("echo", Produced.with(stringSerde, intSerde));
        final KStream<String, Integer> data = source.filter((key, value) -> value == null || value != END);
        data.process(SmokeTestUtil.printProcessorSupplier("data", name));

        // min
        final KGroupedStream<String, Integer> groupedData = data.groupByKey(Grouped.with(stringSerde, intSerde));

        final KTable<Windowed<String>, Integer> minAggregation = groupedData
            .windowedBy(TimeWindows.ofSizeAndGrace(Duration.ofDays(1), Duration.ofMinutes(1)))
            .aggregate(
                () -> Integer.MAX_VALUE,
                (aggKey, value, aggregate) -> (value < aggregate) ? value : aggregate,
                Materialized
                    .<String, Integer, WindowStore<Bytes, byte[]>>as("uwin-min")
                    .withValueSerde(intSerde)
                    // Retention must exceed window size + grace (1 day + 1 minute)
                    .withRetention(Duration.ofHours(25))
            );

        streamify(minAggregation, "min-raw");

        streamify(minAggregation.suppress(untilWindowCloses(BufferConfig.unbounded())), "min-suppressed");

        minAggregation
            .toStream(new Unwindow<>())
            .filterNot((k, v) -> k.equals("flush"))
            .to("min", Produced.with(stringSerde, intSerde));

        final KTable<Windowed<String>, Integer> smallWindowSum = groupedData
            .windowedBy(TimeWindows.ofSizeAndGrace(Duration.ofSeconds(2), Duration.ofSeconds(30)).advanceBy(Duration.ofSeconds(1)))
            .reduce(Integer::sum);

        streamify(smallWindowSum, "sws-raw");
        streamify(smallWindowSum.suppress(untilWindowCloses(BufferConfig.unbounded())), "sws-suppressed");

        final KTable<String, Integer> minTable = builder.table(
            "min",
            Consumed.with(stringSerde, intSerde),
            Materialized.as("minStoreName"));
        minTable.toStream().process(SmokeTestUtil.printProcessorSupplier("min", name));

        // max
        groupedData
            .windowedBy(TimeWindows.ofSizeWithNoGrace(Duration.ofDays(2)))
            .aggregate(
                () -> Integer.MIN_VALUE,
                (aggKey, value, aggregate) -> (value > aggregate) ? value : aggregate,
                Materialized.<String, Integer, WindowStore<Bytes, byte[]>>as("uwin-max").withValueSerde(intSerde))
            .toStream(new Unwindow<>())
            .filterNot((k, v) -> k.equals("flush"))
            .to("max", Produced.with(stringSerde, intSerde));

        final KTable<String, Integer> maxTable = builder.table(
            "max",
            Consumed.with(stringSerde, intSerde),
            Materialized.as("maxStoreName"));
        maxTable.toStream().process(SmokeTestUtil.printProcessorSupplier("max", name));

        // sum
        groupedData
            .windowedBy(TimeWindows.ofSizeWithNoGrace(Duration.ofDays(2)))
            .aggregate(
                () -> 0L,
                (aggKey, value, aggregate) -> (long) value + aggregate,
                Materialized.<String, Long, WindowStore<Bytes, byte[]>>as("win-sum").withValueSerde(longSerde))
            .toStream(new Unwindow<>())
            .filterNot((k, v) -> k.equals("flush"))
            .to("sum", Produced.with(stringSerde, longSerde));

        final Consumed<String, Long> stringLongConsumed = Consumed.with(stringSerde, longSerde);
        final KTable<String, Long> sumTable = builder.table("sum", stringLongConsumed);
        sumTable.toStream().process(SmokeTestUtil.printProcessorSupplier("sum", name));

        // cnt
        groupedData
            .windowedBy(TimeWindows.ofSizeWithNoGrace(Duration.ofDays(2)))
            .count(Materialized.as("uwin-cnt"))
            .toStream(new Unwindow<>())
            .filterNot((k, v) -> k.equals("flush"))
            .to("cnt", Produced.with(stringSerde, longSerde));

        final KTable<String, Long> cntTable = builder.table(
            "cnt",
            Consumed.with(stringSerde, longSerde),
            Materialized.as("cntStoreName"));
        cntTable.toStream().process(SmokeTestUtil.printProcessorSupplier("cnt", name));

        // dif = max - min, joined per key
        maxTable
            .join(
                minTable,
                (value1, value2) -> value1 - value2)
            .toStream()
            .filterNot((k, v) -> k.equals("flush"))
            .to("dif", Produced.with(stringSerde, intSerde));

        // avg = sum / cnt, joined per key
        sumTable
            .join(
                cntTable,
                (value1, value2) -> (double) value1 / (double) value2)
            .toStream()
            .filterNot((k, v) -> k.equals("flush"))
            .to("avg", Produced.with(stringSerde, doubleSerde));

        // test repartition
        final Agg agg = new Agg();
        cntTable.groupBy(agg.selector(), Grouped.with(stringSerde, longSerde))
                .aggregate(agg.init(), agg.adder(), agg.remover(),
                           Materialized.<String, Long>as(Stores.inMemoryKeyValueStore("cntByCnt"))
                               .withKeySerde(Serdes.String())
                               .withValueSerde(Serdes.Long()))
                .toStream()
                .to("tagg", Produced.with(stringSerde, longSerde));

        return builder.build();
    }

    // Re-keys a windowed table by the window's toString() and forwards it, dropping "flush" keys.
    private static void streamify(final KTable<Windowed<String>, Integer> windowedTable, final String topic) {
        windowedTable
            .toStream()
            .filterNot((k, v) -> k.key().equals("flush"))
            .map((key, value) -> new KeyValue<>(key.toString(), value))
            .to(topic, Produced.with(stringSerde, intSerde));
    }
}
SmokeTestClient
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
{ "start": 46552, "end": 47119 }
/**
 * Executor that removes a named extended attribute (xattr) from a path
 * via the supplied {@link FileSystem}.
 */
class ____ implements FileSystemAccess.FileSystemExecutor<Void> {

    // Target path of the xattr operation.
    private Path path;
    // Name of the xattr to remove.
    private String name;

    public FSRemoveXAttr(String path, String name) {
        this.path = new Path(path);
        this.name = name;
    }

    /**
     * Performs the remove on the configured path.
     *
     * @param fs file system to operate on
     * @return {@code null} — the operation has no result payload
     * @throws IOException if the underlying file system call fails
     */
    @Override
    public Void execute(FileSystem fs) throws IOException {
        fs.removeXAttr(path, name);
        return null;
    }

}

/**
 * Executor that performs a listing-xattrs FileSystemAccess file system
 * operation.
 */
@SuppressWarnings("rawtypes")
@InterfaceAudience.Private
public static
FSRemoveXAttr
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/fielddata/BooleanScriptFieldData.java
{ "start": 3529, "end": 4430 }
/**
 * Leaf field data backed by script-produced boolean doc values, exposed
 * through the numeric-long field data interface.
 */
class ____ extends LeafLongFieldData {

    // Script-computed per-document boolean values, served as longs.
    private final BooleanScriptDocValues booleanScriptDocValues;
    // Factory used to wrap the doc values for script-field access.
    protected final ToScriptFieldFactory<SortedNumericLongValues> toScriptFieldFactory;

    BooleanScriptLeafFieldData(
        BooleanScriptDocValues booleanScriptDocValues,
        ToScriptFieldFactory<SortedNumericLongValues> toScriptFieldFactory
    ) {
        // 0 passed to the superclass — presumably no heap accounted here since
        // values are computed by the script; TODO confirm against LeafLongFieldData.
        super(0);
        this.booleanScriptDocValues = booleanScriptDocValues;
        this.toScriptFieldFactory = toScriptFieldFactory;
    }

    @Override
    public SortedNumericLongValues getLongValues() {
        return booleanScriptDocValues;
    }

    @Override
    public DocValuesScriptFieldFactory getScriptFieldFactory(String name) {
        return toScriptFieldFactory.getScriptFieldFactory(getLongValues(), name);
    }
}
}
BooleanScriptLeafFieldData
java
spring-projects__spring-framework
spring-jdbc/src/test/java/org/springframework/jdbc/object/SqlQueryTests.java
{ "start": 12743, "end": 14374 }
class ____ extends MappingSqlQuery<Customer> { public CustomerQuery(DataSource ds) { super(ds, SELECT_ID_WHERE); declareParameter(new SqlParameter(COLUMN_NAMES[0], COLUMN_TYPES[0])); declareParameter(new SqlParameter(COLUMN_NAMES[1], COLUMN_TYPES[1])); compile(); } @Override protected Customer mapRow(ResultSet rs, int rownum) throws SQLException { Customer cust = new Customer(); cust.setId(rs.getInt(COLUMN_NAMES[0])); cust.setForename(rs.getString(COLUMN_NAMES[1])); return cust; } public Customer findCustomer(int id, String name) { return findObject(id, name); } } CustomerQuery query = new CustomerQuery(dataSource); Customer cust1 = query.findCustomer(1, "rod"); assertThat(cust1).as("Found customer").isNotNull(); assertThat(cust1.getId()).as("Customer id was assigned correctly").isEqualTo(1); Customer cust2 = query.findCustomer(1, "Roger"); assertThat(cust2).as("No customer found").isNull(); verify(preparedStatement).setObject(1, 1, Types.INTEGER); verify(preparedStatement).setString(2, "rod"); verify(preparedStatement2).setObject(1, 1, Types.INTEGER); verify(preparedStatement2).setString(2, "Roger"); verify(resultSet).close(); verify(resultSet2).close(); verify(preparedStatement).close(); verify(preparedStatement2).close(); verify(connection, times(2)).close(); } @Test void testFindTooManyCustomers() throws SQLException { given(resultSet.next()).willReturn(true, true, false); given(resultSet.getInt("id")).willReturn(1, 2); given(resultSet.getString("forename")).willReturn("rod", "rod");
CustomerQuery
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/TransientOverrideAsPersistentSingleTableTests.java
{ "start": 10507, "end": 10893 }
/**
 * Entity whose natural identifier is its name.
 */
class ____ {

    // Primary key (mapped via @Id on the getter).
    private String name;
    // No accessor visible in this chunk — presumably mapped elsewhere or unused; verify.
    private String description;

    public Group(String name) {
        this();
        setName( name );
    }

    @Id
    public String getName() {
        return name;
    }

    protected Group() {
        // this form used by Hibernate
    }

    protected void setName(String name) {
        this.name = name;
    }
}

@SuppressWarnings("unused")
@Entity(name = "Job")
public static
Group
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/api/long2darray/Long2DArrayAssert_isEmpty_Test.java
{ "start": 941, "end": 1377 }
class ____ extends Long2DArrayAssertBaseTest { @Override protected Long2DArrayAssert invoke_api_method() { assertions.isEmpty(); return null; } @Override protected void verify_internal_effects() { verify(arrays).assertEmpty(getInfo(assertions), getActual(assertions)); } @Override @Test public void should_return_this() { // Disable this test because isEmpty is void } }
Long2DArrayAssert_isEmpty_Test
java
spring-projects__spring-boot
documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/actuator/metrics/export/graphite/MyGraphiteConfiguration.java
{ "start": 1177, "end": 1554 }
class ____ {

	/**
	 * Registry wired with a custom hierarchical name mapper instead of the default.
	 */
	@Bean
	public GraphiteMeterRegistry graphiteMeterRegistry(GraphiteConfig config, Clock clock) {
		HierarchicalNameMapper mapper = this::toHierarchicalName;
		return new GraphiteMeterRegistry(config, clock, mapper);
	}

	// Currently delegates to the default mapping; customize here to change the
	// Graphite naming scheme.
	private String toHierarchicalName(Meter.Id id, NamingConvention convention) {
		return HierarchicalNameMapper.DEFAULT.toHierarchicalName(id, convention);
	}

}
MyGraphiteConfiguration
java
spring-projects__spring-boot
module/spring-boot-hazelcast/src/test/java/org/springframework/boot/hazelcast/autoconfigure/HazelcastAutoConfigurationServerTests.java
{ "start": 10686, "end": 10859 }
/**
 * Test configuration exposing a Hazelcast {@link Config} bean with an explicit
 * instance name, so tests can assert on "my-test-instance".
 */
class ____ {

    @Bean
    Config myHazelcastConfig() {
        return new Config("my-test-instance");
    }

}

@Configuration(proxyBeanMethods = false)
static
HazelcastConfigWithName
java
mapstruct__mapstruct
processor/src/test/resources/fixtures/org/mapstruct/ap/test/value/enum2enum/SpecialOrderMapperImpl.java
{ "start": 535, "end": 2125 }
class ____ implements SpecialOrderMapper { @Override public OrderDto orderEntityToDto(OrderEntity order) { if ( order == null ) { return null; } OrderDto orderDto = new OrderDto(); orderDto.setOrderType( orderTypeToExternalOrderType( order.getOrderType() ) ); return orderDto; } @Override public ExternalOrderType orderTypeToExternalOrderType(OrderType orderType) { if ( orderType == null ) { return ExternalOrderType.DEFAULT; } ExternalOrderType externalOrderType; switch ( orderType ) { case STANDARD: externalOrderType = null; break; case RETAIL: externalOrderType = ExternalOrderType.RETAIL; break; case B2B: externalOrderType = ExternalOrderType.B2B; break; default: externalOrderType = ExternalOrderType.SPECIAL; } return externalOrderType; } @Override public OrderType externalOrderTypeToOrderType(ExternalOrderType orderType) { if ( orderType == null ) { return OrderType.STANDARD; } OrderType orderType1; switch ( orderType ) { case SPECIAL: orderType1 = OrderType.EXTRA; break; case DEFAULT: orderType1 = null; break; case RETAIL: orderType1 = OrderType.RETAIL; break; case B2B: orderType1 = OrderType.B2B; break; default: throw new IllegalArgumentException( "Unexpected
SpecialOrderMapperImpl
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/nestedbeans/DottedErrorMessageTest.java
{ "start": 15897, "end": 23156 }
enum ____ must " + "be be mapped via adding additional mappings: NORMAL." ) } ) public void testSourceMapEnumProperty() { } @ProcessorTest @WithClasses({ UnmappableTargetWarnDeepNestingMapper.class, UnmappableTargetWarnDeepListMapper.class, UnmappableTargetWarnDeepMapKeyMapper.class, UnmappableTargetWarnDeepMapValueMapper.class, UnmappableTargetWarnCollectionElementPropertyMapper.class, UnmappableTargetWarnValuePropertyMapper.class }) @ExpectedCompilationOutcome( value = CompilationResult.SUCCEEDED, diagnostics = { @Diagnostic(type = UnmappableTargetWarnDeepNestingMapper.class, kind = javax.tools.Diagnostic.Kind.WARNING, line = 16, message = "Unmapped target property: \"rgb\". Mapping from " + PROPERTY + " \"Color house.roof.color\" to \"ColorDto house.roof.color\"." + " Occured at 'UserDto userToUserDto(User user)' in 'BaseDeepNestingMapper'."), @Diagnostic(type = UnmappableTargetWarnDeepListMapper.class, kind = javax.tools.Diagnostic.Kind.WARNING, line = 16, message = "Unmapped target property: \"left\". Mapping from " + COLLECTION_ELEMENT + " \"Wheel car.wheels\" to \"WheelDto car.wheels\"." + " Occured at 'UserDto userToUserDto(User user)' in 'BaseDeepListMapper'."), @Diagnostic(type = UnmappableTargetWarnDeepMapKeyMapper.class, kind = javax.tools.Diagnostic.Kind.WARNING, line = 16, message = "Unmapped target property: \"pronunciation\". Mapping from " + MAP_KEY + " \"Word dictionary.wordMap{:key}\" to \"WordDto dictionary.wordMap{:key}\"." + " Occured at 'UserDto userToUserDto(User user)' in 'BaseDeepMapKeyMapper'."), @Diagnostic(type = UnmappableTargetWarnDeepMapValueMapper.class, kind = javax.tools.Diagnostic.Kind.WARNING, line = 16, message = "Unmapped target property: \"pronunciation\". Mapping from " + MAP_VALUE + " \"ForeignWord dictionary.wordMap{:value}\" to \"ForeignWordDto dictionary.wordMap{:value}\"." 
+ " Occured at 'UserDto userToUserDto(User user)' in 'BaseDeepMapValueMapper'."), @Diagnostic(type = UnmappableTargetWarnCollectionElementPropertyMapper.class, kind = javax.tools.Diagnostic.Kind.WARNING, line = 16, message = "Unmapped target property: \"color\". Mapping from " + PROPERTY + " \"Info computers[].info\" to \"InfoDto computers[].info\"." + " Occured at 'UserDto userToUserDto(User user)' in 'BaseCollectionElementPropertyMapper'."), @Diagnostic(type = UnmappableTargetWarnValuePropertyMapper.class, kind = javax.tools.Diagnostic.Kind.WARNING, line = 16, message = "Unmapped target property: \"color\". Mapping from " + PROPERTY + " \"Info catNameMap{:value}.info\" to \"InfoDto catNameMap{:value}.info\"." + " Occured at 'UserDto userToUserDto(User user)' in 'BaseValuePropertyMapper'.") } ) public void testWarnUnmappedTargetProperties() { } @IssueKey( "2788" ) @ProcessorTest @WithClasses({ UnmappableSourceWarnDeepNestingMapper.class, UnmappableSourceWarnDeepListMapper.class, UnmappableSourceWarnDeepMapKeyMapper.class, UnmappableSourceWarnDeepMapValueMapper.class, UnmappableSourceWarnCollectionElementPropertyMapper.class, UnmappableSourceWarnValuePropertyMapper.class }) @ExpectedCompilationOutcome( value = CompilationResult.SUCCEEDED, diagnostics = { @Diagnostic(type = UnmappableSourceWarnDeepNestingMapper.class, kind = javax.tools.Diagnostic.Kind.WARNING, line = 16, message = "Unmapped source property: \"cmyk\". Mapping from " + PROPERTY + " \"Color house.roof.color\" to \"ColorDto house.roof.color\"." + " Occured at 'UserDto userToUserDto(User user)' in 'BaseDeepNestingMapper'."), @Diagnostic(type = UnmappableSourceWarnDeepListMapper.class, kind = javax.tools.Diagnostic.Kind.WARNING, line = 16, message = "Unmapped source property: \"right\". Mapping from " + COLLECTION_ELEMENT + " \"Wheel car.wheels\" to \"WheelDto car.wheels\"." 
+ " Occured at 'UserDto userToUserDto(User user)' in 'BaseDeepListMapper'."), @Diagnostic(type = UnmappableSourceWarnDeepMapKeyMapper.class, kind = javax.tools.Diagnostic.Kind.WARNING, line = 16, message = "Unmapped source property: \"meaning\". Mapping from " + MAP_KEY + " \"Word dictionary.wordMap{:key}\" to \"WordDto dictionary.wordMap{:key}\"." + " Occured at 'UserDto userToUserDto(User user)' in 'BaseDeepMapKeyMapper'."), @Diagnostic(type = UnmappableSourceWarnDeepMapValueMapper.class, kind = javax.tools.Diagnostic.Kind.WARNING, line = 16, message = "Unmapped source property: \"meaning\". Mapping from " + MAP_VALUE + " \"ForeignWord dictionary.wordMap{:value}\" to \"ForeignWordDto dictionary.wordMap{:value}\"." + " Occured at 'UserDto userToUserDto(User user)' in 'BaseDeepMapValueMapper'."), @Diagnostic(type = UnmappableSourceWarnCollectionElementPropertyMapper.class, kind = javax.tools.Diagnostic.Kind.WARNING, line = 16, message = "Unmapped source property: \"size\". Mapping from " + PROPERTY + " \"Info computers[].info\" to \"InfoDto computers[].info\"." + " Occured at 'UserDto userToUserDto(User user)' in 'BaseCollectionElementPropertyMapper'."), @Diagnostic(type = UnmappableSourceWarnValuePropertyMapper.class, kind = javax.tools.Diagnostic.Kind.WARNING, line = 16, message = "Unmapped source property: \"size\". Mapping from " + PROPERTY + " \"Info catNameMap{:value}.info\" to \"InfoDto catNameMap{:value}.info\"." + " Occured at 'UserDto userToUserDto(User user)' in 'BaseValuePropertyMapper'.") } ) public void testWarnUnmappedSourceProperties() { } @ProcessorTest @WithClasses({ UnmappableIgnoreDeepNestingMapper.class, UnmappableIgnoreDeepListMapper.class, UnmappableIgnoreDeepMapKeyMapper.class, UnmappableIgnoreDeepMapValueMapper.class, UnmappableIgnoreCollectionElementPropertyMapper.class, UnmappableIgnoreValuePropertyMapper.class }) @ExpectedCompilationOutcome( value = CompilationResult.SUCCEEDED ) public void testIgnoreUnmappedTargetProperties() { } }
and
java
apache__avro
doc/examples/mr-example/src/main/java/example/MapReduceColorCount.java
{ "start": 2210, "end": 3811 }
class ____ extends Reducer<Text, IntWritable, AvroKey<CharSequence>, AvroValue<Integer>> { @Override public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException { int sum = 0; for (IntWritable value : values) { sum += value.get(); } context.write(new AvroKey<CharSequence>(key.toString()), new AvroValue<Integer>(sum)); } } public int run(String[] args) throws Exception { if (args.length != 2) { System.err.println("Usage: MapReduceColorCount <input path> <output path>"); return -1; } Job job = new Job(getConf()); job.setJarByClass(MapReduceColorCount.class); job.setJobName("Color Count"); FileInputFormat.setInputPaths(job, new Path(args[0])); FileOutputFormat.setOutputPath(job, new Path(args[1])); job.setInputFormatClass(AvroKeyInputFormat.class); job.setMapperClass(ColorCountMapper.class); AvroJob.setInputKeySchema(job, User.getClassSchema()); job.setMapOutputKeyClass(Text.class); job.setMapOutputValueClass(IntWritable.class); job.setOutputFormatClass(AvroKeyValueOutputFormat.class); job.setReducerClass(ColorCountReducer.class); AvroJob.setOutputKeySchema(job, Schema.create(Schema.Type.STRING)); AvroJob.setOutputValueSchema(job, Schema.create(Schema.Type.INT)); return (job.waitForCompletion(true) ? 0 : 1); } public static void main(String[] args) throws Exception { int res = ToolRunner.run(new MapReduceColorCount(), args); System.exit(res); } }
ColorCountReducer
java
alibaba__druid
core/src/test/java/com/alibaba/druid/sql/parser/SQLCommentTest.java
{ "start": 215, "end": 1480 }
class ____ extends TestCase { // issues 5708 public void test1() { String sqlStr = "SELECT \n" + "test1, -- test1的注释\n" + "test2 -- test2的注释\n" + "FROM \n" + "S371_BSD_O_IDCS"; SQLStatement sqlStatement1 = SQLUtils.parseSingleStatement(sqlStr, DbType.db2, true); System.out.println(sqlStatement1); } // issues 5709 public void test2() { String sqlStr = "INSERT INTO S371_BSD_O_IDCS(\n" + "AAAA -- AAAA\n" + ", BBBB -- BBBB\n" + ", CCCC -- CCCC\n" + ", DDDD -- DDDD\n" + ")\n" + "\n" + "\t(SELECT \n" + "\tAAAA, -- AAAA\n" + "\tBBBB,-- BBBB\n" + "\tCCCC,-- CCCC\n" + "\tDDDD -- DDDD\n" + "\tFROM TABLE_1\n" + "\tUNION\n" + "\tSELECT AAAA,BBBB,CCCC,DDDD FROM TABLE_2\n" + "\tUNION\n" + "\tSELECT AAAA,BBBB,CCCC,DDDD FROM TABLE_3)"; SQLStatement sqlStatement1 = SQLUtils.parseSingleStatement(sqlStr, DbType.db2, true); System.out.println(sqlStatement1); } }
SQLCommentTest
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/dialect/function/json/OracleJsonValueFunction.java
{ "start": 691, "end": 2736 }
class ____ extends JsonValueFunction { public OracleJsonValueFunction(TypeConfiguration typeConfiguration) { super( typeConfiguration, false, false ); } @Override protected void render( SqlAppender sqlAppender, JsonValueArguments arguments, ReturnableType<?> returnType, SqlAstTranslator<?> walker) { final boolean encodedBoolean = arguments.returningType() != null && isEncodedBoolean( arguments.returningType().getJdbcMapping() ); if ( encodedBoolean ) { sqlAppender.append( "decode(" ); } super.render( sqlAppender, arguments, returnType, walker ); if ( encodedBoolean ) { final JdbcMapping type = arguments.returningType().getJdbcMapping(); //noinspection unchecked final JdbcLiteralFormatter<Object> jdbcLiteralFormatter = type.getJdbcLiteralFormatter(); final SessionFactoryImplementor sessionFactory = walker.getSessionFactory(); final Dialect dialect = sessionFactory.getJdbcServices().getDialect(); final WrapperOptions wrapperOptions = sessionFactory.getWrapperOptions(); final Object trueValue = type.convertToRelationalValue( true ); final Object falseValue = type.convertToRelationalValue( false ); sqlAppender.append( ",'true'," ); jdbcLiteralFormatter.appendJdbcLiteral( sqlAppender, trueValue, dialect, wrapperOptions ); sqlAppender.append( ",'false'," ); jdbcLiteralFormatter.appendJdbcLiteral( sqlAppender, falseValue, dialect, wrapperOptions ); sqlAppender.append( ')' ); } } @Override protected void renderReturningClause(SqlAppender sqlAppender, JsonValueArguments arguments, SqlAstTranslator<?> walker) { if ( arguments.returningType() != null && isEncodedBoolean( arguments.returningType().getJdbcMapping() ) ) { sqlAppender.appendSql( " returning varchar2(5)" ); } else { super.renderReturningClause( sqlAppender, arguments, walker ); } } public static boolean isEncodedBoolean(JdbcMapping type) { return type.getJdbcType().isBoolean() && type.getJdbcType().getDdlTypeCode() != SqlTypes.BOOLEAN; } }
OracleJsonValueFunction
java
grpc__grpc-java
android-interop-testing/src/generated/debug/grpc/io/grpc/testing/integration/MetricsServiceGrpc.java
{ "start": 7715, "end": 8068 }
class ____ implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return MetricsServiceGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service MetricsService. */ public static final
MetricsServiceImplBase
java
apache__camel
components/camel-xslt-saxon/src/main/java/org/apache/camel/component/xslt/saxon/XsltSaxonBuilder.java
{ "start": 1633, "end": 3624 }
class ____ extends XsltBuilder { private boolean allowStAX = true; private boolean useJsonBody = false; @Override protected Source prepareSource(Source source) { if (!isAllowStAX() && source instanceof StAXSource) { // Always convert StAXSource to SAXSource. // * Xalan and Saxon-B don't support StAXSource. // * The JDK default implementation (XSLTC) doesn't handle CDATA events // (see com.sun.org.apache.xalan.internal.xsltc.trax.StAXStream2SAX). // * Saxon-HE/PE/EE seem to support StAXSource, but don't advertise this // officially (via TransformerFactory.getFeature(StAXSource.FEATURE)) source = new StAX2SAXSource(((StAXSource) source).getXMLStreamReader()); } return source; } // Properties // ------------------------------------------------------------------------- public boolean isAllowStAX() { return allowStAX; } public void setAllowStAX(boolean allowStAX) { this.allowStAX = allowStAX; } public boolean isUseJsonBody() { return useJsonBody; } public void setUseJsonBody(boolean useJsonBody) { this.useJsonBody = useJsonBody; } @Override protected XmlSourceHandlerFactoryImpl createXmlSourceHandlerFactoryImpl() { SaxonXmlSourceHandlerFactoryImpl factory = new SaxonXmlSourceHandlerFactoryImpl(); factory.setUseJsonBody(useJsonBody); return factory; } @Override protected Templates createTemplates(TransformerFactory factory, Source source) throws TransformerConfigurationException { final Templates templates = super.createTemplates(factory, source); if (templates instanceof TemplatesImpl && getXsltMessageLogger() != null) { return new MessageDelegatingTemplates((TemplatesImpl) templates, getXsltMessageLogger()); } return templates; } private static
XsltSaxonBuilder
java
apache__flink
flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/operators/over/RowTimeRangeBoundedPrecedingFunctionTest.java
{ "start": 1456, "end": 4024 }
class ____ extends RowTimeOverWindowTestBase { @Test void testStateCleanup() throws Exception { RowTimeRangeBoundedPrecedingFunction<RowData> function = new RowTimeRangeBoundedPrecedingFunction<>( aggsHandleFunction, accTypes, inputFieldTypes, 2000, 2); KeyedProcessOperator<RowData, RowData, RowData> operator = new KeyedProcessOperator<>(function); OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator); testHarness.open(); AbstractKeyedStateBackend stateBackend = (AbstractKeyedStateBackend) operator.getKeyedStateBackend(); assertThat(stateBackend.numKeyValueStateEntries()) .as("Initial state is not empty") .isEqualTo(0); // put some records testHarness.processElement(insertRecord("key", 1L, 100L)); testHarness.processElement(insertRecord("key", 1L, 100L)); testHarness.processElement(insertRecord("key", 1L, 500L)); testHarness.processWatermark(new Watermark(1000L)); // at this moment we expect the function to have some records in state testHarness.processWatermark(new Watermark(4000L)); // at this moment the function should have cleaned up states assertThat(stateBackend.numKeyValueStateEntries()) .as("State has not been cleaned up") .isEqualTo(0); } @Test void testLateRecordMetrics() throws Exception { RowTimeRangeBoundedPrecedingFunction<RowData> function = new RowTimeRangeBoundedPrecedingFunction<>( aggsHandleFunction, accTypes, inputFieldTypes, 2000, 2); KeyedProcessOperator<RowData, RowData, RowData> operator = new KeyedProcessOperator<>(function); OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator); testHarness.open(); Counter counter = function.getCounter(); // put some records testHarness.processElement(insertRecord("key", 1L, 100L)); testHarness.processElement(insertRecord("key", 1L, 100L)); testHarness.processElement(insertRecord("key", 1L, 500L)); testHarness.processWatermark(new Watermark(500L)); // late record testHarness.processElement(insertRecord("key", 1L, 400L)); 
assertThat(counter.getCount()).isEqualTo(1L); } }
RowTimeRangeBoundedPrecedingFunctionTest
java
elastic__elasticsearch
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkResponse.java
{ "start": 863, "end": 2683 }
class ____ extends ActionResponse { private final long tookInMillis; private Error error; private final boolean ignored; public MonitoringBulkResponse(final long tookInMillis, final boolean ignored) { this.tookInMillis = tookInMillis; this.ignored = ignored; } public MonitoringBulkResponse(final long tookInMillis, final Error error) { this(tookInMillis, false); this.error = error; } public MonitoringBulkResponse(StreamInput in) throws IOException { tookInMillis = in.readVLong(); error = in.readOptionalWriteable(Error::new); ignored = in.readBoolean(); } public TimeValue getTook() { return new TimeValue(tookInMillis); } public long getTookInMillis() { return tookInMillis; } /** * Determine if the request was ignored. * * @return {@code true} if the request was ignored because collection was disabled. */ public boolean isIgnored() { return ignored; } /** * Returns HTTP status * * <ul> * <li>{@link RestStatus#OK} if monitoring bulk request was successful (or ignored because collection is disabled)</li> * <li>{@link RestStatus#INTERNAL_SERVER_ERROR} if monitoring bulk request was partially successful or failed completely</li> * </ul> */ public RestStatus status() { if (error == null) { return RestStatus.OK; } return RestStatus.INTERNAL_SERVER_ERROR; } public Error getError() { return error; } @Override public void writeTo(StreamOutput out) throws IOException { out.writeVLong(tookInMillis); out.writeOptionalWriteable(error); out.writeBoolean(ignored); } public static
MonitoringBulkResponse
java
apache__hadoop
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/PseudoLocalFs.java
{ "start": 5834, "end": 10333 }
class ____ extends InputStream implements Seekable, PositionedReadable { private final Random r = new Random(); private BytesWritable val = null; private int positionInVal = 0;// current position in the buffer 'val' private long totalSize = 0;// total number of random bytes to be generated private long curPos = 0;// current position in this stream /** * @param size total number of random bytes to be generated in this stream * @param bufferSize the buffer size. An internal buffer array of length * <code>bufferSize</code> is created. If <code>bufferSize</code> is not a * positive number, then a default value of 1MB is used. */ RandomInputStream(long size, int bufferSize) { totalSize = size; if (bufferSize <= 0) { bufferSize = DEFAULT_BUFFER_SIZE; } val = new BytesWritable(new byte[bufferSize]); } @Override public int read() throws IOException { byte[] b = new byte[1]; if (curPos < totalSize) { if (positionInVal < val.getLength()) {// use buffered byte b[0] = val.getBytes()[positionInVal++]; ++curPos; } else {// generate data int num = read(b); if (num < 0) { return num; } } } else { return -1; } return b[0]; } @Override public int read(byte[] bytes) throws IOException { return read(bytes, 0, bytes.length); } @Override public int read(byte[] bytes, int off, int len) throws IOException { if (curPos == totalSize) { return -1;// EOF } int numBytes = len; if (numBytes > (totalSize - curPos)) {// position in file is close to EOF numBytes = (int)(totalSize - curPos); } if (numBytes > (val.getLength() - positionInVal)) { // need to generate data into val r.nextBytes(val.getBytes()); positionInVal = 0; } System.arraycopy(val.getBytes(), positionInVal, bytes, off, numBytes); curPos += numBytes; positionInVal += numBytes; return numBytes; } @Override public int available() { return (int)(val.getLength() - positionInVal); } @Override public int read(long position, byte[] buffer, int offset, int length) throws IOException { throw new UnsupportedOperationException(); } @Override 
public void readFully(long position, byte[] buffer) throws IOException { throw new UnsupportedOperationException(); } @Override public void readFully(long position, byte[] buffer, int offset, int length) throws IOException { throw new UnsupportedOperationException(); } /** * Get the current position in this stream/pseudo-file * @return the position in this stream/pseudo-file * @throws IOException */ @Override public long getPos() throws IOException { return curPos; } @Override public void seek(long pos) throws IOException { throw new UnsupportedOperationException(); } @Override public boolean seekToNewSource(long targetPos) throws IOException { throw new UnsupportedOperationException(); } } @Override public FSDataOutputStream append(Path path, int bufferSize, Progressable progress) throws IOException { throw new UnsupportedOperationException("Append is not supported" + " in pseudo local file system."); } @Override public boolean mkdirs(Path f, FsPermission permission) throws IOException { throw new UnsupportedOperationException("Mkdirs is not supported" + " in pseudo local file system."); } @Override public boolean rename(Path src, Path dst) throws IOException { throw new UnsupportedOperationException("Rename is not supported" + " in pseudo local file system."); } @Override public boolean delete(Path path, boolean recursive) { throw new UnsupportedOperationException("File deletion is not supported " + "in pseudo local file system."); } @Override public void setWorkingDirectory(Path newDir) { throw new UnsupportedOperationException("SetWorkingDirectory " + "is not supported in pseudo local file system."); } @Override public Path makeQualified(Path path) { // skip FileSystem#checkPath() to validate some other Filesystems return path.makeQualified(this.getUri(), this.getWorkingDirectory()); } }
RandomInputStream
java
apache__flink
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/resource/ResourceManagerTest.java
{ "start": 5691, "end": 8125 }
class ____ Class<?> clazz1 = Class.forName(GENERATED_LOWER_UDF_CLASS, false, userClassLoader); final Class<?> clazz2 = Class.forName(GENERATED_LOWER_UDF_CLASS, false, userClassLoader); assertEquals(clazz1, clazz2); } @Test public void testRegisterFileResource() throws Exception { ResourceUri normalizedResource = new ResourceUri( ResourceType.FILE, resourceManager.getURLFromPath(new Path(file.getPath())).getPath()); // register file resource, uri is formatted with "file" scheme prefix String localFilePath = resourceManager.registerFileResource( new ResourceUri(ResourceType.FILE, "file://" + file.getPath())); assertEquals(file.getPath(), localFilePath); Map<ResourceUri, URL> actualResource = Collections.singletonMap( normalizedResource, resourceManager.getURLFromPath(new Path(localFilePath))); assertThat(resourceManager.getResources()).containsExactlyEntriesOf(actualResource); // register the same file resource repeatedly, but without scheme assertThat( resourceManager.registerFileResource( new ResourceUri(ResourceType.FILE, file.getPath()))) .isEqualTo(localFilePath); assertThat(resourceManager.getResources()).containsExactlyEntriesOf(actualResource); // register the same file resource repeatedly, use relative path as uri assertThat( resourceManager.registerFileResource( new ResourceUri( ResourceType.FILE, new File(".") .getCanonicalFile() .toPath() .relativize(file.toPath()) .toString()))) .isEqualTo(localFilePath); assertThat(resourceManager.getResources()).containsExactlyEntriesOf(actualResource); } @Test public void testRegisterResourceWithRelativePath() throws Exception { URLClassLoader userClassLoader = resourceManager.getUserClassLoader(); // test
final
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/MultiStateTransitionListener.java
{ "start": 1284, "end": 2306 }
class ____ <OPERAND, EVENT, STATE extends Enum<STATE>> implements StateTransitionListener<OPERAND, EVENT, STATE> { private final List<StateTransitionListener<OPERAND, EVENT, STATE>> listeners = new ArrayList<>(); /** * Add a listener to the list of listeners. * @param listener A listener. */ public void addListener(StateTransitionListener<OPERAND, EVENT, STATE> listener) { listeners.add(listener); } @Override public void preTransition(OPERAND op, STATE beforeState, EVENT eventToBeProcessed) { for (StateTransitionListener<OPERAND, EVENT, STATE> listener : listeners) { listener.preTransition(op, beforeState, eventToBeProcessed); } } @Override public void postTransition(OPERAND op, STATE beforeState, STATE afterState, EVENT processedEvent) { for (StateTransitionListener<OPERAND, EVENT, STATE> listener : listeners) { listener.postTransition(op, beforeState, afterState, processedEvent); } } }
MultiStateTransitionListener
java
elastic__elasticsearch
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/contextualai/ContextualAiServiceTests.java
{ "start": 815, "end": 1602 }
class ____ extends ESTestCase { private ThreadPool threadPool; @Override public void setUp() throws Exception { super.setUp(); threadPool = new TestThreadPool(getTestName()); } @Override public void tearDown() throws Exception { super.tearDown(); terminate(threadPool); } public void testRerankerWindowSize() { var service = createContextualAiService(); assertThat(service.rerankerWindowSize("any-model"), is(5500)); } private ContextualAiService createContextualAiService() { return new ContextualAiService( mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool), mockClusterServiceEmpty() ); } }
ContextualAiServiceTests
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/operators/sort/ReusingSortMergeCoGroupIterator.java
{ "start": 1402, "end": 5478 }
enum ____ { NONE_REMAINED, FIRST_REMAINED, SECOND_REMAINED, FIRST_EMPTY, SECOND_EMPTY } // -------------------------------------------------------------------------------------------- private MatchStatus matchStatus; private Iterable<T1> firstReturn; private Iterable<T2> secondReturn; private TypePairComparator<T1, T2> comp; private ReusingKeyGroupedIterator<T1> iterator1; private ReusingKeyGroupedIterator<T2> iterator2; // -------------------------------------------------------------------------------------------- public ReusingSortMergeCoGroupIterator( MutableObjectIterator<T1> input1, MutableObjectIterator<T2> input2, TypeSerializer<T1> serializer1, TypeComparator<T1> groupingComparator1, TypeSerializer<T2> serializer2, TypeComparator<T2> groupingComparator2, TypePairComparator<T1, T2> pairComparator) { this.comp = pairComparator; this.iterator1 = new ReusingKeyGroupedIterator<T1>(input1, serializer1, groupingComparator1); this.iterator2 = new ReusingKeyGroupedIterator<T2>(input2, serializer2, groupingComparator2); } @Override public void open() {} @Override public void close() {} @Override public Iterable<T1> getValues1() { return this.firstReturn; } @Override public Iterable<T2> getValues2() { return this.secondReturn; } @Override public boolean next() throws IOException { boolean firstEmpty = true; boolean secondEmpty = true; if (this.matchStatus != MatchStatus.FIRST_EMPTY) { if (this.matchStatus == MatchStatus.FIRST_REMAINED) { // comparator is still set correctly firstEmpty = false; } else { if (this.iterator1.nextKey()) { this.comp.setReference(this.iterator1.getCurrent()); firstEmpty = false; } } } if (this.matchStatus != MatchStatus.SECOND_EMPTY) { if (this.matchStatus == MatchStatus.SECOND_REMAINED) { secondEmpty = false; } else { if (iterator2.nextKey()) { secondEmpty = false; } } } if (firstEmpty && secondEmpty) { // both inputs are empty return false; } else if (firstEmpty && !secondEmpty) { // input1 is empty, input2 not this.firstReturn = 
Collections.emptySet(); this.secondReturn = this.iterator2.getValues(); this.matchStatus = MatchStatus.FIRST_EMPTY; return true; } else if (!firstEmpty && secondEmpty) { // input1 is not empty, input 2 is empty this.firstReturn = this.iterator1.getValues(); this.secondReturn = Collections.emptySet(); this.matchStatus = MatchStatus.SECOND_EMPTY; return true; } else { // both inputs are not empty final int comp = this.comp.compareToReference(this.iterator2.getCurrent()); if (0 == comp) { // keys match this.firstReturn = this.iterator1.getValues(); this.secondReturn = this.iterator2.getValues(); this.matchStatus = MatchStatus.NONE_REMAINED; } else if (0 < comp) { // key1 goes first this.firstReturn = this.iterator1.getValues(); this.secondReturn = Collections.emptySet(); this.matchStatus = MatchStatus.SECOND_REMAINED; } else { // key 2 goes first this.firstReturn = Collections.emptySet(); this.secondReturn = this.iterator2.getValues(); this.matchStatus = MatchStatus.FIRST_REMAINED; } return true; } } }
MatchStatus
java
quarkusio__quarkus
integration-tests/gradle/src/main/resources/basic-composite-build-extension-project/extensions/example-extension/deployment/src/main/java/org/acme/example/extension/deployment/QuarkusExampleProcessor.java
{ "start": 434, "end": 881 }
class ____ { private static final String FEATURE = "example"; @BuildStep FeatureBuildItem feature() { return new FeatureBuildItem(FEATURE); } @BuildStep void addLibABean(BuildProducer<AdditionalBeanBuildItem> additionalBeans) { additionalBeans.produce(new AdditionalBeanBuildItem.Builder() .addBeanClasses(LibA.class) .setUnremovable() .setDefaultScope(DotNames.APPLICATION_SCOPED) .build()); } }
QuarkusExampleProcessor
java
spring-projects__spring-boot
module/spring-boot-jackson/src/test/java/org/springframework/boot/jackson/autoconfigure/JacksonAutoConfigurationTests.java
{ "start": 39330, "end": 39513 }
class ____ extends ObjectValueSerializer<Baz> { @Override protected void serializeObject(Baz value, JsonGenerator jgen, SerializationContext context) { } } static
BazSerializer
java
micronaut-projects__micronaut-core
core-processor/src/main/java/io/micronaut/inject/processing/IntroductionInterfaceBeanElementCreator.java
{ "start": 1152, "end": 1242 }
interface ____ builder. * * @author Denis Stepanov * @since 4.0.0 */ @Internal final
proxy
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/boot/archive/internal/UrlInputStreamAccess.java
{ "start": 359, "end": 813 }
class ____ implements InputStreamAccess, Serializable { private final URL url; public UrlInputStreamAccess(URL url) { this.url = url; } @Override public String getStreamName() { return url.toExternalForm(); } @Override public InputStream accessInputStream() { try { return url.openStream(); } catch (Exception e) { throw new HibernateException( "Could not open url stream : " + url.toExternalForm() ); } } }
UrlInputStreamAccess
java
elastic__elasticsearch
x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/FirstIntByTimestampAggregator.java
{ "start": 1349, "end": 1788 }
class ____ generated. Edit `X-ValueByTimestampAggregator.java.st` instead. */ @Aggregator( { @IntermediateState(name = "timestamps", type = "LONG"), @IntermediateState(name = "values", type = "INT"), @IntermediateState(name = "seen", type = "BOOLEAN") } ) @GroupingAggregator( { @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), @IntermediateState(name = "values", type = "INT_BLOCK") } ) public
is
java
quarkusio__quarkus
extensions/resteasy-reactive/rest-jackson/deployment/src/test/java/io/quarkus/resteasy/reactive/jackson/deployment/test/FieldNameSetGetPrefixResourceTest.java
{ "start": 1644, "end": 1916 }
class ____ { @GET public UncommonBody get() { return new UncommonBody("id", true, true, false, "setText"); } private record UncommonBody(String id, boolean set, boolean get, boolean is, String setText) { } } }
Resource
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EvalExec.java
{ "start": 1081, "end": 3141 }
class ____ extends UnaryExec implements EstimatesRowSize { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( PhysicalPlan.class, "EvalExec", EvalExec::new ); private final List<Alias> fields; public EvalExec(Source source, PhysicalPlan child, List<Alias> fields) { super(source, child); this.fields = fields; } private EvalExec(StreamInput in) throws IOException { this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(PhysicalPlan.class), in.readCollectionAsList(Alias::new)); } @Override public void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); out.writeNamedWriteable(child()); out.writeCollection(fields()); } @Override public String getWriteableName() { return ENTRY.name; } public List<Alias> fields() { return fields; } @Override public List<Attribute> output() { return mergeOutputAttributes(fields, child().output()); } @Override protected AttributeSet computeReferences() { return Eval.computeReferences(fields); } @Override public UnaryExec replaceChild(PhysicalPlan newChild) { return new EvalExec(source(), newChild, fields); } @Override protected NodeInfo<? extends PhysicalPlan> info() { return NodeInfo.create(this, EvalExec::new, child(), fields); } @Override public PhysicalPlan estimateRowSize(State state) { state.add(false, fields); return this; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } EvalExec eval = (EvalExec) o; return child().equals(eval.child()) && Objects.equals(fields, eval.fields); } @Override public int hashCode() { return Objects.hash(super.hashCode(), fields); } }
EvalExec
java
elastic__elasticsearch
libs/x-content/src/main/java/org/elasticsearch/xcontent/ParseField.java
{ "start": 863, "end": 8195 }
class ____ { private final String name; private final String[] deprecatedNames; private final Predicate<RestApiVersion> forRestApiVersion; private final String allReplacedWith; private final boolean fullyDeprecated; private final String[] allNames; private static final String[] EMPTY = new String[0]; private ParseField( String name, Predicate<RestApiVersion> forRestApiVersion, String[] deprecatedNames, boolean fullyDeprecated, String allReplacedWith ) { this.name = name; this.fullyDeprecated = fullyDeprecated; this.allReplacedWith = allReplacedWith; if (deprecatedNames == null || deprecatedNames.length == 0) { this.deprecatedNames = EMPTY; } else { final HashSet<String> set = new HashSet<>(); Collections.addAll(set, deprecatedNames); this.deprecatedNames = set.toArray(new String[set.size()]); } this.forRestApiVersion = forRestApiVersion; Set<String> names = new HashSet<>(); names.add(name); Collections.addAll(names, this.deprecatedNames); this.allNames = names.toArray(new String[names.size()]); } /** * Creates a field available for lookup for both current and previous REST API versions * @param name the primary name for this field. This will be returned by * {@link #getPreferredName()} * @param deprecatedNames names for this field which are deprecated and will not be * accepted when strict matching is used. */ public ParseField(String name, String... 
deprecatedNames) { this(name, RestApiVersion.onOrAfter(RestApiVersion.minimumSupported()), deprecatedNames, false, null); } /** * @return the preferred name used for this field */ public String getPreferredName() { return name; } /** * @return All names for this field regardless of whether they are * deprecated */ public String[] getAllNamesIncludedDeprecated() { return allNames; } /** * @param deprecatedNamesOverride * deprecated names to include with the returned * {@link ParseField} * @return a new {@link ParseField} using the preferred name from this one * but with the specified deprecated names */ public ParseField withDeprecation(String... deprecatedNamesOverride) { return new ParseField(this.name, this.forRestApiVersion, deprecatedNamesOverride, this.fullyDeprecated, this.allReplacedWith); } /** * Creates a new field with current name and deprecatedNames, but overrides forRestApiVersion * @param forRestApiVersionOverride - a boolean function indicating for what version a deprecated name is available */ public ParseField forRestApiVersion(Predicate<RestApiVersion> forRestApiVersionOverride) { return new ParseField(this.name, forRestApiVersionOverride, this.deprecatedNames, this.fullyDeprecated, this.allReplacedWith); } /** * @return a function indicating for which RestApiVersion a deprecated name is declared for */ public Predicate<RestApiVersion> getForRestApiVersion() { return forRestApiVersion; } /** * Return a new ParseField where all field names are deprecated and replaced * with {@code allReplacedWith}. 
*/ public ParseField withAllDeprecated(String allReplacedWithOverride) { return new ParseField( this.name, this.forRestApiVersion, getAllNamesIncludedDeprecated(), this.fullyDeprecated, allReplacedWithOverride ); } /** * Return a new ParseField where all field names are deprecated with no replacement */ public ParseField withAllDeprecated() { return new ParseField(this.name, this.forRestApiVersion, getAllNamesIncludedDeprecated(), true, this.allReplacedWith); } /** * Does {@code fieldName} match this field? * @param fieldName * the field name to match against this {@link ParseField} * @param deprecationHandler called if {@code fieldName} is deprecated * @return true if <code>fieldName</code> matches any of the acceptable * names for this {@link ParseField}. */ public boolean match(String fieldName, DeprecationHandler deprecationHandler) { return match(null, () -> XContentLocation.UNKNOWN, fieldName, deprecationHandler); } /** * Does {@code fieldName} match this field? * @param parserName * the name of the parent object holding this field * @param location * the XContentLocation of the field * @param fieldName * the field name to match against this {@link ParseField} * @param deprecationHandler called if {@code fieldName} is deprecated * @return true if <code>fieldName</code> matches any of the acceptable * names for this {@link ParseField}. */ public boolean match(String parserName, Supplier<XContentLocation> location, String fieldName, DeprecationHandler deprecationHandler) { Objects.requireNonNull(fieldName, "fieldName cannot be null"); // if this parse field has not been completely deprecated then try to // match the preferred name if (fullyDeprecated == false && allReplacedWith == null && fieldName.equals(name)) { return true; } boolean isCompatibleDeprecation = RestApiVersion.minimumSupported().matches(forRestApiVersion) && RestApiVersion.current().matches(forRestApiVersion) == false; // Now try to match against one of the deprecated names. 
Note that if // the parse field is entirely deprecated (allReplacedWith != null) all // fields will be in the deprecatedNames array for (String depName : deprecatedNames) { if (fieldName.equals(depName)) { if (fullyDeprecated) { deprecationHandler.logRemovedField(parserName, location, fieldName, isCompatibleDeprecation); } else if (allReplacedWith == null) { deprecationHandler.logRenamedField(parserName, location, fieldName, name, isCompatibleDeprecation); } else { deprecationHandler.logReplacedField(parserName, location, fieldName, allReplacedWith, isCompatibleDeprecation); } return true; } } return false; } @Override public String toString() { return getPreferredName(); } /** * @return the message to use if this {@link ParseField} has been entirely * deprecated in favor of something else. This method will return * <code>null</code> if the ParseField has not been completely * deprecated. */ public String getAllReplacedWith() { return allReplacedWith; } /** * @return an array of the names for the {@link ParseField} which are * deprecated. */ public String[] getDeprecatedNames() { return deprecatedNames; } public static
ParseField
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingManagerFactory.java
{ "start": 1239, "end": 1890 }
class ____ { private static final Logger LOG = LoggerFactory.getLogger( NetworkTagMappingManagerFactory.class); private NetworkTagMappingManagerFactory() {} public static NetworkTagMappingManager getManager(Configuration conf) { Class<? extends NetworkTagMappingManager> managerClass = conf.getClass(YarnConfiguration.NM_NETWORK_TAG_MAPPING_MANAGER, NetworkTagMappingJsonManager.class, NetworkTagMappingManager.class); LOG.info("Using NetworkTagMappingManager implementation - " + managerClass); return ReflectionUtils.newInstance(managerClass, conf); } }
NetworkTagMappingManagerFactory
java
spring-projects__spring-framework
spring-test/src/test/java/org/springframework/test/web/servlet/MockMvcReuseTests.java
{ "start": 3339, "end": 3512 }
class ____ { @GetMapping("/") String hello() { return HELLO; } @GetMapping(path = "/", params = ENIGMA) String enigma() { return ENIGMA; } } }
MyController
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/health/metadata/HealthMetadataTests.java
{ "start": 661, "end": 3724 }
class ____ extends ESTestCase { public void testDiskFreeBytesCalculationOfAbsoluteValue() { HealthMetadata.Disk metadata = HealthMetadata.Disk.newBuilder() .highWatermark("100B", "bytes-high") .floodStageWatermark("50B", "bytes-flood") .frozenFloodStageWatermark("50B", "bytes-frozen-flood") .frozenFloodStageMaxHeadroom("20B", "headroom") .build(); assertThat(metadata.getFreeBytesHighWatermark(ByteSizeValue.MINUS_ONE), equalTo(ByteSizeValue.ofBytes(100))); assertThat(metadata.getFreeBytesFloodStageWatermark(ByteSizeValue.MINUS_ONE), equalTo(ByteSizeValue.ofBytes(50))); assertThat(metadata.getFreeBytesFrozenFloodStageWatermark(ByteSizeValue.MINUS_ONE), equalTo(ByteSizeValue.ofBytes(50))); } public void testDiskFreeBytesCalculationMaxHeadroom() { HealthMetadata.Disk metadata = HealthMetadata.Disk.newBuilder() .highWatermark("90%", "ratio-high") .highMaxHeadroom(ByteSizeValue.ofBytes(10)) .floodStageWatermark("95%", "ratio-flood") .floodStageMaxHeadroom(ByteSizeValue.ofBytes(5)) .frozenFloodStageWatermark("95%", "ratio-frozen-flood") .frozenFloodStageMaxHeadroom("20B", "headroom") .build(); assertThat(metadata.getFreeBytesHighWatermark(ByteSizeValue.ofBytes(1000)), equalTo(ByteSizeValue.ofBytes(10))); assertThat(metadata.getFreeBytesFloodStageWatermark(ByteSizeValue.ofBytes(1000)), equalTo(ByteSizeValue.ofBytes(5))); assertThat(metadata.getFreeBytesFrozenFloodStageWatermark(ByteSizeValue.ofBytes(1000)), equalTo(ByteSizeValue.ofBytes(20))); } public void testDiskFreeBytesCalculationPercent() { HealthMetadata.Disk metadata = HealthMetadata.Disk.newBuilder() .highWatermark("90%", "ratio-high") .floodStageWatermark("95%", "ratio-flood") .frozenFloodStageWatermark("95%", "ratio-frozen-flood") .frozenFloodStageMaxHeadroom("60B", "headroom") .build(); assertThat(metadata.getFreeBytesHighWatermark(ByteSizeValue.ofBytes(1000)), equalTo(ByteSizeValue.ofBytes(100))); assertThat(metadata.getFreeBytesFloodStageWatermark(ByteSizeValue.ofBytes(1000)), 
equalTo(ByteSizeValue.ofBytes(50))); assertThat(metadata.getFreeBytesFrozenFloodStageWatermark(ByteSizeValue.ofBytes(1000)), equalTo(ByteSizeValue.ofBytes(50))); } public void testShardLimitsBuilders() { var shardLimits = HealthMetadata.ShardLimits.newBuilder() .maxShardsPerNode(100) .maxShardsPerNodeFrozen(999) .shardCapacityUnhealthyThresholdYellow(10) .shardCapacityUnhealthyThresholdRed(5) .build(); // Regular builder assertEquals(shardLimits, new HealthMetadata.ShardLimits(100, 999, 10, 5)); // Copy-builder assertEquals(HealthMetadata.ShardLimits.newBuilder(shardLimits).build(), new HealthMetadata.ShardLimits(100, 999, 10, 5)); } }
HealthMetadataTests
java
spring-projects__spring-framework
spring-webflux/src/main/java/org/springframework/web/reactive/handler/WebFluxResponseStatusExceptionHandler.java
{ "start": 1502, "end": 1944 }
class ____ extends ResponseStatusExceptionHandler { @Override protected @Nullable HttpStatusCode determineStatus(Throwable ex) { HttpStatusCode statusCode = super.determineStatus(ex); if (statusCode == null) { ResponseStatus ann = AnnotatedElementUtils.findMergedAnnotation(ex.getClass(), ResponseStatus.class); if (ann != null) { statusCode = ann.code(); } } return statusCode; } }
WebFluxResponseStatusExceptionHandler
java
quarkusio__quarkus
integration-tests/test-extension/extension/deployment/src/test/java/io/quarkus/config/UnremoveableConfigMappingTest.java
{ "start": 378, "end": 871 }
class ____ { @RegisterExtension static final QuarkusUnitTest TEST = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClass(UnremovableMappingFromBuildItem.class)); @Inject SmallRyeConfig config; @Test void unremoveableMapping() { UnremovableMappingFromBuildItem mapping = config.getConfigMapping(UnremovableMappingFromBuildItem.class); assertEquals("1234", mapping.value()); } }
UnremoveableConfigMappingTest
java
quarkusio__quarkus
integration-tests/grpc-vertx/src/test/java/io/quarkus/grpc/examples/hello/HelloWorldNewServiceTestBase.java
{ "start": 678, "end": 3279 }
class ____ { private Channel channel; private Vertx _vertx; protected Vertx vertx() { return null; } protected void close(Vertx vertx) { } protected abstract int port(); protected void checkVerticles() { } protected void checkVerticles(Vertx vertx) { VertxInternal internal = (VertxInternal) vertx; Set<String> deploymentIDs = internal.deploymentIDs(); // should be just one, but in the worst case skip the test if not Assumptions.assumeTrue(deploymentIDs.size() == 1); Deployment deployment = internal.getDeployment(deploymentIDs.iterator().next()); Set<Verticle> verticles = deployment.getVerticles(); Assumptions.assumeTrue(verticles.size() > 1); } protected boolean skipEventloopTest() { return false; } @BeforeEach public void init() { _vertx = vertx(); channel = GRPCTestUtils.channel(_vertx, port()); } @AfterEach public void cleanup() { GRPCTestUtils.close(channel); close(_vertx); } @Test public void testEventLoop() { // only check those where we know Vertx instance comes from Quarkus checkVerticles(); Assumptions.assumeFalse(skipEventloopTest()); Set<String> threadNames = new HashSet<>(); for (int i = 0; i < 10; i++) { Channel newChannel = GRPCTestUtils.channel(_vertx, port()); try { GreeterGrpc.GreeterBlockingStub client = GreeterGrpc.newBlockingStub(newChannel); HelloReply reply = client.threadName(HelloRequest.newBuilder().build()); threadNames.add(reply.getMessage()); } finally { GRPCTestUtils.close(newChannel); } } assertThat(threadNames.size()).isGreaterThan(1); } @Test public void testHelloWorldServiceUsingBlockingStub() { GreeterGrpc.GreeterBlockingStub client = GreeterGrpc.newBlockingStub(channel); HelloReply reply = client .sayHello(HelloRequest.newBuilder().setName("neo-blocking").build()); assertThat(reply.getMessage()).isEqualTo("Hello neo-blocking"); } @Test public void testHelloWorldServiceUsingMutinyStub() { HelloReply reply = MutinyGreeterGrpc.newMutinyStub(channel) .sayHello(HelloRequest.newBuilder().setName("neo-blocking").build()) 
.await().atMost(Duration.ofSeconds(5)); assertThat(reply.getMessage()).isEqualTo("Hello neo-blocking"); } }
HelloWorldNewServiceTestBase
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java
{ "start": 21124, "end": 21288 }
class ____ maintained for backwards compatibility and performance purposes. We use it for serialisation along with * {@link WildcardStates}. */ private
is
java
FasterXML__jackson-databind
src/main/java/tools/jackson/databind/deser/impl/ObjectIdReader.java
{ "start": 403, "end": 3816 }
class ____ implements java.io.Serializable { private static final long serialVersionUID = 1L; protected final JavaType _idType; public final PropertyName propertyName; /** * Blueprint generator instance: actual instance will be * fetched from {@link SerializationContext} using this as * the key. */ public final ObjectIdGenerator<?> generator; public final ObjectIdResolver resolver; /** * Deserializer used for deserializing id values. */ protected final ValueDeserializer<Object> _deserializer; public final SettableBeanProperty idProperty; /* /********************************************************** /* Life-cycle /********************************************************** */ @SuppressWarnings("unchecked") protected ObjectIdReader(JavaType t, PropertyName propName, ObjectIdGenerator<?> gen, ValueDeserializer<?> deser, SettableBeanProperty idProp, ObjectIdResolver resolver) { _idType = t; propertyName = propName; generator = gen; this.resolver = resolver; _deserializer = (ValueDeserializer<Object>) deser; idProperty = idProp; } /** * Factory method called by {@link tools.jackson.databind.ser.bean.BeanSerializerBase} * with the initial information based on standard settings for the type * for which serializer is being built. 
*/ public static ObjectIdReader construct(JavaType idType, PropertyName propName, ObjectIdGenerator<?> generator, ValueDeserializer<?> deser, SettableBeanProperty idProp, ObjectIdResolver resolver) { return new ObjectIdReader(idType, propName, generator, deser, idProp, resolver); } /* /********************************************************** /* API /********************************************************** */ public ValueDeserializer<Object> getDeserializer() { return _deserializer; } public JavaType getIdType() { return _idType; } /** * Convenience method, equivalent to calling: *<code> * readerInstance.generator.maySerializeAsObject(); *</code> * and used to determine whether Object Ids handled by the underlying * generator may be in form of (JSON) Objects. * Used for optimizing handling in cases where method returns false. */ public boolean maySerializeAsObject() { return generator.maySerializeAsObject(); } /** * Convenience method, equivalent to calling: *<code> * readerInstance.generator.isValidReferencePropertyName(name, parser); *</code> * and used to determine whether Object Ids handled by the underlying * generator may be in form of (JSON) Objects. * Used for optimizing handling in cases where method returns false. */ public boolean isValidReferencePropertyName(String name, JsonParser parser) { return generator.isValidReferencePropertyName(name, parser); } /** * Method called to read value that is expected to be an Object Reference * (that is, value of an Object Id used to refer to another object). */ public Object readObjectReference(JsonParser p, DeserializationContext ctxt) throws JacksonException { return _deserializer.deserialize(p, ctxt); } }
ObjectIdReader
java
quarkusio__quarkus
independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/update/rewrite/QuarkusUpdateException.java
{ "start": 60, "end": 362 }
class ____ extends RuntimeException { private static final long serialVersionUID = 1L; public QuarkusUpdateException(String message, Throwable cause) { super(message, cause); } public QuarkusUpdateException(String message) { super(message); } }
QuarkusUpdateException