language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/FloatArrayFieldTest_primitive.java
{ "start": 1122, "end": 1336 }
class ____ { private float[] value; public float[] getValue() { return value; } public void setValue(float[] value) { this.value = value; } } }
V0
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/index/mapper/LookupRuntimeFieldTypeTests.java
{ "start": 1383, "end": 6095 }
class ____ extends MapperServiceTestCase { public void testFetchValues() throws IOException { String mapping = """ { "_doc": { "properties" : { "foo" : { "type" : "keyword" } }, "runtime": { "foo_lookup_field": { "type": "lookup", "target_index": "my_index", "input_field": "foo", "target_field": "term_field_foo", "fetch_fields": [ "remote_field_*", {"field": "created", "format": "YYYY-dd-MM"} ] } } } } """; var mapperService = createMapperService(mapping); Source source = Source.fromMap(Map.of("foo", List.of("f1", "f2")), randomFrom(XContentType.values())); MappedFieldType fieldType = mapperService.fieldType("foo_lookup_field"); ValueFetcher valueFetcher = fieldType.valueFetcher(createSearchExecutionContext(mapperService), null); DocumentField doc = valueFetcher.fetchDocumentField("foo_lookup_field", source, -1); assertNotNull(doc); assertThat(doc.getName(), equalTo("foo_lookup_field")); assertThat(doc.getValues(), empty()); assertThat(doc.getIgnoredValues(), empty()); assertThat( doc.getLookupFields(), contains( new LookupField( "my_index", new TermQueryBuilder("term_field_foo", "f1"), List.of(new FieldAndFormat("remote_field_*", null), new FieldAndFormat("created", "YYYY-dd-MM")), 1 ), new LookupField( "my_index", new TermQueryBuilder("term_field_foo", "f2"), List.of(new FieldAndFormat("remote_field_*", null), new FieldAndFormat("created", "YYYY-dd-MM")), 1 ) ) ); } public void testEmptyInputField() throws IOException { String mapping = """ { "_doc": { "properties" : { "foo" : { "type" : "keyword" } }, "runtime": { "foo_lookup_field": { "type": "lookup", "target_index": "my_index", "input_field": "foo", "target_field": "term_field_foo", "fetch_fields": ["remote_field_*"] } } } } """; var mapperService = createMapperService(mapping); XContentBuilder source = XContentFactory.jsonBuilder(); source.startObject(); if (randomBoolean()) { source.field("foo", List.of()); } source.endObject(); Source s = Source.fromBytes(BytesReference.bytes(source), XContentType.JSON); 
MappedFieldType fieldType = mapperService.fieldType("foo_lookup_field"); ValueFetcher valueFetcher = fieldType.valueFetcher(createSearchExecutionContext(mapperService), null); DocumentField doc = valueFetcher.fetchDocumentField("foo_lookup_field", s, -1); assertNull(doc); } public void testInputFieldDoesNotExist() throws IOException { String mapping = """ { "_doc": { "runtime": { "foo_lookup_field": { "type": "lookup", "target_index": "my_index", "input_field": "barbaz", "target_field": "term_field_foo", "fetch_fields": ["field-1", "field-2"] } } } } """; var mapperService = createMapperService(mapping); MappedFieldType fieldType = mapperService.fieldType("foo_lookup_field"); // fails if unmapped_fields is not QueryShardException error = expectThrows(QueryShardException.class, () -> { SearchExecutionContext context = createSearchExecutionContext(mapperService); context.setAllowUnmappedFields(randomBoolean()); fieldType.valueFetcher(context, null); }); assertThat(error.getMessage(), containsString("No field mapping can be found for the field with name [barbaz]")); } }
LookupRuntimeFieldTypeTests
java
quarkusio__quarkus
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/producer/ProducerWithClassAndMethodLevelInterceptorsAndBindingsSourceTest.java
{ "start": 871, "end": 1663 }
class ____ { @RegisterExtension public ArcTestContainer container = new ArcTestContainer(MyBinding1.class, MyInterceptor1.class, MyBinding2.class, MyInterceptor2.class, MyProducer.class); @Test public void test() { MyNonbean nonbean = Arc.container().instance(MyNonbean.class).get(); assertEquals("intercepted1: intercepted2: hello1", nonbean.hello1()); assertEquals("intercepted1: hello2", nonbean.hello2()); assertEquals("hello3", nonbean.hello3()); assertEquals("intercepted2: hello4", nonbean.hello4()); } @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.TYPE, ElementType.METHOD, ElementType.CONSTRUCTOR }) @InterceptorBinding @
ProducerWithClassAndMethodLevelInterceptorsAndBindingsSourceTest
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/engine/ThreadPoolMergeScheduler.java
{ "start": 37638, "end": 37706 }
enum ____ { ABORT, RUN, BACKLOG } }
Schedule
java
micronaut-projects__micronaut-core
core-processor/src/main/java/io/micronaut/inject/ast/ClassElement.java
{ "start": 24824, "end": 27281 }
class ____ as the originating element. * * <p>Note that this method can only be called on classes being directly compiled by Micronaut. If the ClassElement is * loaded from pre-compiled code an {@link UnsupportedOperationException} will be thrown.</p> * * @param type The type of the bean * @return A bean builder */ @NonNull default BeanElementBuilder addAssociatedBean(@NonNull ClassElement type) { throw new UnsupportedOperationException("Element of type [" + getClass() + "] does not support adding associated beans at compilation time"); } @Override default ClassElement withAnnotationMetadata(AnnotationMetadata annotationMetadata) { return (ClassElement) TypedElement.super.withAnnotationMetadata(annotationMetadata); } /** * Copies this element and overrides its type arguments. * * @param typeArguments The type arguments * @return A new element * @since 4.0.0 */ @NonNull default ClassElement withTypeArguments(Map<String, ClassElement> typeArguments) { throw new UnsupportedOperationException("Element of type [" + getClass() + "] does not support copy constructor"); } /** * Copies this element and overrides its type arguments. * Variation of {@link #withTypeArguments(Map)} that doesn't require type argument names. * * @param typeArguments The type arguments * @return A new element * @since 4.0.0 */ @NonNull default ClassElement withTypeArguments(@NonNull Collection<ClassElement> typeArguments) { if (typeArguments.isEmpty()) { // Allow to eliminate all arguments return withTypeArguments(Collections.emptyMap()); } Set<String> genericNames = getTypeArguments().keySet(); if (genericNames.size() != typeArguments.size()) { throw new IllegalStateException("Expected to have: " + genericNames.size() + " type arguments! Got: " + typeArguments.size()); } Map<String, ClassElement> boundByName = CollectionUtils.newLinkedHashMap(typeArguments.size()); Iterator<String> keys = genericNames.iterator(); Iterator<? 
extends ClassElement> args = typeArguments.iterator(); while (keys.hasNext() && args.hasNext()) { boundByName.put(keys.next(), args.next()); } return withTypeArguments(boundByName); } /** * Create a
element
java
quarkusio__quarkus
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/event/select/BreakInEvent.java
{ "start": 50, "end": 95 }
class ____ extends SecurityEvent { }
BreakInEvent
java
apache__rocketmq
tools/src/main/java/org/apache/rocketmq/tools/command/controller/UpdateControllerConfigSubCommand.java
{ "start": 1282, "end": 3573 }
class ____ implements SubCommand { @Override public String commandName() { return "updateControllerConfig"; } @Override public String commandDesc() { return "Update controller config."; } @Override public Options buildCommandlineOptions(final Options options) { Option opt = new Option("a", "controllerAddress", true, "Controller address list, eg: 192.168.0.1:9878;192.168.0.2:9878"); opt.setRequired(true); options.addOption(opt); opt = new Option("k", "key", true, "config key"); opt.setRequired(true); options.addOption(opt); opt = new Option("v", "value", true, "config value"); opt.setRequired(true); options.addOption(opt); return options; } @Override public void execute(final CommandLine commandLine, final Options options, final RPCHook rpcHook) throws SubCommandException { DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook); defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis())); try { // key name String key = commandLine.getOptionValue('k').trim(); // key name String value = commandLine.getOptionValue('v').trim(); Properties properties = new Properties(); properties.put(key, value); // servers String servers = commandLine.getOptionValue('a'); List<String> serverList = null; if (servers != null && servers.length() > 0) { String[] serverArray = servers.trim().split(";"); if (serverArray.length > 0) { serverList = Arrays.asList(serverArray); } } defaultMQAdminExt.start(); defaultMQAdminExt.updateControllerConfig(properties, serverList); System.out.printf("update controller config success!%s\n%s : %s\n", serverList == null ? "" : serverList, key, value); } catch (Exception e) { throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e); } finally { defaultMQAdminExt.shutdown(); } } }
UpdateControllerConfigSubCommand
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/engine/jdbc/env/internal/NormalizingIdentifierHelperImpl.java
{ "start": 629, "end": 5595 }
class ____ implements IdentifierHelper { private final JdbcEnvironment jdbcEnvironment; private final NameQualifierSupport nameQualifierSupport; private final boolean globallyQuoteIdentifiers; private final boolean globallyQuoteIdentifiersSkipColumnDefinitions; private final boolean autoQuoteKeywords; private final boolean autoQuoteInitialUnderscore; private final boolean autoQuoteDollar; private final TreeSet<String> reservedWords; private final IdentifierCaseStrategy unquotedCaseStrategy; private final IdentifierCaseStrategy quotedCaseStrategy; public NormalizingIdentifierHelperImpl( JdbcEnvironment jdbcEnvironment, NameQualifierSupport nameQualifierSupport, boolean globallyQuoteIdentifiers, boolean globallyQuoteIdentifiersSkipColumnDefinitions, boolean autoQuoteKeywords, boolean autoQuoteInitialUnderscore, boolean autoQuoteDollar, TreeSet<String> reservedWords, //careful, we intentionally omit making a defensive copy to not waste memory IdentifierCaseStrategy unquotedCaseStrategy, IdentifierCaseStrategy quotedCaseStrategy) { this.jdbcEnvironment = jdbcEnvironment; this.nameQualifierSupport = nameQualifierSupport; this.globallyQuoteIdentifiers = globallyQuoteIdentifiers; this.globallyQuoteIdentifiersSkipColumnDefinitions = globallyQuoteIdentifiersSkipColumnDefinitions; this.autoQuoteKeywords = autoQuoteKeywords; this.autoQuoteInitialUnderscore = autoQuoteInitialUnderscore; this.autoQuoteDollar = autoQuoteDollar; this.reservedWords = reservedWords; this.unquotedCaseStrategy = unquotedCaseStrategy == null ? IdentifierCaseStrategy.UPPER : unquotedCaseStrategy; this.quotedCaseStrategy = quotedCaseStrategy == null ? 
IdentifierCaseStrategy.MIXED : quotedCaseStrategy; } @Override public Identifier normalizeQuoting(Identifier identifier) { if ( identifier == null ) { return null; } else if ( identifier.isQuoted() ) { return identifier; } else if ( mustQuote( identifier ) ) { return Identifier.toIdentifier( identifier.getText(), true ); } else { return identifier; } } private boolean mustQuote(Identifier identifier) { final String identifierText = identifier.getText(); return globallyQuoteIdentifiers || autoQuoteKeywords && isReservedWord( identifierText ) || autoQuoteInitialUnderscore && identifierText.startsWith( "_" ) || autoQuoteDollar && identifierText.contains( "$" ); } @Override public Identifier toIdentifier(String text) { return normalizeQuoting( Identifier.toIdentifier( text ) ); } @Override public Identifier toIdentifier(String text, boolean quoted) { return normalizeQuoting( Identifier.toIdentifier( text, quoted ) ); } @Override public Identifier applyGlobalQuoting(String text) { return Identifier.toIdentifier( text, globallyQuoteIdentifiers && !globallyQuoteIdentifiersSkipColumnDefinitions, false ); } @Override public boolean isReservedWord(String word) { if ( !autoQuoteKeywords ) { throw new AssertionFailure( "The reserved keywords map is only initialized if autoQuoteKeywords is true" ); } return reservedWords.contains( word ); } @Override public String toMetaDataCatalogName(Identifier identifier) { if ( !nameQualifierSupport.supportsCatalogs() ) { // null is used to tell DatabaseMetaData to not limit results based on catalog. return null; } else { final var id = identifier == null ? jdbcEnvironment.getCurrentCatalog() : identifier; return id == null ? 
"" : toMetaDataText( id ); } } private String toMetaDataText(Identifier identifier) { if ( identifier == null ) { throw new IllegalArgumentException( "Identifier cannot be null; bad usage" ); } final String text = identifier.getText(); if ( identifier instanceof DatabaseIdentifier ) { return text; } else if ( identifier.isQuoted() ) { return switch ( quotedCaseStrategy ) { case UPPER -> text.toUpperCase( Locale.ROOT ); case LOWER -> text.toLowerCase( Locale.ROOT ); case MIXED -> text; // default }; } else { return switch ( unquotedCaseStrategy ) { case MIXED -> text; case LOWER -> text.toLowerCase( Locale.ROOT ); case UPPER -> text.toUpperCase( Locale.ROOT ); // default }; } } @Override public String toMetaDataSchemaName(Identifier identifier) { if ( !nameQualifierSupport.supportsSchemas() ) { // null is used to tell DatabaseMetaData to not limit results based on schema. return null; } else { final var id = identifier == null ? jdbcEnvironment.getCurrentSchema() : identifier; return id == null ? "" : toMetaDataText( id ); } } @Override public String toMetaDataObjectName(Identifier identifier) { if ( identifier == null ) { // if this method was called, the value is needed throw new IllegalArgumentException( "null was passed as an object name" ); } return toMetaDataText( identifier ); } }
NormalizingIdentifierHelperImpl
java
apache__thrift
lib/java/src/main/java/org/apache/thrift/async/TAsyncClientManager.java
{ "start": 6893, "end": 7340 }
class ____ implements Comparator<TAsyncMethodCall<?>>, Serializable { @Override public int compare(TAsyncMethodCall left, TAsyncMethodCall right) { if (left.getTimeoutTimestamp() == right.getTimeoutTimestamp()) { return (int) (left.getSequenceId() - right.getSequenceId()); } else { return (int) (left.getTimeoutTimestamp() - right.getTimeoutTimestamp()); } } } }
TAsyncMethodCallTimeoutComparator
java
elastic__elasticsearch
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java
{ "start": 59024, "end": 60129 }
class ____<V> implements AsyncSupplier<V> { private final AsyncSupplier<V> asyncSupplier; private volatile ListenableFuture<V> valueFuture = null; private CachingAsyncSupplier(AsyncSupplier<V> supplier) { this.asyncSupplier = supplier; } @Override public SubscribableListener<V> getAsync() { if (valueFuture == null) { boolean firstInvocation = false; synchronized (this) { if (valueFuture == null) { valueFuture = new ListenableFuture<>(); firstInvocation = true; } } if (firstInvocation) { asyncSupplier.getAsync().addListener(valueFuture); } } return valueFuture; } } public static void addSettings(List<Setting<?>> settings) { settings.add(ANONYMOUS_AUTHORIZATION_EXCEPTION_SETTING); settings.addAll(LoadAuthorizedIndicesTimeChecker.Factory.getSettings()); } }
CachingAsyncSupplier
java
bumptech__glide
library/test/src/test/java/com/bumptech/glide/load/data/LocalUriFetcherTest.java
{ "start": 898, "end": 1848 }
class ____ { private TestLocalUriFetcher fetcher; @Mock private DataFetcher.DataCallback<Closeable> callback; @Before public void setUp() { MockitoAnnotations.initMocks(this); fetcher = new TestLocalUriFetcher( ApplicationProvider.getApplicationContext(), Uri.parse("content://empty")); } @Test public void testClosesDataOnCleanup() throws Exception { fetcher.loadData(Priority.NORMAL, callback); fetcher.cleanup(); verify(fetcher.closeable).close(); } @Test public void testDoesNotCloseNullData() throws IOException { fetcher.cleanup(); verify(fetcher.closeable, never()).close(); } @Test public void testHandlesExceptionOnClose() throws Exception { fetcher.loadData(Priority.NORMAL, callback); doThrow(new IOException("Test")).when(fetcher.closeable).close(); fetcher.cleanup(); verify(fetcher.closeable).close(); } private static
LocalUriFetcherTest
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/script/field/vectors/FloatRankVectorsDocValuesField.java
{ "start": 910, "end": 3495 }
class ____ extends RankVectorsDocValuesField { private final BinaryDocValues input; private final BinaryDocValues magnitudes; private boolean decoded; private final int dims; private BytesRef value; private BytesRef magnitudesValue; private FloatVectorIterator vectorValues; private int numVectors; private float[] buffer; public FloatRankVectorsDocValuesField( BinaryDocValues input, BinaryDocValues magnitudes, String name, ElementType elementType, int dims ) { super(name, elementType); this.input = input; this.magnitudes = magnitudes; this.dims = dims; this.buffer = new float[dims]; } @Override public void setNextDocId(int docId) throws IOException { decoded = false; if (input.advanceExact(docId)) { boolean magnitudesFound = magnitudes.advanceExact(docId); assert magnitudesFound; value = input.binaryValue(); assert value.length % (Float.BYTES * dims) == 0; numVectors = value.length / (Float.BYTES * dims); magnitudesValue = magnitudes.binaryValue(); assert magnitudesValue.length == (Float.BYTES * numVectors); } else { value = null; magnitudesValue = null; numVectors = 0; } } @Override public RankVectorsScriptDocValues toScriptDocValues() { return new RankVectorsScriptDocValues(this, dims); } @Override public boolean isEmpty() { return value == null; } @Override public RankVectors get() { if (isEmpty()) { return RankVectors.EMPTY; } decodeVectorIfNecessary(); return new FloatRankVectors(vectorValues, magnitudesValue, numVectors, dims); } @Override public RankVectors get(RankVectors defaultValue) { if (isEmpty()) { return defaultValue; } decodeVectorIfNecessary(); return new FloatRankVectors(vectorValues, magnitudesValue, numVectors, dims); } @Override public RankVectors getInternal() { return get(null); } @Override public int size() { return value == null ? 
0 : value.length / (Float.BYTES * dims); } private void decodeVectorIfNecessary() { if (decoded == false && value != null) { vectorValues = new FloatVectorIterator(value, buffer, numVectors); decoded = true; } } public static
FloatRankVectorsDocValuesField
java
elastic__elasticsearch
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/UnresolvedFunctionTests.java
{ "start": 898, "end": 7670 }
class ____ extends AbstractNodeTestCase<UnresolvedFunction, Expression> { public static UnresolvedFunction randomUnresolvedFunction() { return innerRandomUnresolvedFunction(resolutionStrategies()); } static UnresolvedFunction innerRandomUnresolvedFunction(List<FunctionResolutionStrategy> resolutionStrategies) { /* Pick an UnresolvedFunction where the name and the * message don't happen to be the same String. If they * matched then transform would get them confused. */ Source source = randomSource(); String name = randomAlphaOfLength(5); FunctionResolutionStrategy resolutionStrategy = randomFrom(resolutionStrategies); List<Expression> args = randomFunctionArgs(); boolean analyzed = randomBoolean(); String unresolvedMessage = randomUnresolvedMessage(); return new UnresolvedFunction(source, name, resolutionStrategy, args, analyzed, unresolvedMessage); } private static List<FunctionResolutionStrategy> resolutionStrategies() { return asList(FunctionResolutionStrategy.DEFAULT, new FunctionResolutionStrategy() { }); } protected List<FunctionResolutionStrategy> pluggableResolutionStrategies() { return resolutionStrategies(); } private static List<Expression> randomFunctionArgs() { // At this point we only support functions with 0, 1, or 2 arguments. Supplier<List<Expression>> option = randomFrom( asList( Collections::emptyList, () -> singletonList(randomUnresolvedAttribute()), () -> asList(randomUnresolvedAttribute(), randomUnresolvedAttribute()) ) ); return option.get(); } /** * Pick a random value for the unresolved message. * It is important that this value is not the same * as the value for the name for tests like the {@link #testTransform} * and for general ease of reading. */ private static String randomUnresolvedMessage() { return randomBoolean() ? 
null : randomAlphaOfLength(6); } @Override protected UnresolvedFunction randomInstance() { return innerRandomUnresolvedFunction(pluggableResolutionStrategies()); } @Override protected UnresolvedFunction mutate(UnresolvedFunction uf) { Supplier<UnresolvedFunction> option = randomFrom( asList( () -> new UnresolvedFunction( uf.source(), randomValueOtherThan(uf.name(), () -> randomAlphaOfLength(5)), uf.resolutionStrategy(), uf.children(), uf.analyzed(), uf.unresolvedMessage() ), () -> new UnresolvedFunction( uf.source(), uf.name(), randomValueOtherThan(uf.resolutionStrategy(), () -> randomFrom(resolutionStrategies())), uf.children(), uf.analyzed(), uf.unresolvedMessage() ), () -> new UnresolvedFunction( uf.source(), uf.name(), uf.resolutionStrategy(), randomValueOtherThan(uf.children(), UnresolvedFunctionTests::randomFunctionArgs), uf.analyzed(), uf.unresolvedMessage() ), () -> new UnresolvedFunction( uf.source(), uf.name(), uf.resolutionStrategy(), uf.children(), uf.analyzed() == false, uf.unresolvedMessage() ), () -> new UnresolvedFunction( uf.source(), uf.name(), uf.resolutionStrategy(), uf.children(), uf.analyzed(), randomValueOtherThan(uf.unresolvedMessage(), () -> randomAlphaOfLength(5)) ) ) ); return option.get(); } @Override protected UnresolvedFunction copy(UnresolvedFunction uf) { return new UnresolvedFunction( uf.source(), uf.name(), uf.resolutionStrategy(), uf.children(), uf.analyzed(), uf.unresolvedMessage() ); } @Override public void testTransform() { UnresolvedFunction uf = innerRandomUnresolvedFunction(pluggableResolutionStrategies()); String newName = randomValueOtherThan(uf.name(), () -> randomAlphaOfLength(5)); assertEquals( new UnresolvedFunction(uf.source(), newName, uf.resolutionStrategy(), uf.children(), uf.analyzed(), uf.unresolvedMessage()), uf.transformPropertiesOnly(Object.class, p -> Objects.equals(p, uf.name()) ? 
newName : p) ); FunctionResolutionStrategy newResolution = randomValueOtherThan(uf.resolutionStrategy(), () -> randomFrom(resolutionStrategies())); assertEquals( new UnresolvedFunction(uf.source(), uf.name(), newResolution, uf.children(), uf.analyzed(), uf.unresolvedMessage()), uf.transformPropertiesOnly(Object.class, p -> Objects.equals(p, uf.resolutionStrategy()) ? newResolution : p) ); String newUnresolvedMessage = randomValueOtherThan(uf.unresolvedMessage(), UnresolvedFunctionTests::randomUnresolvedMessage); assertEquals( new UnresolvedFunction(uf.source(), uf.name(), uf.resolutionStrategy(), uf.children(), uf.analyzed(), newUnresolvedMessage), uf.transformPropertiesOnly(Object.class, p -> Objects.equals(p, uf.unresolvedMessage()) ? newUnresolvedMessage : p) ); assertEquals( new UnresolvedFunction( uf.source(), uf.name(), uf.resolutionStrategy(), uf.children(), uf.analyzed() == false, uf.unresolvedMessage() ), uf.transformPropertiesOnly(Object.class, p -> Objects.equals(p, uf.analyzed()) ? uf.analyzed() == false : p) ); } @Override public void testReplaceChildren() { UnresolvedFunction uf = innerRandomUnresolvedFunction(pluggableResolutionStrategies()); List<Expression> newChildren = randomValueOtherThan(uf.children(), UnresolvedFunctionTests::randomFunctionArgs); assertEquals( new UnresolvedFunction(uf.source(), uf.name(), uf.resolutionStrategy(), newChildren, uf.analyzed(), uf.unresolvedMessage()), uf.replaceChildren(newChildren) ); } }
UnresolvedFunctionTests
java
quarkusio__quarkus
extensions/hal/runtime/src/main/java/io/quarkus/hal/HalLink.java
{ "start": 32, "end": 469 }
class ____ { private final String href; private final String title; private final String type; public HalLink(String href, String title, String type) { this.href = href; this.title = title; this.type = type; } public String getHref() { return href; } public String getTitle() { return title; } public String getType() { return type; } }
HalLink
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/node/InternalSettingsPreparer.java
{ "start": 1138, "end": 7646 }
class ____ { /** * Prepares the settings by gathering all elasticsearch system properties, optionally loading the configuration settings. * * @param input the custom settings to use; these are not overwritten by settings in the configuration file * @param properties map of properties key/value pairs (usually from the command-line) * @param configPath path to config directory; (use null to indicate the default) * @param defaultNodeName supplier for the default node.name if the setting isn't defined * @return the {@link Environment} */ public static Environment prepareEnvironment( Settings input, Map<String, String> properties, Path configPath, Supplier<String> defaultNodeName ) { Path configDir = findConfigDir(configPath, input, properties); Path configFile = configDir.resolve("elasticsearch.yml"); Settings.Builder output = Settings.builder(); // start with a fresh output loadConfigWithSubstitutions(output, configFile, System::getenv); loadOverrides(output, properties); output.put(input); replaceForcedSettings(output); try { output.replacePropertyPlaceholders(); } catch (Exception e) { throw new SettingsException("Failed to replace property placeholders from [" + configFile.getFileName() + "]", e); } ensureSpecialSettingsExist(output, defaultNodeName); return new Environment(output.build(), configDir); } static Path findConfigDir(Path configPath, Settings input, Map<String, String> properties) { if (configPath != null) { return configPath; } String esHome = properties.get(Environment.PATH_HOME_SETTING.getKey()); if (esHome == null) { // TODO: this fallback is only needed for tests, in production input is always Settings.EMPTY esHome = Environment.PATH_HOME_SETTING.get(input); if (esHome == null) { throw new IllegalStateException(Environment.PATH_HOME_SETTING.getKey() + " is not configured"); } } return resolveConfigDir(esHome); } @SuppressForbidden(reason = "reading initial config") private static Path resolveConfigDir(String esHome) { return 
PathUtils.get(esHome).resolve("config"); } static void loadConfigWithSubstitutions(Settings.Builder output, Path configFile, Function<String, String> substitutions) { if (Files.exists(configFile) == false) { return; } try { long existingSize = Files.size(configFile); StringBuilder builder = new StringBuilder((int) existingSize); try (BufferedReader reader = Files.newBufferedReader(configFile, StandardCharsets.UTF_8)) { String line; while ((line = reader.readLine()) != null) { int dollarNdx; int nextNdx = 0; while ((dollarNdx = line.indexOf("${", nextNdx)) != -1) { int closeNdx = line.indexOf('}', dollarNdx + 2); if (closeNdx == -1) { // No close substitution was found. Break to leniently copy the rest of the line as is. break; } // copy up to the dollar if (dollarNdx > nextNdx) { builder.append(line, nextNdx, dollarNdx); } nextNdx = closeNdx + 1; String substKey = line.substring(dollarNdx + 2, closeNdx); String substValue = substitutions.apply(substKey); if (substValue != null) { builder.append(substValue); } else { // the substitution name doesn't exist, defer to setting based substitution after yaml parsing builder.append(line, dollarNdx, nextNdx); } } if (nextNdx < line.length()) { builder.append(line, nextNdx, line.length()); } builder.append(System.lineSeparator()); } } var is = new ByteArrayInputStream(builder.toString().getBytes(StandardCharsets.UTF_8)); output.loadFromStream(configFile.getFileName().toString(), is, false); } catch (IOException e) { throw new SettingsException("Failed to load settings from " + configFile.toString(), e); } } static void loadOverrides(Settings.Builder output, Map<String, String> overrides) { StringBuilder builder = new StringBuilder(); for (var entry : overrides.entrySet()) { builder.append(entry.getKey()); builder.append(": "); builder.append(entry.getValue()); builder.append(System.lineSeparator()); } var is = new ByteArrayInputStream(builder.toString().getBytes(StandardCharsets.UTF_8)); // fake the resource name so it loads 
yaml try { output.loadFromStream("overrides.yml", is, false); } catch (IOException e) { throw new SettingsException("Malformed setting override value", e); } } private static void replaceForcedSettings(Settings.Builder output) { List<String> forcedSettings = new ArrayList<>(); for (String setting : output.keys()) { if (setting.startsWith("force.")) { forcedSettings.add(setting); } } for (String forcedSetting : forcedSettings) { String value = output.remove(forcedSetting); output.put(forcedSetting.substring("force.".length()), value); } } private static void ensureSpecialSettingsExist(Settings.Builder output, Supplier<String> defaultNodeName) { // put the cluster and node name if they aren't set if (output.get(ClusterName.CLUSTER_NAME_SETTING.getKey()) == null) { output.put(ClusterName.CLUSTER_NAME_SETTING.getKey(), ClusterName.DEFAULT.value()); } if (output.get(Node.NODE_NAME_SETTING.getKey()) == null) { output.put(Node.NODE_NAME_SETTING.getKey(), defaultNodeName.get()); } } }
InternalSettingsPreparer
java
google__auto
value/src/test/java/com/google/auto/value/extension/memoized/MemoizedValidationTest.java
{ "start": 2611, "end": 3071 }
class ____ {", " @Memoized", " String string() {", " return \"\";", " }", "}"); Compilation compilation = javac().withProcessors(new MemoizedValidator()).compile(source); assertThat(compilation).failed(); assertThat(compilation) .hadErrorContaining("@Memoized methods must be declared only in @AutoValue classes") .inFile(source) .onLine(6); } }
EnclosingClass
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCleanupRepositoryAction.java
{ "start": 1230, "end": 1999 }
class ____ extends BaseRestHandler { @Override public List<Route> routes() { return List.of(new Route(POST, "/_snapshot/{repository}/_cleanup")); } @Override public String getName() { return "cleanup_repository_action"; } @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final var cleanupRepositoryRequest = new CleanupRepositoryRequest( getMasterNodeTimeout(request), getAckTimeout(request), request.param("repository") ); return channel -> client.admin().cluster().cleanupRepository(cleanupRepositoryRequest, new RestToXContentListener<>(channel)); } }
RestCleanupRepositoryAction
java
apache__flink
flink-datastream/src/main/java/org/apache/flink/datastream/impl/extension/eventtime/functions/ExtractEventTimeProcessFunction.java
{ "start": 1872, "end": 8398 }
class ____<IN> implements OneInputStreamProcessFunction<IN, IN>, ProcessingTimeService.ProcessingTimeCallback { /** User-defined watermark strategy. */ private final EventTimeWatermarkStrategy<IN> watermarkStrategy; /** The maximum timestamp encountered so far. */ private long currentMaxEventTime = Long.MIN_VALUE; private long lastEmittedEventTime = Long.MIN_VALUE; /** * The periodic processing timer interval; if not configured by user in {@link * EventTimeWatermarkStrategy}, it will default to the value specified by {@link * PipelineOptions#AUTO_WATERMARK_INTERVAL}. */ private long periodicTimerInterval = 0; /** * Whether enable create and send {@link EventTimeExtension#IDLE_STATUS_WATERMARK_DECLARATION}. */ private boolean enableIdleStatus; /** The {@link IdlenessTimer} is utilized to check whether the input is currently idle. */ private IdlenessTimer idlenessTimer; private boolean isIdleNow = false; private final long maxOutOfOrderTimeInMs; private ProcessingTimeService processingTimeService; private WatermarkManager watermarkManager; public ExtractEventTimeProcessFunction(EventTimeWatermarkStrategy<IN> watermarkStrategy) { this.watermarkStrategy = watermarkStrategy; if (watermarkStrategy.getIdleTimeout().toMillis() > 0) { this.enableIdleStatus = true; } this.maxOutOfOrderTimeInMs = watermarkStrategy.getMaxOutOfOrderTime().toMillis(); } public void initEventTimeExtension( ExecutionConfig config, WatermarkManager watermarkManager, ProcessingTimeService processingTimeService) { this.processingTimeService = processingTimeService; this.watermarkManager = watermarkManager; if (enableIdleStatus) { this.idlenessTimer = new IdlenessTimer( processingTimeService.getClock(), watermarkStrategy.getIdleTimeout()); } // May need register timer to check whether the input is idle and periodically send event // time watermarks boolean needRegisterTimer = watermarkStrategy.getGenerateMode() == EventTimeWatermarkStrategy.EventTimeWatermarkGenerateMode .PERIODIC || enableIdleStatus; 
// set timer interval default to config option {@link // PipelineOptions#AUTO_WATERMARK_INTERVAL} this.periodicTimerInterval = config.getAutoWatermarkInterval(); if (watermarkStrategy.getGenerateMode() == EventTimeWatermarkStrategy.EventTimeWatermarkGenerateMode.PERIODIC && !watermarkStrategy.getPeriodicWatermarkInterval().isZero()) { this.periodicTimerInterval = watermarkStrategy.getPeriodicWatermarkInterval().toMillis(); } checkState( periodicTimerInterval > 0, "Watermark interval " + periodicTimerInterval + " should large to 0."); if (needRegisterTimer) { processingTimeService.registerTimer( processingTimeService.getCurrentProcessingTime() + periodicTimerInterval, this); } } @Override public Set<? extends WatermarkDeclaration> declareWatermarks() { // declare EventTimeExtension.EVENT_TIME_WATERMARK_DECLARATION // if idle status is enabled, also declare // EventTimeExtension.IDLE_STATUS_WATERMARK_DECLARATION. Set<WatermarkDeclaration> watermarkDeclarations = new HashSet<>(); watermarkDeclarations.add(EventTimeExtension.EVENT_TIME_WATERMARK_DECLARATION); if (enableIdleStatus) { watermarkDeclarations.add(EventTimeExtension.IDLE_STATUS_WATERMARK_DECLARATION); } return watermarkDeclarations; } @Override public void processRecord(IN record, Collector<IN> output, PartitionedContext<IN> ctx) throws Exception { if (enableIdleStatus) { if (isIdleNow) { watermarkManager.emitWatermark( EventTimeExtension.IDLE_STATUS_WATERMARK_DECLARATION.newWatermark(false)); isIdleNow = false; } // mark current input as active idlenessTimer.activity(); } // extract event time from record long extractedEventTime = watermarkStrategy.getEventTimeExtractor().extractTimestamp(record); currentMaxEventTime = Math.max(currentMaxEventTime, extractedEventTime); output.collectAndOverwriteTimestamp(record, extractedEventTime); if (watermarkStrategy.getGenerateMode() == EventTimeWatermarkStrategy.EventTimeWatermarkGenerateMode.PER_EVENT) { // If the per event watermark is utilized, create event time 
watermark and send tryEmitEventTimeWatermark(ctx.getNonPartitionedContext().getWatermarkManager()); } } /** * The processing timer has two goals: 1. check whether the input is idle 2. periodically emit * event time watermark */ @Override public void onProcessingTime(long time) throws IOException, InterruptedException, Exception { if (enableIdleStatus && idlenessTimer.checkIfIdle()) { if (!isIdleNow) { watermarkManager.emitWatermark( EventTimeExtension.IDLE_STATUS_WATERMARK_DECLARATION.newWatermark(true)); isIdleNow = true; } } else if (watermarkStrategy.getGenerateMode() == EventTimeWatermarkStrategy.EventTimeWatermarkGenerateMode.PERIODIC) { tryEmitEventTimeWatermark(watermarkManager); } processingTimeService.registerTimer(time + periodicTimerInterval, this); } private void tryEmitEventTimeWatermark(WatermarkManager watermarkManager) { if (currentMaxEventTime == Long.MIN_VALUE) { return; } long needEmittedEventTime = currentMaxEventTime - maxOutOfOrderTimeInMs; if (needEmittedEventTime > lastEmittedEventTime) { watermarkManager.emitWatermark( EventTimeExtension.EVENT_TIME_WATERMARK_DECLARATION.newWatermark( needEmittedEventTime)); lastEmittedEventTime = needEmittedEventTime; } } }
ExtractEventTimeProcessFunction
java
elastic__elasticsearch
x-pack/plugin/rank-vectors/src/main/java/org/elasticsearch/xpack/rank/vectors/script/RankVectorsScoreScriptUtils.java
{ "start": 3794, "end": 5484 }
class ____ extends RankVectorsFunction { protected final float[][] queryVector; /** * Constructs a dense vector function used for float vectors. * * @param scoreScript The script in which this function was referenced. * @param field The vector field. * @param queryVector The query vector. */ public FloatRankVectorsFunction(ScoreScript scoreScript, RankVectorsDocValuesField field, List<List<Number>> queryVector) { super(scoreScript, field); if (queryVector.isEmpty()) { throw new IllegalArgumentException("The query vector is empty."); } DenseVector.checkDimensions(field.get().getDims(), queryVector.get(0).size()); this.queryVector = new float[queryVector.size()][queryVector.get(0).size()]; int lastSize = -1; for (int i = 0; i < queryVector.size(); i++) { if (lastSize != -1 && lastSize != queryVector.get(i).size()) { throw new IllegalArgumentException( "The query vector contains inner vectors which have inconsistent number of dimensions." ); } lastSize = queryVector.get(i).size(); for (int j = 0; j < queryVector.get(i).size(); j++) { this.queryVector[i][j] = queryVector.get(i).get(j).floatValue(); } field.getElement().checkVectorBounds(this.queryVector[i]); } } } // Calculate Hamming distances between a query's dense vector and documents' dense vectors public
FloatRankVectorsFunction
java
assertj__assertj-core
assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/MultimapAssert_containsKeys_Test.java
{ "start": 983, "end": 3745 }
class ____ extends MultimapAssertBaseTest { @Test public void should_pass_if_actual_contains_given_keys() { assertThat(actual).containsKeys("Lakers", "Bulls"); } @Test public void should_fail_if_actual_is_null() { // GIVEN actual = null; // WHEN Throwable throwable = catchThrowable(() -> assertThat(actual).containsKeys("Nets", "Bulls", "Knicks")); // THEN assertThat(throwable).isInstanceOf(AssertionError.class) .hasMessage(actualIsNull()); } @Test public void should_fail_if_keys_to_look_for_are_null() { // GIVEN String[] keys = null; // WHEN Throwable throwable = catchThrowable(() -> assertThat(actual).containsKeys(keys)); // THEN assertThat(throwable).isInstanceOf(IllegalArgumentException.class) .hasMessage("The keys to look for should not be null"); } @Test public void should_fail_if_keys_to_look_for_are_empty() { // WHEN Throwable throwable = catchThrowable(() -> assertThat(actual).containsKeys()); // THEN assertThat(throwable).isInstanceOf(IllegalArgumentException.class) .hasMessage("The keys to look for should not be empty"); } @Test public void should_fail_if_actual_does_not_contain_all_given_keys() { // WHEN Throwable throwable = catchThrowable(() -> assertThat(actual).containsKeys("Nets", "Bulls", "Knicks")); // THEN assertThat(throwable).isInstanceOf(AssertionError.class) .hasMessage(format("%nExpecting:%n" + " {Lakers=[Kobe Bryant, Magic Johnson, Kareem Abdul Jabbar], Bulls=[Michael Jordan, Scottie Pippen, Derrick Rose], Spurs=[Tony Parker, Tim Duncan, Manu Ginobili]}%n" + "to contain keys:%n" + " [\"Nets\", \"Bulls\", \"Knicks\"]%n" + "but could not find:%n" + " [\"Nets\", \"Knicks\"]")); } @Test public void should_fail_if_actual_does_not_contain_the_given_key() { // WHEN Throwable throwable = catchThrowable(() -> assertThat(actual).containsKeys("Nets")); // THEN assertThat(throwable).isInstanceOf(AssertionError.class) .hasMessage(format("%nExpecting:%n" + " {Lakers=[Kobe Bryant, Magic Johnson, Kareem Abdul Jabbar], Bulls=[Michael Jordan, Scottie Pippen, 
Derrick Rose], Spurs=[Tony Parker, Tim Duncan, Manu Ginobili]}%n" + "to contain key:%n" + " \"Nets\"")); } }
MultimapAssert_containsKeys_Test
java
apache__camel
components/camel-grpc/src/test/java/org/apache/camel/component/grpc/GrpcProducerSecurityTest.java
{ "start": 1903, "end": 8000 }
class ____ extends CamelTestSupport { private static final Logger LOG = LoggerFactory.getLogger(GrpcProducerSecurityTest.class); private static final int GRPC_TLS_TEST_PORT = AvailablePortFinder.getNextAvailable(); private static final int GRPC_JWT_TEST_PORT = AvailablePortFinder.getNextAvailable(); private static final int GRPC_TEST_PING_ID = 1; private static final int GRPC_TEST_PONG_ID01 = 1; private static final int GRPC_TEST_PONG_ID02 = 2; private static final String GRPC_TEST_PING_VALUE = "PING"; private static final String GRPC_TEST_PONG_VALUE = "PONG"; private static final String GRPC_JWT_CORRECT_SECRET = "correctsecret"; private static final String GRPC_JWT_INCORRECT_SECRET = "incorrectsecret"; private static Server grpcServerWithTLS; private static Server grpcServerWithJWT; @BeforeAll public static void startGrpcServer() throws Exception { SslContext sslContext = GrpcSslContexts .forServer(new File("src/test/resources/certs/server.pem"), new File("src/test/resources/certs/server.key")) .trustManager(new File("src/test/resources/certs/ca.pem")) .clientAuth(ClientAuth.REQUIRE) .build(); Assumptions.assumeTrue(sslContext instanceof OpenSslClientContext || sslContext instanceof JdkSslContext); grpcServerWithTLS = NettyServerBuilder.forPort(GRPC_TLS_TEST_PORT) .sslContext(sslContext) .addService(new PingPongImpl()).build().start(); grpcServerWithJWT = NettyServerBuilder.forPort(GRPC_JWT_TEST_PORT) .addService(new PingPongImpl()) .intercept(new JwtServerInterceptor(JwtAlgorithm.HMAC256, GRPC_JWT_CORRECT_SECRET, null, null)) .build() .start(); LOG.info("gRPC server with TLS started on port {}", GRPC_TLS_TEST_PORT); LOG.info("gRPC server with the JWT auth started on port {}", GRPC_JWT_TEST_PORT); } @AfterAll public static void stopGrpcServer() { if (grpcServerWithTLS != null) { grpcServerWithTLS.shutdown(); LOG.info("gRPC server with TLS stopped"); } if (grpcServerWithJWT != null) { grpcServerWithJWT.shutdown(); LOG.info("gRPC server with JWT stopped"); } } @Test 
public void testWithEnableTLS() { LOG.info("gRPC PingSyncSync method test start with TLS enable"); // Testing simple sync method invoke using TLS negotiation PingRequest pingRequest = PingRequest.newBuilder().setPingName(GRPC_TEST_PING_VALUE).setPingId(GRPC_TEST_PING_ID).build(); Object pongResponse = template.requestBody("direct:grpc-tls", pingRequest); assertNotNull(pongResponse); assertTrue(pongResponse instanceof PongResponse); assertEquals(GRPC_TEST_PING_ID, ((PongResponse) pongResponse).getPongId()); assertEquals(GRPC_TEST_PING_VALUE + GRPC_TEST_PONG_VALUE, ((PongResponse) pongResponse).getPongName()); } @Test public void testWithCorrectJWT() { LOG.info("gRPC PingSyncSync method test start with correct JWT authentication"); // Testing simple sync method invoke using correct JWT authentication PingRequest pingRequest = PingRequest.newBuilder().setPingName(GRPC_TEST_PING_VALUE).setPingId(GRPC_TEST_PING_ID).build(); Object pongResponse = template.requestBody("direct:grpc-correct-jwt", pingRequest); assertNotNull(pongResponse); assertTrue(pongResponse instanceof PongResponse); assertEquals(GRPC_TEST_PING_ID, ((PongResponse) pongResponse).getPongId()); assertEquals(GRPC_TEST_PING_VALUE + GRPC_TEST_PONG_VALUE, ((PongResponse) pongResponse).getPongName()); } @Test public void testWithIncorrectJWT() { LOG.info("gRPC PingSyncSync method test start with incorrect JWT authentication"); // Testing simple sync method invoke using incorrect JWT authentication PingRequest pingRequest = PingRequest.newBuilder().setPingName(GRPC_TEST_PING_VALUE).setPingId(GRPC_TEST_PING_ID).build(); try { template.requestBody("direct:grpc-incorrect-jwt", pingRequest); } catch (Exception e) { assertNotNull(e); assertTrue(e.getCause().getCause() instanceof StatusRuntimeException); assertEquals( "UNAUTHENTICATED: The Token's Signature resulted invalid when verified using the Algorithm: HmacSHA256", e.getCause().getCause().getMessage()); } } @Override protected RouteBuilder createRouteBuilder() { 
return new RouteBuilder() { @Override public void configure() { from("direct:grpc-tls") .to("grpc://localhost:" + GRPC_TLS_TEST_PORT + "/org.apache.camel.component.grpc.PingPong?method=pingSyncSync&synchronous=true&" + "negotiationType=TLS&keyCertChainResource=file:src/test/resources/certs/client.pem&" + "keyResource=file:src/test/resources/certs/client.key&trustCertCollectionResource=file:src/test/resources/certs/ca.pem"); from("direct:grpc-correct-jwt") .to("grpc://localhost:" + GRPC_JWT_TEST_PORT + "/org.apache.camel.component.grpc.PingPong?method=pingSyncSync&synchronous=true&" + "authenticationType=JWT&jwtSecret=" + GRPC_JWT_CORRECT_SECRET); from("direct:grpc-incorrect-jwt") .to("grpc://localhost:" + GRPC_JWT_TEST_PORT + "/org.apache.camel.component.grpc.PingPong?method=pingSyncSync&synchronous=true&" + "authenticationType=JWT&jwtSecret=" + GRPC_JWT_INCORRECT_SECRET); } }; } /** * Test gRPC PingPong server implementation */ static
GrpcProducerSecurityTest
java
elastic__elasticsearch
modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessWrappedException.java
{ "start": 719, "end": 943 }
class ____ extends Error { /** * Constructor. * @param cause The {@link Exception} cause. */ public PainlessWrappedException(final Exception cause) { super(cause); } }
PainlessWrappedException
java
apache__hadoop
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestBlockingThreadPoolExecutorService.java
{ "start": 1582, "end": 4808 }
class ____ extends AbstractHadoopTestBase { private static final Logger LOG = LoggerFactory.getLogger( ITestBlockingThreadPoolExecutorService.class); private static final int NUM_ACTIVE_TASKS = 4; private static final int NUM_WAITING_TASKS = 2; private static final int TASK_SLEEP_MSEC = 100; private static final int SHUTDOWN_WAIT_MSEC = 200; private static final int SHUTDOWN_WAIT_TRIES = 5; private static final int BLOCKING_THRESHOLD_MSEC = 50; private static final Integer SOME_VALUE = 1337; private static BlockingThreadPoolExecutorService tpe; @AfterAll public static void afterClass() throws Exception { ensureDestroyed(); } /** * Basic test of running one trivial task. */ @Test public void testSubmitCallable() throws Exception { ensureCreated(); Future<Integer> f = tpe.submit(callableSleeper); Integer v = f.get(); assertEquals(SOME_VALUE, v); } /** * More involved test, including detecting blocking when at capacity. */ @Test public void testSubmitRunnable() throws Exception { ensureCreated(); verifyQueueSize(tpe, NUM_ACTIVE_TASKS + NUM_WAITING_TASKS); } /** * Verify the size of the executor's queue, by verifying that the first * submission to block is {@code expectedQueueSize + 1}. 
* @param executorService executor service to test * @param expectedQueueSize size of queue */ protected void verifyQueueSize(ExecutorService executorService, int expectedQueueSize) { CountDownLatch latch = new CountDownLatch(1); for (int i = 0; i < expectedQueueSize; i++) { executorService.submit(new LatchedSleeper(latch)); } StopWatch stopWatch = new StopWatch().start(); latch.countDown(); executorService.submit(sleeper); assertDidBlock(stopWatch); } @Test public void testShutdown() throws Exception { // Cover create / destroy, regardless of when this test case runs ensureCreated(); ensureDestroyed(); // Cover create, execute, destroy, regardless of when test case runs ensureCreated(); testSubmitRunnable(); ensureDestroyed(); } @Test public void testChainedQueue() throws Throwable { ensureCreated(); int size = 2; ExecutorService wrapper = new SemaphoredDelegatingExecutor(tpe, size, true); verifyQueueSize(wrapper, size); } // Helper functions, etc. private void assertDidBlock(StopWatch sw) { try { if (sw.now(TimeUnit.MILLISECONDS) < BLOCKING_THRESHOLD_MSEC) { throw new RuntimeException("Blocking call returned too fast."); } } finally { sw.reset().start(); } } private Runnable sleeper = new Runnable() { @Override public void run() { String name = Thread.currentThread().getName(); try { Thread.sleep(TASK_SLEEP_MSEC); } catch (InterruptedException e) { LOG.info("Thread {} interrupted.", name); Thread.currentThread().interrupt(); } } }; private Callable<Integer> callableSleeper = new Callable<Integer>() { @Override public Integer call() throws Exception { sleeper.run(); return SOME_VALUE; } }; private
ITestBlockingThreadPoolExecutorService
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/deployment/TaskDeploymentDescriptorFactory.java
{ "start": 23461, "end": 24100 }
interface ____ { /** * Serialize and try offload shuffle descriptors. * * @param shuffleDescriptorGroup to serialize * @param numConsumer consumers number of these shuffle descriptors, it means how many times * serialized shuffle descriptor should be sent * @return offloaded or non-offloaded serialized shuffle descriptors */ MaybeOffloaded<ShuffleDescriptorGroup> serializeAndTryOffloadShuffleDescriptor( ShuffleDescriptorGroup shuffleDescriptorGroup, int numConsumer) throws IOException; } private static
ShuffleDescriptorSerializer
java
apache__camel
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/JsltEndpointBuilderFactory.java
{ "start": 1555, "end": 9269 }
interface ____ extends EndpointProducerBuilder { default AdvancedJsltEndpointBuilder advanced() { return (AdvancedJsltEndpointBuilder) this; } /** * Sets whether the context map should allow access to all details. By * default only the message body and headers can be accessed. This * option can be enabled for full access to the current Exchange and * CamelContext. Doing so impose a potential security risk as this opens * access to the full power of CamelContext API. * * The option is a: <code>boolean</code> type. * * Default: false * Group: producer * * @param allowContextMapAll the value to set * @return the dsl builder */ default JsltEndpointBuilder allowContextMapAll(boolean allowContextMapAll) { doSetProperty("allowContextMapAll", allowContextMapAll); return this; } /** * Sets whether the context map should allow access to all details. By * default only the message body and headers can be accessed. This * option can be enabled for full access to the current Exchange and * CamelContext. Doing so impose a potential security risk as this opens * access to the full power of CamelContext API. * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: producer * * @param allowContextMapAll the value to set * @return the dsl builder */ default JsltEndpointBuilder allowContextMapAll(String allowContextMapAll) { doSetProperty("allowContextMapAll", allowContextMapAll); return this; } /** * Whether to allow to use resource template from header or not (default * false). Enabling this allows to specify dynamic templates via message * header. However this can be seen as a potential security * vulnerability if the header is coming from a malicious user, so use * this with care. * * The option is a: <code>boolean</code> type. 
* * Default: false * Group: producer * * @param allowTemplateFromHeader the value to set * @return the dsl builder */ default JsltEndpointBuilder allowTemplateFromHeader(boolean allowTemplateFromHeader) { doSetProperty("allowTemplateFromHeader", allowTemplateFromHeader); return this; } /** * Whether to allow to use resource template from header or not (default * false). Enabling this allows to specify dynamic templates via message * header. However this can be seen as a potential security * vulnerability if the header is coming from a malicious user, so use * this with care. * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: producer * * @param allowTemplateFromHeader the value to set * @return the dsl builder */ default JsltEndpointBuilder allowTemplateFromHeader(String allowTemplateFromHeader) { doSetProperty("allowTemplateFromHeader", allowTemplateFromHeader); return this; } /** * Sets whether to use resource content cache or not. * * The option is a: <code>boolean</code> type. * * Default: true * Group: producer * * @param contentCache the value to set * @return the dsl builder */ default JsltEndpointBuilder contentCache(boolean contentCache) { doSetProperty("contentCache", contentCache); return this; } /** * Sets whether to use resource content cache or not. * * The option will be converted to a <code>boolean</code> type. * * Default: true * Group: producer * * @param contentCache the value to set * @return the dsl builder */ default JsltEndpointBuilder contentCache(String contentCache) { doSetProperty("contentCache", contentCache); return this; } /** * If true, the mapper will use the USE_BIG_DECIMAL_FOR_FLOATS in * serialization features. * * The option is a: <code>boolean</code> type. 
* * Default: false * Group: producer * * @param mapBigDecimalAsFloats the value to set * @return the dsl builder */ default JsltEndpointBuilder mapBigDecimalAsFloats(boolean mapBigDecimalAsFloats) { doSetProperty("mapBigDecimalAsFloats", mapBigDecimalAsFloats); return this; } /** * If true, the mapper will use the USE_BIG_DECIMAL_FOR_FLOATS in * serialization features. * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: producer * * @param mapBigDecimalAsFloats the value to set * @return the dsl builder */ default JsltEndpointBuilder mapBigDecimalAsFloats(String mapBigDecimalAsFloats) { doSetProperty("mapBigDecimalAsFloats", mapBigDecimalAsFloats); return this; } /** * Setting a custom JSON Object Mapper to be used. * * The option is a: * <code>com.fasterxml.jackson.databind.ObjectMapper</code> type. * * Group: producer * * @param objectMapper the value to set * @return the dsl builder */ default JsltEndpointBuilder objectMapper(com.fasterxml.jackson.databind.ObjectMapper objectMapper) { doSetProperty("objectMapper", objectMapper); return this; } /** * Setting a custom JSON Object Mapper to be used. * * The option will be converted to a * <code>com.fasterxml.jackson.databind.ObjectMapper</code> type. * * Group: producer * * @param objectMapper the value to set * @return the dsl builder */ default JsltEndpointBuilder objectMapper(String objectMapper) { doSetProperty("objectMapper", objectMapper); return this; } /** * If true, JSON in output message is pretty printed. * * The option is a: <code>boolean</code> type. * * Default: false * Group: common * * @param prettyPrint the value to set * @return the dsl builder */ default JsltEndpointBuilder prettyPrint(boolean prettyPrint) { doSetProperty("prettyPrint", prettyPrint); return this; } /** * If true, JSON in output message is pretty printed. * * The option will be converted to a <code>boolean</code> type. 
* * Default: false * Group: common * * @param prettyPrint the value to set * @return the dsl builder */ default JsltEndpointBuilder prettyPrint(String prettyPrint) { doSetProperty("prettyPrint", prettyPrint); return this; } } /** * Advanced builder for endpoint for the JSLT component. */ public
JsltEndpointBuilder
java
apache__camel
core/camel-main/src/test/java/org/apache/camel/main/MyOrder.java
{ "start": 842, "end": 1190 }
class ____ { private final String company; private final MyAddress address; public MyOrder(String company, MyAddress address) { this.company = company; this.address = address; } public String getCompany() { return company; } public MyAddress getAddress() { return address; } }
MyOrder
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/FloatingPointLiteralPrecisionTest.java
{ "start": 1069, "end": 1324 }
class ____ { @Test public void positive() { BugCheckerRefactoringTestHelper.newInstance(FloatingPointLiteralPrecision.class, getClass()) .addInputLines( "in/Test.java", """
FloatingPointLiteralPrecisionTest
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/checkpoint/PrioritizedOperatorSubtaskStateTest.java
{ "start": 15905, "end": 24763 }
enum ____ { /** mode 0: one valid state handle (deep copy of original). */ ONE_VALID_STATE_HANDLE(0) { @Override public OperatorSubtaskState createAlternativeSubtaskState( OperatorSubtaskState primaryOriginal) { return OperatorSubtaskState.builder() .setManagedOperatorState( deepCopyFirstElement(primaryOriginal.getManagedOperatorState())) .setRawOperatorState( deepCopyFirstElement(primaryOriginal.getRawOperatorState())) .setManagedKeyedState( deepCopyFirstElement(primaryOriginal.getManagedKeyedState())) .setRawKeyedState(deepCopyFirstElement(primaryOriginal.getRawKeyedState())) .setInputChannelState(deepCopy(primaryOriginal.getInputChannelState())) .setResultSubpartitionState( deepCopy(primaryOriginal.getResultSubpartitionState())) .build(); } }, /** mode 1: empty StateHandleCollection. */ EMPTY_STATE_HANDLE_COLLECTION(1) { @Override public OperatorSubtaskState createAlternativeSubtaskState( OperatorSubtaskState primaryOriginal) { return OperatorSubtaskState.builder().build(); } }, /** * mode 2: one invalid state handle (e.g. wrong key group, different meta data). e.g. wrong * key group, different meta data. 
*/ ONE_INVALID_STATE_HANDLE(2) { @Override public OperatorSubtaskState createAlternativeSubtaskState( OperatorSubtaskState primaryOriginal) { KeyGroupRange otherRange = new KeyGroupRange(8, 16); int numNamedStates = 2; return OperatorSubtaskState.builder() .setManagedOperatorState( createNewOperatorStateHandle(numNamedStates, RANDOM)) .setRawOperatorState(createNewOperatorStateHandle(numNamedStates, RANDOM)) .setManagedKeyedState(createNewKeyedStateHandle(otherRange)) .setRawKeyedState(createNewKeyedStateHandle(otherRange)) .setInputChannelState( singleton(createNewInputChannelStateHandle(10, RANDOM))) .setResultSubpartitionState( singleton(createNewResultSubpartitionStateHandle(10, RANDOM))) .build(); } }; CreateAltSubtaskStateMode(int code) { this.code = code; } private final int code; static CreateAltSubtaskStateMode byCode(int code) { for (CreateAltSubtaskStateMode v : values()) { if (v.code == code) { return v; } } throw new IllegalArgumentException("unknown code: " + code); } public abstract OperatorSubtaskState createAlternativeSubtaskState( OperatorSubtaskState primaryOriginal); } private <T extends StateObject> boolean checkResultAsExpected( Function<OperatorSubtaskState, StateObjectCollection<T>> extractor, Function<PrioritizedOperatorSubtaskState, List<StateObjectCollection<T>>> extractor2, PrioritizedOperatorSubtaskState prioritizedResult, OperatorSubtaskState... expectedOrdered) { List<StateObjectCollection<T>> collector = new ArrayList<>(expectedOrdered.length); for (OperatorSubtaskState operatorSubtaskState : expectedOrdered) { collector.add(extractor.apply(operatorSubtaskState)); } return checkRepresentSameOrder( extractor2.apply(prioritizedResult).iterator(), collector.toArray(new StateObjectCollection[0])); } private boolean checkRepresentSameOrder( Iterator<? extends StateObjectCollection<?>> ordered, StateObjectCollection<?>... 
expectedOrder) { for (StateObjectCollection<?> objects : expectedOrder) { if (!ordered.hasNext() || !checkContainedObjectsReferentialEquality(objects, ordered.next())) { return false; } } return !ordered.hasNext(); } /** * Returns true iff, in iteration order, all objects in the first collection are equal by * reference to their corresponding object (by order) in the second collection and the size of * the collections is equal. */ public boolean checkContainedObjectsReferentialEquality( StateObjectCollection<?> a, StateObjectCollection<?> b) { if (a == b) { return true; } if (a == null || b == null) { return false; } if (a.size() != b.size()) { return false; } Iterator<?> bIter = b.iterator(); for (StateObject stateObject : a) { if (!bIter.hasNext() || bIter.next() != stateObject) { return false; } } return true; } /** * Creates a deep copy of the first state object in the given collection, or null if the * collection is empy. */ private static <T extends StateObject> StateObjectCollection<T> deepCopyFirstElement( StateObjectCollection<T> original) { if (original.isEmpty()) { return StateObjectCollection.empty(); } return StateObjectCollection.singleton(deepCopy(original.iterator().next())); } /** * Creates a deep copy of the first state object in the given collection, or null if the * collection is empy. 
*/ private static <T extends StateObject> StateObjectCollection<T> deepCopy( StateObjectCollection<T> original) { if (original == null || original.isEmpty()) { return StateObjectCollection.empty(); } return new StateObjectCollection<>( original.stream() .map(PrioritizedOperatorSubtaskStateTest::deepCopy) .collect(Collectors.toList())); } @SuppressWarnings("unchecked") private static <T extends StateObject> T deepCopy(T stateObject) { if (stateObject instanceof OperatorStreamStateHandle) { return (T) deepDummyCopy((OperatorStateHandle) stateObject); } else if (stateObject instanceof KeyedStateHandle) { return (T) deepDummyCopy((KeyedStateHandle) stateObject); } else if (stateObject instanceof InputChannelStateHandle) { return (T) deepDummyCopy((InputChannelStateHandle) stateObject); } else if (stateObject instanceof ResultSubpartitionStateHandle) { return (T) deepDummyCopy((ResultSubpartitionStateHandle) stateObject); } else { throw new IllegalStateException(); } } private <T extends StateObject, ID> StateObjectCollection<T> computeExpectedMixedState( List<OperatorSubtaskState> orderedAlternativesList, OperatorSubtaskState primaryAndFallback, Function<OperatorSubtaskState, StateObjectCollection<T>> stateExtractor, Function<T, ID> idExtractor) { List<OperatorSubtaskState> reverseAlternatives = new ArrayList<>(orderedAlternativesList); Collections.reverse(reverseAlternatives); Map<ID, T> map = stateExtractor.apply(primaryAndFallback).stream() .collect(Collectors.toMap(idExtractor, Function.identity())); reverseAlternatives.stream() .flatMap(x -> stateExtractor.apply(x).stream()) .forEach(x -> map.replace(idExtractor.apply(x), x)); return new StateObjectCollection<>(map.values()); } static <SH extends StateObject> void assertResultAsExpected( StateObjectCollection<SH> expected, StateObjectCollection<SH> primary, List<StateObjectCollection<SH>> actual) { Assertions.assertTrue(!actual.isEmpty() && actual.size() <= 2); 
Assertions.assertTrue(isSameContentUnordered(expected, actual.get(0))); if (actual.size() == 1) { Assertions.assertTrue(isSameContentUnordered(primary, actual.get(0))); } else { Assertions.assertTrue(isSameContentUnordered(primary, actual.get(1))); } } static <T> boolean isSameContentUnordered(Collection<T> a, Collection<T> b) { return a.size() == b.size() && a.containsAll(b); } }
CreateAltSubtaskStateMode
java
apache__camel
components/camel-vertx/camel-vertx-http/src/test/java/org/apache/camel/component/vertx/http/VertxHttpStreamingResponseTest.java
{ "start": 1300, "end": 2810 }
class ____ extends VertxHttpTestSupport { private static final String MESSAGE = "Streaming response content"; @Test public void testStreamingResponseToFile() { VertxHttpComponent component = context.getComponent("vertx-http", VertxHttpComponent.class); Vertx vertx = component.getVertx(); String path = "target/streaming.txt"; AsyncFile file = vertx.fileSystem().openBlocking(path, new OpenOptions()); VertxHttpBinding binding = new DefaultVertxHttpBinding() { @Override public HttpRequest<Buffer> prepareHttpRequest(VertxHttpEndpoint endpoint, Exchange exchange) throws Exception { HttpRequest<Buffer> request = super.prepareHttpRequest(endpoint, exchange); request.as(BodyCodec.pipe(file)); return request; } }; component.setVertxHttpBinding(binding); try { template.request(getProducerUri(), null); Buffer buffer = vertx.fileSystem().readFileBlocking(path); assertEquals(MESSAGE, buffer.toString()); } finally { vertx.fileSystem().deleteBlocking(path); } } @Override protected RoutesBuilder createRouteBuilder() { return new RouteBuilder() { @Override public void configure() { from(getTestServerUri()) .setBody().constant(MESSAGE); } }; } }
VertxHttpStreamingResponseTest
java
apache__camel
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/TwitterDirectMessageEndpointBuilderFactory.java
{ "start": 1596, "end": 32980 }
interface ____ extends EndpointConsumerBuilder { default AdvancedTwitterDirectMessageEndpointConsumerBuilder advanced() { return (AdvancedTwitterDirectMessageEndpointConsumerBuilder) this; } /** * If the polling consumer did not poll any files, you can enable this * option to send an empty message (no body) instead. * * The option is a: <code>boolean</code> type. * * Default: false * Group: consumer * * @param sendEmptyMessageWhenIdle the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder sendEmptyMessageWhenIdle(boolean sendEmptyMessageWhenIdle) { doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle); return this; } /** * If the polling consumer did not poll any files, you can enable this * option to send an empty message (no body) instead. * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: consumer * * @param sendEmptyMessageWhenIdle the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder sendEmptyMessageWhenIdle(String sendEmptyMessageWhenIdle) { doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle); return this; } /** * Endpoint type to use. * * The option is a: * <code>org.apache.camel.component.twitter.data.EndpointType</code> * type. * * Default: polling * Group: consumer * * @param type the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder type(org.apache.camel.component.twitter.data.EndpointType type) { doSetProperty("type", type); return this; } /** * Endpoint type to use. * * The option will be converted to a * <code>org.apache.camel.component.twitter.data.EndpointType</code> * type. * * Default: polling * Group: consumer * * @param type the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder type(String type) { doSetProperty("type", type); return this; } /** * Limiting number of results per page. 
* * The option is a: <code>java.lang.Integer</code> type. * * Default: 5 * Group: filter * * @param count the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder count(Integer count) { doSetProperty("count", count); return this; } /** * Limiting number of results per page. * * The option will be converted to a <code>java.lang.Integer</code> * type. * * Default: 5 * Group: filter * * @param count the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder count(String count) { doSetProperty("count", count); return this; } /** * Filter out old tweets, that has previously been polled. This state is * stored in memory only, and based on last tweet id. * * The option is a: <code>boolean</code> type. * * Default: true * Group: filter * * @param filterOld the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder filterOld(boolean filterOld) { doSetProperty("filterOld", filterOld); return this; } /** * Filter out old tweets, that has previously been polled. This state is * stored in memory only, and based on last tweet id. * * The option will be converted to a <code>boolean</code> type. * * Default: true * Group: filter * * @param filterOld the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder filterOld(String filterOld) { doSetProperty("filterOld", filterOld); return this; } /** * The lang string ISO_639-1 which will be used for searching. * * The option is a: <code>java.lang.String</code> type. * * Group: filter * * @param lang the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder lang(String lang) { doSetProperty("lang", lang); return this; } /** * The number of pages result which you want camel-twitter to consume. * * The option is a: <code>java.lang.Integer</code> type. 
* * Default: 1 * Group: filter * * @param numberOfPages the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder numberOfPages(Integer numberOfPages) { doSetProperty("numberOfPages", numberOfPages); return this; } /** * The number of pages result which you want camel-twitter to consume. * * The option will be converted to a <code>java.lang.Integer</code> * type. * * Default: 1 * Group: filter * * @param numberOfPages the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder numberOfPages(String numberOfPages) { doSetProperty("numberOfPages", numberOfPages); return this; } /** * The last tweet id which will be used for pulling the tweets. It is * useful when the camel route is restarted after a long running. * * The option is a: <code>long</code> type. * * Default: 1 * Group: filter * * @param sinceId the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder sinceId(long sinceId) { doSetProperty("sinceId", sinceId); return this; } /** * The last tweet id which will be used for pulling the tweets. It is * useful when the camel route is restarted after a long running. * * The option will be converted to a <code>long</code> type. * * Default: 1 * Group: filter * * @param sinceId the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder sinceId(String sinceId) { doSetProperty("sinceId", sinceId); return this; } /** * To filter by user ids for filter. Multiple values can be separated by * comma. * * The option is a: <code>java.lang.String</code> type. * * Group: filter * * @param userIds the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder userIds(String userIds) { doSetProperty("userIds", userIds); return this; } /** * The http proxy host which can be used for the camel-twitter. Can also * be configured on the TwitterComponent level instead. 
* * The option is a: <code>java.lang.String</code> type. * * Group: proxy * * @param httpProxyHost the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder httpProxyHost(String httpProxyHost) { doSetProperty("httpProxyHost", httpProxyHost); return this; } /** * The http proxy password which can be used for the camel-twitter. Can * also be configured on the TwitterComponent level instead. * * The option is a: <code>java.lang.String</code> type. * * Group: proxy * * @param httpProxyPassword the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder httpProxyPassword(String httpProxyPassword) { doSetProperty("httpProxyPassword", httpProxyPassword); return this; } /** * The http proxy port which can be used for the camel-twitter. Can also * be configured on the TwitterComponent level instead. * * The option is a: <code>java.lang.Integer</code> type. * * Group: proxy * * @param httpProxyPort the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder httpProxyPort(Integer httpProxyPort) { doSetProperty("httpProxyPort", httpProxyPort); return this; } /** * The http proxy port which can be used for the camel-twitter. Can also * be configured on the TwitterComponent level instead. * * The option will be converted to a <code>java.lang.Integer</code> * type. * * Group: proxy * * @param httpProxyPort the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder httpProxyPort(String httpProxyPort) { doSetProperty("httpProxyPort", httpProxyPort); return this; } /** * The http proxy user which can be used for the camel-twitter. Can also * be configured on the TwitterComponent level instead. * * The option is a: <code>java.lang.String</code> type. 
* * Group: proxy * * @param httpProxyUser the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder httpProxyUser(String httpProxyUser) { doSetProperty("httpProxyUser", httpProxyUser); return this; } /** * The number of subsequent error polls (failed due some error) that * should happen before the backoffMultipler should kick-in. * * The option is a: <code>int</code> type. * * Group: scheduler * * @param backoffErrorThreshold the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder backoffErrorThreshold(int backoffErrorThreshold) { doSetProperty("backoffErrorThreshold", backoffErrorThreshold); return this; } /** * The number of subsequent error polls (failed due some error) that * should happen before the backoffMultipler should kick-in. * * The option will be converted to a <code>int</code> type. * * Group: scheduler * * @param backoffErrorThreshold the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder backoffErrorThreshold(String backoffErrorThreshold) { doSetProperty("backoffErrorThreshold", backoffErrorThreshold); return this; } /** * The number of subsequent idle polls that should happen before the * backoffMultipler should kick-in. * * The option is a: <code>int</code> type. * * Group: scheduler * * @param backoffIdleThreshold the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder backoffIdleThreshold(int backoffIdleThreshold) { doSetProperty("backoffIdleThreshold", backoffIdleThreshold); return this; } /** * The number of subsequent idle polls that should happen before the * backoffMultipler should kick-in. * * The option will be converted to a <code>int</code> type. 
* * Group: scheduler * * @param backoffIdleThreshold the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder backoffIdleThreshold(String backoffIdleThreshold) { doSetProperty("backoffIdleThreshold", backoffIdleThreshold); return this; } /** * To let the scheduled polling consumer backoff if there has been a * number of subsequent idles/errors in a row. The multiplier is then * the number of polls that will be skipped before the next actual * attempt is happening again. When this option is in use then * backoffIdleThreshold and/or backoffErrorThreshold must also be * configured. * * The option is a: <code>int</code> type. * * Group: scheduler * * @param backoffMultiplier the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder backoffMultiplier(int backoffMultiplier) { doSetProperty("backoffMultiplier", backoffMultiplier); return this; } /** * To let the scheduled polling consumer backoff if there has been a * number of subsequent idles/errors in a row. The multiplier is then * the number of polls that will be skipped before the next actual * attempt is happening again. When this option is in use then * backoffIdleThreshold and/or backoffErrorThreshold must also be * configured. * * The option will be converted to a <code>int</code> type. * * Group: scheduler * * @param backoffMultiplier the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder backoffMultiplier(String backoffMultiplier) { doSetProperty("backoffMultiplier", backoffMultiplier); return this; } /** * Milliseconds before the next poll. * * The option is a: <code>long</code> type. * * Default: 30000 * Group: scheduler * * @param delay the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder delay(long delay) { doSetProperty("delay", delay); return this; } /** * Milliseconds before the next poll. 
* * The option will be converted to a <code>long</code> type. * * Default: 30000 * Group: scheduler * * @param delay the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder delay(String delay) { doSetProperty("delay", delay); return this; } /** * If greedy is enabled, then the ScheduledPollConsumer will run * immediately again, if the previous run polled 1 or more messages. * * The option is a: <code>boolean</code> type. * * Default: false * Group: scheduler * * @param greedy the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder greedy(boolean greedy) { doSetProperty("greedy", greedy); return this; } /** * If greedy is enabled, then the ScheduledPollConsumer will run * immediately again, if the previous run polled 1 or more messages. * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: scheduler * * @param greedy the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder greedy(String greedy) { doSetProperty("greedy", greedy); return this; } /** * Milliseconds before the first poll starts. * * The option is a: <code>long</code> type. * * Default: 1000 * Group: scheduler * * @param initialDelay the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder initialDelay(long initialDelay) { doSetProperty("initialDelay", initialDelay); return this; } /** * Milliseconds before the first poll starts. * * The option will be converted to a <code>long</code> type. * * Default: 1000 * Group: scheduler * * @param initialDelay the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder initialDelay(String initialDelay) { doSetProperty("initialDelay", initialDelay); return this; } /** * Specifies a maximum limit of number of fires. So if you set it to 1, * the scheduler will only fire once. If you set it to 5, it will only * fire five times. 
A value of zero or negative means fire forever. * * The option is a: <code>long</code> type. * * Default: 0 * Group: scheduler * * @param repeatCount the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder repeatCount(long repeatCount) { doSetProperty("repeatCount", repeatCount); return this; } /** * Specifies a maximum limit of number of fires. So if you set it to 1, * the scheduler will only fire once. If you set it to 5, it will only * fire five times. A value of zero or negative means fire forever. * * The option will be converted to a <code>long</code> type. * * Default: 0 * Group: scheduler * * @param repeatCount the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder repeatCount(String repeatCount) { doSetProperty("repeatCount", repeatCount); return this; } /** * The consumer logs a start/complete log line when it polls. This * option allows you to configure the logging level for that. * * The option is a: <code>org.apache.camel.LoggingLevel</code> type. * * Default: TRACE * Group: scheduler * * @param runLoggingLevel the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder runLoggingLevel(org.apache.camel.LoggingLevel runLoggingLevel) { doSetProperty("runLoggingLevel", runLoggingLevel); return this; } /** * The consumer logs a start/complete log line when it polls. This * option allows you to configure the logging level for that. * * The option will be converted to a * <code>org.apache.camel.LoggingLevel</code> type. * * Default: TRACE * Group: scheduler * * @param runLoggingLevel the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder runLoggingLevel(String runLoggingLevel) { doSetProperty("runLoggingLevel", runLoggingLevel); return this; } /** * Allows for configuring a custom/shared thread pool to use for the * consumer. By default each consumer has its own single threaded thread * pool. 
* * The option is a: * <code>java.util.concurrent.ScheduledExecutorService</code> type. * * Group: scheduler * * @param scheduledExecutorService the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder scheduledExecutorService(ScheduledExecutorService scheduledExecutorService) { doSetProperty("scheduledExecutorService", scheduledExecutorService); return this; } /** * Allows for configuring a custom/shared thread pool to use for the * consumer. By default each consumer has its own single threaded thread * pool. * * The option will be converted to a * <code>java.util.concurrent.ScheduledExecutorService</code> type. * * Group: scheduler * * @param scheduledExecutorService the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder scheduledExecutorService(String scheduledExecutorService) { doSetProperty("scheduledExecutorService", scheduledExecutorService); return this; } /** * To use a cron scheduler from either camel-spring or camel-quartz * component. Use value spring or quartz for built in scheduler. * * The option is a: <code>java.lang.Object</code> type. * * Default: none * Group: scheduler * * @param scheduler the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder scheduler(Object scheduler) { doSetProperty("scheduler", scheduler); return this; } /** * To use a cron scheduler from either camel-spring or camel-quartz * component. Use value spring or quartz for built in scheduler. * * The option will be converted to a <code>java.lang.Object</code> type. * * Default: none * Group: scheduler * * @param scheduler the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder scheduler(String scheduler) { doSetProperty("scheduler", scheduler); return this; } /** * To configure additional properties when using a custom scheduler or * any of the Quartz, Spring based scheduler. 
This is a multi-value * option with prefix: scheduler. * * The option is a: <code>java.util.Map&lt;java.lang.String, * java.lang.Object&gt;</code> type. * The option is multivalued, and you can use the * schedulerProperties(String, Object) method to add a value (call the * method multiple times to set more values). * * Group: scheduler * * @param key the option key * @param value the option value * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder schedulerProperties(String key, Object value) { doSetMultiValueProperty("schedulerProperties", "scheduler." + key, value); return this; } /** * To configure additional properties when using a custom scheduler or * any of the Quartz, Spring based scheduler. This is a multi-value * option with prefix: scheduler. * * The option is a: <code>java.util.Map&lt;java.lang.String, * java.lang.Object&gt;</code> type. * The option is multivalued, and you can use the * schedulerProperties(String, Object) method to add a value (call the * method multiple times to set more values). * * Group: scheduler * * @param values the values * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder schedulerProperties(Map values) { doSetMultiValueProperties("schedulerProperties", "scheduler.", values); return this; } /** * Whether the scheduler should be auto started. * * The option is a: <code>boolean</code> type. * * Default: true * Group: scheduler * * @param startScheduler the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder startScheduler(boolean startScheduler) { doSetProperty("startScheduler", startScheduler); return this; } /** * Whether the scheduler should be auto started. * * The option will be converted to a <code>boolean</code> type. 
* * Default: true * Group: scheduler * * @param startScheduler the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder startScheduler(String startScheduler) { doSetProperty("startScheduler", startScheduler); return this; } /** * Time unit for initialDelay and delay options. * * The option is a: <code>java.util.concurrent.TimeUnit</code> type. * * Default: MILLISECONDS * Group: scheduler * * @param timeUnit the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder timeUnit(TimeUnit timeUnit) { doSetProperty("timeUnit", timeUnit); return this; } /** * Time unit for initialDelay and delay options. * * The option will be converted to a * <code>java.util.concurrent.TimeUnit</code> type. * * Default: MILLISECONDS * Group: scheduler * * @param timeUnit the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder timeUnit(String timeUnit) { doSetProperty("timeUnit", timeUnit); return this; } /** * Controls if fixed delay or fixed rate is used. See * ScheduledExecutorService in JDK for details. * * The option is a: <code>boolean</code> type. * * Default: true * Group: scheduler * * @param useFixedDelay the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder useFixedDelay(boolean useFixedDelay) { doSetProperty("useFixedDelay", useFixedDelay); return this; } /** * Controls if fixed delay or fixed rate is used. See * ScheduledExecutorService in JDK for details. * * The option will be converted to a <code>boolean</code> type. * * Default: true * Group: scheduler * * @param useFixedDelay the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder useFixedDelay(String useFixedDelay) { doSetProperty("useFixedDelay", useFixedDelay); return this; } /** * The access token. Can also be configured on the TwitterComponent * level instead. 
* * The option is a: <code>java.lang.String</code> type. * * Group: security * * @param accessToken the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder accessToken(String accessToken) { doSetProperty("accessToken", accessToken); return this; } /** * The access secret. Can also be configured on the TwitterComponent * level instead. * * The option is a: <code>java.lang.String</code> type. * * Group: security * * @param accessTokenSecret the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder accessTokenSecret(String accessTokenSecret) { doSetProperty("accessTokenSecret", accessTokenSecret); return this; } /** * The consumer key. Can also be configured on the TwitterComponent * level instead. * * The option is a: <code>java.lang.String</code> type. * * Group: security * * @param consumerKey the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder consumerKey(String consumerKey) { doSetProperty("consumerKey", consumerKey); return this; } /** * The consumer secret. Can also be configured on the TwitterComponent * level instead. * * The option is a: <code>java.lang.String</code> type. * * Group: security * * @param consumerSecret the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder consumerSecret(String consumerSecret) { doSetProperty("consumerSecret", consumerSecret); return this; } /** * Sorts by id, so the oldest are first, and newest last. * * The option is a: <code>boolean</code> type. * * Default: true * Group: sort * * @param sortById the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder sortById(boolean sortById) { doSetProperty("sortById", sortById); return this; } /** * Sorts by id, so the oldest are first, and newest last. * * The option will be converted to a <code>boolean</code> type. 
* * Default: true * Group: sort * * @param sortById the value to set * @return the dsl builder */ default TwitterDirectMessageEndpointConsumerBuilder sortById(String sortById) { doSetProperty("sortById", sortById); return this; } } /** * Advanced builder for endpoint consumers for the Twitter Direct Message component. */ public
TwitterDirectMessageEndpointConsumerBuilder
java
spring-projects__spring-security
core/src/test/java/org/springframework/security/concurrent/DelegatingSecurityContextSupportTests.java
{ "start": 2263, "end": 2478 }
class ____ extends AbstractDelegatingSecurityContextSupport { ConcreteDelegatingSecurityContextSupport(SecurityContext securityContext) { super(securityContext); } } }
ConcreteDelegatingSecurityContextSupport
java
apache__camel
components/camel-hazelcast/src/test/java/org/apache/camel/component/hazelcast/HazelcastMapConsumerTest.java
{ "start": 1691, "end": 6721 }
class ____ extends HazelcastCamelTestSupport { @Mock private IMap<Object, Object> map; @Captor private ArgumentCaptor<MapEntryListener<Object, Object>> argument; @Override protected void trainHazelcastInstance(HazelcastInstance hazelcastInstance) { when(hazelcastInstance.getMap("foo")).thenReturn(map); when(map.addEntryListener(any(), eq(true))).thenReturn(UUID.randomUUID()); } @Override protected void verifyHazelcastInstance(HazelcastInstance hazelcastInstance) { verify(hazelcastInstance).getMap("foo"); verify(map).addEntryListener(any(MapEntryListener.class), eq(true)); } @Test public void testAdd() throws InterruptedException { MockEndpoint out = getMockEndpoint("mock:added"); out.expectedMessageCount(1); verify(map).addEntryListener(argument.capture(), eq(true)); EntryEvent<Object, Object> event = new EntryEvent<>("foo", null, EntryEventType.ADDED.getType(), "4711", "my-foo"); argument.getValue().entryAdded(event); MockEndpoint.assertIsSatisfied(context, 5000, TimeUnit.MILLISECONDS); this.checkHeaders(out.getExchanges().get(0).getIn().getHeaders(), HazelcastConstants.ADDED); } @Test public void testEnict() throws InterruptedException { MockEndpoint out = super.getMockEndpoint("mock:evicted"); out.expectedMessageCount(1); verify(map).addEntryListener(argument.capture(), eq(true)); EntryEvent<Object, Object> event = new EntryEvent<>("foo", null, EntryEventType.EVICTED.getType(), "4711", "my-foo"); argument.getValue().entryEvicted(event); MockEndpoint.assertIsSatisfied(context, 30000, TimeUnit.MILLISECONDS); } @Test public void testUpdate() throws InterruptedException { MockEndpoint out = getMockEndpoint("mock:updated"); out.expectedMessageCount(1); verify(map).addEntryListener(argument.capture(), eq(true)); EntryEvent<Object, Object> event = new EntryEvent<>("foo", null, EntryEventType.UPDATED.getType(), "4711", "my-foo"); argument.getValue().entryUpdated(event); MockEndpoint.assertIsSatisfied(context, 5000, TimeUnit.MILLISECONDS); 
this.checkHeaders(out.getExchanges().get(0).getIn().getHeaders(), HazelcastConstants.UPDATED); } @Test public void testEvict() throws InterruptedException { MockEndpoint out = getMockEndpoint("mock:evicted"); out.expectedMessageCount(1); verify(map).addEntryListener(argument.capture(), eq(true)); EntryEvent<Object, Object> event = new EntryEvent<>("foo", null, EntryEventType.EVICTED.getType(), "4711", "my-foo"); argument.getValue().entryEvicted(event); MockEndpoint.assertIsSatisfied(context, 5000, TimeUnit.MILLISECONDS); this.checkHeaders(out.getExchanges().get(0).getIn().getHeaders(), HazelcastConstants.EVICTED); } @Test public void testRemove() throws InterruptedException { MockEndpoint out = getMockEndpoint("mock:removed"); out.expectedMessageCount(1); verify(map).addEntryListener(argument.capture(), eq(true)); EntryEvent<Object, Object> event = new EntryEvent<>("foo", null, EntryEventType.REMOVED.getType(), "4711", "my-foo"); argument.getValue().entryRemoved(event); MockEndpoint.assertIsSatisfied(context, 5000, TimeUnit.MILLISECONDS); this.checkHeaders(out.getExchanges().get(0).getIn().getHeaders(), HazelcastConstants.REMOVED); } @Override protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { @Override public void configure() throws Exception { from(String.format("hazelcast-%sfoo", HazelcastConstants.MAP_PREFIX)).log("object...").choice() .when(header(HazelcastConstants.LISTENER_ACTION).isEqualTo(HazelcastConstants.ADDED)) .log("...added").to("mock:added") .when(header(HazelcastConstants.LISTENER_ACTION).isEqualTo(HazelcastConstants.EVICTED)) .log("...evicted").to("mock:evicted") .when(header(HazelcastConstants.LISTENER_ACTION).isEqualTo(HazelcastConstants.UPDATED)) .log("...updated").to("mock:updated") .when(header(HazelcastConstants.LISTENER_ACTION).isEqualTo(HazelcastConstants.REMOVED)) .log("...removed").to("mock:removed").otherwise().log("fail!"); } }; } private void checkHeaders(Map<String, Object> headers, String 
action) { assertEquals(action, headers.get(HazelcastConstants.LISTENER_ACTION)); assertEquals(HazelcastConstants.CACHE_LISTENER, headers.get(HazelcastConstants.LISTENER_TYPE)); assertEquals("4711", headers.get(HazelcastConstants.OBJECT_ID)); assertNotNull(headers.get(HazelcastConstants.LISTENER_TIME)); } }
HazelcastMapConsumerTest
java
quarkusio__quarkus
extensions/arc/deployment/src/test/java/io/quarkus/arc/test/properties/UnlessBuildPropertyRepeatableStereotypeTest.java
{ "start": 811, "end": 2883 }
class ____ { @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClasses(MatchingProperty.class, InheritableMatchingProperty.class, TransitiveMatchingProperty.class, InheritableTransitiveMatchingProperty.class, MyService.class, MatchingPropertyMyService.class, InheritableMatchingPropertyMyService.class, TransitiveMatchingPropertyMyService.class, InheritableTransitiveMatchingPropertyMyService.class, MyServiceSimple.class, MyServiceMatchingPropertyDirect.class, MyServiceMatchingPropertyTransitive.class, MyServiceMatchingPropertyOnSuperclassNotInheritable.class, MyServiceMatchingPropertyOnSuperclassInheritable.class, MyServiceMatchingPropertyTransitiveOnSuperclassNotInheritable.class, MyServiceMatchingPropertyTransitiveOnSuperclassInheritable.class, Producers.class)) .overrideConfigKey("foo.bar", "baz") .overrideConfigKey("some.prop", "val"); @Inject @Any Instance<MyService> services; @Test public void test() { Set<String> hello = services.stream().map(MyService::hello).collect(Collectors.toSet()); Set<Object> expected = Set.of( MyServiceSimple.class.getSimpleName(), MyServiceMatchingPropertyOnSuperclassNotInheritable.class.getSimpleName(), MyServiceMatchingPropertyTransitiveOnSuperclassNotInheritable.class.getSimpleName(), Producers.SIMPLE); assertEquals(expected, hello); } @UnlessBuildProperty(name = "foo.bar", stringValue = "baz") @UnlessBuildProperty(name = "some.prop", stringValue = "none") @Stereotype @Target({ ElementType.TYPE, ElementType.METHOD, ElementType.FIELD }) @Retention(RetentionPolicy.RUNTIME) public @
UnlessBuildPropertyRepeatableStereotypeTest
java
quarkusio__quarkus
independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/InterceptorInfo.java
{ "start": 9572, "end": 10031 }
class ____ last. * * @return the interceptor methods */ public List<MethodInfo> getAroundInvokes() { return aroundInvokes; } /** * Returns all methods annotated with {@link jakarta.interceptor.AroundConstruct} found in the hierarchy of the interceptor * class. * <p> * The returned list is sorted. The method declared on the most general superclass is first. The method declared on the * interceptor
is
java
apache__flink
flink-connectors/flink-connector-files/src/test/java/org/apache/flink/connector/file/sink/utils/IntegerFileSinkTestDataUtils.java
{ "start": 3110, "end": 6529 }
class ____ implements BucketAssigner<Integer, String> { private final int numBuckets; public ModuloBucketAssigner(int numBuckets) { this.numBuckets = numBuckets; } @Override public String getBucketId(Integer element, Context context) { return Integer.toString(element % numBuckets); } @Override public SimpleVersionedSerializer<String> getSerializer() { return SimpleVersionedStringSerializer.INSTANCE; } } /** * Verifies the files written by the sink contains the expected integer sequences. The integers * are partition into different buckets according to module, and each integer will be repeated * by <tt>numSources</tt> times. * * @param path The directory to check. * @param numRecords The total number of records. * @param numBuckets The number of buckets to assign. * @param numSources The parallelism of sources generating the sequences. Each integer will be * repeat for <tt>numSources</tt> times. */ public static void checkIntegerSequenceSinkOutput( String path, int numRecords, int numBuckets, int numSources) throws Exception { File dir = new File(path); String[] subDirNames = dir.list(); assertThat(subDirNames).isNotNull(); Arrays.sort(subDirNames, Comparator.comparingInt(Integer::parseInt)); assertThat(subDirNames).hasSize(numBuckets); for (int i = 0; i < numBuckets; ++i) { assertThat(subDirNames[i]).isEqualTo(Integer.toString(i)); // now check its content File bucketDir = new File(path, subDirNames[i]); assertThat(bucketDir) .as(bucketDir.getAbsolutePath() + " Should be a existing directory") .isDirectory(); Map<Integer, Integer> counts = new HashMap<>(); File[] files = bucketDir.listFiles(f -> !f.getName().startsWith(".")); assertThat(files).isNotNull(); for (File file : files) { assertThat(file).isFile(); try (DataInputStream dataInputStream = new DataInputStream(new FileInputStream(file))) { while (true) { int value = dataInputStream.readInt(); counts.compute(value, (k, v) -> v == null ? 
1 : v + 1); } } catch (EOFException e) { // End the reading } } int expectedCount = numRecords / numBuckets + (i < numRecords % numBuckets ? 1 : 0); assertThat(counts).hasSize(expectedCount); for (int j = i; j < numRecords; j += numBuckets) { assertThat(counts.getOrDefault(j, 0).intValue()) .as( "The record " + j + " should occur " + numSources + " times, " + " but only occurs " + counts.getOrDefault(j, 0) + "time") .isEqualTo(numSources); } } } }
ModuloBucketAssigner
java
apache__dubbo
dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/RestConstants.java
{ "start": 1128, "end": 2631 }
class ____ { public static final String REST_FILTER_KEY = "rest.filter"; public static final String EXTENSION_KEY = "extension"; public static final int DIALECT_BASIC = 0; public static final int DIALECT_SPRING_MVC = 1; public static final int DIALECT_JAXRS = 2; public static final String HEADER_SERVICE_VERSION = "rest-service-version"; public static final String HEADER_SERVICE_GROUP = "rest-service-group"; public static final String SLASH = "/"; /* Request Attribute */ public static final String BODY_ATTRIBUTE = HttpRequest.class.getName() + ".body"; public static final String BODY_DECODER_ATTRIBUTE = HttpMessageDecoder.class.getName() + ".body"; public static final String SIG_ATTRIBUTE = RequestMapping.class.getName() + ".sig"; public static final String MAPPING_ATTRIBUTE = RequestMapping.class.getName(); public static final String HANDLER_ATTRIBUTE = HandlerMeta.class.getName(); public static final String PATH_ATTRIBUTE = "org.springframework.web.util.UrlPathHelper.PATH"; public static final String URI_TEMPLATE_VARIABLES_ATTRIBUTE = "org.springframework.web.servlet.HandlerMapping.uriTemplateVariables"; public static final String PRODUCIBLE_MEDIA_TYPES_ATTRIBUTE = "org.springframework.web.servlet.HandlerMapping.producibleMediaTypes"; /* Configuration Key */ public static final String CONFIG_PREFIX = "dubbo.protocol.triple.rest."; private RestConstants() {} }
RestConstants
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/pc/FilterTest.java
{ "start": 13554, "end": 13696 }
enum ____ { DEBIT, CREDIT } //tag::pc-filter-Client-example[] @Entity(name = "Client") @Table(name = "client") public static
AccountType
java
apache__rocketmq
broker/src/main/java/org/apache/rocketmq/broker/client/ClientChannelInfo.java
{ "start": 946, "end": 3265 }
class ____ { private final Channel channel; private final String clientId; private final LanguageCode language; private final int version; private volatile long lastUpdateTimestamp = System.currentTimeMillis(); public ClientChannelInfo(Channel channel) { this(channel, null, null, 0); } public ClientChannelInfo(Channel channel, String clientId, LanguageCode language, int version) { this.channel = channel; this.clientId = clientId; this.language = language; this.version = version; } public Channel getChannel() { return channel; } public String getClientId() { return clientId; } public LanguageCode getLanguage() { return language; } public int getVersion() { return version; } public long getLastUpdateTimestamp() { return lastUpdateTimestamp; } public void setLastUpdateTimestamp(long lastUpdateTimestamp) { this.lastUpdateTimestamp = lastUpdateTimestamp; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((channel == null) ? 0 : channel.hashCode()); result = prime * result + ((clientId == null) ? 0 : clientId.hashCode()); result = prime * result + ((language == null) ? 0 : language.hashCode()); result = prime * result + (int) (lastUpdateTimestamp ^ (lastUpdateTimestamp >>> 32)); result = prime * result + version; return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; ClientChannelInfo other = (ClientChannelInfo) obj; if (channel == null) { if (other.channel != null) return false; } else if (this.channel != other.channel) { return false; } return true; } @Override public String toString() { return "ClientChannelInfo [channel=" + channel + ", clientId=" + clientId + ", language=" + language + ", version=" + version + ", lastUpdateTimestamp=" + lastUpdateTimestamp + "]"; } }
ClientChannelInfo
java
quarkusio__quarkus
extensions/resteasy-reactive/rest-common/deployment/src/main/java/io/quarkus/resteasy/reactive/common/deployment/JaxrsMethodsProcessor.java
{ "start": 518, "end": 945 }
class ____ { @BuildStep ExecutionModelAnnotationsAllowedBuildItem jaxrsMethods(BeanArchiveIndexBuildItem beanArchiveIndex) { IndexView index = beanArchiveIndex.getIndex(); return new ExecutionModelAnnotationsAllowedBuildItem(new Predicate<MethodInfo>() { @Override public boolean test(MethodInfo method) { // looking for `@Path` on the declaring
JaxrsMethodsProcessor
java
quarkusio__quarkus
extensions/resteasy-classic/resteasy-client/deployment/src/test/java/io/quarkus/restclient/configuration/VaultScenarioRestClientConfigTest.java
{ "start": 1875, "end": 2612 }
class ____ extends AbstractConfigSource { public VaultLikeConfigSource() { super("Test config source", Integer.MAX_VALUE); } @Override public Map<String, String> getProperties() { return Collections.emptyMap(); } @Override public Set<String> getPropertyNames() { return Collections.emptySet(); } @Override public String getValue(String propertyName) { if ("quarkus.rest-client.\"io.quarkus.restclient.configuration.EchoClient\".url".equals(propertyName)) { return "http://localhost:${quarkus.http.test-port:8081}"; } return null; } } }
VaultLikeConfigSource
java
google__gson
gson/src/test/java/com/google/gson/functional/JsonAdapterAnnotationOnClassesTest.java
{ "start": 19520, "end": 20684 }
class ____ used; one specified with {@code @JsonAdapter} on a class, and the other * specified with {@code @JsonAdapter} on a field of that class. * * <p><b>Important:</b> This situation is likely a rare corner case; the purpose of this test is * to verify that Gson behaves reasonable, mainly that it does not cause a {@link * StackOverflowError} due to infinite recursion. This test is not intended to dictate an expected * behavior. */ @Test public void testDelegating_SameFactoryClass_OnClassAndField() { Gson gson = new GsonBuilder() .registerTypeAdapter( String.class, new TypeAdapter<String>() { @Override public String read(JsonReader in) throws IOException { return in.nextString() + "-str"; } @Override public void write(JsonWriter out, String value) throws IOException { out.value(value + "-str"); } }) .create(); // Should use both factories, and therefore have `{"custom": ... }` once for
are
java
elastic__elasticsearch
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/UnusedStateRemover.java
{ "start": 2193, "end": 7778 }
class ____ implements MlDataRemover { private static final Logger LOGGER = LogManager.getLogger(UnusedStateRemover.class); private final OriginSettingClient client; private final TaskId parentTaskId; public UnusedStateRemover(OriginSettingClient client, TaskId parentTaskId) { this.client = Objects.requireNonNull(client); this.parentTaskId = Objects.requireNonNull(parentTaskId); } @Override public void remove(float requestsPerSec, ActionListener<Boolean> listener, BooleanSupplier isTimedOutSupplier) { try { List<String> unusedStateDocIds = findUnusedStateDocIds(); if (isTimedOutSupplier.getAsBoolean()) { listener.onResponse(false); } else { if (unusedStateDocIds.size() > 0) { executeDeleteUnusedStateDocs(unusedStateDocIds, requestsPerSec, listener); } else { listener.onResponse(true); } } } catch (Exception e) { listener.onFailure(e); } } private List<String> findUnusedStateDocIds() { Set<String> jobIds = getJobIds(); List<String> stateDocIdsToDelete = new ArrayList<>(); BatchedStateDocIdsIterator stateDocIdsIterator = new BatchedStateDocIdsIterator( client, AnomalyDetectorsIndex.jobStateIndexPattern() ); while (stateDocIdsIterator.hasNext()) { Deque<String> stateDocIds = stateDocIdsIterator.next(); for (String stateDocId : stateDocIds) { String jobId = JobIdExtractor.extractJobId(stateDocId); if (jobId == null) { // not a managed state document id continue; } if (jobIds.contains(jobId) == false) { stateDocIdsToDelete.add(stateDocId); } } } return stateDocIdsToDelete; } private Set<String> getJobIds() { Set<String> jobIds = new HashSet<>(); jobIds.addAll(getAnomalyDetectionJobIds()); jobIds.addAll(getDataFrameAnalyticsJobIds()); return jobIds; } private Set<String> getAnomalyDetectionJobIds() { Set<String> jobIds = new HashSet<>(); DocIdBatchedDocumentIterator iterator = new DocIdBatchedDocumentIterator( client, MlConfigIndex.indexName(), QueryBuilders.termQuery(Job.JOB_TYPE.getPreferredName(), Job.ANOMALY_DETECTOR_JOB_TYPE) ); while (iterator.hasNext()) { 
Deque<String> docIds = iterator.next(); docIds.stream().map(Job::extractJobIdFromDocumentId).filter(Objects::nonNull).forEach(jobIds::add); } return jobIds; } private Set<String> getDataFrameAnalyticsJobIds() { Set<String> jobIds = new HashSet<>(); DocIdBatchedDocumentIterator iterator = new DocIdBatchedDocumentIterator( client, MlConfigIndex.indexName(), QueryBuilders.termQuery(DataFrameAnalyticsConfig.CONFIG_TYPE.getPreferredName(), DataFrameAnalyticsConfig.TYPE) ); while (iterator.hasNext()) { Deque<String> docIds = iterator.next(); docIds.stream().map(DataFrameAnalyticsConfig::extractJobIdFromDocId).filter(Objects::nonNull).forEach(jobIds::add); } return jobIds; } private void executeDeleteUnusedStateDocs(List<String> unusedDocIds, float requestsPerSec, ActionListener<Boolean> listener) { LOGGER.info("Found [{}] unused state documents; attempting to delete", unusedDocIds.size()); var indicesToQuery = WritableIndexExpander.getInstance().getWritableIndices(AnomalyDetectorsIndex.jobStateIndexPattern()); if (indicesToQuery.isEmpty()) { LOGGER.info("No writable indices found for unused state documents"); listener.onResponse(true); return; } DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(indicesToQuery.toArray(new String[0])).setIndicesOptions( IndicesOptions.lenientExpandOpen() ) .setAbortOnVersionConflict(false) .setRequestsPerSecond(requestsPerSec) .setTimeout(DEFAULT_MAX_DURATION) .setQuery(QueryBuilders.idsQuery().addIds(unusedDocIds.toArray(new String[0]))); // _doc is the most efficient sort order and will also disable scoring deleteByQueryRequest.getSearchRequest().source().sort(ElasticsearchMappings.ES_DOC); deleteByQueryRequest.setParentTask(parentTaskId); client.execute(DeleteByQueryAction.INSTANCE, deleteByQueryRequest, ActionListener.wrap(response -> { if (response.getBulkFailures().isEmpty() == false || response.getSearchFailures().isEmpty() == false) { LOGGER.error( "Some unused state documents could not be deleted due to 
failures: {}", Strings.collectionToCommaDelimitedString(response.getBulkFailures()) + "," + Strings.collectionToCommaDelimitedString(response.getSearchFailures()) ); } else { LOGGER.info("Successfully deleted all unused state documents"); } listener.onResponse(true); }, e -> { LOGGER.error("Error deleting unused model state documents: ", e); listener.onFailure(e); })); } private static
UnusedStateRemover
java
apache__camel
components/camel-parquet-avro/src/main/java/org/apache/camel/dataformat/parquet/avro/ParquetInputStream.java
{ "start": 1078, "end": 1212 }
class ____ implements InputFile { private final String streamId; private final byte[] data; private static
ParquetInputStream
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/memory/ListMemorySegmentSource.java
{ "start": 1058, "end": 1518 }
class ____ implements MemorySegmentSource { private final List<MemorySegment> segments; public ListMemorySegmentSource(final List<MemorySegment> memorySegments) { this.segments = memorySegments; } @Override public MemorySegment nextSegment() { if (this.segments.size() > 0) { return this.segments.remove(this.segments.size() - 1); } else { return null; } } }
ListMemorySegmentSource
java
google__gson
gson/src/test/java/com/google/gson/functional/EnumTest.java
{ "start": 8379, "end": 9027 }
enum ____ { RED("red", 1), BLUE("blue", 2), GREEN("green", 3); final String value; final int index; private Color(String value, int index) { this.value = value; this.index = index; } } @Test public void testEnumToStringRead() { // Should still be able to read constant name assertThat(gson.fromJson("\"A\"", CustomToString.class)).isEqualTo(CustomToString.A); // Should be able to read toString() value assertThat(gson.fromJson("\"test\"", CustomToString.class)).isEqualTo(CustomToString.A); assertThat(gson.fromJson("\"other\"", CustomToString.class)).isNull(); } private
Color
java
apache__logging-log4j2
log4j-spring-boot/src/test/java/org/apache/logging/log4j/spring/boot/SpringProfileTest.java
{ "start": 1577, "end": 3713 }
class ____ { private static final String CONFIG = "log4j2-springProfile.xml"; private static final MockEnvironment env = new MockEnvironment(); private static final String[] DEV_PROFILES = {"dev", "staging"}; private void registerSpringEnvironment(final LoggerContext loggerContext, final Environment env) { loggerContext.putObject(Log4j2SpringBootLoggingSystem.ENVIRONMENT_KEY, env); } private void clearSpringEnvironment(final LoggerContext loggerContext) { loggerContext.removeObject(Log4j2SpringBootLoggingSystem.ENVIRONMENT_KEY); } private void testAppenderOut( final LoggerContext loggerContext, final Class<? extends Appender> clazz, final String patternPrefix) { final Appender app = loggerContext.getConfiguration().getAppender("Out"); assertThat(app).isInstanceOf(clazz); final Layout<?> layout = app.getLayout(); assertThat(layout).isInstanceOf(PatternLayout.class); assertThat(((PatternLayout) layout).getConversionPattern()).startsWith(patternPrefix); } @Test @LoggerContextSource(CONFIG) void prodTest(final LoggerContext loggerContext) { testAppenderOut(loggerContext, ListAppender.class, "none:"); registerSpringEnvironment(loggerContext, env); try { env.setActiveProfiles("prod"); loggerContext.reconfigure(); testAppenderOut(loggerContext, ListAppender.class, "prod:"); } finally { clearSpringEnvironment(loggerContext); } } @Test @LoggerContextSource(CONFIG) void devTest(final LoggerContext loggerContext) { testAppenderOut(loggerContext, ListAppender.class, "none:"); registerSpringEnvironment(loggerContext, env); try { for (final String profile : DEV_PROFILES) { env.setActiveProfiles(profile); loggerContext.reconfigure(); testAppenderOut(loggerContext, ConsoleAppender.class, "dev:"); } } finally { clearSpringEnvironment(loggerContext); } } }
SpringProfileTest
java
google__gson
gson/src/main/java/com/google/gson/ToNumberPolicy.java
{ "start": 1147, "end": 3984 }
enum ____ implements ToNumberStrategy { /** * Using this policy will ensure that numbers will be read as {@link Double} values. This is the * default strategy used during deserialization of numbers as {@link Object}. */ DOUBLE { @Override public Double readNumber(JsonReader in) throws IOException { return in.nextDouble(); } }, /** * Using this policy will ensure that numbers will be read as a lazily parsed number backed by a * string. This is the default strategy used during deserialization of numbers as {@link Number}. */ LAZILY_PARSED_NUMBER { @Override public Number readNumber(JsonReader in) throws IOException { return new LazilyParsedNumber(in.nextString()); } }, /** * Using this policy will ensure that numbers will be read as {@link Long} or {@link Double} * values depending on how JSON numbers are represented: {@code Long} if the JSON number can be * parsed as a {@code Long} value, or otherwise {@code Double} if it can be parsed as a {@code * Double} value. If the parsed double-precision number results in a positive or negative infinity * ({@link Double#isInfinite()}) or a NaN ({@link Double#isNaN()}) value and the {@code * JsonReader} is not {@link JsonReader#isLenient() lenient}, a {@link MalformedJsonException} is * thrown. 
*/ LONG_OR_DOUBLE { @Override public Number readNumber(JsonReader in) throws IOException, JsonParseException { String value = in.nextString(); if (value.indexOf('.') >= 0) { return parseAsDouble(value, in); } else { try { return Long.parseLong(value); } catch (NumberFormatException e) { return parseAsDouble(value, in); } } } private Number parseAsDouble(String value, JsonReader in) throws IOException { try { Double d = Double.valueOf(value); if ((d.isInfinite() || d.isNaN()) && !in.isLenient()) { throw new MalformedJsonException( "JSON forbids NaN and infinities: " + d + "; at path " + in.getPreviousPath()); } return d; } catch (NumberFormatException e) { throw new JsonParseException( "Cannot parse " + value + "; at path " + in.getPreviousPath(), e); } } }, /** * Using this policy will ensure that numbers will be read as numbers of arbitrary length using * {@link BigDecimal}. */ BIG_DECIMAL { @Override public BigDecimal readNumber(JsonReader in) throws IOException { String value = in.nextString(); try { return NumberLimits.parseBigDecimal(value); } catch (NumberFormatException e) { throw new JsonParseException( "Cannot parse " + value + "; at path " + in.getPreviousPath(), e); } } } }
ToNumberPolicy
java
apache__spark
core/src/main/java/org/apache/spark/SparkStageInfo.java
{ "start": 1091, "end": 1319 }
interface ____ extends Serializable { int stageId(); int currentAttemptId(); long submissionTime(); String name(); int numTasks(); int numActiveTasks(); int numCompletedTasks(); int numFailedTasks(); }
SparkStageInfo
java
quarkusio__quarkus
integration-tests/test-extension/tests/src/test/java/io/quarkus/it/extension/it/AvroMultimoduleIT.java
{ "start": 757, "end": 1263 }
class ____ extends MojoTestBase { @Test public void testThatTheTestsPassed() throws MavenInvocationException, InterruptedException { File testDir = initProject("projects/avro-multimodule-project", "projects/avro-multimodule-project-build"); RunningInvoker running = new RunningInvoker(testDir, false); MavenProcessInvocationResult result = running.execute(List.of("clean", "test"), Map.of()); assertThat(result.getProcess().waitFor()).isZero(); } }
AvroMultimoduleIT
java
spring-projects__spring-framework
spring-webflux/src/main/java/org/springframework/web/reactive/function/client/ExchangeFunctions.java
{ "start": 1434, "end": 2533 }
class ____ { private static final Log logger = LogFactory.getLog(ExchangeFunctions.class); /** * Create an {@code ExchangeFunction} with the given {@code ClientHttpConnector}. * This is the same as calling * {@link #create(ClientHttpConnector, ExchangeStrategies)} and passing * {@link ExchangeStrategies#withDefaults()}. * @param connector the connector to use for connecting to servers * @return the created {@code ExchangeFunction} */ public static ExchangeFunction create(ClientHttpConnector connector) { return create(connector, ExchangeStrategies.withDefaults()); } /** * Create an {@code ExchangeFunction} with the given * {@code ClientHttpConnector} and {@code ExchangeStrategies}. * @param connector the connector to use for connecting to servers * @param strategies the {@code ExchangeStrategies} to use * @return the created {@code ExchangeFunction} */ public static ExchangeFunction create(ClientHttpConnector connector, ExchangeStrategies strategies) { return new DefaultExchangeFunction(connector, strategies); } private static
ExchangeFunctions
java
alibaba__nacos
naming/src/main/java/com/alibaba/nacos/naming/push/NamingFuzzyWatchChangeNotifier.java
{ "start": 1481, "end": 3966 }
class ____ extends SmartSubscriber { private NamingFuzzyWatchContextService namingFuzzyWatchContextService; private FuzzyWatchPushDelayTaskEngine fuzzyWatchPushDelayTaskEngine; public NamingFuzzyWatchChangeNotifier(NamingFuzzyWatchContextService namingFuzzyWatchContextService, FuzzyWatchPushDelayTaskEngine fuzzyWatchPushDelayTaskEngine) { this.fuzzyWatchPushDelayTaskEngine = fuzzyWatchPushDelayTaskEngine; this.namingFuzzyWatchContextService = namingFuzzyWatchContextService; NotifyCenter.registerSubscriber(this); } @Override public List<Class<? extends Event>> subscribeTypes() { List<Class<? extends Event>> result = new LinkedList<>(); result.add(ServiceEvent.ServiceChangedEvent.class); return result; } @Override public void onEvent(Event event) { if (event instanceof ServiceEvent.ServiceChangedEvent) { ServiceEvent.ServiceChangedEvent serviceChangedEvent = (ServiceEvent.ServiceChangedEvent) event; if (namingFuzzyWatchContextService.syncServiceContext(serviceChangedEvent.getService(), serviceChangedEvent.getChangedType())) { generateFuzzyWatchChangeNotifyTask(serviceChangedEvent.getService(), serviceChangedEvent.getChangedType()); } } } private void generateFuzzyWatchChangeNotifyTask(com.alibaba.nacos.naming.core.v2.pojo.Service service, String changedType) { String serviceKey = NamingUtils.getServiceKey(service.getNamespace(), service.getGroup(), service.getName()); Set<String> fuzzyWatchedClients = namingFuzzyWatchContextService.getFuzzyWatchedClients(service); Loggers.SRV_LOG.info("FUZZY_WATCH:serviceKey {} has {} clients fuzzy watched", serviceKey, fuzzyWatchedClients == null ? 
0 : fuzzyWatchedClients.size()); // watch notify push task specify by service for (String clientId : fuzzyWatchedClients) { FuzzyWatchChangeNotifyTask fuzzyWatchChangeNotifyTask = new FuzzyWatchChangeNotifyTask(serviceKey, changedType, clientId, PushConfig.getInstance().getPushTaskDelay()); fuzzyWatchPushDelayTaskEngine.addTask(FuzzyWatchPushDelayTaskEngine.getTaskKey(fuzzyWatchChangeNotifyTask), fuzzyWatchChangeNotifyTask); } } }
NamingFuzzyWatchChangeNotifier
java
playframework__playframework
web/play-java-forms/src/main/java/play/data/FormFactoryModule.java
{ "start": 352, "end": 578 }
class ____ extends Module { @Override public List<Binding<?>> bindings(final Environment environment, final Config config) { return Collections.singletonList(bindClass(FormFactory.class).toSelf()); } }
FormFactoryModule
java
apache__maven
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng3581PluginUsesWagonDependencyTest.java
{ "start": 1014, "end": 1735 }
class ____ extends AbstractMavenIntegrationTestCase { public MavenITmng3581PluginUsesWagonDependencyTest() { // Not 2.0.9 super(); } /** * Test that a plugin using a specific wagon implementation directly works. * * @throws Exception in case of failure */ @Test public void testit() throws Exception { File testDir = extractResources("/mng-3581"); Verifier verifier = newVerifier(testDir.getAbsolutePath()); verifier.setAutoclean(false); verifier.addCliArgument("initialize"); verifier.execute(); verifier.addCliArgument("-B"); verifier.verifyErrorFreeLog(); } }
MavenITmng3581PluginUsesWagonDependencyTest
java
google__guava
android/guava/src/com/google/common/base/Equivalence.java
{ "start": 14390, "end": 14869 }
class ____ extends Equivalence<Object> implements Serializable { static final Identity INSTANCE = new Identity(); @Override protected boolean doEquivalent(Object a, Object b) { return false; } @Override protected int doHash(Object o) { return System.identityHashCode(o); } private Object readResolve() { return INSTANCE; } @GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 1; } }
Identity
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/dirty/DirtyTrackingInheritanceWithGenericsTest.java
{ "start": 1739, "end": 2155 }
class ____<T, S> { private T basicValue; @ManyToOne private S association; public T getBasicValue() { return basicValue; } public void setBasicValue(T basicValue) { this.basicValue = basicValue; } public S getAssociation() { return association; } public void setAssociation(S association) { this.association = association; } } @Entity( name = "ChildItemOne" ) public static
Item
java
google__dagger
dagger-runtime/main/java/dagger/internal/MembersInjectors.java
{ "start": 842, "end": 1293 }
class ____ { /** * Returns a {@link MembersInjector} implementation that injects no members * * <p>Note that there is no verification that the type being injected does not have {@link Inject} * members, so care should be taken to ensure appropriate use. */ @SuppressWarnings("unchecked") public static <T> MembersInjector<T> noOp() { return (MembersInjector<T>) NoOpMembersInjector.INSTANCE; } private static
MembersInjectors
java
apache__maven
impl/maven-core/src/main/java/org/apache/maven/BuildFailureException.java
{ "start": 883, "end": 1120 }
class ____ extends Exception { public BuildFailureException(String message) { super(message); } public BuildFailureException(String message, Throwable cause) { super(message, cause); } }
BuildFailureException
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java
{ "start": 1085, "end": 3836 }
class ____ implements EvalOperator.ExpressionEvaluator { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(AtanEvaluator.class); private final Source source; private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; private Warnings warnings; public AtanEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { this.source = source; this.val = val; this.driverContext = driverContext; } @Override public Block eval(Page page) { try (DoubleBlock valBlock = (DoubleBlock) val.eval(page)) { DoubleVector valVector = valBlock.asVector(); if (valVector == null) { return eval(page.getPositionCount(), valBlock); } return eval(page.getPositionCount(), valVector).asBlock(); } } @Override public long baseRamBytesUsed() { long baseRamBytesUsed = BASE_RAM_BYTES_USED; baseRamBytesUsed += val.baseRamBytesUsed(); return baseRamBytesUsed; } public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { switch (valBlock.getValueCount(p)) { case 0: result.appendNull(); continue position; case 1: break; default: warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); result.appendNull(); continue position; } double val = valBlock.getDouble(valBlock.getFirstValueIndex(p)); result.appendDouble(Atan.process(val)); } return result.build(); } } public DoubleVector eval(int positionCount, DoubleVector valVector) { try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { double val = valVector.getDouble(p); result.appendDouble(p, Atan.process(val)); } return result.build(); } } @Override public String toString() { return "AtanEvaluator[" + "val=" + val + "]"; } @Override 
public void close() { Releasables.closeExpectNoException(val); } private Warnings warnings() { if (warnings == null) { this.warnings = Warnings.createWarnings( driverContext.warningsMode(), source.source().getLineNumber(), source.source().getColumnNumber(), source.text() ); } return warnings; } static
AtanEvaluator
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_150.java
{ "start": 438, "end": 3060 }
class ____ extends MysqlTest { public void test_0() throws Exception { String sql = "(select __aid\n" + " from unidesk_ads.dmj_ex_1_unidesk_tag_all\n" + " where unidesk_ads.dmj_ex_1_unidesk_tag_all.pred_career_type in ('test1'))\n" + " \n" + " union\n" + "\n" + "(select __aid\n" + " from unidesk_ads.dmj_ex_1_unidesk_tag_all\n" + " where unidesk_ads.dmj_ex_1_unidesk_tag_all.pred_career_type in ('test'))\n" + "\n" + "MINUS\n" + "(\n" + "select __aid\n" + " from unidesk_ads.dmj_ex_1_unidesk_tag_all\n" + " where unidesk_ads.dmj_ex_1_unidesk_tag_all.pred_career_type in ('8', '1')\n" + " )"; List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL); SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0); assertEquals(1, statementList.size()); assertEquals("(SELECT __aid\n" + "FROM unidesk_ads.dmj_ex_1_unidesk_tag_all\n" + "WHERE unidesk_ads.dmj_ex_1_unidesk_tag_all.pred_career_type IN ('test1'))\n" + "UNION\n" + "(SELECT __aid\n" + "FROM unidesk_ads.dmj_ex_1_unidesk_tag_all\n" + "WHERE unidesk_ads.dmj_ex_1_unidesk_tag_all.pred_career_type IN ('test'))\n" + "MINUS\n" + "(SELECT __aid\n" + "FROM unidesk_ads.dmj_ex_1_unidesk_tag_all\n" + "WHERE unidesk_ads.dmj_ex_1_unidesk_tag_all.pred_career_type IN ('8', '1'))", stmt.toString()); assertEquals("(SELECT __aid\n" + "FROM unidesk_ads.dmj_ex_1_unidesk_tag_all\n" + "WHERE unidesk_ads.dmj_ex_1_unidesk_tag_all.pred_career_type IN (?))\n" + "UNION\n" + "(SELECT __aid\n" + "FROM unidesk_ads.dmj_ex_1_unidesk_tag_all\n" + "WHERE unidesk_ads.dmj_ex_1_unidesk_tag_all.pred_career_type IN (?))\n" + "MINUS\n" + "(SELECT __aid\n" + "FROM unidesk_ads.dmj_ex_1_unidesk_tag_all\n" + "WHERE unidesk_ads.dmj_ex_1_unidesk_tag_all.pred_career_type IN (?))", ParameterizedOutputVisitorUtils.parameterize(sql, JdbcConstants.MYSQL, VisitorFeature.OutputParameterizedZeroReplaceNotUseOriginalSql)); } }
MySqlSelectTest_150
java
elastic__elasticsearch
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampGroupingAggregatorFunction.java
{ "start": 1093, "end": 1182 }
class ____ generated. Edit {@code GroupingAggregatorImplementer} instead. */ public final
is
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/util/FormattingCLIUtils.java
{ "start": 6281, "end": 6392 }
enum ____ { TITLE, HEADER, LINE } /** * String utility class. */ private static final
TableRowType
java
apache__camel
components/camel-mail/src/test/java/org/apache/camel/component/mail/MailAttachmentsUmlautIssueTest.java
{ "start": 1923, "end": 4687 }
class ____ extends CamelTestSupport { private static final MailboxUser james = Mailbox.getOrCreateUser("james", "secret"); @Test public void testSendAndReceiveMailWithAttachments() throws Exception { // clear mailbox Mailbox.clearAll(); // create an exchange with a normal body and attachment to be produced as email Endpoint endpoint = context.getEndpoint(james.uriPrefix(Protocol.smtp)); // create the exchange with the mail message that is multipart with a file and a Hello World text/plain message. Exchange exchange = endpoint.createExchange(); AttachmentMessage in = exchange.getIn(AttachmentMessage.class); in.setBody("Hello World"); // unicode 00DC is german umlaut String name = "logo2\u00DC"; // use existing logo.jpeg file, but lets name it with the umlaut in.addAttachment(name, new DataHandler(new FileDataSource("src/test/data/logo.jpeg"))); // create a producer that can produce the exchange (= send the mail) Producer producer = endpoint.createProducer(); // start the producer producer.start(); // and let it go (processes the exchange by sending the email) producer.process(exchange); // need some time for the mail to arrive on the inbox (consumed and sent to the mock) MockEndpoint mock = getMockEndpoint("mock:result"); mock.expectedMessageCount(1); Exchange out = mock.assertExchangeReceived(0); Awaitility.await().pollDelay(2, TimeUnit.SECONDS).untilAsserted(() -> { mock.assertIsSatisfied(); }); // plain text assertEquals("Hello World", out.getIn().getBody(String.class)); // attachment Map<String, DataHandler> attachments = out.getIn(AttachmentMessage.class).getAttachments(); assertNotNull(attachments, "Should have attachments"); assertEquals(1, attachments.size()); DataHandler handler = out.getIn(AttachmentMessage.class).getAttachment(name); assertNotNull(handler, "The " + name + " should be there"); String nameURLEncoded = URLEncoder.encode(name, Charset.defaultCharset().name()); assertTrue(handler.getContentType().endsWith(nameURLEncoded), "Handler content type 
should end with URL-encoded name"); assertEquals(name, handler.getName(), "Handler name should be the file name"); producer.stop(); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { public void configure() { from(james.uriPrefix(Protocol.pop3) + "&initialDelay=100&delay=100").to("mock:result"); } }; } }
MailAttachmentsUmlautIssueTest
java
quarkusio__quarkus
test-framework/junit5/src/main/java/io/quarkus/test/junit/AbstractQuarkusTestWithContextExtension.java
{ "start": 371, "end": 4414 }
class ____ extends AbstractTestWithCallbacksExtension implements TestExecutionExceptionHandler, LifecycleMethodExecutionExceptionHandler, TestWatcher { private static final Logger LOG = Logger.getLogger(AbstractQuarkusTestWithContextExtension.class); public static final String IO_QUARKUS_TESTING_TYPE = "io.quarkus.testing.type"; @Override public void handleTestExecutionException(ExtensionContext context, Throwable throwable) throws Throwable { markTestAsFailed(context, throwable); throw throwable; } @Override public void handleAfterAllMethodExecutionException(ExtensionContext context, Throwable throwable) throws Throwable { markTestAsFailed(context, throwable); throw throwable; } @Override public void handleAfterEachMethodExecutionException(ExtensionContext context, Throwable throwable) throws Throwable { markTestAsFailed(context, throwable); throw throwable; } @Override public void handleBeforeAllMethodExecutionException(ExtensionContext context, Throwable throwable) throws Throwable { markTestAsFailed(context, throwable); throw throwable; } @Override public void handleBeforeEachMethodExecutionException(ExtensionContext context, Throwable throwable) throws Throwable { markTestAsFailed(context, throwable); throw throwable; } @Override public void testFailed(ExtensionContext context, Throwable cause) { markTestAsFailed(context, cause); } protected QuarkusTestExtensionState getState(ExtensionContext context) { ExtensionContext.Store store = getStoreFromContext(context); Object o = store.get(QuarkusTestExtensionState.class.getName()); if (o != null) { QuarkusTestExtensionState state; // It's quite possible the state was created in another classloader, and if so, we will need to clone it across into this classloader if (o instanceof QuarkusTestExtensionState) { state = (QuarkusTestExtensionState) o; } else { state = QuarkusTestExtensionState.clone(o); } Class<?> testingTypeOfState = store.get(IO_QUARKUS_TESTING_TYPE, Class.class); if 
(!this.getTestingType().equals(testingTypeOfState)) { // The current state was created by a different testing type, so we need to renew it, so the new state is // compatible with the current testing type. try { state.close(); } catch (IOException ignored) { LOG.debug(ignored); // ignoring exceptions when closing state. } finally { getStoreFromContext(context).remove(QuarkusTestExtensionState.class.getName()); } return null; } return state; } else { return null; } } protected void setState(ExtensionContext context, QuarkusTestExtensionState state) { ExtensionContext.Store store = getStoreFromContext(context); store.put(QuarkusTestExtensionState.class.getName(), state); store.put(IO_QUARKUS_TESTING_TYPE, this.getTestingType()); } protected ExtensionContext.Store getStoreFromContext(ExtensionContext context) { // TODO if we would add some ugly code here to jump up to the // system classloader, we could load QuarkusTestExtension with the test's classloader, and // avoid a whole bunch of reflection // TODO #store ExtensionContext root = context.getRoot(); return root.getStore(ExtensionContext.Namespace.GLOBAL); } protected void markTestAsFailed(ExtensionContext context, Throwable throwable) { QuarkusTestExtensionState state = getState(context); if (state != null) { state.setTestFailed(throwable); } } }
AbstractQuarkusTestWithContextExtension
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorScriptDocValues.java
{ "start": 2094, "end": 2330 }
interface ____ extends Supplier<BytesRef> { @Override default BytesRef getInternal(int index) { throw new UnsupportedOperationException(); } DenseVector getInternal(); } }
DenseVectorSupplier
java
spring-projects__spring-boot
module/spring-boot-devtools/src/main/java/org/springframework/boot/devtools/restart/MainMethod.java
{ "start": 2499, "end": 2607 }
class ____ */ String getDeclaringClassName() { return this.method.getDeclaringClass().getName(); } }
name
java
apache__hadoop
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestDataBlocks.java
{ "start": 1890, "end": 8793 }
class ____ extends HadoopTestBase { public static Collection<Object[]> params() { return Arrays.asList(new Object[][]{ {FAST_UPLOAD_BUFFER_DISK}, {FAST_UPLOAD_BUFFER_ARRAY}, {FAST_UPLOAD_BYTEBUFFER} }); } @TempDir private Path tempDir; /** * Buffer type. */ private String bufferType; public void initTestDataBlocks(final String pBufferType) { this.bufferType = pBufferType; } /** * Create a block factory. * @return the factory */ private S3ADataBlocks.BlockFactory createFactory() { switch (bufferType) { // this one passed in a file allocation function case FAST_UPLOAD_BUFFER_DISK: return new S3ADataBlocks.DiskBlockFactory((i, l) -> tempDir.resolve("file" + i).toFile()); case FAST_UPLOAD_BUFFER_ARRAY: return new S3ADataBlocks.ArrayBlockFactory(null); case FAST_UPLOAD_BYTEBUFFER: return new S3ADataBlocks.ByteBufferBlockFactory(null); default: throw new IllegalArgumentException("Unknown buffer type: " + bufferType); } } /** * Test the content providers from the block factory and the streams * they produce. * There are extra assertions on the {@link ByteBufferInputStream}. 
*/ @ParameterizedTest(name = "BufferType : {0}") @MethodSource("params") public void testBlockFactoryIO(String pBufferType) throws Throwable { initTestDataBlocks(pBufferType); try (S3ADataBlocks.BlockFactory factory = createFactory()) { int limit = 128; S3ADataBlocks.DataBlock block = factory.create(1, limit, null); maybeAssertOutstandingBuffers(factory, 1); byte[] buffer = ContractTestUtils.toAsciiByteArray("test data"); int bufferLen = buffer.length; block.write(buffer, 0, bufferLen); assertEquals(bufferLen, block.dataSize()); assertEquals(limit - bufferLen, block.remainingCapacity(), "capacity in " + block); assertTrue(block.hasCapacity(64), "hasCapacity(64) in " + block); assertTrue(block.hasCapacity(limit - bufferLen), "No capacity in " + block); // now start the write S3ADataBlocks.BlockUploadData blockUploadData = block.startUpload(); final UploadContentProviders.BaseContentProvider<?> cp = blockUploadData.getContentProvider(); assertStreamCreationCount(cp, 0); InputStream stream = cp.newStream(); assertStreamCreationCount(cp, 1); assertThat(stream.markSupported()) .describedAs("markSupported() of %s", stream) .isTrue(); Optional<ByteBufferInputStream> bbStream = stream instanceof ByteBufferInputStream ? 
Optional.of((ByteBufferInputStream) stream) : empty(); bbStream.ifPresent(bb -> { assertThat(bb.hasRemaining()) .describedAs("hasRemaining() in %s", bb) .isTrue(); }); int expected = bufferLen; assertAvailableValue(stream, expected); assertReadEquals(stream, 't'); stream.mark(Integer.MAX_VALUE); expected--; assertAvailableValue(stream, expected); // read into a byte array with an offset int offset = 5; byte[] in = new byte[limit]; assertEquals(2, stream.read(in, offset, 2)); assertByteAtIndex(in, offset++, 'e'); assertByteAtIndex(in, offset++, 's'); expected -= 2; assertAvailableValue(stream, expected); // read to end byte[] remainder = new byte[limit]; int c; int index = 0; while ((c = stream.read()) >= 0) { remainder[index++] = (byte) c; } assertEquals(expected, index); assertByteAtIndex(remainder, --index, 'a'); // no more data left assertAvailableValue(stream, 0); bbStream.ifPresent(bb -> { assertThat(bb.hasRemaining()) .describedAs("hasRemaining() in %s", bb) .isFalse(); }); // at the end of the stream, a read fails assertReadEquals(stream, -1); // go the mark point stream.reset(); assertAvailableValue(stream, bufferLen - 1); assertReadEquals(stream, 'e'); // now ask the content provider for another content stream. final InputStream stream2 = cp.newStream(); assertStreamCreationCount(cp, 2); // this must close the old stream bbStream.ifPresent(bb -> { assertThat(bb.isOpen()) .describedAs("stream %s is open", bb) .isFalse(); }); // do a read(byte[]) of everything byte[] readBuffer = new byte[bufferLen]; assertThat(stream2.read(readBuffer)) .describedAs("number of bytes read from stream %s", stream2) .isEqualTo(bufferLen); assertThat(readBuffer) .describedAs("data read into buffer") .isEqualTo(buffer); // when the block is closed, the buffer must be returned // to the pool. 
block.close(); maybeAssertOutstandingBuffers(factory, 0); stream.close(); maybeAssertOutstandingBuffers(factory, 0); // now the block is closed, the content provider must fail to // create a new stream intercept(IllegalStateException.class, cp::newStream); } } private static void assertByteAtIndex(final byte[] bytes, final int index, final char expected) { assertThat(bytes) .contains(expected, Index.atIndex(index)); } private static void assertReadEquals(final InputStream stream, final int ch) throws IOException { assertThat(stream.read()) .describedAs("read() in %s", stream) .isEqualTo(ch); } private static void assertAvailableValue(final InputStream stream, final int expected) throws IOException { assertThat(stream.available()) .describedAs("wrong available() in %s", stream) .isEqualTo(expected); } private static void assertStreamCreationCount( final UploadContentProviders.BaseContentProvider<?> cp, final int count) { assertThat(cp.getStreamCreationCount()) .describedAs("stream creation count of %s", cp) .isEqualTo(count); } /** * Assert the number of buffers active for a block factory, * if the factory is a ByteBufferBlockFactory. * <p> * If it is of any other type, no checks are made. * @param factory factory * @param expectedCount expected count. */ private static void maybeAssertOutstandingBuffers( S3ADataBlocks.BlockFactory factory, int expectedCount) { if (factory instanceof S3ADataBlocks.ByteBufferBlockFactory) { S3ADataBlocks.ByteBufferBlockFactory bufferFactory = (S3ADataBlocks.ByteBufferBlockFactory) factory; assertThat(bufferFactory.getOutstandingBufferCount()) .describedAs("outstanding buffers in %s", factory) .isEqualTo(expectedCount); } } }
TestDataBlocks
java
apache__camel
components/camel-kubernetes/src/main/java/org/apache/camel/component/kubernetes/config_maps/KubernetesConfigMapsComponent.java
{ "start": 1103, "end": 1392 }
class ____ extends AbstractKubernetesComponent { @Override protected KubernetesConfigMapsEndpoint doCreateEndpoint(String uri, String remaining, KubernetesConfiguration config) { return new KubernetesConfigMapsEndpoint(uri, this, config); } }
KubernetesConfigMapsComponent
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/NonOverridingEqualsTest.java
{ "start": 3231, "end": 3588 }
class ____ { // BUG: Diagnostic contains: Did you mean '@Override' public native boolean equals(Test other); } """) .doTest(); } @Test public void flagsIfMethodTakesUnrelatedType() { compilationHelper .addSourceLines( "Test.java", """ public
Test
java
apache__spark
common/network-common/src/test/java/org/apache/spark/network/StreamSuite.java
{ "start": 1966, "end": 5461 }
class ____ { private static final String[] STREAMS = StreamTestHelper.STREAMS; private static StreamTestHelper testData; private static TransportContext context; private static TransportServer server; private static TransportClientFactory clientFactory; private static ByteBuffer createBuffer(int bufSize) { ByteBuffer buf = ByteBuffer.allocate(bufSize); for (int i = 0; i < bufSize; i ++) { buf.put((byte) i); } buf.flip(); return buf; } @BeforeAll public static void setUp() throws Exception { testData = new StreamTestHelper(); final TransportConf conf = new TransportConf("shuffle", MapConfigProvider.EMPTY); final StreamManager streamManager = new StreamManager() { @Override public ManagedBuffer getChunk(long streamId, int chunkIndex) { throw new UnsupportedOperationException(); } @Override public ManagedBuffer openStream(String streamId) { return testData.openStream(conf, streamId); } }; RpcHandler handler = new RpcHandler() { @Override public void receive( TransportClient client, ByteBuffer message, RpcResponseCallback callback) { throw new UnsupportedOperationException(); } @Override public StreamManager getStreamManager() { return streamManager; } }; context = new TransportContext(conf, handler); server = context.createServer(); clientFactory = context.createClientFactory(); } @AfterAll public static void tearDown() { server.close(); clientFactory.close(); testData.cleanup(); context.close(); } @Test public void testZeroLengthStream() throws Throwable { try (TransportClient client = clientFactory.createClient(TestUtils.getLocalHost(), server.getPort())) { StreamTask task = new StreamTask(client, "emptyBuffer", TimeUnit.SECONDS.toMillis(5)); task.run(); task.check(); } } @Test public void testSingleStream() throws Throwable { try (TransportClient client = clientFactory.createClient(TestUtils.getLocalHost(), server.getPort())) { StreamTask task = new StreamTask(client, "largeBuffer", TimeUnit.SECONDS.toMillis(5)); task.run(); task.check(); } } @Test public void 
testMultipleStreams() throws Throwable { try (TransportClient client = clientFactory.createClient(TestUtils.getLocalHost(), server.getPort())) { for (int i = 0; i < 20; i++) { StreamTask task = new StreamTask(client, STREAMS[i % STREAMS.length], TimeUnit.SECONDS.toMillis(5)); task.run(); task.check(); } } } @Test public void testConcurrentStreams() throws Throwable { ExecutorService executor = Executors.newFixedThreadPool(20); try (TransportClient client = clientFactory.createClient(TestUtils.getLocalHost(), server.getPort())) { List<StreamTask> tasks = new ArrayList<>(); for (int i = 0; i < 20; i++) { StreamTask task = new StreamTask(client, STREAMS[i % STREAMS.length], TimeUnit.SECONDS.toMillis(20)); tasks.add(task); executor.submit(task); } executor.shutdown(); assertTrue(executor.awaitTermination(30, TimeUnit.SECONDS), "Timed out waiting for tasks."); for (StreamTask task : tasks) { task.check(); } } finally { executor.shutdownNow(); } } private static
StreamSuite
java
apache__logging-log4j2
log4j-core-test/src/main/java/org/apache/logging/log4j/core/test/layout/Log4j2_1482_Test.java
{ "start": 1537, "end": 3189 }
class ____ { static final String CONFIG_LOCATION = "log4j2-1482.xml"; static final String FOLDER = "target/log4j2-1482"; private static final int LOOP_COUNT = 10; static void assertFileContents(final int runNumber) throws IOException { final Path path = Paths.get(FOLDER + "/audit.tmp"); final List<String> lines = Files.readAllLines(path, Charset.defaultCharset()); int i = 1; final int size = lines.size(); for (final String string : lines) { if (string.startsWith(",,")) { final Path folder = Paths.get(FOLDER); final File[] files = folder.toFile().listFiles(); Arrays.sort(files); System.out.println("Run " + runNumber + ": " + Arrays.toString(files)); Assert.fail( String.format("Run %,d, line %,d of %,d: \"%s\" in %s", runNumber, i++, size, string, lines)); } } } @Rule public CleanFolders cleanFolders = new CleanFolders(FOLDER); protected abstract void log(int runNumber); private void loopingRun(final int loopCount) throws IOException { for (int i = 1; i <= loopCount; i++) { try (final LoggerContext loggerContext = Configurator.initialize(getClass().getName(), CONFIG_LOCATION)) { log(i); } assertFileContents(i); } } @Test public void testLoopingRun() throws IOException { loopingRun(LOOP_COUNT); } @Test public void testSingleRun() throws IOException { loopingRun(1); } }
Log4j2_1482_Test
java
alibaba__nacos
common/src/main/java/com/alibaba/nacos/common/packagescan/resource/PathResource.java
{ "start": 2174, "end": 9818 }
class ____ extends AbstractResource implements WritableResource { private final Path path; /** * Create a new PathResource from a Path handle. * * <p>Note: Unlike {@link FileSystemResource}, when building relative resources * via {@link #createRelative}, the relative path will be built <i>underneath</i> * the given root: e.g. Paths.get("C:/dir1/"), relative path "dir2" &rarr; "C:/dir1/dir2"! * * @param path a Path handle */ public PathResource(Path path) { AbstractAssert.notNull(path, "Path must not be null"); this.path = path.normalize(); } /** * Create a new PathResource from a Path handle. * * <p>Note: Unlike {@link FileSystemResource}, when building relative resources * via {@link #createRelative}, the relative path will be built <i>underneath</i> * the given root: e.g. Paths.get("C:/dir1/"), relative path "dir2" &rarr; "C:/dir1/dir2"! * * @param path a path * @see Paths#get(String, String...) */ public PathResource(String path) { AbstractAssert.notNull(path, "Path must not be null"); this.path = Paths.get(path).normalize(); } /** * Create a new PathResource from a Path handle. * * <p>Note: Unlike {@link FileSystemResource}, when building relative resources * via {@link #createRelative}, the relative path will be built <i>underneath</i> * the given root: e.g. Paths.get("C:/dir1/"), relative path "dir2" &rarr; "C:/dir1/dir2"! * * @param uri a path URI * @see Paths#get(URI) */ public PathResource(URI uri) { AbstractAssert.notNull(uri, "URI must not be null"); this.path = Paths.get(uri).normalize(); } /** * Return the file path for this resource. */ public final String getPath() { return this.path.toString(); } /** * This implementation returns whether the underlying file exists. * * @see Files#exists(Path, LinkOption...) */ @Override public boolean exists() { return Files.exists(this.path); } /** * This implementation checks whether the underlying file is marked as readable * (and corresponds to an actual file with content, not to a directory). 
* * @see Files#isReadable(Path) * @see Files#isDirectory(Path, LinkOption...) */ @Override public boolean isReadable() { return (Files.isReadable(this.path) && !Files.isDirectory(this.path)); } /** * This implementation opens a InputStream for the underlying file. * * @see java.nio.file.spi.FileSystemProvider#newInputStream(Path, OpenOption...) */ @Override public InputStream getInputStream() throws IOException { if (!exists()) { throw new FileNotFoundException(getPath() + " (no such file or directory)"); } if (Files.isDirectory(this.path)) { throw new FileNotFoundException(getPath() + " (is a directory)"); } return Files.newInputStream(this.path); } /** * This implementation checks whether the underlying file is marked as writable * (and corresponds to an actual file with content, not to a directory). * * @see Files#isWritable(Path) * @see Files#isDirectory(Path, LinkOption...) */ @Override public boolean isWritable() { return (Files.isWritable(this.path) && !Files.isDirectory(this.path)); } /** * This implementation opens a OutputStream for the underlying file. * * @see java.nio.file.spi.FileSystemProvider#newOutputStream(Path, OpenOption...) */ @Override public OutputStream getOutputStream() throws IOException { if (Files.isDirectory(this.path)) { throw new FileNotFoundException(getPath() + " (is a directory)"); } return Files.newOutputStream(this.path); } /** * This implementation returns a URL for the underlying file. * * @see Path#toUri() * @see URI#toURL() */ @Override public URL getUrl() throws IOException { return this.path.toUri().toURL(); } /** * This implementation returns a URI for the underlying file. * * @see Path#toUri() */ @Override public URI getUri() throws IOException { return this.path.toUri(); } /** * This implementation always indicates a file. */ @Override public boolean isFile() { return true; } /** * This implementation returns the underlying File reference. 
*/ @Override public File getFile() throws IOException { try { return this.path.toFile(); } catch (UnsupportedOperationException ex) { // Only paths on the default file system can be converted to a File: // Do exception translation for cases where conversion is not possible. throw new FileNotFoundException(this.path + " cannot be resolved to absolute file path"); } } /** * This implementation opens a Channel for the underlying file. * * @see Files#newByteChannel(Path, OpenOption...) */ @Override public ReadableByteChannel readableChannel() throws IOException { try { return Files.newByteChannel(this.path, StandardOpenOption.READ); } catch (NoSuchFileException ex) { throw new FileNotFoundException(ex.getMessage()); } } /** * This implementation opens a Channel for the underlying file. * * @see Files#newByteChannel(Path, OpenOption...) */ @Override public WritableByteChannel writableChannel() throws IOException { return Files.newByteChannel(this.path, StandardOpenOption.WRITE); } /** * This implementation returns the underlying file's length. */ @Override public long contentLength() throws IOException { return Files.size(this.path); } /** * This implementation returns the underlying File's timestamp. * * @see Files#getLastModifiedTime(Path, LinkOption...) */ @Override public long lastModified() throws IOException { // We can not use the superclass method since it uses conversion to a File and // only a Path on the default file system can be converted to a File... return Files.getLastModifiedTime(this.path).toMillis(); } /** * This implementation creates a PathResource, applying the given path * relative to the path of the underlying file of this resource descriptor. * * @see Path#resolve(String) */ @Override public Resource createRelative(String relativePath) { return new PathResource(this.path.resolve(relativePath)); } /** * This implementation returns the name of the file. 
* * @see Path#getFileName() */ @Override public String getFilename() { return this.path.getFileName().toString(); } @Override public String getDescription() { return "path [" + this.path.toAbsolutePath() + "]"; } /** * This implementation compares the underlying Path references. */ @Override public boolean equals(Object other) { return (this == other || (other instanceof PathResource && this.path.equals(((PathResource) other).path))); } /** * This implementation returns the hash code of the underlying Path reference. */ @Override public int hashCode() { return this.path.hashCode(); } }
PathResource
java
FasterXML__jackson-databind
src/main/java/tools/jackson/databind/PropertyMetadata.java
{ "start": 991, "end": 1144 }
class ____ for containing information about expected merge * information for this property, if merging is expected. */ public final static
used
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/path/JSONPath_oracle_compatible_test.java
{ "start": 189, "end": 2384 }
class ____ extends TestCase { public void test_reserve() throws Exception { JSONObject object = JSON.parseObject(str); assertEquals("Sayings of the Century", JSONPath.eval(object, "$.store.book[0].title")); assertEquals("Sayings of the Century", JSONPath.eval(object, "$['store']['book'][0]['title']")); } public static final String str = "{\n" + " \"store\": {\n" + " \"book\": [\n" + " {\n" + " \"category\": \"reference\",\n" + "\n" + " \"author\": \"Nigel Rees\",\n" + "\n" + " \"title\": \"Sayings of the Century\",\n" + "\n" + " \"price\": 8.95\n" + " },\n" + " {\n" + " \"category\": \"fiction\",\n" + " \"author\": \"Evelyn Waugh\",\n" + " \"title\": \"Sword of Honour\",\n" + " \"price\": 12.99\n" + " },\n" + " {\n" + " \"category\": \"fiction\",\n" + " \"author\": \"Herman Melville\",\n" + " \"title\": \"Moby Dick\",\n" + " \"isbn\": \"0-553-21311-3\",\n" + " \"price\": 8.99\n" + " },\n" + " {\n" + " \"category\": \"fiction\",\n" + " \"author\": \"J. R. R. Tolkien\",\n" + " \"title\": \"The Lord of the Rings\",\n" + " \"isbn\": \"0-395-19395-8\",\n" + " \"price\": 22.99\n" + " }\n" + " ],\n" + " \"bicycle\": {\n" + " \"color\": \"red\",\n" + " \"price\": 19.95\n" + " }\n" + " },\n" + " \"expensive\": 10\n" + "}"; }
JSONPath_oracle_compatible_test
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/builder/ProxyBuilderTest.java
{ "start": 1767, "end": 1837 }
interface ____ { Future<String> sayHello(String body); } }
Foo
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/exception/internal/SQLStateConversionDelegate.java
{ "start": 1663, "end": 4296 }
class ____ extends AbstractSQLExceptionConversionDelegate { public SQLStateConversionDelegate(ConversionContext conversionContext) { super( conversionContext ); } @Override public @Nullable JDBCException convert(SQLException sqlException, String message, String sql) { final String sqlState = extractSqlState( sqlException ); if ( sqlState != null ) { switch ( sqlState ) { case "42501": return new AuthException( message, sqlException, sql ); case "40001": return new LockAcquisitionException( message, sqlException, sql ); } switch ( determineSqlStateClassCode( sqlState ) ) { case "07", // "dynamic SQL error" "20", "2A", // "direct SQL syntax error or access rule violation" "37", // "dynamic SQL syntax error or access rule violation" "42", // "syntax error or access rule violation" "65", // Oracle specific as far as I can tell "S0": // MySQL specific as far as I can tell return new SQLGrammarException( message, sqlException, sql ); case "23", // "integrity constraint violation" "27", // "triggered data change violation" "44": // "with check option violation" final String constraintName = getConversionContext().getViolatedConstraintNameExtractor() .extractConstraintName( sqlException ); if ( sqlState.length() >= 5 ) { final ConstraintKind constraintKind = constraintKind( sqlState.substring( 0, 5 ) ); return new ConstraintViolationException( message, sqlException, sql, constraintKind, constraintName ); } else { return new ConstraintViolationException( message, sqlException, sql, constraintName ); } case "08": // "connection exception" return new JDBCConnectionException( message, sqlException, sql ); case "21", // "cardinality violation" "22": // "data exception" (22001 is string too long; 22003 is numeric value out of range) return new DataException( message, sqlException, sql ); case "28": // "authentication failure" return new AuthException( message, sqlException, sql ); } } return null; } private static ConstraintKind constraintKind(String trimmedState) { return 
switch ( trimmedState ) { case "23502" -> ConstraintKind.NOT_NULL; case "23505" -> ConstraintKind.UNIQUE; case "23503" -> ConstraintKind.FOREIGN_KEY; // 23510-3 indicate CHECK on Db2, // 23514 indicates CHECK on Postgres, // 23513-4 indicate CHECK on h2 case "23510", "23511", "23512", "23513", "23514" -> ConstraintKind.CHECK; default -> ConstraintKind.OTHER; }; } }
SQLStateConversionDelegate
java
apache__kafka
storage/api/src/main/java/org/apache/kafka/server/log/remote/storage/RemoteLogSegmentState.java
{ "start": 2097, "end": 4418 }
enum ____ { /** * This state indicates that the segment copying to remote storage is started but not yet finished. */ COPY_SEGMENT_STARTED((byte) 0), /** * This state indicates that the segment copying to remote storage is finished. */ COPY_SEGMENT_FINISHED((byte) 1), /** * This state indicates that the segment deletion is started but not yet finished. */ DELETE_SEGMENT_STARTED((byte) 2), /** * This state indicates that the segment is deleted successfully. */ DELETE_SEGMENT_FINISHED((byte) 3); private static final Map<Byte, RemoteLogSegmentState> STATE_TYPES = Collections.unmodifiableMap( Arrays.stream(values()).collect(Collectors.toMap(RemoteLogSegmentState::id, Function.identity()))); private final byte id; RemoteLogSegmentState(byte id) { this.id = id; } public byte id() { return id; } public static RemoteLogSegmentState forId(byte id) { return STATE_TYPES.get(id); } public static boolean isValidTransition(RemoteLogSegmentState srcState, RemoteLogSegmentState targetState) { Objects.requireNonNull(targetState, "targetState can not be null"); if (srcState == null) { // If the source state is null, check the target state as the initial state viz COPY_SEGMENT_STARTED // This ensures simplicity here as we don't have to define one more type to represent the state 'null' like // COPY_SEGMENT_NOT_STARTED, have the null check by the caller and pass that state. return targetState == COPY_SEGMENT_STARTED; } else if (srcState == targetState) { // Self transition is treated as valid. This is to maintain the idempotency for the state in case of retries // or failover. return true; } else if (srcState == COPY_SEGMENT_STARTED) { return targetState == COPY_SEGMENT_FINISHED || targetState == DELETE_SEGMENT_STARTED; } else if (srcState == COPY_SEGMENT_FINISHED) { return targetState == DELETE_SEGMENT_STARTED; } else if (srcState == DELETE_SEGMENT_STARTED) { return targetState == DELETE_SEGMENT_FINISHED; } else { return false; } } }
RemoteLogSegmentState
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/jsonfield/JSONFieldTest_1.java
{ "start": 1750, "end": 2471 }
class ____ { private int f0; private int f1; private int f2; @JSONField(ordinal = 3) public int getF0() { return f0; } @JSONField(ordinal = 3) public void setF0(int f0) { this.f0 = f0; } @JSONField(ordinal = 2) public int getF1() { return f1; } @JSONField(ordinal = 2) public void setF1(int f1) { this.f1 = f1; } @JSONField(ordinal = 1) public int getF2() { return f2; } @JSONField(ordinal = 1) public void setF2(int f2) { this.f2 = f2; } } }
VO
java
apache__flink
flink-filesystems/flink-hadoop-fs/src/test/java/org/apache/flink/runtime/fs/hdfs/AbstractHadoopRecoverableWriterITCase.java
{ "start": 2075, "end": 16479 }
class ____ { // ----------------------- Test Specific configuration ----------------------- private static final Random RND = new Random(); protected static Path basePath; private static FileSystem fileSystem; // this is set for every test @BeforeEach protected Path basePathForTest; // ----------------------- Test Data to be used ----------------------- private static final String testData1 = "THIS IS A TEST 1."; private static final String testData2 = "THIS IS A TEST 2."; private static final String testData3 = "THIS IS A TEST 3."; protected static final String BIG_CHUNK_DATA_PATTERN = testData1; protected static String bigDataChunk; // ----------------------- Test Lifecycle ----------------------- protected static boolean skipped = true; @TempDir protected static File tempFolder; @AfterAll static void cleanUp() throws Exception { if (!skipped) { getFileSystem().delete(basePath, true); } FileSystem.initialize(new Configuration()); } @BeforeEach void prepare() throws Exception { basePathForTest = new Path(basePath, StringUtils.getRandomString(RND, 16, 16, 'a', 'z')); cleanupLocalDir(); } protected abstract String getLocalTmpDir() throws Exception; protected abstract String getIncompleteObjectName( RecoverableWriter.ResumeRecoverable recoverable); private void cleanupLocalDir() throws Exception { final String defaultTmpDir = getLocalTmpDir(); final java.nio.file.Path defaultTmpPath = Paths.get(defaultTmpDir); if (Files.exists(defaultTmpPath)) { try (Stream<java.nio.file.Path> files = Files.list(defaultTmpPath)) { files.forEach( p -> { try { Files.delete(p); } catch (IOException e) { e.printStackTrace(); } }); } } else { Files.createDirectory(defaultTmpPath); } } @AfterEach void cleanupAndCheckTmpCleanup() throws Exception { final String defaultTmpDir = getLocalTmpDir(); final java.nio.file.Path localTmpDir = Paths.get(defaultTmpDir); // delete local tmp dir. 
assertThat(Files.exists(localTmpDir)).isTrue(); try (Stream<java.nio.file.Path> files = Files.list(localTmpDir)) { assertThat(files).isEmpty(); } Files.delete(localTmpDir); // delete also object store dir. getFileSystem().delete(basePathForTest, true); } protected static FileSystem getFileSystem() throws Exception { if (fileSystem == null) { fileSystem = FileSystem.get(basePath.toUri()); } return fileSystem; } // ----------------------- Test Normal Execution ----------------------- @Test void testCloseWithNoData() throws Exception { final RecoverableWriter writer = getRecoverableWriter(); final Path path = new Path(basePathForTest, "part-0"); final RecoverableFsDataOutputStream stream = writer.open(path); stream.closeForCommit().commit(); } @Test void testCommitAfterNormalClose() throws Exception { final RecoverableWriter writer = getRecoverableWriter(); final Path path = new Path(basePathForTest, "part-0"); final RecoverableFsDataOutputStream stream = writer.open(path); stream.write(bytesOf(testData1)); stream.closeForCommit().commit(); assertThat(getContentsOfFile(path)).isEqualTo(testData1); } @Test void testCommitAfterPersist() throws Exception { final RecoverableWriter writer = getRecoverableWriter(); final Path path = new Path(basePathForTest, "part-0"); final RecoverableFsDataOutputStream stream = writer.open(path); stream.write(bytesOf(testData1)); stream.persist(); stream.write(bytesOf(testData2)); stream.closeForCommit().commit(); assertThat(getContentsOfFile(path)).isEqualTo(testData1 + testData2); } @Test void testCleanupRecoverableState() throws Exception { final RecoverableWriter writer = getRecoverableWriter(); final Path path = new Path(basePathForTest, "part-0"); final RecoverableFsDataOutputStream stream = writer.open(path); stream.write(bytesOf(testData1)); RecoverableWriter.ResumeRecoverable recoverable = stream.persist(); stream.closeForCommit().commit(); // still the data is there as we have not deleted them from the tmp object final String 
content = getContentsOfFile(new Path('/' + getIncompleteObjectName(recoverable))); assertThat(content).isEqualTo(testData1); boolean successfullyDeletedState = writer.cleanupRecoverableState(recoverable); assertThat(successfullyDeletedState).isTrue(); assertThatThrownBy( () -> { int retryTimes = 10; final long delayMs = 1000; // Because the s3 is eventually consistency the s3 file might still be // found after we delete // it. // So we try multi-times to verify that the file was deleted at last. while (retryTimes > 0) { // this should throw the exception as we deleted the file. getContentsOfFile( new Path('/' + getIncompleteObjectName(recoverable))); retryTimes--; Thread.sleep(delayMs); } }) .isInstanceOf(FileNotFoundException.class); } @Test void testCallingDeleteObjectTwiceDoesNotThroughException() throws Exception { final RecoverableWriter writer = getRecoverableWriter(); final Path path = new Path(basePathForTest, "part-0"); final RecoverableFsDataOutputStream stream = writer.open(path); stream.write(bytesOf(testData1)); RecoverableWriter.ResumeRecoverable recoverable = stream.persist(); stream.closeForCommit().commit(); // still the data is there as we have not deleted them from the tmp object final String content = getContentsOfFile(new Path('/' + getIncompleteObjectName(recoverable))); assertThat(content).isEqualTo(testData1); boolean successfullyDeletedState = writer.cleanupRecoverableState(recoverable); assertThat(successfullyDeletedState).isTrue(); boolean unsuccessfulDeletion = writer.cleanupRecoverableState(recoverable); assertThat(unsuccessfulDeletion).isFalse(); } // ----------------------- Test Recovery ----------------------- @Test void testCommitAfterRecovery() throws Exception { final Path path = new Path(basePathForTest, "part-0"); final RecoverableWriter initWriter = getRecoverableWriter(); final RecoverableFsDataOutputStream stream = initWriter.open(path); stream.write(bytesOf(testData1)); stream.persist(); stream.persist(); // and write some 
more data stream.write(bytesOf(testData2)); final RecoverableWriter.CommitRecoverable recoverable = stream.closeForCommit().getRecoverable(); final byte[] serializedRecoverable = initWriter.getCommitRecoverableSerializer().serialize(recoverable); // get a new serializer from a new writer to make sure that no pre-initialized state leaks // in. final RecoverableWriter newWriter = getRecoverableWriter(); final SimpleVersionedSerializer<RecoverableWriter.CommitRecoverable> deserializer = newWriter.getCommitRecoverableSerializer(); final RecoverableWriter.CommitRecoverable recoveredRecoverable = deserializer.deserialize(deserializer.getVersion(), serializedRecoverable); final RecoverableFsDataOutputStream.Committer committer = newWriter.recoverForCommit(recoveredRecoverable); committer.commitAfterRecovery(); assertThat(getContentsOfFile(path)).isEqualTo(testData1 + testData2); } private static final String INIT_EMPTY_PERSIST = "EMPTY"; private static final String INTERM_WITH_STATE_PERSIST = "INTERM-STATE"; private static final String INTERM_WITH_NO_ADDITIONAL_STATE_PERSIST = "INTERM-IMEDIATE"; private static final String FINAL_WITH_EXTRA_STATE = "FINAL"; @Test void testRecoverWithEmptyState() throws Exception { testResumeAfterMultiplePersistWithSmallData(INIT_EMPTY_PERSIST, testData3); } @Test void testRecoverWithState() throws Exception { testResumeAfterMultiplePersistWithSmallData( INTERM_WITH_STATE_PERSIST, testData1 + testData3); } @Test void testRecoverFromIntermWithoutAdditionalState() throws Exception { testResumeAfterMultiplePersistWithSmallData( INTERM_WITH_NO_ADDITIONAL_STATE_PERSIST, testData1 + testData3); } @Test void testRecoverAfterMultiplePersistsState() throws Exception { testResumeAfterMultiplePersistWithSmallData( FINAL_WITH_EXTRA_STATE, testData1 + testData2 + testData3); } @Test void testRecoverWithStateWithMultiPart() throws Exception { testResumeAfterMultiplePersistWithMultiPartUploads( INTERM_WITH_STATE_PERSIST, bigDataChunk + bigDataChunk); } 
@Test void testRecoverFromIntermWithoutAdditionalStateWithMultiPart() throws Exception { testResumeAfterMultiplePersistWithMultiPartUploads( INTERM_WITH_NO_ADDITIONAL_STATE_PERSIST, bigDataChunk + bigDataChunk); } @Test void testRecoverAfterMultiplePersistsStateWithMultiPart() throws Exception { testResumeAfterMultiplePersistWithMultiPartUploads( FINAL_WITH_EXTRA_STATE, bigDataChunk + bigDataChunk + bigDataChunk); } private void testResumeAfterMultiplePersistWithSmallData( final String persistName, final String expectedFinalContents) throws Exception { testResumeAfterMultiplePersist( persistName, expectedFinalContents, testData1, testData2, testData3); } private void testResumeAfterMultiplePersistWithMultiPartUploads( final String persistName, final String expectedFinalContents) throws Exception { testResumeAfterMultiplePersist( persistName, expectedFinalContents, bigDataChunk, bigDataChunk, bigDataChunk); } private void testResumeAfterMultiplePersist( final String persistName, final String expectedFinalContents, final String firstItemToWrite, final String secondItemToWrite, final String thirdItemToWrite) throws Exception { final Path path = new Path(basePathForTest, "part-0"); final RecoverableWriter initWriter = getRecoverableWriter(); final Map<String, RecoverableWriter.ResumeRecoverable> recoverables = new HashMap<>(4); try (final RecoverableFsDataOutputStream stream = initWriter.open(path)) { recoverables.put(INIT_EMPTY_PERSIST, stream.persist()); stream.write(bytesOf(firstItemToWrite)); recoverables.put(INTERM_WITH_STATE_PERSIST, stream.persist()); recoverables.put(INTERM_WITH_NO_ADDITIONAL_STATE_PERSIST, stream.persist()); // and write some more data stream.write(bytesOf(secondItemToWrite)); recoverables.put(FINAL_WITH_EXTRA_STATE, stream.persist()); } final SimpleVersionedSerializer<RecoverableWriter.ResumeRecoverable> serializer = initWriter.getResumeRecoverableSerializer(); final byte[] serializedRecoverable = 
serializer.serialize(recoverables.get(persistName)); // get a new serializer from a new writer to make sure that no pre-initialized state leaks // in. final RecoverableWriter newWriter = getRecoverableWriter(); final SimpleVersionedSerializer<RecoverableWriter.ResumeRecoverable> deserializer = newWriter.getResumeRecoverableSerializer(); final RecoverableWriter.ResumeRecoverable recoveredRecoverable = deserializer.deserialize(serializer.getVersion(), serializedRecoverable); final RecoverableFsDataOutputStream recoveredStream = newWriter.recover(recoveredRecoverable); recoveredStream.write(bytesOf(thirdItemToWrite)); recoveredStream.closeForCommit().commit(); assertThat(getContentsOfFile(path)).isEqualTo(expectedFinalContents); } // -------------------------- Test Utilities -------------------------- protected String getContentsOfFile(Path path) throws Exception { final StringBuilder builder = new StringBuilder(); try (FSDataInputStream inStream = getFileSystem().open(path); BufferedReader reader = new BufferedReader(new InputStreamReader(inStream))) { String line; while ((line = reader.readLine()) != null) { builder.append(line); } } return builder.toString(); } // ----------------------- Test utilities ----------------------- protected static String createBigDataChunk(String pattern, long size) { final StringBuilder stringBuilder = new StringBuilder(); int sampleLength = bytesOf(pattern).length; int repeats = MathUtils.checkedDownCast(size) / sampleLength + 100; for (int i = 0; i < repeats; i++) { stringBuilder.append(pattern); } return stringBuilder.toString(); } protected static byte[] bytesOf(String str) { return str.getBytes(StandardCharsets.UTF_8); } protected RecoverableWriter getRecoverableWriter() throws Exception { return getFileSystem().createRecoverableWriter(); } }
AbstractHadoopRecoverableWriterITCase
java
apache__spark
core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java
{ "start": 1256, "end": 1868 }
class ____ extends OutOfMemoryError implements SparkThrowable { String errorClass; Map<String, String> messageParameters; public SparkOutOfMemoryError(String errorClass, Map<String, String> messageParameters) { super(SparkThrowableHelper.getMessage(errorClass, messageParameters)); this.errorClass = errorClass; this.messageParameters = messageParameters; } @Override public Map<String, String> getMessageParameters() { return messageParameters; } @Override public String getCondition() { return errorClass; } }
SparkOutOfMemoryError
java
junit-team__junit5
jupiter-tests/src/test/java/org/junit/jupiter/engine/bridge/AbstractNonGenericTests.java
{ "start": 550, "end": 785 }
class ____ { @Test void mA() { BridgeMethodTests.sequence.add("mA()"); } @Test void test(Number value) { BridgeMethodTests.sequence.add("A.test(Number)"); Assertions.assertEquals(42, value); } static
AbstractNonGenericTests
java
spring-projects__spring-boot
core/spring-boot/src/test/java/org/springframework/boot/convert/ApplicationConversionServiceTests.java
{ "start": 16634, "end": 16893 }
class ____ implements Formatter<Integer> { @Override public String print(Integer object, Locale locale) { return ""; } @Override public Integer parse(String text, Locale locale) throws ParseException { return 1; } } static
ExampleFormatter
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/GenericConverter.java
{ "start": 1084, "end": 1571 }
class ____ implements ValueConverter { private static final GenericConverter INSTANCE = new GenericConverter(); private GenericConverter() { } public static GenericConverter getInstance() { return INSTANCE; } @Override public byte[] encodeValue(Object value) throws IOException { return GenericObjectMapper.write(value); } @Override public Object decodeValue(byte[] bytes) throws IOException { return GenericObjectMapper.read(bytes); } }
GenericConverter
java
playframework__playframework
documentation/manual/working/javaGuide/main/tests/code/javaguide/tests/ModelTest.java
{ "start": 858, "end": 946 }
interface ____ { public Set<Role> findUserRoles(User user); } public
UserRepository
java
hibernate__hibernate-orm
hibernate-testing/src/main/java/org/hibernate/testing/jdbc/SQLStatementInterceptor.java
{ "start": 716, "end": 1840 }
class ____ { private final LinkedList<String> sqlQueries = new LinkedList<>(); private final StatementInspector inspector = sql -> { sqlQueries.add( sql ); return sql; }; public SQLStatementInterceptor(SessionFactoryBuilder sessionFactoryBuilder) { sessionFactoryBuilder.applyStatementInspector( inspector ); } public SQLStatementInterceptor(Map<String,Object> settings) { settings.put( STATEMENT_INSPECTOR, inspector ); } public SQLStatementInterceptor(StandardServiceRegistryBuilder registryBuilder) { registryBuilder.applySetting( STATEMENT_INSPECTOR, inspector ); } public SQLStatementInterceptor(Configuration configuration) { this( PropertiesHelper.map( configuration.getProperties() ) ); } public LinkedList<String> getSqlQueries() { return sqlQueries; } public void clear() { sqlQueries.clear(); } public void assertExecuted(String expected) { assertTrue(sqlQueries.contains( expected )); } public void assertExecutedCount(int expected) { assertEquals(expected, sqlQueries.size()); } public int getQueryCount() { return sqlQueries.size(); } }
SQLStatementInterceptor
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/policies/router/AbstractRouterPolicy.java
{ "start": 2202, "end": 7792 }
class ____ extends AbstractConfigurableFederationPolicy implements FederationRouterPolicy { @Override public void validate(WeightedPolicyInfo newPolicyInfo) throws FederationPolicyInitializationException { super.validate(newPolicyInfo); Map<SubClusterIdInfo, Float> newWeights = newPolicyInfo.getRouterPolicyWeights(); if (newWeights == null || newWeights.size() < 1) { throw new FederationPolicyInitializationException( "Weight vector cannot be null/empty."); } } public void validate(ApplicationSubmissionContext appSubmissionContext) throws FederationPolicyException { if (appSubmissionContext == null) { throw new FederationPolicyException( "Cannot route an application with null context."); } // if the queue is not specified we set it to default value, to be // compatible with YARN behavior. String queue = appSubmissionContext.getQueue(); if (queue == null) { appSubmissionContext.setQueue(YarnConfiguration.DEFAULT_QUEUE_NAME); } } /** * This method is implemented by the specific policy, and it is used to route * both reservations, and applications among a given set of * sub-clusters. * * @param queue the queue for this application/reservation * @param preSelectSubClusters a pre-filter set of sub-clusters * @return the chosen sub-cluster * * @throws YarnException if the policy fails to choose a sub-cluster */ protected abstract SubClusterId chooseSubCluster(String queue, Map<SubClusterId, SubClusterInfo> preSelectSubClusters) throws YarnException; /** * Filter chosen SubCluster based on reservationId. * * @param reservationId the globally unique identifier for a reservation. * @param activeSubClusters the map of ids to info for all active subclusters. 
* @return the chosen sub-cluster * @throws YarnException if the policy fails to choose a sub-cluster */ protected Map<SubClusterId, SubClusterInfo> prefilterSubClusters( ReservationId reservationId, Map<SubClusterId, SubClusterInfo> activeSubClusters) throws YarnException { // if a reservation exists limit scope to the sub-cluster this // reservation is mapped to if (reservationId != null) { // note this might throw YarnException if the reservation is // unknown. This is to be expected, and should be handled by // policy invoker. FederationStateStoreFacade stateStoreFacade = getPolicyContext().getFederationStateStoreFacade(); SubClusterId resSubCluster = stateStoreFacade.getReservationHomeSubCluster(reservationId); SubClusterInfo subClusterInfo = activeSubClusters.get(resSubCluster); return Collections.singletonMap(resSubCluster, subClusterInfo); } return activeSubClusters; } /** * Simply picks from alphabetically-sorted active subclusters based on the * hash of query name. Jobs of the same queue will all be routed to the same * sub-cluster, as far as the number of active sub-cluster and their names * remain the same. * * @param appContext the {@link ApplicationSubmissionContext} that * has to be routed to an appropriate subCluster for execution. * * @param blackLists the list of subClusters as identified by * {@link SubClusterId} to blackList from the selection of the home * subCluster. * * @return a hash-based chosen {@link SubClusterId} that will be the "home" * for this application. * * @throws YarnException if there are no active subclusters. 
*/ @Override public SubClusterId getHomeSubcluster(ApplicationSubmissionContext appContext, List<SubClusterId> blackLists) throws YarnException { // null checks and default-queue behavior validate(appContext); // apply filtering based on reservation location and active sub-clusters Map<SubClusterId, SubClusterInfo> filteredSubClusters = prefilterSubClusters( appContext.getReservationID(), getActiveSubclusters()); FederationPolicyUtils.validateSubClusterAvailability(filteredSubClusters.keySet(), blackLists); // remove black SubCluster if (blackLists != null) { blackLists.forEach(filteredSubClusters::remove); } // pick the chosen subCluster from the active ones return chooseSubCluster(appContext.getQueue(), filteredSubClusters); } /** * This method provides a wrapper of all policy functionalities for routing a * reservation. Internally it manages configuration changes, and policy * init/reinit. * * @param request the reservation to route. * * @return the id of the subcluster that will be the "home" for this * reservation. * * @throws YarnException if there are issues initializing policies, or no * valid sub-cluster id could be found for this reservation. */ @Override public SubClusterId getReservationHomeSubcluster(ReservationSubmissionRequest request) throws YarnException { if (request == null) { throw new FederationPolicyException("The ReservationSubmissionRequest cannot be null."); } if (request.getQueue() == null) { request.setQueue(YarnConfiguration.DEFAULT_QUEUE_NAME); } // apply filtering based on reservation location and active sub-clusters Map<SubClusterId, SubClusterInfo> filteredSubClusters = getActiveSubclusters(); // pick the chosen subCluster from the active ones return chooseSubCluster(request.getQueue(), filteredSubClusters); } }
AbstractRouterPolicy
java
redisson__redisson
redisson/src/test/java/org/redisson/BaseMapTest.java
{ "start": 1192, "end": 2402 }
class ____ implements Serializable { private String key; public SimpleKey() { } public SimpleKey(String field) { this.key = field; } public String getKey() { return key; } public void setKey(String key) { this.key = key; } @Override public String toString() { return "key: " + key; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((key == null) ? 0 : key.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; SimpleKey other = (SimpleKey) obj; if (key == null) { if (other.key != null) return false; } else if (!key.equals(other.key)) return false; return true; } } public static
SimpleKey
java
alibaba__nacos
common/src/test/java/com/alibaba/nacos/common/model/RequestHttpEntityTest.java
{ "start": 1296, "end": 3428 }
class ____ { Header header; Query query; HttpClientConfig clientConfig; Object body; @BeforeEach void setUp() throws Exception { header = Header.newInstance(); header.addParam("testHeader", "test"); query = Query.newInstance(); query.addParam("testQuery", "test"); clientConfig = HttpClientConfig.builder().build(); body = new HashMap<>(); } @Test void testConstructWithoutConfigAndBody() { RequestHttpEntity entity = new RequestHttpEntity(header, query); assertTrue(entity.isEmptyBody()); assertNull(entity.getHttpClientConfig()); assertNull(entity.getBody()); assertEquals(header.toString(), entity.getHeaders().toString()); assertEquals(query.toString(), entity.getQuery().toString()); } @Test void testConstructWithoutConfigAndQuery() { RequestHttpEntity entity = new RequestHttpEntity(header, body); assertFalse(entity.isEmptyBody()); assertNull(entity.getHttpClientConfig()); assertNull(entity.getQuery()); assertEquals(header.toString(), entity.getHeaders().toString()); assertEquals(body, entity.getBody()); } @Test void testConstructWithoutConfig() { RequestHttpEntity entity = new RequestHttpEntity(header, query, body); assertFalse(entity.isEmptyBody()); assertNull(entity.getHttpClientConfig()); assertEquals(query.toString(), entity.getQuery().toString()); assertEquals(header.toString(), entity.getHeaders().toString()); assertEquals(body, entity.getBody()); } @Test void testConstructFull() { RequestHttpEntity entity = new RequestHttpEntity(clientConfig, header, query, body); assertFalse(entity.isEmptyBody()); assertEquals(clientConfig, entity.getHttpClientConfig()); assertEquals(query.toString(), entity.getQuery().toString()); assertEquals(header.toString(), entity.getHeaders().toString()); assertEquals(body, entity.getBody()); } }
RequestHttpEntityTest
java
elastic__elasticsearch
libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAlwaysAllowedIT.java
{ "start": 756, "end": 1374 }
class ____ extends AbstractEntitlementsIT { @ClassRule public static EntitlementsTestRule testRule = new EntitlementsTestRule(true, null); public EntitlementsAlwaysAllowedIT(@Name("actionName") String actionName) { super(actionName, true); } @ParametersFactory public static Iterable<Object[]> data() { return RestEntitlementsCheckAction.getAlwaysAllowedCheckActions().stream().map(action -> new Object[] { action }).toList(); } @Override protected String getTestRestCluster() { return testRule.cluster.getHttpAddresses(); } }
EntitlementsAlwaysAllowedIT
java
spring-projects__spring-framework
spring-web/src/main/java/org/springframework/http/converter/cbor/JacksonCborHttpMessageConverter.java
{ "start": 1599, "end": 1774 }
class ____ of the JSON view as value.</li> * <li>A filter provider with a <code>"tools.jackson.databind.ser.FilterProvider"</code> * key and the filter provider
name
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/notfound/IsNullAndNotFoundTest.java
{ "start": 10101, "end": 10748 }
class ____ { @Id private Integer id; private String name; @OneToOne @NotFound(action = NotFoundAction.IGNORE) private Account account; @OneToOne(fetch = FetchType.LAZY) private Account lazyAccount; Person() { } public Person(Integer id, String name, Account account) { this.id = id; this.name = name; this.account = account; } public Integer getId() { return id; } public String getName() { return name; } public Account getAccount() { return account; } } @SuppressWarnings({"FieldCanBeLocal", "unused"}) @Entity(name = "Account") @Table(name = "ACCOUNT_TABLE") public static
Person
java
apache__maven
compat/maven-compat/src/main/java/org/apache/maven/repository/metadata/DefaultClasspathTransformation.java
{ "start": 1187, "end": 2751 }
class ____ implements ClasspathTransformation { @Inject GraphConflictResolver conflictResolver; // ---------------------------------------------------------------------------------------------------- @Override public ClasspathContainer transform(MetadataGraph dirtyGraph, ArtifactScopeEnum scope, boolean resolve) throws MetadataGraphTransformationException { try { if (dirtyGraph == null || dirtyGraph.isEmpty()) { return null; } MetadataGraph cleanGraph = conflictResolver.resolveConflicts(dirtyGraph, scope); if (cleanGraph == null || cleanGraph.isEmpty()) { return null; } ClasspathContainer cpc = new ClasspathContainer(scope); if (cleanGraph.isEmptyEdges()) { // single entry in the classpath, populated from itself ArtifactMetadata amd = cleanGraph.getEntry().getMd(); cpc.add(amd); } else { ClasspathGraphVisitor v = new ClasspathGraphVisitor(cleanGraph, cpc); MetadataGraphVertex entry = cleanGraph.getEntry(); // entry point v.visit(entry); } return cpc; } catch (GraphConflictResolutionException e) { throw new MetadataGraphTransformationException(e); } } // =================================================================================================== /** * Helper
DefaultClasspathTransformation
java
apache__maven
compat/maven-plugin-api/src/main/java/org/apache/maven/plugin/AbstractMojo.java
{ "start": 6111, "end": 7587 }
class ____ implements Mojo, ContextEnabled { /** Instance logger */ private Log log; /** Plugin container context */ private Map pluginContext; /** * @deprecated Use SLF4J directly */ @Deprecated @Override public void setLog(Log log) { this.log = log; } /** * <p> * Returns the logger that has been injected into this mojo. If no logger has been set up yet, a * <code>SystemStreamLog</code> logger will be created and returned. * </p> * <strong>Note:</strong> * The logger returned by this method must not be cached in an instance field during the construction of the mojo. * This would cause the mojo to use a wrongly configured default logger when being run by Maven. The proper logger * gets injected by the Plexus container <em>after</em> the mojo has been constructed. Therefore, simply call this * method directly whenever you need the logger, it is fast enough and needs no caching. * * @see org.apache.maven.plugin.Mojo#getLog() * @deprecated Use SLF4J directly */ @Deprecated @Override public Log getLog() { if (log == null) { log = new SystemStreamLog(); } return log; } @Override public Map getPluginContext() { return pluginContext; } @Override public void setPluginContext(Map pluginContext) { this.pluginContext = pluginContext; } }
AbstractMojo
java
quarkusio__quarkus
extensions/hibernate-envers/deployment/src/test/java/io/quarkus/hibernate/orm/envers/EnversFastBootingTest.java
{ "start": 493, "end": 1247 }
class ____ { private static final ClassLoaderLimiter limitsChecker = ClassLoaderLimiter.builder() .neverLoadedResource("org/hibernate/jpa/orm_2_1.xsd") .neverLoadedResource("org/hibernate/jpa/orm_2_2.xsd") .build(); @RegisterExtension static QuarkusUnitTest runner = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClass(MyAuditedEntity.class)) .withConfigurationResource("application.properties") .addClassLoaderEventListener(limitsChecker); @Inject Session session; @Test public void testInjection() { //Check that Hibernate actually started: Assert.assertNotNull(session); } }
EnversFastBootingTest
java
mockito__mockito
mockito-core/src/test/java/org/mockito/internal/util/DefaultMockingDetailsTest.java
{ "start": 1074, "end": 5099 }
class ____ { @Mock private Foo foo; @Mock private Bar bar; @Mock private IMethods mock; @Spy private Gork gork; @Before public void before() { MockitoAnnotations.openMocks(this); } @Test public void should_provide_original_mock() throws Exception { // expect assertEquals(mockingDetails(foo).getMock(), foo); assertEquals(mockingDetails(null).getMock(), null); } @Test public void should_know_spy() { assertTrue(mockingDetails(gork).isMock()); assertTrue(mockingDetails(spy(new Gork())).isMock()); assertTrue(mockingDetails(spy(Gork.class)).isMock()); assertTrue( mockingDetails( mock( Gork.class, withSettings().defaultAnswer(Mockito.CALLS_REAL_METHODS))) .isMock()); } @Test public void should_know_mock() { assertTrue(mockingDetails(foo).isMock()); assertTrue(mockingDetails(mock(Foo.class)).isMock()); assertFalse(mockingDetails(foo).isSpy()); assertFalse(mockingDetails(mock(Foo.class)).isSpy()); } @Test public void should_handle_non_mocks() { assertFalse(mockingDetails("non mock").isSpy()); assertFalse(mockingDetails("non mock").isMock()); assertFalse(mockingDetails(null).isSpy()); assertFalse(mockingDetails(null).isMock()); } @Test public void should_check_that_a_spy_is_also_a_mock() throws Exception { assertEquals(true, mockingDetails(gork).isMock()); } @Test public void provides_invocations() { // when mock.simpleMethod(10); mock.otherMethod(); // then assertEquals(0, mockingDetails(foo).getInvocations().size()); assertEquals( "[mock.simpleMethod(10);, mock.otherMethod();]", mockingDetails(mock).getInvocations().toString()); } @Test public void manipulating_invocations_is_safe() { mock.simpleMethod(); // when we manipulate the invocations mockingDetails(mock).getInvocations().clear(); // then we didn't actually changed the invocations assertEquals(1, mockingDetails(mock).getInvocations().size()); } @Test public void provides_mock_creation_settings() { // smoke test some creation settings assertEquals(Foo.class, 
mockingDetails(foo).getMockCreationSettings().getTypeToMock()); assertEquals(Bar.class, mockingDetails(bar).getMockCreationSettings().getTypeToMock()); assertEquals(0, mockingDetails(mock).getMockCreationSettings().getExtraInterfaces().size()); } @Test public void fails_when_getting_creation_settings_for_incorrect_input() { assertThatThrownBy( () -> { mockingDetails(null).getMockCreationSettings(); }) .isInstanceOf(NotAMockException.class) .hasMessage( "Argument passed to Mockito.mockingDetails() should be a mock, but is null!"); } @Test public void fails_when_getting_invocations_when_null() { try { // when mockingDetails(null).getInvocations(); // then fail(); } catch (NotAMockException e) { assertEquals( "Argument passed to Mockito.mockingDetails() should be a mock, but is null!", e.getMessage()); } } @Test public void fails_when_getting_invocations_when_not_mock() { try { // when mockingDetails(new Object()).getInvocations(); // then fail(); } catch (NotAMockException e) { assertEquals( "Argument passed to Mockito.mockingDetails() should be a mock, but is an instance of
DefaultMockingDetailsTest
java
apache__camel
components/camel-activemq/src/main/java/org/apache/camel/component/activemq/ActiveMQSendDynamicAware.java
{ "start": 994, "end": 1058 }
/**
 * ActiveMQ variant of the JMS send-dynamic-aware optimisation.
 *
 * <p>All behaviour is inherited unchanged from {@link JmsSendDynamicAware};
 * NOTE(review): presumably this empty subtype exists only so the ActiveMQ
 * component can register its own instance under its scheme — confirm against
 * the component's service registration.
 */
class ____ extends JmsSendDynamicAware {
}
ActiveMQSendDynamicAware
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/serializer/NotWriteDefaultValueTest_NoneASM.java
{ "start": 3354, "end": 3731 }
// Simple mutable JavaBean-style value object exposing two primitive float
// properties (f0, f1) via standard getters/setters; both default to 0.0f.
// NOTE(review): the trailing "private static" tokens on this line are a
// truncated fragment of an adjacent definition cut off by the chunk boundary;
// they are left byte-identical on purpose.
class ____ { private float f0; private float f1; public float getF0() { return f0; } public void setF0(float f0) { this.f0 = f0; } public float getF1() { return f1; } public void setF1(float f1) { this.f1 = f1; } } private static
VO_Float
java
quarkusio__quarkus
test-framework/junit5-component/src/test/java/io/quarkus/test/component/declarative/DeclarativeUnsetConfigurationPropertiesTest.java
{ "start": 636, "end": 959 }
class ____ { @Inject Component component; @Test public void testComponent() { assertNull(component.foo); assertFalse(component.bar); assertEquals(0, component.baz); assertNull(component.bazzz); } @Singleton public static
DeclarativeUnsetConfigurationPropertiesTest
java
apache__flink
flink-core/src/test/java/org/apache/flink/api/common/typeutils/TypeSerializerSnapshotTest.java
{ "start": 2364, "end": 4075 }
// Stub TypeSerializer<Integer> used as a test fixture:
// - immutable type, fixed length 1, duplicate() returns this (stateless);
// - serialize() is a no-op, deserialize() always yields 0 (or the reuse value);
// - equals() is always false and hashCode() is 0, so no two instances (or even
//   the same instance) compare equal — intentional for compatibility testing;
// - snapshotConfiguration() returns an anonymous NotCompletedTypeSerializerSnapshot
//   (declared elsewhere in this file) whose restoreSerializer() hands back this
//   enclosing serializer instance.
// NOTE(review): the trailing "private static" tokens are a truncated fragment of
// the next definition, cut off by the chunk boundary; left byte-identical.
class ____ extends TypeSerializer<Integer> { @Override public boolean isImmutableType() { return true; } @Override public TypeSerializer<Integer> duplicate() { return this; } @Override public Integer createInstance() { return 0; } @Override public Integer copy(Integer from) { return from; } @Override public Integer copy(Integer from, Integer reuse) { return from; } @Override public int getLength() { return 1; } @Override public void serialize(Integer record, DataOutputView target) { // do nothing } @Override public Integer deserialize(DataInputView source) { return 0; } @Override public Integer deserialize(Integer reuse, DataInputView source) { return reuse; } @Override public void copy(DataInputView source, DataOutputView target) { // do nothing } @Override public boolean equals(Object obj) { return false; } @Override public int hashCode() { return 0; } @Override public TypeSerializerSnapshot<Integer> snapshotConfiguration() { return new NotCompletedTypeSerializerSnapshot() { @Override public TypeSerializer<Integer> restoreSerializer() { return NotCompletedTypeSerializer.this; } }; } } private static
NotCompletedTypeSerializer
java
elastic__elasticsearch
x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/DatabaseFunctionTests.java
{ "start": 1117, "end": 2384 }
/**
 * Verifies that the SQL {@code DATABASE()} scalar function folds to the cluster
 * name supplied through the {@link SqlConfiguration}.
 */
class ____ extends ESTestCase {

    public void testDatabaseFunctionOutput() {
        // The cluster name we expect DATABASE() to surface.
        String expectedClusterName = randomAlphaOfLengthBetween(1, 15);
        SqlParser sqlParser = new SqlParser();
        EsIndex testIndex = new EsIndex("test", SqlTypesTests.loadMapping("mapping-basic.json", true));
        // Only the cluster-name argument matters here; everything else is a
        // default, a null, or a randomized don't-care value.
        SqlConfiguration configuration = new SqlConfiguration(
            DateUtils.UTC,
            null,
            Protocol.FETCH_SIZE,
            Protocol.REQUEST_TIMEOUT,
            Protocol.PAGE_TIMEOUT,
            null,
            null,
            randomFrom(Mode.values()),
            randomAlphaOfLength(10),
            null,
            null,
            expectedClusterName,
            randomBoolean(),
            randomBoolean(),
            null,
            null,
            randomBoolean(),
            null
        );
        Analyzer analyzer = analyzer(configuration, IndexResolution.valid(testIndex));

        // Analyze "SELECT DATABASE()" and drill into the single projection:
        // it must be an Alias wrapping a Database node that folds to the name.
        Project analyzed = (Project) analyzer.analyze(sqlParser.createStatement("SELECT DATABASE()"), true);
        NamedExpression projection = analyzed.projections().get(0);
        assertTrue(projection instanceof Alias);
        assertTrue(((Alias) projection).child() instanceof Database);
        assertEquals(expectedClusterName, ((Database) ((Alias) projection).child()).fold());
    }
}
DatabaseFunctionTests
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/state/metrics/MetricsTrackingValueState.java
{ "start": 1266, "end": 4479 }
// Decorator around an InternalValueState that records metrics on value() and
// update() calls, delegating the actual state access to `original`:
// - latency tracking and size tracking are each optional: the constructor builds
//   a ValueStateMetrics instance only when the corresponding config is enabled,
//   otherwise passes null to the superclass;
// - value(): measures get latency via trackLatencyWithIOException when the
//   latency metric says trackMetricsOnGet(), then (independently) records key
//   and value sizes when the size metric says trackMetricsOnGet();
// - update(): records key/value sizes BEFORE the write when enabled, then either
//   measures the write latency or delegates directly.
// NOTE(review): sampling cadence (trackMetricsOnGet/OnUpdate) is decided by the
// metric objects themselves — behaviour not visible from this chunk.
// NOTE(review): the trailing "static" token is a truncated fragment of the next
// definition, cut off by the chunk boundary; left byte-identical.
class ____<K, N, T> extends AbstractMetricsTrackState< K, N, T, InternalValueState<K, N, T>, MetricsTrackingValueState.ValueStateMetrics> implements InternalValueState<K, N, T> { public MetricsTrackingValueState( String stateName, InternalValueState<K, N, T> original, KeyedStateBackend<K> keyedStateBackend, LatencyTrackingStateConfig latencyTrackingStateConfig, SizeTrackingStateConfig sizeTrackingStateConfig) { super( original, keyedStateBackend, latencyTrackingStateConfig.isEnabled() ? new ValueStateMetrics( stateName, latencyTrackingStateConfig.getMetricGroup(), latencyTrackingStateConfig.getSampleInterval(), latencyTrackingStateConfig.getHistorySize(), latencyTrackingStateConfig.isStateNameAsVariable()) : null, sizeTrackingStateConfig.isEnabled() ? new ValueStateMetrics( stateName, sizeTrackingStateConfig.getMetricGroup(), sizeTrackingStateConfig.getSampleInterval(), sizeTrackingStateConfig.getHistorySize(), sizeTrackingStateConfig.isStateNameAsVariable()) : null); } @Override public T value() throws IOException { T result; if (latencyTrackingStateMetric != null && latencyTrackingStateMetric.trackMetricsOnGet()) { result = trackLatencyWithIOException( () -> original.value(), ValueStateMetrics.VALUE_STATE_GET_LATENCY); } else { result = original.value(); } if (sizeTrackingStateMetric != null && sizeTrackingStateMetric.trackMetricsOnGet()) { sizeTrackingStateMetric.updateMetrics( ValueStateMetrics.VALUE_STATE_GET_KEY_SIZE, super.sizeOfKey()); sizeTrackingStateMetric.updateMetrics( ValueStateMetrics.VALUE_STATE_GET_VALUE_SIZE, super.sizeOfValue(result)); } return result; } @Override public void update(T value) throws IOException { if (sizeTrackingStateMetric != null && sizeTrackingStateMetric.trackMetricsOnUpdate()) { sizeTrackingStateMetric.updateMetrics( ValueStateMetrics.VALUE_STATE_UPDATE_KEY_SIZE, super.sizeOfKey()); sizeTrackingStateMetric.updateMetrics( ValueStateMetrics.VALUE_STATE_UPDATE_VALUE_SIZE, super.sizeOfValue(value)); } if 
(latencyTrackingStateMetric != null && latencyTrackingStateMetric.trackMetricsOnUpdate()) { trackLatencyWithIOException( () -> original.update(value), ValueStateMetrics.VALUE_STATE_UPDATE_LATENCY); } else { original.update(value); } } static
MetricsTrackingValueState