language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
alibaba__druid
core/src/main/java/com/alibaba/druid/sql/SQLUtils.java
{ "start": 84207, "end": 93294 }
class ____ { private final Token token; private final String stringVal; private final int pos; public TokenInfo(Token token, String stringVal, int pos) { this.token = token; this.stringVal = stringVal; this.pos = pos; } public Token getToken() { return token; } public String getStringVal() { return stringVal; } public int getPos() { return pos; } @Override public String toString() { if (stringVal == null) { return token + "@" + pos; } return token + "(" + stringVal + ")@" + pos; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } TokenInfo tokenInfo = (TokenInfo) o; if (token != tokenInfo.token) { return false; } if (pos != tokenInfo.pos) { return false; } return stringVal != null ? stringVal.equals(tokenInfo.stringVal) : tokenInfo.stringVal == null; } @Override public int hashCode() { int result = token != null ? token.hashCode() : 0; result = 31 * result + (stringVal != null ? stringVal.hashCode() : 0); result = 31 * result + pos; return result; } } /** * Get all tokens from SQL parsing based on dialect type, including their string values * * @param sql SQL statement to parse * @param dbType Database type (dialect) * @return List of TokenInfo objects containing token and corresponding stringVal pairs (excluding comments) */ public static List<TokenInfo> getAllTokens(String sql, DbType dbType) { return getAllTokens(sql, dbType, false); } /** * Get all tokens from SQL parsing based on dialect type, including their string values * * @param sql SQL statement to parse * @param dbType Database type (dialect) * @param keepComments Whether to keep comment tokens (LINE_COMMENT, MULTI_LINE_COMMENT, HINT) * @return List of TokenInfo objects containing token and corresponding stringVal pairs */ public static List<TokenInfo> getAllTokens(String sql, DbType dbType, boolean keepComments) { if (sql == null || sql.isEmpty()) { return new ArrayList<TokenInfo>(); } List<TokenInfo> tokens = new 
ArrayList<TokenInfo>(); Lexer lexer = SQLParserUtils.createLexer(sql, dbType); // Configure lexer to keep or skip comments lexer.config(SQLParserFeature.SkipComments, !keepComments); lexer.config(SQLParserFeature.KeepComments, keepComments); for (; ; ) { lexer.nextToken(); Token token = lexer.token(); // Filter out comment tokens if keepComments is false if (!keepComments && (token == Token.LINE_COMMENT || token == Token.MULTI_LINE_COMMENT || token == Token.HINT)) { continue; } // Create and add token info tokens.add(createTokenInfo(token, lexer)); // Exit on EOF or ERROR if (token == Token.EOF || token == Token.ERROR) { break; } } return tokens; } /** * Create a TokenInfo object from the current lexer state */ private static TokenInfo createTokenInfo(Token token, Lexer lexer) { int pos = lexer.pos(); String stringVal; // Get stringVal - use numberString() for numeric literals, stringVal() for others if (token == Token.LITERAL_INT || token == Token.LITERAL_FLOAT || token == Token.LITERAL_HEX) { stringVal = lexer.numberString(); } else { stringVal = lexer.stringVal(); } // Create a defensive copy to prevent mutation if (stringVal != null) { stringVal = new String(stringVal.toCharArray()); } return new TokenInfo(token, stringVal, pos); } /** * Get all tokens from SQL parsing based on dialect type (overloaded method with String dbType) * * @param sql SQL statement to parse * @param dbType Database type string (will be converted to DbType) * @return List of TokenInfo objects containing token and corresponding stringVal pairs */ public static List<TokenInfo> getAllTokens(String sql, String dbType) { return getAllTokens(sql, DbType.of(dbType), false); } /** * Get all tokens from SQL parsing based on dialect type (overloaded method with String dbType) * * @param sql SQL statement to parse * @param dbType Database type string (will be converted to DbType) * @param keepComments Whether to keep comment tokens (LINE_COMMENT, MULTI_LINE_COMMENT, HINT) * @return List of 
TokenInfo objects containing token and corresponding stringVal pairs */ public static List<TokenInfo> getAllTokens(String sql, String dbType, boolean keepComments) { return getAllTokens(sql, DbType.of(dbType), keepComments); } /** * Calculate Levenshtein distance between two SQL statements based on their token sequences. * For IDENTIFIER, PROPERTY, and LITERAL_INT tokens, the stringVal must also match for tokens to be considered equal. * * @param sql1 First SQL statement * @param dbType1 Database type for first SQL * @param sql2 Second SQL statement * @param dbType2 Database type for second SQL * @return Levenshtein distance between the two token sequences */ public static int calculateTokenLevenshteinDistance(String sql1, DbType dbType1, String sql2, DbType dbType2) { // Step 1: Parse both SQLs and get their token lists List<TokenInfo> tokens1 = getAllTokens(sql1, dbType1); List<TokenInfo> tokens2 = getAllTokens(sql2, dbType2); // Step 2: Calculate Levenshtein distance based on tokens return calculateLevenshteinDistance(tokens1, tokens2); } /** * Calculate Levenshtein distance between two token sequences */ private static int calculateLevenshteinDistance(List<TokenInfo> tokens1, List<TokenInfo> tokens2) { int len1 = tokens1.size(); int len2 = tokens2.size(); // Create DP matrix int[][] dp = new int[len1 + 1][len2 + 1]; // Initialize base cases for (int i = 0; i <= len1; i++) { dp[i][0] = i; } for (int j = 0; j <= len2; j++) { dp[0][j] = j; } // Fill DP matrix for (int i = 1; i <= len1; i++) { for (int j = 1; j <= len2; j++) { TokenInfo tokenInfo1 = tokens1.get(i - 1); TokenInfo tokenInfo2 = tokens2.get(j - 1); int cost; if (areTokensEqual(tokenInfo1, tokenInfo2)) { cost = 0; // Tokens are equal } else { cost = 1; // Tokens are different } dp[i][j] = Math.min( Math.min( dp[i - 1][j] + 1, // Deletion dp[i][j - 1] + 1 // Insertion ), dp[i - 1][j - 1] + cost // Substitution ); } } return dp[len1][len2]; } /** * Check if two tokens are equal. 
* For IDENTIFIER, PROPERTY, and LITERAL_INT tokens, stringVal must also match. */ private static boolean areTokensEqual( TokenInfo tokenInfo1, TokenInfo tokenInfo2 ) { // First check if token types are the same if (tokenInfo1.getToken() != tokenInfo2.getToken()) { return false; } // For IDENTIFIER, PROPERTY, and LITERAL_INT tokens, check stringVal if (tokenInfo1.getToken() == Token.IDENTIFIER || tokenInfo1.getToken() == Token.LITERAL_INT || tokenInfo1.getToken() == Token.LITERAL_FLOAT || tokenInfo1.getToken() == Token.LITERAL_CHARS || tokenInfo1.getToken() == Token.LITERAL_NCHARS || tokenInfo1.getToken() == Token.LITERAL_HEX) { String stringVal1 = tokenInfo1.getStringVal(); String stringVal2 = tokenInfo2.getStringVal(); // Both should have string values if (stringVal1 == null && stringVal2 == null) { return true; } if (stringVal1 == null || stringVal2 == null) { return false; } return stringVal1.equals(stringVal2); } // For other token types, just matching the token type is enough return true; } /** * Helper
TokenInfo
java
netty__netty
testsuite/src/main/java/io/netty/testsuite/transport/TestsuitePermutation.java
{ "start": 1719, "end": 2220 }
interface ____<SB extends AbstractBootstrap<?, ?>, CB extends AbstractBootstrap<?, ?>> { SB newServerInstance(); CB newClientInstance(); } public static ByteBuf randomBufferType(ByteBufAllocator allocator, byte[] data, int offset, int length) { if (ThreadLocalRandom.current().nextBoolean()) { return allocator.directBuffer().writeBytes(data, offset, length); } return Unpooled.wrappedBuffer(data, offset, length); } }
BootstrapComboFactory
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/entitygraph/FindGraphCollectionOrderByTest.java
{ "start": 5067, "end": 5576 }
class ____ { @Id Long id; @ManyToOne( fetch = FetchType.LAZY ) @JoinColumn( name = "parent_id" ) private Level2 parent; public Level3() { } public Level3(Level2 parent, Long id) { this.parent = parent; this.id = id; parent.getChildren().add( this ); } public Long getId() { return id; } public void setId(Long id) { this.id = id; } public Level2 getParent() { return parent; } public void setParent(Level2 parent) { this.parent = parent; } } }
Level3
java
alibaba__druid
core/src/main/java/com/alibaba/druid/sql/dialect/oracle/ast/clause/ModelClause.java
{ "start": 6056, "end": 6584 }
class ____ extends OracleSQLObjectImpl { private List<SQLExpr> exprList = new ArrayList<SQLExpr>(); public List<SQLExpr> getExprList() { return exprList; } public void setExprList(List<SQLExpr> exprList) { this.exprList = exprList; } @Override public void accept0(OracleASTVisitor visitor) { if (visitor.visit(this)) { acceptChild(visitor, exprList); } } } public static
QueryPartitionClause
java
hibernate__hibernate-orm
tooling/metamodel-generator/src/jakartaData/java/org/hibernate/processor/test/data/generic/MyMappedSuperclass.java
{ "start": 278, "end": 354 }
class ____<ID extends Serializable> { @Id private ID id; }
MyMappedSuperclass
java
apache__avro
lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java
{ "start": 3619, "end": 5198 }
class ____ extends Reducer<Text, IntWritable, AvroKey<GenericData.Record>, NullWritable> { private AvroKey<GenericData.Record> mStats; private AvroMultipleOutputs amos; @Override protected void setup(Context context) { mStats = new AvroKey<>(null); amos = new AvroMultipleOutputs(context); } @Override protected void reduce(Text line, Iterable<IntWritable> counts, Context context) throws IOException, InterruptedException { GenericData.Record record = new GenericData.Record(STATS_SCHEMA); GenericData.Record record2 = new GenericData.Record(STATS_SCHEMA_2); int sum = 0; for (IntWritable count : counts) { sum += count.get(); } record.put("name", new Utf8(line.toString())); record.put("count", sum); mStats.datum(record); context.write(mStats, NullWritable.get()); amos.sync("myavro", "myavro"); amos.write("myavro", mStats, NullWritable.get()); record2.put("name1", new Utf8(line.toString())); record2.put("count1", sum); mStats.datum(record2); amos.write(mStats, NullWritable.get(), STATS_SCHEMA_2, null, "testnewwrite2"); amos.sync("myavro1", "myavro1"); amos.write("myavro1", mStats); amos.write(mStats, NullWritable.get(), STATS_SCHEMA, null, "testnewwrite"); amos.write(mStats, NullWritable.get(), "testwritenonschema"); } @Override protected void cleanup(Context context) throws IOException, InterruptedException { amos.close(); } } private static
GenericStatsReducer
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/query/criteria/CriteriaMutationQueryFkValuesTest.java
{ "start": 3773, "end": 4279 }
class ____ { @Id private Long id; @ManyToOne( cascade = CascadeType.PERSIST ) @JoinColumn( name = "a_id" ) private A a; @ManyToOne( cascade = CascadeType.PERSIST ) @JoinColumn( name = "b_id" ) private B b; @ManyToOne( cascade = CascadeType.PERSIST ) @JoinColumn( name = "c_id" ) private C c; public DemoEntity() { } public DemoEntity(Long id, A a, B b, C c) { this.id = id; this.a = a; this.b = b; this.c = c; } } @Entity( name = "AEntity" ) static
DemoEntity
java
google__dagger
javatests/dagger/internal/codegen/InjectConstructorFactoryGeneratorTest.java
{ "start": 59720, "end": 59784 }
interface ____ {", " @Qualifier", " @
Outer
java
spring-projects__spring-framework
spring-test/src/test/java/org/springframework/test/context/aot/TestContextAotGeneratorTests.java
{ "start": 2940, "end": 3498 }
class ____ extends TestContextAotGenerator { List<GenericApplicationContext> contexts = new ArrayList<>(); DemoTestContextAotGenerator(GeneratedFiles generatedFiles) { super(generatedFiles); } @Override GenericApplicationContext loadContextForAotProcessing( MergedContextConfiguration mergedConfig) throws TestContextAotException { GenericApplicationContext context = super.loadContextForAotProcessing(mergedConfig); this.contexts.add(context); return context; } } @SpringJUnitConfig private static
DemoTestContextAotGenerator
java
ReactiveX__RxJava
src/main/java/io/reactivex/rxjava3/internal/operators/parallel/ParallelFilter.java
{ "start": 1174, "end": 2364 }
class ____<T> extends ParallelFlowable<T> { final ParallelFlowable<T> source; final Predicate<? super T> predicate; public ParallelFilter(ParallelFlowable<T> source, Predicate<? super T> predicate) { this.source = source; this.predicate = predicate; } @Override public void subscribe(Subscriber<? super T>[] subscribers) { subscribers = RxJavaPlugins.onSubscribe(this, subscribers); if (!validate(subscribers)) { return; } int n = subscribers.length; @SuppressWarnings("unchecked") Subscriber<? super T>[] parents = new Subscriber[n]; for (int i = 0; i < n; i++) { Subscriber<? super T> a = subscribers[i]; if (a instanceof ConditionalSubscriber) { parents[i] = new ParallelFilterConditionalSubscriber<>((ConditionalSubscriber<? super T>)a, predicate); } else { parents[i] = new ParallelFilterSubscriber<>(a, predicate); } } source.subscribe(parents); } @Override public int parallelism() { return source.parallelism(); } abstract static
ParallelFilter
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/javadoc/UnescapedEntityTest.java
{ "start": 1734, "end": 1987 }
interface ____ {} """) .doTest(TestMode.AST_MATCH); } @Test public void negative() { helper .addSourceLines( "Test.java", """ /** {@code List<Foo>, Map<Foo, Bar>} */
Test
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/streaming/runtime/io/checkpointing/ChannelState.java
{ "start": 1450, "end": 3914 }
class ____ { private final Map<InputChannelInfo, Integer> sequenceNumberInAnnouncedChannels = new HashMap<>(); /** * {@link #blockedChannels} are the ones for which we have already processed {@link * CheckpointBarrier}. {@link #sequenceNumberInAnnouncedChannels} on the other hand, are the * ones that we have processed announcement but not yet a barrier. */ private final Set<InputChannelInfo> blockedChannels = new HashSet<>(); private final CheckpointableInput[] inputs; public ChannelState(CheckpointableInput[] inputs) { this.inputs = inputs; } public void blockChannel(InputChannelInfo channelInfo) { inputs[channelInfo.getGateIdx()].blockConsumption(channelInfo); blockedChannels.add(channelInfo); } public void channelFinished(InputChannelInfo channelInfo) { blockedChannels.remove(channelInfo); sequenceNumberInAnnouncedChannels.remove(channelInfo); } public void prioritizeAllAnnouncements() throws IOException { for (Map.Entry<InputChannelInfo, Integer> announcedNumberInChannel : sequenceNumberInAnnouncedChannels.entrySet()) { InputChannelInfo channelInfo = announcedNumberInChannel.getKey(); inputs[channelInfo.getGateIdx()].convertToPriorityEvent( channelInfo.getInputChannelIdx(), announcedNumberInChannel.getValue()); } sequenceNumberInAnnouncedChannels.clear(); } public void unblockAllChannels() throws IOException { for (InputChannelInfo blockedChannel : blockedChannels) { inputs[blockedChannel.getGateIdx()].resumeConsumption(blockedChannel); } blockedChannels.clear(); } public CheckpointableInput[] getInputs() { return inputs; } public void addSeenAnnouncement(InputChannelInfo channelInfo, int sequenceNumber) { this.sequenceNumberInAnnouncedChannels.put(channelInfo, sequenceNumber); } public void removeSeenAnnouncement(InputChannelInfo channelInfo) { this.sequenceNumberInAnnouncedChannels.remove(channelInfo); } public ChannelState emptyState() { checkState( blockedChannels.isEmpty(), "We should not reset to an empty state if there are blocked channels: %s", 
blockedChannels); sequenceNumberInAnnouncedChannels.clear(); return this; } }
ChannelState
java
hibernate__hibernate-orm
hibernate-envers/src/main/java/org/hibernate/envers/internal/entities/EntitiesConfigurations.java
{ "start": 545, "end": 6582 }
class ____ { private Map<String, EntityConfiguration> entitiesConfigurations; private Map<String, EntityConfiguration> notAuditedEntitiesConfigurations; // Map versions entity name -> entity name private Map<String, String> entityNamesForVersionsEntityNames = new HashMap<>(); public EntitiesConfigurations( Map<String, EntityConfiguration> entitiesConfigurations, Map<String, EntityConfiguration> notAuditedEntitiesConfigurations) { this.entitiesConfigurations = entitiesConfigurations; this.notAuditedEntitiesConfigurations = notAuditedEntitiesConfigurations; generateBidirectionRelationInfo(); generateVersionsEntityToEntityNames(); } private void generateVersionsEntityToEntityNames() { entityNamesForVersionsEntityNames = new HashMap<>(); for ( Map.Entry<String, EntityConfiguration> entry : entitiesConfigurations.entrySet() ) { entityNamesForVersionsEntityNames.put( entry.getValue().getVersionsEntityName(), entry.getKey() ); } } private void generateBidirectionRelationInfo() { // Checking each relation if it is bidirectional. If so, storing that information. for ( Map.Entry<String, EntityConfiguration> entry : entitiesConfigurations.entrySet() ) { final String entityName = entry.getKey(); final EntityConfiguration entCfg = entry.getValue(); // Iterating over all relations from that entity for ( RelationDescription relDesc : entCfg.getRelationsIterator() ) { // If this is an "owned" relation, checking the related entity, if it has a relation that has // a mapped-by attribute to the currently checked. If so, this is a bidirectional relation. 
if ( relDesc.getRelationType() == RelationType.TO_ONE || relDesc.getRelationType() == RelationType.TO_MANY_MIDDLE ) { final EntityConfiguration entityConfiguration = entitiesConfigurations.get( relDesc.getToEntityName() ); if ( entityConfiguration != null ) { for ( RelationDescription other : entityConfiguration.getRelationsIterator() ) { if ( relDesc.getFromPropertyName().equals( other.getMappedByPropertyName() ) && (entityName.equals( other.getToEntityName() )) ) { relDesc.setBidirectional( true ); other.setBidirectional( true ); } } } } } } } public EntityConfiguration get(String entityName) { return entitiesConfigurations.get( entityName ); } public EntityConfiguration getNotVersionEntityConfiguration(String entityName) { return notAuditedEntitiesConfigurations.get( entityName ); } public String getEntityNameForVersionsEntityName(String versionsEntityName) { return entityNamesForVersionsEntityNames.get( versionsEntityName ); } public boolean isVersioned(String entityName) { return get( entityName ) != null; } public boolean hasAuditedEntities() { return entitiesConfigurations.size() != 0; } public RelationDescription getRelationDescription(String entityName, String propertyName) { final EntityConfiguration entCfg; if ( isVersioned( entityName ) ) { entCfg = get( entityName ); } else { entCfg = getNotVersionEntityConfiguration( entityName ); } final RelationDescription relDesc = entCfg.getRelationDescription( propertyName ); if ( relDesc != null ) { return relDesc; } else if ( entCfg.getParentEntityName() != null ) { // The field may be declared in a superclass ... 
return getRelationDescription( entCfg.getParentEntityName(), propertyName ); } else { return null; } } private Collection<RelationDescription> getRelationDescriptions(String entityName) { final EntityConfiguration entCfg = entitiesConfigurations.get( entityName ); Collection<RelationDescription> descriptions = new ArrayList<>(); if ( entCfg.getParentEntityName() != null ) { // collect descriptions from super classes descriptions.addAll( getRelationDescriptions( entCfg.getParentEntityName() ) ); } for ( RelationDescription relationDescription : entCfg.getRelationsIterator() ) { descriptions.add( relationDescription ); } return descriptions; } public ComponentDescription getComponentDescription(final String entityName, final String propertyName) { final EntityConfiguration entCfg; if ( isVersioned( entityName ) ) { entCfg = get( entityName ); } else { entCfg = getNotVersionEntityConfiguration( entityName ); } final ComponentDescription relDesc = entCfg.getComponentDescription( propertyName ); if ( relDesc != null ) { return relDesc; } else if ( entCfg.getParentEntityName() != null ) { // The field may be declared in a superclass ... 
return getComponentDescription( entCfg.getParentEntityName(), propertyName ); } else { return null; } } private void addWithParentEntityNames(String entityName, Set<String> entityNames) { entityNames.add( entityName ); final EntityConfiguration entCfg = entitiesConfigurations.get( entityName ); if ( entCfg.getParentEntityName() != null ) { // collect descriptions from super classes addWithParentEntityNames( entCfg.getParentEntityName(), entityNames ); } } private Set<String> getEntityAndParentsNames(String entityName) { final Set<String> names = new HashSet<>(); addWithParentEntityNames( entityName, names ); return names; } public Set<String> getToPropertyNames(String fromEntityName, String fromPropertyName, String toEntityName) { final Set<String> entityAndParentsNames = getEntityAndParentsNames( fromEntityName ); final Set<String> toPropertyNames = new HashSet<>(); for ( RelationDescription relationDescription : getRelationDescriptions( toEntityName ) ) { final String relToEntityName = relationDescription.getToEntityName(); final String mappedByPropertyName = relationDescription.getMappedByPropertyName(); if ( entityAndParentsNames.contains( relToEntityName ) && mappedByPropertyName != null && mappedByPropertyName .equals( fromPropertyName ) ) { toPropertyNames.add( relationDescription.getFromPropertyName() ); } } return toPropertyNames; } }
EntitiesConfigurations
java
spring-projects__spring-framework
spring-test/src/main/java/org/springframework/test/context/TestPropertySource.java
{ "start": 9104, "end": 9755 }
class ____ <em>shadow</em> and effectively * replace any property source locations defined by a superclass or * enclosing class. * <p>In the following example, the {@code ApplicationContext} for * {@code BaseTest} will be loaded using only the {@code "base.properties"} * file as a test property source. In contrast, the {@code ApplicationContext} * for {@code ExtendedTest} will be loaded using the {@code "base.properties"} * <strong>and</strong> {@code "extended.properties"} files as test property * source locations. * <pre class="code"> * &#064;TestPropertySource("base.properties") * &#064;ContextConfiguration * public
will
java
apache__commons-lang
src/main/java/org/apache/commons/lang3/builder/ToStringBuilder.java
{ "start": 31242, "end": 31385 }
class ____ most of the implementation of * its properties to another class. You can then call {@code toString()} on * the other
delegates
java
spring-projects__spring-framework
spring-test/src/test/java/org/springframework/test/context/env/ExplicitPropertiesFileTestPropertySourceTests.java
{ "start": 2120, "end": 2371 }
class ____ extends AbstractExplicitPropertiesFileTests { } @Nested @DisplayName("with absolute path with internal relative paths") @TestPropertySource("/org/../org/springframework/test/../test/context/env/explicit.properties")
AbsolutePathTests
java
apache__spark
launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
{ "start": 9593, "end": 18418 }
class ____."); } if (mainClass != null) { args.add(parser.CLASS); if (isRemote && "1".equals(getenv("SPARK_SCALA_SHELL"))) { args.add("org.apache.spark.sql.application.ConnectRepl"); } else { args.add(mainClass); } } args.addAll(parsedArgs); if (appResource != null) { if (isRemote && "1".equals(getenv("SPARK_SCALA_SHELL"))) { args.add("connect-shell"); } else { args.add(appResource); } } args.addAll(appArgs); return args; } private List<String> buildSparkSubmitCommand(Map<String, String> env) throws IOException, IllegalArgumentException { // Load the properties file and check whether spark-submit will be running the app's driver // or just launching a cluster app. When running the driver, the JVM's argument will be // modified to cover the driver's configuration. Map<String, String> config = getEffectiveConfig(); boolean isClientMode = isClientMode(config); String extraClassPath = isClientMode ? config.get(SparkLauncher.DRIVER_EXTRA_CLASSPATH) : null; String defaultExtraClassPath = config.get(SparkLauncher.DRIVER_DEFAULT_EXTRA_CLASS_PATH); if (extraClassPath == null || extraClassPath.trim().isEmpty()) { extraClassPath = defaultExtraClassPath; } else { extraClassPath += File.pathSeparator + defaultExtraClassPath; } List<String> cmd = buildJavaCommand(extraClassPath); // Take Thrift/Connect Server as daemon if (isThriftServer(mainClass) || isConnectServer(mainClass)) { addOptionString(cmd, System.getenv("SPARK_DAEMON_JAVA_OPTS")); } addOptionString(cmd, System.getenv("SPARK_SUBMIT_OPTS")); // We don't want the client to specify Xmx. 
These have to be set by their corresponding // memory flag --driver-memory or configuration entry spark.driver.memory String driverDefaultJavaOptions = config.get(SparkLauncher.DRIVER_DEFAULT_JAVA_OPTIONS); checkJavaOptions(driverDefaultJavaOptions); String driverExtraJavaOptions = config.get(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS); checkJavaOptions(driverExtraJavaOptions); if (isClientMode) { // Figuring out where the memory value come from is a little tricky due to precedence. // Precedence is observed in the following order: // - explicit configuration (setConf()), which also covers --driver-memory cli argument. // - properties file. // - SPARK_DRIVER_MEMORY env variable // - SPARK_MEM env variable // - default value (1g) // Take Thrift/Connect Server as daemon String tsMemory = isThriftServer(mainClass) || isConnectServer(mainClass) ? System.getenv("SPARK_DAEMON_MEMORY") : null; String memory = firstNonEmpty(tsMemory, config.get(SparkLauncher.DRIVER_MEMORY), System.getenv("SPARK_DRIVER_MEMORY"), System.getenv("SPARK_MEM"), DEFAULT_MEM); cmd.add("-Xmx" + memory); addOptionString(cmd, driverDefaultJavaOptions); addOptionString(cmd, driverExtraJavaOptions); mergeEnvPathList(env, getLibPathEnvName(), config.get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH)); } // SPARK-36796: Always add some JVM runtime default options to submit command addOptionString(cmd, JavaModuleOptions.defaultModuleOptions()); addOptionString(cmd, "-Dderby.connection.requireAuthentication=false"); cmd.add("org.apache.spark.deploy.SparkSubmit"); cmd.addAll(buildSparkSubmitArgs()); return cmd; } private void checkJavaOptions(String javaOptions) { if (!isEmpty(javaOptions)) { for (String javaOption: CommandBuilderUtils.parseOptionString(javaOptions)) { if (javaOption.startsWith("-Xmx")) { String msg = String.format("Not allowed to specify max heap(Xmx) memory settings " + "through java options (was %s). 
Use the corresponding --driver-memory or " + "spark.driver.memory configuration instead.", javaOptions); throw new IllegalArgumentException(msg); } } } } private List<String> buildPySparkShellCommand(Map<String, String> env) throws IOException { // For backwards compatibility, if a script is specified in // the pyspark command line, then run it using spark-submit. if (!appArgs.isEmpty() && appArgs.get(0).endsWith(".py")) { System.err.println( "Running python applications through 'pyspark' is not supported as of Spark 2.0.\n" + "Use ./bin/spark-submit <python file>"); System.exit(-1); } checkArgument(appArgs.isEmpty(), "pyspark does not support any application options."); // When launching the pyspark shell, the spark-submit arguments should be stored in the // PYSPARK_SUBMIT_ARGS env variable. appResource = PYSPARK_SHELL_RESOURCE; // Do not pass remote configurations to Spark Connect server via Py4J. constructEnvVarArgs(env, "PYSPARK_SUBMIT_ARGS", false); // Will pick up the binary executable in the following order // 1. conf spark.pyspark.driver.python // 2. conf spark.pyspark.python // 3. environment variable PYSPARK_DRIVER_PYTHON // 4. environment variable PYSPARK_PYTHON // 5. python List<String> pyargs = new ArrayList<>(); pyargs.add(firstNonEmpty(conf.get(SparkLauncher.PYSPARK_DRIVER_PYTHON), conf.get(SparkLauncher.PYSPARK_PYTHON), System.getenv("PYSPARK_DRIVER_PYTHON"), System.getenv("PYSPARK_PYTHON"), "python3")); String pyOpts = System.getenv("PYSPARK_DRIVER_PYTHON_OPTS"); if (conf.containsKey(SparkLauncher.PYSPARK_PYTHON)) { // pass conf spark.pyspark.python to python by environment variable. 
env.put("PYSPARK_PYTHON", conf.get(SparkLauncher.PYSPARK_PYTHON)); } String remoteStr = firstNonEmpty(remote, conf.getOrDefault(SparkLauncher.SPARK_REMOTE, null)); String masterStr = firstNonEmpty(master, conf.getOrDefault(SparkLauncher.SPARK_MASTER, null)); String deployStr = firstNonEmpty( deployMode, conf.getOrDefault(SparkLauncher.DEPLOY_MODE, null)); if (remoteStr != null && (masterStr != null || deployStr != null)) { throw new IllegalStateException("Remote cannot be specified with master and/or deploy mode."); } String apiMode = getApiMode(conf); env.put("SPARK_API_MODE", apiMode); if (remoteStr != null) { env.put("SPARK_REMOTE", remoteStr); env.put("SPARK_CONNECT_MODE_ENABLED", "1"); } else if ("connect".equalsIgnoreCase(apiMode)) { env.put("MASTER", firstNonEmpty(masterStr, "local")); env.put("SPARK_CONNECT_MODE_ENABLED", "1"); } if (!isEmpty(pyOpts)) { pyargs.addAll(parseOptionString(pyOpts)); } return pyargs; } private List<String> buildSparkRCommand(Map<String, String> env) throws IOException { if (!appArgs.isEmpty() && (appArgs.get(0).endsWith(".R") || appArgs.get(0).endsWith(".r"))) { System.err.println( "Running R applications through 'sparkR' is not supported as of Spark 2.0.\n" + "Use ./bin/spark-submit <R file>"); System.exit(-1); } // When launching the SparkR shell, store the spark-submit arguments in the SPARKR_SUBMIT_ARGS // env variable. appResource = SPARKR_SHELL_RESOURCE; constructEnvVarArgs(env, "SPARKR_SUBMIT_ARGS", true); // Set shell.R as R_PROFILE_USER to load the SparkR package when the shell comes up. 
String sparkHome = System.getenv("SPARK_HOME"); env.put("R_PROFILE_USER", join(File.separator, sparkHome, "R", "lib", "SparkR", "profile", "shell.R")); List<String> args = new ArrayList<>(); args.add(firstNonEmpty(conf.get(SparkLauncher.SPARKR_R_SHELL), System.getenv("SPARKR_DRIVER_R"), "R")); return args; } private void constructEnvVarArgs( Map<String, String> env, String submitArgsEnvVariable, boolean includeRemote) throws IOException { mergeEnvPathList(env, getLibPathEnvName(), getEffectiveConfig().get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH)); StringBuilder submitArgs = new StringBuilder(); for (String arg : buildSparkSubmitArgs(includeRemote)) { if (submitArgs.length() > 0) { submitArgs.append(" "); } submitArgs.append(quoteForCommandString(arg)); } env.put(submitArgsEnvVariable, submitArgs.toString()); } boolean isClientMode(Map<String, String> userProps) { String userMaster = firstNonEmpty(master, userProps.get(SparkLauncher.SPARK_MASTER)); String userDeployMode = firstNonEmpty(deployMode, userProps.get(SparkLauncher.DEPLOY_MODE)); // Default master is "local[*]", so assume client mode in that case return userMaster == null || userDeployMode == null || "client".equals(userDeployMode); } /** * Return whether the given main
name
java
apache__dubbo
dubbo-configcenter/dubbo-configcenter-zookeeper/src/main/java/org/apache/dubbo/configcenter/support/zookeeper/CacheListener.java
{ "start": 1272, "end": 2840 }
class ____ {

    /** One ZookeeperDataListener per watched path key, shared by every config listener on that path. */
    private final ConcurrentMap<String, ZookeeperDataListener> pathKeyListeners = new ConcurrentHashMap<>();

    public CacheListener() {}

    /**
     * Registers a configuration listener under the given path key, lazily creating
     * the backing Zookeeper data listener on first use.
     *
     * @return the (shared) data listener now carrying the configuration listener
     */
    public ZookeeperDataListener addListener(
            String pathKey,
            ConfigurationListener configurationListener,
            String key,
            String group,
            ApplicationModel applicationModel) {
        // ConcurrentHashMapUtils guards against the JDK computeIfAbsent contention issue.
        ZookeeperDataListener dataListener = ConcurrentHashMapUtils.computeIfAbsent(
                pathKeyListeners,
                pathKey,
                k -> new ZookeeperDataListener(k, key, group, applicationModel));
        dataListener.addListener(configurationListener);
        return dataListener;
    }

    /**
     * Detaches a configuration listener from the path key; the per-path data listener
     * is discarded entirely once it has no remaining subscribers.
     *
     * @return the affected data listener, or {@code null} if none was registered
     */
    public ZookeeperDataListener removeListener(String pathKey, ConfigurationListener configurationListener) {
        ZookeeperDataListener dataListener = pathKeyListeners.get(pathKey);
        if (dataListener == null) {
            return null;
        }
        dataListener.removeListener(configurationListener);
        if (CollectionUtils.isEmpty(dataListener.getListeners())) {
            // Nobody listens on this path any more -- drop the cache entry.
            pathKeyListeners.remove(pathKey);
        }
        return dataListener;
    }

    /** @return the data listener cached for the path key, or {@code null} if absent */
    public ZookeeperDataListener getCachedListener(String pathKey) {
        return pathKeyListeners.get(pathKey);
    }

    /** @return the live (mutable) map of all registered path-key listeners */
    public Map<String, ZookeeperDataListener> getPathKeyListeners() {
        return pathKeyListeners;
    }

    /** Drops every registered listener. */
    public void clear() {
        pathKeyListeners.clear();
    }
}
CacheListener
java
apache__kafka
server-common/src/test/java/org/apache/kafka/server/metrics/KafkaMetricsGroupTest.java
{ "start": 1013, "end": 1478 }
class ____ {

    @Test
    public void testConstructorWithPackageAndSimpleName() {
        final String packageName = "testPackage";
        final String simpleName = "testSimple";

        // Every metric created through the group must echo the constructor
        // arguments as JMX group (package) and type (simple name).
        KafkaMetricsGroup metricsGroup = new KafkaMetricsGroup(packageName, simpleName);
        MetricName metricName = metricsGroup.metricName("metric-name", Map.of());

        assertEquals(packageName, metricName.getGroup());
        assertEquals(simpleName, metricName.getType());
    }
}
KafkaMetricsGroupTest
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/aot/hint/SerializationHints.java
{ "start": 994, "end": 3330 }
class ____ {

    /** Registered java-serialization hints, kept in registration order. */
    private final Set<JavaSerializationHint> javaSerializationHints = new LinkedHashSet<>();

    public SerializationHints() {
    }

    /**
     * Register that the type defined by the specified {@link TypeReference}
     * needs to be serialized using Java serialization, with optional customization.
     * @param type the type to register
     * @param serializationHint a builder to further customize the serialization, or {@code null}
     * @return {@code this}, to facilitate method chaining
     */
    public SerializationHints registerType(TypeReference type,
            @Nullable Consumer<JavaSerializationHint.Builder> serializationHint) {
        JavaSerializationHint.Builder builder = new JavaSerializationHint.Builder(type);
        if (serializationHint != null) {
            serializationHint.accept(builder);
        }
        javaSerializationHints.add(builder.build());
        return this;
    }

    /**
     * Register that the type defined by the specified {@link TypeReference}
     * needs to be serialized using Java serialization.
     * @param type the type to register
     * @return {@code this}, to facilitate method chaining
     */
    public SerializationHints registerType(TypeReference type) {
        return registerType(type, null);
    }

    /**
     * Register that the specified type needs to be serialized using Java
     * serialization, with optional customization.
     * @param type the type to register
     * @param serializationHint a builder to further customize the serialization, or {@code null}
     * @return {@code this}, to facilitate method chaining
     */
    public SerializationHints registerType(Class<? extends Serializable> type,
            @Nullable Consumer<JavaSerializationHint.Builder> serializationHint) {
        return registerType(TypeReference.of(type), serializationHint);
    }

    /**
     * Register that the specified type needs to be serialized using Java
     * serialization.
     * @param type the type to register
     * @return {@code this}, to facilitate method chaining
     */
    public SerializationHints registerType(Class<? extends Serializable> type) {
        return registerType(type, null);
    }

    /**
     * Return the {@link JavaSerializationHint java serialization hints} for types
     * that need to be serialized using Java serialization at runtime.
     * @return a stream of {@link JavaSerializationHint java serialization hints}
     */
    public Stream<JavaSerializationHint> javaSerializationHints() {
        return javaSerializationHints.stream();
    }
}
SerializationHints
java
google__auto
value/src/it/functional/src/test/java/com/google/auto/value/AutoAnnotationTest.java
{ "start": 2132, "end": 3324 }
class ____ {} @Test public void testSimple() { StringValues expectedStringValues = AnnotatedClass.class.getAnnotation(StringValues.class); StringValues actualStringValues = newStringValues(new String[] {"oops"}); StringValues otherStringValues = newStringValues(new String[] {}); new EqualsTester() .addEqualityGroup(expectedStringValues, actualStringValues) .addEqualityGroup(otherStringValues) .testEquals(); } @Test public void testEqualsParameterAnnotation() throws ReflectiveOperationException { assume() .that(Double.parseDouble(StandardSystemProperty.JAVA_SPECIFICATION_VERSION.value())) .isAtLeast(8.0); Class<? extends Annotation> jspecifyNullable; try { // We write this using .concat in order to hide it from rewriting rules. jspecifyNullable = Class.forName("org".concat(".jspecify.annotations.Nullable")) .asSubclass(Annotation.class); } catch (ClassNotFoundException e) { throw new AssumptionViolatedException("No JSpecify @Nullable available", e); } @SuppressWarnings("GetClassOnAnnotation") // yes, I really want the implementation
AnnotatedClass
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/ExpectedExceptionCheckerTest.java
{ "start": 14814, "end": 16216 }
class ____ { @Rule ExpectedException thrown = ExpectedException.none(); @Test public void testThrow() throws Exception { thrown.expect(IOException.class); throw new IOException(); } @Test public void one() throws Exception { Path p = Paths.get("NOSUCH"); thrown.expect(IOException.class); Files.readAllBytes(p); assertThat(Files.exists(p)).isFalse(); fail(); } @Test public void two() throws Exception { Path p = Paths.get("NOSUCH"); thrown.expect(IOException.class); Files.readAllBytes(p); assertThat(Files.exists(p)).isFalse(); throw new AssertionError(); } } """) .addOutputLines( "out/ExceptionTest.java", """ import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertThrows; import static org.junit.Assert.fail; import java.io.IOException; import java.nio.file.*; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException;
ExceptionTest
java
apache__flink
flink-core/src/main/java/org/apache/flink/streaming/api/operators/OutputTypeConfigurable.java
{ "start": 1788, "end": 2577 }
// NOTE(review): implemented by stream operators/functions whose output serializer
// depends on type information that is only known once the stream graph is built.
interface ____<OUT> {

    /**
     * Is called by the {@code org.apache.flink.streaming.api.graph.StreamGraph#addOperator(Integer,
     * String, StreamOperator, TypeInformation, TypeInformation, String)} method when the {@code
     * org.apache.flink.streaming.api.graph.StreamGraph} is generated. The method is called with the
     * output {@link TypeInformation} which is also used for the {@code
     * org.apache.flink.streaming.runtime.tasks.StreamTask} output serializer.
     *
     * @param outTypeInfo Output type information of the {@code
     *     org.apache.flink.streaming.runtime.tasks.StreamTask}
     * @param executionConfig Execution configuration
     */
    void setOutputType(TypeInformation<OUT> outTypeInfo, ExecutionConfig executionConfig);
}
OutputTypeConfigurable
java
apache__flink
flink-yarn-tests/src/test/java/org/apache/flink/yarn/YarnTestBase.java
{ "start": 4465, "end": 14155 }
class ____ { private static final Logger LOG = LoggerFactory.getLogger(YarnTestBase.class); protected static final PrintStream ORIGINAL_STDOUT = System.out; protected static final PrintStream ORIGINAL_STDERR = System.err; private static final InputStream ORIGINAL_STDIN = System.in; protected static final String TEST_CLUSTER_NAME_KEY = "flink-yarn-minicluster-name"; protected static final int NUM_NODEMANAGERS = 2; /** The tests are scanning for these strings in the final output. */ protected static final String[] PROHIBITED_STRINGS = { "Exception", // we don't want any exceptions to happen "Started SelectChannelConnector@0.0.0.0:8081" // Jetty should start on a random port in YARN // mode. }; /** These strings are white-listed, overriding the prohibited strings. */ protected static final Pattern[] WHITELISTED_STRINGS = { // happens if yarn does not support external resources Pattern.compile( "ClassNotFoundException: org.apache.hadoop.yarn.api.records.ResourceInformation"), // occurs if a TM disconnects from a JM because it is no longer hosting any slots Pattern.compile("has no more allocated slots for job"), // can happen if another process hasn't fully started yet Pattern.compile("org.apache.pekko.actor.ActorNotFound: Actor not found for"), // can happen if another process hasn't fully started yet Pattern.compile("RpcConnectionException: Could not connect to rpc endpoint under address"), // rest handler whose registration is logged on DEBUG level Pattern.compile("JobExceptionsHandler"), Pattern.compile("org\\.apache\\.pekko\\.remote\\.RemoteTransportExceptionNoStackTrace"), // workaround for annoying InterruptedException logging: // https://issues.apache.org/jira/browse/YARN-1022 Pattern.compile("java\\.lang\\.InterruptedException"), // very specific on purpose; whitelist meaningless exceptions that occur during Pekko // shutdown: Pattern.compile( "Remote connection to \\[.*\\] failed with java.net.ConnectException: Connection refused"), Pattern.compile( "Remote 
connection to \\[.*\\] failed with java.nio.channels.NotYetConnectedException"), Pattern.compile("java\\.io\\.IOException: Connection reset by peer"), Pattern.compile( "Association with remote system \\[pekko.tcp://flink@[^]]+\\] has failed, address is now gated for \\[50\\] ms. Reason: \\[Association failed with \\[pekko.tcp://flink@[^]]+\\]\\] Caused by: \\[java.net.ConnectException: Connection refused: [^]]+\\]"), // filter out expected ResourceManagerException caused by intended shutdown request Pattern.compile(YarnResourceManagerDriver.ERROR_MESSAGE_ON_SHUTDOWN_REQUEST), // this can happen in Pekko on shutdown. Pattern.compile( "java\\.util\\.concurrent\\.RejectedExecutionException: Worker has already been shutdown"), Pattern.compile("org\\.apache\\.flink.util\\.FlinkException: Stopping JobMaster"), Pattern.compile( "org\\.apache\\.flink.util\\.FlinkException: JobManager is shutting down\\."), Pattern.compile("lost the leadership."), Pattern.compile( "org.apache.pekko.remote.transport.netty.NettyTransport.*Remote connection to \\[[^]]+\\] failed with java.io.IOException: Broken pipe"), Pattern.compile( "org.apache.pekko.remote.transport.netty.NettyTransport.*Remote connection to \\[.+\\] failed with java.net.SocketException: Connection reset"), // this can happen during cluster shutdown, if AMRMClient happens to be heartbeating Pattern.compile("Exception on heartbeat"), Pattern.compile("java\\.io\\.InterruptedIOException: Call interrupted"), Pattern.compile( "java\\.io\\.InterruptedIOException: Interrupted waiting to send RPC request to server"), Pattern.compile("java\\.lang\\.InterruptedException"), // this can happen if the hbase delegation token provider is not available Pattern.compile("ClassNotFoundException : \"org.apache.hadoop.hbase.HBaseConfiguration\""), // This happens in YARN shutdown Pattern.compile("Rejected TaskExecutor registration at the ResourceManager") }; // Temp directory which is deleted after the unit test. 
@TempDir protected static File tmp; // Temp directory for mini hdfs @TempDir public static File tmpHDFS; protected static MiniYARNCluster yarnCluster = null; protected static MiniDFSCluster miniDFSCluster = null; /** Uberjar (fat jar) file of Flink. */ protected static File flinkUberjar; protected static final YarnConfiguration YARN_CONFIGURATION; /** lib/ folder of the flink distribution. */ protected static File flinkLibFolder; /** Temporary folder where Flink configurations will be kept for secure run. */ protected static File tempConfPathForSecureRun = null; protected static File yarnSiteXML = null; protected static File hdfsSiteXML = null; protected static Map<String, String> env; private YarnClient yarnClient = null; private static org.apache.flink.configuration.Configuration globalConfiguration; protected org.apache.flink.configuration.Configuration flinkConfiguration; static { YARN_CONFIGURATION = new YarnConfiguration(); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 32); YARN_CONFIGURATION.setInt( YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB, 4096); // 4096 is the available memory anyways YARN_CONFIGURATION.setBoolean( YarnConfiguration.RM_SCHEDULER_INCLUDE_PORT_IN_NODE_NAME, true); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 2); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS, 2); YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES, 4); YARN_CONFIGURATION.setInt(YarnConfiguration.DEBUG_NM_DELETE_DELAY_SEC, 3600); YARN_CONFIGURATION.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, false); YARN_CONFIGURATION.setInt( YarnConfiguration.NM_VCORES, 666); // memory is overwritten in the MiniYARNCluster. // so we have to change the number of cores for testing. 
YARN_CONFIGURATION.setFloat( YarnConfiguration.NM_MAX_PER_DISK_UTILIZATION_PERCENTAGE, 99.0F); YARN_CONFIGURATION.set(YarnConfiguration.YARN_APPLICATION_CLASSPATH, getYarnClasspath()); YARN_CONFIGURATION.setInt( YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_MS, 1000); YARN_CONFIGURATION.setInt(YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_MS, 5000); } /** * Searches for the yarn.classpath file generated by the "dependency:build-classpath" maven * plugin in "flink-yarn-tests". * * @return a classpath suitable for running all YARN-launched JVMs */ protected static String getYarnClasspath() { final String start = "../flink-yarn-tests"; try { File classPathFile = TestUtils.findFile(start, (dir, name) -> name.equals("yarn.classpath")); return FileUtils.readFileToString( classPathFile, StandardCharsets.UTF_8); // potential NPE is supposed to be fatal } catch (Throwable t) { LOG.error( "Error while getting YARN classpath in {}", new File(start).getAbsoluteFile(), t); throw new RuntimeException("Error while getting YARN classpath", t); } } public static void populateYarnSecureConfigurations( Configuration conf, String principal, String keytab) { conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true"); conf.set(YarnConfiguration.RM_KEYTAB, keytab); conf.set(YarnConfiguration.RM_PRINCIPAL, principal); conf.set(YarnConfiguration.NM_KEYTAB, keytab); conf.set(YarnConfiguration.NM_PRINCIPAL, principal); conf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY, principal); conf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, keytab); conf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_USER_NAME_KEY, principal); conf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, keytab); conf.set("hadoop.security.auth_to_local", "RULE:[1:$1] RULE:[2:$1]"); } @BeforeEach void setupYarnClient() { if (yarnClient == null) { yarnClient = YarnClient.createYarnClient(); 
yarnClient.init(getYarnConfiguration()); yarnClient.start(); } flinkConfiguration = new org.apache.flink.configuration.Configuration(globalConfiguration); } /** Sleep a bit between the tests (we are re-using the YARN cluster for the tests). */ @AfterEach void shutdownYarnClient() { yarnClient.stop(); } protected void runTest(RunnableWithException test) throws Exception { // wrapping the cleanup logic in an AutoClosable automatically suppresses additional // exceptions try (final CleanupYarnApplication ignored = new CleanupYarnApplication()) { test.run(); } } private
YarnTestBase
java
google__guava
android/guava/src/com/google/common/collect/MapMakerInternalMap.java
{ "start": 75445, "end": 81972 }
class ____ implements Runnable { final WeakReference<MapMakerInternalMap<?, ?, ?, ?>> mapReference; public CleanupMapTask(MapMakerInternalMap<?, ?, ?, ?> map) { this.mapReference = new WeakReference<>(map); } @Override public void run() { MapMakerInternalMap<?, ?, ?, ?> map = mapReference.get(); if (map == null) { throw new CancellationException(); } for (Segment<?, ?, ?, ?> segment : map.segments) { segment.runCleanup(); } } } @VisibleForTesting Strength keyStrength() { return entryHelper.keyStrength(); } @VisibleForTesting Strength valueStrength() { return entryHelper.valueStrength(); } @VisibleForTesting Equivalence<Object> valueEquivalence() { return entryHelper.valueStrength().defaultEquivalence(); } // ConcurrentMap methods @Override public boolean isEmpty() { /* * Sum per-segment modCounts to avoid mis-reporting when elements are concurrently added and * removed in one segment while checking another, in which case the table was never actually * empty at any point. (The sum ensures accuracy up through at least 1<<31 per-segment * modifications before recheck.) Method containsValue() uses similar constructions for * stability checks. */ long sum = 0L; Segment<K, V, E, S>[] segments = this.segments; for (int i = 0; i < segments.length; ++i) { if (segments[i].count != 0) { return false; } sum += segments[i].modCount; } if (sum != 0L) { // recheck unless no modifications for (int i = 0; i < segments.length; ++i) { if (segments[i].count != 0) { return false; } sum -= segments[i].modCount; } return sum == 0L; } return true; } @Override public int size() { Segment<K, V, E, S>[] segments = this.segments; long sum = 0; for (int i = 0; i < segments.length; ++i) { sum += segments[i].count; } return Ints.saturatedCast(sum); } @Override public @Nullable V get(@Nullable Object key) { if (key == null) { return null; } int hash = hash(key); return segmentFor(hash).get(key, hash); } /** * Returns the internal entry for the specified key. 
The entry may be computing or partially * collected. Does not impact recency ordering. */ @Nullable E getEntry(@Nullable Object key) { if (key == null) { return null; } int hash = hash(key); return segmentFor(hash).getEntry(key, hash); } @Override public boolean containsKey(@Nullable Object key) { if (key == null) { return false; } int hash = hash(key); return segmentFor(hash).containsKey(key, hash); } @Override public boolean containsValue(@Nullable Object value) { if (value == null) { return false; } // This implementation is patterned after ConcurrentHashMap, but without the locking. The only // way for it to return a false negative would be for the target value to jump around in the map // such that none of the subsequent iterations observed it, despite the fact that at every point // in time it was present somewhere int the map. This becomes increasingly unlikely as // CONTAINS_VALUE_RETRIES increases, though without locking it is theoretically possible. Segment<K, V, E, S>[] segments = this.segments; long last = -1L; for (int i = 0; i < CONTAINS_VALUE_RETRIES; i++) { long sum = 0L; for (Segment<K, V, E, S> segment : segments) { // ensure visibility of most recent completed write int unused = segment.count; // read-volatile AtomicReferenceArray<E> table = segment.table; for (int j = 0; j < table.length(); j++) { for (E e = table.get(j); e != null; e = e.getNext()) { V v = segment.getLiveValue(e); if (v != null && valueEquivalence().equivalent(value, v)) { return true; } } } sum += segment.modCount; } if (sum == last) { break; } last = sum; } return false; } @CanIgnoreReturnValue @Override public @Nullable V put(K key, V value) { checkNotNull(key); checkNotNull(value); int hash = hash(key); return segmentFor(hash).put(key, hash, value, false); } @CanIgnoreReturnValue @Override public @Nullable V putIfAbsent(K key, V value) { checkNotNull(key); checkNotNull(value); int hash = hash(key); return segmentFor(hash).put(key, hash, value, true); } @Override public void 
putAll(Map<? extends K, ? extends V> m) { for (Entry<? extends K, ? extends V> e : m.entrySet()) { put(e.getKey(), e.getValue()); } } @CanIgnoreReturnValue @Override public @Nullable V remove(@Nullable Object key) { if (key == null) { return null; } int hash = hash(key); return segmentFor(hash).remove(key, hash); } @CanIgnoreReturnValue @Override public boolean remove(@Nullable Object key, @Nullable Object value) { if (key == null || value == null) { return false; } int hash = hash(key); return segmentFor(hash).remove(key, hash, value); } @CanIgnoreReturnValue @Override public boolean replace(K key, @Nullable V oldValue, V newValue) { checkNotNull(key); checkNotNull(newValue); if (oldValue == null) { return false; } int hash = hash(key); return segmentFor(hash).replace(key, hash, oldValue, newValue); } @CanIgnoreReturnValue @Override public @Nullable V replace(K key, V value) { checkNotNull(key); checkNotNull(value); int hash = hash(key); return segmentFor(hash).replace(key, hash, value); } @Override public void clear() { for (Segment<K, V, E, S> segment : segments) { segment.clear(); } } @LazyInit transient @Nullable Set<K> keySet; @Override public Set<K> keySet() { Set<K> ks = keySet; return (ks != null) ? ks : (keySet = new KeySet()); } @LazyInit transient @Nullable Collection<V> values; @Override public Collection<V> values() { Collection<V> vs = values; return (vs != null) ? vs : (values = new Values()); } @LazyInit transient @Nullable Set<Entry<K, V>> entrySet; @Override public Set<Entry<K, V>> entrySet() { Set<Entry<K, V>> es = entrySet; return (es != null) ? es : (entrySet = new EntrySet()); } // Iterator Support abstract
CleanupMapTask
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/bugs/_375/Source.java
{ "start": 233, "end": 700 }
class ____ {

    // Source-side fixture bean for the mapstruct _375 regression test (see package path).
    // Int<String> and Case<String, Integer> are project-declared generic types that
    // presumably exercise iterable and map mappings respectively -- TODO confirm in mapper.

    private Int<String> testIterable;
    private Case<String, Integer> testMap;

    // Plain JavaBean accessors; may return null when the property was never set.
    public Int<String> getTestIterable() {
        return testIterable;
    }

    public void setTestIterable(Int<String> testIterable) {
        this.testIterable = testIterable;
    }

    public Case<String, Integer> getTestMap() {
        return testMap;
    }

    public void setTestMap(Case<String, Integer> testMap) {
        this.testMap = testMap;
    }
}
Source
java
FasterXML__jackson-databind
src/test/java/tools/jackson/databind/ser/filter/SimpleFilterProviderTest.java
{ "start": 1133, "end": 4360 }
class ____ { public String c; public String d; public AnyBeanC(String c, String d) { this.c = c; this.d = d; } } /* /********************************************************** /* Tests /********************************************************** */ @Test public void testAddFilterLastOneRemains() throws Exception { FilterProvider prov = new SimpleFilterProvider() .addFilter("filterB", SimpleBeanPropertyFilter.serializeAll()) .addFilter("filterB", SimpleBeanPropertyFilter.filterOutAllExcept()); AnyBeanB beanB = new AnyBeanB("1a", "2b"); String jsonString = MAPPER.writer(prov).writeValueAsString(beanB); assertEquals("{}", jsonString); } @Test public void testAddFilterLastOneRemainsFlip() throws Exception { FilterProvider prov = new SimpleFilterProvider() .addFilter("filterB", SimpleBeanPropertyFilter.filterOutAllExcept("a")) .addFilter("filterB", SimpleBeanPropertyFilter.serializeAll()); AnyBeanB beanB = new AnyBeanB("1a", "2b"); String jsonString = MAPPER.writer(prov).writeValueAsString(beanB); Map<?,?> actualMap = MAPPER.readValue(jsonString, Map.class); Map<String, Object> expectedMap = new LinkedHashMap<>(); expectedMap.put("a", "1a"); expectedMap.put("b", "2b"); assertEquals(expectedMap, actualMap); } @Test public void testAddFilterWithEmptyStringId() throws Exception { FilterProvider prov = new SimpleFilterProvider() .addFilter("", SimpleBeanPropertyFilter.filterOutAllExcept("d")); AnyBeanC bean = new AnyBeanC(null, "D is filtered"); String jsonString = MAPPER.writer(prov).writeValueAsString(bean); Map<?,?> actualMap = MAPPER.readValue(jsonString, Map.class); Map<String, Object> expectedMap = new LinkedHashMap<>(); expectedMap.put("c", null); expectedMap.put("d", "D is filtered"); assertEquals(expectedMap, actualMap); } @Test public void testAddingNullFilter2ThrowsException() throws Exception { FilterProvider prov = new SimpleFilterProvider() .addFilter("filterB", null); ObjectWriter writer = MAPPER.writer(prov); AnyBeanB beanD = new AnyBeanB("1a", "2b"); try { 
writer.writeValueAsString(beanD); fail("Should not have passed"); } catch (DatabindException e) { verifyException(e, "No filter configured with id 'filterB'"); } } @Test public void testAddingNullFilterIdThrowsException() throws Exception { FilterProvider prov = new SimpleFilterProvider() .addFilter(null, SimpleBeanPropertyFilter.serializeAll()); ObjectWriter writer = MAPPER.writer(prov); AnyBeanB beanD = new AnyBeanB("1a", "2b"); try { writer.writeValueAsString(beanD); fail("Should not have passed"); } catch (DatabindException e) { verifyException(e, "No filter configured with id 'filterB'"); } } }
AnyBeanC
java
apache__spark
common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java
{ "start": 16084, "end": 21749 }
class ____ implements Iterator<ManagedBuffer> { private int index = 0; private final Function<Integer, ManagedBuffer> blockDataForIndexFn; private final int size; ManagedBufferIterator(OpenBlocks msg) { String appId = msg.appId; String execId = msg.execId; String[] blockIds = msg.blockIds; String[] blockId0Parts = blockIds[0].split("_"); if (blockId0Parts.length == 4 && blockId0Parts[0].equals(SHUFFLE_BLOCK_ID)) { final int shuffleId = Integer.parseInt(blockId0Parts[1]); final int[] mapIdAndReduceIds = shuffleMapIdAndReduceIds(blockIds, shuffleId); size = mapIdAndReduceIds.length; blockDataForIndexFn = index -> blockManager.getBlockData(appId, execId, shuffleId, mapIdAndReduceIds[index], mapIdAndReduceIds[index + 1]); } else if (blockId0Parts.length == 5 && blockId0Parts[0].equals(SHUFFLE_CHUNK_ID)) { final int shuffleId = Integer.parseInt(blockId0Parts[1]); final int shuffleMergeId = Integer.parseInt(blockId0Parts[2]); final int[] reduceIdAndChunkIds = shuffleReduceIdAndChunkIds(blockIds, shuffleId, shuffleMergeId); size = reduceIdAndChunkIds.length; blockDataForIndexFn = index -> mergeManager.getMergedBlockData(msg.appId, shuffleId, shuffleMergeId, reduceIdAndChunkIds[index], reduceIdAndChunkIds[index + 1]); } else if (blockId0Parts.length == 3 && blockId0Parts[0].equals("rdd")) { final int[] rddAndSplitIds = rddAndSplitIds(blockIds); size = rddAndSplitIds.length; blockDataForIndexFn = index -> blockManager.getRddBlockData(appId, execId, rddAndSplitIds[index], rddAndSplitIds[index + 1]); } else { throw new IllegalArgumentException("Unexpected block id format: " + blockIds[0]); } } private int[] rddAndSplitIds(String[] blockIds) { final int[] rddAndSplitIds = new int[2 * blockIds.length]; for (int i = 0; i < blockIds.length; i++) { String[] blockIdParts = blockIds[i].split("_"); if (blockIdParts.length != 3 || !blockIdParts[0].equals("rdd")) { throw new IllegalArgumentException("Unexpected RDD block id format: " + blockIds[i]); } rddAndSplitIds[2 * i] = 
Integer.parseInt(blockIdParts[1]); rddAndSplitIds[2 * i + 1] = Integer.parseInt(blockIdParts[2]); } return rddAndSplitIds; } /** * @param blockIds Regular shuffle blockIds starts with SHUFFLE_BLOCK_ID to be parsed * @param shuffleId shuffle blocks shuffleId * @return mapId and reduceIds of the shuffle blocks in the same order as that of the blockIds * * Regular shuffle blocks format should be shuffle_$shuffleId_$mapId_$reduceId */ private int[] shuffleMapIdAndReduceIds(String[] blockIds, int shuffleId) { final int[] mapIdAndReduceIds = new int[2 * blockIds.length]; for (int i = 0; i < blockIds.length; i++) { String[] blockIdParts = blockIds[i].split("_"); if (blockIdParts.length != 4 || !blockIdParts[0].equals(SHUFFLE_BLOCK_ID)) { throw new IllegalArgumentException("Unexpected shuffle block id format: " + blockIds[i]); } if (Integer.parseInt(blockIdParts[1]) != shuffleId) { throw new IllegalArgumentException("Expected shuffleId=" + shuffleId + ", got:" + blockIds[i]); } // mapId mapIdAndReduceIds[2 * i] = Integer.parseInt(blockIdParts[2]); // reduceId mapIdAndReduceIds[2 * i + 1] = Integer.parseInt(blockIdParts[3]); } return mapIdAndReduceIds; } /** * @param blockIds Shuffle merged chunks starts with SHUFFLE_CHUNK_ID to be parsed * @param shuffleId shuffle blocks shuffleId * @param shuffleMergeId shuffleMergeId is used to uniquely identify merging process * of shuffle by an indeterminate stage attempt. 
* @return reduceId and chunkIds of the shuffle chunks in the same order as that of the * blockIds * * Shuffle merged chunks format should be * shuffleChunk_$shuffleId_$shuffleMergeId_$reduceId_$chunkId */ private int[] shuffleReduceIdAndChunkIds( String[] blockIds, int shuffleId, int shuffleMergeId) { final int[] reduceIdAndChunkIds = new int[2 * blockIds.length]; for(int i = 0; i < blockIds.length; i++) { String[] blockIdParts = blockIds[i].split("_"); if (blockIdParts.length != 5 || !blockIdParts[0].equals(SHUFFLE_CHUNK_ID)) { throw new IllegalArgumentException("Unexpected shuffle chunk id format: " + blockIds[i]); } if (Integer.parseInt(blockIdParts[1]) != shuffleId || Integer.parseInt(blockIdParts[2]) != shuffleMergeId) { throw new IllegalArgumentException(String.format("Expected shuffleId = %s" + " and shuffleMergeId = %s but got %s", shuffleId, shuffleMergeId, blockIds[i])); } // reduceId reduceIdAndChunkIds[2 * i] = Integer.parseInt(blockIdParts[3]); // chunkId reduceIdAndChunkIds[2 * i + 1] = Integer.parseInt(blockIdParts[4]); } return reduceIdAndChunkIds; } @Override public boolean hasNext() { return index < size; } @Override public ManagedBuffer next() { final ManagedBuffer block = blockDataForIndexFn.apply(index); index += 2; metrics.blockTransferRate.mark(); metrics.blockTransferMessageRate.mark(); metrics.blockTransferRateBytes.mark(block != null ? block.size() : 0); return block; } } private
ManagedBufferIterator
java
quarkusio__quarkus
integration-tests/main/src/test/java/io/quarkus/it/main/QuarkusClassloaderProtectionDomainTest.java
{ "start": 632, "end": 730 }
class ____ * from the QuarkusClassLoader yields the appropriate result */ @QuarkusTest public
loaded
java
FasterXML__jackson-databind
src/main/java/tools/jackson/databind/EnumNamingStrategies.java
{ "start": 3557, "end": 3684 }
enum ____ to lower camel case in order to be further processed by a NamingStrategies * * @param enumName the
name
java
alibaba__nacos
core/src/main/java/com/alibaba/nacos/core/remote/tls/RpcServerSslContextRefresher.java
{ "start": 871, "end": 1224 }
interface ____ {

    /**
     * listener current rpc server and do something on ssl context change.
     *
     * @param baseRpcServer rpc server.
     * @return a handle that is notified when the ssl context of the server changes.
     */
    SslContextChangeAware refresh(BaseRpcServer baseRpcServer);

    /**
     * refresher name.
     *
     * @return the name identifying this refresher implementation.
     */
    String getName();
}
RpcServerSslContextRefresher
java
apache__kafka
streams/src/main/java/org/apache/kafka/streams/state/internals/RecordConverters.java
{ "start": 1883, "end": 2186 }
class ____ be instantiated (only used for its static members) private RecordConverters() {} public static RecordConverter rawValueToTimestampedValue() { return RAW_TO_TIMESTAMED_INSTANCE; } public static RecordConverter identity() { return IDENTITY_INSTANCE; } }
cannot
java
apache__commons-lang
src/test/java/org/apache/commons/lang3/CachedRandomBitsTest.java
{ "start": 1324, "end": 3437 }
class ____ extends Random { private static final long serialVersionUID = 1L; private final byte[] outputs; private int index; MockRandom(final byte[] outputs) { this.outputs = outputs.clone(); this.index = 0; } @Override public void nextBytes(final byte[] bytes) { Objects.requireNonNull(bytes, "bytes"); if (index + bytes.length > outputs.length) { throw new IllegalStateException("Not enough outputs given in MockRandom"); } System.arraycopy(outputs, index, bytes, 0, bytes.length); index += bytes.length; } } @ParameterizedTest @ValueSource(ints = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 32}) void testNext(final int cacheSize) { final MockRandom random = new MockRandom(new byte[]{ 0x11, 0x12, 0x13, 0x25, (byte) 0xab, (byte) 0xcd, (byte) 0xef, (byte) 0xff, 0x55, 0x44, 0x12, 0x34, 0x56, 0x78, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, }); final CachedRandomBits arb = new CachedRandomBits(cacheSize, random); assertIllegalArgumentException(() -> arb.nextBits(0)); assertIllegalArgumentException(() -> arb.nextBits(33)); assertEquals(0x11, arb.nextBits(8)); assertEquals(0x12, arb.nextBits(8)); assertEquals(0x1325, arb.nextBits(16)); assertEquals(0xabcdefff, arb.nextBits(32)); assertEquals(0x5, arb.nextBits(4)); assertEquals(0x1, arb.nextBits(1)); assertEquals(0x0, arb.nextBits(1)); assertEquals(0x1, arb.nextBits(2)); assertEquals(0x4, arb.nextBits(6)); assertEquals(0x40000000 | 0x12345600 >> 2 | 0x38, arb.nextBits(32)); assertEquals(1, arb.nextBits(1)); assertEquals(0, arb.nextBits(1)); assertEquals(0, arb.nextBits(9)); assertEquals(0, arb.nextBits(31)); } }
MockRandom
java
elastic__elasticsearch
modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SnowballAnalyzerTests.java
{ "start": 762, "end": 1879 }
class ____ extends ESTokenStreamTestCase { public void testEnglish() throws Exception { Analyzer a = new SnowballAnalyzer("English"); assertAnalyzesTo(a, "he abhorred accents", new String[] { "he", "abhor", "accent" }); } public void testStopwords() throws Exception { Analyzer a = new SnowballAnalyzer("English", EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); assertAnalyzesTo(a, "the quick brown fox jumped", new String[] { "quick", "brown", "fox", "jump" }); } /** * Test turkish lowercasing */ public void testTurkish() throws Exception { Analyzer a = new SnowballAnalyzer("Turkish"); assertAnalyzesTo(a, "ağacı", new String[] { "ağaç" }); assertAnalyzesTo(a, "AĞACI", new String[] { "ağaç" }); } public void testReusableTokenStream() throws Exception { Analyzer a = new SnowballAnalyzer("English"); assertAnalyzesTo(a, "he abhorred accents", new String[] { "he", "abhor", "accent" }); assertAnalyzesTo(a, "she abhorred him", new String[] { "she", "abhor", "him" }); } }
SnowballAnalyzerTests
java
spring-projects__spring-framework
spring-jdbc/src/main/java/org/springframework/jdbc/core/metadata/CallMetaDataProvider.java
{ "start": 1177, "end": 7035 }
interface ____ { /** * Initialize using the provided DatabaseMetData. * @param databaseMetaData used to retrieve database specific information * @throws SQLException in case of initialization failure */ void initializeWithMetaData(DatabaseMetaData databaseMetaData) throws SQLException; /** * Initialize the database specific management of procedure column meta-data. * <p>This is only called for databases that are supported. This initialization * can be turned off by specifying that column meta-data should not be used. * @param databaseMetaData used to retrieve database specific information * @param catalogName name of catalog to use (or {@code null} if none) * @param schemaName name of schema name to use (or {@code null} if none) * @param procedureName name of the stored procedure * @throws SQLException in case of initialization failure * @see org.springframework.jdbc.core.simple.SimpleJdbcCall#withoutProcedureColumnMetaDataAccess() */ void initializeWithProcedureColumnMetaData(DatabaseMetaData databaseMetaData, @Nullable String catalogName, @Nullable String schemaName, @Nullable String procedureName) throws SQLException; /** * Get the call parameter meta-data that is currently used. * @return a List of {@link CallParameterMetaData} */ List<CallParameterMetaData> getCallParameterMetaData(); /** * Provide any modification of the procedure name passed in to match the meta-data currently used. * <p>This could include altering the case. */ @Nullable String procedureNameToUse(@Nullable String procedureName); /** * Provide any modification of the catalog name passed in to match the meta-data currently used. * <p>This could include altering the case. */ @Nullable String catalogNameToUse(@Nullable String catalogName); /** * Provide any modification of the schema name passed in to match the meta-data currently used. * <p>This could include altering the case. 
*/ @Nullable String schemaNameToUse(@Nullable String schemaName); /** * Provide any modification of the catalog name passed in to match the meta-data currently used. * <p>The returned value will be used for meta-data lookups. This could include altering the case * used or providing a base catalog if none is provided. */ @Nullable String metaDataCatalogNameToUse(@Nullable String catalogName) ; /** * Provide any modification of the schema name passed in to match the meta-data currently used. * <p>The returned value will be used for meta-data lookups. This could include altering the case * used or providing a base schema if none is provided. */ @Nullable String metaDataSchemaNameToUse(@Nullable String schemaName); /** * Provide any modification of the column name passed in to match the meta-data currently used. * <p>This could include altering the case. * @param parameterName name of the parameter of column */ @Nullable String parameterNameToUse(@Nullable String parameterName); /** * Return the name of the named parameter to use for binding the given parameter name. * @param parameterName the name of the parameter to bind * @return the name of the named parameter to use for binding the given parameter name * @since 6.1.2 */ String namedParameterBindingToUse(@Nullable String parameterName); /** * Create a default out parameter based on the provided meta-data. * <p>This is used when no explicit parameter declaration has been made. * @param parameterName the name of the parameter * @param meta meta-data used for this call * @return the configured SqlOutParameter */ SqlParameter createDefaultOutParameter(String parameterName, CallParameterMetaData meta); /** * Create a default in/out parameter based on the provided meta-data. * <p>This is used when no explicit parameter declaration has been made. 
* @param parameterName the name of the parameter * @param meta meta-data used for this call * @return the configured SqlInOutParameter */ SqlParameter createDefaultInOutParameter(String parameterName, CallParameterMetaData meta); /** * Create a default in parameter based on the provided meta-data. * <p>This is used when no explicit parameter declaration has been made. * @param parameterName the name of the parameter * @param meta meta-data used for this call * @return the configured SqlParameter */ SqlParameter createDefaultInParameter(String parameterName, CallParameterMetaData meta); /** * Get the name of the current user. Useful for meta-data lookups etc. * @return current user name from database connection */ @Nullable String getUserName(); /** * Are we using the meta-data for the procedure columns? */ boolean isProcedureColumnMetaDataUsed(); /** * Does this database support returning ResultSets that should be retrieved with the JDBC call: * {@link java.sql.Statement#getResultSet()}? */ boolean isReturnResultSetSupported(); /** * Does this database support returning ResultSets as ref cursors to be retrieved with * {@link java.sql.CallableStatement#getObject(int)} for the specified column? */ boolean isRefCursorSupported(); /** * Get the {@link java.sql.Types} type for columns that return ResultSets as ref cursors * if this feature is supported. */ int getRefCursorSqlType(); /** * Should we bypass the return parameter with the specified name? * <p>This allows the database specific implementation to skip the processing * for specific results returned by the database call. */ boolean byPassReturnParameter(String parameterName); /** * Does the database support the use of catalog name in procedure calls? */ boolean isSupportsCatalogsInProcedureCalls(); /** * Does the database support the use of schema name in procedure calls? */ boolean isSupportsSchemasInProcedureCalls(); }
CallMetaDataProvider
java
junit-team__junit5
junit-jupiter-params/src/main/java/org/junit/jupiter/params/BeforeParameterizedClassInvocationMethodInvoker.java
{ "start": 509, "end": 1119 }
class ____ extends AbstractParameterizedClassInvocationLifecycleMethodInvoker implements BeforeClassTemplateInvocationCallback { BeforeParameterizedClassInvocationMethodInvoker(ParameterizedClassContext declarationContext, EvaluatedArgumentSet arguments, int invocationIndex, ResolutionCache resolutionCache, ArgumentSetLifecycleMethod lifecycleMethod) { super(declarationContext, arguments, invocationIndex, resolutionCache, lifecycleMethod); } @Override public void beforeClassTemplateInvocation(ExtensionContext context) { invoke(context); } }
BeforeParameterizedClassInvocationMethodInvoker
java
spring-projects__spring-framework
spring-webmvc/src/main/java/org/springframework/web/servlet/handler/MatchableHandlerMapping.java
{ "start": 1000, "end": 1362 }
interface ____ a {@link HandlerMapping} can implement to expose * a request matching API aligned with its internal request matching * configuration and implementation. * * @author Rossen Stoyanchev * @since 4.3.1 * @deprecated together with {@link HandlerMappingIntrospector} without a replacement. */ @Deprecated(since = "7.0", forRemoval = true) public
that
java
quarkusio__quarkus
integration-tests/maven/src/test/resources-filtered/projects/build-mode-quarkus-profile-override/src/main/java/org/acme/HelloService.java
{ "start": 231, "end": 314 }
class ____ { public String name() { return "from foo"; } }
HelloService
java
apache__rocketmq
store/src/main/java/org/apache/rocketmq/store/timer/TimerMetrics.java
{ "start": 2087, "end": 7375 }
class ____ extends ConfigManager { private static final Logger log = LoggerFactory.getLogger(LoggerName.BROKER_LOGGER_NAME); private static final long LOCK_TIMEOUT_MILLIS = 3000; private transient final Lock lock = new ReentrantLock(); private final ConcurrentMap<String, Metric> timingCount = new ConcurrentHashMap<>(1024); private final ConcurrentMap<Integer, Metric> timingDistribution = new ConcurrentHashMap<>(1024); @SuppressWarnings("DoubleBraceInitialization") public List<Integer> timerDist = new ArrayList<Integer>() {{ add(5); add(60); add(300); // 5s, 1min, 5min add(900); add(3600); add(14400); // 15min, 1h, 4h add(28800); add(86400); // 8h, 24h }}; private final DataVersion dataVersion = new DataVersion(); private final String configPath; public TimerMetrics(String configPath) { this.configPath = configPath; } public long updateDistPair(int period, int value) { Metric distPair = getDistPair(period); return distPair.getCount().addAndGet(value); } public long addAndGet(MessageExt msg, int value) { String topic = msg.getProperty(MessageConst.PROPERTY_REAL_TOPIC); Metric pair = getTopicPair(topic); getDataVersion().nextVersion(); pair.setTimeStamp(System.currentTimeMillis()); return pair.getCount().addAndGet(value); } public Metric getDistPair(Integer period) { Metric pair = timingDistribution.get(period); if (null != pair) { return pair; } pair = new Metric(); final Metric previous = timingDistribution.putIfAbsent(period, pair); if (null != previous) { return previous; } return pair; } public Metric getTopicPair(String topic) { Metric pair = timingCount.get(topic); if (null != pair) { return pair; } pair = new Metric(); final Metric previous = timingCount.putIfAbsent(topic, pair); if (null != previous) { return previous; } return pair; } public List<Integer> getTimerDistList() { return this.timerDist; } public void setTimerDistList(List<Integer> timerDist) { this.timerDist = timerDist; } public long getTimingCount(String topic) { Metric pair = 
timingCount.get(topic); if (null == pair) { return 0; } else { return pair.getCount().get(); } } public Map<String, Metric> getTimingCount() { return timingCount; } protected void write0(Writer writer) { TimerMetricsSerializeWrapper wrapper = new TimerMetricsSerializeWrapper(); wrapper.setTimingCount(timingCount); wrapper.setDataVersion(dataVersion); JSON.writeJSONString(writer, wrapper, SerializerFeature.BrowserCompatible); } @Override public String encode() { return encode(false); } @Override public String configFilePath() { return configPath; } @Override public void decode(String jsonString) { if (jsonString != null) { TimerMetricsSerializeWrapper timerMetricsSerializeWrapper = TimerMetricsSerializeWrapper.fromJson(jsonString, TimerMetricsSerializeWrapper.class); if (timerMetricsSerializeWrapper != null) { this.timingCount.putAll(timerMetricsSerializeWrapper.getTimingCount()); this.dataVersion.assignNewOne(timerMetricsSerializeWrapper.getDataVersion()); } } } @Override public String encode(boolean prettyFormat) { TimerMetricsSerializeWrapper metricsSerializeWrapper = new TimerMetricsSerializeWrapper(); metricsSerializeWrapper.setDataVersion(this.dataVersion); metricsSerializeWrapper.setTimingCount(this.timingCount); return metricsSerializeWrapper.toJson(prettyFormat); } public DataVersion getDataVersion() { return dataVersion; } public void cleanMetrics(Set<String> topics) { if (topics == null || topics.isEmpty()) { return; } Iterator<Map.Entry<String, Metric>> iterator = timingCount.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry<String, Metric> entry = iterator.next(); final String topic = entry.getKey(); if (topic.startsWith(TopicValidator.SYSTEM_TOPIC_PREFIX) || topic.startsWith(MixAll.LMQ_PREFIX)) { continue; } if (topics.contains(topic)) { continue; } iterator.remove(); log.info("clean timer metrics, because not in topic config, {}", topic); } } public boolean removeTimingCount(String topic) { try { timingCount.remove(topic); } catch 
(Exception e) { log.error("removeTimingCount error", e); return false; } return true; } public static
TimerMetrics
java
apache__kafka
clients/src/main/java/org/apache/kafka/common/requests/CreateAclsRequest.java
{ "start": 1789, "end": 5617 }
class ____ extends AbstractRequest.Builder<CreateAclsRequest> { private final CreateAclsRequestData data; public Builder(CreateAclsRequestData data) { super(ApiKeys.CREATE_ACLS); this.data = data; } @Override public CreateAclsRequest build(short version) { return new CreateAclsRequest(data, version); } @Override public String toString() { return data.toString(); } } private final CreateAclsRequestData data; CreateAclsRequest(CreateAclsRequestData data, short version) { super(ApiKeys.CREATE_ACLS, version); validate(data); this.data = data; } public List<AclCreation> aclCreations() { return data.creations(); } @Override public CreateAclsRequestData data() { return data; } @Override public AbstractResponse getErrorResponse(int throttleTimeMs, Throwable throwable) { CreateAclsResponseData.AclCreationResult result = CreateAclsRequest.aclResult(throwable); List<CreateAclsResponseData.AclCreationResult> results = Collections.nCopies(data.creations().size(), result); return new CreateAclsResponse(new CreateAclsResponseData() .setThrottleTimeMs(throttleTimeMs) .setResults(results)); } public static CreateAclsRequest parse(Readable readable, short version) { return new CreateAclsRequest(new CreateAclsRequestData(readable, version), version); } private void validate(CreateAclsRequestData data) { if (version() == 0) { final boolean unsupported = data.creations().stream().anyMatch(creation -> creation.resourcePatternType() != PatternType.LITERAL.code()); if (unsupported) throw new UnsupportedVersionException("Version 0 only supports literal resource pattern types"); } final boolean unknown = data.creations().stream().anyMatch(creation -> creation.resourcePatternType() == PatternType.UNKNOWN.code() || creation.resourceType() == ResourceType.UNKNOWN.code() || creation.permissionType() == AclPermissionType.UNKNOWN.code() || creation.operation() == AclOperation.UNKNOWN.code()); if (unknown) throw new IllegalArgumentException("CreatableAcls contain unknown elements: " + 
data.creations()); } public static AclBinding aclBinding(AclCreation acl) { ResourcePattern pattern = new ResourcePattern( ResourceType.fromCode(acl.resourceType()), acl.resourceName(), PatternType.fromCode(acl.resourcePatternType())); AccessControlEntry entry = new AccessControlEntry( acl.principal(), acl.host(), AclOperation.fromCode(acl.operation()), AclPermissionType.fromCode(acl.permissionType())); return new AclBinding(pattern, entry); } public static AclCreation aclCreation(AclBinding binding) { return new AclCreation() .setHost(binding.entry().host()) .setOperation(binding.entry().operation().code()) .setPermissionType(binding.entry().permissionType().code()) .setPrincipal(binding.entry().principal()) .setResourceName(binding.pattern().name()) .setResourceType(binding.pattern().resourceType().code()) .setResourcePatternType(binding.pattern().patternType().code()); } private static AclCreationResult aclResult(Throwable throwable) { ApiError apiError = ApiError.fromThrowable(throwable); return new AclCreationResult() .setErrorCode(apiError.error().code()) .setErrorMessage(apiError.message()); } }
Builder
java
quarkusio__quarkus
integration-tests/maven/src/test/resources-filtered/projects/reactive-routes/src/main/java/org/acme/reactive/routes/MyDeclarativeRoutes.java
{ "start": 268, "end": 1041 }
class ____ { @Route(path = "/", methods = HttpMethod.GET) public void handle(RoutingContext rc) { rc.response().end("hello"); } @Route(path = "/hello", methods = HttpMethod.GET) public void greetings(RoutingContext rc) { String name = rc.request().getParam("name"); if (name == null) { name = "world"; } rc.response().end("hello " + name); } @Route(path = "/netty-version", methods = HttpMethod.GET) public void nettyVersion(RoutingContext rc) { rc.response().end(Version.identify().containsKey("netty-common") + ";" + Version.identify().containsKey("netty-handler") + ";" + Version.identify().containsKey("netty-codec")); } }
MyDeclarativeRoutes
java
apache__maven
impl/maven-impl/src/main/java/org/apache/maven/impl/DefaultSettingsBuilder.java
{ "start": 16003, "end": 16949 }
class ____ implements SettingsBuilderResult { private final SettingsBuilderRequest request; private final Settings effectiveSettings; private final ProblemCollector<BuilderProblem> problems; DefaultSettingsBuilderResult( SettingsBuilderRequest request, Settings effectiveSettings, ProblemCollector<BuilderProblem> problems) { this.request = request; this.effectiveSettings = effectiveSettings; this.problems = (problems != null) ? problems : ProblemCollector.empty(); } @Override public SettingsBuilderRequest getRequest() { return request; } @Override public Settings getEffectiveSettings() { return effectiveSettings; } @Override public ProblemCollector<BuilderProblem> getProblems() { return problems; } } }
DefaultSettingsBuilderResult
java
spring-projects__spring-boot
module/spring-boot-micrometer-metrics/src/test/java/org/springframework/boot/micrometer/metrics/autoconfigure/export/prometheus/PrometheusMetricsExportAutoConfigurationTests.java
{ "start": 2562, "end": 12047 }
class ____ { private final ApplicationContextRunner contextRunner = new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(PrometheusMetricsExportAutoConfiguration.class)); @Test void backsOffWithoutAClock() { this.contextRunner.run((context) -> assertThat(context) .doesNotHaveBean(io.micrometer.prometheusmetrics.PrometheusMeterRegistry.class)); } @Test void autoConfiguresItsConfigCollectorRegistryAndMeterRegistry() { this.contextRunner.withUserConfiguration(BaseConfiguration.class) .run((context) -> assertThat(context) .hasSingleBean(io.micrometer.prometheusmetrics.PrometheusMeterRegistry.class) .hasSingleBean(PrometheusRegistry.class) .hasSingleBean(io.micrometer.prometheusmetrics.PrometheusConfig.class)); } @Test void autoConfigurationCanBeDisabledWithDefaultsEnabledProperty() { this.contextRunner.withUserConfiguration(BaseConfiguration.class) .withPropertyValues("management.defaults.metrics.export.enabled=false") .run((context) -> assertThat(context) .doesNotHaveBean(io.micrometer.prometheusmetrics.PrometheusMeterRegistry.class) .doesNotHaveBean(PrometheusRegistry.class) .doesNotHaveBean(io.micrometer.prometheusmetrics.PrometheusConfig.class)); } @Test void autoConfigurationCanBeDisabledWithSpecificEnabledProperty() { this.contextRunner.withUserConfiguration(BaseConfiguration.class) .withPropertyValues("management.prometheus.metrics.export.enabled=false") .run((context) -> assertThat(context) .doesNotHaveBean(io.micrometer.prometheusmetrics.PrometheusMeterRegistry.class) .doesNotHaveBean(PrometheusRegistry.class) .doesNotHaveBean(io.micrometer.prometheusmetrics.PrometheusConfig.class)); } @Test void allowsCustomConfigToBeUsed() { this.contextRunner.withUserConfiguration(CustomConfigConfiguration.class) .run((context) -> assertThat(context) .hasSingleBean(io.micrometer.prometheusmetrics.PrometheusMeterRegistry.class) .hasSingleBean(PrometheusRegistry.class) .hasSingleBean(io.micrometer.prometheusmetrics.PrometheusConfig.class) 
.hasBean("customConfig")); } @Test void allowsCustomRegistryToBeUsed() { this.contextRunner.withUserConfiguration(CustomRegistryConfiguration.class) .run((context) -> assertThat(context) .hasSingleBean(io.micrometer.prometheusmetrics.PrometheusMeterRegistry.class) .hasBean("customRegistry") .hasSingleBean(PrometheusRegistry.class) .hasSingleBean(io.micrometer.prometheusmetrics.PrometheusConfig.class)); } @Test void allowsCustomCollectorRegistryToBeUsed() { this.contextRunner.withUserConfiguration(CustomPrometheusRegistryConfiguration.class) .run((context) -> assertThat(context) .hasSingleBean(io.micrometer.prometheusmetrics.PrometheusMeterRegistry.class) .hasBean("customPrometheusRegistry") .hasSingleBean(PrometheusRegistry.class) .hasSingleBean(io.micrometer.prometheusmetrics.PrometheusConfig.class)); } @Test void autoConfiguresPrometheusMeterRegistryIfSpanContextIsPresent() { this.contextRunner.withUserConfiguration(ExemplarsConfiguration.class) .run((context) -> assertThat(context).hasSingleBean(SpanContext.class) .hasSingleBean(PrometheusMeterRegistry.class)); } @Test void addsScrapeEndpointToManagementContext() { this.contextRunner.withConfiguration(AutoConfigurations.of(ManagementContextAutoConfiguration.class)) .withUserConfiguration(BaseConfiguration.class) .withPropertyValues("management.endpoints.web.exposure.include=prometheus") .run((context) -> assertThat(context).hasSingleBean(PrometheusScrapeEndpoint.class)); } @Test void scrapeEndpointNotAddedToManagementContextWhenNotExposed() { this.contextRunner.withConfiguration(AutoConfigurations.of(ManagementContextAutoConfiguration.class)) .withUserConfiguration(BaseConfiguration.class) .run((context) -> assertThat(context).doesNotHaveBean(PrometheusScrapeEndpoint.class)); } @Test void scrapeEndpointCanBeDisabled() { this.contextRunner.withConfiguration(AutoConfigurations.of(ManagementContextAutoConfiguration.class)) .withPropertyValues("management.endpoints.web.exposure.include=prometheus", 
"management.endpoint.prometheus.enabled=false") .withUserConfiguration(BaseConfiguration.class) .run((context) -> assertThat(context).doesNotHaveBean(PrometheusScrapeEndpoint.class)); } @Test void allowsCustomScrapeEndpointToBeUsed() { this.contextRunner.withConfiguration(AutoConfigurations.of(ManagementContextAutoConfiguration.class)) .withUserConfiguration(CustomEndpointConfiguration.class) .run((context) -> assertThat(context).hasBean("customEndpoint") .hasSingleBean(PrometheusScrapeEndpoint.class)); } @Test void pushGatewayIsNotConfiguredWhenEnabledFlagIsNotSet() { this.contextRunner.withUserConfiguration(BaseConfiguration.class) .run((context) -> assertThat(context).doesNotHaveBean(PrometheusPushGatewayManager.class)); } @Test @ExtendWith(OutputCaptureExtension.class) void withPushGatewayEnabled(CapturedOutput output) { this.contextRunner.withPropertyValues("management.prometheus.metrics.export.pushgateway.enabled=true") .withUserConfiguration(BaseConfiguration.class) .run((context) -> { assertThat(output).doesNotContain("Invalid PushGateway base url"); hasGatewayUrl(context, "http://localhost:9091/metrics/job/spring"); assertThat(getPushGateway(context)).extracting("connectionFactory") .isInstanceOf(DefaultHttpConnectionFactory.class); }); } @Test void withPushGatewayDisabled() { this.contextRunner.withPropertyValues("management.prometheus.metrics.export.pushgateway.enabled=false") .withUserConfiguration(BaseConfiguration.class) .run((context) -> assertThat(context).doesNotHaveBean(PrometheusPushGatewayManager.class)); } @Test void withCustomPushGatewayAddress() { this.contextRunner .withPropertyValues("management.prometheus.metrics.export.pushgateway.enabled=true", "management.prometheus.metrics.export.pushgateway.address=localhost:8080") .withUserConfiguration(BaseConfiguration.class) .run((context) -> hasGatewayUrl(context, "http://localhost:8080/metrics/job/spring")); } @Test void withCustomScheme() { this.contextRunner 
.withPropertyValues("management.prometheus.metrics.export.pushgateway.enabled=true", "management.prometheus.metrics.export.pushgateway.scheme=https") .withUserConfiguration(BaseConfiguration.class) .run((context) -> hasGatewayUrl(context, "https://localhost:9091/metrics/job/spring")); } @Test void withCustomFormat() { this.contextRunner .withPropertyValues("management.prometheus.metrics.export.pushgateway.enabled=true", "management.prometheus.metrics.export.pushgateway.format=text") .withUserConfiguration(BaseConfiguration.class) .run((context) -> assertThat(getPushGateway(context)).extracting("writer") .isInstanceOf(PrometheusTextFormatWriter.class)); } @Test void withPushGatewayBasicAuth() { this.contextRunner .withPropertyValues("management.prometheus.metrics.export.pushgateway.enabled=true", "management.prometheus.metrics.export.pushgateway.username=admin", "management.prometheus.metrics.export.pushgateway.password=secret") .withUserConfiguration(BaseConfiguration.class) .run((context) -> assertThat(getPushGateway(context)) .extracting("requestHeaders", InstanceOfAssertFactories.map(String.class, String.class)) .satisfies((headers) -> assertThat(headers.get("Authorization")).startsWith("Basic "))); } @Test void withPushGatewayBearerToken() { this.contextRunner .withPropertyValues("management.prometheus.metrics.export.pushgateway.enabled=true", "management.prometheus.metrics.export.pushgateway.token=a1b2c3d4") .withUserConfiguration(BaseConfiguration.class) .run((context) -> assertThat(getPushGateway(context)) .extracting("requestHeaders", InstanceOfAssertFactories.map(String.class, String.class)) .satisfies((headers) -> assertThat(headers.get("Authorization")).startsWith("Bearer "))); } @Test void failsFastWithBothBearerAndBasicAuthentication() { this.contextRunner .withPropertyValues("management.prometheus.metrics.export.pushgateway.enabled=true", "management.prometheus.metrics.export.pushgateway.username=alice", 
"management.prometheus.metrics.export.pushgateway.token=a1b2c3d4") .withUserConfiguration(BaseConfiguration.class) .run((context) -> assertThat(context).getFailure() .hasRootCauseInstanceOf(MutuallyExclusiveConfigurationPropertiesException.class) .hasMessageContainingAll("management.prometheus.metrics.export.pushgateway.username", "management.prometheus.metrics.export.pushgateway.token")); } private void hasGatewayUrl(AssertableApplicationContext context, String url) { try { assertThat(getPushGateway(context)).hasFieldOrPropertyWithValue("url", URI.create(url).toURL()); } catch (MalformedURLException ex) { throw new RuntimeException(ex); } } private PushGateway getPushGateway(AssertableApplicationContext context) { assertThat(context).hasSingleBean(PrometheusPushGatewayManager.class); PrometheusPushGatewayManager gatewayManager = context.getBean(PrometheusPushGatewayManager.class); Object field = ReflectionTestUtils.getField(gatewayManager, "pushGateway"); assertThat(field).isNotNull(); return (PushGateway) field; } @Configuration(proxyBeanMethods = false) static
PrometheusMetricsExportAutoConfigurationTests
java
netty__netty
handler/src/test/java/io/netty/handler/timeout/IdleStateHandlerTest.java
{ "start": 1416, "end": 14709 }
class ____ { @Test public void testReaderIdle() throws Exception { IdleStateHandler idleStateHandler = new IdleStateHandler( false, 1L, 0L, 0L, TimeUnit.SECONDS); // We start with one FIRST_READER_IDLE_STATE_EVENT, followed by an infinite number of READER_IDLE_STATE_EVENTs anyIdle(idleStateHandler, IdleStateEvent.FIRST_READER_IDLE_STATE_EVENT, IdleStateEvent.READER_IDLE_STATE_EVENT, IdleStateEvent.READER_IDLE_STATE_EVENT); } @Test public void testWriterIdle() throws Exception { IdleStateHandler idleStateHandler = new IdleStateHandler( false, 0L, 1L, 0L, TimeUnit.SECONDS); anyIdle(idleStateHandler, IdleStateEvent.FIRST_WRITER_IDLE_STATE_EVENT, IdleStateEvent.WRITER_IDLE_STATE_EVENT, IdleStateEvent.WRITER_IDLE_STATE_EVENT); } @Test public void testAllIdle() throws Exception { IdleStateHandler idleStateHandler = new IdleStateHandler( false, 0L, 0L, 1L, TimeUnit.SECONDS); anyIdle(idleStateHandler, IdleStateEvent.FIRST_ALL_IDLE_STATE_EVENT, IdleStateEvent.ALL_IDLE_STATE_EVENT, IdleStateEvent.ALL_IDLE_STATE_EVENT); } private static void anyIdle(IdleStateHandler idleStateHandler, Object... expected) throws Exception { assertThat(expected.length).isGreaterThanOrEqualTo(1); final List<Object> events = new ArrayList<Object>(); ChannelInboundHandlerAdapter handler = new ChannelInboundHandlerAdapter() { @Override public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception { events.add(evt); } }; EmbeddedChannel channel = new EmbeddedChannel(idleStateHandler, handler); channel.freezeTime(); try { // For each expected event advance the ticker and run() the task. Each // step should yield in an IdleStateEvent because we haven't written // or read anything from the channel. 
for (int i = 0; i < expected.length; i++) { channel.advanceTimeBy(1, TimeUnit.SECONDS); channel.runPendingTasks(); } assertEquals(expected.length, events.size()); // Compare the expected with the actual IdleStateEvents for (int i = 0; i < expected.length; i++) { Object evt = events.get(i); assertSame(expected[i], evt, "Element " + i + " is not matching"); } } finally { channel.finishAndReleaseAll(); } } @Test public void testResetReader() throws Exception { final IdleStateHandler idleStateHandler = new IdleStateHandler( false, 1L, 0L, 0L, TimeUnit.SECONDS); Action action = new Action() { @Override public void run(EmbeddedChannel channel) throws Exception { idleStateHandler.resetReadTimeout(); } }; anyNotIdle(idleStateHandler, action, IdleStateEvent.FIRST_READER_IDLE_STATE_EVENT); } @Test public void testResetWriter() throws Exception { final IdleStateHandler idleStateHandler = new IdleStateHandler( false, 0L, 1L, 0L, TimeUnit.SECONDS); Action action = new Action() { @Override public void run(EmbeddedChannel channel) throws Exception { idleStateHandler.resetWriteTimeout(); } }; anyNotIdle(idleStateHandler, action, IdleStateEvent.FIRST_WRITER_IDLE_STATE_EVENT); } @Test public void testReaderNotIdle() throws Exception { IdleStateHandler idleStateHandler = new IdleStateHandler( false, 1L, 0L, 0L, TimeUnit.SECONDS); Action action = new Action() { @Override public void run(EmbeddedChannel channel) throws Exception { channel.writeInbound("Hello, World!"); } }; anyNotIdle(idleStateHandler, action, IdleStateEvent.FIRST_READER_IDLE_STATE_EVENT); } @Test public void testWriterNotIdle() throws Exception { IdleStateHandler idleStateHandler = new IdleStateHandler( false, 0L, 1L, 0L, TimeUnit.SECONDS); Action action = new Action() { @Override public void run(EmbeddedChannel channel) throws Exception { channel.writeAndFlush("Hello, World!"); } }; anyNotIdle(idleStateHandler, action, IdleStateEvent.FIRST_WRITER_IDLE_STATE_EVENT); } @Test public void testAllNotIdle() throws 
Exception { // Reader... IdleStateHandler idleStateHandler = new IdleStateHandler( false, 0L, 0L, 1L, TimeUnit.SECONDS); Action reader = new Action() { @Override public void run(EmbeddedChannel channel) throws Exception { channel.writeInbound("Hello, World!"); } }; anyNotIdle(idleStateHandler, reader, IdleStateEvent.FIRST_ALL_IDLE_STATE_EVENT); // Writer... idleStateHandler = new IdleStateHandler( false, 0L, 0L, 1L, TimeUnit.SECONDS); Action writer = new Action() { @Override public void run(EmbeddedChannel channel) throws Exception { channel.writeAndFlush("Hello, World!"); } }; anyNotIdle(idleStateHandler, writer, IdleStateEvent.FIRST_ALL_IDLE_STATE_EVENT); } private static void anyNotIdle(IdleStateHandler idleStateHandler, Action action, Object expected) throws Exception { final List<Object> events = new ArrayList<Object>(); ChannelInboundHandlerAdapter handler = new ChannelInboundHandlerAdapter() { @Override public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception { events.add(evt); } }; EmbeddedChannel channel = new EmbeddedChannel(idleStateHandler, handler); channel.freezeTime(); try { long delayInNanos = TimeUnit.SECONDS.toNanos(1); channel.advanceTimeBy(delayInNanos / 2L + 1L, TimeUnit.NANOSECONDS); action.run(channel); // Advance the ticker by some fraction. // There shouldn't be an IdleStateEvent getting fired because // we've just performed an action on the channel that is meant // to reset the idle task. channel.advanceTimeBy(delayInNanos / 2L, TimeUnit.NANOSECONDS); channel.runPendingTasks(); assertEquals(0, events.size()); // Advance the ticker by the full amount and it should yield // in an IdleStateEvent. 
channel.advanceTimeBy(delayInNanos, TimeUnit.NANOSECONDS); channel.runPendingTasks(); assertEquals(1, events.size()); assertSame(expected, events.get(0)); } finally { channel.finishAndReleaseAll(); } } @Test public void testObserveWriterIdle() throws Exception { observeOutputIdle(true); } @Test public void testObserveAllIdle() throws Exception { observeOutputIdle(false); } private static void observeOutputIdle(boolean writer) throws Exception { long writerIdleTime = 0L; long allIdleTime = 0L; IdleStateEvent expected; if (writer) { writerIdleTime = 5L; expected = IdleStateEvent.FIRST_WRITER_IDLE_STATE_EVENT; } else { allIdleTime = 5L; expected = IdleStateEvent.FIRST_ALL_IDLE_STATE_EVENT; } IdleStateHandler idleStateHandler = new IdleStateHandler( true, 0L, writerIdleTime, allIdleTime, TimeUnit.SECONDS); final List<Object> events = new ArrayList<Object>(); ChannelInboundHandlerAdapter handler = new ChannelInboundHandlerAdapter() { @Override public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception { events.add(evt); } }; ObservableChannel channel = new ObservableChannel(idleStateHandler, handler); channel.freezeTime(); try { // We're writing 3 messages that will be consumed at different rates! channel.writeAndFlush(Unpooled.wrappedBuffer(new byte[] { 1 })); channel.writeAndFlush(Unpooled.wrappedBuffer(new byte[] { 2 })); channel.writeAndFlush(Unpooled.wrappedBuffer(new byte[] { 3 })); channel.writeAndFlush(Unpooled.wrappedBuffer(new byte[5 * 1024])); // Establish a baseline. We're not consuming anything and let it idle once. channel.advanceTimeBy(5, TimeUnit.SECONDS); channel.runPendingTasks(); assertEquals(1, events.size()); assertSame(expected, events.get(0)); events.clear(); // Consume one message in 4 seconds, then be idle for 2 seconds, // then run the task and we shouldn't get an IdleStateEvent because // we haven't been idle for long enough! 
channel.advanceTimeBy(4, TimeUnit.SECONDS); channel.runPendingTasks(); assertNotNullAndRelease(channel.consume()); channel.advanceTimeBy(2, TimeUnit.SECONDS); channel.runPendingTasks(); assertEquals(0, events.size()); // Consume one message in 3 seconds, then be idle for 4 seconds, // then run the task and we shouldn't get an IdleStateEvent because // we haven't been idle for long enough! channel.advanceTimeBy(2, TimeUnit.SECONDS); channel.runPendingTasks(); assertNotNullAndRelease(channel.consume()); channel.advanceTimeBy(4, TimeUnit.SECONDS); channel.runPendingTasks(); assertEquals(0, events.size()); // Don't consume a message and be idle for 5 seconds. // We should get an IdleStateEvent! channel.advanceTimeBy(5, TimeUnit.SECONDS); channel.runPendingTasks(); assertEquals(1, events.size()); events.clear(); // Consume one message in 2 seconds, then be idle for 1 seconds, // then run the task and we shouldn't get an IdleStateEvent because // we haven't been idle for long enough! channel.advanceTimeBy(2, TimeUnit.SECONDS); channel.runPendingTasks(); assertNotNullAndRelease(channel.consume()); channel.advanceTimeBy(1, TimeUnit.SECONDS); channel.runPendingTasks(); assertEquals(0, events.size()); // Consume part of the message every 2 seconds, then be idle for 1 seconds, // then run the task and we should get an IdleStateEvent because the first trigger channel.advanceTimeBy(2, TimeUnit.SECONDS); channel.runPendingTasks(); assertNotNullAndRelease(channel.consumePart(1024)); channel.advanceTimeBy(2, TimeUnit.SECONDS); channel.runPendingTasks(); assertNotNullAndRelease(channel.consumePart(1024)); channel.advanceTimeBy(1, TimeUnit.SECONDS); channel.runPendingTasks(); assertEquals(1, events.size()); events.clear(); // Consume part of the message every 2 seconds, then be idle for 1 seconds, // then consume all the rest of the message, then run the task and we shouldn't // get an IdleStateEvent because the data is flowing and we haven't been idle for long enough! 
channel.advanceTimeBy(2, TimeUnit.SECONDS); channel.runPendingTasks(); assertNotNullAndRelease(channel.consumePart(1024)); channel.advanceTimeBy(2, TimeUnit.SECONDS); channel.runPendingTasks(); assertNotNullAndRelease(channel.consumePart(1024)); channel.advanceTimeBy(1, TimeUnit.SECONDS); channel.runPendingTasks(); assertEquals(0, events.size()); channel.advanceTimeBy(2, TimeUnit.SECONDS); channel.runPendingTasks(); assertNotNullAndRelease(channel.consumePart(1024)); // There are no messages left! Advance the ticker by 3 seconds, // attempt a consume() but it will be null, then advance the // ticker by an another 2 seconds and we should get an IdleStateEvent // because we've been idle for 5 seconds. channel.advanceTimeBy(3, TimeUnit.SECONDS); channel.runPendingTasks(); assertNull(channel.consume()); channel.advanceTimeBy(2, TimeUnit.SECONDS); channel.runPendingTasks(); assertEquals(1, events.size()); // q.e.d. } finally { channel.finishAndReleaseAll(); } } private static void assertNotNullAndRelease(Object msg) { assertNotNull(msg); ReferenceCountUtil.release(msg); } private
IdleStateHandlerTest
java
spring-projects__spring-framework
spring-core/src/test/java/org/springframework/aot/hint/TypeHintTests.java
{ "start": 1055, "end": 7098 }
class ____ { @Test void createWithNullTypeReference() { assertThatIllegalArgumentException().isThrownBy(() -> TypeHint.of(null)); } @Test void createWithType() { TypeHint hint = TypeHint.of(TypeReference.of(String.class)).build(); assertThat(hint).isNotNull(); assertThat(hint.getType().getCanonicalName()).isEqualTo("java.lang.String"); } @Test void createWithTypeAndReachableType() { TypeHint hint = TypeHint.of(TypeReference.of(String.class)) .onReachableType(TypeReference.of("com.example.Test")).build(); assertThat(hint).isNotNull(); assertThat(hint.getReachableType()).isNotNull(); assertThat(hint.getReachableType().getCanonicalName()).isEqualTo("com.example.Test"); } @Test void createWithField() { assertFieldHint(TypeHint.of(TypeReference.of(String.class)) .withField("value"), fieldHint -> assertThat(fieldHint.getName()).isEqualTo("value")); } void assertFieldHint(Builder builder, Consumer<FieldHint> fieldHint) { TypeHint hint = builder.build(); assertThat(hint.fields()).singleElement().satisfies(fieldHint); assertThat(hint.constructors()).isEmpty(); assertThat(hint.methods()).isEmpty(); assertThat(hint.getMemberCategories()).isEmpty(); } @Test void createWithConstructor() { List<TypeReference> parameterTypes = TypeReference.listOf(byte[].class, int.class); assertConstructorHint(TypeHint.of(TypeReference.of(String.class)) .withConstructor(parameterTypes, ExecutableMode.INTROSPECT), constructorHint -> { assertThat(constructorHint.getParameterTypes()).containsOnlyOnceElementsOf(parameterTypes); assertThat(constructorHint.getMode()).isEqualTo(ExecutableMode.INTROSPECT); }); } @Test void createWithConstructorWithSameConstructorUpdatesEntry() { List<TypeReference> parameterTypes = TypeReference.listOf(byte[].class, int.class); Builder builder = TypeHint.of(TypeReference.of(String.class)) .withConstructor(parameterTypes, ExecutableMode.INTROSPECT); assertConstructorHint(builder.withConstructor(parameterTypes, ExecutableMode.INVOKE), constructorHint -> { 
assertThat(constructorHint.getParameterTypes()).containsExactlyElementsOf(parameterTypes); assertThat(constructorHint.getMode()).isEqualTo(ExecutableMode.INVOKE); }); } @Test void createWithConstructorAndSameConstructorAppliesExecutableModePrecedence() { List<TypeReference> parameterTypes = TypeReference.listOf(byte[].class, int.class); Builder builder = TypeHint.of(TypeReference.of(String.class)) .withConstructor(parameterTypes, ExecutableMode.INVOKE); assertConstructorHint(builder.withConstructor(parameterTypes, ExecutableMode.INTROSPECT), constructorHint -> { assertThat(constructorHint.getParameterTypes()).containsExactlyElementsOf(parameterTypes); assertThat(constructorHint.getMode()).isEqualTo(ExecutableMode.INVOKE); }); } void assertConstructorHint(Builder builder, Consumer<ExecutableHint> constructorHint) { TypeHint hint = builder.build(); assertThat(hint.fields()).isEmpty(); assertThat(hint.constructors()).singleElement().satisfies(constructorHint); assertThat(hint.methods()).isEmpty(); assertThat(hint.getMemberCategories()).isEmpty(); } @Test void createWithMethod() { List<TypeReference> parameterTypes = List.of(TypeReference.of(char[].class)); assertMethodHint(TypeHint.of(TypeReference.of(String.class)) .withMethod("valueOf", parameterTypes, ExecutableMode.INTROSPECT), methodHint -> { assertThat(methodHint.getName()).isEqualTo("valueOf"); assertThat(methodHint.getParameterTypes()).containsExactlyElementsOf(parameterTypes); assertThat(methodHint.getMode()).isEqualTo(ExecutableMode.INTROSPECT); }); } @Test void createWithMethodWithSameMethodUpdatesEntry() { List<TypeReference> parameterTypes = TypeReference.listOf(char[].class); Builder builder = TypeHint.of(TypeReference.of(String.class)) .withMethod("valueOf", parameterTypes, ExecutableMode.INTROSPECT); assertMethodHint(builder.withMethod("valueOf", parameterTypes, ExecutableMode.INVOKE), methodHint -> { assertThat(methodHint.getName()).isEqualTo("valueOf"); 
assertThat(methodHint.getParameterTypes()).containsExactlyElementsOf(parameterTypes); assertThat(methodHint.getMode()).isEqualTo(ExecutableMode.INVOKE); }); } @Test void createWithMethodAndSameMethodAppliesExecutableModePrecedence() { List<TypeReference> parameterTypes = TypeReference.listOf(char[].class); Builder builder = TypeHint.of(TypeReference.of(String.class)) .withMethod("valueOf", parameterTypes, ExecutableMode.INVOKE); assertMethodHint(builder.withMethod("valueOf", parameterTypes, ExecutableMode.INTROSPECT), methodHint -> { assertThat(methodHint.getName()).isEqualTo("valueOf"); assertThat(methodHint.getParameterTypes()).containsExactlyElementsOf(parameterTypes); assertThat(methodHint.getMode()).isEqualTo(ExecutableMode.INVOKE); }); } void assertMethodHint(Builder builder, Consumer<ExecutableHint> methodHint) { TypeHint hint = builder.build(); assertThat(hint.fields()).isEmpty(); assertThat(hint.constructors()).isEmpty(); assertThat(hint.methods()).singleElement().satisfies(methodHint); assertThat(hint.getMemberCategories()).isEmpty(); } @Test void createWithMemberCategory() { TypeHint hint = TypeHint.of(TypeReference.of(String.class)) .withMembers(MemberCategory.DECLARED_FIELDS).build(); assertThat(hint.getMemberCategories()).containsOnly(MemberCategory.DECLARED_FIELDS); } @Test void typeHintHasAppropriateToString() { TypeHint hint = TypeHint.of(TypeReference.of(String.class)).build(); assertThat(hint).hasToString("TypeHint[type=java.lang.String]"); } @Test void builtWithAppliesMemberCategories() { TypeHint.Builder builder = new TypeHint.Builder(TypeReference.of(String.class)); assertThat(builder.build().getMemberCategories()).isEmpty(); TypeHint.builtWith(MemberCategory.DECLARED_FIELDS).accept(builder); assertThat(builder.build().getMemberCategories()).containsExactly(MemberCategory.DECLARED_FIELDS); } }
TypeHintTests
java
grpc__grpc-java
s2a/src/main/java/io/grpc/s2a/internal/handshaker/S2AProtocolNegotiatorFactory.java
{ "start": 3461, "end": 4300 }
class ____ implements InternalProtocolNegotiator.ClientFactory { private final @Nullable S2AIdentity localIdentity; private final ObjectPool<Channel> channelPool; private final @Nullable S2AStub stub; S2AClientProtocolNegotiatorFactory( @Nullable S2AIdentity localIdentity, ObjectPool<Channel> channelPool, @Nullable S2AStub stub) { this.localIdentity = localIdentity; this.channelPool = channelPool; this.stub = stub; } @Override public ProtocolNegotiator newNegotiator() { return S2AProtocolNegotiator.createForClient(channelPool, localIdentity, stub); } @Override public int getDefaultPort() { return DEFAULT_PORT; } } /** Negotiates the TLS handshake using S2A. */ @VisibleForTesting static final
S2AClientProtocolNegotiatorFactory
java
quarkusio__quarkus
core/deployment/src/main/java/io/quarkus/deployment/dev/ClassComparisonUtil.java
{ "start": 583, "end": 7802 }
class ____ { private static final Set<DotName> IGNORED_ANNOTATIONS = Set.of( DotName.createSimple("kotlin.jvm.internal.SourceDebugExtension"), DotName.createSimple("kotlin.Metadata")); static boolean isSameStructure(ClassInfo clazz, ClassInfo old) { if (clazz.flags() != old.flags()) { return false; } if (!clazz.typeParameters().equals(old.typeParameters())) { return false; } if (!clazz.interfaceNames().equals(old.interfaceNames())) { return false; } if (!compareAnnotations(clazz.declaredAnnotations(), old.declaredAnnotations())) { return false; } if (old.fields().size() != clazz.fields().size()) { return false; } Map<String, FieldInfo> oldFields = old.fields().stream() .collect(Collectors.toMap(FieldInfo::name, Function.identity())); for (FieldInfo field : clazz.fields()) { FieldInfo of = oldFields.get(field.name()); if (of == null) { return false; } if (of.flags() != field.flags()) { return false; } if (!of.type().equals(field.type())) { return false; } if (!compareAnnotations(of.annotations(), field.annotations())) { return false; } } List<MethodInfo> methods = clazz.methods(); List<MethodInfo> oldMethods = old.methods(); if (methods.size() != oldMethods.size()) { return false; } for (MethodInfo method : methods) { MethodInfo om = null; for (MethodInfo i : oldMethods) { if (!i.name().equals(method.name())) { continue; } if (!i.returnType().equals(method.returnType())) { continue; } if (i.parametersCount() != method.parametersCount()) { continue; } if (i.flags() != method.flags()) { continue; } if (!Objects.equals(i.defaultValue(), method.defaultValue())) { continue; } boolean paramEqual = true; for (int j = 0; j < method.parametersCount(); ++j) { Type a = method.parameterType(j); Type b = i.parameterType(j); if (!a.equals(b)) { paramEqual = false; break; } } if (!paramEqual) { continue; } if (!compareMethodAnnotations(i.annotations(), method.annotations())) { continue; } om = i; } //no further checks needed, we fully matched in the loop if (om == null) { return 
false; } } return true; } static boolean compareAnnotations(Collection<AnnotationInstance> a, Collection<AnnotationInstance> b) { if (a.size() != b.size()) { return false; } Map<DotName, AnnotationInstance> lookup = b.stream() .collect(Collectors.toMap(AnnotationInstance::name, Function.identity())); for (AnnotationInstance i1 : a) { AnnotationInstance i2 = lookup.get(i1.name()); if (i2 == null) { return false; } if (!compareAnnotation(i1, i2)) { return false; } } return true; } static boolean compareMethodAnnotations(Collection<AnnotationInstance> a, Collection<AnnotationInstance> b) { if (a.size() != b.size()) { return false; } List<AnnotationInstance> method1 = new ArrayList<>(); Map<Integer, List<AnnotationInstance>> params1 = new HashMap<>(); Map<Integer, List<AnnotationInstance>> paramTypes1 = new HashMap<>(); methodMap(a, method1, params1, paramTypes1); List<AnnotationInstance> method2 = new ArrayList<>(); Map<Integer, List<AnnotationInstance>> params2 = new HashMap<>(); Map<Integer, List<AnnotationInstance>> paramTypes2 = new HashMap<>(); methodMap(b, method2, params2, paramTypes2); if (!compareAnnotations(method1, method2)) { return false; } if (!params1.keySet().equals(params2.keySet())) { return false; } for (Map.Entry<Integer, List<AnnotationInstance>> entry : params1.entrySet()) { List<AnnotationInstance> other = params2.get(entry.getKey()); if (!compareAnnotations(other, entry.getValue())) { return false; } } for (Map.Entry<Integer, List<AnnotationInstance>> entry : paramTypes1.entrySet()) { List<AnnotationInstance> other = paramTypes2.get(entry.getKey()); if (!compareAnnotations(other, entry.getValue())) { return false; } } return true; } private static void methodMap(Collection<AnnotationInstance> b, List<AnnotationInstance> method2, Map<Integer, List<AnnotationInstance>> params2, Map<Integer, List<AnnotationInstance>> paramTypes2) { for (AnnotationInstance i : b) { int index; switch (i.target().kind()) { case METHOD: method2.add(i); break; case 
METHOD_PARAMETER: index = i.target().asMethodParameter().position(); params2.computeIfAbsent(index, k -> new ArrayList<>()).add(i); break; case TYPE: TypeTarget.Usage usage = i.target().asType().usage(); if (usage == TypeTarget.Usage.METHOD_PARAMETER) { index = i.target().asType().asMethodParameterType().position(); paramTypes2.computeIfAbsent(index, k -> new ArrayList<>()).add(i); } else { throw new IllegalArgumentException("Unsupported type annotation usage: " + usage); } break; default: throw new IllegalArgumentException("Unsupported annotation target kind: " + i.target().kind()); } } } private static boolean compareAnnotation(AnnotationInstance a, AnnotationInstance b) { if (IGNORED_ANNOTATIONS.contains(a.name())) { return true; } List<AnnotationValue> valuesA = a.values(); List<AnnotationValue> valuesB = b.values(); if (valuesA.size() != valuesB.size()) { return false; } for (AnnotationValue valueA : valuesA) { AnnotationValue valueB = b.value(valueA.name()); if (!valueA.equals(valueB)) { return false; } } return true; } }
ClassComparisonUtil
java
apache__camel
components/camel-telegram/src/test/java/org/apache/camel/component/telegram/TelegramConsumerMediaGameTest.java
{ "start": 1626, "end": 4040 }
class ____ extends TelegramTestSupport { @EndpointInject("mock:telegram") private MockEndpoint endpoint; @Test public void testReceptionOfAMessageWithAGame() throws Exception { endpoint.expectedMinimumMessageCount(1); endpoint.assertIsSatisfied(5000); Exchange mediaExchange = endpoint.getExchanges().get(0); IncomingMessage msg = mediaExchange.getIn().getBody(IncomingMessage.class); IncomingGame game = msg.getGame(); InlineKeyboardMarkup inlineKeyboardMarkup = (InlineKeyboardMarkup) msg.getReplyMarkup(); Assertions.assertNotNull(game); Assertions.assertEquals("test_game", game.getTitle()); Assertions.assertEquals("test", game.getDescription()); Assertions.assertEquals("game text", game.getText()); Assertions.assertEquals(1, game.getPhoto().size()); Assertions.assertEquals("AgADBAADnrAxG1rhiVAsV1IghUpUwn4eqhsABAEAAwIAA20AA32sBQABFgQ", game.getPhoto().get(0).getFileId()); Assertions.assertEquals(2469, game.getPhoto().get(0).getFileSize()); Assertions.assertEquals(180, game.getPhoto().get(0).getHeight()); Assertions.assertEquals(320, game.getPhoto().get(0).getWidth()); Assertions.assertEquals("AQADfh6qGwAEfawFAAE", game.getPhoto().get(0).getFileUniqueId()); Assertions.assertNotNull(inlineKeyboardMarkup); Assertions.assertEquals("Play test_game", inlineKeyboardMarkup.getInlineKeyboard().get(0).get(0).getText()); } @Override protected RoutesBuilder[] createRouteBuilders() { return new RoutesBuilder[] { getMockRoutes(), new RouteBuilder() { @Override public void configure() { from("telegram:bots?authorizationToken=mock-token") .to("mock:telegram"); } } }; } @Override protected TelegramMockRoutes createMockRoutes() { return new TelegramMockRoutes(port) .addEndpoint( "getUpdates", "GET", String.class, TelegramTestUtil.stringResource("messages/updates-media-game.json"), TelegramTestUtil.stringResource("messages/updates-empty.json")); } }
TelegramConsumerMediaGameTest
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java
{ "start": 13879, "end": 16084 }
class ____ implements Writeable { protected long startTime = 0; protected long startNanoTime = 0; protected long time = -1; protected long stopTime = 0; public Timer() {} public Timer(StreamInput in) throws IOException { startTime = in.readVLong(); startNanoTime = in.readVLong(); stopTime = in.readVLong(); time = in.readVLong(); } @Override public synchronized void writeTo(StreamOutput out) throws IOException { out.writeVLong(startTime); out.writeVLong(startNanoTime); out.writeVLong(stopTime); // write a snapshot of current time, which is not per se the time field out.writeVLong(time()); } public synchronized void start() { assert startTime == 0 : "already started"; startTime = System.currentTimeMillis(); startNanoTime = System.nanoTime(); } /** Returns start time in millis */ public synchronized long startTime() { return startTime; } /** Returns elapsed time in millis, or 0 if timer was not started */ public synchronized long time() { if (startNanoTime == 0) { return 0; } if (time >= 0) { return time; } return Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startNanoTime)); } /** Returns stop time in millis */ public synchronized long stopTime() { return stopTime; } public synchronized void stop() { assert stopTime == 0 : "already stopped"; stopTime = Math.max(System.currentTimeMillis(), startTime); time = TimeValue.nsecToMSec(System.nanoTime() - startNanoTime); assert time >= 0; } public synchronized void reset() { startTime = 0; startNanoTime = 0; time = -1; stopTime = 0; } // for tests public long getStartNanoTime() { return startNanoTime; } } public static
Timer
java
mybatis__mybatis-3
src/test/java/org/apache/ibatis/submitted/oracle_cursor/Book2.java
{ "start": 737, "end": 1867 }
class ____ { private Integer id; private String name; private Author author; public Book2() { super(); } public Book2(Integer id, String name, Author author) { super(); this.id = id; this.name = name; this.author = author; } public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public Author getAuthor() { return author; } public void setAuthor(Author author) { this.author = author; } @Override public int hashCode() { return Objects.hash(author, id, name); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!(obj instanceof Book2)) { return false; } Book2 other = (Book2) obj; return Objects.equals(author, other.author) && Objects.equals(id, other.id) && Objects.equals(name, other.name); } @Override public String toString() { return "Book2 [id=" + id + ", name=" + name + ", author=" + author + "]"; } }
Book2
java
alibaba__fastjson
src/test/java/com/alibaba/fastjson/deserializer/issues3796/bean/ObjectO2.java
{ "start": 67, "end": 406 }
class ____ { private int a; private boolean b = true; private int c; public int getA() { return a; } public void setA(int a) { this.a = a; } public boolean isB() { return b; } public void setB(boolean b) { this.b = b; } public int getC() { return c; } public void setC(int c) { this.c = c; } }
ObjectO2
java
spring-projects__spring-boot
module/spring-boot-webclient/src/test/java/org/springframework/boot/webclient/autoconfigure/service/ReactiveHttpServiceClientAutoConfigurationTests.java
{ "start": 11125, "end": 11410 }
class ____ { @Bean WebClientHttpServiceGroupConfigurer restClientHttpServiceGroupConfigurer() { return (groups) -> groups.filterByName("one") .forEachClient((group, builder) -> builder.defaultHeader("customizedgroup", "true")); } }
HttpServiceGroupConfigurerConfiguration
java
lettuce-io__lettuce-core
src/main/java/io/lettuce/core/RedisPublisher.java
{ "start": 16512, "end": 23591 }
enum ____ { /** * The initial unsubscribed state. Will respond to {@link #subscribe(RedisSubscription, Subscriber)} by changing state * to {@link #NO_DEMAND}. */ UNSUBSCRIBED { @SuppressWarnings("unchecked") @Override void subscribe(RedisSubscription<?> subscription, Subscriber<?> subscriber) { LettuceAssert.notNull(subscriber, "Subscriber must not be null"); if (subscription.changeState(this, NO_DEMAND)) { subscription.subscriber = RedisSubscriber.create(subscriber, subscription.executor); subscriber.onSubscribe(subscription); } else { throw new IllegalStateException(toString()); } } }, /** * State that gets entered when there is no demand. Responds to {@link #request(RedisSubscription, long)} * (RedisPublisher, long)} by increasing the demand, changing state to {@link #DEMAND} and will check whether there is * data available for reading. */ NO_DEMAND { @Override void request(RedisSubscription<?> subscription, long n) { if (Operators.request(RedisSubscription.DEMAND, subscription, n)) { if (subscription.changeState(this, DEMAND)) { try { subscription.checkCommandDispatch(); } catch (Exception ex) { subscription.onError(ex); } subscription.checkOnDataAvailable(); } subscription.potentiallyReadMore(); subscription.state().onDataAvailable(subscription); } else { onError(subscription, Exceptions.nullOrNegativeRequestException(n)); } } }, /** * State that gets entered when there is demand. Responds to {@link #onDataAvailable(RedisSubscription)} by reading the * available data. The state will be changed to {@link #NO_DEMAND} if there is no demand. 
*/ DEMAND { @Override void onDataAvailable(RedisSubscription<?> subscription) { try { do { if (!read(subscription)) { return; } } while (subscription.hasDemand()); } catch (Exception e) { subscription.onError(e); } } @Override void request(RedisSubscription<?> subscription, long n) { if (Operators.request(RedisSubscription.DEMAND, subscription, n)) { onDataAvailable(subscription); subscription.potentiallyReadMore(); } else { onError(subscription, Exceptions.nullOrNegativeRequestException(n)); } } /** * @param subscription * @return {@code true} if the {@code read()} call was able to perform a read and whether this method should be * called again to emit remaining data. */ private boolean read(RedisSubscription<?> subscription) { State state = subscription.state(); // concurrency/entry guard if (state == NO_DEMAND || state == DEMAND) { if (!subscription.changeState(state, READING)) { return false; } } else { return false; } subscription.readAndPublish(); if (subscription.allDataRead && subscription.data.isEmpty()) { state.onAllDataRead(subscription); return false; } // concurrency/leave guard subscription.afterRead(); if (subscription.allDataRead || !subscription.data.isEmpty()) { return true; } return false; } }, READING { @Override void request(RedisSubscription<?> subscription, long n) { DEMAND.request(subscription, n); } }, /** * The terminal completed state. Does not respond to any events. 
*/ COMPLETED { @Override void request(RedisSubscription<?> subscription, long n) { // ignore } @Override void cancel(RedisSubscription<?> subscription) { // ignore } @Override void onAllDataRead(RedisSubscription<?> subscription) { // ignore } @Override void onError(RedisSubscription<?> subscription, Throwable t) { // ignore } }; void subscribe(RedisSubscription<?> subscription, Subscriber<?> subscriber) { throw new IllegalStateException(toString()); } void request(RedisSubscription<?> subscription, long n) { throw new IllegalStateException(toString()); } void cancel(RedisSubscription<?> subscription) { subscription.command.cancel(); if (subscription.changeState(this, COMPLETED)) { readData(subscription); } } void readData(RedisSubscription<?> subscription) { DemandAware.Source source = subscription.subscriptionCommand.source; if (source != null) { source.requestMore(); } } void onDataAvailable(RedisSubscription<?> subscription) { // ignore } void onAllDataRead(RedisSubscription<?> subscription) { if (subscription.data.isEmpty() && subscription.complete()) { readData(subscription); Subscriber<?> subscriber = subscription.subscriber; if (subscriber != null) { subscriber.onComplete(); } } } void onError(RedisSubscription<?> subscription, Throwable t) { State state; while ((state = subscription.state()) != COMPLETED && subscription.changeState(state, COMPLETED)) { readData(subscription); Subscriber<?> subscriber = subscription.subscriber; if (subscriber != null) { subscriber.onError(t); return; } } } } /** * Command that emits it data after completion to a {@link RedisSubscription}. * * @param <K> key type * @param <V> value type * @param <T> response type */ static
State
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/compliance/CriteriaDeleteTest.java
{ "start": 934, "end": 1954 }
class ____ { @BeforeEach public void setUp(EntityManagerFactoryScope scope) { scope.inTransaction( entityManager -> { entityManager.persist( new Person( 1, "Andrea", 5 ) ); entityManager.persist( new Person( 2, "Fab", 40 ) ); } ); } @Test public void testModifyingDeleteQueryWhere(EntityManagerFactoryScope scope) { scope.inTransaction( entityManager -> { final CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder(); final CriteriaDelete<Person> criteriaDelete = criteriaBuilder .createCriteriaDelete( Person.class ); final Root<Person> Person = criteriaDelete.from( Person.class ); criteriaDelete.where( criteriaBuilder.lt( Person.get( "age" ), 35 ) ); final Query q = entityManager.createQuery( criteriaDelete ); criteriaDelete.where( criteriaBuilder.lt( Person.get( "age" ), 500 ) ); assertEquals( 1, q.executeUpdate() ); } ); } @Entity(name = "Person") @Table(name = "PERSON_TABLE") public static
CriteriaDeleteTest
java
apache__kafka
raft/src/main/java/org/apache/kafka/raft/internals/RemoveVoterHandlerState.java
{ "start": 1015, "end": 1672 }
class ____ { private final long lastOffset; private final Timer timeout; private final CompletableFuture<RemoveRaftVoterResponseData> future = new CompletableFuture<>(); RemoveVoterHandlerState(long lastOffset, Timer timeout) { this.lastOffset = lastOffset; this.timeout = timeout; } public long timeUntilOperationExpiration(long currentTimeMs) { timeout.update(currentTimeMs); return timeout.remainingMs(); } public CompletableFuture<RemoveRaftVoterResponseData> future() { return future; } public long lastOffset() { return lastOffset; } }
RemoveVoterHandlerState
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/discriminator/associations/OneToManyJoinFetchDiscriminatorTest.java
{ "start": 4017, "end": 4300 }
class ____ extends BodyPart { @ManyToOne(fetch = FetchType.LAZY, optional = false) private Person person; public Arm() { } public Arm(String name, Person person) { this.name = name; this.person = person; } public Person getPerson() { return person; } } }
Arm
java
apache__camel
components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentMixedToolsIT.java
{ "start": 2146, "end": 7540 }
class ____ extends CamelTestSupport { private static final String USER_DATABASE = """ {"id": "123", "name": "John Smith", "membership": "Gold", "rentals": 15, "preferredVehicle": "SUV"} """; private static final String USER_DB_NAME = "John Smith"; private static final String WEATHER_INFO = "sunny"; private static final String CALCULATION_RESULT = "10"; protected ChatModel chatModel; @RegisterExtension static OllamaService OLLAMA = ModelHelper.hasEnvironmentConfiguration() ? null : OllamaServiceFactory.createSingletonService(); @Override protected void setupResources() throws Exception { super.setupResources(); chatModel = OLLAMA != null ? ModelHelper.loadChatModel(OLLAMA) : ModelHelper.loadFromEnv(); } @Test void testAgentWithMixedTools() throws InterruptedException { MockEndpoint mockEndpoint = this.context.getEndpoint("mock:agent-response", MockEndpoint.class); mockEndpoint.expectedMessageCount(1); String response = template.requestBody("direct:mixedTools", "Calculate 7 + 3", String.class); mockEndpoint.assertIsSatisfied(); assertNotNull(response, "AI response should not be null"); assertTrue(response.contains(CALCULATION_RESULT) || response.contains("ten"), "Response should contain the calculation result from the additional calculator tool"); } @Test void testAgentWithMultipleTagsAndAdditionalTools() throws InterruptedException { MockEndpoint mockEndpoint = this.context.getEndpoint("mock:agent-response", MockEndpoint.class); mockEndpoint.expectedMessageCount(1); String response = template.requestBody("direct:mixedTools", "Calculate 15 * 4 and convert 'hello' to uppercase", String.class); mockEndpoint.assertIsSatisfied(); assertNotNull(response, "AI response should not be null"); assertTrue(response.contains("60") || response.contains("sixty"), "Response should contain the multiplication result from additional tools"); assertTrue(response.contains("HELLO"), "Response should contain the uppercase conversion result from additional tools"); } @Test void 
testAgentWithCamelAndAdditionalTools() throws InterruptedException { MockEndpoint mockEndpoint = this.context.getEndpoint("mock:agent-response", MockEndpoint.class); mockEndpoint.expectedMessageCount(1); String response = template.requestBody("direct:mixedTools", "What is the name of user ID 123 and calculate 5 * 6?", String.class); mockEndpoint.assertIsSatisfied(); assertNotNull(response, "AI response should not be null"); assertTrue(response.contains(USER_DB_NAME), "Response should contain the user name from the Camel route tool"); assertTrue(response.contains("30") || response.contains("thirty"), "Response should contain the calculation result from the additional calculator tool"); } @Test void testAgentWithOnlyCamelRouteTools() throws InterruptedException { MockEndpoint mockEndpoint = this.context.getEndpoint("mock:agent-response", MockEndpoint.class); mockEndpoint.expectedMessageCount(1); String response = template.requestBody("direct:mixedTools", "What's the weather in New York?", String.class); mockEndpoint.assertIsSatisfied(); assertNotNull(response, "AI response should not be null"); assertTrue(response.toLowerCase().contains(WEATHER_INFO), "Response should contain weather information from the Camel route tool"); } @Override protected RouteBuilder createRouteBuilder() { // Create LangChain4jtool instances CalculatorTool calculator = new CalculatorTool(); StringTool stringTool = new StringTool(); List<Object> customTools = Arrays.asList(calculator, stringTool); // Create agent configuration with custom tools AgentConfiguration config = new AgentConfiguration() .withChatModel(chatModel) .withCustomTools(customTools); // Create agent Agent agent = new AgentWithoutMemory(config); // Register agent in Camel context this.context.getRegistry().bind("mixedToolsAgent", agent); return new RouteBuilder() { @Override public void configure() { // Route with mixed tools : custom tools (via agent) + camel routes from("direct:mixedTools") 
.to("langchain4j-agent:assistant?agent=#mixedToolsAgent&tags=users,weather") .to("mock:agent-response"); // Tool routes for function calling from("langchain4j-tools:userDb?tags=users&description=Query user database by user ID&parameter.userId=string") .setBody(constant(USER_DATABASE)); from("langchain4j-tools:weatherService?tags=weather&description=Get current weather information&parameter.location=string") .setBody(constant("{\"weather\": \"" + WEATHER_INFO + "\", \"location\": \"Current Location\"}")); } }; } }
LangChain4jAgentMixedToolsIT
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/RemoveUnusedImportsTest.java
{ "start": 4628, "end": 4914 }
class ____ {} """) .expectUnchanged() .doTest(); } @Test public void useInJavadocLink_selfReferenceDoesNotBreak() { testHelper .addInputLines( "in/Test.java", """ /** {@link #blah} */ public
Test
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/OverriddenSQLRestrictionAnnotation.java
{ "start": 976, "end": 2498 }
class ____ extends AbstractOverrider<SQLRestriction> implements DialectOverride.SQLRestriction, DialectOverrider<SQLRestriction> { private SQLRestriction override; /** * Used in creating dynamic annotation instances (e.g. from XML) */ public OverriddenSQLRestrictionAnnotation(ModelsContext sourceModelContext) { } /** * Used in creating annotation instances from JDK variant */ public OverriddenSQLRestrictionAnnotation( DialectOverride.SQLRestriction annotation, ModelsContext sourceModelContext) { dialect( annotation.dialect() ); before( annotation.before() ); sameOrAfter( annotation.sameOrAfter() ); override( extractJdkValue( annotation, DIALECT_OVERRIDE_SQL_RESTRICTION, "override", sourceModelContext ) ); } /** * Used in creating annotation instances from Jandex variant */ public OverriddenSQLRestrictionAnnotation( Map<String, Object> attributeValues, ModelsContext sourceModelContext) { super( attributeValues, DIALECT_OVERRIDE_SQL_ORDER, sourceModelContext ); override( (SQLRestriction) attributeValues.get( "override" ) ); } @Override public AnnotationDescriptor<SQLRestriction> getOverriddenDescriptor() { return HibernateAnnotations.SQL_RESTRICTION; } @Override public SQLRestriction override() { return override; } public void override(SQLRestriction value) { this.override = value; } @Override public Class<? extends Annotation> annotationType() { return DialectOverride.SQLRestriction.class; } }
OverriddenSQLRestrictionAnnotation
java
apache__kafka
streams/src/main/java/org/apache/kafka/streams/state/KeyValueStore.java
{ "start": 1171, "end": 3061 }
interface ____<K, V> extends StateStore, ReadOnlyKeyValueStore<K, V> { /** * Update the value associated with this key. * * @param key The key to associate the value to * @param value The value to update, it can be {@code null}; * if the serialized bytes are also {@code null} it is interpreted as deletes * @throws NullPointerException If {@code null} is used for key. * @throws InvalidStateStoreException if the store is not initialized */ void put(K key, V value); /** * Update the value associated with this key, unless a value is already associated with the key. * * @param key The key to associate the value to * @param value The value to update, it can be {@code null}; * if the serialized bytes are also {@code null} it is interpreted as deletes * @return The old value or {@code null} if there is no such key. * @throws NullPointerException If {@code null} is used for key. * @throws InvalidStateStoreException if the store is not initialized */ V putIfAbsent(K key, V value); /** * Update all the given key/value pairs. * * @param entries A list of entries to put into the store; * if the serialized bytes are also {@code null} it is interpreted as deletes * @throws NullPointerException If {@code null} is used for key. * @throws InvalidStateStoreException if the store is not initialized */ void putAll(List<KeyValue<K, V>> entries); /** * Delete the value from the store (if there is one). * * @param key The key * @return The old value or {@code null} if there is no such key. * @throws NullPointerException If {@code null} is used for key. * @throws InvalidStateStoreException if the store is not initialized */ V delete(K key); }
KeyValueStore
java
mockito__mockito
mockito-core/src/test/java/org/mockitousage/junitrunner/SilentRunnerTest.java
{ "start": 3394, "end": 3524 }
class ____ is passing but it has some unnecessary stubs */ @RunWith(MockitoJUnitRunner.Silent.class) public static
itself
java
elastic__elasticsearch
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/llama/request/embeddings/LlamaEmbeddingsRequestTests.java
{ "start": 1074, "end": 4437 }
class ____ extends ESTestCase { public void testCreateRequest_WithAuth_Success() throws IOException { var request = createRequest(); var httpRequest = request.createHttpRequest(); var httpPost = validateRequestUrlAndContentType(httpRequest); var requestMap = entityAsMap(httpPost.getEntity().getContent()); assertThat(requestMap, aMapWithSize(2)); assertThat(requestMap.get("contents"), is(List.of("ABCD"))); assertThat(requestMap.get("model_id"), is("llama-embed")); assertThat(httpPost.getFirstHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer apikey")); } public void testCreateRequest_NoAuth_Success() throws IOException { var request = createRequestNoAuth(); var httpRequest = request.createHttpRequest(); var httpPost = validateRequestUrlAndContentType(httpRequest); var requestMap = entityAsMap(httpPost.getEntity().getContent()); assertThat(requestMap, aMapWithSize(2)); assertThat(requestMap.get("contents"), is(List.of("ABCD"))); assertThat(requestMap.get("model_id"), is("llama-embed")); assertNull(httpPost.getFirstHeader("Authorization")); } public void testTruncate_ReducesInputTextSizeByHalf() throws IOException { var request = createRequest(); var truncatedRequest = request.truncate(); var httpRequest = truncatedRequest.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); var httpPost = (HttpPost) httpRequest.httpRequestBase(); var requestMap = entityAsMap(httpPost.getEntity().getContent()); assertThat(requestMap, aMapWithSize(2)); assertThat(requestMap.get("contents"), is(List.of("AB"))); assertThat(requestMap.get("model_id"), is("llama-embed")); } public void testIsTruncated_ReturnsTrue() { var request = createRequest(); assertFalse(request.getTruncationInfo()[0]); var truncatedRequest = request.truncate(); assertTrue(truncatedRequest.getTruncationInfo()[0]); } private HttpPost validateRequestUrlAndContentType(HttpRequest request) { assertThat(request.httpRequestBase(), instanceOf(HttpPost.class)); var httpPost = 
(HttpPost) request.httpRequestBase(); assertThat(httpPost.getURI().toString(), is("url")); assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaTypeWithoutParameters())); return httpPost; } private static LlamaEmbeddingsRequest createRequest() { var embeddingsModel = LlamaEmbeddingsModelTests.createEmbeddingsModel("llama-embed", "url", "apikey"); return new LlamaEmbeddingsRequest( TruncatorTests.createTruncator(), new Truncator.TruncationResult(List.of("ABCD"), new boolean[] { false }), embeddingsModel ); } private static LlamaEmbeddingsRequest createRequestNoAuth() { var embeddingsModel = LlamaEmbeddingsModelTests.createEmbeddingsModelNoAuth("llama-embed", "url"); return new LlamaEmbeddingsRequest( TruncatorTests.createTruncator(), new Truncator.TruncationResult(List.of("ABCD"), new boolean[] { false }), embeddingsModel ); } }
LlamaEmbeddingsRequestTests
java
lettuce-io__lettuce-core
src/test/jmh/io/lettuce/core/dynamic/RedisCommandsBenchmark.java
{ "start": 2124, "end": 2378 }
interface ____ extends Commands { void set(String key, String value); } public static void main(String[] args) { RedisCommandsBenchmark b = new RedisCommandsBenchmark(); b.setup(); b.asyncSet(); } }
BatchCommands
java
reactor__reactor-core
reactor-core/src/main/java/reactor/core/scheduler/SchedulerMetricDecorator.java
{ "start": 1287, "end": 3636 }
class ____ implements BiFunction<Scheduler, ScheduledExecutorService, ScheduledExecutorService>, Disposable { static final String TAG_SCHEDULER_ID = "reactor.scheduler.id"; static final String METRICS_DECORATOR_KEY = "reactor.metrics.decorator"; final WeakHashMap<Scheduler, String> seenSchedulers = new WeakHashMap<>(); final Map<String, AtomicInteger> schedulerDifferentiator = new HashMap<>(); final WeakHashMap<Scheduler, AtomicInteger> executorDifferentiator = new WeakHashMap<>(); final MeterRegistry registry; SchedulerMetricDecorator() { registry = Metrics.MicrometerConfiguration.getRegistry(); } @Override public synchronized ScheduledExecutorService apply(Scheduler scheduler, ScheduledExecutorService service) { //this is equivalent to `toString`, a detailed name like `parallel("foo", 3)` String schedulerName = Scannable .from(scheduler) .scanOrDefault(Attr.NAME, scheduler.getClass().getName()); //we hope that each NAME is unique enough, but we'll differentiate by Scheduler String schedulerId = seenSchedulers.computeIfAbsent(scheduler, s -> { int schedulerDifferentiator = this.schedulerDifferentiator .computeIfAbsent(schedulerName, k -> new AtomicInteger(0)) .getAndIncrement(); return (schedulerDifferentiator == 0) ? schedulerName : schedulerName + "#" + schedulerDifferentiator; }); //we now want an executorId unique to a given scheduler String executorId = schedulerId + "-" + executorDifferentiator.computeIfAbsent(scheduler, key -> new AtomicInteger(0)) .getAndIncrement(); Tag[] tags = new Tag[] { Tag.of(TAG_SCHEDULER_ID, schedulerId) }; /* Design note: we assume that a given Scheduler won't apply the decorator twice to the same ExecutorService. Even though, it would simply create an extraneous meter for that ExecutorService, which we think is not that bad (compared to paying the price upfront of also tracking executors instances to deduplicate). 
The main goal is to detect Scheduler instances that have already started decorating their executors, in order to avoid consider two calls in a row as duplicates (yet still being able to distinguish between two instances with the same name and configuration). */
SchedulerMetricDecorator
java
alibaba__druid
core/src/test/java/com/alibaba/druid/benckmark/proxy/DruidBenchmarkTest.java
{ "start": 761, "end": 2135 }
class ____ extends TestCase { public void test_druid_benchmark() throws Exception { BenchmarkExecutor executor = new BenchmarkExecutor(); executor.getSqlExecutors().add(createExecutorDirect()); executor.getSqlExecutors().add(createExecutorDruid()); executor.setExecuteCount(10); executor.setLoopCount(1000 * 100); executor.getCaseList().add(new SelectNow()); // executor.getCaseList().add(new SelectSysUser()); // executor.getCaseList().add(new Select1()); // executor.getCaseList().add(new SelectEmptyTable()); executor.execute(); } public DirectSQLExecutor createExecutorDirect() { String name = "direct"; String jdbcUrl = "jdbc:mysql://a.b.c.d/dragoon_v25masterdb?useUnicode=true&characterEncoding=UTF-8"; String user = "dragoon25"; String password = "dragoon25"; return new DirectSQLExecutor(name, jdbcUrl, user, password); } public DirectSQLExecutor createExecutorDruid() { String name = "druid"; String jdbcUrl = "jdbc:wrap-jdbc:filters=default:name=benchmark:jdbc:mysql://a.b.c.d/dragoon_v25masterdb?useUnicode=true&characterEncoding=UTF-8"; String user = "dragoon25"; String password = "dragoon25"; return new DirectSQLExecutor(name, jdbcUrl, user, password); } }
DruidBenchmarkTest
java
apache__camel
components/camel-saga/src/generated/java/org/apache/camel/component/saga/SagaComponentConfigurer.java
{ "start": 731, "end": 2287 }
class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter { @Override public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) { SagaComponent target = (SagaComponent) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "autowiredenabled": case "autowiredEnabled": target.setAutowiredEnabled(property(camelContext, boolean.class, value)); return true; case "lazystartproducer": case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true; default: return false; } } @Override public Class<?> getOptionType(String name, boolean ignoreCase) { switch (ignoreCase ? name.toLowerCase() : name) { case "autowiredenabled": case "autowiredEnabled": return boolean.class; case "lazystartproducer": case "lazyStartProducer": return boolean.class; default: return null; } } @Override public Object getOptionValue(Object obj, String name, boolean ignoreCase) { SagaComponent target = (SagaComponent) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "autowiredenabled": case "autowiredEnabled": return target.isAutowiredEnabled(); case "lazystartproducer": case "lazyStartProducer": return target.isLazyStartProducer(); default: return null; } } }
SagaComponentConfigurer
java
elastic__elasticsearch
x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/LongLongBucketedSort.java
{ "start": 1162, "end": 15586 }
class ____ implements Releasable { private final BigArrays bigArrays; private final SortOrder order; private final int bucketSize; /** * {@code true} if the bucket is in heap mode, {@code false} if * it is still gathering. */ private final BitArray heapMode; /** * An array containing all the values on all buckets. The structure is as follows: * <p> * For each bucket, there are bucketSize elements, based on the bucket id (0, 1, 2...). * Then, for each bucket, it can be in 2 states: * </p> * <ul> * <li> * Gather mode: All buckets start in gather mode, and remain here while they have less than bucketSize elements. * In gather mode, the elements are stored in the array from the highest index to the lowest index. * The lowest index contains the offset to the next slot to be filled. * <p> * This allows us to insert elements in O(1) time. * </p> * <p> * When the bucketSize-th element is collected, the bucket transitions to heap mode, by heapifying its contents. * </p> * </li> * <li> * Heap mode: The bucket slots are organized as a min heap structure. * <p> * The root of the heap is the minimum value in the bucket, * which allows us to quickly discard new values that are not in the top N. * </p> * </li> * </ul> */ private LongArray values; private LongArray extraValues; public LongLongBucketedSort(BigArrays bigArrays, SortOrder order, int bucketSize) { this.bigArrays = bigArrays; this.order = order; this.bucketSize = bucketSize; heapMode = new BitArray(0, bigArrays); boolean success = false; try { values = bigArrays.newLongArray(0, false); extraValues = bigArrays.newLongArray(0, false); success = true; } finally { if (success == false) { close(); } } } /** * Collects a {@code value} into a {@code bucket}. * <p> * It may or may not be inserted in the heap, depending on if it is better than the current root. 
* </p> */ public void collect(long value, long extraValue, int bucket) { long rootIndex = (long) bucket * bucketSize; if (inHeapMode(bucket)) { if (betterThan(value, values.get(rootIndex), extraValue, extraValues.get(rootIndex))) { values.set(rootIndex, value); extraValues.set(rootIndex, extraValue); downHeap(rootIndex, 0, bucketSize); } return; } // Gathering mode long requiredSize = rootIndex + bucketSize; if (values.size() < requiredSize) { grow(bucket); } int next = getNextGatherOffset(rootIndex); assert 0 <= next && next < bucketSize : "Expected next to be in the range of valid buckets [0 <= " + next + " < " + bucketSize + "]"; long index = next + rootIndex; values.set(index, value); extraValues.set(index, extraValue); if (next == 0) { heapMode.set(bucket); heapify(rootIndex, bucketSize); } else { setNextGatherOffset(rootIndex, next - 1); } } /** * The order of the sort. */ public SortOrder getOrder() { return order; } /** * The number of values to store per bucket. */ public int getBucketSize() { return bucketSize; } /** * Get the first and last indexes (inclusive, exclusive) of the values for a bucket. * Returns [0, 0] if the bucket has never been collected. */ private Tuple<Long, Long> getBucketValuesIndexes(int bucket) { long rootIndex = (long) bucket * bucketSize; if (rootIndex >= values.size()) { // We've never seen this bucket. return Tuple.tuple(0L, 0L); } long start = inHeapMode(bucket) ? rootIndex : (rootIndex + getNextGatherOffset(rootIndex) + 1); long end = rootIndex + bucketSize; return Tuple.tuple(start, end); } /** * Merge the values from {@code other}'s {@code otherGroupId} into {@code groupId}. 
*/ public void merge(int groupId, LongLongBucketedSort other, int otherGroupId) { var otherBounds = other.getBucketValuesIndexes(otherGroupId); // TODO: This can be improved for heapified buckets by making use of the heap structures for (long i = otherBounds.v1(); i < otherBounds.v2(); i++) { collect(other.values.get(i), other.extraValues.get(i), groupId); } } /** * Creates a block with the values from the {@code selected} groups. */ public void toBlocks(BlockFactory blockFactory, Block[] blocks, int offset, IntVector selected) { // Check if the selected groups are all empty, to avoid allocating extra memory if (allSelectedGroupsAreEmpty(selected)) { Block constantNullBlock = blockFactory.newConstantNullBlock(selected.getPositionCount()); constantNullBlock.incRef(); blocks[offset] = constantNullBlock; blocks[offset + 1] = constantNullBlock; return; } try ( var builder = blockFactory.newLongBlockBuilder(selected.getPositionCount()); var extraBuilder = blockFactory.newLongBlockBuilder(selected.getPositionCount()) ) { for (int s = 0; s < selected.getPositionCount(); s++) { int bucket = selected.getInt(s); var bounds = getBucketValuesIndexes(bucket); var rootIndex = bounds.v1(); var size = bounds.v2() - bounds.v1(); if (size == 0) { builder.appendNull(); extraBuilder.appendNull(); continue; } if (size == 1) { builder.appendLong(values.get(rootIndex)); extraBuilder.appendLong(extraValues.get(rootIndex)); continue; } // If we are in the gathering mode, we need to heapify before sorting. 
if (inHeapMode(bucket) == false) { heapify(rootIndex, (int) size); } heapSort(rootIndex, (int) size); builder.beginPositionEntry(); extraBuilder.beginPositionEntry(); for (int i = 0; i < size; i++) { builder.appendLong(values.get(rootIndex + i)); extraBuilder.appendLong(extraValues.get(rootIndex + i)); } builder.endPositionEntry(); extraBuilder.endPositionEntry(); } blocks[offset] = builder.build(); blocks[offset + 1] = extraBuilder.build(); } } /** * Checks if the selected groups are all empty. */ private boolean allSelectedGroupsAreEmpty(IntVector selected) { return IntStream.range(0, selected.getPositionCount()).map(selected::getInt).noneMatch(bucket -> { var bounds = this.getBucketValuesIndexes(bucket); var size = bounds.v2() - bounds.v1(); return size > 0; }); } /** * Is this bucket a min heap {@code true} or in gathering mode {@code false}? */ private boolean inHeapMode(int bucket) { return heapMode.get(bucket); } /** * Get the next index that should be "gathered" for a bucket rooted * at {@code rootIndex}. */ private int getNextGatherOffset(long rootIndex) { return (int) values.get(rootIndex); } /** * Set the next index that should be "gathered" for a bucket rooted * at {@code rootIndex}. */ private void setNextGatherOffset(long rootIndex, int offset) { values.set(rootIndex, offset); } /** * {@code true} if the entry at index {@code lhs} is "better" than * the entry at {@code rhs}. "Better" in this means "lower" for * {@link SortOrder#ASC} and "higher" for {@link SortOrder#DESC}. */ private boolean betterThan(long lhs, long rhs, long lhsExtra, long rhsExtra) { int res = Long.compare(lhs, rhs); if (res != 0) { return getOrder().reverseMul() * res < 0; } res = Long.compare(lhsExtra, rhsExtra); return getOrder().reverseMul() * res < 0; } /** * Swap the data at two indices. 
*/ private void swap(long lhs, long rhs) { var tmp = values.get(lhs); values.set(lhs, values.get(rhs)); values.set(rhs, tmp); var tmpExtra = extraValues.get(lhs); extraValues.set(lhs, extraValues.get(rhs)); extraValues.set(rhs, tmpExtra); } /** * Allocate storage for more buckets and store the "next gather offset" * for those new buckets. We always grow the storage by whole bucket's * worth of slots at a time. We never allocate space for partial buckets. */ private void grow(int bucket) { long oldMax = values.size(); assert oldMax % bucketSize == 0; long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.LONG_PAGE_SIZE, Long.BYTES); // Round up to the next full bucket. newSize = (newSize + bucketSize - 1) / bucketSize; values = bigArrays.resize(values, newSize * bucketSize); // Round up to the next full bucket. extraValues = bigArrays.resize(extraValues, newSize * bucketSize); // Set the next gather offsets for all newly allocated buckets. fillGatherOffsets(oldMax); } /** * Maintain the "next gather offsets" for newly allocated buckets. */ private void fillGatherOffsets(long startingAt) { int nextOffset = getBucketSize() - 1; for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) { setNextGatherOffset(bucketRoot, nextOffset); } } /** * Heapify a bucket whose entries are in random order. * <p> * This works by validating the heap property on each node, iterating * "upwards", pushing any out of order parents "down". Check out the * <a href="https://en.wikipedia.org/w/index.php?title=Binary_heap&oldid=940542991#Building_a_heap">wikipedia</a> * entry on binary heaps for more about this. * </p> * <p> * While this *looks* like it could easily be {@code O(n * log n)}, it is * a fairly well studied algorithm attributed to Floyd. There's * been a bunch of work that puts this at {@code O(n)}, close to 1.88n worst * case. * </p> * <ul> * <li>Hayward, Ryan; McDiarmid, Colin (1991). 
* <a href="https://web.archive.org/web/20160205023201/http://www.stats.ox.ac.uk/__data/assets/pdf_file/0015/4173/heapbuildjalg.pdf"> * Average Case Analysis of Heap Building byRepeated Insertion</a> J. Algorithms. * <li>D.E. Knuth, ”The Art of Computer Programming, Vol. 3, Sorting and Searching”</li> * </ul> * @param rootIndex the index the start of the bucket */ private void heapify(long rootIndex, int heapSize) { int maxParent = heapSize / 2 - 1; for (int parent = maxParent; parent >= 0; parent--) { downHeap(rootIndex, parent, heapSize); } } /** * Sorts all the values in the heap using heap sort algorithm. * This runs in {@code O(n log n)} time. * @param rootIndex index of the start of the bucket * @param heapSize Number of values that belong to the heap. * Can be less than bucketSize. * In such a case, the remaining values in range * (rootIndex + heapSize, rootIndex + bucketSize) * are *not* considered part of the heap. */ private void heapSort(long rootIndex, int heapSize) { while (heapSize > 0) { swap(rootIndex, rootIndex + heapSize - 1); heapSize--; downHeap(rootIndex, 0, heapSize); } } /** * Correct the heap invariant of a parent and its children. This * runs in {@code O(log n)} time. * @param rootIndex index of the start of the bucket * @param parent Index within the bucket of the parent to check. * For example, 0 is the "root". * @param heapSize Number of values that belong to the heap. * Can be less than bucketSize. * In such a case, the remaining values in range * (rootIndex + heapSize, rootIndex + bucketSize) * are *not* considered part of the heap. 
*/ private void downHeap(long rootIndex, int parent, int heapSize) { while (true) { long parentIndex = rootIndex + parent; int worst = parent; long worstIndex = parentIndex; int leftChild = parent * 2 + 1; long leftIndex = rootIndex + leftChild; if (leftChild < heapSize) { if (betterThan(values.get(worstIndex), values.get(leftIndex), extraValues.get(worstIndex), extraValues.get(leftIndex))) { worst = leftChild; worstIndex = leftIndex; } int rightChild = leftChild + 1; long rightIndex = rootIndex + rightChild; if (rightChild < heapSize && betterThan( values.get(worstIndex), values.get(rightIndex), extraValues.get(worstIndex), extraValues.get(rightIndex) )) { worst = rightChild; worstIndex = rightIndex; } } if (worst == parent) { break; } swap(worstIndex, parentIndex); parent = worst; } } @Override public final void close() { Releasables.close(values, extraValues, heapMode); } }
LongLongBucketedSort
java
alibaba__fastjson
src/test/java/com/alibaba/json/test/Base64.java
{ "start": 95, "end": 4170 }
class ____ encode and decode to and from BASE64 in full accordance * with RFC 2045.<br><br> * On Windows XP sp1 with 1.4.2_04 and later ;), this encoder and decoder is about 10 times faster * on small arrays (10 - 1000 bytes) and 2-3 times as fast on larger arrays (10000 - 1000000 bytes) * compared to <code>sun.misc.Encoder()/Decoder()</code>.<br><br> * * On byte arrays the encoder is about 20% faster than Jakarta Commons Base64 Codec for encode and * about 50% faster for decoding large arrays. This implementation is about twice as fast on very small * arrays (&lt 30 bytes). If source/destination is a <code>String</code> this * version is about three times as fast due to the fact that the Commons Codec result has to be recoded * to a <code>String</code> from <code>byte[]</code>, which is very expensive.<br><br> * * This encode/decode algorithm doesn't create any temporary arrays as many other codecs do, it only * allocates the resulting array. This produces less garbage and it is possible to handle arrays twice * as large as algorithms that create a temporary array. (E.g. Jakarta Commons Codec). It is unknown * whether Sun's <code>sun.misc.Encoder()/Decoder()</code> produce temporary arrays but since performance * is quite low it probably does.<br><br> * * The encoder produces the same output as the Sun one except that the Sun's encoder appends * a trailing line separator if the last character isn't a pad. Unclear why but it only adds to the * length and is probably a side effect. Both are in conformance with RFC 2045 though.<br> * Commons codec seem to always att a trailing line separator.<br><br> * * <b>Note!</b> * The encode/decode method pairs (types) come in three versions with the <b>exact</b> same algorithm and * thus a lot of code redundancy. This is to not create any temporary arrays for transcoding to/from different * format types. 
The methods not used can simply be commented out.<br><br> * * There is also a "fast" version of all decode methods that works the same way as the normal ones, but * har a few demands on the decoded input. Normally though, these fast verions should be used if the source if * the input is known and it hasn't bee tampered with.<br><br> * * If you find the code useful or you find a bug, please send me a note at base64 @ miginfocom . com. * * Licence (BSD): * ============== * * Copyright (c) 2004, Mikael Grev, MiG InfoCom AB. (base64 @ miginfocom . com) * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list * of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this * list of conditions and the following disclaimer in the documentation and/or other * materials provided with the distribution. * Neither the name of the MiG InfoCom AB nor the names of its contributors may be * used to endorse or promote products derived from this software without specific * prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
* IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY * OF SUCH DAMAGE. * * @version 2.2 * @author Mikael Grev * Date: 2004-aug-02 * Time: 11:31:11 */ public
to
java
hibernate__hibernate-orm
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/auditReader/AuditReaderAPITest.java
{ "start": 1181, "end": 4015 }
class ____ { @BeforeClassTemplate public void initData(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> { em.getTransaction().begin(); AuditedTestEntity ent1 = new AuditedTestEntity( 1, "str1" ); NotAuditedTestEntity ent2 = new NotAuditedTestEntity( 1, "str1" ); em.persist( ent1 ); em.persist( ent2 ); em.getTransaction().commit(); em.getTransaction().begin(); ent1 = em.find( AuditedTestEntity.class, 1 ); ent2 = em.find( NotAuditedTestEntity.class, 1 ); ent1.setStr1( "str2" ); ent2.setStr1( "str2" ); em.getTransaction().commit(); em.getTransaction().begin(); ent1 = em.find( AuditedTestEntity.class, 1 ); em.remove( ent1 ); em.getTransaction().commit(); } ); } @Test public void testIsEntityClassAuditedForAuditedEntity(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> { final var auditReader = AuditReaderFactory.get( em ); assertTrue( auditReader.isEntityClassAudited( AuditedTestEntity.class ) ); assertEquals( Arrays.asList( 1, 2, 3 ), auditReader.getRevisions( AuditedTestEntity.class, 1 ) ); } ); } @Test public void testIsEntityClassAuditedForNotAuditedEntity(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> { final var auditReader = AuditReaderFactory.get( em ); assertFalse( auditReader.isEntityClassAudited( NotAuditedTestEntity.class ) ); try { auditReader.getRevisions( NotAuditedTestEntity.class, 1 ); fail( "Expected a NotAuditedException" ); } catch (NotAuditedException nae) { // expected } } ); } @Test public void testFindRevisionEntitiesWithoutDeletions(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> { List<?> revisionInfos = AuditReaderFactory.get( em ).createQuery() .forRevisionsOfEntity( AuditedTestEntity.class, false ) .getResultList(); assertEquals( 2, revisionInfos.size() ); revisionInfos.forEach( e -> assertTyping( SequenceIdRevisionEntity.class, e ) ); } ); } @Test public void testFindRevisionEntitiesWithDeletions(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> { List<?> 
revisionInfos = AuditReaderFactory.get( em ).createQuery() .forRevisionsOfEntity( AuditedTestEntity.class, true ) .getResultList(); assertEquals( 3, revisionInfos.size() ); revisionInfos.forEach( e -> assertTyping( SequenceIdRevisionEntity.class, e ) ); } ); } @Test public void testFindRevisionEntitiesNonAuditedEntity(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> { try { AuditReaderFactory.get( em ).createQuery() .forRevisionsOfEntity( NotAuditedTestEntity.class, false ) .getResultList(); fail( "Expected a NotAuditedException" ); } catch (NotAuditedException e) { // expected } } ); } }
AuditReaderAPITest
java
netty__netty
resolver-dns/src/test/java/io/netty/resolver/dns/DnsNameResolverBuilderTest.java
{ "start": 8267, "end": 9290 }
class ____ implements AuthoritativeDnsServerCache { @Override public DnsServerAddressStream get(String hostname) { return null; } @Override public void cache(String hostname, InetSocketAddress address, long originalTtl, EventLoop loop) { //no-op } @Override public void clear() { //no-op } @Override public boolean clear(String hostname) { return false; } } @Test void testCustomQueryDnsServerAddressStream() { DnsServerAddressStream queryAddressStream = new TestQueryServerAddressStream(); resolver = builder.queryServerAddressStream(queryAddressStream).build(); assertThat(resolver.queryDnsServerAddressStream()).isSameAs(queryAddressStream); resolver = builder.copy().build(); assertThat(resolver.queryDnsServerAddressStream()).isSameAs(queryAddressStream); } private static final
TestAuthoritativeDnsServerCache
java
eclipse-vertx__vert.x
vertx-core/src/test/java/io/vertx/test/core/RepeatRule.java
{ "start": 623, "end": 680 }
class ____ implements TestRule { private static
RepeatRule
java
junit-team__junit5
platform-tests/src/test/java/org/junit/platform/commons/support/ModifierSupportTests.java
{ "start": 6878, "end": 6982 }
class ____ { @SuppressWarnings("unused") private void privateMethod() { } } protected
PrivateClass
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/basic/InheritedTest.java
{ "start": 1409, "end": 1578 }
class ____ { @Test public void test() { Employee charles = new Employee( "Charles", "Engineer" ); charles.setOca( 1002 ); // Check that both types of
InheritedTest
java
spring-projects__spring-framework
spring-web/src/main/java/org/springframework/web/server/session/HeaderWebSessionIdResolver.java
{ "start": 1018, "end": 2367 }
class ____ implements WebSessionIdResolver { /** Default value for {@link #setHeaderName(String)}. */ public static final String DEFAULT_HEADER_NAME = "SESSION"; private String headerName = DEFAULT_HEADER_NAME; /** * Set the name of the session header to use for the session ID. * <p>The name is used to extract the session ID from the request headers as * well to set the session ID on the response headers. * <p>By default set to {@code DEFAULT_HEADER_NAME} * @param headerName the header name */ public void setHeaderName(String headerName) { Assert.hasText(headerName, "'headerName' must not be empty"); this.headerName = headerName; } /** * Get the configured header name. * @return the configured header name */ public String getHeaderName() { return this.headerName; } @Override public List<String> resolveSessionIds(ServerWebExchange exchange) { HttpHeaders headers = exchange.getRequest().getHeaders(); return headers.getOrDefault(getHeaderName(), Collections.emptyList()); } @Override public void setSessionId(ServerWebExchange exchange, String id) { Assert.notNull(id, "'id' is required."); exchange.getResponse().getHeaders().set(getHeaderName(), id); } @Override public void expireSession(ServerWebExchange exchange) { setSessionId(exchange, ""); } }
HeaderWebSessionIdResolver
java
assertj__assertj-core
assertj-core/src/main/java/org/assertj/core/api/AbstractFloatAssert.java
{ "start": 12285, "end": 39544 }
class ____ SELF isNotCloseTo(final float expected, final Offset<Float> offset) { floats.assertIsNotCloseTo(info, actual, expected, offset); return myself; } /** * Verifies that the actual number is close to the given one within the given offset value. * <p> * When <i>abs(actual - expected) == offset value</i>, the assertion: * <ul> * <li><b>succeeds</b> when using {@link Assertions#within(Float)} or {@link Assertions#offset(Float)}</li> * <li><b>fails</b> when using {@link Assertions#byLessThan(Float)} or {@link Offset#strictOffset(Number)}</li> * </ul> * <p> * <b>Breaking change</b> since 2.9.0/3.9.0: using {@link Assertions#byLessThan(Float)} implies a <b>strict</b> comparison, * use {@link Assertions#within(Float)} to get the old behavior. * <p> * Examples: * <pre><code class='java'> // assertions succeed * assertThat(8.1f).isCloseTo(8.0f, within(0.2f)); * assertThat(8.1f).isCloseTo(8.0f, offset(0.2f)); // alias of within * assertThat(8.1f).isCloseTo(8.0f, byLessThan(0.2f)); // strict * * // assertions succeed when the difference == offset value ... * assertThat(0.1f).isCloseTo(0.0f, within(0.1f)); * assertThat(0.1f).isCloseTo(0.0f, offset(0.1f)); * // ... except when using byLessThan which implies a strict comparison * assertThat(0.1f).isCloseTo(0.0f, byLessThan(0.1f)); // strict =&gt; fail * * // this assertion also fails * assertThat(8.1f).isCloseTo(8.0f, within(0.001f));</code></pre> * * @param expected the given number to compare the actual value to. * @param offset the given positive offset. * @return {@code this} assertion object. * @throws NullPointerException if the given offset is {@code null}. * @throws NullPointerException if the expected number is {@code null}. * @throws AssertionError if the actual value is not close to the given one. 
*/ @Override public SELF isCloseTo(Float expected, Offset<Float> offset) { floats.assertIsCloseTo(info, actual, expected, offset); return myself; } /** * Verifies that the actual number is not close to the given one by less than the given offset.<br> * <p> * When <i>abs(actual - expected) == offset value</i>, the assertion: * <ul> * <li><b>succeeds</b> when using {@link Assertions#byLessThan(Float)} or {@link Offset#strictOffset(Number)}</li> * <li><b>fails</b> when using {@link Assertions#within(Float)} or {@link Assertions#offset(Float)}</li> * </ul> * <p> * <b>Breaking change</b> since 2.9.0/3.9.0: using {@link Assertions#byLessThan(Float)} implies a <b>strict</b> comparison, * use {@link Assertions#within(Float)} to get the old behavior. * <p> * Examples: * <pre><code class='java'> // assertions succeed * assertThat(8.1f).isNotCloseTo(8.0f, byLessThan(0.01f)); * assertThat(8.1f).isNotCloseTo(8.0f, within(0.01f)); * assertThat(8.1f).isNotCloseTo(8.0f, offset(0.01f)); * // diff == offset but isNotCloseTo succeeds as we use byLessThan * assertThat(0.1f).isNotCloseTo(0.0f, byLessThan(0.1f)); * * // assertions fail * assertThat(0.1f).isNotCloseTo(0.0f, within(0.1f)); * assertThat(0.1f).isNotCloseTo(0.0f, offset(0.1f)); * assertThat(8.1f).isNotCloseTo(8.0f, within(0.2f)); * assertThat(8.1f).isNotCloseTo(8.0f, offset(0.2f)); * assertThat(8.1f).isNotCloseTo(8.0f, byLessThan(0.2f));</code></pre> * * @param expected the given number to compare the actual value to. * @param offset the given positive offset. * @return {@code this} assertion object. * @throws NullPointerException if the given offset is {@code null}. * @throws NullPointerException if the expected number is {@code null}. * @throws AssertionError if the actual value is close to the given one. 
* @see Assertions#byLessThan(Float) * @since 2.6.0 / 3.6.0 */ @Override public SELF isNotCloseTo(Float expected, Offset<Float> offset) { floats.assertIsNotCloseTo(info, actual, expected, offset); return myself; } /** * Verifies that the actual number is close to the given one within the given percentage.<br> * If difference is equal to the percentage value, assertion is considered valid. * <p> * Example with float: * <pre><code class='java'> // assertions will pass: * assertThat(11.0f).isCloseTo(new Float(10.0f), withinPercentage(20f)); * * // if difference is exactly equals to the computed offset (1.0), it's ok * assertThat(11.0f).isCloseTo(new Float(10.0f), withinPercentage(10f)); * * // assertion will fail * assertThat(11.0f).isCloseTo(new Float(10.0f), withinPercentage(5f));</code></pre> * * @param expected the given number to compare the actual value to. * @param percentage the given positive percentage. * @return {@code this} assertion object. * @throws NullPointerException if the given offset is {@code null}. * @throws NullPointerException if the expected number is {@code null}. * @throws AssertionError if the actual value is not close to the given one. */ @Override public SELF isCloseTo(Float expected, Percentage percentage) { floats.assertIsCloseToPercentage(info, actual, expected, percentage); return myself; } /** * Verifies that the actual number is not close to the given one within the given percentage.<br> * If difference is equal to the percentage value, the assertion fails. * <p> * Example with float: * <pre><code class='java'> // assertion will pass: * assertThat(11.0f).isNotCloseTo(new Float(10.0f), withinPercentage(5f)); * * // assertions will fail * assertThat(11.0f).isNotCloseTo(new Float(10.0f), withinPercentage(10f)); * assertThat(11.0f).isNotCloseTo(new Float(10.0f), withinPercentage(20f));</code></pre> * * @param expected the given number to compare the actual value to. * @param percentage the given positive percentage. 
* @return {@code this} assertion object. * @throws NullPointerException if the given offset is {@code null}. * @throws NullPointerException if the expected number is {@code null}. * @throws AssertionError if the actual value is close to the given one. * @since 2.6.0 / 3.6.0 */ @Override public SELF isNotCloseTo(Float expected, Percentage percentage) { floats.assertIsNotCloseToPercentage(info, actual, expected, percentage); return myself; } /** * Verifies that the actual number is close to the given one within the given percentage.<br> * If difference is equal to the percentage value, assertion is considered valid. * <p> * Example with float: * <pre><code class='java'> // assertions will pass: * assertThat(11.0f).isCloseTo(10.0f, withinPercentage(20f)); * * // if difference is exactly equals to the computed offset (1.0), it's ok * assertThat(11.0f).isCloseTo(10.0f, withinPercentage(10f)); * * // assertion will fail * assertThat(11.0f).isCloseTo(10.0f, withinPercentage(5f));</code></pre> * * @param expected the given number to compare the actual value to. * @param percentage the given positive percentage. * @return {@code this} assertion object. * @throws NullPointerException if the given offset is {@code null}. * @throws NullPointerException if the expected number is {@code null}. * @throws AssertionError if the actual value is not close to the given one. */ public SELF isCloseTo(float expected, Percentage percentage) { floats.assertIsCloseToPercentage(info, actual, expected, percentage); return myself; } /** * Verifies that the actual number is not close to the given one within the given percentage.<br> * If difference is equal to the percentage value, the assertion fails. 
* <p> * Example with float: * <pre><code class='java'> // assertion will pass: * assertThat(11.0f).isNotCloseTo(10.0f, withinPercentage(5f)); * * // assertions will fail * assertThat(11.0f).isNotCloseTo(10.0f, withinPercentage(10f)); * assertThat(11.0f).isNotCloseTo(10.0f, withinPercentage(20f));</code></pre> * * @param expected the given number to compare the actual value to. * @param percentage the given positive percentage. * @return {@code this} assertion object. * @throws NullPointerException if the given offset is {@code null}. * @throws NullPointerException if the expected number is {@code null}. * @throws AssertionError if the actual value is close to the given one. * @since 2.6.0 / 3.6.0 */ public SELF isNotCloseTo(float expected, Percentage percentage) { floats.assertIsNotCloseToPercentage(info, actual, expected, percentage); return myself; } /** * Verifies that the actual number is close to the given one within the given offset value. * <p> * This assertion is the same as {@link #isCloseTo(float, Offset)}. * <p> * When <i>abs(actual - expected) == offset value</i>, the assertion: * <ul> * <li><b>succeeds</b> when using {@link Assertions#within(Float)} or {@link Assertions#offset(Float)}</li> * <li><b>fails</b> when using {@link Assertions#byLessThan(Float)} or {@link Offset#strictOffset(Number)}</li> * </ul> * <p> * Examples: * <pre><code class='java'> // assertions will pass * assertThat(8.1f).isEqualTo(8.0f, within(0.2f)); * assertThat(8.1f).isEqualTo(8.0f, offset(0.2f)); // alias of within * assertThat(8.1f).isEqualTo(8.0f, byLessThan(0.2f)); // strict * * // assertions succeed when the difference == offset value ... * assertThat(0.1f).isEqualTo(0.0f, within(0.1f)); * assertThat(0.1f).isEqualTo(0.0f, offset(0.1f)); * // ... 
except when using byLessThan which implies a strict comparison * assertThat(0.1f).isEqualTo(0.0f, byLessThan(0.1f)); // strict =&gt; fail * * // this assertion also fails * assertThat(0.1f).isEqualTo(0.0f, within(0.001f));</code></pre> * * @param expected the given value to compare the actual value to. * @param offset the given positive offset. * @return {@code this} assertion object. * @throws NullPointerException if the given offset is {@code null}. * @throws NullPointerException if the expected number is {@code null}. * @throws AssertionError if the actual value is not equal to the given one. */ @Override public SELF isEqualTo(Float expected, Offset<Float> offset) { return isCloseTo(expected, offset); } /** * Verifies that the actual number is close to the given one within the given offset value. * <p> * This assertion is the same as {@link #isCloseTo(float, Offset)}. * <p> * When <i>abs(actual - expected) == offset value</i>, the assertion: * <ul> * <li><b>succeeds</b> when using {@link Assertions#within(Float)} or {@link Assertions#offset(Float)}</li> * <li><b>fails</b> when using {@link Assertions#byLessThan(Float)} or {@link Offset#strictOffset(Number)}</li> * </ul> * <p> * Examples: * <pre><code class='java'> // assertions will pass * assertThat(8.1f).isEqualTo(8.0f, within(0.2f)); * assertThat(8.1f).isEqualTo(8.0f, offset(0.2f)); // alias of within * assertThat(8.1f).isEqualTo(8.0f, byLessThan(0.2f)); // strict * * // assertions succeed when the difference == offset value ... * assertThat(0.1f).isEqualTo(0.0f, within(0.1f)); * assertThat(0.1f).isEqualTo(0.0f, offset(0.1f)); * // ... except when using byLessThan which implies a strict comparison * assertThat(0.1f).isEqualTo(0.0f, byLessThan(0.1f)); // strict =&gt; fail * * // this assertion also fails * assertThat(0.1f).isEqualTo(0.0f, within(0.001f));</code></pre> * * @param expected the given value to compare the actual value to. * @param offset the given positive offset. 
* @return {@code this} assertion object. * @throws NullPointerException if the given offset is {@code null}. * @throws NullPointerException if the expected number is {@code null}. * @throws AssertionError if the actual value is not equal to the given one. */ public SELF isEqualTo(float expected, Offset<Float> offset) { return isCloseTo(expected, offset); } /** * Verifies that the actual value is not equal to the given one. * <p> * Unless a specific comparator has been set (with {@link #usingComparator(Comparator) usingComparator}) the equality is performed * with {@code !=} which is slightly different from {@link Float#equals(Object)} - notably: * <ul> * <li>{@code Float.NaN != Float.NaN} but {@code Float.valueOf(Float.NaN).equals(Float.NaN) == true}</li> * <li>{@code 0.0f == -0.0f} but {@code Float.valueOf(0.0f).equals(-0.0f) == false}</li> * </ul> * <p> * Examples: * <pre><code class='java'> // assertions will pass: * assertThat(0.0f).isNotEqualTo(1.0f); * assertThat(-1.0f).isNotEqualTo(1.0f); * assertThat(Float.NaN).isNotEqualTo(Float.NaN); * * // assertions will fail: * assertThat(1.0f).isNotEqualTo(1.0f); * assertThat(1f).isNotEqualTo(1.0f); * assertThat(0.0f).isNotEqualTo(-0.0f);</code></pre> * <p> * Note that this assertion behaves slightly differently from {@link #isNotEqualTo(Float)}. * * @param other the given value to compare the actual value to. * @return {@code this} assertion object. * @throws AssertionError if the actual value is {@code null}. * @throws AssertionError if the actual value is equal to the given one. */ public SELF isNotEqualTo(float other) { if (noCustomComparatorSet()) { // use primitive comparison since the parameter is a primitive. 
if (other != actual.doubleValue()) return myself; throw Failures.instance().failure(info, shouldNotBeEqual(actual, other)); } floats.assertNotEqual(info, actual, other); return myself; } /** * Verifies that the actual value is not equal to the given {@link Float} using {@link Float#equals(Object)} semantics where * <ul> * <li>{@code Float.valueOf(Float.NaN).equals(Float.NaN) == true} but {@code Float.NaN != Float.NaN}</li> * <li>{@code Float.valueOf(0.0f).equals(-0.0f) == false} but {@code 0.0f == -0.0f}</li> * </ul> * <p> * Examples: * <pre><code class='java'> // assertions will pass: * assertThat(0.0f).isNotEqualTo(Float.valueOf(1.0)); * assertThat(-1.0).isNotEqualTo(Float.valueOf(1.0)); * assertThat(0.0f).isNotEqualTo(Float.valueOf(-0.0f)); * * // assertions will fail: * assertThat(1.0).isNotEqualTo(Float.valueOf(1.0)); * assertThat(0.0f).isNotEqualTo(Float.valueOf(0.0f)); * assertThat(Float.NaN).isNotEqualTo(Float.valueOf(Float.NaN));</code></pre> * <p> * Note that this assertion behaves slightly differently from {@link #isNotEqualTo(float)}. * * @param other the given value to compare the actual value to. * @return {@code this} assertion object. * @throws AssertionError if the actual value is {@code null}. * @throws AssertionError if the actual value is equal to the given one. */ public SELF isNotEqualTo(Float other) { // overloaded for javadoc return super.isNotEqualTo(other); } /** * Verifies that the actual value is less than the given one. * <p> * Examples: * <pre><code class='java'> // assertions will pass: * assertThat(1.0f).isLessThan(2.0f); * assertThat(1.0f).isLessThan(1.01f); * * // assertions will fail: * assertThat(2.0f).isLessThan(1.0f); * assertThat(1.0f).isLessThan(1.0f);</code></pre> * * @param other the given value to compare the actual value to. * @return {@code this} assertion object. * @throws AssertionError if the actual value is {@code null}. * @throws AssertionError if the actual value is equal to or greater than the given one. 
*/ public SELF isLessThan(float other) { floats.assertLessThan(info, actual, other); return myself; } /** * Verifies that the actual value is less than or equal to the given one. * <p> * Unless a specific comparator has been set (with {@link #usingComparator(Comparator) usingComparator}) * this assertion will use {@code <=} semantics where notably {@code 0.0} == {@code -0.0}. * <p> * Examples: * <pre><code class='java'> // assertions will pass: * assertThat(-1.0f).isLessThanOrEqualTo(1.0f); * assertThat(1.0f).isLessThanOrEqualTo(1.0f); * // 0.0f == -0.0f * assertThat(-0.0f).isLessThanOrEqualTo(0.0f); * assertThat(0.0f).isLessThanOrEqualTo(-0.0f); * * // assertion will fail: * assertThat(2.0f).isLessThanOrEqualTo(1.0f);</code></pre> * <p> * Note that this assertion behaves differently from {@link #isLessThanOrEqualTo(Float)} which uses {@link Float#compareTo(Float)} semantics. * * @param other the given value to compare the actual value to. * @return {@code this} assertion object. * @throws AssertionError if the actual value is {@code null}. * @throws AssertionError if the actual value is greater than the given one. */ public SELF isLessThanOrEqualTo(float other) { if (noCustomComparatorSet()) { // use primitive comparison since the parameter is a primitive. if (actual <= other) return myself; throw Failures.instance().failure(info, shouldBeLessOrEqual(actual, other)); } floats.assertLessThanOrEqualTo(info, actual, other); return myself; } /** * Verifies that the actual value is less than or equal to the given one using {@link Float#compareTo(Float)} semantics where notably {@code -0.0} is <b>strictly</b> less than {@code 0.0}. 
* <p> * Examples: * <pre><code class='java'> // assertions will pass: * assertThat(-1.0f).isLessThanOrEqualTo(Float.valueOf(1.0f)); * assertThat(1.0f).isLessThanOrEqualTo(Float.valueOf(1.0f)); * assertThat(-0.0f).isLessThanOrEqualTo(Float.valueOf(0.0f)); * * // assertions will fail: * assertThat(2.0f).isLessThanOrEqualTo(Float.valueOf(1.0f)); * // 0.0f is not considered equal to -0.0f * assertThat(0.0f).isLessThanOrEqualTo(Float.valueOf(-0.0f));</code></pre> * <p> * Note that this assertion behaves differently from {@link #isLessThanOrEqualTo(float)} which uses {@link Float#compareTo(Float)} semantics. * * @param other the given value to compare the actual value to. * @return {@code this} assertion object. * @throws AssertionError if the actual value is {@code null}. * @throws AssertionError if the actual value is greater than the given one. */ @Override public SELF isLessThanOrEqualTo(Float other) { // overridden for javadoc return super.isLessThanOrEqualTo(other); } /** * Verifies that the actual value is greater than the given one. * <p> * Examples: * <pre><code class='java'> // assertions will pass: * assertThat(2.0f).isGreaterThan(1.0f); * assertThat(2.0f).isGreaterThan(1.99f); * * // assertions will fail: * assertThat(1.0f).isGreaterThan(1.0f); * assertThat(1.0f).isGreaterThan(2.0f);</code></pre> * * @param other the given value to compare the actual value to. * @return {@code this} assertion object. * @throws AssertionError if the actual value is {@code null}. * @throws AssertionError if the actual value is equal to or less than the given one. */ public SELF isGreaterThan(float other) { floats.assertGreaterThan(info, actual, other); return myself; } /** * Verifies that the actual value is greater than or equal to the given one. * <p> * Unless a specific comparator has been set (with {@link #usingComparator(Comparator) usingComparator}) * this assertion will use {@code >=} semantics where notably {@code 0.0f} == {@code -0.0f}. 
* <p> * Examples: * <pre><code class='java'> // assertions will pass: * assertThat(2.0f).isGreaterThanOrEqualTo(1.0f); * assertThat(1.0f).isGreaterThanOrEqualTo(1.0f); * assertThat(0.0f).isGreaterThanOrEqualTo(-0.0f); * * // assertion will fail: * assertThat(1.0f).isGreaterThanOrEqualTo(2.0f);</code></pre> * <p> * Note that this assertion behaves differently from {@link #isGreaterThanOrEqualTo(Float)} which uses {@link Float#compareTo(Float)} semantics. * * @param other the given value to compare the actual value to. * @return {@code this} assertion object. * @throws AssertionError if the actual value is {@code null}. * @throws AssertionError if the actual value is less than the given one. */ public SELF isGreaterThanOrEqualTo(float other) { if (noCustomComparatorSet()) { // use primitive comparison since the parameter is a primitive. if (actual >= other) return myself; throw Failures.instance().failure(info, shouldBeGreaterOrEqual(actual, other)); } floats.assertGreaterThanOrEqualTo(info, actual, other); return myself; } /** * Verifies that the actual value is greater than or equal to the given one using {@link Float#compareTo(Float)} semantics where notably {@code 0.0f} is <b>strictly</b> greater than {@code -0.0f}. * <p> * Examples: * <pre><code class='java'> // assertions will pass: * assertThat(2.0f).isGreaterThanOrEqualTo(Float.valueOf(1.0f)); * assertThat(1.0f).isGreaterThanOrEqualTo(Float.valueOf(1.0f)); * assertThat(0.0f).isGreaterThanOrEqualTo(Float.valueOf(-0.0f)); * * // assertions will fail: * assertThat(1.0f).isGreaterThanOrEqualTo(Float.valueOf(2.0f)); * // 0.0f is not considered equal to -0.0f * assertThat(-0.0f).isGreaterThanOrEqualTo(Float.valueOf(0.0f));</code></pre> * <p> * Note that this assertion behaves differently from {@link #isGreaterThanOrEqualTo(float)} which uses {@code >=} semantics. * * @param other the given value to compare the actual value to. * @return {@code this} assertion object. 
* @throws AssertionError if the actual value is {@code null}. * @throws AssertionError if the actual value is less than the given one. */ @Override public SELF isGreaterThanOrEqualTo(Float other) { // overridden for javadoc return super.isGreaterThanOrEqualTo(other); } /** * Verifies that the actual value is in [start, end] range (start included, end included). * * <p> * Examples: * <pre><code class='java'> // assertions will pass * assertThat(1f).isBetween(-1f, 2f); * assertThat(1f).isBetween(1f, 2f); * assertThat(1f).isBetween(0f, 1f); * * // assertion will fail * assertThat(1f).isBetween(2f, 3f);</code></pre> */ @Override public SELF isBetween(Float start, Float end) { floats.assertIsBetween(info, actual, start, end); return myself; } /** * Verifies that the actual value is in ]start, end[ range (start excluded, end excluded). * * <p> * Examples: * <pre><code class='java'> // assertion will pass * assertThat(1f).isStrictlyBetween(-1f, 2f); * * // assertions will fail * assertThat(1f).isStrictlyBetween(1f, 2f); * assertThat(1f).isStrictlyBetween(0f, 1f); * assertThat(1f).isStrictlyBetween(2f, 3f);</code></pre> * */ @Override public SELF isStrictlyBetween(Float start, Float end) { floats.assertIsStrictlyBetween(info, actual, start, end); return myself; } @Override @CheckReturnValue public SELF usingComparator(Comparator<? super Float> customComparator) { return usingComparator(customComparator, null); } @Override @CheckReturnValue public SELF usingComparator(Comparator<? 
super Float> customComparator, String customComparatorDescription) { floats = new Floats(new ComparatorBasedComparisonStrategy(customComparator, customComparatorDescription)); return super.usingComparator(customComparator, customComparatorDescription); } @Override @CheckReturnValue public SELF usingDefaultComparator() { floats = Floats.instance(); return super.usingDefaultComparator(); } private boolean noCustomComparatorSet() { return floats.getComparator() == null; } /** * Verifies that the float value is a finite floating-point value. * <p> * Example: * <pre><code class='java'> // assertion succeeds * assertThat(1.0f).isFinite(); * * // assertions fail * assertThat(Float.NaN).isFinite(); * assertThat(Float.NEGATIVE_INFINITY).isFinite(); * assertThat(Float.POSITIVE_INFINITY).isFinite();</code></pre> * * @return this assertion object. * @throws AssertionError if the actual value is not a finite floating-point value. * @throws AssertionError if the actual value is null. * @see #isNotFinite() * @see #isInfinite() * @see #isNaN() * @see java.lang.Float#isFinite(float) * @since 3.19.0 */ @Override public SELF isFinite() { floats.assertIsFinite(info, actual); return myself; } /** * Verifies that the float value is not a finite floating-point value. * <p> * Note that 'not finite' is not equivalent to infinite as `NaN` is neither finite nor infinite. * <p> * Example: * <pre><code class='java'> // assertions succeed * assertThat(Float.POSITIVE_INFINITY).isNotFinite(); * assertThat(Float.NEGATIVE_INFINITY).isNotFinite(); * assertThat(Float.NaN).isNotFinite(); * * // assertion fails * assertThat(1.0f).isNotFinite();</code></pre> * * @return this assertion object. * @throws AssertionError if the actual value is a finite floating-point value. * @throws AssertionError if the actual value is null. 
* @see #isFinite() * @see #isInfinite() * @see #isNaN() * @see java.lang.Float#isFinite(float) * @since 3.20.0 */ @Override public SELF isNotFinite() { floats.assertIsNotFinite(info, actual); return myself; } /** * Verifies that the float value represents positive infinity or negative infinity. * <p> * Examples: * <pre><code class='java'> // assertions succeed * assertThat(Float.NEGATIVE_INFINITY).isInfinite(); * assertThat(Float.POSITIVE_INFINITY).isInfinite(); * * // assertions fail * assertThat(1.0f).isInfinite(); * assertThat(Float.NaN).isInfinite();</code></pre> * * @return this assertion object. * @throws AssertionError if the actual value represents neither positive infinity nor negative infinity. * @throws AssertionError if the actual value is null. * @see #isNotInfinite() * @see #isFinite() * @see #isNaN() * @see java.lang.Float#isInfinite(float) * @since 3.19.0 */ @Override public SELF isInfinite() { floats.assertIsInfinite(info, actual); return myself; } /** * Verifies that the float value represents neither positive infinity nor negative infinity. * <p> * Examples: * <pre><code class='java'> // assertions succeed * assertThat(1.0f).isNotInfinite(); * assertThat(Float.NaN).isNotInfinite(); * * // assertions fail * assertThat(Float.POSITIVE_INFINITY).isNotInfinite(); * assertThat(Float.NEGATIVE_INFINITY).isNotInfinite();</code></pre> * * @return this assertion object. * @throws AssertionError if the actual value represents positive infinity or negative infinity. * @throws AssertionError if the actual value is null. * @see #isInfinite() * @see #isFinite() * @see #isNaN() * @see java.lang.Float#isInfinite(float) * @since 3.20.0 */ @Override public SELF isNotInfinite() { floats.assertIsNotInfinite(info, actual); return myself; } }
public
java
spring-projects__spring-security
core/src/main/java/org/springframework/security/core/authority/mapping/MapBasedAttributes2GrantedAuthoritiesMapper.java
{ "start": 1220, "end": 1549 }
class ____ the Attributes2GrantedAuthoritiesMapper and * MappableAttributesRetriever interfaces based on the supplied Map. It supports both * one-to-one and one-to-many mappings. The granted authorities to map to can be supplied * either as a String or as a GrantedAuthority object. * * @author Ruud Senden */ public
implements
java
quarkusio__quarkus
core/builder/src/main/java/io/quarkus/builder/ChainBuildException.java
{ "start": 182, "end": 1492 }
class ____ extends Exception { private static final long serialVersionUID = -1143606746171493097L; /** * Constructs a new {@code DeployerChainBuildException} instance. The message is left blank ({@code null}), and no * cause is specified. * */ public ChainBuildException() { } /** * Constructs a new {@code DeployerChainBuildException} instance with an initial message. No * cause is specified. * * @param msg the message */ public ChainBuildException(final String msg) { super(msg); } /** * Constructs a new {@code DeployerChainBuildException} instance with an initial cause. If * a non-{@code null} cause is specified, its message is used to initialize the message of this * {@code DeployerChainBuildException}; otherwise the message is left blank ({@code null}). * * @param cause the cause */ public ChainBuildException(final Throwable cause) { super(cause); } /** * Constructs a new {@code DeployerChainBuildException} instance with an initial message and cause. * * @param msg the message * @param cause the cause */ public ChainBuildException(final String msg, final Throwable cause) { super(msg, cause); } }
ChainBuildException
java
quarkusio__quarkus
extensions/web-dependency-locator/deployment/src/test/java/io/quarkus/webdependency/locator/test/WebDependencyLocatorRootPathTest.java
{ "start": 387, "end": 3146 }
class ____ extends WebDependencyLocatorTestSupport { private static final String META_INF_RESOURCES = "META-INF/resources/"; @RegisterExtension static QuarkusUnitTest runner = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addAsResource(new StringAsset("<html>Hello!<html>"), META_INF_RESOURCES + "index.html") .addAsResource(new StringAsset("Test"), META_INF_RESOURCES + "some/path/test.txt")) .overrideConfigKey("quarkus.http.root-path", "/app") .setForcedDependencies(List.of( Dependency.of("org.webjars", "jquery-ui", JQUERY_UI_VERSION), Dependency.of("org.webjars", "momentjs", MOMENTJS_VERSION), Dependency.of("org.mvnpm", "bootstrap", BOOTSTRAP_VERSION))); @Test public void test() { // Test normal files RestAssured.get("/").then() .statusCode(200) .body(is("<html>Hello!<html>")); RestAssured.get("/index.html").then() .statusCode(200) .body(is("<html>Hello!<html>")); RestAssured.get("/some/path/test.txt").then() .statusCode(200) .body(is("Test")); // Test Existing Web Jars RestAssured.get("/webjars/jquery-ui/jquery-ui.min.js").then() .statusCode(200); RestAssured.get("/webjars/momentjs/min/moment.min.js").then() .statusCode(200); RestAssured.get("/_static/bootstrap/dist/js/bootstrap.min.js").then() .statusCode(200); // Test using version in url of existing Web Jar RestAssured.get("/webjars/jquery-ui/" + JQUERY_UI_VERSION + "/jquery-ui.min.js").then() .statusCode(200); RestAssured.get("/webjars/momentjs/" + MOMENTJS_VERSION + "/min/moment.min.js").then() .statusCode(200); RestAssured.get("/_static/bootstrap/" + BOOTSTRAP_VERSION + "/dist/js/bootstrap.min.js").then() .statusCode(200); // Test non-existing Web Jar RestAssured.get("/webjars/bootstrap/js/bootstrap.min.js").then() .statusCode(404); RestAssured.get("/webjars/bootstrap/4.3.1/js/bootstrap.min.js").then() .statusCode(404); RestAssured.get("/webjars/momentjs/2.25.0/min/moment.min.js").then() .statusCode(404); RestAssured.get("/_static/foundation-sites/6.8.1/dist/js/foundation.esm.js").then() 
.statusCode(404); // Test webjar that does not have a version in the jar path RestAssured.get("/webjars/dcjs/dc.min.js").then() .statusCode(200); } }
WebDependencyLocatorRootPathTest
java
apache__spark
core/src/main/java/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriter.java
{ "start": 3728, "end": 13348 }
class ____<K, V> extends ShuffleWriter<K, V> implements ShuffleChecksumSupport { private static final SparkLogger logger = SparkLoggerFactory.getLogger(BypassMergeSortShuffleWriter.class); private final int fileBufferSize; private final boolean transferToEnabled; private final int numPartitions; private final BlockManager blockManager; private final Partitioner partitioner; private final ShuffleWriteMetricsReporter writeMetrics; private final int shuffleId; private final long mapId; private final Serializer serializer; private final ShuffleExecutorComponents shuffleExecutorComponents; /** Array of file writers, one for each partition */ private DiskBlockObjectWriter[] partitionWriters; private FileSegment[] partitionWriterSegments; @Nullable private MapStatus mapStatus; private long[] partitionLengths; /** Checksum calculator for each partition. Empty when shuffle checksum disabled. */ private final Checksum[] partitionChecksums; /** * Checksum calculator for each partition. Different from the above Checksum, * RowBasedChecksum is independent of the input row order, which is used to * detect whether different task attempts of the same partition produce different * output data or not. */ private final RowBasedChecksum[] rowBasedChecksums; /** * Are we in the process of stopping? Because map tasks can call stop() with success = true * and then call stop() with success = false if they get an exception, we want to make sure * we don't try deleting files, etc twice. 
*/ private boolean stopping = false; BypassMergeSortShuffleWriter( BlockManager blockManager, BypassMergeSortShuffleHandle<K, V> handle, long mapId, SparkConf conf, ShuffleWriteMetricsReporter writeMetrics, ShuffleExecutorComponents shuffleExecutorComponents) throws SparkException { // Use getSizeAsKb (not bytes) to maintain backwards compatibility if no units are provided this.fileBufferSize = (int) (long) conf.get(package$.MODULE$.SHUFFLE_FILE_BUFFER_SIZE()) * 1024; this.transferToEnabled = (boolean) conf.get(package$.MODULE$.SHUFFLE_MERGE_PREFER_NIO()); this.blockManager = blockManager; final ShuffleDependency<K, V, V> dep = handle.dependency(); this.mapId = mapId; this.shuffleId = dep.shuffleId(); this.partitioner = dep.partitioner(); this.numPartitions = partitioner.numPartitions(); this.writeMetrics = writeMetrics; this.serializer = dep.serializer(); this.shuffleExecutorComponents = shuffleExecutorComponents; this.partitionChecksums = createPartitionChecksums(numPartitions, conf); this.rowBasedChecksums = dep.rowBasedChecksums(); } @Override public void write(Iterator<Product2<K, V>> records) throws IOException { assert (partitionWriters == null); ShuffleMapOutputWriter mapOutputWriter = shuffleExecutorComponents .createMapOutputWriter(shuffleId, mapId, numPartitions); try { if (!records.hasNext()) { partitionLengths = mapOutputWriter.commitAllPartitions( ShuffleChecksumHelper.EMPTY_CHECKSUM_VALUE).getPartitionLengths(); mapStatus = MapStatus$.MODULE$.apply( blockManager.shuffleServerId(), partitionLengths, mapId, getAggregatedChecksumValue()); return; } final SerializerInstance serInstance = serializer.newInstance(); final long openStartTime = System.nanoTime(); partitionWriters = new DiskBlockObjectWriter[numPartitions]; partitionWriterSegments = new FileSegment[numPartitions]; for (int i = 0; i < numPartitions; i++) { final Tuple2<TempShuffleBlockId, File> tempShuffleBlockIdPlusFile = blockManager.diskBlockManager().createTempShuffleBlock(); final File 
file = tempShuffleBlockIdPlusFile._2(); final BlockId blockId = tempShuffleBlockIdPlusFile._1(); DiskBlockObjectWriter writer = blockManager.getDiskWriter(blockId, file, serInstance, fileBufferSize, writeMetrics); if (partitionChecksums.length > 0) { writer.setChecksum(partitionChecksums[i]); } partitionWriters[i] = writer; } // Creating the file to write to and creating a disk writer both involve interacting with // the disk, and can take a long time in aggregate when we open many files, so should be // included in the shuffle write time. writeMetrics.incWriteTime(System.nanoTime() - openStartTime); while (records.hasNext()) { final Product2<K, V> record = records.next(); final K key = record._1(); final int partitionId = partitioner.getPartition(key); partitionWriters[partitionId].write(key, record._2()); if (rowBasedChecksums.length > 0) { rowBasedChecksums[partitionId].update(key, record._2()); } } for (int i = 0; i < numPartitions; i++) { try (DiskBlockObjectWriter writer = partitionWriters[i]) { partitionWriterSegments[i] = writer.commitAndGet(); } } partitionLengths = writePartitionedData(mapOutputWriter); mapStatus = MapStatus$.MODULE$.apply( blockManager.shuffleServerId(), partitionLengths, mapId, getAggregatedChecksumValue()); } catch (Exception e) { try { mapOutputWriter.abort(e); } catch (Exception e2) { logger.error("Failed to abort the writer after failing to write map output.", e2); e.addSuppressed(e2); } throw e; } } @Override public long[] getPartitionLengths() { return partitionLengths; } // For test only. @VisibleForTesting RowBasedChecksum[] getRowBasedChecksums() { return rowBasedChecksums; } @VisibleForTesting long getAggregatedChecksumValue() { return RowBasedChecksum.getAggregatedChecksumValue(rowBasedChecksums); } /** * Concatenate all of the per-partition files into a single combined file. * * @return array of lengths, in bytes, of each partition of the file (used by map output tracker). 
*/ private long[] writePartitionedData(ShuffleMapOutputWriter mapOutputWriter) throws IOException { // Track location of the partition starts in the output file if (partitionWriters != null) { final long writeStartTime = System.nanoTime(); try { for (int i = 0; i < numPartitions; i++) { final File file = partitionWriterSegments[i].file(); ShufflePartitionWriter writer = mapOutputWriter.getPartitionWriter(i); if (file.exists()) { if (transferToEnabled) { // Using WritableByteChannelWrapper to make resource closing consistent between // this implementation and UnsafeShuffleWriter. Optional<WritableByteChannelWrapper> maybeOutputChannel = writer.openChannelWrapper(); if (maybeOutputChannel.isPresent()) { writePartitionedDataWithChannel(file, maybeOutputChannel.get()); } else { writePartitionedDataWithStream(file, writer); } } else { writePartitionedDataWithStream(file, writer); } if (!file.delete()) { logger.error("Unable to delete file for partition {}", MDC.of(LogKeys.PARTITION_ID, i)); } } } } finally { writeMetrics.incWriteTime(System.nanoTime() - writeStartTime); } partitionWriters = null; } return mapOutputWriter.commitAllPartitions(getChecksumValues(partitionChecksums)) .getPartitionLengths(); } private void writePartitionedDataWithChannel( File file, WritableByteChannelWrapper outputChannel) throws IOException { boolean copyThrewException = true; try { FileInputStream in = new FileInputStream(file); try (FileChannel inputChannel = in.getChannel()) { Utils.copyFileStreamNIO( inputChannel, outputChannel.channel(), 0L, inputChannel.size()); copyThrewException = false; } finally { Closeables.close(in, copyThrewException); } } finally { Closeables.close(outputChannel, copyThrewException); } } private void writePartitionedDataWithStream(File file, ShufflePartitionWriter writer) throws IOException { boolean copyThrewException = true; FileInputStream in = new FileInputStream(file); OutputStream outputStream; try { outputStream = writer.openStream(); try { 
Utils.copyStream(in, outputStream, false, false); copyThrewException = false; } finally { Closeables.close(outputStream, copyThrewException); } } finally { Closeables.close(in, copyThrewException); } } @Override public Option<MapStatus> stop(boolean success) { if (stopping) { return None$.empty(); } else { stopping = true; if (success) { if (mapStatus == null) { throw new IllegalStateException("Cannot call stop(true) without having called write()"); } return Option.apply(mapStatus); } else { // The map task failed, so delete our output data. if (partitionWriters != null) { try { for (DiskBlockObjectWriter writer : partitionWriters) { // This method explicitly does _not_ throw exceptions: writer.closeAndDelete(); } } finally { partitionWriters = null; } } return None$.empty(); } } } }
BypassMergeSortShuffleWriter
java
grpc__grpc-java
alts/src/test/java/io/grpc/alts/internal/AltsProtocolNegotiatorTest.java
{ "start": 17836, "end": 19366 }
class ____ implements TsiHandshaker { private final TsiHandshaker delegate; DelegatingTsiHandshaker(TsiHandshaker delegate) { this.delegate = delegate; } @Override public void getBytesToSendToPeer(ByteBuffer bytes) throws GeneralSecurityException { delegate.getBytesToSendToPeer(bytes); } @Override public boolean processBytesFromPeer(ByteBuffer bytes) throws GeneralSecurityException { return delegate.processBytesFromPeer(bytes); } @Override public boolean isInProgress() { return delegate.isInProgress(); } @Override public TsiPeer extractPeer() throws GeneralSecurityException { return delegate.extractPeer(); } @Override public Object extractPeerObject() throws GeneralSecurityException { return delegate.extractPeerObject(); } @Override public TsiFrameProtector createFrameProtector(ByteBufAllocator alloc) { InterceptingProtector protector = new InterceptingProtector(delegate.createFrameProtector(alloc)); protectors.add(protector); return protector; } @Override public TsiFrameProtector createFrameProtector(int maxFrameSize, ByteBufAllocator alloc) { InterceptingProtector protector = new InterceptingProtector(delegate.createFrameProtector(maxFrameSize, alloc)); protectors.add(protector); return protector; } @Override public void close() { delegate.close(); } } private static
DelegatingTsiHandshaker
java
assertj__assertj-core
assertj-core/src/main/java/org/assertj/core/util/URLs.java
{ "start": 1155, "end": 4482 }
class ____ { private URLs() {} /** * Loads the text content of a URL into a character string. * * @param url the URL. * @param charsetName the name of the character set to use. * @return the content of the file. * @throws IllegalArgumentException if the given character set is not supported on this platform. * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(URL url, String charsetName) { checkArgumentCharsetIsSupported(charsetName); return contentOf(url, Charset.forName(charsetName)); } /** * Loads the text content of a URL into a character string. * * @param url the URL. * @param charset the character set to use. * @return the content of the URL. * @throws NullPointerException if the given charset is {@code null}. * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(URL url, Charset charset) { requireNonNull(charset, "The charset should not be null"); try { return loadContents(url.openStream(), charset); } catch (IOException e) { throw new UncheckedIOException("Unable to read " + url, e); } } /** * Loads the text content of a URL into a list of strings, each string corresponding to a line. The line endings are * either \n, \r or \r\n. * * @param url the URL. * @param charset the character set to use. * @return the content of the URL. * @throws NullPointerException if the given charset is {@code null}. * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(URL url, Charset charset) { requireNonNull(charset, "The charset should not be null"); try { return loadLines(url.openStream(), charset); } catch (IOException e) { throw new UncheckedIOException("Unable to read " + url, e); } } /** * Loads the text content of a URL into a list of strings, each string corresponding to a line. The line endings are * either \n, \r or \r\n. * * @param url the URL. * @param charsetName the name of the character set to use. * @return the content of the URL. 
* @throws NullPointerException if the given charset is {@code null}. * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(URL url, String charsetName) { checkArgumentCharsetIsSupported(charsetName); return linesOf(url, Charset.forName(charsetName)); } private static String loadContents(InputStream stream, Charset charset) throws IOException { try (StringWriter writer = new StringWriter(); BufferedReader reader = new BufferedReader(new InputStreamReader(stream, charset))) { int c; while ((c = reader.read()) != -1) { writer.write(c); } return writer.toString(); } } private static List<String> loadLines(InputStream stream, Charset charset) throws IOException { try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream, charset))) { return reader.lines().collect(Collectors.toList()); } } private static void checkArgumentCharsetIsSupported(String charsetName) { checkArgument(Charset.isSupported(charsetName), "Charset:<'%s'> is not supported on this system", charsetName); } }
URLs
java
quarkusio__quarkus
extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/VertxHttpRecorder.java
{ "start": 84321, "end": 84778 }
class ____ implements Handler<RoutingContext> { private final ClassLoader currentCl; public HotReplacementRoutingContextHandler(ClassLoader currentCl) { this.currentCl = currentCl; } @Override public void handle(RoutingContext event) { Thread.currentThread().setContextClassLoader(currentCl); hotReplacementHandler.handle(event); } } }
HotReplacementRoutingContextHandler
java
junit-team__junit5
documentation/src/test/java/example/ParameterizedRecordDemo.java
{ "start": 566, "end": 1138 }
class ____ { @SuppressWarnings("JUnitMalformedDeclaration") // tag::example[] @ParameterizedClass @CsvSource({ "apple, 23", "banana, 42" }) record FruitTests(String fruit, int quantity) { @Test void test() { assertFruit(fruit); assertQuantity(quantity); } @Test void anotherTest() { // ... } } // end::example[] static void assertFruit(String fruit) { assertTrue(Arrays.asList("apple", "banana", "cherry", "dewberry").contains(fruit)); } static void assertQuantity(int quantity) { assertTrue(quantity >= 0); } }
ParameterizedRecordDemo
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TranslationAware.java
{ "start": 1041, "end": 1964 }
interface ____ { /** * Can this instance be translated or not? Usually checks whether the * expression arguments are actual fields that exist in Lucene. See {@link Translatable} * for precisely what can be signaled from this method. */ Translatable translatable(LucenePushdownPredicates pushdownPredicates); /** * Is an {@link Expression} translatable? */ static TranslationAware.Translatable translatable(Expression exp, LucenePushdownPredicates lucenePushdownPredicates) { if (exp instanceof TranslationAware aware) { return aware.translatable(lucenePushdownPredicates); } return TranslationAware.Translatable.NO; } /** * Translates the implementing expression into a Query. * If during translation a child needs to be translated first, the handler needs to be used even if the child implements this *
TranslationAware
java
eclipse-vertx__vert.x
vertx-core/src/main/java/io/vertx/core/http/impl/StatisticsGatheringHttpClientStream.java
{ "start": 1034, "end": 5546 }
class ____ implements HttpClientStream { private final HttpClientStream delegate; private final ServerInteraction endpointRequest; StatisticsGatheringHttpClientStream(HttpClientStream delegate, ServerInteraction endpointRequest) { this.delegate = delegate; this.endpointRequest = endpointRequest; } @Override public int id() { return delegate.id(); } @Override public Object metric() { return delegate.metric(); } @Override public Object trace() { return delegate.trace(); } @Override public HttpVersion version() { return delegate.version(); } @Override public HttpClientConnection connection() { return delegate.connection(); } @Override public ContextInternal context() { return delegate.context(); } @Override public Future<Void> writeHead(HttpRequestHead request, boolean chunked, Buffer buf, boolean end, StreamPriority priority, boolean connect) { endpointRequest.reportRequestBegin(); if (end) { endpointRequest.reportRequestEnd(); } return delegate.writeHead(request, chunked, buf, end, priority, connect); } @Override public Future<Void> writeChunk(Buffer buf, boolean end) { if (end) { endpointRequest.reportRequestEnd(); } return delegate.writeChunk(buf, end); } @Override public Future<Void> writeFrame(int type, int flags, Buffer payload) { return delegate.writeFrame(type, flags, payload); } @Override public HttpClientStream continueHandler(Handler<Void> handler) { delegate.continueHandler(handler); return this; } @Override public HttpClientStream earlyHintsHandler(Handler<MultiMap> handler) { delegate.earlyHintsHandler(handler); return this; } @Override public HttpClientStream pushHandler(Handler<HttpClientPush> handler) { delegate.pushHandler(handler); return this; } @Override public HttpClientStream customFrameHandler(Handler<HttpFrame> handler) { delegate.customFrameHandler(handler); return this; } @Override public HttpClientStream headHandler(Handler<HttpResponseHead> handler) { if (handler != null) { delegate.headHandler(multimap -> { 
endpointRequest.reportResponseBegin(); handler.handle(multimap); }); } else { delegate.headHandler(null); } return this; } @Override public HttpClientStream dataHandler(Handler<Buffer> handler) { delegate.dataHandler(handler); return this; } @Override public HttpClientStream trailersHandler(Handler<MultiMap> handler) { if (handler != null) { delegate.trailersHandler(multimap -> { endpointRequest.reportResponseEnd(); handler.handle(multimap); }); } else { delegate.trailersHandler(null); } return this; } @Override public HttpClientStream priorityChangeHandler(Handler<StreamPriority> handler) { delegate.priorityChangeHandler(handler); return this; } @Override public HttpClientStream closeHandler(Handler<Void> handler) { delegate.closeHandler(handler); return this; } @Override public HttpClientStream pause() { delegate.pause(); return this; } @Override public HttpClientStream fetch(long amount) { delegate.fetch(amount); return this; } @Override public Future<Void> writeReset(long code) { return delegate.writeReset(code); } @Override public StreamPriority priority() { return delegate.priority(); } @Override public HttpClientStream updatePriority(StreamPriority streamPriority) { delegate.updatePriority(streamPriority); return this; } @Override public HttpClientStream resetHandler(Handler<Long> handler) { delegate.resetHandler(handler); return this; } @Override public HttpClientStream exceptionHandler(@Nullable Handler<Throwable> handler) { if (handler != null) { delegate.exceptionHandler(err -> { endpointRequest.reportFailure(err); handler.handle(err); }); } else { delegate.exceptionHandler(null); } return this; } @Override @Fluent public HttpClientStream setWriteQueueMaxSize(int maxSize) { return delegate.setWriteQueueMaxSize(maxSize); } @Override public boolean isWritable() { return delegate.isWritable(); } @Override @Fluent public HttpClientStream drainHandler(@Nullable Handler<Void> handler) { return delegate.drainHandler(handler); } }
StatisticsGatheringHttpClientStream
java
spring-projects__spring-boot
module/spring-boot-data-jpa-test/src/test/java/org/springframework/boot/data/jpa/test/autoconfigure/DataJpaTestSchemaCredentialsIntegrationTests.java
{ "start": 1340, "end": 1810 }
class ____ { @Autowired private DataSource dataSource; @Test void replacesDefinedDataSourceWithEmbeddedDefault() throws Exception { String product = this.dataSource.getConnection().getMetaData().getDatabaseProductName(); assertThat(product).isEqualTo("H2"); assertThat(new JdbcTemplate(this.dataSource).queryForList("SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES", String.class)) .contains("EXAMPLE"); } }
DataJpaTestSchemaCredentialsIntegrationTests
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQueryTests.java
{ "start": 1630, "end": 5391 }
class ____ extends ESTestCase { public void testBasics() { Sort sort1 = new Sort(new SortedNumericSortField("field1", SortField.Type.INT), new SortedSetSortField("field2", false)); Sort sort2 = new Sort(new SortedNumericSortField("field1", SortField.Type.INT), new SortedSetSortField("field3", false)); FieldDoc fieldDoc1 = new FieldDoc(0, 0f, new Object[] { 5, new BytesRef("foo") }); FieldDoc fieldDoc2 = new FieldDoc(0, 0f, new Object[] { 5, new BytesRef("foo") }); SearchAfterSortedDocQuery query1 = new SearchAfterSortedDocQuery(sort1, fieldDoc1); SearchAfterSortedDocQuery query2 = new SearchAfterSortedDocQuery(sort1, fieldDoc2); SearchAfterSortedDocQuery query3 = new SearchAfterSortedDocQuery(sort2, fieldDoc2); QueryUtils.check(query1); QueryUtils.checkEqual(query1, query2); QueryUtils.checkUnequal(query1, query3); } public void testInvalidSort() { Sort sort = new Sort(new SortedNumericSortField("field1", SortField.Type.INT)); FieldDoc fieldDoc = new FieldDoc(0, 0f, new Object[] { 4, 5 }); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new SearchAfterSortedDocQuery(sort, fieldDoc)); assertThat(ex.getMessage(), equalTo("after doc has 2 value(s) but sort has 1.")); } public void testRandom() throws IOException { final int numDocs = randomIntBetween(100, 200); final Document doc = new Document(); final Directory dir = newDirectory(); Sort sort = new Sort( new SortedNumericSortField("number1", SortField.Type.INT, randomBoolean()), new SortField("string", SortField.Type.STRING, randomBoolean()) ); final IndexWriterConfig config = new IndexWriterConfig(); config.setIndexSort(sort); final RandomIndexWriter w = new RandomIndexWriter(random(), dir, config); for (int i = 0; i < numDocs; ++i) { int rand = randomIntBetween(0, 10); doc.add(new SortedNumericDocValuesField("number", rand)); doc.add(new SortedDocValuesField("string", new BytesRef(randomAlphaOfLength(randomIntBetween(5, 50))))); w.addDocument(doc); doc.clear(); if (rarely()) { 
w.commit(); } } final IndexReader reader = w.getReader(); final IndexSearcher searcher = newSearcher(reader); int step = randomIntBetween(1, 10); FixedBitSet bitSet = new FixedBitSet(numDocs); TopDocs topDocs = null; for (int i = 0; i < numDocs;) { if (topDocs != null) { FieldDoc after = (FieldDoc) topDocs.scoreDocs[topDocs.scoreDocs.length - 1]; topDocs = searcher.search(new SearchAfterSortedDocQuery(sort, after), step, sort); } else { topDocs = searcher.search(new MatchAllDocsQuery(), step, sort); } i += step; for (ScoreDoc topDoc : topDocs.scoreDocs) { int readerIndex = ReaderUtil.subIndex(topDoc.doc, reader.leaves()); final LeafReaderContext leafReaderContext = reader.leaves().get(readerIndex); int docRebase = topDoc.doc - leafReaderContext.docBase; if (leafReaderContext.reader().hasDeletions()) { assertTrue(leafReaderContext.reader().getLiveDocs().get(docRebase)); } assertFalse(bitSet.get(topDoc.doc)); bitSet.set(topDoc.doc); } } assertThat(bitSet.cardinality(), equalTo(reader.numDocs())); w.close(); reader.close(); dir.close(); } }
SearchAfterSortedDocQueryTests
java
micronaut-projects__micronaut-core
http-server-netty/src/main/java/io/micronaut/http/server/netty/handler/accesslog/element/NotImplementedElementBuilder.java
{ "start": 764, "end": 1264 }
class ____ implements LogElementBuilder { private static final String[] NOT_IMPLEMENTED = new String[] { "l", "u" }; @Override public int getOrder() { return Ordered.LOWEST_PRECEDENCE; } @Override public LogElement build(String token, String param) { for (String element: NOT_IMPLEMENTED) { if (token.equals(element)) { return ConstantElement.UNKNOWN; } } return null; } }
NotImplementedElementBuilder
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
{ "start": 32478, "end": 32810 }
class ____ extends Options.PathOption implements Option { FileOption(Path path) { super(path); } } /** * @deprecated only used for backwards-compatibility in the createWriter methods * that take FileSystem. */ @Deprecated private static
FileOption
java
micronaut-projects__micronaut-core
http-server/src/test/java/io/micronaut/http/server/exceptions/response/HtmlErrorResponseBodyProviderTest.java
{ "start": 706, "end": 3565 }
class ____ extends Specification { private static final Logger LOG = LoggerFactory.getLogger(HtmlErrorResponseBodyProviderTest.class); @Inject HtmlErrorResponseBodyProvider htmlProvider; @ParameterizedTest @EnumSource(HttpStatus.class) void htmlPageforStatus(HttpStatus status) { if (status.getCode() >= 400) { ErrorContext errorContext = new ErrorContext() { @Override public @NonNull HttpRequest<?> getRequest() { return new SimpleHttpRequest(HttpMethod.GET, "/foobar", null); } @Override public @NonNull Optional<Throwable> getRootCause() { return Optional.empty(); } @Override public @NonNull List<Error> getErrors() { return Collections.emptyList(); } }; HttpResponse<?> response = new HttpResponse<Object>() { @Override public HttpStatus getStatus() { return status; } @Override public int code() { return status.getCode(); } @Override public String reason() { return status.getReason(); } @Override public HttpHeaders getHeaders() { return null; } @Override public MutableConvertibleValues<Object> getAttributes() { return null; } @Override public Optional<Object> getBody() { return Optional.empty(); } }; String html = htmlProvider.body(errorContext, response); assertNotNull(html); assertExpectedSubstringInHtml(status.getReason(), html); assertExpectedSubstringInHtml("<!doctype html>", html); if (status.getCode() == 404) { assertExpectedSubstringInHtml("The page you were looking for doesn’t exist", html); assertExpectedSubstringInHtml("You may have mistyped the address or the page may have moved", html); } else if (status.getCode() == 413) { assertExpectedSubstringInHtml("The file or data you are trying to upload exceeds the allowed size", html); assertExpectedSubstringInHtml("Please try again with a smaller file", html); } } } private void assertExpectedSubstringInHtml(String expected, String html) { if (!html.contains(expected)) { LOG.trace("{}", html); } assertTrue(html.contains(expected)); } }
HtmlErrorResponseBodyProviderTest
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/nullvaluemapping/_target/CarDto.java
{ "start": 212, "end": 1065 }
class ____ { private String make; private int seatCount; private String model; private String catalogId; public CarDto() { } public CarDto(String make, int seatCount) { this.make = make; this.seatCount = seatCount; } public String getMake() { return make; } public void setMake(String make) { this.make = make; } public int getSeatCount() { return seatCount; } public void setSeatCount(int seatCount) { this.seatCount = seatCount; } public String getModel() { return model; } public void setModel(String model) { this.model = model; } public String getCatalogId() { return catalogId; } public void setCatalogId(String catalogId) { this.catalogId = catalogId; } }
CarDto
java
google__error-prone
core/src/test/java/com/google/errorprone/fixes/SuggestedFixesTest.java
{ "start": 24334, "end": 25506 }
class ____ extends BugChecker implements BugChecker.MethodTreeMatcher { private final String newReturnType; public ReplaceReturnTypeString(String newReturnType) { this.newReturnType = newReturnType; } @Override public Description matchMethod(MethodTree tree, VisitorState state) { SuggestedFix.Builder builder = SuggestedFix.builder(); String qualifiedName = SuggestedFixes.qualifyType(state, builder, newReturnType); return describeMatch( tree.getReturnType(), builder.replace(tree.getReturnType(), qualifiedName).build()); } } @Test public void qualifyTypeString_nestedType() { qualifyNestedType(new ReplaceReturnTypeString("pkg.Outer.Inner")); } @Test public void qualifyTypeString_deeplyNestedType() { qualifyDeeplyNestedType(new ReplaceReturnTypeString("pkg.Outer.Inner.Innermost")); } @Test public void qualifiedName_canImportInnerClass() { BugCheckerRefactoringTestHelper.newInstance(new ReplaceReturnTypeString("foo.A.B"), getClass()) .addInputLines( "foo/A.java", """ package foo; public
ReplaceReturnTypeString
java
spring-projects__spring-framework
spring-context/src/main/java/org/springframework/context/annotation/ScopedProxyCreator.java
{ "start": 898, "end": 1124 }
class ____ to just introduce an AOP framework dependency * when actually creating a scoped proxy. * * @author Juergen Hoeller * @since 3.0 * @see org.springframework.aop.scope.ScopedProxyUtils#createScopedProxy */ final
used
java
spring-projects__spring-boot
core/spring-boot/src/test/java/org/springframework/boot/io/Base64ProtocolResolverTests.java
{ "start": 1124, "end": 2019 }
class ____ { @Test void base64LocationResolves() throws IOException { String location = Base64.getEncoder().encodeToString("test value".getBytes()); Resource resource = new Base64ProtocolResolver().resolve("base64:" + location, new DefaultResourceLoader()); assertThat(resource).isNotNull(); assertThat(resource.getContentAsString(StandardCharsets.UTF_8)).isEqualTo("test value"); } @Test void base64LocationWithInvalidBase64ThrowsException() { assertThatIllegalArgumentException() .isThrownBy( () -> new Base64ProtocolResolver().resolve("base64:not valid base64", new DefaultResourceLoader())) .withMessageContaining("Illegal base64"); } @Test void locationWithoutPrefixDoesNotResolve() { Resource resource = new Base64ProtocolResolver().resolve("file:notbase64.txt", new DefaultResourceLoader()); assertThat(resource).isNull(); } }
Base64ProtocolResolverTests
java
micronaut-projects__micronaut-core
inject-java/src/main/java/io/micronaut/annotation/processing/MixinVisitorProcessor.java
{ "start": 2089, "end": 5765 }
class ____ extends AbstractInjectAnnotationProcessor { @Override public Set<String> getSupportedAnnotationTypes() { return Set.of(Mixin.class.getName()); } @Override public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) { if (!(annotations.size() == 1 && Generated.class.getName().equals(annotations.iterator().next().getQualifiedName().toString()))) { for (Object nativeType : postponedTypes.values()) { AbstractAnnotationMetadataBuilder.clearMutated(nativeType); } var elements = new LinkedHashSet<TypeElement>(); postponedTypes.keySet().stream().map(elementUtils::getTypeElement).filter(Objects::nonNull).forEach(elements::add); postponedTypes.clear(); JavaElementFactory elementFactory = javaVisitorContext.getElementFactory(); JavaElementAnnotationMetadataFactory elementAnnotationMetadataFactory = javaVisitorContext.getElementAnnotationMetadataFactory(); for (TypeElement annotation : annotations) { modelUtils.resolveTypeElements( roundEnv.getElementsAnnotatedWith(annotation) ).forEach(elements::add); } List<JavaClassElement> javaClassElements = elements.stream() .map(typeElement -> elementFactory.newSourceClassElement(typeElement, elementAnnotationMetadataFactory)) .toList(); for (JavaClassElement mixin : javaClassElements) { try { AnnotationValue<Mixin> mixinAnnotation = mixin.getAnnotation(Mixin.class); if (mixinAnnotation == null) { continue; } String target = mixinAnnotation.stringValue("target").orElse(mixinAnnotation.stringValue().orElse(null)); if (target == null || Object.class.getName().equals(target)) { continue; } ClassElement mixinTarget = javaVisitorContext.getClassElement(target, elementAnnotationMetadataFactory).orElse(null); if (mixinTarget == null) { javaVisitorContext.warn("Cannot access class: " + target, mixin); continue; } VisitorUtils.applyMixin(mixinAnnotation, mixin, mixinTarget, javaVisitorContext); } catch (ProcessingException e) { var originatingElement = (JavaNativeElement) e.getOriginatingElement(); if 
(originatingElement == null) { originatingElement = mixin.getNativeType(); } error(originatingElement.element(), e.getMessage()); } catch (PostponeToNextRoundException e) { postponedTypes.put(mixin.getCanonicalName(), e.getNativeErrorElement()); } catch (ElementPostponedToNextRoundException e) { Object nativeType = e.getOriginatingElement().getNativeType(); Element element = PostponeToNextRoundException.resolvedFailedElement(nativeType); if (element != null) { postponedTypes.put(mixin.getCanonicalName(), element); } else { // should never happen. throw e; } } } } if (roundEnv.processingOver()) { javaVisitorContext.finish(); } return false; } }
MixinVisitorProcessor
java
apache__camel
components/camel-microprofile/camel-microprofile-fault-tolerance/src/main/java/org/apache/camel/component/microprofile/faulttolerance/FaultToleranceReifier.java
{ "start": 1732, "end": 9126 }
class ____ extends ProcessorReifier<CircuitBreakerDefinition> { public FaultToleranceReifier(Route route, CircuitBreakerDefinition definition) { super(route, definition); } @Override public Processor createProcessor() throws Exception { // create the regular and fallback processors Processor processor = createChildProcessor(true); Processor fallback = null; if (definition.getOnFallback() != null && !definition.getOnFallback().getOutputs().isEmpty()) { fallback = createOutputsProcessor(definition.getOnFallback().getOutputs()); } boolean fallbackViaNetwork = definition.getOnFallback() != null && parseBoolean(definition.getOnFallback().getFallbackViaNetwork(), false); if (fallbackViaNetwork) { throw new UnsupportedOperationException("camel-microprofile-fault-tolerance does not support onFallbackViaNetwork"); } final FaultToleranceConfigurationCommon config = buildFaultToleranceConfiguration(); FaultToleranceConfiguration configuration = new FaultToleranceConfiguration(); configureCircuitBreaker(config, configuration); configureTimeLimiter(config, configuration); configureBulkhead(config, configuration); FaultToleranceProcessor answer = new FaultToleranceProcessor(configuration, processor, fallback); answer.setDisabled(isDisabled(camelContext, definition)); // using any existing circuit breakers? 
if (config.getTypedGuard() != null) { TypedGuard<Exchange> cb = mandatoryLookup(parseString(config.getTypedGuard()), TypedGuard.class); answer.setTypedGuard(cb); } configureExecutorService(answer, config); return answer; } private void configureCircuitBreaker(FaultToleranceConfigurationCommon config, FaultToleranceConfiguration target) { target.setDelay(parseDuration(config.getDelay(), 5000)); target.setSuccessThreshold(parseInt(config.getSuccessThreshold(), 1)); target.setRequestVolumeThreshold(parseInt(config.getRequestVolumeThreshold(), 20)); if (config.getFailureRatio() != null) { float num = parseFloat(config.getFailureRatio(), 50); if (num < 1 || num > 100) { throw new IllegalArgumentException("FailureRatio must be between 1 and 100, was: " + num); } float percent = num / 100; target.setFailureRatio(percent); } else { target.setFailureRatio(0.5f); } } private void configureTimeLimiter(FaultToleranceConfigurationCommon config, FaultToleranceConfiguration target) { target.setTimeoutEnabled(parseBoolean(config.getTimeoutEnabled(), false)); target.setTimeoutDuration(parseDuration(config.getTimeoutDuration(), 1000)); target.setTimeoutPoolSize(parseInt(config.getTimeoutPoolSize(), 10)); } private void configureBulkhead(FaultToleranceConfigurationCommon config, FaultToleranceConfiguration target) { if (!parseBoolean(config.getBulkheadEnabled(), false)) { return; } target.setBulkheadMaxConcurrentCalls(parseInt(config.getBulkheadMaxConcurrentCalls(), 10)); target.setBulkheadWaitingTaskQueue(parseInt(config.getBulkheadWaitingTaskQueue(), 10)); } private void configureExecutorService(FaultToleranceProcessor processor, FaultToleranceConfigurationCommon config) { if (config.getThreadOffloadExecutorService() != null) { String ref = config.getThreadOffloadExecutorService(); boolean shutdownThreadPool = false; ExecutorService executorService = lookupByNameAndType(ref, ExecutorService.class); if (executorService == null) { executorService = 
lookupExecutorServiceRef("CircuitBreaker", definition, ref); shutdownThreadPool = true; } processor.setExecutorService(executorService); processor.setShutdownExecutorService(shutdownThreadPool); } } // ******************************* // Helpers // ******************************* FaultToleranceConfigurationDefinition buildFaultToleranceConfiguration() throws Exception { Map<String, Object> properties = new HashMap<>(); final PropertyConfigurer configurer = PluginHelper.getConfigurerResolver(camelContext) .resolvePropertyConfigurer(FaultToleranceConfigurationDefinition.class.getName(), camelContext); // Extract properties from default configuration, the one configured on // camel context takes the precedence over those in the registry loadProperties(properties, Suppliers.firstNotNull( () -> camelContext.getCamelContextExtension().getContextPlugin(Model.class) .getFaultToleranceConfiguration(null), () -> lookupByNameAndType(FaultToleranceConstants.DEFAULT_FAULT_TOLERANCE_CONFIGURATION_ID, FaultToleranceConfigurationDefinition.class)), configurer); // Extract properties from referenced configuration, the one configured // on camel context takes the precedence over those in the registry if (definition.getConfiguration() != null) { final String ref = parseString(definition.getConfiguration()); loadProperties(properties, Suppliers.firstNotNull( () -> camelContext.getCamelContextExtension().getContextPlugin(Model.class) .getFaultToleranceConfiguration(ref), () -> mandatoryLookup(ref, FaultToleranceConfigurationDefinition.class)), configurer); } // Extract properties from local configuration loadProperties(properties, Optional.ofNullable(definition.getFaultToleranceConfiguration()), configurer); // Apply properties to a new configuration FaultToleranceConfigurationDefinition config = new FaultToleranceConfigurationDefinition(); PropertyBindingSupport.build() .withCamelContext(camelContext) .withIgnoreCase(true) .withConfigurer(configurer) .withProperties(properties) 
.withTarget(config) .bind(); return config; } private void loadProperties(Map<String, Object> properties, Optional<?> optional, PropertyConfigurer configurer) { BeanIntrospection beanIntrospection = PluginHelper.getBeanIntrospection(camelContext); optional.ifPresent(bean -> { if (configurer instanceof ExtendedPropertyConfigurerGetter) { ExtendedPropertyConfigurerGetter getter = (ExtendedPropertyConfigurerGetter) configurer; Map<String, Object> types = getter.getAllOptions(bean); types.forEach((k, t) -> { Object value = getter.getOptionValue(bean, k, true); if (value != null) { properties.put(k, value); } }); } else { // no configurer found so use bean introspection (reflection) beanIntrospection.getProperties(bean, properties, null, false); } }); } }
FaultToleranceReifier
java
apache__hadoop
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/aggregate/LongValueMax.java
{ "start": 990, "end": 1150 }
class ____ a value aggregator that maintain the maximum of * a sequence of long values. */ @InterfaceAudience.Public @InterfaceStability.Stable public
implements
java
apache__commons-lang
src/main/java/org/apache/commons/lang3/concurrent/LazyInitializer.java
{ "start": 2150, "end": 3017 }
class ____ extends LazyInitializer<ComplexObject> { * &#064;Override * protected ComplexObject initialize() { * return new ComplexObject(); * } * } * } * </pre> * * <p> * Access to the data object is provided through the {@code get()} method. So, code that wants to obtain the {@code ComplexObject} instance would simply look * like this: * </p> * * <pre> * // Create an instance of the lazy initializer * ComplexObjectInitializer initializer = new ComplexObjectInitializer(); * ... * // When the object is actually needed: * ComplexObject cobj = initializer.get(); * </pre> * * <p> * If multiple threads call the {@code get()} method when the object has not yet been created, they are blocked until initialization completes. The algorithm * guarantees that only a single instance of the wrapped object
ComplexObjectInitializer
java
hibernate__hibernate-orm
hibernate-testing/src/main/java/org/hibernate/testing/orm/AsyncExecutor.java
{ "start": 378, "end": 1584 }
class ____ { // Need more than a single thread, because not all databases support cancellation of statements waiting for locks private static final ExecutorService EXECUTOR_SERVICE = Executors.newCachedThreadPool(); public static void executeAsync(Runnable action) { final Future<?> future = EXECUTOR_SERVICE.submit( action ); try { future.get(); } catch (InterruptedException e) { future.cancel( true ); throw new TimeoutException( "Thread interruption", e ); } catch (ExecutionException e) { throw new RuntimeException( "Async execution error", e.getCause() ); } } public static void executeAsync(int timeout, TimeUnit timeoutUnit, Runnable action) { final Future<?> future = EXECUTOR_SERVICE.submit( action ); try { future.get( timeout, timeoutUnit ); } catch (InterruptedException e) { future.cancel( true ); throw new TimeoutException( "Thread interruption", e ); } catch (java.util.concurrent.TimeoutException e) { future.cancel( true ); throw new TimeoutException( "Thread timeout exceeded", e ); } catch (ExecutionException e) { throw new RuntimeException( "Async execution error", e.getCause() ); } } public static
AsyncExecutor
java
redisson__redisson
redisson/src/main/java/org/redisson/transaction/HashKey.java
{ "start": 768, "end": 1410 }
class ____ { final Codec codec; final String name; public HashKey(String name, Codec codec) { this.name = name; this.codec = codec; } public Codec getCodec() { return codec; } public String getName() { return name; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; HashKey hashKey = (HashKey) o; return Objects.equals(name, hashKey.name); } @Override public int hashCode() { return Objects.hashCode(name); } }
HashKey