language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/filter/UnknownPropertyDeserTest.java | {
"start": 2612,
"end": 2707
} | class ____ {
@JsonUnwrapped
UnwrappedChild child;
static | IgnoreUnknownUnwrapped |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schema/CheckTest.java | {
"start": 1287,
"end": 2853
} | class ____ {
@AfterEach
void tearDown(EntityManagerFactoryScope factoryScope) {
factoryScope.dropData();
}
@Test
public void test(EntityManagerFactoryScope factoryScope) {
factoryScope.inTransaction( entityManager -> {
Book book = new Book();
book.setId(0L);
book.setTitle("Hibernate in Action");
book.setPrice(49.99d);
entityManager.persist(book);
});
factoryScope.inTransaction( entityManager -> {
Book book = entityManager.find(Book.class, 0L);
assertEquals( 1, book.edition );
assertEquals( 2, book.nextEdition );
});
try {
factoryScope.inTransaction( entityManager -> {
//tag::schema-generation-database-checks-persist-example[]
Book book = new Book();
book.setId(1L);
book.setPrice(49.99d);
book.setTitle("High-Performance Java Persistence");
book.setIsbn("11-11-2016");
entityManager.persist(book);
//end::schema-generation-database-checks-persist-example[]
});
fail( "Should fail because the ISBN is not of the right length!" );
}
catch (PersistenceException e) {
assertInstanceOf( ConstraintViolationException.class, e.getCause() );
}
try {
factoryScope.inTransaction( entityManager -> {
Person person = new Person();
person.setId(1L);
person.setName("John Doe");
person.setCode(0L);
entityManager.persist(person);
});
fail( "Should fail because the code is 0!" );
}
catch (PersistenceException e) {
assertInstanceOf( ConstraintViolationException.class, e.getCause() );
}
}
@Entity(name = "Person")
public static | CheckTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/identifier/PooledOptimizerTest.java | {
"start": 1346,
"end": 2561
} | class ____ {
@Id
@GeneratedValue(
strategy = GenerationType.SEQUENCE,
generator = "product_generator"
)
@GenericGenerator(
name = "product_generator",
type = org.hibernate.id.enhanced.SequenceStyleGenerator.class,
parameters = {
@Parameter(name = "sequence_name", value = "product_sequence"),
@Parameter(name = "initial_value", value = "1"),
@Parameter(name = "increment_size", value = "3"),
@Parameter(name = "optimizer", value = "pooled-lo")
}
)
private Long id;
@Column(name = "p_name")
private String name;
@Column(name = "p_number")
private String number;
//Getters and setters are omitted for brevity
//end::identifiers-generators-pooled-lo-optimizer-mapping-example[]
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getNumber() {
return number;
}
public void setNumber(String number) {
this.number = number;
}
//tag::identifiers-generators-pooled-lo-optimizer-mapping-example[]
}
//end::identifiers-generators-pooled-lo-optimizer-mapping-example[]
}
| Product |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/authentication/event/AuthenticationFailureProxyUntrustedEvent.java | {
"start": 1004,
"end": 1334
} | class ____ extends AbstractAuthenticationFailureEvent {
@Serial
private static final long serialVersionUID = 1801476426012753252L;
public AuthenticationFailureProxyUntrustedEvent(Authentication authentication, AuthenticationException exception) {
super(authentication, exception);
}
}
| AuthenticationFailureProxyUntrustedEvent |
java | google__guice | core/src/com/google/inject/spi/ProvisionListener.java | {
"start": 2139,
"end": 2563
} | class ____<T> {
/**
* Returns the Binding this is provisioning.
*
* <p>You must not call {@link Provider#get()} on the provider returned by {@link
* Binding#getProvider}, otherwise you will get confusing error messages.
*/
public abstract Binding<T> getBinding();
/** Performs the provision, returning the object provisioned. */
public abstract T provision();
}
}
| ProvisionInvocation |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/ValueSerializer.java | {
"start": 2035,
"end": 5838
} | class ____ (using classname as tag) to their Kryo registration.
*
* <p>This map serves as a preview of the final registration result of the Kryo instance, taking
* into account registration overwrites.
*
* <p>Currently, we only have one single registration for the value type. Nevertheless, we keep
* this information here for future compatibility.
*/
private LinkedHashMap<String, KryoRegistration> kryoRegistrations;
private transient Kryo kryo;
private transient T copyInstance;
// --------------------------------------------------------------------------------------------
public ValueSerializer(Class<T> type) {
this.type = checkNotNull(type);
this.kryoRegistrations = asKryoRegistrations(type);
}
// --------------------------------------------------------------------------------------------
@Override
public boolean isImmutableType() {
return false;
}
@Override
public ValueSerializer<T> duplicate() {
return new ValueSerializer<T>(type);
}
@Override
public T createInstance() {
return InstantiationUtil.instantiate(this.type);
}
@Override
public T copy(T from) {
checkKryoInitialized();
return KryoUtils.copy(from, kryo, this);
}
@Override
public T copy(T from, T reuse) {
checkKryoInitialized();
return KryoUtils.copy(from, reuse, kryo, this);
}
@Override
public int getLength() {
return -1;
}
@Override
public void serialize(T value, DataOutputView target) throws IOException {
value.write(target);
}
@Override
public T deserialize(DataInputView source) throws IOException {
return deserialize(createInstance(), source);
}
@Override
public T deserialize(T reuse, DataInputView source) throws IOException {
reuse.read(source);
return reuse;
}
@Override
public void copy(DataInputView source, DataOutputView target) throws IOException {
if (this.copyInstance == null) {
this.copyInstance = InstantiationUtil.instantiate(type);
}
this.copyInstance.read(source);
this.copyInstance.write(target);
}
private void checkKryoInitialized() {
if (this.kryo == null) {
this.kryo = new Kryo();
DefaultInstantiatorStrategy initStrategy = new DefaultInstantiatorStrategy();
initStrategy.setFallbackInstantiatorStrategy(new StdInstantiatorStrategy());
kryo.setInstantiatorStrategy(initStrategy);
// this.kryo.setAsmEnabled(true);
KryoUtils.applyRegistrations(
this.kryo, kryoRegistrations.values(), this.kryo.getNextRegistrationId());
}
}
// --------------------------------------------------------------------------------------------
@Override
public int hashCode() {
return this.type.hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj instanceof ValueSerializer) {
ValueSerializer<?> other = (ValueSerializer<?>) obj;
return type == other.type;
} else {
return false;
}
}
private Class<T> getValueType() {
return type;
}
// --------------------------------------------------------------------------------------------
// Serializer configuration snapshotting & compatibility
// --------------------------------------------------------------------------------------------
@Override
public TypeSerializerSnapshot<T> snapshotConfiguration() {
return new ValueSerializerSnapshot<>(type);
}
/** {@link ValueSerializer} snapshot class. */
public static final | tag |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/collection/DetachedCollectionChangeTest.java | {
"start": 3191,
"end": 8809
} | class ____ implements Serializable {
private String value1;
private String value2;
public CompositeName() {
}
public CompositeName(String value1, String value2) {
this.value1 = value1;
this.value2 = value2;
}
public String getValue1() {
return value1;
}
public void setValue1(String value1) {
this.value1 = value1;
}
public String getValue2() {
return value2;
}
public void setValue2(String value2) {
this.value2 = value2;
}
}
private Integer ruleName1Id;
private Integer ruleName2Id;
private Integer alertId;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
RuleName ruleName1 = new RuleName();
RuleName ruleName2 = new RuleName();
CompositeName compositeName1 = new CompositeName( "First1", "Last1" );
CompositeName compositeName2 = new CompositeName( "First2", "Last2" );
Alert alert = new Alert();
alert.getRuleNames().add( ruleName1 );
alert.getRuleNames().add( ruleName2 );
alert.getNames().add( "N1" );
alert.getNames().add( "N2" );
alert.getComposites().add( compositeName1 );
alert.getComposites().add( compositeName2 );
// Revision 1
em.getTransaction().begin();
em.persist( ruleName1 );
em.persist( ruleName2 );
em.persist( alert );
em.getTransaction().commit();
alertId = alert.id;
ruleName1Id = ruleName1.id;
ruleName2Id = ruleName2.id;
} );
}
@Test
@Order(1)
public void testRevisionsCounts(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( Alert.class, alertId ) );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( RuleName.class, ruleName1Id ) );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( RuleName.class, ruleName2Id ) );
} );
}
@Test
@Order(2)
public void testClearAndAddWithinTransactionDoesNotChangeAnything(EntityManagerFactoryScope scope) {
scope.inTransaction( em -> {
final Alert alert = em.find( Alert.class, alertId );
List<RuleName> ruleNamesClone = new ArrayList<>( alert.getRuleNames() );
List<String> namesClone = new ArrayList<>( alert.getNames() );
List<CompositeName> compositeNamesClones = new ArrayList<>( alert.getComposites() );
alert.getRuleNames().clear();
alert.getRuleNames().addAll( ruleNamesClone );
alert.getNames().clear();
alert.getNames().addAll( namesClone );
alert.getComposites().clear();
alert.getComposites().addAll( compositeNamesClones );
em.persist( alert );
} );
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( Alert.class, alertId ) );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( RuleName.class, ruleName1Id ) );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( RuleName.class, ruleName2Id ) );
} );
}
@Test
@Order(3)
public void testClearAddDetachedOutsideTransaction(EntityManagerFactoryScope scope) {
final RuleName ruleName1;
final RuleName ruleName2;
final CompositeName compositeName1 = new CompositeName( "First1", "Last1" );
final CompositeName compositeName2 = new CompositeName( "First2", "Last2" );
// Load entities outside transaction
ruleName1 = scope.fromTransaction( em -> em.find( RuleName.class, ruleName1Id ) );
ruleName2 = scope.fromTransaction( em -> em.find( RuleName.class, ruleName2Id ) );
List<RuleName> ruleNamesClone = Arrays.asList( ruleName1, ruleName2 );
List<String> namesClone = Arrays.asList( "N1", "N2" );
List<CompositeName> compositeNamesClone = Arrays.asList( compositeName1, compositeName2 );
scope.inTransaction( em -> {
Alert alert = em.find( Alert.class, alertId );
alert.getRuleNames().clear();
alert.getRuleNames().addAll( ruleNamesClone );
alert.getNames().clear();
alert.getNames().addAll( namesClone );
alert.getComposites().clear();
alert.getComposites().addAll( compositeNamesClone );
em.persist( alert );
} );
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( Alert.class, alertId ) );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( RuleName.class, ruleName1Id ) );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( RuleName.class, ruleName2Id ) );
} );
}
@Test
@Order(4)
public void testClearAddOneWithinTransaction(EntityManagerFactoryScope scope) {
scope.inTransaction( em -> {
Alert alert = em.find( Alert.class, alertId );
List<RuleName> ruleNamesClone = new ArrayList<>( alert.getRuleNames() );
List<String> namesClone = new ArrayList<>( alert.getNames() );
List<CompositeName> compositeNamesClones = new ArrayList<>( alert.getComposites() );
alert.getRuleNames().clear();
alert.getRuleNames().add( ruleNamesClone.get( 0 ) );
alert.getNames().clear();
alert.getNames().add( namesClone.get( 0 ) );
alert.getComposites().clear();
alert.getComposites().add( compositeNamesClones.get( 0 ) );
em.persist( alert );
} );
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( Arrays.asList( 1, 2 ), auditReader.getRevisions( Alert.class, alertId ) );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( RuleName.class, ruleName1Id ) );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( RuleName.class, ruleName2Id ) );
} );
}
}
| CompositeName |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/writeClassName/MapTest.java | {
"start": 1310,
"end": 1592
} | class ____ {
private Map<String, Object> value = new HashMap<String, Object>();
public Map<String, Object> getValue() {
return value;
}
public void setValue(Map<String, Object> value) {
this.value = value;
}
}
}
| VO |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/RemoveHeaderProcessor.java | {
"start": 1110,
"end": 2272
} | class ____ extends BaseProcessorSupport implements Traceable, IdAware, RouteIdAware {
private final String headerName;
private String id;
private String routeId;
public RemoveHeaderProcessor(String headerName) {
this.headerName = headerName;
}
@Override
public boolean process(Exchange exchange, AsyncCallback callback) {
try {
exchange.getMessage().removeHeader(headerName);
} catch (Exception e) {
exchange.setException(e);
}
callback.done(true);
return true;
}
@Override
public String toString() {
return id;
}
@Override
public String getTraceLabel() {
return "removeHeader[" + headerName + "]";
}
@Override
public String getId() {
return id;
}
@Override
public void setId(String id) {
this.id = id;
}
@Override
public String getRouteId() {
return routeId;
}
@Override
public void setRouteId(String routeId) {
this.routeId = routeId;
}
public String getHeaderName() {
return headerName;
}
}
| RemoveHeaderProcessor |
java | elastic__elasticsearch | x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSnapshotLifecycleAction.java | {
"start": 966,
"end": 1726
} | class ____ extends BaseRestHandler {
@Override
public List<Route> routes() {
return List.of(new Route(GET, "/_slm/policy"), new Route(GET, "/_slm/policy/{name}"));
}
@Override
public String getName() {
return "slm_get_lifecycle";
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) {
final var req = new GetSnapshotLifecycleAction.Request(
getMasterNodeTimeout(request),
getAckTimeout(request),
Strings.splitStringByCommaToArray(request.param("name"))
);
return channel -> client.execute(GetSnapshotLifecycleAction.INSTANCE, req, new RestToXContentListener<>(channel));
}
}
| RestGetSnapshotLifecycleAction |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/UnusedTypeParameter.java | {
"start": 2073,
"end": 5132
} | class ____ extends BugChecker implements CompilationUnitTreeMatcher {
@Override
public Description matchCompilationUnit(CompilationUnitTree tree, VisitorState state) {
var usedIdentifiers = findUsedIdentifiers(tree);
new SuppressibleTreePathScanner<Void, Void>(state) {
@Override
public Void visitClass(ClassTree node, Void unused) {
if ((getSymbol(node).flags() & Flags.FINAL) != 0) {
handle(node, node.getTypeParameters());
}
return super.visitClass(node, null);
}
@Override
public Void visitMethod(MethodTree node, Void unused) {
var symbol = getSymbol(node);
if (methodCanBeOverridden(symbol)
|| !findSuperMethods(symbol, state.getTypes()).isEmpty()) {
return null;
}
handle(node, node.getTypeParameters());
return super.visitMethod(node, null);
}
private void handle(Tree tree, List<? extends TypeParameterTree> typeParameters) {
for (TypeParameterTree typeParameter : typeParameters) {
if (usedIdentifiers.count(getSymbol(typeParameter)) == 1) {
state.reportMatch(
describeMatch(
typeParameter,
removeTypeParameter(tree, typeParameter, typeParameters, state)));
}
}
}
}.scan(state.getPath(), null);
return Description.NO_MATCH;
}
private static ImmutableMultiset<TypeVariableSymbol> findUsedIdentifiers(
CompilationUnitTree tree) {
ImmutableMultiset.Builder<TypeVariableSymbol> identifiers = ImmutableMultiset.builder();
new TreeScanner<Void, Void>() {
@Override
public Void scan(Tree tree, Void unused) {
var symbol = getSymbol(tree);
if (symbol instanceof TypeVariableSymbol typeVariableSymbol) {
identifiers.add(typeVariableSymbol);
}
return super.scan(tree, null);
}
}.scan(tree, null);
return identifiers.build();
}
private static SuggestedFix removeTypeParameter(
Tree tree,
TypeParameterTree typeParameter,
List<? extends TypeParameterTree> typeParameters,
VisitorState state) {
if (typeParameters.size() > 1) {
return removeElement(typeParameter, typeParameters, state);
}
var tokens =
ErrorProneTokens.getTokens(
state.getSourceForNode(tree), getStartPosition(tree), state.context);
int startPos =
tokens.reverse().stream()
.filter(
t -> t.pos() <= getStartPosition(typeParameter) && t.kind().equals(TokenKind.LT))
.findFirst()
.get()
.pos();
int endPos =
tokens.stream()
.filter(
t ->
t.endPos() >= state.getEndPosition(getLast(typeParameters))
&& (t.kind().equals(TokenKind.GT) || t.kind().equals(TokenKind.GTGT)))
.findFirst()
.get()
.endPos();
return SuggestedFix.replace(startPos, endPos, "");
}
}
| UnusedTypeParameter |
java | apache__camel | components/camel-google/camel-google-functions/src/main/java/org/apache/camel/component/google/functions/GoogleCloudFunctionsClientFactory.java | {
"start": 1309,
"end": 2853
} | class ____ {
/**
* Prevent instantiation.
*/
private GoogleCloudFunctionsClientFactory() {
}
public static CloudFunctionsServiceClient create(
CamelContext context,
GoogleCloudFunctionsConfiguration configuration)
throws Exception {
CloudFunctionsServiceClient cloudFunctionsClient = null;
if (!Strings.isNullOrEmpty(configuration.getServiceAccountKey())) {
InputStream resolveMandatoryResourceAsInputStream
= ResourceHelper.resolveMandatoryResourceAsInputStream(context, configuration.getServiceAccountKey());
Credentials myCredentials = ServiceAccountCredentials
.fromStream(resolveMandatoryResourceAsInputStream);
CloudFunctionsServiceSettings settings = CloudFunctionsServiceSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(myCredentials)).build();
cloudFunctionsClient = CloudFunctionsServiceClient.create(settings);
} else {
// it needs to define the environment variable GOOGLE_APPLICATION_CREDENTIALS
// with the service account file
// more info at https://cloud.google.com/docs/authentication/production
CloudFunctionsServiceSettings settings = CloudFunctionsServiceSettings.newBuilder().build();
cloudFunctionsClient = CloudFunctionsServiceClient.create(settings);
}
return cloudFunctionsClient;
}
}
| GoogleCloudFunctionsClientFactory |
java | elastic__elasticsearch | modules/reindex/src/test/java/org/elasticsearch/reindex/UpdateByQueryMetadataTests.java | {
"start": 1399,
"end": 2129
} | class ____ extends TransportUpdateByQueryAction.AsyncIndexBySearchAction {
TestAction() {
super(
UpdateByQueryMetadataTests.this.task,
UpdateByQueryMetadataTests.this.logger,
null,
UpdateByQueryMetadataTests.this.threadPool,
null,
request(),
ClusterState.EMPTY_STATE,
listener()
);
}
@Override
public AbstractAsyncBulkByScrollAction.RequestWrapper<?> copyMetadata(
AbstractAsyncBulkByScrollAction.RequestWrapper<?> request,
Hit doc
) {
return super.copyMetadata(request, doc);
}
}
}
| TestAction |
java | spring-projects__spring-framework | spring-webflux/src/main/java/org/springframework/web/reactive/accept/DefaultApiVersionStrategy.java | {
"start": 1355,
"end": 7561
} | class ____ implements ApiVersionStrategy {
private final List<ApiVersionResolver> versionResolvers;
private final ApiVersionParser<?> versionParser;
private final boolean versionRequired;
private final @Nullable Comparable<?> defaultVersion;
private final Set<Comparable<?>> supportedVersions = new TreeSet<>();
private final boolean detectSupportedVersions;
private final Set<Comparable<?>> detectedVersions = new TreeSet<>();
private final Predicate<Comparable<?>> supportedVersionPredicate;
private final @Nullable ApiVersionDeprecationHandler deprecationHandler;
/**
* Create an instance.
* @param versionResolvers one or more resolvers to try; the first non-null
* value returned by any resolver becomes the resolved used
* @param versionParser parser for to raw version values
* @param versionRequired whether a version is required leading to
* {@link MissingApiVersionException} for requests that don't have one;
* by default set to true unless there is a defaultVersion
* @param defaultVersion a default version to assign to requests that
* don't specify one
* @param detectSupportedVersions whether to use API versions that appear in
* mappings for supported version validation (true), or use only explicitly
* configured versions (false).
* @param deprecationHandler handler to send hints and information about
* deprecated API versions to clients
*/
public DefaultApiVersionStrategy(
List<ApiVersionResolver> versionResolvers, ApiVersionParser<?> versionParser,
@Nullable Boolean versionRequired, @Nullable String defaultVersion,
boolean detectSupportedVersions, @Nullable Predicate<Comparable<?>> supportedVersionPredicate,
@Nullable ApiVersionDeprecationHandler deprecationHandler) {
Assert.notEmpty(versionResolvers, "At least one ApiVersionResolver is required");
Assert.notNull(versionParser, "ApiVersionParser is required");
Assert.isTrue(defaultVersion == null || versionRequired == null || !versionRequired,
"versionRequired cannot be set to true if a defaultVersion is also configured");
this.versionResolvers = new ArrayList<>(versionResolvers);
this.versionParser = versionParser;
this.versionRequired = (versionRequired != null ? versionRequired : defaultVersion == null);
this.defaultVersion = (defaultVersion != null ? versionParser.parseVersion(defaultVersion) : null);
this.detectSupportedVersions = detectSupportedVersions;
this.supportedVersionPredicate = initSupportedVersionPredicate(supportedVersionPredicate);
this.deprecationHandler = deprecationHandler;
if (defaultVersion != null) {
addSupportedVersion(defaultVersion);
}
}
private Predicate<Comparable<?>> initSupportedVersionPredicate(@Nullable Predicate<Comparable<?>> predicate) {
return (predicate != null ? predicate :
(version -> (this.supportedVersions.contains(version) ||
this.detectSupportedVersions && this.detectedVersions.contains(version))));
}
@Override
public @Nullable Comparable<?> getDefaultVersion() {
return this.defaultVersion;
}
/**
* Whether the strategy is configured to detect supported versions.
* If this is set to {@code false} then {@link #addMappedVersion} is ignored
* and the list of supported versions can be built explicitly through calls
* to {@link #addSupportedVersion}.
*/
public boolean detectSupportedVersions() {
return this.detectSupportedVersions;
}
/**
* Add to the list of supported versions to check against in
* {@link ApiVersionStrategy#validateVersion} before raising
* {@link InvalidApiVersionException} for unknown versions.
* <p>By default, actual version values that appear in request mappings are
* considered supported, and use of this method is optional. However, if you
* prefer to use only explicitly configured, supported versions, then set
* {@code detectSupportedVersions} flag to {@code false}.
* @param versions the supported versions to add
* @see #addMappedVersion(String...)
*/
public void addSupportedVersion(String... versions) {
for (String version : versions) {
this.supportedVersions.add(parseVersion(version));
}
}
/**
* Internal method to add to the list of actual version values that appear in
* request mappings, which allows supported versions to be discovered rather
* than {@link #addSupportedVersion(String...) configured}.
* <p>If you prefer to use explicitly configured, supported versions only,
* set the {@code detectSupportedVersions} flag to {@code false}.
* @param versions the versions to add
* @see #addSupportedVersion(String...)
*/
public void addMappedVersion(String... versions) {
for (String version : versions) {
this.detectedVersions.add(parseVersion(version));
}
}
@Override
public @Nullable String resolveVersion(ServerWebExchange exchange) {
for (ApiVersionResolver resolver : this.versionResolvers) {
String version = resolver.resolveVersion(exchange);
if (version != null) {
return version;
}
}
return null;
}
@Override
public Comparable<?> parseVersion(String version) {
return this.versionParser.parseVersion(version);
}
@Override
public void validateVersion(@Nullable Comparable<?> requestVersion, ServerWebExchange exchange)
throws MissingApiVersionException, InvalidApiVersionException {
if (requestVersion == null) {
if (this.versionRequired) {
throw new MissingApiVersionException();
}
return;
}
if (!this.supportedVersionPredicate.test(requestVersion)) {
throw new InvalidApiVersionException(requestVersion.toString());
}
}
@Override
public void handleDeprecations(Comparable<?> version, Object handler, ServerWebExchange exchange) {
if (this.deprecationHandler != null) {
this.deprecationHandler.handleVersion(version, handler, exchange);
}
}
@Override
public String toString() {
return "DefaultApiVersionStrategy[" +
"supportedVersions=" + this.supportedVersions + ", " +
"mappedVersions=" + this.detectedVersions + ", " +
"detectSupportedVersions=" + this.detectSupportedVersions + ", " +
"versionRequired=" + this.versionRequired + ", " +
"defaultVersion=" + this.defaultVersion + "]";
}
}
| DefaultApiVersionStrategy |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/utils/AsyncUtilTest.java | {
"start": 6597,
"end": 6789
} | class ____ extends ScalarFunction {
public String eval(Integer i, Long l, String s) {
return null;
}
}
/** Test function. */
public static final | ScalarFunc |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/alterTable/MySqlAlterTableTest42.java | {
"start": 865,
"end": 1421
} | class ____ extends TestCase {
public void test_alter_modify_clustered_by() throws Exception {
String sql = "alter table task AUTO_INCREMENT = 20000000 COMMENT ='自增起始值'";
List<SQLStatement> stmtList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
assertEquals(1, stmtList.size());
SQLStatement stmt = stmtList.get(0);
String output = SQLUtils.toMySqlString(stmt);
assertEquals("ALTER TABLE task\n" +
"\tAUTO_INCREMENT = 20000000 COMMENT = '自增起始值'", output);
}
}
| MySqlAlterTableTest42 |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/hybrid/tiered/tier/UnknownTierShuffleDescriptor.java | {
"start": 1025,
"end": 1298
} | class ____ implements TierShuffleDescriptor {
private static final long serialVersionUID = 1L;
public static final UnknownTierShuffleDescriptor INSTANCE = new UnknownTierShuffleDescriptor();
private UnknownTierShuffleDescriptor() {}
}
| UnknownTierShuffleDescriptor |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/DialectFeatureChecks.java | {
"start": 42821,
"end": 43033
} | class ____ implements DialectFeatureCheck {
@Override
public boolean apply(Dialect dialect) {
return dialect.getNationalizationSupport() == NationalizationSupport.EXPLICIT;
}
}
public static | SupportsNClob |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/context/config/ConfigData.java | {
"start": 1416,
"end": 3628
} | class ____ {
private final List<PropertySource<?>> propertySources;
private final PropertySourceOptions propertySourceOptions;
/**
* A {@link ConfigData} instance that contains no data.
*/
public static final ConfigData EMPTY = new ConfigData(Collections.emptySet());
/**
* Create a new {@link ConfigData} instance with the same options applied to each
* source.
* @param propertySources the config data property sources in ascending priority
* order.
* @param options the config data options applied to each source
* @see #ConfigData(Collection, PropertySourceOptions)
*/
public ConfigData(Collection<? extends PropertySource<?>> propertySources, Option... options) {
this(propertySources, PropertySourceOptions.always(Options.of(options)));
}
/**
* Create a new {@link ConfigData} instance with specific property source options.
* @param propertySources the config data property sources in ascending priority
* order.
* @param propertySourceOptions the property source options
* @since 2.4.5
*/
public ConfigData(Collection<? extends PropertySource<?>> propertySources,
PropertySourceOptions propertySourceOptions) {
Assert.notNull(propertySources, "'propertySources' must not be null");
Assert.notNull(propertySourceOptions, "'propertySourceOptions' must not be null");
this.propertySources = Collections.unmodifiableList(new ArrayList<>(propertySources));
this.propertySourceOptions = propertySourceOptions;
}
/**
* Return the configuration data property sources in ascending priority order. If the
* same key is contained in more than one of the sources, then the later source will
* win.
* @return the config data property sources
*/
public List<PropertySource<?>> getPropertySources() {
return this.propertySources;
}
/**
* Return the {@link Options config data options} that apply to the given source.
* @param propertySource the property source to check
* @return the options that apply
* @since 2.4.5
*/
public Options getOptions(PropertySource<?> propertySource) {
Options options = this.propertySourceOptions.get(propertySource);
return (options != null) ? options : Options.NONE;
}
/**
* Strategy | ConfigData |
java | apache__camel | dsl/camel-jbang/camel-jbang-core/src/main/java/org/apache/camel/dsl/jbang/core/commands/process/ListEvent.java | {
"start": 1679,
"end": 6230
} | class ____ extends ProcessWatchCommand {
@CommandLine.Parameters(description = "Name or pid of running Camel integration", arity = "0..1")
String name = "*";
@CommandLine.Option(names = { "--sort" }, completionCandidates = PidNameAgeCompletionCandidates.class,
description = "Sort by pid, name or age", defaultValue = "pid")
String sort;
@CommandLine.Option(names = { "--filter" },
description = "Filter event by event type: context, route, or exchange")
String filter;
public ListEvent(CamelJBangMain main) {
super(main);
}
@Override
public Integer doProcessWatchCall() throws Exception {
List<Row> rows = new ArrayList<>();
List<Long> pids = findPids(name);
ProcessHandle.allProcesses()
.filter(ph -> pids.contains(ph.pid()))
.forEach(ph -> {
JsonObject root = loadStatus(ph.pid());
// there must be a status file for the running Camel integration
if (root != null) {
Row row = new Row();
JsonObject context = (JsonObject) root.get("context");
if (context == null) {
return;
}
row.name = context.getString("name");
if ("CamelJBang".equals(row.name)) {
row.name = ProcessHelper.extractName(root, ph);
}
row.pid = Long.toString(ph.pid());
if (filter == null || filter.contains("context")) {
fetchEvents(root, row, "events", rows);
}
if (filter == null || filter.contains("route")) {
fetchEvents(root, row, "routeEvents", rows);
}
if (filter == null || filter.contains("exchange")) {
fetchEvents(root, row, "exchangeEvents", rows);
}
}
});
// sort rows
rows.sort(this::sortRow);
if (!rows.isEmpty()) {
printer().println(AsciiTable.getTable(AsciiTable.NO_BORDERS, rows, Arrays.asList(
new Column().header("PID").headerAlign(HorizontalAlign.CENTER).with(r -> r.pid),
new Column().header("NAME").dataAlign(HorizontalAlign.LEFT).maxWidth(30, OverflowBehaviour.ELLIPSIS_RIGHT)
.with(r -> r.name),
new Column().header("TYPE").dataAlign(HorizontalAlign.LEFT).with(r -> r.type),
new Column().header("AGE").dataAlign(HorizontalAlign.RIGHT).with(this::getTimestamp),
new Column().header("MESSAGE").dataAlign(HorizontalAlign.LEFT).with(r -> r.message))));
}
return 0;
}
private static void fetchEvents(JsonObject root, Row row, String type, List<Row> rows) {
JsonObject jo = (JsonObject) root.get("events");
if (jo != null) {
JsonArray arr = (JsonArray) jo.get(type);
if (arr != null) {
for (Object o : arr) {
row = row.copy();
jo = (JsonObject) o;
row.type = jo.getString("type");
Long ts = jo.getLong("timestamp");
if (ts != null) {
row.timestamp = ts;
}
row.exchangeId = jo.getString("exchangeId");
row.message = jo.getString("message");
rows.add(row);
}
}
}
}
private String getTimestamp(Row r) {
if (r.timestamp > 0) {
return TimeUtils.printSince(r.timestamp);
}
return "";
}
protected int sortRow(Row o1, Row o2) {
String s = sort;
int negate = 1;
if (s.startsWith("-")) {
s = s.substring(1);
negate = -1;
}
switch (s) {
case "pid":
return Long.compare(Long.parseLong(o1.pid), Long.parseLong(o2.pid)) * negate;
case "name":
return o1.name.compareToIgnoreCase(o2.name) * negate;
case "age":
return Long.compare(o1.timestamp, o2.timestamp) * negate;
default:
return 0;
}
}
private static | ListEvent |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/processor/internals/GlobalStateManagerImplTest.java | {
"start": 50076,
"end": 50323
} | class ____<K, V> extends NoOpReadOnlyStore<K, V> implements TimestampedBytesStore {
ConverterStore(final String name,
final boolean rocksdbStore) {
super(name, rocksdbStore);
}
}
}
| ConverterStore |
java | apache__spark | sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/GetInfoValue.java | {
"start": 933,
"end": 2111
} | class ____ {
private String stringValue = null;
private short shortValue;
private int intValue;
private long longValue;
public GetInfoValue(String stringValue) {
this.stringValue = stringValue;
}
public GetInfoValue(short shortValue) {
this.shortValue = shortValue;
}
public GetInfoValue(int intValue) {
this.intValue = intValue;
}
public GetInfoValue(long longValue) {
this.longValue = longValue;
}
public GetInfoValue(TGetInfoValue tGetInfoValue) {
switch (tGetInfoValue.getSetField()) {
case STRING_VALUE:
stringValue = tGetInfoValue.getStringValue();
break;
default:
throw new IllegalArgumentException("Unrecognized TGetInfoValue");
}
}
public TGetInfoValue toTGetInfoValue() {
TGetInfoValue tInfoValue = new TGetInfoValue();
if (stringValue != null) {
tInfoValue.setStringValue(stringValue);
}
return tInfoValue;
}
public String getStringValue() {
return stringValue;
}
public short getShortValue() {
return shortValue;
}
public int getIntValue() {
return intValue;
}
public long getLongValue() {
return longValue;
}
}
| GetInfoValue |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestWriteReadStripedFile.java | {
"start": 1869,
"end": 12360
} | class ____ {
public static final Logger LOG =
LoggerFactory.getLogger(TestWriteReadStripedFile.class);
private final ErasureCodingPolicy ecPolicy =
SystemErasureCodingPolicies.getByID(
SystemErasureCodingPolicies.RS_3_2_POLICY_ID);
private final int cellSize = ecPolicy.getCellSize();
private final short dataBlocks = (short) ecPolicy.getNumDataUnits();
private final short parityBlocks = (short) ecPolicy.getNumParityUnits();
private final int numDNs = dataBlocks + parityBlocks;
private final int stripesPerBlock = 2;
private final int blockSize = stripesPerBlock * cellSize;
private final int blockGroupSize = blockSize * dataBlocks;
private MiniDFSCluster cluster;
private DistributedFileSystem fs;
private Configuration conf = new HdfsConfiguration();
static {
GenericTestUtils.setLogLevel(DFSOutputStream.LOG, Level.TRACE);
GenericTestUtils.setLogLevel(DataStreamer.LOG, Level.TRACE);
GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
GenericTestUtils.setLogLevel(BlockPlacementPolicy.LOG, Level.TRACE);
}
@BeforeEach
public void setup() throws IOException {
conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDNs).build();
fs = cluster.getFileSystem();
fs.enableErasureCodingPolicy(ecPolicy.getName());
fs.mkdirs(new Path("/ec"));
cluster.getFileSystem().getClient().setErasureCodingPolicy("/ec",
ecPolicy.getName());
}
@AfterEach
public void tearDown() throws IOException {
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
}
@Test
public void testFileEmpty() throws Exception {
testOneFileUsingDFSStripedInputStream("/ec/EmptyFile", 0);
testOneFileUsingDFSStripedInputStream("/ec/EmptyFile2", 0, true);
}
@Test
public void testFileSmallerThanOneCell1() throws Exception {
testOneFileUsingDFSStripedInputStream("/ec/SmallerThanOneCell", 1);
testOneFileUsingDFSStripedInputStream("/ec/SmallerThanOneCell2", 1, true);
}
@Test
public void testFileSmallerThanOneCell2() throws Exception {
testOneFileUsingDFSStripedInputStream("/ec/SmallerThanOneCell",
cellSize - 1);
testOneFileUsingDFSStripedInputStream("/ec/SmallerThanOneCell2",
cellSize - 1, true);
}
@Test
public void testFileEqualsWithOneCell() throws Exception {
testOneFileUsingDFSStripedInputStream("/ec/EqualsWithOneCell", cellSize);
testOneFileUsingDFSStripedInputStream("/ec/EqualsWithOneCell2",
cellSize, true);
}
@Test
public void testFileSmallerThanOneStripe1() throws Exception {
testOneFileUsingDFSStripedInputStream("/ec/SmallerThanOneStripe",
cellSize * dataBlocks - 1);
testOneFileUsingDFSStripedInputStream("/ec/SmallerThanOneStripe2",
cellSize * dataBlocks - 1, true);
}
@Test
public void testFileSmallerThanOneStripe2() throws Exception {
testOneFileUsingDFSStripedInputStream("/ec/SmallerThanOneStripe",
cellSize + 123);
testOneFileUsingDFSStripedInputStream("/ec/SmallerThanOneStripe2",
cellSize + 123, true);
}
@Test
public void testFileEqualsWithOneStripe() throws Exception {
testOneFileUsingDFSStripedInputStream("/ec/EqualsWithOneStripe",
cellSize * dataBlocks);
testOneFileUsingDFSStripedInputStream("/ec/EqualsWithOneStripe2",
cellSize * dataBlocks, true);
}
@Test
public void testFileMoreThanOneStripe1() throws Exception {
testOneFileUsingDFSStripedInputStream("/ec/MoreThanOneStripe1",
cellSize * dataBlocks + 123);
testOneFileUsingDFSStripedInputStream("/ec/MoreThanOneStripe12",
cellSize * dataBlocks + 123, true);
}
@Test
public void testFileMoreThanOneStripe2() throws Exception {
testOneFileUsingDFSStripedInputStream("/ec/MoreThanOneStripe2",
cellSize * dataBlocks + cellSize * dataBlocks + 123);
testOneFileUsingDFSStripedInputStream("/ec/MoreThanOneStripe22",
cellSize * dataBlocks + cellSize * dataBlocks + 123, true);
}
@Test
public void testLessThanFullBlockGroup() throws Exception {
testOneFileUsingDFSStripedInputStream("/ec/LessThanFullBlockGroup",
cellSize * dataBlocks * (stripesPerBlock - 1) + cellSize);
testOneFileUsingDFSStripedInputStream("/ec/LessThanFullBlockGroup2",
cellSize * dataBlocks * (stripesPerBlock - 1) + cellSize, true);
}
@Test
public void testFileFullBlockGroup() throws Exception {
testOneFileUsingDFSStripedInputStream("/ec/FullBlockGroup",
blockSize * dataBlocks);
testOneFileUsingDFSStripedInputStream("/ec/FullBlockGroup2",
blockSize * dataBlocks, true);
}
@Test
public void testFileMoreThanABlockGroup1() throws Exception {
testOneFileUsingDFSStripedInputStream("/ec/MoreThanABlockGroup1",
blockSize * dataBlocks + 123);
testOneFileUsingDFSStripedInputStream("/ec/MoreThanABlockGroup12",
blockSize * dataBlocks + 123, true);
}
@Test
public void testFileMoreThanABlockGroup2() throws Exception {
testOneFileUsingDFSStripedInputStream("/ec/MoreThanABlockGroup2",
blockSize * dataBlocks + cellSize + 123);
testOneFileUsingDFSStripedInputStream("/ec/MoreThanABlockGroup22",
blockSize * dataBlocks + cellSize + 123, true);
}
@Test
public void testFileMoreThanABlockGroup3() throws Exception {
testOneFileUsingDFSStripedInputStream("/ec/MoreThanABlockGroup3",
blockSize * dataBlocks * 3 + cellSize * dataBlocks
+ cellSize + 123);
testOneFileUsingDFSStripedInputStream("/ec/MoreThanABlockGroup32",
blockSize * dataBlocks * 3 + cellSize * dataBlocks
+ cellSize + 123, true);
}
private void testOneFileUsingDFSStripedInputStream(String src, int fileLength)
throws Exception {
testOneFileUsingDFSStripedInputStream(src, fileLength, false);
}
private void testOneFileUsingDFSStripedInputStream(String src, int fileLength,
boolean withDataNodeFailure) throws Exception {
final byte[] expected = StripedFileTestUtil.generateBytes(fileLength);
Path srcPath = new Path(src);
DFSTestUtil.writeFile(fs, srcPath, new String(expected));
StripedFileTestUtil.waitBlockGroupsReported(fs, src);
StripedFileTestUtil.verifyLength(fs, srcPath, fileLength);
if (withDataNodeFailure) {
int dnIndex = 1; // TODO: StripedFileTestUtil.random.nextInt(dataBlocks);
LOG.info("stop DataNode " + dnIndex);
stopDataNode(srcPath, dnIndex);
}
byte[] smallBuf = new byte[1024];
byte[] largeBuf = new byte[fileLength + 100];
StripedFileTestUtil.verifyPread(fs, srcPath, fileLength, expected,
largeBuf);
StripedFileTestUtil.verifyStatefulRead(fs, srcPath, fileLength, expected,
largeBuf);
StripedFileTestUtil.verifySeek(fs, srcPath, fileLength, ecPolicy,
blockGroupSize);
StripedFileTestUtil.verifyStatefulRead(fs, srcPath, fileLength, expected,
ByteBuffer.allocate(fileLength + 100));
StripedFileTestUtil.verifyStatefulRead(fs, srcPath, fileLength, expected,
smallBuf);
StripedFileTestUtil.verifyStatefulRead(fs, srcPath, fileLength, expected,
ByteBuffer.allocate(1024));
}
private void stopDataNode(Path path, int failedDNIdx)
throws IOException {
BlockLocation[] locs = fs.getFileBlockLocations(path, 0, cellSize);
if (locs != null && locs.length > 0) {
String name = (locs[0].getNames())[failedDNIdx];
for (DataNode dn : cluster.getDataNodes()) {
int port = dn.getXferPort();
if (name.contains(Integer.toString(port))) {
dn.shutdown();
break;
}
}
}
}
@Test
public void testWriteReadUsingWebHdfs() throws Exception {
int fileLength = blockSize * dataBlocks + cellSize + 123;
final byte[] expected = StripedFileTestUtil.generateBytes(fileLength);
FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf,
WebHdfsConstants.WEBHDFS_SCHEME);
Path srcPath = new Path("/testWriteReadUsingWebHdfs");
DFSTestUtil.writeFile(fs, srcPath, new String(expected));
StripedFileTestUtil.verifyLength(fs, srcPath, fileLength);
byte[] smallBuf = new byte[1024];
byte[] largeBuf = new byte[fileLength + 100];
StripedFileTestUtil
.verifyPread(fs, srcPath, fileLength, expected, largeBuf, ecPolicy);
StripedFileTestUtil
.verifyStatefulRead(fs, srcPath, fileLength, expected, largeBuf);
StripedFileTestUtil.verifySeek(fs, srcPath, fileLength, ecPolicy,
blockGroupSize);
StripedFileTestUtil
.verifyStatefulRead(fs, srcPath, fileLength, expected, smallBuf);
// webhdfs doesn't support bytebuffer read
}
@Test
public void testConcat() throws Exception {
final byte[] data =
StripedFileTestUtil.generateBytes(blockSize * dataBlocks * 10 + 234);
int totalLength = 0;
Random r = new Random();
Path target = new Path("/ec/testConcat_target");
DFSTestUtil.writeFile(fs, target, Arrays.copyOfRange(data, 0, 123));
totalLength += 123;
int numFiles = 5;
Path[] srcs = new Path[numFiles];
for (int i = 0; i < numFiles; i++) {
srcs[i] = new Path("/ec/testConcat_src_file_" + i);
int srcLength = r.nextInt(blockSize * dataBlocks * 2) + 1;
DFSTestUtil.writeFile(fs, srcs[i],
Arrays.copyOfRange(data, totalLength, totalLength + srcLength));
totalLength += srcLength;
}
fs.concat(target, srcs);
StripedFileTestUtil.verifyStatefulRead(fs, target, totalLength,
Arrays.copyOfRange(data, 0, totalLength), new byte[1024]);
}
@Test
public void testConcatWithDifferentECPolicy() throws Exception {
final byte[] data =
StripedFileTestUtil.generateBytes(blockSize * dataBlocks);
Path nonECFile = new Path("/non_ec_file");
DFSTestUtil.writeFile(fs, nonECFile, data);
Path target = new Path("/ec/non_ec_file");
fs.rename(nonECFile, target);
int numFiles = 2;
Path[] srcs = new Path[numFiles];
for (int i = 0; i < numFiles; i++) {
srcs[i] = new Path("/ec/testConcat_src_file_"+i);
DFSTestUtil.writeFile(fs, srcs[i], data);
}
try {
fs.concat(target, srcs);
Assertions.fail("non-ec file shouldn't concat with ec file");
} catch (RemoteException e){
Assertions.assertTrue(e.getMessage().contains("have different erasure coding policy"));
}
}
}
| TestWriteReadStripedFile |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ServiceProviderBuildItem.java | {
"start": 4426,
"end": 4693
} | class ____ resources having path '" + resourcePath + "'", e);
}
}
/**
* Creates a new {@link Collection} of {@code ServiceProviderBuildItem}s for the selected artifact.
* It includes all the providers, that are contained in all the service | path |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/Snapshot.java | {
"start": 2205,
"end": 6066
} | class ____ implements Comparable<byte[]> {
/**
* This id is used to indicate the current state (vs. snapshots)
*/
public static final int CURRENT_STATE_ID = Integer.MAX_VALUE - 1;
public static final int NO_SNAPSHOT_ID = -1;
/**
* The pattern for generating the default snapshot name.
* E.g. s20130412-151029.033
*/
private static final String DEFAULT_SNAPSHOT_NAME_PATTERN = "'s'yyyyMMdd-HHmmss.SSS";
public static String generateDefaultSnapshotName() {
return new SimpleDateFormat(DEFAULT_SNAPSHOT_NAME_PATTERN).format(new Date());
}
public static String generateDeletedSnapshotName(Snapshot s) {
return getSnapshotName(s) + "#" + s.getId();
}
public static String getSnapshotPath(String snapshottableDir,
String snapshotRelativePath) {
final StringBuilder b = new StringBuilder(snapshottableDir);
if (b.charAt(b.length() - 1) != Path.SEPARATOR_CHAR) {
b.append(Path.SEPARATOR);
}
return b.append(HdfsConstants.DOT_SNAPSHOT_DIR)
.append(Path.SEPARATOR)
.append(snapshotRelativePath)
.toString();
}
/**
* Get the name of the given snapshot.
* @param s The given snapshot.
* @return The name of the snapshot, or an empty string if {@code s} is null
*/
static String getSnapshotName(Snapshot s) {
return s != null ? s.getRoot().getLocalName() : "";
}
public static int getSnapshotId(Snapshot s) {
return s == null ? CURRENT_STATE_ID : s.getId();
}
public static String getSnapshotString(int snapshot) {
return snapshot == CURRENT_STATE_ID? "<CURRENT_STATE>"
: snapshot == NO_SNAPSHOT_ID? "<NO_SNAPSHOT>"
: "Snapshot #" + snapshot;
}
/**
* Compare snapshot with IDs, where null indicates the current status thus
* is greater than any non-null snapshot.
*/
public static final Comparator<Snapshot> ID_COMPARATOR
= new Comparator<Snapshot>() {
@Override
public int compare(Snapshot left, Snapshot right) {
return ID_INTEGER_COMPARATOR.compare(Snapshot.getSnapshotId(left),
Snapshot.getSnapshotId(right));
}
};
/**
* Compare snapshot with IDs, where null indicates the current status thus
* is greater than any non-null ID.
*/
public static final Comparator<Integer> ID_INTEGER_COMPARATOR
= new Comparator<Integer>() {
@Override
public int compare(Integer left, Integer right) {
// Snapshot.CURRENT_STATE_ID means the current state, thus should be the
// largest
return left - right;
}
};
/**
* Find the latest snapshot that 1) covers the given inode (which means the
* snapshot was either taken on the inode or taken on an ancestor of the
* inode), and 2) was taken before the given snapshot (if the given snapshot
* is not null).
*
* @param inode the given inode that the returned snapshot needs to cover
* @param anchor the returned snapshot should be taken before this given id.
* @return id of the latest snapshot that covers the given inode and was taken
* before the the given snapshot (if it is not null).
*/
public static int findLatestSnapshot(INode inode, final int anchor) {
int latest = NO_SNAPSHOT_ID;
for(; inode != null; inode = inode.getParent()) {
if (inode.isDirectory()) {
final INodeDirectory dir = inode.asDirectory();
if (dir.isWithSnapshot()) {
latest = dir.getDiffs().updatePrior(anchor, latest);
}
}
}
return latest;
}
static Snapshot read(DataInput in, FSImageFormat.Loader loader)
throws IOException {
final int snapshotId = in.readInt();
final INode root = loader.loadINodeWithLocalName(false, in, false);
return new Snapshot(snapshotId, root.asDirectory(), null);
}
/** The root directory of the snapshot. */
static public | Snapshot |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/tracing/RedisObservation.java | {
"start": 1647,
"end": 3686
} | enum ____ implements KeyName {
/**
* Database system.
*/
DATABASE_SYSTEM {
@Override
public String asString() {
return "db.system";
}
},
/**
* Network transport.
*/
NET_TRANSPORT {
@Override
public String asString() {
return "net.transport";
}
},
/**
* Name of the database host.
*/
NET_PEER_NAME {
@Override
public String asString() {
return "net.peer.name";
}
},
/**
* Logical remote port number.
*/
NET_PEER_PORT {
@Override
public String asString() {
return "net.peer.port";
}
},
/**
* Redis peer address.
*/
NET_SOCK_PEER_ADDR {
@Override
public String asString() {
return "net.sock.peer.addr";
}
},
/**
* Redis peer port.
*/
NET_SOCK_PEER_PORT {
@Override
public String asString() {
return "net.sock.peer.port";
}
},
/**
* Redis user.
*/
DB_USER {
@Override
public String asString() {
return "db.user";
}
},
/**
* Redis database index.
*/
DB_INDEX {
@Override
public String asString() {
return "db.redis.database_index";
}
},
/**
* Redis command value.
*/
REDIS_COMMAND {
@Override
public String asString() {
return "db.operation";
}
}
}
/**
* Enums related to high cardinality key names for Redis commands.
*/
| LowCardinalityCommandKeyNames |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/logging/log4j2/SimpleStackTracePrinter.java | {
"start": 898,
"end": 1141
} | class ____ implements StackTracePrinter {
@Override
public void printStackTrace(Throwable throwable, Appendable out) throws IOException {
out.append("stacktrace:" + ClassUtils.getShortName(throwable.getClass()));
}
}
| SimpleStackTracePrinter |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/bindings/multiple/MyOtherBean.java | {
"start": 165,
"end": 249
} | class ____ {
public String foo() {
return "anotherFoo";
}
}
| MyOtherBean |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/ttl/TtlAwareSerializerSnapshotWrapper.java | {
"start": 1234,
"end": 3596
} | class ____<T> {
private final TypeSerializerSnapshot<T> typeSerializerSnapshot;
public TtlAwareSerializerSnapshotWrapper(TypeSerializerSnapshot<T> typeSerializerSnapshot) {
this.typeSerializerSnapshot = typeSerializerSnapshot;
}
public TypeSerializerSnapshot<T> getTtlAwareSerializerSnapshot() {
if (typeSerializerSnapshot instanceof ListSerializerSnapshot) {
return wrapListSerializerSnapshot();
} else if (typeSerializerSnapshot instanceof MapSerializerSnapshot) {
return wrapMapSerializerSnapshot();
} else {
return wrapValueSerializerSnapshot();
}
}
private TypeSerializerSnapshot<T> wrapValueSerializerSnapshot() {
return typeSerializerSnapshot instanceof TtlAwareSerializerSnapshot
? typeSerializerSnapshot
: new TtlAwareSerializerSnapshot<>(typeSerializerSnapshot);
}
@SuppressWarnings({"unchecked", "rawtypes"})
private TypeSerializerSnapshot<T> wrapListSerializerSnapshot() {
ListSerializerSnapshot listSerializerSnapshot =
(ListSerializerSnapshot) typeSerializerSnapshot;
if (!(listSerializerSnapshot.getElementSerializerSnapshot()
instanceof TtlAwareSerializerSnapshot)) {
CompositeTypeSerializerUtil.setNestedSerializersSnapshots(
listSerializerSnapshot,
new TtlAwareSerializerSnapshot<>(
listSerializerSnapshot.getElementSerializerSnapshot()));
}
return listSerializerSnapshot;
}
@SuppressWarnings({"unchecked", "rawtypes"})
private TypeSerializerSnapshot<T> wrapMapSerializerSnapshot() {
MapSerializerSnapshot mapSerializerSnapshot =
(MapSerializerSnapshot) typeSerializerSnapshot;
if (!(mapSerializerSnapshot.getValueSerializerSnapshot()
instanceof TtlAwareSerializerSnapshot)) {
CompositeTypeSerializerUtil.setNestedSerializersSnapshots(
mapSerializerSnapshot,
mapSerializerSnapshot.getKeySerializerSnapshot(),
new TtlAwareSerializerSnapshot<>(
mapSerializerSnapshot.getValueSerializerSnapshot()));
}
return mapSerializerSnapshot;
}
}
| TtlAwareSerializerSnapshotWrapper |
java | alibaba__nacos | config/src/main/java/com/alibaba/nacos/config/server/utils/TimeoutUtils.java | {
"start": 1076,
"end": 2720
} | class ____ {
/**
* Total time to get the data of consumption, the unit of ms.
*/
private final AtomicLong totalTime = new AtomicLong(0L);
private volatile long lastResetTime;
private volatile boolean initialized = false;
/**
* Total timeout to get data, the unit of ms.
*/
private long totalTimeout;
/**
* The cumulative expiration time of the time consumed by fetching the data, the unit of ms.
*/
private long invalidThreshold;
public TimeoutUtils(long totalTimeout, long invalidThreshold) {
this.totalTimeout = totalTimeout;
this.invalidThreshold = invalidThreshold;
}
/**
* Init last reset time.
*/
public synchronized void initLastResetTime() {
if (initialized) {
return;
}
lastResetTime = System.currentTimeMillis();
initialized = true;
}
/**
* Cumulative total time.
*/
public void addTotalTime(long time) {
totalTime.addAndGet(time);
}
/**
* Is timeout.
*/
public boolean isTimeout() {
return totalTime.get() > this.totalTimeout;
}
/**
* Clean the total time.
*/
public void resetTotalTime() {
if (isTotalTimeExpired()) {
totalTime.set(0L);
lastResetTime = System.currentTimeMillis();
}
}
public AtomicLong getTotalTime() {
return totalTime;
}
private boolean isTotalTimeExpired() {
return System.currentTimeMillis() - lastResetTime > this.invalidThreshold;
}
}
| TimeoutUtils |
java | google__dagger | javatests/artifacts/dagger/build-tests/src/test/java/buildtests/TransitiveBindsQualifierTest.java | {
"start": 5701,
"end": 6469
} | interface ____ {",
" @Binds",
" @MyQualifier",
" Object bindObject(@MyQualifier Number number);",
"",
" @Binds",
" @MyQualifier",
" Number bindNumber(int i);",
"}");
GradleModule.create(projectDir, "library2")
.addBuildFile(
"plugins {",
" id 'java'",
" id 'java-library'",
"}",
"dependencies {",
" implementation 'javax.inject:javax.inject:1'",
"}")
.addSrcFile(
"MyQualifier.java",
"package library2;",
"",
"import javax.inject.Qualifier;",
"",
"@Qualifier",
"public @ | MyModule |
java | google__dagger | javatests/dagger/internal/codegen/XMethodElementsTest.java | {
"start": 1314,
"end": 1513
} | class ____ {
@Test
public void javaHasOverride() {
Source javaBase =
javaSource(
"test.JavaBase",
"package test;",
"",
" | XMethodElementsTest |
java | apache__camel | core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/CamelInternalProcessor.java | {
"start": 20315,
"end": 23004
} | class ____ implements CamelInternalProcessorAdvice<Object> {
private static final Logger LOG = LoggerFactory.getLogger(RoutePolicyAdvice.class);
private final List<RoutePolicy> routePolicies;
private Route route;
public RoutePolicyAdvice(List<RoutePolicy> routePolicies) {
this.routePolicies = routePolicies;
}
public void setRoute(Route route) {
this.route = route;
}
/**
* Strategy to determine if this policy is allowed to run
*
* @param policy the policy
* @return <tt>true</tt> to run
*/
boolean isRoutePolicyRunAllowed(RoutePolicy policy) {
if (policy instanceof StatefulService ss) {
return ss.isRunAllowed();
}
return true;
}
@Override
public Object before(Exchange exchange) throws Exception {
// invoke begin
for (RoutePolicy policy : routePolicies) {
try {
if (isRoutePolicyRunAllowed(policy)) {
policy.onExchangeBegin(route, exchange);
}
} catch (Exception e) {
LOG.warn("Error occurred during onExchangeBegin on RoutePolicy: {}. This exception will be ignored", policy,
e);
}
}
return null;
}
@Override
public void after(Exchange exchange, Object data) throws Exception {
// do not invoke it if Camel is stopping as we don't want
// the policy to start a consumer during Camel is stopping
if (isCamelStopping(exchange.getContext())) {
return;
}
for (RoutePolicy policy : routePolicies) {
try {
if (isRoutePolicyRunAllowed(policy)) {
policy.onExchangeDone(route, exchange);
}
} catch (Exception e) {
LOG.warn("Error occurred during onExchangeDone on RoutePolicy: {}. This exception will be ignored",
policy, e);
}
}
}
private static boolean isCamelStopping(CamelContext context) {
if (context != null) {
return context.isStopping() || context.isStopped();
}
return false;
}
@Override
public boolean hasState() {
return false;
}
}
/**
* Advice to execute the {@link BacklogTracer} if enabled.
*/
public static final | RoutePolicyAdvice |
java | apache__flink | flink-formats/flink-sql-avro/src/test/java/org/apache/flink/formats/avro/PackagingITCase.java | {
"start": 1098,
"end": 1486
} | class ____ {
@Test
void testPackaging() throws Exception {
final Path jar = ResourceTestUtils.getResource(".*/flink-sql-avro[^/]*\\.jar");
PackagingTestUtils.assertJarContainsOnlyFilesMatching(
jar, Arrays.asList("org/apache/flink/", "META-INF/"));
PackagingTestUtils.assertJarContainsServiceEntry(jar, Factory.class);
}
}
| PackagingITCase |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxReplay.java | {
"start": 15409,
"end": 18923
} | class ____<T> implements ReplayBuffer<T> {
final int limit;
final int indexUpdateLimit;
volatile Node<T> head;
Node<T> tail;
int size;
volatile boolean done;
@Nullable Throwable error;
SizeBoundReplayBuffer(int limit) {
if (limit < 0) {
throw new IllegalArgumentException("Limit cannot be negative");
}
this.limit = limit;
this.indexUpdateLimit = Operators.unboundedOrLimit(limit);
Node<T> n = new Node<>(-1, null);
this.tail = n;
this.head = n;
}
@Override
public boolean isExpired() {
return false;
}
@Override
public int capacity() {
return limit;
}
@Override
public void add(T value) {
final Node<T> tail = this.tail;
final Node<T> n = new Node<>(tail.index + 1, value);
tail.set(n);
this.tail = n;
int s = size;
if (s == limit) {
Node<T> afterHead = head.get();
assert afterHead != null : "afterHead can not be null when s == limit";
head = afterHead;
}
else {
size = s + 1;
}
}
@Override
public void onError(Throwable ex) {
error = ex;
done = true;
}
@Override
public void onComplete() {
done = true;
}
void replayNormal(ReplaySubscription<T> rs) {
final Subscriber<? super T> a = rs.actual();
int missed = 1;
for (; ; ) {
long r = rs.requested();
long e = 0L;
@SuppressWarnings("unchecked") Node<T> node = (Node<T>) rs.node();
if (node == null) {
node = head;
}
while (e != r) {
if (rs.isCancelled()) {
rs.node(null);
return;
}
boolean d = done;
Node<T> next = node.get();
boolean empty = next == null;
if (d && empty) {
rs.node(null);
Throwable ex = error;
if (ex != null) {
a.onError(ex);
}
else {
a.onComplete();
}
return;
}
if (empty) {
break;
}
assert next != null && next.value != null : "next and next.value must not be null";
a.onNext(next.value);
e++;
node = next;
if ((next.index + 1) % indexUpdateLimit == 0) {
rs.requestMore(next.index + 1);
}
}
if (e == r) {
if (rs.isCancelled()) {
rs.node(null);
return;
}
boolean d = done;
boolean empty = node.get() == null;
if (d && empty) {
rs.node(null);
Throwable ex = error;
if (ex != null) {
a.onError(ex);
}
else {
a.onComplete();
}
return;
}
}
if (e != 0L) {
if (r != Long.MAX_VALUE) {
rs.produced(e);
}
}
rs.node(node);
missed = rs.leave(missed);
if (missed == 0) {
break;
}
}
}
void replayFused(ReplaySubscription<T> rs) {
int missed = 1;
final Subscriber<? super T> a = rs.actual();
for (; ; ) {
if (rs.isCancelled()) {
rs.node(null);
return;
}
boolean d = done;
a.onNext(null);
if (d) {
Throwable ex = error;
if (ex != null) {
a.onError(ex);
}
else {
a.onComplete();
}
return;
}
missed = rs.leave(missed);
if (missed == 0) {
break;
}
}
}
@Override
public void replay(ReplaySubscription<T> rs) {
if (!rs.enter()) {
return;
}
if (rs.fusionMode() == NONE) {
replayNormal(rs);
}
else {
replayFused(rs);
}
}
@Override
public @Nullable Throwable getError() {
return error;
}
@Override
public boolean isDone() {
return done;
}
static final | SizeBoundReplayBuffer |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/params/ParameterizedTestIntegrationTests.java | {
"start": 84250,
"end": 84835
} | class ____ {
@ParameterizedTest(quoteTextArguments = false)
@CsvFileSource(resources = "two-column.csv")
@CsvFileSource(resources = "two-column-with-headers.csv", delimiter = '|', useHeadersInDisplayName = true, nullValues = "NIL")
void testWithRepeatableCsvFileSource(String column1, String column2) {
fail("%s %s".formatted(column1, column2));
}
@CsvFileSource(resources = "two-column.csv")
@CsvFileSource(resources = "two-column-with-headers.csv", delimiter = '|', useHeadersInDisplayName = true, nullValues = "NIL")
@Retention(RUNTIME)
@ | RepeatableSourcesTestCase |
java | apache__camel | components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jSimpleAgentIT.java | {
"start": 2075,
"end": 6600
} | class ____ extends CamelTestSupport {
// Test constants
private static final String TEST_USER_MESSAGE_SIMPLE = "What is Apache Camel?";
private static final String TEST_USER_MESSAGE_STORY = "Write a short story about a lost cat.";
private static final String TEST_SYSTEM_MESSAGE
= """
You are a whimsical storyteller. Your responses should be imaginative, descriptive, and always include a touch of magic. Start every story with 'Once upon a starlit night...'""";
private static final String EXPECTED_STORY_START = "Once upon a starlit night";
private static final String EXPECTED_STORY_CONTENT = "cat";
protected ChatModel chatModel;
@RegisterExtension
static OllamaService OLLAMA = ModelHelper.hasEnvironmentConfiguration()
? null
: OllamaServiceFactory.createSingletonService();
@Override
protected void setupResources() throws Exception {
super.setupResources();
chatModel = OLLAMA != null ? ModelHelper.loadChatModel(OLLAMA) : ModelHelper.loadFromEnv();
}
@Test
void testSimpleUserMessage() throws InterruptedException {
MockEndpoint mockEndpoint = this.context.getEndpoint("mock:response", MockEndpoint.class);
mockEndpoint.expectedMessageCount(1);
String response = template.requestBody("direct:send-simple-user-message", TEST_USER_MESSAGE_SIMPLE, String.class);
mockEndpoint.assertIsSatisfied();
assertNotNull(response, "AI response should not be null");
assertNotEquals(TEST_USER_MESSAGE_SIMPLE, response, "AI response should be different from the input message");
assertTrue(response.contains("Apache Camel"), "Response should contain information about 'Apache Camel'");
}
@Test
void testSimpleUserMessageWithHeaderPrompt() throws InterruptedException {
MockEndpoint mockEndpoint = this.context.getEndpoint("mock:response", MockEndpoint.class);
mockEndpoint.expectedMessageCount(1);
String response = template.requestBodyAndHeader("direct:send-simple-user-message",
TEST_USER_MESSAGE_STORY, SYSTEM_MESSAGE, TEST_SYSTEM_MESSAGE, String.class);
mockEndpoint.assertIsSatisfied();
assertNotNull(response, "AI response should not be null");
assertNotEquals(TEST_USER_MESSAGE_STORY, response, "AI response should be different from the input message");
assertTrue(response.contains(EXPECTED_STORY_START), "Response should start with the expected magical opening phrase");
assertTrue(response.contains(EXPECTED_STORY_CONTENT), "Response should contain content about a cat as requested");
}
@Test
void testSimpleUserMessageWithBodyBean() throws InterruptedException {
MockEndpoint mockEndpoint = this.context.getEndpoint("mock:response", MockEndpoint.class);
mockEndpoint.expectedMessageCount(1);
AiAgentBody<?> body = new AiAgentBody<>()
.withSystemMessage(TEST_SYSTEM_MESSAGE)
.withUserMessage(TEST_USER_MESSAGE_STORY);
String response = template.requestBody("direct:send-simple-user-message", body, String.class);
mockEndpoint.assertIsSatisfied();
assertNotNull(response, "AI response should not be null");
assertNotEquals(TEST_USER_MESSAGE_STORY, response, "AI response should be different from the input message");
assertTrue(response.contains(EXPECTED_STORY_START), "Response should start with the expected magical opening phrase");
assertTrue(response.contains(EXPECTED_STORY_CONTENT), "Response should contain content about a cat as requested");
}
@Override
protected RouteBuilder createRouteBuilder() {
// Create simple agent configuration (no memory, tools, RAG, or guardrails)
AgentConfiguration configuration = new AgentConfiguration()
.withChatModel(chatModel)
.withInputGuardrailClasses(List.of())
.withOutputGuardrailClasses(List.of());
Agent simpleAgent = new AgentWithoutMemory(configuration);
// Register agent in the context
this.context.getRegistry().bind("simpleAgent", simpleAgent);
return new RouteBuilder() {
public void configure() {
from("direct:send-simple-user-message")
.to("langchain4j-agent:test-agent?agent=#simpleAgent")
.to("mock:response");
}
};
}
}
| LangChain4jSimpleAgentIT |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java | {
"start": 28309,
"end": 30233
} | enum ____ {
INIT((byte) 0, false, false),
SUCCESS((byte) 2, true, false),
FAILED((byte) 3, true, true),
ABORTED((byte) 4, false, true),
/**
* Shard primary is unassigned and shard cannot be snapshotted.
*/
MISSING((byte) 5, true, true),
/**
* Shard snapshot is waiting for the primary to snapshot to become available.
*/
WAITING((byte) 6, false, false),
/**
* Shard snapshot is waiting for another shard snapshot for the same shard and to the same repository to finish.
*/
QUEUED((byte) 7, false, false),
/**
* Primary shard is assigned to a node which is marked for removal from the cluster (or which was previously marked for removal and
* we're still waiting for its other shards to pause).
*/
PAUSED_FOR_NODE_REMOVAL((byte) 8, false, false);
private final byte value;
private final boolean completed;
private final boolean failed;
ShardState(byte value, boolean completed, boolean failed) {
this.value = value;
this.completed = completed;
this.failed = failed;
}
public boolean completed() {
return completed;
}
public boolean failed() {
return failed;
}
public static ShardState fromValue(byte value) {
return switch (value) {
case 0 -> INIT;
case 2 -> SUCCESS;
case 3 -> FAILED;
case 4 -> ABORTED;
case 5 -> MISSING;
case 6 -> WAITING;
case 7 -> QUEUED;
case 8 -> PAUSED_FOR_NODE_REMOVAL;
default -> throw new IllegalArgumentException("No shard snapshot state for value [" + value + "]");
};
}
}
public | ShardState |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/cluster/api/sync/RedisClusterCommands.java | {
"start": 1271,
"end": 14502
} | interface ____<K, V> extends BaseRedisCommands<K, V>, RedisAclCommands<K, V>,
RedisFunctionCommands<K, V>, RedisGeoCommands<K, V>, RedisHashCommands<K, V>, RedisHLLCommands<K, V>,
RedisKeyCommands<K, V>, RedisListCommands<K, V>, RedisScriptingCommands<K, V>, RedisServerCommands<K, V>,
RedisSetCommands<K, V>, RedisSortedSetCommands<K, V>, RedisStreamCommands<K, V>, RedisStringCommands<K, V>,
RedisJsonCommands<K, V>, RedisVectorSetCommands<K, V>, RediSearchCommands<K, V> {
/**
* Set the default timeout for operations. A zero timeout value indicates to not time out.
*
* @param timeout the timeout value
* @since 5.0
* @deprecated since 6.2. Use the corresponding {@link io.lettuce.core.api.StatefulConnection#setTimeout(Duration)} method
* on the connection interface. To be removed with Lettuce 7.0.
*/
@Deprecated
void setTimeout(Duration timeout);
/**
* The asking command is required after a {@code -ASK} redirection. The client should issue {@code ASKING} before to
* actually send the command to the target instance. See the Redis Cluster specification for more information.
*
* @return String simple-string-reply
*/
String asking();
/**
* Authenticate to the server.
*
* @param password the password
* @return String simple-string-reply
*/
String auth(CharSequence password);
/**
* Authenticate to the server with username and password. Requires Redis 6 or newer.
*
* @param username the username
* @param password the password
* @return String simple-string-reply
* @since 6.0
*/
String auth(String username, CharSequence password);
/**
* Adds slots to the cluster node. The current node will become the master for the specified slots.
*
* @param slots one or more slots from {@literal 0} to {@literal 16384}
* @return String simple-string-reply
*/
String clusterAddSlots(int... slots);
/**
* Generate a new config epoch, incrementing the current epoch, assign the new epoch to this node, WITHOUT any consensus and
* persist the configuration on disk before sending packets with the new configuration.
*
* @return String simple-string-reply If the new config epoch is generated and assigned either BUMPED (epoch) or STILL
* (epoch) are returned.
*/
String clusterBumpepoch();
/**
* Returns the number of failure reports for the specified node. Failure reports are the way Redis Cluster uses in order to
* promote a {@literal PFAIL} state, that means a node is not reachable, to a {@literal FAIL} state, that means that the
* majority of masters in the cluster agreed within a window of time that the node is not reachable.
*
* @param nodeId the node id
* @return Integer reply: The number of active failure reports for the node.
*/
Long clusterCountFailureReports(String nodeId);
/**
* Returns the number of keys in the specified Redis Cluster hash {@code slot}.
*
* @param slot the slot
* @return Integer reply: The number of keys in the specified hash slot, or an error if the hash slot is invalid.
*/
Long clusterCountKeysInSlot(int slot);
/**
* Takes a list of slot ranges (specified by start and end slots) to assign to the node.
*
* @param ranges a list of slot ranges (specified by start and end slots)
* @return String simple-string-reply
* @since 6.2
*/
String clusterAddSlotsRange(Range<Integer>... ranges);
/**
* Removes slots from the cluster node.
*
* @param slots one or more slots from {@literal 0} to {@literal 16384}
* @return String simple-string-reply
*/
String clusterDelSlots(int... slots);
/**
* Takes a list of slot ranges (specified by start and end slots) to remove to the node.
*
* @param ranges a list of slot ranges (specified by start and end slots)
* @return String simple-string-reply
* @since 6.2
*/
String clusterDelSlotsRange(Range<Integer>... ranges);
/**
* Failover a cluster node. Turns the currently connected node into a master and the master into its replica.
*
* @param force do not coordinate with master if {@code true}
* @return String simple-string-reply
*/
String clusterFailover(boolean force);
/**
* Failover a cluster node. Turns the currently connected node into a master and the master into its replica.
*
* @param force do not coordinate with master if {@code true}
* @param takeOver do not coordinate with the rest of the cluster if {@code true} force will take precedence over takeOver
* if both are set.
* @return String simple-string-reply
* @since 6.2.3
*/
String clusterFailover(boolean force, boolean takeOver);
/**
* Delete all the slots associated with the specified node. The number of deleted slots is returned.
*
* @return String simple-string-reply
*/
String clusterFlushslots();
/**
* Disallow connections and remove the cluster node from the cluster.
*
* @param nodeId the node Id
* @return String simple-string-reply
*/
String clusterForget(String nodeId);
/**
* Retrieve the list of keys within the {@code slot}.
*
* @param slot the slot
* @param count maximal number of keys
* @return List<K> array-reply list of keys
*/
List<K> clusterGetKeysInSlot(int slot, int count);
/**
* Get information and statistics about the cluster viewed by the current node.
*
* @return String bulk-string-reply as a collection of text lines.
*/
String clusterInfo();
/**
* Returns an integer identifying the hash slot the specified key hashes to. This command is mainly useful for debugging and
* testing, since it exposes via an API the underlying Redis implementation of the hashing algorithm. Basically the same as
* {@link io.lettuce.core.cluster.SlotHash#getSlot(byte[])}. If not, call Houston and report that we've got a problem.
*
* @param key the key.
* @return Integer reply: The hash slot number.
*/
Long clusterKeyslot(K key);
/**
* Meet another cluster node to include the node into the cluster. The command starts the cluster handshake and returns with
* {@literal OK} when the node was added to the cluster.
*
* @param ip IP address of the host
* @param port port number.
* @return String simple-string-reply
*/
String clusterMeet(String ip, int port);
/**
* Obtain the nodeId for the currently connected node.
*
* @return String simple-string-reply
*/
String clusterMyId();
/**
* Obtain the shard ID for the currently connected node.
* <p>
* The CLUSTER MYSHARDID command returns the unique, auto-generated identifier that is associated with the shard to which
* the connected cluster node belongs.
*
* @return String simple-string-reply
*/
String clusterMyShardId();
/**
* Obtain details about all cluster nodes. Can be parsed using
* {@link io.lettuce.core.cluster.models.partitions.ClusterPartitionParser#parse}
*
* @return String bulk-string-reply as a collection of text lines
*/
String clusterNodes();
/**
* Turn this node into a replica of the node with the id {@code nodeId}.
*
* @param nodeId master node id
* @return String simple-string-reply
*/
String clusterReplicate(String nodeId);
/**
* List replicas for a certain node identified by its {@code nodeId}. Can be parsed using
* {@link io.lettuce.core.cluster.models.partitions.ClusterPartitionParser#parse}
*
* @param nodeId node id of the master node
* @return List<String> array-reply list of replicas. The command returns data in the same format as
* {@link #clusterNodes()} but one line per replica.
* @since 6.1.7
*/
List<String> clusterReplicas(String nodeId);
/**
* Reset a node performing a soft or hard reset:
* <ul>
* <li>All other nodes are forgotten</li>
* <li>All the assigned / open slots are released</li>
* <li>If the node is a replica, it turns into a master</li>
* <li>Only for hard reset: a new Node ID is generated</li>
* <li>Only for hard reset: currentEpoch and configEpoch are set to 0</li>
* <li>The new configuration is saved and the cluster state updated</li>
* <li>If the node was a replica, the whole data set is flushed away</li>
* </ul>
*
* @param hard {@code true} for hard reset. Generates a new nodeId and currentEpoch/configEpoch are set to 0
* @return String simple-string-reply
*/
String clusterReset(boolean hard);
/**
* Forces a node to save the nodes.conf configuration on disk.
*
* @return String simple-string-reply: {@code OK} or an error if the operation fails.
*/
String clusterSaveconfig();
/**
* This command sets a specific config epoch in a fresh node. It only works when:
* <ul>
* <li>The nodes table of the node is empty.</li>
* <li>The node current config epoch is zero.</li>
* </ul>
*
* @param configEpoch the config epoch
* @return String simple-string-reply: {@code OK} or an error if the operation fails.
*/
String clusterSetConfigEpoch(long configEpoch);
/**
* Flag a slot as {@literal IMPORTING} (incoming) from the node specified in {@code nodeId}.
*
* @param slot the slot
* @param nodeId the id of the node is the master of the slot
* @return String simple-string-reply
*/
String clusterSetSlotImporting(int slot, String nodeId);
/**
* Flag a slot as {@literal MIGRATING} (outgoing) towards the node specified in {@code nodeId}. The slot must be handled by
* the current node in order to be migrated.
*
* @param slot the slot
* @param nodeId the id of the node is targeted to become the master for the slot
* @return String simple-string-reply
*/
String clusterSetSlotMigrating(int slot, String nodeId);
/**
* Assign a slot to a node. The command migrates the specified slot from the current node to the specified node in
* {@code nodeId}
*
* @param slot the slot
* @param nodeId the id of the node that will become the master for the slot
* @return String simple-string-reply
*/
String clusterSetSlotNode(int slot, String nodeId);
/**
* Clears migrating / importing state from the slot.
*
* @param slot the slot
* @return String simple-string-reply
*/
String clusterSetSlotStable(int slot);
/**
* Get array of cluster shards
*
* @return RedisFuture<List<Object>> array-reply nested list of the shards response.
* @since 6.2
*/
List<Object> clusterShards();
/**
* List replicas for a certain node identified by its {@code nodeId}. Can be parsed using
* {@link io.lettuce.core.cluster.models.partitions.ClusterPartitionParser#parse}
*
* @param nodeId node id of the master node
* @return List<String> array-reply list of replicas. The command returns data in the same format as
* {@link #clusterNodes()} but one line per replica.
* @deprecated since 6.1.7, use {@link #clusterReplicas(String)} instead.
*/
@Deprecated
List<String> clusterSlaves(String nodeId);
/**
* Get array of cluster slots to node mappings.
*
* @return List<Object> array-reply nested list of slot ranges with IP/Port mappings.
*/
List<Object> clusterSlots();
/**
* Tells a Redis cluster replica node that the client is ok reading possibly stale data and is not interested in running
* write queries.
*
* @return String simple-string-reply
*/
@Override
String readOnly();
/**
* Resets readOnly flag.
*
* @return String simple-string-reply
*/
@Override
String readWrite();
/**
* Retrieves information about the TCP links between nodes in a Redis Cluster.
*
* @return List of maps containing attributes and values for each peer link.
*/
List<Map<String, Object>> clusterLinks();
/**
* @return the currently configured instance of the {@link JsonParser}
* @since 6.5
*/
JsonParser getJsonParser();
/**
* Set multiple keys to multiple values with optional conditions and expiration. Emits: numkeys, pairs, then [NX|XX] and one
* of [EX|PX|EXAT|PXAT|KEEPTTL]. Cross-slot keys will result in multiple calls to the particular cluster nodes.
*
* @param map the map of keys and values.
* @param args the {@link MSetExArgs} specifying NX/XX and expiration.
* @return Boolean from integer-reply: {@code 1} if all keys were set, {@code 0} otherwise.
* @since 7.1
*/
Boolean msetex(Map<K, V> map, MSetExArgs args);
}
| RedisClusterCommands |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/creators/NamingStrategyViaCreator556Test.java | {
"start": 995,
"end": 1427
} | class ____
{
protected String myName;
protected int myAge;
private RenamedFactoryBean(int a, String n, boolean foo) {
myAge = a;
myName = n;
}
@JsonCreator
public static RenamedFactoryBean create(int age, String name) {
return new RenamedFactoryBean(age, name, true);
}
}
@SuppressWarnings("serial")
static | RenamedFactoryBean |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/xml/SimpleSaxErrorHandler.java | {
"start": 1055,
"end": 1632
} | class ____ implements ErrorHandler {
private final Log logger;
/**
* Create a new SimpleSaxErrorHandler for the given
* Commons Logging logger instance.
*/
public SimpleSaxErrorHandler(Log logger) {
this.logger = logger;
}
@Override
public void warning(SAXParseException ex) throws SAXException {
logger.warn("Ignored XML validation warning", ex);
}
@Override
public void error(SAXParseException ex) throws SAXException {
throw ex;
}
@Override
public void fatalError(SAXParseException ex) throws SAXException {
throw ex;
}
}
| SimpleSaxErrorHandler |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/shard/IndexingOperationListenerTests.java | {
"start": 1045,
"end": 8181
} | class ____ extends ESTestCase {
// this test also tests if calls are correct if one or more listeners throw exceptions
public void testListenersAreExecuted() {
AtomicInteger preIndex = new AtomicInteger();
AtomicInteger postIndex = new AtomicInteger();
AtomicInteger postIndexException = new AtomicInteger();
AtomicInteger preDelete = new AtomicInteger();
AtomicInteger postDelete = new AtomicInteger();
AtomicInteger postDeleteException = new AtomicInteger();
ShardId randomShardId = new ShardId(new Index(randomAlphaOfLength(10), randomAlphaOfLength(10)), randomIntBetween(1, 10));
IndexingOperationListener listener = new IndexingOperationListener() {
@Override
public Engine.Index preIndex(ShardId shardId, Engine.Index operation) {
assertThat(shardId, is(randomShardId));
preIndex.incrementAndGet();
return operation;
}
@Override
public void postIndex(ShardId shardId, Engine.Index index, Engine.IndexResult result) {
assertThat(shardId, is(randomShardId));
switch (result.getResultType()) {
case SUCCESS -> postIndex.incrementAndGet();
case FAILURE -> postIndex(shardId, index, result.getFailure());
default -> throw new IllegalArgumentException("unknown result type: " + result.getResultType());
}
}
@Override
public void postIndex(ShardId shardId, Engine.Index index, Exception ex) {
assertThat(shardId, is(randomShardId));
postIndexException.incrementAndGet();
}
@Override
public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) {
assertThat(shardId, is(randomShardId));
preDelete.incrementAndGet();
return delete;
}
@Override
public void postDelete(ShardId shardId, Engine.Delete delete, Engine.DeleteResult result) {
assertThat(shardId, is(randomShardId));
switch (result.getResultType()) {
case SUCCESS -> postDelete.incrementAndGet();
case FAILURE -> postDelete(shardId, delete, result.getFailure());
default -> throw new IllegalArgumentException("unknown result type: " + result.getResultType());
}
}
@Override
public void postDelete(ShardId shardId, Engine.Delete delete, Exception ex) {
assertThat(shardId, is(randomShardId));
postDeleteException.incrementAndGet();
}
};
IndexingOperationListener throwingListener = new IndexingOperationListener() {
@Override
public Engine.Index preIndex(ShardId shardId, Engine.Index operation) {
throw new RuntimeException();
}
@Override
public void postIndex(ShardId shardId, Engine.Index index, Engine.IndexResult result) {
throw new RuntimeException();
}
@Override
public void postIndex(ShardId shardId, Engine.Index index, Exception ex) {
throw new RuntimeException();
}
@Override
public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) {
throw new RuntimeException();
}
@Override
public void postDelete(ShardId shardId, Engine.Delete delete, Engine.DeleteResult result) {
throw new RuntimeException();
}
@Override
public void postDelete(ShardId shardId, Engine.Delete delete, Exception ex) {
throw new RuntimeException();
}
};
final List<IndexingOperationListener> indexingOperationListeners = new ArrayList<>(Arrays.asList(listener, listener));
if (randomBoolean()) {
indexingOperationListeners.add(throwingListener);
if (randomBoolean()) {
indexingOperationListeners.add(throwingListener);
}
}
Collections.shuffle(indexingOperationListeners, random());
IndexingOperationListener.CompositeListener compositeListener = new IndexingOperationListener.CompositeListener(
indexingOperationListeners,
logger
);
ParsedDocument doc = EngineTestCase.createParsedDoc("1", null);
Engine.Delete delete = new Engine.Delete("1", Uid.encodeId(doc.id()), randomNonNegativeLong());
Engine.Index index = new Engine.Index(Uid.encodeId(doc.id()), randomNonNegativeLong(), doc);
compositeListener.postDelete(
randomShardId,
delete,
new Engine.DeleteResult(1, 0, SequenceNumbers.UNASSIGNED_SEQ_NO, true, delete.id())
);
assertEquals(0, preIndex.get());
assertEquals(0, postIndex.get());
assertEquals(0, postIndexException.get());
assertEquals(0, preDelete.get());
assertEquals(2, postDelete.get());
assertEquals(0, postDeleteException.get());
compositeListener.postDelete(randomShardId, delete, new RuntimeException());
assertEquals(0, preIndex.get());
assertEquals(0, postIndex.get());
assertEquals(0, postIndexException.get());
assertEquals(0, preDelete.get());
assertEquals(2, postDelete.get());
assertEquals(2, postDeleteException.get());
compositeListener.preDelete(randomShardId, delete);
assertEquals(0, preIndex.get());
assertEquals(0, postIndex.get());
assertEquals(0, postIndexException.get());
assertEquals(2, preDelete.get());
assertEquals(2, postDelete.get());
assertEquals(2, postDeleteException.get());
compositeListener.postIndex(
randomShardId,
index,
new Engine.IndexResult(0, 0, SequenceNumbers.UNASSIGNED_SEQ_NO, false, index.id())
);
assertEquals(0, preIndex.get());
assertEquals(2, postIndex.get());
assertEquals(0, postIndexException.get());
assertEquals(2, preDelete.get());
assertEquals(2, postDelete.get());
assertEquals(2, postDeleteException.get());
compositeListener.postIndex(randomShardId, index, new RuntimeException());
assertEquals(0, preIndex.get());
assertEquals(2, postIndex.get());
assertEquals(2, postIndexException.get());
assertEquals(2, preDelete.get());
assertEquals(2, postDelete.get());
assertEquals(2, postDeleteException.get());
compositeListener.preIndex(randomShardId, index);
assertEquals(2, preIndex.get());
assertEquals(2, postIndex.get());
assertEquals(2, postIndexException.get());
assertEquals(2, preDelete.get());
assertEquals(2, postDelete.get());
assertEquals(2, postDeleteException.get());
}
}
| IndexingOperationListenerTests |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/testkit/Name.java | {
"start": 821,
"end": 2450
} | class ____ implements Comparable<Name> {
public static final Comparator<Name> lastNameComparator = comparing(Name::getLast);
// intentionally public to test field retrieval
// getter have been created to test property retrieval
public String first;
// keep private to test we are able to read property but not field
private String last;
public Name() {}
public Name(String first) {
setFirst(first);
}
public Name(String first, String last) {
setFirst(first);
setLast(last);
}
public static Name name(String first, String last) {
return new Name(first, last);
}
public String getFirst() {
return first;
}
public void setFirst(String first) {
this.first = first;
}
public String getLast() {
return last;
}
public void setLast(String last) {
this.last = last;
}
// property without field in order to test field/property combinations
public String getName() {
return "%s %s".formatted(getFirst(), getLast());
}
@Override
public String toString() {
return "%s[first='%s', last='%s']".formatted(getClass().getSimpleName(), first, last);
}
@Override
public int hashCode() {
return Objects.hash(first, last);
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
Name other = (Name) obj;
return Objects.equals(first, other.first) && Objects.equals(last, other.last);
}
@Override
public int compareTo(Name other) {
return this.getName().compareToIgnoreCase(other.getName());
}
}
| Name |
java | apache__camel | dsl/camel-jbang/camel-jbang-plugin-kubernetes/src/main/java/org/apache/camel/dsl/jbang/core/commands/kubernetes/traits/model/Ingress.java | {
"start": 4901,
"end": 5353
} | enum ____ {
@JsonProperty("Exact")
EXACT("Exact"),
@JsonProperty("Prefix")
PREFIX("Prefix"),
@JsonProperty("ImplementationSpecific")
IMPLEMENTATIONSPECIFIC("ImplementationSpecific");
private final String value;
PathType(String value) {
this.value = value;
}
@JsonValue
public String getValue() {
return this.value;
}
}
}
| PathType |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/QueueInfoPBImpl.java | {
"start": 2035,
"end": 19329
} | class ____ extends QueueInfo {
QueueInfoProto proto = QueueInfoProto.getDefaultInstance();
QueueInfoProto.Builder builder = null;
boolean viaProto = false;
List<ApplicationReport> applicationsList;
List<QueueInfo> childQueuesList;
Set<String> accessibleNodeLabels;
Map<String, QueueConfigurations> queueConfigurations;
public QueueInfoPBImpl() {
builder = QueueInfoProto.newBuilder();
}
public QueueInfoPBImpl(QueueInfoProto proto) {
this.proto = proto;
viaProto = true;
}
@Override
public List<ApplicationReport> getApplications() {
initLocalApplicationsList();
return this.applicationsList;
}
@Override
public float getCapacity() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasCapacity()) ? p.getCapacity() : -1;
}
@Override
public List<QueueInfo> getChildQueues() {
initLocalChildQueuesList();
return this.childQueuesList;
}
@Override
public float getCurrentCapacity() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasCurrentCapacity()) ? p.getCurrentCapacity() : 0;
}
@Override
public float getMaximumCapacity() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasMaximumCapacity()) ? p.getMaximumCapacity() : -1;
}
@Override
public String getQueueName() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasQueueName()) ? p.getQueueName() : null;
}
@Override
public String getQueuePath() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasQueuePath()) ? p.getQueuePath() : null;
}
@Override
public QueueState getQueueState() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasState()) {
return null;
}
return convertFromProtoFormat(p.getState());
}
@Override
public void setApplications(List<ApplicationReport> applications) {
if (applications == null) {
builder.clearApplications();
}
this.applicationsList = applications;
}
@Override
public void setCapacity(float capacity) {
maybeInitBuilder();
builder.setCapacity(capacity);
}
@Override
public float getWeight() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasWeight()) ? p.getWeight() : -1;
}
@Override
public void setWeight(float weight) {
maybeInitBuilder();
builder.setWeight(weight);
}
@Override
public int getMaxParallelApps() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasMaxParallelApps()) ? p.getMaxParallelApps() : -1;
}
@Override
public void setMaxParallelApps(int weight) {
maybeInitBuilder();
builder.setMaxParallelApps(weight);
}
@Override
public void setChildQueues(List<QueueInfo> childQueues) {
if (childQueues == null) {
builder.clearChildQueues();
}
this.childQueuesList = childQueues;
}
@Override
public void setCurrentCapacity(float currentCapacity) {
maybeInitBuilder();
builder.setCurrentCapacity(currentCapacity);
}
@Override
public void setMaximumCapacity(float maximumCapacity) {
maybeInitBuilder();
builder.setMaximumCapacity(maximumCapacity);
}
@Override
public void setQueueName(String queueName) {
maybeInitBuilder();
if (queueName == null) {
builder.clearQueueName();
return;
}
builder.setQueueName(queueName);
}
@Override
public void setQueuePath(String queuePath) {
maybeInitBuilder();
if (queuePath == null) {
builder.clearQueuePath();
return;
}
builder.setQueuePath(queuePath);
}
@Override
public void setQueueState(QueueState queueState) {
maybeInitBuilder();
if (queueState == null) {
builder.clearState();
return;
}
builder.setState(convertToProtoFormat(queueState));
}
public QueueInfoProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
private void initLocalApplicationsList() {
if (this.applicationsList != null) {
return;
}
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
List<ApplicationReportProto> list = p.getApplicationsList();
applicationsList = new ArrayList<ApplicationReport>();
for (ApplicationReportProto a : list) {
applicationsList.add(convertFromProtoFormat(a));
}
}
private void addApplicationsToProto() {
maybeInitBuilder();
builder.clearApplications();
if (applicationsList == null)
return;
Iterable<ApplicationReportProto> iterable = new Iterable<ApplicationReportProto>() {
@Override
public Iterator<ApplicationReportProto> iterator() {
return new Iterator<ApplicationReportProto>() {
Iterator<ApplicationReport> iter = applicationsList.iterator();
@Override
public boolean hasNext() {
return iter.hasNext();
}
@Override
public ApplicationReportProto next() {
return convertToProtoFormat(iter.next());
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
builder.addAllApplications(iterable);
}
private void initLocalChildQueuesList() {
if (this.childQueuesList != null) {
return;
}
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
List<QueueInfoProto> list = p.getChildQueuesList();
childQueuesList = new ArrayList<QueueInfo>();
for (QueueInfoProto a : list) {
childQueuesList.add(convertFromProtoFormat(a));
}
}
private void addChildQueuesInfoToProto() {
maybeInitBuilder();
builder.clearChildQueues();
if (childQueuesList == null)
return;
Iterable<QueueInfoProto> iterable = new Iterable<QueueInfoProto>() {
@Override
public Iterator<QueueInfoProto> iterator() {
return new Iterator<QueueInfoProto>() {
Iterator<QueueInfo> iter = childQueuesList.iterator();
@Override
public boolean hasNext() {
return iter.hasNext();
}
@Override
public QueueInfoProto next() {
return convertToProtoFormat(iter.next());
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
builder.addAllChildQueues(iterable);
}
private void addQueueConfigurations() {
maybeInitBuilder();
builder.clearQueueConfigurationsMap();
if (queueConfigurations == null) {
return;
}
Iterable<? extends QueueConfigurationsMapProto> values =
new Iterable<QueueConfigurationsMapProto>() {
@Override
public Iterator<QueueConfigurationsMapProto> iterator() {
return new Iterator<QueueConfigurationsMapProto>() {
private Iterator<String> iterator =
queueConfigurations.keySet().iterator();
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public QueueConfigurationsMapProto next() {
String key = iterator.next();
return QueueConfigurationsMapProto.newBuilder()
.setPartitionName(key)
.setQueueConfigurations(
convertToProtoFormat(queueConfigurations.get(key)))
.build();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
this.builder.addAllQueueConfigurationsMap(values);
}
private void mergeLocalToBuilder() {
if (this.childQueuesList != null) {
addChildQueuesInfoToProto();
}
if (this.applicationsList != null) {
addApplicationsToProto();
}
if (this.accessibleNodeLabels != null) {
builder.clearAccessibleNodeLabels();
builder.addAllAccessibleNodeLabels(this.accessibleNodeLabels);
}
if (this.queueConfigurations != null) {
addQueueConfigurations();
}
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = QueueInfoProto.newBuilder(proto);
}
viaProto = false;
}
private ApplicationReportPBImpl convertFromProtoFormat(ApplicationReportProto a) {
return new ApplicationReportPBImpl(a);
}
private ApplicationReportProto convertToProtoFormat(ApplicationReport t) {
return ((ApplicationReportPBImpl)t).getProto();
}
private QueueInfoPBImpl convertFromProtoFormat(QueueInfoProto a) {
return new QueueInfoPBImpl(a);
}
private QueueInfoProto convertToProtoFormat(QueueInfo q) {
return ((QueueInfoPBImpl)q).getProto();
}
  /** Maps a {@code QueueStateProto} enum value to the API-side enum. */
  private QueueState convertFromProtoFormat(QueueStateProto q) {
    return ProtoUtils.convertFromProtoFormat(q);
  }
  /** Maps the API-side {@code QueueState} enum to its proto counterpart. */
  private QueueStateProto convertToProtoFormat(QueueState queueState) {
    return ProtoUtils.convertToProtoFormat(queueState);
  }
  /** Wraps a {@code QueueConfigurationsProto} in its PB-impl record type. */
  private QueueConfigurationsPBImpl convertFromProtoFormat(
      QueueConfigurationsProto q) {
    return new QueueConfigurationsPBImpl(q);
  }
  /** Unwraps the proto from a {@code QueueConfigurations} (assumed PB-backed). */
  private QueueConfigurationsProto convertToProtoFormat(
      QueueConfigurations q) {
    return ((QueueConfigurationsPBImpl)q).getProto();
  }
  /**
   * Replaces the accessible node labels. The set is kept as local state (the
   * caller's reference is stored, not copied) and is only pushed into the
   * proto builder when {@code mergeLocalToBuilder()} runs.
   */
  @Override
  public void setAccessibleNodeLabels(Set<String> nodeLabels) {
    maybeInitBuilder();
    builder.clearAccessibleNodeLabels();
    this.accessibleNodeLabels = nodeLabels;
  }
private void initNodeLabels() {
if (this.accessibleNodeLabels != null) {
return;
}
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
this.accessibleNodeLabels = new HashSet<String>();
this.accessibleNodeLabels.addAll(p.getAccessibleNodeLabelsList());
}
  /**
   * Returns the (lazily materialized) accessible node labels. Note the
   * internal mutable set is exposed directly, not a defensive copy.
   */
  @Override
  public Set<String> getAccessibleNodeLabels() {
    initNodeLabels();
    return this.accessibleNodeLabels;
  }
@Override
public String getDefaultNodeLabelExpression() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasDefaultNodeLabelExpression()) ? p
.getDefaultNodeLabelExpression().trim() : null;
}
@Override
public void setDefaultNodeLabelExpression(String defaultNodeLabelExpression) {
maybeInitBuilder();
if (defaultNodeLabelExpression == null) {
builder.clearDefaultNodeLabelExpression();
return;
}
builder.setDefaultNodeLabelExpression(defaultNodeLabelExpression);
}
  /** Wraps a {@code QueueStatisticsProto} in its PB-impl record type. */
  private QueueStatistics convertFromProtoFormat(QueueStatisticsProto q) {
    return new QueueStatisticsPBImpl(q);
  }
  /** Unwraps the proto from a {@code QueueStatistics} (assumed PB-backed). */
  private QueueStatisticsProto convertToProtoFormat(QueueStatistics q) {
    return ((QueueStatisticsPBImpl) q).getProto();
  }
@Override
public QueueStatistics getQueueStatistics() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasQueueStatistics()) ? convertFromProtoFormat(p
.getQueueStatistics()) : null;
}
@Override
public void setQueueStatistics(QueueStatistics queueStatistics) {
maybeInitBuilder();
if (queueStatistics == null) {
builder.clearQueueStatistics();
return;
}
builder.setQueueStatistics(convertToProtoFormat(queueStatistics));
}
@Override
public Boolean getPreemptionDisabled() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasPreemptionDisabled()) ? p
.getPreemptionDisabled() : null;
}
  /** Records whether preemption is disabled, directly in the proto builder. */
  @Override
  public void setPreemptionDisabled(boolean preemptionDisabled) {
    maybeInitBuilder();
    builder.setPreemptionDisabled(preemptionDisabled);
  }
private void initQueueConfigurations() {
if (queueConfigurations != null) {
return;
}
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
List<QueueConfigurationsMapProto> lists = p.getQueueConfigurationsMapList();
queueConfigurations =
new HashMap<String, QueueConfigurations>(lists.size());
for (QueueConfigurationsMapProto queueConfigurationsProto : lists) {
queueConfigurations.put(queueConfigurationsProto.getPartitionName(),
convertFromProtoFormat(
queueConfigurationsProto.getQueueConfigurations()));
}
}
  /**
   * Returns the (lazily materialized) per-partition queue configurations.
   * Note the internal mutable map is exposed directly, not a copy.
   */
  @Override
  public Map<String, QueueConfigurations> getQueueConfigurations() {
    initQueueConfigurations();
    return queueConfigurations;
  }
  /**
   * Replaces the local queue-configurations map contents with the given map.
   * NOTE(review): a {@code null} argument is silently ignored (the existing
   * map is kept, not cleared) — confirm this asymmetry with the other
   * setters, which clear on null, is intentional.
   */
  @Override
  public void setQueueConfigurations(
      Map<String, QueueConfigurations> queueConfigurations) {
    if (queueConfigurations == null) {
      return;
    }
    initQueueConfigurations();
    this.queueConfigurations.clear();
    this.queueConfigurations.putAll(queueConfigurations);
  }
@Override
public Boolean getIntraQueuePreemptionDisabled() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasIntraQueuePreemptionDisabled()) ? p
.getIntraQueuePreemptionDisabled() : null;
}
  /** Records whether intra-queue preemption is disabled in the proto builder. */
  @Override
  public void setIntraQueuePreemptionDisabled(
      boolean intraQueuePreemptionDisabled) {
    maybeInitBuilder();
    builder.setIntraQueuePreemptionDisabled(intraQueuePreemptionDisabled);
  }
@Override
public String getSchedulerType() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasSchedulerType()) ? p.getSchedulerType() : null;
}
  /**
   * Stores the scheduler type directly in the proto builder.
   * NOTE(review): unlike {@code setDefaultNodeLabelExpression}, a null
   * argument is not handled here and would make the generated builder throw
   * NullPointerException — confirm callers never pass null.
   */
  @Override
  public void setSchedulerType(String schedulerType) {
    maybeInitBuilder();
    builder.setSchedulerType(schedulerType);
  }
@Override
public int getMinResourceVCore() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasMinResourceVCore()) ? p.getMinResourceVCore() : 0;
}
  /** Stores the minimum-resource vcore count in the proto builder. */
  @Override
  public void setMinResourceVCore(int vCore) {
    maybeInitBuilder();
    builder.setMinResourceVCore(vCore);
  }
@Override
public long getMinResourceMemory() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasMaxResourceVCore()) ? p.getMaxResourceVCore() : 0;
}
  /** Stores the minimum-resource memory in the proto builder. */
  @Override
  public void setMinResourceMemory(long memory) {
    maybeInitBuilder();
    builder.setMinResourceMemory(memory);
  }
@Override
public int getMaxResourceVCore() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasMaxResourceVCore()) ? p.getMaxResourceVCore() : 0;
}
@Override
public void setMaxResourceVCore(int vCore) {
maybeInitBuilder();
builder.setMinResourceVCore(vCore);
}
@Override
public long getMaxResourceMemory() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasMaxResourceMemory()) ? p.getMaxResourceMemory() : 0;
}
  /** Stores the maximum-resource memory in the proto builder. */
  @Override
  public void setMaxResourceMemory(long memory) {
    maybeInitBuilder();
    builder.setMaxResourceMemory(memory);
  }
@Override
public int getReservedResourceVCore() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasReservedResourceVCore()) ? p.getReservedResourceVCore() : 0;
}
  /** Stores the reserved-resource vcore count in the proto builder. */
  @Override
  public void setReservedResourceVCore(int vCore) {
    maybeInitBuilder();
    builder.setReservedResourceVCore(vCore);
  }
@Override
public long getReservedResourceMemory() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasReservedResourceMemory()) ? p.getReservedResourceMemory() : 0;
}
  /** Stores the reserved-resource memory in the proto builder. */
  @Override
  public void setReservedResourceMemory(long memory) {
    maybeInitBuilder();
    builder.setReservedResourceMemory(memory);
  }
@Override
public int getSteadyFairShareVCore() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasSteadyFairShareVCore()) ? p.getSteadyFairShareVCore() : 0;
}
  /** Stores the steady fair-share vcore count in the proto builder. */
  @Override
  public void setSteadyFairShareVCore(int vCore) {
    maybeInitBuilder();
    builder.setSteadyFairShareVCore(vCore);
  }
@Override
public long getSteadyFairShareMemory() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasSteadyFairShareMemory()) ? p.getSteadyFairShareMemory() : 0;
}
  /** Stores the steady fair-share memory in the proto builder. */
  @Override
  public void setSteadyFairShareMemory(long memory) {
    maybeInitBuilder();
    builder.setSteadyFairShareMemory(memory);
  }
@Override
public String getSubClusterId() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasSubClusterId()) ? p.getSubClusterId() : null;
}
  /**
   * Stores the sub-cluster id directly in the proto builder.
   * NOTE(review): a null argument is not guarded here and would make the
   * generated builder throw NullPointerException — confirm callers.
   */
  @Override
  public void setSubClusterId(String subClusterId) {
    maybeInitBuilder();
    builder.setSubClusterId(subClusterId);
  }
@Override
public int getMaxRunningApp() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasMaxRunningApp()) ? p.getMaxRunningApp() : 0;
}
  /** Stores the maximum number of running apps in the proto builder. */
  @Override
  public void setMaxRunningApp(int maxRunningApp) {
    maybeInitBuilder();
    builder.setMaxRunningApp(maxRunningApp);
  }
}
| QueueInfoPBImpl |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/conditional/propertyname/sourcepropertyname/ConditionalMethodWithSourcePropertyNameInContextMapper.java | {
"start": 1090,
"end": 1588
} | interface ____ {
ConditionalMethodWithSourcePropertyNameInContextMapper INSTANCE
= Mappers.getMapper( ConditionalMethodWithSourcePropertyNameInContextMapper.class );
@Mapping(target = "country", source = "originCountry")
@Mapping(target = "addresses", source = "originAddresses")
Employee map(EmployeeDto employee, @Context PresenceUtils utils);
Address map(AddressDto addressDto, @Context PresenceUtils utils);
| ConditionalMethodWithSourcePropertyNameInContextMapper |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng7716BuildDeadlock.java | {
"start": 1128,
"end": 2149
} | class ____ extends AbstractMavenIntegrationTestCase {
/**
* Verify that maven invocation works (no NPE/error happens).
*
* @throws Exception in case of failure
*/
@Test
@Timeout(value = 120, unit = TimeUnit.SECONDS)
void testNoDeadlockAtVersionUpdate() throws Exception {
File testDir = extractResources("/mng-7716");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.addCliArgument("-f");
verifier.addCliArgument("settings");
verifier.addCliArgument("install");
verifier.setLogFileName("log-settings.txt");
verifier.execute();
verifier.verifyErrorFreeLog();
verifier = newVerifier(testDir.getAbsolutePath());
verifier.addCliArgument("-T1C");
verifier.addCliArgument("org.codehaus.mojo:versions-maven-plugin:2.15.0:set");
verifier.addCliArgument("-DnewVersion=1.2.3");
verifier.execute();
verifier.verifyErrorFreeLog();
}
}
| MavenITmng7716BuildDeadlock |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/context/env/ConstantPropertySources.java | {
"start": 731,
"end": 1063
} | class ____ is used to register property sources
* statically. This is typically used when converting "dynamic"
* property sources like YAML files into "static" property sources
* (Java configuration) at build time.
* The list of static property sources is injected via this class.
*
* @since 3.2.0
*/
@Internal
public final | which |
java | quarkusio__quarkus | extensions/devui/deployment-spi/src/main/java/io/quarkus/devui/spi/page/ExternalPageBuilder.java | {
"start": 77,
"end": 3111
} | class ____ extends PageBuilder<ExternalPageBuilder> {
private static final Logger log = Logger.getLogger(ExternalPageBuilder.class);
private static final String QWC_EXTERNAL_PAGE_JS = "qwc-external-page.js";
private static final String EXTERNAL_URL = "externalUrl";
private static final String DYNAMIC_URL = "dynamicUrlMethodName";
private static final String MIME_TYPE = "mimeType";
public static final String MIME_TYPE_HTML = "text/html";
public static final String MIME_TYPE_JSON = "application/json";
public static final String MIME_TYPE_YAML = "application/yaml";
public static final String MIME_TYPE_PDF = "application/pdf";
protected ExternalPageBuilder(String title) {
super();
super.title = title;
super.componentLink = QWC_EXTERNAL_PAGE_JS;
super.internalComponent = true;// As external page runs on "internal" namespace
}
public ExternalPageBuilder url(String url) {
return url(url, null);
}
public ExternalPageBuilder url(String url, String externalLink) {
if (url == null || url.isEmpty()) {
throw new RuntimeException("Invalid external URL, can not be empty");
}
super.metadata.put(EXTERNAL_URL, url);
if (externalLink != null) {
return staticLabel("<a style='color: var(--lumo-contrast-80pct);' href='" + externalLink
+ "' target='_blank'><vaadin-icon class='icon' icon='font-awesome-solid:up-right-from-square'></vaadin-icon></a>");
}
return this;
}
@SuppressWarnings("unchecked")
public ExternalPageBuilder dynamicUrlJsonRPCMethodName(String methodName) {
if (methodName == null || methodName.isEmpty()) {
throw new RuntimeException("Invalid dynamic URL Method name, can not be empty");
}
super.metadata.put(DYNAMIC_URL, methodName);
return this;
}
public ExternalPageBuilder isHtmlContent() {
return mimeType(MIME_TYPE_HTML);
}
public ExternalPageBuilder isJsonContent() {
return mimeType(MIME_TYPE_JSON);
}
public ExternalPageBuilder isYamlContent() {
return mimeType(MIME_TYPE_YAML);
}
public ExternalPageBuilder isPdfContent() {
return mimeType(MIME_TYPE_PDF);
}
public ExternalPageBuilder mimeType(String mimeType) {
if (mimeType == null || mimeType.isEmpty()) {
throw new RuntimeException("Invalid mimeType, can not be empty");
}
if (super.metadata.containsKey(MIME_TYPE)) {
log.warn("MimeType already set to " + super.metadata.get(MIME_TYPE) + ", overriding with new value");
}
super.metadata.put(MIME_TYPE, mimeType);
return this;
}
public ExternalPageBuilder doNotEmbed() {
return doNotEmbed(false);
}
public ExternalPageBuilder doNotEmbed(boolean includeInMenu) {
super.embed = false;
super.includeInMenu = includeInMenu;
return this;
}
} | ExternalPageBuilder |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/jdbc/leak/MySQLIdleConnectionCounter.java | {
"start": 371,
"end": 1153
} | class ____ implements IdleConnectionCounter {
public static final IdleConnectionCounter INSTANCE = new MySQLIdleConnectionCounter();
@Override
public boolean appliesTo(Class<? extends Dialect> dialect) {
return MySQLDialect.class.isAssignableFrom( dialect );
}
@Override
public int count(Connection connection) {
try ( Statement statement = connection.createStatement() ) {
try ( ResultSet resultSet = statement.executeQuery(
"SHOW PROCESSLIST" ) ) {
int count = 0;
while ( resultSet.next() ) {
String state = resultSet.getString( "command" );
if ( "sleep".equalsIgnoreCase( state ) ) {
count++;
}
}
return count;
}
}
catch ( SQLException e ) {
throw new IllegalStateException( e );
}
}
}
| MySQLIdleConnectionCounter |
java | apache__flink | flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/state/rocksdb/sstmerge/RocksDBManualCompactionOptions.java | {
"start": 1178,
"end": 4545
} | class ____ {
@Documentation.Section(Documentation.Sections.EXPERT_ROCKSDB)
public static final ConfigOption<Duration> MIN_INTERVAL =
ConfigOptions.key("state.backend.rocksdb.manual-compaction.min-interval")
.durationType()
.defaultValue(Duration.ofMinutes(0))
.withDescription(
"The minimum interval between manual compactions. Zero disables manual compactions");
@Documentation.Section(Documentation.Sections.EXPERT_ROCKSDB)
public static final ConfigOption<Integer> MAX_PARALLEL_COMPACTIONS =
ConfigOptions.key("state.backend.rocksdb.manual-compaction.max-parallel-compactions")
.intType()
.defaultValue(5)
.withDescription(
"The maximum number of manual compactions to start."
+ "Note that only one of them can run at a time as of v8.10.0; all the others will be waiting");
@Documentation.Section(Documentation.Sections.EXPERT_ROCKSDB)
public static final ConfigOption<MemorySize> MAX_FILE_SIZE_TO_COMPACT =
ConfigOptions.key("state.backend.rocksdb.manual-compaction.max-file-size-to-compact")
.memoryType()
.defaultValue(MemorySize.parse("50k"))
.withDescription("The maximum size of individual input files");
@Documentation.Section(Documentation.Sections.EXPERT_ROCKSDB)
public static final ConfigOption<Integer> MIN_FILES_TO_COMPACT =
ConfigOptions.key("state.backend.rocksdb.manual-compaction.min-files-to-compact")
.intType()
.defaultValue(5)
.withDescription(
"The minimum number of input files to compact together in a single compaction run");
@Documentation.Section(Documentation.Sections.EXPERT_ROCKSDB)
public static final ConfigOption<Integer> MAX_FILES_TO_COMPACT =
ConfigOptions.key("state.backend.rocksdb.manual-compaction.max-files-to-compact")
.intType()
.defaultValue(30)
.withDescription(
"The maximum number of input files to compact together in a single compaction run");
@Documentation.Section(Documentation.Sections.EXPERT_ROCKSDB)
public static final ConfigOption<MemorySize> MAX_OUTPUT_FILE_SIZE =
ConfigOptions.key("state.backend.rocksdb.manual-compaction.max-output-file-size")
.memoryType()
.defaultValue(MemorySize.parse("64Mb"))
.withDescription("The maximum output file size");
@Documentation.Section(Documentation.Sections.EXPERT_ROCKSDB)
public static final ConfigOption<Integer> MAX_AUTO_COMPACTIONS =
ConfigOptions.key("state.backend.rocksdb.manual-compaction.max-auto-compactions")
.intType()
.defaultValue(30)
.withDescription(
"The maximum number of automatic compactions running for manual compaction to start."
+ "If the actual number is higher, manual compaction won't be started to avoid delaying automatic ones.");
}
| RocksDBManualCompactionOptions |
java | google__dagger | dagger-producers/main/java/dagger/producers/monitoring/TimingProductionComponentMonitor.java | {
"start": 1782,
"end": 2445
} | class ____ extends ProductionComponentMonitor.Factory {
private final ProductionComponentTimingRecorder.Factory recorderFactory;
private final Ticker ticker;
public Factory(ProductionComponentTimingRecorder.Factory recorderFactory) {
this(recorderFactory, Ticker.systemTicker());
}
Factory(ProductionComponentTimingRecorder.Factory recorderFactory, Ticker ticker) {
this.recorderFactory = recorderFactory;
this.ticker = ticker;
}
@Override
public ProductionComponentMonitor create(Object component) {
return new TimingProductionComponentMonitor(recorderFactory.create(component), ticker);
}
}
}
| Factory |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/type/Argument.java | {
"start": 16646,
"end": 22957
} | class ____ be converted to an argument");
}
} else {
throw new IllegalArgumentException("Type [" + type + "] must be a Class or ParameterizedType");
}
}
/**
* Creates a new argument for the given type and name.
*
* @param type The type
* @param <T> The generic type
* @return The argument instance
*/
@UsedByGeneratedCode
@NonNull
static <T> Argument<T> of(
@NonNull Class<T> type) {
return new DefaultArgument<>(type, null, AnnotationMetadata.EMPTY_METADATA, Collections.emptyMap(), Argument.ZERO_ARGUMENTS);
}
/**
* Creates a new argument for the type of the given instance.
*
* @param instance The argument instance
* @param <T> The generic type
* @return The argument instance
* @since 4.6
*/
@NonNull
static <T> Argument<T> ofInstance(@NonNull T instance) {
return Argument.of((Class<T>) instance.getClass());
}
/**
* Creates a new argument for the given type and name.
*
* @param type The type
* @param typeParameters the parameters type
* @param <T> The generic type
* @return The argument instance
*/
@UsedByGeneratedCode
@NonNull
static <T> Argument<T> of(@NonNull Class<T> type, @Nullable Class<?>... typeParameters) {
return of(type, AnnotationMetadata.EMPTY_METADATA, typeParameters);
}
/**
* Creates a new argument for the given type and name.
* NOTE: This method should be avoided as it does use the reflection to retrieve the type parameter names.
*
* @param type The type
* @param annotationMetadata The annotation metadata
* @param typeParameters The parameters type
* @param <T> The generic type
* @return The argument instance
* @since 3.0.0
*/
@UsedByGeneratedCode
@NonNull
static <T> Argument<T> of(@NonNull Class<T> type, @Nullable AnnotationMetadata annotationMetadata, @Nullable Class<?>[] typeParameters) {
if (ArrayUtils.isEmpty(typeParameters)) {
return of(type, annotationMetadata);
}
TypeVariable<Class<T>>[] parameters = type.getTypeParameters();
int len = typeParameters.length;
if (parameters.length != len) {
throw new IllegalArgumentException("Type parameter length does not match. Required: " + parameters.length + ", Specified: " + len);
}
Argument<?>[] typeArguments = new Argument[len];
for (int i = 0; i < parameters.length; i++) {
TypeVariable<Class<T>> parameter = parameters[i];
typeArguments[i] = Argument.ofTypeVariable(typeParameters[i], parameter.getName());
}
return new DefaultArgument<>(type, annotationMetadata != null ? annotationMetadata : AnnotationMetadata.EMPTY_METADATA, typeArguments);
}
/**
* Creates a new argument representing a generic list.
*
* @param type list element type
* @param <T> list element type
* @return The argument instance
*/
@NonNull
static <T> Argument<List<T>> listOf(@NonNull Class<T> type) {
return listOf(Argument.of(type, "E"));
}
/**
* Creates a new argument representing a generic list.
*
* @param type list element type
* @param <T> list element type
* @return The argument instance
* @since 2.0.1
*/
@NonNull
static <T> Argument<List<T>> listOf(@NonNull Argument<T> type) {
//noinspection unchecked
return of((Class<List<T>>) ((Class) List.class), "list", type);
}
/**
* Creates a new argument representing a generic set.
*
* @param type set element type
* @param <T> set element type
* @return The argument instance
*/
@NonNull
static <T> Argument<Set<T>> setOf(@NonNull Class<T> type) {
return setOf(Argument.of(type, "E"));
}
/**
* Creates a new argument representing a generic set.
*
* @param type set element type
* @param <T> set element type
* @return The argument instance
* @since 2.0.1
*/
@NonNull
static <T> Argument<Set<T>> setOf(@NonNull Argument<T> type) {
//noinspection unchecked
return of((Class<Set<T>>) ((Class) Set.class), "set", type);
}
/**
* Creates a new argument representing a generic map.
*
* @param keyType The key type
* @param valueType The value type
* @param <K> The map key type
* @param <V> The map value type
* @return The argument instance
*/
@NonNull
static <K, V> Argument<Map<K, V>> mapOf(@NonNull Class<K> keyType, @NonNull Class<V> valueType) {
return mapOf(Argument.of(keyType, "K"), Argument.of(valueType, "V"));
}
/**
* Creates a new argument representing a generic map.
*
* @param keyType The key type
* @param valueType The value type
* @param <K> The map key type
* @param <V> The map value type
* @return The argument instance
* @since 2.0.1
*/
@NonNull
static <K, V> Argument<Map<K, V>> mapOf(@NonNull Argument<K> keyType, @NonNull Argument<V> valueType) {
//noinspection unchecked
return of((Class<Map<K, V>>) ((Class) Map.class), "map", keyType, valueType);
}
/**
* Creates a new argument representing an optional.
*
* @param optionalValueClass The optional type
* @param <T> The optional type
* @return The argument instance
* @since 4.0.0
*/
@NonNull
static <T> Argument<Optional<T>> optionalOf(@NonNull Class<T> optionalValueClass) {
return optionalOf(Argument.of(optionalValueClass, "T"));
}
/**
* Creates a new argument representing an optional.
*
* @param optionalValueArgument The optional type
* @param <T> The optional type
* @return The argument instance
* @since 4.0.0
*/
@NonNull
static <T> Argument<Optional<T>> optionalOf(@NonNull Argument<T> optionalValueArgument) {
//noinspection unchecked
return of((Class<Optional<T>>) ((Class) Optional.class), "optional", optionalValueArgument);
}
}
| cannot |
java | apache__maven | impl/maven-impl/src/main/java/org/apache/maven/impl/standalone/ApiRunner.java | {
"start": 6017,
"end": 16418
} | class ____ extends AbstractSession {
private final Map<String, String> systemProperties;
private final Instant startTime = MonotonicClock.now();
DefaultSession(RepositorySystemSession session, RepositorySystem repositorySystem, Lookup lookup) {
this(session, repositorySystem, Collections.emptyList(), null, lookup);
}
protected DefaultSession(
RepositorySystemSession session,
RepositorySystem repositorySystem,
List<RemoteRepository> repositories,
List<org.eclipse.aether.repository.RemoteRepository> resolverRepositories,
Lookup lookup) {
super(session, repositorySystem, repositories, resolverRepositories, lookup);
systemProperties = System.getenv().entrySet().stream()
.collect(Collectors.toMap(e -> "env." + e.getKey(), Map.Entry::getValue));
System.getProperties().forEach((k, v) -> systemProperties.put(k.toString(), v.toString()));
}
@Override
protected Session newSession(RepositorySystemSession session, List<RemoteRepository> repositories) {
return new DefaultSession(session, repositorySystem, repositories, null, lookup);
}
@Override
public Settings getSettings() {
return Settings.newInstance();
}
@Override
@Nonnull
public Collection<ToolchainModel> getToolchains() {
return List.of();
}
@Override
public Map<String, String> getUserProperties() {
return Map.of();
}
@Override
public Map<String, String> getSystemProperties() {
return systemProperties;
}
@Override
public Map<String, String> getEffectiveProperties(Project project) {
HashMap<String, String> result = new HashMap<>(getSystemProperties());
if (project != null) {
result.putAll(project.getModel().getProperties());
}
result.putAll(getUserProperties());
return result;
}
@Override
public Version getMavenVersion() {
return null;
}
@Override
public int getDegreeOfConcurrency() {
return 0;
}
@Override
public Instant getStartTime() {
return startTime;
}
@Override
public Path getTopDirectory() {
return null;
}
@Override
public Path getRootDirectory() {
throw new IllegalStateException();
}
@Override
public List<Project> getProjects() {
return List.of();
}
@Override
public Map<String, Object> getPluginContext(Project project) {
throw new UnsupportedInStandaloneModeException();
}
}
@Provides
@SuppressWarnings("unused")
static Lookup newLookup(Injector injector) {
return new Lookup() {
@Override
public <T> T lookup(Class<T> type) {
try {
return injector.getInstance(type);
} catch (DIException e) {
throw new MavenException("Unable to locate instance of type " + type, e);
}
}
@Override
public <T> T lookup(Class<T> type, String name) {
try {
return injector.getInstance(Key.of(type, name));
} catch (DIException e) {
throw new MavenException("Unable to locate instance of type " + type, e);
}
}
@Override
public <T> Optional<T> lookupOptional(Class<T> type) {
try {
return Optional.of(injector.getInstance(type));
} catch (DIException e) {
return Optional.empty();
}
}
@Override
public <T> Optional<T> lookupOptional(Class<T> type, String name) {
try {
return Optional.of(injector.getInstance(Key.of(type, name)));
} catch (DIException e) {
return Optional.empty();
}
}
@Override
public <T> List<T> lookupList(Class<T> type) {
return injector.getInstance(new Key<List<T>>() {});
}
@Override
public <T> Map<String, T> lookupMap(Class<T> type) {
return injector.getInstance(new Key<Map<String, T>>() {});
}
};
}
@Provides
@SuppressWarnings("unused")
static ArtifactManager newArtifactManager() {
return new ArtifactManager() {
private final Map<Artifact, Path> paths = new ConcurrentHashMap<>();
@Override
public Optional<Path> getPath(Artifact artifact) {
return Optional.ofNullable(paths.get(artifact));
}
@Override
public void setPath(ProducedArtifact artifact, Path path) {
paths.put(artifact, path);
}
};
}
@Provides
@SuppressWarnings("unused")
static PackagingRegistry newPackagingRegistry(TypeRegistry typeRegistry) {
return id -> Optional.of(new DumbPackaging(id, typeRegistry.require(id), Map.of()));
}
@Provides
@SuppressWarnings("unused")
static TypeRegistry newTypeRegistry(List<TypeProvider> providers) {
return new TypeRegistry() {
@Override
public Optional<Type> lookup(String id) {
return providers.stream()
.flatMap(p -> p.provides().stream())
.filter(t -> Objects.equals(id, t.id()))
.findAny();
}
};
}
@Provides
@SuppressWarnings("unused")
static LifecycleRegistry newLifecycleRegistry() {
return new LifecycleRegistry() {
@Override
public Iterator<Lifecycle> iterator() {
return Collections.emptyIterator();
}
@Override
public Optional<Lifecycle> lookup(String id) {
return Optional.empty();
}
@Override
public List<String> computePhases(Lifecycle lifecycle) {
return List.of();
}
};
}
@Provides
@SuppressWarnings("unused")
static Session newSession(RepositorySystem system, Lookup lookup, @Nullable LocalRepoProvider localRepoProvider) {
Map<String, String> properties = new HashMap<>();
// Env variables prefixed with "env."
System.getenv().forEach((k, v) -> properties.put("env." + k, v));
// Java System properties
System.getProperties().forEach((k, v) -> properties.put(k.toString(), v.toString()));
// Do not allow user settings to interfere with our unit tests
// TODO: remove that when this go more public
properties.put("user.home", "target");
Path userHome = Paths.get(properties.get("user.home"));
Path mavenUserHome = userHome.resolve(".m2");
Path mavenSystemHome = properties.containsKey("maven.home")
? Paths.get(properties.get("maven.home"))
: properties.containsKey("env.MAVEN_HOME") ? Paths.get(properties.get("env.MAVEN_HOME")) : null;
DefaultRepositorySystemSession rsession = new DefaultRepositorySystemSession(h -> false);
rsession.setScopeManager(new ScopeManagerImpl(Maven4ScopeManagerConfiguration.INSTANCE));
rsession.setSystemProperties(properties);
rsession.setConfigProperties(properties);
DefaultSession session = new DefaultSession(
rsession,
system,
List.of(lookup.lookup(RepositoryFactory.class)
.createRemote("central", "https://repo.maven.apache.org/maven2")),
null,
lookup);
Settings settings = session.getService(SettingsBuilder.class)
.build(
session,
mavenSystemHome != null ? mavenSystemHome.resolve("settings.xml") : null,
mavenUserHome.resolve("settings.xml"))
.getEffectiveSettings();
// local repository
String localRepository = settings.getLocalRepository() != null
&& !settings.getLocalRepository().isEmpty()
? settings.getLocalRepository()
: localRepoProvider != null && localRepoProvider.getLocalRepo() != null
? localRepoProvider.getLocalRepo().toString()
: mavenUserHome.resolve("repository").toString();
LocalRepositoryManager llm = system.newLocalRepositoryManager(rsession, new LocalRepository(localRepository));
rsession.setLocalRepositoryManager(llm);
// active proxies
// TODO
// active profiles
Profile profile = session.getService(SettingsBuilder.class)
.convert(org.apache.maven.api.settings.Profile.newBuilder()
.repositories(settings.getRepositories())
.pluginRepositories(settings.getPluginRepositories())
.build());
RepositoryFactory repositoryFactory = session.getService(RepositoryFactory.class);
List<RemoteRepository> repositories = profile.getRepositories().stream()
.map(repositoryFactory::createRemote)
.toList();
InternalSession s = (InternalSession) session.withRemoteRepositories(repositories);
InternalSession.associate(rsession, s);
return s;
// List<RemoteRepository> repositories = repositoryFactory.createRemote();
// session.getService(SettingsBuilder.class).convert()
// settings.getDelegate().getRepositories().stream()
// .map(r -> SettingsUtilsV4.)
// defaultSession.getService(RepositoryFactory.class).createRemote()
// return defaultSession;
}
record DumbPackaging(String id, Type type, Map<String, PluginContainer> plugins) implements Packaging {}
}
| DefaultSession |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/XmlSignerEndpointBuilderFactory.java | {
"start": 31917,
"end": 36038
} | interface ____
extends
EndpointProducerBuilder {
default XmlSignerEndpointBuilder basic() {
return (XmlSignerEndpointBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedXmlSignerEndpointBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedXmlSignerEndpointBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* If you want to restrict the remote access via reference URIs, you can
* set an own dereferencer. Optional parameter. If not set the provider
* default dereferencer is used which can resolve URI fragments, HTTP,
* file and XPpointer URIs. Attention: The implementation is provider
* dependent!.
*
* The option is a: <code>javax.xml.crypto.URIDereferencer</code> type.
*
* Group: advanced
*
* @param uriDereferencer the value to set
* @return the dsl builder
*/
default AdvancedXmlSignerEndpointBuilder uriDereferencer(javax.xml.crypto.URIDereferencer uriDereferencer) {
doSetProperty("uriDereferencer", uriDereferencer);
return this;
}
/**
* If you want to restrict the remote access via reference URIs, you can
* set an own dereferencer. Optional parameter. If not set the provider
* default dereferencer is used which can resolve URI fragments, HTTP,
* file and XPpointer URIs. Attention: The implementation is provider
* dependent!.
*
* The option will be converted to a
* <code>javax.xml.crypto.URIDereferencer</code> type.
*
* Group: advanced
*
* @param uriDereferencer the value to set
* @return the dsl builder
*/
default AdvancedXmlSignerEndpointBuilder uriDereferencer(String uriDereferencer) {
doSetProperty("uriDereferencer", uriDereferencer);
return this;
}
}
public | AdvancedXmlSignerEndpointBuilder |
java | apache__flink | flink-table/flink-table-code-splitter/src/test/resources/add-boolean/expected/TestAddBooleanBeforeReturn.java | {
"start": 7,
"end": 513
} | class ____ {
boolean fun2HasReturned$1;
boolean fun1HasReturned$0;
public void fun1(int a) {
if (a > 0) {
a += 5;
{ fun1HasReturned$0 = true; return; }
}
a -= 5;
{ fun1HasReturned$0 = true; return; }
}
public void fun2(String b) {
if (b.length() > 5) {
b += "b";
{ fun2HasReturned$1 = true; return; }
}
b += "a";
{ fun2HasReturned$1 = true; return; }
}
}
| TestAddBooleanBeforeReturn |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/search/nested/LongNestedSortingTests.java | {
"start": 975,
"end": 1664
} | class ____ extends AbstractNumberNestedSortingTestCase {
@Override
protected String getFieldDataType() {
return "long";
}
@Override
protected IndexFieldData.XFieldComparatorSource createFieldComparator(
String fieldName,
MultiValueMode sortMode,
Object missingValue,
Nested nested
) {
IndexNumericFieldData fieldData = getForField(fieldName);
return new LongValuesComparatorSource(fieldData, missingValue, sortMode, nested, null);
}
@Override
protected IndexableField createField(String name, int value) {
return new SortedNumericDocValuesField(name, value);
}
}
| LongNestedSortingTests |
java | micronaut-projects__micronaut-core | http-client-tck/src/main/java/io/micronaut/http/client/tck/tests/filter/ClientRequestFilterTest.java | {
"start": 2071,
"end": 14783
} | class ____ {
public static final String SPEC_NAME = "ClientRequestFilterTest";
@Test
public void requestFilterImmediateRequestParameter() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/immediate-request-parameter"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("foo")
.build());
Assertions.assertEquals(
List.of("requestFilterImmediateRequestParameter /request-filter/immediate-request-parameter"),
server.getApplicationContext().getBean(MyClientFilter.class).events
);
})
.run();
}
@Test
public void requestFilterImmediateMutableRequestParameter() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/immediate-mutable-request-parameter"))
.assertion((server, request) -> AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("bar")
.build()))
.run();
}
@Test
@Disabled // updating the request is not supported by http client atm
public void requestFilterReplaceRequest() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/replace-request"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("/request-filter/replace-request-2")
.build());
})
.run();
}
@Test
@Disabled // updating the request is not supported by http client atm
public void requestFilterReplaceMutableRequest() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/replace-mutable-request"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("/request-filter/replace-mutable-request-2")
.build());
})
.run();
}
@Test
public void requestFilterReplaceRequestNull() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/replace-request-null"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("/request-filter/replace-request-null")
.build());
})
.run();
}
@Test
public void requestFilterReplaceRequestEmpty() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/replace-request-empty"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("/request-filter/replace-request-empty")
.build());
})
.run();
}
@Test
@Disabled // updating the request is not supported by http client atm
public void requestFilterReplaceRequestPublisher() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/replace-request-publisher"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("/request-filter/replace-request-publisher-2")
.build());
})
.run();
}
@Test
@Disabled // updating the request is not supported by http client atm
public void requestFilterReplaceRequestMono() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/replace-request-mono"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("/request-filter/replace-request-mono-2")
.build());
})
.run();
}
@Test
@Disabled // updating the request is not supported by http client atm
public void requestFilterReplaceRequestCompletable() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/replace-request-completable"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("/request-filter/replace-request-completable-2")
.build());
})
.run();
}
@Test
@Disabled // updating the request is not supported by http client atm
public void requestFilterReplaceRequestCompletion() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/replace-request-completion"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("/request-filter/replace-request-completion-2")
.build());
})
.run();
}
@Test
public void requestFilterContinuationBlocking() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/continuation-blocking"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("bar")
.build());
Assertions.assertEquals(
List.of("requestFilterContinuationBlocking bar"),
server.getApplicationContext().getBean(MyClientFilter.class).events
);
})
.run();
}
@Test
public void requestFilterContinuationReactivePublisher() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/continuation-reactive-publisher"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("bar")
.build());
Assertions.assertEquals(
List.of("requestFilterContinuationReactivePublisher bar"),
server.getApplicationContext().getBean(MyClientFilter.class).events
);
})
.run();
}
@Test
@Disabled // updating the request is not supported by http client atm
public void requestFilterContinuationUpdateRequest() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/continuation-update-request"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("/request-filter/continuation-update-request-2")
.build());
})
.run();
}
@Test
public void requestFilterImmediateResponse() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/immediate-response"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("requestFilterImmediateResponse")
.build());
})
.run();
}
@Test
public void requestFilterNullResponse() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/null-response"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("foo")
.build());
Assertions.assertEquals(
List.of("requestFilterNullResponse"),
server.getApplicationContext().getBean(MyClientFilter.class).events
);
})
.run();
}
@Test
public void requestFilterEmptyOptionalResponse() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/empty-optional-response"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("foo")
.build());
Assertions.assertEquals(
List.of("requestFilterEmptyOptionalResponse"),
server.getApplicationContext().getBean(MyClientFilter.class).events
);
})
.run();
}
@Test
public void requestFilterPublisherResponse() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/publisher-response"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("requestFilterPublisherResponse")
.build());
})
.run();
}
@Test
public void requestFilterMonoResponse() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/mono-response"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("requestFilterMonoResponse")
.build());
})
.run();
}
@Test
public void requestFilterCompletableResponse() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/completable-response"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("requestFilterCompletableResponse")
.build());
})
.run();
}
@Test
public void requestFilterCompletionResponse() throws IOException {
TestScenario.builder()
.specName(SPEC_NAME)
.request(HttpRequest.GET("/request-filter/completion-response"))
.assertion((server, request) -> {
AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
.status(HttpStatus.OK)
.body("requestFilterCompletionResponse")
.build());
})
.run();
}
@ClientFilter
@Singleton
@Requires(property = "spec.name", value = SPEC_NAME)
public static | ClientRequestFilterTest |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/matchers/CapturingArgumentsTest.java | {
"start": 991,
"end": 1382
} | class ____ {
private final EmailService service;
public BulkEmailService(EmailService service) {
this.service = service;
}
public void email(Integer... personId) {
for (Integer i : personId) {
Person person = new Person(i);
service.sendEmailTo(person);
}
}
}
| BulkEmailService |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/interceptor/SpringTransactionalClientDataSourceTransactedTest.java | {
"start": 1533,
"end": 3130
} | class ____ extends SpringTestSupport {
protected JdbcTemplate jdbc;
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext(
"/org/apache/camel/spring/interceptor/springTransactionalClientDataSource.xml");
}
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
// create database and insert dummy data
final DataSource ds = getMandatoryBean(DataSource.class, "dataSource");
jdbc = new JdbcTemplate(ds);
}
@Test
public void testTransactionSuccess() throws Exception {
template.sendBody("direct:okay", "Hello World");
int count = jdbc.queryForObject("select count(*) from books", Integer.class);
assertEquals(3, count, "Number of books");
}
@Test
public void testTransactionRollback() throws Exception {
try {
template.sendBody("direct:fail", "Hello World");
fail("Should have thrown exception");
} catch (RuntimeCamelException e) {
// expected as we fail
assertIsInstanceOf(RuntimeCamelException.class, e.getCause());
assertTrue(e.getCause().getCause() instanceof IllegalArgumentException);
assertEquals("We don't have Donkeys, only Camels", e.getCause().getCause().getMessage());
}
int count = jdbc.queryForObject("select count(*) from books", Integer.class);
assertEquals(1, count, "Number of books");
}
}
| SpringTransactionalClientDataSourceTransactedTest |
java | spring-projects__spring-boot | integration-test/spring-boot-server-integration-tests/src/intTest/java/org/springframework/boot/context/embedded/EmbeddedServletContainerWarDevelopmentIntegrationTests.java | {
"start": 1444,
"end": 3455
} | class ____ {
@TestTemplate
void metaInfResourceFromDependencyIsAvailableViaHttp(RestTemplate rest) {
ResponseEntity<String> entity = rest.getForEntity("/nested-meta-inf-resource.txt", String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
}
@TestTemplate
@DisabledOnOs(OS.WINDOWS)
void metaInfResourceFromDependencyWithNameThatContainsReservedCharactersIsAvailableViaHttp(RestTemplate rest) {
ResponseEntity<String> entity = rest.getForEntity(
"/nested-reserved-%21%23%24%25%26%28%29%2A%2B%2C%3A%3D%3F%40%5B%5D-meta-inf-resource.txt",
String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(entity.getBody()).isEqualTo("encoded-name");
}
@TestTemplate
void metaInfResourceFromDependencyIsAvailableViaServletContext(RestTemplate rest) {
ResponseEntity<String> entity = rest.getForEntity("/servletContext?/nested-meta-inf-resource.txt",
String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
}
@TestTemplate
void webappResourcesAreAvailableViaHttp(RestTemplate rest) {
ResponseEntity<String> entity = rest.getForEntity("/webapp-resource.txt", String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
}
@TestTemplate
void loaderClassesAreNotAvailableViaResourcePaths(RestTemplate rest) {
ResponseEntity<String> entity = rest.getForEntity("/resourcePaths", String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(readLines(entity.getBody()))
.noneMatch((resourcePath) -> resourcePath.startsWith("/org/springframework/boot/loader"));
}
private List<String> readLines(String input) {
if (input == null) {
return Collections.emptyList();
}
try (BufferedReader reader = new BufferedReader(new StringReader(input))) {
return reader.lines().toList();
}
catch (IOException ex) {
throw new RuntimeException("Failed to read lines from input '" + input + "'");
}
}
}
| EmbeddedServletContainerWarDevelopmentIntegrationTests |
java | square__javapoet | src/test/java/com/squareup/javapoet/TypeSpecTest.java | {
"start": 6106,
"end": 7889
} | class ____ {\n"
+ " static final Thing.Thang<Foo, Bar> NAME = new Thing.Thang<Foo, Bar>() {\n"
+ " @Override\n"
+ " public Thung<? super Bar> call(final Thung<? super Foo> thung) {\n"
+ " return new SimpleThung<Bar>(thung) {\n"
+ " @Override\n"
+ " public void doSomething(Bar bar) {\n"
+ " /* code snippets */\n"
+ " }\n"
+ " };\n"
+ " }\n"
+ " };\n"
+ "}\n");
}
@Test public void annotatedParameters() throws Exception {
TypeSpec service = TypeSpec.classBuilder("Foo")
.addMethod(MethodSpec.constructorBuilder()
.addModifiers(Modifier.PUBLIC)
.addParameter(long.class, "id")
.addParameter(ParameterSpec.builder(String.class, "one")
.addAnnotation(ClassName.get(tacosPackage, "Ping"))
.build())
.addParameter(ParameterSpec.builder(String.class, "two")
.addAnnotation(ClassName.get(tacosPackage, "Ping"))
.build())
.addParameter(ParameterSpec.builder(String.class, "three")
.addAnnotation(AnnotationSpec.builder(ClassName.get(tacosPackage, "Pong"))
.addMember("value", "$S", "pong")
.build())
.build())
.addParameter(ParameterSpec.builder(String.class, "four")
.addAnnotation(ClassName.get(tacosPackage, "Ping"))
.build())
.addCode("/* code snippets */\n")
.build())
.build();
assertThat(toString(service)).isEqualTo(""
+ "package com.squareup.tacos;\n"
+ "\n"
+ "import java.lang.String;\n"
+ "\n"
+ " | Taco |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/asm/SortFieldTest.java | {
"start": 267,
"end": 1519
} | class ____ extends TestCase {
public void test_0() throws Exception {
V0 entity = new V0();
String text = JSON.toJSONString(entity, SerializerFeature.UseSingleQuotes, SerializerFeature.SortField);
Assert.assertEquals("{'f0':0,'f1':0,'f10':0,'f11':0,'f12':0,'f13':0,'f14':0,'f2':0,'f3':0,'f4':0,'f5':0,'f6':0,'f7':0,'f8':0,'f9':0}", text);
LinkedHashMap object = JSON.parseObject(text, LinkedHashMap.class);
text = JSON.toJSONString(object, SerializerFeature.UseSingleQuotes, SerializerFeature.SortField);
Assert.assertEquals("{'f0':0,'f1':0,'f10':0,'f11':0,'f12':0,'f13':0,'f14':0,'f2':0,'f3':0,'f4':0,'f5':0,'f6':0,'f7':0,'f8':0,'f9':0}", text);
}
public void test_1() throws Exception {
V1 entity = new V1();
String text = JSON.toJSONString(entity, SerializerFeature.SortField);
System.out.println(text);
// 按字段顺序输出
// {"f1":0,"f2":0,"f3":0,"f4":0,"f5":0}
Assert.assertEquals("{\"f1\":0,\"f2\":0,\"f3\":0,\"f4\":0,\"f5\":0}", text);
JSONObject object = JSON.parseObject(text);
text = JSON.toJSONString(object, SerializerFeature.SortField);
Assert.assertEquals("{\"f1\":0,\"f2\":0,\"f3\":0,\"f4\":0,\"f5\":0}", text);
}
public static | SortFieldTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/NarrowCalculationTest.java | {
"start": 3541,
"end": 3740
} | class ____ {
void t(int a) {
// BUG: Diagnostic contains: 2L * a
long b = 2 * a;
}
}
""")
.doTest();
}
}
| Test |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/logging/jdbc/BaseJdbcLogger.java | {
"start": 1170,
"end": 1279
} | class ____ proxies to do logging.
*
* @author Clinton Begin
* @author Eduardo Macarron
*/
public abstract | for |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/metrics/jfr/FlightRecorderStartupStep.java | {
"start": 3071,
"end": 3428
} | class ____ implements Iterator<Tag> {
private int idx = 0;
@Override
public boolean hasNext() {
return this.idx < tags.length;
}
@Override
public Tag next() {
return tags[this.idx++];
}
@Override
public void remove() {
throw new UnsupportedOperationException("tags are append only");
}
}
}
static | TagsIterator |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/exceptions/OnErrorNotImplementedExceptionTest.java | {
"start": 932,
"end": 3590
} | class ____ extends RxJavaTest {
List<Throwable> errors;
@Before
public void before() {
errors = TestHelper.trackPluginErrors();
}
@After
public void after() {
RxJavaPlugins.reset();
assertFalse("" + errors, errors.isEmpty());
TestHelper.assertError(errors, 0, OnErrorNotImplementedException.class);
Throwable c = errors.get(0).getCause();
assertTrue("" + c, c instanceof TestException);
}
@Test
public void flowableSubscribe0() {
Flowable.error(new TestException())
.subscribe();
}
@Test
public void flowableSubscribe1() {
Flowable.error(new TestException())
.subscribe(Functions.emptyConsumer());
}
@Test
public void flowableForEachWhile() {
Flowable.error(new TestException())
.forEachWhile(Functions.alwaysTrue());
}
@Test
public void flowableBlockingSubscribe1() {
Flowable.error(new TestException())
.blockingSubscribe(Functions.emptyConsumer());
}
@Test
public void flowableBoundedBlockingSubscribe1() {
Flowable.error(new TestException())
.blockingSubscribe(Functions.emptyConsumer(), 128);
}
@Test
public void observableSubscribe0() {
Observable.error(new TestException())
.subscribe();
}
@Test
public void observableSubscribe1() {
Observable.error(new TestException())
.subscribe(Functions.emptyConsumer());
}
@Test
public void observableForEachWhile() {
Observable.error(new TestException())
.forEachWhile(Functions.alwaysTrue());
}
@Test
public void observableBlockingSubscribe1() {
Observable.error(new TestException())
.blockingSubscribe(Functions.emptyConsumer());
}
@Test
public void singleSubscribe0() {
Single.error(new TestException())
.subscribe();
}
@Test
public void singleSubscribe1() {
Single.error(new TestException())
.subscribe(Functions.emptyConsumer());
}
@Test
public void maybeSubscribe0() {
Maybe.error(new TestException())
.subscribe();
}
@Test
public void maybeSubscribe1() {
Maybe.error(new TestException())
.subscribe(Functions.emptyConsumer());
}
@Test
public void completableSubscribe0() {
Completable.error(new TestException())
.subscribe();
}
@Test
public void completableSubscribe1() {
Completable.error(new TestException())
.subscribe(Functions.EMPTY_ACTION);
}
}
| OnErrorNotImplementedExceptionTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/cdi/general/hibernatesearch/TheMainNamedApplicationScopedBeanImpl.java | {
"start": 427,
"end": 1115
} | class ____ implements TheNamedApplicationScopedBean {
public static final String NAME = "TheMainNamedApplicationScopedBeanImpl_name";
@jakarta.inject.Inject
private TheNestedDependentBean nestedDependentBean;
public TheMainNamedApplicationScopedBeanImpl() {
Monitor.theMainNamedApplicationScopedBean().instantiated();
}
@Override
public void ensureInitialized() {
nestedDependentBean.ensureInitialized();
}
@PostConstruct
public void postConstruct() {
Monitor.theMainNamedApplicationScopedBean().postConstructCalled();
}
@PreDestroy
public void preDestroy() {
Monitor.theMainNamedApplicationScopedBean().preDestroyCalled();
}
}
| TheMainNamedApplicationScopedBeanImpl |
java | apache__spark | sql/core/src/test/scala/org/apache/spark/sql/types/JavaGeometryTypeSuite.java | {
"start": 1094,
"end": 3816
} | class ____ {
/*
* Test cases GeometryType construction based on SRID.
*/
@Test
public void geometryTypeWithSpecifiedValidSridTest() {
// Valid SRID values for GEOMETRY.
Stream.of(0, 3857, 4326).forEach(srid -> {
DataType geometryType = DataTypes.createGeometryType(srid);
Assertions.assertEquals("GEOMETRY(" + srid + ")", geometryType.sql());
Assertions.assertEquals("geometry(" + srid + ")", geometryType.typeName());
Assertions.assertEquals("geometry(" + srid + ")", geometryType.simpleString());
});
}
@Test
public void geometryTypeWithSpecifiedInvalidSridTest() {
// Invalid SRID values for GEOMETRY.
Stream.of(-1, -2, 1, 2).forEach(srid -> {
try {
DataTypes.createGeometryType(srid);
Assertions.fail("Expected SparkIllegalArgumentException for SRID: " + srid);
} catch (SparkIllegalArgumentException e) {
Assertions.assertEquals("ST_INVALID_SRID_VALUE", e.getCondition());
Assertions.assertEquals(String.valueOf(srid), e.getMessageParameters().get("srid"));
}
});
}
/*
* Test cases GeometryType construction based on CRS.
*/
@Test
public void geometryTypeWithSpecifiedValidCrsTest() {
// Valid CRS values for GEOMETRY.
Stream.of("SRID:0", "EPSG:3857", "OGC:CRS84").forEach(crs -> {
Integer srid = CartesianSpatialReferenceSystemMapper.getSrid(crs);
DataType geometryType = DataTypes.createGeometryType(crs);
Assertions.assertEquals("GEOMETRY(" + srid + ")", geometryType.sql());
Assertions.assertEquals("geometry(" + srid + ")", geometryType.typeName());
Assertions.assertEquals("geometry(" + srid + ")", geometryType.simpleString());
});
}
@Test
public void geometryTypeWithSpecifiedInvalidCrsTest() {
// Invalid CRS values for GEOMETRY.
Stream.of("0", "SRID", "SRID:-1", "SRID:4326", "CRS84", "").forEach(crs -> {
try {
DataTypes.createGeometryType(crs);
Assertions.fail("Expected SparkIllegalArgumentException for CRS: " + crs);
} catch (SparkIllegalArgumentException e) {
Assertions.assertEquals("ST_INVALID_CRS_VALUE", e.getCondition());
Assertions.assertEquals(crs, e.getMessageParameters().get("crs"));
}
});
}
@Test
public void geometryTypeWithSpecifiedAnyTest() {
// Special string value "ANY" in place of CRS is used to denote a mixed GEOMETRY type.
DataType geometryType = DataTypes.createGeometryType("ANY");
Assertions.assertEquals("GEOMETRY(ANY)", geometryType.sql());
Assertions.assertEquals("geometry(any)", geometryType.typeName());
Assertions.assertEquals("geometry(any)", geometryType.simpleString());
}
}
| JavaGeometryTypeSuite |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/KTableFilter.java | {
"start": 7513,
"end": 8641
} | class ____ implements KTableValueGetter<KIn, VIn> {
private final KTableValueGetter<KIn, VIn> parentGetter;
KTableFilterValueGetter(final KTableValueGetter<KIn, VIn> parentGetter) {
this.parentGetter = parentGetter;
}
@Override
public void init(final ProcessorContext<?, ?> context) {
// This is the old processor context for compatibility with the other KTable processors.
// Once we migrate them all, we can swap this out.
parentGetter.init(context);
}
@Override
public ValueAndTimestamp<VIn> get(final KIn key) {
return computeValue(key, parentGetter.get(key));
}
@Override
public ValueAndTimestamp<VIn> get(final KIn key, final long asOfTimestamp) {
return computeValue(key, parentGetter.get(key, asOfTimestamp));
}
@Override
public boolean isVersioned() {
return parentGetter.isVersioned();
}
@Override
public void close() {
parentGetter.close();
}
}
}
| KTableFilterValueGetter |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/multipart/MultipartResource.java | {
"start": 625,
"end": 3436
} | class ____ {
@POST
@Produces(MediaType.TEXT_PLAIN)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Path("/simple/{times}")
@NonBlocking
public String simple(@BeanParam FormData formData, Integer times) {
if (BlockingOperationSupport.isBlockingAllowed()) {
throw new RuntimeException("should not have dispatched");
}
return formData.getName() + " - " + formData.active + " - " + times * formData.getNum() + " - " + formData.getStatus()
+ " - "
+ formData.getHtmlPart().contentType() + " - " + Files.exists(formData.xmlPart) + " - "
+ formData.txtFile.exists();
}
@POST
@Blocking
@Produces(MediaType.TEXT_PLAIN)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Path("/blocking")
public Response blocking(@DefaultValue("1") @RestQuery Integer times, FormData formData) throws IOException {
if (!BlockingOperationSupport.isBlockingAllowed()) {
throw new RuntimeException("should have dispatched");
}
return Response.ok(formData.getName() + " - " + times * formData.getNum() + " - " + formData.getStatus())
.header("html-size", formData.getHtmlPart().size())
.header("html-path", formData.getHtmlPart().uploadedFile().toAbsolutePath().toString())
.header("xml-size", Files.readAllBytes(formData.xmlPart).length)
.header("xml-path", formData.xmlPart.toAbsolutePath().toString())
.header("txt-size", Files.readAllBytes(formData.txtFile.toPath()).length)
.header("txt-path", formData.txtFile.toPath().toAbsolutePath().toString())
.build();
}
@POST
@Produces(MediaType.TEXT_PLAIN)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Path("/same-name")
public String sameName(FormDataSameFileName formData) {
if (!BlockingOperationSupport.isBlockingAllowed()) {
throw new RuntimeException("should have dispatched");
}
return formData.status + " - " + formData.getHtmlFiles().size() + " - " + formData.txtFiles.size() + " - "
+ formData.xmlFiles.size();
}
@POST
@Produces(MediaType.TEXT_PLAIN)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Path("/optional")
@NonBlocking
public String optional(@BeanParam FormData formData) {
if (BlockingOperationSupport.isBlockingAllowed()) {
throw new RuntimeException("should not have dispatched");
}
return formData.getName() + " - " + formData.active + " - " + formData.getNum() + " - " + formData.getStatus()
+ " - " + (formData.getHtmlPart() != null) + " - " + (formData.xmlPart != null) + " - "
+ (formData.txtFile != null);
}
}
| MultipartResource |
java | apache__dubbo | dubbo-registry/dubbo-registry-nacos/src/main/java/org/apache/dubbo/registry/nacos/NacosNamingServiceWrapper.java | {
"start": 14788,
"end": 15727
} | class ____ {
private final String serviceName;
private final String group;
public InstanceId(String serviceName, String group) {
this.serviceName = serviceName;
this.group = group;
}
public String getServiceName() {
return serviceName;
}
public String getGroup() {
return group;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InstanceId that = (InstanceId) o;
return Objects.equals(serviceName, that.serviceName) && Objects.equals(group, that.group);
}
@Override
public int hashCode() {
return Objects.hash(serviceName, group);
}
}
protected static | InstanceId |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java | {
"start": 740,
"end": 1826
} | class ____ extends AExpression {
private final AExpression leftNode;
private final AExpression rightNode;
private final Operation operation;
public EBinary(int identifier, Location location, AExpression leftNode, AExpression rightNode, Operation operation) {
super(identifier, location);
this.operation = Objects.requireNonNull(operation);
this.leftNode = Objects.requireNonNull(leftNode);
this.rightNode = Objects.requireNonNull(rightNode);
}
public AExpression getLeftNode() {
return leftNode;
}
public AExpression getRightNode() {
return rightNode;
}
public Operation getOperation() {
return operation;
}
@Override
public <Scope> void visit(UserTreeVisitor<Scope> userTreeVisitor, Scope scope) {
userTreeVisitor.visitBinary(this, scope);
}
@Override
public <Scope> void visitChildren(UserTreeVisitor<Scope> userTreeVisitor, Scope scope) {
leftNode.visit(userTreeVisitor, scope);
rightNode.visit(userTreeVisitor, scope);
}
}
| EBinary |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/localtime/LocalTimeAssert_isAfter_Test.java | {
"start": 1227,
"end": 3531
} | class ____ extends LocalTimeAssertBaseTest {
@Test
void should_pass_if_actual_is_after_localTime_parameter() {
assertThat(AFTER).isAfter(REFERENCE);
}
@Test
void should_pass_if_actual_is_after_localTime_parameter_as_string() {
assertThat(AFTER).isAfter(REFERENCE.toString());
}
@Test
void should_fail_if_actual_is_equal_to_localTime_parameter() {
// WHEN
ThrowingCallable code = () -> assertThat(REFERENCE).isAfter(REFERENCE);
// THEN
assertThatAssertionErrorIsThrownBy(code).withMessage(shouldBeAfter(REFERENCE, REFERENCE).create());
}
@Test
void should_fail_if_actual_is_equal_to_localTime_as_string_parameter() {
// WHEN
ThrowingCallable code = () -> assertThat(REFERENCE).isAfter(REFERENCE.toString());
// THEN
assertThatAssertionErrorIsThrownBy(code).withMessage(shouldBeAfter(REFERENCE, REFERENCE).create());
}
@Test
void should_fail_if_actual_is_before_localTime_parameter() {
// WHEN
ThrowingCallable code = () -> assertThat(BEFORE).isAfter(REFERENCE);
// THEN
assertThatAssertionErrorIsThrownBy(code).withMessage(shouldBeAfter(BEFORE, REFERENCE).create());
}
@Test
void should_fail_if_actual_is_null() {
// GIVEN
LocalTime actual = null;
// WHEN
ThrowingCallable code = () -> assertThat(actual).isAfter(LocalTime.now());
// THEN
assertThatAssertionErrorIsThrownBy(code).withMessage(actualIsNull());
}
@Test
void should_fail_if_localTime_parameter_is_null() {
// GIVEN
LocalTime otherLocalTime = null;
// WHEN
ThrowingCallable code = () -> assertThat(LocalTime.now()).isAfter(otherLocalTime);
// THEN
assertThatIllegalArgumentException().isThrownBy(code)
.withMessage("The LocalTime to compare actual with should not be null");
}
@Test
void should_fail_if_localTime_as_string_parameter_is_null() {
// GIVEN
String otherLocalTimeAsString = null;
// WHEN
ThrowingCallable code = () -> assertThat(LocalTime.now()).isAfter(otherLocalTimeAsString);
// THEN
assertThatIllegalArgumentException().isThrownBy(code)
.withMessage("The String representing the LocalTime to compare actual with should not be null");
}
}
| LocalTimeAssert_isAfter_Test |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/rest/messages/DashboardConfigurationTest.java | {
"start": 1083,
"end": 1648
} | class ____ extends RestResponseMarshallingTestBase<DashboardConfiguration> {
@Override
protected Class<DashboardConfiguration> getTestResponseClass() {
return DashboardConfiguration.class;
}
@Override
protected DashboardConfiguration getTestResponseInstance() {
return new DashboardConfiguration(
1L,
"foobar",
42,
"version",
"revision",
new DashboardConfiguration.Features(true, true, true, false));
}
}
| DashboardConfigurationTest |
java | google__guice | core/test/com/google/inject/matcher/MatcherTest.java | {
"start": 7009,
"end": 7043
} | interface ____ {}
@Baz
static | Baz |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/VertxDevUILogBuildItem.java | {
"start": 270,
"end": 639
} | class ____ extends SimpleBuildItem {
private final RuntimeValue<SubmissionPublisher<String>> publisher;
public VertxDevUILogBuildItem(RuntimeValue<SubmissionPublisher<String>> publisher) {
this.publisher = publisher;
}
public RuntimeValue<SubmissionPublisher<String>> getPublisher() {
return this.publisher;
}
}
| VertxDevUILogBuildItem |
java | apache__kafka | connect/api/src/main/java/org/apache/kafka/connect/components/Versioned.java | {
"start": 950,
"end": 1158
} | interface ____ {
/**
* Get the version of this component.
*
* @return the version, formatted as a String. The version may not be {@code null} or empty.
*/
String version();
}
| Versioned |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/Assertions_assertThatCode_Test.java | {
"start": 1204,
"end": 5377
} | class ____ {
@Test
void can_invoke_late_assertion() {
// GIVEN
ThrowingCallable boom = raisingException("boom!");
// WHEN/THEN
thenCode(boom).isInstanceOf(Exception.class)
.hasMessageContaining("boom!");
}
@Test
void should_fail_when_asserting_no_exception_was_thrown_and_an_exception_was_thrown() {
// GIVEN
Exception exception = new Exception("boom");
ThrowingCallable boom = raisingException(exception);
// WHEN
var error = expectAssertionError(() -> assertThatCode(boom).doesNotThrowAnyException());
// THEN
then(error).hasMessage(shouldNotHaveThrown(exception).create());
}
@Test
void should_fail_when_asserting_no_exception_was_thrown_except_an_empty_list_and_an_exception_was_thrown() {
// GIVEN
Exception exception = new Exception("boom");
ThrowingCallable boom = raisingException(exception);
// WHEN
var error = expectAssertionError(() -> assertThatCode(boom).doesNotThrowAnyExceptionExcept());
// THEN
then(error).hasMessage(shouldNotHaveThrownExcept(exception).create());
}
@Test
void should_fail_when_asserting_no_exception_was_thrown_except_some_and_a_non_ignored_exception_was_thrown() {
// GIVEN
Exception exception = new IllegalArgumentException("boom");
ThrowingCallable boom = raisingException(exception);
// WHEN
var error = expectAssertionError(() -> assertThatCode(boom).doesNotThrowAnyExceptionExcept(IllegalStateException.class,
IOException.class));
// THEN
then(error).hasMessage(shouldNotHaveThrownExcept(exception, IllegalStateException.class, IOException.class).create());
}
@Test
void can_use_description_in_error_message() {
// GIVEN
ThrowingCallable boom = raisingException("boom");
// WHEN
var error = expectAssertionError(() -> assertThatCode(boom).as("Test").doesNotThrowAnyException());
// THEN
then(error).hasMessageStartingWith("[Test]");
}
@Test
void error_message_contains_stacktrace() {
// GIVEN
Exception exception = new Exception("boom");
ThrowingCallable boom = raisingException(exception);
// WHEN
var error = expectAssertionError(() -> assertThatCode(boom).doesNotThrowAnyException());
// THEN
then(error).hasMessageContainingAll("java.lang.Exception: boom",
"at org.assertj.tests.core/org.assertj.tests.core.api.Assertions_assertThatCode_Test.error_message_contains_stacktrace");
}
@Test
void should_succeed_when_asserting_no_exception_was_thrown() {
// GIVEN
ThrowingCallable silent = () -> {};
// WHEN/THEN
thenCode(silent).doesNotThrowAnyException();
}
@Test
void should_succeed_when_asserting_no_exception_was_thrown_except_an_empty_list() {
// GIVEN
ThrowingCallable silent = () -> {};
// WHEN/THEN
thenCode(silent).doesNotThrowAnyExceptionExcept();
}
@Test
void should_succeed_when_asserting_no_exception_was_thrown_except_some() {
// GIVEN
ThrowingCallable silent = () -> {};
// WHEN/THEN
thenCode(silent).doesNotThrowAnyExceptionExcept(IOException.class, IllegalStateException.class);
}
@Test
void should_succeed_when_asserting_no_exception_was_thrown_except_one_that_is_an_ignored() {
// GIVEN
ThrowingCallable boom = raisingException(new IllegalArgumentException("boom"));
// WHEN/THEN
thenCode(boom).doesNotThrowAnyExceptionExcept(IOException.class, IllegalArgumentException.class);
}
@Test
void should_succeed_when_asserting_no_exception_was_thrown_except_one_that_inherits_an_ignored_exception() {
// GIVEN
ThrowingCallable boom = raisingException(new IllegalArgumentException("boom"));
// WHEN/THEN
thenCode(boom).doesNotThrowAnyExceptionExcept(RuntimeException.class);
}
private ThrowingCallable raisingException(final String reason) {
return raisingException(new Exception(reason));
}
private ThrowingCallable raisingException(final Exception exception) {
return () -> {
throw exception;
};
}
}
| Assertions_assertThatCode_Test |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/StateTypeStrategy.java | {
"start": 1119,
"end": 1866
} | interface ____ extends TypeStrategy {
static StateTypeStrategy of(TypeStrategy typeStrategy) {
return new DefaultStateTypeStrategy(typeStrategy, null);
}
static StateTypeStrategy of(TypeStrategy typeStrategy, @Nullable Duration timeToLive) {
return new DefaultStateTypeStrategy(typeStrategy, timeToLive);
}
/**
* The time-to-live (TTL) duration that automatically cleans up the state entry.
*
* <p>Returning {@code Optional.empty()} will fall back to default behavior. Returning a value
* equal or greater than 0 means setting a custom TTL for this state entry and ignoring the
* global defaults.
*/
Optional<Duration> getTimeToLive(CallContext callContext);
}
| StateTypeStrategy |
java | netty__netty | transport-native-epoll/src/test/java/io/netty/channel/epoll/EpollSocketCloseForciblyTest.java | {
"start": 907,
"end": 1182
} | class ____ extends SocketCloseForciblyTest {
@Override
protected List<TestsuitePermutation.BootstrapComboFactory<ServerBootstrap, Bootstrap>> newFactories() {
return EpollSocketTestPermutation.INSTANCE.socketWithoutFastOpen();
}
}
| EpollSocketCloseForciblyTest |
java | apache__camel | components/camel-openapi-java/src/test/java/org/apache/camel/openapi/model/SampleComplexRequestType.java | {
"start": 1955,
"end": 2092
} | class ____ {
private long longField;
public long getLongField() {
return longField;
}
}
}
| InnerClass |
java | apache__thrift | lib/java/src/main/java/org/apache/thrift/TNonblockingMultiFetchClient.java | {
"start": 6388,
"end": 13387
} | class ____ implements Runnable {
private Selector selector;
/**
* main entry function for fetching.
*
* <p>Server responses are stored in TNonblocingMultiFetchClient.recvBuf, and fetch statistics
* is in TNonblockingMultiFetchClient.stats.
*
* <p>Sanity check for parameters has been done in TNonblockingMultiFetchClient before calling
* this function.
*/
public void run() {
long t1 = System.currentTimeMillis();
int numTotalServers = servers.size();
stats.setNumTotalServers(numTotalServers);
// buffer for receiving response from servers
recvBuf = new ByteBuffer[numTotalServers];
// buffer for sending request
ByteBuffer[] sendBuf = new ByteBuffer[numTotalServers];
long[] numBytesRead = new long[numTotalServers];
int[] frameSize = new int[numTotalServers];
boolean[] hasReadFrameSize = new boolean[numTotalServers];
try {
selector = Selector.open();
} catch (IOException ioe) {
LOGGER.error("Selector opens error", ioe);
return;
}
for (int i = 0; i < numTotalServers; i++) {
// create buffer to send request to server.
sendBuf[i] = requestBuf.duplicate();
// create buffer to read response's frame size from server
recvBuf[i] = ByteBuffer.allocate(4);
stats.incTotalRecvBufBytes(4);
InetSocketAddress server = servers.get(i);
SocketChannel s = null;
SelectionKey key = null;
try {
s = SocketChannel.open();
s.configureBlocking(false);
// now this method is non-blocking
s.connect(server);
key = s.register(selector, s.validOps());
// attach index of the key
key.attach(i);
} catch (Exception e) {
stats.incNumConnectErrorServers();
LOGGER.error("Set up socket to server {} error", server, e);
// free resource
if (s != null) {
try {
s.close();
} catch (Exception ex) {
LOGGER.error("failed to free up socket", ex);
}
}
if (key != null) {
key.cancel();
}
}
}
// wait for events
while (stats.getNumReadCompletedServers() + stats.getNumConnectErrorServers()
< stats.getNumTotalServers()) {
// if the thread is interrupted (e.g., task is cancelled)
if (Thread.currentThread().isInterrupted()) {
return;
}
try {
selector.select();
} catch (Exception e) {
LOGGER.error("Selector selects error", e);
continue;
}
Iterator<SelectionKey> it = selector.selectedKeys().iterator();
while (it.hasNext()) {
SelectionKey selKey = it.next();
it.remove();
// get previously attached index
int index = (Integer) selKey.attachment();
if (selKey.isValid() && selKey.isConnectable()) {
// if this socket throws an exception (e.g., connection refused),
// print error msg and skip it.
try {
SocketChannel sChannel = (SocketChannel) selKey.channel();
sChannel.finishConnect();
} catch (Exception e) {
stats.incNumConnectErrorServers();
LOGGER.error("Socket {} connects to server {} error", index, servers.get(index), e);
}
}
if (selKey.isValid() && selKey.isWritable() && sendBuf[index].hasRemaining()) {
// if this socket throws an exception, print error msg and
// skip it.
try {
SocketChannel sChannel = (SocketChannel) selKey.channel();
sChannel.write(sendBuf[index]);
} catch (Exception e) {
LOGGER.error("Socket {} writes to server {} error", index, servers.get(index), e);
}
}
if (selKey.isValid() && selKey.isReadable()) {
// if this socket throws an exception, print error msg and
// skip it.
try {
SocketChannel sChannel = (SocketChannel) selKey.channel();
int bytesRead = sChannel.read(recvBuf[index]);
if (bytesRead > 0) {
numBytesRead[index] += bytesRead;
if (!hasReadFrameSize[index] && recvBuf[index].remaining() == 0) {
// if the frame size has been read completely, then prepare
// to read the actual frame.
frameSize[index] = recvBuf[index].getInt(0);
if (frameSize[index] <= 0) {
stats.incNumInvalidFrameSize();
LOGGER.error(
"Read an invalid frame size {} from {}. Does the server use TFramedTransport?",
frameSize[index],
servers.get(index));
sChannel.close();
continue;
}
if (frameSize[index] + 4 > stats.getMaxResponseBytes()) {
stats.setMaxResponseBytes(frameSize[index] + 4);
}
if (frameSize[index] + 4 > maxRecvBufBytesPerServer) {
stats.incNumOverflowedRecvBuf();
LOGGER.error(
"Read frame size {} from {}, total buffer size would exceed limit {}",
frameSize[index],
servers.get(index),
maxRecvBufBytesPerServer);
sChannel.close();
continue;
}
// reallocate buffer for actual frame data
recvBuf[index] = ByteBuffer.allocate(frameSize[index] + 4);
recvBuf[index].putInt(frameSize[index]);
stats.incTotalRecvBufBytes(frameSize[index]);
hasReadFrameSize[index] = true;
}
if (hasReadFrameSize[index] && numBytesRead[index] >= frameSize[index] + 4) {
// has read all data
sChannel.close();
stats.incNumReadCompletedServers();
long t2 = System.currentTimeMillis();
stats.setReadTime(t2 - t1);
}
}
} catch (Exception e) {
LOGGER.error("Socket {} reads from server {} error", index, servers.get(index), e);
}
}
}
}
}
/** dispose any resource allocated */
public void close() {
try {
if (selector.isOpen()) {
for (SelectionKey selKey : selector.keys()) {
SocketChannel sChannel = (SocketChannel) selKey.channel();
sChannel.close();
}
selector.close();
}
} catch (IOException e) {
LOGGER.error("Free resource error", e);
}
}
}
}
| MultiFetch |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/TestWordStats.java | {
"start": 7030,
"end": 9380
} | class ____ to delete the output directory. Meant solely for
* use before and after the test is run; this is so next iterations of the
* test do not encounter a "file already exists" error.
*
* @param dir
* The directory to delete.
* @return Returns whether the deletion was successful or not.
*/
public static boolean deleteDir(File dir) {
if (dir.isDirectory()) {
String[] children = dir.list();
for (int i = 0; i < children.length; i++) {
boolean success = deleteDir(new File(dir, children[i]));
if (!success) {
System.out.println("Could not delete directory after test!");
return false;
}
}
}
// The directory is now empty so delete it
return dir.delete();
}
@BeforeEach public void setup() throws Exception {
deleteDir(new File(MEAN_OUTPUT));
deleteDir(new File(MEDIAN_OUTPUT));
deleteDir(new File(STDDEV_OUTPUT));
}
@Test
void testGetTheMean() throws Exception {
String args[] = new String[2];
args[0] = INPUT;
args[1] = MEAN_OUTPUT;
WordMean wm = new WordMean();
ToolRunner.run(new Configuration(), wm, args);
double mean = wm.getMean();
// outputs MUST match
WordMeanReader wr = new WordMeanReader();
assertEquals(mean, wr.read(INPUT), 0.0);
}
@Test
void testGetTheMedian() throws Exception {
String args[] = new String[2];
args[0] = INPUT;
args[1] = MEDIAN_OUTPUT;
WordMedian wm = new WordMedian();
ToolRunner.run(new Configuration(), wm, args);
double median = wm.getMedian();
// outputs MUST match
WordMedianReader wr = new WordMedianReader();
assertEquals(median, wr.read(INPUT), 0.0);
}
@Test
void testGetTheStandardDeviation() throws Exception {
String args[] = new String[2];
args[0] = INPUT;
args[1] = STDDEV_OUTPUT;
WordStandardDeviation wsd = new WordStandardDeviation();
ToolRunner.run(new Configuration(), wsd, args);
double stddev = wsd.getStandardDeviation();
// outputs MUST match
WordStdDevReader wr = new WordStdDevReader();
assertEquals(stddev, wr.read(INPUT), 0.0);
}
@AfterAll public static void cleanup() throws Exception {
deleteDir(new File(MEAN_OUTPUT));
deleteDir(new File(MEDIAN_OUTPUT));
deleteDir(new File(STDDEV_OUTPUT));
}
}
| designed |
java | playframework__playframework | core/play/src/test/java/play/mvc/CallTest.java | {
"start": 319,
"end": 5183
} | class ____ {
@Test
public void calShouldReturnCorrectUrlInPath() {
final TestCall call = new TestCall("/myurl", "GET");
assertEquals("/myurl", call.path());
}
@Test
public void callShouldReturnCorrectUrlAndFragmentInPath() {
final Call call = new TestCall("/myurl", "GET").withFragment("myfragment");
assertEquals("/myurl#myfragment", call.path());
}
@Test
public void absoluteURLWithRequestShouldHaveHTTPScheme() {
final Request req = new RequestBuilder().uri("http://playframework.com/playframework").build();
final TestCall call = new TestCall("/url", "GET");
assertEquals("http://playframework.com/url", call.absoluteURL(req));
}
@Test
public void absoluteURLWithRequestAndSecureParameterIsFalseShouldHaveHTTPScheme() {
final Request req = new RequestBuilder().uri("https://playframework.com/playframework").build();
final TestCall call = new TestCall("/url", "GET");
assertEquals("http://playframework.com/url", call.absoluteURL(req, false));
}
@Test
public void absoluteURLWithHostAndSecureParameterIsFalseShouldHaveHTTPScheme() {
final TestCall call = new TestCall("/url", "GET");
assertEquals("http://foobar.com/url", call.absoluteURL(false, "foobar.com"));
}
@Test
public void absoluteURLWithRequestShouldHaveHTTPSScheme() {
final Request req = new RequestBuilder().uri("https://playframework.com/playframework").build();
final TestCall call = new TestCall("/url", "GET");
assertEquals("https://playframework.com/url", call.absoluteURL(req));
}
@Test
public void absoluteUrlWithRequestAndSecureParameterIsTrueShouldHaveHTTPSScheme() {
final Request req = new RequestBuilder().uri("http://playframework.com/playframework").build();
final TestCall call = new TestCall("/url", "GET");
assertEquals("https://playframework.com/url", call.absoluteURL(req, true));
}
@Test
public void absoluteURLWithHostAndSecureParameterIsTrueShouldHaveHTTPSScheme() {
final TestCall call = new TestCall("/url", "GET");
assertEquals("https://foobar.com/url", call.absoluteURL(true, "foobar.com"));
}
@Test
public void webSocketURLWithRequestShouldHaveHTTPScheme() {
final Request req = new RequestBuilder().uri("http://playframework.com/playframework").build();
final TestCall call = new TestCall("/url", "GET");
assertEquals("ws://playframework.com/url", call.webSocketURL(req));
}
@Test
public void webSocketURLWithRequestAndSecureParameterIsFalseShouldHaveHTTPScheme() {
final Request req = new RequestBuilder().uri("https://playframework.com/playframework").build();
final TestCall call = new TestCall("/url", "GET");
assertEquals("ws://playframework.com/url", call.webSocketURL(req, false));
}
@Test
public void webSocketURLWithHostAndSecureParameterIsFalseShouldHaveHTTPScheme() {
final TestCall call = new TestCall("/url", "GET");
assertEquals("ws://foobar.com/url", call.webSocketURL(false, "foobar.com"));
}
@Test
public void webSocketURLWithRequestShouldHaveHTTPSScheme() {
final Request req = new RequestBuilder().uri("https://playframework.com/playframework").build();
final TestCall call = new TestCall("/url", "GET");
assertEquals("wss://playframework.com/url", call.webSocketURL(req));
}
@Test
public void webSocketURLWithRequestAndSecureParameterIsTrueShouldHaveHTTPSScheme() {
final Request req = new RequestBuilder().uri("http://playframework.com/playframework").build();
final TestCall call = new TestCall("/url", "GET");
assertEquals("wss://playframework.com/url", call.webSocketURL(req, true));
}
@Test
public void webSocketURLWithHostAndSecureParameterIsTrueShouldHaveHTTPSScheme() {
final TestCall call = new TestCall("/url", "GET");
assertEquals("wss://foobar.com/url", call.webSocketURL(true, "foobar.com"));
}
@Test
public void relativePathTakesStartPathFromRequest() {
final Request req = new RequestBuilder().uri("http://playframework.com/one/two").build();
final TestCall call = new TestCall("/one/two-b", "GET");
assertEquals("two-b", call.relativeTo(req));
}
@Test
public void relativePathTakesStartPathAsString() {
final String startPath = "/one/two";
final TestCall call = new TestCall("/one/two-b", "GET");
assertEquals("two-b", call.relativeTo(startPath));
}
@Test
public void relativePathIncludesFragment() {
final Request req = new RequestBuilder().uri("http://playframework.com/one/two").build();
final TestCall call = new TestCall("/one/two-b", "GET", "foo");
assertEquals("two-b#foo", call.relativeTo(req));
}
@Test
public void canonicalPathReturnedFromCall() {
final TestCall call = new TestCall("/one/.././two//three-b", "GET");
assertEquals("/two/three-b", call.canonical());
}
}
final | CallTest |
java | elastic__elasticsearch | x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregator.java | {
"start": 1631,
"end": 7891
} | class ____ extends MetricsAggregator {
final ValuesSource.Bytes valuesSource;
final DocValueFormat format;
/** Option to show the probability distribution for each character appearing in all terms. */
private final boolean showDistribution;
LongArray count;
IntArray minLength;
IntArray maxLength;
/** Accummulates the total length of all fields. Used for calculate average length and char frequencies. */
LongArray totalLength;
/** Map that stores the number of occurrences for each character. */
Map<Character, LongArray> charOccurrences;
StringStatsAggregator(
String name,
ValuesSource valuesSource,
boolean showDistribution,
DocValueFormat format,
AggregationContext context,
Aggregator parent,
Map<String, Object> metadata
) throws IOException {
super(name, context, parent, metadata);
this.showDistribution = showDistribution;
this.valuesSource = (ValuesSource.Bytes) valuesSource;
if (valuesSource != null) {
count = bigArrays().newLongArray(1, true);
totalLength = bigArrays().newLongArray(1, true);
minLength = bigArrays().newIntArray(1, false);
minLength.fill(0, minLength.size(), Integer.MAX_VALUE);
maxLength = bigArrays().newIntArray(1, false);
maxLength.fill(0, maxLength.size(), Integer.MIN_VALUE);
charOccurrences = new HashMap<>();
}
this.format = format;
}
@Override
public ScoreMode scoreMode() {
return (valuesSource != null && valuesSource.needsScores()) ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES;
}
@Override
public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, final LeafBucketCollector sub) throws IOException {
if (valuesSource == null) {
return LeafBucketCollector.NO_OP_COLLECTOR;
}
final SortedBinaryDocValues values = valuesSource.bytesValues(aggCtx.getLeafReaderContext());
return new LeafBucketCollectorBase(sub, values) {
@Override
public void collect(int doc, long bucket) throws IOException {
if (values.advanceExact(doc)) {
final long overSize = BigArrays.overSize(bucket + 1);
if (bucket >= count.size()) {
final long from = count.size();
count = bigArrays().resize(count, overSize);
totalLength = bigArrays().resize(totalLength, overSize);
minLength = bigArrays().resize(minLength, overSize);
maxLength = bigArrays().resize(maxLength, overSize);
minLength.fill(from, overSize, Integer.MAX_VALUE);
maxLength.fill(from, overSize, Integer.MIN_VALUE);
}
final int valuesCount = values.docValueCount();
count.increment(bucket, valuesCount);
for (int i = 0; i < valuesCount; i++) {
BytesRef value = values.nextValue();
if (value.length > 0) {
String valueStr = (String) format.format(value);
int length = valueStr.length();
totalLength.increment(bucket, length);
// Update min/max length for string
int min = Math.min(minLength.get(bucket), length);
int max = Math.max(maxLength.get(bucket), length);
minLength.set(bucket, min);
maxLength.set(bucket, max);
// Parse string chars and count occurrences
for (Character c : valueStr.toCharArray()) {
LongArray occ = charOccurrences.get(c);
if (occ == null) {
occ = bigArrays().newLongArray(overSize, true);
} else {
if (bucket >= occ.size()) {
occ = bigArrays().resize(occ, overSize);
}
}
occ.increment(bucket, 1);
charOccurrences.put(c, occ);
}
}
}
}
}
};
}
@Override
public InternalAggregation buildAggregation(long bucket) {
if (valuesSource == null || bucket >= count.size()) {
return buildEmptyAggregation();
}
// Convert Map entries: Character -> String and LongArray -> Long
// Include only characters that have at least one occurrence
Map<String, Long> occurrences = Maps.newMapWithExpectedSize(charOccurrences.size());
for (Map.Entry<Character, LongArray> e : charOccurrences.entrySet()) {
if (e.getValue().size() > bucket) {
long occ = e.getValue().get(bucket);
if (occ > 0) {
occurrences.put(e.getKey().toString(), occ);
}
}
}
return new InternalStringStats(
name,
count.get(bucket),
totalLength.get(bucket),
minLength.get(bucket),
maxLength.get(bucket),
occurrences,
showDistribution,
format,
metadata()
);
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalStringStats(
name,
0,
0,
Integer.MAX_VALUE,
Integer.MIN_VALUE,
Collections.emptyMap(),
showDistribution,
format,
metadata()
);
}
@Override
public void doClose() {
Releasables.close(maxLength, minLength, count, totalLength);
if (charOccurrences != null) {
Releasables.close(charOccurrences.values());
}
}
}
| StringStatsAggregator |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoAnnotationErrorsTest.java | {
"start": 1141,
"end": 1375
} | class ____ {
private static final JavaFileObject TEST_ANNOTATION =
JavaFileObjects.forSourceLines(
"com.example.TestAnnotation",
"package com.example;",
"",
"public @ | AutoAnnotationErrorsTest |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableBufferTimed.java | {
"start": 1432,
"end": 3278
} | class ____<T, U extends Collection<? super T>> extends AbstractFlowableWithUpstream<T, U> {
final long timespan;
final long timeskip;
final TimeUnit unit;
final Scheduler scheduler;
final Supplier<U> bufferSupplier;
final int maxSize;
final boolean restartTimerOnMaxSize;
public FlowableBufferTimed(Flowable<T> source, long timespan, long timeskip, TimeUnit unit, Scheduler scheduler, Supplier<U> bufferSupplier, int maxSize,
boolean restartTimerOnMaxSize) {
super(source);
this.timespan = timespan;
this.timeskip = timeskip;
this.unit = unit;
this.scheduler = scheduler;
this.bufferSupplier = bufferSupplier;
this.maxSize = maxSize;
this.restartTimerOnMaxSize = restartTimerOnMaxSize;
}
@Override
protected void subscribeActual(Subscriber<? super U> s) {
if (timespan == timeskip && maxSize == Integer.MAX_VALUE) {
source.subscribe(new BufferExactUnboundedSubscriber<>(
new SerializedSubscriber<>(s),
bufferSupplier, timespan, unit, scheduler));
return;
}
Scheduler.Worker w = scheduler.createWorker();
if (timespan == timeskip) {
source.subscribe(new BufferExactBoundedSubscriber<>(
new SerializedSubscriber<>(s),
bufferSupplier,
timespan, unit, maxSize, restartTimerOnMaxSize, w
));
return;
}
// Can't use maxSize because what to do if a buffer is full but its
// timespan hasn't been elapsed?
source.subscribe(new BufferSkipBoundedSubscriber<>(
new SerializedSubscriber<>(s),
bufferSupplier, timespan, timeskip, unit, w));
}
static final | FlowableBufferTimed |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/InvalidMagicNumberException.java | {
"start": 1241,
"end": 1993
} | class ____ extends IOException {
private static final long serialVersionUID = 1L;
private final boolean handshake4Encryption;
/**
* Creates a new InvalidMagicNumberException.
*
* @param magicNumber expected value
*/
public InvalidMagicNumberException(final int magicNumber,
final boolean handshake4Encryption) {
super(String.format("Received %x instead of %x from client.",
magicNumber, SASL_TRANSFER_MAGIC_NUMBER));
this.handshake4Encryption = handshake4Encryption;
}
/**
* Return true if it's handshake for encryption
*
* @return boolean true if it's handshake for encryption
*/
public boolean isHandshake4Encryption() {
return handshake4Encryption;
}
}
| InvalidMagicNumberException |
java | spring-projects__spring-boot | loader/spring-boot-loader-tools/src/main/java/org/springframework/boot/loader/tools/MainClassFinder.java | {
"start": 1575,
"end": 1752
} | class ____ a {@code public static main} method by performing a breadth first
* search.
*
* @author Phillip Webb
* @author Andy Wilkinson
* @since 1.0.0
*/
public abstract | with |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/annotation/ClassPathScanningCandidateComponentProvider.java | {
"start": 22445,
"end": 22762
} | class ____.
*/
public void clearCache() {
if (this.metadataReaderFactory instanceof CachingMetadataReaderFactory cmrf) {
// Clear cache in externally provided MetadataReaderFactory; this is a no-op
// for a shared cache since it'll be cleared by the ApplicationContext.
cmrf.clearCache();
}
}
}
| metadata |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/InternalTimerService.java | {
"start": 1281,
"end": 2857
} | interface ____<N> {
/** Returns the current processing time. */
long currentProcessingTime();
/** Returns the current event-time watermark. */
long currentWatermark();
/** Initialize watermark after restore. */
void initializeWatermark(long watermark);
/**
* Registers a timer to be fired when processing time passes the given time. The namespace you
* pass here will be provided when the timer fires.
*/
void registerProcessingTimeTimer(N namespace, long time);
/** Deletes the timer for the given key and namespace. */
void deleteProcessingTimeTimer(N namespace, long time);
/**
* Registers a timer to be fired when event time watermark passes the given time. The namespace
* you pass here will be provided when the timer fires.
*/
void registerEventTimeTimer(N namespace, long time);
/** Deletes the timer for the given key and namespace. */
void deleteEventTimeTimer(N namespace, long time);
/**
* Performs an action for each registered timer. The timer service will set the key context for
* the timers key before invoking the action.
*/
void forEachEventTimeTimer(BiConsumerWithException<N, Long, Exception> consumer)
throws Exception;
/**
* Performs an action for each registered timer. The timer service will set the key context for
* the timers key before invoking the action.
*/
void forEachProcessingTimeTimer(BiConsumerWithException<N, Long, Exception> consumer)
throws Exception;
}
| InternalTimerService |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/NMClientAsyncImpl.java | {
"start": 17495,
"end": 18156
} | class ____
extends AbstractEvent<ContainerEventType>{
private ContainerId containerId;
private NodeId nodeId;
private Token containerToken;
public ContainerEvent(ContainerId containerId, NodeId nodeId,
Token containerToken, ContainerEventType type) {
super(type);
this.containerId = containerId;
this.nodeId = nodeId;
this.containerToken = containerToken;
}
public ContainerId getContainerId() {
return containerId;
}
public NodeId getNodeId() {
return nodeId;
}
public Token getContainerToken() {
return containerToken;
}
}
protected static | ContainerEvent |
java | netty__netty | transport/src/main/java/io/netty/bootstrap/AbstractBootstrap.java | {
"start": 2055,
"end": 7528
} | class ____<B extends AbstractBootstrap<B, C>, C extends Channel> implements Cloneable {
@SuppressWarnings("unchecked")
private static final Map.Entry<ChannelOption<?>, Object>[] EMPTY_OPTION_ARRAY = new Map.Entry[0];
@SuppressWarnings("unchecked")
private static final Map.Entry<AttributeKey<?>, Object>[] EMPTY_ATTRIBUTE_ARRAY = new Map.Entry[0];
volatile EventLoopGroup group;
@SuppressWarnings("deprecation")
private volatile ChannelFactory<? extends C> channelFactory;
private volatile SocketAddress localAddress;
// The order in which ChannelOptions are applied is important they may depend on each other for validation
// purposes.
private final Map<ChannelOption<?>, Object> options = new LinkedHashMap<ChannelOption<?>, Object>();
private final Map<AttributeKey<?>, Object> attrs = new ConcurrentHashMap<AttributeKey<?>, Object>();
private volatile ChannelHandler handler;
private volatile ClassLoader extensionsClassLoader;
AbstractBootstrap() {
// Disallow extending from a different package.
}
AbstractBootstrap(AbstractBootstrap<B, C> bootstrap) {
group = bootstrap.group;
channelFactory = bootstrap.channelFactory;
handler = bootstrap.handler;
localAddress = bootstrap.localAddress;
synchronized (bootstrap.options) {
options.putAll(bootstrap.options);
}
attrs.putAll(bootstrap.attrs);
extensionsClassLoader = bootstrap.extensionsClassLoader;
}
/**
* The {@link EventLoopGroup} which is used to handle all the events for the to-be-created
* {@link Channel}
*/
public B group(EventLoopGroup group) {
ObjectUtil.checkNotNull(group, "group");
if (this.group != null) {
throw new IllegalStateException("group set already");
}
this.group = group;
return self();
}
@SuppressWarnings("unchecked")
private B self() {
return (B) this;
}
/**
* The {@link Class} which is used to create {@link Channel} instances from.
* You either use this or {@link #channelFactory(io.netty.channel.ChannelFactory)} if your
* {@link Channel} implementation has no no-args constructor.
*/
public B channel(Class<? extends C> channelClass) {
return channelFactory(new ReflectiveChannelFactory<C>(
ObjectUtil.checkNotNull(channelClass, "channelClass")
));
}
/**
* @deprecated Use {@link #channelFactory(io.netty.channel.ChannelFactory)} instead.
*/
@Deprecated
public B channelFactory(ChannelFactory<? extends C> channelFactory) {
ObjectUtil.checkNotNull(channelFactory, "channelFactory");
if (this.channelFactory != null) {
throw new IllegalStateException("channelFactory set already");
}
this.channelFactory = channelFactory;
return self();
}
/**
* {@link io.netty.channel.ChannelFactory} which is used to create {@link Channel} instances from
* when calling {@link #bind()}. This method is usually only used if {@link #channel(Class)}
* is not working for you because of some more complex needs. If your {@link Channel} implementation
* has a no-args constructor, its highly recommend to just use {@link #channel(Class)} to
* simplify your code.
*/
@SuppressWarnings({ "unchecked", "deprecation" })
public B channelFactory(io.netty.channel.ChannelFactory<? extends C> channelFactory) {
return channelFactory((ChannelFactory<C>) channelFactory);
}
/**
* The {@link SocketAddress} which is used to bind the local "end" to.
*/
public B localAddress(SocketAddress localAddress) {
this.localAddress = localAddress;
return self();
}
/**
* @see #localAddress(SocketAddress)
*/
public B localAddress(int inetPort) {
return localAddress(new InetSocketAddress(inetPort));
}
/**
* @see #localAddress(SocketAddress)
*/
public B localAddress(String inetHost, int inetPort) {
return localAddress(SocketUtils.socketAddress(inetHost, inetPort));
}
/**
* @see #localAddress(SocketAddress)
*/
public B localAddress(InetAddress inetHost, int inetPort) {
return localAddress(new InetSocketAddress(inetHost, inetPort));
}
/**
* Allow to specify a {@link ChannelOption} which is used for the {@link Channel} instances once they got
* created. Use a value of {@code null} to remove a previous set {@link ChannelOption}.
*/
public <T> B option(ChannelOption<T> option, T value) {
ObjectUtil.checkNotNull(option, "option");
synchronized (options) {
if (value == null) {
options.remove(option);
} else {
options.put(option, value);
}
}
return self();
}
/**
* Allow to specify an initial attribute of the newly created {@link Channel}. If the {@code value} is
* {@code null}, the attribute of the specified {@code key} is removed.
*/
public <T> B attr(AttributeKey<T> key, T value) {
ObjectUtil.checkNotNull(key, "key");
if (value == null) {
attrs.remove(key);
} else {
attrs.put(key, value);
}
return self();
}
/**
* Load {@link ChannelInitializerExtension}s using the given | AbstractBootstrap |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/asm/ClassWriter.java | {
"start": 36521,
"end": 36879
} | class ____ or adapters.</i>
*
* @param packageName name of the package in its internal form.
* @return the index of a new or already existing module reference item.
*/
public int newPackage(final String packageName) {
return symbolTable.addConstantPackage(packageName).index;
}
/**
* Adds a handle to the constant pool of the | generators |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/Component.java | {
"start": 1177,
"end": 5016
} | interface ____ extends CamelContextAware, Service {
/**
* Attempt to resolve an endpoint for the given URI if the component is capable of handling the URI.
* <p/>
* See {@link #useRawUri()} for controlling whether the passed in uri should be as-is (raw), or encoded (default).
*
* @param uri the URI to create; either raw or encoded (default)
* @return a newly created {@link Endpoint} or null if this component cannot create {@link Endpoint}
* instances using the given uri
* @throws Exception is thrown if error creating the endpoint
* @see #useRawUri()
*/
Endpoint createEndpoint(String uri) throws Exception;
/**
* Attempt to resolve an endpoint for the given URI if the component is capable of handling the URI.
* <p/>
* See {@link #useRawUri()} for controlling whether the passed in uri should be as-is (raw), or encoded (default).
*
* @param uri the URI to create; either raw or encoded (default)
* @param parameters the parameters for the endpoint
* @return a newly created {@link Endpoint} or null if this component cannot create {@link Endpoint}
* instances using the given uri
* @throws Exception is thrown if error creating the endpoint
* @see #useRawUri()
*/
Endpoint createEndpoint(String uri, Map<String, Object> parameters) throws Exception;
/**
* Whether to use raw or encoded uri, when creating endpoints.
* <p/>
* <b>Notice:</b> When using raw uris, then the parameter values is raw as well.
*
* @return <tt>true</tt> to use raw uris, <tt>false</tt> to use encoded uris (default).
*/
boolean useRawUri();
/**
* Gets the component {@link PropertyConfigurer}.
*
* @return the configurer, or <tt>null</tt> if the component does not support using property configurer.
*/
default PropertyConfigurer getComponentPropertyConfigurer() {
return null;
}
/**
* Gets the endpoint {@link PropertyConfigurer}.
*
* @return the configurer, or <tt>null</tt> if the endpoint does not support using property configurer.
*/
default PropertyConfigurer getEndpointPropertyConfigurer() {
return null;
}
/**
* Gets a list of supported extensions.
*
* @return the list of extensions.
*/
default Collection<Class<? extends ComponentExtension>> getSupportedExtensions() {
return Collections.emptyList();
}
/**
* Gets the extension of the given type.
*
* @param extensionType tye type of the extensions
* @return an optional extension
*/
default <T extends ComponentExtension> Optional<T> getExtension(Class<T> extensionType) {
return Optional.empty();
}
/**
* Set the {@link Component} context if the component is an instance of {@link ComponentAware}.
*/
static <T> T trySetComponent(T object, Component component) {
if (object instanceof ComponentAware componentAware) {
componentAware.setComponent(component);
}
return object;
}
/**
* Gets the default name of the component.
*/
default String getDefaultName() {
return null;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring options (the option must be marked as
* autowired) by looking up in the registry to find if there is a single instance of matching type, which then gets
* configured on the component. This can be used for automatic configuring JDBC data sources, JMS connection
* factories, AWS Clients, etc.
*/
default boolean isAutowiredEnabled() {
return true;
}
}
| Component |
java | qos-ch__slf4j | slf4j-api/src/main/java/org/slf4j/MDC.java | {
"start": 2438,
"end": 2885
} | class ____ {
static final String NULL_MDCA_URL = "http://www.slf4j.org/codes.html#null_MDCA";
private static final String MDC_ADAPTER_CANNOT_BE_NULL_MESSAGE = "MDCAdapter cannot be null. See also " + NULL_MDCA_URL;
static final String NO_STATIC_MDC_BINDER_URL = "http://www.slf4j.org/codes.html#no_static_mdc_binder";
static MDCAdapter MDC_ADAPTER;
/**
* An adapter to remove the key when done.
*/
public static | MDC |
java | playframework__playframework | documentation/manual/working/javaGuide/main/async/code/javaguide/async/JavaStream.java | {
"start": 6765,
"end": 7663
} | class ____ extends MockJavaAction {
Controller4(JavaHandlerComponents javaHandlerComponents) {
super(javaHandlerComponents);
}
// #chunked
public Result index() {
// Prepare a chunked text stream
Source<ByteString, ?> source =
Source.<ByteString>actorRef(256, OverflowStrategy.dropNew())
.mapMaterializedValue(
sourceActor -> {
sourceActor.tell(ByteString.fromString("kiki"), null);
sourceActor.tell(ByteString.fromString("foo"), null);
sourceActor.tell(ByteString.fromString("bar"), null);
sourceActor.tell(new Status.Success(NotUsed.getInstance()), null);
return NotUsed.getInstance();
});
// Serves this stream with 200 OK
return ok().chunked(source);
}
// #chunked
}
}
| Controller4 |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/concurrent/CircuitBreaker.java | {
"start": 860,
"end": 1625
} | interface ____ a <a
* href="https://martinfowler.com/bliki/CircuitBreaker.html">Circuit Breaker</a> component.
*
* <p>
* A <em>circuit breaker</em> can be used to protect an application against unreliable
* services or unexpected load. It typically monitors a specific resource. As long as this
* resource works as expected, it stays in state <em>closed</em>, meaning that the
* resource can be used. If problems are encountered when using the resource, the circuit
* breaker can switch into state <em>open</em>; then access to this resource is
* prohibited. Depending on a concrete implementation, it is possible that the circuit
* breaker switches back to state <em>closed</em> when the resource becomes available
* again.
* </p>
* <p>
* This | describing |
java | apache__camel | catalog/camel-route-parser/src/test/java/org/apache/camel/parser/java/RoasterSplitTokenizeTest.java | {
"start": 1452,
"end": 3466
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(RoasterSplitTokenizeTest.class);
@Test
void parse() throws Exception {
JavaClassSource clazz = (JavaClassSource) Roaster
.parse(new File("src/test/java/org/apache/camel/parser/java/SplitTokenizeTest.java"));
MethodSource<JavaClassSource> method = CamelJavaParserHelper.findConfigureMethod(clazz);
List<CamelEndpointDetails> details = new ArrayList<>();
RouteBuilderParser.parseRouteBuilderEndpoints(clazz, "src/test/java", "org/apache/camel/parser/SplitTokenizeTest.java",
details);
LOG.info("{}", details);
List<ParserResult> list = CamelJavaParserHelper.parseCamelConsumerUris(method, true, true);
for (ParserResult result : list) {
LOG.info("Consumer: {}", result.getElement());
}
assertEquals("direct:a", list.get(0).getElement());
assertEquals("direct:b", list.get(1).getElement());
assertEquals("direct:c", list.get(2).getElement());
assertEquals("direct:d", list.get(3).getElement());
assertEquals("direct:e", list.get(4).getElement());
assertEquals("direct:f", list.get(5).getElement());
list = CamelJavaParserHelper.parseCamelProducerUris(method, true, true);
for (ParserResult result : list) {
LOG.info("Producer: {}", result.getElement());
}
assertEquals("mock:split", list.get(0).getElement());
assertEquals("mock:split", list.get(1).getElement());
assertEquals("mock:split", list.get(2).getElement());
assertEquals("mock:split", list.get(3).getElement());
assertEquals("mock:split", list.get(4).getElement());
assertEquals("mock:split", list.get(5).getElement());
assertEquals(12, details.size());
assertEquals("direct:a", details.get(0).getEndpointUri());
assertEquals("mock:split", details.get(11).getEndpointUri());
}
}
| RoasterSplitTokenizeTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.