language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | google__gson | gson/src/test/java/com/google/gson/metrics/PerformanceTest.java | {
"start": 10114,
"end": 10333
} | class ____ {
final String field;
ClassWithField() {
this("");
}
ClassWithField(String field) {
this.field = field;
}
}
@SuppressWarnings("unused")
private static final | ClassWithField |
java | apache__spark | common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java | {
"start": 2565,
"end": 3687
} | class ____ extends ChannelOutboundHandlerAdapter {
private final int maxOutboundBlockSize;
private final SaslEncryptionBackend backend;
EncryptionHandler(SaslEncryptionBackend backend, int maxOutboundBlockSize) {
this.backend = backend;
this.maxOutboundBlockSize = maxOutboundBlockSize;
}
/**
* Wrap the incoming message in an implementation that will perform encryption lazily. This is
* needed to guarantee ordering of the outgoing encrypted packets - they need to be decrypted in
* the same order, and netty doesn't have an atomic ChannelHandlerContext.write() API, so it
* does not guarantee any ordering.
*/
@Override
public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise)
throws Exception {
ctx.write(new EncryptedMessage(backend, msg, maxOutboundBlockSize), promise);
}
@Override
public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
try {
backend.dispose();
} finally {
super.handlerRemoved(ctx);
}
}
}
private static | EncryptionHandler |
java | apache__spark | examples/src/main/java/org/apache/spark/examples/mllib/JavaStreamingTestExample.java | {
"start": 2301,
"end": 3973
} | class ____ {
private static int timeoutCounter = 0;
public static void main(String[] args) throws Exception {
if (args.length != 3) {
System.err.println("Usage: JavaStreamingTestExample " +
"<dataDir> <batchDuration> <numBatchesTimeout>");
System.exit(1);
}
String dataDir = args[0];
Duration batchDuration = Seconds.apply(Long.parseLong(args[1]));
int numBatchesTimeout = Integer.parseInt(args[2]);
SparkConf conf = new SparkConf().setMaster("local").setAppName("StreamingTestExample");
JavaStreamingContext ssc = new JavaStreamingContext(conf, batchDuration);
ssc.checkpoint(Utils.createTempDir(System.getProperty("java.io.tmpdir"), "spark").toString());
// $example on$
JavaDStream<BinarySample> data = ssc.textFileStream(dataDir).map(line -> {
String[] ts = line.split(",");
boolean label = Boolean.parseBoolean(ts[0]);
double value = Double.parseDouble(ts[1]);
return new BinarySample(label, value);
});
StreamingTest streamingTest = new StreamingTest()
.setPeacePeriod(0)
.setWindowSize(0)
.setTestMethod("welch");
JavaDStream<StreamingTestResult> out = streamingTest.registerStream(data);
out.print();
// $example off$
// Stop processing if test becomes significant or we time out
timeoutCounter = numBatchesTimeout;
out.foreachRDD(rdd -> {
timeoutCounter -= 1;
boolean anySignificant = !rdd.filter(v -> v.pValue() < 0.05).isEmpty();
if (timeoutCounter <= 0 || anySignificant) {
rdd.context().stop();
}
});
ssc.start();
ssc.awaitTermination();
}
}
| JavaStreamingTestExample |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualErrorTests.java | {
"start": 806,
"end": 1605
} | class ____ extends ErrorsForCasesWithoutExamplesTestCase {
@Override
protected List<TestCaseSupplier> cases() {
return paramsToSuppliers(LessThanOrEqualTests.parameters());
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new LessThanOrEqual(source, args.get(0), args.get(1));
}
@Override
protected Matcher<String> expectedTypeErrorMatcher(List<Set<DataType>> validPerPosition, List<DataType> signature) {
return equalTo(
errorMessageStringForBinaryOperators(
validPerPosition,
signature,
(l, p) -> "date_nanos, datetime, double, integer, ip, keyword, long, text, unsigned_long or version"
)
);
}
}
| LessThanOrEqualErrorTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/property/GetAndIsVariantGetterTest.java | {
"start": 1042,
"end": 3610
} | class ____ {
private static StandardServiceRegistry ssr;
@BeforeAll
public static void prepare() {
ssr = ServiceRegistryUtil.serviceRegistry();
}
@AfterAll
public static void release() {
if ( ssr != null ) {
StandardServiceRegistryBuilder.destroy( ssr );
}
}
@Test
@JiraKey(value = "HHH-10172")
public void testHbmXml() {
MappingException mappingException = assertThrows( MappingException.class, () ->
new MetadataSources( ssr )
.addResource( "org/hibernate/property/TheEntity.hbm.xml" )
.buildMetadata()
);
assertThat( mappingException.getMessage() ).endsWith( "variants of getter for property 'id'" );
}
@Test
@JiraKey(value = "HHH-10172")
public void testAnnotations() {
MappingException mappingException = assertThrows( MappingException.class, () ->
new MetadataSources( ssr )
.addAnnotatedClass( TheEntity.class )
.buildMetadata()
);
assertThat( mappingException.getMessage() ).startsWith( "Ambiguous persistent property methods" );
}
@Test
@JiraKey(value = "HHH-10242")
public void testAnnotationsCorrected() {
Metadata metadata = new MetadataSources( ssr )
.addAnnotatedClass( TheEntity2.class )
.buildMetadata();
assertThat( metadata.getEntityBinding( TheEntity2.class.getName() ).getIdentifier() ).isNotNull();
assertThat( metadata.getEntityBinding( TheEntity2.class.getName() ).getIdentifierProperty() ).isNotNull();
}
@Test
@JiraKey(value = "HHH-10309")
public void testAnnotationsFieldAccess() {
// this one should be ok because the AccessType is FIELD
Metadata metadata = new MetadataSources( ssr )
.addAnnotatedClass( AnotherEntity.class )
.buildMetadata();
assertThat( metadata.getEntityBinding( AnotherEntity.class.getName() ).getIdentifier() ).isNotNull();
assertThat( metadata.getEntityBinding( AnotherEntity.class.getName() ).getIdentifierProperty() ).isNotNull();
}
@Test
@JiraKey(value = "HHH-12046")
public void testInstanceStaticConflict() {
Metadata metadata = new MetadataSources( ssr )
.addAnnotatedClass( InstanceStaticEntity.class )
.buildMetadata();
assertThat( metadata.getEntityBinding( InstanceStaticEntity.class.getName() ).getIdentifier() ).isNotNull();
assertThat( metadata.getEntityBinding( InstanceStaticEntity.class.getName() ).getIdentifierProperty() ).isNotNull();
assertThat( metadata.getEntityBinding( InstanceStaticEntity.class.getName() ).hasProperty( "foo" ) ).isTrue();
ReflectHelper.findGetterMethod( InstanceStaticEntity.class, "foo" );
}
@Entity
public static | GetAndIsVariantGetterTest |
java | spring-projects__spring-framework | spring-webflux/src/main/java/org/springframework/web/reactive/config/WebFluxConfigurerComposite.java | {
"start": 1555,
"end": 4974
} | class ____ implements WebFluxConfigurer {
private final List<WebFluxConfigurer> delegates = new ArrayList<>();
public void addWebFluxConfigurers(List<WebFluxConfigurer> configurers) {
if (!CollectionUtils.isEmpty(configurers)) {
this.delegates.addAll(configurers);
}
}
@Override
public void configureHttpMessageCodecs(ServerCodecConfigurer configurer) {
this.delegates.forEach(delegate -> delegate.configureHttpMessageCodecs(configurer));
}
@Override
public void addFormatters(FormatterRegistry registry) {
this.delegates.forEach(delegate -> delegate.addFormatters(registry));
}
@Override
@SuppressWarnings("NullAway") // https://github.com/uber/NullAway/issues/1128
public @Nullable Validator getValidator() {
return createSingleBean(WebFluxConfigurer::getValidator, Validator.class);
}
@Override
@SuppressWarnings("NullAway") // https://github.com/uber/NullAway/issues/1128
public @Nullable MessageCodesResolver getMessageCodesResolver() {
return createSingleBean(WebFluxConfigurer::getMessageCodesResolver, MessageCodesResolver.class);
}
@Override
public void addCorsMappings(CorsRegistry registry) {
this.delegates.forEach(delegate -> delegate.addCorsMappings(registry));
}
@Override
public void configureBlockingExecution(BlockingExecutionConfigurer configurer) {
this.delegates.forEach(delegate -> delegate.configureBlockingExecution(configurer));
}
@Override
public void configureContentTypeResolver(RequestedContentTypeResolverBuilder builder) {
this.delegates.forEach(delegate -> delegate.configureContentTypeResolver(builder));
}
@Override
public void configureApiVersioning(ApiVersionConfigurer configurer) {
for (WebFluxConfigurer delegate : this.delegates) {
delegate.configureApiVersioning(configurer);
}
}
@Override
public void configurePathMatching(PathMatchConfigurer configurer) {
this.delegates.forEach(delegate -> delegate.configurePathMatching(configurer));
}
@Override
public void configureArgumentResolvers(ArgumentResolverConfigurer configurer) {
this.delegates.forEach(delegate -> delegate.configureArgumentResolvers(configurer));
}
@Override
public void addErrorResponseInterceptors(List<ErrorResponse.Interceptor> interceptors) {
for (WebFluxConfigurer delegate : this.delegates) {
delegate.addErrorResponseInterceptors(interceptors);
}
}
@Override
public void configureViewResolvers(ViewResolverRegistry registry) {
this.delegates.forEach(delegate -> delegate.configureViewResolvers(registry));
}
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
this.delegates.forEach(delegate -> delegate.addResourceHandlers(registry));
}
@Override
@SuppressWarnings("NullAway") // https://github.com/uber/NullAway/issues/1128
public @Nullable WebSocketService getWebSocketService() {
return createSingleBean(WebFluxConfigurer::getWebSocketService, WebSocketService.class);
}
private <T> @Nullable T createSingleBean(Function<WebFluxConfigurer, @Nullable T> factory, Class<T> beanType) {
List<T> result = this.delegates.stream().map(factory).filter(Objects::nonNull).toList();
if (result.isEmpty()) {
return null;
}
else if (result.size() == 1) {
return result.get(0);
}
else {
throw new IllegalStateException("More than one WebFluxConfigurer implements " +
beanType.getSimpleName() + " factory method.");
}
}
}
| WebFluxConfigurerComposite |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/pool/ValidConnectionChecker.java | {
"start": 714,
"end": 1016
} | interface ____ {
boolean isValidConnection(Connection c, String query, int validationQueryTimeout) throws Exception;
/**
* @param properties the properties to be used for configuration
* @since 0.2.21
*/
void configFromProperties(Properties properties);
}
| ValidConnectionChecker |
java | quarkusio__quarkus | extensions/panache/panache-hibernate-common/deployment/src/main/java/io/quarkus/panache/hibernate/common/deployment/PanacheHibernateCommonResourceProcessor.java | {
"start": 1405,
"end": 9802
} | class ____ {
private static final DotName DOTNAME_ENTITY = DotName.createSimple(Entity.class.getName());
private static final DotName DOTNAME_MAPPED_SUPERCLASS = DotName.createSimple(MappedSuperclass.class.getName());
private static final DotName DOTNAME_EMBEDDABLE = DotName.createSimple(Embeddable.class.getName());
private static final DotName DOTNAME_TRANSIENT = DotName.createSimple(Transient.class.getName());
private static final DotName DOTNAME_KOTLIN_METADATA = DotName.createSimple("kotlin.Metadata");
// This MUST be a separate step from replaceFieldAccess,
// to avoid a cycle in build steps:
//
// HibernateEnhancersRegisteredBuildItem
// needed for PanacheEntityClassesBuildItem
// needed for InterceptedStaticMethodsTransformersRegisteredBuildItem
// needed for HibernateEnhancersRegisteredBuildItem
@BuildStep
void findEntityClasses(CombinedIndexBuildItem index,
Optional<HibernateModelClassCandidatesForFieldAccessBuildItem> candidatesForFieldAccess,
BuildProducer<HibernateMetamodelForFieldAccessBuildItem> modelInfoBuildItem,
BuildProducer<PanacheEntityClassesBuildItem> fieldAccessEnhancedEntityClasses) {
if (candidatesForFieldAccess.isEmpty()) {
// Hibernate ORM is disabled
return;
}
MetamodelInfo modelInfo = new MetamodelInfo();
// Technically we wouldn't need to process embeddables, but we don't have an easy way to exclude them.
for (String entityClassName : candidatesForFieldAccess.get().getManagedClassNames()) {
ClassInfo entityClass = index.getIndex().getClassByName(DotName.createSimple(entityClassName));
if (entityClass == null) {
// Probably a synthetic entity, such as Envers' DefaultRevisionEntity.
// We don't need to generate accessors for those.
continue;
}
if (entityClass.annotationsMap().containsKey(DOTNAME_KOTLIN_METADATA)) {
// This is a Kotlin class.
// Historically we've never created accessors automatically for Kotlin,
// since Kotlin language features (properties) can be used instead.
continue;
}
modelInfo.addEntityModel(createEntityModel(entityClass));
}
// Share the metamodel for use in replaceFieldAccesses
modelInfoBuildItem.produce(new HibernateMetamodelForFieldAccessBuildItem(modelInfo));
Set<String> entitiesWithExternallyAccessibleFields = modelInfo.getEntitiesWithExternallyAccessibleFields();
if (entitiesWithExternallyAccessibleFields.isEmpty()) {
// There are no fields to be accessed in the first place.
return;
}
// Share with other extensions that we will generate accessors for some classes
fieldAccessEnhancedEntityClasses
.produce(new PanacheEntityClassesBuildItem(entitiesWithExternallyAccessibleFields));
}
@BuildStep
@Consume(HibernateEnhancersRegisteredBuildItem.class)
@Consume(InterceptedStaticMethodsTransformersRegisteredBuildItem.class)
void replaceFieldAccesses(CombinedIndexBuildItem index,
ApplicationArchivesBuildItem applicationArchivesBuildItem,
Optional<HibernateMetamodelForFieldAccessBuildItem> modelInfoBuildItem,
BuildProducer<BytecodeTransformerBuildItem> transformers) {
if (modelInfoBuildItem.isEmpty()) {
// Hibernate ORM is disabled
return;
}
MetamodelInfo modelInfo = modelInfoBuildItem.get().getMetamodelInfo();
Set<String> entitiesWithExternallyAccessibleFields = modelInfo.getEntitiesWithExternallyAccessibleFields();
if (entitiesWithExternallyAccessibleFields.isEmpty()) {
// There are no fields to be accessed in the first place.
return;
}
// Generate accessors for externally accessible fields in entities, mapped superclasses
// (and embeddables, see where we build modelInfo above).
PanacheJpaEntityAccessorsEnhancer entityAccessorsEnhancer = new PanacheJpaEntityAccessorsEnhancer(index.getIndex(),
modelInfo);
for (String entityClassName : entitiesWithExternallyAccessibleFields) {
final BytecodeTransformerBuildItem transformation = new BytecodeTransformerBuildItem.Builder()
.setClassToTransform(entityClassName)
.setCacheable(true)
.setVisitorFunction(entityAccessorsEnhancer)
.build();
transformers.produce(transformation);
}
// Replace field access in application code with calls to accessors
PanacheFieldAccessEnhancer panacheFieldAccessEnhancer = new PanacheFieldAccessEnhancer(modelInfo);
Map<String, Set<String>> classesUsingEntities = new HashMap<>();
// transform all users of those classes
for (String entityClassName : entitiesWithExternallyAccessibleFields) {
for (ClassInfo userClass : index.getIndex().getKnownUsers(entityClassName)) {
String userClassName = userClass.name().toString('.');
classesUsingEntities.computeIfAbsent(userClassName, k -> new HashSet<>())
.add(entityClassName.replace(".", "/"));
}
}
for (Entry<String, Set<String>> classUsingEntities : classesUsingEntities.entrySet()) {
//The following build item is not marked as CacheAble intentionally: see also https://github.com/quarkusio/quarkus/pull/40192#discussion_r1590605375.
//It shouldn't be too hard to improve on this by checking the related entities haven't been changed
//via LiveReloadBuildItem (#isLiveReload() && #getChangeInformation()) but I'm not comfortable in making this
//change without having solid integration tests.
final BytecodeTransformerBuildItem transformation = new BytecodeTransformerBuildItem.Builder()
.setClassToTransform(classUsingEntities.getKey())
.setCacheable(false) // TODO this would be nice to improve on: see note above.
.setVisitorFunction(panacheFieldAccessEnhancer)
.setRequireConstPoolEntry(classUsingEntities.getValue())
.build();
transformers.produce(transformation);
}
}
private EntityModel createEntityModel(ClassInfo classInfo) {
EntityModel entityModel = new EntityModel(classInfo);
// Unfortunately, at the moment Hibernate ORM's enhancement ignores XML mapping,
// so we need to be careful when we enhance private fields,
// because the corresponding `$_hibernate_{read/write}_*()` methods
// will only be generated for classes mapped through *annotations*.
boolean isManaged = classInfo.hasAnnotation(DOTNAME_ENTITY)
|| classInfo.hasAnnotation(DOTNAME_MAPPED_SUPERCLASS)
|| classInfo.hasAnnotation(DOTNAME_EMBEDDABLE);
boolean willBeEnhancedByHibernateOrm = isManaged
// Records are immutable, thus never enhanced
&& !classInfo.isRecord();
for (FieldInfo fieldInfo : classInfo.fields()) {
String name = fieldInfo.name();
if (!Modifier.isStatic(fieldInfo.flags())
&& !fieldInfo.hasAnnotation(DOTNAME_TRANSIENT)) {
String librarySpecificGetterName;
String librarySpecificSetterName;
if (willBeEnhancedByHibernateOrm) {
librarySpecificGetterName = EnhancerConstants.PERSISTENT_FIELD_READER_PREFIX + name;
librarySpecificSetterName = EnhancerConstants.PERSISTENT_FIELD_WRITER_PREFIX + name;
} else {
librarySpecificGetterName = null;
librarySpecificSetterName = null;
}
entityModel.addField(new EntityField(name, DescriptorUtils.typeToString(fieldInfo.type()),
EntityField.Visibility.get(fieldInfo.flags()),
librarySpecificGetterName, librarySpecificSetterName));
}
}
return entityModel;
}
}
| PanacheHibernateCommonResourceProcessor |
java | quarkusio__quarkus | extensions/quartz/runtime/src/main/java/io/quarkus/quartz/runtime/QuartzRuntimeConfig.java | {
"start": 4793,
"end": 5024
} | interface ____ {
/**
* The quartz misfire policy for this job.
*/
@WithDefault("smart-policy")
@WithParentName
QuartzMisfirePolicy misfirePolicy();
}
| QuartzMisfirePolicyConfig |
java | apache__avro | lang/java/trevni/core/src/main/java/org/apache/trevni/InputFile.java | {
"start": 1025,
"end": 1586
} | class ____ implements Input {
private FileChannel channel;
/** Construct for the given file. */
public InputFile(File file) throws IOException {
this.channel = new FileInputStream(file).getChannel();
}
@Override
public long length() throws IOException {
return channel.size();
}
@Override
public int read(long position, byte[] b, int start, int len) throws IOException {
return channel.read(ByteBuffer.wrap(b, start, len), position);
}
@Override
public void close() throws IOException {
channel.close();
}
}
| InputFile |
java | apache__camel | components/camel-ignite/src/generated/java/org/apache/camel/component/ignite/set/IgniteSetEndpointUriFactory.java | {
"start": 520,
"end": 2299
} | class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":name";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(6);
props.add("configuration");
props.add("lazyStartProducer");
props.add("name");
props.add("operation");
props.add("propagateIncomingBodyIfNoReturnValue");
props.add("treatCollectionsAsCacheObjects");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
SECRET_PROPERTY_NAMES = Collections.emptySet();
MULTI_VALUE_PREFIXES = Collections.emptyMap();
}
@Override
public boolean isEnabled(String scheme) {
return "ignite-set".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "name", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
| IgniteSetEndpointUriFactory |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/FillMaskResults.java | {
"start": 584,
"end": 2678
} | class ____ extends NlpClassificationInferenceResults {
public static final String NAME = "fill_mask_result";
private final String predictedSequence;
public FillMaskResults(
String classificationLabel,
String predictedSequence,
List<TopClassEntry> topClasses,
String resultsField,
Double predictionProbability,
boolean isTruncated
) {
super(classificationLabel, topClasses, resultsField, predictionProbability, isTruncated);
this.predictedSequence = predictedSequence;
}
public FillMaskResults(StreamInput in) throws IOException {
super(in);
this.predictedSequence = in.readString();
}
@Override
public void doWriteTo(StreamOutput out) throws IOException {
super.doWriteTo(out);
out.writeString(predictedSequence);
}
public String getPredictedSequence() {
return predictedSequence;
}
@Override
void addMapFields(Map<String, Object> map) {
super.addMapFields(map);
map.put(resultsField + "_sequence", predictedSequence);
}
@Override
public Map<String, Object> asMap(String outputField) {
var map = super.asMap(outputField);
map.put(outputField + "_sequence", predictedSequence);
return map;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public void doXContentBody(XContentBuilder builder, Params params) throws IOException {
super.doXContentBody(builder, params);
builder.field(resultsField + "_sequence", predictedSequence);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (super.equals(o) == false) return false;
FillMaskResults that = (FillMaskResults) o;
return Objects.equals(predictedSequence, that.predictedSequence);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), predictedSequence);
}
}
| FillMaskResults |
java | elastic__elasticsearch | build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java | {
"start": 9212,
"end": 12589
} | class ____ implements Plugin<Project> {
@Inject
public abstract BuildEventsListenerRegistry getEventsListenerRegistry();
@SuppressWarnings("checkstyle:RedundantModifier")
@Inject
public TestClustersHookPlugin() {}
public void apply(Project project) {
if (project != project.getRootProject()) {
throw new IllegalStateException(this.getClass().getName() + " can only be applied to the root project.");
}
Provider<TestClustersRegistry> registryProvider = GradleUtils.getBuildService(
project.getGradle().getSharedServices(),
REGISTRY_SERVICE_NAME
);
Provider<TaskEventsService> testClusterTasksService = project.getGradle()
.getSharedServices()
.registerIfAbsent(TEST_CLUSTER_TASKS_SERVICE, TaskEventsService.class, spec -> {
spec.getParameters().getRegistry().set(registryProvider);
});
TestClustersRegistry registry = registryProvider.get();
// When we know what tasks will run, we claim the clusters of those task to differentiate between clusters
// that are defined in the build script and the ones that will actually be used in this invocation of gradle
// we use this information to determine when the last task that required the cluster executed so that we can
// terminate the cluster right away and free up resources.
configureClaimClustersHook(project.getGradle(), registry);
// Before each task, we determine if a cluster needs to be started for that task.
configureStartClustersHook(project.getGradle());
// After each task we determine if there are clusters that are no longer needed.
getEventsListenerRegistry().onTaskCompletion(testClusterTasksService);
}
private static void configureClaimClustersHook(Gradle gradle, TestClustersRegistry registry) {
// Once we know all the tasks that need to execute, we claim all the clusters that belong to those and count the
// claims so we'll know when it's safe to stop them.
gradle.getTaskGraph().whenReady(taskExecutionGraph -> {
taskExecutionGraph.getAllTasks()
.stream()
.filter(task -> task instanceof TestClustersAware)
.map(task -> (TestClustersAware) task)
.flatMap(task -> task.getClusters().stream())
.forEach(registry::claimCluster);
});
}
private void configureStartClustersHook(Gradle gradle) {
gradle.getTaskGraph().whenReady(taskExecutionGraph -> {
taskExecutionGraph.getAllTasks()
.stream()
.filter(task -> task instanceof TestClustersAware)
.map(task -> (TestClustersAware) task)
.forEach(awareTask -> {
awareTask.doFirst(task -> {
awareTask.beforeStart();
awareTask.getClusters().forEach(awareTask.getRegistry().get()::maybeStartCluster);
});
});
});
}
}
static public abstract | TestClustersHookPlugin |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/hql/JoinOnClauseTest.java | {
"start": 3142,
"end": 3400
} | class ____{
@Id
long id;
public Price() {
}
public Price(int amount, String currency) {
this.amount = amount;
this.currency = currency;
}
int amount;
String currency;
}
@Entity(name = "Book")
@Table(name="BOOK")
public static | Price |
java | google__gson | extras/src/main/java/com/google/gson/interceptors/Intercept.java | {
"start": 1877,
"end": 2084
} | class ____ provides the methods that should be invoked after an instance has been
* deserialized.
*/
@SuppressWarnings("rawtypes")
public Class<? extends JsonPostDeserializer> postDeserialize();
}
| that |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/postgresql/ast/stmt/PGVacuumStatement.java | {
"start": 1170,
"end": 3973
} | class ____ extends SQLStatementImpl implements PGSQLStatement {
private boolean full;
private boolean freeze;
private boolean verbose;
private boolean skipLocked;
private boolean analyze;
private boolean disablePageSkipping;
private boolean processToast;
private boolean truncate;
// gaussDb specific
private boolean deltaMerge;
private boolean hdfsDirectory;
private List<SQLExprTableSource> tableSources = new ArrayList<>();
public PGVacuumStatement(DbType dbType) {
super.dbType = dbType;
}
public boolean isVerbose() {
return verbose;
}
public void setVerbose(boolean verbose) {
this.verbose = verbose;
}
public boolean isSkipLocked() {
return skipLocked;
}
public void setSkipLocked(boolean skipLocked) {
this.skipLocked = skipLocked;
}
public List<SQLExprTableSource> getTableSources() {
return tableSources;
}
public void setTableSources(List<SQLExprTableSource> tableSources) {
this.tableSources = tableSources;
}
public boolean isFull() {
return full;
}
public void setFull(boolean full) {
this.full = full;
}
public boolean isFreeze() {
return freeze;
}
public void setFreeze(boolean freeze) {
this.freeze = freeze;
}
public boolean isAnalyze() {
return analyze;
}
public void setAnalyze(boolean analyze) {
this.analyze = analyze;
}
public boolean isDisablePageSkipping() {
return disablePageSkipping;
}
public void setDisablePageSkipping(boolean disablePageSkipping) {
this.disablePageSkipping = disablePageSkipping;
}
public boolean isProcessToast() {
return processToast;
}
public void setProcessToast(boolean processToast) {
this.processToast = processToast;
}
public boolean isTruncate() {
return truncate;
}
public void setTruncate(boolean truncate) {
this.truncate = truncate;
}
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor instanceof PGASTVisitor) {
accept0((PGASTVisitor) visitor);
}
}
@Override
public void accept0(PGASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, tableSources);
}
visitor.endVisit(this);
}
public boolean isDeltaMerge() {
return deltaMerge;
}
public void setDeltaMerge(boolean deltaMerge) {
this.deltaMerge = deltaMerge;
}
public boolean isHdfsDirectory() {
return hdfsDirectory;
}
public void setHdfsDirectory(boolean hdfsDirectory) {
this.hdfsDirectory = hdfsDirectory;
}
}
| PGVacuumStatement |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/resourcemanager/slotmanager/ResourceAllocationResult.java | {
"start": 1348,
"end": 3093
} | class ____ {
private final Set<JobID> unfulfillableJobs;
private final Map<JobID, Map<InstanceID, ResourceCounter>> allocationsOnRegisteredResources;
private final List<PendingTaskManager> pendingTaskManagersToAllocate;
private final Map<PendingTaskManagerId, Map<JobID, ResourceCounter>>
allocationsOnPendingResources;
private ResourceAllocationResult(
Set<JobID> unfulfillableJobs,
Map<JobID, Map<InstanceID, ResourceCounter>> allocationsOnRegisteredResources,
List<PendingTaskManager> pendingTaskManagersToAllocate,
Map<PendingTaskManagerId, Map<JobID, ResourceCounter>> allocationsOnPendingResources) {
this.unfulfillableJobs = unfulfillableJobs;
this.allocationsOnRegisteredResources = allocationsOnRegisteredResources;
this.pendingTaskManagersToAllocate = pendingTaskManagersToAllocate;
this.allocationsOnPendingResources = allocationsOnPendingResources;
}
public List<PendingTaskManager> getPendingTaskManagersToAllocate() {
return Collections.unmodifiableList(pendingTaskManagersToAllocate);
}
public Set<JobID> getUnfulfillableJobs() {
return Collections.unmodifiableSet(unfulfillableJobs);
}
public Map<JobID, Map<InstanceID, ResourceCounter>> getAllocationsOnRegisteredResources() {
return Collections.unmodifiableMap(allocationsOnRegisteredResources);
}
public Map<PendingTaskManagerId, Map<JobID, ResourceCounter>>
getAllocationsOnPendingResources() {
return Collections.unmodifiableMap(allocationsOnPendingResources);
}
public static Builder builder() {
return new Builder();
}
public static | ResourceAllocationResult |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_2100/Issue2179.java | {
"start": 4317,
"end": 4896
} | class ____ {
@JSONField(name = "l_k_assbalv4", deserializeUsing = EnumAwareSerializer2.class)
private ProductType2 type;
private ProductType2 type1;
public Model2(ProductType2 type, ProductType2 type1) {
this.type = type;
this.type1 = type1;
}
public ProductType2 getType() {
return type;
}
public void setType(ProductType2 type) {
this.type = type;
}
public ProductType2 getType1() {
return type1;
}
public void setType1(ProductType2 type1) {
this.type1 = type1;
}
}
@JSONType(serializeEnumAsJavaBean = true)
public static | Model2 |
java | alibaba__fastjson | src/test/java/com/alibaba/json/test/tmall/EngineResult.java | {
"start": 159,
"end": 426
} | class ____ {
@JSONField(name = "pinglun")
private RateResult rateResult;
public RateResult getRateResult () {
return rateResult;
}
public void setRateResult (RateResult rateResult) {
this.rateResult = rateResult;
}
}
| EngineResult |
java | quarkusio__quarkus | independent-projects/bootstrap/core/src/test/java/io/quarkus/bootstrap/resolver/test/ExclusionsTestCase.java | {
"start": 255,
"end": 1281
} | class ____ extends CollectDependenciesBase {
@Override
protected void setupDependencies() {
final TsArtifact requiredTransitive = new TsArtifact("required-transitive")
.addDependency(
new TsArtifact("excluded-dep", "2")
.addDependency(new TsArtifact("other-dep")));
install(requiredTransitive, true);
final TsArtifact otherDep2 = new TsArtifact("other-dep", "2");
install(otherDep2, true);
final TsArtifact otherRequiredTransitive = new TsArtifact("other-required-transitive")
.addDependency(otherDep2);
install(otherRequiredTransitive, true);
installAsDep(
new TsArtifact("required-dep1")
.addDependency(
new TsDependency(requiredTransitive)
.exclude("excluded-dep"))
.addDependency(otherRequiredTransitive));
}
}
| ExclusionsTestCase |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/dates/Dates_assertHasMonth_Test.java | {
"start": 1410,
"end": 3204
} | class ____ extends DatesBaseTest {
@Test
void should_fail_if_actual_has_not_given_month() {
AssertionInfo info = someInfo();
int month = 5;
Throwable error = catchThrowable(() -> dates.assertHasMonth(info, actual, month));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldHaveDateField(actual, "month", month));
}
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> dates.assertHasMonth(someInfo(), null, 1))
.withMessage(actualIsNull());
}
@Test
void should_pass_if_actual_has_given_month() {
dates.assertHasMonth(someInfo(), actual, 1);
}
@Test
void should_fail_if_actual_has_not_given_month_whatever_custom_comparison_strategy_is() {
AssertionInfo info = someInfo();
int month = 5;
Throwable error = catchThrowable(() -> datesWithCustomComparisonStrategy.assertHasMonth(info, actual, month));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldHaveDateField(actual, "month", month));
}
@Test
void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> datesWithCustomComparisonStrategy.assertHasMonth(someInfo(),
null, 1))
.withMessage(actualIsNull());
}
@Test
void should_pass_if_actual_has_given_month_whatever_custom_comparison_strategy_is() {
datesWithCustomComparisonStrategy.assertHasMonth(someInfo(), actual, 1);
}
}
| Dates_assertHasMonth_Test |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/bulk/WriteAckDelay.java | {
"start": 1021,
"end": 2669
} | class ____ implements Consumer<Runnable> {
// Controls the interval in which write acknowledgement scheduled task will be executed
public static final Setting<TimeValue> WRITE_ACK_DELAY_INTERVAL = timeSetting(
"indices.write_ack_delay_interval",
TimeValue.ZERO,
Setting.Property.NodeScope
);
// Controls a max time bound after which the write acknowledgements will be completed after the scheduling task runs
public static final Setting<TimeValue> WRITE_ACK_DELAY_RANDOMNESS_BOUND = timeSetting(
"indices.write_ack_delay_randomness_bound",
TimeValue.timeValueMillis(70),
Setting.Property.NodeScope
);
private static final Logger logger = LogManager.getLogger(WriteAckDelay.class);
private final ThreadPool threadPool;
private final ConcurrentLinkedQueue<Runnable> writeCallbacks = new ConcurrentLinkedQueue<>();
private final TimeValue writeDelayInterval;
private final long writeDelayRandomnessBoundMillis;
public WriteAckDelay(long writeDelayIntervalNanos, long writeDelayRandomnessBoundMillis, ThreadPool threadPool) {
this.writeDelayInterval = TimeValue.timeValueNanos(writeDelayIntervalNanos);
this.writeDelayRandomnessBoundMillis = writeDelayRandomnessBoundMillis;
this.threadPool = threadPool;
this.threadPool.scheduleWithFixedDelay(
new ScheduleTask(),
TimeValue.timeValueNanos(writeDelayIntervalNanos),
this.threadPool.generic()
);
}
@Override
public void accept(Runnable runnable) {
writeCallbacks.add(runnable);
}
private | WriteAckDelay |
java | square__moshi | examples/src/main/java/com/squareup/moshi/recipes/IncludeNullsForAnnotatedTypes.java | {
"start": 1854,
"end": 1946
} | class ____ {
String name;
String emailAddress;
Car favoriteCar;
}
static | Driver |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ReturnValueIgnoredTest.java | {
"start": 11187,
"end": 11587
} | class ____ {
void f(Function<Integer, Void> f) {
f.apply(0);
}
}
""")
.doTest();
}
@Test
public void ignoreInTests() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import static org.junit.Assert.fail;
import java.util.function.Function;
| Test |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/JMTMRegistrationSuccess.java | {
"start": 1193,
"end": 1585
} | class ____ extends RegistrationResponse.Success {
private static final long serialVersionUID = -3528383155961318929L;
private final ResourceID resourceID;
public JMTMRegistrationSuccess(ResourceID resourceID) {
this.resourceID = Preconditions.checkNotNull(resourceID);
}
public ResourceID getResourceID() {
return resourceID;
}
}
| JMTMRegistrationSuccess |
java | processing__processing4 | app/src/processing/app/platform/LinuxPlatform.java | {
"start": 1135,
"end": 6061
} | class ____ extends DefaultPlatform {
String homeDir;
public void initBase(Base base) {
super.initBase(base);
JFrame.setDefaultLookAndFeelDecorated(true);
System.setProperty("flatlaf.menuBarEmbedded", "true");
// Set X11 WM_CLASS property which is used as the application
// name by Gnome 3 and other window managers.
// https://github.com/processing/processing/issues/2534
// For Java 17, this hack requires an addition to the command line:
// --add-opens=java.desktop/sun.awt.X11=ALL-UNNAMED
try {
Toolkit xToolkit = Toolkit.getDefaultToolkit();
java.lang.reflect.Field awtAppClassNameField =
xToolkit.getClass().getDeclaredField("awtAppClassName");
awtAppClassNameField.setAccessible(true);
awtAppClassNameField.set(xToolkit, "Processing");
} catch(Exception e) {
// In case the implementation details change
e.printStackTrace();
}
}
// The default Look & Feel is set in preferences.txt
// As of 3.0a6, defaults.txt is set to Nimbus for Linux.
// Java sets user.home to be /root for execution with sudo.
// This method attempts to use the user's real home directory instead.
public String getHomeDir() {
if (homeDir == null) {
// get home directory of SUDO_USER if set, else use user.home
homeDir = System.getProperty("user.home");
String sudoUser = System.getenv("SUDO_USER");
if (sudoUser != null && sudoUser.length() != 0) {
try {
homeDir = getHomeDir(sudoUser);
} catch (Exception ignored) { }
}
}
return homeDir;
}
static public String getHomeDir(String user) throws Exception {
Process p = PApplet.exec("/bin/sh", "-c", "echo ~" + user);
return PApplet.createReader(p.getInputStream()).readLine();
}
@Override
public File getSettingsFolder() throws Exception {
File override = Base.getSettingsOverride();
if (override != null) {
return override;
}
// https://github.com/processing/processing4/issues/203
// https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
File configHome = null;
// Check to see if the user has set a different location for their config
String configHomeEnv = System.getenv("XDG_CONFIG_HOME");
if (configHomeEnv != null && !configHomeEnv.isBlank()) {
configHome = new File(configHomeEnv);
if (!configHome.exists()) {
Messages.err("XDG_CONFIG_HOME is set to " + configHomeEnv + " but does not exist.");
configHome = null; // don't use non-existent folder
}
}
String snapUserCommon = System.getenv("SNAP_USER_COMMON");
if (snapUserCommon != null && !snapUserCommon.isBlank()) {
configHome = new File(snapUserCommon);
}
// If not set properly, use the default
if (configHome == null) {
configHome = new File(getHomeDir(), ".config");
}
return new File(configHome, "processing");
}
@Override
public File getDefaultSketchbookFolder() throws Exception {
return new File(getHomeDir(), "sketchbook");
}
@Override
public void openURL(String url) throws Exception {
if (Desktop.isDesktopSupported()) {
super.openURL(url);
} else if (openFolderAvailable()) {
String launcher = Preferences.get("launcher"); // guaranteed non-null
Runtime.getRuntime().exec(new String[] { launcher, url });
} else {
System.err.println("No launcher set, cannot open " + url);
}
}
@Override
public boolean openFolderAvailable() {
if (Preferences.get("launcher") != null) {
return true;
}
// Attempt to use xdg-open
try {
Process p = Runtime.getRuntime().exec(new String[] { "xdg-open" });
p.waitFor();
Preferences.set("launcher", "xdg-open");
return true;
} catch (Exception ignored) { }
// Attempt to use gnome-open
try {
Process p = Runtime.getRuntime().exec(new String[] { "gnome-open" });
p.waitFor();
// Not installed will throw an IOException (JDK 1.4.2, Ubuntu 7.04)
Preferences.set("launcher", "gnome-open");
return true;
} catch (Exception ignored) { }
// Attempt with kde-open
try {
Process p = Runtime.getRuntime().exec(new String[] { "kde-open" });
p.waitFor();
Preferences.set("launcher", "kde-open");
return true;
} catch (Exception ignored) { }
return false;
}
@Override
public void openFolder(File file) throws Exception {
if (Desktop.isDesktopSupported()) {
super.openFolder(file);
} else if (openFolderAvailable()) {
String launcher = Preferences.get("launcher");
String[] params = new String[] { launcher, file.getAbsolutePath() };
Runtime.getRuntime().exec(params);
} else {
System.err.println("No launcher set, cannot open " +
file.getAbsolutePath());
}
}
}
| LinuxPlatform |
java | apache__camel | components/camel-aws/camel-aws2-iam/src/main/java/org/apache/camel/component/aws2/iam/IAM2Endpoint.java | {
"start": 1691,
"end": 3637
} | class ____ extends ScheduledPollEndpoint implements EndpointServiceLocation {
private IamClient iamClient;
@UriParam
private IAM2Configuration configuration;
public IAM2Endpoint(String uri, Component component, IAM2Configuration configuration) {
super(uri, component);
this.configuration = configuration;
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
throw new UnsupportedOperationException("You cannot receive messages from this endpoint");
}
@Override
public Producer createProducer() throws Exception {
return new IAM2Producer(this);
}
@Override
public IAM2Component getComponent() {
return (IAM2Component) super.getComponent();
}
@Override
public void doStart() throws Exception {
super.doStart();
iamClient = configuration.getIamClient() != null
? configuration.getIamClient()
: IAM2ClientFactory.getIamClient(configuration).getIamClient();
}
@Override
public void doStop() throws Exception {
if (ObjectHelper.isEmpty(configuration.getIamClient())) {
if (iamClient != null) {
iamClient.close();
}
}
}
public IAM2Configuration getConfiguration() {
return configuration;
}
public IamClient getIamClient() {
return iamClient;
}
@Override
public String getServiceUrl() {
if (!configuration.isOverrideEndpoint()) {
if (ObjectHelper.isNotEmpty(configuration.getRegion())) {
return configuration.getRegion();
}
} else if (ObjectHelper.isNotEmpty(configuration.getUriEndpointOverride())) {
return configuration.getUriEndpointOverride();
}
return null;
}
@Override
public String getServiceProtocol() {
return "iam";
}
}
| IAM2Endpoint |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/creators/ValueInstantiatorTest.java | {
"start": 5239,
"end": 5648
} | class ____ extends InstantiatorBase
{
@Override
public String getValueTypeDesc() {
return MyList.class.getName();
}
@Override
public boolean canCreateUsingDefault() { return true; }
@Override
public MyList createUsingDefault(DeserializationContext ctxt) {
return new MyList(true);
}
}
static | MyListInstantiator |
java | apache__camel | components/camel-google/camel-google-sheets/src/main/java/org/apache/camel/component/google/sheets/GoogleSheetsConfiguration.java | {
"start": 1356,
"end": 5295
} | class ____ extends AbstractApiConfiguration {
@UriPath
@Metadata(required = true)
private GoogleSheetsApiName apiName;
@UriPath(enums = "create,get,update,append,batchUpdate,clear")
@Metadata(required = true)
private String methodName;
@UriParam
private String clientId;
@UriParam(label = "security", secret = true)
private String clientSecret;
@UriParam(label = "security", secret = true)
private String accessToken;
@UriParam(label = "security", secret = true)
private String refreshToken;
@UriParam
private String applicationName;
@UriParam
private String scopes;
/* Service account */
@UriParam(label = "security")
private String serviceAccountKey;
@UriParam
private String delegate;
public GoogleSheetsApiName getApiName() {
return apiName;
}
/**
* What kind of operation to perform
*/
public void setApiName(GoogleSheetsApiName apiName) {
this.apiName = apiName;
}
public String getMethodName() {
return methodName;
}
/**
* What sub operation to use for the selected operation
*/
public void setMethodName(String methodName) {
this.methodName = methodName;
}
public String getClientId() {
return clientId;
}
/**
* Client ID of the sheets application
*/
public void setClientId(String clientId) {
this.clientId = clientId;
}
public String getClientSecret() {
return clientSecret;
}
/**
* Client secret of the sheets application
*/
public void setClientSecret(String clientSecret) {
this.clientSecret = clientSecret;
}
public String getAccessToken() {
return accessToken;
}
/**
* OAuth 2 access token. This typically expires after an hour so refreshToken is recommended for long term usage.
*/
public void setAccessToken(String accessToken) {
this.accessToken = accessToken;
}
public String getRefreshToken() {
return refreshToken;
}
/**
* OAuth 2 refresh token. Using this, the Google Sheets component can obtain a new accessToken whenever the current
* one expires - a necessity if the application is long-lived.
*/
public void setRefreshToken(String refreshToken) {
this.refreshToken = refreshToken;
}
public String getApplicationName() {
return applicationName;
}
/**
* Google Sheets application name. Example would be "camel-google-sheets/1.0"
*/
public void setApplicationName(String applicationName) {
this.applicationName = applicationName;
}
public String getScopes() {
return scopes;
}
public Collection<String> getScopesAsList() {
if (scopes != null) {
return List.of(scopes.split(","));
} else {
return null;
}
}
/**
* Specifies the level of permissions you want a sheets application to have to a user account. See
* https://developers.google.com/identity/protocols/googlescopes for more info. Multiple scopes can be separated by
* comma.
*
* @see com.google.api.services.sheets.v4.SheetsScopes
*/
public void setScopes(String scopes) {
this.scopes = scopes;
}
public String getServiceAccountKey() {
return serviceAccountKey;
}
/**
* Sets "*.json" file with credentials for Service account
*
* @param serviceAccountKey String file, classpath, or http url
*/
public void setServiceAccountKey(String serviceAccountKey) {
this.serviceAccountKey = serviceAccountKey;
}
public String getDelegate() {
return delegate;
}
/**
* Delegate for wide-domain service account
*/
public void setDelegate(String delegate) {
this.delegate = delegate;
}
}
| GoogleSheetsConfiguration |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/net/impl/DelegatingChannelPromise.java | {
"start": 880,
"end": 5031
} | class ____ extends AbstractFuture<Void> implements ChannelPromise {
private ChannelPromise bridge;
private final Channel channel;
private final Promise<Void> promise;
DelegatingChannelPromise(Promise<Void> promise, Channel channel) {
this.channel = Objects.requireNonNull(channel);
this.promise = Objects.requireNonNull(promise);
}
@Override
public Channel channel() {
return channel;
}
@Override
public ChannelPromise setSuccess(Void result) {
return setSuccess();
}
@Override
public ChannelPromise setSuccess() {
promise.succeed();
return this;
}
@Override
public boolean trySuccess() {
return promise.tryComplete();
}
@Override
public ChannelPromise setFailure(Throwable cause) {
promise.tryFail(cause);
return this;
}
@SuppressWarnings("unchecked")
@Override
public ChannelPromise addListener(GenericFutureListener<? extends Future<? super Void>> listener) {
bridge().addListeners(listener);
return this;
}
@SuppressWarnings("unchecked")
@Override
public ChannelPromise addListeners(GenericFutureListener<? extends Future<? super Void>>... listeners) {
bridge().addListeners(listeners);
return this;
}
@SuppressWarnings("unchecked")
@Override
public ChannelPromise removeListener(GenericFutureListener<? extends Future<? super Void>> listener) {
bridge().removeListeners(listener);
return this;
}
@SuppressWarnings("unchecked")
@Override
public ChannelPromise removeListeners(GenericFutureListener<? extends Future<? super Void>>... listeners) {
bridge().removeListeners(listeners);
return this;
}
@Override
public ChannelPromise sync() throws InterruptedException {
throw new UnsupportedOperationException();
}
@Override
public ChannelPromise syncUninterruptibly() {
throw new UnsupportedOperationException();
}
@Override
public ChannelPromise await() throws InterruptedException {
throw new UnsupportedOperationException();
}
@Override
public ChannelPromise awaitUninterruptibly() {
throw new UnsupportedOperationException();
}
@Override
public ChannelPromise unvoid() {
return this;
}
@Override
public boolean isVoid() {
return false;
}
@Override
public boolean trySuccess(Void result) {
return promise.tryComplete();
}
@Override
public boolean tryFailure(Throwable cause) {
return promise.tryFail(cause);
}
@Override
public boolean setUncancellable() {
return true;
}
@Override
public boolean isSuccess() {
return promise.future().succeeded();
}
@Override
public boolean isCancellable() {
return false;
}
@Override
public Throwable cause() {
return promise.future().cause();
}
@Override
public boolean await(long timeout, TimeUnit unit) throws InterruptedException {
throw new UnsupportedOperationException();
}
@Override
public boolean await(long timeoutMillis) throws InterruptedException {
throw new UnsupportedOperationException();
}
@Override
public boolean awaitUninterruptibly(long timeout, TimeUnit unit) {
throw new UnsupportedOperationException();
}
@Override
public boolean awaitUninterruptibly(long timeoutMillis) {
throw new UnsupportedOperationException();
}
@Override
public Void getNow() {
return null;
}
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
return false;
}
@Override
public boolean isCancelled() {
return false;
}
@Override
public boolean isDone() {
return promise.future().isComplete();
}
private ChannelPromise bridge() {
ChannelPromise p;
// Could use double-checked locking ?
synchronized (this) {
p = bridge;
if (p == null) {
ChannelPromise pr = channel.newPromise();
p = pr;
((FutureBase<?>)promise.future())
.addListener((result, failure) -> {
if (failure == null) {
pr.setSuccess();
} else {
pr.setFailure(failure);
}
});
bridge = p;
}
}
return p;
}
}
| DelegatingChannelPromise |
java | google__guava | android/guava-tests/test/com/google/common/collect/EnumBiMapTest.java | {
"start": 2202,
"end": 2348
} | enum ____ {
CANADA,
CHILE,
JAPAN,
SWITZERLAND,
UK
}
@AndroidIncompatible // test-suite builders
public static final | Country |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableSingleSingle.java | {
"start": 928,
"end": 1399
} | class ____<T> extends Single<T> {
final ObservableSource<? extends T> source;
final T defaultValue;
public ObservableSingleSingle(ObservableSource<? extends T> source, T defaultValue) {
this.source = source;
this.defaultValue = defaultValue;
}
@Override
public void subscribeActual(SingleObserver<? super T> t) {
source.subscribe(new SingleElementObserver<>(t, defaultValue));
}
static final | ObservableSingleSingle |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/inheritance/SingleTableInheritanceEagerAssociationTest.java | {
"start": 5067,
"end": 5438
} | class ____ extends User {
@OneToOne(cascade = CascadeType.ALL)
@JoinColumn(name = "ADDRESS_ID")
private AddressA addressA;
protected UserA() {
}
public UserA(String userId) {
super( userId );
}
public void setAddressA(AddressA addressA) {
this.addressA = addressA;
}
}
@Entity(name = "UserB")
@DiscriminatorValue("USER_B")
public static | UserA |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/flogger/FloggerArgumentToStringTest.java | {
"start": 938,
"end": 1560
} | class ____ {
private final CompilationTestHelper testHelper =
CompilationTestHelper.newInstance(FloggerArgumentToString.class, getClass());
private final BugCheckerRefactoringTestHelper refactoringHelper =
BugCheckerRefactoringTestHelper.newInstance(FloggerArgumentToString.class, getClass());
@Test
public void refactoring() {
refactoringHelper
.addInputLines(
"in/Test.java",
"""
import com.google.common.base.Ascii;
import com.google.common.flogger.FluentLogger;
import java.util.Arrays;
| FloggerArgumentToStringTest |
java | hibernate__hibernate-orm | hibernate-spatial/src/test/java/org/hibernate/spatial/testing/datareader/TestData.java | {
"start": 470,
"end": 3229
} | class ____ implements List<TestDataElement> {
private List<TestDataElement> testDataElements;
protected TestData() {
}
public static TestData fromFile(String fileName) {
TestDataReader reader = new TestDataReader();
return fromFile( fileName, reader );
}
public static TestData fromFile(String fileName, TestDataReader reader) {
List<TestDataElement> elements = reader.read( fileName );
TestData testData = new TestData();
testData.testDataElements = elements;
return testData;
}
public int size() {
return testDataElements.size();
}
public boolean isEmpty() {
return testDataElements.isEmpty();
}
public boolean contains(Object o) {
return testDataElements.contains( o );
}
public Iterator<TestDataElement> iterator() {
return testDataElements.iterator();
}
public Object[] toArray() {
return testDataElements.toArray();
}
public <T> T[] toArray(T[] a) {
return testDataElements.toArray( a );
}
public boolean add(TestDataElement testDataElement) {
return testDataElements.add( testDataElement );
}
public boolean remove(Object o) {
return testDataElements.remove( o );
}
public boolean containsAll(Collection<?> c) {
return testDataElements.containsAll( c );
}
public boolean addAll(Collection<? extends TestDataElement> c) {
return testDataElements.addAll( c );
}
public boolean addAll(int index, Collection<? extends TestDataElement> c) {
return testDataElements.addAll( index, c );
}
public boolean removeAll(Collection<?> c) {
return testDataElements.removeAll( c );
}
public boolean retainAll(Collection<?> c) {
return testDataElements.retainAll( c );
}
public void clear() {
testDataElements.clear();
}
public boolean equals(Object o) {
return testDataElements.equals( o );
}
public int hashCode() {
return testDataElements.hashCode();
}
public TestDataElement get(int index) {
return testDataElements.get( index );
}
public TestDataElement set(int index, TestDataElement element) {
return testDataElements.set( index, element );
}
public void add(int index, TestDataElement element) {
testDataElements.add( index, element );
}
public TestDataElement remove(int index) {
return testDataElements.remove( index );
}
public int indexOf(Object o) {
return testDataElements.indexOf( o );
}
public int lastIndexOf(Object o) {
return testDataElements.lastIndexOf( o );
}
public ListIterator<TestDataElement> listIterator() {
return testDataElements.listIterator();
}
public ListIterator<TestDataElement> listIterator(int index) {
return testDataElements.listIterator( index );
}
public List<TestDataElement> subList(int fromIndex, int toIndex) {
return testDataElements.subList( fromIndex, toIndex );
}
}
| TestData |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/StaticQualifiedUsingExpressionTest.java | {
"start": 10243,
"end": 10495
} | class ____ {
void test() {
int x = Lib.CONST + new b.Lib().CONST;
}
}
""")
.addOutputLines(
"out/Test.java",
"""
import a.Lib;
| Test |
java | google__guava | android/guava/src/com/google/common/util/concurrent/AbstractService.java | {
"start": 18689,
"end": 18967
} | class ____ a consistent
* snapshot of the state and therefore it can be used to answer simple queries without needing to
* grab a lock.
*/
// @Immutable except that Throwable is mutable (initCause(), setStackTrace(), mutable subclasses).
private static final | represents |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/ObjectSerializerTest.java | {
"start": 884,
"end": 1201
} | enum ____ {
SUCCESS(1), ERROR(-1), UNKOWN_ERROR(999), LOGIN_FAILURE(8), INVALID_ARGUMENT(0),
SIGN_ERROR(17);
public final int value;
ResultCode(int value){
this.value = value;
}
}
public static | ResultCode |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableToCompletableTest.java | {
"start": 988,
"end": 3436
} | class ____ extends RxJavaTest {
@Test
public void justSingleItemObservable() {
TestSubscriber<String> subscriber = TestSubscriber.create();
Completable cmp = Flowable.just("Hello World!").ignoreElements();
cmp.<String>toFlowable().subscribe(subscriber);
subscriber.assertNoValues();
subscriber.assertComplete();
subscriber.assertNoErrors();
}
@Test
public void errorObservable() {
TestSubscriber<String> subscriber = TestSubscriber.create();
IllegalArgumentException error = new IllegalArgumentException("Error");
Completable cmp = Flowable.<String>error(error).ignoreElements();
cmp.<String>toFlowable().subscribe(subscriber);
subscriber.assertError(error);
subscriber.assertNoValues();
}
@Test
public void justTwoEmissionsObservableThrowsError() {
TestSubscriber<String> subscriber = TestSubscriber.create();
Completable cmp = Flowable.just("First", "Second").ignoreElements();
cmp.<String>toFlowable().subscribe(subscriber);
subscriber.assertNoErrors();
subscriber.assertNoValues();
}
@Test
public void emptyObservable() {
TestSubscriber<String> subscriber = TestSubscriber.create();
Completable cmp = Flowable.<String>empty().ignoreElements();
cmp.<String>toFlowable().subscribe(subscriber);
subscriber.assertNoErrors();
subscriber.assertNoValues();
subscriber.assertComplete();
}
@Test
public void neverObservable() {
TestSubscriberEx<String> subscriber = new TestSubscriberEx<>();
Completable cmp = Flowable.<String>never().ignoreElements();
cmp.<String>toFlowable().subscribe(subscriber);
subscriber.assertNotTerminated();
subscriber.assertNoValues();
}
@Test
public void shouldUseUnsafeSubscribeInternallyNotSubscribe() {
TestSubscriber<String> subscriber = TestSubscriber.create();
final AtomicBoolean unsubscribed = new AtomicBoolean(false);
Completable cmp = Flowable.just("Hello World!").doOnCancel(new Action() {
@Override
public void run() {
unsubscribed.set(true);
}}).ignoreElements();
cmp.<String>toFlowable().subscribe(subscriber);
subscriber.assertComplete();
assertFalse(unsubscribed.get());
}
}
| FlowableToCompletableTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/utils/TransformConfigVersionUtils.java | {
"start": 682,
"end": 5491
} | class ____ {
private static final List<TransformConfigVersion> ALL_VERSIONS = KnownTransformConfigVersions.ALL_VERSIONS;
/** Returns all released versions */
public static List<TransformConfigVersion> allReleasedVersions() {
return ALL_VERSIONS;
}
/** Returns the oldest known {@link TransformConfigVersion} */
public static TransformConfigVersion getFirstVersion() {
return ALL_VERSIONS.get(0);
}
/** Returns a random {@link TransformConfigVersion} from all available versions. */
public static TransformConfigVersion randomVersion() {
return ESTestCase.randomFrom(ALL_VERSIONS);
}
/** Returns a random {@link TransformConfigVersion} from all available versions without the ignore set */
public static TransformConfigVersion randomVersion(Set<TransformConfigVersion> ignore) {
return ESTestCase.randomFrom(ALL_VERSIONS.stream().filter(v -> ignore.contains(v) == false).collect(Collectors.toList()));
}
/** Returns a random {@link TransformConfigVersion} from all available versions. */
public static TransformConfigVersion randomVersion(Random random) {
return ALL_VERSIONS.get(random.nextInt(ALL_VERSIONS.size()));
}
/** Returns a random {@link TransformConfigVersion} between <code>minVersion</code> and <code>maxVersion</code> (inclusive). */
public static TransformConfigVersion randomVersionBetween(
Random random,
@Nullable TransformConfigVersion minVersion,
@Nullable TransformConfigVersion maxVersion
) {
if (minVersion != null && maxVersion != null && maxVersion.before(minVersion)) {
throw new IllegalArgumentException("maxVersion [" + maxVersion + "] cannot be less than minVersion [" + minVersion + "]");
}
int minVersionIndex = 0;
if (minVersion != null) {
minVersionIndex = Collections.binarySearch(ALL_VERSIONS, minVersion);
}
int maxVersionIndex = ALL_VERSIONS.size() - 1;
if (maxVersion != null) {
maxVersionIndex = Collections.binarySearch(ALL_VERSIONS, maxVersion);
}
if (minVersionIndex < 0) {
throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist.");
} else if (maxVersionIndex < 0) {
throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist.");
} else {
// minVersionIndex is inclusive so need to add 1 to this index
int range = maxVersionIndex + 1 - minVersionIndex;
return ALL_VERSIONS.get(minVersionIndex + random.nextInt(range));
}
}
public static TransformConfigVersion getPreviousVersion() {
TransformConfigVersion version = getPreviousVersion(TransformConfigVersion.CURRENT);
assert version.before(TransformConfigVersion.CURRENT);
return version;
}
public static TransformConfigVersion getPreviousVersion(TransformConfigVersion version) {
int place = Collections.binarySearch(ALL_VERSIONS, version);
if (place < 0) {
// version does not exist - need the item before the index this version should be inserted
place = -(place + 1);
}
if (place < 1) {
throw new IllegalArgumentException("couldn't find any released versions before [" + version + "]");
}
return ALL_VERSIONS.get(place - 1);
}
public static TransformConfigVersion getNextVersion(TransformConfigVersion version) {
int place = Collections.binarySearch(ALL_VERSIONS, version);
if (place < 0) {
// version does not exist - need the item at the index this version should be inserted
place = -(place + 1);
} else {
// need the *next* version
place++;
}
if (place < 0 || place >= ALL_VERSIONS.size()) {
throw new IllegalArgumentException("couldn't find any released versions after [" + version + "]");
}
return ALL_VERSIONS.get(place);
}
/** Returns a random {@code TransformConfigVersion} that is compatible with {@link TransformConfigVersion#CURRENT} */
public static TransformConfigVersion randomCompatibleVersion(Random random) {
return randomVersionBetween(random, TransformConfigVersion.FIRST_TRANSFORM_VERSION, TransformConfigVersion.CURRENT);
}
/** Returns a random {@code TransformConfigVersion} that is compatible with the previous version to {@code version} */
public static TransformConfigVersion randomPreviousCompatibleVersion(Random random, TransformConfigVersion version) {
return randomVersionBetween(random, TransformConfigVersion.FIRST_TRANSFORM_VERSION, getPreviousVersion(version));
}
}
| TransformConfigVersionUtils |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java | {
"start": 20079,
"end": 20712
} | class ____ extends AbstractBinder {
private Configuration configuration;
private ApplicationClientProtocol protocol;
HSJerseyBinder(Configuration pConfiguration,
ApplicationClientProtocol acProtocol) {
this.configuration = pConfiguration;
this.protocol = acProtocol;
}
@Override
protected void configure() {
bind(history).to(HistoryContext.class).named("ctx");
bind(configuration).to(Configuration.class).named("conf");
bind(webApp).to(WebApp.class).named("hsWebApp");
bind(protocol).to(ApplicationClientProtocol.class).named("appClient");
}
}
}
| HSJerseyBinder |
java | spring-projects__spring-boot | core/spring-boot-test/src/test/java/org/springframework/boot/test/json/GsonTesterTests.java | {
"start": 2482,
"end": 2744
} | class ____ {
public @Nullable GsonTester<ExampleObject> base;
public GsonTester<ExampleObject> baseSet = new GsonTester<>(InitFieldsBaseClass.class,
ResolvableType.forClass(ExampleObject.class), new GsonBuilder().create());
}
static | InitFieldsBaseClass |
java | quarkusio__quarkus | integration-tests/resteasy-jackson/src/test/java/io/quarkus/it/resteasy/jackson/GenericsResourceIT.java | {
"start": 126,
"end": 184
} | class ____ extends GenericsResourceTest {
}
| GenericsResourceIT |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/stream/StreamExecAsyncCalc.java | {
"start": 2342,
"end": 3690
} | class ____ extends CommonExecAsyncCalc implements StreamExecNode<RowData> {
public StreamExecAsyncCalc(
ReadableConfig tableConfig,
List<RexNode> projection,
InputProperty inputProperty,
RowType outputType,
String description) {
this(
ExecNodeContext.newNodeId(),
ExecNodeContext.newContext(StreamExecAsyncCalc.class),
ExecNodeContext.newPersistedConfig(StreamExecAsyncCalc.class, tableConfig),
projection,
Collections.singletonList(inputProperty),
outputType,
description);
}
@JsonCreator
public StreamExecAsyncCalc(
@JsonProperty(FIELD_NAME_ID) int id,
@JsonProperty(FIELD_NAME_TYPE) ExecNodeContext context,
@JsonProperty(FIELD_NAME_CONFIGURATION) ReadableConfig persistedConfig,
@JsonProperty(FIELD_NAME_PROJECTION) List<RexNode> projection,
@JsonProperty(FIELD_NAME_INPUT_PROPERTIES) List<InputProperty> inputProperties,
@JsonProperty(FIELD_NAME_OUTPUT_TYPE) RowType outputType,
@JsonProperty(FIELD_NAME_DESCRIPTION) String description) {
super(id, context, persistedConfig, projection, inputProperties, outputType, description);
}
}
| StreamExecAsyncCalc |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/autosuggest/AutoSuggestCommands.java | {
"start": 442,
"end": 3643
} | interface ____<K> extends RedisCommands {
/**
* Execute the command <a href="https://redis.io/commands/ft.sugadd/">FT.SUGADD</a>.
* Summary: Add a suggestion string to an auto-complete suggestion dictionary
* Group: auto-suggest
*
* @param key the suggestion dictionary key
* @param string the suggestion string to index
* @param score the floating point number of the suggestion string's weight
* @return A uni emitting the current size of the suggestion dictionary.
*/
default long ftSugAdd(K key, String string, double score) {
return ftSugAdd(key, string, score, false);
}
/**
* Execute the command <a href="https://redis.io/commands/ft.sugadd/">FT.SUGADD</a>.
* Summary: Add a suggestion string to an auto-complete suggestion dictionary
* Group: auto-suggest
*
* @param key the suggestion dictionary key
* @param string the suggestion string to index
* @param score the floating point number of the suggestion string's weight
* @param increment increments the existing entry of the suggestion by the given score, instead of replacing the score.
* This is useful for updating the dictionary based on user queries in real time.
* @return A uni emitting the current size of the suggestion dictionary.
*/
long ftSugAdd(K key, String string, double score, boolean increment);
/**
* Execute the command <a href="https://redis.io/commands/ft.sugdel/">FT.SUGDEL</a>.
* Summary: Delete a string from a suggestion index
* Group: auto-suggest
*
* @param key the suggestion dictionary key
* @param string the suggestion string to index
* @return A uni emitting {@code true} if the value was found, {@code false} otherwise
*/
boolean ftSugDel(K key, String string);
/**
* Execute the command <a href="https://redis.io/commands/ft.sugget/">FT.SUGGET</a>.
* Summary: Get completion suggestions for a prefix
* Group: auto-suggest
*
* @param key the suggestion dictionary key
* @param prefix is prefix to complete on.
* @return A uni emitting a list of the top suggestions matching the prefix, optionally with score after each entry.
*/
List<Suggestion> ftSugGet(K key, String prefix);
/**
* Execute the command <a href="https://redis.io/commands/ft.sugget/">FT.SUGGET</a>.
* Summary: Get completion suggestions for a prefix
* Group: auto-suggest
*
* @param key the suggestion dictionary key
* @param prefix is prefix to complete on.
* @param args the extra argument, must not be {@code null}
* @return A uni emitting {@code true} if the value was found, {@code false} otherwise
*/
List<Suggestion> ftSugGet(K key, String prefix, GetArgs args);
/**
* Execute the command <a href="https://redis.io/commands/ft.suglen/">FT.SUGLEN</a>.
* Summary: Get the size of an auto-complete suggestion dictionary
* Group: auto-suggest
*
* @param key the suggestion dictionary key
* @return A uni emitting the current size of the suggestion dictionary.
*/
long ftSugLen(K key);
}
| AutoSuggestCommands |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/indices/analysis/IncorrectSetupStablePluginsTests.java | {
"start": 4563,
"end": 6465
} | class ____ extends AbstractCharFilterFactory {
public MultipleConstructors() {}
public MultipleConstructors(OkAnalysisSettings settings) {}
}
public void testMultiplePublicConstructors() throws IOException {
var e = expectThrows(
IllegalStateException.class,
() -> getIndexAnalyzers(
Settings.builder()
.put("index.analysis.analyzer.char_filter_test.tokenizer", "standard")
.put("index.analysis.analyzer.char_filter_test.char_filter", "multipleConstructors")
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersionUtils.randomVersion())
.build(),
Map.of("multipleConstructors", new PluginInfo("multipleConstructors", MultipleConstructors.class.getName(), classLoader))
)
);
assertThat(e.getMessage(), equalTo("Plugin can only have one public constructor."));
}
public IndexAnalyzers getIndexAnalyzers(Settings settings, Map<String, PluginInfo> mapOfCharFilters) throws IOException {
AnalysisRegistry registry = setupRegistry(mapOfCharFilters);
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
return registry.build(IndexCreationContext.CREATE_INDEX, idxSettings);
}
private AnalysisRegistry setupRegistry(Map<String, PluginInfo> mapOfCharFilters) throws IOException {
AnalysisRegistry registry = new AnalysisModule(
TestEnvironment.newEnvironment(emptyNodeSettings),
emptyList(),
new StablePluginsRegistry(
new NamedComponentReader(),
Map.of(CharFilterFactory.class.getCanonicalName(), new NameToPluginInfo(mapOfCharFilters))
)
).getAnalysisRegistry();
return registry;
}
public abstract static | MultipleConstructors |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/voyageai/VoyageAIServiceSettingsTests.java | {
"start": 1022,
"end": 4187
} | class ____ extends AbstractWireSerializingTestCase<VoyageAIServiceSettings> {
public static VoyageAIServiceSettings createRandom() {
return new VoyageAIServiceSettings(randomAlphaOfLength(15), RateLimitSettingsTests.createRandom());
}
public void testFromMap() {
var model = "model";
var serviceSettings = VoyageAIServiceSettings.fromMap(
new HashMap<>(Map.of(VoyageAIServiceSettings.MODEL_ID, model)),
ConfigurationParseContext.REQUEST
);
MatcherAssert.assertThat(serviceSettings, is(new VoyageAIServiceSettings(model, null)));
}
public void testFromMap_WithRateLimit() {
var model = "model";
var serviceSettings = VoyageAIServiceSettings.fromMap(
new HashMap<>(
Map.of(
VoyageAIServiceSettings.MODEL_ID,
model,
RateLimitSettings.FIELD_NAME,
new HashMap<>(Map.of(RateLimitSettings.REQUESTS_PER_MINUTE_FIELD, 3))
)
),
ConfigurationParseContext.REQUEST
);
MatcherAssert.assertThat(serviceSettings, is(new VoyageAIServiceSettings(model, new RateLimitSettings(3))));
}
public void testFromMap_WhenUsingModelId() {
var model = "model";
var serviceSettings = VoyageAIServiceSettings.fromMap(
new HashMap<>(Map.of(VoyageAIServiceSettings.MODEL_ID, model)),
ConfigurationParseContext.PERSISTENT
);
MatcherAssert.assertThat(serviceSettings, is(new VoyageAIServiceSettings(model, null)));
}
public void testXContent_WritesModelId() throws IOException {
var entity = new VoyageAIServiceSettings("model", new RateLimitSettings(1));
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
entity.toXContent(builder, null);
String xContentResult = Strings.toString(builder);
assertThat(xContentResult, is("""
{"model_id":"model","rate_limit":{"requests_per_minute":1}}"""));
}
@Override
protected Writeable.Reader<VoyageAIServiceSettings> instanceReader() {
return VoyageAIServiceSettings::new;
}
@Override
protected VoyageAIServiceSettings createTestInstance() {
return createRandom();
}
@Override
protected VoyageAIServiceSettings mutateInstance(VoyageAIServiceSettings instance) throws IOException {
if (randomBoolean()) {
var modelId = randomValueOtherThan(instance.modelId(), () -> randomAlphaOfLength(15));
return new VoyageAIServiceSettings(modelId, instance.rateLimitSettings());
} else {
var rateLimitSettings = randomValueOtherThan(instance.rateLimitSettings(), RateLimitSettingsTests::createRandom);
return new VoyageAIServiceSettings(instance.modelId(), rateLimitSettings);
}
}
public static Map<String, Object> getServiceSettingsMap(String model) {
var map = new HashMap<String, Object>();
map.put(VoyageAIServiceSettings.MODEL_ID, model);
return map;
}
}
| VoyageAIServiceSettingsTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/inference/InferenceResults.java | {
"start": 764,
"end": 3156
} | interface ____ extends NamedWriteable, ToXContentFragment {
String MODEL_ID_RESULTS_FIELD = "model_id";
static void writeResult(InferenceResults results, IngestDocument ingestDocument, String resultField, String modelId) {
Objects.requireNonNull(results, "results");
Objects.requireNonNull(ingestDocument, "ingestDocument");
Objects.requireNonNull(resultField, "resultField");
Map<String, Object> resultMap = results.asMap();
resultMap.put(MODEL_ID_RESULTS_FIELD, modelId);
setOrAppendValue(resultField, resultMap, ingestDocument);
}
static void writeResultToField(
InferenceResults results,
IngestDocument ingestDocument,
@Nullable String basePath,
String outputField,
String modelId,
boolean includeModelId
) {
Objects.requireNonNull(results, "results");
Objects.requireNonNull(ingestDocument, "ingestDocument");
Objects.requireNonNull(outputField, "outputField");
Map<String, Object> resultMap = results.asMap(outputField);
if (includeModelId) {
resultMap.put(MODEL_ID_RESULTS_FIELD, modelId);
}
if (basePath == null) {
// insert the results into the root of the document
for (var entry : resultMap.entrySet()) {
setOrAppendValue(entry.getKey(), entry.getValue(), ingestDocument);
}
} else {
for (var entry : resultMap.entrySet()) {
setOrAppendValue(basePath + "." + entry.getKey(), entry.getValue(), ingestDocument);
}
}
}
private static void setOrAppendValue(String path, Object value, IngestDocument ingestDocument) {
if (ingestDocument.hasField(path)) {
ingestDocument.appendFieldValue(path, value);
} else {
ingestDocument.setFieldValue(path, value);
}
}
String getResultsField();
/**
* Convert to a map
* @return Map representation of the InferenceResult
*/
Map<String, Object> asMap();
/**
* Convert to a map placing the inference result in {@code outputField}
* @param outputField Write the inference result to this field
* @return Map representation of the InferenceResult
*/
Map<String, Object> asMap(String outputField);
Object predictedValue();
}
| InferenceResults |
java | apache__camel | test-infra/camel-test-infra-infinispan/src/main/java/org/apache/camel/test/infra/infinispan/services/InfinispanLocalContainerInfraService.java | {
"start": 1789,
"end": 3355
} | class ____ implements InfinispanInfraService, ContainerService<GenericContainer<?>> {
public static final String CONTAINER_NAME = "infinispan";
private static final String DEFAULT_USERNAME = "admin";
private static final String DEFAULT_PASSWORD = "password";
private static final Logger LOG = LoggerFactory.getLogger(InfinispanLocalContainerInfraService.class);
private final GenericContainer<?> container;
private final boolean isNetworkHost;
public InfinispanLocalContainerInfraService() {
this(LocalPropertyResolver.getProperty(
InfinispanLocalContainerInfraService.class,
InfinispanProperties.INFINISPAN_CONTAINER));
}
public InfinispanLocalContainerInfraService(String containerImage) {
isNetworkHost = isHostNetworkMode();
container = initContainer(containerImage, CONTAINER_NAME);
String name = ContainerEnvironmentUtil.containerName(this.getClass());
if (name != null) {
container.withCreateContainerCmdModifier(cmd -> cmd.withName(name));
}
}
public InfinispanLocalContainerInfraService(GenericContainer<?> container) {
isNetworkHost = isHostNetworkMode();
this.container = container;
}
protected GenericContainer<?> initContainer(String imageName, String containerName) {
final Logger containerLog = LoggerFactory.getLogger("container." + containerName);
final Consumer<OutputFrame> logConsumer = new Slf4jLogConsumer(containerLog);
| InfinispanLocalContainerInfraService |
java | apache__rocketmq | example/src/main/java/org/apache/rocketmq/example/filter/TagFilterProducer.java | {
"start": 1082,
"end": 1720
} | class ____ {
public static void main(String[] args) throws Exception {
DefaultMQProducer producer = new DefaultMQProducer("please_rename_unique_group_name");
producer.start();
String[] tags = new String[] {"TagA", "TagB", "TagC"};
for (int i = 0; i < 60; i++) {
Message msg = new Message("TagFilterTest",
tags[i % tags.length],
"Hello world".getBytes(RemotingHelper.DEFAULT_CHARSET));
SendResult sendResult = producer.send(msg);
System.out.printf("%s%n", sendResult);
}
producer.shutdown();
}
}
| TagFilterProducer |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/compliance/callback/listener/MultiListenersAppliedTest.java | {
"start": 2338,
"end": 2552
} | class ____ extends PersonCallback {
Person() {
}
public Person(Integer id, String name) {
this.id = id;
this.name = name;
}
@Id
private Integer id;
private String name;
}
public static | Person |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/aggregate/UniqValueCount.java | {
"start": 1125,
"end": 1522
} | class ____
extends org.apache.hadoop.mapreduce.lib.aggregate.UniqValueCount
implements ValueAggregator<Object> {
/**
* the default constructor
*
*/
public UniqValueCount() {
super();
}
/**
* constructor
* @param maxNum the limit in the number of unique values to keep.
*
*/
public UniqValueCount(long maxNum) {
super(maxNum);
}
}
| UniqValueCount |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/idgen/enhanced/forcedtable/Entity.java | {
"start": 216,
"end": 546
} | class ____ {
private Long id;
private String name;
public Entity() {
}
public Entity(String name) {
this.name = name;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
| Entity |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/source/spi/PluralAttributeMapKeyManyToAnySource.java | {
"start": 181,
"end": 288
} | interface ____
extends PluralAttributeMapKeySource, AnyMappingSource {
}
| PluralAttributeMapKeyManyToAnySource |
java | apache__rocketmq | broker/src/main/java/org/apache/rocketmq/broker/controller/ReplicasManager.java | {
"start": 3490,
"end": 6276
} | class ____ {
private static final Logger LOGGER = LoggerFactory.getLogger(LoggerName.BROKER_LOGGER_NAME);
private static final int RETRY_INTERVAL_SECOND = 5;
private final ScheduledExecutorService scheduledService;
private final ExecutorService executorService;
private final ExecutorService scanExecutor;
private final BrokerController brokerController;
private final AutoSwitchHAService haService;
private final BrokerConfig brokerConfig;
private final String brokerAddress;
private final BrokerOuterAPI brokerOuterAPI;
private List<String> controllerAddresses;
private final ConcurrentMap<String, Boolean> availableControllerAddresses;
private volatile String controllerLeaderAddress = "";
private volatile State state = State.INITIAL;
private volatile RegisterState registerState = RegisterState.INITIAL;
private ScheduledFuture<?> checkSyncStateSetTaskFuture;
private ScheduledFuture<?> slaveSyncFuture;
private Long brokerControllerId;
private Long masterBrokerId;
private BrokerMetadata brokerMetadata;
private TempBrokerMetadata tempBrokerMetadata;
private Set<Long> syncStateSet;
private int syncStateSetEpoch = 0;
private String masterAddress = "";
private int masterEpoch = 0;
private long lastSyncTimeMs = System.currentTimeMillis();
private Random random = new Random();
public ReplicasManager(final BrokerController brokerController) {
this.brokerController = brokerController;
this.brokerOuterAPI = brokerController.getBrokerOuterAPI();
this.scheduledService = ThreadUtils.newScheduledThreadPool(3, new ThreadFactoryImpl("ReplicasManager_ScheduledService_", brokerController.getBrokerIdentity()));
this.executorService = ThreadUtils.newThreadPoolExecutor(3, new ThreadFactoryImpl("ReplicasManager_ExecutorService_", brokerController.getBrokerIdentity()));
this.scanExecutor = ThreadUtils.newThreadPoolExecutor(4, 10, 60, TimeUnit.SECONDS,
new ArrayBlockingQueue<>(32), new ThreadFactoryImpl("ReplicasManager_scan_thread_", brokerController.getBrokerIdentity()));
this.haService = (AutoSwitchHAService) brokerController.getMessageStore().getHaService();
this.brokerConfig = brokerController.getBrokerConfig();
this.availableControllerAddresses = new ConcurrentHashMap<>();
this.syncStateSet = new HashSet<>();
this.brokerAddress = brokerController.getBrokerAddr();
this.brokerMetadata = new BrokerMetadata(this.brokerController.getMessageStoreConfig().getStorePathBrokerIdentity());
this.tempBrokerMetadata = new TempBrokerMetadata(this.brokerController.getMessageStoreConfig().getStorePathBrokerIdentity() + "-temp");
}
| ReplicasManager |
java | redisson__redisson | redisson/src/main/java/org/redisson/codec/DefaultReferenceCodecProvider.java | {
"start": 2854,
"end": 3479
} | class ____.");
}
Class<?> codecClass;
if (anno.codec() == RObjectField.DEFAULT.class) {
codecClass = config.getCodec().getClass();
} else {
codecClass = anno.codec();
}
return this.getCodec((Class<T>) codecClass);
}
@Override
public <T extends Codec> void registerCodec(Class<T> cls, T codec) {
if (!cls.isInstance(codec)) {
throw new IllegalArgumentException("codec is not an instance of the class [" + cls.getCanonicalName() + "]");
}
codecCache.putIfAbsent(cls, codec);
}
}
| only |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/Endpoint2MustBeStartedBeforeSendProcessorTest.java | {
"start": 3797,
"end": 4721
} | class ____ extends DefaultEndpoint {
private MyEndpoint(String endpointUri, CamelContext camelContext) {
setCamelContext(camelContext);
setEndpointUri(endpointUri);
}
@Override
public Producer createProducer() {
return new MyProducer(this);
}
@Override
public Consumer createConsumer(Processor processor) {
return new MyConsumer(this, null);
}
@Override
public boolean isSingleton() {
return true;
}
// in this test we use start/stop to implement logic
// this is however discouraged, as you should prefer to use
// doStart/doStop
@Override
public void doStart() {
order += "Endpoint";
}
@Override
public void doStop() {
order += "StopEndpoint";
}
}
private | MyEndpoint |
java | google__guava | android/guava/src/com/google/common/escape/ArrayBasedUnicodeEscaper.java | {
"start": 2053,
"end": 8744
} | class ____ extends UnicodeEscaper {
// The replacement array (see ArrayBasedEscaperMap).
private final char[][] replacements;
// The number of elements in the replacement array.
private final int replacementsLength;
// The first code point in the safe range.
private final int safeMin;
// The last code point in the safe range.
private final int safeMax;
// Cropped values used in the fast path range checks.
private final char safeMinChar;
private final char safeMaxChar;
/**
* Creates a new ArrayBasedUnicodeEscaper instance with the given replacement map and specified
* safe range. If {@code safeMax < safeMin} then no code points are considered safe.
*
* <p>If a code point has no mapped replacement then it is checked against the safe range. If it
* lies outside that, then {@link #escapeUnsafe} is called, otherwise no escaping is performed.
*
* @param replacementMap a map of characters to their escaped representations
* @param safeMin the lowest character value in the safe range
* @param safeMax the highest character value in the safe range
* @param unsafeReplacement the default replacement for unsafe characters or null if no default
* replacement is required
*/
protected ArrayBasedUnicodeEscaper(
Map<Character, String> replacementMap,
int safeMin,
int safeMax,
@Nullable String unsafeReplacement) {
this(ArrayBasedEscaperMap.create(replacementMap), safeMin, safeMax, unsafeReplacement);
}
/**
* Creates a new ArrayBasedUnicodeEscaper instance with the given replacement map and specified
* safe range. If {@code safeMax < safeMin} then no code points are considered safe. This
* initializer is useful when explicit instances of ArrayBasedEscaperMap are used to allow the
* sharing of large replacement mappings.
*
* <p>If a code point has no mapped replacement then it is checked against the safe range. If it
* lies outside that, then {@link #escapeUnsafe} is called, otherwise no escaping is performed.
*
* @param escaperMap the map of replacements
* @param safeMin the lowest character value in the safe range
* @param safeMax the highest character value in the safe range
* @param unsafeReplacement the default replacement for unsafe characters or null if no default
* replacement is required
*/
protected ArrayBasedUnicodeEscaper(
ArrayBasedEscaperMap escaperMap,
int safeMin,
int safeMax,
@Nullable String unsafeReplacement) {
checkNotNull(escaperMap); // GWT specific check (do not optimize)
this.replacements = escaperMap.getReplacementArray();
this.replacementsLength = replacements.length;
if (safeMax < safeMin) {
// If the safe range is empty, set the range limits to opposite extremes
// to ensure the first test of either value will fail.
safeMax = -1;
safeMin = Integer.MAX_VALUE;
}
this.safeMin = safeMin;
this.safeMax = safeMax;
// This is a bit of a hack but lets us do quicker per-character checks in
// the fast path code. The safe min/max values are very unlikely to extend
// into the range of surrogate characters, but if they do we must not test
// any values in that range. To see why, consider the case where:
// safeMin <= {hi,lo} <= safeMax
// where {hi,lo} are characters forming a surrogate pair such that:
// codePointOf(hi, lo) > safeMax
// which would result in the surrogate pair being (wrongly) considered safe.
// If we clip the safe range used during the per-character tests so it is
// below the values of characters in surrogate pairs, this cannot occur.
// This approach does mean that we break out of the fast path code in cases
// where we don't strictly need to, but this situation will almost never
// occur in practice.
if (safeMin >= Character.MIN_HIGH_SURROGATE) {
// The safe range is empty or the all safe code points lie in or above the
// surrogate range. Either way the character range is empty.
this.safeMinChar = Character.MAX_VALUE;
this.safeMaxChar = 0;
} else {
// The safe range is non-empty and contains values below the surrogate
// range but may extend above it. We may need to clip the maximum value.
this.safeMinChar = (char) safeMin;
this.safeMaxChar = (char) min(safeMax, Character.MIN_HIGH_SURROGATE - 1);
}
}
/*
* This is overridden to improve performance. Rough benchmarking shows that this almost doubles
* the speed when processing strings that do not require any escaping.
*/
@Override
public final String escape(String s) {
checkNotNull(s); // GWT specific check (do not optimize)
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
if ((c < replacementsLength && replacements[c] != null)
|| c > safeMaxChar
|| c < safeMinChar) {
return escapeSlow(s, i);
}
}
return s;
}
/**
* Escapes a single Unicode code point using the replacement array and safe range values. If the
* given character does not have an explicit replacement and lies outside the safe range then
* {@link #escapeUnsafe} is called.
*
* @return the replacement characters, or {@code null} if no escaping was required
*/
@Override
protected final char @Nullable [] escape(int cp) {
if (cp < replacementsLength) {
char[] chars = replacements[cp];
if (chars != null) {
return chars;
}
}
if (cp >= safeMin && cp <= safeMax) {
return null;
}
return escapeUnsafe(cp);
}
/* Overridden for performance. */
@Override
protected final int nextEscapeIndex(CharSequence csq, int index, int end) {
while (index < end) {
char c = csq.charAt(index);
if ((c < replacementsLength && replacements[c] != null)
|| c > safeMaxChar
|| c < safeMinChar) {
break;
}
index++;
}
return index;
}
/**
* Escapes a code point that has no direct explicit value in the replacement array and lies
* outside the stated safe range. Subclasses should override this method to provide generalized
* escaping for code points if required.
*
* <p>Note that arrays returned by this method must not be modified once they have been returned.
* However it is acceptable to return the same array multiple times (even for different input
* characters).
*
* @param cp the Unicode code point to escape
* @return the replacement characters, or {@code null} if no escaping was required
*/
protected abstract char @Nullable [] escapeUnsafe(int cp);
}
| ArrayBasedUnicodeEscaper |
java | apache__camel | components/camel-google/camel-google-sheets/src/test/java/org/apache/camel/component/google/sheets/SheetsSpreadsheetsIT.java | {
"start": 3785,
"end": 4871
} | class ____ extends AbstractGoogleSheetsTestSupport {
private Spreadsheet testSheet = getSpreadsheet();
@Test
public void test() throws Exception {
final Spreadsheet result = requestBody("direct://GET", testSheet.getSpreadsheetId());
assertNotNull(result, "get result is null");
assertEquals(testSheet.getSpreadsheetId(), result.getSpreadsheetId());
LOG.debug("get: {}", result);
}
@Override
protected GoogleSheetsClientFactory getClientFactory() throws Exception {
return new MockGoogleSheetsClientFactory(new MockLowLevelHttpResponse().setContent(testSheet.toPrettyString()));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct://GET")
.to("google-sheets://" + PATH_PREFIX + "/get?inBody=spreadsheetId");
}
};
}
}
@Nested
| GetIT |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/TestDockerPullCommand.java | {
"start": 1157,
"end": 1908
} | class ____ {
private DockerPullCommand dockerPullCommand;
private static final String IMAGE_NAME = "foo";
@BeforeEach
public void setup() {
dockerPullCommand = new DockerPullCommand(IMAGE_NAME);
}
@Test
public void testGetCommandOption() {
assertEquals("pull", dockerPullCommand.getCommandOption());
}
@Test
public void testGetCommandWithArguments() {
assertEquals("pull", StringUtils.join(",",
dockerPullCommand.getDockerCommandWithArguments()
.get("docker-command")));
assertEquals("foo", StringUtils.join(",",
dockerPullCommand.getDockerCommandWithArguments().get("image")));
assertEquals(2, dockerPullCommand.getDockerCommandWithArguments().size());
}
}
| TestDockerPullCommand |
java | apache__hadoop | hadoop-cloud-storage-project/hadoop-tos/src/main/java/org/apache/hadoop/fs/tosfs/util/CommonUtils.java | {
"start": 920,
"end": 1377
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(CommonUtils.class);
private CommonUtils() {}
public static void runQuietly(RunWithException run) {
runQuietly(run, true);
}
public static void runQuietly(RunWithException run, boolean logError) {
try {
run.run();
} catch (Exception e) {
if (logError) {
LOG.info("Encounter error but can be ignored: ", e);
}
}
}
public | CommonUtils |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java | {
"start": 7222,
"end": 11175
} | class ____ {
private JobIdHistoryFileInfoMap cache;
private int maxSize;
private long maxAge;
public JobListCache(int maxSize, long maxAge) {
this.maxSize = maxSize;
this.maxAge = maxAge;
this.cache = new JobIdHistoryFileInfoMap();
}
public HistoryFileInfo addIfAbsent(HistoryFileInfo fileInfo) {
JobId jobId = fileInfo.getJobId();
if (LOG.isDebugEnabled()) {
LOG.debug("Adding " + jobId + " to job list cache with "
+ fileInfo.getJobIndexInfo());
}
HistoryFileInfo old = cache.putIfAbsent(jobId, fileInfo);
if (cache.size() > maxSize) {
//There is a race here, where more then one thread could be trying to
// remove entries. This could result in too many entries being removed
// from the cache. This is considered OK as the size of the cache
// should be rather large, and we would rather have performance over
// keeping the cache size exactly at the maximum.
Iterator<JobId> keys = cache.navigableKeySet().iterator();
long cutoff = System.currentTimeMillis() - maxAge;
// MAPREDUCE-6436: In order to reduce the number of logs written
// in case of a lot of move pending histories.
JobId firstInIntermediateKey = null;
int inIntermediateCount = 0;
JobId firstMoveFailedKey = null;
int moveFailedCount = 0;
while (cache.size() > maxSize && keys.hasNext()) {
JobId key = keys.next();
HistoryFileInfo firstValue = cache.get(key);
if (firstValue != null) {
if (firstValue.isMovePending()) {
if (firstValue.didMoveFail() &&
firstValue.jobIndexInfo.getFinishTime() <= cutoff) {
cache.remove(key);
// Now lets try to delete it
try {
firstValue.delete();
} catch (IOException e) {
LOG.error("Error while trying to delete history files" +
" that could not be moved to done.", e);
}
} else {
if (firstValue.didMoveFail()) {
if (moveFailedCount == 0) {
firstMoveFailedKey = key;
}
moveFailedCount += 1;
} else {
if (inIntermediateCount == 0) {
firstInIntermediateKey = key;
}
inIntermediateCount += 1;
}
}
} else {
cache.remove(key);
}
}
}
// Log output only for first jobhisotry in pendings to restrict
// the total number of logs.
if (inIntermediateCount > 0) {
LOG.warn("Waiting to remove IN_INTERMEDIATE state histories " +
"(e.g. " + firstInIntermediateKey + ") from JobListCache " +
"because it is not in done yet. Total count is " +
inIntermediateCount + ".");
}
if (moveFailedCount > 0) {
LOG.warn("Waiting to remove MOVE_FAILED state histories " +
"(e.g. " + firstMoveFailedKey + ") from JobListCache " +
"because it is not in done yet. Total count is " +
moveFailedCount + ".");
}
}
return old;
}
public void delete(HistoryFileInfo fileInfo) {
if (LOG.isDebugEnabled()) {
LOG.debug("Removing from cache " + fileInfo);
}
cache.remove(fileInfo.getJobId());
}
public Collection<HistoryFileInfo> values() {
return new ArrayList<HistoryFileInfo>(cache.values());
}
public HistoryFileInfo get(JobId jobId) {
return cache.get(jobId);
}
public boolean isFull() {
return cache.size() >= maxSize;
}
public int size() {
return cache.size();
}
}
/**
* This | JobListCache |
java | elastic__elasticsearch | x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/LatLonGeometryRelationVisitorTests.java | {
"start": 1952,
"end": 15196
} | class ____ extends ESTestCase {
public void testPoint() throws Exception {
doTestShapes(GeoTestUtil::nextPoint);
}
public void testLine() throws Exception {
doTestShapes(GeoTestUtil::nextLine);
}
public void testPolygon() throws Exception {
doTestShapes(GeoTestUtil::nextPolygon);
}
// Specific tests for known troublesome points from https://github.com/elastic/elasticsearch/issues/92151
public void testTroublesomePoints() throws Exception {
    // Indexed shape: a multipoint of edge-case coordinates (denormalized doubles,
    // pole latitude, Float.MIN_VALUE-scale longitudes).
    ArrayList<org.elasticsearch.geometry.Point> points = new ArrayList<>();
    points.add(new org.elasticsearch.geometry.Point(-6.6957112520498185, 5.337277253715181E-129));
    points.add(new org.elasticsearch.geometry.Point(0.0, 1.6938947866910307E-202));
    points.add(new org.elasticsearch.geometry.Point(-114.40977624485328, -37.484381576244864));
    points.add(new org.elasticsearch.geometry.Point(49.1828546738179, 23.813793855174865));
    points.add(new org.elasticsearch.geometry.Point(60.5683489131913, 90.0));
    points.add(new org.elasticsearch.geometry.Point(-79.65717776327665, -39.5));
    points.add(new org.elasticsearch.geometry.Point(1.401298464324817E-45, 0.0));
    MultiPoint geometry = new MultiPoint(points);
    GeoShapeValues.GeoShapeValue geoShapeValue = GeoTestUtils.geoShapeValue(geometry);
    GeometryDocValueReader reader = GeoTestUtils.geometryDocValueReader(geometry, CoordinateEncoder.GEO);
    // Query points that quantize onto members of the multipoint.
    Point[] troublesome = new Point[] {
        new Point(-6.404126347681029E-213, 0.0), // This point will match the last point in the multipoint
        new Point(0.0, 0.0) // this point will match the second point in the multipoint
    };
    for (Point point : troublesome) {
        doTestShape(geometry, geoShapeValue, reader, quantize(point));
    }
}
/** An indexed point and a query point with identical quantized coordinates must relate as INSIDE. */
public void testIdenticalPoint() throws Exception {
    // Quantize up front so indexed and query coordinates are bit-for-bit equal.
    double x = quantizeLon(1);
    double y = quantizeLat(1);
    org.elasticsearch.geometry.Point shape = new org.elasticsearch.geometry.Point(x, y);
    Point latLonGeometry = new Point(shape.getLat(), shape.getLon());
    GeoShapeValues.GeoShapeValue geoShapeValue = GeoTestUtils.geoShapeValue(shape);
    GeometryDocValueReader reader = GeoTestUtils.geometryDocValueReader(shape, CoordinateEncoder.GEO);
    GeoRelation relation = geoShapeValue.relate(latLonGeometry);
    assertThat("Identical points", relation, equalTo(GeoRelation.QUERY_INSIDE));
    // identicalPoint=true: for equal points CONTAINS and WITHIN both hold.
    doTestShape(shape, reader, latLonGeometry, relation, true);
}
/** A degenerate, near-zero-area polygon must not report CONTAINS for a line that merely crosses it. */
public void testVeryFlatPolygonDoesNotContainIntersectingLine() throws Exception {
    // Polygon is 0.002 degrees wide but only ~2e-10 degrees tall around the origin.
    double[] x = new double[] { -0.001, -0.001, 0.001, 0.001, -0.001 };
    double[] y = new double[] { 1e-10, 0, -1e-10, 0, 1e-10 };
    Geometry geometry = new org.elasticsearch.geometry.Polygon(new LinearRing(x, y));
    GeoShapeValues.GeoShapeValue geoShapeValue = GeoTestUtils.geoShapeValue(geometry);
    GeometryDocValueReader reader = GeoTestUtils.geometryDocValueReader(geometry, CoordinateEncoder.GEO);
    // A vertical line passing through the polygon at the origin.
    double[] lons = new double[] { 0.0, 0.0 };
    double[] lats = new double[] { 0.0, 0.001 };
    Line line = new Line(lats, lons);
    doTestShape(geometry, geoShapeValue, reader, line);
}
/**
 * Scales a square query polygon to be smaller than, identical to, and larger than the
 * indexed square, and checks the expected INSIDE / CONTAINS relations for each factor.
 */
public void testContainedPolygons() throws Exception {
    // Create simple rectangular polygon
    double[] x = new double[] { -1, 1, 1, -1, -1 };
    double[] y = new double[] { -1, -1, 1, 1, -1 };
    quantize(y, x);
    org.elasticsearch.geometry.Polygon shape = new org.elasticsearch.geometry.Polygon(new LinearRing(x, y));
    // Setup tests for contains, identical and within
    LinkedHashMap<Double, GeoRelation> tests = new LinkedHashMap<>();
    tests.put(0.5, GeoRelation.QUERY_INSIDE);
    tests.put(1.0, GeoRelation.QUERY_CONTAINS);
    tests.put(2.0, GeoRelation.QUERY_CONTAINS);
    for (Map.Entry<Double, GeoRelation> entry : tests.entrySet()) {
        double factor = entry.getKey();
        GeoRelation expected = entry.getValue();
        // Build the query polygon as a scaled, re-quantized copy of the shape.
        double[] lats = new double[y.length];
        double[] lons = new double[x.length];
        for (int i = 0; i < x.length; i++) {
            lats[i] = quantizeLat(y[i] * factor);
            lons[i] = quantizeLon(x[i] * factor);
        }
        Polygon latLonGeometry = new Polygon(lats, lons);
        boolean identical = factor == 1.0;
        // Assert that polygons are identical
        if (identical) {
            for (int i = 0; i < latLonGeometry.numPoints(); i++) {
                assertThat("Latitude[" + i + "]", latLonGeometry.getPolyLat(i), equalTo(shape.getPolygon().getLat(i)));
                assertThat("Longitude[" + i + "]", latLonGeometry.getPolyLon(i), equalTo(shape.getPolygon().getLon(i)));
            }
        }
        GeoShapeValues.GeoShapeValue geoShapeValue = GeoTestUtils.geoShapeValue(shape);
        GeometryDocValueReader reader = GeoTestUtils.geometryDocValueReader(shape, CoordinateEncoder.GEO);
        GeoRelation relation = geoShapeValue.relate(latLonGeometry);
        assertThat("Polygon scaled by " + factor, relation, equalTo(expected));
        // identicalPoint=false: polygons are never the identical-point special case.
        doTestShape(shape, reader, latLonGeometry, relation, false);
    }
}
/** Explicitly test failure found in <a href="https://github.com/elastic/elasticsearch/issues/98063">#98063</a> */
public void testOriginPointInMultipoint() throws Exception {
    // Multipoint with duplicated origin points plus a denormal-latitude neighbor.
    ArrayList<org.elasticsearch.geometry.Point> points = new ArrayList<>();
    points.add(new org.elasticsearch.geometry.Point(0.0, 0.0));
    points.add(new org.elasticsearch.geometry.Point(0.0, 0.0));
    points.add(new org.elasticsearch.geometry.Point(0.0, 1.401298464324817E-45));
    Geometry geometry = new MultiPoint(points);
    GeoShapeValues.GeoShapeValue geoShapeValue = GeoTestUtils.geoShapeValue(geometry);
    GeometryDocValueReader reader = GeoTestUtils.geometryDocValueReader(geometry, CoordinateEncoder.GEO);
    // Querying with the origin must be consistent across all four query relations.
    doTestShape(geometry, geoShapeValue, reader, new Point(0, 0));
}
/**
 * Indexes one random (non-circle) geometry and cross-checks 1000 random query
 * geometries produced by {@code supplier} against it.
 */
private <T extends LatLonGeometry> void doTestShapes(Supplier<T> supplier) throws Exception {
    Geometry indexed = GeometryNormalizer.apply(Orientation.CCW, GeometryTestUtils.randomGeometryWithoutCircle(0, false));
    GeoShapeValues.GeoShapeValue value = GeoTestUtils.geoShapeValue(indexed);
    GeometryDocValueReader docValues = GeoTestUtils.geometryDocValueReader(indexed, CoordinateEncoder.GEO);
    final int iterations = 1000;
    for (int iter = 0; iter < iterations; iter++) {
        // Quantize so the query uses the same encoded coordinates as the index.
        doTestShape(indexed, value, docValues, quantize(supplier.get()));
    }
}
/** Computes the doc-value based relation once, then delegates to the full cross-check. */
private void doTestShape(
    Geometry geometry,
    GeoShapeValues.GeoShapeValue geoShapeValue,
    GeometryDocValueReader reader,
    LatLonGeometry latLonGeometry
) throws Exception {
    GeoRelation relation = geoShapeValue.relate(latLonGeometry);
    doTestShape(geometry, reader, latLonGeometry, relation);
}
/** Delegates to the full check, detecting whether both sides are the identical point. */
private void doTestShape(Geometry geometry, GeometryDocValueReader reader, LatLonGeometry latLonGeometry, GeoRelation relation)
    throws Exception {
    boolean identical = isIdenticalPoint(geometry, latLonGeometry);
    doTestShape(geometry, reader, latLonGeometry, relation, identical);
}
/** True only when the query geometry is a point and coordinate-identical to the indexed geometry. */
private boolean isIdenticalPoint(Geometry geometry, LatLonGeometry latLonGeometry) {
    return latLonGeometry instanceof Point latLonPoint
        && geometry.visit(new TestIdenticalPointVisitor(latLonPoint));
}
/** True when the indexed geometry consists solely of points. */
private boolean pointsOnly(Geometry geometry) {
    boolean singlePoint = geometry instanceof org.elasticsearch.geometry.Point;
    boolean multiPoint = geometry instanceof org.elasticsearch.geometry.MultiPoint;
    return singlePoint || multiPoint;
}
/** True when the Lucene query geometry is a single point. */
private boolean pointsOnly(LatLonGeometry geometry) {
    // Only the Point query geometry counts as "points only" on the query side.
    return (geometry instanceof Point);
}
/**
 * Cross-checks the {@link GeoRelation} computed by GeoShapeValue.relate against the
 * outcome of Lucene's {@link Component2DVisitor} for all four query relations
 * (CONTAINS / INTERSECTS / DISJOINT / WITHIN). The expectations per relation:
 * INSIDE => contains+intersects; CROSSES => intersects only; CONTAINS => intersects+within;
 * otherwise disjoint only. Point-vs-point identity relaxes CONTAINS/WITHIN symmetry.
 */
private void doTestShape(
    Geometry geometry,
    GeometryDocValueReader reader,
    LatLonGeometry latLonGeometry,
    GeoRelation relation,
    boolean identicalPoint // When both geometries are points and identical, then CONTAINS==WITHIN
) throws Exception {
    boolean pointsOnly = pointsOnly(geometry) && pointsOnly(latLonGeometry);
    String description = "Geometry " + latLonGeometry + " relates to shape " + geometry.getClass().getSimpleName() + ": " + relation;
    Component2D component2D = LatLonGeometry.create(latLonGeometry);
    // Run one visitor per query relation over the same doc-value encoding.
    Component2DVisitor contains = visitQueryRelation(component2D, QueryRelation.CONTAINS, reader);
    Component2DVisitor intersects = visitQueryRelation(component2D, QueryRelation.INTERSECTS, reader);
    Component2DVisitor disjoint = visitQueryRelation(component2D, QueryRelation.DISJOINT, reader);
    Component2DVisitor within = visitQueryRelation(component2D, QueryRelation.WITHIN, reader);
    if (relation == GeoRelation.QUERY_INSIDE) {
        assertThat("CONTAINS/" + relation + ": " + description, contains.matches(), equalTo(true));
        assertThat("INTERSECTS/" + relation + ": " + description, intersects.matches(), equalTo(true));
        assertThat("DISJOINT/" + relation + ": " + description, disjoint.matches(), equalTo(false));
        assertThat("WITHIN/" + relation + ": " + description, within.matches(), equalTo(identicalPoint));
    } else if (relation == GeoRelation.QUERY_CROSSES) {
        if (pointsOnly == false) {
            // When we have point comparisons, CROSSES can also allow CONTAINS
            assertThat("CONTAINS/" + relation + ": " + description, contains.matches(), equalTo(false));
        }
        assertThat("INTERSECTS/" + relation + ": " + description, intersects.matches(), equalTo(true));
        assertThat("DISJOINT/" + relation + ": " + description, disjoint.matches(), equalTo(false));
        assertThat("WITHIN/" + relation + ": " + description, within.matches(), equalTo(false));
    } else if (relation == GeoRelation.QUERY_CONTAINS) {
        assertThat("CONTAINS/" + relation + ": " + description, contains.matches(), equalTo(identicalPoint));
        assertThat("INTERSECTS/" + relation + ": " + description, intersects.matches(), equalTo(true));
        assertThat("DISJOINT/" + relation + ": " + description, disjoint.matches(), equalTo(false));
        assertThat("WITHIN/" + relation + ": " + description, within.matches(), equalTo(true));
    } else {
        // No relation reported: all four visitors must agree the shapes are disjoint.
        assertThat("CONTAINS/" + relation + ": " + description, contains.matches(), equalTo(false));
        assertThat("INTERSECTS/" + relation + ": " + description, intersects.matches(), equalTo(false));
        assertThat("DISJOINT/" + relation + ": " + description, disjoint.matches(), equalTo(true));
        assertThat("WITHIN/" + relation + ": " + description, within.matches(), equalTo(false));
    }
}
/**
 * Runs a {@link Component2DVisitor} for the given query relation over the doc-value
 * encoded geometry and returns the visitor so callers can inspect its match flag.
 */
private Component2DVisitor visitQueryRelation(Component2D component2D, QueryRelation queryRelation, GeometryDocValueReader reader)
    throws IOException {
    // Renamed from `contains`: the visitor serves whichever relation was requested.
    Component2DVisitor visitor = Component2DVisitor.getVisitor(component2D, queryRelation, CoordinateEncoder.GEO);
    reader.visit(visitor);
    return visitor;
}
/** Dispatches quantization on the concrete Lucene geometry type used by these tests. */
private LatLonGeometry quantize(LatLonGeometry geometry) {
    if (geometry instanceof Point point) {
        return quantize(point);
    }
    if (geometry instanceof Line line) {
        return quantize(line);
    }
    if (geometry instanceof Polygon polygon) {
        return quantize(polygon);
    }
    // Circles/rectangles are never generated here.
    throw new IllegalArgumentException("Unimplemented: quantize(" + geometry.getClass().getSimpleName() + ")");
}
/** Returns a copy of the point snapped to its encoded/decoded coordinates. */
private Point quantize(Point point) {
    double lat = quantizeLat(point.getLat());
    double lon = quantizeLon(point.getLon());
    return new Point(lat, lon);
}
/** Returns a line with every vertex snapped to its encoded/decoded coordinates. */
private Line quantize(Line line) {
    // NOTE(review): quantize mutates the arrays in place — assumes getLats()/getLons()
    // return arrays safe to modify here; confirm against the Lucene Line contract.
    double[] lats = line.getLats();
    double[] lons = line.getLons();
    quantize(lats, lons);
    return new Line(lats, lons);
}
/** Returns a polygon with shell and holes snapped to their encoded/decoded coordinates. */
private Polygon quantize(Polygon polygon) {
    // Quantize each hole ring first, then the outer shell.
    Polygon[] holes = polygon.getHoles();
    for (int h = 0; h < holes.length; h++) {
        holes[h] = quantize(holes[h]);
    }
    double[] lats = polygon.getPolyLats();
    double[] lons = polygon.getPolyLons();
    quantize(lats, lons);
    return new Polygon(lats, lons, holes);
}
/** Snaps every coordinate pair to its encoded/decoded value, mutating the arrays in place. */
private void quantize(double[] lats, double[] lons) {
    for (int idx = 0; idx < lons.length; idx++) {
        lats[idx] = quantizeLat(lats[idx]);
        lons[idx] = quantizeLon(lons[idx]);
    }
}
/** Round-trips a latitude through Lucene's integer encoding. */
private double quantizeLat(double lat) {
    int encoded = encodeLatitude(lat);
    return decodeLatitude(encoded);
}
/** Round-trips a longitude through Lucene's integer encoding. */
private double quantizeLon(double lon) {
    int encoded = encodeLongitude(lon);
    return decodeLongitude(encoded);
}
/**
* This visitor returns false if any point in the geometry is not identical to the provided point.
* Identical means that the encoded lat and lon values are the same.
*/
private static | LatLonGeometryRelationVisitorTests |
java | apache__flink | flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/data/BinaryStringDataTest.java | {
"start": 3916,
"end": 24904
} | enum ____ {
ONE_SEG,
MULTI_SEGS,
STRING,
RANDOM
}
/**
 * Builds a {@link BinaryStringData} for {@code str} laid out according to the current
 * test {@code Mode}: backed by the java String (lazy), by a single memory segment, or
 * split across two memory segments with a random leading offset. RANDOM picks one of
 * the other three modes per call, so every layout code path gets exercised.
 */
private BinaryStringData fromString(String str) {
    BinaryStringData string = BinaryStringData.fromString(str);
    Mode mode = this.mode;
    if (mode == Mode.RANDOM) {
        // Choose a concrete layout at random for this invocation.
        int rnd = new Random().nextInt(3);
        if (rnd == 0) {
            mode = Mode.ONE_SEG;
        } else if (rnd == 1) {
            mode = Mode.MULTI_SEGS;
        } else if (rnd == 2) {
            mode = Mode.STRING;
        }
    }
    if (mode == Mode.STRING) {
        // Leave the value in its lazy, java-String-backed form.
        return string;
    }
    if (mode == Mode.ONE_SEG || string.getSizeInBytes() < 2) {
        // Too short to split, or single segment requested: materialize into one segment.
        string.ensureMaterialized();
        return string;
    } else {
        // Split the UTF-8 bytes across two equally sized segments; the first segment
        // starts with `pad` unused bytes so the string begins at a non-zero offset.
        int numBytes = string.getSizeInBytes();
        int pad = new Random().nextInt(5);
        int numBytesWithPad = numBytes + pad;
        int segSize = numBytesWithPad / 2 + 1;
        byte[] bytes1 = new byte[segSize];
        byte[] bytes2 = new byte[segSize];
        if (segSize - pad > 0 && numBytes >= segSize - pad) {
            // Copy the first (segSize - pad) bytes after the padding prefix.
            string.getSegments()[0].get(0, bytes1, pad, segSize - pad);
        }
        string.getSegments()[0].get(segSize - pad, bytes2, 0, numBytes - segSize + pad);
        return BinaryStringData.fromAddress(
                new MemorySegment[] {
                    MemorySegmentFactory.wrap(bytes1), MemorySegmentFactory.wrap(bytes2)
                },
                pad,
                numBytes);
    }
}
/**
 * Asserts core invariants for one value: construction from a String and from its UTF-8
 * bytes yield equal values with the expected char count {@code len}, identical string
 * form, matching hash/ordering, and reflexive contains/startsWith/endsWith.
 */
private void checkBasic(String str, int len) {
    BinaryStringData s1 = fromString(str);
    BinaryStringData s2 = fromBytes(str.getBytes(StandardCharsets.UTF_8));
    assertThat(len).isEqualTo(s1.numChars());
    assertThat(len).isEqualTo(s2.numChars());
    assertThat(str).isEqualTo(s1.toString());
    assertThat(str).isEqualTo(s2.toString());
    // Equal values must agree on equals, hashCode, and compareTo.
    assertThat(s2).isEqualTo(s1);
    assertThat(s2.hashCode()).isEqualTo(s1.hashCode());
    assertThat(s1.compareTo(s2)).isEqualTo(0);
    // Every string contains / starts with / ends with itself.
    assertThat(s1.contains(s2)).isTrue();
    assertThat(s2.contains(s1)).isTrue();
    assertThat(s1.startsWith(s1)).isTrue();
    assertThat(s1.endsWith(s1)).isTrue();
}
/** Exercises checkBasic over ASCII, CJK, and 1..4-byte UTF-8 code points. */
@TestTemplate
void basicTest() {
    checkBasic("", 0);
    checkBasic(",", 1);
    checkBasic("hello", 5);
    checkBasic("hello world", 11);
    checkBasic("Flink中文社区", 9);
    checkBasic("中 文 社 区", 7);
    checkBasic("¡", 1); // 2 bytes char
    checkBasic("ку", 2); // 2 * 2 bytes chars
    checkBasic("︽﹋%", 3); // 3 * 3 bytes chars
    checkBasic("\uD83E\uDD19", 1); // 4 bytes char
}
/** The canonical empty string equals both construction paths and reports zero length. */
@TestTemplate
void emptyStringTest() {
    assertThat(empty.numChars()).isEqualTo(0);
    assertThat(empty.getSizeInBytes()).isEqualTo(0);
    assertThat(fromString("")).isEqualTo(empty);
    assertThat(fromBytes(new byte[0])).isEqualTo(empty);
}
/**
 * Checks lexicographic byte-wise ordering (case-sensitive, multi-byte aware) and that
 * sort-normalized key prefixes produce the same ordering as compareTo.
 */
@TestTemplate
void compareTo() {
    assertThat(fromString(" ").compareTo(blankString(3))).isEqualTo(0);
    assertThat(fromString("").compareTo(fromString("a"))).isLessThan(0);
    assertThat(fromString("abc").compareTo(fromString("ABC"))).isGreaterThan(0);
    assertThat(fromString("abc0").compareTo(fromString("abc"))).isGreaterThan(0);
    assertThat(fromString("abcabcabc").compareTo(fromString("abcabcabc"))).isEqualTo(0);
    assertThat(fromString("aBcabcabc").compareTo(fromString("Abcabcabc"))).isGreaterThan(0);
    assertThat(fromString("Abcabcabc").compareTo(fromString("abcabcabC"))).isLessThan(0);
    assertThat(fromString("abcabcabc").compareTo(fromString("abcabcabC"))).isGreaterThan(0);
    // Multi-byte UTF-8 sequences compare after ASCII by byte value.
    assertThat(fromString("abc").compareTo(fromString("世界"))).isLessThan(0);
    assertThat(fromString("你好").compareTo(fromString("世界"))).isGreaterThan(0);
    assertThat(fromString("你好123").compareTo(fromString("你好122"))).isGreaterThan(0);
    // Normalized sort keys written into raw segments must order consistently.
    MemorySegment segment1 = MemorySegmentFactory.allocateUnpooledSegment(1024);
    MemorySegment segment2 = MemorySegmentFactory.allocateUnpooledSegment(1024);
    SortUtil.putStringNormalizedKey(fromString("abcabcabc"), segment1, 0, 9);
    SortUtil.putStringNormalizedKey(fromString("abcabcabC"), segment2, 0, 9);
    assertThat(segment1.compare(segment2, 0, 0, 9)).isGreaterThan(0);
    SortUtil.putStringNormalizedKey(fromString("abcab"), segment1, 0, 9);
    assertThat(segment1.compare(segment2, 0, 0, 9)).isLessThan(0);
}
/**
 * Compares strings whose bytes span multiple memory segments at various offsets,
 * covering: both operands multi-segment, equal-length prefixes, comparisons that
 * must cross into the second segment, and multi-segment vs single-segment.
 */
@TestTemplate
void testMultiSegments() {
    // prepare
    MemorySegment[] segments1 = new MemorySegment[2];
    segments1[0] = MemorySegmentFactory.wrap(new byte[10]);
    segments1[1] = MemorySegmentFactory.wrap(new byte[10]);
    segments1[0].put(5, "abcde".getBytes(UTF_8), 0, 5);
    segments1[1].put(0, "aaaaa".getBytes(UTF_8), 0, 5);
    MemorySegment[] segments2 = new MemorySegment[2];
    segments2[0] = MemorySegmentFactory.wrap(new byte[5]);
    segments2[1] = MemorySegmentFactory.wrap(new byte[5]);
    segments2[0].put(0, "abcde".getBytes(UTF_8), 0, 5);
    segments2[1].put(0, "b".getBytes(UTF_8), 0, 1);
    // test go ahead both
    BinaryStringData binaryString1 = BinaryStringData.fromAddress(segments1, 5, 10);
    BinaryStringData binaryString2 = BinaryStringData.fromAddress(segments2, 0, 6);
    assertThat(binaryString1.toString()).isEqualTo("abcdeaaaaa");
    assertThat(binaryString2.toString()).isEqualTo("abcdeb");
    assertThat(binaryString1.compareTo(binaryString2)).isEqualTo(-1);
    // test needCompare == len
    binaryString1 = BinaryStringData.fromAddress(segments1, 5, 5);
    binaryString2 = BinaryStringData.fromAddress(segments2, 0, 5);
    assertThat(binaryString1.toString()).isEqualTo("abcde");
    assertThat(binaryString2.toString()).isEqualTo("abcde");
    assertThat(binaryString1.compareTo(binaryString2)).isEqualTo(0);
    // test find the first segment of this string
    binaryString1 = BinaryStringData.fromAddress(segments1, 10, 5);
    binaryString2 = BinaryStringData.fromAddress(segments2, 0, 5);
    assertThat(binaryString1.toString()).isEqualTo("aaaaa");
    assertThat(binaryString2.toString()).isEqualTo("abcde");
    assertThat(binaryString1.compareTo(binaryString2)).isEqualTo(-1);
    assertThat(binaryString2.compareTo(binaryString1)).isEqualTo(1);
    // test go ahead single
    segments2 = new MemorySegment[] {MemorySegmentFactory.wrap(new byte[10])};
    segments2[0].put(4, "abcdeb".getBytes(UTF_8), 0, 6);
    binaryString1 = BinaryStringData.fromAddress(segments1, 5, 10);
    binaryString2 = BinaryStringData.fromAddress(segments2, 4, 6);
    assertThat(binaryString1.toString()).isEqualTo("abcdeaaaaa");
    assertThat(binaryString2.toString()).isEqualTo("abcdeb");
    assertThat(binaryString1.compareTo(binaryString2)).isEqualTo(-1);
    assertThat(binaryString2.compareTo(binaryString1)).isEqualTo(1);
}
/** concat is null-propagating: any null operand makes the whole result null. */
@TestTemplate
void concatTest() {
    assertThat(concat()).isEqualTo(empty);
    assertThat(concat((BinaryStringData) null)).isNull();
    assertThat(concat(empty)).isEqualTo(empty);
    assertThat(concat(fromString("ab"))).isEqualTo(fromString("ab"));
    assertThat(concat(fromString("a"), fromString("b"))).isEqualTo(fromString("ab"));
    assertThat(concat(fromString("a"), fromString("b"), fromString("c")))
        .isEqualTo(fromString("abc"));
    // A single null anywhere in the argument list nulls the result.
    assertThat(concat(fromString("a"), null, fromString("c"))).isNull();
    assertThat(concat(fromString("a"), null, null)).isNull();
    assertThat(concat(null, null, null)).isNull();
    assertThat(concat(fromString("数据"), fromString("砖头"))).isEqualTo(fromString("数据砖头"));
}
/** concatWs joins non-null operands with a separator; null operands are skipped. */
@TestTemplate
void concatWsTest() {
    // Returns null if the separator is null, regardless of the other arguments.
    assertThat(concatWs(null, (BinaryStringData) null)).isNull();
    assertThat(concatWs(null, fromString("a"))).isNull();
    // If the separator is not null, concatWs skips all null inputs and never returns null.
    BinaryStringData sep = fromString("哈哈");
    assertThat(concatWs(sep, empty)).isEqualTo(empty);
    assertThat(concatWs(sep, fromString("ab"))).isEqualTo(fromString("ab"));
    assertThat(concatWs(sep, fromString("a"), fromString("b"))).isEqualTo(fromString("a哈哈b"));
    assertThat(concatWs(sep, fromString("a"), fromString("b"), fromString("c")))
        .isEqualTo(fromString("a哈哈b哈哈c"));
    assertThat(concatWs(sep, fromString("a"), null, fromString("c")))
        .isEqualTo(fromString("a哈哈c"));
    assertThat(concatWs(sep, fromString("a"), null, null)).isEqualTo(fromString("a"));
    assertThat(concatWs(sep, null, null, null)).isEqualTo(empty);
    assertThat(concatWs(sep, fromString("数据"), fromString("砖头")))
        .isEqualTo(fromString("数据哈哈砖头"));
}
/** contains: substring membership for ASCII and multi-byte text; empty contains empty. */
@TestTemplate
void contains() {
    assertThat(empty.contains(empty)).isTrue();
    assertThat(fromString("hello").contains(fromString("ello"))).isTrue();
    assertThat(fromString("hello").contains(fromString("vello"))).isFalse();
    // A needle longer than the haystack is never contained.
    assertThat(fromString("hello").contains(fromString("hellooo"))).isFalse();
    assertThat(fromString("大千世界").contains(fromString("千世界"))).isTrue();
    assertThat(fromString("大千世界").contains(fromString("世千"))).isFalse();
    assertThat(fromString("大千世界").contains(fromString("大千世界好"))).isFalse();
}
/** startsWith: prefix matching for ASCII and multi-byte text; empty is a prefix of empty. */
@TestTemplate
void startsWith() {
    assertThat(empty.startsWith(empty)).isTrue();
    assertThat(fromString("hello").startsWith(fromString("hell"))).isTrue();
    assertThat(fromString("hello").startsWith(fromString("ell"))).isFalse();
    // A prefix longer than the string itself never matches.
    assertThat(fromString("hello").startsWith(fromString("hellooo"))).isFalse();
    assertThat(fromString("数据砖头").startsWith(fromString("数据"))).isTrue();
    assertThat(fromString("大千世界").startsWith(fromString("千"))).isFalse();
    assertThat(fromString("大千世界").startsWith(fromString("大千世界好"))).isFalse();
}
/** endsWith: suffix matching for ASCII and multi-byte text; empty is a suffix of empty. */
@TestTemplate
void endsWith() {
    assertThat(empty.endsWith(empty)).isTrue();
    assertThat(fromString("hello").endsWith(fromString("ello"))).isTrue();
    assertThat(fromString("hello").endsWith(fromString("ellov"))).isFalse();
    // A suffix longer than the string itself never matches.
    assertThat(fromString("hello").endsWith(fromString("hhhello"))).isFalse();
    assertThat(fromString("大千世界").endsWith(fromString("世界"))).isTrue();
    assertThat(fromString("大千世界").endsWith(fromString("世"))).isFalse();
    assertThat(fromString("数据砖头").endsWith(fromString("我的数据砖头"))).isFalse();
}
/** substring(begin, end): character-index (not byte-index) slicing, end exclusive and clamped. */
@TestTemplate
void substring() {
    assertThat(fromString("hello").substring(0, 0)).isEqualTo(empty);
    assertThat(fromString("hello").substring(1, 3)).isEqualTo(fromString("el"));
    assertThat(fromString("数据砖头").substring(0, 1)).isEqualTo(fromString("数"));
    assertThat(fromString("数据砖头").substring(1, 3)).isEqualTo(fromString("据砖"));
    // End index beyond the string is clamped to its length.
    assertThat(fromString("数据砖头").substring(3, 5)).isEqualTo(fromString("头"));
    assertThat(fromString("ߵ梷").substring(0, 2)).isEqualTo(fromString("ߵ梷"))
;
}
/**
 * trim / trimLeft / trimRight, with and without an explicit set of characters to strip.
 * Only ASCII space (0x20) is trimmed by default; control characters below 0x20 are kept.
 */
@TestTemplate
void trims() {
    assertThat(fromString("1").trim()).isEqualTo(fromString("1"));
    assertThat(fromString(" hello ").trim()).isEqualTo(fromString("hello"));
    assertThat(trimLeft(fromString(" hello "))).isEqualTo(fromString("hello "))
;
    assertThat(trimRight(fromString(" hello "))).isEqualTo(fromString(" hello"));
    // Explicit (leading, trailing) flags select which side to trim.
    assertThat(trim(fromString(" hello "), false, false, fromString(" ")))
        .isEqualTo(fromString(" hello "));
    assertThat(trim(fromString(" hello "), true, true, fromString(" ")))
        .isEqualTo(fromString("hello"));
    assertThat(trim(fromString(" hello "), true, false, fromString(" ")))
        .isEqualTo(fromString("hello "));
    assertThat(trim(fromString(" hello "), false, true, fromString(" ")))
        .isEqualTo(fromString(" hello"));
    assertThat(trim(fromString("xxxhellox"), true, true, fromString("x")))
        .isEqualTo(fromString("hello"));
    // The trim-character argument is a set: every listed char is stripped.
    assertThat(trim(fromString("xxxhellox"), fromString("xoh"))).isEqualTo(fromString("ell"));
    assertThat(trimLeft(fromString("xxxhellox"), fromString("xoh")))
        .isEqualTo(fromString("ellox"));
    assertThat(trimRight(fromString("xxxhellox"), fromString("xoh")))
        .isEqualTo(fromString("xxxhell"));
    assertThat(empty.trim()).isEqualTo(empty);
    assertThat(fromString(" ").trim()).isEqualTo(empty);
    assertThat(trimLeft(fromString(" "))).isEqualTo(empty);
    assertThat(trimRight(fromString(" "))).isEqualTo(empty);
    assertThat(fromString(" 数据砖头 ").trim()).isEqualTo(fromString("数据砖头"));
    assertThat(trimLeft(fromString(" 数据砖头 "))).isEqualTo(fromString("数据砖头 "));
    assertThat(trimRight(fromString(" 数据砖头 "))).isEqualTo(fromString(" 数据砖头"));
    assertThat(fromString("数据砖头").trim()).isEqualTo(fromString("数据砖头"));
    assertThat(trimLeft(fromString("数据砖头"))).isEqualTo(fromString("数据砖头"));
    assertThat(trimRight(fromString("数据砖头"))).isEqualTo(fromString("数据砖头"));
    assertThat(trim(fromString("年年岁岁, 岁岁年年"), fromString("年岁 "))).isEqualTo(fromString(","));
    assertThat(trimLeft(fromString("年年岁岁, 岁岁年年"), fromString("年岁 ")))
        .isEqualTo(fromString(", 岁岁年年"));
    assertThat(trimRight(fromString("年年岁岁, 岁岁年年"), fromString("年岁 ")))
        .isEqualTo(fromString("年年岁岁,"));
    // Characters below 0x20 are NOT whitespace for trim(): nothing is removed.
    char[] charsLessThan0x20 = new char[10];
    Arrays.fill(charsLessThan0x20, (char) (' ' - 1));
    String stringStartingWithSpace =
        new String(charsLessThan0x20) + "hello" + new String(charsLessThan0x20);
    assertThat(fromString(stringStartingWithSpace).trim())
        .isEqualTo(fromString(stringStartingWithSpace));
    assertThat(trimLeft(fromString(stringStartingWithSpace)))
        .isEqualTo(fromString(stringStartingWithSpace));
    assertThat(trimRight(fromString(stringStartingWithSpace)))
        .isEqualTo(fromString(stringStartingWithSpace));
}
/** SQL SUBSTRING semantics: 1-based start, negative start counts from the end, negative length is null. */
@TestTemplate
void testSqlSubstring() {
    assertThat(substringSQL(fromString("hello"), 2)).isEqualTo(fromString("ello"));
    assertThat(substringSQL(fromString("hello"), 2, 3)).isEqualTo(fromString("ell"));
    assertThat(substringSQL(empty, 2, 3)).isEqualTo(empty);
    // Negative length yields SQL NULL.
    assertThat(substringSQL(fromString("hello"), 0, -1)).isNull();
    assertThat(substringSQL(fromString("hello"), 10)).isEqualTo(empty);
    assertThat(substringSQL(fromString("hello"), 0, 3)).isEqualTo(fromString("hel"));
    // Negative start counts back from the end of the string.
    assertThat(substringSQL(fromString("hello"), -2, 3)).isEqualTo(fromString("lo"));
    assertThat(substringSQL(fromString("hello"), -100, 3)).isEqualTo(empty);
}
/** reverse() flips whole code points, handling ASCII, CJK, mixed text, and empty input. */
@TestTemplate
void reverseTest() {
    assertThat(reverse(empty)).isEqualTo(empty);
    assertThat(reverse(fromString("hello"))).isEqualTo(fromString("olleh"));
    assertThat(reverse(fromString("中国"))).isEqualTo(fromString("国中"));
    assertThat(reverse(fromString("hello, 中国"))).isEqualTo(fromString("国中 ,olleh"));
}
/** indexOf(needle, fromIndex): character-based search; empty needle matches at 0, miss is -1. */
@TestTemplate
void indexOf() {
    assertThat(empty.indexOf(empty, 0)).isEqualTo(0);
    assertThat(empty.indexOf(fromString("l"), 0)).isEqualTo(-1);
    assertThat(fromString("hello").indexOf(empty, 0)).isEqualTo(0);
    assertThat(fromString("hello").indexOf(fromString("l"), 0)).isEqualTo(2);
    // fromIndex skips earlier matches.
    assertThat(fromString("hello").indexOf(fromString("l"), 3)).isEqualTo(3);
    assertThat(fromString("hello").indexOf(fromString("a"), 0)).isEqualTo(-1);
    assertThat(fromString("hello").indexOf(fromString("ll"), 0)).isEqualTo(2);
    assertThat(fromString("hello").indexOf(fromString("ll"), 4)).isEqualTo(-1);
    // Indices are character positions, not byte offsets, for multi-byte text.
    assertThat(fromString("数据砖头").indexOf(fromString("据砖"), 0)).isEqualTo(1);
    assertThat(fromString("数据砖头").indexOf(fromString("数"), 3)).isEqualTo(-1);
    assertThat(fromString("数据砖头").indexOf(fromString("数"), 0)).isEqualTo(0);
    assertThat(fromString("数据砖头").indexOf(fromString("头"), 0)).isEqualTo(3);
}
/**
 * String-to-number conversions: signed byte/short/int/long parsing, truncation of
 * decimal strings to integers, NumberFormatException on malformed input, and the same
 * conversions when the strings are embedded in a {@code BinaryRowData}.
 */
@TestTemplate
void testToNumeric() {
    // Test to integer.
    assertThat(toByte(fromString("123"))).isEqualTo(Byte.parseByte("123"));
    assertThat(toByte(fromString("+123"))).isEqualTo(Byte.parseByte("123"));
    assertThat(toByte(fromString("-123"))).isEqualTo(Byte.parseByte("-123"));
    assertThat(toShort(fromString("123"))).isEqualTo(Short.parseShort("123"));
    assertThat(toShort(fromString("+123"))).isEqualTo(Short.parseShort("123"));
    assertThat(toShort(fromString("-123"))).isEqualTo(Short.parseShort("-123"));
    assertThat(toInt(fromString("123"))).isEqualTo(Integer.parseInt("123"));
    assertThat(toInt(fromString("+123"))).isEqualTo(Integer.parseInt("123"));
    assertThat(toInt(fromString("-123"))).isEqualTo(Integer.parseInt("-123"));
    assertThat(toLong(fromString("1234567890"))).isEqualTo(Long.parseLong("1234567890"));
    assertThat(toLong(fromString("+1234567890"))).isEqualTo(Long.parseLong("+1234567890"));
    assertThat(toLong(fromString("-1234567890"))).isEqualTo(Long.parseLong("-1234567890"));
    // Test decimal string to integer: the fractional part is truncated.
    assertThat(toInt(fromString("123.456789"))).isEqualTo(Integer.parseInt("123"));
    assertThat(toLong(fromString("123.456789"))).isEqualTo(Long.parseLong("123"));
    // Test negative cases.
    assertThatThrownBy(() -> toInt(fromString("1a3.456789")))
        .isInstanceOf(NumberFormatException.class);
    assertThatThrownBy(() -> toInt(fromString("123.a56789")))
        .isInstanceOf(NumberFormatException.class);
    // Test composite in BinaryRowData.
    BinaryRowData row = new BinaryRowData(20);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    writer.writeString(0, BinaryStringData.fromString("1"));
    writer.writeString(1, BinaryStringData.fromString("123"));
    writer.writeString(2, BinaryStringData.fromString("12345"));
    writer.writeString(3, BinaryStringData.fromString("123456789"));
    writer.complete();
    assertThat(toByte(((BinaryStringData) row.getString(0)))).isEqualTo(Byte.parseByte("1"));
    assertThat(toShort(((BinaryStringData) row.getString(1))))
        .isEqualTo(Short.parseShort("123"));
    assertThat(toInt(((BinaryStringData) row.getString(2))))
        .isEqualTo(Integer.parseInt("12345"));
    assertThat(toLong(((BinaryStringData) row.getString(3))))
        .isEqualTo(Long.parseLong("123456789"));
}
/**
 * Case conversion: CJK text and symbols are case-invariant, ASCII letters convert both
 * ways, and the same holds for strings embedded in a {@code BinaryRowData}.
 */
@TestTemplate
void testToUpperLowerCase() {
    assertThat(fromString("我是中国人").toLowerCase()).isEqualTo(fromString("我是中国人"));
    assertThat(fromString("我是中国人").toUpperCase()).isEqualTo(fromString("我是中国人"));
    assertThat(fromString("aBcDeFg").toLowerCase()).isEqualTo(fromString("abcdefg"));
    assertThat(fromString("aBcDeFg").toUpperCase()).isEqualTo(fromString("ABCDEFG"));
    assertThat(fromString("!@#$%^*").toLowerCase()).isEqualTo(fromString("!@#$%^*"));
    // Fixed: this assertion previously duplicated toLowerCase(), leaving toUpperCase()
    // unchecked for the symbols-only string; symbols are case-invariant either way.
    assertThat(fromString("!@#$%^*").toUpperCase()).isEqualTo(fromString("!@#$%^*"));
    // Test composite in BinaryRowData.
    BinaryRowData row = new BinaryRowData(20);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    writer.writeString(0, BinaryStringData.fromString("a"));
    writer.writeString(1, BinaryStringData.fromString("我是中国人"));
    writer.writeString(3, BinaryStringData.fromString("aBcDeFg"));
    writer.writeString(5, BinaryStringData.fromString("!@#$%^*"));
    writer.complete();
    assertThat(((BinaryStringData) row.getString(0)).toUpperCase()).isEqualTo(fromString("A"));
    assertThat(((BinaryStringData) row.getString(1)).toUpperCase())
        .isEqualTo(fromString("我是中国人"));
    assertThat(((BinaryStringData) row.getString(1)).toLowerCase())
        .isEqualTo(fromString("我是中国人"));
    assertThat(((BinaryStringData) row.getString(3)).toUpperCase())
        .isEqualTo(fromString("ABCDEFG"));
    assertThat(((BinaryStringData) row.getString(3)).toLowerCase())
        .isEqualTo(fromString("abcdefg"));
    assertThat(((BinaryStringData) row.getString(5)).toUpperCase())
        .isEqualTo(fromString("!@#$%^*"));
    assertThat(((BinaryStringData) row.getString(5)).toLowerCase())
        .isEqualTo(fromString("!@#$%^*"));
}
@TestTemplate
void testToDecimal() {
| Mode |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/transport/TransportService.java | {
"start": 51882,
"end": 59376
} | class ____ will be used to construct new instances for streaming
/**
 * Registers a new request handler for the given action.
 *
 * @param action The action name the handler is registered under
 * @param executor The executor the request handling will be executed on
 * @param forceExecution Force execution on the executor queue and never reject it
 * @param canTripCircuitBreaker Check the request size and raise an exception in case the limit is breached.
 * @param requestReader Reader used to construct new request instances for streaming
 * @param handler The handler itself that implements the request handling
 */
public <Request extends TransportRequest> void registerRequestHandler(
    String action,
    Executor executor,
    boolean forceExecution,
    boolean canTripCircuitBreaker,
    Writeable.Reader<Request> requestReader,
    TransportRequestHandler<Request> handler
) {
    validateActionName(action);
    // Give any installed interceptor a chance to wrap the handler (e.g. for security).
    handler = interceptor.interceptHandler(action, executor, forceExecution, handler);
    RequestHandlerRegistry<Request> reg = new RequestHandlerRegistry<>(
        action,
        requestReader,
        taskManager,
        handler,
        executor,
        forceExecution,
        canTripCircuitBreaker
    );
    transport.registerRequestHandler(reg);
}
/**
 * Called by the {@link Transport} implementation when an incoming request arrives but before
 * any parsing of it has happened (with the exception of the requestId and action).
 */
@Override
public void onRequestReceived(long requestId, String action) {
    // Refuse traffic until the service has been started and accepts incoming requests.
    if (handleIncomingRequests == false) {
        throw new TransportNotReadyException();
    }
    boolean shouldTrace = tracerLog.isTraceEnabled() && shouldTraceAction(action);
    if (shouldTrace) {
        tracerLog.trace("[{}][{}] received request", requestId, action);
    }
}
/**
 * Called by the {@link Transport} implementation once a request has been sent.
 */
@Override
public void onRequestSent(
    DiscoveryNode node,
    long requestId,
    String action,
    TransportRequest request,
    TransportRequestOptions options
) {
    boolean shouldTrace = tracerLog.isTraceEnabled() && shouldTraceAction(action);
    if (shouldTrace) {
        tracerLog.trace("[{}][{}] sent to [{}] (timeout: [{}])", requestId, action, node, options.timeout());
    }
}
@Override
@SuppressWarnings("rawtypes")
public void onResponseReceived(long requestId, Transport.ResponseContext holder) {
    // No context means the handler was already removed — most likely it timed out.
    if (holder == null) {
        checkForTimeout(requestId);
        return;
    }
    if (tracerLog.isTraceEnabled() && shouldTraceAction(holder.action())) {
        tracerLog.trace("[{}][{}] received response from [{}]", requestId, holder.action(), holder.connection().getNode());
    }
}
/**
 * Called by the {@link Transport} implementation once a response was sent to calling node.
 */
@Override
public void onResponseSent(long requestId, String action) {
    boolean shouldTrace = tracerLog.isTraceEnabled() && shouldTraceAction(action);
    if (shouldTrace) {
        tracerLog.trace("[{}][{}] sent response", requestId, action);
    }
}
/**
 * Called by the {@link Transport} implementation after an exception was sent as a response
 * to an incoming request.
 */
@Override
public void onResponseSent(long requestId, String action, Exception e) {
    boolean shouldTrace = tracerLog.isTraceEnabled() && shouldTraceAction(action);
    if (shouldTrace) {
        tracerLog.trace(() -> format("[%s][%s] sent error response", requestId, action), e);
    }
}
public RequestHandlerRegistry<? extends TransportRequest> getRequestHandler(String action) {
return transport.getRequestHandlers().getHandler(action);
}
private void checkForTimeout(long requestId) {
// lets see if its in the timeout holder, but sync on mutex to make sure any ongoing timeout handling has finished
final DiscoveryNode sourceNode;
final String action;
assert responseHandlers.contains(requestId) == false;
TimeoutInfoHolder timeoutInfoHolder = timeoutInfoHandlers.remove(requestId);
if (timeoutInfoHolder != null) {
long time = threadPool.relativeTimeInMillis();
long sentMs = time - timeoutInfoHolder.sentTime();
long timedOutMs = time - timeoutInfoHolder.timeoutTime();
logger.warn(
"Received response for a request that has timed out, sent [{}/{}ms] ago, timed out [{}/{}ms] ago, "
+ "action [{}], node [{}], id [{}]",
TimeValue.timeValueMillis(sentMs),
sentMs,
TimeValue.timeValueMillis(timedOutMs),
timedOutMs,
timeoutInfoHolder.action(),
timeoutInfoHolder.node(),
requestId
);
action = timeoutInfoHolder.action();
sourceNode = timeoutInfoHolder.node();
} else {
logger.warn("Transport response handler not found of id [{}]", requestId);
action = null;
sourceNode = null;
}
// call tracer out of lock
if (tracerLog.isTraceEnabled() == false) {
return;
}
if (action == null) {
assert sourceNode == null;
tracerLog.trace("[{}] received response but can't resolve it to a request", requestId);
} else if (shouldTraceAction(action)) {
tracerLog.trace("[{}][{}] received response from [{}]", requestId, action, sourceNode);
}
}
@Override
public void onConnectionClosed(Transport.Connection connection) {
List<Transport.ResponseContext<? extends TransportResponse>> pruned = responseHandlers.prune(
h -> h.connection().getCacheKey().equals(connection.getCacheKey())
);
if (pruned.isEmpty()) {
return;
}
for (Transport.ResponseContext<?> holderToNotify : pruned) {
if (tracerLog.isTraceEnabled() && shouldTraceAction(holderToNotify.action())) {
tracerLog.trace(
"[{}][{}] pruning request because connection to node [{}] closed",
holderToNotify.requestId(),
holderToNotify.action(),
connection.getNode()
);
}
NodeDisconnectedException exception = new NodeDisconnectedException(connection.getNode(), holderToNotify.action());
TransportResponseHandler<?> handler = holderToNotify.handler();
// we used to fork to a different thread always to avoid stack overflows, but we avoid doing that now, expecting handlers
// to handle that themselves instead.
var executor = handler.executor();
if (executor == EsExecutors.DIRECT_EXECUTOR_SERVICE && enableStackOverflowAvoidance) {
executor = threadPool.generic();
}
if (executor == EsExecutors.DIRECT_EXECUTOR_SERVICE) {
handler.handleException(exception);
} else {
executor.execute(new ForkingResponseHandlerRunnable(handler, exception, executor) {
@Override
protected void doRun() {
handler.handleException(exception);
}
@Override
public String toString() {
return "onConnectionClosed/handleException[" + handler + "]";
}
});
}
}
}
final | that |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/SpringApplicationShutdownHook.java | {
"start": 5729,
"end": 7235
} | class ____ implements SpringApplicationShutdownHandlers, Runnable {
private final Set<Handler> actions = new LinkedHashSet<>();
@Override
public void add(Runnable action) {
Assert.notNull(action, "'action' must not be null");
addRuntimeShutdownHookIfNecessary();
synchronized (SpringApplicationShutdownHook.class) {
assertNotInProgress();
this.actions.add(new Handler(action));
}
}
@Override
public void remove(Runnable action) {
Assert.notNull(action, "'action' must not be null");
synchronized (SpringApplicationShutdownHook.class) {
assertNotInProgress();
this.actions.remove(new Handler(action));
}
}
Set<Handler> getActions() {
return this.actions;
}
@Override
public void run() {
SpringApplicationShutdownHook.this.run();
SpringApplicationShutdownHook.this.reset();
}
}
/**
* A single handler that uses object identity for {@link #equals(Object)} and
* {@link #hashCode()}.
*
* @param runnable the handler runner
*/
record Handler(Runnable runnable) {
@Override
public int hashCode() {
return System.identityHashCode(this.runnable);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
return this.runnable == ((Handler) obj).runnable;
}
void run() {
this.runnable.run();
}
}
/**
* {@link ApplicationListener} to track closed contexts.
*/
private final | Handlers |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/subscribers/LambdaSubscriber.java | {
"start": 1201,
"end": 3849
} | class ____<T> extends AtomicReference<Subscription>
implements FlowableSubscriber<T>, Subscription, Disposable, LambdaConsumerIntrospection {
private static final long serialVersionUID = -7251123623727029452L;
final Consumer<? super T> onNext;
final Consumer<? super Throwable> onError;
final Action onComplete;
final Consumer<? super Subscription> onSubscribe;
public LambdaSubscriber(Consumer<? super T> onNext, Consumer<? super Throwable> onError,
Action onComplete,
Consumer<? super Subscription> onSubscribe) {
super();
this.onNext = onNext;
this.onError = onError;
this.onComplete = onComplete;
this.onSubscribe = onSubscribe;
}
@Override
public void onSubscribe(Subscription s) {
if (SubscriptionHelper.setOnce(this, s)) {
try {
onSubscribe.accept(this);
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
s.cancel();
onError(ex);
}
}
}
@Override
public void onNext(T t) {
if (!isDisposed()) {
try {
onNext.accept(t);
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
get().cancel();
onError(e);
}
}
}
@Override
public void onError(Throwable t) {
if (get() != SubscriptionHelper.CANCELLED) {
lazySet(SubscriptionHelper.CANCELLED);
try {
onError.accept(t);
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
RxJavaPlugins.onError(new CompositeException(t, e));
}
} else {
RxJavaPlugins.onError(t);
}
}
@Override
public void onComplete() {
if (get() != SubscriptionHelper.CANCELLED) {
lazySet(SubscriptionHelper.CANCELLED);
try {
onComplete.run();
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
RxJavaPlugins.onError(e);
}
}
}
@Override
public void dispose() {
cancel();
}
@Override
public boolean isDisposed() {
return get() == SubscriptionHelper.CANCELLED;
}
@Override
public void request(long n) {
get().request(n);
}
@Override
public void cancel() {
SubscriptionHelper.cancel(this);
}
@Override
public boolean hasCustomOnError() {
return onError != Functions.ON_ERROR_MISSING;
}
}
| LambdaSubscriber |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/JSONScannerTest_long.java | {
"start": 218,
"end": 2745
} | class ____ extends TestCase {
public void ftest_parse_long() throws Exception {
System.out.println(System.currentTimeMillis());
JSONScanner lexer = new JSONScanner("1293770846476");
lexer.scanNumber();
Assert.assertEquals(new Long(1293770846476L), (Long) lexer.integerValue());
Assert.assertEquals(1293770846476L, lexer.longValue());
}
public void ftest_parse_long_1() throws Exception {
System.out.println(System.currentTimeMillis());
JSONScanner lexer = new JSONScanner(Long.toString(Long.MAX_VALUE));
lexer.scanNumber();
Assert.assertEquals(new Long(Long.MAX_VALUE), (Long) lexer.integerValue());
Assert.assertEquals(Long.MAX_VALUE, lexer.longValue());
}
public void test_parse_long_2() throws Exception {
System.out.println(System.currentTimeMillis());
JSONScanner lexer = new JSONScanner(Long.toString(Long.MIN_VALUE));
lexer.scanNumber();
Assert.assertEquals(new Long(Long.MIN_VALUE), (Long) lexer.integerValue());
Assert.assertEquals(Long.MIN_VALUE, lexer.longValue());
}
public void test_error_0() {
Exception error = null;
try {
JSONScanner lexer = new JSONScanner("--");
lexer.scanNumber();
lexer.longValue();
} catch (Exception ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_1() {
Exception error = null;
try {
String text = Long.MAX_VALUE + "1234";
JSONScanner lexer = new JSONScanner(text);
lexer.scanNumber();
lexer.longValue();
} catch (Exception ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_2() {
Exception error = null;
try {
String text = Long.MIN_VALUE + "1234";
JSONScanner lexer = new JSONScanner(text);
lexer.scanNumber();
lexer.longValue();
} catch (Exception ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_3() {
Exception error = null;
try {
String text = "9223372036854775809";
JSONScanner lexer = new JSONScanner(text);
lexer.scanNumber();
lexer.longValue();
} catch (Exception ex) {
error = ex;
}
Assert.assertNotNull(error);
}
}
| JSONScannerTest_long |
java | elastic__elasticsearch | x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEnrichIT.java | {
"start": 726,
"end": 1037
} | class ____ extends RestEnrichTestCase {
@ClassRule
public static ElasticsearchCluster cluster = Clusters.testCluster();
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
public RestEnrichIT(Mode mode) {
super(mode);
}
}
| RestEnrichIT |
java | apache__kafka | connect/mirror-client/src/test/java/org/apache/kafka/connect/mirror/SourceAndTargetTest.java | {
"start": 1010,
"end": 1462
} | class ____ {
@Test
public void testEquals() {
SourceAndTarget sourceAndTarget = new SourceAndTarget("source", "target");
SourceAndTarget sourceAndTarget2 = new SourceAndTarget("source", "target");
SourceAndTarget sourceAndTarget3 = new SourceAndTarget("error-source", "target");
assertEquals(sourceAndTarget, sourceAndTarget2);
assertNotEquals(sourceAndTarget, sourceAndTarget3);
| SourceAndTargetTest |
java | micronaut-projects__micronaut-core | test-suite/src/test/java/io/micronaut/docs/ioc/mappers/ChristmasTypes.java | {
"start": 98,
"end": 532
} | interface ____ {
// tag::beans[]
@Introspected
record ChristmasPresent(
String packagingColor,
String type,
Float weight,
String greetingCard
) {
}
@Introspected
record PresentPackaging(
Float weight,
String color
) {
}
@Introspected
record Present(
Float weight,
String type
) {
}
// end::beans[]
}
| ChristmasTypes |
java | spring-projects__spring-boot | module/spring-boot-webmvc/src/test/java/org/springframework/boot/webmvc/autoconfigure/error/RemappedErrorViewIntegrationTests.java | {
"start": 3185,
"end": 3679
} | class ____ implements ErrorPageRegistrar {
@RequestMapping("/")
String home() {
throw new RuntimeException("Planned!");
}
@Override
public void registerErrorPages(ErrorPageRegistry errorPageRegistry) {
errorPageRegistry.addErrorPages(new ErrorPage("/spring/error"));
}
// For manual testing
static void main(String[] args) {
new SpringApplicationBuilder(TestConfiguration.class).properties("spring.mvc.servlet.path:spring/*")
.run(args);
}
}
}
| TestConfiguration |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/scheduling/annotation/AsyncExecutionTests.java | {
"start": 24082,
"end": 24406
} | class ____ implements ApplicationListener<ApplicationEvent> {
public AsyncClassListener() {
listenerConstructed++;
}
@Override
public void onApplicationEvent(ApplicationEvent event) {
listenerCalled++;
assertThat(Thread.currentThread().getName()).isNotEqualTo(originalThreadName);
}
}
}
| AsyncClassListener |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformCheckpointingInfoTests.java | {
"start": 472,
"end": 1576
} | class ____ extends AbstractWireSerializingTestCase<TransformCheckpointingInfo> {
public static TransformCheckpointingInfo randomTransformCheckpointingInfo() {
return new TransformCheckpointingInfo(
TransformCheckpointStatsTests.randomTransformCheckpointStats(),
TransformCheckpointStatsTests.randomTransformCheckpointStats(),
randomNonNegativeLong(),
randomBoolean() ? null : Instant.ofEpochMilli(randomLongBetween(1, 100000)),
randomBoolean() ? null : Instant.ofEpochMilli(randomLongBetween(1, 100000))
);
}
@Override
protected TransformCheckpointingInfo createTestInstance() {
return randomTransformCheckpointingInfo();
}
@Override
protected TransformCheckpointingInfo mutateInstance(TransformCheckpointingInfo instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected Reader<TransformCheckpointingInfo> instanceReader() {
return TransformCheckpointingInfo::new;
}
}
| TransformCheckpointingInfoTests |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/nestedsourceproperties/_target/ChartEntryLabel.java | {
"start": 251,
"end": 785
} | class ____ {
private String name;
private String city;
private String recordedAt;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getCity() {
return city;
}
public void setCity(String city) {
this.city = city;
}
public String getRecordedAt() {
return recordedAt;
}
public void setRecordedAt(String recordedAt) {
this.recordedAt = recordedAt;
}
}
| ChartEntryLabel |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/metrics/MetricsTrackingMapState.java | {
"start": 13037,
"end": 13632
} | class ____<E> implements Iterable<E> {
private final Iterable<E> iterable;
private final ThrowingConsumer<E, IOException> trackElementSizeConsumer;
IterableWrapper(
Iterable<E> iterable, ThrowingConsumer<E, IOException> trackElementSizeConsumer) {
this.iterable = iterable;
this.trackElementSizeConsumer = trackElementSizeConsumer;
}
@Override
public Iterator<E> iterator() {
return new IteratorWrapper<>(iterable.iterator(), trackElementSizeConsumer);
}
}
private | IterableWrapper |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/ExchangeTestSupport.java | {
"start": 998,
"end": 1106
} | class ____ a test which requires a {@link CamelContext} and a populated {@link Exchange}
*/
public abstract | for |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/MilvusComponentBuilderFactory.java | {
"start": 1377,
"end": 1834
} | interface ____ {
/**
* Milvus (camel-milvus)
* Perform operations on the Milvus Vector Database.
*
* Category: database,ai
* Since: 4.5
* Maven coordinates: org.apache.camel:camel-milvus
*
* @return the dsl builder
*/
static MilvusComponentBuilder milvus() {
return new MilvusComponentBuilderImpl();
}
/**
* Builder for the Milvus component.
*/
| MilvusComponentBuilderFactory |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java | {
"start": 2839,
"end": 17756
} | class ____ extends ESTestCase {
public void testJoinDeduplication() {
DeterministicTaskQueue deterministicTaskQueue = new DeterministicTaskQueue();
CapturingTransport capturingTransport = new HandshakingCapturingTransport();
DiscoveryNode localNode = DiscoveryNodeUtils.create("node0");
final var threadPool = deterministicTaskQueue.getThreadPool();
final var clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
final var taskManger = new TaskManager(Settings.EMPTY, threadPool, Set.of());
TransportService transportService = new TransportService(
Settings.EMPTY,
capturingTransport,
threadPool,
TransportService.NOOP_TRANSPORT_INTERCEPTOR,
x -> localNode,
clusterSettings,
new ClusterConnectionManager(Settings.EMPTY, capturingTransport, threadPool.getThreadContext()),
taskManger
);
JoinHelper joinHelper = new JoinHelper(
null,
new MasterService(Settings.EMPTY, clusterSettings, threadPool, taskManger),
new NoOpClusterApplier(),
transportService,
() -> 0L,
(joinRequest, joinCallback) -> {
throw new AssertionError();
},
startJoinRequest -> { throw new AssertionError(); },
(s, p, r) -> {},
() -> new StatusInfo(HEALTHY, "info"),
new JoinReasonService(() -> 0L),
new NoneCircuitBreakerService(),
Function.identity(),
(listener, term) -> listener.onResponse(null),
CompatibilityVersionsUtils.staticCurrent(),
new FeatureService(List.of())
);
transportService.start();
DiscoveryNode node1 = DiscoveryNodeUtils.create("node1");
DiscoveryNode node2 = DiscoveryNodeUtils.create("node2");
final boolean mightSucceed = randomBoolean();
assertFalse(joinHelper.isJoinPending());
// check that sending a join to node1 works
Optional<Join> optionalJoin1 = randomBoolean()
? Optional.empty()
: Optional.of(new Join(localNode, node1, randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()));
joinHelper.sendJoinRequest(node1, 0L, optionalJoin1);
CapturedRequest[] capturedRequests1 = capturingTransport.getCapturedRequestsAndClear();
assertThat(capturedRequests1.length, equalTo(1));
CapturedRequest capturedRequest1 = capturedRequests1[0];
assertEquals(node1, capturedRequest1.node());
assertTrue(joinHelper.isJoinPending());
final var join1Term = optionalJoin1.stream().mapToLong(Join::term).findFirst().orElse(0L);
final var join1Status = new JoinStatus(node1, join1Term, PENDING_JOIN_WAITING_RESPONSE, TimeValue.ZERO);
assertThat(joinHelper.getInFlightJoinStatuses(), equalTo(List.of(join1Status)));
// check that sending a join to node2 works
Optional<Join> optionalJoin2 = randomBoolean()
? Optional.empty()
: Optional.of(new Join(localNode, node2, randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()));
joinHelper.sendJoinRequest(node2, 0L, optionalJoin2);
CapturedRequest[] capturedRequests2 = capturingTransport.getCapturedRequestsAndClear();
assertThat(capturedRequests2.length, equalTo(1));
CapturedRequest capturedRequest2 = capturedRequests2[0];
assertEquals(node2, capturedRequest2.node());
final var join2Term = optionalJoin2.stream().mapToLong(Join::term).findFirst().orElse(0L);
final var join2Status = new JoinStatus(node2, join2Term, PENDING_JOIN_WAITING_RESPONSE, TimeValue.ZERO);
assertThat(
new HashSet<>(joinHelper.getInFlightJoinStatuses()),
equalTo(
Stream.of(join1Status, join2Status)
.filter(joinStatus -> joinStatus.term() == Math.max(join1Term, join2Term))
.collect(Collectors.toSet())
)
);
// check that sending another join to node1 is a noop as the previous join is still in progress
joinHelper.sendJoinRequest(node1, 0L, optionalJoin1);
assertThat(capturingTransport.getCapturedRequestsAndClear().length, equalTo(0));
// complete the previous join to node1
completeJoinRequest(capturingTransport, capturedRequest1, mightSucceed);
assertThat(joinHelper.getInFlightJoinStatuses(), equalTo(List.of(join2Status)));
// check that sending another join to node1 now works again
joinHelper.sendJoinRequest(node1, 0L, optionalJoin1);
CapturedRequest[] capturedRequests1a = capturingTransport.getCapturedRequestsAndClear();
assertThat(capturedRequests1a.length, equalTo(1));
CapturedRequest capturedRequest1a = capturedRequests1a[0];
assertEquals(node1, capturedRequest1a.node());
// check that sending another join to node2 works if the optionalJoin is different
Optional<Join> optionalJoin2a = optionalJoin2.isPresent() && randomBoolean()
? Optional.empty()
: Optional.of(new Join(localNode, node2, randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()));
joinHelper.sendJoinRequest(node2, 0L, optionalJoin2a);
CapturedRequest[] capturedRequests2a = capturingTransport.getCapturedRequestsAndClear();
assertThat(capturedRequests2a.length, equalTo(1));
CapturedRequest capturedRequest2a = capturedRequests2a[0];
assertEquals(node2, capturedRequest2a.node());
// complete all the joins and check that isJoinPending is updated
assertTrue(joinHelper.isJoinPending());
assertTrue(transportService.nodeConnected(node1));
assertTrue(transportService.nodeConnected(node2));
completeJoinRequest(capturingTransport, capturedRequest2, mightSucceed);
completeJoinRequest(capturingTransport, capturedRequest1a, mightSucceed);
completeJoinRequest(capturingTransport, capturedRequest2a, mightSucceed);
assertFalse(joinHelper.isJoinPending());
if (mightSucceed) {
// successful requests hold the connections open until the cluster state is applied
joinHelper.onClusterStateApplied();
}
assertFalse(transportService.nodeConnected(node1));
assertFalse(transportService.nodeConnected(node2));
}
private void completeJoinRequest(CapturingTransport capturingTransport, CapturedRequest request, boolean mightSucceed) {
if (mightSucceed && randomBoolean()) {
capturingTransport.handleResponse(request.requestId(), ActionResponse.Empty.INSTANCE);
} else {
capturingTransport.handleRemoteError(request.requestId(), new CoordinationStateRejectedException("dummy"));
}
}
public void testFailedJoinAttemptLogLevel() {
assertThat(JoinHelper.FailedJoinAttempt.getLogLevel(new TransportException("generic transport exception")), is(Level.INFO));
assertThat(
JoinHelper.FailedJoinAttempt.getLogLevel(
new RemoteTransportException("remote transport exception with generic cause", new Exception())
),
is(Level.INFO)
);
assertThat(
JoinHelper.FailedJoinAttempt.getLogLevel(
new RemoteTransportException("caused by CoordinationStateRejectedException", new CoordinationStateRejectedException("test"))
),
is(Level.DEBUG)
);
assertThat(
JoinHelper.FailedJoinAttempt.getLogLevel(
new RemoteTransportException(
"caused by FailedToCommitClusterStateException",
new FailedToCommitClusterStateException("test")
)
),
is(Level.DEBUG)
);
assertThat(
JoinHelper.FailedJoinAttempt.getLogLevel(
new RemoteTransportException("caused by NotMasterException", new NotMasterException("test"))
),
is(Level.DEBUG)
);
}
public void testJoinFailureOnUnhealthyNodes() {
DeterministicTaskQueue deterministicTaskQueue = new DeterministicTaskQueue();
CapturingTransport capturingTransport = new HandshakingCapturingTransport();
DiscoveryNode localNode = DiscoveryNodeUtils.create("node0");
ThreadPool threadPool = deterministicTaskQueue.getThreadPool();
final var clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
final var taskManger = new TaskManager(Settings.EMPTY, threadPool, Set.of());
TransportService transportService = new TransportService(
Settings.EMPTY,
capturingTransport,
threadPool,
TransportService.NOOP_TRANSPORT_INTERCEPTOR,
x -> localNode,
clusterSettings,
new ClusterConnectionManager(Settings.EMPTY, capturingTransport, threadPool.getThreadContext()),
taskManger
);
AtomicReference<StatusInfo> nodeHealthServiceStatus = new AtomicReference<>(new StatusInfo(UNHEALTHY, "unhealthy-info"));
JoinHelper joinHelper = new JoinHelper(
null,
new MasterService(Settings.EMPTY, clusterSettings, threadPool, taskManger),
new NoOpClusterApplier(),
transportService,
() -> 0L,
(joinRequest, joinCallback) -> {
throw new AssertionError();
},
startJoinRequest -> { throw new AssertionError(); },
(s, p, r) -> {},
nodeHealthServiceStatus::get,
new JoinReasonService(() -> 0L),
new NoneCircuitBreakerService(),
Function.identity(),
(listener, term) -> listener.onResponse(null),
CompatibilityVersionsUtils.staticCurrent(),
new FeatureService(List.of())
);
transportService.start();
DiscoveryNode node1 = DiscoveryNodeUtils.create("node1");
DiscoveryNode node2 = DiscoveryNodeUtils.create("node2");
assertFalse(joinHelper.isJoinPending());
// check that sending a join to node1 doesn't work
Optional<Join> optionalJoin1 = randomBoolean()
? Optional.empty()
: Optional.of(new Join(localNode, node1, randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()));
joinHelper.sendJoinRequest(node1, randomNonNegativeLong(), optionalJoin1);
CapturedRequest[] capturedRequests1 = capturingTransport.getCapturedRequestsAndClear();
assertThat(capturedRequests1.length, equalTo(0));
assertFalse(joinHelper.isJoinPending());
// check that sending a join to node2 doesn't work
Optional<Join> optionalJoin2 = randomBoolean()
? Optional.empty()
: Optional.of(new Join(localNode, node2, randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()));
transportService.start();
joinHelper.sendJoinRequest(node2, randomNonNegativeLong(), optionalJoin2);
CapturedRequest[] capturedRequests2 = capturingTransport.getCapturedRequestsAndClear();
assertThat(capturedRequests2.length, equalTo(0));
assertFalse(joinHelper.isJoinPending());
nodeHealthServiceStatus.getAndSet(new StatusInfo(HEALTHY, "healthy-info"));
// check that sending another join to node1 now works again
joinHelper.sendJoinRequest(node1, 0L, optionalJoin1);
CapturedRequest[] capturedRequests1a = capturingTransport.getCapturedRequestsAndClear();
assertThat(capturedRequests1a.length, equalTo(1));
CapturedRequest capturedRequest1a = capturedRequests1a[0];
assertEquals(node1, capturedRequest1a.node());
}
@TestLogging(reason = "testing WARN logging", value = "org.elasticsearch.cluster.coordination.JoinHelper:WARN")
public void testLatestStoredStateFailure() {
DeterministicTaskQueue deterministicTaskQueue = new DeterministicTaskQueue();
CapturingTransport capturingTransport = new HandshakingCapturingTransport();
DiscoveryNode localNode = DiscoveryNodeUtils.create("node0");
final var threadPool = deterministicTaskQueue.getThreadPool();
final var clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
final var taskManger = new TaskManager(Settings.EMPTY, threadPool, Set.of());
TransportService transportService = new TransportService(
Settings.EMPTY,
capturingTransport,
threadPool,
TransportService.NOOP_TRANSPORT_INTERCEPTOR,
x -> localNode,
clusterSettings,
new ClusterConnectionManager(Settings.EMPTY, capturingTransport, threadPool.getThreadContext()),
taskManger
);
JoinHelper joinHelper = new JoinHelper(
null,
new MasterService(Settings.EMPTY, clusterSettings, threadPool, taskManger),
new NoOpClusterApplier(),
transportService,
() -> 1L,
(joinRequest, joinCallback) -> {
throw new AssertionError();
},
startJoinRequest -> { throw new AssertionError(); },
(s, p, r) -> {},
() -> new StatusInfo(HEALTHY, "info"),
new JoinReasonService(() -> 0L),
new NoneCircuitBreakerService(),
Function.identity(),
(listener, term) -> listener.onFailure(new ElasticsearchException("simulated")),
CompatibilityVersionsUtils.staticCurrent(),
new FeatureService(List.of())
);
final var joinAccumulator = joinHelper.new CandidateJoinAccumulator();
final var joinListener = new PlainActionFuture<Void>();
joinAccumulator.handleJoinRequest(localNode, CompatibilityVersionsUtils.staticCurrent(), Set.of(), joinListener);
assert joinListener.isDone() == false;
try (var mockLog = MockLog.capture(JoinHelper.class)) {
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"warning log",
JoinHelper.class.getCanonicalName(),
Level.WARN,
"failed to retrieve latest stored state after winning election in term [1]"
)
);
joinAccumulator.close(Coordinator.Mode.LEADER);
mockLog.assertAllExpectationsMatched();
}
assertEquals("simulated", expectThrows(ElasticsearchException.class, () -> FutureUtils.get(joinListener)).getMessage());
}
private static | JoinHelperTests |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/internal/AbstractInvocationHandlerUnitTests.java | {
"start": 1777,
"end": 1835
} | interface ____ {
int returnOne();
}
}
| ReturnOne |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/wall/mysql/MySqlWallTest30.java | {
"start": 837,
"end": 1067
} | class ____ extends TestCase {
public void test_false() throws Exception {
assertFalse(WallUtils.isValidateMySql(//
"select * from t where FID = 1 OR UNHEX(HEX('MySQL')) = 'MySQL'"));
}
}
| MySqlWallTest30 |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/resource/basic/resource/ParameterSubResClassSub.java | {
"start": 493,
"end": 1114
} | class ____ {
AtomicInteger resourceCounter = new AtomicInteger();
@Inject
ApplicationScopeObject appScope;
@Inject
RequestScopedObject requestScope;
@GET
@Produces("text/plain")
public String get(@Context HttpHeaders headers, @Context UriInfo uriInfo) {
Assertions.assertEquals("/path/subclass", uriInfo.getPath());
Assertions.assertNotNull(headers.getHeaderString("Host"));
return "resourceCounter:" + resourceCounter.incrementAndGet() + ",appscope:" + appScope.getCount() + ",requestScope:"
+ requestScope.getCount();
}
}
| ParameterSubResClassSub |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java | {
"start": 3885,
"end": 4031
} | class ____ the job output data.
*/
public Class<?> getOutputKeyClass() {
return conf.getOutputKeyClass();
}
/**
* Get the value | for |
java | micronaut-projects__micronaut-core | aop/src/main/java/io/micronaut/aop/internal/intercepted/ReactorInterceptedMethod.java | {
"start": 1280,
"end": 2644
} | class ____ extends PublisherInterceptedMethod {
public static final boolean REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono", ReactorInterceptedMethod.class.getClassLoader());
ReactorInterceptedMethod(MethodInvocationContext<?, ?> context, ConversionService conversionService) {
super(context, conversionService);
}
@Override
protected Object convertPublisherResult(ReturnType<?> returnType, Object result) {
return captureContext(super.convertPublisherResult(returnType, result));
}
@Override
protected Publisher<?> convertToPublisher(Object result) {
return captureContext(super.convertToPublisher(result));
}
private <T> T captureContext(T result) {
if (!PropagatedContext.exists()) {
return result;
}
if (result instanceof Mono<?> mono) {
PropagatedContext propagatedContext = PropagatedContext.get();
return (T) mono.contextWrite(ctx -> ReactorPropagation.addPropagatedContext(ctx, propagatedContext));
}
if (result instanceof Flux<?> flux) {
PropagatedContext propagatedContext = PropagatedContext.get();
return (T) flux.contextWrite(ctx -> ReactorPropagation.addPropagatedContext(ctx, propagatedContext));
}
return result;
}
}
| ReactorInterceptedMethod |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/dialect/DB2390DialectTestCase.java | {
"start": 897,
"end": 2286
} | class ____ {
@BeforeAll
public void populateSchema(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
for ( int i = 0; i < 10; ++i ) {
final SimpleEntity simpleEntity = new SimpleEntity( i, "Entity" + i );
entityManager.persist( simpleEntity );
}
} );
}
@AfterAll
public void cleanSchema(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> scope.getEntityManagerFactory().getSchemaManager().truncate() );
}
@Test
public void testLegacyLimitHandlerWithNoOffset(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<SimpleEntity> results = entityManager.createQuery( "FROM SimpleEntity", SimpleEntity.class )
.setMaxResults( 2 )
.getResultList();
assertEquals( Arrays.asList( 0, 1 ), results.stream().map( SimpleEntity::getId ).collect( Collectors.toList() ) );
} );
}
@Test
public void testLegacyLimitHandlerWithOffset(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<SimpleEntity> results = entityManager.createQuery( "FROM SimpleEntity", SimpleEntity.class )
.setFirstResult( 2 )
.setMaxResults( 2 )
.getResultList();
assertEquals( Arrays.asList( 2, 3 ), results.stream().map( SimpleEntity::getId ).collect( Collectors.toList() ) );
} );
}
@Entity(name = "SimpleEntity")
public static | DB2390DialectTestCase |
java | google__dagger | javatests/dagger/internal/codegen/SubcomponentValidationTest.java | {
"start": 1942,
"end": 2326
} | interface ____ {",
" ChildComponent newChildComponent();",
"}");
Source childComponentFile =
CompilerTests.javaSource(
"test.ChildComponent",
"package test;",
"",
"import dagger.Subcomponent;",
"",
"@Subcomponent(modules = ModuleWithParameters.class)",
" | TestComponent |
java | quarkusio__quarkus | independent-projects/qute/core/src/main/java/io/quarkus/qute/Qute.java | {
"start": 1374,
"end": 4404
} | class ____ {
/**
* If needed, a default engine is created lazily.
* <p>
* The default engine has in addition to {@link EngineBuilder#addDefaults()}:
* <ul>
* <li>{@link ReflectionValueResolver}</li>
* <li>{@link IndexedArgumentsParserHook}</li>
* <li>{@link HtmlEscaper} registered for {@code text/html} and {@code text/xml} content types</li>
* </ul>
*
* @return the engine
* @see #setEngine(Engine)
*/
public static Engine engine() {
Engine engine = Qute.engine;
if (engine == null) {
synchronized (Qute.class) {
if (engine == null) {
engine = newDefaultEngine();
Qute.engine = engine;
}
}
}
return engine;
}
/**
* Set a specific engine instance.
* <p>
* Note that the engine should have a {@link IndexedArgumentsParserHook} registered so that the
* {@link #fmt(String, Object...)} method works correcly.
* <p>
* The cache is always cleared when a new engine is set.
*
* @param engine
* @see #engine()
*/
public static void setEngine(Engine engine) {
clearCache();
Qute.engine = engine;
}
/**
*
* @param template
* @param data
* @return the rendered template
*/
public static String fmt(String template, Map<String, Object> data) {
return fmt(template).dataMap(data).render();
}
/**
* The data array is accessibe via the {@code data} key, e.g. {data[0]} is resolved to the first argument.
* <p>
* An empty expression <code>{}</code> is a placeholder that is replaced with an index-based array accessor
* <code>{data[n]}</code> where {@code n} is the index of the placeholder. The first placeholder is replace with
* <code>{data[0]}</code>, the second with <code>{data[1]}</code>, and so on. For example, <code>"Hello {}!"</code> becomes
* <code>Hello {data[0]}!</code>.
*
* @param template
* @param data
* @return the rendered template
*/
public static String fmt(String template, Object... data) {
return fmt(template).dataArray(data).render();
}
/**
*
* @param template
* @return a new format object
*/
public static Fmt fmt(String template) {
return new Fmt(template);
}
/**
* The template cache will be used by default.
*
* @see Fmt#cache()
* @see #clearCache()
*/
public static void enableCache() {
cacheByDefault = true;
}
/**
* The template cache will not be used by default.
*
* @see Fmt#noCache()
* @see #clearCache()
*/
public static void disableCache() {
cacheByDefault = false;
}
/**
* Clears the template cache.
*/
public static void clearCache() {
CACHE.clear();
}
/**
* This construct is not thread-safe.
*/
public final static | Qute |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/targetclass/mixed/PreDestroyOnTargetClassAndOutsideAndManySuperclassesWithOverridesTest.java | {
"start": 1506,
"end": 1720
} | class ____ extends Alpha {
@PreDestroy
void specialIntercept() {
MyBean.invocations.add("this should not be called as the method is overridden in Charlie");
}
}
static | Bravo |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/io/VFS.java | {
"start": 3614,
"end": 4162
} | class ____
*
* @return the class
*/
protected static Class<?> getClass(String className) {
try {
return Thread.currentThread().getContextClassLoader().loadClass(className);
// return ReflectUtil.findClass(className);
} catch (ClassNotFoundException e) {
if (log.isDebugEnabled()) {
log.debug("Class not found: " + className);
}
return null;
}
}
/**
* Get a method by name and parameter types. If the method is not found then return null.
*
* @param clazz
* The | name |
java | elastic__elasticsearch | build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/InternalAwareGradleRunner.java | {
"start": 1047,
"end": 4375
} | class ____ extends GradleRunner {
private GradleRunner delegate;
public InternalAwareGradleRunner(GradleRunner delegate) {
this.delegate = delegate;
}
@Override
public GradleRunner withGradleVersion(String gradleVersion) {
delegate.withGradleVersion(gradleVersion);
return this;
}
@Override
public GradleRunner withGradleInstallation(File file) {
delegate.withGradleInstallation(file);
return this;
}
@Override
public GradleRunner withGradleDistribution(URI uri) {
delegate.withGradleDistribution(uri);
return this;
}
@Override
public GradleRunner withTestKitDir(File file) {
delegate.withTestKitDir(file);
return this;
}
@Override
public File getProjectDir() {
return delegate.getProjectDir();
}
@Override
public GradleRunner withProjectDir(File projectDir) {
delegate.withProjectDir(projectDir);
return this;
}
@Override
public List<String> getArguments() {
return delegate.getArguments();
}
@Override
public GradleRunner withArguments(List<String> arguments) {
List<String> collect = Stream.concat(arguments.stream(), Stream.of("-Dtest.external=true")).collect(Collectors.toList());
delegate.withArguments(collect);
return this;
}
@Override
public GradleRunner withArguments(String... arguments) {
withArguments(List.of(arguments));
return this;
}
@Override
public List<? extends File> getPluginClasspath() {
return delegate.getPluginClasspath();
}
@Override
public GradleRunner withPluginClasspath() throws InvalidPluginMetadataException {
delegate.withPluginClasspath();
return this;
}
@Override
public GradleRunner withPluginClasspath(Iterable<? extends File> iterable) {
delegate.withPluginClasspath(iterable);
return this;
}
@Override
public boolean isDebug() {
return delegate.isDebug();
}
@Override
public GradleRunner withDebug(boolean b) {
delegate.withDebug(b);
return this;
}
@Override
public Map<String, String> getEnvironment() {
return delegate.getEnvironment();
}
@Override
public GradleRunner withEnvironment(Map<String, String> map) {
delegate.withEnvironment(map);
return this;
}
@Override
public GradleRunner forwardStdOutput(Writer writer) {
delegate.forwardStdOutput(writer);
return this;
}
@Override
public GradleRunner forwardStdError(Writer writer) {
delegate.forwardStdOutput(writer);
return this;
}
@Override
public GradleRunner forwardOutput() {
delegate.forwardOutput();
return this;
}
@Override
public BuildResult build() throws InvalidRunnerConfigurationException, UnexpectedBuildFailure {
return delegate.build();
}
@Override
public BuildResult buildAndFail() throws InvalidRunnerConfigurationException, UnexpectedBuildSuccess {
return delegate.buildAndFail();
}
@Override
public BuildResult run() throws InvalidRunnerConfigurationException {
return delegate.run();
}
}
| InternalAwareGradleRunner |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/validation/beanvalidation/OptionalValidatorFactoryBean.java | {
"start": 1040,
"end": 1265
} | class ____ by Spring's MVC configuration namespace,
* in case of the {@code jakarta.validation} API being present but no explicit
* Validator having been configured.
*
* @author Juergen Hoeller
* @since 4.0.1
*/
public | used |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/EmbeddableInheritanceReplaceTest.java | {
"start": 2001,
"end": 2198
} | class ____ extends Base {
private String str;
public Next(int num, String str) {
super( num, str );
this.str = str;
}
public Next() {
}
}
@Entity(name = "Ent")
public static | Next |
java | spring-projects__spring-security | web/src/test/java/org/springframework/security/web/util/UrlUtilsTests.java | {
"start": 807,
"end": 1778
} | class ____ {
@Test
public void absoluteUrlsAreMatchedAsAbsolute() {
assertThat(UrlUtils.isAbsoluteUrl("https://something/")).isTrue();
assertThat(UrlUtils.isAbsoluteUrl("http1://something/")).isTrue();
assertThat(UrlUtils.isAbsoluteUrl("HTTP://something/")).isTrue();
assertThat(UrlUtils.isAbsoluteUrl("https://something/")).isTrue();
assertThat(UrlUtils.isAbsoluteUrl("a://something/")).isTrue();
assertThat(UrlUtils.isAbsoluteUrl("zz+zz.zz-zz://something/")).isTrue();
}
@Test
public void isAbsoluteUrlWhenNullThenFalse() {
assertThat(UrlUtils.isAbsoluteUrl(null)).isFalse();
}
@Test
public void isAbsoluteUrlWhenEmptyThenFalse() {
assertThat(UrlUtils.isAbsoluteUrl("")).isFalse();
}
@Test
public void isValidRedirectUrlWhenNullThenFalse() {
assertThat(UrlUtils.isValidRedirectUrl(null)).isFalse();
}
@Test
public void isValidRedirectUrlWhenEmptyThenFalse() {
assertThat(UrlUtils.isValidRedirectUrl("")).isFalse();
}
}
| UrlUtilsTests |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/externalresource/ExternalResourceUtils.java | {
"start": 1964,
"end": 2180
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(ExternalResourceUtils.class);
private ExternalResourceUtils() {
throw new UnsupportedOperationException("This | ExternalResourceUtils |
java | grpc__grpc-java | services/src/main/java/io/grpc/protobuf/services/BinlogHelper.java | {
"start": 4955,
"end": 10720
} | class ____ "
+ "add one to be consistent with language agnostic conventions.");
Preconditions.checkArgument(
peerAddress == null || logger == GrpcLogEntry.Logger.LOGGER_SERVER,
"peerSocket can only be specified for server");
MaybeTruncated<io.grpc.binarylog.v1.Metadata.Builder> pair
= createMetadataProto(metadata, maxHeaderBytes);
io.grpc.binarylog.v1.ClientHeader.Builder clientHeaderBuilder
= io.grpc.binarylog.v1.ClientHeader.newBuilder()
.setMetadata(pair.proto)
.setMethodName("/" + methodName);
if (timeout != null) {
clientHeaderBuilder.setTimeout(timeout);
}
if (authority != null) {
clientHeaderBuilder.setAuthority(authority);
}
GrpcLogEntry.Builder entryBuilder = newTimestampedBuilder()
.setSequenceIdWithinCall(seq)
.setType(EventType.EVENT_TYPE_CLIENT_HEADER)
.setClientHeader(clientHeaderBuilder)
.setPayloadTruncated(pair.truncated)
.setLogger(logger)
.setCallId(callId);
if (peerAddress != null) {
entryBuilder.setPeer(socketToProto(peerAddress));
}
sink.write(entryBuilder.build());
}
@Override
void logServerHeader(
long seq,
Metadata metadata,
GrpcLogEntry.Logger logger,
long callId,
// null on server
@Nullable SocketAddress peerAddress) {
Preconditions.checkArgument(
peerAddress == null || logger == GrpcLogEntry.Logger.LOGGER_CLIENT,
"peerSocket can only be specified for client");
MaybeTruncated<io.grpc.binarylog.v1.Metadata.Builder> pair
= createMetadataProto(metadata, maxHeaderBytes);
GrpcLogEntry.Builder entryBuilder = newTimestampedBuilder()
.setSequenceIdWithinCall(seq)
.setType(EventType.EVENT_TYPE_SERVER_HEADER)
.setServerHeader(
io.grpc.binarylog.v1.ServerHeader.newBuilder()
.setMetadata(pair.proto))
.setPayloadTruncated(pair.truncated)
.setLogger(logger)
.setCallId(callId);
if (peerAddress != null) {
entryBuilder.setPeer(socketToProto(peerAddress));
}
sink.write(entryBuilder.build());
}
@Override
void logTrailer(
long seq,
Status status,
Metadata metadata,
GrpcLogEntry.Logger logger,
long callId,
// null on server, can be non null on client if this is a trailer-only response
@Nullable SocketAddress peerAddress) {
Preconditions.checkArgument(
peerAddress == null || logger == GrpcLogEntry.Logger.LOGGER_CLIENT,
"peerSocket can only be specified for client");
MaybeTruncated<io.grpc.binarylog.v1.Metadata.Builder> pair
= createMetadataProto(metadata, maxHeaderBytes);
io.grpc.binarylog.v1.Trailer.Builder trailerBuilder
= io.grpc.binarylog.v1.Trailer.newBuilder()
.setStatusCode(status.getCode().value())
.setMetadata(pair.proto);
String statusDescription = status.getDescription();
if (statusDescription != null) {
trailerBuilder.setStatusMessage(statusDescription);
}
byte[] statusDetailBytes = metadata.get(STATUS_DETAILS_KEY);
if (statusDetailBytes != null) {
trailerBuilder.setStatusDetails(ByteString.copyFrom(statusDetailBytes));
}
GrpcLogEntry.Builder entryBuilder = newTimestampedBuilder()
.setSequenceIdWithinCall(seq)
.setType(EventType.EVENT_TYPE_SERVER_TRAILER)
.setTrailer(trailerBuilder)
.setPayloadTruncated(pair.truncated)
.setLogger(logger)
.setCallId(callId);
if (peerAddress != null) {
entryBuilder.setPeer(socketToProto(peerAddress));
}
sink.write(entryBuilder.build());
}
@Override
<T> void logRpcMessage(
long seq,
EventType eventType,
Marshaller<T> marshaller,
T message,
GrpcLogEntry.Logger logger,
long callId) {
Preconditions.checkArgument(
eventType == EventType.EVENT_TYPE_CLIENT_MESSAGE
|| eventType == EventType.EVENT_TYPE_SERVER_MESSAGE,
"event type must correspond to client message or server message");
if (marshaller != BYTEARRAY_MARSHALLER) {
throw new IllegalStateException("Expected the BinaryLog's ByteArrayMarshaller");
}
MaybeTruncated<Message.Builder> pair = createMessageProto((byte[]) message, maxMessageBytes);
GrpcLogEntry.Builder entryBuilder = newTimestampedBuilder()
.setSequenceIdWithinCall(seq)
.setType(eventType)
.setMessage(pair.proto)
.setPayloadTruncated(pair.truncated)
.setLogger(logger)
.setCallId(callId);
sink.write(entryBuilder.build());
}
@Override
void logHalfClose(long seq, GrpcLogEntry.Logger logger, long callId) {
sink.write(
newTimestampedBuilder()
.setSequenceIdWithinCall(seq)
.setType(EventType.EVENT_TYPE_CLIENT_HALF_CLOSE)
.setLogger(logger)
.setCallId(callId)
.build());
}
@Override
void logCancel(long seq, GrpcLogEntry.Logger logger, long callId) {
sink.write(
newTimestampedBuilder()
.setSequenceIdWithinCall(seq)
.setType(EventType.EVENT_TYPE_CANCEL)
.setLogger(logger)
.setCallId(callId)
.build());
}
@Override
int getMaxHeaderBytes() {
return maxHeaderBytes;
}
@Override
int getMaxMessageBytes() {
return maxMessageBytes;
}
}
abstract static | will |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/filter/factory/FallbackHeadersGatewayFilterFactory.java | {
"start": 1211,
"end": 3363
} | class ____
extends AbstractGatewayFilterFactory<FallbackHeadersGatewayFilterFactory.Config> {
public FallbackHeadersGatewayFilterFactory() {
super(Config.class);
}
@Override
public List<String> shortcutFieldOrder() {
return singletonList(NAME_KEY);
}
@Override
public GatewayFilter apply(Config config) {
return (exchange, chain) -> {
Throwable exception = exchange.getAttribute(CIRCUITBREAKER_EXECUTION_EXCEPTION_ATTR);
ServerWebExchange filteredExchange;
if (exception == null) {
filteredExchange = exchange;
}
else {
filteredExchange = addFallbackHeaders(config, exchange, exception);
}
return chain.filter(filteredExchange);
};
}
private ServerWebExchange addFallbackHeaders(Config config, ServerWebExchange exchange,
Throwable executionException) {
ServerHttpRequest.Builder requestBuilder = exchange.getRequest().mutate();
requestBuilder.header(config.getExecutionExceptionMessageHeaderName(), executionException.getClass().getName());
String executionMessage = executionException.getMessage();
requestBuilder.header(config.getExecutionExceptionMessageHeaderName(),
executionMessage != null ? executionMessage : "");
Throwable rootCause = getRootCause(executionException);
if (rootCause != null) {
requestBuilder.header(config.getRootCauseExceptionTypeHeaderName(), rootCause.getClass().getName());
String rootCauseMessage = rootCause.getMessage();
requestBuilder.header(config.getRootCauseExceptionMessageHeaderName(),
rootCauseMessage != null ? rootCauseMessage : "");
}
return exchange.mutate().request(requestBuilder.build()).build();
}
private static @Nullable Throwable getRootCause(final Throwable throwable) {
final List<Throwable> list = getThrowableList(throwable);
return list.isEmpty() ? null : list.get(list.size() - 1);
}
private static List<Throwable> getThrowableList(Throwable throwable) {
final List<Throwable> list = new ArrayList<>();
while (throwable != null && !list.contains(throwable)) {
list.add(throwable);
throwable = throwable.getCause();
}
return list;
}
public static | FallbackHeadersGatewayFilterFactory |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java | {
"start": 3365,
"end": 3597
} | class ____<K, V> extends Fake_RR<K, V> {
@SuppressWarnings("unchecked")
public RR_ClassLoaderChecker(Configuration conf) {
assertTrue(conf.getClassLoader() instanceof Fake_ClassLoader,
"The | RR_ClassLoaderChecker |
java | elastic__elasticsearch | modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalSpec.java | {
"start": 1660,
"end": 7306
} | class ____ implements Writeable, ToXContentObject {
/** List of search request to use for the evaluation */
private final List<RatedRequest> ratedRequests;
/** Definition of the quality metric, e.g. precision at N */
private final EvaluationMetric metric;
/** Maximum number of requests to execute in parallel. */
private int maxConcurrentSearches = MAX_CONCURRENT_SEARCHES;
/** Default max number of requests. */
private static final int MAX_CONCURRENT_SEARCHES = 10;
/** optional: Templates to base test requests on */
private final Map<String, Script> templates = new HashMap<>();
public RankEvalSpec(List<RatedRequest> ratedRequests, EvaluationMetric metric, Collection<ScriptWithId> templates) {
this.metric = Objects.requireNonNull(metric, "Cannot evaluate ranking if no evaluation metric is provided.");
if (ratedRequests == null || ratedRequests.isEmpty()) {
throw new IllegalArgumentException(
"Cannot evaluate ranking if no search requests with rated results are provided. Seen: " + ratedRequests
);
}
this.ratedRequests = ratedRequests;
if (templates == null || templates.isEmpty()) {
for (RatedRequest request : ratedRequests) {
if (request.getEvaluationRequest() == null) {
throw new IllegalStateException(
"Cannot evaluate ranking if neither template nor evaluation request is "
+ "provided. Seen for request id: "
+ request.getId()
);
}
}
}
if (templates != null) {
for (ScriptWithId idScript : templates) {
this.templates.put(idScript.id, idScript.script);
}
}
}
public RankEvalSpec(List<RatedRequest> ratedRequests, EvaluationMetric metric) {
this(ratedRequests, metric, null);
}
public RankEvalSpec(StreamInput in) throws IOException {
int specSize = in.readVInt();
ratedRequests = new ArrayList<>(specSize);
for (int i = 0; i < specSize; i++) {
ratedRequests.add(new RatedRequest(in));
}
metric = in.readNamedWriteable(EvaluationMetric.class);
int size = in.readVInt();
for (int i = 0; i < size; i++) {
String key = in.readString();
Script value = new Script(in);
this.templates.put(key, value);
}
maxConcurrentSearches = in.readVInt();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeCollection(ratedRequests);
out.writeNamedWriteable(metric);
out.writeMap(templates, StreamOutput::writeWriteable);
out.writeVInt(maxConcurrentSearches);
}
/** Returns the metric to use for quality evaluation.*/
public EvaluationMetric getMetric() {
return metric;
}
/** Returns a list of intent to query translation specifications to evaluate. */
public List<RatedRequest> getRatedRequests() {
return Collections.unmodifiableList(ratedRequests);
}
/** Returns the template to base test requests on. */
public Map<String, Script> getTemplates() {
return this.templates;
}
/** Returns the max concurrent searches allowed. */
public int getMaxConcurrentSearches() {
return this.maxConcurrentSearches;
}
/** Set the max concurrent searches allowed. */
public void setMaxConcurrentSearches(int maxConcurrentSearches) {
this.maxConcurrentSearches = maxConcurrentSearches;
}
private static final ParseField TEMPLATES_FIELD = new ParseField("templates");
private static final ParseField METRIC_FIELD = new ParseField("metric");
private static final ParseField REQUESTS_FIELD = new ParseField("requests");
private static final ParseField MAX_CONCURRENT_SEARCHES_FIELD = new ParseField("max_concurrent_searches");
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<RankEvalSpec, Predicate<NodeFeature>> PARSER = new ConstructingObjectParser<>(
"rank_eval",
a -> new RankEvalSpec((List<RatedRequest>) a[0], (EvaluationMetric) a[1], (Collection<ScriptWithId>) a[2])
);
static {
PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> RatedRequest.fromXContent(p, c), REQUESTS_FIELD);
PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> parseMetric(p), METRIC_FIELD);
PARSER.declareObjectArray(
ConstructingObjectParser.optionalConstructorArg(),
(p, c) -> ScriptWithId.fromXContent(p),
TEMPLATES_FIELD
);
PARSER.declareInt(RankEvalSpec::setMaxConcurrentSearches, MAX_CONCURRENT_SEARCHES_FIELD);
}
private static EvaluationMetric parseMetric(XContentParser parser) throws IOException {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser);
EvaluationMetric metric = parser.namedObject(EvaluationMetric.class, parser.currentName(), null);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser);
return metric;
}
public static RankEvalSpec parse(XContentParser parser, Predicate<NodeFeature> clusterSupportsFeature) {
return PARSER.apply(parser, clusterSupportsFeature);
}
static | RankEvalSpec |
java | elastic__elasticsearch | x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java | {
"start": 2883,
"end": 3328
} | class ____ extends ActionType<ShardChangesAction.Response> {
public static final ShardChangesAction INSTANCE = new ShardChangesAction();
public static final String NAME = "indices:data/read/xpack/ccr/shard_changes";
public static final RemoteClusterActionType<Response> REMOTE_TYPE = new RemoteClusterActionType<>(NAME, Response::new);
private ShardChangesAction() {
super(NAME);
}
public static | ShardChangesAction |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/cuckoo/ReactiveTransactionalCuckooCommands.java | {
"start": 769,
"end": 8592
} | interface ____<K, V> extends ReactiveTransactionalRedisCommands {
/**
* Execute the command <a href="https://redis.io/commands/cf.add">CF.ADD</a>.
* Summary: Adds the specified element to the specified Cuckoo filter.
* Group: cuckoo
* <p>
* If the cuckoo filter does not exist, it creates a new one.
*
* @param key the key
* @param value the value, must not be {@code null}
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> cfadd(K key, V value);
/**
* Execute the command <a href="https://redis.io/commands/cf.addnx">CF.ADDNX</a>.
* Summary: Adds an item to a cuckoo filter if the item did not exist previously.
* Group: cuckoo
* <p>
* If the cuckoo filter does not exist, it creates a new one.
*
* @param key the key
* @param value the value, must not be {@code null}
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> cfaddnx(K key, V value);
/**
* Execute the command <a href="https://redis.io/commands/cf.count">CF.COUNT</a>.
* Summary: Returns the number of times an item may be in the filter. Because this is a probabilistic data structure,
* this may not necessarily be accurate.
* Group: cuckoo
* <p>
*
* @param key the key
* @param value the value, must not be {@code null}
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> cfcount(K key, V value);
/**
* Execute the command <a href="https://redis.io/commands/cf.del">CF.DEL</a>.
* Summary: Deletes an item once from the filter. If the item exists only once, it will be removed from the filter.
* If the item was added multiple times, it will still be present.
* Group: cuckoo
* <p>
*
* @param key the key
* @param value the value, must not be {@code null}
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> cfdel(K key, V value);
/**
* Execute the command <a href="https://redis.io/commands/cf.exists">CF.EXISTS</a>.
* Summary: Check if an item exists in a Cuckoo filter
* Group: cuckoo
* <p>
*
* @param key the key
* @param value the value, must not be {@code null}
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> cfexists(K key, V value);
/**
* Execute the command <a href="https://redis.io/commands/cf.insert">CF.INSERT</a>.
* Summary: Adds one or more items to a cuckoo filter, allowing the filter to be created with a custom capacity if
* it does not exist yet.
* Group: cuckoo
* <p>
*
* @param key the key
* @param values the values, must not be {@code null}, must not be empty, must not contain {@code null}
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> cfinsert(K key, V... values);
/**
* Execute the command <a href="https://redis.io/commands/cf.insert">CF.INSERT</a>.
* Summary: Adds one or more items to a cuckoo filter, allowing the filter to be created with a custom capacity if
* it does not exist yet.
* Group: cuckoo
* <p>
*
* @param key the key
* @param args the extra arguments
* @param values the values, must not be {@code null}, must not be empty, must not contain {@code null}
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> cfinsert(K key, CfInsertArgs args, V... values);
/**
* Execute the command <a href="https://redis.io/commands/cf.insertnx">CF.INSERTNX</a>.
* Summary: Adds one or more items to a cuckoo filter, allowing the filter to be created with a custom capacity if
* it does not exist yet.
* Group: cuckoo
* <p>
*
* @param key the key
* @param values the values, must not be {@code null}, must not be empty, must not contain {@code null}
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> cfinsertnx(K key, V... values);
/**
* Execute the command <a href="https://redis.io/commands/cf.insertnx">CF.INSERTNX</a>.
* Summary: Adds one or more items to a cuckoo filter, allowing the filter to be created with a custom capacity if
* it does not exist yet.
* Group: cuckoo
* <p>
*
* @param key the key
* @param args the extra arguments
* @param values the values, must not be {@code null}, must not be empty, must not contain {@code null}
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> cfinsertnx(K key, CfInsertArgs args, V... values);
/**
* Execute the command <a href="https://redis.io/commands/cf.mexists">CF.MEXISTS</a>.
* Summary: Check if an item exists in a Cuckoo filter
* Group: cuckoo
* <p>
*
* @param key the key
* @param values the values, must not be {@code null}, must not contain {@code null}, must not be empty
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> cfmexists(K key, V... values);
/**
* Execute the command <a href="https://redis.io/commands/cf.reserve">CF.RESERVE</a>.
* Summary: Create a Cuckoo Filter as key with a single sub-filter for the initial amount of capacity for items.
* Group: cuckoo
* <p>
*
* @param key the key
* @param capacity the capacity
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> cfreserve(K key, long capacity);
/**
* Execute the command <a href="https://redis.io/commands/cf.reserve">CF.RESERVE</a>.
* Summary: Create a Cuckoo Filter as key with a single sub-filter for the initial amount of capacity for items.
* Group: cuckoo
* <p>
*
* @param key the key
* @param capacity the capacity
* @param args the extra parameters
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> cfreserve(K key, long capacity, CfReserveArgs args);
}
| ReactiveTransactionalCuckooCommands |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.