language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
elastic__elasticsearch
|
modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java
|
{
"start": 7570,
"end": 9401
}
|
class ____ extends CustomCode {
private static final String CODE = "toJson";
ToJsonCode(TemplateContext tc, DefaultMustacheFactory df, Mustache mustache, String variable) {
super(tc, df, mustache, CODE);
if (CODE.equalsIgnoreCase(variable) == false) {
throw new MustacheException("Mismatch function code [" + CODE + "] cannot be applied to [" + variable + "]");
}
}
@Override
@SuppressWarnings("unchecked")
protected Function<String, String> createFunction(Object resolved) {
return s -> {
if (resolved == null) {
return null;
}
try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) {
if (resolved instanceof Iterable) {
builder.startArray();
for (Object o : (Iterable<?>) resolved) {
builder.value(o);
}
builder.endArray();
} else if (resolved instanceof Map) {
builder.map((Map<String, ?>) resolved);
} else {
// Do not handle as JSON
return oh.stringify(resolved);
}
return Strings.toString(builder);
} catch (IOException e) {
throw new MustacheException("Failed to convert object to JSON", e);
}
};
}
static boolean match(String variable) {
return CODE.equalsIgnoreCase(variable);
}
}
/**
* This function concatenates the values of an {@link Iterable} using a given delimiter
*/
static
|
ToJsonCode
|
java
|
apache__camel
|
tooling/maven/camel-package-maven-plugin/src/main/java/org/apache/camel/maven/packaging/EndpointSchemaGeneratorMojo.java
|
{
"start": 22784,
"end": 42870
}
|
enum ____ and <b>the name of the
* constant</b> is in the following format <i>${declaring-class-name}#${enum-constant-name}</i></li>
* </ul>
* </li>
* </ul>
*
* @param header the header in which the name of the header and its corresponding constant should be
* set.
* @param field the field corresponding to the name of a header.
* @param headersNameProvider the name of the field to get or the name of the method to invoke to get the name of
* the headers.
* @throws Exception if an error occurred while getting the name of the header
*/
private void setHeaderNames(EndpointHeaderModel header, Field field, String headersNameProvider) throws Exception {
final Class<?> declaringClass = field.getDeclaringClass();
if (field.getType().isEnum()) {
if (!headersNameProvider.isEmpty()) {
final Optional<?> value = Arrays.stream(declaringClass.getEnumConstants())
.filter(c -> ((Enum<?>) c).name().equals(field.getName()))
.findAny();
if (value.isPresent()) {
getLog().debug(String.format("The headers name provider has been set to %s", headersNameProvider));
final Optional<Field> headersNameProviderField = Arrays.stream(declaringClass.getFields())
.filter(f -> f.getName().equals(headersNameProvider))
.findAny();
if (headersNameProviderField.isPresent()) {
getLog().debug("A field corresponding to the headers name provider has been found");
header.setConstantName(
String.format("%s#%s@%s", declaringClass.getName(), field.getName(), headersNameProvider));
header.setName((String) headersNameProviderField.get().get(value.get()));
return;
}
getLog().debug(
String.format("No field %s could be found in the class %s", headersNameProvider, declaringClass));
final Optional<Method> headersNameProviderMethod = Arrays.stream(declaringClass.getMethods())
.filter(m -> m.getName().equals(headersNameProvider) && m.getParameterCount() == 0)
.findAny();
if (headersNameProviderMethod.isPresent()) {
getLog().debug("A method without parameters corresponding to the headers name provider has been found");
header.setConstantName(
String.format("%s#%s@%s()", declaringClass.getName(), field.getName(), headersNameProvider));
header.setName((String) headersNameProviderMethod.get().invoke(value.get()));
return;
}
getLog().debug(String.format("No method %s without parameters could be found in the class %s",
headersNameProvider, declaringClass));
}
}
header.setConstantName(String.format("%s#%s", declaringClass.getName(), field.getName()));
header.setName(field.getName());
return;
}
header.setConstantName(String.format("%s#%s", declaringClass.getName(), field.getName()));
header.setName((String) field.get(null));
}
/**
* @param headerField the field for which we want to extract the related Javadoc.
* @return the Javadoc of the header field if any. An empty string otherwise.
*/
private String getHeaderFieldJavadoc(Field headerField) {
JavaSource<?> source;
final String className = headerField.getDeclaringClass().getName();
try {
source = javaSource(className, JavaSource.class);
if (source == null) {
getLog().debug(String.format("The source of the class %s could not be found", className));
return "";
}
} catch (Exception e) {
getLog().debug(
String.format("An error occurred while loading the source of the class %s could not be found", className),
e);
return "";
}
JavaDocCapable<?> member = null;
if (source instanceof JavaEnumSource) {
member = ((JavaEnumSource) source).getEnumConstant(headerField.getName());
} else if (source instanceof FieldHolderSource) {
member = ((FieldHolderSource<?>) source).getField(headerField.getName());
} else {
getLog().debug(String.format("The header field cannot be retrieved from a source of type %s", source.getName()));
}
if (member != null) {
String doc = getJavaDocText(loadJavaSource(className), member);
if (!Strings.isNullOrEmpty(doc)) {
return doc;
}
}
return "";
}
private String getExcludedEnd(Metadata classElement) {
String excludedEndpointProperties = "";
if (classElement != null) {
excludedEndpointProperties = classElement.excludeProperties();
}
return excludedEndpointProperties;
}
/**
* Used for enhancing the component model with apiProperties for API based components (such as twilio, olingo and
* others)
*/
private void enhanceComponentModelWithApiModel(ComponentModel componentModel) {
for (AnnotationInstance ai : getIndex().getAnnotations(API_PARAMS)) {
Class<?> classElement = loadClass(ai.target().asClass().name().toString());
final ApiParams apiParams = classElement.getAnnotation(ApiParams.class);
if (apiParams != null) {
String apiName = apiParams.apiName();
if (!Strings.isNullOrEmpty(apiName)) {
final UriParams uriParams = classElement.getAnnotation(UriParams.class);
String extraPrefix = uriParams != null ? uriParams.prefix() : "";
findClassProperties(componentModel, classElement, Collections.emptySet(), extraPrefix,
null, null, false);
}
}
}
}
@Override
protected boolean updateResource(Path dir, String file, String data) {
resources.put(file, data);
return super.updateResource(dir, file, data);
}
private String loadResource(String fileName) {
if (resources.containsKey(fileName)) {
return resources.get(fileName);
}
String data;
try (InputStream is = getProjectClassLoader().getResourceAsStream(fileName)) {
if (is == null) {
throw new FileNotFoundException("Resource: " + fileName);
}
data = PackageHelper.loadText(is);
} catch (Exception e) {
throw new RuntimeException("Error while loading " + fileName + ": " + e, e);
}
resources.put(fileName, data);
return data;
}
void enhanceComponentModel(
ComponentModel componentModel, ComponentModel parentData, String excludedEndpointProperties,
String excludedComponentProperties) {
componentModel.getComponentOptions().removeIf(option -> filterOutOption(componentModel, option));
componentModel.getEndpointHeaders().forEach(option -> fixDoc(option, null));
componentModel.getComponentOptions()
.forEach(option -> fixDoc(option, parentData != null ? parentData.getComponentOptions() : null));
componentModel.getComponentOptions().sort(EndpointHelper.createGroupAndLabelComparator());
componentModel.getEndpointOptions().removeIf(option -> filterOutOption(componentModel, option));
componentModel.getEndpointOptions()
.forEach(option -> fixDoc(option, parentData != null ? parentData.getEndpointOptions() : null));
componentModel.getEndpointOptions().sort(EndpointHelper.createOverallComparator(componentModel.getSyntax()));
// merge with parent, remove excluded and override properties
if (parentData != null) {
Set<String> componentOptionNames
= componentModel.getComponentOptions().stream().map(BaseOptionModel::getName).collect(Collectors.toSet());
Set<String> endpointOptionNames
= componentModel.getEndpointOptions().stream().map(BaseOptionModel::getName).collect(Collectors.toSet());
Set<String> headerNames
= componentModel.getEndpointHeaders().stream().map(BaseOptionModel::getName).collect(Collectors.toSet());
Collections.addAll(componentOptionNames, excludedComponentProperties.split(","));
Collections.addAll(endpointOptionNames, excludedEndpointProperties.split(","));
parentData.getComponentOptions().stream()
.filter(option -> !componentOptionNames.contains(option.getName()))
.forEach(option -> componentModel.getComponentOptions().add(option));
parentData.getEndpointOptions().stream()
.filter(option -> !endpointOptionNames.contains(option.getName()))
.forEach(option -> componentModel.getEndpointOptions().add(option));
parentData.getEndpointHeaders().stream()
.filter(header -> !headerNames.contains(header.getName()))
.forEach(header -> componentModel.getEndpointHeaders().add(header));
}
}
private void fixDoc(BaseOptionModel option, List<? extends BaseOptionModel> parentOptions) {
String doc = getDocumentationWithNotes(option);
if (Strings.isNullOrEmpty(doc) && parentOptions != null) {
doc = parentOptions.stream().filter(opt -> Objects.equals(opt.getName(), option.getName()))
.map(this::getDocumentationWithNotes).findFirst().orElse(null);
}
// as its json we need to sanitize the docs
doc = JavadocHelper.sanitizeDescription(doc, false);
option.setDescription(doc);
if (isNullOrEmpty(doc)) {
throw new IllegalStateException(
"Empty doc for option: " + option.getName() + ", parent options: "
+ (parentOptions != null
? Jsoner.serialize(JsonMapper.asJsonObject(parentOptions)) : "<null>"));
}
}
private boolean filterOutOption(ComponentModel component, BaseOptionModel option) {
String label = option.getLabel();
if (label != null) {
return component.isConsumerOnly() && label.contains("producer")
|| component.isProducerOnly() && label.contains("consumer");
} else {
return false;
}
}
public String getDocumentationWithNotes(BaseOptionModel option) {
String description = option.getDescription();
if (description == null) {
return null;
}
StringBuilder sb = new StringBuilder(description.length() * 64);
sb.append(description);
if (option.isMultiValue() && option.getPrefix() != null) {
if (!sb.isEmpty() && sb.charAt(sb.length() - 1) != '.') {
sb.append('.');
}
sb.append(" This is a multi-value option with prefix: ").append(option.getPrefix());
}
if (!Strings.isNullOrEmpty(option.getDefaultValueNote())) {
if (!sb.isEmpty() && sb.charAt(sb.length() - 1) != '.') {
sb.append('.');
}
sb.append(" Default value notice: ").append(option.getDefaultValueNote());
}
if (!Strings.isNullOrEmpty(option.getDeprecationNote())) {
if (!sb.isEmpty() && sb.charAt(sb.length() - 1) != '.') {
sb.append('.');
}
sb.append(" Deprecation note: ").append(option.getDeprecationNote());
}
return sb.toString();
}
private void generateComponentConfigurer(
UriEndpoint uriEndpoint, String scheme, String[] schemes, ComponentModel componentModel,
ComponentModel parentData) {
if (!uriEndpoint.generateConfigurer()) {
return;
}
// only generate this once for the first scheme
if (isFirstScheme(scheme, schemes)) {
return;
}
String pfqn;
boolean hasSuper;
Class<?> superClazz = loadClass(componentModel.getJavaType()).getSuperclass();
if (parentData != null && superClazz.getName().equals(parentData.getJavaType())) {
// special for activemq and amqp scheme which should reuse jms
pfqn = parentData.getJavaType() + "Configurer";
hasSuper = true;
} else {
pfqn = "org.apache.camel.support.component.PropertyConfigurerSupport";
hasSuper = false;
parentData = null;
}
String psn = pfqn.substring(pfqn.lastIndexOf('.') + 1);
String fqComponentClassName = componentModel.getJavaType();
String componentClassName = fqComponentClassName.substring(fqComponentClassName.lastIndexOf('.') + 1);
String className = componentClassName + "Configurer";
String packageName = fqComponentClassName.substring(0, fqComponentClassName.lastIndexOf('.'));
String fqClassName = packageName + "." + className;
List<ComponentOptionModel> options;
if (parentData != null) {
Set<String> parentOptionsNames = parentData.getComponentOptions().stream()
.map(ComponentOptionModel::getName).collect(Collectors.toSet());
options = componentModel.getComponentOptions().stream().filter(o -> !parentOptionsNames.contains(o.getName()))
.toList();
} else {
options = componentModel.getComponentOptions();
}
generatePropertyConfigurer(packageName, className, fqClassName, componentClassName,
pfqn, psn,
componentModel.getScheme() + "-component", hasSuper, true,
options, componentModel);
}
private boolean isFirstScheme(String scheme, String[] schemes) {
if (schemes != null && !schemes[0].equals(scheme)) {
return true;
}
return false;
}
private void generateEndpointConfigurer(
Class<?> classElement, UriEndpoint uriEndpoint, String scheme, String[] schemes,
ComponentModel componentModel, ComponentModel parentData) {
if (!uriEndpoint.generateConfigurer()) {
return;
}
// only generate this once for the first scheme
if (isFirstScheme(scheme, schemes)) {
return;
}
Class<?> superClazz = loadClass(componentModel.getJavaType()).getSuperclass();
String pfqn;
boolean hasSuper;
if (parentData != null && superClazz.getName().equals(parentData.getJavaType())) {
try {
pfqn = classElement.getSuperclass().getName() + "Configurer";
hasSuper = true;
} catch (NoClassDefFoundError e) {
pfqn = "org.apache.camel.support.component.PropertyConfigurerSupport";
hasSuper = false;
parentData = null;
}
} else {
pfqn = "org.apache.camel.support.component.PropertyConfigurerSupport";
hasSuper = false;
}
String psn = pfqn.substring(pfqn.lastIndexOf('.') + 1);
String fqEndpointClassName = classElement.getName();
String endpointClassName = fqEndpointClassName.substring(fqEndpointClassName.lastIndexOf('.') + 1);
String className = endpointClassName + "Configurer";
String packageName = fqEndpointClassName.substring(0, fqEndpointClassName.lastIndexOf('.'));
String fqClassName = packageName + "." + className;
List<EndpointOptionModel> options;
if (parentData != null) {
Set<String> parentOptionsNames = parentData.getEndpointParameterOptions().stream()
.map(EndpointOptionModel::getName).collect(Collectors.toSet());
options = componentModel.getEndpointParameterOptions().stream()
.filter(o -> !parentOptionsNames.contains(o.getName()))
.toList();
} else {
options = componentModel.getEndpointParameterOptions();
}
generatePropertyConfigurer(packageName, className, fqClassName, endpointClassName,
pfqn, psn,
componentModel.getScheme() + "-endpoint", hasSuper, false,
options, componentModel);
}
protected ComponentModel findComponentProperties(
UriEndpoint uriEndpoint, Class<?> endpointClassElement, String title, String scheme,
String extendsScheme, String label, String[] schemes) {
ComponentModel model = new ComponentModel();
model.setScheme(scheme);
model.setName(scheme);
model.setExtendsScheme(extendsScheme);
// alternative schemes
if (schemes != null && schemes.length > 1) {
model.setAlternativeSchemes(String.join(",", schemes));
}
// if the scheme is an alias then replace the scheme name from the
// syntax with the alias
String syntax = scheme + ":" + Strings.after(uriEndpoint.syntax(), ":");
// alternative syntax is optional
if (!Strings.isNullOrEmpty(uriEndpoint.alternativeSyntax())) {
String alternativeSyntax = scheme + ":" + Strings.after(uriEndpoint.alternativeSyntax(), ":");
model.setAlternativeSyntax(alternativeSyntax);
}
model.setSyntax(syntax);
model.setTitle(title);
model.setLabel(label);
model.setConsumerOnly(uriEndpoint.consumerOnly());
model.setProducerOnly(uriEndpoint.producerOnly());
model.setLenientProperties(uriEndpoint.lenientProperties());
model.setRemote(uriEndpoint.remote());
model.setAsync(loadClass("org.apache.camel.AsyncEndpoint").isAssignableFrom(endpointClassElement));
model.setApi(loadClass("org.apache.camel.ApiEndpoint").isAssignableFrom(endpointClassElement));
model.setBrowsable(loadClass("org.apache.camel.spi.BrowsableEndpoint").isAssignableFrom(endpointClassElement));
model.setApiSyntax(uriEndpoint.apiSyntax());
// what is the first version this component was added to Apache Camel
String firstVersion = uriEndpoint.firstVersion();
if (Strings.isNullOrEmpty(firstVersion) && endpointClassElement.getAnnotation(Metadata.class) != null) {
// fallback to @Metadata if not from @UriEndpoint
firstVersion = endpointClassElement.getAnnotation(Metadata.class).firstVersion();
}
if (!Strings.isNullOrEmpty(firstVersion)) {
model.setFirstVersion(firstVersion);
}
model.setDescription(project.getDescription());
model.setGroupId(project.getGroupId());
model.setArtifactId(project.getArtifactId());
model.setVersion(project.getVersion());
// grab level from annotation, pom.xml or default to stable
String level = project.getProperties().getProperty("supportLevel");
boolean experimental = ClassUtil.hasAnnotation("org.apache.camel.Experimental", endpointClassElement);
if (experimental) {
model.setSupportLevel(SupportLevel.Experimental);
} else if (level != null) {
model.setSupportLevel(SupportLevel.safeValueOf(level));
} else {
model.setSupportLevel(SupportLevelHelper.defaultSupportLevel(model.getFirstVersion(), model.getVersion()));
}
// get the java type
|
constant
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/multipart/MultipartDetectionTest.java
|
{
"start": 1239,
"end": 4789
}
|
class ____ {
@TestHTTPResource
URI baseUri;
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot(
jar -> jar.addClasses(Resource.class, Client.class, Person.class, TestJacksonBasicMessageBodyReader.class,
TestJacksonBasicMessageBodyWriter.class));
@Test
void shouldCallExplicitEndpoints() throws IOException {
Client client = RestClientBuilder.newBuilder().baseUri(baseUri).build(Client.class);
File file = File.createTempFile("MultipartTest", ".txt");
Files.writeString(file.toPath(), "Hello");
file.deleteOnExit();
assertThat(client.postMultipartExplicit(file.getName(), file))
.isEqualTo(file.getName() + " " + file.getName() + " Hello");
assertThat(client.postUrlencodedExplicit(file.getName())).isEqualTo(file.getName());
}
@Test
void shouldCallImplicitEndpoints() throws IOException {
Client client = RestClientBuilder.newBuilder().baseUri(baseUri).build(Client.class);
File file = File.createTempFile("MultipartTest", ".txt");
byte[] contents = "Hello".getBytes(StandardCharsets.UTF_8);
Files.write(file.toPath(), contents);
file.deleteOnExit();
Byte[] contentsForMulti = new Byte[contents.length];
for (int i = 0; i < contents.length; i++) {
contentsForMulti[i] = contents[i];
}
Person person = new Person();
person.firstName = "Stef";
person.lastName = "Epardaud";
assertThat(client.postMultipartImplicit(file.getName(), file))
.isEqualTo(file.getName() + " " + file.getName() + " Hello");
assertThat(client.postMultipartImplicit(file.getName(), file.toPath()))
.isEqualTo(file.getName() + " " + file.getName() + " Hello");
assertThat(client.postMultipartImplicit(file.getName(), contents)).isEqualTo(file.getName() + " file Hello");
assertThat(client.postMultipartImplicit(file.getName(), Buffer.buffer(contents)))
.isEqualTo(file.getName() + " file Hello");
assertThat(client.postMultipartImplicit(file.getName(), Multi.createFrom().items(contentsForMulti)))
.isEqualTo(file.getName() + " file Hello");
assertThat(client.postMultipartEntityImplicit(file.getName(), person))
.isEqualTo(file.getName() + " Stef:Epardaud");
assertThat(client.postMultipartImplicitFileUpload("Foo", new FileUpload() {
@Override
public String name() {
return "file";
}
@Override
public java.nio.file.Path filePath() {
return file.toPath();
}
@Override
public String fileName() {
return file.getName();
}
@Override
public long size() {
return -1;
}
@Override
public String contentType() {
return "application/octet-stream";
}
@Override
public String charSet() {
return "";
}
@Override
public MultivaluedMap<String, String> getHeaders() {
return new QuarkusMultivaluedHashMap<>();
}
}))
.isEqualTo("Foo " + file.getName() + " Hello");
}
@Path("form")
@ApplicationScoped
public static
|
MultipartDetectionTest
|
java
|
square__javapoet
|
src/test/java/com/squareup/javapoet/AnnotatedTypeNameTest.java
|
{
"start": 1518,
"end": 9077
}
|
interface ____ {}
@Test(expected=NullPointerException.class) public void nullAnnotationArray() {
TypeName.BOOLEAN.annotated((AnnotationSpec[]) null);
}
@Test(expected=NullPointerException.class) public void nullAnnotationList() {
TypeName.DOUBLE.annotated((List<AnnotationSpec>) null);
}
@Test public void annotated() {
TypeName simpleString = TypeName.get(String.class);
assertFalse(simpleString.isAnnotated());
assertEquals(simpleString, TypeName.get(String.class));
TypeName annotated = simpleString.annotated(NEVER_NULL);
assertTrue(annotated.isAnnotated());
assertEquals(annotated, annotated.annotated());
}
@Test public void annotatedType() {
TypeName type = TypeName.get(String.class);
TypeName actual = type.annotated(TYPE_USE_ANNOTATION);
assertThat(actual.toString()).isEqualTo("java.lang. @" + TUA + " String");
}
@Test public void annotatedTwice() {
TypeName type = TypeName.get(String.class);
TypeName actual =
type.annotated(NEVER_NULL)
.annotated(TYPE_USE_ANNOTATION);
assertThat(actual.toString())
.isEqualTo("java.lang. @" + NN + " @" + TUA + " String");
}
@Test public void annotatedParameterizedType() {
TypeName type = ParameterizedTypeName.get(List.class, String.class);
TypeName actual = type.annotated(TYPE_USE_ANNOTATION);
assertThat(actual.toString()).isEqualTo("java.util. @" + TUA + " List<java.lang.String>");
}
@Test public void annotatedArgumentOfParameterizedType() {
TypeName type = TypeName.get(String.class).annotated(TYPE_USE_ANNOTATION);
TypeName actual = ParameterizedTypeName.get(ClassName.get(List.class), type);
assertThat(actual.toString()).isEqualTo("java.util.List<java.lang. @" + TUA + " String>");
}
@Test public void annotatedWildcardTypeNameWithSuper() {
TypeName type = TypeName.get(String.class).annotated(TYPE_USE_ANNOTATION);
TypeName actual = WildcardTypeName.supertypeOf(type);
assertThat(actual.toString()).isEqualTo("? super java.lang. @" + TUA + " String");
}
@Test public void annotatedWildcardTypeNameWithExtends() {
TypeName type = TypeName.get(String.class).annotated(TYPE_USE_ANNOTATION);
TypeName actual = WildcardTypeName.subtypeOf(type);
assertThat(actual.toString()).isEqualTo("? extends java.lang. @" + TUA + " String");
}
@Test public void annotatedEquivalence() {
annotatedEquivalence(TypeName.VOID);
annotatedEquivalence(ArrayTypeName.get(Object[].class));
annotatedEquivalence(ClassName.get(Object.class));
annotatedEquivalence(ParameterizedTypeName.get(List.class, Object.class));
annotatedEquivalence(TypeVariableName.get(Object.class));
annotatedEquivalence(WildcardTypeName.get(Object.class));
}
private void annotatedEquivalence(TypeName type) {
assertFalse(type.isAnnotated());
assertEquals(type, type);
assertEquals(type.annotated(TYPE_USE_ANNOTATION), type.annotated(TYPE_USE_ANNOTATION));
assertNotEquals(type, type.annotated(TYPE_USE_ANNOTATION));
assertEquals(type.hashCode(), type.hashCode());
assertEquals(type.annotated(TYPE_USE_ANNOTATION).hashCode(),
type.annotated(TYPE_USE_ANNOTATION).hashCode());
assertNotEquals(type.hashCode(), type.annotated(TYPE_USE_ANNOTATION).hashCode());
}
// https://github.com/square/javapoet/issues/431
@Test public void annotatedNestedType() {
TypeName type = TypeName.get(Map.Entry.class).annotated(TYPE_USE_ANNOTATION);
assertThat(type.toString()).isEqualTo("java.util.Map. @" + TUA + " Entry");
}
@Test public void annotatedEnclosingAndNestedType() {
TypeName type = ((ClassName) TypeName.get(Map.class).annotated(TYPE_USE_ANNOTATION))
.nestedClass("Entry").annotated(TYPE_USE_ANNOTATION);
assertThat(type.toString()).isEqualTo("java.util. @" + TUA + " Map. @" + TUA + " Entry");
}
// https://github.com/square/javapoet/issues/431
@Test public void annotatedNestedParameterizedType() {
TypeName type = ParameterizedTypeName.get(Map.Entry.class, Byte.class, Byte.class)
.annotated(TYPE_USE_ANNOTATION);
assertThat(type.toString())
.isEqualTo("java.util.Map. @" + TUA + " Entry<java.lang.Byte, java.lang.Byte>");
}
@Test public void withoutAnnotationsOnAnnotatedEnclosingAndNestedType() {
TypeName type = ((ClassName) TypeName.get(Map.class).annotated(TYPE_USE_ANNOTATION))
.nestedClass("Entry").annotated(TYPE_USE_ANNOTATION);
assertThat(type.isAnnotated()).isTrue();
assertThat(type.withoutAnnotations()).isEqualTo(TypeName.get(Map.Entry.class));
}
@Test public void withoutAnnotationsOnAnnotatedEnclosingType() {
TypeName type = ((ClassName) TypeName.get(Map.class).annotated(TYPE_USE_ANNOTATION))
.nestedClass("Entry");
assertThat(type.isAnnotated()).isTrue();
assertThat(type.withoutAnnotations()).isEqualTo(TypeName.get(Map.Entry.class));
}
@Test public void withoutAnnotationsOnAnnotatedNestedType() {
TypeName type = ((ClassName) TypeName.get(Map.class))
.nestedClass("Entry").annotated(TYPE_USE_ANNOTATION);
assertThat(type.isAnnotated()).isTrue();
assertThat(type.withoutAnnotations()).isEqualTo(TypeName.get(Map.Entry.class));
}
// https://github.com/square/javapoet/issues/614
@Test public void annotatedArrayType() {
TypeName type = ArrayTypeName.of(ClassName.get(Object.class)).annotated(TYPE_USE_ANNOTATION);
assertThat(type.toString()).isEqualTo("java.lang.Object @" + TUA + " []");
}
@Test public void annotatedArrayElementType() {
TypeName type = ArrayTypeName.of(ClassName.get(Object.class).annotated(TYPE_USE_ANNOTATION));
assertThat(type.toString()).isEqualTo("java.lang. @" + TUA + " Object[]");
}
// https://github.com/square/javapoet/issues/614
@Test public void annotatedOuterMultidimensionalArrayType() {
TypeName type = ArrayTypeName.of(ArrayTypeName.of(ClassName.get(Object.class)))
.annotated(TYPE_USE_ANNOTATION);
assertThat(type.toString()).isEqualTo("java.lang.Object @" + TUA + " [][]");
}
// https://github.com/square/javapoet/issues/614
@Test public void annotatedInnerMultidimensionalArrayType() {
TypeName type = ArrayTypeName.of(ArrayTypeName.of(ClassName.get(Object.class))
.annotated(TYPE_USE_ANNOTATION));
assertThat(type.toString()).isEqualTo("java.lang.Object[] @" + TUA + " []");
}
// https://github.com/square/javapoet/issues/614
@Test public void annotatedArrayTypeVarargsParameter() {
TypeName type = ArrayTypeName.of(ArrayTypeName.of(ClassName.get(Object.class)))
.annotated(TYPE_USE_ANNOTATION);
MethodSpec varargsMethod = MethodSpec.methodBuilder("m")
.addParameter(
ParameterSpec.builder(type, "p")
.build())
.varargs()
.build();
assertThat(varargsMethod.toString()).isEqualTo(""
+ "void m(java.lang.Object @" + TUA + " []... p) {\n"
+ "}\n");
}
// https://github.com/square/javapoet/issues/614
@Test public void annotatedArrayTypeInVarargsParameter() {
TypeName type = ArrayTypeName.of(ArrayTypeName.of(ClassName.get(Object.class))
.annotated(TYPE_USE_ANNOTATION));
MethodSpec varargsMethod = MethodSpec.methodBuilder("m")
.addParameter(
ParameterSpec.builder(type, "p")
.build())
.varargs()
.build();
assertThat(varargsMethod.toString()).isEqualTo(""
+ "void m(java.lang.Object[] @" + TUA + " ... p) {\n"
+ "}\n");
}
}
|
TypeUseAnnotation
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/server/authorization/OidcProviderConfigurationTests.java
|
{
"start": 13730,
"end": 14840
}
|
class ____ {
@Bean
SecurityFilterChain authorizationServerSecurityFilterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.oauth2AuthorizationServer((authorizationServer) ->
authorizationServer
.oidc(Customizer.withDefaults()) // Enable OpenID Connect 1.0
);
// @formatter:on
return http.build();
}
@Bean
RegisteredClientRepository registeredClientRepository() {
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().build();
return new InMemoryRegisteredClientRepository(registeredClient);
}
@Bean
JWKSource<SecurityContext> jwkSource() {
return new ImmutableJWKSet<>(new JWKSet(TestJwks.DEFAULT_RSA_JWK));
}
@Bean
JwtDecoder jwtDecoder(JWKSource<SecurityContext> jwkSource) {
return OAuth2AuthorizationServerConfiguration.jwtDecoder(jwkSource);
}
@Bean
AuthorizationServerSettings authorizationServerSettings() {
return AuthorizationServerSettings.builder().issuer(ISSUER).build();
}
}
@EnableWebSecurity
@Configuration(proxyBeanMethods = false)
static
|
AuthorizationServerConfiguration
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/bytecode/internal/bytebuddy/SuperTypesEnhancementTest.java
|
{
"start": 1517,
"end": 4359
}
|
class ____ {
private static final ByteBuddyProxyHelper helper = new ByteBuddyProxyHelper( new ByteBuddyState() );
private static Stream<Arguments> superTypeMethods() {
return Arrays.stream( PrimeAmongSecondarySupertypes.class.getDeclaredMethods() )
// need to filter out methods added by jacoco
.filter( method -> !method.isSynthetic() )
.map( Arguments::of );
}
private static Stream<Arguments> interfaces() {
return Arrays.stream( PrimeAmongSecondarySupertypes.class.getDeclaredMethods() )
// need to filter out methods added by jacoco
.filter( method -> !method.isSynthetic() )
.map( m -> m.getReturnType() )
.map( e -> Arguments.of( e ) );
}
@ParameterizedTest
@MethodSource("superTypeMethods")
public void testNamingConventions(Method m) {
final Class<?> returnType = m.getReturnType();
final String expectedMethodName = "as" + returnType.getSimpleName();
assertEquals( expectedMethodName, m.getName() );
assertNotNull( m.isDefault() );
}
@ParameterizedTest
@MethodSource("superTypeMethods")
public void testAllsubInterfacesExtendTheSingleparent(Method m) {
final Class<?> returnType = m.getReturnType();
assertTrue( PrimeAmongSecondarySupertypes.class.isAssignableFrom( returnType ) );
}
@ParameterizedTest
@MethodSource("superTypeMethods")
public void testSubInterfaceOverrides(Method m) throws NoSuchMethodException {
final Class<?> returnType = m.getReturnType();
final Method subMethod = returnType.getMethod( m.getName(), m.getParameterTypes() );
assertNotNull( subMethod );
assertNotNull( subMethod.isDefault() );
}
@Test
public void testHibernateProxyGeneration() {
ProxyFactory enhancer = createProxyFactory( SampleClass.class, HibernateProxy.class );
final Object proxy = enhancer.getProxy( Integer.valueOf( 1 ), null );
assertTrue( HibernateProxy.class.isAssignableFrom( proxy.getClass() ) );
assertTrue( proxy instanceof HibernateProxy );
PrimeAmongSecondarySupertypes casted = (PrimeAmongSecondarySupertypes) proxy;
final HibernateProxy extracted = casted.asHibernateProxy();
assertNotNull( extracted );
Assertions.assertSame( proxy, extracted );
testForLIE( (SampleClass) proxy );
}
/**
* Self-check: verify that this is in fact a lazy proxy
*/
private void testForLIE(SampleClass sampleProxy) {
SampleClass other = new SampleClass();
assertEquals( 7, other.additionMethod( 3,4 ) );
Assertions.assertThrows( LazyInitializationException.class, () -> sampleProxy.additionMethod( 3, 4 ) );
}
private ProxyFactory createProxyFactory(Class<?> persistentClass, Class<?>... interfaces) {
ByteBuddyProxyFactory proxyFactory = new ByteBuddyProxyFactory( helper );
proxyFactory.postInstantiate( "", persistentClass, Set.of( interfaces ), null, null, null );
return proxyFactory;
}
//Just a
|
SuperTypesEnhancementTest
|
java
|
apache__logging-log4j2
|
log4j-jcl/src/main/java/org/apache/logging/log4j/jcl/LogFactoryImpl.java
|
{
"start": 1440,
"end": 2881
}
|
class ____ extends LogFactory {
private final LoggerAdapter<Log> adapter = new LogAdapter();
private final ConcurrentMap<String, Object> attributes = new ConcurrentHashMap<>();
@Override
public Log getInstance(final String name) throws LogConfigurationException {
return adapter.getLogger(name);
}
@Override
public Object getAttribute(final String name) {
return attributes.get(name);
}
@Override
public String[] getAttributeNames() {
return attributes.keySet().toArray(Strings.EMPTY_ARRAY);
}
@Override
public Log getInstance(@SuppressWarnings("rawtypes") final Class clazz) throws LogConfigurationException {
return getInstance(clazz.getName());
}
/**
* This method is supposed to clear all loggers. In this implementation it will clear all the logger
* wrappers but the loggers managed by the underlying logger context will not be.
*/
@Override
public void release() {
try {
adapter.close();
} catch (final IOException ignored) {
}
}
@Override
public void removeAttribute(final String name) {
attributes.remove(name);
}
@Override
public void setAttribute(final String name, final Object value) {
if (value != null) {
attributes.put(name, value);
} else {
removeAttribute(name);
}
}
}
|
LogFactoryImpl
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpgradeJobModelSnapshotAction.java
|
{
"start": 1118,
"end": 1503
}
|
class ____ extends ActionType<UpgradeJobModelSnapshotAction.Response> {
public static final UpgradeJobModelSnapshotAction INSTANCE = new UpgradeJobModelSnapshotAction();
public static final String NAME = "cluster:admin/xpack/ml/job/model_snapshots/upgrade";
private UpgradeJobModelSnapshotAction() {
super(NAME);
}
public static
|
UpgradeJobModelSnapshotAction
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/params/ParameterizedClassIntegrationTests.java
|
{
"start": 43794,
"end": 44163
}
|
enum ____ {
BAR
}
@ParameterizedClass
@MethodSource("parameters")
record MethodSourceConstructorInjectionTestCase(String value) {
static Stream<String> parameters() {
return Stream.of("foo", "bar");
}
@Test
void test() {
assertTrue(value.equals("foo") || value.equals("bar"));
}
}
@ParameterizedClass
@MethodSource("parameters")
static
|
EnumTwo
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProgramDriver.java
|
{
"start": 4003,
"end": 5575
}
|
class ____ the rest
* of the command line arguments.
* @param args The argument from the user. args[0] is the command to run.
* @return -1 on error, 0 on success
* @throws NoSuchMethodException when a particular method cannot be found.
* @throws SecurityException security manager to indicate a security violation.
* @throws IllegalAccessException for backward compatibility.
* @throws IllegalArgumentException if the arg is invalid.
* @throws Throwable Anything thrown by the example program's main
*/
public int run(String[] args)
throws Throwable
{
// Make sure they gave us a program name.
if (args.length == 0) {
System.out.println("An example program must be given as the" +
" first argument.");
printUsage(programs);
return -1;
}
// And that it is good.
ProgramDescription pgm = programs.get(args[0]);
if (pgm == null) {
System.out.println("Unknown program '" + args[0] + "' chosen.");
printUsage(programs);
return -1;
}
// Remove the leading argument and call main
String[] new_args = new String[args.length - 1];
for(int i=1; i < args.length; ++i) {
new_args[i-1] = args[i];
}
pgm.invoke(new_args);
return 0;
}
/**
* API compatible with Hadoop 1.x.
*
* @param argv argv.
* @throws Throwable Anything thrown
* by the example program's main
*/
public void driver(String[] argv) throws Throwable {
if (run(argv) == -1) {
System.exit(-1);
}
}
}
|
with
|
java
|
apache__camel
|
components/camel-aws/camel-aws2-s3/src/test/java/org/apache/camel/component/aws2/s3/integration/S3StreamUploadTimestampTimeoutIT.java
|
{
"start": 1512,
"end": 4980
}
|
class ____ extends Aws2S3Base {
@EndpointInject
private ProducerTemplate template;
@EndpointInject("mock:result")
private MockEndpoint result;
@Test
public void sendInWithTimestampAndTimeout() throws Exception {
for (int i = 1; i <= 2; i++) {
int count = i * 23;
result.expectedMessageCount(count);
long beforeUpload = System.currentTimeMillis();
for (int j = 0; j < 23; j++) {
template.sendBody("direct:stream1", "TestData\n");
}
long afterUpload = System.currentTimeMillis();
Awaitility.await().atMost(11, TimeUnit.SECONDS)
.untilAsserted(() -> MockEndpoint.assertIsSatisfied(context));
Awaitility.await().atMost(11, TimeUnit.SECONDS)
.untilAsserted(() -> {
Exchange ex = template.request("direct:listObjects", this::process);
List<S3Object> resp = ex.getMessage().getBody(List.class);
assertEquals(1, resp.size());
// Verify the uploaded file uses timestamp naming strategy
S3Object s3Object = resp.get(0);
String key = s3Object.key();
// The file should either be the base name or have a timestamp suffix
if ("fileTest.txt".equals(key)) {
// This is fine - it's the base file
} else if (key.startsWith("fileTest-") && key.endsWith(".txt")) {
// Extract and validate timestamp
String timestampStr = key.substring("fileTest-".length(), key.length() - ".txt".length());
try {
long timestamp = Long.parseLong(timestampStr);
assertTrue(timestamp >= beforeUpload && timestamp <= afterUpload + 11000, // Allow extra time for timeout
"Timestamp " + timestamp + " should be within expected range");
} catch (NumberFormatException e) {
throw new AssertionError("Expected numeric timestamp in filename: " + key, e);
}
} else {
throw new AssertionError("Unexpected filename format: " + key);
}
});
}
}
private void process(Exchange exchange) {
exchange.getIn().setHeader(AWS2S3Constants.S3_OPERATION, AWS2S3Operations.listObjects);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
String awsEndpoint1
= String.format(
"aws2-s3://%s?autoCreateBucket=true&streamingUploadMode=true&keyName=fileTest.txt&batchMessageNumber=25&namingStrategy=timestamp&streamingUploadTimeout=10000",
name.get());
from("direct:stream1").to(awsEndpoint1).to("mock:result");
String awsEndpoint = String.format("aws2-s3://%s?autoCreateBucket=true",
name.get());
from("direct:listObjects").to(awsEndpoint);
}
};
}
}
|
S3StreamUploadTimestampTimeoutIT
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/FinallyTest.java
|
{
"start": 4261,
"end": 7744
}
|
class ____ {
public void completeWithReturn(boolean flag) {
try {
} finally {
// BUG: Diagnostic contains:
return;
}
}
public void completeWithThrow(boolean flag) throws Exception {
try {
} finally {
// BUG: Diagnostic contains:
throw new Exception();
}
}
public void unreachableThrow(boolean flag) throws Exception {
try {
} finally {
if (flag) {
// BUG: Diagnostic contains:
throw new Exception();
}
}
}
public void nestedBlocks(int i, boolean flag) throws Exception {
try {
} finally {
switch (i) {
default:
{
while (flag) {
do {
if (flag) {
} else {
// BUG: Diagnostic contains:
throw new Exception();
}
} while (flag);
}
}
}
}
}
public void nestedFinally() throws Exception {
try {
} finally {
try {
} finally {
// BUG: Diagnostic contains:
throw new IOException();
}
}
}
public void returnFromTryNestedInFinally() {
try {
} finally {
try {
// BUG: Diagnostic contains:
return;
} finally {
}
}
}
public void returnFromCatchNestedInFinally() {
try {
} finally {
try {
} catch (Exception e) {
// BUG: Diagnostic contains:
return;
} finally {
}
}
}
public void throwUncaughtFromNestedTryInFinally() throws Exception {
try {
} finally {
try {
// BUG: Diagnostic contains:
throw new Exception();
} finally {
}
}
}
public void throwFromNestedCatchInFinally() throws Exception {
try {
} finally {
try {
} catch (Exception e) {
// BUG: Diagnostic contains:
throw new Exception();
} finally {
}
}
}
}\
""")
.doTest();
}
@Test
public void negativeCase1() {
compilationHelper
.addSourceLines(
"FinallyNegativeCase1.java",
"""
package com.google.errorprone.bugpatterns.testdata;
/**
* @author eaftan@google.com (Eddie Aftandilian)
*/
public
|
FinallyPositiveCase2
|
java
|
redisson__redisson
|
redisson/src/test/java/org/redisson/executor/RedissonExecutorServiceTest.java
|
{
"start": 1058,
"end": 9164
}
|
class ____ extends RedisDockerTest {
private static RedissonNode node;
@BeforeEach
public void before() {
Config config = createConfig();
RedissonNodeConfig nodeConfig = new RedissonNodeConfig(config);
nodeConfig.setExecutorServiceWorkers(Collections.singletonMap("test", 1));
node = RedissonNode.create(nodeConfig);
node.start();
}
@AfterEach
public void after() {
node.shutdown();
}
private void cancel(RExecutorFuture<?> future) throws InterruptedException, ExecutionException {
assertThat(future.cancel(true)).isTrue();
boolean canceled = false;
try {
future.get();
} catch (CancellationException e) {
canceled = true;
}
assertThat(canceled).isTrue();
}
@Test
public void testTaskCount() throws InterruptedException {
RExecutorService e = redisson.getExecutorService("test");
assertThat(e.getTaskCount()).isEqualTo(0);
e.submit(new DelayedTask(1000, "testcounter"));
e.submit(new DelayedTask(1000, "testcounter"));
assertThat(e.getTaskCount()).isEqualTo(2);
for (int i = 0; i < 20; i++) {
e.submit(new RunnableTask());
}
assertThat(e.getTaskCount()).isBetween(5, 22);
Thread.sleep(1500);
assertThat(e.getTaskCount()).isZero();
}
@Test
public void testBatchSubmitRunnable() throws InterruptedException, ExecutionException, TimeoutException {
RExecutorService e = redisson.getExecutorService("test");
RExecutorBatchFuture future = e.submit(new IncrementRunnableTask("myCounter"), new IncrementRunnableTask("myCounter"),
new IncrementRunnableTask("myCounter"), new IncrementRunnableTask("myCounter"));
future.get(5, TimeUnit.SECONDS);
future.getTaskFutures().stream().forEach(x -> x.toCompletableFuture().join());
redisson.getKeys().delete("myCounter");
assertThat(redisson.getKeys().count()).isZero();
}
@Test
public void testBatchSubmitCallable() throws InterruptedException, ExecutionException, TimeoutException {
RExecutorService e = redisson.getExecutorService("test");
RExecutorBatchFuture future = e.submit(new IncrementCallableTask("myCounter"), new IncrementCallableTask("myCounter"),
new IncrementCallableTask("myCounter"), new IncrementCallableTask("myCounter"));
future.get(5, TimeUnit.SECONDS);
future.getTaskFutures().stream().forEach(x -> assertThat(x.toCompletableFuture().getNow(null)).isEqualTo("1234"));
redisson.getKeys().delete("myCounter");
assertThat(redisson.getKeys().count()).isZero();
}
@Test
public void testBatchExecuteNPE() {
Assertions.assertThrows(NullPointerException.class, () -> {
RExecutorService e = redisson.getExecutorService("test");
e.execute();
});
}
// @Test
public void testTaskFinishing() throws Exception {
AtomicInteger counter = new AtomicInteger();
new MockUp<TasksRunnerService>() {
@Mock
private void finish(Invocation invocation, String requestId) {
if (counter.incrementAndGet() > 1) {
invocation.proceed();
}
}
};
Config config = createConfig();
RedissonNodeConfig nodeConfig = new RedissonNodeConfig(config);
nodeConfig.setExecutorServiceWorkers(Collections.singletonMap("test2", 1));
node.shutdown();
node = RedissonNode.create(nodeConfig);
node.start();
RExecutorService executor = redisson.getExecutorService("test2");
RExecutorFuture<?> f = executor.submit(new FailoverTask("finished"));
Thread.sleep(2000);
node.shutdown();
f.get();
assertThat(redisson.<Boolean>getBucket("finished").get()).isTrue();
}
@Test
public void testFailoverInSentinel() throws Exception {
withSentinel((nodes, config) -> {
node.shutdown();
config.useSentinelServers().setRetryAttempts(10);
RedissonNodeConfig nodeConfig = new RedissonNodeConfig(config);
nodeConfig.setExecutorServiceWorkers(Collections.singletonMap("test2", 1));
node = RedissonNode.create(nodeConfig);
node.start();
RedissonClient redisson = Redisson.create(config);
RExecutorService executor = redisson.getExecutorService("test2", ExecutorOptions.defaults().taskRetryInterval(25, TimeUnit.SECONDS));
for (int i = 0; i < 10; i++) {
executor.submit(new DelayedTask(2000, "counter"));
}
try {
Thread.sleep(2500);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
assertThat(redisson.getAtomicLong("counter").get()).isEqualTo(1);
Integer port = nodes.get(0).getFirstMappedPort();
nodes.get(0).stop();
System.out.println("master stopped! " + port);
Awaitility.waitAtMost(Duration.ofSeconds(120)).untilAsserted(() -> {
assertThat(redisson.getAtomicLong("counter").get()).isEqualTo(10);
});
redisson.shutdown();
}, 1);
}
@Test
public void testNodeFailover() throws Exception {
AtomicInteger counter = new AtomicInteger();
new MockUp<TasksRunnerService>() {
@Mock
void finish(Invocation invocation, String requestId, boolean removeTask) {
if (counter.incrementAndGet() > 1) {
invocation.proceed();
} else {
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
};
Config config = createConfig();
RedissonNodeConfig nodeConfig = new RedissonNodeConfig(config);
nodeConfig.setExecutorServiceWorkers(Collections.singletonMap("test2", 1));
node.shutdown();
node = RedissonNode.create(nodeConfig);
node.start();
RExecutorService executor = redisson.getExecutorService("test2", ExecutorOptions.defaults().taskRetryInterval(10, TimeUnit.SECONDS));
RExecutorFuture<?> f = executor.submit(new IncrementRunnableTask("counter"));
assertThat(executor.getTaskCount()).isEqualTo(1);
Thread.sleep(1000);
assertThat(redisson.getAtomicLong("counter").get()).isEqualTo(1);
Thread.sleep(1000);
System.out.println("shutdown");
node.shutdown();
assertThat(executor.getTaskCount()).isEqualTo(1);
node = RedissonNode.create(nodeConfig);
node.start();
assertThat(executor.getTaskCount()).isEqualTo(1);
Thread.sleep(8500);
assertThat(executor.getTaskCount()).isEqualTo(0);
assertThat(redisson.getAtomicLong("counter").get()).isEqualTo(2);
Thread.sleep(16000);
assertThat(executor.getTaskCount()).isEqualTo(0);
assertThat(redisson.getAtomicLong("counter").get()).isEqualTo(2);
redisson.getKeys().delete("counter");
f.get();
assertThat(redisson.getKeys().count()).isEqualTo(3);
}
@Test
public void testBatchExecute() {
RExecutorService e = redisson.getExecutorService("test");
e.execute(new IncrementRunnableTask("myCounter"), new IncrementRunnableTask("myCounter"),
new IncrementRunnableTask("myCounter"), new IncrementRunnableTask("myCounter"));
await().atMost(Duration.ofSeconds(5)).until(() -> redisson.getAtomicLong("myCounter").get() == 4);
redisson.getKeys().delete("myCounter");
assertThat(redisson.getKeys().count()).isZero();
}
public static
|
RedissonExecutorServiceTest
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/concurrent/LazyInitializerSingleInstanceTest.java
|
{
"start": 1430,
"end": 2386
}
|
class ____ extends LazyInitializer<Object> {
@Override
protected Object initialize() {
return new Object();
}
}
/** The initializer to be tested. */
private LazyInitializerTestImpl initializer;
/**
* Creates the initializer to be tested. This implementation returns the {@code LazyInitializer} created in the {@code setUp()} method.
*
* @return the initializer to be tested
*/
@Override
protected LazyInitializer<Object> createInitializer() {
return initializer;
}
@BeforeEach
public void setUp() {
initializer = new LazyInitializerTestImpl();
}
@Test
void testIsInitialized() throws ConcurrentException {
final LazyInitializer<Object> initializer = createInitializer();
assertFalse(initializer.isInitialized());
initializer.get();
assertTrue(initializer.isInitialized());
}
}
|
LazyInitializerTestImpl
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/bug/Bug_for_issue_470.java
|
{
"start": 946,
"end": 1017
}
|
class ____ {
public boolean value;
}
private static
|
VO
|
java
|
apache__flink
|
flink-core-api/src/main/java/org/apache/flink/api/common/state/StateDeclarations.java
|
{
"start": 6982,
"end": 8519
}
|
class ____<IN, OUT, ACC> implements Serializable {
private static final long serialVersionUID = 1L;
private final String name;
private final TypeDescriptor<ACC> stateTypeDescriptor;
private final AggregateFunction<IN, ACC, OUT> aggregateFunction;
public AggregatingStateDeclarationBuilder(
String name,
TypeDescriptor<ACC> stateTypeDescriptor,
AggregateFunction<IN, ACC, OUT> aggregateFunction) {
this.name = name;
this.stateTypeDescriptor = stateTypeDescriptor;
this.aggregateFunction = aggregateFunction;
}
AggregatingStateDeclaration<IN, ACC, OUT> build() {
return new AggregatingStateDeclaration<IN, ACC, OUT>() {
@Override
public TypeDescriptor<ACC> getTypeDescriptor() {
return stateTypeDescriptor;
}
@Override
public AggregateFunction<IN, ACC, OUT> getAggregateFunction() {
return aggregateFunction;
}
@Override
public String getName() {
return name;
}
@Override
public RedistributionMode getRedistributionMode() {
return RedistributionMode.NONE;
}
};
}
}
/** Builder for {@link MapStateDeclaration}. */
@Experimental
public static
|
AggregatingStateDeclarationBuilder
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/ServletAnnotationControllerHandlerMethodTests.java
|
{
"start": 127150,
"end": 127355
}
|
class ____ {
@RequestMapping(method = RequestMethod.GET)
public void doGet(HttpServletRequest req, HttpServletResponse resp) {
}
}
@Controller
@RequestMapping("/child/test")
static
|
ParentController
|
java
|
apache__camel
|
core/camel-main/src/test/java/org/apache/camel/main/support/MyDummyConfiguration.java
|
{
"start": 850,
"end": 1272
}
|
class ____ {
private boolean log;
private NestedConfig nested;
public boolean isLog() {
return log;
}
public MyDummyConfiguration setLog(boolean log) {
this.log = log;
return this;
}
public NestedConfig getNested() {
return nested;
}
public void setNested(NestedConfig nested) {
this.nested = nested;
}
public static
|
MyDummyConfiguration
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/context/annotation/ConfigurationClassBeanDefinitionReader.java
|
{
"start": 15394,
"end": 18569
}
|
class ____ kicks in...
if (!this.registry.isBeanDefinitionOverridable(beanName)) {
throw new BeanDefinitionOverrideException(beanName,
new ConfigurationClassBeanDefinition(configClass, beanMethod.getMetadata(), beanName),
existingBeanDef,
"@Bean definition illegally overridden by existing bean definition: " + existingBeanDef);
}
if (logger.isDebugEnabled()) {
logger.debug("Skipping bean definition for %s: a definition for bean '%s' already exists. " +
"This top-level bean definition is considered as an override.".formatted(beanMethod, beanName));
}
return true;
}
private void loadBeanDefinitionsFromImportedResources(
Map<String, Class<? extends BeanDefinitionReader>> importedResources) {
Map<Class<?>, BeanDefinitionReader> readerInstanceCache = new HashMap<>();
importedResources.forEach((resource, readerClass) -> {
// Default reader selection necessary?
if (BeanDefinitionReader.class == readerClass) {
if (StringUtils.endsWithIgnoreCase(resource, ".groovy")) {
// When clearly asking for Groovy, that's what they'll get...
readerClass = GroovyBeanDefinitionReader.class;
}
else {
// Primarily ".xml" files but for any other extension as well
readerClass = XmlBeanDefinitionReader.class;
}
}
BeanDefinitionReader reader = readerInstanceCache.get(readerClass);
if (reader == null) {
try {
Constructor<? extends BeanDefinitionReader> constructor =
readerClass.getDeclaredConstructor(BeanDefinitionRegistry.class);
// Instantiate the specified BeanDefinitionReader
reader = BeanUtils.instantiateClass(constructor, this.registry);
// Delegate the current ResourceLoader and Environment to it if possible
if (reader instanceof AbstractBeanDefinitionReader abdr) {
abdr.setResourceLoader(this.resourceLoader);
abdr.setEnvironment(this.environment);
}
readerInstanceCache.put(readerClass, reader);
}
catch (Throwable ex) {
throw new IllegalStateException(
"Could not instantiate BeanDefinitionReader class [" + readerClass.getName() + "]", ex);
}
}
reader.loadBeanDefinitions(resource);
});
}
private void loadBeanDefinitionsFromImportBeanDefinitionRegistrars(
Map<ImportBeanDefinitionRegistrar, AnnotationMetadata> registrars) {
registrars.forEach((registrar, metadata) ->
registrar.registerBeanDefinitions(metadata, this.registry, this.importBeanNameGenerator));
}
private void loadBeanDefinitionsFromBeanRegistrars(MultiValueMap<String, BeanRegistrar> registrars) {
if (!(this.registry instanceof ListableBeanFactory beanFactory)) {
throw new IllegalStateException("Cannot support bean registrars since " +
this.registry.getClass().getName() + " does not implement ListableBeanFactory");
}
registrars.values().forEach(registrarList -> registrarList.forEach(registrar -> registrar.register(new BeanRegistryAdapter(
this.registry, beanFactory, this.environment, registrar.getClass()), this.environment)));
}
/**
* {@link RootBeanDefinition} marker subclass used to signify that a bean definition
* was created from a configuration
|
processing
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/jobmaster/DefaultExecutionDeploymentReconcilerTest.java
|
{
"start": 6117,
"end": 7169
}
|
class ____
implements ExecutionDeploymentReconciliationHandler {
private final Collection<ExecutionAttemptID> missingExecutions = new ArrayList<>();
private final Collection<ExecutionAttemptID> unknownExecutions = new ArrayList<>();
@Override
public void onMissingDeploymentsOf(
Collection<ExecutionAttemptID> executionAttemptIds,
ResourceID hostingTaskExecutor) {
missingExecutions.addAll(executionAttemptIds);
}
@Override
public void onUnknownDeploymentsOf(
Collection<ExecutionAttemptID> executionAttemptIds,
ResourceID hostingTaskExecutor) {
unknownExecutions.addAll(executionAttemptIds);
}
public Collection<ExecutionAttemptID> getMissingExecutions() {
return missingExecutions;
}
public Collection<ExecutionAttemptID> getUnknownExecutions() {
return unknownExecutions;
}
}
}
|
TestingExecutionDeploymentReconciliationHandler
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/CaffeineLoadCacheEndpointBuilderFactory.java
|
{
"start": 1615,
"end": 12316
}
|
interface ____
extends
EndpointProducerBuilder {
default AdvancedCaffeineLoadCacheEndpointBuilder advanced() {
return (AdvancedCaffeineLoadCacheEndpointBuilder) this;
}
/**
* To configure the default cache action. If an action is set in the
* message header, then the operation from the header takes precedence.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param action the value to set
* @return the dsl builder
*/
default CaffeineLoadCacheEndpointBuilder action(String action) {
doSetProperty("action", action);
return this;
}
/**
* Automatic create the Caffeine cache if none has been configured or
* exists in the registry.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: producer
*
* @param createCacheIfNotExist the value to set
* @return the dsl builder
*/
default CaffeineLoadCacheEndpointBuilder createCacheIfNotExist(boolean createCacheIfNotExist) {
doSetProperty("createCacheIfNotExist", createCacheIfNotExist);
return this;
}
/**
* Automatic create the Caffeine cache if none has been configured or
* exists in the registry.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: producer
*
* @param createCacheIfNotExist the value to set
* @return the dsl builder
*/
default CaffeineLoadCacheEndpointBuilder createCacheIfNotExist(String createCacheIfNotExist) {
doSetProperty("createCacheIfNotExist", createCacheIfNotExist);
return this;
}
/**
* Set the eviction Type for this cache.
*
* The option is a:
* <code>org.apache.camel.component.caffeine.EvictionType</code> type.
*
* Default: SIZE_BASED
* Group: producer
*
* @param evictionType the value to set
* @return the dsl builder
*/
default CaffeineLoadCacheEndpointBuilder evictionType(org.apache.camel.component.caffeine.EvictionType evictionType) {
doSetProperty("evictionType", evictionType);
return this;
}
/**
* Set the eviction Type for this cache.
*
* The option will be converted to a
* <code>org.apache.camel.component.caffeine.EvictionType</code> type.
*
* Default: SIZE_BASED
* Group: producer
*
* @param evictionType the value to set
* @return the dsl builder
*/
default CaffeineLoadCacheEndpointBuilder evictionType(String evictionType) {
doSetProperty("evictionType", evictionType);
return this;
}
/**
* Specifies that each entry should be automatically removed from the
* cache once a fixed duration has elapsed after the entry's creation,
* the most recent replacement of its value, or its last read. Access
* time is reset by all cache read and write operations. The unit is in
* seconds.
*
* The option is a: <code>int</code> type.
*
* Default: 300
* Group: producer
*
* @param expireAfterAccessTime the value to set
* @return the dsl builder
*/
default CaffeineLoadCacheEndpointBuilder expireAfterAccessTime(int expireAfterAccessTime) {
doSetProperty("expireAfterAccessTime", expireAfterAccessTime);
return this;
}
/**
* Specifies that each entry should be automatically removed from the
* cache once a fixed duration has elapsed after the entry's creation,
* the most recent replacement of its value, or its last read. Access
* time is reset by all cache read and write operations. The unit is in
* seconds.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 300
* Group: producer
*
* @param expireAfterAccessTime the value to set
* @return the dsl builder
*/
default CaffeineLoadCacheEndpointBuilder expireAfterAccessTime(String expireAfterAccessTime) {
doSetProperty("expireAfterAccessTime", expireAfterAccessTime);
return this;
}
/**
* Specifies that each entry should be automatically removed from the
* cache once a fixed duration has elapsed after the entry's creation,
* or the most recent replacement of its value. The unit is in seconds.
*
* The option is a: <code>int</code> type.
*
* Default: 300
* Group: producer
*
* @param expireAfterWriteTime the value to set
* @return the dsl builder
*/
default CaffeineLoadCacheEndpointBuilder expireAfterWriteTime(int expireAfterWriteTime) {
doSetProperty("expireAfterWriteTime", expireAfterWriteTime);
return this;
}
/**
* Specifies that each entry should be automatically removed from the
* cache once a fixed duration has elapsed after the entry's creation,
* or the most recent replacement of its value. The unit is in seconds.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 300
* Group: producer
*
* @param expireAfterWriteTime the value to set
* @return the dsl builder
*/
default CaffeineLoadCacheEndpointBuilder expireAfterWriteTime(String expireAfterWriteTime) {
doSetProperty("expireAfterWriteTime", expireAfterWriteTime);
return this;
}
/**
* Sets the minimum total size for the internal data structures.
* Providing a large enough estimate at construction time avoids the
* need for expensive resizing operations later, but setting this value
* unnecessarily high wastes memory.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Group: producer
*
* @param initialCapacity the value to set
* @return the dsl builder
*/
default CaffeineLoadCacheEndpointBuilder initialCapacity(Integer initialCapacity) {
doSetProperty("initialCapacity", initialCapacity);
return this;
}
/**
* Sets the minimum total size for the internal data structures.
* Providing a large enough estimate at construction time avoids the
* need for expensive resizing operations later, but setting this value
* unnecessarily high wastes memory.
*
* The option will be converted to a <code>java.lang.Integer</code>
* type.
*
* Group: producer
*
* @param initialCapacity the value to set
* @return the dsl builder
*/
default CaffeineLoadCacheEndpointBuilder initialCapacity(String initialCapacity) {
doSetProperty("initialCapacity", initialCapacity);
return this;
}
/**
* To configure the default action key. If a key is set in the message
* header, then the key from the header takes precedence.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param key the value to set
* @return the dsl builder
*/
default CaffeineLoadCacheEndpointBuilder key(String key) {
doSetProperty("key", key);
return this;
}
/**
* Specifies the maximum number of entries the cache may contain. Note
* that the cache may evict an entry before this limit is exceeded or
* temporarily exceed the threshold while evicting. As the cache size
* grows close to the maximum, the cache evicts entries that are less
* likely to be used again. For example, the cache may evict an entry
* because it hasn't been used recently or very often. When size is
* zero, elements will be evicted immediately after being loaded into
* the cache. This can be useful in testing or to disable caching
* temporarily without a code change. As eviction is scheduled on the
* configured executor, tests may instead prefer to configure the cache
* to execute tasks directly on the same thread.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Group: producer
*
* @param maximumSize the value to set
* @return the dsl builder
*/
default CaffeineLoadCacheEndpointBuilder maximumSize(Integer maximumSize) {
doSetProperty("maximumSize", maximumSize);
return this;
}
/**
* Specifies the maximum number of entries the cache may contain. Note
* that the cache may evict an entry before this limit is exceeded or
* temporarily exceed the threshold while evicting. As the cache size
* grows close to the maximum, the cache evicts entries that are less
* likely to be used again. For example, the cache may evict an entry
* because it hasn't been used recently or very often. When size is
* zero, elements will be evicted immediately after being loaded into
* the cache. This can be useful in testing or to disable caching
* temporarily without a code change. As eviction is scheduled on the
* configured executor, tests may instead prefer to configure the cache
* to execute tasks directly on the same thread.
*
* The option will be converted to a <code>java.lang.Integer</code>
* type.
*
* Group: producer
*
* @param maximumSize the value to set
* @return the dsl builder
*/
default CaffeineLoadCacheEndpointBuilder maximumSize(String maximumSize) {
doSetProperty("maximumSize", maximumSize);
return this;
}
}
/**
* Advanced builder for endpoint for the Caffeine LoadCache component.
*/
public
|
CaffeineLoadCacheEndpointBuilder
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/event/internal/PostInsertEventListenerStandardImpl.java
|
{
"start": 519,
"end": 1192
}
|
class ____ implements PostInsertEventListener, CallbackRegistryConsumer {
private CallbackRegistry callbackRegistry;
public PostInsertEventListenerStandardImpl() {
}
@Override
public void injectCallbackRegistry(CallbackRegistry callbackRegistry) {
this.callbackRegistry = callbackRegistry;
}
@Override
public void onPostInsert(PostInsertEvent event) {
Object entity = event.getEntity();
callbackRegistry.postCreate( entity );
}
@Override
public boolean requiresPostCommitHandling(EntityPersister persister) {
return callbackRegistry.hasRegisteredCallbacks( persister.getMappedClass(), CallbackType.POST_PERSIST );
}
}
|
PostInsertEventListenerStandardImpl
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/legacy/Root.java
|
{
"start": 300,
"end": 2573
}
|
class ____ implements Serializable, Named {
private Long id;
private Root otherRoot;
private Set details = new HashSet();
private Set moreDetails = new HashSet();
private Set incoming = new HashSet();
private Set outgoing = new HashSet();
private String name="root";
private Date stamp;
private int version;
private BigDecimal bigDecimal = new BigDecimal("1234.123");
private int x;
private Collection allDetails;
public Root() {
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public int getX() {
return x;
}
public void setX(int x) {
this.x = x;
}
public Set getDetails() {
return details;
}
private void setDetails(Set details) {
this.details = details;
}
public void addDetail(Detail d) {
details.add(d);
}
public void removeDetail(Detail d) {
details.remove(d);
}
public void addIncoming(Root m) {
incoming.add(m);
}
public void removeIncoming(Root m) {
incoming.remove(m);
}
public void addOutgoing(Root m) {
outgoing.add(m);
}
public void removeOutgoing(Root m) {
outgoing.remove(m);
}
public Set getIncoming() {
return incoming;
}
public void setIncoming(Set incoming) {
this.incoming = incoming;
}
public Set getOutgoing() {
return outgoing;
}
public void setOutgoing(Set outgoing) {
this.outgoing = outgoing;
}
public Set getMoreDetails() {
return moreDetails;
}
public void setMoreDetails(Set moreDetails) {
this.moreDetails = moreDetails;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Date getStamp() {
return stamp;
}
public void setStamp(Date stamp) {
this.stamp = stamp;
}
public BigDecimal getBigDecimal() {
return bigDecimal;
}
public void setBigDecimal(BigDecimal bigDecimal) {
this.bigDecimal = bigDecimal;
}
/**
* @return
*/
public Root getOtherRoot() {
return otherRoot;
}
/**
* @param root
*/
public void setOtherRoot(Root root) {
otherRoot = root;
}
/**
* @return Returns the allDetails.
*/
public Collection getAllDetails() {
return allDetails;
}
/**
* @param allDetails The allDetails to set.
*/
public void setAllDetails(Collection allDetails) {
this.allDetails = allDetails;
}
}
|
Root
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/javadoc/InvalidSnippet.java
|
{
"start": 1653,
"end": 2616
}
|
class ____ extends BugChecker
implements ClassTreeMatcher, MethodTreeMatcher, VariableTreeMatcher {
private void scanTags(VisitorState state, DocTreePath path) {
new InvalidTagChecker(state).scan(path, null);
}
@Override
public Description matchClass(ClassTree classTree, VisitorState state) {
DocTreePath path = Utils.getDocTreePath(state);
if (path != null) {
scanTags(state, path);
}
return Description.NO_MATCH;
}
@Override
public Description matchMethod(MethodTree methodTree, VisitorState state) {
DocTreePath path = Utils.getDocTreePath(state);
if (path != null) {
scanTags(state, path);
}
return Description.NO_MATCH;
}
@Override
public Description matchVariable(VariableTree variableTree, VisitorState state) {
DocTreePath path = Utils.getDocTreePath(state);
if (path != null) {
scanTags(state, path);
}
return Description.NO_MATCH;
}
final
|
InvalidSnippet
|
java
|
quarkusio__quarkus
|
integration-tests/smallrye-graphql/src/test/java/io/quarkus/it/smallrye/graphql/GreetingsResourceTest.java
|
{
"start": 415,
"end": 1992
}
|
class ____ {
@Test
public void test() throws IOException {
String loadMutation = getPayload(mutation);
given()
.when()
.accept(MEDIATYPE_JSON)
.contentType(MEDIATYPE_JSON)
.body(loadMutation)
.post("/graphql")
.then()
.statusCode(200)
.and()
.body(containsString("afr"))
.body(containsString("Goeie more"))
.body(containsString("Goeie middag"))
.body(containsString("Goeie naand"));
}
private String mutation = "mutation LoadGreetings {\n" +
" load(greetings : \n" +
" {\n" +
" language: \"afr\",\n" +
" hellos:[\n" +
" {\n" +
" message:\"Goeie more\",\n" +
" time: \"07:00\"\n" +
" },\n" +
" {\n" +
" message:\"Goeie middag\",\n" +
" time: \"13:00\"\n" +
" \n" +
" },\n" +
" {\n" +
" message:\"Goeie naand\",\n" +
" time: \"18:00\"\n" +
" }\n" +
" ]\n" +
" }\n" +
" ){\n" +
" language,\n" +
" hellos {\n" +
" time\n" +
" message\n" +
" }\n" +
" }\n" +
"}";
}
|
GreetingsResourceTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateConfig.java
|
{
"start": 582,
"end": 4745
}
|
class ____ {
private final String templateName;
private final String fileName;
private final int version;
private final String versionProperty;
private final Map<String, String> variables;
/**
* Describes a template to be loaded from a resource file. Includes handling for substituting a version property into the template.
*
* The {@code versionProperty} parameter will be used to substitute the value of {@code version} into the template. For example,
* this template:
* {@code {"myTemplateVersion": "${my.version.property}"}}
* With {@code version = "42"; versionProperty = "my.version.property"} will result in {@code {"myTemplateVersion": "42"}}.
*
* Note that this code does not automatically insert the {@code version} index template property - include that in the JSON file
* defining the template, preferably using the version variable provided to this constructor.
*
* @param templateName The name that will be used for the index template. Literal, include the version in this string if
* it should be used.
* @param fileName The filename the template should be loaded from. Literal, should include leading {@literal /} and
* extension if necessary.
* @param version The version of the template. Substituted for {@code versionProperty} as described above.
* @param versionProperty The property that will be replaced with the {@code version} string as described above.
*/
public IndexTemplateConfig(String templateName, String fileName, int version, String versionProperty) {
this(templateName, fileName, version, versionProperty, Collections.emptyMap());
}
/**
* Describes a template to be loaded from a resource file. Includes handling for substituting a version property into the template.
*
* The {@code versionProperty} parameter will be used to substitute the value of {@code version} into the template. For example,
* this template:
* {@code {"myTemplateVersion": "${my.version.property}"}}
* With {@code version = "42"; versionProperty = "my.version.property"} will result in {@code {"myTemplateVersion": "42"}}.
*
* @param templateName The name that will be used for the index template. Literal, include the version in this string if
* it should be used.
* @param fileName The filename the template should be loaded from. Literal, should include leading {@literal /} and
* extension if necessary.
* @param version The version of the template. Substituted for {@code versionProperty} as described above.
* @param versionProperty The property that will be replaced with the {@code version} string as described above.
* @param variables A map of additional variable substitutions. The map's keys are the variable names.
* The corresponding values will replace the variable names.
*/
public IndexTemplateConfig(String templateName, String fileName, int version, String versionProperty, Map<String, String> variables) {
this.templateName = templateName;
this.fileName = fileName;
this.version = version;
this.versionProperty = versionProperty;
this.variables = Objects.requireNonNull(variables);
}
public String getFileName() {
return fileName;
}
public String getTemplateName() {
return templateName;
}
public int getVersion() {
return version;
}
/**
* Loads the template from disk as a UTF-8 byte array.
* @return The template as a UTF-8 byte array.
*/
public byte[] loadBytes() {
String template = TemplateUtils.loadTemplate(fileName, Integer.toString(version), versionProperty, variables);
assert template != null && template.length() > 0;
assert Pattern.compile("\"version\"\\s*:\\s*" + version).matcher(template).find()
: "index template must have a version property set to the given version property";
return template.getBytes(StandardCharsets.UTF_8);
}
}
|
IndexTemplateConfig
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/cursor/defaults/DefaultCursor.java
|
{
"start": 5130,
"end": 6158
}
|
class ____ implements Iterator<T> {
/**
* Holder for the next object to be returned.
*/
T object;
/**
* Index of objects returned using next(), and as such, visible to users.
*/
int iteratorIndex = -1;
@Override
public boolean hasNext() {
if (!objectWrapperResultHandler.fetched) {
object = fetchNextUsingRowBound();
}
return objectWrapperResultHandler.fetched;
}
@Override
public T next() {
// Fill next with object fetched from hasNext()
T next = object;
if (!objectWrapperResultHandler.fetched) {
next = fetchNextUsingRowBound();
}
if (objectWrapperResultHandler.fetched) {
objectWrapperResultHandler.fetched = false;
object = null;
iteratorIndex++;
return next;
}
throw new NoSuchElementException();
}
@Override
public void remove() {
throw new UnsupportedOperationException("Cannot remove element from Cursor");
}
}
}
|
CursorIterator
|
java
|
apache__flink
|
flink-clients/src/test/java/org/apache/flink/client/program/DefaultPackagedProgramRetrieverITCase.java
|
{
"start": 13930,
"end": 15423
}
|
class ____ the system
// classpath (analogously to
// what we already do for the user classpath)
// see
// testDeriveEntryClassInformationFromSystemClasspathWithNonExistingJobClassName
testInstance.getPackagedProgram();
})
.isInstanceOf(FlinkException.class);
}
@Test
void testEntryClassNotFoundOnUserClasspath() {
final String jobClassName = "NotExistingClass";
assertThatThrownBy(
() ->
DefaultPackagedProgramRetriever.create(
noEntryClassClasspathProvider.getDirectory(),
jobClassName,
new String[0],
new Configuration())
.getPackagedProgram())
.isInstanceOf(FlinkException.class)
.hasMessageContaining("Could not load the provided entrypoint class.")
.cause()
.hasMessageContaining(
"The program's entry point class '%s' was not found in the jar file.",
jobClassName);
}
@Test
void testWithoutJobClassAndMultipleEntryClassesOnUserClasspath() {
// without a job
|
on
|
java
|
google__truth
|
core/src/main/java/com/google/common/truth/LazyMessage.java
|
{
"start": 945,
"end": 2174
}
|
class ____ {
private final String format;
private final @Nullable Object[] args;
private LazyMessage(String format, @Nullable Object[] args) {
this.format = format;
this.args = args;
int placeholders = countPlaceholders(format);
checkArgument(
placeholders == args.length,
"Incorrect number of args (%s) for the given placeholders (%s) in string template:\"%s\"",
args.length,
placeholders,
format);
}
@Override
public String toString() {
return lenientFormatForFailure(format, args);
}
@VisibleForTesting
static int countPlaceholders(String template) {
int index = 0;
int count = 0;
while (true) {
index = template.indexOf("%s", index);
if (index == -1) {
break;
}
index++;
count++;
}
return count;
}
static ImmutableList<String> evaluateAll(List<LazyMessage> messages) {
ImmutableList.Builder<String> result = ImmutableList.builder();
for (LazyMessage message : messages) {
result.add(message.toString());
}
return result.build();
}
static LazyMessage create(String format, @Nullable Object[] args) {
return new LazyMessage(format, args);
}
}
|
LazyMessage
|
java
|
apache__spark
|
sql/catalyst/src/main/java/org/apache/spark/sql/connector/ExternalCommandRunner.java
|
{
"start": 1285,
"end": 1405
}
|
interface ____ be instantiated when end users call `SparkSession#executeCommand`.
*
* @since 3.0.0
*/
@Stable
public
|
will
|
java
|
quarkusio__quarkus
|
integration-tests/smallrye-graphql/src/main/java/io/quarkus/it/smallrye/graphql/metricresources/TestRandom.java
|
{
"start": 122,
"end": 458
}
|
class ____ {
private double value;
public TestRandom() {
this(Math.random());
}
public TestRandom(double value) {
super();
this.value = value;
}
public double getValue() {
return value;
}
public void setValue(double value) {
this.value = value;
}
}
|
TestRandom
|
java
|
spring-projects__spring-boot
|
core/spring-boot-test/src/main/java/org/springframework/boot/test/json/JsonContentAssert.java
|
{
"start": 3070,
"end": 5285
}
|
class ____ to load resources
* @param charset the charset of the JSON resources
* @param json the actual JSON content
* @param configuration the json-path configuration
*/
JsonContentAssert(Class<?> resourceLoadClass, @Nullable Charset charset, @Nullable CharSequence json,
Configuration configuration) {
super(json, JsonContentAssert.class);
this.configuration = configuration;
this.loader = new JsonLoader(resourceLoadClass, charset);
}
/**
* Overridden version of {@code isEqualTo} to perform JSON tests based on the object
* type.
* @see org.assertj.core.api.AbstractAssert#isEqualTo(java.lang.Object)
*/
@Override
public JsonContentAssert isEqualTo(@Nullable Object expected) {
if (expected == null || expected instanceof CharSequence) {
return isEqualToJson((CharSequence) expected);
}
if (expected instanceof byte[] bytes) {
return isEqualToJson(bytes);
}
if (expected instanceof File file) {
return isEqualToJson(file);
}
if (expected instanceof InputStream inputStream) {
return isEqualToJson(inputStream);
}
if (expected instanceof Resource resource) {
return isEqualToJson(resource);
}
failWithMessage("Unsupported type for JSON assert %s", expected.getClass());
return this;
}
/**
* Verifies that the actual value is {@link JSONCompareMode#LENIENT leniently} equal
* to the specified JSON. The {@code expected} value can contain the JSON itself or,
* if it ends with {@code .json}, the name of a resource to be loaded using
* {@code resourceLoadClass}.
* @param expected the expected JSON or the name of a resource containing the expected
* JSON
* @return {@code this} assertion object
* @throws AssertionError if the actual JSON value is not equal to the given one
*/
public JsonContentAssert isEqualToJson(@Nullable CharSequence expected) {
String expectedJson = this.loader.getJson(expected);
return assertNotFailed(compare(expectedJson, JSONCompareMode.LENIENT));
}
/**
* Verifies that the actual value is {@link JSONCompareMode#LENIENT leniently} equal
* to the specified JSON resource.
* @param path the name of a resource containing the expected JSON
* @param resourceLoadClass the source
|
used
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/bug/Issue869_1.java
|
{
"start": 2176,
"end": 2259
}
|
class ____{
public String id;
public String title;
}
}
|
Properties
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/issues/SentExchangeEventNotifierIssueTest.java
|
{
"start": 1489,
"end": 3826
}
|
class ____ extends EventNotifierSupport {
private int counter;
@Override
public void notify(CamelEvent event) {
counter++;
}
@Override
public boolean isEnabled(CamelEvent event) {
return event instanceof ExchangeSentEvent;
}
public int getCounter() {
return counter;
}
public void reset() {
counter = 0;
}
}
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
context.getManagementStrategy().addEventNotifier(notifier);
return context;
}
@Test
public void testExchangeSentNotifier() {
notifier.reset();
String out = template.requestBody("direct:start", "Hello World", String.class);
assertEquals("I was here", out);
// should only be one event
assertEquals(1, notifier.getCounter());
}
@Test
public void testExchangeSentNotifierExchange() {
notifier.reset();
Exchange out = template.request("direct:start", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setBody("Hello World");
}
});
assertEquals("I was here", out.getIn().getBody());
// should only be one event
assertEquals(1, notifier.getCounter());
}
@Test
public void testExchangeSentNotifierManualExchange() {
notifier.reset();
Exchange exchange = new DefaultExchange(context);
exchange.getIn().setBody("Hello World");
template.send("direct:start", exchange);
assertEquals("I was here", exchange.getIn().getBody());
// should only be one event
assertEquals(1, notifier.getCounter());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").process(new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setBody("I was here");
}
});
}
};
}
}
|
MyNotifier
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/InterruptEscalator.java
|
{
"start": 6203,
"end": 7191
}
|
class ____ implements Runnable {
private final int shutdownTimeMillis;
private final AtomicBoolean serviceWasShutdown =
new AtomicBoolean(false);
private Service service;
public ServiceForcedShutdown(Service service, int shutdownTimeMillis) {
this.shutdownTimeMillis = shutdownTimeMillis;
this.service = service;
}
/**
* Shutdown callback: stop the service and set an atomic boolean
* if it stopped within the shutdown time.
*/
@Override
public void run() {
if (service != null) {
service.stop();
serviceWasShutdown.set(
service.waitForServiceToStop(shutdownTimeMillis));
} else {
serviceWasShutdown.set(true);
}
}
/**
* Probe for the service being shutdown.
* @return true if the service has been shutdown in the runnable
*/
private boolean getServiceWasShutdown() {
return serviceWasShutdown.get();
}
}
}
|
ServiceForcedShutdown
|
java
|
redisson__redisson
|
redisson/src/test/java/org/redisson/RedissonScoredSortedSetReactiveTest.java
|
{
"start": 489,
"end": 11805
}
|
class ____ extends BaseReactiveTest {
@Test
public void testFirstLast() {
RScoredSortedSetReactive<String> set = redisson.getScoredSortedSet("simple");
sync(set.add(0.1, "a"));
sync(set.add(0.2, "b"));
sync(set.add(0.3, "c"));
sync(set.add(0.4, "d"));
Assertions.assertEquals("a", sync(set.first()));
Assertions.assertEquals("d", sync(set.last()));
}
@Test
public void testRemoveRangeByScore() {
RScoredSortedSetReactive<String> set = redisson.getScoredSortedSet("simple");
sync(set.add(0.1, "a"));
sync(set.add(0.2, "b"));
sync(set.add(0.3, "c"));
sync(set.add(0.4, "d"));
sync(set.add(0.5, "e"));
sync(set.add(0.6, "f"));
sync(set.add(0.7, "g"));
Assertions.assertEquals(2, sync(set.removeRangeByScore(0.1, false, 0.3, true)).intValue());
assertThat(sync(set)).containsOnly("a", "d", "e", "f", "g");
}
@Test
public void testRemoveRangeByRank() {
RScoredSortedSetReactive<String> set = redisson.getScoredSortedSet("simple");
sync(set.add(0.1, "a"));
sync(set.add(0.2, "b"));
sync(set.add(0.3, "c"));
sync(set.add(0.4, "d"));
sync(set.add(0.5, "e"));
sync(set.add(0.6, "f"));
sync(set.add(0.7, "g"));
Assertions.assertEquals(2, sync(set.removeRangeByRank(0, 1)).intValue());
assertThat(sync(set)).containsOnly("c", "d", "e", "f", "g");
}
@Test
public void testRank() {
RScoredSortedSetReactive<String> set = redisson.getScoredSortedSet("simple");
sync(set.add(0.1, "a"));
sync(set.add(0.2, "b"));
sync(set.add(0.3, "c"));
sync(set.add(0.4, "d"));
sync(set.add(0.5, "e"));
sync(set.add(0.6, "f"));
sync(set.add(0.7, "g"));
Assertions.assertEquals(3, sync(set.rank("d")).intValue());
}
@Test
public void testAddAsync() throws InterruptedException, ExecutionException {
RScoredSortedSetReactive<Integer> set = redisson.getScoredSortedSet("simple");
Assertions.assertTrue(sync(set.add(0.323, 2)));
Assertions.assertFalse(sync(set.add(0.323, 2)));
Assertions.assertTrue(sync(set.contains(2)));
}
@Test
public void testRemoveAsync() throws InterruptedException, ExecutionException {
RScoredSortedSetReactive<Integer> set = redisson.getScoredSortedSet("simple");
sync(set.add(0.11, 1));
sync(set.add(0.22, 3));
sync(set.add(0.33, 7));
Assertions.assertTrue(sync(set.remove(1)));
Assertions.assertFalse(sync(set.contains(1)));
assertThat(sync(set)).containsExactly(3, 7);
Assertions.assertFalse(sync(set.remove(1)));
assertThat(sync(set)).containsExactly(3, 7);
sync(set.remove(3));
Assertions.assertFalse(sync(set.contains(3)));
assertThat(sync(set)).containsExactly(7);
}
@Test
public void testIteratorNextNext() {
RScoredSortedSetReactive<String> set = redisson.getScoredSortedSet("simple");
sync(set.add(1, "1"));
sync(set.add(2, "4"));
Iterator<String> iter = toIterator(set.iterator());
Assertions.assertEquals("1", iter.next());
Assertions.assertEquals("4", iter.next());
Assertions.assertFalse(iter.hasNext());
}
@Test
public void testIteratorSequence() {
RScoredSortedSetReactive<Integer> set = redisson.getScoredSortedSet("simple");
for (int i = 0; i < 1000; i++) {
sync(set.add(i, Integer.valueOf(i)));
}
Set<Integer> setCopy = new HashSet<Integer>();
for (int i = 0; i < 1000; i++) {
setCopy.add(Integer.valueOf(i));
}
checkIterator(set, setCopy);
}
private void checkIterator(RScoredSortedSetReactive<Integer> set, Set<Integer> setCopy) {
for (Iterator<Integer> iterator = toIterator(set.iterator()); iterator.hasNext();) {
Integer value = iterator.next();
if (!setCopy.remove(value)) {
Assertions.fail();
}
}
Assertions.assertEquals(0, setCopy.size());
}
@Test
public void testRetainAll() {
RScoredSortedSetReactive<Integer> set = redisson.getScoredSortedSet("simple");
for (int i = 0; i < 20000; i++) {
sync(set.add(i, i));
}
Assertions.assertTrue(sync(set.retainAll(Arrays.asList(1, 2))));
assertThat(sync(set)).contains(1, 2);
Assertions.assertEquals(2, sync(set.size()).intValue());
}
@Test
public void testRemoveAll() {
RScoredSortedSetReactive<Integer> set = redisson.getScoredSortedSet("simple");
sync(set.add(0.1, 1));
sync(set.add(0.2, 2));
sync(set.add(0.3, 3));
Assertions.assertTrue(sync(set.removeAll(Arrays.asList(1, 2))));
assertThat(sync(set)).contains(3);
Assertions.assertEquals(1, sync(set.size()).intValue());
}
@Test
public void testSort() {
RScoredSortedSetReactive<Integer> set = redisson.getScoredSortedSet("simple");
Assertions.assertTrue(sync(set.add(4, 2)));
Assertions.assertTrue(sync(set.add(5, 3)));
Assertions.assertTrue(sync(set.add(3, 1)));
Assertions.assertTrue(sync(set.add(6, 4)));
Assertions.assertTrue(sync(set.add(1000, 10)));
Assertions.assertTrue(sync(set.add(1, -1)));
Assertions.assertTrue(sync(set.add(2, 0)));
assertThat(sync(set)).containsExactly(-1, 0, 1, 2, 3, 4, 10);
Assertions.assertEquals(-1, (int)sync(set.first()));
Assertions.assertEquals(10, (int)sync(set.last()));
}
@Test
public void testRemove() {
RScoredSortedSetReactive<Integer> set = redisson.getScoredSortedSet("simple");
sync(set.add(4, 5));
sync(set.add(2, 3));
sync(set.add(0, 1));
sync(set.add(1, 2));
sync(set.add(3, 4));
Assertions.assertFalse(sync(set.remove(0)));
Assertions.assertTrue(sync(set.remove(3)));
assertThat(sync(set)).containsExactly(1, 2, 4, 5);
}
@Test
public void testContainsAll() {
RScoredSortedSetReactive<Integer> set = redisson.getScoredSortedSet("simple");
for (int i = 0; i < 200; i++) {
sync(set.add(i, i));
}
Assertions.assertTrue(sync(set.containsAll(Arrays.asList(30, 11))));
Assertions.assertFalse(sync(set.containsAll(Arrays.asList(30, 711, 11))));
}
@Test
public void testContains() {
RScoredSortedSetReactive<TestObject> set = redisson.getScoredSortedSet("simple");
sync(set.add(0, new TestObject("1", "2")));
sync(set.add(1, new TestObject("1", "2")));
sync(set.add(2, new TestObject("2", "3")));
sync(set.add(3, new TestObject("3", "4")));
sync(set.add(4, new TestObject("5", "6")));
Assertions.assertTrue(sync(set.contains(new TestObject("2", "3"))));
Assertions.assertTrue(sync(set.contains(new TestObject("1", "2"))));
Assertions.assertFalse(sync(set.contains(new TestObject("1", "9"))));
}
@Test
public void testDuplicates() {
RScoredSortedSetReactive<TestObject> set = redisson.getScoredSortedSet("simple");
Assertions.assertTrue(sync(set.add(0, new TestObject("1", "2"))));
Assertions.assertFalse(sync(set.add(0, new TestObject("1", "2"))));
Assertions.assertTrue(sync(set.add(2, new TestObject("2", "3"))));
Assertions.assertTrue(sync(set.add(3, new TestObject("3", "4"))));
Assertions.assertTrue(sync(set.add(4, new TestObject("5", "6"))));
Assertions.assertEquals(4, sync(set.size()).intValue());
}
@Test
public void testSize() {
RScoredSortedSetReactive<Integer> set = redisson.getScoredSortedSet("simple");
sync(set.add(0, 1));
sync(set.add(1, 2));
sync(set.add(2, 3));
sync(set.add(2, 3));
sync(set.add(3, 4));
sync(set.add(4, 5));
sync(set.add(4, 5));
Assertions.assertEquals(5, sync(set.size()).intValue());
}
@Test
public void testValueRange() {
RScoredSortedSetReactive<Integer> set = redisson.getScoredSortedSet("simple");
sync(set.add(0, 1));
sync(set.add(1, 2));
sync(set.add(2, 3));
sync(set.add(3, 4));
sync(set.add(4, 5));
sync(set.add(4, 5));
Collection<Integer> vals = sync(set.valueRange(0, -1));
assertThat(sync(set)).containsExactly(1, 2, 3, 4, 5);
}
@Test
public void testEntryRange() {
RScoredSortedSetReactive<Integer> set = redisson.getScoredSortedSet("simple");
sync(set.add(10, 1));
sync(set.add(20, 2));
sync(set.add(30, 3));
sync(set.add(40, 4));
sync(set.add(50, 5));
Collection<ScoredEntry<Integer>> vals = sync(set.entryRange(0, -1));
assertThat(vals).contains(new ScoredEntry<Integer>(10D, 1),
new ScoredEntry<Integer>(20D, 2),
new ScoredEntry<Integer>(30D, 3),
new ScoredEntry<Integer>(40D, 4),
new ScoredEntry<Integer>(50D, 5));
}
@Test
public void testScoredSortedSetValueRange() {
RScoredSortedSetReactive<String> set = redisson.<String>getScoredSortedSet("simple");
sync(set.add(0, "a"));
sync(set.add(1, "b"));
sync(set.add(2, "c"));
sync(set.add(3, "d"));
sync(set.add(4, "e"));
Collection<String> r = sync(set.valueRange(1, true, 4, false, 1, 2));
String[] a = r.toArray(new String[0]);
Assertions.assertArrayEquals(new String[]{"c", "d"}, a);
}
@Test
public void testScoredSortedSetEntryRange() {
RScoredSortedSetReactive<String> set = redisson.<String>getScoredSortedSet("simple");
sync(set.add(0, "a"));
sync(set.add(1, "b"));
sync(set.add(2, "c"));
sync(set.add(3, "d"));
sync(set.add(4, "e"));
Collection<ScoredEntry<String>> r = sync(set.entryRange(1, true, 4, false, 1, 2));
ScoredEntry<String>[] a = r.toArray(new ScoredEntry[0]);
Assertions.assertEquals(2d, a[0].getScore(), 0);
Assertions.assertEquals(3d, a[1].getScore(), 0);
Assertions.assertEquals("c", a[0].getValue());
Assertions.assertEquals("d", a[1].getValue());
}
@Test
public void testAddAndGet() throws InterruptedException {
RScoredSortedSetReactive<Integer> set = redisson.getScoredSortedSet("simple", StringCodec.INSTANCE);
sync(set.add(1, 100));
Double res = sync(set.addScore(100, 11));
Assertions.assertEquals(12, (double)res, 0);
Double score = sync(set.getScore(100));
Assertions.assertEquals(12, (double)score, 0);
RScoredSortedSetReactive<Integer> set2 = redisson.getScoredSortedSet("simple", StringCodec.INSTANCE);
sync(set2.add(100.2, 1));
Double res2 = sync(set2.addScore(1, new Double(12.1)));
Assertions.assertTrue(new Double(112.3).compareTo(res2) == 0);
res2 = sync(set2.getScore(1));
Assertions.assertTrue(new Double(112.3).compareTo(res2) == 0);
}
}
|
RedissonScoredSortedSetReactiveTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/batch/BatchSubselectCollection2Test.java
|
{
"start": 1545,
"end": 5993
}
|
class ____ {
@BeforeAll
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
EntityA entityA = new EntityA();
EntityB entityB = new EntityB();
EntityB entityB1 = new EntityB();
EntityC entityC = new EntityC();
EntityD entityD = new EntityD();
EntityD entityD1 = new EntityD();
EntityE entityE = new EntityE();
entityA.entityB = entityB;
entityB.entityD = entityD;
entityB.listOfEntitiesC.add( entityC );
entityC.entityB = entityB;
entityD.listOfEntitiesB.add( entityB );
entityD.openingB = entityB;
entityD1.openingB = entityB1;
entityE.entityD = entityD;
session.persist( entityA );
session.persist( entityB );
session.persist( entityB1 );
session.persist( entityC );
session.persist( entityD );
session.persist( entityD1 );
session.persist( entityE );
}
);
}
@Test
public void testSelectEntityE(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
List<EntityE> entitiesE = session.createQuery( "select e from EntityE e", EntityE.class )
.getResultList();
assertThat( entitiesE ).hasSize( 1 );
EntityE entityE = entitiesE.get( 0 );
EntityD entityD = entityE.getEntityD();
assertThat( entityD ).isNotNull();
assertThat( entityD.getClosingB() ).isNull();
EntityB openingB = entityD.getOpeningB();
assertThat( openingB ).isNotNull();
assertThat( openingB.getListOfEntitiesC() ).hasSize( 1 );
}
);
}
@Test
public void testSelectingEntityAAfterSelectingEntityE(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
List<EntityE> entitiesE = session.createQuery( "select e from EntityE e", EntityE.class )
.getResultList();
assertThat( entitiesE ).hasSize( 1 );
EntityE entityE = entitiesE.get( 0 );
EntityD entityD = entityE.getEntityD();
assertThat( entityD ).isNotNull();
assertThat( entityD.getClosingB() ).isNull();
EntityB openingB = entityD.getOpeningB();
assertThat( openingB ).isNotNull();
assertThat( openingB.getListOfEntitiesC() ).hasSize( 1 );
List<EntityA> entitiesA = session.createQuery( "select a from EntityA a", EntityA.class )
.getResultList();
assertThat( entitiesA ).hasSize( 1 );
EntityB entityB = entitiesA.get( 0 ).getEntityB();
assertThat( entityB ).isNotNull();
assertThat( entityB ).isSameAs( openingB );
assertThat( openingB.getListOfEntitiesC() ).hasSize( 1 );
}
);
}
@Test
public void testSelectEntityA(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
List<EntityA> entitiesA = session.createQuery( "select a from EntityA a", EntityA.class )
.getResultList();
assertThat( entitiesA ).hasSize( 1 );
EntityB entityB = entitiesA.get( 0 ).getEntityB();
assertThat( entityB ).isNotNull();
assertThat( entityB.getListOfEntitiesC() ).hasSize( 1 );
}
);
}
@Test
public void testGetEntityD(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
EntityD entityD = session.get( EntityD.class, 1 );
assertThat( entityD ).isNotNull();
EntityB entityB = entityD.getOpeningB();
assertThat( entityB ).isNotNull();
assertThat( entityB.getListOfEntitiesC() ).hasSize( 1 );
}
);
}
@Test
public void testSelectEntityDWithJoins(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
List<EntityD> entityDs = session.createQuery(
"from EntityD d left join fetch d.openingB left join fetch d.closingB" ).list();
assertThat( entityDs.size() ).isEqualTo( 2 );
EntityD entityD = entityDs.get( 0 );
assertThat( entityD ).isNotNull();
EntityB entityB = entityD.getOpeningB();
assertThat( entityB ).isNotNull();
assertThat( entityB.getListOfEntitiesC() ).hasSize( 1 );
}
);
}
@Test
public void testSelectEntityD(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
List<EntityD> entitiesD = session.createQuery( "select d from EntityD d", EntityD.class )
.getResultList();
assertThat( entitiesD ).hasSize( 2 );
EntityB entityB = entitiesD.get( 0 ).getOpeningB();
assertThat( entityB ).isNotNull();
assertThat( entityB.getListOfEntitiesC() ).hasSize( 1 );
}
);
}
@Entity(name = "EntityA")
@Table(name = "ENTITY_A")
public static
|
BatchSubselectCollection2Test
|
java
|
quarkusio__quarkus
|
extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/OidcTokenPropagationWithSecurityIdentityAugmentorLazyAuthTest.java
|
{
"start": 633,
"end": 2412
}
|
class ____ {
private static Class<?>[] testClasses = {
FrontendResource.class,
ProtectedResource.class,
AccessTokenPropagationService.class,
RolesResource.class,
RolesService.class,
RolesSecurityIdentityAugmentor.class
};
@RegisterExtension
static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(testClasses)
.addAsResource("application.properties")
.addAsResource(
new StringAsset(
"quarkus.resteasy-client-oidc-token-propagation.enabled-during-authentication=true\n" +
"quarkus.rest-client.\"roles\".uri=http://localhost:8081/roles\n" +
"quarkus.http.auth.proactive=false\n"),
"META-INF/microprofile-config.properties"));
@Test
public void testGetUserNameWithTokenPropagation() {
// request only succeeds if SecurityIdentityAugmentor managed to acquire 'tester' role for user 'alice'
// and that is only possible if access token is propagated during augmentation
RestAssured.given().auth().oauth2(getBearerAccessToken())
.when().get("/frontend/token-propagation-with-augmentor")
.then()
.statusCode(200)
.body(equalTo("Token issued to alice has been exchanged, new user name: bob"));
}
public String getBearerAccessToken() {
return OidcWiremockTestResource.getAccessToken(SUPPORTED_USER, Set.of("admin"));
}
}
|
OidcTokenPropagationWithSecurityIdentityAugmentorLazyAuthTest
|
java
|
quarkusio__quarkus
|
independent-projects/tools/registry-client/src/main/java/io/quarkus/registry/config/RegistryMavenConfig.java
|
{
"start": 197,
"end": 559
}
|
interface ____ {
/**
* Registry Maven repository configuration.
*
* @return registry Maven repository configuration
*/
RegistryMavenRepoConfig getRepository();
/** @return a mutable copy of this configuration */
default Mutable mutable() {
return new RegistryMavenConfigImpl.Builder(this);
}
|
RegistryMavenConfig
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java
|
{
"start": 842,
"end": 2853
}
|
class ____<Request extends MasterNodeReadRequest<Request>> extends MasterNodeRequest<Request> {
protected boolean local = false;
/**
* @param masterNodeTimeout Specifies how long to wait when the master has not been discovered yet, or is disconnected, or is busy
* processing other tasks:
* <ul>
* <li>
* For requests which originate in the REST layer, use
* {@link org.elasticsearch.rest.RestUtils#getMasterNodeTimeout} to determine the timeout.
* </li>
* <li>
* For internally-generated requests, choose an appropriate timeout. Often this will be an infinite
* timeout, {@link #INFINITE_MASTER_NODE_TIMEOUT}, since it is reasonable to wait for as long as necessary
* for internal requests to complete.
* </li>
* </ul>
*/
protected MasterNodeReadRequest(TimeValue masterNodeTimeout) {
super(masterNodeTimeout);
}
protected MasterNodeReadRequest(StreamInput in) throws IOException {
super(in);
local = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(local);
}
@SuppressWarnings("unchecked")
public final Request local(boolean local) {
this.local = local;
return (Request) this;
}
/**
* Return local information, do not retrieve the state from master node (default: false).
* @return <code>true</code> if local information is to be returned;
* <code>false</code> if information is to be retrieved from master node (default).
*/
public final boolean local() {
return local;
}
}
|
MasterNodeReadRequest
|
java
|
apache__camel
|
components/camel-kafka/src/test/java/org/apache/camel/component/kafka/integration/commit/KafkaConsumerSyncCommitIT.java
|
{
"start": 1263,
"end": 2526
}
|
class ____ extends BaseManualCommitTestSupport {
public static final String TOPIC = "testManualCommitSyncTest";
@AfterEach
public void after() {
cleanupKafka(TOPIC);
}
@Override
protected RouteBuilder createRouteBuilder() {
String from = "kafka:" + TOPIC
+ "?groupId=KafkaConsumerSyncCommitIT&pollTimeoutMs=1000&autoCommitEnable=false"
+ "&allowManualCommit=true&autoOffsetReset=earliest&kafkaManualCommitFactory=#class:org.apache.camel.component.kafka.consumer.DefaultKafkaManualCommitFactory";
return new RouteBuilder() {
@Override
public void configure() {
from(from).routeId("foo").to(KafkaTestUtil.MOCK_RESULT).process(e -> {
KafkaManualCommit manual = e.getIn().getHeader(KafkaConstants.MANUAL_COMMIT, KafkaManualCommit.class);
assertNotNull(manual);
manual.commit();
});
from(from).routeId("bar").autoStartup(false).to(KafkaTestUtil.MOCK_RESULT_BAR);
}
};
}
@RepeatedTest(1)
public void kafkaManualCommit() throws Exception {
kafkaManualCommitTest(TOPIC);
}
}
|
KafkaConsumerSyncCommitIT
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/cluster/metadata/ResettableValue.java
|
{
"start": 941,
"end": 1274
}
|
class ____ a value of type @{param T} that can be in one of 3 states:
* - It has a concrete value, or
* - It is missing, or
* - It is meant to reset any other when it is composed with it.
* It is mainly used in template composition to capture the case when the user wished to reset any previous values.
* @param <T>
*/
public
|
holds
|
java
|
apache__rocketmq
|
filter/src/main/java/org/apache/rocketmq/filter/parser/ParseException.java
|
{
"start": 1483,
"end": 7525
}
|
class ____.
*/
private static final long serialVersionUID = 1L;
/**
* This constructor is used by the method "generateParseException"
* in the generated parser. Calling this constructor generates
* a new object of this type with the fields "currentToken",
* "expectedTokenSequences", and "TOKEN_IMAGE" set.
*/
public ParseException(Token currentTokenVal,
int[][] expectedTokenSequencesVal,
String[] tokenImageVal
) {
super(initialise(currentTokenVal, expectedTokenSequencesVal, tokenImageVal));
currentToken = currentTokenVal;
expectedTokenSequences = expectedTokenSequencesVal;
tokenImage = tokenImageVal;
}
/**
* The following constructors are for use by you for whatever
* purpose you can think of. Constructing the exception in this
* manner makes the exception behave in the normal way - i.e., as
* documented in the class "Throwable". The fields "errorToken",
* "expectedTokenSequences", and "TOKEN_IMAGE" do not contain
* relevant information. The JavaCC generated code does not use
* these constructors.
*/
public ParseException() {
super();
}
/**
* Constructor with message.
*/
public ParseException(String message) {
super(message);
}
/**
* This is the last token that has been consumed successfully. If
* this object has been created due to a parse error, the token
* followng this token will (therefore) be the first error token.
*/
public Token currentToken;
/**
* Each entry in this array is an array of integers. Each array
* of integers represents a sequence of tokens (by their ordinal
* values) that is expected at this point of the parse.
*/
public int[][] expectedTokenSequences;
/**
* This is a reference to the "TOKEN_IMAGE" array of the generated
* parser within which the parse error occurred. This array is
* defined in the generated ...Constants interface.
*/
public String[] tokenImage;
/**
* It uses "currentToken" and "expectedTokenSequences" to generate a parse
* error message and returns it. If this object has been created
* due to a parse error, and you do not catch it (it gets thrown
* from the parser) the correct error message
* gets displayed.
*/
private static String initialise(Token currentToken,
int[][] expectedTokenSequences,
String[] tokenImage) {
String eol = System.getProperty("line.separator", "\n");
StringBuilder expected = new StringBuilder();
int maxSize = 0;
for (int i = 0; i < expectedTokenSequences.length; i++) {
if (maxSize < expectedTokenSequences[i].length) {
maxSize = expectedTokenSequences[i].length;
}
for (int j = 0; j < expectedTokenSequences[i].length; j++) {
expected.append(tokenImage[expectedTokenSequences[i][j]]).append(' ');
}
if (expectedTokenSequences[i][expectedTokenSequences[i].length - 1] != 0) {
expected.append("...");
}
expected.append(eol).append(" ");
}
String retval = "Encountered \"";
Token tok = currentToken.next;
for (int i = 0; i < maxSize; i++) {
if (i != 0) {
retval += " ";
}
if (tok.kind == 0) {
retval += tokenImage[0];
break;
}
retval += " " + tokenImage[tok.kind];
retval += " \"";
retval += add_escapes(tok.image);
retval += " \"";
tok = tok.next;
}
retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn;
retval += "." + eol;
if (expectedTokenSequences.length == 1) {
retval += "Was expecting:" + eol + " ";
} else {
retval += "Was expecting one of:" + eol + " ";
}
retval += expected.toString();
return retval;
}
/**
* The end of line string for this machine.
*/
protected String eol = System.getProperty("line.separator", "\n");
/**
* Used to convert raw characters to their escaped version
* when these raw version cannot be used as part of an ASCII
* string literal.
*/
static String add_escapes(String str) {
StringBuilder retval = new StringBuilder();
char ch;
for (int i = 0; i < str.length(); i++) {
switch (str.charAt(i)) {
case 0:
continue;
case '\b':
retval.append("\\b");
continue;
case '\t':
retval.append("\\t");
continue;
case '\n':
retval.append("\\n");
continue;
case '\f':
retval.append("\\f");
continue;
case '\r':
retval.append("\\r");
continue;
case '\"':
retval.append("\\\"");
continue;
case '\'':
retval.append("\\\'");
continue;
case '\\':
retval.append("\\\\");
continue;
default:
if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
String s = "0000" + Integer.toString(ch, 16);
retval.append("\\u" + s.substring(s.length() - 4, s.length()));
} else {
retval.append(ch);
}
continue;
}
}
return retval.toString();
}
}
/* JavaCC - OriginalChecksum=60cf9c227a487e4be49599bc903f0a6a (do not edit this line) */
|
changes
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/runtime/ShuffleCompressionITCase.java
|
{
"start": 5058,
"end": 6012
}
|
class ____ extends AbstractInvokable {
public LongValueSource(Environment environment) {
super(environment);
}
@Override
public void invoke() throws Exception {
ResultPartitionWriter resultPartitionWriter = getEnvironment().getWriter(0);
RecordWriterBuilder<LongValue> recordWriterBuilder = new RecordWriterBuilder<>();
if (useBroadcastPartitioner) {
recordWriterBuilder.setChannelSelector(new BroadcastPartitioner());
}
RecordWriter<LongValue> writer = recordWriterBuilder.build(resultPartitionWriter);
for (int i = 0; i < NUM_RECORDS_TO_SEND; ++i) {
writer.broadcastEmit(RECORD_TO_SEND);
}
writer.flushAll();
writer.close();
}
}
/** Test sink that receives {@link LongValue} and verifies the received records. */
public static final
|
LongValueSource
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/engine/jdbc/env/spi/SQLStateType.java
|
{
"start": 308,
"end": 794
}
|
enum ____ {
/**
* The reported codes follow the X/Open spec
*/
XOpen,
/**
* The reported codes follow the SQL spec
*/
SQL99,
/**
* It is unknown. Might follow another spec completely, or be a mixture.
*/
UNKNOWN;
public static SQLStateType interpretReportedSQLStateType(int sqlStateType) {
return switch ( sqlStateType ) {
case DatabaseMetaData.sqlStateSQL99 -> SQL99;
case DatabaseMetaData.sqlStateXOpen -> XOpen;
default -> UNKNOWN;
};
}
}
|
SQLStateType
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/method/TestPermissionEvaluator.java
|
{
"start": 838,
"end": 1202
}
|
class ____ implements PermissionEvaluator {
@Override
public boolean hasPermission(Authentication authentication, Object targetDomainObject, Object permission) {
return false;
}
@Override
public boolean hasPermission(Authentication authentication, Serializable targetId, String targetType,
Object permission) {
return false;
}
}
|
TestPermissionEvaluator
|
java
|
elastic__elasticsearch
|
x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/support/SamlFactory.java
|
{
"start": 2057,
"end": 10121
}
|
class ____ {
private final XMLObjectBuilderFactory builderFactory;
private final SecureRandom random;
private static final Logger LOGGER = LogManager.getLogger(SamlFactory.class);
public SamlFactory() {
SamlInit.initialize();
builderFactory = XMLObjectProviderRegistrySupport.getBuilderFactory();
random = new SecureRandom();
}
public <T extends XMLObject> T object(Class<T> type, QName elementName) {
final XMLObject obj = builderFactory.getBuilder(elementName).buildObject(elementName);
return cast(type, elementName, obj);
}
public <T extends XMLObject> T object(Class<T> type, QName elementName, QName schemaType) {
final XMLObject obj = builderFactory.getBuilder(schemaType).buildObject(elementName, schemaType);
return cast(type, elementName, obj);
}
private static <T extends XMLObject> T cast(Class<T> type, QName elementName, XMLObject obj) {
if (type.isInstance(obj)) {
return type.cast(obj);
} else {
throw new IllegalArgumentException(
"Object for element " + elementName.getLocalPart() + " is of type " + obj.getClass() + " not " + type
);
}
}
public String secureIdentifier() {
return randomNCName(20);
}
private String randomNCName(int numberBytes) {
final byte[] randomBytes = new byte[numberBytes];
random.nextBytes(randomBytes);
// NCNames (https://www.w3.org/TR/xmlschema-2/#NCName) can't start with a number, so start them all with "_" to be safe
return "_".concat(MessageDigests.toHexString(randomBytes));
}
public <T extends XMLObject> T buildObject(Class<T> type, QName elementName) {
final XMLObject obj = builderFactory.getBuilder(elementName).buildObject(elementName);
if (type.isInstance(obj)) {
return type.cast(obj);
} else {
throw new IllegalArgumentException(
"Object for element " + elementName.getLocalPart() + " is of type " + obj.getClass() + " not " + type
);
}
}
public String toString(Element element, boolean pretty) {
try {
StringWriter writer = new StringWriter();
print(element, writer, pretty);
return writer.toString();
} catch (TransformerException e) {
return "[" + element.getNamespaceURI() + "]" + element.getLocalName();
}
}
public static <T extends XMLObject> T buildXmlObject(Element element, Class<T> type) {
try {
UnmarshallerFactory unmarshallerFactory = getUnmarshallerFactory();
Unmarshaller unmarshaller = unmarshallerFactory.getUnmarshaller(element);
if (unmarshaller == null) {
throw new ElasticsearchSecurityException(
"XML element [{}] cannot be unmarshalled to SAML type [{}] (no unmarshaller)",
element.getTagName(),
type
);
}
final XMLObject object = unmarshaller.unmarshall(element);
if (type.isInstance(object)) {
return type.cast(object);
}
Object[] args = new Object[] { element.getTagName(), type.getName(), object.getClass().getName() };
throw new ElasticsearchSecurityException("SAML object [{}] is incorrect type. Expected [{}] but was [{}]", args);
} catch (UnmarshallingException e) {
throw new ElasticsearchSecurityException("Failed to unmarshall SAML content [{}]", e, element.getTagName());
}
}
void print(Element element, Writer writer, boolean pretty) throws TransformerException {
final Transformer serializer = XmlUtils.getHardenedXMLTransformer();
if (pretty) {
serializer.setOutputProperty(OutputKeys.INDENT, "yes");
}
serializer.transform(new DOMSource(element), new StreamResult(writer));
}
public String getXmlContent(SAMLObject object) {
return getXmlContent(object, false);
}
public String getXmlContent(SAMLObject object, boolean prettyPrint) {
try {
return toString(XMLObjectSupport.marshall(object), prettyPrint);
} catch (MarshallingException e) {
LOGGER.info("Error marshalling SAMLObject ", e);
return "_unserializable_";
}
}
public static boolean elementNameMatches(Element element, String namespace, String localName) {
return localName.equals(element.getLocalName()) && namespace.equals(element.getNamespaceURI());
}
public static String text(Element dom, int length) {
return text(dom, length, 0);
}
protected static String text(Element dom, int prefixLength, int suffixLength) {
final String text = dom.getTextContent().trim();
final int totalLength = prefixLength + suffixLength;
if (text.length() > totalLength) {
final String prefix = Strings.cleanTruncate(text, prefixLength) + "...";
if (suffixLength == 0) {
return prefix;
}
int suffixIndex = text.length() - suffixLength;
if (Character.isHighSurrogate(text.charAt(suffixIndex))) {
suffixIndex++;
}
return prefix + text.substring(suffixIndex);
} else {
return text;
}
}
public static String describeCredentials(Collection<? extends Credential> credentials) {
return credentials.stream().map(c -> {
if (c == null) {
return "<null>";
}
byte[] encoded;
if (c instanceof X509Credential x) {
try {
encoded = x.getEntityCertificate().getEncoded();
} catch (CertificateEncodingException e) {
encoded = c.getPublicKey().getEncoded();
}
} else {
encoded = c.getPublicKey().getEncoded();
}
return Base64.getEncoder().encodeToString(encoded).substring(0, 64) + "...";
}).collect(Collectors.joining(","));
}
public static Element toDomElement(XMLObject object) {
try {
return XMLObjectSupport.marshall(object);
} catch (MarshallingException e) {
throw new ElasticsearchSecurityException("failed to marshall SAML object to DOM element", e);
}
}
/**
* Constructs a DocumentBuilder with all the necessary features for it to be secure
*
* @throws ParserConfigurationException if one of the features can't be set on the DocumentBuilderFactory
*/
public static DocumentBuilder getHardenedBuilder(String[] schemaFiles) throws ParserConfigurationException {
return XmlUtils.getHardenedBuilder(resolveSchemaFilePaths(schemaFiles));
}
public static String getJavaAlgorithmNameFromUri(String sigAlg) {
return switch (sigAlg) {
case "http://www.w3.org/2000/09/xmldsig#dsa-sha1" -> "SHA1withDSA";
case "http://www.w3.org/2000/09/xmldsig#dsa-sha256" -> "SHA256withDSA";
case "http://www.w3.org/2000/09/xmldsig#rsa-sha1" -> "SHA1withRSA";
case "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" -> "SHA256withRSA";
case "http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha256" -> "SHA256withECDSA";
default -> throw new IllegalArgumentException("Unsupported signing algorithm identifier: " + sigAlg);
};
}
private static String[] resolveSchemaFilePaths(String[] relativePaths) {
return Arrays.stream(relativePaths).map(file -> {
try {
return SamlFactory.class.getResource(file).toURI().toString();
} catch (URISyntaxException e) {
LOGGER.warn("Error resolving schema file path", e);
return null;
}
}).filter(Objects::nonNull).toArray(String[]::new);
}
private static
|
SamlFactory
|
java
|
elastic__elasticsearch
|
modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java
|
{
"start": 7561,
"end": 40354
}
|
class ____ extends Plugin implements AnalysisPlugin, ScriptPlugin {
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(CommonAnalysisPlugin.class);
private final SetOnce<ScriptService> scriptServiceHolder = new SetOnce<>();
private final SetOnce<SynonymsManagementAPIService> synonymsManagementServiceHolder = new SetOnce<>();
@Override
public Collection<?> createComponents(PluginServices services) {
this.scriptServiceHolder.set(services.scriptService());
this.synonymsManagementServiceHolder.set(new SynonymsManagementAPIService(services.client()));
return Collections.emptyList();
}
@Override
public List<ScriptContext<?>> getContexts() {
return Collections.singletonList(AnalysisPredicateScript.CONTEXT);
}
@Override
public Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> analyzers = new TreeMap<>();
analyzers.put("fingerprint", FingerprintAnalyzerProvider::new);
analyzers.put("keyword", KeywordAnalyzerProvider::new);
analyzers.put("pattern", PatternAnalyzerProvider::new);
analyzers.put("simple", SimpleAnalyzerProvider::new);
analyzers.put("snowball", SnowballAnalyzerProvider::new);
analyzers.put("stop", StopAnalyzerProvider::new);
analyzers.put("whitespace", WhitespaceAnalyzerProvider::new);
// Language analyzers:
analyzers.put("arabic", ArabicAnalyzerProvider::new);
analyzers.put("armenian", ArmenianAnalyzerProvider::new);
analyzers.put("basque", BasqueAnalyzerProvider::new);
analyzers.put("bengali", BengaliAnalyzerProvider::new);
analyzers.put("brazilian", BrazilianAnalyzerProvider::new);
analyzers.put("bulgarian", BulgarianAnalyzerProvider::new);
analyzers.put("catalan", CatalanAnalyzerProvider::new);
analyzers.put("chinese", ChineseAnalyzerProvider::new);
analyzers.put("cjk", CjkAnalyzerProvider::new);
analyzers.put("czech", CzechAnalyzerProvider::new);
analyzers.put("danish", DanishAnalyzerProvider::new);
analyzers.put("dutch", DutchAnalyzerProvider::new);
analyzers.put("english", EnglishAnalyzerProvider::new);
analyzers.put("estonian", EstonianAnalyzerProvider::new);
analyzers.put("finnish", FinnishAnalyzerProvider::new);
analyzers.put("french", FrenchAnalyzerProvider::new);
analyzers.put("galician", GalicianAnalyzerProvider::new);
analyzers.put("german", GermanAnalyzerProvider::new);
analyzers.put("greek", GreekAnalyzerProvider::new);
analyzers.put("hindi", HindiAnalyzerProvider::new);
analyzers.put("hungarian", HungarianAnalyzerProvider::new);
analyzers.put("indonesian", IndonesianAnalyzerProvider::new);
analyzers.put("irish", IrishAnalyzerProvider::new);
analyzers.put("italian", ItalianAnalyzerProvider::new);
analyzers.put("latvian", LatvianAnalyzerProvider::new);
analyzers.put("lithuanian", LithuanianAnalyzerProvider::new);
analyzers.put("norwegian", NorwegianAnalyzerProvider::new);
analyzers.put("persian", PersianAnalyzerProvider::new);
analyzers.put("portuguese", PortugueseAnalyzerProvider::new);
analyzers.put("romanian", RomanianAnalyzerProvider::new);
analyzers.put("russian", RussianAnalyzerProvider::new);
analyzers.put("serbian", SerbianAnalyzerProvider::new);
analyzers.put("sorani", SoraniAnalyzerProvider::new);
analyzers.put("spanish", SpanishAnalyzerProvider::new);
analyzers.put("swedish", SwedishAnalyzerProvider::new);
analyzers.put("turkish", TurkishAnalyzerProvider::new);
analyzers.put("thai", ThaiAnalyzerProvider::new);
return analyzers;
}
@Override
public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
Map<String, AnalysisProvider<TokenFilterFactory>> filters = new TreeMap<>();
filters.put("apostrophe", ApostropheFilterFactory::new);
filters.put("arabic_normalization", ArabicNormalizationFilterFactory::new);
filters.put("arabic_stem", ArabicStemTokenFilterFactory::new);
filters.put("asciifolding", ASCIIFoldingTokenFilterFactory::new);
filters.put("bengali_normalization", BengaliNormalizationFilterFactory::new);
filters.put("brazilian_stem", BrazilianStemTokenFilterFactory::new);
filters.put("cjk_bigram", CJKBigramFilterFactory::new);
filters.put("cjk_width", CJKWidthFilterFactory::new);
filters.put("classic", ClassicFilterFactory::new);
filters.put("czech_stem", CzechStemTokenFilterFactory::new);
filters.put("common_grams", requiresAnalysisSettings(CommonGramsTokenFilterFactory::new));
filters.put(
"condition",
requiresAnalysisSettings((i, e, n, s) -> new ScriptedConditionTokenFilterFactory(i, n, s, scriptServiceHolder.get()))
);
filters.put("decimal_digit", DecimalDigitFilterFactory::new);
filters.put("delimited_payload", DelimitedPayloadTokenFilterFactory::new);
filters.put("dictionary_decompounder", requiresAnalysisSettings(DictionaryCompoundWordTokenFilterFactory::new));
filters.put("dutch_stem", DutchStemTokenFilterFactory::new);
filters.put("edge_ngram", EdgeNGramTokenFilterFactory::new);
filters.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
return new EdgeNGramTokenFilterFactory(indexSettings, environment, name, settings) {
@Override
public TokenStream create(TokenStream tokenStream) {
if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) {
throw new IllegalArgumentException(
"The [edgeNGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
+ "Please change the filter name to [edge_ngram] instead."
);
} else {
deprecationLogger.warn(
DeprecationCategory.ANALYSIS,
"edgeNGram_deprecation",
"The [edgeNGram] token filter name is deprecated and will be removed in a future version. "
+ "Please change the filter name to [edge_ngram] instead."
);
}
return super.create(tokenStream);
}
};
});
filters.put("elision", requiresAnalysisSettings(ElisionTokenFilterFactory::new));
filters.put("fingerprint", FingerprintTokenFilterFactory::new);
filters.put("flatten_graph", FlattenGraphTokenFilterFactory::new);
filters.put("french_stem", FrenchStemTokenFilterFactory::new);
filters.put("german_normalization", GermanNormalizationFilterFactory::new);
filters.put("german_stem", GermanStemTokenFilterFactory::new);
filters.put("hindi_normalization", HindiNormalizationFilterFactory::new);
filters.put("hyphenation_decompounder", requiresAnalysisSettings(HyphenationCompoundWordTokenFilterFactory::new));
filters.put("indic_normalization", IndicNormalizationFilterFactory::new);
filters.put("keep", requiresAnalysisSettings(KeepWordFilterFactory::new));
filters.put("keep_types", requiresAnalysisSettings(KeepTypesFilterFactory::new));
filters.put("keyword_marker", requiresAnalysisSettings(KeywordMarkerTokenFilterFactory::new));
filters.put("kstem", KStemTokenFilterFactory::new);
filters.put("length", LengthTokenFilterFactory::new);
filters.put("limit", LimitTokenCountFilterFactory::new);
filters.put("lowercase", LowerCaseTokenFilterFactory::new);
filters.put("min_hash", MinHashTokenFilterFactory::new);
filters.put("multiplexer", MultiplexerTokenFilterFactory::new);
filters.put("ngram", NGramTokenFilterFactory::new);
filters.put("nGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
return new NGramTokenFilterFactory(indexSettings, environment, name, settings) {
@Override
public TokenStream create(TokenStream tokenStream) {
if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) {
throw new IllegalArgumentException(
"The [nGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
+ "Please change the filter name to [ngram] instead."
);
} else {
deprecationLogger.warn(
DeprecationCategory.ANALYSIS,
"nGram_deprecation",
"The [nGram] token filter name is deprecated and will be removed in a future version. "
+ "Please change the filter name to [ngram] instead."
);
}
return super.create(tokenStream);
}
};
});
filters.put("pattern_capture", requiresAnalysisSettings(PatternCaptureGroupTokenFilterFactory::new));
filters.put("pattern_replace", requiresAnalysisSettings(PatternReplaceTokenFilterFactory::new));
filters.put("persian_normalization", PersianNormalizationFilterFactory::new);
filters.put("persian_stem", PersianStemTokenFilterFactory::new);
filters.put("porter_stem", PorterStemTokenFilterFactory::new);
filters.put(
"predicate_token_filter",
requiresAnalysisSettings((i, e, n, s) -> new PredicateTokenFilterScriptFactory(i, n, s, scriptServiceHolder.get()))
);
filters.put("remove_duplicates", RemoveDuplicatesTokenFilterFactory::new);
filters.put("reverse", ReverseTokenFilterFactory::new);
filters.put("russian_stem", RussianStemTokenFilterFactory::new);
filters.put("scandinavian_folding", ScandinavianFoldingFilterFactory::new);
filters.put("scandinavian_normalization", ScandinavianNormalizationFilterFactory::new);
filters.put("serbian_normalization", SerbianNormalizationFilterFactory::new);
filters.put("snowball", SnowballTokenFilterFactory::new);
filters.put("sorani_normalization", SoraniNormalizationFilterFactory::new);
filters.put("stemmer_override", requiresAnalysisSettings(StemmerOverrideTokenFilterFactory::new));
filters.put("stemmer", StemmerTokenFilterFactory::new);
filters.put(
"synonym",
requiresAnalysisSettings((i, e, n, s) -> new SynonymTokenFilterFactory(i, e, n, s, synonymsManagementServiceHolder.get()))
);
filters.put(
"synonym_graph",
requiresAnalysisSettings((i, e, n, s) -> new SynonymGraphTokenFilterFactory(i, e, n, s, synonymsManagementServiceHolder.get()))
);
filters.put("trim", TrimTokenFilterFactory::new);
filters.put("truncate", requiresAnalysisSettings(TruncateTokenFilterFactory::new));
filters.put("unique", UniqueTokenFilterFactory::new);
filters.put("uppercase", UpperCaseTokenFilterFactory::new);
filters.put("word_delimiter_graph", WordDelimiterGraphTokenFilterFactory::new);
filters.put("word_delimiter", WordDelimiterTokenFilterFactory::new);
return filters;
}
@Override
public Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
Map<String, AnalysisProvider<CharFilterFactory>> filters = new TreeMap<>();
filters.put("html_strip", HtmlStripCharFilterFactory::new);
filters.put("pattern_replace", requiresAnalysisSettings(PatternReplaceCharFilterFactory::new));
filters.put("mapping", requiresAnalysisSettings(MappingCharFilterFactory::new));
return filters;
}
@Override
public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
Map<String, AnalysisProvider<TokenizerFactory>> tokenizers = new TreeMap<>();
tokenizers.put("simple_pattern", SimplePatternTokenizerFactory::new);
tokenizers.put("simple_pattern_split", SimplePatternSplitTokenizerFactory::new);
tokenizers.put("thai", ThaiTokenizerFactory::new);
tokenizers.put("nGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) {
throw new IllegalArgumentException(
"The [nGram] tokenizer name was deprecated in 7.6. "
+ "Please use the tokenizer name to [ngram] for indices created in versions 8 or higher instead."
);
} else if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_6_0)) {
deprecationLogger.warn(
DeprecationCategory.ANALYSIS,
"nGram_tokenizer_deprecation",
"The [nGram] tokenizer name is deprecated and will be removed in a future version. "
+ "Please change the tokenizer name to [ngram] instead."
);
}
return new NGramTokenizerFactory(indexSettings, environment, name, settings);
});
tokenizers.put("ngram", NGramTokenizerFactory::new);
tokenizers.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) {
throw new IllegalArgumentException(
"The [edgeNGram] tokenizer name was deprecated in 7.6. "
+ "Please use the tokenizer name to [edge_nGram] for indices created in versions 8 or higher instead."
);
} else if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_6_0)) {
deprecationLogger.warn(
DeprecationCategory.ANALYSIS,
"edgeNGram_tokenizer_deprecation",
"The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. "
+ "Please change the tokenizer name to [edge_ngram] instead."
);
}
return new EdgeNGramTokenizerFactory(indexSettings, environment, name, settings);
});
tokenizers.put("edge_ngram", EdgeNGramTokenizerFactory::new);
tokenizers.put("char_group", CharGroupTokenizerFactory::new);
tokenizers.put("classic", ClassicTokenizerFactory::new);
tokenizers.put("letter", LetterTokenizerFactory::new);
// TODO deprecate and remove in API
tokenizers.put("lowercase", XLowerCaseTokenizerFactory::new);
tokenizers.put("path_hierarchy", PathHierarchyTokenizerFactory::new);
tokenizers.put("PathHierarchy", PathHierarchyTokenizerFactory::new);
tokenizers.put("pattern", PatternTokenizerFactory::new);
tokenizers.put("uax_url_email", UAX29URLEmailTokenizerFactory::new);
tokenizers.put("whitespace", WhitespaceTokenizerFactory::new);
tokenizers.put("keyword", KeywordTokenizerFactory::new);
return tokenizers;
}
@Override
public List<PreBuiltAnalyzerProviderFactory> getPreBuiltAnalyzerProviderFactories() {
    // Registers the pre-built analyzers contributed by this plugin. The
    // CachingStrategy argument controls how analyzer instances are cached and
    // reused across indices (per-Lucene-version, per-index-version, or a single
    // shared instance) — NOTE(review): exact semantics live in the caching
    // factory, confirm there before relying on them.
    List<PreBuiltAnalyzerProviderFactory> analyzers = new ArrayList<>();
    analyzers.add(
        new PreBuiltAnalyzerProviderFactory(
            "pattern",
            CachingStrategy.INDEX,
            () -> new PatternAnalyzer(Regex.compile("\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/, null), true, CharArraySet.EMPTY_SET)
        )
    );
    analyzers.add(
        new PreBuiltAnalyzerProviderFactory(
            "snowball",
            CachingStrategy.LUCENE,
            () -> new SnowballAnalyzer("English", EnglishAnalyzer.ENGLISH_STOP_WORDS_SET)
        )
    );
    // Language analyzers:
    analyzers.add(new PreBuiltAnalyzerProviderFactory("arabic", CachingStrategy.LUCENE, ArabicAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("armenian", CachingStrategy.LUCENE, ArmenianAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("basque", CachingStrategy.LUCENE, BasqueAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("bengali", CachingStrategy.LUCENE, BengaliAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("brazilian", CachingStrategy.LUCENE, BrazilianAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("bulgarian", CachingStrategy.LUCENE, BulgarianAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("catalan", CachingStrategy.LUCENE, CatalanAnalyzer::new));
    // chinese analyzer: only for old indices, best effort
    analyzers.add(
        new PreBuiltAnalyzerProviderFactory(
            "chinese",
            CachingStrategy.ONE,
            () -> new StandardAnalyzer(EnglishAnalyzer.ENGLISH_STOP_WORDS_SET)
        )
    );
    analyzers.add(new PreBuiltAnalyzerProviderFactory("cjk", CachingStrategy.LUCENE, CJKAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("czech", CachingStrategy.LUCENE, CzechAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("danish", CachingStrategy.LUCENE, DanishAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("dutch", CachingStrategy.LUCENE, DutchAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("english", CachingStrategy.LUCENE, EnglishAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("estonian", CachingStrategy.LUCENE, EstonianAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("finnish", CachingStrategy.LUCENE, FinnishAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("french", CachingStrategy.LUCENE, FrenchAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("galician", CachingStrategy.LUCENE, GalicianAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("german", CachingStrategy.LUCENE, GermanAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("greek", CachingStrategy.LUCENE, GreekAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("hindi", CachingStrategy.LUCENE, HindiAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("hungarian", CachingStrategy.LUCENE, HungarianAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("indonesian", CachingStrategy.LUCENE, IndonesianAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("irish", CachingStrategy.LUCENE, IrishAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("italian", CachingStrategy.LUCENE, ItalianAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("latvian", CachingStrategy.LUCENE, LatvianAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("lithuanian", CachingStrategy.LUCENE, LithuanianAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("norwegian", CachingStrategy.LUCENE, NorwegianAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("persian", CachingStrategy.LUCENE, PersianAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("portuguese", CachingStrategy.LUCENE, PortugueseAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("romanian", CachingStrategy.LUCENE, RomanianAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("russian", CachingStrategy.LUCENE, RussianAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("serbian", CachingStrategy.LUCENE, SerbianAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("sorani", CachingStrategy.LUCENE, SoraniAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("spanish", CachingStrategy.LUCENE, SpanishAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("swedish", CachingStrategy.LUCENE, SwedishAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("turkish", CachingStrategy.LUCENE, TurkishAnalyzer::new));
    analyzers.add(new PreBuiltAnalyzerProviderFactory("thai", CachingStrategy.LUCENE, ThaiAnalyzer::new));
    return analyzers;
}
@Override
public List<PreConfiguredCharFilter> getPreConfiguredCharFilters() {
    // "html_strip" is the only pre-configured char filter this plugin contributes.
    // Wrap the singleton in a fresh ArrayList so the returned list stays mutable,
    // exactly as before.
    return new ArrayList<>(
        List.of(PreConfiguredCharFilter.singleton("html_strip", false, HTMLStripCharFilter::new))
    );
}
@Override
public List<PreConfiguredTokenFilter> getPreConfiguredTokenFilters() {
    // Registers the pre-configured token filters this plugin ships. The boolean
    // arguments passed to singleton(...) appear to control whether a filter may
    // be applied to multi-term queries (and, for the two-boolean overloads,
    // graph/position handling) — NOTE(review): confirm against
    // PreConfiguredTokenFilter before changing any of them.
    List<PreConfiguredTokenFilter> filters = new ArrayList<>();
    filters.add(PreConfiguredTokenFilter.singleton("apostrophe", false, ApostropheFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("arabic_normalization", true, ArabicNormalizationFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("arabic_stem", false, ArabicStemFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("asciifolding", true, ASCIIFoldingFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("bengali_normalization", true, BengaliNormalizationFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("brazilian_stem", false, BrazilianStemFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("cjk_bigram", false, CJKBigramFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("cjk_width", true, CJKWidthFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("classic", false, ClassicFilter::new));
    filters.add(
        PreConfiguredTokenFilter.singleton("common_grams", false, false, input -> new CommonGramsFilter(input, CharArraySet.EMPTY_SET))
    );
    filters.add(PreConfiguredTokenFilter.singleton("czech_stem", false, CzechStemFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("decimal_digit", true, DecimalDigitFilter::new));
    filters.add(
        PreConfiguredTokenFilter.singleton(
            "delimited_payload",
            false,
            input -> new DelimitedPayloadTokenFilter(
                input,
                DelimitedPayloadTokenFilterFactory.DEFAULT_DELIMITER,
                DelimitedPayloadTokenFilterFactory.DEFAULT_ENCODER
            )
        )
    );
    filters.add(PreConfiguredTokenFilter.singleton("dutch_stem", false, input -> new SnowballFilter(input, new DutchStemmer())));
    filters.add(PreConfiguredTokenFilter.singleton("edge_ngram", false, false, input -> new EdgeNGramTokenFilter(input, 1)));
    filters.add(
        PreConfiguredTokenFilter.singleton("elision", true, input -> new ElisionFilter(input, FrenchAnalyzer.DEFAULT_ARTICLES))
    );
    filters.add(PreConfiguredTokenFilter.singleton("french_stem", false, input -> new SnowballFilter(input, new FrenchStemmer())));
    filters.add(PreConfiguredTokenFilter.singleton("german_normalization", true, GermanNormalizationFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("german_stem", false, GermanStemFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("hindi_normalization", true, HindiNormalizationFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("indic_normalization", true, IndicNormalizationFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("keyword_repeat", false, false, KeywordRepeatFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("kstem", false, KStemFilter::new));
    // TODO this one seems useless
    filters.add(PreConfiguredTokenFilter.singleton("length", false, input -> new LengthFilter(input, 0, Integer.MAX_VALUE)));
    filters.add(
        PreConfiguredTokenFilter.singleton(
            "limit",
            false,
            input -> new LimitTokenCountFilter(
                input,
                LimitTokenCountFilterFactory.DEFAULT_MAX_TOKEN_COUNT,
                LimitTokenCountFilterFactory.DEFAULT_CONSUME_ALL_TOKENS
            )
        )
    );
    filters.add(PreConfiguredTokenFilter.singleton("ngram", false, false, reader -> new NGramTokenFilter(reader, 1, 2, false)));
    filters.add(PreConfiguredTokenFilter.singleton("persian_normalization", true, PersianNormalizationFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("porter_stem", false, PorterStemFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("reverse", false, ReverseStringFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("russian_stem", false, input -> new SnowballFilter(input, "Russian")));
    filters.add(PreConfiguredTokenFilter.singleton("scandinavian_folding", true, ScandinavianFoldingFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("scandinavian_normalization", true, ScandinavianNormalizationFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("shingle", false, false, input -> {
        TokenStream ts = new ShingleFilter(input);
        /**
         * We disable the graph analysis on this token stream
         * because it produces shingles of different size.
         * Graph analysis on such token stream is useless and dangerous as it may create too many paths
         * since shingles of different size are not aligned in terms of positions.
         */
        ts.addAttribute(DisableGraphAttribute.class);
        return ts;
    }));
    filters.add(PreConfiguredTokenFilter.singleton("snowball", false, input -> new SnowballFilter(input, "English")));
    filters.add(PreConfiguredTokenFilter.singleton("sorani_normalization", true, SoraniNormalizationFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("stemmer", false, PorterStemFilter::new));
    // The stop filter is in lucene-core but the English stop words set is in lucene-analyzers-common
    filters.add(
        PreConfiguredTokenFilter.singleton("stop", false, input -> new StopFilter(input, EnglishAnalyzer.ENGLISH_STOP_WORDS_SET))
    );
    filters.add(PreConfiguredTokenFilter.singleton("trim", true, TrimFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("truncate", false, input -> new TruncateTokenFilter(input, 10)));
    filters.add(PreConfiguredTokenFilter.singleton("type_as_payload", false, TypeAsPayloadTokenFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("unique", false, UniqueTokenFilter::new));
    filters.add(PreConfiguredTokenFilter.singleton("uppercase", true, UpperCaseFilter::new));
    filters.add(
        PreConfiguredTokenFilter.singleton(
            "word_delimiter",
            false,
            false,
            input -> new WordDelimiterFilter(
                input,
                WordDelimiterFilter.GENERATE_WORD_PARTS | WordDelimiterFilter.GENERATE_NUMBER_PARTS
                    | WordDelimiterFilter.SPLIT_ON_CASE_CHANGE | WordDelimiterFilter.SPLIT_ON_NUMERICS
                    | WordDelimiterFilter.STEM_ENGLISH_POSSESSIVE,
                null
            )
        )
    );
    // "word_delimiter_graph" behavior depends on the index creation version:
    // offsets are only adjusted for indices created on/after 7.3.0.
    filters.add(PreConfiguredTokenFilter.indexVersion("word_delimiter_graph", false, false, (input, version) -> {
        boolean adjustOffsets = version.onOrAfter(IndexVersions.V_7_3_0);
        return new WordDelimiterGraphFilter(
            input,
            adjustOffsets,
            WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE,
            WordDelimiterGraphFilter.GENERATE_WORD_PARTS | WordDelimiterGraphFilter.GENERATE_NUMBER_PARTS
                | WordDelimiterGraphFilter.SPLIT_ON_CASE_CHANGE | WordDelimiterGraphFilter.SPLIT_ON_NUMERICS
                | WordDelimiterGraphFilter.STEM_ENGLISH_POSSESSIVE,
            null
        );
    }));
    return filters;
}
@Override
public List<PreConfiguredTokenizer> getPreConfiguredTokenizers() {
    // Registers pre-configured tokenizers. The legacy camel-case names
    // ("nGram"/"edgeNGram") are version-gated below: they warn on 7.6+ indices
    // and are rejected outright for indices created on 8.0+.
    List<PreConfiguredTokenizer> tokenizers = new ArrayList<>();
    tokenizers.add(PreConfiguredTokenizer.singleton("keyword", KeywordTokenizer::new));
    tokenizers.add(PreConfiguredTokenizer.singleton("classic", ClassicTokenizer::new));
    tokenizers.add(PreConfiguredTokenizer.singleton("uax_url_email", UAX29URLEmailTokenizer::new));
    tokenizers.add(PreConfiguredTokenizer.singleton("path_hierarchy", PathHierarchyTokenizer::new));
    tokenizers.add(PreConfiguredTokenizer.singleton("letter", LetterTokenizer::new));
    tokenizers.add(PreConfiguredTokenizer.singleton("whitespace", WhitespaceTokenizer::new));
    tokenizers.add(PreConfiguredTokenizer.singleton("ngram", NGramTokenizer::new));
    // "edge_ngram" defaults changed in 7.3.0; pick the matching defaults per index version.
    tokenizers.add(PreConfiguredTokenizer.indexVersion("edge_ngram", (version) -> {
        if (version.onOrAfter(IndexVersions.V_7_3_0)) {
            return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
        }
        return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE);
    }));
    tokenizers.add(PreConfiguredTokenizer.singleton("pattern", () -> new PatternTokenizer(Regex.compile("\\W+", null), -1)));
    tokenizers.add(PreConfiguredTokenizer.singleton("thai", ThaiTokenizer::new));
    // TODO deprecate and remove in API
    // This is already broken with normalization, so backwards compat isn't necessary?
    tokenizers.add(PreConfiguredTokenizer.singleton("lowercase", XLowerCaseTokenizer::new));
    tokenizers.add(PreConfiguredTokenizer.indexVersion("nGram", (version) -> {
        if (version.onOrAfter(IndexVersions.V_8_0_0)) {
            throw new IllegalArgumentException(
                "The [nGram] tokenizer name was deprecated in 7.6. "
                    + "Please use the tokenizer name to [ngram] for indices created in versions 8 or higher instead."
            );
        } else if (version.onOrAfter(IndexVersions.V_7_6_0)) {
            deprecationLogger.warn(
                DeprecationCategory.ANALYSIS,
                "nGram_tokenizer_deprecation",
                "The [nGram] tokenizer name is deprecated and will be removed in a future version. "
                    + "Please change the tokenizer name to [ngram] instead."
            );
        }
        return new NGramTokenizer();
    }));
    tokenizers.add(PreConfiguredTokenizer.indexVersion("edgeNGram", (version) -> {
        if (version.onOrAfter(IndexVersions.V_8_0_0)) {
            throw new IllegalArgumentException(
                "The [edgeNGram] tokenizer name was deprecated in 7.6. "
                    + "Please use the tokenizer name to [edge_ngram] for indices created in versions 8 or higher instead."
            );
        } else if (version.onOrAfter(IndexVersions.V_7_6_0)) {
            deprecationLogger.warn(
                DeprecationCategory.ANALYSIS,
                "edgeNGram_tokenizer_deprecation",
                "The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. "
                    + "Please change the tokenizer name to [edge_ngram] instead."
            );
        }
        // Same 7.3.0 defaults switch as the canonical "edge_ngram" name above.
        if (version.onOrAfter(IndexVersions.V_7_3_0)) {
            return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
        }
        return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE);
    }));
    tokenizers.add(PreConfiguredTokenizer.singleton("PathHierarchy", PathHierarchyTokenizer::new));
    return tokenizers;
}
}
|
CommonAnalysisPlugin
|
java
|
grpc__grpc-java
|
interop-testing/src/main/java/io/grpc/testing/integration/StressTestClient.java
|
{
"start": 16011,
"end": 20356
}
|
class ____ implements Runnable {
// Interval at which the QPS stats of metrics service are updated.
private static final long METRICS_COLLECTION_INTERVAL_SECS = 5;
private final ManagedChannel channel;
private final List<TestCaseWeightPair> testCaseWeightPairs;
private final Integer durationSec;
private final String gaugeName;
/**
 * Creates a stress-test worker.
 *
 * @param channel             channel the interop tests run against
 * @param testCaseWeightPairs weighted test cases to pick from; must not be null
 * @param durationSec         how long to run, in seconds; {@code -1} means unbounded
 * @param gaugeName           name under which this worker publishes its QPS gauge
 *                            (also used as the worker thread's name)
 */
Worker(ManagedChannel channel, List<TestCaseWeightPair> testCaseWeightPairs,
    int durationSec, String gaugeName) {
    Preconditions.checkArgument(durationSec >= -1, "durationSec must be gte -1.");
    this.channel = Preconditions.checkNotNull(channel, "channel");
    this.testCaseWeightPairs =
        Preconditions.checkNotNull(testCaseWeightPairs, "testCaseWeightPairs");
    // -1 is normalized to null so "run forever" is an explicit state.
    this.durationSec = durationSec == -1 ? null : durationSec;
    this.gaugeName = Preconditions.checkNotNull(gaugeName, "gaugeName");
}
@Override
public void run() {
    // Runs interop test cases in a loop until interrupted, shut down, or the
    // configured duration elapses, publishing a QPS gauge every
    // METRICS_COLLECTION_INTERVAL_SECS seconds.
    // Simplify debugging if the worker crashes / never terminates.
    Thread.currentThread().setName(gaugeName);
    Tester tester = new Tester();
    // The client stream tracers that AbstractInteropTest installs by default would fill up the
    // heap in no time in a long running stress test with many requests.
    tester.setEnableClientStreamTracers(false);
    tester.setUp();
    WeightedTestCaseSelector testCaseSelector = new WeightedTestCaseSelector(testCaseWeightPairs);
    // null durationSec means "run forever"; otherwise compute an absolute deadline.
    // FIX: previously referenced the undeclared name `durationSecs` — the field is `durationSec`.
    Long endTime = durationSec == null ? null : System.nanoTime() + SECONDS.toNanos(durationSec);
    long lastMetricsCollectionTime = initLastMetricsCollectionTime();
    // Number of interop testcases run since the last time metrics have been updated.
    long testCasesSinceLastMetricsCollection = 0;
    while (!Thread.currentThread().isInterrupted() && !shutdown
        && (endTime == null || endTime - System.nanoTime() > 0)) {
        try {
            runTestCase(tester, testCaseSelector.nextTestCase());
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        testCasesSinceLastMetricsCollection++;
        totalCallCount.incrementAndGet();
        double durationSecs = computeDurationSecs(lastMetricsCollectionTime);
        if (durationSecs >= METRICS_COLLECTION_INTERVAL_SECS) {
            // Publish the rolling QPS since the last collection, then reset the window.
            long qps = (long) Math.ceil(testCasesSinceLastMetricsCollection / durationSecs);
            Metrics.GaugeResponse gauge = Metrics.GaugeResponse
                .newBuilder()
                .setName(gaugeName)
                .setLongValue(qps)
                .build();
            gauges.put(gaugeName, gauge);
            lastMetricsCollectionTime = System.nanoTime();
            testCasesSinceLastMetricsCollection = 0;
        }
    }
}
/**
 * Returns a synthetic "last collection" timestamp one full interval in the past,
 * so the first metrics update happens on the first loop iteration that checks it.
 */
private long initLastMetricsCollectionTime() {
    return System.nanoTime() - SECONDS.toNanos(METRICS_COLLECTION_INTERVAL_SECS);
}
/**
 * Seconds elapsed since {@code lastMetricsCollectionTime}, which must be a
 * prior {@link System#nanoTime()} reading.
 */
private double computeDurationSecs(long lastMetricsCollectionTime) {
    long elapsedNanos = System.nanoTime() - lastMetricsCollectionTime;
    return elapsedNanos / 1e9;
}
/**
 * Dispatches a single interop test case to the corresponding {@code Tester}
 * method. Fails fast on test cases this client does not implement.
 *
 * @throws Exception whatever the underlying tester method throws
 */
private void runTestCase(Tester tester, TestCases testCase) throws Exception {
    // TODO(buchgr): Implement tests requiring auth, once C++ supports it.
    switch (testCase) {
        case EMPTY_UNARY:
            tester.emptyUnary();
            break;
        case LARGE_UNARY:
            tester.largeUnary();
            break;
        case CLIENT_STREAMING:
            tester.clientStreaming();
            break;
        case SERVER_STREAMING:
            tester.serverStreaming();
            break;
        case PING_PONG:
            tester.pingPong();
            break;
        case EMPTY_STREAM:
            tester.emptyStream();
            break;
        case UNIMPLEMENTED_METHOD: {
            tester.unimplementedMethod();
            break;
        }
        case UNIMPLEMENTED_SERVICE: {
            tester.unimplementedService();
            break;
        }
        case CANCEL_AFTER_BEGIN: {
            tester.cancelAfterBegin();
            break;
        }
        case CANCEL_AFTER_FIRST_RESPONSE: {
            tester.cancelAfterFirstResponse();
            break;
        }
        case TIMEOUT_ON_SLEEPING_SERVER: {
            tester.timeoutOnSleepingServer();
            break;
        }
        default:
            // Deliberately loud: an unsupported TestCases constant is a configuration error.
            throw new IllegalArgumentException("Unknown test case: " + testCase);
    }
}
|
Worker
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/oracle/ast/expr/OracleIntervalExpr.java
|
{
"start": 1145,
"end": 6572
}
|
/**
 * AST node for an Oracle INTERVAL expression (e.g. {@code INTERVAL '2' DAY} or
 * {@code INTERVAL '1-3' YEAR TO MONTH}): a literal value plus a leading unit
 * (with optional precision) and an optional trailing unit.
 *
 * NOTE(review): "factional" in several member names looks like a typo for
 * "fractional"; it is part of the public API and therefore kept as-is.
 */
class ____ extends SQLExprImpl implements SQLLiteralExpr, OracleExpr, SQLReplaceable {
    // The interval's literal value expression.
    private SQLExpr value;
    // Leading interval unit (e.g. YEAR, DAY).
    private OracleIntervalType type;
    // Optional precision of the leading unit.
    private SQLExpr precision;
    // Optional fractional-seconds precision of the leading unit.
    private Integer factionalSecondsPrecision;
    // Optional trailing unit for "X TO Y" intervals.
    private OracleIntervalType toType;
    // Optional fractional-seconds precision of the trailing unit.
    private SQLExpr toFactionalSecondsPrecision;
    public OracleIntervalExpr() {
    }
    @Override
    public OracleIntervalExpr clone() {
        // NOTE(review): only `value` is deep-cloned; precision and
        // toFactionalSecondsPrecision are shared with the original node
        // (their parent pointers still reference this instance) — confirm
        // this shallow copy is intentional.
        OracleIntervalExpr x = new OracleIntervalExpr();
        if (value != null) {
            x.setValue(value.clone());
        }
        x.type = type;
        x.precision = precision;
        x.factionalSecondsPrecision = factionalSecondsPrecision;
        x.toType = toType;
        x.toFactionalSecondsPrecision = toFactionalSecondsPrecision;
        return x;
    }
    @Override
    public boolean replace(SQLExpr expr, SQLExpr target) {
        // Identity-based child replacement, per SQLReplaceable: swaps the first
        // matching child reference and reports whether anything was replaced.
        if (this.value == expr) {
            setValue(target);
            return true;
        }
        if (this.precision == expr) {
            setPrecision(target);
            return true;
        }
        if (this.toFactionalSecondsPrecision == expr) {
            setToFactionalSecondsPrecision(target);
            return true;
        }
        return false;
    }
    @Override
    public List<SQLObject> getChildren() {
        // Only the value expression is reported as a child here.
        return Collections.<SQLObject>singletonList(this.value);
    }
    public SQLExpr getValue() {
        return this.value;
    }
    public void setValue(SQLExpr value) {
        if (value != null) {
            value.setParent(this);
        }
        this.value = value;
    }
    public OracleIntervalType getType() {
        return this.type;
    }
    public void setType(OracleIntervalType type) {
        this.type = type;
    }
    public SQLExpr getPrecision() {
        return this.precision;
    }
    // Convenience overload: wraps the int in a SQLIntegerExpr node.
    public void setPrecision(Integer precision) {
        this.setPrecision(new SQLIntegerExpr(precision));
    }
    public void setPrecision(SQLExpr precision) {
        if (precision != null) {
            precision.setParent(this);
        }
        this.precision = precision;
    }
    public Integer getFactionalSecondsPrecision() {
        return this.factionalSecondsPrecision;
    }
    public void setFactionalSecondsPrecision(Integer factionalSecondsPrecision) {
        this.factionalSecondsPrecision = factionalSecondsPrecision;
    }
    public OracleIntervalType getToType() {
        return this.toType;
    }
    public void setToType(OracleIntervalType toType) {
        this.toType = toType;
    }
    public SQLExpr getToFactionalSecondsPrecision() {
        return this.toFactionalSecondsPrecision;
    }
    public void setToFactionalSecondsPrecision(SQLExpr x) {
        if (x != null) {
            x.setParent(this);
        }
        this.toFactionalSecondsPrecision = x;
    }
    @Override
    protected void accept0(SQLASTVisitor visitor) {
        // Generic visitor entry point delegates to the Oracle-specific overload.
        this.accept0((OracleASTVisitor) visitor);
    }
    @Override
    public void accept0(OracleASTVisitor visitor) {
        if (visitor.visit(this)) {
            this.acceptChild(visitor, value);
            this.acceptChild(visitor, precision);
            this.acceptChild(visitor, toFactionalSecondsPrecision);
        }
        visitor.endVisit(this);
    }
    @Override
    public int hashCode() {
        // Hand-rolled hash over all six fields; must stay consistent with equals().
        final int prime = 31;
        int result = 1;
        result = prime * result + ((factionalSecondsPrecision == null) ? 0 : factionalSecondsPrecision.hashCode());
        result = prime * result + ((precision == null) ? 0 : precision.hashCode());
        result = prime * result + ((toFactionalSecondsPrecision == null) ? 0 : toFactionalSecondsPrecision.hashCode());
        result = prime * result + ((toType == null) ? 0 : toType.hashCode());
        result = prime * result + ((type == null) ? 0 : type.hashCode());
        result = prime * result + ((value == null) ? 0 : value.hashCode());
        return result;
    }
    @Override
    public boolean equals(Object obj) {
        // Field-by-field, null-safe equality over the same six fields as hashCode().
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        OracleIntervalExpr other = (OracleIntervalExpr) obj;
        if (factionalSecondsPrecision == null) {
            if (other.factionalSecondsPrecision != null) {
                return false;
            }
        } else if (!factionalSecondsPrecision.equals(other.factionalSecondsPrecision)) {
            return false;
        }
        if (precision == null) {
            if (other.precision != null) {
                return false;
            }
        } else if (!precision.equals(other.precision)) {
            return false;
        }
        if (toFactionalSecondsPrecision == null) {
            if (other.toFactionalSecondsPrecision != null) {
                return false;
            }
        } else if (!toFactionalSecondsPrecision.equals(other.toFactionalSecondsPrecision)) {
            return false;
        }
        if (toType != other.toType) {
            return false;
        }
        if (type != other.type) {
            return false;
        }
        if (value == null) {
            if (other.value != null) {
                return false;
            }
        } else if (!value.equals(other.value)) {
            return false;
        }
        return true;
    }
}
|
OracleIntervalExpr
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/serializer/features/WriteNonStringValueAsStringTestByteField.java
|
{
"start": 813,
"end": 883
}
|
// Minimal value object with a single public primitive byte field, used to
// exercise serialization of non-string byte values as strings.
class ____ {
    public byte id;
}
private static
|
VO
|
java
|
apache__kafka
|
server/src/main/java/org/apache/kafka/server/share/fetch/PartitionRotateStrategy.java
|
{
"start": 1329,
"end": 3679
}
|
enum ____ {
    ROUND_ROBIN;

    /** Lower-cased constant name, e.g. {@code "round_robin"}. */
    @Override
    public String toString() {
        // name() is exactly what Enum.toString() returns; lower-case with a
        // locale-independent mapping so the rendering is stable everywhere.
        return name().toLowerCase(Locale.ROOT);
    }
}
/**
 * Rotate the partitions based on the strategy.
 *
 * @param topicIdPartitions the topicIdPartitions to rotate
 * @param metadata the metadata to rotate
 *
 * @return the rotated topicIdPartitions
 */
List<TopicIdPartition> rotate(List<TopicIdPartition> topicIdPartitions, PartitionRotateMetadata metadata);

/**
 * Resolves the strategy implementation for the given type. The switch is
 * exhaustive, so adding a StrategyType constant forces an update here at
 * compile time.
 */
static PartitionRotateStrategy type(StrategyType type) {
    return switch (type) {
        case ROUND_ROBIN -> PartitionRotateStrategy::rotateRoundRobin;
    };
}
/**
 * Round-robin rotation: shifts the partition list left by
 * {@code sessionEpoch % size} positions so successive session epochs start
 * fetching from a different partition.
 *
 * @param topicIdPartitions the topicIdPartitions to rotate; never mutated
 * @param metadata          carries the share session epoch driving the shift
 *
 * @return the rotated topicIdPartitions (the original list when no shift applies)
 */
static List<TopicIdPartition> rotateRoundRobin(
    List<TopicIdPartition> topicIdPartitions,
    PartitionRotateMetadata metadata
) {
    int size = topicIdPartitions.size();
    // Nothing to rotate for empty/singleton lists or for initial/final epochs.
    if (size <= 1 || metadata.sessionEpoch < 1) {
        return topicIdPartitions;
    }
    int shift = metadata.sessionEpoch % size;
    if (shift == 0) {
        // A full-cycle epoch leaves the order unchanged.
        return topicIdPartitions;
    }
    // Left-rotate without touching the caller's list: [shift..size) then [0..shift),
    // so shifting [1,2,3] by 1 yields [2,3,1].
    List<TopicIdPartition> rotated = new ArrayList<>(size);
    rotated.addAll(topicIdPartitions.subList(shift, size));
    rotated.addAll(topicIdPartitions.subList(0, shift));
    return rotated;
}
/**
 * Metadata driving a partition rotation.
 *
 * @param sessionEpoch the share session epoch; values below 1 mean no rotation applies.
 */
record PartitionRotateMetadata(int sessionEpoch) { }
}
|
StrategyType
|
java
|
apache__avro
|
lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWeather.java
|
{
"start": 1685,
"end": 3484
}
|
class ____ {
private static final AtomicInteger mapCloseCalls = new AtomicInteger();
private static final AtomicInteger mapConfigureCalls = new AtomicInteger();
private static final AtomicInteger reducerCloseCalls = new AtomicInteger();
private static final AtomicInteger reducerConfigureCalls = new AtomicInteger();
// Reset the static invocation counters after each test so counts from one
// test method cannot leak into the next.
@AfterEach
public void tearDown() {
    mapCloseCalls.set(0);
    mapConfigureCalls.set(0);
    reducerCloseCalls.set(0);
    reducerConfigureCalls.set(0);
}
/** Uses default mapper with no reduces for a map-only identity job. */
@Test
@SuppressWarnings("deprecation")
void mapOnly() throws Exception {
    JobConf job = new JobConf();
    // Test data location is overridable via the share.dir system property.
    String inDir = System.getProperty("share.dir", "../../../share") + "/test/data";
    Path input = new Path(inDir + "/weather.avro");
    Path output = new Path("target/test/weather-ident");
    // Clear any output left over from a previous run.
    output.getFileSystem(job).delete(output);
    job.setJobName("identity map weather");
    AvroJob.setInputSchema(job, Weather.SCHEMA$);
    AvroJob.setOutputSchema(job, Weather.SCHEMA$);
    FileInputFormat.setInputPaths(job, input);
    FileOutputFormat.setOutputPath(job, output);
    FileOutputFormat.setCompressOutput(job, true);
    job.setNumReduceTasks(0); // map-only
    JobClient.runJob(job);
    // check output is correct: the identity map must reproduce the input records in order.
    DatumReader<Weather> reader = new SpecificDatumReader<>();
    DataFileReader<Weather> check = new DataFileReader<>(new File(inDir + "/weather.avro"), reader);
    DataFileReader<Weather> sorted = new DataFileReader<>(new File(output.toString() + "/part-00000.avro"), reader);
    for (Weather w : sorted)
        assertEquals(check.next(), w);
    check.close();
    sorted.close();
}
// maps input Weather to Pair<Weather,Void>, to sort by Weather
public static
|
TestWeather
|
java
|
apache__camel
|
components/camel-pqc/src/main/java/org/apache/camel/component/pqc/lifecycle/AwsSecretsManagerKeyLifecycleManager.java
|
{
"start": 28271,
"end": 29162
}
|
/**
 * Plain data holder tying a metadata payload to the key id and algorithm it
 * belongs to. Presumably (de)serialized when key metadata is stored in the
 * secrets manager — confirm against the callers before relying on that.
 */
class ____ {
    private String metadata;
    private String keyId;
    private String algorithm;
    // No-arg constructor required by bean-style (de)serialization.
    public MetadataData() {
    }
    public MetadataData(String metadata, String keyId, String algorithm) {
        this.metadata = metadata;
        this.keyId = keyId;
        this.algorithm = algorithm;
    }
    public String getMetadata() {
        return metadata;
    }
    public void setMetadata(String metadata) {
        this.metadata = metadata;
    }
    public String getKeyId() {
        return keyId;
    }
    public void setKeyId(String keyId) {
        this.keyId = keyId;
    }
    public String getAlgorithm() {
        return algorithm;
    }
    public void setAlgorithm(String algorithm) {
        this.algorithm = algorithm;
    }
}
}
|
MetadataData
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/filter/wall/NoMatchDbWallProvider.java
|
{
"start": 331,
"end": 869
}
|
/**
 * Test-only WallProvider whose factory methods intentionally return null,
 * modelling a provider with no matching dialect parser/visitor support.
 */
class ____ extends WallProvider {
    public NoMatchDbWallProvider(WallConfig config) {
        super(config);
    }
    public NoMatchDbWallProvider(WallConfig config, DbType dbType) {
        super(config, dbType);
    }
    // No parser available for any SQL in this stub.
    @Override
    public SQLStatementParser createParser(String sql) {
        return null;
    }
    @Override
    public WallVisitor createWallVisitor() {
        return null;
    }
    @Override
    public ExportParameterVisitor createExportParameterVisitor() {
        return null;
    }
}
|
NoMatchDbWallProvider
|
java
|
elastic__elasticsearch
|
modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataBasedDoubleValuesSource.java
|
{
"start": 800,
"end": 1384
}
|
/**
 * Base class for DoubleValuesSource implementations backed by index field data.
 * Score-independent and rewrite-stable; cacheability is delegated to the doc
 * values of the underlying field.
 */
class ____ extends DoubleValuesSource {
    FieldDataBasedDoubleValuesSource(IndexFieldData<?> fieldData) {
        this.fieldData = Objects.requireNonNull(fieldData);
    }
    // Field data the concrete subclass reads per-segment values from.
    protected final IndexFieldData<?> fieldData;
    @Override
    public boolean needsScores() {
        // Values come from field data only, never from the query score.
        return false;
    }
    @Override
    public DoubleValuesSource rewrite(IndexSearcher reader) {
        // Nothing searcher-dependent to specialize.
        return this;
    }
    @Override
    public boolean isCacheable(LeafReaderContext ctx) {
        return DocValues.isCacheable(ctx, fieldData.getFieldName());
    }
}
|
FieldDataBasedDoubleValuesSource
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/time/DurationFromTest.java
|
{
"start": 1068,
"end": 2584
}
|
class ____ {
private final CompilationTestHelper helper =
CompilationTestHelper.newInstance(DurationFrom.class, getClass());
// Demonstrates the runtime failure this BugChecker guards against:
// Duration.from(Period) always throws UnsupportedTemporalTypeException —
// even for Period.ZERO — because Period's year/month units have estimated
// durations. The check itself is suppressed so the calls compile.
@SuppressWarnings("DurationFrom")
@Test
public void failures() {
    assertThrows(UnsupportedTemporalTypeException.class, () -> Duration.from(Period.ZERO));
    assertThrows(UnsupportedTemporalTypeException.class, () -> Duration.from(Period.ofDays(1)));
    assertThrows(UnsupportedTemporalTypeException.class, () -> Duration.from(Period.ofDays(-1)));
    assertThrows(UnsupportedTemporalTypeException.class, () -> Duration.from(Period.ofWeeks(1)));
    assertThrows(UnsupportedTemporalTypeException.class, () -> Duration.from(Period.ofWeeks(-1)));
    assertThrows(UnsupportedTemporalTypeException.class, () -> Duration.from(Period.ofMonths(1)));
    assertThrows(UnsupportedTemporalTypeException.class, () -> Duration.from(Period.ofMonths(-1)));
    assertThrows(UnsupportedTemporalTypeException.class, () -> Duration.from(Period.ofYears(1)));
    assertThrows(UnsupportedTemporalTypeException.class, () -> Duration.from(Period.ofYears(-1)));
    // Also fails when the Period is hidden behind the TemporalAmount interface.
    TemporalAmount temporalAmount = Period.ofDays(3);
    assertThrows(UnsupportedTemporalTypeException.class, () -> Duration.from(temporalAmount));
}
@Test
public void durationFrom() {
helper
.addSourceLines(
"TestClass.java",
"import java.time.Duration;",
"import java.time.Period;",
"import java.time.temporal.TemporalAmount;",
"public
|
DurationFromTest
|
java
|
square__retrofit
|
retrofit-adapters/rxjava2/src/test/java/retrofit2/adapter/rxjava2/ObservableTest.java
|
{
"start": 1125,
"end": 1305
}
|
class ____ {
@Rule public final MockWebServer server = new MockWebServer();
@Rule public final RecordingObserver.Rule observerRule = new RecordingObserver.Rule();
|
ObservableTest
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/paths/Paths_assertIsCanonical_Test.java
|
{
"start": 1269,
"end": 2624
}
|
/**
 * Tests for the internal {@code assertIsCanonical} path assertion: null
 * handling, IOException wrapping, symlink (non-canonical) failure, and the
 * canonical happy path.
 */
class ____ extends PathsBaseTest {
    @Test
    void should_fail_if_actual_is_null() {
        // WHEN
        var error = expectAssertionError(() -> underTest.assertIsCanonical(INFO, null));
        // THEN
        then(error).hasMessage(actualIsNull());
    }
    @Test
    void should_rethrow_IOException_as_UncheckedIOException() throws IOException {
        // GIVEN: toRealPath() is where the filesystem can fail with IOException.
        Path actual = mock(Path.class);
        IOException exception = new IOException("boom!");
        given(actual.toRealPath()).willThrow(exception);
        // WHEN
        Throwable thrown = catchThrowable(() -> underTest.assertIsCanonical(INFO, actual));
        // THEN: the checked exception is wrapped, with the original as cause.
        then(thrown).isInstanceOf(UncheckedIOException.class)
            .hasCause(exception);
    }
    @Test
    void should_fail_if_actual_is_not_canonical() throws IOException {
        // GIVEN: a symlink resolves to a different real path, so it is not canonical.
        Path file = createFile(tempDir.resolve("file"));
        Path actual = tryToCreateSymbolicLink(tempDir.resolve("actual"), file);
        // WHEN
        var error = expectAssertionError(() -> underTest.assertIsCanonical(INFO, actual));
        // THEN
        then(error).hasMessage(shouldBeCanonicalPath(actual).create());
    }
    @Test
    void should_pass_if_actual_is_canonical() throws IOException {
        // GIVEN: toRealPath() yields the canonical form of a freshly created file.
        Path actual = createFile(tempDir.resolve("actual")).toRealPath();
        // WHEN/THEN
        underTest.assertIsCanonical(INFO, actual);
    }
}
|
Paths_assertIsCanonical_Test
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/bean/override/easymock/EasyMockBeanOverrideProcessor.java
|
{
"start": 1106,
"end": 1546
}
|
/**
 * BeanOverrideProcessor that turns an {@code @EasyMockBean}-annotated field
 * into an EasyMock-backed override handler.
 */
class ____ implements BeanOverrideProcessor {
    @Override
    public BeanOverrideHandler createHandler(Annotation annotation, Class<?> testClass, Field field) {
        EasyMockBean easyMockBean = (EasyMockBean) annotation;
        // An explicit (non-blank) name wins; otherwise leave it null so the
        // handler falls back to its default naming.
        String beanName = null;
        if (StringUtils.hasText(easyMockBean.name())) {
            beanName = easyMockBean.name();
        }
        return new EasyMockBeanOverrideHandler(field, field.getType(), beanName, easyMockBean.mockType());
    }
}
|
EasyMockBeanOverrideProcessor
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/config/PojoAndStringConfig.java
|
{
"start": 1033,
"end": 1356
}
|
class ____ directly to the
* beans defined in {@code CoreContextConfigurationAppCtxTests-context.xml}.
* Consequently, the application contexts loaded from these two sources
* should be identical with regard to bean definitions.
*
* @author Sam Brannen
* @since 3.1
*/
@Configuration(proxyBeanMethods = false)
public
|
map
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/streaming/util/CollectingSourceContext.java
|
{
"start": 1192,
"end": 2247
}
|
/**
 * Test SourceContext that collects every emitted element into a collection,
 * after a serialization round-trip copy (so later mutations of the emitted
 * object cannot corrupt the recorded value). Timestamps are ignored;
 * watermarks and idleness are unsupported.
 */
class ____<T extends Serializable>
        implements SourceFunction.SourceContext<T> {
    // Returned as the "checkpoint lock"; shared with the test harness.
    private final Object lock;
    // Receives a serialized copy of every collected element.
    private final Collection<T> collection;
    public CollectingSourceContext(Object lock, Collection<T> collection) {
        this.lock = lock;
        this.collection = collection;
    }
    @Override
    public void collect(T element) {
        try {
            // Defensive copy via Java serialization.
            collection.add(CommonTestUtils.createCopySerializable(element));
        } catch (IOException e) {
            throw new RuntimeException(e.getMessage(), e);
        }
    }
    @Override
    public void collectWithTimestamp(T element, long timestamp) {
        // Timestamp is deliberately dropped; only the element is recorded.
        collect(element);
    }
    @Override
    public void emitWatermark(Watermark mark) {
        throw new UnsupportedOperationException();
    }
    @Override
    public void markAsTemporarilyIdle() {
        throw new UnsupportedOperationException();
    }
    @Override
    public Object getCheckpointLock() {
        return lock;
    }
    @Override
    public void close() {}
}
|
CollectingSourceContext
|
java
|
resilience4j__resilience4j
|
resilience4j-framework-common/src/main/java/io/github/resilience4j/common/retry/configuration/RetryConfigCustomizer.java
|
{
"start": 339,
"end": 1352
}
|
interface ____ extends CustomizerWithName {
/**
* Retry configuration builder.
*
* @param configBuilder to be customized
*/
void customize(RetryConfig.Builder configBuilder);
/**
* A convenient method to create RetryConfigCustomizer using {@link Consumer}
*
* @param instanceName the name of the instance
* @param consumer delegate call to Consumer when {@link RetryConfigCustomizer#customize(RetryConfig.Builder)}
* is called
* @return Customizer instance
*/
static RetryConfigCustomizer of(@NonNull String instanceName,
@NonNull Consumer<RetryConfig.Builder> consumer) {
return new RetryConfigCustomizer() {
@Override
public void customize(RetryConfig.Builder builder) {
consumer.accept(builder);
}
@Override
public String name() {
return instanceName;
}
};
}
}
|
RetryConfigCustomizer
|
java
|
apache__flink
|
flink-state-backends/flink-statebackend-forst/src/main/java/org/apache/flink/state/forst/sync/ForStStateKeysIterator.java
|
{
"start": 1328,
"end": 3608
}
|
class ____<K> extends AbstractForStStateKeysIterator<K>
implements Iterator<K> {
@Nonnull private final byte[] namespaceBytes;
private K nextKey;
private K previousKey;
public ForStStateKeysIterator(
@Nonnull ForStIteratorWrapper iterator,
@Nonnull String state,
@Nonnull TypeSerializer<K> keySerializer,
int keyGroupPrefixBytes,
boolean ambiguousKeyPossible,
@Nonnull byte[] namespaceBytes) {
super(iterator, state, keySerializer, keyGroupPrefixBytes, ambiguousKeyPossible);
this.namespaceBytes = namespaceBytes;
this.nextKey = null;
this.previousKey = null;
}
@Override
public boolean hasNext() {
try {
while (nextKey == null && iterator.isValid()) {
final byte[] keyBytes = iterator.key();
final K currentKey = deserializeKey(keyBytes, byteArrayDataInputView);
final int namespaceByteStartPos = byteArrayDataInputView.getPosition();
if (isMatchingNameSpace(keyBytes, namespaceByteStartPos)
&& !Objects.equals(previousKey, currentKey)) {
previousKey = currentKey;
nextKey = currentKey;
}
iterator.next();
}
} catch (Exception e) {
throw new FlinkRuntimeException("Failed to access state [" + state + "]", e);
}
return nextKey != null;
}
@Override
public K next() {
if (!hasNext()) {
throw new NoSuchElementException("Failed to access state [" + state + "]");
}
K tmpKey = nextKey;
nextKey = null;
return tmpKey;
}
private boolean isMatchingNameSpace(@Nonnull byte[] key, int beginPos) {
final int namespaceBytesLength = namespaceBytes.length;
final int basicLength = namespaceBytesLength + beginPos;
if (key.length >= basicLength) {
for (int i = 0; i < namespaceBytesLength; ++i) {
if (key[beginPos + i] != namespaceBytes[i]) {
return false;
}
}
return true;
}
return false;
}
}
|
ForStStateKeysIterator
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java
|
{
"start": 93652,
"end": 94125
}
|
class ____ implements ThreadFactory {
private final String namePrefix;
private final AtomicInteger threadNumber = new AtomicInteger(1);
AsyncThreadFactory(String namePrefix) {
this.namePrefix = namePrefix;
}
@Override
public Thread newThread(@NonNull Runnable r) {
Thread thread = new SubjectInheritingThread(r, namePrefix + threadNumber.getAndIncrement());
thread.setDaemon(true);
return thread;
}
}
}
|
AsyncThreadFactory
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/ClassModifierShouldBe.java
|
{
"start": 1325,
"end": 5877
}
|
class ____ was %s.",
actual, modifier, modifiers(actual));
}
/**
* Creates a new instance for a positive check of the {@code final} modifier.
*
* @param actual the actual value in the failed assertion.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldBeFinal(Class<?> actual) {
return new ClassModifierShouldBe(actual, true, Modifier.toString(Modifier.FINAL));
}
/**
* Creates a new instance for a negative check of the {@code final} modifier.
*
* @param actual the actual value in the failed assertion.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldNotBeFinal(Class<?> actual) {
return new ClassModifierShouldBe(actual, false, Modifier.toString(Modifier.FINAL));
}
/**
* Creates a new instance for a positive check of the {@code public} modifier.
*
* @param actual the actual value in the failed assertion.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldBePublic(Class<?> actual) {
return new ClassModifierShouldBe(actual, true, Modifier.toString(Modifier.PUBLIC));
}
/**
* Creates a new instance for a positive check of the {@code protected} modifier.
*
* @param actual the actual value in the failed assertion.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldBeProtected(Class<?> actual) {
return new ClassModifierShouldBe(actual, true, Modifier.toString(Modifier.PROTECTED));
}
/**
* Creates a new instance for a positive check of the {@code package-private} modifier.
*
* @param actual the actual value in the failed assertion.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldBePackagePrivate(Class<?> actual) {
return new ClassModifierShouldBe(actual, true, PACKAGE_PRIVATE);
}
/**
* Creates a new instance for a positive check of the {@code private} modifier.
*
* @param actual the actual value in the failed assertion.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldBePrivate(Class<?> actual) {
return new ClassModifierShouldBe(actual, true, Modifier.toString(Modifier.PRIVATE));
}
/**
* Creates a new instance for a positive check of the {@code static} modifier.
*
* @param actual the actual value in the failed assertion.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldBeStatic(Class<?> actual) {
return new ClassModifierShouldBe(actual, true, Modifier.toString(Modifier.STATIC));
}
/**
* Creates a new instance for a negative check of the {@code static} modifier.
*
* @param actual the actual value in the failed assertion.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldNotBeStatic(Class<?> actual) {
return new ClassModifierShouldBe(actual, false, Modifier.toString(Modifier.STATIC));
}
private static String modifiers(Class<?> actual) {
int modifiers = actual.getModifiers();
boolean isPackagePrivate = !isPublic(modifiers) && !isProtected(modifiers) && !isPrivate(modifiers);
String modifiersDescription = toString(modifiers);
StringJoiner sj = new StringJoiner(" ");
if (isPackagePrivate) {
sj.add(PACKAGE_PRIVATE);
}
if (!modifiersDescription.isEmpty()) {
sj.add(modifiersDescription);
}
return sj.toString();
}
/**
* Return a string describing the access modifier flags in the specified modifiers,
* filtering out the {@link Modifier#SYNCHRONIZED SYNCHRONIZED},
* {@link Modifier#STRICT STRICT}, and {@link Modifier#VOLATILE VOLATILE} bits as
* Valhalla's {@link Modifier#toString(int)}} mis-interprets them.
*
* @param modifiers a set of modifiers
* @return a string representation of the set of modifiers, with {@code SYNCHRONIZED},
* {@code STRICT}, and {@code VOLATILE} filtered out
*
* @see Modifier#toString(int)
* @see <a href="https://bugs.openjdk.org/browse/JDK-8370935">JDK-8370935</a>
* @see <a href="https://github.com/openjdk/valhalla/blob/296fe862f73ad92093d62372141dc848a3e42d72/src/java.base/share/classes/java/lang/Class.java#L326-L328">Class.java#L326-L328</a>
*/
private static String toString(int modifiers) {
return Modifier.toString(modifiers & ~(Modifier.SYNCHRONIZED | Modifier.STRICT | Modifier.VOLATILE));
}
}
|
but
|
java
|
google__guice
|
core/test/com/google/inject/name/NamedEquivalanceTest.java
|
{
"start": 10405,
"end": 11457
}
|
class ____ implements jakarta.inject.Named, Serializable {
private final String value;
public JsrNamed(String value) {
this.value = value;
}
@Override
public String value() {
return this.value;
}
@Override
public int hashCode() {
// This is specified in java.lang.Annotation.
return (127 * "value".hashCode()) ^ value.hashCode();
}
@Override
public boolean equals(Object o) {
if (!(o instanceof jakarta.inject.Named)) {
return false;
}
jakarta.inject.Named other = (jakarta.inject.Named) o;
return value.equals(other.value());
}
@Override
public String toString() {
return "@"
+ jakarta.inject.Named.class.getName()
+ "(value="
+ Annotations.memberValueString("value", value)
+ ")";
}
@Override
public Class<? extends Annotation> annotationType() {
return jakarta.inject.Named.class;
}
private static final long serialVersionUID = 0;
}
private static
|
JsrNamed
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/support/JpaEvaluationContextExtension.java
|
{
"start": 1060,
"end": 1647
}
|
class ____ implements EvaluationContextExtension {
private final JpaRootObject root;
/**
* Creates a new {@link JpaEvaluationContextExtension} for the given escape character.
*
* @param escapeCharacter the character to be used to escape parameters for LIKE expression.
*/
public JpaEvaluationContextExtension(char escapeCharacter) {
this.root = JpaRootObject.of(EscapeCharacter.of(escapeCharacter));
}
@Override
public String getExtensionId() {
return "jpa";
}
@Override
public Object getRootObject() {
return root;
}
public static
|
JpaEvaluationContextExtension
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/sink/constraint/ConstraintEnforcerExecutor.java
|
{
"start": 21160,
"end": 21635
}
|
class ____ {
private final int fieldIdx;
private final LogicalType elementType;
private NestedArrayInfo(final int fieldIdx, final LogicalType elementType) {
this.fieldIdx = fieldIdx;
this.elementType = elementType;
}
public int getFieldIdx() {
return fieldIdx;
}
public LogicalType getElementType() {
return elementType;
}
}
private static
|
NestedArrayInfo
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/constraint/ForeignKeyConstraintTest.java
|
{
"start": 7956,
"end": 8598
}
|
class ____ {
@Id
@GeneratedValue
@jakarta.persistence.Column( nullable = false, unique = true)
public long id;
@OneToMany
@JoinColumn(name = "PERSON_CC_ID", foreignKey = @ForeignKey( name = "FK_PERSON_CC" ) )
public List<CreditCard> creditCards;
@OneToMany
@JoinColumn(name = "PERSON_CC_ID2", foreignKey = @ForeignKey( name = "FK_PERSON_CC2", value = ConstraintMode.NO_CONSTRAINT ) )
public List<CreditCard> creditCards2;
}
@Entity(name = "Professor")
@PrimaryKeyJoinColumn(
name = "PERSON_ID",
foreignKey = @ForeignKey( name = "FK_PROFESSOR_PERSON", value = ConstraintMode.NO_CONSTRAINT )
)
public static
|
Person
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/ECAdmin.java
|
{
"start": 16887,
"end": 18725
}
|
class ____ implements AdminHelper.Command {
@Override
public String getName() {
return "-enablePolicy";
}
@Override
public String getShortUsage() {
return "[" + getName() + " -policy <policy>]\n";
}
@Override
public String getLongUsage() {
TableListing listing = AdminHelper.getOptionDescriptionListing();
listing.addRow("<policy>", "The name of the erasure coding policy");
return getShortUsage() + "\n" +
"Enable the erasure coding policy.\n\n" +
listing.toString();
}
@Override
public int run(Configuration conf, List<String> args) throws IOException {
final String ecPolicyName = StringUtils.popOptionWithArgument("-policy",
args);
if (ecPolicyName == null) {
System.err.println("Please specify the policy name.\nUsage: " +
getLongUsage());
return 1;
}
if (args.size() > 0) {
System.err.println(getName() + ": Too many arguments");
return 1;
}
final DistributedFileSystem dfs = AdminHelper.getDFS(conf);
try {
dfs.enableErasureCodingPolicy(ecPolicyName);
System.out
.println("Erasure coding policy " + ecPolicyName + " is enabled");
ECTopologyVerifierResult result =
dfs.getECTopologyResultForPolicies(ecPolicyName);
if (!result.isSupported()) {
System.err.println(
"Warning: The cluster setup does not support " + "EC policy "
+ ecPolicyName + ". Reason: " + result.getResultMessage());
}
} catch (IOException e) {
System.err.println(AdminHelper.prettifyException(e));
return 2;
}
return 0;
}
}
/** Command to disable an existing erasure coding policy. */
private static
|
EnableECPolicyCommand
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/contract/hdfs/TestHDFSContractDelete.java
|
{
"start": 1198,
"end": 1608
}
|
class ____ extends AbstractContractDeleteTest {
@BeforeAll
public static void createCluster() throws IOException {
HDFSContract.createCluster();
}
@AfterAll
public static void teardownCluster() throws IOException {
HDFSContract.destroyCluster();
}
@Override
protected AbstractFSContract createContract(Configuration conf) {
return new HDFSContract(conf);
}
}
|
TestHDFSContractDelete
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/filter/wall/mysql/MySqlWallTest106.java
|
{
"start": 804,
"end": 1128
}
|
class ____ extends TestCase {
public void test_false() throws Exception {
WallProvider provider = new MySqlWallProvider();
provider.getConfig().setCommentAllow(false);
String sql = "select * from t where id = ? or bin(1) = 1";
assertFalse(provider.checkValid(sql));
}
}
|
MySqlWallTest106
|
java
|
quarkusio__quarkus
|
extensions/reactive-routes/deployment/src/main/java/io/quarkus/vertx/web/deployment/ReactiveRoutesProcessor.java
|
{
"start": 93108,
"end": 93377
}
|
interface ____ {
Expr get(MethodParameterInfo methodParam, Set<AnnotationInstance> annotations, Var routingContext,
BlockCreator bc, BuildProducer<ReflectiveHierarchyBuildItem> reflectiveHierarchy);
}
@FunctionalInterface
|
ValueProvider
|
java
|
apache__flink
|
flink-connectors/flink-connector-base/src/test/java/org/apache/flink/connector/base/source/reader/AlignedWatermarksITCase.java
|
{
"start": 7380,
"end": 7713
}
|
class ____ implements WatermarkGenerator<Long> {
@Override
public void onEvent(Long event, long eventTimestamp, WatermarkOutput output) {
output.emitWatermark(new Watermark(eventTimestamp));
}
@Override
public void onPeriodicEmit(WatermarkOutput output) {}
}
}
|
PunctuatedGenerator
|
java
|
spring-projects__spring-boot
|
module/spring-boot-micrometer-metrics/src/main/java/org/springframework/boot/micrometer/metrics/autoconfigure/export/properties/PropertiesConfigAdapter.java
|
{
"start": 2885,
"end": 3138
}
|
interface ____<V> {
/**
* Gets the fallback value, if any.
* @return the value or {@code null}
*/
@Nullable V get();
}
/**
* Gets the fallback value.
*
* @param <V> the type of the value
*/
@FunctionalInterface
protected
|
Fallback
|
java
|
apache__camel
|
components/camel-aws/camel-aws2-sqs/src/main/java/org/apache/camel/component/aws2/sqs/transform/AWS2SQSCloudEventDataTypeTransformer.java
|
{
"start": 1554,
"end": 2616
}
|
class ____ extends Transformer {
@Override
public void transform(Message message, DataType fromType, DataType toType) {
final Map<String, Object> headers = message.getHeaders();
CloudEvent cloudEvent = CloudEvents.v1_0;
headers.putIfAbsent(CloudEvents.CAMEL_CLOUD_EVENT_ID, message.getExchange().getExchangeId());
headers.putIfAbsent(CloudEvent.CAMEL_CLOUD_EVENT_VERSION, cloudEvent.version());
headers.put(CloudEvents.CAMEL_CLOUD_EVENT_TYPE, "org.apache.camel.event.aws.sqs.receiveMessage");
if (message.getHeaders().containsKey(Sqs2Constants.RECEIPT_HANDLE)) {
headers.put(CloudEvents.CAMEL_CLOUD_EVENT_SOURCE,
"aws.sqs.queue." + message.getHeader(Sqs2Constants.RECEIPT_HANDLE, String.class));
}
headers.put(CloudEvents.CAMEL_CLOUD_EVENT_SUBJECT, message.getHeader(Sqs2Constants.MESSAGE_ID, String.class));
headers.put(CloudEvents.CAMEL_CLOUD_EVENT_TIME, cloudEvent.getEventTime(message.getExchange()));
}
}
|
AWS2SQSCloudEventDataTypeTransformer
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ClassEndpointBuilderFactory.java
|
{
"start": 1296,
"end": 1445
}
|
class ____.
*
* Generated by camel build tools - do NOT edit this file!
*/
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public
|
name
|
java
|
spring-projects__spring-framework
|
spring-expression/src/main/java/org/springframework/expression/TypeLocator.java
|
{
"start": 1085,
"end": 1505
}
|
interface ____ {
/**
* Find a type by name.
* <p>The name may or may not be fully qualified — for example,
* {@code String} or {@code java.lang.String}.
* @param typeName the type to be located
* @return the {@code Class} object representing that type
* @throws EvaluationException if there is a problem finding the type
*/
Class<?> findType(String typeName) throws EvaluationException;
}
|
TypeLocator
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/network/Send.java
|
{
"start": 882,
"end": 943
}
|
interface ____ the in-progress sending of data.
*/
public
|
models
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/RangeSetAssert_doesNotEncloseAnyRangesOf_with_RangeSet_Test.java
|
{
"start": 1417,
"end": 3830
}
|
class ____ {
@Test
void should_fail_if_actual_is_null() {
// GIVEN
RangeSet<Integer> actual = null;
RangeSet<Integer> rangeSet = ImmutableRangeSet.of(closed(0, 1));
// WHEN
var error = expectAssertionError(() -> assertThat(actual).doesNotEncloseAnyRangesOf(rangeSet));
// THEN
then(error).hasMessage(actualIsNull());
}
@Test
void should_fail_if_rangeSet_is_null() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of();
RangeSet<Integer> rangeSet = null;
// WHEN
Throwable thrown = catchThrowable(() -> assertThat(actual).doesNotEncloseAnyRangesOf(rangeSet));
// THEN
then(thrown).isInstanceOf(NullPointerException.class)
.hasMessage(shouldNotBeNull("rangeSet").create());
}
@Test
void should_fail_if_rangeSet_is_empty() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of(closed(0, 1));
RangeSet<Integer> rangeSet = ImmutableRangeSet.of();
// WHEN
Throwable thrown = catchThrowable(() -> assertThat(actual).doesNotEncloseAnyRangesOf(rangeSet));
// THEN
then(thrown).isInstanceOf(IllegalArgumentException.class)
.hasMessage("Expecting rangeSet not to be empty");
}
@Test
void should_fail_if_actual_encloses_rangeSet() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of(closed(0, 100));
RangeSet<Integer> rangeSet = ImmutableRangeSet.<Integer> builder()
.add(open(10, 50))
.add(open(50, 110))
.build();
// WHEN
var error = expectAssertionError(() -> assertThat(actual).doesNotEncloseAnyRangesOf(rangeSet));
// THEN
then(error).hasMessage(shouldNotEnclose(actual, rangeSet, singleton(open(10, 50))).create());
}
@Test
void should_pass_if_actual_does_not_enclose_rangeSet() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of(closed(0, 100));
RangeSet<Integer> rangeSet = ImmutableRangeSet.<Integer> builder()
.add(open(-10, 50))
.add(open(50, 110))
.build();
// WHEN/THEN
assertThat(actual).doesNotEncloseAnyRangesOf(rangeSet);
}
}
|
RangeSetAssert_doesNotEncloseAnyRangesOf_with_RangeSet_Test
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/type/JavaTypeTest.java
|
{
"start": 869,
"end": 1211
}
|
enum ____ {
A(1) {
@Override public String toString() {
return "a";
}
},
B(2) {
@Override public String toString() {
return "b";
}
}
;
private MyEnumSub(int value) { }
}
// [databind#728]
static
|
MyEnumSub
|
java
|
spring-projects__spring-framework
|
spring-beans/src/testFixtures/java/org/springframework/beans/testfixture/beans/factory/annotation/PackagePrivateFieldInjectionSample.java
|
{
"start": 819,
"end": 904
}
|
class ____ {
@Autowired
Environment environment;
}
|
PackagePrivateFieldInjectionSample
|
java
|
quarkusio__quarkus
|
extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/runtime/binder/mpmetrics/MpMetricRegistrationTest.java
|
{
"start": 366,
"end": 4221
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.setFlatClassPath(true)
.withConfigurationResource("test-logging.properties")
.overrideConfigKey("quarkus.micrometer.binder.mp-metrics.enabled", "true")
.overrideConfigKey("quarkus.micrometer.binder-enabled-default", "false")
.overrideConfigKey("quarkus.micrometer.registry-enabled-default", "false")
.overrideConfigKey("quarkus.redis.devservices.enabled", "false");
@Inject
MetricRegistryAdapter mpRegistry;
@Inject
MeterRegistry registry;
@Test
public void metricsWithSameMetadata() {
Metadata metadata1 = Metadata.builder().withName("meter").withDescription("description1").build();
Metadata metadata2 = Metadata.builder().withName("meter").withDescription("description1").build();
MeterAdapter meter1 = (MeterAdapter) mpRegistry.meter(metadata1);
MeterAdapter meter2 = (MeterAdapter) mpRegistry.meter(metadata2);
Assertions.assertSame(meter1, meter2);
}
@Test
public void metricsWithDifferentType() {
Metadata metadata1 = Metadata.builder().withName("metric1")
.withDescription("description1").build();
Metadata metadata2 = Metadata.builder().withName("metric1")
.withDescription("description2").build();
mpRegistry.histogram(metadata1);
Assertions.assertThrows(IllegalArgumentException.class, () -> {
mpRegistry.meter(metadata2);
});
}
@Test
public void wrongTypeInMetadata() {
Metadata metadata1 = Metadata.builder().withName("metric1")
.withDescription("description1").build();
Metadata metadata2 = Metadata.builder()
.withName("metric1")
.withType(MetricType.COUNTER)
.build();
Assertions.assertThrows(IllegalArgumentException.class, () -> {
mpRegistry.histogram(metadata2);
});
}
@Test
public void descriptionChanged() {
Metadata metadata1 = Metadata.builder().withName("metric1")
.withDescription("description1").build();
Metadata metadata2 = Metadata.builder().withName("metric1")
.withDescription("description2").build();
// harmless re-registration
mpRegistry.histogram(metadata1);
HistogramAdapter histogram = (HistogramAdapter) mpRegistry.histogram(metadata1);
Assertions.assertEquals("description1", histogram.getMeter().getId().getDescription(),
"Description should match first set value");
}
@Test
public void metricsWithSameName() {
int cmSize = mpRegistry.constructedMeters.size();
int mdSize = mpRegistry.metadataMap.size();
Metadata metadata1 = Metadata.builder().withName("mycounter").withDescription("description1").build();
CounterAdapter counter1 = (CounterAdapter) mpRegistry.counter(metadata1);
CounterAdapter counter2 = (CounterAdapter) mpRegistry.counter("mycounter", new Tag("color", "blue"));
Assertions.assertNotEquals(counter1, counter2);
Assertions.assertEquals("description1", counter1.getMeter().getId().getDescription(),
"Description should match shared value");
Assertions.assertEquals("description1", counter2.getMeter().getId().getDescription(),
"Description should match shared value");
mpRegistry.remove("mycounter");
Assertions.assertEquals(cmSize, mpRegistry.constructedMeters.size(),
"Both counters should have been removed");
Assertions.assertEquals(mdSize, mpRegistry.metadataMap.size(),
"mycounter metadata should have been removed");
}
}
|
MpMetricRegistrationTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/javadoc/InvalidLinkTest.java
|
{
"start": 2027,
"end": 2232
}
|
interface ____ {
/** {@link https://foo/bar/baz foo} */
void foo();
}
""")
.addOutputLines(
"Test.java",
"""
|
Test
|
java
|
apache__rocketmq
|
proxy/src/main/java/org/apache/rocketmq/proxy/processor/channel/ChannelExtendAttributeGetter.java
|
{
"start": 864,
"end": 948
}
|
interface ____ {
String getChannelExtendAttribute();
}
|
ChannelExtendAttributeGetter
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/type/descriptor/jdbc/XmlArrayJdbcType.java
|
{
"start": 3795,
"end": 4741
}
|
class ____<X> extends BasicBinder<X> {
public XmlArrayBinder(JavaType<X> javaType, XmlArrayJdbcType jdbcType) {
super( javaType, jdbcType );
}
private XmlArrayJdbcType getXmlArrayJdbcType() {
return (XmlArrayJdbcType) getJdbcType();
}
private SQLXML getSqlxml(PreparedStatement st, X value, WrapperOptions options) throws SQLException {
final String xml = getXmlArrayJdbcType().toString( value, getJavaType(), options );
SQLXML sqlxml = st.getConnection().createSQLXML();
sqlxml.setString( xml );
return sqlxml;
}
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
throws SQLException {
st.setSQLXML( index, getSqlxml( st, value, options ) );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
st.setSQLXML( name, getSqlxml( st, value, options ) );
}
}
}
|
XmlArrayBinder
|
java
|
resilience4j__resilience4j
|
resilience4j-feign/src/main/java/io/github/resilience4j/feign/FeignDecorators.java
|
{
"start": 7646,
"end": 8449
}
|
interface ____ when calling
* {@link Resilience4jFeign.Builder#target(Class, String)}.
* @param filter the filter must return <code>true</code> for the fallback to be called.
* @return the builder
*/
public Builder withFallback(Object fallback, Predicate<Exception> filter) {
decorators.add(new FallbackDecorator<>(new DefaultFallbackHandler<>(fallback), filter));
return this;
}
/**
* Adds a fallback to the decorator chain. A factory can consume the exception thrown on
* error. Multiple fallbacks can be applied with the next fallback being called when the
* previous one fails.
*
* @param fallbackFactory must match the feign interface, i.e. the
|
specified
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/scheduling/AdaptiveSchedulerITCase.java
|
{
"start": 4464,
"end": 6677
}
|
class ____ extends TestLogger {
@Rule public TemporaryFolder tempFolder = new TemporaryFolder();
private static final int NUMBER_TASK_MANAGERS = 2;
private static final int NUMBER_SLOTS_PER_TASK_MANAGER = 2;
private static final int PARALLELISM = NUMBER_SLOTS_PER_TASK_MANAGER * NUMBER_TASK_MANAGERS;
private static final Configuration configuration = getConfiguration();
private static Configuration getConfiguration() {
final Configuration conf = new Configuration();
conf.set(JobManagerOptions.SCHEDULER, JobManagerOptions.SchedulerType.Adaptive);
conf.set(HeartbeatManagerOptions.HEARTBEAT_INTERVAL, Duration.ofMillis(1_000L));
conf.set(HeartbeatManagerOptions.HEARTBEAT_TIMEOUT, Duration.ofMillis(5_000L));
return conf;
}
@ClassRule
public static final MiniClusterWithClientResource MINI_CLUSTER_WITH_CLIENT_RESOURCE =
new MiniClusterWithClientResource(
new MiniClusterResourceConfiguration.Builder()
.setConfiguration(configuration)
.setNumberTaskManagers(NUMBER_TASK_MANAGERS)
.setNumberSlotsPerTaskManager(NUMBER_SLOTS_PER_TASK_MANAGER)
.build());
@Before
public void ensureAdaptiveSchedulerEnabled() {
assumeThat(ClusterOptions.isAdaptiveSchedulerEnabled(configuration)).isTrue();
}
@After
public void cancelRunningJobs() {
MINI_CLUSTER_WITH_CLIENT_RESOURCE.cancelAllJobsAndWaitUntilSlotsAreFreed();
}
/** Tests that the adaptive scheduler can recover stateful operators. */
@Test
public void testGlobalFailoverCanRecoverState() throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(PARALLELISM);
env.enableCheckpointing(20L, CheckpointingMode.EXACTLY_ONCE);
final DataStreamSource<Integer> input = env.addSource(new SimpleSource());
// TODO replace this by sink v2 after source is ported to FLIP-27.
input.addSink(new DiscardingSink<>());
env.execute();
}
private
|
AdaptiveSchedulerITCase
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/inject/AutoFactoryAtInjectTest.java
|
{
"start": 2994,
"end": 3269
}
|
class ____ {
@Inject
OnDifferentConstructors(String string) {}
@AutoFactory
OnDifferentConstructors(Object object) {}
}
}\
""")
.doTest();
}
}
|
OnDifferentConstructors
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/GooglePubsubLiteEndpointBuilderFactory.java
|
{
"start": 7999,
"end": 13910
}
|
interface ____
extends
EndpointConsumerBuilder {
default GooglePubsubLiteEndpointConsumerBuilder basic() {
return (GooglePubsubLiteEndpointConsumerBuilder) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedGooglePubsubLiteEndpointConsumerBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedGooglePubsubLiteEndpointConsumerBuilder bridgeErrorHandler(String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedGooglePubsubLiteEndpointConsumerBuilder exceptionHandler(org.apache.camel.spi.ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedGooglePubsubLiteEndpointConsumerBuilder exceptionHandler(String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedGooglePubsubLiteEndpointConsumerBuilder exchangePattern(org.apache.camel.ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedGooglePubsubLiteEndpointConsumerBuilder exchangePattern(String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
}
/**
* Builder for endpoint producers for the Google PubSub Lite component.
*/
public
|
AdvancedGooglePubsubLiteEndpointConsumerBuilder
|
java
|
apache__dubbo
|
dubbo-cluster/src/test/java/org/apache/dubbo/rpc/cluster/support/BroadCastClusterInvokerTest.java
|
{
"start": 4427,
"end": 5310
}
|
class ____ implements Invoker<DemoService> {
    // NOTE(review): removed `private static int count = 0;` — it was never read
    // or written anywhere in this class.

    /** Endpoint URL reported by {@link #getUrl()}; never reassigned, so final. */
    private final URL url = URL.valueOf("test://127.0.0.1:8080/test");

    /** When true, the next {@link #invoke(Invocation)} throws exactly once, then resets. */
    private boolean throwEx = false;

    /** Set as soon as {@link #invoke(Invocation)} has been called at least once. */
    private boolean invoked = false;

    @Override
    public URL getUrl() {
        return url;
    }

    @Override
    public boolean isAvailable() {
        // The mock deliberately reports itself as unavailable.
        return false;
    }

    @Override
    public void destroy() {}

    @Override
    public Class<DemoService> getInterface() {
        return DemoService.class;
    }

    @Override
    public Result invoke(Invocation invocation) throws RpcException {
        invoked = true;
        if (throwEx) {
            // One-shot failure: clear the flag before throwing so the next call succeeds.
            throwEx = false;
            throw new RpcException();
        }
        return null;
    }

    /** Arms the mock so that its next invocation fails with an {@link RpcException}. */
    public void invokeThrowEx() {
        throwEx = true;
    }

    /** @return whether {@link #invoke(Invocation)} has been called. */
    public boolean isInvoked() {
        return invoked;
    }
}
|
MockInvoker
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/mysql/ast/statement/MySqlShowJobStatusStatement.java
|
{
"start": 794,
"end": 1438
}
|
class ____ extends MySqlStatementImpl implements MySqlShowStatement {
    // Whether the SYNC variant of the statement was specified.
    private boolean sync;
    // Optional WHERE filter expression; may be null.
    private SQLExpr where;

    public MySqlShowJobStatusStatement() {
    }

    public boolean isSync() {
        return this.sync;
    }

    public void setSync(boolean sync) {
        this.sync = sync;
    }

    public SQLExpr getWhere() {
        return this.where;
    }

    public void setWhere(SQLExpr where) {
        this.where = where;
    }

    /** Standard visitor dispatch: descend into the WHERE child only if the visitor asks to. */
    public void accept0(MySqlASTVisitor visitor) {
        boolean visitChildren = visitor.visit(this);
        if (visitChildren) {
            acceptChild(visitor, this.where);
        }
        visitor.endVisit(this);
    }
}
|
MySqlShowJobStatusStatement
|
java
|
spring-projects__spring-boot
|
core/spring-boot-test/src/main/java/org/springframework/boot/test/context/SpringBootContextLoader.java
|
{
"start": 21144,
"end": 21907
}
|
class ____
        implements ApplicationListener<ApplicationEnvironmentPreparedEvent>, PriorityOrdered {

    // Configuration captured at construction time and replayed once the
    // environment-prepared event fires.
    private final MergedContextConfiguration contextConfiguration;

    PrepareEnvironmentListener(MergedContextConfiguration mergedConfig) {
        this.contextConfiguration = mergedConfig;
    }

    @Override
    public int getOrder() {
        // Highest precedence so the environment is prepared before any other listener runs.
        return Ordered.HIGHEST_PRECEDENCE;
    }

    @Override
    public void onApplicationEvent(ApplicationEnvironmentPreparedEvent event) {
        prepareEnvironment(this.contextConfiguration, event.getSpringApplication(), event.getEnvironment(), true);
    }
}
/**
* {@link SpringApplicationHook} used to capture {@link ApplicationContext} instances
* and to trigger early exit for the {@link Mode#AOT_PROCESSING} mode.
*/
private static
|
PrepareEnvironmentListener
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/launcher/core/CompositeTestExecutionListenerTests.java
|
{
"start": 9306,
"end": 10450
}
|
class ____ implements TestExecutionListener {

    // Every callback fails identically; build the exception in one place.
    private static RuntimeException listenerFailure() {
        return new RuntimeException("failed to invoke listener");
    }

    @Override
    public void testPlanExecutionStarted(TestPlan testPlan) {
        throw listenerFailure();
    }

    @Override
    public void testPlanExecutionFinished(TestPlan testPlan) {
        throw listenerFailure();
    }

    @Override
    public void dynamicTestRegistered(TestIdentifier testIdentifier) {
        throw listenerFailure();
    }

    @Override
    public void executionStarted(TestIdentifier testIdentifier) {
        throw listenerFailure();
    }

    @Override
    public void executionSkipped(TestIdentifier testIdentifier, String reason) {
        throw listenerFailure();
    }

    @Override
    public void executionFinished(TestIdentifier testIdentifier, TestExecutionResult testExecutionResult) {
        throw listenerFailure();
    }

    @Override
    public void reportingEntryPublished(TestIdentifier testIdentifier, ReportEntry entry) {
        throw listenerFailure();
    }
}
}
|
ThrowingTestExecutionListener
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/http/RememberMeConfigTests.java
|
{
"start": 14573,
"end": 14673
}
|
class ____ {
    // Serves GET /authenticated with the literal body "ok"; used as a target
    // endpoint by the surrounding security configuration tests.
    @GetMapping("/authenticated")
    String ok() {
        return "ok";
    }
}
}
|
BasicController
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/state/ttl/TtlListState.java
|
{
"start": 6807,
"end": 8462
}
|
class ____ implements Iterator<T> {
    // Iterator over the raw TTL-wrapped entries of the backing list.
    private final Iterator<TtlValue<T>> originalIterator;
    // Becomes true once any unexpired entry has been seen; used to decide
    // whether the whole backing state may be cleared as fully expired.
    private boolean anyUnexpired = false;
    // Guards the one-shot clear performed in cleanupIfEmpty().
    private boolean uncleared = true;
    // Buffered next user value to return from next(), or null if none staged.
    private T nextUnexpired = null;
    private IteratorWithCleanup(Iterator<TtlValue<T>> ttlIterator) {
        this.originalIterator = ttlIterator;
    }
    @Override
    public boolean hasNext() {
        // Side-effecting: stages the next returnable value, then may clear the
        // backing state if every entry turned out to be expired.
        findNextUnexpired();
        cleanupIfEmpty();
        return nextUnexpired != null;
    }
    private void cleanupIfEmpty() {
        boolean endOfIter = !originalIterator.hasNext() && nextUnexpired == null;
        if (uncleared && !anyUnexpired && endOfIter) {
            // `original` is a field of the enclosing state object (not visible in
            // this block); no unexpired entry was found and iteration is done,
            // so drop the whole backing list exactly once.
            original.clear();
            uncleared = false;
        }
    }
    @Override
    public T next() {
        // hasNext() both answers and stages nextUnexpired; consume the staged value.
        if (hasNext()) {
            T result = nextUnexpired;
            nextUnexpired = null;
            return result;
        }
        throw new NoSuchElementException();
    }
    private void findNextUnexpired() {
        while (nextUnexpired == null && originalIterator.hasNext()) {
            TtlValue<T> ttlValue = originalIterator.next();
            if (ttlValue == null) {
                // A null wrapper terminates the scan early with nothing staged.
                break;
            }
            boolean unexpired = !expired(ttlValue);
            if (unexpired) {
                anyUnexpired = true;
            }
            // `returnExpired` comes from the enclosing state object: when set,
            // expired values are still handed back to the caller.
            if (unexpired || returnExpired) {
                nextUnexpired = ttlValue.getUserValue();
            }
        }
    }
}
}
|
IteratorWithCleanup
|
java
|
quarkusio__quarkus
|
extensions/panache/panache-common/runtime/src/main/java/io/quarkus/panache/common/Sort.java
|
{
"start": 711,
"end": 1007
}
|
enum ____ {
    /**
     * Sort in ascending order (the default).
     */
    Ascending,
    /**
     * Sort in descending order (opposite to the default).
     */
    Descending
    // NOTE(review): dropped the redundant trailing semicolon after the last
    // constant — it is only required when members follow the constant list.
}
/**
* Represents the order of null columns.
*/
public
|
Direction
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/Phase.java
|
{
"start": 1383,
"end": 7084
}
|
class ____ implements ToXContentObject, Writeable {
    private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(Phase.class);

    public static final ParseField MIN_AGE = new ParseField("min_age");
    public static final ParseField ACTIONS_FIELD = new ParseField("actions");

    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<Phase, String> PARSER = new ConstructingObjectParser<>("phase", false, (a, name) -> {
        final List<LifecycleAction> lifecycleActions = (List<LifecycleAction>) a[1];
        Map<String, LifecycleAction> map = Maps.newMapWithExpectedSize(lifecycleActions.size());
        for (LifecycleAction lifecycleAction : lifecycleActions) {
            if (map.put(lifecycleAction.getWriteableName(), lifecycleAction) != null) {
                // Name the duplicated action so a malformed policy is diagnosable.
                throw new IllegalStateException("Duplicate key [" + lifecycleAction.getWriteableName() + "]");
            }
        }
        return new Phase(name, (TimeValue) a[0], map);
    });
    static {
        PARSER.declareField(
            ConstructingObjectParser.optionalConstructorArg(),
            (ContextParser<String, Object>) (p, c) -> TimeValue.parseTimeValue(p.text(), MIN_AGE.getPreferredName()),
            MIN_AGE,
            ValueType.VALUE
        );
        PARSER.declareNamedObjects(
            ConstructingObjectParser.constructorArg(),
            (p, c, n) -> p.namedObject(LifecycleAction.class, n, null),
            v -> {
                throw new IllegalArgumentException("ordered " + ACTIONS_FIELD.getPreferredName() + " are not supported");
            },
            ACTIONS_FIELD
        );
    }

    public static Phase parse(XContentParser parser, String name) {
        return PARSER.apply(parser, name);
    }

    private final String name;
    private final Map<String, LifecycleAction> actions;
    private final TimeValue minimumAge;

    /**
     * @param name
     *            the name of this {@link Phase}.
     * @param minimumAge
     *            the age of the index when the index should move to this
     *            {@link Phase}; {@code null} is treated as {@link TimeValue#ZERO}.
     * @param actions
     *            a {@link Map} of the {@link LifecycleAction}s to run when
     *            during his {@link Phase}. The keys in this map are the associated
     *            action names. The order of these actions is defined
     *            by the {@link LifecycleType}
     */
    public Phase(String name, TimeValue minimumAge, Map<String, LifecycleAction> actions) {
        this.name = name;
        if (minimumAge == null) {
            this.minimumAge = TimeValue.ZERO;
        } else {
            this.minimumAge = minimumAge;
        }
        this.actions = actions;
    }

    /**
     * Wire deserialization constructor; must mirror {@link #writeTo(StreamOutput)}.
     */
    public Phase(StreamInput in) throws IOException {
        this.name = in.readString();
        this.minimumAge = in.readTimeValue();
        int size = in.readVInt();
        // TreeMap keeps the action names sorted so the wire format is stable.
        TreeMap<String, LifecycleAction> actions = new TreeMap<>();
        for (int i = 0; i < size; i++) {
            actions.put(in.readString(), in.readNamedWriteable(LifecycleAction.class));
        }
        this.actions = actions;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(name);
        out.writeTimeValue(minimumAge);
        out.writeVInt(actions.size());
        for (Map.Entry<String, LifecycleAction> entry : actions.entrySet()) {
            out.writeString(entry.getKey());
            out.writeNamedWriteable(entry.getValue());
        }
    }

    /**
     * @return the age of the index when the index should move to this
     *         {@link Phase}.
     */
    public TimeValue getMinimumAge() {
        return minimumAge;
    }

    /**
     * @return the name of this {@link Phase}
     */
    public String getName() {
        return name;
    }

    /**
     * @return a {@link Map} of the {@link LifecycleAction}s to run when during
     *         his {@link Phase}.
     */
    public Map<String, LifecycleAction> getActions() {
        return actions;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(MIN_AGE.getPreferredName(), minimumAge.getStringRep());
        builder.xContentValuesMap(ACTIONS_FIELD.getPreferredName(), actions);
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(name, minimumAge, actions);
    }

    @Override
    public boolean equals(Object obj) {
        // Identity fast path was missing; cheap and standard for equals().
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (obj.getClass() != getClass()) {
            return false;
        }
        Phase other = (Phase) obj;
        return Objects.equals(name, other.name) && Objects.equals(minimumAge, other.minimumAge) && Objects.equals(actions, other.actions);
    }

    @Override
    public String toString() {
        return Strings.toString(this, true, true);
    }

    /**
     * Emits a deprecation warning if this phase still contains the (noop) freeze action.
     *
     * @param policyName name of the owning policy, included in the warning text
     * @return true if the warning was emitted
     */
    @UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT)
    public boolean maybeAddDeprecationWarningForFreezeAction(String policyName) {
        if (getActions().containsKey(FreezeAction.NAME)) {
            deprecationLogger.warn(
                DeprecationCategory.OTHER,
                "ilm_freeze_action_deprecation",
                "The freeze action in ILM is deprecated and will be removed in a future version;"
                    + " this action is already a noop so it can be safely removed. Please remove the freeze action from the '"
                    + policyName
                    + "' policy."
            );
            return true;
        }
        return false;
    }
}
|
Phase
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/imports/SecondSourceTargetMapper.java
|
{
"start": 560,
"end": 827
}
|
interface ____ {
    // Shared instance backed by the MapStruct-generated implementation.
    SecondSourceTargetMapper INSTANCE = Mappers.getMapper( SecondSourceTargetMapper.class );
    // Maps between two FooWrapper types that share a simple name but live in
    // different packages (the parameter type is fully qualified for that reason).
    FooWrapper fooWrapperToFooWrapper(org.mapstruct.ap.test.imports.from.FooWrapper foo);
    Target sourceToTarget(Source source);
}
|
SecondSourceTargetMapper
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.