language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | quarkusio__quarkus | extensions/hibernate-envers/deployment/src/test/java/io/quarkus/hibernate/orm/envers/config/EnversAllowIdentifierReuseTestCase.java | {
"start": 386,
"end": 1044
} | class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(MyAuditedEntity.class, EnversTestAllowIdentifierReuseResource.class,
AbstractEnversResource.class)
.addAsResource("application-with-allow-identifier-reuse.properties",
"application.properties"));
@Test
public void testValidityStrategyFieldNameOverrides() {
RestAssured.when().get("/envers-allow-identifier-reuse").then()
.body(is("OK"));
}
}
| EnversAllowIdentifierReuseTestCase |
java | elastic__elasticsearch | test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/AssertionTests.java | {
"start": 971,
"end": 10913
} | class ____ extends AbstractClientYamlTestFragmentParserTestCase {
public void testParseIsTrue() throws Exception {
parser = createParser(YamlXContent.yamlXContent, "get.fields._timestamp");
IsTrueAssertion trueAssertion = IsTrueAssertion.parse(parser);
assertThat(trueAssertion, notNullValue());
assertThat(trueAssertion.getField(), equalTo("get.fields._timestamp"));
}
public void testParseIsFalse() throws Exception {
parser = createParser(YamlXContent.yamlXContent, "docs.1._source");
IsFalseAssertion falseAssertion = IsFalseAssertion.parse(parser);
assertThat(falseAssertion, notNullValue());
assertThat(falseAssertion.getField(), equalTo("docs.1._source"));
}
public void testParseGreaterThan() throws Exception {
parser = createParser(YamlXContent.yamlXContent, "{ field: 3}");
GreaterThanAssertion greaterThanAssertion = GreaterThanAssertion.parse(parser);
assertThat(greaterThanAssertion, notNullValue());
assertThat(greaterThanAssertion.getField(), equalTo("field"));
assertThat(greaterThanAssertion.getExpectedValue(), instanceOf(Integer.class));
assertThat((Integer) greaterThanAssertion.getExpectedValue(), equalTo(3));
}
public void testParseLessThan() throws Exception {
parser = createParser(YamlXContent.yamlXContent, "{ field: 3}");
LessThanAssertion lessThanAssertion = LessThanAssertion.parse(parser);
assertThat(lessThanAssertion, notNullValue());
assertThat(lessThanAssertion.getField(), equalTo("field"));
assertThat(lessThanAssertion.getExpectedValue(), instanceOf(Integer.class));
assertThat((Integer) lessThanAssertion.getExpectedValue(), equalTo(3));
}
public void testParseLength() throws Exception {
parser = createParser(YamlXContent.yamlXContent, "{ _id: 22}");
LengthAssertion lengthAssertion = LengthAssertion.parse(parser);
assertThat(lengthAssertion, notNullValue());
assertThat(lengthAssertion.getField(), equalTo("_id"));
assertThat(lengthAssertion.getExpectedValue(), instanceOf(Integer.class));
assertThat((Integer) lengthAssertion.getExpectedValue(), equalTo(22));
}
public void testParseIsAfter() throws Exception {
parser = createParser(YamlXContent.yamlXContent, "{ field: 2021-05-25T12:30:00.000Z}");
IsAfterAssertion isAfterAssertion = IsAfterAssertion.parse(parser);
assertThat(isAfterAssertion, notNullValue());
assertThat(isAfterAssertion.getField(), equalTo("field"));
assertThat(isAfterAssertion.getExpectedValue(), instanceOf(String.class));
assertThat(isAfterAssertion.getExpectedValue(), equalTo("2021-05-25T12:30:00.000Z"));
}
public void testParseMatchSimpleIntegerValue() throws Exception {
parser = createParser(YamlXContent.yamlXContent, "{ field: 10 }");
MatchAssertion matchAssertion = MatchAssertion.parse(parser);
assertThat(matchAssertion, notNullValue());
assertThat(matchAssertion.getField(), equalTo("field"));
assertThat(matchAssertion.getExpectedValue(), instanceOf(Integer.class));
assertThat((Integer) matchAssertion.getExpectedValue(), equalTo(10));
}
public void testParseMatchSimpleStringValue() throws Exception {
parser = createParser(YamlXContent.yamlXContent, "{ foo: bar }");
MatchAssertion matchAssertion = MatchAssertion.parse(parser);
assertThat(matchAssertion, notNullValue());
assertThat(matchAssertion.getField(), equalTo("foo"));
assertThat(matchAssertion.getExpectedValue(), instanceOf(String.class));
assertThat(matchAssertion.getExpectedValue().toString(), equalTo("bar"));
}
public void testParseMatchArray() throws Exception {
parser = createParser(YamlXContent.yamlXContent, "{'matches': ['test_percolator_1', 'test_percolator_2']}");
MatchAssertion matchAssertion = MatchAssertion.parse(parser);
assertThat(matchAssertion, notNullValue());
assertThat(matchAssertion.getField(), equalTo("matches"));
assertThat(matchAssertion.getExpectedValue(), instanceOf(List.class));
List<?> strings = (List<?>) matchAssertion.getExpectedValue();
assertThat(strings, transformedItemsMatch(Object::toString, contains("test_percolator_1", "test_percolator_2")));
}
@SuppressWarnings("unchecked")
public void testParseContains() throws Exception {
parser = createParser(YamlXContent.yamlXContent, "{testKey: { someKey: someValue } }");
ContainsAssertion containsAssertion = ContainsAssertion.parse(parser);
assertThat(containsAssertion, notNullValue());
assertThat(containsAssertion.getField(), equalTo("testKey"));
assertThat(containsAssertion.getExpectedValue(), instanceOf(Map.class));
assertThat(((Map<String, String>) containsAssertion.getExpectedValue()).get("someKey"), equalTo("someValue"));
}
@SuppressWarnings("unchecked")
public void testParseMatchSourceValues() throws Exception {
parser = createParser(YamlXContent.yamlXContent, "{ _source: { responses.0.hits.total: 3, foo: bar }}");
MatchAssertion matchAssertion = MatchAssertion.parse(parser);
assertThat(matchAssertion, notNullValue());
assertThat(matchAssertion.getField(), equalTo("_source"));
assertThat(matchAssertion.getExpectedValue(), instanceOf(Map.class));
Map<String, Object> expectedValue = (Map<String, Object>) matchAssertion.getExpectedValue();
assertThat(expectedValue.size(), equalTo(2));
Object o = expectedValue.get("responses.0.hits.total");
assertThat(o, instanceOf(Integer.class));
assertThat((Integer) o, equalTo(3));
o = expectedValue.get("foo");
assertThat(o, instanceOf(String.class));
assertThat(o.toString(), equalTo("bar"));
}
public void testCloseTo() throws Exception {
parser = createParser(YamlXContent.yamlXContent, "{ field: { value: 42.2, error: 0.001 } }");
CloseToAssertion closeToAssertion = CloseToAssertion.parse(parser);
assertThat(closeToAssertion, notNullValue());
assertThat(closeToAssertion.getField(), equalTo("field"));
assertThat(closeToAssertion.getExpectedValue(), instanceOf(Double.class));
assertThat((Double) closeToAssertion.getExpectedValue(), equalTo(42.2));
assertThat(closeToAssertion.getError(), equalTo(0.001));
closeToAssertion.doAssert(42.2 + randomDoubleBetween(-0.001, 0.001, false), closeToAssertion.getExpectedValue());
AssertionError e = expectThrows(
AssertionError.class,
() -> closeToAssertion.doAssert(
42.2 + (randomBoolean() ? 1 : -1) * randomDoubleBetween(0.001001, 10, false),
closeToAssertion.getExpectedValue()
)
);
assertThat(e.getMessage(), containsString("Expected: a numeric value within <0.001> of <42.2>"));
}
public void testInvalidCloseTo() throws Exception {
parser = createParser(YamlXContent.yamlXContent, "{ field: 42 }");
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> CloseToAssertion.parse(parser));
assertThat(exception.getMessage(), equalTo("expected a map with value and error but got Integer"));
parser = createParser(YamlXContent.yamlXContent, "{ field: { } }");
exception = expectThrows(IllegalArgumentException.class, () -> CloseToAssertion.parse(parser));
assertThat(exception.getMessage(), equalTo("expected a map with value and error but got a map with 0 fields"));
parser = createParser(YamlXContent.yamlXContent, "{ field: { foo: 13, value: 15 } }");
exception = expectThrows(IllegalArgumentException.class, () -> CloseToAssertion.parse(parser));
assertThat(exception.getMessage(), equalTo("error is missing or not a number"));
parser = createParser(YamlXContent.yamlXContent, "{ field: { foo: 13, bar: 15 } }");
exception = expectThrows(IllegalArgumentException.class, () -> CloseToAssertion.parse(parser));
assertThat(exception.getMessage(), equalTo("value is missing or not a number"));
}
public void testExists() throws IOException {
parser = createParser(YamlXContent.yamlXContent, "get.fields._timestamp");
ExistsAssertion existsAssertion = ExistsAssertion.parse(parser);
assertThat(existsAssertion, notNullValue());
assertThat(existsAssertion.getField(), equalTo("get.fields._timestamp"));
existsAssertion.doAssert(randomFrom(1, "", "non-empty", List.of(), Map.of()), existsAssertion.getExpectedValue());
AssertionError e = expectThrows(AssertionError.class, () -> existsAssertion.doAssert(null, existsAssertion.getExpectedValue()));
assertThat(e.getMessage(), containsString("field [get.fields._timestamp] does not exist"));
}
public void testDoesNotExist() throws IOException {
parser = createParser(YamlXContent.yamlXContent, "get.fields._timestamp");
NotExistsAssertion existnotExistsAssertion = NotExistsAssertion.parse(parser);
assertThat(existnotExistsAssertion, notNullValue());
assertThat(existnotExistsAssertion.getField(), equalTo("get.fields._timestamp"));
existnotExistsAssertion.doAssert(null, existnotExistsAssertion.getExpectedValue());
AssertionError e = expectThrows(
AssertionError.class,
() -> existnotExistsAssertion.doAssert(
randomFrom(1, "", "non-empty", List.of(), Map.of(), 0, false),
existnotExistsAssertion.getExpectedValue()
)
);
assertThat(e.getMessage(), containsString("field [get.fields._timestamp] exists, but should not"));
}
}
| AssertionTests |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/visitor/OracleSchemaStatVisitorTest1.java | {
"start": 978,
"end": 3814
} | class ____ extends TestCase {
public void test_0() throws Exception {
String sql = "select a.name, b.name FROM users a, usergroups b on a.groupId = b.id";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement statemen = statementList.get(0);
assertEquals(1, statementList.size());
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
statemen.accept(visitor);
System.out.println(sql);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("relationShip : " + visitor.getRelationships());
assertEquals(2, visitor.getTables().size());
assertEquals(true, visitor.containsTable("users"));
assertEquals(true, visitor.containsTable("usergroups"));
assertEquals(4, visitor.getColumns().size());
assertEquals(true, visitor.getColumns().contains(new Column("users", "groupId")));
assertEquals(true, visitor.getColumns().contains(new Column("users", "name")));
assertEquals(true, visitor.getColumns().contains(new Column("usergroups", "id")));
assertEquals(true, visitor.getColumns().contains(new Column("usergroups", "name")));
assertEquals(1, visitor.getRelationships().size());
assertEquals("users.groupId = usergroups.id", visitor.getRelationships().iterator().next().toString());
}
public void test_1() throws Exception {
String sql = "select a.name, b.name FROM users a, usergroups b on a.groupId = b.id where a.groupID = ?";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement statemen = statementList.get(0);
assertEquals(1, statementList.size());
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
statemen.accept(visitor);
System.out.println(sql);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
assertEquals(2, visitor.getTables().size());
assertEquals(true, visitor.containsTable("users"));
assertEquals(true, visitor.containsTable("usergroups"));
assertEquals(4, visitor.getColumns().size());
assertEquals(true, visitor.getColumns().contains(new Column("users", "groupId")));
assertEquals(true, visitor.getColumns().contains(new Column("users", "name")));
assertEquals(true, visitor.getColumns().contains(new Column("usergroups", "id")));
assertEquals(true, visitor.getColumns().contains(new Column("usergroups", "name")));
}
}
| OracleSchemaStatVisitorTest1 |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ProcessTableFunctionTestUtils.java | {
"start": 40394,
"end": 42568
} | class ____ extends ProcessTableFunction<Row>
implements ChangelogFunction {
public void eval(
@StateHint Tuple1<Integer> score,
@StateHint Tuple1<String> city,
@ArgumentHint({SET_SEMANTIC_TABLE, SUPPORT_UPDATES}) Row scoreTable,
@ArgumentHint({SET_SEMANTIC_TABLE, SUPPORT_UPDATES}) Row cityTable)
throws Exception {
final boolean wasMatch = isMatch(score, city);
if (isDelete(scoreTable) || isDelete(cityTable)) {
if (wasMatch) {
collect(Row.ofKind(RowKind.DELETE, (Object) null));
}
}
if (scoreTable != null) {
apply(score, scoreTable.getFieldAs("score"), scoreTable.getKind());
}
if (cityTable != null) {
apply(city, cityTable.getFieldAs("city"), cityTable.getKind());
}
if (isMatch(score, city)) {
collect(
Row.ofKind(
wasMatch ? RowKind.UPDATE_AFTER : RowKind.INSERT,
"score " + score.f0 + " in city " + city.f0));
}
}
public boolean isDelete(Row r) {
return r != null && r.getKind() == RowKind.DELETE;
}
public boolean isMatch(Tuple1<Integer> score, Tuple1<String> city) {
return score.f0 != null && city.f0 != null;
}
@Override
public ChangelogMode getChangelogMode(ChangelogContext changelogContext) {
return ChangelogMode.upsert();
}
private static <T> void apply(Tuple1<T> t, T o, RowKind op) {
if (op == RowKind.INSERT || op == RowKind.UPDATE_AFTER) {
t.f0 = o;
} else {
t.f0 = null;
}
}
}
// --------------------------------------------------------------------------------------------
// Helpers
// --------------------------------------------------------------------------------------------
/** POJO for typed tables. */
public static | UpdatingJoinFunction |
java | junit-team__junit5 | junit-platform-engine/src/main/java/org/junit/platform/engine/discovery/PackageNameFilter.java | {
"start": 772,
"end": 3589
} | interface ____ extends DiscoveryFilter<String> {
/**
* Create a new <em>include</em> {@link PackageNameFilter} based on the
* supplied package names.
*
* <p>The names are combined using OR semantics, i.e. if the fully
* qualified name of a package starts with at least one of the names,
* the package will be included in the result set.
*
* @param names package names that we be compared against fully qualified
* package names; never {@code null}, empty, or containing {@code null}
* @see Package#getName()
* @see #includePackageNames(List)
* @see #excludePackageNames(String...)
*/
static PackageNameFilter includePackageNames(String... names) {
return new IncludePackageNameFilter(names);
}
/**
* Create a new <em>include</em> {@link PackageNameFilter} based on the
* supplied package names.
*
* <p>The names are combined using OR semantics, i.e. if the fully
* qualified name of a package starts with at least one of the names,
* the package will be included in the result set.
*
* @param names package names that we be compared against fully qualified
* package names; never {@code null}, empty, or containing {@code null}
* @see Package#getName()
* @see #includePackageNames(String...)
* @see #excludePackageNames(String...)
*/
static PackageNameFilter includePackageNames(List<String> names) {
return includePackageNames(names.toArray(new String[0]));
}
/**
* Create a new <em>exclude</em> {@link PackageNameFilter} based on the
* supplied package names.
*
* <p>The names are combined using OR semantics, i.e. if the fully
* qualified name of a package starts with at least one of the names,
* the package will be excluded in the result set.
*
* @param names package names that we be compared against fully qualified
* package names; never {@code null}, empty, or containing {@code null}
* @see Package#getName()
* @see #excludePackageNames(List)
* @see #includePackageNames(String...)
*/
static PackageNameFilter excludePackageNames(String... names) {
return new ExcludePackageNameFilter(names);
}
/**
* Create a new <em>exclude</em> {@link PackageNameFilter} based on the
* supplied package names.
*
* <p>The names are combined using OR semantics, i.e. if the fully
* qualified name of a package starts with at least one of the names,
* the package will be excluded in the result set.
*
* @param names package names that we be compared against fully qualified
* package names; never {@code null}, empty, or containing {@code null}
* @see Package#getName()
* @see #excludePackageNames(String...)
* @see #includePackageNames(String...)
*/
static PackageNameFilter excludePackageNames(List<String> names) {
return excludePackageNames(names.toArray(new String[0]));
}
}
| PackageNameFilter |
java | apache__rocketmq | controller/src/main/java/org/apache/rocketmq/controller/helper/BrokerLiveInfoGetter.java | {
"start": 928,
"end": 1043
} | interface ____ {
BrokerLiveInfo get(String clusterName, String brokerName, Long brokerId);
}
| BrokerLiveInfoGetter |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/parameters/ParameterExtractor.java | {
"start": 490,
"end": 644
} | class ____ is used to provide async method parameters.
*
* This is very simple to reduce the number of required allocations.
*/
public | that |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/ChoiceWhenBeanExpressionTest.java | {
"start": 3055,
"end": 3254
} | class ____ {
private final int grade;
Student(int grade) {
this.grade = grade;
}
public int getGrade() {
return grade;
}
}
}
| Student |
java | google__dagger | javatests/dagger/internal/codegen/RawTypeInjectionTest.java | {
"start": 2784,
"end": 3619
} | class ____ {",
" @Provides",
" int provideFoo(Foo foo) {", // Fail: requesting raw type
" return 0;",
" }",
"}");
CompilerTests.daggerCompiler(component, foo, module)
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining(
"Foo cannot be provided without an @Provides-annotated method.")
.onSource(component)
.onLine(6);
});
}
@Test
public void rawInjectConstructorRequestTest() {
Source component =
CompilerTests.javaSource(
"test.TestComponent",
"package test;",
"",
"import dagger.Component;",
"",
"@Component",
" | TestModule |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/type/ClobTypeHandlerTest.java | {
"start": 1030,
"end": 2928
} | class ____ extends BaseTypeHandlerTest {
private static final TypeHandler<String> TYPE_HANDLER = new ClobTypeHandler();
@Mock
protected Clob clob;
@Override
@Test
public void shouldSetParameter() throws Exception {
TYPE_HANDLER.setParameter(ps, 1, "Hello", null);
verify(ps).setCharacterStream(ArgumentMatchers.eq(1), ArgumentMatchers.any(Reader.class), ArgumentMatchers.eq(5));
}
@Override
@Test
public void shouldGetResultFromResultSetByName() throws Exception {
when(rs.getClob("column")).thenReturn(clob);
when(clob.length()).thenReturn(3L);
when(clob.getSubString(1, 3)).thenReturn("Hello");
assertEquals("Hello", TYPE_HANDLER.getResult(rs, "column"));
}
@Override
@Test
public void shouldGetResultNullFromResultSetByName() throws Exception {
when(rs.getClob("column")).thenReturn(null);
assertNull(TYPE_HANDLER.getResult(rs, "column"));
}
@Override
@Test
public void shouldGetResultFromResultSetByPosition() throws Exception {
when(rs.getClob(1)).thenReturn(clob);
when(clob.length()).thenReturn(3L);
when(clob.getSubString(1, 3)).thenReturn("Hello");
assertEquals("Hello", TYPE_HANDLER.getResult(rs, 1));
}
@Override
@Test
public void shouldGetResultNullFromResultSetByPosition() throws Exception {
when(rs.getClob(1)).thenReturn(null);
assertNull(TYPE_HANDLER.getResult(rs, 1));
}
@Override
@Test
public void shouldGetResultFromCallableStatement() throws Exception {
when(cs.getClob(1)).thenReturn(clob);
when(clob.length()).thenReturn(3L);
when(clob.getSubString(1, 3)).thenReturn("Hello");
assertEquals("Hello", TYPE_HANDLER.getResult(cs, 1));
}
@Override
@Test
public void shouldGetResultNullFromCallableStatement() throws Exception {
when(cs.getClob(1)).thenReturn(null);
assertNull(TYPE_HANDLER.getResult(cs, 1));
}
}
| ClobTypeHandlerTest |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/support/JpaMetamodelEntityInformationIntegrationTests.java | {
"start": 10923,
"end": 11037
} | class ____ {
@Id Long id;
@Id Long feedRunId;
}
@Entity
@Access(AccessType.FIELD)
public static | Identifiable |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/health/node/selection/HealthNodeTaskParamsXContentTests.java | {
"start": 719,
"end": 1380
} | class ____ extends AbstractXContentTestCase<HealthNodeTaskParams> {
@Override
protected HealthNodeTaskParams createTestInstance() {
return new HealthNodeTaskParams();
}
@Override
protected HealthNodeTaskParams doParseInstance(XContentParser parser) throws IOException {
return HealthNodeTaskParams.PARSER.parse(parser, null);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
@Override
protected NamedXContentRegistry xContentRegistry() {
return new NamedXContentRegistry(HealthNodeTaskExecutor.getNamedXContentParsers());
}
}
| HealthNodeTaskParamsXContentTests |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/io/service/ServiceScanner.java | {
"start": 10547,
"end": 10785
} | class ____ in compiler
}
return typeNames;
}
}
/**
* Initializes and filters the entry.
*
* @param <S> The type
*/
@SuppressWarnings("java:S1948")
private static final | used |
java | apache__camel | components/camel-mail/src/test/java/org/apache/camel/component/mail/MailConsumerIdleMessageTest.java | {
"start": 1389,
"end": 2235
} | class ____ extends CamelTestSupport {
private static final MailboxUser james = Mailbox.getOrCreateUser("james", "secret");
@Test
public void testConsumeIdleMessages() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(2);
MockEndpoint.assertIsSatisfied(context);
assertNull(mock.getExchanges().get(0).getIn().getBody());
assertNull(mock.getExchanges().get(1).getIn().getBody());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(james.uriPrefix(Protocol.pop3) + "&initialDelay=100&delay=100&sendEmptyMessageWhenIdle=true")
.to("mock:result");
}
};
}
}
| MailConsumerIdleMessageTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/cache/NoCachingRegionFactoryTest.java | {
"start": 889,
"end": 1147
} | class ____ {
@Test
@JiraKey( value = "HHH-12508" )
public void testSessionFactoryOptionsConsistent(SessionFactoryScope scope) {
assertFalse( scope.getSessionFactory().getSessionFactoryOptions().isSecondLevelCacheEnabled() );
}
}
| NoCachingRegionFactoryTest |
java | apache__kafka | tools/src/main/java/org/apache/kafka/tools/LineMessageReader.java | {
"start": 2610,
"end": 10021
} | class ____ implements RecordReader {
private String topic;
private boolean parseKey;
private String keySeparator = "\t";
private boolean parseHeaders;
private String headersDelimiter = "\t";
private String headersSeparator = ",";
private String headersKeySeparator = ":";
private boolean ignoreError;
private int lineNumber;
private final boolean printPrompt = System.console() != null;
private Pattern headersSeparatorPattern;
private String nullMarker;
@Override
public void configure(Map<String, ?> props) {
topic = props.get("topic").toString();
if (props.containsKey("parse.key"))
parseKey = props.get("parse.key").toString().trim().equalsIgnoreCase("true");
if (props.containsKey("key.separator"))
keySeparator = props.get("key.separator").toString();
if (props.containsKey("parse.headers"))
parseHeaders = props.get("parse.headers").toString().trim().equalsIgnoreCase("true");
if (props.containsKey("headers.delimiter"))
headersDelimiter = props.get("headers.delimiter").toString();
if (props.containsKey("headers.separator"))
headersSeparator = props.get("headers.separator").toString();
headersSeparatorPattern = Pattern.compile(headersSeparator);
if (props.containsKey("headers.key.separator"))
headersKeySeparator = props.get("headers.key.separator").toString();
if (props.containsKey("ignore.error"))
ignoreError = props.get("ignore.error").toString().trim().equalsIgnoreCase("true");
if (headersDelimiter.equals(headersSeparator))
throw new KafkaException("headers.delimiter and headers.separator may not be equal");
if (headersDelimiter.equals(headersKeySeparator))
throw new KafkaException("headers.delimiter and headers.key.separator may not be equal");
if (headersSeparator.equals(headersKeySeparator))
throw new KafkaException("headers.separator and headers.key.separator may not be equal");
if (props.containsKey("null.marker"))
nullMarker = props.get("null.marker").toString();
if (keySeparator.equals(nullMarker))
throw new KafkaException("null.marker and key.separator may not be equal");
if (headersSeparator.equals(nullMarker))
throw new KafkaException("null.marker and headers.separator may not be equal");
if (headersDelimiter.equals(nullMarker))
throw new KafkaException("null.marker and headers.delimiter may not be equal");
if (headersKeySeparator.equals(nullMarker))
throw new KafkaException("null.marker and headers.key.separator may not be equal");
}
@Override
public Iterator<ProducerRecord<byte[], byte[]>> readRecords(InputStream inputStream) {
return new Iterator<ProducerRecord<byte[], byte[]>>() {
private final BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
private ProducerRecord<byte[], byte[]> current;
@Override
public boolean hasNext() {
if (current != null) {
return true;
} else {
lineNumber += 1;
if (printPrompt) {
System.out.print(">");
}
String line;
try {
line = reader.readLine();
} catch (IOException e) {
throw new KafkaException(e);
}
if (line == null) {
current = null;
} else {
String headers = parse(parseHeaders, line, 0, headersDelimiter, "headers delimiter");
int headerOffset = headers == null ? 0 : headers.length() + headersDelimiter.length();
String key = parse(parseKey, line, headerOffset, keySeparator, "key separator");
int keyOffset = key == null ? 0 : key.length() + keySeparator.length();
String value = line.substring(headerOffset + keyOffset);
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(
topic,
key != null && !key.equals(nullMarker) ? key.getBytes(StandardCharsets.UTF_8) : null,
value != null && !value.equals(nullMarker) ? value.getBytes(StandardCharsets.UTF_8) : null
);
if (headers != null && !headers.equals(nullMarker)) {
stream(splitHeaders(headers)).forEach(header -> record.headers().add(header.key(), header.value()));
}
current = record;
}
return current != null;
}
}
@Override
public ProducerRecord<byte[], byte[]> next() {
if (!hasNext()) {
throw new NoSuchElementException("no more record");
} else {
try {
return current;
} finally {
current = null;
}
}
}
};
}
private String parse(boolean enabled, String line, int startIndex, String demarcation, String demarcationName) {
if (!enabled) {
return null;
}
int index = line.indexOf(demarcation, startIndex);
if (index == -1) {
if (ignoreError) {
return null;
}
throw new KafkaException("No " + demarcationName + " found on line number " + lineNumber + ": '" + line + "'");
}
return line.substring(startIndex, index);
}
private Header[] splitHeaders(String headers) {
return stream(headersSeparatorPattern.split(headers))
.map(pair -> {
int i = pair.indexOf(headersKeySeparator);
if (i == -1) {
if (ignoreError) {
return new RecordHeader(pair, null);
}
throw new KafkaException("No header key separator found in pair '" + pair + "' on line number " + lineNumber);
}
String headerKey = pair.substring(0, i);
if (headerKey.equals(nullMarker)) {
throw new KafkaException("Header keys should not be equal to the null marker '" + nullMarker + "' as they can't be null");
}
String value = pair.substring(i + headersKeySeparator.length());
byte[] headerValue = value.equals(nullMarker) ? null : value.getBytes(StandardCharsets.UTF_8);
return new RecordHeader(headerKey, headerValue);
}).toArray(Header[]::new);
}
// Visible for testing
String keySeparator() {
return keySeparator;
}
// Visible for testing
boolean parseKey() {
return parseKey;
}
// Visible for testing
boolean parseHeaders() {
return parseHeaders;
}
}
| LineMessageReader |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/metamodel/mapping/ordering/ast/FkDomainPathContinuation.java | {
"start": 494,
"end": 1861
} | class ____ extends DomainPathContinuation {
private final Set<String> possiblePaths;
public FkDomainPathContinuation(
NavigablePath navigablePath,
DomainPath lhs,
ToOneAttributeMapping referencedModelPart) {
super( navigablePath, lhs, referencedModelPart );
this.possiblePaths = referencedModelPart.getTargetKeyPropertyNames();
}
public FkDomainPathContinuation(
NavigablePath navigablePath,
DomainPath lhs,
ModelPart referencedModelPart,
Set<String> possiblePaths) {
super( navigablePath, lhs, referencedModelPart );
this.possiblePaths = possiblePaths;
}
@Override
public SequencePart resolvePathPart(
String name,
String identifier,
boolean isTerminal,
TranslationContext translationContext) {
if ( !possiblePaths.contains( name ) ) {
throw new PathResolutionException( name );
}
final HashSet<String> furtherPaths = new HashSet<>();
for ( String possiblePath : possiblePaths ) {
if ( possiblePath.startsWith( name ) && possiblePath.length() > name.length()
&& possiblePath.charAt( name.length() ) == '.' ) {
furtherPaths.add( possiblePath.substring( name.length() + 2 ) );
}
}
return new FkDomainPathContinuation(
navigablePath.append( name ),
this,
( (ModelPartContainer) referencedModelPart ).findSubPart( name, null ),
furtherPaths
);
}
}
| FkDomainPathContinuation |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/buildextension/beans/SyntheticInterceptorTest.java | {
"start": 2528,
"end": 2950
} | class ____ implements InterceptorCreator {
@Override
public InterceptFunction create(SyntheticCreationalContext<Object> context) {
assertInterceptedBean(context);
return ic -> {
EVENTS.add(TestAroundInvoke.class.getName());
return Boolean.parseBoolean(ic.proceed().toString()) ? "ok" : "nok";
};
}
}
static | TestAroundInvoke |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/reactive/EnableWebFluxSecurityTests.java | {
"start": 14876,
"end": 15021
} | interface ____ {
@AliasFor(attribute = "expression", annotation = AuthenticationPrincipal.class)
String value() default "id";
}
| Property |
java | playframework__playframework | web/play-java-forms/src/test/java/play/data/Birthday.java | {
"start": 207,
"end": 643
} | class ____ {
@play.data.format.Formats.DateTime(pattern = "customFormats.date")
private Date date;
// No annotation
private Date alternativeDate;
public Date getDate() {
return this.date;
}
public void setDate(Date date) {
this.date = date;
}
public Date getAlternativeDate() {
return this.alternativeDate;
}
public void setAlternativeDate(Date date) {
this.alternativeDate = date;
}
}
| Birthday |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/ReturnMissingNullableTest.java | {
"start": 12324,
"end": 12790
} | class ____ {
@Nullable String[] getMessage(boolean b, String[] s) {
return b ? s : null;
}
}
""")
.addOutputLines(
"com/google/errorprone/bugpatterns/nullness/LiteralNullReturnTest.java",
"""
package com.google.errorprone.bugpatterns.nullness;
import org.checkerframework.checker.nullness.qual.Nullable;
public | LiteralNullReturnTest |
java | quarkusio__quarkus | extensions/jaxb/runtime/src/main/java/io/quarkus/jaxb/runtime/JaxbContextConfigRecorder.java | {
"start": 208,
"end": 749
} | class ____ {
private volatile static Set<Class<?>> classesToBeBound = new HashSet<>();
public void addClassesToBeBound(Collection<Class<?>> classes) {
this.classesToBeBound.addAll(classes);
}
public void reset() {
classesToBeBound.clear();
}
public static Set<Class<?>> getClassesToBeBound() {
return Collections.unmodifiableSet(classesToBeBound);
}
public static boolean isClassBound(Class<?> clazz) {
return classesToBeBound.contains(clazz);
}
}
| JaxbContextConfigRecorder |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageUtil.java | {
"start": 1460,
"end": 3351
} | class ____ {
public static final byte[] MAGIC_HEADER =
"HDFSIMG1".getBytes(StandardCharsets.UTF_8);
public static final int FILE_VERSION = 1;
public static boolean checkFileFormat(RandomAccessFile file)
throws IOException {
if (file.length() < Loader.MINIMUM_FILE_LENGTH)
return false;
byte[] magic = new byte[MAGIC_HEADER.length];
file.readFully(magic);
if (!Arrays.equals(MAGIC_HEADER, magic))
return false;
return true;
}
public static FileSummary loadSummary(RandomAccessFile file)
throws IOException {
final int FILE_LENGTH_FIELD_SIZE = 4;
long fileLength = file.length();
file.seek(fileLength - FILE_LENGTH_FIELD_SIZE);
int summaryLength = file.readInt();
if (summaryLength <= 0) {
throw new IOException("Negative length of the file");
}
file.seek(fileLength - FILE_LENGTH_FIELD_SIZE - summaryLength);
byte[] summaryBytes = new byte[summaryLength];
file.readFully(summaryBytes);
FileSummary summary = FileSummary
.parseDelimitedFrom(new ByteArrayInputStream(summaryBytes));
if (summary.getOndiskVersion() != FILE_VERSION) {
throw new IOException("Unsupported file version "
+ summary.getOndiskVersion());
}
if (!NameNodeLayoutVersion.supports(Feature.PROTOBUF_FORMAT,
summary.getLayoutVersion())) {
throw new IOException("Unsupported layout version "
+ summary.getLayoutVersion());
}
return summary;
}
public static InputStream wrapInputStreamForCompression(
Configuration conf, String codec, InputStream in) throws IOException {
if (codec.isEmpty())
return in;
FSImageCompression compression = FSImageCompression.createCompression(
conf, codec);
CompressionCodec imageCodec = compression.getImageCodec();
return imageCodec.createInputStream(in);
}
}
| FSImageUtil |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/configuration/internal/ClassesAuditingData.java | {
"start": 1212,
"end": 1378
} | class ____ auditing meta-data for all persistent classes during boot-time.
*
* @author Adam Warski (adam at warski dot org)
* @author Chris Cranford
*/
public | holding |
java | apache__flink | flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/table/stream/compact/CompactMessages.java | {
"start": 1123,
"end": 1626
} | class ____ all compaction messages.
*
* <p>The compaction operator graph is: TempFileWriter|parallel ---(InputFile&EndInputFile)--->
* CompactCoordinator|non-parallel
* ---(CompactionUnit&EndCompaction)--->CompactOperator|parallel---(PartitionCommitInfo)--->
* PartitionCommitter|non-parallel
*
* <p>Because the end message is a kind of barrier of record messages, they can only be transmitted
* in the way of full broadcast in the link from coordinator to compact operator.
*/
@Internal
public | for |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/monitor/jvm/GcNames.java | {
"start": 516,
"end": 2910
} | class ____ {
public static final String YOUNG = "young";
public static final String OLD = "old";
public static final String SURVIVOR = "survivor";
private GcNames() {}
/**
* Resolves the memory area name by the memory pool name provided by {@link java.lang.management.MemoryPoolMXBean#getName()}
*
* @param poolName the name of the memory pool from {@link java.lang.management.MemoryPoolMXBean}
* @param defaultName the name to return if the pool name does not match any known memory area
* @return memory area name corresponding to the pool name or {@code defaultName} if no match is found
*/
public static String getByMemoryPoolName(String poolName, String defaultName) {
if ("Eden Space".equals(poolName)
|| "PS Eden Space".equals(poolName)
|| "Par Eden Space".equals(poolName)
|| "G1 Eden Space".equals(poolName)) {
return YOUNG;
}
if ("Survivor Space".equals(poolName)
|| "PS Survivor Space".equals(poolName)
|| "Par Survivor Space".equals(poolName)
|| "G1 Survivor Space".equals(poolName)) {
return SURVIVOR;
}
if ("Tenured Gen".equals(poolName)
|| "PS Old Gen".equals(poolName)
|| "CMS Old Gen".equals(poolName)
|| "G1 Old Gen".equals(poolName)) {
return OLD;
}
return defaultName;
}
/**
* Resolves the GC type by the GC name provided by {@link java.lang.management.GarbageCollectorMXBean#getName()}
*
* @param gcName the name of the GC from {@link java.lang.management.GarbageCollectorMXBean}
* @param defaultName the name to return if the GC name does not match any known GC type
* @return GC type corresponding to the GC name or {@code defaultName} if no match is found
*/
public static String getByGcName(String gcName, String defaultName) {
if ("Copy".equals(gcName) || "PS Scavenge".equals(gcName) || "ParNew".equals(gcName) || "G1 Young Generation".equals(gcName)) {
return YOUNG;
}
if ("MarkSweepCompact".equals(gcName)
|| "PS MarkSweep".equals(gcName)
|| "ConcurrentMarkSweep".equals(gcName)
|| "G1 Old Generation".equals(gcName)) {
return OLD;
}
return defaultName;
}
}
| GcNames |
java | apache__camel | components/camel-ai/camel-tensorflow-serving/src/main/java/org/apache/camel/component/tensorflow/serving/TensorFlowServingEndpoint.java | {
"start": 1688,
"end": 3932
} | class ____ extends DefaultEndpoint {
@UriPath(enums = "model-status,model-metadata,classify,regress,predict", description = "The TensorFlow Serving API")
@Metadata(required = true)
private final String api;
@UriParam
private TensorFlowServingConfiguration configuration;
private ManagedChannel channel;
private ModelServiceGrpc.ModelServiceBlockingStub modelService;
private PredictionServiceGrpc.PredictionServiceBlockingStub predictionService;
public TensorFlowServingEndpoint(String uri, TensorFlowServingComponent component, String path,
TensorFlowServingConfiguration configuration) {
super(uri, component);
this.api = path;
this.configuration = configuration;
}
@Override
protected void doInit() throws Exception {
super.doInit();
ChannelCredentials credentials = configuration.getCredentials() != null
? configuration.getCredentials()
: InsecureChannelCredentials.create();
channel = Grpc.newChannelBuilder(configuration.getTarget(), credentials).build();
modelService = ModelServiceGrpc.newBlockingStub(channel);
predictionService = PredictionServiceGrpc.newBlockingStub(channel);
}
@Override
public void doStop() throws Exception {
super.doStop();
// Close the channel
channel.shutdown();
}
@Override
public Producer createProducer() {
return new TensorFlowServingProducer(this);
}
@Override
public Consumer createConsumer(Processor processor) {
throw new UnsupportedOperationException("Consumer not supported");
}
public String getApi() {
return api;
}
public TensorFlowServingConfiguration getConfiguration() {
return configuration;
}
public void setConfiguration(TensorFlowServingConfiguration configuration) {
this.configuration = configuration;
}
public ModelServiceGrpc.ModelServiceBlockingStub getModelService() {
return modelService;
}
public PredictionServiceGrpc.PredictionServiceBlockingStub getPredictionService() {
return predictionService;
}
}
| TensorFlowServingEndpoint |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jdbc/internal/AggressiveReleaseTest.java | {
"start": 1343,
"end": 10808
} | class ____ extends BaseSessionFactoryFunctionalTest {
private PreparedStatementSpyConnectionProvider connectionProvider = new PreparedStatementSpyConnectionProvider(
true
);
@Override
protected void applySettings(StandardServiceRegistryBuilder builer) {
builer.applySetting(
AvailableSettings.CONNECTION_PROVIDER,
connectionProvider
);
builer.applySetting(
AvailableSettings.CONNECTION_HANDLING,
PhysicalConnectionHandlingMode.DELAYED_ACQUISITION_AND_RELEASE_AFTER_STATEMENT
);
}
private BasicTestingJdbcServiceImpl services = new BasicTestingJdbcServiceImpl();
@BeforeEach
protected void prepareTest() throws Exception {
services.prepare( true );
Connection connection = null;
Statement stmnt = null;
try {
connection = services.getBootstrapJdbcConnectionAccess().obtainConnection();
stmnt = connection.createStatement();
stmnt.execute( "drop table SANDBOX_JDBC_TST if exists" );
stmnt.execute( "create table SANDBOX_JDBC_TST ( ID integer, NAME varchar(100) )" );
}
finally {
if ( stmnt != null ) {
try {
stmnt.close();
}
catch (SQLException ignore) {
}
}
if ( connection != null ) {
try {
services.getBootstrapJdbcConnectionAccess().releaseConnection( connection );
}
catch (SQLException ignore) {
}
}
}
}
@AfterEach
protected void cleanupTest() throws Exception {
Connection connection = null;
Statement stmnt = null;
try {
connection = services.getBootstrapJdbcConnectionAccess().obtainConnection();
stmnt = connection.createStatement();
stmnt.execute( "drop table SANDBOX_JDBC_TST if exists" );
}
finally {
if ( stmnt != null ) {
try {
stmnt.close();
}
catch (SQLException ignore) {
}
}
if ( connection != null ) {
try {
services.getBootstrapJdbcConnectionAccess().releaseConnection( connection );
}
catch (SQLException ignore) {
}
}
}
services.release();
}
@Test
public void testBasicRelease() {
ResourceRegistry registry = sessionFactoryScope().fromSession(
session -> {
connectionProvider.clear();
JdbcCoordinatorImpl jdbcCoord = (JdbcCoordinatorImpl) session.getJdbcCoordinator();
ResourceRegistry resourceRegistry = jdbcCoord.getLogicalConnection().getResourceRegistry();
try {
String sql = "insert into SANDBOX_JDBC_TST( ID, NAME ) values ( ?, ? )";
PreparedStatement ps = jdbcCoord.getStatementPreparer().prepareStatement(
sql );
ps.setLong( 1, 1 );
ps.setString( 2, "name" );
jdbcCoord.getResultSetReturn().execute( ps, sql );
assertTrue( jdbcCoord.getLogicalConnection().getResourceRegistry().hasRegisteredResources() );
assertEquals( 1, connectionProvider.getAcquiredConnections().size() );
assertEquals( 0, connectionProvider.getReleasedConnections().size() );
resourceRegistry.release( ps );
jdbcCoord.afterStatementExecution();
assertFalse( resourceRegistry.hasRegisteredResources() );
assertEquals( 0, connectionProvider.getAcquiredConnections().size() );
assertEquals( 1, connectionProvider.getReleasedConnections().size() );
}
catch (SQLException sqle) {
fail( "incorrect exception type : sqlexception" );
}
finally {
jdbcCoord.close();
}
return resourceRegistry;
}
);
assertFalse( registry.hasRegisteredResources() );
}
@Test
public void testReleaseCircumventedByHeldResources() {
ResourceRegistry registry = sessionFactoryScope().fromSession(
session -> {
connectionProvider.clear();
JdbcCoordinatorImpl jdbcCoord = (JdbcCoordinatorImpl) session.getJdbcCoordinator();
ResourceRegistry resourceRegistry = jdbcCoord.getLogicalConnection().getResourceRegistry();
try {
String sql = "insert into SANDBOX_JDBC_TST( ID, NAME ) values ( ?, ? )";
PreparedStatement ps = jdbcCoord.getStatementPreparer().prepareStatement(
sql );
ps.setLong( 1, 1 );
ps.setString( 2, "name" );
jdbcCoord.getResultSetReturn().execute( ps , sql);
assertTrue( resourceRegistry.hasRegisteredResources() );
assertEquals( 1, connectionProvider.getAcquiredConnections().size() );
assertEquals( 0, connectionProvider.getReleasedConnections().size() );
resourceRegistry.release( ps );
jdbcCoord.afterStatementExecution();
assertFalse( resourceRegistry.hasRegisteredResources() );
assertEquals( 0, connectionProvider.getAcquiredConnections().size() );
assertEquals( 1, connectionProvider.getReleasedConnections().size() );
// open a result set and hold it open...
sql = "select * from SANDBOX_JDBC_TST";
ps = jdbcCoord.getStatementPreparer().prepareStatement( sql );
jdbcCoord.getResultSetReturn().extract( ps, sql );
assertTrue( resourceRegistry.hasRegisteredResources() );
assertEquals( 1, connectionProvider.getAcquiredConnections().size() );
assertEquals( 1, connectionProvider.getReleasedConnections().size() );
// open a second result set
PreparedStatement ps2 = jdbcCoord.getStatementPreparer().prepareStatement( sql );
jdbcCoord.getResultSetReturn().execute( ps, sql );
assertTrue( resourceRegistry.hasRegisteredResources() );
assertEquals( 1, connectionProvider.getAcquiredConnections().size() );
assertEquals( 1, connectionProvider.getReleasedConnections().size() );
// and close it...
resourceRegistry.release( ps2 );
jdbcCoord.afterStatementExecution();
// the release should be circumvented...
assertTrue( resourceRegistry.hasRegisteredResources() );
assertEquals( 1, connectionProvider.getAcquiredConnections().size() );
assertEquals( 1, connectionProvider.getReleasedConnections().size() );
// let the close of the logical connection below release all resources (hopefully)...
}
catch (SQLException sqle) {
fail( "incorrect exception type : sqlexception" );
}
finally {
jdbcCoord.close();
}
return resourceRegistry;
} );
assertFalse( registry.hasRegisteredResources() );
assertEquals( 0, connectionProvider.getAcquiredConnections().size() );
assertEquals( 2, connectionProvider.getReleasedConnections().size() );
}
@Test
public void testReleaseCircumventedManually() {
ResourceRegistry registry = sessionFactoryScope().fromSession(
session -> {
connectionProvider.clear();
JdbcCoordinatorImpl jdbcCoord = (JdbcCoordinatorImpl) session.getJdbcCoordinator();
ResourceRegistry resourceRegistry = jdbcCoord.getLogicalConnection().getResourceRegistry();
try {
String sql = "insert into SANDBOX_JDBC_TST( ID, NAME ) values ( ?, ? )";
PreparedStatement ps = jdbcCoord.getStatementPreparer().prepareStatement(
sql );
ps.setLong( 1, 1 );
ps.setString( 2, "name" );
jdbcCoord.getResultSetReturn().execute( ps , sql);
assertTrue( resourceRegistry.hasRegisteredResources() );
assertEquals( 1, connectionProvider.getAcquiredConnections().size() );
assertEquals( 0, connectionProvider.getReleasedConnections().size() );
resourceRegistry.release( ps );
jdbcCoord.afterStatementExecution();
assertFalse( resourceRegistry.hasRegisteredResources() );
assertEquals( 0, connectionProvider.getAcquiredConnections().size() );
assertEquals( 1, connectionProvider.getReleasedConnections().size() );
// disable releases...
jdbcCoord.disableReleases();
// open a result set...
sql = "select * from SANDBOX_JDBC_TST";
ps = jdbcCoord.getStatementPreparer().prepareStatement( sql );
jdbcCoord.getResultSetReturn().extract( ps, sql );
assertTrue( resourceRegistry.hasRegisteredResources() );
assertEquals( 1, connectionProvider.getAcquiredConnections().size() );
assertEquals( 1, connectionProvider.getReleasedConnections().size() );
// and close it...
resourceRegistry.release( ps );
jdbcCoord.afterStatementExecution();
// the release should be circumvented...
assertFalse( resourceRegistry.hasRegisteredResources() );
assertEquals( 1, connectionProvider.getAcquiredConnections().size() );
assertEquals( 1, connectionProvider.getReleasedConnections().size() );
// let the close of the logical connection below release all resources (hopefully)...
}
catch (SQLException sqle) {
fail( "incorrect exception type : sqlexception" );
}
finally {
jdbcCoord.close();
}
return resourceRegistry;
} );
assertFalse( registry.hasRegisteredResources() );
assertEquals( 0, connectionProvider.getAcquiredConnections().size() );
assertEquals( 2, connectionProvider.getReleasedConnections().size() );
}
@Test
@Jira("https://hibernate.atlassian.net/browse/HHH-19477")
public void testHql() {
sessionFactoryScope().inTransaction( session -> {
connectionProvider.clear();
JdbcCoordinatorImpl jdbcCoord = (JdbcCoordinatorImpl) session.getJdbcCoordinator();
ResourceRegistry resourceRegistry = jdbcCoord.getLogicalConnection().getResourceRegistry();
session.createSelectionQuery( "select 1" ).uniqueResult();
assertFalse( resourceRegistry.hasRegisteredResources() );
assertEquals( 0, connectionProvider.getAcquiredConnections().size() );
assertEquals( 1, connectionProvider.getReleasedConnections().size() );
} );
}
}
| AggressiveReleaseTest |
java | spring-projects__spring-boot | cli/spring-boot-cli/src/main/java/org/springframework/boot/cli/command/shell/ExitCommand.java | {
"start": 938,
"end": 1157
} | class ____ extends AbstractCommand {
ExitCommand() {
super("exit", "Quit the embedded shell");
}
@Override
public ExitStatus run(String... args) throws Exception {
throw new ShellExitException();
}
}
| ExitCommand |
java | quarkusio__quarkus | extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsDefaultDisabledTest.java | {
"start": 767,
"end": 1743
} | class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClass(DefaultEntity.class)
.addClass(User.class)
.addClass(Plane.class)
.addAsResource("application-multiple-persistence-units-default-disabled.properties",
"application.properties"));
@Inject
@PersistenceUnit("users")
EntityManager usersEntityManager;
@Inject
@PersistenceUnit("inventory")
EntityManager inventoryEntityManager;
@Test
@Transactional
public void defaultEntityManagerNotCreated() {
assertNotNull(usersEntityManager);
assertNotNull(inventoryEntityManager);
EntityManager defaultEntityManager = Arc.container().instance(EntityManager.class).get();
assertThat(defaultEntityManager).isNull();
}
}
| MultiplePersistenceUnitsDefaultDisabledTest |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/format/MapEntryFormatTest.java | {
"start": 1094,
"end": 1672
} | class ____ implements Map.Entry<String,String> {
protected String key, value;
protected MapEntryAsObject() { }
public MapEntryAsObject(String k, String v) {
key = k;
value = v;
}
@Override
public String getKey() {
return key;
}
@Override
public String getValue() {
return value;
}
@Override
public String setValue(String v) {
value = v;
return v; // wrong, whatever
}
}
static | MapEntryAsObject |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ComputeIfAbsentAmbiguousReferenceTest.java | {
"start": 2957,
"end": 3263
} | class ____ {
InnerClass1(long l) {}
}
private void doWorkInnerClass2(Map<Integer, InnerClass2> map) {
map.computeIfAbsent(0, InnerClass2::new);
}
/** Class with two 1-argument constructors. * */
| InnerClass1 |
java | quarkusio__quarkus | extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/typesafe/InterfaceValidationSuccessTest.java | {
"start": 2386,
"end": 2456
} | interface ____ extends Wrapper<Integer> {
}
public | NumericWrapper |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/it/vertx/CustomExecutorService.java | {
"start": 697,
"end": 1342
} | class ____ extends ThreadPoolExecutor {
public CustomExecutorService(ThreadFactory threadFactory, int corePoolSize, int maximumPoolSize) {
super(corePoolSize, maximumPoolSize, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(), threadFactory);
}
public static final ThreadLocal<Boolean> executing = ThreadLocal.withInitial(() -> false);
@Override
protected void beforeExecute(Thread t, Runnable r) {
executing.set(true);
}
@Override
protected void afterExecute(Runnable r, Throwable t) {
executing.set(false);
}
@Override
protected void terminated() {
NUM.decrementAndGet();
}
}
| CustomExecutorService |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java | {
"start": 1832,
"end": 11201
} | class ____ extends InputStream implements ReadableByteChannel {
public static final Logger LOG =
LoggerFactory.getLogger(SaslInputStream.class);
private final DataInputStream inStream;
/** Should we wrap the communication channel? */
private final boolean useWrap;
/*
* data read from the underlying input stream before being processed by SASL
*/
private byte[] saslToken;
private final SaslClient saslClient;
private final SaslServer saslServer;
private byte[] lengthBuf = new byte[4];
/*
* buffer holding data that have been processed by SASL, but have not been
* read out
*/
private byte[] obuffer;
// position of the next "new" byte
private int ostart = 0;
// position of the last "new" byte
private int ofinish = 0;
// whether or not this stream is open
private boolean isOpen = true;
private static int unsignedBytesToInt(byte[] buf) {
if (buf.length != 4) {
throw new IllegalArgumentException(
"Cannot handle byte array other than 4 bytes");
}
int result = 0;
for (int i = 0; i < 4; i++) {
result <<= 8;
result |= ((int) buf[i] & 0xff);
}
return result;
}
/**
* Read more data and get them processed <br>
* Entry condition: ostart = ofinish <br>
* Exit condition: ostart <= ofinish <br>
*
* return (ofinish-ostart) (we have this many bytes for you), 0 (no data now,
* but could have more later), or -1 (absolutely no more data)
*/
private int readMoreData() throws IOException {
try {
inStream.readFully(lengthBuf);
int length = unsignedBytesToInt(lengthBuf);
if (LOG.isDebugEnabled())
LOG.debug("Actual length is " + length);
saslToken = new byte[length];
inStream.readFully(saslToken);
} catch (EOFException e) {
return -1;
}
try {
if (saslServer != null) { // using saslServer
obuffer = saslServer.unwrap(saslToken, 0, saslToken.length);
} else { // using saslClient
obuffer = saslClient.unwrap(saslToken, 0, saslToken.length);
}
} catch (SaslException se) {
try {
disposeSasl();
} catch (SaslException ignored) {
}
throw se;
}
ostart = 0;
if (obuffer == null)
ofinish = 0;
else
ofinish = obuffer.length;
return ofinish;
}
/**
* Disposes of any system resources or security-sensitive information Sasl
* might be using.
*
* @exception SaslException
* if a SASL error occurs.
*/
private void disposeSasl() throws SaslException {
if (saslClient != null) {
saslClient.dispose();
}
if (saslServer != null) {
saslServer.dispose();
}
}
/**
* Constructs a SASLInputStream from an InputStream and a SaslServer <br>
* Note: if the specified InputStream or SaslServer is null, a
* NullPointerException may be thrown later when they are used.
*
* @param inStream
* the InputStream to be processed
* @param saslServer
* an initialized SaslServer object
*/
public SaslInputStream(InputStream inStream, SaslServer saslServer) {
this.inStream = new DataInputStream(inStream);
this.saslServer = saslServer;
this.saslClient = null;
String qop = (String) saslServer.getNegotiatedProperty(Sasl.QOP);
this.useWrap = qop != null && !"auth".equalsIgnoreCase(qop);
}
/**
* Constructs a SASLInputStream from an InputStream and a SaslClient <br>
* Note: if the specified InputStream or SaslClient is null, a
* NullPointerException may be thrown later when they are used.
*
* @param inStream
* the InputStream to be processed
* @param saslClient
* an initialized SaslClient object
*/
public SaslInputStream(InputStream inStream, SaslClient saslClient) {
this.inStream = new DataInputStream(inStream);
this.saslServer = null;
this.saslClient = saslClient;
String qop = (String) saslClient.getNegotiatedProperty(Sasl.QOP);
this.useWrap = qop != null && !"auth".equalsIgnoreCase(qop);
}
/**
* Reads the next byte of data from this input stream. The value byte is
* returned as an <code>int</code> in the range <code>0</code> to
* <code>255</code>. If no byte is available because the end of the stream has
* been reached, the value <code>-1</code> is returned. This method blocks
* until input data is available, the end of the stream is detected, or an
* exception is thrown.
* <p>
*
* @return the next byte of data, or <code>-1</code> if the end of the stream
* is reached.
* @exception IOException
* if an I/O error occurs.
*/
@Override
public int read() throws IOException {
if (!useWrap) {
return inStream.read();
}
if (ostart >= ofinish) {
// we loop for new data as we are blocking
int i = 0;
while (i == 0)
i = readMoreData();
if (i == -1)
return -1;
}
return ((int) obuffer[ostart++] & 0xff);
}
/**
* Reads up to <code>b.length</code> bytes of data from this input stream into
* an array of bytes.
* <p>
* The <code>read</code> method of <code>InputStream</code> calls the
* <code>read</code> method of three arguments with the arguments
* <code>b</code>, <code>0</code>, and <code>b.length</code>.
*
* @param b
* the buffer into which the data is read.
* @return the total number of bytes read into the buffer, or <code>-1</code>
* is there is no more data because the end of the stream has been
* reached.
* @exception IOException
* if an I/O error occurs.
*/
@Override
public int read(byte[] b) throws IOException {
return read(b, 0, b.length);
}
/**
* Reads up to <code>len</code> bytes of data from this input stream into an
* array of bytes. This method blocks until some input is available. If the
* first argument is <code>null,</code> up to <code>len</code> bytes are read
* and discarded.
*
* @param b
* the buffer into which the data is read.
* @param off
* the start offset of the data.
* @param len
* the maximum number of bytes read.
* @return the total number of bytes read into the buffer, or <code>-1</code>
* if there is no more data because the end of the stream has been
* reached.
* @exception IOException
* if an I/O error occurs.
*/
@Override
public int read(byte[] b, int off, int len) throws IOException {
if (len == 0) {
return 0;
}
if (!useWrap) {
return inStream.read(b, off, len);
}
if (ostart >= ofinish) {
// we loop for new data as we are blocking
int i = 0;
while (i == 0)
i = readMoreData();
if (i == -1)
return -1;
}
if (len <= 0) {
return 0;
}
int available = ofinish - ostart;
if (len < available)
available = len;
if (b != null) {
System.arraycopy(obuffer, ostart, b, off, available);
}
ostart = ostart + available;
return available;
}
/**
* Skips <code>n</code> bytes of input from the bytes that can be read from
* this input stream without blocking.
*
* <p>
* Fewer bytes than requested might be skipped. The actual number of bytes
* skipped is equal to <code>n</code> or the result of a call to
* {@link #available()}, whichever is smaller. If
* <code>n</code> is less than zero, no bytes are skipped.
*
* <p>
* The actual number of bytes skipped is returned.
*
* @param n
* the number of bytes to be skipped.
* @return the actual number of bytes skipped.
* @exception IOException
* if an I/O error occurs.
*/
@Override
public long skip(long n) throws IOException {
if (!useWrap) {
return inStream.skip(n);
}
int available = ofinish - ostart;
if (n > available) {
n = available;
}
if (n < 0) {
return 0;
}
ostart += n;
return n;
}
/**
* Returns the number of bytes that can be read from this input stream without
* blocking. The <code>available</code> method of <code>InputStream</code>
* returns <code>0</code>. This method <B>should</B> be overridden by
* subclasses.
*
* @return the number of bytes that can be read from this input stream without
* blocking.
* @exception IOException
* if an I/O error occurs.
*/
@Override
public int available() throws IOException {
if (!useWrap) {
return inStream.available();
}
return (ofinish - ostart);
}
/**
* Closes this input stream and releases any system resources associated with
* the stream.
* <p>
* The <code>close</code> method of <code>SASLInputStream</code> calls the
* <code>close</code> method of its underlying input stream.
*
* @exception IOException
* if an I/O error occurs.
*/
@Override
public void close() throws IOException {
disposeSasl();
ostart = 0;
ofinish = 0;
inStream.close();
isOpen = false;
}
/**
* Tests if this input stream supports the <code>mark</code> and
* <code>reset</code> methods, which it does not.
*
* @return <code>false</code>, since this | SaslInputStream |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterRetryCache.java | {
"start": 1948,
"end": 6887
} | class ____ {
/** Federated HDFS cluster. */
private MiniRouterDFSCluster cluster;
@BeforeEach
public void setup() throws Exception {
UserGroupInformation routerUser = UserGroupInformation.getLoginUser();
Configuration conf = new Configuration();
String adminUser = routerUser.getUserName();
conf.set("hadoop.proxyuser." + adminUser + ".hosts", "*");
conf.set("hadoop.proxyuser." + adminUser + ".groups", "*");
conf.set("hadoop.proxyuser.fake_joe.hosts", "*");
conf.set("hadoop.proxyuser.fake_joe.groups", "*");
conf.set(DFS_NAMENODE_IP_PROXY_USERS, routerUser.getShortUserName());
cluster = new MiniRouterDFSCluster(true, 1, conf);
cluster.addNamenodeOverrides(conf);
// Start NNs and DNs and wait until ready
cluster.startCluster();
// Start routers with only an RPC service
cluster.startRouters();
// Register and verify all NNs with all routers
cluster.registerNamenodes();
cluster.waitNamenodeRegistration();
// Setup the mount table
cluster.installMockLocations();
// Making one Namenodes active per nameservice
if (cluster.isHighAvailability()) {
for (String ns : cluster.getNameservices()) {
cluster.switchToActive(ns, NAMENODES[0]);
cluster.switchToStandby(ns, NAMENODES[1]);
}
}
cluster.waitActiveNamespaces();
}
@AfterEach
public void teardown() throws IOException {
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
}
@Test
public void testRetryCacheWithOneLevelProxyUser() throws Exception {
internalTestRetryCache(false);
}
@Test
public void testRetryCacheWithTwoLevelProxyUser() throws Exception {
internalTestRetryCache(true);
}
/**
* Test RetryCache through RBF with proxyUser and non-ProxyUser respectively.
*
* 1. Start cluster with current user.
* 2. Create one test directory by the admin user.
* 3. Create one Router FileSystem with one mocked user, one proxyUser or non-ProxyUser.
* 4. Try to create one test directory by the router fileSystem.
* 5. Try to rename the new test directory to one test destination directory
* 6. Then failover the active to the standby
* 7. Try to rename the source directory to the destination directory again with the same callId
* 8. Try to
*/
private void internalTestRetryCache(boolean twoLevelProxyUGI) throws Exception {
RetryInvocationHandler.SET_CALL_ID_FOR_TEST.set(false);
FileSystem routerFS = cluster.getRandomRouter().getFileSystem();
Path testDir = new Path("/target-ns0/testdir");
routerFS.mkdirs(testDir);
routerFS.setPermission(testDir, FsPermission.getDefault());
// Run as fake joe to authorize the test
UserGroupInformation joe = UserGroupInformation.createUserForTesting("fake_joe",
new String[] {"fake_group"});
if (twoLevelProxyUGI) {
joe = UserGroupInformation.createProxyUser("fake_proxy_joe", joe);
}
FileSystem joeFS = joe.doAs((PrivilegedExceptionAction<FileSystem>) () ->
FileSystem.newInstance(routerFS.getUri(), routerFS.getConf()));
Path renameSrc = new Path(testDir, "renameSrc");
Path renameDst = new Path(testDir, "renameDst");
joeFS.mkdirs(renameSrc);
assertEquals(HAServiceProtocol.HAServiceState.ACTIVE,
cluster.getCluster().getNamesystem(0).getState());
int callId = Client.nextCallId();
Client.setCallIdAndRetryCount(callId, 0, null);
assertTrue(joeFS.rename(renameSrc, renameDst));
Client.setCallIdAndRetryCount(callId, 0, null);
assertTrue(joeFS.rename(renameSrc, renameDst));
String ns0 = cluster.getNameservices().get(0);
cluster.switchToStandby(ns0, NAMENODES[0]);
cluster.switchToActive(ns0, NAMENODES[1]);
assertEquals(HAServiceProtocol.HAServiceState.ACTIVE,
cluster.getCluster().getNamesystem(1).getState());
Client.setCallIdAndRetryCount(callId, 0, null);
assertTrue(joeFS.rename(renameSrc, renameDst));
FileStatus fileStatus = joeFS.getFileStatus(renameDst);
if (twoLevelProxyUGI) {
assertEquals("fake_proxy_joe", fileStatus.getOwner());
} else {
assertEquals("fake_joe", fileStatus.getOwner());
}
joeFS.delete(renameDst, true);
}
@Test
public void testParseSpecialValue() {
String mockContent = "mockContent,clientIp:127.0.0.1," +
"clientCallId:12345,clientId:mockClientId";
String clientIp = NameNode.parseSpecialValue(mockContent, "clientIp:");
assertEquals("127.0.0.1", clientIp);
String clientCallId = NameNode.parseSpecialValue(
mockContent, "clientCallId:");
assertEquals("12345", clientCallId);
String clientId = NameNode.parseSpecialValue(mockContent, "clientId:");
assertEquals("mockClientId", clientId);
String clientRetryNum = NameNode.parseSpecialValue(
mockContent, "clientRetryNum:");
assertNull(clientRetryNum);
}
}
| TestRouterRetryCache |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ChunkBytesRefEvaluator.java | {
"start": 4286,
"end": 5076
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory str;
private final ChunkingSettings chunkingSettings;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str,
ChunkingSettings chunkingSettings) {
this.source = source;
this.str = str;
this.chunkingSettings = chunkingSettings;
}
@Override
public ChunkBytesRefEvaluator get(DriverContext context) {
return new ChunkBytesRefEvaluator(source, str.get(context), chunkingSettings, context);
}
@Override
public String toString() {
return "ChunkBytesRefEvaluator[" + "str=" + str + ", chunkingSettings=" + chunkingSettings + "]";
}
}
}
| Factory |
java | netty__netty | common/src/main/java/io/netty/util/concurrent/ImmediateEventExecutor.java | {
"start": 1325,
"end": 4504
} | class ____ extends AbstractEventExecutor {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(ImmediateEventExecutor.class);
public static final ImmediateEventExecutor INSTANCE = new ImmediateEventExecutor();
/**
* A Runnable will be queued if we are executing a Runnable. This is to prevent a {@link StackOverflowError}.
*/
private static final FastThreadLocal<Queue<Runnable>> DELAYED_RUNNABLES = new FastThreadLocal<Queue<Runnable>>() {
@Override
protected Queue<Runnable> initialValue() throws Exception {
return new ArrayDeque<Runnable>();
}
};
/**
* Set to {@code true} if we are executing a runnable.
*/
private static final FastThreadLocal<Boolean> RUNNING = new FastThreadLocal<Boolean>() {
@Override
protected Boolean initialValue() throws Exception {
return false;
}
};
private final Future<?> terminationFuture = new FailedFuture<Object>(
GlobalEventExecutor.INSTANCE, new UnsupportedOperationException());
private ImmediateEventExecutor() { }
@Override
public boolean inEventLoop() {
return true;
}
@Override
public boolean inEventLoop(Thread thread) {
return true;
}
@Override
public Future<?> shutdownGracefully(long quietPeriod, long timeout, TimeUnit unit) {
return terminationFuture();
}
@Override
public Future<?> terminationFuture() {
return terminationFuture;
}
@Override
@Deprecated
public void shutdown() { }
@Override
public boolean isShuttingDown() {
return false;
}
@Override
public boolean isShutdown() {
return false;
}
@Override
public boolean isTerminated() {
return false;
}
@Override
public boolean awaitTermination(long timeout, TimeUnit unit) {
return false;
}
@Override
public void execute(Runnable command) {
ObjectUtil.checkNotNull(command, "command");
if (!RUNNING.get()) {
RUNNING.set(true);
try {
command.run();
} catch (Throwable cause) {
logger.info("Throwable caught while executing Runnable {}", command, cause);
} finally {
Queue<Runnable> delayedRunnables = DELAYED_RUNNABLES.get();
Runnable runnable;
while ((runnable = delayedRunnables.poll()) != null) {
try {
runnable.run();
} catch (Throwable cause) {
logger.info("Throwable caught while executing Runnable {}", runnable, cause);
}
}
RUNNING.set(false);
}
} else {
DELAYED_RUNNABLES.get().add(command);
}
}
@Override
public <V> Promise<V> newPromise() {
return new ImmediatePromise<V>(this);
}
@Override
public <V> ProgressivePromise<V> newProgressivePromise() {
return new ImmediateProgressivePromise<V>(this);
}
static | ImmediateEventExecutor |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java | {
"start": 5263,
"end": 6400
} | class ____ extends ActionResponse implements ToXContentObject {
private final ModelConfigurations model;
public Response(ModelConfigurations model) {
this.model = model;
}
public Response(StreamInput in) throws IOException {
model = new ModelConfigurations(in);
}
public ModelConfigurations getModel() {
return model;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
model.writeTo(out);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return model.toFilteredXContent(builder, params);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Response response = (Response) o;
return Objects.equals(model, response.model);
}
@Override
public int hashCode() {
return Objects.hash(model);
}
}
}
| Response |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptivebatch/BlockingInputInfo.java | {
"start": 1362,
"end": 4850
} | class ____ implements BlockingResultInfo {
/** The original blocking result information. */
private final BlockingResultInfo blockingResultInfo;
/** The type number of the input for co-tasks. */
private final int inputTypeNumber;
/**
* If true, means that there are relationships between multiple inputs, if the records
* corresponding to the same key from one input is split, the corresponding key records from the
* other inputs must be duplicated (meaning that it must be sent to the downstream nodes where
* the split data is sent).
*/
private final boolean interInputsKeysCorrelated;
/**
* If true, means that records with the same key are correlated and must be sent to the same
* downstream task to be processed together.
*/
private final boolean intraInputKeyCorrelated;
public BlockingInputInfo(
BlockingResultInfo blockingResultInfo,
int inputTypeNumber,
boolean interInputsKeysCorrelated,
boolean intraInputKeyCorrelated) {
this.blockingResultInfo = checkNotNull(blockingResultInfo);
this.inputTypeNumber = inputTypeNumber;
this.interInputsKeysCorrelated = interInputsKeysCorrelated;
this.intraInputKeyCorrelated = intraInputKeyCorrelated;
}
public int getInputTypeNumber() {
return inputTypeNumber;
}
public boolean isIntraInputKeyCorrelated() {
return intraInputKeyCorrelated;
}
public boolean areInterInputsKeysCorrelated() {
return interInputsKeysCorrelated;
}
public List<Long> getAggregatedSubpartitionBytes() {
checkState(blockingResultInfo instanceof AllToAllBlockingResultInfo);
return ((AllToAllBlockingResultInfo) blockingResultInfo).getAggregatedSubpartitionBytes();
}
@Override
public boolean isBroadcast() {
return blockingResultInfo.isBroadcast();
}
@Override
public boolean isPointwise() {
return blockingResultInfo.isPointwise();
}
@Override
public int getNumPartitions() {
return blockingResultInfo.getNumPartitions();
}
@Override
public int getNumSubpartitions(int partitionIndex) {
return blockingResultInfo.getNumSubpartitions(partitionIndex);
}
@Override
public long getNumBytesProduced() {
return blockingResultInfo.getNumBytesProduced();
}
@Override
public long getNumBytesProduced(
IndexRange partitionIndexRange, IndexRange subpartitionIndexRange) {
return blockingResultInfo.getNumBytesProduced(partitionIndexRange, subpartitionIndexRange);
}
@Override
public IntermediateDataSetID getResultId() {
return blockingResultInfo.getResultId();
}
@Override
public boolean isSingleSubpartitionContainsAllData() {
return blockingResultInfo.isSingleSubpartitionContainsAllData();
}
@Override
public Map<Integer, long[]> getSubpartitionBytesByPartitionIndex() {
return blockingResultInfo.getSubpartitionBytesByPartitionIndex();
}
@Override
public void recordPartitionInfo(int partitionIndex, ResultPartitionBytes partitionBytes) {
throw new UnsupportedOperationException("Not allowed to modify read-only view.");
}
@Override
public void resetPartitionInfo(int partitionIndex) {
throw new UnsupportedOperationException("Not allowed to modify read-only view.");
}
}
| BlockingInputInfo |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/MoreThanOneQualifierTest.java | {
"start": 1569,
"end": 1827
} | class ____ which the class, a constructor, a field, a method, and a method parameter each have
* two com.google.inject.BindingAnnotation annotations.
*/
// BUG: Diagnostic contains: remove
@Foo1
// BUG: Diagnostic contains: remove
@Foo2
public | in |
java | apache__flink | flink-table/flink-sql-gateway-api/src/main/java/org/apache/flink/table/gateway/api/results/FunctionInfo.java | {
"start": 1588,
"end": 2833
} | class ____ {
/** Identifier of the function. */
private final FunctionIdentifier identifier;
/** Kind of the function. If the value is null, it means kind of the function is unresolved. */
private final @Nullable FunctionKind kind;
public FunctionInfo(FunctionIdentifier identifier) {
this(identifier, null);
}
public FunctionInfo(FunctionIdentifier identifier, @Nullable FunctionKind kind) {
this.identifier = identifier;
this.kind = kind;
}
public FunctionIdentifier getIdentifier() {
return identifier;
}
public Optional<FunctionKind> getKind() {
return Optional.ofNullable(kind);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof FunctionInfo)) {
return false;
}
FunctionInfo that = (FunctionInfo) o;
return Objects.equals(identifier, that.identifier) && kind == that.kind;
}
@Override
public int hashCode() {
return Objects.hash(identifier, kind);
}
@Override
public String toString() {
return "FunctionInfo{identifier=" + identifier + ", kind=" + kind + '}';
}
}
| FunctionInfo |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/cluster/stats/CCSUsage.java | {
"start": 1737,
"end": 2056
} | class ____ {
private final long took;
private final Result status;
private final Set<String> features;
private final int remotesCount;
private final String client;
private final Set<String> skippedRemotes;
private final Map<String, PerClusterUsage> perClusterUsage;
public static | CCSUsage |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/execution/BuildResumptionDataRepository.java | {
"start": 1167,
"end": 2380
} | interface ____ {
/**
* Persists any data needed to resume the build at a later point in time, using a new Maven invocation. This method
* may also decide it is not needed or meaningful to persist such data, and return <code>false</code> to indicate
* so.
*
* @param rootProject The root project that is being built.
* @param buildResumptionData Information needed to resume the build.
* @throws BuildResumptionPersistenceException When an error occurs while persisting data.
*/
void persistResumptionData(MavenProject rootProject, BuildResumptionData buildResumptionData)
throws BuildResumptionPersistenceException;
/**
* Uses previously stored resumption data to enrich an existing execution request.
* @param request The execution request that will be enriched.
* @param rootProject The root project that is being built.
*/
void applyResumptionData(MavenExecutionRequest request, MavenProject rootProject);
/**
* Removes previously stored resumption data.
* @param rootProject The root project that is being built.
*/
void removeResumptionData(MavenProject rootProject);
}
| BuildResumptionDataRepository |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/taskexecutor/rpc/RpcInputSplitProvider.java | {
"start": 1504,
"end": 3166
} | class ____ implements InputSplitProvider {
private final JobMasterGateway jobMasterGateway;
private final JobVertexID jobVertexID;
private final ExecutionAttemptID executionAttemptID;
private final Duration timeout;
public RpcInputSplitProvider(
JobMasterGateway jobMasterGateway,
JobVertexID jobVertexID,
ExecutionAttemptID executionAttemptID,
Duration timeout) {
this.jobMasterGateway = Preconditions.checkNotNull(jobMasterGateway);
this.jobVertexID = Preconditions.checkNotNull(jobVertexID);
this.executionAttemptID = Preconditions.checkNotNull(executionAttemptID);
this.timeout = Preconditions.checkNotNull(timeout);
}
@Override
public InputSplit getNextInputSplit(ClassLoader userCodeClassLoader)
throws InputSplitProviderException {
Preconditions.checkNotNull(userCodeClassLoader);
CompletableFuture<SerializedInputSplit> futureInputSplit =
jobMasterGateway.requestNextInputSplit(jobVertexID, executionAttemptID);
try {
SerializedInputSplit serializedInputSplit =
futureInputSplit.get(timeout.toMillis(), TimeUnit.MILLISECONDS);
if (serializedInputSplit.isEmpty()) {
return null;
} else {
return InstantiationUtil.deserializeObject(
serializedInputSplit.getInputSplitData(), userCodeClassLoader);
}
} catch (Exception e) {
throw new InputSplitProviderException("Requesting the next input split failed.", e);
}
}
}
| RpcInputSplitProvider |
java | quarkusio__quarkus | extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/deployment/pathparams/HttpPathParamLimitWithJaxRsTest.java | {
"start": 513,
"end": 2957
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withConfigurationResource("test-logging.properties")
.overrideConfigKey("quarkus.micrometer.binder-enabled-default", "false")
.overrideConfigKey("quarkus.micrometer.binder.http-client.enabled", "true")
.overrideConfigKey("quarkus.micrometer.binder.http-server.enabled", "true")
.overrideConfigKey("quarkus.micrometer.binder.vertx.enabled", "true")
.overrideConfigKey("quarkus.redis.devservices.enabled", "false")
.withApplicationRoot((jar) -> jar
.addClasses(Util.class,
Resource.class));
@Inject
MeterRegistry registry;
public static final int COUNT = 101;
public static final int ARITY_LIMIT = 100;
@Test
void testWithResteasyOK() throws InterruptedException {
registry.clear();
// Test a JAX-RS endpoint with GET /jaxrs and GET /jaxrs/{message}
// Verify OK response
for (int i = 0; i < COUNT; i++) {
RestAssured.get("/jaxrs").then().statusCode(200);
RestAssured.get("/jaxrs/foo-" + i).then().statusCode(200);
}
// Verify metrics
Util.waitForMeters(registry.find("http.server.requests").timers(), COUNT);
Assertions.assertEquals(COUNT, registry.find("http.server.requests")
.tag("uri", "/jaxrs").timers().iterator().next().count());
Assertions.assertEquals(COUNT, registry.find("http.server.requests")
.tag("uri", "/jaxrs/{message}").timers().iterator().next().count());
// Verify 405 responses
for (int i = 0; i < COUNT; i++) {
RestAssured.delete("/jaxrs").then().statusCode(405);
RestAssured.patch("/jaxrs/foo-" + i).then().statusCode(405);
}
Util.waitForMeters(registry.find("http.server.requests").timers(), COUNT * 2);
Assertions.assertEquals(COUNT, registry.find("http.server.requests")
.tag("uri", "/jaxrs").tag("method", "DELETE").timers().iterator().next().count());
Assertions.assertEquals(ARITY_LIMIT - 2, registry.find("http.server.requests")
.tag("method", "PATCH").timers().size()); // -2 because of the two other uri: /jaxrs and /jaxrs/{message}.
}
@Path("/")
@Singleton
public static | HttpPathParamLimitWithJaxRsTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java | {
"start": 5357,
"end": 16006
} | class ____
* after deserializing the entity from its JSON form.
* </p>
* @param entity the real entity that carries information
*/
public TimelineEntity(TimelineEntity entity) {
real = entity.getReal();
}
protected TimelineEntity(String type) {
this();
identifier.type = type;
}
@XmlElement(name = "type")
public String getType() {
if (real == null) {
return identifier.type;
} else {
return real.getType();
}
}
public void setType(String type) {
if (real == null) {
identifier.type = type;
} else {
real.setType(type);
}
}
@XmlElement(name = "id")
@JsonIgnore
public String getId() {
if (real == null) {
return identifier.id;
} else {
return real.getId();
}
}
public void setId(String id) {
if (real == null) {
identifier.id = id;
} else {
real.setId(id);
}
}
public Identifier getIdentifier() {
if (real == null) {
return identifier;
} else {
return real.getIdentifier();
}
}
public void setIdentifier(Identifier entityIdentifier) {
if (real == null) {
this.identifier = entityIdentifier;
} else {
real.setIdentifier(entityIdentifier);
}
}
// required by JAXB
@InterfaceAudience.Private
@XmlElement(name = "info")
@JsonIgnore
public HashMap<String, Object> getInfoJAXB() {
if (real == null) {
return info;
} else {
return real.getInfoJAXB();
}
}
public Map<String, Object> getInfo() {
if (real == null) {
return info;
} else {
return real.getInfo();
}
}
public void setInfo(Map<String, Object> entityInfos) {
if (real == null) {
this.info = TimelineServiceHelper.mapCastToHashMap(entityInfos);
} else {
real.setInfo(entityInfos);
}
}
public void addInfo(Map<String, Object> entityInfos) {
if (real == null) {
this.info.putAll(entityInfos);
} else {
real.addInfo(entityInfos);
}
}
public void addInfo(String key, Object value) {
if (real == null) {
info.put(key, value);
} else {
real.addInfo(key, value);
}
}
// required by JAXB
@InterfaceAudience.Private
@XmlElement(name = "configs")
public HashMap<String, String> getConfigsJAXB() {
if (real == null) {
return configs;
} else {
return real.getConfigsJAXB();
}
}
public Map<String, String> getConfigs() {
if (real == null) {
return configs;
} else {
return real.getConfigs();
}
}
public void setConfigs(Map<String, String> entityConfigs) {
if (real == null) {
this.configs = TimelineServiceHelper.mapCastToHashMap(entityConfigs);
} else {
real.setConfigs(entityConfigs);
}
}
public void addConfigs(Map<String, String> entityConfigs) {
if (real == null) {
this.configs.putAll(entityConfigs);
} else {
real.addConfigs(entityConfigs);
}
}
public void addConfig(String key, String value) {
if (real == null) {
configs.put(key, value);
} else {
real.addConfig(key, value);
}
}
@XmlElement(name = "metrics")
public Set<TimelineMetric> getMetrics() {
if (real == null) {
return metrics;
} else {
return real.getMetrics();
}
}
public void setMetrics(Set<TimelineMetric> entityMetrics) {
if (real == null) {
this.metrics = entityMetrics;
} else {
real.setMetrics(entityMetrics);
}
}
public void addMetrics(Set<TimelineMetric> entityMetrics) {
if (real == null) {
this.metrics.addAll(entityMetrics);
} else {
real.addMetrics(entityMetrics);
}
}
public void addMetric(TimelineMetric metric) {
if (real == null) {
metrics.add(metric);
} else {
real.addMetric(metric);
}
}
@XmlElement(name = "events")
public NavigableSet<TimelineEvent> getEvents() {
if (real == null) {
return events;
} else {
return real.getEvents();
}
}
public void setEvents(NavigableSet<TimelineEvent> entityEvents) {
if (real == null) {
this.events = entityEvents;
} else {
real.setEvents(entityEvents);
}
}
public void addEvents(Set<TimelineEvent> entityEvents) {
if (real == null) {
this.events.addAll(entityEvents);
} else {
real.addEvents(entityEvents);
}
}
public void addEvent(TimelineEvent event) {
if (real == null) {
events.add(event);
} else {
real.addEvent(event);
}
}
public Map<String, Set<String>> getIsRelatedToEntities() {
if (real == null) {
return isRelatedToEntities;
} else {
return real.getIsRelatedToEntities();
}
}
// required by JAXB
@InterfaceAudience.Private
@XmlElement(name = "isrelatedto")
public HashMap<String, Set<String>> getIsRelatedToEntitiesJAXB() {
if (real == null) {
return isRelatedToEntities;
} else {
return real.getIsRelatedToEntitiesJAXB();
}
}
@JsonSetter("isrelatedto")
public void setIsRelatedToEntities(
Map<String, Set<String>> isRelatedTo) {
if (real == null) {
this.isRelatedToEntities =
TimelineServiceHelper.mapCastToHashMap(isRelatedTo);
} else {
real.setIsRelatedToEntities(isRelatedTo);
}
}
public void addIsRelatedToEntities(
Map<String, Set<String>> isRelatedTo) {
if (real == null) {
for (Map.Entry<String, Set<String>> entry : isRelatedTo.entrySet()) {
Set<String> ids = this.isRelatedToEntities.get(entry.getKey());
if (ids == null) {
ids = new HashSet<>();
this.isRelatedToEntities.put(entry.getKey(), ids);
}
ids.addAll(entry.getValue());
}
} else {
real.addIsRelatedToEntities(isRelatedTo);
}
}
public void addIsRelatedToEntity(String type, String id) {
if (real == null) {
Set<String> ids = isRelatedToEntities.get(type);
if (ids == null) {
ids = new HashSet<>();
isRelatedToEntities.put(type, ids);
}
ids.add(id);
} else {
real.addIsRelatedToEntity(type, id);
}
}
// required by JAXB
@InterfaceAudience.Private
@XmlElement(name = "relatesto")
public HashMap<String, Set<String>> getRelatesToEntitiesJAXB() {
if (real == null) {
return relatesToEntities;
} else {
return real.getRelatesToEntitiesJAXB();
}
}
public Map<String, Set<String>> getRelatesToEntities() {
if (real == null) {
return relatesToEntities;
} else {
return real.getRelatesToEntities();
}
}
public void addRelatesToEntities(Map<String, Set<String>> relatesTo) {
if (real == null) {
for (Map.Entry<String, Set<String>> entry : relatesTo.entrySet()) {
Set<String> ids = this.relatesToEntities.get(entry.getKey());
if (ids == null) {
ids = new HashSet<>();
this.relatesToEntities.put(entry.getKey(), ids);
}
ids.addAll(entry.getValue());
}
} else {
real.addRelatesToEntities(relatesTo);
}
}
public void addRelatesToEntity(String type, String id) {
if (real == null) {
Set<String> ids = relatesToEntities.get(type);
if (ids == null) {
ids = new HashSet<>();
relatesToEntities.put(type, ids);
}
ids.add(id);
} else {
real.addRelatesToEntity(type, id);
}
}
@JsonSetter("relatesto")
public void setRelatesToEntities(Map<String, Set<String>> relatesTo) {
if (real == null) {
this.relatesToEntities =
TimelineServiceHelper.mapCastToHashMap(relatesTo);
} else {
real.setRelatesToEntities(relatesTo);
}
}
@XmlElement(name = "createdtime")
public Long getCreatedTime() {
if (real == null) {
return createdTime;
} else {
return real.getCreatedTime();
}
}
@JsonSetter("createdtime")
public void setCreatedTime(Long createdTs) {
if (real == null) {
this.createdTime = createdTs;
} else {
real.setCreatedTime(createdTs);
}
}
/**
* Set UID in info which will be then used for query by UI.
* @param uidKey key for UID in info.
* @param uId UID to be set for the key.
*/
public void setUID(String uidKey, String uId) {
if (real == null) {
info.put(uidKey, uId);
} else {
real.addInfo(uidKey, uId);
}
}
@JsonIgnore
public boolean isValid() {
return (getId() != null && getType() != null);
}
// When get hashCode for a timeline entity, or check if two timeline entities
// are equal, we only compare their identifiers (id and type)
@Override
public int hashCode() {
return getIdentifier().hashCode();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof TimelineEntity)) {
return false;
}
TimelineEntity other = (TimelineEntity) obj;
return getIdentifier().equals(other.getIdentifier());
}
@Override
public int compareTo(TimelineEntity other) {
int comparison = getType().compareTo(other.getType());
if (comparison == 0) {
if (getIdPrefix() > other.getIdPrefix()) {
// Descending order by entity id prefix
return -1;
} else if (getIdPrefix() < other.getIdPrefix()) {
return 1;
} else {
return getId().compareTo(other.getId());
}
} else {
return comparison;
}
}
protected TimelineEntity getReal() {
return real == null ? this : real;
}
public String toString() {
if (real == null) {
return identifier.toString();
} else {
return real.toString();
}
}
@XmlElement(name = "idprefix")
public long getIdPrefix() {
if (real == null) {
return idPrefix;
} else {
return real.getIdPrefix();
}
}
/**
* Sets idPrefix for an entity.
* <p>
* <b>Note</b>: Entities will be stored in the order of idPrefix specified.
* If users decide to set idPrefix for an entity, they <b>MUST</b> provide
* the same prefix for every update of this entity.
* </p>
* Example: <blockquote><pre>
* TimelineEntity entity = new TimelineEntity();
* entity.setIdPrefix(value);
* </pre></blockquote>
* Users can use {@link TimelineServiceHelper#invertLong(long)} to invert
* the prefix if necessary.
*
* @param entityIdPrefix prefix for an entity.
*/
@JsonSetter("idprefix")
public void setIdPrefix(long entityIdPrefix) {
if (real == null) {
this.idPrefix = entityIdPrefix;
} else {
real.setIdPrefix(entityIdPrefix);
}
}
} | polymorphism |
java | resilience4j__resilience4j | resilience4j-circuitbreaker/src/main/java/io/github/resilience4j/circuitbreaker/event/CircuitBreakerOnStateTransitionEvent.java | {
"start": 816,
"end": 1681
} | class ____ extends AbstractCircuitBreakerEvent {
private CircuitBreaker.StateTransition stateTransition;
public CircuitBreakerOnStateTransitionEvent(String circuitBreakerName,
CircuitBreaker.StateTransition stateTransition) {
super(circuitBreakerName);
this.stateTransition = stateTransition;
}
public CircuitBreaker.StateTransition getStateTransition() {
return stateTransition;
}
@Override
public Type getEventType() {
return Type.STATE_TRANSITION;
}
@Override
public String toString() {
return String.format("%s: CircuitBreaker '%s' changed state from %s to %s",
getCreationTime(),
getCircuitBreakerName(),
getStateTransition().getFromState(),
getStateTransition().getToState());
}
}
| CircuitBreakerOnStateTransitionEvent |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/selection/primitives/SourceTargetMapperPrimitive.java | {
"start": 355,
"end": 529
} | interface ____ {
SourceTargetMapperPrimitive INSTANCE = Mappers.getMapper( SourceTargetMapperPrimitive.class );
Target toTarget(Source s);
}
| SourceTargetMapperPrimitive |
java | google__auto | value/src/main/java/com/google/auto/value/processor/AutoValueOrBuilderTemplateVars.java | {
"start": 1233,
"end": 2003
} | class ____ extends AutoValueishTemplateVars {
/**
* The properties defined by the parent class's abstract methods. The elements of this set are in
* the same order as the original abstract method declarations in the AutoValue class.
*/
ImmutableSet<Property> props;
/**
* The simple name of the generated builder, or empty if there is no builder. This is just {@code
* Builder} for AutoValue, since it is nested inside the {@code AutoValue_Foo} class. But it is
* {@code AutoBuilder_Foo} for AutoBuilder.
*/
String builderName = "";
/**
* The name of the builder type as it should appear in source code, or empty if there is no
* builder type. If class {@code Address} contains {@code @AutoValue.Builder} | AutoValueOrBuilderTemplateVars |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/locking/JoinedInheritancePessimisticLockingTest.java | {
"start": 1221,
"end": 2579
} | class ____ {
@BeforeEach
public void setup(EntityManagerFactoryScope scope) {
scope.inTransaction(entityManager -> {
var t1 = new ConcreteThing();
t1.id = 1L;
t1.name = "t1";
t1.aProp = "abc";
entityManager.persist( t1 );
var t2 = new AnotherConcreteThing();
t2.id = 2L;
t2.name = "t2";
t2.anotherProp = "def";
entityManager.persist( t2 );
} );
}
@AfterEach
public void tearDown(EntityManagerFactoryScope scope) {
scope.dropData();
}
@Test
@SkipForDialect(dialectClass = InformixDialect.class,
reason = "Informix disallows FOR UPDATE with multi-table queries")
public void findWithLock(EntityManagerFactoryScope scope) {
scope.inTransaction(entityManager -> {
BaseThing t = entityManager.find( BaseThing.class, 1L, LockModeType.PESSIMISTIC_WRITE );
assertEquals( LockModeType.PESSIMISTIC_WRITE, entityManager.getLockMode( t ) );
});
}
@Test
public void findThenLock(EntityManagerFactoryScope scope) {
scope.inTransaction(entityManager -> {
BaseThing t = entityManager.find( BaseThing.class, 1L );
entityManager.lock( t, LockModeType.PESSIMISTIC_WRITE );
assertEquals( LockModeType.PESSIMISTIC_WRITE, entityManager.getLockMode( t ) );
});
}
@Entity(name = "BaseThing")
@Inheritance(strategy = InheritanceType.JOINED)
public static abstract | JoinedInheritancePessimisticLockingTest |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/jdk/ArrayDeserializationTest.java | {
"start": 656,
"end": 742
} | class ____
extends DatabindTestUtil
{
public final static | ArrayDeserializationTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/GuiceNestedCombineTest.java | {
"start": 4548,
"end": 5093
} | class ____ extends AbstractModule {}
public void test() {
foo(new ModuleA(), new ModuleA());
}
public void foo(Module... xs) {}
}
""")
.doTest();
}
@Test
public void noArguments_ignored() {
refactoringTestHelper
.addInputLines(
"Test.java",
"""
import com.google.inject.AbstractModule;
import com.google.inject.Module;
import com.google.inject.util.Modules;
| ModuleA |
java | grpc__grpc-java | api/src/main/java/io/grpc/ServerCallExecutorSupplier.java | {
"start": 872,
"end": 1166
} | interface ____ {
/**
* Returns an executor to handle the server call.
* It should never throw. It should return null to fallback to the default executor.
* */
@Nullable
<ReqT, RespT> Executor getExecutor(ServerCall<ReqT, RespT> call, Metadata metadata);
}
| ServerCallExecutorSupplier |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/MissingFail.java | {
"start": 20311,
"end": 20665
} | class ____ extends ChildMultiMatcher<TryTree, Tree> {
ChildOfTryMatcher(MatchType matchType, Matcher<Tree> nodeMatcher) {
super(matchType, nodeMatcher);
}
@Override
protected Iterable<? extends StatementTree> getChildNodes(TryTree tree, VisitorState state) {
return tree.getBlock().getStatements();
}
}
}
| ChildOfTryMatcher |
java | apache__camel | core/camel-main/src/test/java/org/apache/camel/main/MainRestConfigurationTest.java | {
"start": 1139,
"end": 2825
} | class ____ {
@Test
public void testRestConfiguration() {
final Main main = newMain();
try {
main.start();
RestConfiguration rf = main.getCamelContext().getRestConfiguration();
assertEquals("platform-http", rf.getComponent());
assertTrue(rf.isEnableCORS());
assertEquals("/openapi", rf.getApiContextPath());
assertTrue(rf.isApiVendorExtension());
Map<String, Object> map = rf.getApiProperties();
Assertions.assertNotNull(map);
assertEquals("Dummy Value", map.get("dummyKey"));
assertEquals("My Title", map.get("api.title"));
assertEquals("1.2.3", map.get("api.version"));
assertEquals("/mybase", map.get("base.path"));
} finally {
main.stop();
}
}
private static Main newMain() {
Properties properties = new Properties();
properties.setProperty("camel.rest.component", "platform-http");
properties.setProperty("camel.rest.enableCORS", "true");
properties.setProperty("camel.rest.apiContextPath", "/openapi");
properties.setProperty("camel.rest.apiVendorExtension", "true");
properties.setProperty("camel.rest.apiProperties[dummyKey]", "Dummy Value");
properties.setProperty("camel.rest.apiProperties[api.title]", "My Title");
properties.setProperty("camel.rest.apiProperties[api.version]", "1.2.3");
properties.setProperty("camel.rest.apiProperties[base.path]", "/mybase");
Main main = new Main();
main.setOverrideProperties(properties);
return main;
}
}
| MainRestConfigurationTest |
java | processing__processing4 | java/src/processing/mode/java/tweak/Handle.java | {
"start": 8144,
"end": 8405
} | class ____ implements Comparator<Handle> {
public int compare(Handle handle1, Handle handle2) {
int tab = handle1.tabIndex - handle2.tabIndex;
if (tab != 0) {
return tab;
}
return handle1.startChar - handle2.startChar;
}
}
| HandleComparator |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/main/java/io/quarkus/rest/client/reactive/deployment/RestClientReactiveProcessor.java | {
"start": 28833,
"end": 29096
} | interface ____ with @RegisterRestClient, generate a $$CDIWrapper CDI bean that can be injected
validateKotlinDefaultMethods(jaxrsInterface, index);
List<MethodInfo> methodsToImplement = new ArrayList<>();
// search this | annotated |
java | quarkusio__quarkus | extensions/kubernetes-config/runtime/src/test/java/io/quarkus/kubernetes/config/runtime/ConfigMapConfigSourceUtilTest.java | {
"start": 384,
"end": 6472
} | class ____ {
ConfigMapConfigSourceUtil sut = new ConfigMapConfigSourceUtil();
@Test
void testEmptyData() {
ConfigMap configMap = configMapBuilder("testEmptyData").build();
List<ConfigSource> configSources = sut.toConfigSources(configMap.getMetadata(), configMap.getData(), 0);
assertThat(configSources).isEmpty();
}
@Test
void testOnlyLiteralData() {
ConfigMap configMap = configMapBuilder("testOnlyLiteralData")
.addToData("some.key", "someValue").addToData("some.other", "someOtherValue").build();
List<ConfigSource> configSources = sut.toConfigSources(configMap.getMetadata(), configMap.getData(), 0);
assertThat(configSources).singleElement().satisfies(c -> {
assertThat(c.getProperties()).containsOnly(entry("some.key", "someValue"),
entry("some.other", "someOtherValue"));
assertThat(c.getName()).contains("testOnlyLiteralData");
assertThat(c.getName()).isEqualTo(
"ConfigMapLiteralDataPropertiesConfigSource[configMap=namespace/testOnlyLiteralData/uid/version]");
});
}
@Test
void testOnlySingleMatchingPropertiesData() {
ConfigMap configMap = configMapBuilder("testOnlySingleMatchingPropertiesData")
.addToData("application.properties", "key1=value1\nkey2=value2\nsome.key=someValue").build();
List<ConfigSource> configSources = sut.toConfigSources(configMap.getMetadata(), configMap.getData(), 0);
assertThat(configSources).singleElement().satisfies(c -> {
assertThat(c.getProperties()).containsOnly(entry("key1", "value1"), entry("key2", "value2"),
entry("some.key", "someValue"));
assertThat(c.getName()).contains("testOnlySingleMatchingPropertiesData");
assertThat(c.getOrdinal()).isEqualTo(270);
});
}
@Test
void testOnlySingleNonMatchingPropertiesData() {
ConfigMap configMap = configMapBuilder("testOnlySingleMatchingPropertiesData")
.addToData("app.properties", "key1=value1\nkey2=value2\nsome.key=someValue").build();
List<ConfigSource> configSources = sut.toConfigSources(configMap.getMetadata(), configMap.getData(), 0);
assertThat(configSources).isNotEmpty();
}
@Test
void testOnlySingleMatchingYamlData() {
ConfigMap configMap = configMapBuilder("testOnlySingleMatchingYamlData")
.addToData("application.yaml", "key1: value1\nkey2: value2\nsome:\n key: someValue").build();
List<ConfigSource> configSources = sut.toConfigSources(configMap.getMetadata(), configMap.getData(), 0);
assertThat(configSources).singleElement().satisfies(c -> {
assertThat(c.getProperties()).containsOnly(entry("key1", "value1"), entry("key2", "value2"),
entry("some.key", "someValue"));
assertThat(c.getName()).contains("testOnlySingleMatchingYamlData");
});
}
@Test
void testOnlySingleNonMatchingYamlData() {
ConfigMap configMap = configMapBuilder("testOnlySingleMatchingPropertiesData")
.addToData("app.yaml", "key1: value1\nkey2: value2\nsome:\n key: someValue").build();
List<ConfigSource> configSources = sut.toConfigSources(configMap.getMetadata(), configMap.getData(), 0);
assertThat(configSources).isNotEmpty();
}
@Test
void testWithAllKindsOfData() {
ConfigMap configMap = configMapBuilder("testWithAllKindsOfData")
.addToData("some.key", "someValue")
.addToData("application.properties", "key1=value1\napp.key=val")
.addToData("app.properties", "ignored1=ignoredValue1")
.addToData("application.yaml", "key2: value2\nsome:\n otherKey: someOtherValue")
.addToData("app.yaml", "ignored2: ignoredValue2")
.addToData("application.yml", "key3: value3")
.addToData("app.yml", "ignored3: ignoredValue3")
.build();
List<ConfigSource> configSources = sut.toConfigSources(configMap.getMetadata(), configMap.getData(), 0);
assertThat(configSources).hasSize(4);
assertThat(configSources.get(0).getClass().getName().contains("ConfigMapLiteralDataPropertiesConfigSource")).isTrue();
assertThat(configSources).filteredOn(c -> !c.getName().toLowerCase().contains("application"))
.hasOnlyOneElementSatisfying(c -> {
assertThat(c.getProperties()).containsOnly(
entry("some.key", "someValue"),
entry("app.properties", "ignored1=ignoredValue1"),
entry("app.yaml", "ignored2: ignoredValue2"),
entry("app.yml", "ignored3: ignoredValue3"));
});
assertThat(configSources).filteredOn(c -> c.getName().toLowerCase().contains("application.properties"))
.singleElement().satisfies(c -> {
assertThat(c.getProperties()).containsOnly(entry("key1", "value1"), entry("app.key", "val"));
});
assertThat(configSources).filteredOn(c -> c.getName().toLowerCase().contains("application.yaml"))
.singleElement().satisfies(c -> {
assertThat(c.getProperties()).containsOnly(entry("key2", "value2"),
entry("some.otherKey", "someOtherValue"));
});
assertThat(configSources).filteredOn(c -> c.getName().toLowerCase().contains("application.yml"))
.singleElement().satisfies(c -> {
assertThat(c.getProperties()).containsOnly(entry("key3", "value3"));
});
}
private ConfigMapBuilder configMapBuilder(String name) {
return new ConfigMapBuilder().withNewMetadata()
.withName(name).withNamespace("namespace").withUid("uid")
.withResourceVersion("version").endMetadata();
}
}
| ConfigMapConfigSourceUtilTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inlineme/ValidatorTest.java | {
"start": 11530,
"end": 12277
} | class ____ {
@InlineMe(replacement = "this.after(() -> string);")
@Deprecated
// BUG: Diagnostic contains: evaluation timing
public void before(String string) {
after(() -> string);
}
public void after(Supplier<String> supplier) {}
}
""")
.doTest();
}
private static final Pattern FROM_ANNOTATION = Pattern.compile("FromAnnotation: \\[.*;]");
@Test
public void constructor() {
helper
.addSourceLines(
"ProfileTimer.java",
"""
import com.google.common.base.Ticker;
import com.google.errorprone.annotations.InlineMe;
public final | Client |
java | netty__netty | example/src/main/java/io/netty/example/http/websocketx/benchmarkserver/WebSocketServerHandler.java | {
"start": 1986,
"end": 6594
} | class ____ extends SimpleChannelInboundHandler<Object> {
private static final String WEBSOCKET_PATH = "/websocket";
private WebSocketServerHandshaker handshaker;
@Override
public void channelRead0(ChannelHandlerContext ctx, Object msg) {
if (msg instanceof FullHttpRequest) {
handleHttpRequest(ctx, (FullHttpRequest) msg);
} else if (msg instanceof WebSocketFrame) {
handleWebSocketFrame(ctx, (WebSocketFrame) msg);
}
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) {
ctx.flush();
}
private void handleHttpRequest(ChannelHandlerContext ctx, FullHttpRequest req) {
// Handle a bad request.
if (!req.decoderResult().isSuccess()) {
sendHttpResponse(ctx, req, new DefaultFullHttpResponse(req.protocolVersion(), BAD_REQUEST,
ctx.alloc().buffer(0)));
return;
}
// Allow only GET methods.
if (!GET.equals(req.method())) {
sendHttpResponse(ctx, req, new DefaultFullHttpResponse(req.protocolVersion(), FORBIDDEN,
ctx.alloc().buffer(0)));
return;
}
// Send the demo page and favicon.ico
if ("/".equals(req.uri())) {
ByteBuf content = WebSocketServerBenchmarkPage.getContent(getWebSocketLocation(req));
FullHttpResponse res = new DefaultFullHttpResponse(req.protocolVersion(), OK, content);
res.headers().set(HttpHeaderNames.CONTENT_TYPE, "text/html; charset=UTF-8");
HttpUtil.setContentLength(res, content.readableBytes());
sendHttpResponse(ctx, req, res);
return;
}
if ("/favicon.ico".equals(req.uri())) {
FullHttpResponse res = new DefaultFullHttpResponse(req.protocolVersion(), NOT_FOUND,
ctx.alloc().buffer(0));
sendHttpResponse(ctx, req, res);
return;
}
// Handshake
WebSocketServerHandshakerFactory wsFactory = new WebSocketServerHandshakerFactory(
getWebSocketLocation(req), null, true, 5 * 1024 * 1024);
handshaker = wsFactory.newHandshaker(req);
if (handshaker == null) {
WebSocketServerHandshakerFactory.sendUnsupportedVersionResponse(ctx.channel());
} else {
handshaker.handshake(ctx.channel(), req);
}
}
private void handleWebSocketFrame(ChannelHandlerContext ctx, WebSocketFrame frame) {
// Check for closing frame
if (frame instanceof CloseWebSocketFrame) {
handshaker.close(ctx, (CloseWebSocketFrame) frame.retain());
return;
}
if (frame instanceof PingWebSocketFrame) {
ctx.write(new PongWebSocketFrame(frame.content().retain()));
return;
}
if (frame instanceof TextWebSocketFrame) {
// Echo the frame
ctx.write(frame.retain());
return;
}
if (frame instanceof BinaryWebSocketFrame) {
// Echo the frame
ctx.write(frame.retain());
}
}
private static void sendHttpResponse(ChannelHandlerContext ctx, FullHttpRequest req, FullHttpResponse res) {
// Generate an error page if response getStatus code is not OK (200).
HttpResponseStatus responseStatus = res.status();
if (responseStatus.code() != 200) {
ByteBufUtil.writeUtf8(res.content(), responseStatus.toString());
HttpUtil.setContentLength(res, res.content().readableBytes());
}
// Send the response and close the connection if necessary.
boolean keepAlive = HttpUtil.isKeepAlive(req) && responseStatus.code() == 200;
HttpUtil.setKeepAlive(res, keepAlive);
ChannelFuture future = ctx.write(res); // Flushed in channelReadComplete()
if (!keepAlive) {
future.addListener(ChannelFutureListener.CLOSE);
}
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
cause.printStackTrace();
ctx.close();
}
private static String getWebSocketLocation(FullHttpRequest req) {
String location = req.headers().get(HttpHeaderNames.HOST) + WEBSOCKET_PATH;
if (WebSocketServer.SSL) {
return "wss://" + location;
} else {
return "ws://" + location;
}
}
}
| WebSocketServerHandler |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/xml/XMLEventStreamReader.java | {
"start": 1631,
"end": 6968
} | class ____ extends AbstractXMLStreamReader {
private XMLEvent event;
private final XMLEventReader eventReader;
public XMLEventStreamReader(XMLEventReader eventReader) throws XMLStreamException {
this.eventReader = eventReader;
this.event = eventReader.nextEvent();
}
@Override
public QName getName() {
if (this.event.isStartElement()) {
return this.event.asStartElement().getName();
}
else if (this.event.isEndElement()) {
return this.event.asEndElement().getName();
}
else {
throw new IllegalStateException();
}
}
@Override
public Location getLocation() {
return this.event.getLocation();
}
@Override
public int getEventType() {
return this.event.getEventType();
}
@Override
public @Nullable String getVersion() {
if (this.event.isStartDocument()) {
return ((StartDocument) this.event).getVersion();
}
else {
return null;
}
}
@Override
public Object getProperty(String name) throws IllegalArgumentException {
return this.eventReader.getProperty(name);
}
@Override
public boolean isStandalone() {
if (this.event.isStartDocument()) {
return ((StartDocument) this.event).isStandalone();
}
else {
throw new IllegalStateException();
}
}
@Override
public boolean standaloneSet() {
if (this.event.isStartDocument()) {
return ((StartDocument) this.event).standaloneSet();
}
else {
throw new IllegalStateException();
}
}
@Override
public @Nullable String getEncoding() {
return null;
}
@Override
public @Nullable String getCharacterEncodingScheme() {
return null;
}
@Override
public String getPITarget() {
if (this.event.isProcessingInstruction()) {
return ((ProcessingInstruction) this.event).getTarget();
}
else {
throw new IllegalStateException();
}
}
@Override
public String getPIData() {
if (this.event.isProcessingInstruction()) {
return ((ProcessingInstruction) this.event).getData();
}
else {
throw new IllegalStateException();
}
}
@Override
public int getTextStart() {
return 0;
}
@Override
public String getText() {
if (this.event.isCharacters()) {
return this.event.asCharacters().getData();
}
else if (this.event.getEventType() == XMLStreamConstants.COMMENT) {
return ((Comment) this.event).getText();
}
else {
throw new IllegalStateException();
}
}
@Override
@SuppressWarnings("rawtypes")
public int getAttributeCount() {
if (!this.event.isStartElement()) {
throw new IllegalStateException();
}
Iterator attributes = this.event.asStartElement().getAttributes();
return countIterator(attributes);
}
@Override
public boolean isAttributeSpecified(int index) {
return getAttribute(index).isSpecified();
}
@Override
public QName getAttributeName(int index) {
return getAttribute(index).getName();
}
@Override
public String getAttributeType(int index) {
return getAttribute(index).getDTDType();
}
@Override
public String getAttributeValue(int index) {
return getAttribute(index).getValue();
}
@SuppressWarnings("rawtypes")
private Attribute getAttribute(int index) {
if (!this.event.isStartElement()) {
throw new IllegalStateException();
}
int count = 0;
Iterator attributes = this.event.asStartElement().getAttributes();
while (attributes.hasNext()) {
Attribute attribute = (Attribute) attributes.next();
if (count == index) {
return attribute;
}
else {
count++;
}
}
throw new IllegalArgumentException();
}
@Override
public NamespaceContext getNamespaceContext() {
if (this.event.isStartElement()) {
return this.event.asStartElement().getNamespaceContext();
}
else {
throw new IllegalStateException();
}
}
@Override
@SuppressWarnings("rawtypes")
public int getNamespaceCount() {
Iterator namespaces;
if (this.event.isStartElement()) {
namespaces = this.event.asStartElement().getNamespaces();
}
else if (this.event.isEndElement()) {
namespaces = this.event.asEndElement().getNamespaces();
}
else {
throw new IllegalStateException();
}
return countIterator(namespaces);
}
@Override
public String getNamespacePrefix(int index) {
return getNamespace(index).getPrefix();
}
@Override
public String getNamespaceURI(int index) {
return getNamespace(index).getNamespaceURI();
}
@SuppressWarnings("rawtypes")
private Namespace getNamespace(int index) {
Iterator namespaces;
if (this.event.isStartElement()) {
namespaces = this.event.asStartElement().getNamespaces();
}
else if (this.event.isEndElement()) {
namespaces = this.event.asEndElement().getNamespaces();
}
else {
throw new IllegalStateException();
}
int count = 0;
while (namespaces.hasNext()) {
Namespace namespace = (Namespace) namespaces.next();
if (count == index) {
return namespace;
}
else {
count++;
}
}
throw new IllegalArgumentException();
}
@Override
public int next() throws XMLStreamException {
this.event = this.eventReader.nextEvent();
return this.event.getEventType();
}
@Override
public void close() throws XMLStreamException {
this.eventReader.close();
}
@SuppressWarnings("rawtypes")
private static int countIterator(Iterator iterator) {
int count = 0;
while (iterator.hasNext()) {
iterator.next();
count++;
}
return count;
}
}
| XMLEventStreamReader |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_isNullOrEmpty_Test.java | {
"start": 985,
"end": 1615
} | class ____ extends AtomicReferenceArrayAssertBaseTest {
@Override
protected AtomicReferenceArrayAssert<Object> invoke_api_method() {
assertions.isNullOrEmpty();
return assertions;
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertEmpty(info(), internalArray());
}
@Override
@Test
public void should_return_this() {
// Disable this test because isNullOrEmpty is void
}
@Test
void should_pass_if_AtomicReferenceArray_is_null() {
AtomicReferenceArray<Object> array = null;
assertThat(array).isNullOrEmpty();
}
}
| AtomicReferenceArrayAssert_isNullOrEmpty_Test |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/RedisEndpointBuilderFactory.java | {
"start": 28486,
"end": 28778
} | interface ____
extends
AdvancedRedisEndpointConsumerBuilder,
AdvancedRedisEndpointProducerBuilder {
default RedisEndpointBuilder basic() {
return (RedisEndpointBuilder) this;
}
}
public | AdvancedRedisEndpointBuilder |
java | grpc__grpc-java | rls/src/main/java/io/grpc/rls/LbPolicyConfiguration.java | {
"start": 3567,
"end": 7487
} | class ____ {
private final Map<String, Object> effectiveRawChildPolicy;
private final LoadBalancerProvider effectiveLbProvider;
private final String targetFieldName;
@VisibleForTesting
ChildLoadBalancingPolicy(
String targetFieldName,
Map<String, Object> effectiveRawChildPolicy,
LoadBalancerProvider effectiveLbProvider) {
checkArgument(
targetFieldName != null && !targetFieldName.isEmpty(),
"targetFieldName cannot be empty or null");
this.targetFieldName = targetFieldName;
this.effectiveRawChildPolicy =
checkNotNull(effectiveRawChildPolicy, "effectiveRawChildPolicy");
this.effectiveLbProvider = checkNotNull(effectiveLbProvider, "effectiveLbProvider");
}
/** Creates ChildLoadBalancingPolicy. */
@SuppressWarnings("unchecked")
static ChildLoadBalancingPolicy create(
String childPolicyConfigTargetFieldName, List<Map<String, ?>> childPolicies)
throws InvalidChildPolicyConfigException {
Map<String, Object> effectiveChildPolicy = null;
LoadBalancerProvider effectiveLbProvider = null;
List<String> policyTried = new ArrayList<>();
LoadBalancerRegistry lbRegistry = LoadBalancerRegistry.getDefaultRegistry();
for (Map<String, ?> childPolicy : childPolicies) {
if (childPolicy.isEmpty()) {
continue;
}
if (childPolicy.size() != 1) {
throw
new InvalidChildPolicyConfigException(
"childPolicy should have exactly one loadbalancing policy");
}
String policyName = childPolicy.keySet().iterator().next();
LoadBalancerProvider provider = lbRegistry.getProvider(policyName);
if (provider != null) {
effectiveLbProvider = provider;
effectiveChildPolicy = Collections.unmodifiableMap(childPolicy);
break;
}
policyTried.add(policyName);
}
if (effectiveChildPolicy == null) {
throw
new InvalidChildPolicyConfigException(
String.format("no valid childPolicy found, policy tried: %s", policyTried));
}
return
new ChildLoadBalancingPolicy(
childPolicyConfigTargetFieldName,
(Map<String, Object>) effectiveChildPolicy.values().iterator().next(),
effectiveLbProvider);
}
/** Creates a child load balancer config for given target from elected raw child policy. */
Map<String, ?> getEffectiveChildPolicy(String target) {
Map<String, Object> childPolicy = new HashMap<>(effectiveRawChildPolicy);
childPolicy.put(targetFieldName, target);
return childPolicy;
}
/** Returns the elected child {@link LoadBalancerProvider}. */
LoadBalancerProvider getEffectiveLbProvider() {
return effectiveLbProvider;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ChildLoadBalancingPolicy that = (ChildLoadBalancingPolicy) o;
return Objects.equals(effectiveRawChildPolicy, that.effectiveRawChildPolicy)
&& Objects.equals(effectiveLbProvider, that.effectiveLbProvider)
&& Objects.equals(targetFieldName, that.targetFieldName);
}
@Override
public int hashCode() {
return Objects.hash(effectiveRawChildPolicy, effectiveLbProvider, targetFieldName);
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("effectiveRawChildPolicy", effectiveRawChildPolicy)
.add("effectiveLbProvider", effectiveLbProvider)
.add("childPolicyConfigTargetFieldName", targetFieldName)
.toString();
}
}
/** Factory for {@link ChildPolicyWrapper}. Not thread-safe. */
static final | ChildLoadBalancingPolicy |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/common/util/CancellableThreadsTests.java | {
"start": 1382,
"end": 2082
} | class ____ {
public final int id;
public final boolean busySpin;
public final boolean exceptBeforeCancel;
public final boolean exitBeforeCancel;
public final boolean exceptAfterCancel;
public final boolean presetInterrupt;
public final boolean ioOp;
private TestPlan(int id) {
this.id = id;
this.busySpin = randomBoolean();
this.exceptBeforeCancel = randomBoolean();
this.exitBeforeCancel = randomBoolean();
this.exceptAfterCancel = randomBoolean();
this.presetInterrupt = randomBoolean();
this.ioOp = randomBoolean();
}
}
static | TestPlan |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/request/v2/CohereV2RerankRequest.java | {
"start": 1201,
"end": 3415
} | class ____ extends CohereRequest {
private final String query;
private final List<String> input;
private final Boolean returnDocuments;
private final Integer topN;
private final CohereRerankTaskSettings taskSettings;
public CohereV2RerankRequest(
String query,
List<String> input,
@Nullable Boolean returnDocuments,
@Nullable Integer topN,
CohereRerankModel model
) {
super(CohereAccount.of(model), model.getInferenceEntityId(), Objects.requireNonNull(model.getServiceSettings().modelId()), false);
this.input = Objects.requireNonNull(input);
this.query = Objects.requireNonNull(query);
this.returnDocuments = returnDocuments;
this.topN = topN;
taskSettings = model.getTaskSettings();
}
@Override
protected List<String> pathSegments() {
return List.of(CohereUtils.VERSION_2, CohereUtils.RERANK_PATH);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(MODEL_FIELD, getModelId());
builder.field(QUERY_FIELD, query);
builder.field(DOCUMENTS_FIELD, input);
// prefer the root level return_documents over task settings
if (returnDocuments != null) {
builder.field(CohereRerankTaskSettings.RETURN_DOCUMENTS, returnDocuments);
} else if (taskSettings.getDoesReturnDocuments() != null) {
builder.field(CohereRerankTaskSettings.RETURN_DOCUMENTS, taskSettings.getDoesReturnDocuments());
}
// prefer the root level top_n over task settings
if (topN != null) {
builder.field(CohereRerankTaskSettings.TOP_N_DOCS_ONLY, topN);
} else if (taskSettings.getTopNDocumentsOnly() != null) {
builder.field(CohereRerankTaskSettings.TOP_N_DOCS_ONLY, taskSettings.getTopNDocumentsOnly());
}
if (taskSettings.getMaxChunksPerDoc() != null) {
builder.field(CohereRerankTaskSettings.MAX_CHUNKS_PER_DOC, taskSettings.getMaxChunksPerDoc());
}
builder.endObject();
return builder;
}
}
| CohereV2RerankRequest |
java | spring-projects__spring-boot | module/spring-boot-tomcat/src/test/java/org/springframework/boot/tomcat/autoconfigure/reactive/TomcatReactiveWebServerAutoConfigurationTests.java | {
"start": 7956,
"end": 8199
} | class ____ {
@Bean
TomcatProtocolHandlerCustomizer<?> protocolHandlerCustomizer() {
return mock(TomcatProtocolHandlerCustomizer.class);
}
}
@Configuration(proxyBeanMethods = false)
static | TomcatProtocolHandlerCustomizerConfiguration |
java | apache__spark | sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/HiveSessionImpl.java | {
"start": 7551,
"end": 32481
} | class ____ extends HiveFileProcessor {
@Override
protected BufferedReader loadFile(String fileName) throws IOException {
FileInputStream initStream = null;
BufferedReader bufferedReader = null;
initStream = new FileInputStream(fileName);
bufferedReader = new BufferedReader(new InputStreamReader(initStream, StandardCharsets.UTF_8));
return bufferedReader;
}
@Override
protected int processCmd(String cmd) {
int rc = 0;
String cmd_trimmed = cmd.trim();
try {
executeStatementInternal(cmd_trimmed, null, false, 0);
} catch (HiveSQLException e) {
rc = -1;
LOG.warn("Failed to execute HQL command in global .hiverc file.", e);
}
return rc;
}
}
private void processGlobalInitFile() {
IHiveFileProcessor processor = new GlobalHivercFileProcessor();
try {
String hiverc = hiveConf.getVar(ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION);
if (hiverc != null) {
File hivercFile = new File(hiverc);
if (hivercFile.isDirectory()) {
hivercFile = new File(hivercFile, SessionManager.HIVERCFILE);
}
if (hivercFile.isFile()) {
LOG.info("Running global init file: {}",
MDC.of(LogKeys.GLOBAL_INIT_FILE, hivercFile));
int rc = processor.processFile(hivercFile.getAbsolutePath());
if (rc != 0) {
LOG.error("Failed on initializing global .hiverc file");
}
} else {
LOG.debug("Global init file " + hivercFile + " does not exist");
}
}
} catch (IOException e) {
LOG.warn("Failed on initializing global .hiverc file", e);
}
}
private void configureSession(Map<String, String> sessionConfMap) throws HiveSQLException {
SessionState.setCurrentSessionState(sessionState);
for (Map.Entry<String, String> entry : sessionConfMap.entrySet()) {
String key = entry.getKey();
if (key.startsWith("set:")) {
try {
setVariable(key.substring(4), entry.getValue());
} catch (Exception e) {
throw new HiveSQLException(e);
}
} else if (key.startsWith("use:")) {
SessionState.get().setCurrentDatabase(entry.getValue());
} else {
hiveConf.verifyAndSet(key, entry.getValue());
}
}
}
// Copy from org.apache.hadoop.hive.ql.processors.SetProcessor, only change:
// setConf(varname, propName, varvalue, true) when varname.startsWith(HIVECONF_PREFIX)
public static int setVariable(String varname, String varvalue) throws Exception {
SessionState ss = SessionState.get();
VariableSubstitution substitution = new VariableSubstitution(() -> ss.getHiveVariables());
if (varvalue.contains("\n")){
ss.err.println("Warning: Value had a \\n character in it.");
}
varname = varname.trim();
if (varname.startsWith(ENV_PREFIX)){
ss.err.println("env:* variables can not be set.");
return 1;
} else if (varname.startsWith(SYSTEM_PREFIX)){
String propName = varname.substring(SYSTEM_PREFIX.length());
System.getProperties().setProperty(propName, substitution.substitute(ss.getConf(),varvalue));
} else if (varname.startsWith(HIVECONF_PREFIX)){
String propName = varname.substring(HIVECONF_PREFIX.length());
setConf(varname, propName, varvalue, true);
} else if (varname.startsWith(HIVEVAR_PREFIX)) {
String propName = varname.substring(HIVEVAR_PREFIX.length());
ss.getHiveVariables().put(propName, substitution.substitute(ss.getConf(),varvalue));
} else if (varname.startsWith(METACONF_PREFIX)) {
String propName = varname.substring(METACONF_PREFIX.length());
Hive hive = Hive.getWithoutRegisterFns(ss.getConf());
hive.setMetaConf(propName, substitution.substitute(ss.getConf(), varvalue));
} else {
setConf(varname, varname, varvalue, true);
}
return 0;
}
// returns non-null string for validation fail
private static void setConf(String varname, String key, String varvalue, boolean register)
throws IllegalArgumentException {
VariableSubstitution substitution =
new VariableSubstitution(() -> SessionState.get().getHiveVariables());
HiveConf conf = SessionState.get().getConf();
String value = substitution.substitute(conf, varvalue);
if (conf.getBoolVar(HiveConf.ConfVars.HIVECONFVALIDATION)) {
HiveConf.ConfVars confVars = HiveConf.getConfVars(key);
if (confVars != null) {
if (!confVars.isType(value)) {
StringBuilder message = new StringBuilder();
message.append("'SET ").append(varname).append('=').append(varvalue);
message.append("' FAILED because ").append(key).append(" expects ");
message.append(confVars.typeString()).append(" type value.");
throw new IllegalArgumentException(message.toString());
}
String fail = confVars.validate(value);
if (fail != null) {
StringBuilder message = new StringBuilder();
message.append("'SET ").append(varname).append('=').append(varvalue);
message.append("' FAILED in validation : ").append(fail).append('.');
throw new IllegalArgumentException(message.toString());
}
} else if (key.startsWith("hive.")) {
throw new IllegalArgumentException("hive configuration " + key + " does not exists.");
}
}
conf.verifyAndSet(key, value);
if (register) {
SessionState.get().getOverriddenConfigurations().put(key, value);
}
}
@Override
public void setOperationLogSessionDir(File operationLogRootDir) {
if (!operationLogRootDir.exists()) {
LOG.warn("The operation log root directory is removed, recreating: {}",
MDC.of(LogKeys.PATH, operationLogRootDir.getAbsolutePath()));
if (!Utils.createDirectory(operationLogRootDir)) {
LOG.warn("Unable to create operation log root directory: {}",
MDC.of(LogKeys.PATH, operationLogRootDir.getAbsolutePath()));
}
}
if (!operationLogRootDir.canWrite()) {
LOG.warn("The operation log root directory is not writable: {}",
MDC.of(LogKeys.PATH, operationLogRootDir.getAbsolutePath()));
}
sessionLogDir = new File(operationLogRootDir, sessionHandle.getHandleIdentifier().toString());
isOperationLogEnabled = true;
if (!sessionLogDir.exists()) {
if (!sessionLogDir.mkdir()) {
LOG.warn("Unable to create operation log session directory: {}",
MDC.of(LogKeys.PATH, sessionLogDir.getAbsolutePath()));
isOperationLogEnabled = false;
}
}
if (isOperationLogEnabled) {
LOG.info("Operation log session directory is created: {}",
MDC.of(LogKeys.PATH, sessionLogDir.getAbsolutePath()));
}
}
@Override
public boolean isOperationLogEnabled() {
return isOperationLogEnabled;
}
@Override
public File getOperationLogSessionDir() {
return sessionLogDir;
}
@Override
public TProtocolVersion getProtocolVersion() {
return sessionHandle.getProtocolVersion();
}
@Override
public SessionManager getSessionManager() {
return sessionManager;
}
@Override
public void setSessionManager(SessionManager sessionManager) {
this.sessionManager = sessionManager;
}
private OperationManager getOperationManager() {
return operationManager;
}
@Override
public void setOperationManager(OperationManager operationManager) {
this.operationManager = operationManager;
}
protected synchronized void acquire(boolean userAccess) {
// Need to make sure that the this HiveServer2's session's SessionState is
// stored in the thread local for the handler thread.
SessionState.setCurrentSessionState(sessionState);
if (userAccess) {
lastAccessTime = System.currentTimeMillis();
}
}
/**
* 1. We'll remove the ThreadLocal SessionState as this thread might now serve
* other requests.
* 2. We'll cache the ThreadLocal RawStore object for this background thread for an orderly cleanup
* when this thread is garbage collected later.
* @see org.apache.hive.service.server.ThreadWithGarbageCleanup#finalize()
*/
protected synchronized void release(boolean userAccess) {
SessionState.detachSession();
if (ThreadWithGarbageCleanup.currentThread() instanceof ThreadWithGarbageCleanup) {
ThreadWithGarbageCleanup currentThread =
(ThreadWithGarbageCleanup) ThreadWithGarbageCleanup.currentThread();
currentThread.cacheThreadLocalRawStore();
}
if (userAccess) {
lastAccessTime = System.currentTimeMillis();
}
if (opHandleSet.isEmpty()) {
lastIdleTime = System.currentTimeMillis();
} else {
lastIdleTime = 0;
}
}
@Override
public SessionHandle getSessionHandle() {
return sessionHandle;
}
@Override
public String getUsername() {
return username;
}
@Override
public String getPassword() {
return password;
}
@Override
public HiveConf getHiveConf() {
hiveConf.setVar(HiveConf.getConfVars("hive.fetch.output.serde"), FETCH_WORK_SERDE_CLASS);
return hiveConf;
}
@Override
public IMetaStoreClient getMetaStoreClient() throws HiveSQLException {
try {
return Hive.getWithoutRegisterFns(getHiveConf()).getMSC();
} catch (HiveException e) {
throw new HiveSQLException("Failed to get metastore connection", e);
} catch (MetaException e) {
throw new HiveSQLException("Failed to get metastore connection", e);
}
}
@Override
public GetInfoValue getInfo(GetInfoType getInfoType)
throws HiveSQLException {
acquire(true);
try {
switch (getInfoType) {
case CLI_SERVER_NAME:
return new GetInfoValue("Hive");
case CLI_DBMS_NAME:
return new GetInfoValue("Apache Hive");
case CLI_DBMS_VER:
return new GetInfoValue(HiveVersionInfo.getVersion());
case CLI_MAX_COLUMN_NAME_LEN:
return new GetInfoValue(128);
case CLI_MAX_SCHEMA_NAME_LEN:
return new GetInfoValue(128);
case CLI_MAX_TABLE_NAME_LEN:
return new GetInfoValue(128);
case CLI_TXN_CAPABLE:
default:
throw new HiveSQLException("Unrecognized GetInfoType value: " + getInfoType.toString());
}
} finally {
release(true);
}
}
@Override
public OperationHandle executeStatement(String statement, Map<String, String> confOverlay)
throws HiveSQLException {
return executeStatementInternal(statement, confOverlay, false, 0);
}
@Override
public OperationHandle executeStatement(String statement, Map<String, String> confOverlay,
long queryTimeout) throws HiveSQLException {
return executeStatementInternal(statement, confOverlay, false, queryTimeout);
}
@Override
public OperationHandle executeStatementAsync(String statement, Map<String, String> confOverlay)
throws HiveSQLException {
return executeStatementInternal(statement, confOverlay, true, 0);
}
@Override
public OperationHandle executeStatementAsync(String statement, Map<String, String> confOverlay,
long queryTimeout) throws HiveSQLException {
return executeStatementInternal(statement, confOverlay, true, queryTimeout);
}
private OperationHandle executeStatementInternal(String statement,
Map<String, String> confOverlay, boolean runAsync, long queryTimeout) throws HiveSQLException {
acquire(true);
OperationManager operationManager = getOperationManager();
ExecuteStatementOperation operation = operationManager
.newExecuteStatementOperation(getSession(), statement, confOverlay, runAsync, queryTimeout);
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
// Referring to SQLOperation.java, there is no chance that a HiveSQLException throws and the asyn
// background operation submits to thread pool successfully at the same time. So, Cleanup
// opHandle directly when got HiveSQLException
operationManager.closeOperation(opHandle);
throw e;
} finally {
release(true);
}
}
@Override
public OperationHandle getTypeInfo()
throws HiveSQLException {
acquire(true);
OperationManager operationManager = getOperationManager();
GetTypeInfoOperation operation = operationManager.newGetTypeInfoOperation(getSession());
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
operationManager.closeOperation(opHandle);
throw e;
} finally {
release(true);
}
}
@Override
public OperationHandle getCatalogs()
throws HiveSQLException {
acquire(true);
OperationManager operationManager = getOperationManager();
GetCatalogsOperation operation = operationManager.newGetCatalogsOperation(getSession());
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
operationManager.closeOperation(opHandle);
throw e;
} finally {
release(true);
}
}
@Override
public OperationHandle getSchemas(String catalogName, String schemaName)
throws HiveSQLException {
acquire(true);
OperationManager operationManager = getOperationManager();
GetSchemasOperation operation =
operationManager.newGetSchemasOperation(getSession(), catalogName, schemaName);
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
operationManager.closeOperation(opHandle);
throw e;
} finally {
release(true);
}
}
@Override
public OperationHandle getTables(String catalogName, String schemaName, String tableName,
List<String> tableTypes)
throws HiveSQLException {
acquire(true);
OperationManager operationManager = getOperationManager();
MetadataOperation operation =
operationManager.newGetTablesOperation(getSession(), catalogName, schemaName, tableName, tableTypes);
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
operationManager.closeOperation(opHandle);
throw e;
} finally {
release(true);
}
}
@Override
public OperationHandle getTableTypes()
throws HiveSQLException {
acquire(true);
OperationManager operationManager = getOperationManager();
GetTableTypesOperation operation = operationManager.newGetTableTypesOperation(getSession());
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
operationManager.closeOperation(opHandle);
throw e;
} finally {
release(true);
}
}
@Override
public OperationHandle getColumns(String catalogName, String schemaName,
String tableName, String columnName) throws HiveSQLException {
acquire(true);
String addedJars = Utilities.getResourceFiles(hiveConf, SessionState.ResourceType.JAR);
if (Utils.isNotBlank(addedJars)) {
IMetaStoreClient metastoreClient = getSession().getMetaStoreClient();
metastoreClient.setHiveAddedJars(addedJars);
}
OperationManager operationManager = getOperationManager();
GetColumnsOperation operation = operationManager.newGetColumnsOperation(getSession(),
catalogName, schemaName, tableName, columnName);
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
operationManager.closeOperation(opHandle);
throw e;
} finally {
release(true);
}
}
@Override
public OperationHandle getFunctions(String catalogName, String schemaName, String functionName)
throws HiveSQLException {
acquire(true);
OperationManager operationManager = getOperationManager();
GetFunctionsOperation operation = operationManager
.newGetFunctionsOperation(getSession(), catalogName, schemaName, functionName);
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
operationManager.closeOperation(opHandle);
throw e;
} finally {
release(true);
}
}
@Override
public void close() throws HiveSQLException {
try {
acquire(true);
// Iterate through the opHandles and close their operations
for (OperationHandle opHandle : opHandleSet) {
try {
operationManager.closeOperation(opHandle);
} catch (Exception e) {
LOG.warn("Exception is thrown closing operation {}", e,
MDC.of(LogKeys.OPERATION_HANDLE, opHandle));
}
}
opHandleSet.clear();
// Cleanup session log directory.
cleanupSessionLogDir();
// Cleanup pipeout file.
cleanupPipeoutFile();
HiveHistory hiveHist = sessionState.getHiveHistory();
if (null != hiveHist) {
hiveHist.closeStream();
}
try {
// Forcibly initialize thread local Hive so that
// SessionState#unCacheDataNucleusClassLoaders won't trigger
// Hive built-in UDFs initialization.
Hive.getWithoutRegisterFns(sessionState.getConf());
sessionState.close();
} finally {
sessionState = null;
}
} catch (IOException | HiveException ioe) {
throw new HiveSQLException("Failure to close", ioe);
} finally {
if (sessionState != null) {
try {
// Forcibly initialize thread local Hive so that
// SessionState#unCacheDataNucleusClassLoaders won't trigger
// Hive built-in UDFs initialization.
Hive.getWithoutRegisterFns(sessionState.getConf());
sessionState.close();
} catch (Throwable t) {
LOG.warn("Error closing session", t);
}
sessionState = null;
}
release(true);
}
}
private void cleanupPipeoutFile() {
String lScratchDir = hiveConf.getVar(HiveConf.getConfVars("hive.exec.local.scratchdir"));
String sessionID = hiveConf.getVar(HiveConf.getConfVars("hive.session.id"));
File[] fileAry = new File(lScratchDir).listFiles(
(dir, name) -> name.startsWith(sessionID) && name.endsWith(".pipeout"));
if (fileAry == null) {
LOG.error("Unable to access pipeout files in {}",
MDC.of(LogKeys.LOCAL_SCRATCH_DIR, lScratchDir));
} else {
for (File file : fileAry) {
try {
JavaUtils.deleteRecursively(file);
} catch (Exception e) {
LOG.error("Failed to cleanup pipeout file: {}", e, MDC.of(LogKeys.PATH, file));
}
}
}
}
private void cleanupSessionLogDir() {
if (isOperationLogEnabled) {
try {
JavaUtils.deleteRecursively(sessionLogDir);
} catch (Exception e) {
LOG.error("Failed to cleanup session log dir: {}", e,
MDC.of(LogKeys.SESSION_HANDLE, sessionHandle));
}
}
}
@Override
public SessionState getSessionState() {
return sessionState;
}
@Override
public String getUserName() {
return username;
}
@Override
public void setUserName(String userName) {
this.username = userName;
}
@Override
public long getLastAccessTime() {
return lastAccessTime;
}
@Override
public void closeExpiredOperations() {
OperationHandle[] handles = opHandleSet.toArray(new OperationHandle[opHandleSet.size()]);
if (handles.length > 0) {
List<Operation> operations = operationManager.removeExpiredOperations(handles);
if (!operations.isEmpty()) {
closeTimedOutOperations(operations);
}
}
}
@Override
public long getNoOperationTime() {
return lastIdleTime > 0 ? System.currentTimeMillis() - lastIdleTime : 0;
}
private void closeTimedOutOperations(List<Operation> operations) {
acquire(false);
try {
for (Operation operation : operations) {
opHandleSet.remove(operation.getHandle());
try {
operation.close();
} catch (Exception e) {
LOG.warn("Exception is thrown closing timed-out operation {}", e,
MDC.of(LogKeys.OPERATION_HANDLE, operation.getHandle()));
}
}
} finally {
release(false);
}
}
@Override
public void cancelOperation(OperationHandle opHandle) throws HiveSQLException {
acquire(true);
try {
sessionManager.getOperationManager().cancelOperation(opHandle);
} finally {
release(true);
}
}
@Override
public void closeOperation(OperationHandle opHandle) throws HiveSQLException {
acquire(true);
try {
operationManager.closeOperation(opHandle);
opHandleSet.remove(opHandle);
} finally {
release(true);
}
}
@Override
public TTableSchema getResultSetMetadata(OperationHandle opHandle) throws HiveSQLException {
acquire(true);
try {
return sessionManager.getOperationManager().getOperationResultSetSchema(opHandle);
} finally {
release(true);
}
}
@Override
public TRowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation,
long maxRows, FetchType fetchType) throws HiveSQLException {
acquire(true);
try {
if (fetchType == FetchType.QUERY_OUTPUT) {
return operationManager.getOperationNextRowSet(opHandle, orientation, maxRows);
}
return operationManager.getOperationLogRowSet(opHandle, orientation, maxRows);
} finally {
release(true);
}
}
protected HiveSession getSession() {
return this;
}
@Override
public String getIpAddress() {
return ipAddress;
}
@Override
public void setIpAddress(String ipAddress) {
this.ipAddress = ipAddress;
}
@Override
public String getDelegationToken(HiveAuthFactory authFactory, String owner, String renewer)
throws HiveSQLException {
HiveAuthFactory.verifyProxyAccess(getUsername(), owner, getIpAddress(), getHiveConf());
return authFactory.getDelegationToken(owner, renewer, getIpAddress());
}
@Override
public void cancelDelegationToken(HiveAuthFactory authFactory, String tokenStr)
throws HiveSQLException {
HiveAuthFactory.verifyProxyAccess(getUsername(), getUserFromToken(authFactory, tokenStr),
getIpAddress(), getHiveConf());
authFactory.cancelDelegationToken(tokenStr);
}
@Override
public void renewDelegationToken(HiveAuthFactory authFactory, String tokenStr)
throws HiveSQLException {
HiveAuthFactory.verifyProxyAccess(getUsername(), getUserFromToken(authFactory, tokenStr),
getIpAddress(), getHiveConf());
authFactory.renewDelegationToken(tokenStr);
}
// extract the real user from the given token string
private String getUserFromToken(HiveAuthFactory authFactory, String tokenStr) throws HiveSQLException {
return authFactory.getUserFromToken(tokenStr);
}
@Override
public OperationHandle getPrimaryKeys(String catalog, String schema,
String table) throws HiveSQLException {
acquire(true);
OperationManager operationManager = getOperationManager();
GetPrimaryKeysOperation operation = operationManager
.newGetPrimaryKeysOperation(getSession(), catalog, schema, table);
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
operationManager.closeOperation(opHandle);
throw e;
} finally {
release(true);
}
}
@Override
public OperationHandle getCrossReference(String primaryCatalog,
String primarySchema, String primaryTable, String foreignCatalog,
String foreignSchema, String foreignTable) throws HiveSQLException {
acquire(true);
OperationManager operationManager = getOperationManager();
GetCrossReferenceOperation operation = operationManager
.newGetCrossReferenceOperation(getSession(), primaryCatalog,
primarySchema, primaryTable, foreignCatalog,
foreignSchema, foreignTable);
OperationHandle opHandle = operation.getHandle();
try {
operation.run();
opHandleSet.add(opHandle);
return opHandle;
} catch (HiveSQLException e) {
operationManager.closeOperation(opHandle);
throw e;
} finally {
release(true);
}
}
}
| GlobalHivercFileProcessor |
java | elastic__elasticsearch | modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentParser.java | {
"start": 1007,
"end": 10788
} | class ____ {
private final UserAgentCache cache;
private final DeviceTypeParser deviceTypeParser = new DeviceTypeParser();
private final List<UserAgentSubpattern> uaPatterns = new ArrayList<>();
private final List<UserAgentSubpattern> osPatterns = new ArrayList<>();
private final List<UserAgentSubpattern> devicePatterns = new ArrayList<>();
private final String name;
UserAgentParser(String name, InputStream regexStream, InputStream deviceTypeRegexStream, UserAgentCache cache) {
this.name = name;
this.cache = cache;
try {
init(regexStream);
if (deviceTypeRegexStream != null) {
deviceTypeParser.init(deviceTypeRegexStream);
}
} catch (IOException e) {
throw new ElasticsearchParseException("error parsing regular expression file", e);
}
}
private void init(InputStream regexStream) throws IOException {
// EMPTY is safe here because we don't use namedObject
try (
XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML)
.createParser(XContentParserConfiguration.EMPTY, regexStream)
) {
XContentParser.Token token = yamlParser.nextToken();
if (token == XContentParser.Token.START_OBJECT) {
token = yamlParser.nextToken();
for (; token != null; token = yamlParser.nextToken()) {
if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("user_agent_parsers")) {
List<Map<String, String>> parserConfigurations = readParserConfigurations(yamlParser);
for (Map<String, String> map : parserConfigurations) {
uaPatterns.add(
new UserAgentSubpattern(
compilePattern(map.get("regex"), map.get("regex_flag")),
map.get("family_replacement"),
map.get("v1_replacement"),
map.get("v2_replacement"),
map.get("v3_replacement"),
map.get("v4_replacement")
)
);
}
} else if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("os_parsers")) {
List<Map<String, String>> parserConfigurations = readParserConfigurations(yamlParser);
for (Map<String, String> map : parserConfigurations) {
osPatterns.add(
new UserAgentSubpattern(
compilePattern(map.get("regex"), map.get("regex_flag")),
map.get("os_replacement"),
map.get("os_v1_replacement"),
map.get("os_v2_replacement"),
map.get("os_v3_replacement"),
map.get("os_v4_replacement")
)
);
}
} else if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("device_parsers")) {
List<Map<String, String>> parserConfigurations = readParserConfigurations(yamlParser);
for (Map<String, String> map : parserConfigurations) {
devicePatterns.add(
new UserAgentSubpattern(
compilePattern(map.get("regex"), map.get("regex_flag")),
map.get("device_replacement"),
null,
null,
null,
null
)
);
}
}
}
}
}
if (uaPatterns.isEmpty() && osPatterns.isEmpty() && devicePatterns.isEmpty()) {
throw new ElasticsearchParseException("not a valid regular expression file");
}
}
private static Pattern compilePattern(String regex, String regex_flag) {
// Only flag present in the current default regexes.yaml
if (regex_flag != null && regex_flag.equals("i")) {
return Pattern.compile(regex, Pattern.CASE_INSENSITIVE);
} else {
return Pattern.compile(regex);
}
}
static List<Map<String, String>> readParserConfigurations(XContentParser yamlParser) throws IOException {
List<Map<String, String>> patternList = new ArrayList<>();
XContentParser.Token token = yamlParser.nextToken();
if (token != XContentParser.Token.START_ARRAY) {
throw new ElasticsearchParseException("malformed regular expression file, should continue with 'array' after 'object'");
}
token = yamlParser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new ElasticsearchParseException("malformed regular expression file, expecting 'object'");
}
while (token == XContentParser.Token.START_OBJECT) {
token = yamlParser.nextToken();
if (token != XContentParser.Token.FIELD_NAME) {
throw new ElasticsearchParseException("malformed regular expression file, should continue with 'field_name' after 'array'");
}
Map<String, String> regexMap = new HashMap<>();
for (; token == XContentParser.Token.FIELD_NAME; token = yamlParser.nextToken()) {
String fieldName = yamlParser.currentName();
token = yamlParser.nextToken();
String fieldValue = yamlParser.text();
regexMap.put(fieldName, fieldValue);
}
patternList.add(regexMap);
token = yamlParser.nextToken();
}
return patternList;
}
List<UserAgentSubpattern> getUaPatterns() {
return uaPatterns;
}
List<UserAgentSubpattern> getOsPatterns() {
return osPatterns;
}
List<UserAgentSubpattern> getDevicePatterns() {
return devicePatterns;
}
String getName() {
return name;
}
public Details parse(String agentString, boolean extractDeviceType) {
Details details = cache.get(name, agentString);
if (details == null) {
VersionedName userAgent = findMatch(uaPatterns, agentString);
VersionedName operatingSystem = findMatch(osPatterns, agentString);
VersionedName device = findMatch(devicePatterns, agentString);
String deviceType = extractDeviceType ? deviceTypeParser.findDeviceType(agentString, userAgent, operatingSystem, device) : null;
details = new Details(userAgent, operatingSystem, device, deviceType);
cache.put(name, agentString, details);
}
return details;
}
private static VersionedName findMatch(List<UserAgentSubpattern> possiblePatterns, String agentString) {
VersionedName versionedName;
for (UserAgentSubpattern pattern : possiblePatterns) {
versionedName = pattern.match(agentString);
if (versionedName != null) {
return versionedName;
}
}
return null;
}
record Details(VersionedName userAgent, VersionedName operatingSystem, VersionedName device, String deviceType) {}
record VersionedName(String name, String major, String minor, String patch, String build) {}
/**
* One of: user agent, operating system, device
*/
record UserAgentSubpattern(
Pattern pattern,
String nameReplacement,
String v1Replacement,
String v2Replacement,
String v3Replacement,
String v4Replacement
) {
public VersionedName match(String agentString) {
String name = null, major = null, minor = null, patch = null, build = null;
Matcher matcher = pattern.matcher(agentString);
if (matcher.find() == false) {
return null;
}
int groupCount = matcher.groupCount();
if (nameReplacement != null) {
if (nameReplacement.contains("$1") && groupCount >= 1 && matcher.group(1) != null) {
name = nameReplacement.replaceFirst("\\$1", Matcher.quoteReplacement(matcher.group(1)));
} else {
name = nameReplacement;
}
} else if (groupCount >= 1) {
name = matcher.group(1);
}
if (v1Replacement != null) {
major = v1Replacement;
} else if (groupCount >= 2) {
major = matcher.group(2);
}
if (v2Replacement != null) {
minor = v2Replacement;
} else if (groupCount >= 3) {
minor = matcher.group(3);
}
if (v3Replacement != null) {
patch = v3Replacement;
} else if (groupCount >= 4) {
patch = matcher.group(4);
}
if (v4Replacement != null) {
build = v4Replacement;
} else if (groupCount >= 5) {
build = matcher.group(5);
}
return name == null ? null : new VersionedName(name, major, minor, patch, build);
}
}
}
| UserAgentParser |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/onetomany/BankAccount.java | {
"start": 475,
"end": 1183
} | class ____ {
@Id
@GeneratedValue
private long id;
@OneToMany(mappedBy = "account", cascade = CascadeType.ALL)
@OrderColumn(name = "transactions_index")
private List<Transaction> transactions;
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public List<Transaction> getTransactions() {
return transactions;
}
public void setTransactions(List<Transaction> transactions) {
this.transactions = transactions;
}
public void addTransaction(String code) {
if ( transactions == null ) {
transactions = new ArrayList<>();
}
Transaction transaction = new Transaction();
transaction.setCode( code );
transactions.add( transaction );
}
}
| BankAccount |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/aspectj/annotation/AspectJBeanFactoryInitializationAotProcessor.java | {
"start": 2132,
"end": 2209
} | class ____ avoid a hard dependency on AspectJ at runtime.
*/
private static | to |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobSummary.java | {
"start": 965,
"end": 6773
} | class ____ {
private JobId jobId;
private long jobSubmitTime;
private long jobLaunchTime;
private long firstMapTaskLaunchTime; // MapAttempteStarted |
// TaskAttemptStartEvent
private long firstReduceTaskLaunchTime; // ReduceAttemptStarted |
// TaskAttemptStartEvent
private long jobFinishTime;
private int numSucceededMaps;
private int numFailedMaps;
private int numSucceededReduces;
private int numFailedReduces;
private int numKilledMaps;
private int numKilledReduces;
private int resourcesPerMap; // resources used per map/min resource
private int resourcesPerReduce; // resources used per reduce/min resource
// resource models
// private int numSlotsPerReduce; | Doesn't make sense with potentially
// different resource models
private String user;
private String queue;
private String jobStatus;
private long mapSlotSeconds; // TODO Not generated yet in MRV2
private long reduceSlotSeconds; // TODO Not generated yet MRV2
// private int clusterSlotCapacity;
private String jobName;
JobSummary() {
}
public JobId getJobId() {
return jobId;
}
public void setJobId(JobId jobId) {
this.jobId = jobId;
}
public long getJobSubmitTime() {
return jobSubmitTime;
}
public void setJobSubmitTime(long jobSubmitTime) {
this.jobSubmitTime = jobSubmitTime;
}
public long getJobLaunchTime() {
return jobLaunchTime;
}
public void setJobLaunchTime(long jobLaunchTime) {
this.jobLaunchTime = jobLaunchTime;
}
public long getFirstMapTaskLaunchTime() {
return firstMapTaskLaunchTime;
}
public void setFirstMapTaskLaunchTime(long firstMapTaskLaunchTime) {
this.firstMapTaskLaunchTime = firstMapTaskLaunchTime;
}
public long getFirstReduceTaskLaunchTime() {
return firstReduceTaskLaunchTime;
}
public void setFirstReduceTaskLaunchTime(long firstReduceTaskLaunchTime) {
this.firstReduceTaskLaunchTime = firstReduceTaskLaunchTime;
}
public long getJobFinishTime() {
return jobFinishTime;
}
public void setJobFinishTime(long jobFinishTime) {
this.jobFinishTime = jobFinishTime;
}
public int getNumSucceededMaps() {
return numSucceededMaps;
}
public void setNumSucceededMaps(int numSucceededMaps) {
this.numSucceededMaps = numSucceededMaps;
}
public int getNumFailedMaps() {
return numFailedMaps;
}
public void setNumFailedMaps(int numFailedMaps) {
this.numFailedMaps = numFailedMaps;
}
public int getKilledMaps() {
return numKilledMaps;
}
public void setNumKilledMaps(int numKilledMaps) {
this.numKilledMaps = numKilledMaps;
}
public int getKilledReduces() {
return numKilledReduces;
}
public void setNumKilledReduces(int numKilledReduces) {
this.numKilledReduces = numKilledReduces;
}
public int getResourcesPerMap() {
return resourcesPerMap;
}
public void setResourcesPerMap(int resourcesPerMap) {
this.resourcesPerMap = resourcesPerMap;
}
public int getNumSucceededReduces() {
return numSucceededReduces;
}
public void setNumSucceededReduces(int numSucceededReduces) {
this.numSucceededReduces = numSucceededReduces;
}
public int getNumFailedReduces() {
return numFailedReduces;
}
public void setNumFailedReduces(int numFailedReduces) {
this.numFailedReduces = numFailedReduces;
}
public int getResourcesPerReduce() {
return this.resourcesPerReduce;
}
public void setResourcesPerReduce(int resourcesPerReduce) {
this.resourcesPerReduce = resourcesPerReduce;
}
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
public String getQueue() {
return queue;
}
public void setQueue(String queue) {
this.queue = queue;
}
public String getJobStatus() {
return jobStatus;
}
public void setJobStatus(String jobStatus) {
this.jobStatus = jobStatus;
}
public long getMapSlotSeconds() {
return mapSlotSeconds;
}
public void setMapSlotSeconds(long mapSlotSeconds) {
this.mapSlotSeconds = mapSlotSeconds;
}
public long getReduceSlotSeconds() {
return reduceSlotSeconds;
}
public void setReduceSlotSeconds(long reduceSlotSeconds) {
this.reduceSlotSeconds = reduceSlotSeconds;
}
public String getJobName() {
return jobName;
}
public void setJobName(String jobName) {
this.jobName = jobName;
}
public String getJobSummaryString() {
SummaryBuilder summary = new SummaryBuilder()
.add("jobId", jobId)
.add("submitTime", jobSubmitTime)
.add("launchTime", jobLaunchTime)
.add("firstMapTaskLaunchTime", firstMapTaskLaunchTime)
.add("firstReduceTaskLaunchTime", firstReduceTaskLaunchTime)
.add("finishTime", jobFinishTime)
.add("resourcesPerMap", resourcesPerMap)
.add("resourcesPerReduce", resourcesPerReduce)
.add("numMaps", numSucceededMaps + numFailedMaps + numKilledMaps)
.add("numReduces", numSucceededReduces + numFailedReduces
+ numKilledReduces)
.add("succededMaps", numSucceededMaps)
.add("succeededReduces", numSucceededReduces)
.add("failedMaps", numFailedMaps)
.add("failedReduces", numFailedReduces)
.add("killedMaps", numKilledMaps)
.add("killedReduces", numKilledReduces)
.add("user", user)
.add("queue", queue)
.add("status", jobStatus)
.add("mapSlotSeconds", mapSlotSeconds)
.add("reduceSlotSeconds", reduceSlotSeconds)
.add("jobName", jobName);
return summary.toString();
}
static final char EQUALS = '=';
static final char[] charsToEscape = { StringUtils.COMMA, EQUALS,
StringUtils.ESCAPE_CHAR };
static | JobSummary |
java | apache__camel | components/camel-barcode/src/test/java/org/apache/camel/dataformat/barcode/BarcodeDataFormatSpringTest.java | {
"start": 1257,
"end": 1752
} | class ____ extends BarcodeDataFormatCamelTest {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Override
protected CamelContext createCamelContext() throws Exception {
ApplicationContext applicationContext
= CamelSpringTestSupport.newAppContext("barcodeDataformatSpring.xml",
getClass());
return SpringCamelContext.springCamelContext(applicationContext, true);
}
}
| BarcodeDataFormatSpringTest |
java | spring-projects__spring-security | buildSrc/src/test/java/io/spring/gradle/convention/JavadocApiPluginTest.java | {
"start": 1014,
"end": 1822
} | class ____ {
Project rootProject;
@AfterEach
public void cleanup() throws Exception {
if (rootProject != null) {
FileUtils.deleteDirectory(rootProject.getProjectDir());
}
}
@Test
public void applyWhenNotOverrideThenPropertiesDefaulted() {
rootProject = ProjectBuilder.builder().build();
rootProject.getPlugins().apply(JavadocApiPlugin.class);
Javadoc apiTask = (Javadoc) rootProject.getTasks().getByPath("api");
assertThat(apiTask).isNotNull();
assertThat(apiTask.getGroup()).isEqualTo("Documentation");
assertThat(apiTask.getDescription()).isEqualTo("Generates aggregated Javadoc API documentation.");
assertThat(apiTask.getMaxMemory()).isEqualTo("1024m");
assertThat(apiTask.getDestinationDir()).isEqualTo(new File(rootProject.getBuildDir(), "api"));
}
}
| JavadocApiPluginTest |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/io/support/PathMatchingResourcePatternResolver.java | {
"start": 10505,
"end": 12549
} | class ____ implements ResourcePatternResolver {
private static final Resource[] EMPTY_RESOURCE_ARRAY = {};
private static final Log logger = LogFactory.getLog(PathMatchingResourcePatternResolver.class);
/**
* {@link Set} of {@linkplain ModuleFinder#ofSystem() system module} names.
* @since 6.0
* @see #isNotSystemModule
*/
private static final Set<String> systemModuleNames = NativeDetector.inNativeImage() ? Collections.emptySet() :
ModuleFinder.ofSystem().findAll().stream()
.map(moduleReference -> moduleReference.descriptor().name())
.collect(Collectors.toSet());
/**
* {@link Predicate} that tests whether the supplied {@link ResolvedModule}
* is not a {@linkplain ModuleFinder#ofSystem() system module}.
* @since 6.0
* @see #systemModuleNames
*/
private static final Predicate<ResolvedModule> isNotSystemModule =
resolvedModule -> !systemModuleNames.contains(resolvedModule.name());
private static @Nullable Method equinoxResolveMethod;
static {
try {
// Detect Equinox OSGi (for example, on WebSphere 6.1)
Class<?> fileLocatorClass = ClassUtils.forName("org.eclipse.core.runtime.FileLocator",
PathMatchingResourcePatternResolver.class.getClassLoader());
equinoxResolveMethod = fileLocatorClass.getMethod("resolve", URL.class);
logger.trace("Found Equinox FileLocator for OSGi bundle URL resolution");
}
catch (Throwable ex) {
equinoxResolveMethod = null;
}
}
private final ResourceLoader resourceLoader;
private PathMatcher pathMatcher = new AntPathMatcher();
@Nullable
private Boolean useCaches;
private final Map<String, Resource[]> rootDirCache = new ConcurrentHashMap<>();
private final Map<String, NavigableSet<String>> jarEntriesCache = new ConcurrentHashMap<>();
private volatile @Nullable Set<ClassPathManifestEntry> manifestEntriesCache;
/**
* Create a {@code PathMatchingResourcePatternResolver} with a
* {@link DefaultResourceLoader}.
* <p>ClassLoader access will happen via the thread context | PathMatchingResourcePatternResolver |
java | apache__camel | components/camel-workday/src/main/java/org/apache/camel/component/workday/producer/WorkdayCommonAPIProducer.java | {
"start": 1125,
"end": 4977
} | class ____ extends WorkdayDefaultProducer {
public static final String WORKDAY_COMMON_API_URL_TEMPLATE = "https://%s/ccx/api/v1/%s%s";
public static final String WORKDAY_ID_PATTERN = "([0-9a-f]{32})";
public static final String WORKDAY_GENERIC_ID = "{ID}";
private final Set<String> workdayValidEndpointSet;
public WorkdayCommonAPIProducer(WorkdayEndpoint endpoint) {
super(endpoint);
this.workdayValidEndpointSet = new HashSet<>();
this.workdayValidEndpointSet.add("/auditLogs");
this.workdayValidEndpointSet.add("/auditLogs/{ID}");
this.workdayValidEndpointSet.add("/businessTitleChanges/{ID}");
this.workdayValidEndpointSet.add("/currencies");
this.workdayValidEndpointSet.add("/currencies/{ID}");
this.workdayValidEndpointSet.add("/customers/{ID}");
this.workdayValidEndpointSet.add("/customers/{ID}/activities");
this.workdayValidEndpointSet.add("/customers/{ID}/activities/{ID}");
this.workdayValidEndpointSet.add("/jobChangeReasons");
this.workdayValidEndpointSet.add("/jobChangeReasons/{ID}");
this.workdayValidEndpointSet.add("/organizationTypes");
this.workdayValidEndpointSet.add("/organizationTypes/{ID}");
this.workdayValidEndpointSet.add("/organizations");
this.workdayValidEndpointSet.add("/organizations/{ID}");
this.workdayValidEndpointSet.add("/supervisoryOrganizations");
this.workdayValidEndpointSet.add("/supervisoryOrganizations/{ID}");
this.workdayValidEndpointSet.add("/supervisoryOrganizations/{ID}/workers");
this.workdayValidEndpointSet.add("/supervisoryOrganizations/{ID}/workers/{ID}");
this.workdayValidEndpointSet.add("/workers");
this.workdayValidEndpointSet.add("/workers/{ID}");
this.workdayValidEndpointSet.add("/workers/{ID}/businessTitleChanges");
this.workdayValidEndpointSet.add("/workers/{ID}/businessTitleChanges/{ID}");
this.workdayValidEndpointSet.add("/workers/{ID}/directReports");
this.workdayValidEndpointSet.add("/workers/{ID}/directReports/{ID}");
this.workdayValidEndpointSet.add("/workers/{ID}/history");
this.workdayValidEndpointSet.add("/workers/{ID}/history/{ID}");
this.workdayValidEndpointSet.add("/workers/{ID}/inboxTasks");
this.workdayValidEndpointSet.add("/workers/{ID}/inboxTasks/{ID}");
this.workdayValidEndpointSet.add("/workers/{ID}/organizations");
this.workdayValidEndpointSet.add("/workers/{ID}/organizations/{ID}");
this.workdayValidEndpointSet.add("/workers/{ID}/paySlips");
this.workdayValidEndpointSet.add("/workers/{ID}/paySlips/{ID}");
this.workdayValidEndpointSet.add("/workers/{ID}/supervisoryOrganizationsManaged");
this.workdayValidEndpointSet.add("/workers/{ID}/supervisoryOrganizationsManaged/{ID}");
this.workdayValidEndpointSet.add("/workers/{ID}/timeOffEntries");
this.workdayValidEndpointSet.add("/workers/{ID}/timeOffEntries/{ID}");
this.workdayValidEndpointSet.add("/workers/{ID}/timeOffPlans");
this.workdayValidEndpointSet.add("/workers/{ID}/timeOffPlans/{ID}");
}
@Override
public String prepareUri(WorkdayConfiguration configuration) throws Exception {
String pathString = configuration.getPath();
String genericPath = pathString.replaceAll(WORKDAY_ID_PATTERN, WORKDAY_GENERIC_ID);
if (!this.workdayValidEndpointSet.contains(genericPath)) {
throw new MalformedURLException(
String.format("An invalid Workday Common endpoint: '%s' was provided.", genericPath));
}
return String.format(WORKDAY_COMMON_API_URL_TEMPLATE, configuration.getHost(), configuration.getTenant(),
pathString);
}
}
| WorkdayCommonAPIProducer |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/enumeratedvalue/EnumeratedValueTests.java | {
"start": 4082,
"end": 4304
} | enum ____ {
MALE( "M" ),
FEMALE( "F" ),
OTHER( "U" );
@EnumeratedValue
private final String code;
Gender(String code) {
this.code = code;
}
public String getCode() {
return code;
}
}
public | Gender |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/support/ModifierSupport.java | {
"start": 5582,
"end": 5632
} | class ____ {@code static}.
*
* @param clazz the | is |
java | quarkusio__quarkus | integration-tests/websockets/src/test/java/io/quarkus/websockets/ChatCodecIT.java | {
"start": 117,
"end": 161
} | class ____ extends ChatCodecTest {
}
| ChatCodecIT |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/lookup/Log4jLookupWithSpacesTest.java | {
"start": 1729,
"end": 3160
} | class ____ {
private static final File EXPECT =
new File(System.getProperty("user.home"), "/a a/b b/c c/d d/e e/log4j2 file.xml");
@Mock
private LoggerContext mockCtx;
@Mock
private Configuration config;
@Mock
private ConfigurationSource configSrc;
@BeforeEach
void setup() {
ContextAnchor.THREAD_CONTEXT.set(mockCtx);
given(config.getConfigurationSource()).willReturn(configSrc);
given(configSrc.getFile()).willReturn(EXPECT);
}
@AfterEach
void cleanup() {
ContextAnchor.THREAD_CONTEXT.set(null);
}
@Test
void lookupConfigLocation_withSpaces() {
final StrLookup log4jLookup = new Log4jLookup();
((ConfigurationAware) log4jLookup).setConfiguration(config);
final String value = log4jLookup.lookup(KEY_CONFIG_LOCATION);
assertEquals(
new File(System.getProperty("user.home"), "/a a/b b/c c/d d/e e/log4j2 file.xml").getAbsolutePath(),
value);
}
@Test
void lookupConfigParentLocation_withSpaces() {
final StrLookup log4jLookup = new Log4jLookup();
((ConfigurationAware) log4jLookup).setConfiguration(config);
final String value = log4jLookup.lookup(KEY_CONFIG_PARENT_LOCATION);
assertEquals(new File(System.getProperty("user.home"), "/a a/b b/c c/d d/e e").getAbsolutePath(), value);
}
}
| Log4jLookupWithSpacesTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/procedure/HANAStoredProcedureTest.java | {
"start": 1798,
"end": 2381
} | class ____ {
@NamedStoredProcedureQueries({
@NamedStoredProcedureQuery(name = "singleRefCursor", procedureName = "singleRefCursor", parameters = {
@StoredProcedureParameter(mode = ParameterMode.REF_CURSOR, type = void.class)
}),
@NamedStoredProcedureQuery(name = "outAndRefCursor", procedureName = "outAndRefCursor", parameters = {
@StoredProcedureParameter(mode = ParameterMode.OUT, type = Integer.class),
@StoredProcedureParameter(mode = ParameterMode.REF_CURSOR, type = void.class)
})
})
@Entity(name = "IdHolder")
public static | HANAStoredProcedureTest |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/state/ChangelogCompatibilityITCase.java | {
"start": 10198,
"end": 12270
} | class ____ {
boolean startWithChangelog;
boolean restoreWithChangelog;
RestoreSource restoreSource;
boolean allowStore = true;
boolean allowRestore = false;
public static TestCase startWithChangelog(boolean changelogEnabled) {
TestCase testCase = new TestCase();
testCase.startWithChangelog = changelogEnabled;
return testCase;
}
public TestCase restoreWithChangelog(boolean restoreWithChangelog) {
this.restoreWithChangelog = restoreWithChangelog;
return this;
}
public TestCase from(RestoreSource restoreSource) {
this.restoreSource = restoreSource;
return this;
}
public TestCase allowRestore(boolean allowRestore) {
this.allowRestore = allowRestore;
return this;
}
public TestCase allowSave(boolean allowSave) {
this.allowStore = allowSave;
return this;
}
@Override
public String toString() {
return String.format(
"startWithChangelog=%s, restoreWithChangelog=%s, restoreFrom=%s, allowStore=%s, allowRestore=%s",
startWithChangelog,
restoreWithChangelog,
restoreSource,
allowStore,
allowRestore);
}
private String describeStore() {
return String.format(
"taking %s with changelog.enabled=%b should be %s",
restoreSource, startWithChangelog, allowStore ? "allowed" : "disallowed");
}
private String describeRestore() {
return String.format(
"restoring from %s taken with changelog.enabled=%b should be %s with changelog.enabled=%b",
restoreSource,
allowRestore ? "allowed" : "disallowed",
startWithChangelog,
restoreWithChangelog);
}
}
private | TestCase |
java | quarkusio__quarkus | extensions/devservices/common/src/main/java/io/quarkus/devservices/common/ContainerUtil.java | {
"start": 653,
"end": 699
} | class ____ working with containers.
*/
public | for |
java | apache__camel | components/camel-pulsar/src/test/java/org/apache/camel/component/pulsar/utils/message/PulsarMessageUtilsTest.java | {
"start": 1649,
"end": 1831
} | class ____ implements Serializable {
private String id;
private String name;
public Obj(String id, String name) {
this.id = id;
this.name = name;
}
}
| Obj |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationAttemptFinishData.java | {
"start": 1446,
"end": 3195
} | class ____ {
@Public
@Unstable
public static ApplicationAttemptFinishData newInstance(
ApplicationAttemptId appAttemptId, String diagnosticsInfo,
String trackingURL, FinalApplicationStatus finalApplicationStatus,
YarnApplicationAttemptState yarnApplicationAttemptState) {
ApplicationAttemptFinishData appAttemptFD =
Records.newRecord(ApplicationAttemptFinishData.class);
appAttemptFD.setApplicationAttemptId(appAttemptId);
appAttemptFD.setDiagnosticsInfo(diagnosticsInfo);
appAttemptFD.setTrackingURL(trackingURL);
appAttemptFD.setFinalApplicationStatus(finalApplicationStatus);
appAttemptFD.setYarnApplicationAttemptState(yarnApplicationAttemptState);
return appAttemptFD;
}
@Public
@Unstable
public abstract ApplicationAttemptId getApplicationAttemptId();
@Public
@Unstable
public abstract void setApplicationAttemptId(
ApplicationAttemptId applicationAttemptId);
@Public
@Unstable
public abstract String getTrackingURL();
@Public
@Unstable
public abstract void setTrackingURL(String trackingURL);
@Public
@Unstable
public abstract String getDiagnosticsInfo();
@Public
@Unstable
public abstract void setDiagnosticsInfo(String diagnosticsInfo);
@Public
@Unstable
public abstract FinalApplicationStatus getFinalApplicationStatus();
@Public
@Unstable
public abstract void setFinalApplicationStatus(
FinalApplicationStatus finalApplicationStatus);
@Public
@Unstable
public abstract YarnApplicationAttemptState getYarnApplicationAttemptState();
@Public
@Unstable
public abstract void setYarnApplicationAttemptState(
YarnApplicationAttemptState yarnApplicationAttemptState);
}
| ApplicationAttemptFinishData |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/request/AnthropicChatCompletionRequestTests.java | {
"start": 948,
"end": 5344
} | class ____ extends ESTestCase {
public void testCreateRequest() throws IOException {
var request = createRequest("secret", "abc", "model", 2);
var httpRequest = request.createHttpRequest();
assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class));
var httpPost = (HttpPost) httpRequest.httpRequestBase();
assertThat(httpPost.getURI().toString(), is(buildAnthropicUri()));
assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType()));
assertThat(httpPost.getLastHeader(AnthropicRequestUtils.X_API_KEY).getValue(), is("secret"));
assertThat(
httpPost.getLastHeader(AnthropicRequestUtils.VERSION).getValue(),
is(AnthropicRequestUtils.ANTHROPIC_VERSION_2023_06_01)
);
var requestMap = entityAsMap(httpPost.getEntity().getContent());
assertThat(requestMap, aMapWithSize(3));
assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc"))));
assertThat(requestMap.get("model"), is("model"));
assertThat(requestMap.get("max_tokens"), is(2));
}
public void testCreateRequest_TestUrl() throws IOException {
var request = createRequest("fake_url", "secret", "abc", "model", 2);
var httpRequest = request.createHttpRequest();
assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class));
var httpPost = (HttpPost) httpRequest.httpRequestBase();
assertThat(httpPost.getURI().toString(), is("fake_url"));
assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType()));
assertThat(httpPost.getLastHeader(AnthropicRequestUtils.X_API_KEY).getValue(), is("secret"));
assertThat(
httpPost.getLastHeader(AnthropicRequestUtils.VERSION).getValue(),
is(AnthropicRequestUtils.ANTHROPIC_VERSION_2023_06_01)
);
var requestMap = entityAsMap(httpPost.getEntity().getContent());
assertThat(requestMap, aMapWithSize(3));
assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc"))));
assertThat(requestMap.get("model"), is("model"));
assertThat(requestMap.get("max_tokens"), is(2));
}
public void testTruncate_DoesNotReduceInputTextSize() throws IOException {
var request = createRequest("secret", "abc", "model", 2);
var truncatedRequest = request.truncate();
assertThat(request.getURI().toString(), is(buildAnthropicUri()));
var httpRequest = truncatedRequest.createHttpRequest();
assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class));
var httpPost = (HttpPost) httpRequest.httpRequestBase();
var requestMap = entityAsMap(httpPost.getEntity().getContent());
assertThat(requestMap, aMapWithSize(3));
// We do not truncate for Anthropic chat completions
assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc"))));
assertThat(requestMap.get("model"), is("model"));
assertThat(requestMap.get("max_tokens"), is(2));
}
public void testTruncationInfo_ReturnsNull() {
var request = createRequest("secret", "abc", "model", 2);
assertNull(request.getTruncationInfo());
}
public static AnthropicChatCompletionRequest createRequest(String apiKey, String input, String model, int maxTokens) {
var chatCompletionModel = AnthropicChatCompletionModelTests.createChatCompletionModel(apiKey, model, maxTokens);
return new AnthropicChatCompletionRequest(List.of(input), chatCompletionModel, false);
}
public static AnthropicChatCompletionRequest createRequest(String url, String apiKey, String input, String model, int maxTokens) {
var chatCompletionModel = AnthropicChatCompletionModelTests.createChatCompletionModel(url, apiKey, model, maxTokens);
return new AnthropicChatCompletionRequest(List.of(input), chatCompletionModel, false);
}
private static String buildAnthropicUri() {
return Strings.format(
"https://%s/%s/%s",
AnthropicRequestUtils.HOST,
AnthropicRequestUtils.API_VERSION_1,
AnthropicRequestUtils.MESSAGES_PATH
);
}
}
| AnthropicChatCompletionRequestTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/support/InvalidSelectorException.java | {
"start": 578,
"end": 841
} | class ____ extends IllegalArgumentException {
public InvalidSelectorException(String message) {
super(message);
}
public InvalidSelectorException(String message, Throwable cause) {
super(message, cause);
}
}
| InvalidSelectorException |
java | google__dagger | javatests/dagger/internal/codegen/ConflictingEntryPointsTest.java | {
"start": 6497,
"end": 6719
} | interface ____");
});
}
@Test
public void sameKey() {
Source base1 =
CompilerTests.javaSource(
"test.Base1", //
"package test;",
"",
" | TestComponent |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/FilterChainProxy.java | {
"start": 16222,
"end": 16572
} | class ____ implements FilterChainValidator {
@Override
public void validate(FilterChainProxy filterChainProxy) {
}
}
/**
* A strategy for decorating the provided filter chain with one that accounts for the
* {@link SecurityFilterChain} for a given request.
*
* @author Josh Cummings
* @since 6.0
*/
public | NullFilterChainValidator |
java | elastic__elasticsearch | x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringTestUtils.java | {
"start": 351,
"end": 1095
} | class ____ {
public static String randomVersionString() {
return randomVersionNumber() + (ESTestCase.randomBoolean() ? "" : randomPrerelease());
}
private static String randomVersionNumber() {
int numbers = ESTestCase.between(1, 3);
String v = Integer.toString(ESTestCase.between(0, 100));
for (int i = 1; i < numbers; i++) {
v += "." + ESTestCase.between(0, 100);
}
return v;
}
private static String randomPrerelease() {
if (ESTestCase.rarely()) {
return ESTestCase.randomFrom("alpha", "beta", "prerelease", "whatever");
}
return ESTestCase.randomFrom("alpha", "beta", "") + randomVersionNumber();
}
}
| VersionStringTestUtils |
java | google__auto | value/src/it/functional/src/test/java/com/google/auto/value/AutoValueTest.java | {
"start": 64789,
"end": 67233
} | interface ____<T extends Comparable<T>> {
Builder<T> setList(ImmutableList<T> list);
Builder<T> setT(T t);
Builder<T> setInts(int[] ints);
Builder<T> setNoGetter(int x);
Builder<T> setoAuth(String x); // this ugly spelling is for compatibility
Builder<T> setOBrien(String x);
ImmutableList<T> list();
T t();
int[] ints();
String oAuth();
String oBrien();
BuilderWithUnprefixedGetters<T> build();
}
}
@Test
public void testBuilderWithUnprefixedGetter() {
ImmutableList<String> names = ImmutableList.of("fred", "jim");
int[] ints = {6, 28, 496, 8128, 33550336};
int noGetter = -1;
BuilderWithUnprefixedGetters.Builder<String> builder = BuilderWithUnprefixedGetters.builder();
assertNull(builder.t());
try {
builder.list();
fail("Attempt to retrieve unset list property should have failed");
} catch (IllegalStateException e) {
if (omitIdentifiers) {
assertThat(e).hasMessageThat().isNull();
} else {
assertThat(e).hasMessageThat().isEqualTo("Property \"list\" has not been set");
}
}
try {
builder.ints();
fail("Attempt to retrieve unset ints property should have failed");
} catch (IllegalStateException e) {
if (omitIdentifiers) {
assertThat(e).hasMessageThat().isNull();
} else {
assertThat(e).hasMessageThat().isEqualTo("Property \"ints\" has not been set");
}
}
builder.setList(names);
assertThat(builder.list()).isSameInstanceAs(names);
builder.setInts(ints);
assertThat(builder.ints()).isEqualTo(ints);
builder.setoAuth("OAuth");
assertThat(builder.oAuth()).isEqualTo("OAuth");
builder.setOBrien("Flann");
assertThat(builder.oBrien()).isEqualTo("Flann");
// The array is not cloned by the getter, so the client can modify it (but shouldn't).
ints[0] = 0;
assertThat(builder.ints()[0]).isEqualTo(0);
ints[0] = 6;
BuilderWithUnprefixedGetters<String> instance = builder.setNoGetter(noGetter).build();
assertThat(instance.list()).isSameInstanceAs(names);
assertThat(instance.t()).isNull();
assertThat(instance.ints()).isEqualTo(ints);
assertThat(instance.noGetter()).isEqualTo(noGetter);
assertThat(instance.oAuth()).isEqualTo("OAuth");
assertThat(instance.oBrien()).isEqualTo("Flann");
}
@AutoValue
public abstract static | Builder |
java | quarkusio__quarkus | integration-tests/main/src/test/java/io/quarkus/it/main/JUnit5PerClassLifecycleTest.java | {
"start": 848,
"end": 1729
} | class ____ {
// any IP just to verify it was performed
@Inject
UnusedBean bean;
@BeforeEach
public void beforeEach() {
Assertions.assertNotNull(bean);
Assert.assertNotNull(bean.getInjectionPoint());
}
@BeforeAll
public void beforeAll() {
Assertions.assertNotNull(bean);
Assert.assertNotNull(bean.getInjectionPoint());
}
@AfterEach
public void afterEach() {
Assertions.assertNotNull(bean);
Assert.assertNotNull(bean.getInjectionPoint());
}
@AfterAll
public void afterAll() {
Assertions.assertNotNull(bean);
Assert.assertNotNull(bean.getInjectionPoint());
}
@Test
public void testQuarkusWasBootedAndInjectionWorks() {
Assertions.assertNotNull(bean);
Assert.assertNotNull(bean.getInjectionPoint());
}
}
| JUnit5PerClassLifecycleTest |
java | quarkusio__quarkus | integration-tests/oidc-tenancy/src/test/java/io/quarkus/it/keycloak/KeycloakRealmResourceManager.java | {
"start": 717,
"end": 4385
} | class ____ implements QuarkusTestResourceLifecycleManager, DevServicesContext.ContextAware {
private static final String KEYCLOAK_REALM = "quarkus-";
final KeycloakTestClient client = new KeycloakTestClient();
@Override
public Map<String, String> start() {
for (String realmId : Arrays.asList("a", "b", "c", "d", "e", "f", "webapp", "webapp2", "hybrid")) {
RealmRepresentation realm = createRealm(KEYCLOAK_REALM + realmId);
realm.getClients().add(createClient("quarkus-app-" + realmId));
if ("b".equals(realmId)) {
realm.getClients().add(createClient("quarkus-app-b2"));
}
realm.getUsers().add(createUser("alice", "user"));
realm.getUsers().add(createUser("admin", "user", "admin"));
realm.getUsers().add(createUser("jdoe", "user", "confidential"));
client.createRealm(realm);
}
return Collections.emptyMap();
}
private static RealmRepresentation createRealm(String name) {
RealmRepresentation realm = new RealmRepresentation();
realm.setRealm(name);
realm.setEnabled(true);
realm.setUsers(new ArrayList<>());
realm.setClients(new ArrayList<>());
RolesRepresentation roles = new RolesRepresentation();
List<RoleRepresentation> realmRoles = new ArrayList<>();
roles.setRealm(realmRoles);
realm.setRoles(roles);
realm.getRoles().getRealm().add(new RoleRepresentation("user", null, false));
realm.getRoles().getRealm().add(new RoleRepresentation("admin", null, false));
realm.getRoles().getRealm().add(new RoleRepresentation("confidential", null, false));
return realm;
}
private static ClientRepresentation createClient(String clientId) {
ClientRepresentation client = new ClientRepresentation();
client.setClientId(clientId);
client.setPublicClient(false);
client.setSecret("secret");
client.setDirectAccessGrantsEnabled(true);
client.setEnabled(true);
client.setDefaultRoles(new String[] { "role-" + clientId });
if (clientId.startsWith("quarkus-app-webapp") || clientId.equals("quarkus-app-hybrid")) {
client.setRedirectUris(Arrays.asList("*"));
}
if (clientId.equals("quarkus-app-webapp") || clientId.equals("quarkus-app-hybrid")) {
// This instructs Keycloak to include the roles with the ID token too
client.setDefaultClientScopes(Arrays.asList("microprofile-jwt"));
}
return client;
}
private static UserRepresentation createUser(String username, String... realmRoles) {
UserRepresentation user = new UserRepresentation();
user.setUsername(username);
user.setEnabled(true);
user.setCredentials(new ArrayList<>());
user.setRealmRoles(Arrays.asList(realmRoles));
CredentialRepresentation credential = new CredentialRepresentation();
credential.setType(CredentialRepresentation.PASSWORD);
credential.setValue(username);
credential.setTemporary(false);
user.getCredentials().add(credential);
return user;
}
@Override
public void stop() {
for (String realmId : Arrays.asList("a", "b", "c", "d", "webapp", "webapp2", "hybrid")) {
try {
client.deleteRealm(realmId);
} catch (Throwable t) {
}
}
}
@Override
public void setIntegrationTestContext(DevServicesContext context) {
client.setIntegrationTestContext(context);
}
}
| KeycloakRealmResourceManager |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java | {
"start": 7355,
"end": 113743
} | class ____ extends MapperTestCase {
private final boolean useLegacyFormat;
private TestThreadPool threadPool;
public SemanticTextFieldMapperTests(boolean useLegacyFormat) {
this.useLegacyFormat = useLegacyFormat;
}
ModelRegistry globalModelRegistry;
@Before
private void initializeTestEnvironment() {
threadPool = createThreadPool();
var clusterService = ClusterServiceUtils.createClusterService(threadPool);
var modelRegistry = new ModelRegistry(clusterService, new NoOpClient(threadPool));
globalModelRegistry = spy(modelRegistry);
globalModelRegistry.clusterChanged(new ClusterChangedEvent("init", clusterService.state(), clusterService.state()) {
@Override
public boolean localNodeMaster() {
return false;
}
});
registerDefaultEisEndpoint();
}
@After
private void stopThreadPool() {
threadPool.close();
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return List.of(new Object[] { true }, new Object[] { false });
}
@Override
protected Collection<? extends Plugin> getPlugins() {
return List.of(new InferencePlugin(Settings.EMPTY) {
@Override
protected Supplier<ModelRegistry> getModelRegistry() {
return () -> globalModelRegistry;
}
}, new XPackClientPlugin());
}
private void registerDefaultEisEndpoint() {
globalModelRegistry.putDefaultIdIfAbsent(
new InferenceService.DefaultConfigId(
DEFAULT_EIS_ELSER_INFERENCE_ID,
MinimalServiceSettings.sparseEmbedding(ElasticInferenceService.NAME),
mock(InferenceService.class)
)
);
}
private MapperService createMapperService(XContentBuilder mappings, boolean useLegacyFormat) throws IOException {
IndexVersion indexVersion = SemanticInferenceMetadataFieldsMapperTests.getRandomCompatibleIndexVersion(useLegacyFormat);
return createMapperService(mappings, useLegacyFormat, indexVersion, indexVersion);
}
private MapperService createMapperService(XContentBuilder mappings, boolean useLegacyFormat, IndexVersion minIndexVersion)
throws IOException {
return createMapperService(mappings, useLegacyFormat, minIndexVersion, IndexVersion.current());
}
private MapperService createMapperService(
XContentBuilder mappings,
boolean useLegacyFormat,
IndexVersion minIndexVersion,
IndexVersion maxIndexVersion
) throws IOException {
validateIndexVersion(minIndexVersion, useLegacyFormat);
IndexVersion indexVersion = IndexVersionUtils.randomVersionBetween(random(), minIndexVersion, maxIndexVersion);
return createMapperServiceWithIndexVersion(mappings, useLegacyFormat, indexVersion);
}
private MapperService createMapperServiceWithIndexVersion(XContentBuilder mappings, boolean useLegacyFormat, IndexVersion indexVersion)
throws IOException {
var settings = Settings.builder()
.put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), indexVersion)
.put(InferenceMetadataFieldsMapper.USE_LEGACY_SEMANTIC_TEXT_FORMAT.getKey(), useLegacyFormat)
.build();
return createMapperService(indexVersion, settings, mappings);
}
private static void validateIndexVersion(IndexVersion indexVersion, boolean useLegacyFormat) {
if (useLegacyFormat == false
&& indexVersion.before(IndexVersions.INFERENCE_METADATA_FIELDS)
&& indexVersion.between(IndexVersions.INFERENCE_METADATA_FIELDS_BACKPORT, IndexVersions.UPGRADE_TO_LUCENE_10_0_0) == false) {
throw new IllegalArgumentException("Index version " + indexVersion + " does not support new semantic text format");
}
}
@Override
protected Settings getIndexSettings() {
return Settings.builder()
.put(super.getIndexSettings())
.put(InferenceMetadataFieldsMapper.USE_LEGACY_SEMANTIC_TEXT_FORMAT.getKey(), useLegacyFormat)
.build();
}
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "semantic_text");
}
@Override
protected void metaMapping(XContentBuilder b) throws IOException {
super.metaMapping(b);
b.field(INFERENCE_ID_FIELD, DEFAULT_EIS_ELSER_INFERENCE_ID);
}
@Override
protected Object getSampleValueForDocument() {
return null;
}
@Override
protected boolean supportsIgnoreMalformed() {
return false;
}
@Override
protected boolean supportsStoredFields() {
return false;
}
@Override
protected void registerParameters(ParameterChecker checker) throws IOException {}
@Override
protected Object generateRandomInputValue(MappedFieldType ft) {
assumeFalse("doc_values are not supported in semantic_text", true);
return null;
}
@Override
protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) {
throw new AssumptionViolatedException("not supported");
}
@Override
protected IngestScriptSupport ingestScriptSupport() {
throw new AssumptionViolatedException("not supported");
}
@Override
public MappedFieldType getMappedFieldType() {
return new SemanticTextFieldMapper.SemanticTextFieldType(
"field",
"fake-inference-id",
null,
null,
null,
null,
null,
false,
Map.of()
);
}
@Override
protected void assertSearchable(MappedFieldType fieldType) {
assertThat(fieldType, instanceOf(SemanticTextFieldMapper.SemanticTextFieldType.class));
assertTrue(fieldType.isSearchable());
}
public void testDefaults() throws Exception {
final String fieldName = "field";
final XContentBuilder fieldMapping = fieldMapping(this::minimalMapping);
final XContentBuilder expectedMapping = fieldMapping(this::metaMapping);
MapperService mapperService = createMapperService(fieldMapping, useLegacyFormat);
DocumentMapper mapper = mapperService.documentMapper();
assertEquals(Strings.toString(expectedMapping), mapper.mappingSource().toString());
assertSemanticTextField(mapperService, fieldName, false, null, null);
assertInferenceEndpoints(mapperService, fieldName, DEFAULT_EIS_ELSER_INFERENCE_ID, DEFAULT_EIS_ELSER_INFERENCE_ID);
ParsedDocument doc1 = mapper.parse(source(this::writeField));
List<IndexableField> fields = doc1.rootDoc().getFields("field");
// No indexable fields
assertTrue(fields.isEmpty());
}
public void testDefaultInferenceIdUsesEisWhenAvailable() throws Exception {
final String fieldName = "field";
final XContentBuilder fieldMapping = fieldMapping(this::minimalMapping);
MapperService mapperService = createMapperService(fieldMapping, useLegacyFormat);
assertInferenceEndpoints(mapperService, fieldName, DEFAULT_EIS_ELSER_INFERENCE_ID, DEFAULT_EIS_ELSER_INFERENCE_ID);
}
public void testDefaultInferenceIdFallsBackWhenEisUnavailable() throws Exception {
final String fieldName = "field";
final XContentBuilder fieldMapping = fieldMapping(this::minimalMapping);
removeDefaultEisEndpoint();
MapperService mapperService = createMapperService(fieldMapping, useLegacyFormat);
assertInferenceEndpoints(mapperService, fieldName, DEFAULT_FALLBACK_ELSER_INFERENCE_ID, DEFAULT_FALLBACK_ELSER_INFERENCE_ID);
}
private void removeDefaultEisEndpoint() {
PlainActionFuture<Boolean> removalFuture = new PlainActionFuture<>();
globalModelRegistry.removeDefaultConfigs(Set.of(DEFAULT_EIS_ELSER_INFERENCE_ID), removalFuture);
assertTrue("Failed to remove default EIS endpoint", removalFuture.actionGet(TEST_REQUEST_TIMEOUT));
}
@Override
public void testFieldHasValue() {
MappedFieldType fieldType = getMappedFieldType();
FieldInfos fieldInfos = new FieldInfos(new FieldInfo[] { getFieldInfoWithName(getEmbeddingsFieldName("field")) });
assertTrue(fieldType.fieldHasValue(fieldInfos));
}
public void testSetInferenceEndpoints() throws IOException {
final String fieldName = "field";
final String inferenceId = "foo";
final String searchInferenceId = "bar";
CheckedBiConsumer<XContentBuilder, MapperService, IOException> assertSerialization = (expectedMapping, mapperService) -> {
DocumentMapper mapper = mapperService.documentMapper();
assertEquals(Strings.toString(expectedMapping), mapper.mappingSource().toString());
};
{
final XContentBuilder fieldMapping = fieldMapping(b -> b.field("type", "semantic_text").field(INFERENCE_ID_FIELD, inferenceId));
final MapperService mapperService = createMapperService(fieldMapping, useLegacyFormat);
assertSemanticTextField(mapperService, fieldName, false, null, null);
assertInferenceEndpoints(mapperService, fieldName, inferenceId, inferenceId);
assertSerialization.accept(fieldMapping, mapperService);
}
{
final XContentBuilder fieldMapping = fieldMapping(
b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId)
);
final XContentBuilder expectedMapping = fieldMapping(
b -> b.field("type", "semantic_text")
.field(INFERENCE_ID_FIELD, DEFAULT_EIS_ELSER_INFERENCE_ID)
.field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId)
);
final MapperService mapperService = createMapperService(fieldMapping, useLegacyFormat);
assertSemanticTextField(mapperService, fieldName, false, null, null);
assertInferenceEndpoints(mapperService, fieldName, DEFAULT_EIS_ELSER_INFERENCE_ID, searchInferenceId);
assertSerialization.accept(expectedMapping, mapperService);
}
{
final XContentBuilder fieldMapping = fieldMapping(
b -> b.field("type", "semantic_text")
.field(INFERENCE_ID_FIELD, inferenceId)
.field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId)
);
MapperService mapperService = createMapperService(fieldMapping, useLegacyFormat);
assertSemanticTextField(mapperService, fieldName, false, null, null);
assertInferenceEndpoints(mapperService, fieldName, inferenceId, searchInferenceId);
assertSerialization.accept(fieldMapping, mapperService);
}
}
public void testInvalidInferenceEndpoints() {
{
Exception e = expectThrows(
MapperParsingException.class,
() -> createMapperService(
fieldMapping(b -> b.field("type", "semantic_text").field(INFERENCE_ID_FIELD, (String) null)),
useLegacyFormat
)
);
assertThat(
e.getMessage(),
containsString("[inference_id] on mapper [field] of type [semantic_text] must not have a [null] value")
);
}
{
Exception e = expectThrows(
MapperParsingException.class,
() -> createMapperService(
fieldMapping(b -> b.field("type", "semantic_text").field(INFERENCE_ID_FIELD, "")),
useLegacyFormat
)
);
assertThat(e.getMessage(), containsString("[inference_id] on mapper [field] of type [semantic_text] must not be empty"));
}
{
Exception e = expectThrows(
MapperParsingException.class,
() -> createMapperService(
fieldMapping(b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, "")),
useLegacyFormat
)
);
assertThat(e.getMessage(), containsString("[search_inference_id] on mapper [field] of type [semantic_text] must not be empty"));
}
}
private SemanticTextIndexOptions getDefaultSparseVectorIndexOptionsForMapper(MapperService mapperService) {
var mapperIndexVersion = mapperService.getIndexSettings().getIndexVersionCreated();
var defaultSparseVectorIndexOptions = SparseVectorFieldMapper.SparseVectorIndexOptions.getDefaultIndexOptions(mapperIndexVersion);
return defaultSparseVectorIndexOptions == null
? null
: new SemanticTextIndexOptions(SemanticTextIndexOptions.SupportedIndexOptions.SPARSE_VECTOR, defaultSparseVectorIndexOptions);
}
public void testInvalidTaskTypes() {
for (var taskType : TaskType.values()) {
if (taskType == TaskType.TEXT_EMBEDDING || taskType == TaskType.SPARSE_EMBEDDING) {
continue;
}
Exception e = expectThrows(
MapperParsingException.class,
() -> createMapperService(
fieldMapping(
b -> b.field("type", "semantic_text")
.field(INFERENCE_ID_FIELD, "test1")
.startObject("model_settings")
.field("task_type", taskType)
.endObject()
),
useLegacyFormat
)
);
assertThat(e.getMessage(), containsString("Wrong [task_type], expected text_embedding or sparse_embedding"));
}
}
    /**
     * Multi-field ("fields") support: rejected entirely under the legacy index format; under
     * the new format semantic_text is allowed both as a sub-field of text and as a parent with
     * its own sub-fields, while reusing a name claimed by an internal inference sub-field fails.
     */
    public void testMultiFieldsSupport() throws IOException {
        if (useLegacyFormat) {
            // Legacy format: semantic_text may not appear in multifields at all.
            Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> {
                b.field("type", "text");
                b.startObject("fields");
                b.startObject("semantic");
                b.field("type", "semantic_text");
                b.field("inference_id", "my_inference_id");
                b.endObject();
                b.endObject();
            }), useLegacyFormat));
            assertThat(e.getMessage(), containsString("Field [semantic] of type [semantic_text] can't be used in multifields"));
        } else {
            // Expected index options are the sparse-vector defaults for the chosen index version (may be null).
            IndexVersion indexVersion = SparseVectorFieldMapperTests.getIndexOptionsCompatibleIndexVersion();
            SparseVectorFieldMapper.SparseVectorIndexOptions expectedIndexOptions = SparseVectorFieldMapper.SparseVectorIndexOptions
                .getDefaultIndexOptions(indexVersion);
            SemanticTextIndexOptions semanticTextIndexOptions = expectedIndexOptions == null
                ? null
                : new SemanticTextIndexOptions(SemanticTextIndexOptions.SupportedIndexOptions.SPARSE_VECTOR, expectedIndexOptions);
            // Case 1: semantic_text as a sub-field of a text field.
            var mapperService = createMapperServiceWithIndexVersion(fieldMapping(b -> {
                b.field("type", "text");
                b.startObject("fields");
                b.startObject("semantic");
                b.field("type", "semantic_text");
                b.field("inference_id", "my_inference_id");
                b.startObject("model_settings");
                b.field("task_type", "sparse_embedding");
                b.endObject();
                b.endObject();
                b.endObject();
            }), useLegacyFormat, indexVersion);
            assertSemanticTextField(mapperService, "field.semantic", true, null, semanticTextIndexOptions);
            // Case 2: semantic_text as the parent field with a plain text sub-field.
            mapperService = createMapperServiceWithIndexVersion(fieldMapping(b -> {
                b.field("type", "semantic_text");
                b.field("inference_id", "my_inference_id");
                b.startObject("model_settings");
                b.field("task_type", "sparse_embedding");
                b.endObject();
                b.startObject("fields");
                b.startObject("text");
                b.field("type", "text");
                b.endObject();
                b.endObject();
            }), useLegacyFormat, indexVersion);
            assertSemanticTextField(mapperService, "field", true, null, semanticTextIndexOptions);
            // Case 3: semantic_text parent with a semantic_text sub-field on a different endpoint.
            mapperService = createMapperServiceWithIndexVersion(fieldMapping(b -> {
                b.field("type", "semantic_text");
                b.field("inference_id", "my_inference_id");
                b.startObject("model_settings");
                b.field("task_type", "sparse_embedding");
                b.endObject();
                b.startObject("fields");
                b.startObject("semantic");
                b.field("type", "semantic_text");
                b.field("inference_id", "another_inference_id");
                b.startObject("model_settings");
                b.field("task_type", "sparse_embedding");
                b.endObject();
                b.endObject();
                b.endObject();
            }), useLegacyFormat, indexVersion);
            assertSemanticTextField(mapperService, "field", true, null, semanticTextIndexOptions);
            assertSemanticTextField(mapperService, "field.semantic", true, null, semanticTextIndexOptions);
            // Case 4: a sub-field name colliding with an internal inference field is rejected.
            Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> {
                b.field("type", "semantic_text");
                b.field("inference_id", "my_inference_id");
                b.startObject("fields");
                b.startObject("inference");
                b.field("type", "text");
                b.endObject();
                b.endObject();
            }), useLegacyFormat));
            assertThat(e.getMessage(), containsString("is already used by another field"));
        }
    }
public void testUpdateInferenceId_GivenPreviousAndNewDoNotExist() throws IOException {
String fieldName = randomAlphaOfLengthBetween(5, 15);
MapperService mapperService = createMapperService(
mapping(b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", "test_model").endObject()),
useLegacyFormat
);
assertInferenceEndpoints(mapperService, fieldName, "test_model", "test_model");
assertSemanticTextField(mapperService, fieldName, false, null, null);
merge(
mapperService,
mapping(b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", "another_model").endObject())
);
assertInferenceEndpoints(mapperService, fieldName, "another_model", "another_model");
assertSemanticTextField(mapperService, fieldName, false, null, null);
}
    /**
     * With no model_settings committed in the mapping yet, switching to an inference
     * endpoint that does not exist is allowed: there is nothing to check compatibility against.
     */
    public void testUpdateInferenceId_GivenCurrentHasNoSetModelSettingsAndNewDoesNotExist() throws IOException {
        String fieldName = randomAlphaOfLengthBetween(5, 15);
        String oldInferenceId = "old_inference_id";
        MinimalServiceSettings previousModelSettings = MinimalServiceSettings.sparseEmbedding("previous_service");
        givenModelSettings(oldInferenceId, previousModelSettings);
        var mapperService = createMapperService(
            mapping(
                b -> b.startObject(fieldName)
                    .field("type", SemanticTextFieldMapper.CONTENT_TYPE)
                    .field(INFERENCE_ID_FIELD, oldInferenceId)
                    .endObject()
            ),
            useLegacyFormat
        );
        assertInferenceEndpoints(mapperService, fieldName, oldInferenceId, oldInferenceId);
        // Model settings were never written into the mapping (third argument false).
        assertSemanticTextField(mapperService, fieldName, false, null, null);
        String newInferenceId = "new_inference_id";
        merge(
            mapperService,
            mapping(b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", newInferenceId).endObject())
        );
        assertInferenceEndpoints(mapperService, fieldName, newInferenceId, newInferenceId);
        assertSemanticTextField(mapperService, fieldName, false, null, null);
    }
    /**
     * Once model_settings are committed in the mapping, switching to an inference endpoint
     * that does not exist must fail: compatibility with the committed settings cannot be verified.
     */
    public void testUpdateInferenceId_GivenCurrentHasModelSettingsAndNewDoesNotExist() throws IOException {
        String fieldName = randomAlphaOfLengthBetween(5, 15);
        String oldInferenceId = "old_inference_id";
        MinimalServiceSettings previousModelSettings = MinimalServiceSettings.sparseEmbedding("previous_service");
        givenModelSettings(oldInferenceId, previousModelSettings);
        var mapperService = mapperServiceForFieldWithModelSettings(fieldName, oldInferenceId, previousModelSettings);
        assertInferenceEndpoints(mapperService, fieldName, oldInferenceId, oldInferenceId);
        assertSemanticTextField(mapperService, fieldName, true, null, null);
        String newInferenceId = "new_inference_id";
        Exception exc = expectThrows(
            IllegalArgumentException.class,
            () -> merge(
                mapperService,
                mapping(b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", newInferenceId).endObject())
            )
        );
        assertThat(
            exc.getMessage(),
            containsString(
                "Cannot update [semantic_text] field ["
                    + fieldName
                    + "] because inference endpoint ["
                    + newInferenceId
                    + "] does not exist."
            )
        );
    }
    /**
     * Randomized: while no model_settings have been committed to the mapping, the field may be
     * repointed at an endpoint with any (even task-type-incompatible) settings, and the
     * embeddings sub-field mapper is rebuilt to match the new model.
     */
    public void testUpdateInferenceId_GivenCurrentHasNoModelSettingsAndNewIsIncompatibleTaskType_ShouldSucceed() throws IOException {
        for (int randomizedRun = 0; randomizedRun < 10; randomizedRun++) {
            String fieldName = randomAlphaOfLengthBetween(5, 15);
            String oldInferenceId = "old_inference_id";
            TestModel oldModel = TestModel.createRandomInstance();
            givenModelSettings(oldInferenceId, new MinimalServiceSettings(oldModel));
            var mapperService = createMapperService(
                mapping(
                    b -> b.startObject(fieldName)
                        .field("type", SemanticTextFieldMapper.CONTENT_TYPE)
                        .field(INFERENCE_ID_FIELD, oldInferenceId)
                        .endObject()
                ),
                useLegacyFormat
            );
            assertInferenceEndpoints(mapperService, fieldName, oldInferenceId, oldInferenceId);
            assertSemanticTextField(mapperService, fieldName, false, null, null);
            assertEmbeddingsFieldMapperMatchesModel(mapperService, fieldName, oldModel);
            String newInferenceId = "new_inference_id";
            // Deliberately pick a model whose settings differ from the old one.
            TestModel newModel = randomValueOtherThan(oldModel, TestModel::createRandomInstance);
            givenModelSettings(newInferenceId, new MinimalServiceSettings(newModel));
            merge(
                mapperService,
                mapping(b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", newInferenceId).endObject())
            );
            assertInferenceEndpoints(mapperService, fieldName, newInferenceId, newInferenceId);
            assertSemanticTextField(mapperService, fieldName, false, null, null);
            assertEmbeddingsFieldMapperMatchesModel(mapperService, fieldName, newModel);
        }
    }
    /**
     * Committed sparse model settings can be swapped to a new endpoint with task-type
     * compatible (also sparse) settings; the field then reflects the new endpoint's settings.
     */
    public void testUpdateInferenceId_GivenCurrentHasSparseModelSettingsAndNewIsCompatible() throws IOException {
        String fieldName = randomAlphaOfLengthBetween(5, 15);
        String oldInferenceId = "old_inference_id";
        MinimalServiceSettings previousModelSettings = MinimalServiceSettings.sparseEmbedding("previous_service");
        givenModelSettings(oldInferenceId, previousModelSettings);
        MapperService mapperService = mapperServiceForFieldWithModelSettings(fieldName, oldInferenceId, previousModelSettings);
        assertInferenceEndpoints(mapperService, fieldName, oldInferenceId, oldInferenceId);
        assertSemanticTextField(mapperService, fieldName, true, null, null);
        String newInferenceId = "new_inference_id";
        MinimalServiceSettings newModelSettings = MinimalServiceSettings.sparseEmbedding("new_service");
        givenModelSettings(newInferenceId, newModelSettings);
        merge(
            mapperService,
            mapping(b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", newInferenceId).endObject())
        );
        assertInferenceEndpoints(mapperService, fieldName, newInferenceId, newInferenceId);
        assertSemanticTextField(mapperService, fieldName, true, null, null);
        SemanticTextFieldMapper semanticFieldMapper = getSemanticFieldMapper(mapperService, fieldName);
        assertThat(semanticFieldMapper.fieldType().getModelSettings(), equalTo(newModelSettings));
    }
    /**
     * Removing the explicit inference_id falls back to the default endpoint
     * ({@code DEFAULT_EIS_ELSER_INFERENCE_ID}); the update succeeds here because the
     * default endpoint is also registered as a sparse-embedding model.
     */
    public void testUpdateInferenceId_GivenCurrentHasSparseModelSettingsAndNewSetsDefault() throws IOException {
        String fieldName = randomAlphaOfLengthBetween(5, 15);
        String oldInferenceId = "old_inference_id";
        MinimalServiceSettings previousModelSettings = MinimalServiceSettings.sparseEmbedding("previous_service");
        givenModelSettings(oldInferenceId, previousModelSettings);
        MapperService mapperService = mapperServiceForFieldWithModelSettings(fieldName, oldInferenceId, previousModelSettings);
        assertInferenceEndpoints(mapperService, fieldName, oldInferenceId, oldInferenceId);
        assertSemanticTextField(mapperService, fieldName, true, null, null);
        MinimalServiceSettings newModelSettings = MinimalServiceSettings.sparseEmbedding("new_service");
        givenModelSettings(DEFAULT_EIS_ELSER_INFERENCE_ID, newModelSettings);
        // No inference_id in the merged mapping -> the default endpoint is used.
        merge(mapperService, mapping(b -> b.startObject(fieldName).field("type", "semantic_text").endObject()));
        assertInferenceEndpoints(mapperService, fieldName, DEFAULT_EIS_ELSER_INFERENCE_ID, DEFAULT_EIS_ELSER_INFERENCE_ID);
        assertSemanticTextField(mapperService, fieldName, true, null, null);
        SemanticTextFieldMapper semanticFieldMapper = getSemanticFieldMapper(mapperService, fieldName);
        assertThat(semanticFieldMapper.fieldType().getModelSettings(), equalTo(newModelSettings));
    }
    /**
     * Committed sparse model settings cannot be swapped to a text-embedding endpoint:
     * the task types are incompatible and the merge must fail with a descriptive message.
     */
    public void testUpdateInferenceId_GivenCurrentHasSparseModelSettingsAndNewIsIncompatibleTaskType() throws IOException {
        String fieldName = randomAlphaOfLengthBetween(5, 15);
        String oldInferenceId = "old_inference_id";
        MinimalServiceSettings previousModelSettings = MinimalServiceSettings.sparseEmbedding("previous_service");
        givenModelSettings(oldInferenceId, previousModelSettings);
        MapperService mapperService = mapperServiceForFieldWithModelSettings(fieldName, oldInferenceId, previousModelSettings);
        assertInferenceEndpoints(mapperService, fieldName, oldInferenceId, oldInferenceId);
        assertSemanticTextField(mapperService, fieldName, true, null, null);
        String newInferenceId = "new_inference_id";
        MinimalServiceSettings newModelSettings = MinimalServiceSettings.textEmbedding(
            "new_service",
            48,
            SimilarityMeasure.L2_NORM,
            DenseVectorFieldMapper.ElementType.BIT
        );
        givenModelSettings(newInferenceId, newModelSettings);
        Exception exc = expectThrows(
            IllegalArgumentException.class,
            () -> merge(
                mapperService,
                mapping(b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", newInferenceId).endObject())
            )
        );
        assertThat(
            exc.getMessage(),
            containsString(
                "Cannot update [semantic_text] field ["
                    + fieldName
                    + "] because inference endpoint ["
                    + oldInferenceId
                    + "] with model settings ["
                    + previousModelSettings
                    + "] is not compatible with new inference endpoint ["
                    + newInferenceId
                    + "] with model settings ["
                    + newModelSettings
                    + "]"
            )
        );
    }
    /**
     * Committed dense model settings can be swapped to a new text-embedding endpoint whose
     * dimensions, similarity, and element type all match; the new settings are then in effect.
     */
    public void testUpdateInferenceId_GivenCurrentHasDenseModelSettingsAndNewIsCompatible() throws IOException {
        String fieldName = randomAlphaOfLengthBetween(5, 15);
        String oldInferenceId = "old_inference_id";
        MinimalServiceSettings previousModelSettings = MinimalServiceSettings.textEmbedding(
            "previous_service",
            48,
            SimilarityMeasure.L2_NORM,
            DenseVectorFieldMapper.ElementType.BIT
        );
        givenModelSettings(oldInferenceId, previousModelSettings);
        MapperService mapperService = mapperServiceForFieldWithModelSettings(fieldName, oldInferenceId, previousModelSettings);
        assertInferenceEndpoints(mapperService, fieldName, oldInferenceId, oldInferenceId);
        assertSemanticTextField(mapperService, fieldName, true, null, null);
        String newInferenceId = "new_inference_id";
        // Same dimensions, similarity, and element type as the previous settings.
        MinimalServiceSettings newModelSettings = MinimalServiceSettings.textEmbedding(
            "new_service",
            48,
            SimilarityMeasure.L2_NORM,
            DenseVectorFieldMapper.ElementType.BIT
        );
        givenModelSettings(newInferenceId, newModelSettings);
        merge(
            mapperService,
            mapping(b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", newInferenceId).endObject())
        );
        assertInferenceEndpoints(mapperService, fieldName, newInferenceId, newInferenceId);
        assertSemanticTextField(mapperService, fieldName, true, null, null);
        SemanticTextFieldMapper semanticFieldMapper = getSemanticFieldMapper(mapperService, fieldName);
        assertThat(semanticFieldMapper.fieldType().getModelSettings(), equalTo(newModelSettings));
    }
    /**
     * Committed dense model settings cannot be swapped to a sparse-embedding endpoint:
     * the task types are incompatible and the merge must fail with a descriptive message.
     */
    public void testUpdateInferenceId_GivenCurrentHasDenseModelSettingsAndNewIsIncompatibleTaskType() throws IOException {
        String fieldName = randomAlphaOfLengthBetween(5, 15);
        String oldInferenceId = "old_inference_id";
        MinimalServiceSettings previousModelSettings = MinimalServiceSettings.textEmbedding(
            "previous_service",
            48,
            SimilarityMeasure.L2_NORM,
            DenseVectorFieldMapper.ElementType.BIT
        );
        givenModelSettings(oldInferenceId, previousModelSettings);
        MapperService mapperService = mapperServiceForFieldWithModelSettings(fieldName, oldInferenceId, previousModelSettings);
        assertInferenceEndpoints(mapperService, fieldName, oldInferenceId, oldInferenceId);
        assertSemanticTextField(mapperService, fieldName, true, null, null);
        String newInferenceId = "new_inference_id";
        MinimalServiceSettings newModelSettings = MinimalServiceSettings.sparseEmbedding("new_service");
        givenModelSettings(newInferenceId, newModelSettings);
        Exception exc = expectThrows(
            IllegalArgumentException.class,
            () -> merge(
                mapperService,
                mapping(b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", newInferenceId).endObject())
            )
        );
        assertThat(
            exc.getMessage(),
            containsString(
                "Cannot update [semantic_text] field ["
                    + fieldName
                    + "] because inference endpoint ["
                    + oldInferenceId
                    + "] with model settings ["
                    + previousModelSettings
                    + "] is not compatible with new inference endpoint ["
                    + newInferenceId
                    + "] with model settings ["
                    + newModelSettings
                    + "]"
            )
        );
    }
public void testUpdateInferenceId_GivenCurrentHasDenseModelSettingsAndNewHasIncompatibleDimensions() throws IOException {
testUpdateInferenceId_GivenDenseModelsWithDifferentSettings(
MinimalServiceSettings.textEmbedding("previous_service", 48, SimilarityMeasure.L2_NORM, DenseVectorFieldMapper.ElementType.BIT),
MinimalServiceSettings.textEmbedding("new_service", 40, SimilarityMeasure.L2_NORM, DenseVectorFieldMapper.ElementType.BIT)
);
}
public void testUpdateInferenceId_GivenCurrentHasDenseModelSettingsAndNewHasIncompatibleSimilarityMeasure() throws IOException {
testUpdateInferenceId_GivenDenseModelsWithDifferentSettings(
MinimalServiceSettings.textEmbedding(
"previous_service",
48,
SimilarityMeasure.L2_NORM,
DenseVectorFieldMapper.ElementType.BYTE
),
MinimalServiceSettings.textEmbedding("new_service", 48, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.BYTE)
);
}
public void testUpdateInferenceId_GivenCurrentHasDenseModelSettingsAndNewHasIncompatibleElementType() throws IOException {
testUpdateInferenceId_GivenDenseModelsWithDifferentSettings(
MinimalServiceSettings.textEmbedding(
"previous_service",
48,
SimilarityMeasure.L2_NORM,
DenseVectorFieldMapper.ElementType.BYTE
),
MinimalServiceSettings.textEmbedding("new_service", 48, SimilarityMeasure.L2_NORM, DenseVectorFieldMapper.ElementType.BIT)
);
}
    /**
     * Removing the explicit inference_id from a field with committed dense settings falls back
     * to the default endpoint, which is sparse here — the task-type mismatch must be rejected.
     */
    public void testUpdateInferenceId_CurrentHasDenseModelSettingsAndNewSetsDefault_ShouldFailAsDefaultIsSparse() throws IOException {
        String fieldName = randomAlphaOfLengthBetween(5, 15);
        String oldInferenceId = "old_inference_id";
        MinimalServiceSettings previousModelSettings = MinimalServiceSettings.textEmbedding(
            "previous_service",
            48,
            SimilarityMeasure.L2_NORM,
            DenseVectorFieldMapper.ElementType.BIT
        );
        givenModelSettings(oldInferenceId, previousModelSettings);
        MapperService mapperService = mapperServiceForFieldWithModelSettings(fieldName, oldInferenceId, previousModelSettings);
        assertInferenceEndpoints(mapperService, fieldName, oldInferenceId, oldInferenceId);
        assertSemanticTextField(mapperService, fieldName, true, null, null);
        MinimalServiceSettings newModelSettings = MinimalServiceSettings.sparseEmbedding("new_service");
        givenModelSettings(DEFAULT_EIS_ELSER_INFERENCE_ID, newModelSettings);
        // Omitting inference_id resolves to the (sparse) default endpoint.
        Exception exc = expectThrows(
            IllegalArgumentException.class,
            () -> merge(mapperService, mapping(b -> b.startObject(fieldName).field("type", "semantic_text").endObject()))
        );
        assertThat(
            exc.getMessage(),
            containsString(
                "Cannot update [semantic_text] field ["
                    + fieldName
                    + "] because inference endpoint ["
                    + oldInferenceId
                    + "] with model settings ["
                    + previousModelSettings
                    + "] is not compatible with new inference endpoint ["
                    + DEFAULT_EIS_ELSER_INFERENCE_ID
                    + "] with model settings ["
                    + newModelSettings
                    + "]"
            )
        );
    }
private static void assertEmbeddingsFieldMapperMatchesModel(MapperService mapperService, String fieldName, Model model) {
Mapper embeddingsFieldMapper = mapperService.mappingLookup().getMapper(getEmbeddingsFieldName(fieldName));
switch (model.getTaskType()) {
case SPARSE_EMBEDDING -> assertThat(embeddingsFieldMapper, is(instanceOf(SparseVectorFieldMapper.class)));
case TEXT_EMBEDDING -> assertTextEmbeddingsFieldMapperMatchesModel(embeddingsFieldMapper, model);
default -> throw new AssertionError("Unexpected task type [" + model.getTaskType() + "]");
}
}
private static void assertTextEmbeddingsFieldMapperMatchesModel(Mapper embeddingsFieldMapper, Model model) {
Function<SimilarityMeasure, DenseVectorFieldMapper.VectorSimilarity> convertToVectorSimilarity = s -> switch (s) {
case COSINE -> DenseVectorFieldMapper.VectorSimilarity.COSINE;
case DOT_PRODUCT -> DenseVectorFieldMapper.VectorSimilarity.DOT_PRODUCT;
case L2_NORM -> DenseVectorFieldMapper.VectorSimilarity.L2_NORM;
};
assertThat(embeddingsFieldMapper, is(instanceOf(DenseVectorFieldMapper.class)));
DenseVectorFieldMapper denseVectorFieldMapper = (DenseVectorFieldMapper) embeddingsFieldMapper;
ServiceSettings modelServiceSettings = model.getConfigurations().getServiceSettings();
assertThat(denseVectorFieldMapper.fieldType().getVectorDimensions(), equalTo(modelServiceSettings.dimensions()));
assertThat(denseVectorFieldMapper.fieldType().getElementType(), equalTo(modelServiceSettings.elementType()));
assertThat(
denseVectorFieldMapper.fieldType().getSimilarity(),
equalTo(convertToVectorSimilarity.apply(modelServiceSettings.similarity()))
);
}
    /**
     * Shared scenario: both endpoints are text_embedding but with differing settings
     * (dimensions, similarity, or element type); updating inference_id must be rejected.
     *
     * @param previousModelSettings settings committed for the current endpoint
     * @param newModelSettings      settings registered for the replacement endpoint
     */
    private void testUpdateInferenceId_GivenDenseModelsWithDifferentSettings(
        MinimalServiceSettings previousModelSettings,
        MinimalServiceSettings newModelSettings
    ) throws IOException {
        // Sanity-check the scenario preconditions.
        assertThat(previousModelSettings.taskType(), equalTo(TaskType.TEXT_EMBEDDING));
        assertThat(newModelSettings.taskType(), equalTo(TaskType.TEXT_EMBEDDING));
        assertThat(newModelSettings, not(equalTo(previousModelSettings)));
        String fieldName = randomAlphaOfLengthBetween(5, 15);
        String oldInferenceId = "old_inference_id";
        givenModelSettings(oldInferenceId, previousModelSettings);
        MapperService mapperService = mapperServiceForFieldWithModelSettings(fieldName, oldInferenceId, previousModelSettings);
        assertInferenceEndpoints(mapperService, fieldName, oldInferenceId, oldInferenceId);
        assertSemanticTextField(mapperService, fieldName, true, null, null);
        String newInferenceId = "new_inference_id";
        givenModelSettings(newInferenceId, newModelSettings);
        Exception exc = expectThrows(
            IllegalArgumentException.class,
            () -> merge(
                mapperService,
                mapping(b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", newInferenceId).endObject())
            )
        );
        assertThat(
            exc.getMessage(),
            containsString(
                "Cannot update [semantic_text] field ["
                    + fieldName
                    + "] because inference endpoint ["
                    + oldInferenceId
                    + "] with model settings ["
                    + previousModelSettings
                    + "] is not compatible with new inference endpoint ["
                    + newInferenceId
                    + "] with model settings ["
                    + newModelSettings
                    + "]"
            )
        );
    }
    /**
     * Mappings created with committed sparse model settings pick up the default sparse-vector
     * index options, both with and without a distinct search_inference_id.
     */
    public void testDynamicUpdate() throws IOException {
        final String fieldName = "semantic";
        final String inferenceId = "test_service";
        final String searchInferenceId = "search_test_service";
        {
            // Same endpoint used for both indexing and search.
            MapperService mapperService = mapperServiceForFieldWithModelSettings(
                fieldName,
                inferenceId,
                new MinimalServiceSettings("service", TaskType.SPARSE_EMBEDDING, null, null, null)
            );
            var expectedIndexOptions = getDefaultSparseVectorIndexOptionsForMapper(mapperService);
            assertSemanticTextField(mapperService, fieldName, true, null, expectedIndexOptions);
            assertInferenceEndpoints(mapperService, fieldName, inferenceId, inferenceId);
        }
        {
            // Separate search endpoint configured via search_inference_id.
            MapperService mapperService = mapperServiceForFieldWithModelSettings(
                fieldName,
                inferenceId,
                searchInferenceId,
                new MinimalServiceSettings("service", TaskType.SPARSE_EMBEDDING, null, null, null)
            );
            var expectedIndexOptions = getDefaultSparseVectorIndexOptionsForMapper(mapperService);
            assertSemanticTextField(mapperService, fieldName, true, null, expectedIndexOptions);
            assertInferenceEndpoints(mapperService, fieldName, inferenceId, searchInferenceId);
        }
    }
    /**
     * Exercises merging model_settings into an existing semantic_text mapping at several
     * nesting depths: invalid settings are rejected, valid sparse settings commit and persist,
     * and a later attempt to change the committed task type (sparse -> dense) fails.
     */
    public void testUpdateModelSettings() throws IOException {
        for (int depth = 1; depth < 5; depth++) {
            String fieldName = randomFieldName(depth);
            MapperService mapperService = createMapperService(
                mapping(b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", "test_model").endObject()),
                useLegacyFormat
            );
            assertSemanticTextField(mapperService, fieldName, false, null, null);
            {
                // model_settings without the mandatory task_type is rejected.
                Exception exc = expectThrows(
                    MapperParsingException.class,
                    () -> merge(
                        mapperService,
                        mapping(
                            b -> b.startObject(fieldName)
                                .field("type", "semantic_text")
                                .field("inference_id", "test_model")
                                .startObject("model_settings")
                                .field("inference_id", "test_model")
                                .endObject()
                                .endObject()
                        )
                    )
                );
                assertThat(exc.getMessage(), containsString("Required [task_type]"));
            }
            {
                // Valid sparse model_settings commit successfully.
                merge(
                    mapperService,
                    mapping(
                        b -> b.startObject(fieldName)
                            .field("type", "semantic_text")
                            .field("inference_id", "test_model")
                            .startObject("model_settings")
                            .field("task_type", "sparse_embedding")
                            .endObject()
                            .endObject()
                    )
                );
                var expectedIndexOptions = getDefaultSparseVectorIndexOptionsForMapper(mapperService);
                assertSemanticTextField(mapperService, fieldName, true, null, expectedIndexOptions);
            }
            {
                // Merging without model_settings keeps the previously committed settings.
                merge(
                    mapperService,
                    mapping(b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", "test_model").endObject())
                );
                var expectedIndexOptions = getDefaultSparseVectorIndexOptionsForMapper(mapperService);
                assertSemanticTextField(mapperService, fieldName, true, null, expectedIndexOptions);
            }
            {
                // Switching the committed task type to text_embedding is rejected.
                Exception exc = expectThrows(
                    IllegalArgumentException.class,
                    () -> merge(
                        mapperService,
                        mapping(
                            b -> b.startObject(fieldName)
                                .field("type", "semantic_text")
                                .field("inference_id", "test_model")
                                .startObject("model_settings")
                                .field("task_type", "text_embedding")
                                .field("dimensions", 10)
                                .field("similarity", "cosine")
                                .field("element_type", "float")
                                .endObject()
                                .endObject()
                        )
                    )
                );
                assertThat(exc.getMessage(), containsString("cannot be changed from type [sparse_vector] to [dense_vector]"));
            }
        }
    }
    /**
     * Unknown parameters inside a dense_vector index_options object must be rejected when
     * parsing a semantic_text mapping, at any field nesting depth.
     */
    public void testDenseVectorIndexOptionValidation() throws IOException {
        for (int depth = 1; depth < 5; depth++) {
            String inferenceId = "test_model";
            String fieldName = randomFieldName(depth);
            DenseVectorFieldMapper.DenseVectorIndexOptions indexOptions = DenseVectorFieldTypeTests.randomIndexOptionsAll();
            Exception exc = expectThrows(MapperParsingException.class, () -> createMapperService(mapping(b -> {
                b.startObject(fieldName);
                b.field("type", SemanticTextFieldMapper.CONTENT_TYPE);
                b.field(INFERENCE_ID_FIELD, inferenceId);
                b.startObject(INDEX_OPTIONS_FIELD);
                b.startObject("dense_vector");
                b.field("type", indexOptions.getType().name().toLowerCase(Locale.ROOT));
                // A parameter no dense_vector index-options type accepts.
                b.field("unsupported_param", "any_value");
                b.endObject();
                b.endObject();
                b.endObject();
            }), useLegacyFormat));
            assertTrue(exc.getMessage().contains("unsupported parameters"));
        }
    }
private void addSparseVectorModelSettingsToBuilder(XContentBuilder b) throws IOException {
b.startObject("model_settings");
b.field("task_type", TaskType.SPARSE_EMBEDDING);
b.endObject();
}
    /**
     * Randomized sparse-vector index_options (including none) parse successfully for a
     * semantic_text field with sparse model settings, and the parsed mapping reflects them.
     */
    public void testSparseVectorIndexOptionsValidationAndMapping() throws IOException {
        for (int depth = 1; depth < 5; depth++) {
            String inferenceId = "test_model";
            String fieldName = randomFieldName(depth);
            IndexVersion indexVersion = SparseVectorFieldMapperTests.getIndexOptionsCompatibleIndexVersion();
            var sparseVectorIndexOptions = SparseVectorFieldTypeTests.randomSparseVectorIndexOptions();
            var expectedIndexOptions = sparseVectorIndexOptions == null
                ? null
                : new SemanticTextIndexOptions(SemanticTextIndexOptions.SupportedIndexOptions.SPARSE_VECTOR, sparseVectorIndexOptions);
            // should not throw an exception
            MapperService mapper = createMapperServiceWithIndexVersion(mapping(b -> {
                b.startObject(fieldName);
                {
                    b.field("type", SemanticTextFieldMapper.CONTENT_TYPE);
                    b.field(INFERENCE_ID_FIELD, inferenceId);
                    addSparseVectorModelSettingsToBuilder(b);
                    if (sparseVectorIndexOptions != null) {
                        b.startObject(INDEX_OPTIONS_FIELD);
                        {
                            b.field(SparseVectorFieldMapper.CONTENT_TYPE);
                            sparseVectorIndexOptions.toXContent(b, null);
                        }
                        b.endObject();
                    }
                }
                b.endObject();
            }), useLegacyFormat, indexVersion);
            assertSemanticTextField(mapper, fieldName, true, null, expectedIndexOptions);
        }
    }
public void testSparseVectorMappingUpdate() throws IOException {
for (int i = 0; i < 5; i++) {
Model model = TestModel.createRandomInstance(TaskType.SPARSE_EMBEDDING);
when(globalModelRegistry.getMinimalServiceSettings(anyString())).thenAnswer(
invocation -> { return new MinimalServiceSettings(model); }
);
final ChunkingSettings chunkingSettings = generateRandomChunkingSettings(false);
IndexVersion indexVersion = SparseVectorFieldMapperTests.getIndexOptionsCompatibleIndexVersion();
final SemanticTextIndexOptions indexOptions = randomSemanticTextIndexOptions(TaskType.SPARSE_EMBEDDING);
String fieldName = "field";
MapperService mapperService = createMapperServiceWithIndexVersion(
mapping(b -> addSemanticTextMapping(b, fieldName, model.getInferenceEntityId(), null, chunkingSettings, indexOptions)),
useLegacyFormat,
indexVersion
);
var expectedIndexOptions = (indexOptions == null)
? new SemanticTextIndexOptions(
SemanticTextIndexOptions.SupportedIndexOptions.SPARSE_VECTOR,
SparseVectorFieldMapper.SparseVectorIndexOptions.getDefaultIndexOptions(indexVersion)
)
: indexOptions;
assertSemanticTextField(mapperService, fieldName, false, chunkingSettings, expectedIndexOptions);
final SemanticTextIndexOptions newIndexOptions = randomSemanticTextIndexOptions(TaskType.SPARSE_EMBEDDING);
expectedIndexOptions = (newIndexOptions == null)
? new SemanticTextIndexOptions(
SemanticTextIndexOptions.SupportedIndexOptions.SPARSE_VECTOR,
SparseVectorFieldMapper.SparseVectorIndexOptions.getDefaultIndexOptions(indexVersion)
)
: newIndexOptions;
ChunkingSettings newChunkingSettings = generateRandomChunkingSettingsOtherThan(chunkingSettings);
merge(
mapperService,
mapping(b -> addSemanticTextMapping(b, fieldName, model.getInferenceEntityId(), null, newChunkingSettings, newIndexOptions))
);
assertSemanticTextField(mapperService, fieldName, false, newChunkingSettings, expectedIndexOptions);
}
}
    /**
     * search_inference_id can be set, changed, and removed via mapping merges — both before
     * model_settings are committed and after — without affecting the indexing inference_id.
     */
    public void testUpdateSearchInferenceId() throws IOException {
        final String inferenceId = "test_inference_id";
        final String searchInferenceId1 = "test_search_inference_id_1";
        final String searchInferenceId2 = "test_search_inference_id_2";
        // Builds a semantic_text mapping for field f with an optional search_inference_id sid.
        CheckedBiFunction<String, String, XContentBuilder, IOException> buildMapping = (f, sid) -> mapping(b -> {
            b.startObject(f).field("type", "semantic_text").field("inference_id", inferenceId);
            if (sid != null) {
                b.field("search_inference_id", sid);
            }
            b.endObject();
        });
        for (int depth = 1; depth < 5; depth++) {
            String fieldName = randomFieldName(depth);
            // Phase 1: no committed model_settings.
            MapperService mapperService = createMapperService(buildMapping.apply(fieldName, null), useLegacyFormat);
            assertSemanticTextField(mapperService, fieldName, false, null, null);
            assertInferenceEndpoints(mapperService, fieldName, inferenceId, inferenceId);
            merge(mapperService, buildMapping.apply(fieldName, searchInferenceId1));
            assertSemanticTextField(mapperService, fieldName, false, null, null)
            assertInferenceEndpoints(mapperService, fieldName, inferenceId, searchInferenceId1);
            merge(mapperService, buildMapping.apply(fieldName, searchInferenceId2));
            assertSemanticTextField(mapperService, fieldName, false, null, null);
            assertInferenceEndpoints(mapperService, fieldName, inferenceId, searchInferenceId2);
            merge(mapperService, buildMapping.apply(fieldName, null));
            assertSemanticTextField(mapperService, fieldName, false, null, null);
            assertInferenceEndpoints(mapperService, fieldName, inferenceId, inferenceId);
            // Phase 2: with committed sparse model_settings.
            mapperService = mapperServiceForFieldWithModelSettings(
                fieldName,
                inferenceId,
                new MinimalServiceSettings("my-service", TaskType.SPARSE_EMBEDDING, null, null, null)
            );
            var expectedIndexOptions = getDefaultSparseVectorIndexOptionsForMapper(mapperService);
            assertSemanticTextField(mapperService, fieldName, true, null, expectedIndexOptions);
            assertInferenceEndpoints(mapperService, fieldName, inferenceId, inferenceId);
            merge(mapperService, buildMapping.apply(fieldName, searchInferenceId1));
            assertSemanticTextField(mapperService, fieldName, true, null, expectedIndexOptions);
            assertInferenceEndpoints(mapperService, fieldName, inferenceId, searchInferenceId1);
            merge(mapperService, buildMapping.apply(fieldName, searchInferenceId2));
            assertSemanticTextField(mapperService, fieldName, true, null, expectedIndexOptions);
            assertInferenceEndpoints(mapperService, fieldName, inferenceId, searchInferenceId2);
            merge(mapperService, buildMapping.apply(fieldName, null));
            assertSemanticTextField(mapperService, fieldName, true, null, expectedIndexOptions);
            assertInferenceEndpoints(mapperService, fieldName, inferenceId, inferenceId);
        }
    }
    /**
     * Central assertion helper: verifies the full internal structure of a semantic_text field —
     * field type, chunks sub-mapper, legacy-vs-new text/offset handling, the embeddings mapper
     * (sparse or dense) against the expected index options, and the chunking settings.
     *
     * @param expectedModelSettings    whether model_settings are expected to be committed
     * @param expectedChunkingSettings expected chunking settings, or {@code null} for none
     * @param expectedIndexOptions     expected index options, or {@code null} for none/defaults
     */
    private static void assertSemanticTextField(
        MapperService mapperService,
        String fieldName,
        boolean expectedModelSettings,
        ChunkingSettings expectedChunkingSettings,
        SemanticTextIndexOptions expectedIndexOptions
    ) {
        SemanticTextFieldMapper semanticFieldMapper = getSemanticFieldMapper(mapperService, fieldName);
        var fieldType = mapperService.fieldType(fieldName);
        assertNotNull(fieldType);
        assertThat(fieldType, instanceOf(SemanticTextFieldMapper.SemanticTextFieldType.class));
        SemanticTextFieldMapper.SemanticTextFieldType semanticTextFieldType = (SemanticTextFieldMapper.SemanticTextFieldType) fieldType;
        assertSame(semanticFieldMapper.fieldType(), semanticTextFieldType);
        // The chunks sub-field must be registered as a nested mapper under the expected path.
        NestedObjectMapper chunksMapper = mapperService.mappingLookup()
            .nestedLookup()
            .getNestedMappers()
            .get(getChunksFieldName(fieldName));
        assertThat(chunksMapper, equalTo(semanticFieldMapper.fieldType().getChunksField()))
        assertThat(chunksMapper.fullPath(), equalTo(getChunksFieldName(fieldName)));
        Mapper textMapper = chunksMapper.getMapper(TEXT_FIELD);
        if (semanticTextFieldType.useLegacyFormat()) {
            // Legacy format stores chunk text in an unindexed keyword sub-field.
            assertNotNull(textMapper);
            assertThat(textMapper, instanceOf(KeywordFieldMapper.class));
            KeywordFieldMapper textFieldMapper = (KeywordFieldMapper) textMapper;
            assertThat(textFieldMapper.fieldType().indexType(), equalTo(IndexType.NONE));
        } else {
            // New format has no text sub-field; chunk locations live in an offsets field.
            assertNull(textMapper);
            var offsetMapper = semanticTextFieldType.getOffsetsField();
            assertThat(offsetMapper, instanceOf(OffsetSourceFieldMapper.class));
        }
        if (expectedModelSettings) {
            assertNotNull(semanticFieldMapper.fieldType().getModelSettings());
            Mapper embeddingsMapper = chunksMapper.getMapper(CHUNKED_EMBEDDINGS_FIELD);
            assertNotNull(embeddingsMapper);
            assertThat(embeddingsMapper, instanceOf(FieldMapper.class));
            FieldMapper embeddingsFieldMapper = (FieldMapper) embeddingsMapper;
            assertSame(embeddingsFieldMapper.fieldType(), mapperService.mappingLookup().getFieldType(getEmbeddingsFieldName(fieldName)));
            assertThat(embeddingsMapper.fullPath(), equalTo(getEmbeddingsFieldName(fieldName)));
            switch (semanticFieldMapper.fieldType().getModelSettings().taskType()) {
                case SPARSE_EMBEDDING -> {
                    assertThat(embeddingsMapper, instanceOf(SparseVectorFieldMapper.class));
                    SparseVectorFieldMapper sparseVectorFieldMapper = (SparseVectorFieldMapper) embeddingsMapper;
                    assertEquals(sparseVectorFieldMapper.fieldType().isStored(), semanticTextFieldType.useLegacyFormat() == false);
                    SparseVectorFieldMapper.SparseVectorIndexOptions applied = sparseVectorFieldMapper.fieldType().getIndexOptions();
                    SparseVectorFieldMapper.SparseVectorIndexOptions expected = expectedIndexOptions == null
                        ? null
                        : (SparseVectorFieldMapper.SparseVectorIndexOptions) expectedIndexOptions.indexOptions();
                    // When the caller passed no expectation but defaults were applied, accept the
                    // defaults implied by the index creation version.
                    if (expected == null && applied != null) {
                        var indexVersionCreated = mapperService.getIndexSettings().getIndexVersionCreated();
                        if (SparseVectorFieldMapper.SparseVectorIndexOptions.isDefaultOptions(applied, indexVersionCreated)) {
                            expected = SparseVectorFieldMapper.SparseVectorIndexOptions.getDefaultIndexOptions(indexVersionCreated);
                        }
                    }
                    assertEquals(expected, applied);
                }
                case TEXT_EMBEDDING -> {
                    assertThat(embeddingsMapper, instanceOf(DenseVectorFieldMapper.class));
                    DenseVectorFieldMapper denseVectorFieldMapper = (DenseVectorFieldMapper) embeddingsMapper;
                    if (expectedIndexOptions != null) {
                        assertEquals(expectedIndexOptions.indexOptions(), denseVectorFieldMapper.fieldType().getIndexOptions());
                    } else {
                        assertNull(denseVectorFieldMapper.fieldType().getIndexOptions());
                    }
                }
                default -> throw new AssertionError("Invalid task type");
            }
        } else {
            assertNull(semanticFieldMapper.fieldType().getModelSettings());
        }
        if (expectedChunkingSettings != null) {
            assertNotNull(semanticFieldMapper.fieldType().getChunkingSettings());
            assertEquals(expectedChunkingSettings, semanticFieldMapper.fieldType().getChunkingSettings());
        } else {
            assertNull(semanticFieldMapper.fieldType().getChunkingSettings());
        }
    }
/**
 * Looks up {@code fieldName} in the mapping and returns its mapper as a {@link SemanticTextFieldMapper},
 * failing the test if the mapper is of any other type.
 */
private static SemanticTextFieldMapper getSemanticFieldMapper(MapperService mapperService, String fieldName) {
    final Mapper fieldMapper = mapperService.mappingLookup().getMapper(fieldName);
    assertThat(fieldMapper, instanceOf(SemanticTextFieldMapper.class));
    return (SemanticTextFieldMapper) fieldMapper;
}
/**
 * Asserts that the field's resolved type is a semantic_text field type and that its configured
 * inference id and search inference id match the expected values.
 */
private static void assertInferenceEndpoints(
    MapperService mapperService,
    String fieldName,
    String expectedInferenceId,
    String expectedSearchInferenceId
) {
    var mappedType = mapperService.fieldType(fieldName);
    assertNotNull(mappedType);
    assertThat(mappedType, instanceOf(SemanticTextFieldMapper.SemanticTextFieldType.class));
    var semanticType = (SemanticTextFieldMapper.SemanticTextFieldType) mappedType;
    assertEquals(expectedInferenceId, semanticType.getInferenceId());
    assertEquals(expectedSearchInferenceId, semanticType.getSearchInferenceId());
}
/**
 * End-to-end parse test for two sparse-embedding semantic_text fields at nesting depths 1-3:
 * verifies the generated mapping, the nested Lucene documents produced by parsing, and that
 * nested sparse-vector term queries match (or don't match) the indexed chunks.
 */
public void testSuccessfulParse() throws IOException {
    for (int depth = 1; depth < 4; depth++) {
        final IndexVersion indexVersion = SemanticInferenceMetadataFieldsMapperTests.getRandomCompatibleIndexVersion(useLegacyFormat);
        final String fieldName1 = randomFieldName(depth);
        final String fieldName2 = randomFieldName(depth + 1);
        final String searchInferenceId = randomAlphaOfLength(8);
        final boolean setSearchInferenceId = randomBoolean();

        TaskType taskType = TaskType.SPARSE_EMBEDDING;
        Model model1 = TestModel.createRandomInstance(taskType);
        Model model2 = TestModel.createRandomInstance(taskType);
        // Resolve minimal service settings per inference id so each field picks up its own model.
        when(globalModelRegistry.getMinimalServiceSettings(anyString())).thenAnswer(invocation -> {
            var modelId = (String) invocation.getArguments()[0];
            if (modelId.equals(model1.getInferenceEntityId())) {
                return new MinimalServiceSettings(model1);
            }
            if (modelId.equals(model2.getInferenceEntityId())) {
                return new MinimalServiceSettings(model2);
            }
            return null;
        });

        ChunkingSettings chunkingSettings = null; // Some chunking settings configs can produce different Lucene docs counts
        SemanticTextIndexOptions indexOptions = randomSemanticTextIndexOptions(taskType);
        XContentBuilder mapping = mapping(b -> {
            addSemanticTextMapping(
                b,
                fieldName1,
                model1.getInferenceEntityId(),
                setSearchInferenceId ? searchInferenceId : null,
                chunkingSettings,
                indexOptions
            );
            addSemanticTextMapping(
                b,
                fieldName2,
                model2.getInferenceEntityId(),
                setSearchInferenceId ? searchInferenceId : null,
                chunkingSettings,
                indexOptions
            );
        });

        // When no index options are given, sparse_vector defaults for this index version are expected.
        var expectedIndexOptions = (indexOptions == null)
            ? new SemanticTextIndexOptions(
                SemanticTextIndexOptions.SupportedIndexOptions.SPARSE_VECTOR,
                SparseVectorFieldMapper.SparseVectorIndexOptions.getDefaultIndexOptions(indexVersion)
            )
            : indexOptions;

        MapperService mapperService = createMapperServiceWithIndexVersion(mapping, useLegacyFormat, indexVersion);
        assertSemanticTextField(mapperService, fieldName1, false, null, expectedIndexOptions);
        assertInferenceEndpoints(
            mapperService,
            fieldName1,
            model1.getInferenceEntityId(),
            setSearchInferenceId ? searchInferenceId : model1.getInferenceEntityId()
        );
        assertSemanticTextField(mapperService, fieldName2, false, null, expectedIndexOptions);
        assertInferenceEndpoints(
            mapperService,
            fieldName2,
            model2.getInferenceEntityId(),
            setSearchInferenceId ? searchInferenceId : model2.getInferenceEntityId()
        );

        DocumentMapper documentMapper = mapperService.documentMapper();
        ParsedDocument doc = documentMapper.parse(
            source(
                b -> addSemanticTextInferenceResults(
                    useLegacyFormat,
                    b,
                    List.of(
                        randomSemanticText(
                            useLegacyFormat,
                            fieldName1,
                            model1,
                            chunkingSettings,
                            List.of("a b", "c"),
                            XContentType.JSON
                        ),
                        randomSemanticText(useLegacyFormat, fieldName2, model2, chunkingSettings, List.of("d e f"), XContentType.JSON)
                    )
                )
            )
        );

        // 3 chunks across both fields -> 3 nested docs plus the root doc.
        List<LuceneDocument> luceneDocs = doc.docs();
        assertEquals(4, luceneDocs.size());
        for (int i = 0; i < 3; i++) {
            assertEquals(doc.rootDoc(), luceneDocs.get(i).getParent());
        }
        // nested docs are in reversed order
        assertSparseFeatures(luceneDocs.get(0), getEmbeddingsFieldName(fieldName1), 2);
        assertSparseFeatures(luceneDocs.get(1), getEmbeddingsFieldName(fieldName1), 1);
        assertSparseFeatures(luceneDocs.get(2), getEmbeddingsFieldName(fieldName2), 3);
        assertEquals(doc.rootDoc(), luceneDocs.get(3));
        assertNull(luceneDocs.get(3).getParent());

        withLuceneIndex(mapperService, iw -> iw.addDocuments(doc.docs()), reader -> {
            NestedDocuments nested = new NestedDocuments(
                mapperService.mappingLookup(),
                QueryBitSetProducer::new,
                IndexVersion.current()
            );
            LeafNestedDocuments leaf = nested.getLeafNestedDocuments(reader.leaves().get(0));

            // Every nested chunk doc must be visitable with the expected nested identity.
            Set<SearchHit.NestedIdentity> visitedNestedIdentities = new HashSet<>();
            Set<SearchHit.NestedIdentity> expectedVisitedNestedIdentities = Set.of(
                new SearchHit.NestedIdentity(getChunksFieldName(fieldName1), 0, null),
                new SearchHit.NestedIdentity(getChunksFieldName(fieldName1), 1, null),
                new SearchHit.NestedIdentity(getChunksFieldName(fieldName2), 0, null)
            );

            assertChildLeafNestedDocument(leaf, 0, 3, visitedNestedIdentities);
            assertChildLeafNestedDocument(leaf, 1, 3, visitedNestedIdentities);
            assertChildLeafNestedDocument(leaf, 2, 3, visitedNestedIdentities);
            assertEquals(expectedVisitedNestedIdentities, visitedNestedIdentities);

            // Doc 3 is the root (parent) document and has no nested identity.
            assertNull(leaf.advance(3));
            assertEquals(3, leaf.doc());
            assertEquals(3, leaf.rootDoc());
            assertNull(leaf.nestedIdentity());

            IndexSearcher searcher = newSearcher(reader);
            {
                TopDocs topDocs = searcher.search(
                    generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName1, List.of("a")),
                    10
                );
                assertEquals(1, topDocs.totalHits.value());
                assertEquals(3, topDocs.scoreDocs[0].doc);
            }
            {
                TopDocs topDocs = searcher.search(
                    generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName1, List.of("a", "b")),
                    10
                );
                assertEquals(1, topDocs.totalHits.value());
                assertEquals(3, topDocs.scoreDocs[0].doc);
            }
            {
                TopDocs topDocs = searcher.search(
                    generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName2, List.of("d")),
                    10
                );
                assertEquals(1, topDocs.totalHits.value());
                assertEquals(3, topDocs.scoreDocs[0].doc);
            }
            {
                // Token "z" was never indexed for fieldName2, so the nested query must match nothing.
                TopDocs topDocs = searcher.search(
                    generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName2, List.of("z")),
                    10
                );
                assertEquals(0, topDocs.totalHits.value());
            }
        });
    }
}
/**
 * Parsing an inference result that carries model settings and chunks but omits the required
 * {@code inference_id} must fail with a "Required [inference_id]" error.
 */
public void testMissingInferenceId() throws IOException {
    final MapperService mapperService = createMapperService(
        mapping(b -> addSemanticTextMapping(b, "field", "my_id", null, null, null)),
        useLegacyFormat
    );
    // The parse failure is wrapped in a DocumentParsingException whose cause carries the message.
    IllegalArgumentException ex = expectThrows(
        DocumentParsingException.class,
        IllegalArgumentException.class,
        () -> mapperService.documentMapper()
            .parse(
                semanticTextInferenceSource(
                    useLegacyFormat,
                    b -> b.startObject("field")
                        .startObject(INFERENCE_FIELD)
                        .field(
                            MODEL_SETTINGS_FIELD,
                            new MinimalServiceSettings("my-service", TaskType.SPARSE_EMBEDDING, null, null, null)
                        )
                        // Legacy format stores chunks as a list, new format as a map keyed by field.
                        .field(CHUNKS_FIELD, useLegacyFormat ? List.of() : Map.of())
                        .endObject()
                        .endObject()
                )
            )
    );
    assertThat(ex.getCause().getMessage(), containsString("Required [inference_id]"));
}
/**
 * An inference result with only an {@code inference_id} (no model settings, no chunks) must fail
 * on the missing required {@code chunks} field.
 */
public void testMissingModelSettingsAndChunks() throws IOException {
    MapperService mapperService = createMapperService(
        mapping(b -> addSemanticTextMapping(b, "field", "my_id", null, null, null)),
        useLegacyFormat
    );
    IllegalArgumentException ex = expectThrows(
        DocumentParsingException.class,
        IllegalArgumentException.class,
        () -> mapperService.documentMapper()
            .parse(
                semanticTextInferenceSource(
                    useLegacyFormat,
                    b -> b.startObject("field").startObject(INFERENCE_FIELD).field(INFERENCE_ID_FIELD, "my_id").endObject().endObject()
                )
            )
    );
    // Model settings may be null here so we only error on chunks
    assertThat(ex.getCause().getMessage(), containsString("Required [chunks]"));
}
/**
 * An empty {@code model_settings} object (no {@code task_type}) must fail to parse with a
 * "failed to parse field [model_settings]" error.
 */
public void testMissingTaskType() throws IOException {
    MapperService mapperService = createMapperService(
        mapping(b -> addSemanticTextMapping(b, "field", "my_id", null, null, null)),
        useLegacyFormat
    );
    IllegalArgumentException ex = expectThrows(
        DocumentParsingException.class,
        IllegalArgumentException.class,
        () -> mapperService.documentMapper()
            .parse(
                semanticTextInferenceSource(
                    useLegacyFormat,
                    b -> b.startObject("field")
                        .startObject(INFERENCE_FIELD)
                        .field(INFERENCE_ID_FIELD, "my_id")
                        // Intentionally empty model_settings: task_type is required.
                        .startObject(MODEL_SETTINGS_FIELD)
                        .endObject()
                        .endObject()
                        .endObject()
                )
            )
    );
    assertThat(ex.getCause().getMessage(), containsString("failed to parse field [model_settings]"));
}
/**
 * Verifies that the dense-vector element type configured in the model settings (FLOAT and BYTE)
 * round-trips through mapping creation and is reported by the semantic_text field type.
 */
public void testDenseVectorElementType() throws IOException {
    final String fieldName = "field";
    final String inferenceId = "test_service";

    // Build one mapper service per element type and check the parsed model settings preserve it.
    for (var elementType : new DenseVectorFieldMapper.ElementType[] {
        DenseVectorFieldMapper.ElementType.FLOAT,
        DenseVectorFieldMapper.ElementType.BYTE }) {
        MapperService mapperService = mapperServiceForFieldWithModelSettings(
            fieldName,
            inferenceId,
            new MinimalServiceSettings("my-service", TaskType.TEXT_EMBEDDING, 1024, SimilarityMeasure.COSINE, elementType)
        );
        SemanticTextFieldMapper semanticTextFieldMapper = getSemanticFieldMapper(mapperService, fieldName);
        assertThat(semanticTextFieldMapper.fieldType().getModelSettings().elementType(), equalTo(elementType));
    }
}
/**
 * Verifies that explicit chunking settings are applied when a semantic_text field is created and
 * that a mapping merge can replace them with a different chunking configuration.
 */
public void testSettingAndUpdatingChunkingSettings() throws IOException {
    final Model model = TestModel.createRandomInstance(TaskType.SPARSE_EMBEDDING);
    when(globalModelRegistry.getMinimalServiceSettings(anyString())).thenAnswer(invocation -> new MinimalServiceSettings(model));

    final ChunkingSettings initialChunkingSettings = generateRandomChunkingSettings(false);
    final SemanticTextIndexOptions indexOptions = randomSemanticTextIndexOptions(TaskType.SPARSE_EMBEDDING);
    final String fieldName = "field";

    // Create the field with an explicit chunking configuration and verify it took effect.
    MapperService mapperService = createMapperService(
        mapping(b -> addSemanticTextMapping(b, fieldName, model.getInferenceEntityId(), null, initialChunkingSettings, indexOptions)),
        useLegacyFormat
    );
    assertSemanticTextField(mapperService, fieldName, false, initialChunkingSettings, indexOptions);

    // Merge in a different chunking configuration and verify the update is applied.
    final ChunkingSettings updatedChunkingSettings = generateRandomChunkingSettingsOtherThan(initialChunkingSettings);
    merge(
        mapperService,
        mapping(b -> addSemanticTextMapping(b, fieldName, model.getInferenceEntityId(), null, updatedChunkingSettings, indexOptions))
    );
    assertSemanticTextField(mapperService, fieldName, false, updatedChunkingSettings, indexOptions);
}
/**
 * Parsing an inference result whose chunks are populated but whose model settings are null must
 * fail: model settings are required whenever chunks are provided.
 */
public void testModelSettingsRequiredWithChunks() throws IOException {
    // Create inference results where model settings are set to null and chunks are provided
    TaskType taskType = TaskType.SPARSE_EMBEDDING;
    Model model = TestModel.createRandomInstance(taskType);
    when(globalModelRegistry.getMinimalServiceSettings(anyString())).thenAnswer(
        invocation -> { return new MinimalServiceSettings(model); }
    );
    ChunkingSettings chunkingSettings = generateRandomChunkingSettings(false);
    SemanticTextIndexOptions indexOptions = randomSemanticTextIndexOptions(taskType);
    SemanticTextField randomSemanticText = randomSemanticText(
        useLegacyFormat,
        "field",
        model,
        chunkingSettings,
        List.of("a"),
        XContentType.JSON
    );
    // Copy the random semantic text but null out the model settings while keeping the chunks.
    SemanticTextField inferenceResults = new SemanticTextField(
        randomSemanticText.useLegacyFormat(),
        randomSemanticText.fieldName(),
        randomSemanticText.originalValues(),
        new SemanticTextField.InferenceResult(
            randomSemanticText.inference().inferenceId(),
            null,
            randomSemanticText.inference().chunkingSettings(),
            randomSemanticText.inference().chunks()
        ),
        randomSemanticText.contentType()
    );

    MapperService mapperService = createMapperService(
        mapping(b -> addSemanticTextMapping(b, "field", model.getInferenceEntityId(), null, chunkingSettings, indexOptions)),
        useLegacyFormat
    );
    SourceToParse source = source(b -> addSemanticTextInferenceResults(useLegacyFormat, b, List.of(inferenceResults)));
    DocumentParsingException ex = expectThrows(
        DocumentParsingException.class,
        DocumentParsingException.class,
        () -> mapperService.documentMapper().parse(source)
    );
    assertThat(ex.getMessage(), containsString("[model_settings] must be set for field [field] when chunks are provided"));
}
/**
 * On an index created before 8.11 (pre {@code NEW_SPARSE_VECTOR}), indexing text-embedding
 * (dense vector) inference results into a semantic_text field must raise an
 * UnsupportedOperationException with {@code UNSUPPORTED_INDEX_MESSAGE}.
 */
public void testPre811IndexSemanticTextDenseVectorRaisesError() throws IOException {
    Model model = TestModel.createRandomInstance(TaskType.TEXT_EMBEDDING);
    String fieldName = randomAlphaOfLength(8);
    // Create the index in the 8.0.0 .. pre-NEW_SPARSE_VECTOR version range.
    MapperService mapperService = createMapperService(
        mapping(
            b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", model.getInferenceEntityId()).endObject()
        ),
        true,
        IndexVersions.V_8_0_0,
        IndexVersionUtils.getPreviousVersion(IndexVersions.NEW_SPARSE_VECTOR)
    );
    assertSemanticTextField(mapperService, fieldName, false, null, null);
    // Merging the model settings succeeds; only document parsing is rejected.
    merge(
        mapperService,
        mapping(
            b -> b.startObject(fieldName)
                .field("type", "semantic_text")
                .field("inference_id", model.getInferenceEntityId())
                .startObject("model_settings")
                .field("task_type", TaskType.TEXT_EMBEDDING.toString())
                .field("dimensions", model.getServiceSettings().dimensions())
                .field("similarity", model.getServiceSettings().similarity())
                .field("element_type", model.getServiceSettings().elementType())
                .endObject()
                .endObject()
        )
    );
    assertSemanticTextField(mapperService, fieldName, true, null, null);

    DocumentMapper documentMapper = mapperService.documentMapper();
    DocumentParsingException e = assertThrows(
        DocumentParsingException.class,
        () -> documentMapper.parse(
            source(
                b -> addSemanticTextInferenceResults(
                    true,
                    b,
                    List.of(randomSemanticText(true, fieldName, model, null, List.of("foo", "bar"), XContentType.JSON))
                )
            )
        )
    );
    assertThat(e.getCause(), instanceOf(UnsupportedOperationException.class));
    assertThat(e.getCause().getMessage(), equalTo(UNSUPPORTED_INDEX_MESSAGE));
}
/**
 * On an index created before 8.11 (pre {@code NEW_SPARSE_VECTOR}), indexing sparse-embedding
 * inference results into a semantic_text field must raise an UnsupportedOperationException
 * with {@code UNSUPPORTED_INDEX_MESSAGE}.
 */
public void testPre811IndexSemanticTextSparseVectorRaisesError() throws IOException {
    Model model = TestModel.createRandomInstance(TaskType.SPARSE_EMBEDDING);
    String fieldName = randomAlphaOfLength(8);
    // Create the index in the 8.0.0 .. pre-NEW_SPARSE_VECTOR version range.
    MapperService mapperService = createMapperService(
        mapping(
            b -> b.startObject(fieldName).field("type", "semantic_text").field("inference_id", model.getInferenceEntityId()).endObject()
        ),
        true,
        IndexVersions.V_8_0_0,
        IndexVersionUtils.getPreviousVersion(IndexVersions.NEW_SPARSE_VECTOR)
    );
    assertSemanticTextField(mapperService, fieldName, false, null, null);
    // Merging the model settings succeeds; only document parsing is rejected.
    merge(
        mapperService,
        mapping(
            b -> b.startObject(fieldName)
                .field("type", "semantic_text")
                .field("inference_id", model.getInferenceEntityId())
                .startObject("model_settings")
                .field("task_type", TaskType.SPARSE_EMBEDDING.toString())
                .endObject()
                .endObject()
        )
    );
    assertSemanticTextField(mapperService, fieldName, true, null, null);

    DocumentMapper documentMapper = mapperService.documentMapper();
    DocumentParsingException e = assertThrows(
        DocumentParsingException.class,
        () -> documentMapper.parse(
            source(
                b -> addSemanticTextInferenceResults(
                    true,
                    b,
                    List.of(randomSemanticText(true, fieldName, model, null, List.of("foo", "bar"), XContentType.JSON))
                )
            )
        )
    );
    assertThat(e.getCause(), instanceOf(UnsupportedOperationException.class));
    assertThat(e.getCause().getMessage(), equalTo(UNSUPPORTED_INDEX_MESSAGE));
}
/**
 * Convenience overload of {@code mapperServiceForFieldWithModelSettings} that configures no
 * separate search inference id.
 */
private MapperService mapperServiceForFieldWithModelSettings(String fieldName, String inferenceId, MinimalServiceSettings modelSettings)
    throws IOException {
    return mapperServiceForFieldWithModelSettings(fieldName, inferenceId, null, modelSettings);
}
/**
 * Builds a MapperService containing a semantic_text field whose model settings have been resolved,
 * by merging the field mapping, parsing an empty inference result for it, and merging the dynamic
 * mapping update produced by that parse.
 *
 * @param fieldName         name of the semantic_text field
 * @param inferenceId       inference endpoint id for the field
 * @param searchInferenceId optional search-time inference endpoint id; ignored when null
 * @param modelSettings     minimal model settings to embed in the parsed inference result
 */
private MapperService mapperServiceForFieldWithModelSettings(
    String fieldName,
    String inferenceId,
    String searchInferenceId,
    MinimalServiceSettings modelSettings
) throws IOException {
    String mappingParams = "type=semantic_text,inference_id=" + inferenceId;
    if (searchInferenceId != null) {
        mappingParams += ",search_inference_id=" + searchInferenceId;
    }

    MapperService mapperService = createMapperService(mapping(b -> {}), useLegacyFormat);
    mapperService.merge(
        "_doc",
        new CompressedXContent(Strings.toString(PutMappingRequest.simpleMapping(fieldName, mappingParams))),
        MapperService.MergeReason.MAPPING_UPDATE
    );

    // An empty inference result (no values, no chunks) that still carries the model settings.
    SemanticTextField semanticTextField = new SemanticTextField(
        useLegacyFormat,
        fieldName,
        List.of(),
        new SemanticTextField.InferenceResult(inferenceId, modelSettings, generateRandomChunkingSettings(), Map.of()),
        XContentType.JSON
    );
    XContentBuilder builder = JsonXContent.contentBuilder().startObject();
    if (useLegacyFormat) {
        // Legacy format: inference result stored directly under the field name.
        builder.field(semanticTextField.fieldName());
        builder.value(semanticTextField);
    } else {
        // New format: inference results live under the _inference_fields metadata field.
        builder.field(InferenceMetadataFieldsMapper.NAME, Map.of(semanticTextField.fieldName(), semanticTextField));
    }
    builder.endObject();

    SourceToParse sourceToParse = new SourceToParse("test", BytesReference.bytes(builder), XContentType.JSON);
    ParsedDocument parsedDocument = mapperService.documentMapper().parse(sourceToParse);
    // Apply the dynamic mapping update so the model settings become part of the mapping.
    mapperService.merge(
        "_doc",
        parsedDocument.dynamicMappingsUpdate().toCompressedXContent(),
        MapperService.MergeReason.MAPPING_UPDATE
    );
    return mapperService;
}
/**
 * An exists query on a sparse-embedding semantic_text field must be a block-join query that maps
 * matching nested chunk docs back to their parent document.
 */
public void testExistsQuerySparseVector() throws IOException {
    MapperService mapperService = mapperServiceForFieldWithModelSettings(
        "semantic",
        "test_service",
        new MinimalServiceSettings("my-service", TaskType.SPARSE_EMBEDDING, null, null, null)
    );

    Mapper fieldMapper = mapperService.mappingLookup().getMapper("semantic");
    assertNotNull(fieldMapper);
    SearchExecutionContext context = createSearchExecutionContext(mapperService);
    Query existsQuery = ((SemanticTextFieldMapper) fieldMapper).fieldType().existsQuery(context);
    assertThat(existsQuery, instanceOf(ESToParentBlockJoinQuery.class));
}
/**
 * An exists query on a text-embedding (dense vector) semantic_text field must be a block-join
 * query that maps matching nested chunk docs back to their parent document.
 */
public void testExistsQueryDenseVector() throws IOException {
    MapperService mapperService = mapperServiceForFieldWithModelSettings(
        "semantic",
        "test_service",
        new MinimalServiceSettings(
            "my-service",
            TaskType.TEXT_EMBEDDING,
            1024,
            SimilarityMeasure.COSINE,
            DenseVectorFieldMapper.ElementType.FLOAT
        )
    );

    Mapper fieldMapper = mapperService.mappingLookup().getMapper("semantic");
    assertNotNull(fieldMapper);
    SearchExecutionContext context = createSearchExecutionContext(mapperService);
    Query existsQuery = ((SemanticTextFieldMapper) fieldMapper).fieldType().existsQuery(context);
    assertThat(existsQuery, instanceOf(ESToParentBlockJoinQuery.class));
}
/**
 * Default dense_vector index options (int8_hnsw with Lucene's default HNSW parameters), used for
 * semantic_text fields that are not eligible for BBQ.
 */
private static DenseVectorFieldMapper.DenseVectorIndexOptions defaultDenseVectorIndexOptions() {
    return new DenseVectorFieldMapper.Int8HnswIndexOptions(
        Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN,
        Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH,
        null,
        false,
        null
    );
}
/** Wraps {@link #defaultDenseVectorIndexOptions()} as semantic_text index options. */
private static SemanticTextIndexOptions defaultDenseVectorSemanticIndexOptions() {
    return new SemanticTextIndexOptions(
        SemanticTextIndexOptions.SupportedIndexOptions.DENSE_VECTOR,
        defaultDenseVectorIndexOptions()
    );
}
/**
 * Default BBQ HNSW dense_vector index options: Lucene's default HNSW parameters plus a rescore
 * vector with the default oversample factor.
 */
private static DenseVectorFieldMapper.DenseVectorIndexOptions defaultBbqHnswDenseVectorIndexOptions() {
    var rescoreVector = new DenseVectorFieldMapper.RescoreVector(DEFAULT_RESCORE_OVERSAMPLE);
    return new DenseVectorFieldMapper.BBQHnswIndexOptions(
        Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN,
        Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH,
        false,
        rescoreVector
    );
}
/** Wraps {@link #defaultBbqHnswDenseVectorIndexOptions()} as semantic_text index options. */
private static SemanticTextIndexOptions defaultBbqHnswSemanticTextIndexOptions() {
    return new SemanticTextIndexOptions(
        SemanticTextIndexOptions.SupportedIndexOptions.DENSE_VECTOR,
        defaultBbqHnswDenseVectorIndexOptions()
    );
}
/** Default sparse_vector semantic_text index options for the given index version. */
private static SemanticTextIndexOptions defaultSparseVectorIndexOptions(IndexVersion indexVersion) {
    var sparseDefaults = SparseVectorFieldMapper.SparseVectorIndexOptions.getDefaultIndexOptions(indexVersion);
    return new SemanticTextIndexOptions(SemanticTextIndexOptions.SupportedIndexOptions.SPARSE_VECTOR, sparseDefaults);
}
/**
 * Verifies the default index options chosen for semantic_text fields across index versions:
 * BBQ HNSW for eligible dense vectors, dense_vector defaults for ineligible ones (wrong element
 * type or too few dimensions) and for pre-BBQ index versions, explicit options when specified,
 * and sparse_vector defaults once pruning index options are supported.
 */
public void testDefaultIndexOptions() throws IOException {

    // We default to BBQ for eligible dense vectors
    var mapperService = createMapperService(fieldMapping(b -> {
        b.field("type", "semantic_text");
        b.field("inference_id", "another_inference_id");
        b.startObject("model_settings");
        b.field("task_type", "text_embedding");
        b.field("dimensions", 100);
        b.field("similarity", "cosine");
        b.field("element_type", "float");
        b.endObject();
    }), useLegacyFormat, IndexVersions.SEMANTIC_TEXT_DEFAULTS_TO_BBQ);
    assertSemanticTextField(mapperService, "field", true, null, defaultBbqHnswSemanticTextIndexOptions());

    // Element types that are incompatible with BBQ will continue to use dense_vector defaults
    mapperService = createMapperService(fieldMapping(b -> {
        b.field("type", "semantic_text");
        b.field("inference_id", "another_inference_id");
        b.startObject("model_settings");
        b.field("task_type", "text_embedding");
        b.field("dimensions", 100);
        b.field("similarity", "cosine");
        b.field("element_type", "byte");
        b.endObject();
    }), useLegacyFormat, IndexVersions.SEMANTIC_TEXT_DEFAULTS_TO_BBQ);
    assertSemanticTextField(mapperService, "field", true, null, null);

    // A dim count of 10 is too small to support BBQ, so we continue to use dense_vector defaults
    mapperService = createMapperService(fieldMapping(b -> {
        b.field("type", "semantic_text");
        b.field("inference_id", "another_inference_id");
        b.startObject("model_settings");
        b.field("task_type", "text_embedding");
        b.field("dimensions", 10);
        b.field("similarity", "cosine");
        b.field("element_type", "float");
        b.endObject();
    }), useLegacyFormat, IndexVersions.SEMANTIC_TEXT_DEFAULTS_TO_BBQ);
    assertSemanticTextField(
        mapperService,
        "field",
        true,
        null,
        new SemanticTextIndexOptions(SemanticTextIndexOptions.SupportedIndexOptions.DENSE_VECTOR, defaultDenseVectorIndexOptions())
    );

    // If we explicitly set index options, we respect those over the defaults
    mapperService = createMapperService(fieldMapping(b -> {
        b.field("type", "semantic_text");
        b.field("inference_id", "another_inference_id");
        b.startObject("model_settings");
        b.field("task_type", "text_embedding");
        b.field("dimensions", 100);
        b.field("similarity", "cosine");
        b.field("element_type", "float");
        b.endObject();
        b.startObject("index_options");
        b.startObject("dense_vector");
        b.field("type", "int4_hnsw");
        b.field("m", 25);
        b.field("ef_construction", 100);
        b.endObject();
        b.endObject();
    }), useLegacyFormat, IndexVersions.SEMANTIC_TEXT_DEFAULTS_TO_BBQ);
    assertSemanticTextField(
        mapperService,
        "field",
        true,
        null,
        new SemanticTextIndexOptions(
            SemanticTextIndexOptions.SupportedIndexOptions.DENSE_VECTOR,
            new DenseVectorFieldMapper.Int4HnswIndexOptions(25, 100, null, false, null)
        )
    );

    // Previous index versions do not set BBQ index options
    mapperService = createMapperService(fieldMapping(b -> {
        b.field("type", "semantic_text");
        b.field("inference_id", "another_inference_id");
        b.startObject("model_settings");
        b.field("task_type", "text_embedding");
        b.field("dimensions", 100);
        b.field("similarity", "cosine");
        b.field("element_type", "float");
        b.endObject();
    }),
        useLegacyFormat,
        IndexVersions.INFERENCE_METADATA_FIELDS,
        IndexVersionUtils.getPreviousVersion(IndexVersions.SEMANTIC_TEXT_DEFAULTS_TO_BBQ)
    );
    assertSemanticTextField(mapperService, "field", true, null, defaultDenseVectorSemanticIndexOptions());

    // 8.x index versions that use backported default BBQ set default BBQ index options as expected
    mapperService = createMapperService(fieldMapping(b -> {
        b.field("type", "semantic_text");
        b.field("inference_id", "another_inference_id");
        b.startObject("model_settings");
        b.field("task_type", "text_embedding");
        b.field("dimensions", 100);
        b.field("similarity", "cosine");
        b.field("element_type", "float");
        b.endObject();
    }),
        useLegacyFormat,
        IndexVersions.SEMANTIC_TEXT_DEFAULTS_TO_BBQ_BACKPORT_8_X,
        IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)
    );
    assertSemanticTextField(mapperService, "field", true, null, defaultBbqHnswSemanticTextIndexOptions());

    // Previous 8.x index versions do not set BBQ index options
    mapperService = createMapperService(fieldMapping(b -> {
        b.field("type", "semantic_text");
        b.field("inference_id", "another_inference_id");
        b.startObject("model_settings");
        b.field("task_type", "text_embedding");
        b.field("dimensions", 100);
        b.field("similarity", "cosine");
        b.field("element_type", "float");
        b.endObject();
    }),
        useLegacyFormat,
        IndexVersions.INFERENCE_METADATA_FIELDS_BACKPORT,
        IndexVersionUtils.getPreviousVersion(IndexVersions.SEMANTIC_TEXT_DEFAULTS_TO_BBQ_BACKPORT_8_X)
    );
    assertSemanticTextField(mapperService, "field", true, null, defaultDenseVectorSemanticIndexOptions());

    // Sparse embeddings pick up the sparse_vector defaults once pruning index options are supported.
    mapperService = createMapperService(fieldMapping(b -> {
        b.field("type", "semantic_text");
        b.field("inference_id", "another_inference_id");
        b.startObject("model_settings");
        b.field("task_type", "sparse_embedding");
        b.endObject();
    }),
        useLegacyFormat,
        IndexVersionUtils.getPreviousVersion(IndexVersions.SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_SUPPORT),
        IndexVersions.SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_SUPPORT
    );
    assertSemanticTextField(
        mapperService,
        "field",
        true,
        null,
        defaultSparseVectorIndexOptions(mapperService.getIndexSettings().getIndexVersionCreated())
    );
}
/**
 * Indices created before sparse_vector pruning index options were supported must not get any
 * default index options for sparse-embedding semantic_text fields.
 */
public void testSparseVectorIndexOptionsDefaultsBeforeSupport() throws IOException {
    var mapperService = createMapperService(fieldMapping(b -> {
        b.field("type", "semantic_text");
        b.field("inference_id", "another_inference_id");
        b.startObject("model_settings");
        b.field("task_type", "sparse_embedding");
        b.endObject();
    }),
        useLegacyFormat,
        IndexVersions.INFERENCE_METADATA_FIELDS,
        IndexVersionUtils.getPreviousVersion(IndexVersions.SPARSE_VECTOR_PRUNING_INDEX_OPTIONS_SUPPORT)
    );

    assertSemanticTextField(mapperService, "field", true, null, null);
}
/**
 * Verifies that explicitly specified dense_vector index options override the defaults, that
 * partially specified options are completed with defaults, and that incompatible or invalid
 * index options fail mapping parsing with descriptive errors.
 */
public void testSpecifiedDenseVectorIndexOptions() throws IOException {

    // Specifying index options will override default index option settings
    var mapperService = createMapperService(fieldMapping(b -> {
        b.field("type", "semantic_text");
        b.field("inference_id", "another_inference_id");
        b.startObject("model_settings");
        b.field("task_type", "text_embedding");
        b.field("dimensions", 100);
        b.field("similarity", "cosine");
        b.field("element_type", "float");
        b.endObject();
        b.startObject("index_options");
        b.startObject("dense_vector");
        b.field("type", "int4_hnsw");
        b.field("m", 20);
        b.field("ef_construction", 90);
        b.field("confidence_interval", 0.4);
        b.endObject();
        b.endObject();
    }), useLegacyFormat, IndexVersions.INFERENCE_METADATA_FIELDS_BACKPORT);
    assertSemanticTextField(
        mapperService,
        "field",
        true,
        null,
        new SemanticTextIndexOptions(
            SemanticTextIndexOptions.SupportedIndexOptions.DENSE_VECTOR,
            new DenseVectorFieldMapper.Int4HnswIndexOptions(20, 90, 0.4f, false, null)
        )
    );

    // Specifying partial index options will fill in the remainder of the index options with defaults
    mapperService = createMapperService(fieldMapping(b -> {
        b.field("type", "semantic_text");
        b.field("inference_id", "another_inference_id");
        b.startObject("model_settings");
        b.field("task_type", "text_embedding");
        b.field("dimensions", 100);
        b.field("similarity", "cosine");
        b.field("element_type", "float");
        b.endObject();
        b.startObject("index_options");
        b.startObject("dense_vector");
        b.field("type", "int4_hnsw");
        b.endObject();
        b.endObject();
    }), useLegacyFormat, IndexVersions.INFERENCE_METADATA_FIELDS_BACKPORT);
    assertSemanticTextField(
        mapperService,
        "field",
        true,
        null,
        new SemanticTextIndexOptions(
            SemanticTextIndexOptions.SupportedIndexOptions.DENSE_VECTOR,
            new DenseVectorFieldMapper.Int4HnswIndexOptions(16, 100, 0f, false, null)
        )
    );

    // Incompatible index options will fail: dense_vector options on a sparse_embedding field
    Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> {
        b.field("type", "semantic_text");
        b.field("inference_id", "another_inference_id");
        b.startObject("model_settings");
        b.field("task_type", "sparse_embedding");
        b.endObject();
        b.startObject("index_options");
        b.startObject("dense_vector");
        b.field("type", "int8_hnsw");
        b.endObject();
        b.endObject();
    }), useLegacyFormat, IndexVersions.INFERENCE_METADATA_FIELDS_BACKPORT));
    assertThat(e.getMessage(), containsString("Invalid task type"));

    // bbq_flat does not accept ef_construction
    e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> {
        b.field("type", "semantic_text");
        b.field("inference_id", "another_inference_id");
        b.startObject("model_settings");
        b.field("task_type", "text_embedding");
        b.field("dimensions", 100);
        b.field("similarity", "cosine");
        b.field("element_type", "float");
        b.endObject();
        b.startObject("index_options");
        b.startObject("dense_vector");
        b.field("type", "bbq_flat");
        b.field("ef_construction", 100);
        b.endObject();
        b.endObject();
    }), useLegacyFormat, IndexVersions.INFERENCE_METADATA_FIELDS_BACKPORT));
    assertThat(e.getMessage(), containsString("unsupported parameters: [ef_construction : 100]"));

    // Unknown index options type is rejected
    e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> {
        b.field("type", "semantic_text");
        b.field("inference_id", "another_inference_id");
        b.startObject("model_settings");
        b.field("task_type", "text_embedding");
        b.field("dimensions", 100);
        b.field("similarity", "cosine");
        b.field("element_type", "float");
        b.endObject();
        b.startObject("index_options");
        b.startObject("dense_vector");
        b.field("type", "invalid");
        b.endObject();
        b.endObject();
    }), useLegacyFormat, IndexVersions.INFERENCE_METADATA_FIELDS_BACKPORT));
    assertThat(e.getMessage(), containsString("Unsupported index options type invalid"));
}
/**
 * Verifies (over 10 random iterations) that explicitly specified sparse_vector index options
 * round-trip through mapping creation unchanged.
 */
public void testSpecificSparseVectorIndexOptions() throws IOException {
    for (int i = 0; i < 10; i++) {
        SparseVectorFieldMapper.SparseVectorIndexOptions testIndexOptions = randomSparseVectorIndexOptions(false);

        var mapperService = createMapperService(fieldMapping(b -> {
            b.field("type", SemanticTextFieldMapper.CONTENT_TYPE);
            b.field(INFERENCE_ID_FIELD, "test_inference_id");
            addSparseVectorModelSettingsToBuilder(b);
            b.startObject(INDEX_OPTIONS_FIELD);
            {
                // Serialize the random index options under the "sparse_vector" key.
                b.field(SparseVectorFieldMapper.CONTENT_TYPE);
                testIndexOptions.toXContent(b, null);
            }
            b.endObject();
        }), useLegacyFormat, IndexVersions.INFERENCE_METADATA_FIELDS_BACKPORT);

        assertSemanticTextField(
            mapperService,
            "field",
            true,
            null,
            new SemanticTextIndexOptions(SemanticTextIndexOptions.SupportedIndexOptions.SPARSE_VECTOR, testIndexOptions)
        );
    }
}
public void testSparseVectorIndexOptionsValidations() throws IOException {
Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> {
b.field("type", SemanticTextFieldMapper.CONTENT_TYPE);
b.field(INFERENCE_ID_FIELD, "test_inference_id");
b.startObject(INDEX_OPTIONS_FIELD);
{
b.startObject(SparseVectorFieldMapper.CONTENT_TYPE);
{
b.field("prune", false);
b.startObject("pruning_config");
{
b.field(TokenPruningConfig.TOKENS_FREQ_RATIO_THRESHOLD.getPreferredName(), 5.0f);
}
b.endObject();
}
b.endObject();
}
b.endObject();
}), useLegacyFormat, IndexVersions.INFERENCE_METADATA_FIELDS_BACKPORT));
assertThat(e.getMessage(), containsString("failed to parse field [pruning_config]"));
e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> {
b.field("type", SemanticTextFieldMapper.CONTENT_TYPE);
b.field(INFERENCE_ID_FIELD, "test_inference_id");
b.startObject(INDEX_OPTIONS_FIELD);
{
b.startObject(SparseVectorFieldMapper.CONTENT_TYPE);
{
b.field("prune", true);
b.startObject("pruning_config");
{
b.field(TokenPruningConfig.TOKENS_FREQ_RATIO_THRESHOLD.getPreferredName(), 1000.0f);
}
b.endObject();
}
b.endObject();
}
b.endObject();
}), useLegacyFormat, IndexVersions.INFERENCE_METADATA_FIELDS_BACKPORT));
var innerClause = e.getCause().getCause().getCause().getCause();
assertThat(innerClause.getMessage(), containsString("[tokens_freq_ratio_threshold] must be between [1] and [100], got 1000.0"));
}
public static SemanticTextIndexOptions randomSemanticTextIndexOptions() {
TaskType taskType = randomFrom(TaskType.SPARSE_EMBEDDING, TaskType.TEXT_EMBEDDING);
return randomSemanticTextIndexOptions(taskType);
}
public static SemanticTextIndexOptions randomSemanticTextIndexOptions(TaskType taskType) {
if (taskType == TaskType.TEXT_EMBEDDING) {
return randomBoolean()
? null
: new SemanticTextIndexOptions(SemanticTextIndexOptions.SupportedIndexOptions.DENSE_VECTOR, randomIndexOptionsAll());
}
if (taskType == TaskType.SPARSE_EMBEDDING) {
return randomBoolean()
? null
: new SemanticTextIndexOptions(
SemanticTextIndexOptions.SupportedIndexOptions.SPARSE_VECTOR,
randomSparseVectorIndexOptions(false)
);
}
return null;
}
@Override
protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneDocument fields) {
// Until a doc is indexed, the query is rewritten as match no docs
assertThat(query, instanceOf(MatchNoDocsQuery.class));
}
private static void addSemanticTextMapping(
XContentBuilder mappingBuilder,
String fieldName,
String inferenceId,
String searchInferenceId,
ChunkingSettings chunkingSettings,
SemanticTextIndexOptions indexOptions
) throws IOException {
mappingBuilder.startObject(fieldName);
mappingBuilder.field("type", SemanticTextFieldMapper.CONTENT_TYPE);
mappingBuilder.field("inference_id", inferenceId);
if (searchInferenceId != null) {
mappingBuilder.field("search_inference_id", searchInferenceId);
}
if (chunkingSettings != null) {
mappingBuilder.startObject("chunking_settings");
mappingBuilder.mapContents(chunkingSettings.asMap());
mappingBuilder.endObject();
}
if (indexOptions != null) {
mappingBuilder.field(INDEX_OPTIONS_FIELD);
indexOptions.toXContent(mappingBuilder, null);
}
mappingBuilder.endObject();
}
public static void addSemanticTextInferenceResults(
boolean useLegacyFormat,
XContentBuilder sourceBuilder,
List<SemanticTextField> semanticTextInferenceResults
) throws IOException {
if (useLegacyFormat) {
for (var field : semanticTextInferenceResults) {
sourceBuilder.field(field.fieldName());
sourceBuilder.value(field);
}
} else {
// Use a linked hash map to maintain insertion-order iteration over the inference fields
Map<String, Object> inferenceMetadataFields = new LinkedHashMap<>();
for (var field : semanticTextInferenceResults) {
inferenceMetadataFields.put(field.fieldName(), field);
}
sourceBuilder.field(InferenceMetadataFieldsMapper.NAME, inferenceMetadataFields);
}
}
static String randomFieldName(int numLevel) {
StringBuilder builder = new StringBuilder();
for (int i = 0; i < numLevel; i++) {
if (i > 0) {
builder.append('.');
}
builder.append(randomAlphaOfLengthBetween(5, 15));
}
return builder.toString();
}
private static Query generateNestedTermSparseVectorQuery(NestedLookup nestedLookup, String fieldName, List<String> tokens) {
NestedObjectMapper mapper = nestedLookup.getNestedMappers().get(getChunksFieldName(fieldName));
assertNotNull(mapper);
BitSetProducer parentFilter = new QueryBitSetProducer(Queries.newNonNestedFilter(IndexVersion.current()));
BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder();
for (String token : tokens) {
queryBuilder.add(
new BooleanClause(new TermQuery(new Term(getEmbeddingsFieldName(fieldName), token)), BooleanClause.Occur.MUST)
);
}
queryBuilder.add(new BooleanClause(mapper.nestedTypeFilter(), BooleanClause.Occur.FILTER));
return new ESToParentBlockJoinQuery(
new SparseVectorQueryWrapper(fieldName, queryBuilder.build()),
parentFilter,
ScoreMode.Total,
null
);
}
private static SourceToParse semanticTextInferenceSource(boolean useLegacyFormat, CheckedConsumer<XContentBuilder, IOException> build)
throws IOException {
return source(b -> {
if (useLegacyFormat == false) {
b.startObject(InferenceMetadataFieldsMapper.NAME);
}
build.accept(b);
if (useLegacyFormat == false) {
b.endObject();
}
});
}
private static void assertChildLeafNestedDocument(
LeafNestedDocuments leaf,
int advanceToDoc,
int expectedRootDoc,
Set<SearchHit.NestedIdentity> visitedNestedIdentities
) throws IOException {
assertNotNull(leaf.advance(advanceToDoc));
assertEquals(advanceToDoc, leaf.doc());
assertEquals(expectedRootDoc, leaf.rootDoc());
assertNotNull(leaf.nestedIdentity());
visitedNestedIdentities.add(leaf.nestedIdentity());
}
private static void assertSparseFeatures(LuceneDocument doc, String fieldName, int expectedCount) {
int count = 0;
for (IndexableField field : doc.getFields()) {
if (field instanceof FeatureField featureField) {
assertThat(featureField.name(), equalTo(fieldName));
++count;
}
}
assertThat(count, equalTo(expectedCount));
}
private void givenModelSettings(String inferenceId, MinimalServiceSettings modelSettings) {
when(globalModelRegistry.getMinimalServiceSettings(inferenceId)).thenReturn(modelSettings);
}
@Override
protected List<SortShortcutSupport> getSortShortcutSupport() {
return List.of();
}
@Override
protected boolean supportsDocValuesSkippers() {
return false;
}
}
| SemanticTextFieldMapperTests |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/discovery/DiscoverySelectorResolverTests.java | {
"start": 38502,
"end": 38602
} | class ____ {
@Test
void test() {
}
}
}
@SuppressWarnings("NewClassNamingConvention")
| NestedClass |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/bean/override/convention/TestBeanOverrideHandlerTests.java | {
"start": 5648,
"end": 5749
} | class ____ {
@TestBean
String message;
}
@SuppressWarnings("unused")
static | SampleMissingMethod |
java | elastic__elasticsearch | x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformNodeStatsAction.java | {
"start": 972,
"end": 1607
} | class ____ extends BaseRestHandler {
@Override
public List<Route> routes() {
return List.of(new Route(GET, TransformField.REST_BASE_PATH_TRANSFORMS + "_node_stats"));
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) {
NodesStatsRequest request = new NodesStatsRequest();
return channel -> client.execute(GetTransformNodeStatsAction.INSTANCE, request, new RestToXContentListener<>(channel));
}
@Override
public String getName() {
return "transform_get_transform_node_stats_action";
}
}
| RestGetTransformNodeStatsAction |
java | quarkusio__quarkus | extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/singlepersistenceunit/SinglePersistenceUnitPackageAnnotationTest.java | {
"start": 843,
"end": 3414
} | class ____ {
private static final Formatter LOG_FORMATTER = new PatternFormatter("%s");
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addPackage(EntityIncludedThroughPackageAnnotation.class.getPackage().getName())
.addPackage(ExcludedEntity.class.getPackage().getName()))
.withConfigurationResource("application.properties")
// Expect a warning on startup
.setLogRecordPredicate(
record -> record.getMessage().contains("Could not find a suitable persistence unit for model classes"))
.assertLogRecords(records -> assertThat(records)
.as("Warnings on startup")
.hasSize(1)
.element(0).satisfies(record -> {
assertThat(record.getLevel()).isEqualTo(Level.WARNING);
assertThat(LOG_FORMATTER.formatMessage(record))
.contains(
io.quarkus.hibernate.reactive.singlepersistenceunit.entityassignment.excludedpackage.ExcludedEntity.class
.getName());
}));
@Inject
Mutiny.SessionFactory sessionFactory;
@Test
@RunOnVertxContext
public void testIncluded(UniAsserter asserter) {
EntityIncludedThroughPackageAnnotation entity = new EntityIncludedThroughPackageAnnotation("default-reactive");
asserter.assertThat(
() -> {
return persist(entity).chain(() -> {
return find(
EntityIncludedThroughPackageAnnotation.class,
entity.id);
});
},
retrievedEntity -> assertThat(retrievedEntity.name).isEqualTo(entity.name));
}
@Test
@RunOnVertxContext
public void testExcluded(UniAsserter asserter) {
ExcludedEntity entity = new ExcludedEntity("gsmet");
asserter.assertFailedWith(() -> persist(entity), t -> {
assertThat(t).hasMessageContaining("Unknown entity type");
});
}
private Uni<Void> persist(Object entity) {
return sessionFactory.withTransaction(s -> s.persist(entity));
}
private <T> Uni<T> find(Class<T> entityClass, Object id) {
return sessionFactory.withSession(s -> s.find(entityClass, id));
}
}
| SinglePersistenceUnitPackageAnnotationTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/indices/analysis/wrappers/SettingsInvocationHandler.java | {
"start": 1247,
"end": 3493
} | class ____ implements InvocationHandler {
private static Logger LOGGER = LogManager.getLogger(SettingsInvocationHandler.class);
private Settings settings;
private Environment environment;
public SettingsInvocationHandler(Settings settings, Environment environment) {
this.settings = settings;
this.environment = environment;
}
@SuppressWarnings({ "unchecked", "rawtypes" })
public static <T> T create(Settings settings, Class<T> parameterType, Environment environment) {
return (T) Proxy.newProxyInstance(
parameterType.getClassLoader(),
new Class[] { parameterType },
new SettingsInvocationHandler(settings, environment)
);
}
@SuppressForbidden(
reason = "TODO Deprecate any lenient usage of Boolean#parseBoolean https://github.com/elastic/elasticsearch/issues/128993"
)
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
assert method.getAnnotations().length == 1;
Annotation annotation = method.getAnnotations()[0];
if (annotation instanceof IntSetting setting) {
return getValue(Integer::valueOf, setting.path(), setting.defaultValue());
} else if (annotation instanceof LongSetting setting) {
return getValue(Long::valueOf, setting.path(), setting.defaultValue());
} else if (annotation instanceof BooleanSetting setting) {
return getValue(Boolean::valueOf, setting.path(), setting.defaultValue());
} else if (annotation instanceof StringSetting setting) {
return getValue(String::valueOf, setting.path(), setting.defaultValue());
} else if (annotation instanceof ListSetting setting) {
return settings.getAsList(setting.path(), Collections.emptyList());
} else {
throw new IllegalArgumentException("Unrecognised annotation " + annotation);
}
}
private <T> T getValue(Function<String, T> parser, String path, T defaultValue) {
String key = path;
if (settings.get(key) != null) {
return parser.apply(settings.get(key));
}
return defaultValue;
}
}
| SettingsInvocationHandler |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.